code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
-module(word).
-export([new/2, filter/2, dictionary_size/1, cipherword/1, dictionary/1]).
-record(word, {cipherword, dictionary}).
%% A word has a cipherword which is a string from the ciphertext, and
%% a dictionary which is just a list of plaintext binaries the
%% cipherword might match, given some (possibly empty) key. As we
%% tentatively match each cipherword, the key is extended and a new
%% word is constructed for each cipherword with a reduced dictionary.
%% Creates a new word for Cipherword with a dictionary containing all
%% plaintext strings from Dictionary that could match the ciphertext.
%%
%% Delegates to new/3 with an empty key (key:new/0), i.e. no cipher
%% letters mapped yet, so only structural filtering applies.
new(Cipherword, Dictionary) ->
    new(Cipherword, Dictionary, key:new()).
%% Creates a new word for Cipherword with a dictionary containing all
%% plaintext strings from Dictionary that could match the ciphertext
%% with the given Key.
%%
%% The dictionary is filtered eagerly at construction time, so the
%% record always holds only candidates compatible with Key.
new(Cipherword, Dictionary, Key) ->
    Dictionary2 = filter_dictionary(Cipherword, Dictionary, Key),
    #word{cipherword = Cipherword, dictionary = Dictionary2}.
%% Accessor: the ciphertext string this word was built from.
cipherword(This) ->
    This#word.cipherword.

%% Accessor: the remaining list of candidate plaintext words.
dictionary(This) ->
    This#word.dictionary.
%% Creates a new word based on this word, but with the dictionary
%% filtered to strings that are compatible with Key.
%%
%% Re-filtering the already-reduced dictionary (rather than the full
%% original) is what makes iterative key extension cheap.
filter(This, Key) ->
    new(This#word.cipherword, This#word.dictionary, Key).
%% Returns a new Dictionary (which is just a list of strings)
%% containing all the strings of the given Dictionary which are
%% compatible with the given Cipherword string and Key.
%%
%% Compiles the cipherword/key into a regexp once, then keeps only
%% dictionary entries that fully match it ({capture, none} avoids
%% building capture results we would discard anyway).
filter_dictionary(Cipherword, Dictionary, Key) ->
    Regexp = make_regexp(Cipherword, Key),
    [W || W <- Dictionary,
          re:run(W, Regexp, [{capture, none}]) == match].
%% Returns a regular expression that will match all plainwords that
%% are compatible with the given Cipherword and Key, except that
%% multiple cipher letters which are not in Key may match the same
%% plain letter. Those matches need to be tested and rejected later.
%%
%% Some examples when Key is empty:
%% x -> ^[a-z]$
%% xy -> ^[a-z][a-z]$
%% Note that this could match "oo" which is not actually valid
%% because the two letters must be different. This is handled
%% later when extending the Key and it's discovered that "x"
%% and "y" must both map to "o" and the match is rejected.
%% xyx -> ^([a-z])[a-z]\1$
%% Backreferences are used where necessary to ensure that a cipher
%% letter appearing multiple times in the cipherword will match the
%% same plain letter every time.
%% x'y -> ^[a-z]'[a-z]$
%% Non-alpha characters match themselves. This handles words with
%% apostrophes.
%%
%% Cipher letters that are in Key match their corresponding plain
%% letter. E.g., when Key is {x => b}, then xyx -> ^b[a-z]b$.
%%
%% The regexp used for cipher letters that are not yet in the key
%% isn't actually [a-z] as shown in these examples, it's a character
%% class containing all the plain letters that aren't yet in the key.
%%
make_regexp(Cipherword, Key) ->
    % Unknown is a regexp character class that matches any plain letter
    % that's not yet in the key. Unknown cipher letters can match any
    % of these plain letters. (key:values/1 presumably returns the
    % plain letters already claimed by the key.)
    Unknown = "[" ++ (lists:seq($a, $z) -- key:values(Key)) ++ "]",
    make_regexp(Cipherword, Key, Unknown, Cipherword, dict:new(), "").
%% Generates the regexp by recursing through a cipherword and using
%% some additional values to maintain state. Cipherword, Key, and
%% Unknown are constant. _Cw is the list we recurse through, Backrefs
%% is a dict that keeps track of the unknown cipher letters we've seen
%% so we can create backreferences (it's actually a map of cipher
%% letters to backreference strings), and Accum is where we accumulate
%% the regular expression by appending.
%%
%% Base case: cipherword exhausted; anchor the accumulated pattern and
%% return the compiled regexp.
make_regexp(_Cipherword, _Key, _Unknown, _Cw = [], _Backrefs, Accum) ->
    {ok, R} = re:compile("^" ++ Accum ++ "$"),
    R;
make_regexp(Cipherword, Key, Unknown, _Cw = [Letter | Rest], Backrefs, Accum) ->
    %% key:get/3 with default not_alpha: presumably returns the mapped
    %% plain letter, the atom unknown for an unmapped alphabetic letter,
    %% or the not_alpha default for non-letters -- confirm against the
    %% key module.
    case key:get(Key, Letter, not_alpha) of
        unknown ->
            %% Letter does not yet have a mapping. It will match the
            %% set of Unknown letters. But if the letter appears in
            %% Cipherword more than once then use a capture group or,
            %% if we've already created a capture group for this
            %% letter, use its backreference.
            case count(Cipherword, Letter) == 1 of
                true ->
                    %% Letter occurs once, it just matches Unknown.
                    make_regexp(Cipherword, Key, Unknown,
                                Rest, Backrefs, Accum ++ Unknown);
                false ->
                    %% If we've already made a backreference for this
                    %% letter then use it. Otherwise make it and add
                    %% it to Backrefs.
                    case dict:find(Letter, Backrefs) of
                        {ok, Backref} ->
                            make_regexp(Cipherword, Key, Unknown,
                                        Rest, Backrefs, Accum ++ Backref);
                        error ->
                            %% Group numbers are assigned in order of first
                            %% appearance: \1 for the first repeated letter,
                            %% \2 for the next, etc.
                            N = dict:size(Backrefs),
                            Backrefs2 = dict:store(
                                          Letter, ["\\", $1 + N], Backrefs),
                            make_regexp(Cipherword, Key, Unknown,
                                        Rest, Backrefs2,
                                        Accum ++ "(" ++ Unknown ++ ")")
                    end
            end;
        not_alpha ->
            %% Non-alphabetic characters match themselves.
            make_regexp(Cipherword, Key, Unknown,
                        Rest, Backrefs, Accum ++ [Letter]);
        Plainletter ->
            %% Letter maps to Plainletter. Match Plainletter.
            make_regexp(Cipherword, Key, Unknown,
                        Rest, Backrefs, Accum ++ [Plainletter])
    end.
%% Returns the number of times Obj occurs in List.
%%
%% Uses coercing equality (==), matching the comparison the original
%% comprehension used, so e.g. 1 and 1.0 count as the same element.
count(List, Obj) ->
    lists:foldl(fun(Elem, Acc) when Elem == Obj -> Acc + 1;
                   (_Other, Acc) -> Acc
                end, 0, List).
%% Returns the size of the word's dictionary, i.e. how many candidate
%% plaintext words remain for this cipherword.
%%
%% (Fix: stripped trailing non-Erlang garbage that had been fused onto
%% the final line and made the form unparsable.)
dictionary_size(This) ->
    erlang:length(This#word.dictionary).
%%
%% @doc Math functions missing in the standard math module.
%%
-module(maths).
-author("<NAME>").
-export([log/2]).
-export([egcd/2, gcd/2, lcm/2, ilog2/1, isqrt/1]).
-export([crt_garner/2, crt_solver/2]).
-export([mod/2, mod_exp/3, mod_inv/2, mod_linear_equation_solver/3]).
-export([dot_product/2, hadamard_prod/2, pairwise_primes/1, prod/1]).
-export([pow/2]).
-export([factor2/1, jacobi/2, pollard_rho/1]).
%%
%% @doc Log base `B' of `X', computed via the change-of-base rule
%% ln(X) / ln(B).
%%
-spec log(B :: number(), X :: number()) -> float().
log(Base, X) ->
    Numerator = math:log(X),
    Denominator = math:log(Base),
    Numerator / Denominator.
%%
%% @doc Hadamard product (a.k.a. Schur product) of two given vectors:
%% element-wise multiplication. Both lists must have the same length
%% (lists:zip/2 fails otherwise, as lists:zipwith/3 did).
%%
-spec hadamard_prod([number()], [number()]) -> [number()].
hadamard_prod(Xs, Ys) ->
    [X * Y || {X, Y} <- lists:zip(Xs, Ys)].
%%
%% @doc Dot product (a.k.a. scalar product) of two given vectors:
%% the sum of pairwise products. Inlines the element-wise multiply
%% into a single left fold over the zipped pairs; summation order is
%% left-to-right, the same as lists:sum/1 over the products.
%%
-spec dot_product([number()], [number()]) -> number().
dot_product(Xs, Ys) ->
    lists:foldl(fun({X, Y}, Acc) -> Acc + X * Y end, 0, lists:zip(Xs, Ys)).
%%
%% @doc Extended Euclidean Algorithm to compute GCD.
%% This identity holds true: `GCD(A, B) = A * U + B * V'.
%%
-spec egcd(A :: pos_integer(), B :: pos_integer()) ->
    {GCD :: pos_integer(), U :: integer(), V :: integer()}.
egcd(A, B) when 0 < A, 0 < B -> egcd(A, B, 1, 0, 0, 1).

%% Worker: maintains the Bezout coefficients while running Euclid's
%% algorithm; terminates when the first argument reaches 0.
egcd(0, D, _, _, Ud, Vd) -> {D, Ud, Vd};
egcd(C, D, Uc, Vc, Ud, Vd) ->
    Q = D div C,
    egcd(D - Q * C, C, Ud - Q * Uc, Vd - Q * Vc, Uc, Vc).

%%
%% @doc Returns the GCD of two integers. Negative arguments are
%% folded to their absolute values.
%%
%% Fix: the previous version crashed with function_clause whenever an
%% argument was 0 (egcd/2 requires strictly positive inputs); that
%% could be triggered e.g. by pollard_rho/1 computing gcd(Y - Xi, N)
%% with Y =:= Xi. gcd(0, B) is defined as |B| (and gcd(0, 0) = 0).
%%
-spec gcd(integer(), integer()) -> non_neg_integer().
gcd(0, B) -> abs(B);
gcd(A, 0) -> abs(A);
gcd(A, B) when A < 0 -> gcd(-A, B);
gcd(A, B) when B < 0 -> gcd(A, -B);
gcd(A, B) -> {GCD, _, _} = egcd(A, B), GCD.
%%
%% @doc Returns the least common multiple (LCM) of two positive integers.
%%
%% Computed as A * B / gcd(A, B); Erlang bignums make the intermediate
%% product safe from overflow.
%%
-spec lcm(pos_integer(), pos_integer()) -> pos_integer().
lcm(A, B) -> A * B div gcd(A, B).
%%
%% @doc Floor of the logarithm base 2 of the given integer `N',
%% i.e. one less than N's bit length.
%%
%% Relies on the project-local bin:integer_to_bitstring/1 -- assumed to
%% return a minimal-width bitstring representation of N (TODO confirm
%% against the bin module).
%%
-spec ilog2(pos_integer()) -> non_neg_integer().
ilog2(N) when 0 < N ->
    B = bin:integer_to_bitstring(N),
    bit_size(B) - 1.
%%
%% @doc Integer square root. Implemented using
%% [https://en.wikipedia.org/wiki/Integer_square_root#Using_bitwise_operations bitwise algorithm].
%%
-spec isqrt(non_neg_integer()) -> non_neg_integer().
isqrt(N) when 0 =< N -> isqrt_shift(N, 2, N bsr 2).

%% Phase 1: find the largest even shift such that N bsr Shift is still
%% nonzero (and not N itself, which happens for small N), to seed the
%% digit-by-digit root extraction.
isqrt_shift(N, Shift, 0) -> isqrt_root(N, Shift - 2, 0);
isqrt_shift(N, Shift, N) -> isqrt_root(N, Shift - 2, 0);
isqrt_shift(N, Shift, _) ->
    S = Shift + 2,
    isqrt_shift(N, S, N bsr S).

%% Phase 2: build the root two bits at a time, from the most
%% significant pair downwards, keeping the invariant Root^2 =< N bsr Shift.
isqrt_root(_, Shift, Root) when Shift < 0 -> Root;
isqrt_root(N, Shift, Root) ->
    R = Root bsl 1,
    Candidate = R + 1,
    if
        Candidate * Candidate =< N bsr Shift -> isqrt_root(N, Shift - 2, Candidate);
        true -> isqrt_root(N, Shift - 2, R)
    end.
%%
%% @doc Garner's formula to solve particular case of CRT
%%
%% ```x = a (mod p), x = b (mod q)''' where `p' and `q' are primes
%% (more generally, coprime moduli, since only mod_inv(Q, P) must exist).
%%
%% It gives the same result as `crt_solver([A, B], [P, Q]).'
%%
-spec crt_garner({non_neg_integer(), pos_integer()}, {non_neg_integer(), pos_integer()}) -> pos_integer().
crt_garner({A, P}, {B, Q}) ->
    mod((A - B) * mod_inv(Q, P), P) * Q + B.
%%
%% @doc Chinese Remainder Theorem solver.
%% For given vectors `A' and `N', solves the equation `x = A (mod N)'
%% or returns `error' if the solution does not exist.
%%
%% The solution exists when `N' is a vector of relatively prime numbers.
%%
%% See [CLRS3] Theorem 31.27.
%%
%% When some modular inverse does not exist, mod_inv/2 returns the atom
%% error, and multiplying by it raises badarith -- which is caught here
%% and converted into the error return.
%%
-spec crt_solver([non_neg_integer()], [pos_integer()]) -> pos_integer() | error.
crt_solver(A, N) when length(A) =:= length(N) ->
    try core:zipfold(crtFun(prod(N)), 0, A, N) of
        Fold -> Fold
    catch
        error:badarith -> error
    end.

%% Folding function for crt_solver/2: accumulates the standard CRT sum
%% sum(Ai * Mi * Mi^-1) mod Prod, where Mi = Prod / Ni.
crtFun(Prod) ->
    fun(Acc, A, N) ->
        M = Prod div N,
        MInv = maths:mod_inv(M, N),
        mod(Acc + A * M * MInv, Prod)
    end.
%%
%% @doc Modulo operation that works properly on negative integers:
%% the result is always in [0, M). When `rem' yields a negative
%% remainder, one modulus is added back; otherwise the remainder is
%% already canonical.
%%
-spec mod(integer(), pos_integer()) -> non_neg_integer().
mod(A, M) ->
    case A rem M of
        R when R < 0 -> R + M;
        R -> R
    end.
%%
%% @doc Fast modular exponentiation by repeated squaring.
%%
%% See [CLRS3] Chapter 31.6.
%%
%% If `Base', `Exp', and `Mod' are `b'-bit numbers, then the total
%% number of arithmetic operations required is `O(b)' and
%% the total number of bit operations required is `O(b^3)'.
%%
%% @see crypto:mod_pow/3
%% @see crypto:bytes_to_integer/1
%%
-spec mod_exp(Base :: non_neg_integer(), Exp :: non_neg_integer(), Mod :: pos_integer()) -> non_neg_integer().
mod_exp(Base, Exp, Mod) ->
    %% Walks the exponent's bits MSB-first; relies on the project-local
    %% bin:integer_to_bitstring/1 returning Exp's binary digits as a
    %% bitstring (TODO confirm against the bin module).
    mod_exp(Base, bin:integer_to_bitstring(Exp), 0, 1, Mod).

%% C tracks the exponent consumed so far (kept for the textbook
%% invariant D = A^C mod N; C itself is otherwise unused), D is the
%% running result.
mod_exp(_A, <<>>, _C, D, _N) -> D;
mod_exp(A, <<0:1, B/bitstring>>, C, D, N) ->
    mod_exp(A, B, 2 * C, D * D rem N, N);
mod_exp(A, <<1:1, B/bitstring>>, C, D, N) ->
    mod_exp(A, B, 2 * C + 1, D * D * A rem N, N).
%%
%% @doc Inverse of `B' modulo `N', or `error' when it does not exist
%% (i.e. when gcd(B, N) =/= 1).
%%
%% When gcd(B, N) = 1 the solver returns exactly one solution, hence
%% the single-element list match below.
%%
-spec mod_inv(B :: pos_integer(), N :: pos_integer()) -> pos_integer() | error.
mod_inv(B, N) when 0 < B, 0 < N ->
    case mod_linear_equation_solver(B, 1, N) of
        error -> error;
        [A] -> A
    end.
%%
%% @doc Solves equation `ax = b (mod n)' or returns `error'
%% if the solution does not exist.
%%
%% See [CLRS3] Chapter 31.4.
%%
%% There are solutions iff d = gcd(a, n) divides b; in that case there
%% are exactly d of them, spaced n/d apart.
%%
-spec mod_linear_equation_solver(integer(), integer(), pos_integer()) -> [non_neg_integer()] | error.
mod_linear_equation_solver(A, B, N) ->
    {D, U, _} = egcd(A, N),
    mod_linear_equation_solver(B, N, D, U).

%% No solution when d does not divide b.
mod_linear_equation_solver(B, _, D, _) when B rem D =/= 0 -> error;
mod_linear_equation_solver(B, N, D, U) ->
    mod_linear_equation_solver_list(N, D, mod(U * B div D, N)).

%% Enumerates all D solutions: x0 + i * (n/d) mod n for i in [0, D-1].
mod_linear_equation_solver_list(N, D, X0) ->
    [mod(X0 + I * N div D, N) || I <- lists:seq(0, D - 1)].
%%
%% @doc Returns `true' if the numbers in the given list
%% are pairwise relatively prime, otherwise returns `false'.
%%
%% Also requires every member of a pair to be greater than 1. Note
%% the X < Y guard enumerates each unordered pair once; duplicate
%% values never form a pair with themselves, so e.g. [2, 2] is
%% (perhaps surprisingly) reported as pairwise prime.
%%
-spec pairwise_primes([pos_integer()]) -> boolean().
pairwise_primes(List) ->
    Ps = [1 < X andalso 1 < Y andalso gcd(X, Y) =:= 1 || X <- List, Y <- List, X < Y],
    lists:all(fun(Bool) -> Bool end, Ps).
%%
%% @doc Integer power of another integer, by recursive
%% squaring: even exponents square the half power, odd exponents peel
%% off one factor. Exponent 0 yields 1 for any base.
%%
-spec pow(N :: integer(), Exp :: non_neg_integer()) -> integer().
pow(_Base, 0) ->
    1;
pow(Base, Exp) when Exp band 1 =:= 0 ->
    Half = pow(Base, Exp bsr 1),
    Half * Half;
pow(Base, Exp) when 0 < Exp ->
    Base * pow(Base, Exp - 1).
%%
%% @doc Returns the product of the numbers in the given list.
%% The product of the empty list is 1 (the multiplicative identity).
%% Factors are multiplied left to right, matching a left fold.
%%
-spec prod([number()]) -> number().
prod(Numbers) ->
    prod(Numbers, 1).

%% Tail-recursive worker carrying the running product.
prod([], Acc) -> Acc;
prod([N | Rest], Acc) -> prod(Rest, N * Acc).
%%
%% @doc Compute `{s, t}' such that `s' is odd and `n = s * 2^t',
%% i.e. strip all factors of two, counting them. Uses bit tests and
%% shifts, which for positive integers are equivalent to the rem/div
%% formulation.
%%
-spec factor2(pos_integer()) -> {pos_integer(), non_neg_integer()}.
factor2(N) -> factor2(N, 0).

factor2(Odd, Twos) when Odd band 1 =:= 1 -> {Odd, Twos};
factor2(Even, Twos) -> factor2(Even bsr 1, Twos + 1).
%%
%% @doc Compute [https://en.wikipedia.org/wiki/Jacobi_symbol Jacobi symbol] (and Legendre symbol).
%%
%% See [MvOV1] Chapter 2.4.5. Algorithm 2.149
%%
%% See [CLRS3] Problem 31-4.b
%%
%% N must be an odd integer greater than 2 (enforced by the guard).
jacobi(A, N) when 2 < N, N rem 2 =/= 0 -> jacobi(A, N, 1).

%% Worker: Acc accumulates the sign contributions from the
%% factors-of-two rule and quadratic reciprocity at each step.
jacobi(0, _, _) -> 0;
jacobi(A, N, Acc) ->
    %% Split A = A1 * 2^E with A1 odd; each factor of two contributes
    %% +1 or -1 depending on N mod 8.
    {A1, E} = factor2(A),
    S1 = if
        E rem 2 =:= 0 -> 1;
        N rem 8 =:= 1 orelse N rem 8 =:= 7 -> 1;
        N rem 8 =:= 3 orelse N rem 8 =:= 5 -> -1
    end,
    %% Quadratic reciprocity: flipping (A1, N) changes sign iff both
    %% are congruent to 3 mod 4.
    S = if
        N rem 4 =:= 3 andalso A1 rem 4 =:= 3 -> -S1;
        true -> S1
    end,
    if
        A1 =:= 1 -> S * Acc;
        true -> jacobi(N rem A1, A1, S * Acc)
    end.
%%
%% @doc Pollard's rho heuristic for finding a non-trivial factor of N.
%%
%% See [CLRS3] Chapter 31.9, p.976
%%
%% Nondeterministic: seeded from rand:uniform/1, so repeated calls may
%% return different factors (or, for prime N, never terminate --
%% heuristic by design).
%%
-spec pollard_rho(pos_integer()) -> pos_integer().
pollard_rho(N) ->
    X = rand:uniform(N - 1),
    pollard_rho(N, X, X, 1, 2).

%% When the iteration count reaches the checkpoint K, save the current
%% X as Y (Brent-style cycle anchor) and double K.
pollard_rho(N, X, _, J, J) ->
    pollard_rho(N, X, X, J, 2 * J);
pollard_rho(N, X, Y, I, K) ->
    %% Iterate x -> x^2 - 1 (mod N); a factor appears when
    %% gcd(Y - Xi, N) is a non-trivial divisor.
    %% NOTE(review): if Y =:= Xi this computes gcd(0, N), which the
    %% guard-restricted egcd cannot handle unless gcd/2 special-cases
    %% zero -- verify gcd/2 accepts 0.
    Xi = mod(X * X - 1, N),
    case gcd(Y - Xi, N) of
        1 -> pollard_rho(N, Xi, Y, I + 1, K);
        N -> pollard_rho(N, Xi, Y, I + 1, K);
        D -> D
    end.
%% =============================================================================
%% Unit tests
%% =============================================================================
-include_lib("eunit/include/eunit.hrl").
%% Fix: this simple *_test/0 function previously used ?_assertEqual,
%% which only *returns* a test object (a {Line, Fun} thunk); EUnit
%% ignores the return value of simple test functions, so the assertion
%% never actually executed. ?assertEqual runs it immediately.
crt_garner_test() ->
    ?assertEqual(crt_solver([4, 5], [7, 11]), crt_garner({4, 7}, {5, 11})).
%% CRT: single congruence, multiple congruences, and a non-coprime
%% modulus pair for which no unique solution exists.
crt_solver_test_() -> [
    ?_assertEqual(21, crt_solver([21], [23])),
    ?_assertEqual(49, crt_solver([4, 5], [5, 11])),
    ?_assertEqual(10, crt_solver([1, 2, 3], [9, 8, 7])),
    ?_assertEqual(8458, crt_solver([3, 5], [89, 107])),
    ?_assertEqual(error, crt_solver([1, 3], [4, 6])) % it can be 9 or 21
].
dot_product_test() ->
    ?assertEqual(3, dot_product([1, 3, -5], [4, -2, -1])).
egcd_test() ->
    ?assertEqual({263, 168, -131}, egcd(91261, 117035)).
lcm_test_() -> [
    ?_assertEqual(4100, lcm(82, 100)),
    ?_assertEqual(120, lcm(8, 30))].
%% ilog2 is floor(log2(N)): exact powers of two and in-between values.
ilog2_test_() -> [
    ?_assertEqual(0, ilog2(1)),
    ?_assertEqual(1, ilog2(3)),
    ?_assertEqual(2, ilog2(4)),
    ?_assertEqual(2, ilog2(5)),
    ?_assertEqual(20, ilog2(1048576))].
%% isqrt is floor(sqrt(N)), including 0 and a bignum case.
isqrt_test_() -> [
    ?_assertEqual(0, isqrt(0)),
    ?_assertEqual(1, isqrt(1)),
    ?_assertEqual(1, isqrt(2)),
    ?_assertEqual(1, isqrt(3)),
    ?_assertEqual(2, isqrt(4)),
    ?_assertEqual(2, isqrt(5)),
    ?_assertEqual(10, isqrt(100)),
    ?_assertEqual(79, isqrt(6241)),
    ?_assertEqual(79, isqrt(6250)),
    ?_assertEqual(111111110651, isqrt(12345678910111213141516))].
mod_linear_equation_solver_test_() -> [
    ?_assertEqual(error, mod_linear_equation_solver(2, 3, 4)),
    ?_assertEqual([6, 16, 26, 36, 46], mod_linear_equation_solver(35, 10, 50)),
    ?_assertEqual([95, 45], mod_linear_equation_solver(14, 30, 100))].
%% Contrast mod/2 (always non-negative) with the built-in rem.
mod_test_() -> [
    ?_assertEqual(1, mod(15, 7)),
    ?_assertEqual(2, mod(-5, 7)),
    ?_assertEqual(0, mod(0, 17)),
    ?_assertEqual(-5, -5 rem 7)].
%% Cross-checked against crypto:mod_pow/3 in the last case.
mod_exp_test_() -> [
    ?_assertEqual(1, mod_exp(0, 0, 5)),
    ?_assertEqual(1, mod_exp(1, 1, 5)),
    ?_assertEqual(1, mod_exp(7, 560, 561)),
    ?_assertEqual(16, mod_exp(12, 34, 56)),
    ?_assertEqual(199, mod_exp(27, 35, 569)),
    ?_assertEqual(81, mod_exp(12345, 67890, 103)),
    ?_assertEqual(81, crypto:bytes_to_integer(crypto:mod_pow(12345, 67890, 103)))].
%% Note: crypto:mod_pow(3, -1, 60) yields 27 even though 3 has no
%% inverse mod 60 -- hence the "sic!" comparison below.
mod_inv_test_() -> [
    ?_assertEqual(error, mod_inv(3, 60)),
    ?_assertEqual(1367, mod_inv(3, 4100)),
    ?_assertEqual(27, crypto:bytes_to_integer(crypto:mod_pow(3, -1, 60))), %% sic!
    ?_assertEqual(43, mod_inv(7, 60)),
    ?_assertEqual(43, crypto:bytes_to_integer(crypto:mod_pow(7, -1, 60))),
    ?_assertEqual(79, mod_inv(74, 167))].
pow_test() ->
    ?assertEqual(1048576, pow(2, 20)).
factor2_test_() -> [
    ?_assertEqual({3, 4}, factor2(48)),
    ?_assertEqual({79, 1}, factor2(158))].
%% Z21 lists the units of Z/21; jacobi values are checked against
%% known tables for moduli 3, 7, 21, and non-coprime cases (9, 15)
%% where the symbol is 0.
jacobi_test_() ->
    Z21 = [1, 2, 4, 5, 8, 10, 11, 13, 16, 17, 19, 20], [
    ?_assertEqual(-1, jacobi(pow(2, 251) - 9 - 1174, pow(2, 251) - 9)),
    ?_assertEqual(-1, jacobi(158, 235)),
    ?_assertEqual([1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, -1], [jacobi(A, 3) || A <- Z21]),
    ?_assertEqual([1, 1, 1, -1, 1, -1, 1, -1, 1, -1, -1, -1], [jacobi(A, 7) || A <- Z21]),
    ?_assertEqual([1, -1, 1, 1, -1, -1, -1, -1, 1, 1, -1, 1], [jacobi(A, 21) || A <- Z21]),
    ?_assertEqual([1, 1, 0, 1, 1, 0, 1, 1], [jacobi(A, 9) || A <- lists:seq(1, 8)]),
    ?_assertEqual([1, 1, 0, 1, 0, 0, -1, 1, 0, 0, -1, 0, -1, -1], [jacobi(A, 15) || A <- lists:seq(1, 14)])].
-ifdef(STOCHASTIC_TEST).
%% Stochastic: pollard_rho/1 seeds from rand, so only membership in
%% the set of true prime factors can be asserted. Compiled only when
%% STOCHASTIC_TEST is defined.
%% (Fix: stripped trailing non-Erlang garbage fused onto the -endif line.)
pollard_rho_test_() -> [
    ?_assert(lists:member(pollard_rho(455459), [613, 743])),
    ?_assert(lists:member(pollard_rho(1387), [19, 73]))].
-endif.
-module(jiffy_global).
-export([
new/1,
get_atom_key/1,
get_atom_key/2,
get_string_key/1,
get_string_key/2,
get_binary_key/1,
get_binary_key/2,
put_atom_key/2,
put_string_key/2,
put_binary_key/2,
delete_atom_key/1,
delete_string_key/1,
delete_binary_key/1
]).
-type type() :: {?MODULE, atom()}.
%% @doc
%% Create a handle for a Jiffy global instance: a `{?MODULE, Module}'
%% tuple wrapping the backing module name derived from `Key'. The
%% handle can be passed to the get/put/delete functions that accept it,
%% skipping the key-to-module conversion on every call.
%% @end
-spec new(atom()) -> type().
new(Key) ->
    {?MODULE, key_to_module(Key)}.
%% @doc
%% Get the value for atom() `Key' or return `nil' if no value has been
%% stored.
%% @end
-spec get_atom_key(atom()) -> any() | nil.
get_atom_key(Key) ->
    get_atom_key(Key, nil).
%% @doc
%% Get the value for a string() `Key' or return `nil' if no value has
%% been stored.
%% @end
%% Fix: spec previously declared the argument as atom(); the key is a
%% string (see string_key_to_module/1, which appends it to a char list).
-spec get_string_key(string()) -> any() | nil.
get_string_key(Key) ->
    get_string_key(Key, nil).
%% @doc
%% Get the value for a binary() `Key' or return `nil' if no value has
%% been stored.
%% @end
%% Fix: spec previously declared the argument as atom(); the key is a
%% binary (see binary_key_to_module/1, which calls binary_to_list/1).
-spec get_binary_key(binary()) -> any() | nil.
get_binary_key(Key) ->
    get_binary_key(Key, nil).
%% @doc
%% Get the value for atom() `Key' or return `Default'. Also accepts a
%% handle created by new/1, in which case the module lookup is skipped.
%% @end
-spec get_atom_key(atom() | type(), any()) -> any().
get_atom_key({?MODULE, Module}, Default) ->
    do_get(Module, Default);
get_atom_key(Key, Default) ->
    Module = key_to_module(Key),
    do_get(Module, Default).
%% @doc
%% Get the value for a string() `Key' or return `Default'.
%% @end
-spec get_string_key(string(), any()) -> any().
get_string_key(Key, Default) ->
    Module = string_key_to_module(Key),
    do_get(Module, Default).
%% @doc
%% Get the value for a binary() `Key' or return `Default'.
%% @end
-spec get_binary_key(binary(), any()) -> any().
get_binary_key(Key, Default) ->
    Module = binary_key_to_module(Key),
    do_get(Module, Default).
%% @doc
%% Store `Value' at atom() `Key', replacing an existing value if
%% present. Also accepts a handle created by new/1.
%% @end
-spec put_atom_key(atom() | {?MODULE, module()}, any()) -> ok.
put_atom_key({?MODULE, Module}, Value) ->
    do_put(Module, Value);
put_atom_key(Key, Value) ->
    Module = key_to_module(Key),
    do_put(Module, Value).
%% @doc
%% Store `Value' at a string() `Key', replacing an existing value if
%% present.
%% @end
-spec put_string_key(string(), any()) -> ok.
put_string_key(Key, Value) ->
    Module = string_key_to_module(Key),
    do_put(Module, Value).
%% @doc
%% Store `Value' at a binary() `Key', replacing an existing value if
%% present.
%% @end
-spec put_binary_key(binary(), any()) -> ok.
put_binary_key(Key, Value) ->
    Module = binary_key_to_module(Key),
    do_put(Module, Value).
%% @doc
%% Delete the value stored at an atom() `Key' (or at a handle created
%% by new/1); no-op if non-existent.
%% @end
%% Fix: the spec already advertised `{?MODULE, module()}' handles, and
%% put_atom_key/2 and delete_binary_key/1 both accept them, but this
%% function had no tuple clause -- key_to_module/1 would crash on a
%% handle. Added the handle clause for consistency with the spec.
-spec delete_atom_key(atom() | {?MODULE, module()}) -> ok.
delete_atom_key({?MODULE, Module}) ->
    do_delete(Module);
delete_atom_key(Key) ->
    Module = key_to_module(Key),
    do_delete(Module).
%% @doc
%% Delete the value stored at a string() `Key'; no-op if non-existent.
%% @end
%% Fix: spec previously claimed `atom() | {?MODULE, module()}', but the
%% implementation only handles string keys (string_key_to_module/1
%% appends the key to a char list).
-spec delete_string_key(string()) -> ok.
delete_string_key(Key) ->
    Module = string_key_to_module(Key),
    do_delete(Module).
%% @doc
%% Delete the value stored at a binary() `Key' (or at a handle created
%% by new/1); no-op if non-existent.
%% @end
%% Fix: spec previously said `atom()' for the plain-key case; the
%% implementation converts the key with binary_to_list/1, so it is a
%% binary.
-spec delete_binary_key(binary() | {?MODULE, module()}) -> ok.
delete_binary_key({?MODULE, Module}) ->
    do_delete(Module);
delete_binary_key(Key) ->
    Module = binary_key_to_module(Key),
    do_delete(Module).
%% Private

%% Read the stored value by calling the generated module's value/0.
%% If the module has never been compiled (or was deleted), the call
%% raises error:undef, which is converted into Default. NOTE(review):
%% this also swallows an undef raised *inside* value/0's body, though
%% generated bodies here are constant terms, so that should not occur.
-spec do_get(atom(), any()) -> any().
do_get(Module, Default) ->
    try
        Module:value()
    catch
        error:undef ->
            Default
    end.
%% Compile a one-function module embedding Value as a constant, purge
%% any old version, and hot-load the new one. The match on
%% {module, Module} asserts the load succeeded.
-spec do_put(atom(), any()) -> ok.
do_put(Module, Value) ->
    Binary = compile(Module, Value),
    code:purge(Module),
    {module, Module} = code:load_binary(Module, atom_to_list(Module) ++ ".erl", Binary),
    ok.
%% Unload the value-holding module; safe to call when it was never
%% loaded (purge/delete simply return false in that case).
%% Fix: the spec (and the specs of all delete_*_key callers) declare
%% `-> ok', but code:delete/1 returns a boolean(), which was leaked as
%% the return value. Discard it and return ok as declared.
-spec do_delete(atom()) -> ok.
do_delete(Module) ->
    code:purge(Module),
    _ = code:delete(Module),
    ok.
%% Map an atom key to the backing module name, e.g. foo ->
%% 'jiffy_global:foo'. Creates a new atom per distinct key; callers
%% should not feed unbounded untrusted keys here (atoms are never GC'd).
%% Fix: spec return type was written `atom' (the singleton literal-atom
%% type) instead of the intended `atom()'.
-spec key_to_module(atom()) -> atom().
key_to_module(Key) ->
    list_to_atom("jiffy_global:" ++ atom_to_list(Key)).
%% Map a binary key to the backing module name, e.g. <<"foo">> ->
%% 'jiffy_global:foo'. Same atom-creation caveat as key_to_module/1.
%% Fix: spec return type was `atom' instead of `atom()'.
-spec binary_key_to_module(binary()) -> atom().
binary_key_to_module(Key) ->
    list_to_atom("jiffy_global:" ++ binary_to_list(Key)).
%% Map a string key to the backing module name, e.g. "foo" ->
%% 'jiffy_global:foo'. Same atom-creation caveat as key_to_module/1.
%% Fix: spec previously declared the argument as binary() (it is a
%% string, appended directly to a char list) and the return as the
%% literal-atom type `atom' instead of `atom()'.
-spec string_key_to_module(string()) -> atom().
string_key_to_module(Key) ->
    list_to_atom("jiffy_global:" ++ Key).
%% Compile the abstract forms produced by value_to_abstract/2 into a
%% loadable BEAM binary. The match asserts compilation succeeded for
%% the expected module name.
-spec compile(atom(), any()) -> binary().
compile(Module, Value) ->
    {ok, Module, Binary} =
        compile:forms(value_to_abstract(Module, Value), [verbose, report_errors]),
    Binary.
%% Build the abstract forms for a module equivalent to:
%%   -module(Module).
%%   -export([value/0]).
%%   value() -> Value.
%% erl_syntax:abstract/1 embeds the runtime term as a literal, and
%% revert/1 converts it back to standard erl_parse AST.
%% (Fix: stripped trailing non-Erlang garbage that had been fused onto
%% the final line and made the form unparsable.)
-spec value_to_abstract(atom(), any()) -> [erl_syntax:syntaxTree()].
value_to_abstract(Module, Value) ->
    Data = erl_syntax:revert(erl_syntax:abstract(Value)),
    [
        {attribute, 0, module, Module},
        {attribute, 0, export, [{value, 0}]},
        {function, 0, value, 0, [{clause, 0, [], [], [Data]}]}
    ].
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose: Information about the Erlang built-in functions.
-module(erl_bifs).
-export([is_pure/3, is_safe/3, is_exit_bif/3]).
%% =====================================================================
%% is_pure(Module, Name, Arity) -> boolean()
%%
%%	    Module = Name = atom()
%%	    Arity = integer()
%%
%%	Returns `true' if the function `Module:Name/Arity' does not
%%	affect the state, nor depend on the state, although its
%%	evaluation is not guaranteed to complete normally for all input.

-spec is_pure(atom(), atom(), arity()) -> boolean().

%% Arithmetic, comparison, and bitwise operators.
is_pure(erlang, '*', 2) -> true;
is_pure(erlang, '+', 1) -> true;    % (even for non-numbers)
is_pure(erlang, '+', 2) -> true;
is_pure(erlang, '++', 2) -> true;
is_pure(erlang, '-', 1) -> true;
is_pure(erlang, '-', 2) -> true;
is_pure(erlang, '--', 2) -> true;
is_pure(erlang, '/', 2) -> true;
is_pure(erlang, '/=', 2) -> true;
is_pure(erlang, '<', 2) -> true;
is_pure(erlang, '=/=', 2) -> true;
is_pure(erlang, '=:=', 2) -> true;
is_pure(erlang, '=<', 2) -> true;
is_pure(erlang, '==', 2) -> true;
is_pure(erlang, '>', 2) -> true;
is_pure(erlang, '>=', 2) -> true;
is_pure(erlang, 'and', 2) -> true;
is_pure(erlang, 'band', 2) -> true;
is_pure(erlang, 'bnot', 1) -> true;
is_pure(erlang, 'bor', 2) -> true;
is_pure(erlang, 'bsl', 2) -> true;
is_pure(erlang, 'bsr', 2) -> true;
is_pure(erlang, 'bxor', 2) -> true;
is_pure(erlang, 'div', 2) -> true;
is_pure(erlang, 'not', 1) -> true;
is_pure(erlang, 'or', 2) -> true;
is_pure(erlang, 'rem', 2) -> true;
is_pure(erlang, 'xor', 2) -> true;
%% Conversions, inspection, and construction BIFs.
is_pure(erlang, abs, 1) -> true;
is_pure(erlang, atom_to_binary, 2) -> true;
is_pure(erlang, atom_to_list, 1) -> true;
is_pure(erlang, binary_part, 2) -> true;
is_pure(erlang, binary_part, 3) -> true;
is_pure(erlang, binary_to_atom, 2) -> true;
is_pure(erlang, binary_to_float, 1) -> true;
is_pure(erlang, binary_to_integer, 1) -> true;
is_pure(erlang, binary_to_list, 1) -> true;
is_pure(erlang, binary_to_list, 3) -> true;
is_pure(erlang, bit_size, 1) -> true;
is_pure(erlang, byte_size, 1) -> true;
is_pure(erlang, ceil, 1) -> true;
is_pure(erlang, element, 2) -> true;
is_pure(erlang, float, 1) -> true;
is_pure(erlang, float_to_list, 1) -> true;
is_pure(erlang, float_to_binary, 1) -> true;
is_pure(erlang, floor, 1) -> true;
%% hash/2 is deliberately listed as impure (explicit false rather than
%% relying on the catch-all); cf. the same treatment of phash/2 below.
is_pure(erlang, hash, 2) -> false;
is_pure(erlang, hd, 1) -> true;
is_pure(erlang, integer_to_binary, 1) -> true;
is_pure(erlang, integer_to_list, 1) -> true;
%% Type tests.
is_pure(erlang, is_atom, 1) -> true;
is_pure(erlang, is_boolean, 1) -> true;
is_pure(erlang, is_binary, 1) -> true;
is_pure(erlang, is_bitstring, 1) -> true;
%% erlang:is_builtin/3 depends on the state (i.e. the version of the emulator).
is_pure(erlang, is_float, 1) -> true;
is_pure(erlang, is_function, 1) -> true;
is_pure(erlang, is_integer, 1) -> true;
is_pure(erlang, is_list, 1) -> true;
is_pure(erlang, is_map, 1) -> true;
is_pure(erlang, is_number, 1) -> true;
is_pure(erlang, is_pid, 1) -> true;
is_pure(erlang, is_port, 1) -> true;
is_pure(erlang, is_record, 2) -> true;
is_pure(erlang, is_record, 3) -> true;
is_pure(erlang, is_reference, 1) -> true;
is_pure(erlang, is_tuple, 1) -> true;
is_pure(erlang, length, 1) -> true;
is_pure(erlang, list_to_atom, 1) -> true;
is_pure(erlang, list_to_binary, 1) -> true;
is_pure(erlang, list_to_float, 1) -> true;
is_pure(erlang, list_to_integer, 1) -> true;
is_pure(erlang, list_to_pid, 1) -> true;
is_pure(erlang, list_to_tuple, 1) -> true;
is_pure(erlang, max, 2) -> true;
is_pure(erlang, min, 2) -> true;
is_pure(erlang, phash, 2) -> false;
is_pure(erlang, pid_to_list, 1) -> true;
is_pure(erlang, round, 1) -> true;
is_pure(erlang, setelement, 3) -> true;
is_pure(erlang, size, 1) -> true;
is_pure(erlang, split_binary, 2) -> true;
is_pure(erlang, term_to_binary, 1) -> true;
is_pure(erlang, tl, 1) -> true;
is_pure(erlang, trunc, 1) -> true;
is_pure(erlang, tuple_size, 1) -> true;
is_pure(erlang, tuple_to_list, 1) -> true;
%% Pure library functions outside the erlang module.
is_pure(lists, append, 2) -> true;
is_pure(lists, subtract, 2) -> true;
is_pure(math, acos, 1) -> true;
is_pure(math, acosh, 1) -> true;
is_pure(math, asin, 1) -> true;
is_pure(math, asinh, 1) -> true;
is_pure(math, atan, 1) -> true;
is_pure(math, atan2, 2) -> true;
is_pure(math, atanh, 1) -> true;
is_pure(math, ceil, 1) -> true;
is_pure(math, cos, 1) -> true;
is_pure(math, cosh, 1) -> true;
is_pure(math, erf, 1) -> true;
is_pure(math, erfc, 1) -> true;
is_pure(math, exp, 1) -> true;
is_pure(math, floor, 1) -> true;
is_pure(math, fmod, 2) -> true;
is_pure(math, log, 1) -> true;
is_pure(math, log2, 1) -> true;
is_pure(math, log10, 1) -> true;
is_pure(math, pow, 2) -> true;
is_pure(math, sin, 1) -> true;
is_pure(math, sinh, 1) -> true;
is_pure(math, sqrt, 1) -> true;
is_pure(math, tan, 1) -> true;
is_pure(math, tanh, 1) -> true;
is_pure(math, pi, 0) -> true;
%% Everything not listed is conservatively assumed impure.
is_pure(_, _, _) -> false.
%% =====================================================================
%% is_safe(Module, Name, Arity) -> boolean()
%%
%%	    Module = Name = atom()
%%	    Arity = integer()
%%
%%	Returns `true' if the function `Module:Name/Arity' is completely
%%	effect free, i.e., if its evaluation always completes normally
%%	and does not affect the state (although the value it returns
%%	might depend on the state).
%%
%%	Note: is_function/2 and is_record/3 are NOT safe: is_function(X, foo)
%%	and is_record(X, foo, bar) will fail.

-spec is_safe(atom(), atom(), arity()) -> boolean().

%% Comparison operators never raise for any terms.
is_safe(erlang, '/=', 2) -> true;
is_safe(erlang, '<', 2) -> true;
is_safe(erlang, '=/=', 2) -> true;
is_safe(erlang, '=:=', 2) -> true;
is_safe(erlang, '=<', 2) -> true;
is_safe(erlang, '==', 2) -> true;
is_safe(erlang, '>', 2) -> true;
is_safe(erlang, '>=', 2) -> true;
%% State-reading but non-raising BIFs (time, process dictionary,
%% node/process introspection) and arity-1 type tests.
is_safe(erlang, date, 0) -> true;
is_safe(erlang, get, 0) -> true;
is_safe(erlang, get, 1) -> true;
is_safe(erlang, get_cookie, 0) -> true;
is_safe(erlang, get_keys, 1) -> true;
is_safe(erlang, group_leader, 0) -> true;
is_safe(erlang, is_alive, 0) -> true;
is_safe(erlang, is_atom, 1) -> true;
is_safe(erlang, is_boolean, 1) -> true;
is_safe(erlang, is_binary, 1) -> true;
is_safe(erlang, is_bitstring, 1) -> true;
is_safe(erlang, is_float, 1) -> true;
is_safe(erlang, is_function, 1) -> true;
is_safe(erlang, is_integer, 1) -> true;
is_safe(erlang, is_list, 1) -> true;
is_safe(erlang, is_number, 1) -> true;
is_safe(erlang, is_pid, 1) -> true;
is_safe(erlang, is_port, 1) -> true;
is_safe(erlang, is_reference, 1) -> true;
is_safe(erlang, is_tuple, 1) -> true;
is_safe(erlang, make_ref, 0) -> true;
is_safe(erlang, max, 2) -> true;
is_safe(erlang, min, 2) -> true;
is_safe(erlang, node, 0) -> true;
is_safe(erlang, nodes, 0) -> true;
is_safe(erlang, ports, 0) -> true;
is_safe(erlang, pre_loaded, 0) -> true;
is_safe(erlang, processes, 0) -> true;
is_safe(erlang, registered, 0) -> true;
is_safe(erlang, self, 0) -> true;
is_safe(erlang, term_to_binary, 1) -> true;
is_safe(erlang, time, 0) -> true;
%% Everything else may raise or have effects; assume unsafe.
is_safe(_, _, _) -> false.
%% =====================================================================
%% is_exit_bif(Module, Name, Arity) -> boolean()
%%
%%	    Module = Name = atom()
%%	    Arity = integer()
%%
%%	Returns `true' if the function `Module:Name/Arity' never returns
%%	normally, i.e., if it always causes an exception regardless of
%%	its arguments.
%%
%% (Fix: stripped trailing non-Erlang garbage that had been fused onto
%% the final clause's line and made the form unparsable.)

-spec is_exit_bif(atom(), atom(), arity()) -> boolean().

is_exit_bif(erlang, exit, 1) -> true;
is_exit_bif(erlang, throw, 1) -> true;
is_exit_bif(erlang, error, 1) -> true;
is_exit_bif(erlang, error, 2) -> true;
is_exit_bif(_, _, _) -> false.
%% ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% @doc The daemon that calculates Riak CS storage on the configured
%% schedule.
-module(riak_cs_storage_d).
-behaviour(gen_fsm).
%% API
-export([start_link/0,
status/0,
start_batch/1,
cancel_batch/0,
pause_batch/0,
resume_batch/0]).
%% gen_fsm callbacks
-export([init/1,
idle/2, idle/3,
calculating/2, calculating/3,
paused/2, paused/3,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4]).
-include("riak_cs.hrl").
-define(SERVER, ?MODULE).
-record(state, {
schedule, %% the times that storage is calculated
last, %% the last time a calculation was scheduled
current, %% what schedule we're calculating for now
next, %% the next scheduled time
riak, %% client we're currently using
batch_start, %% the time we actually started
batch_count=0, %% count of users processed so far
batch_skips=0, %% count of users skipped so far
batch=[], %% users left to process in this batch
recalc %% recalculate a user's storage for this period?
}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Starting the server also verifies the storage schedule. If
%% the schedule contains invalid elements, an error will be printed in
%% the logs.
%%
%% Registers the FSM locally under ?SERVER (this module's name).
start_link() ->
    gen_fsm:start_link({local, ?SERVER}, ?MODULE, [], []).
%% @doc Status is returned as a 2-tuple of `{State, Details}'. State
%% should be `idle', `calculating', or `paused'. When `idle' the
%% details (a proplist) will include the schedule, as well as the
%% times of the last calculation and the next planned calculation.
%% When `calculating' or `paused' details also the scheduled time of
%% the active calculation, the number of seconds the process has been
%% calculating so far, and counts of how many users have been
%% processed and how many are left.
%%
%% Synchronous; uses the default gen_fsm call timeout.
status() ->
    gen_fsm:sync_send_event(?SERVER, status).
%% @doc Force a calculation and archival manually. The `current'
%% property returned from a {@link status/0} call will show the most
%% recently passed schedule time, but calculations will be stored with
%% the time at which they happen, as expected.
%%
%% Allowed options are:
%% <dl>
%%   <dt>`recalc'</dt>
%%   <dd>Recalculate the storage for each user, even if that user
%%   already has a calculation stored for this time period. Default is
%%   `false', such that restarting a canceled batch does not require
%%   redoing the work that happened before cancellation.</dd>
%% </dl>
%%
%% Blocks (infinity timeout) until the FSM accepts or rejects the batch.
start_batch(Options) ->
    gen_fsm:sync_send_event(?SERVER, {manual_batch, Options}, infinity).
%% @doc Cancel the calculation currently in progress. Returns `ok' if
%% a batch was canceled, or `{error, no_batch}' if there was no batch
%% in progress.
cancel_batch() ->
gen_fsm:sync_send_event(?SERVER, cancel_batch, infinity).
%% @doc Pause the calculation currently in progress. Returns `ok' if
%% a batch was paused, or `{error, no_batch}' if there was no batch in
%% progress. Also returns `ok' if there was a batch in progress that
%% was already paused.
pause_batch() ->
gen_fsm:sync_send_event(?SERVER, pause_batch, infinity).
%% @doc Resume the batch currently in progress. Returns `ok' if a
%% batch was resumed, or `{error, no_batch}' if there was no batch in
%% progress. Also returns `ok' if there was a batch in progress that
%% was not paused.
resume_batch() ->
gen_fsm:sync_send_event(?SERVER, resume_batch, infinity).
%%%===================================================================
%%% gen_fsm callbacks
%%%===================================================================
%% @doc Read the storage schedule, arm the timer for the next
%% scheduled run, verify the archive bucket's properties, and go to
%% the `idle' state.
init([]) ->
    Schedule = read_storage_schedule(),
    SchedState = schedule_next(#state{schedule=Schedule},
                               calendar:universal_time()),
    %% crash on startup if the archive bucket is misconfigured
    ok = rts:check_bucket_props(?STORAGE_BUCKET),
    {ok, idle, SchedState}.
%% Asynchronous events

%% @doc Transitions out of idle are all synchronous events, so any
%% async event (including stray `continue' messages) is ignored.
idle(_, State) ->
    {next_state, idle, State}.
%% @doc Async transitions from calculating are all due to messages the
%% FSM sends itself, in order to have opportunities to handle messages
%% from the outside world (like `status').
calculating(continue, #state{batch=[], current=Current}=State) ->
    %% finished with this batch: log duration, release the dedicated
    %% riak worker, and record this run as the last completed one
    _ = lager:info("Finished storage calculation in ~b seconds.",
                   [elapsed(State#state.batch_start)]),
    riak_cs_riakc_pool_worker:stop(State#state.riak),
    NewState = State#state{riak=undefined,
                           last=Current,
                           current=undefined},
    {next_state, idle, NewState};
calculating(continue, State) ->
    %% more to do yet: process one user, then re-queue `continue' so
    %% other messages can interleave between users
    NewState = calculate_next_user(State),
    gen_fsm:send_event(?SERVER, continue),
    {next_state, calculating, NewState};
calculating(_, State) ->
    {next_state, calculating, State}.
%% @doc Async events (notably the self-sent `continue') are dropped
%% while paused; resume/cancel arrive as synchronous events (paused/3).
paused(_, State) ->
    {next_state, paused, State}.
%% Synchronous events

%% @doc Synchronous events in the `idle' state: report status, start a
%% manual batch, and reject batch-control requests when no batch runs.
idle(status, _From, State) ->
    Props = [{schedule, State#state.schedule},
             {last, State#state.last},
             {next, State#state.next}],
    {reply, {ok, {idle, Props}}, idle, State};
idle({manual_batch, Options}, _From, State) ->
    %% store the calculation under "now" rather than a schedule slot
    NewState = start_batch(Options, calendar:universal_time(), State),
    {reply, ok, calculating, NewState};
idle(cancel_batch, _From, State) ->
    {reply, {error, no_batch}, idle, State};
idle(pause_batch, _From, State) ->
    {reply, {error, no_batch}, idle, State};
idle(resume_batch, _From, State) ->
    {reply, {error, no_batch}, idle, State};
idle(_, _From, State) ->
    {reply, ok, idle, State}.
%% @doc Synchronous events in the `calculating' state: report batch
%% progress, reject a second manual batch, pause, or cancel.
calculating(status, _From, State) ->
    Props = [{schedule, State#state.schedule},
             {last, State#state.last},
             {current, State#state.current},
             {next, State#state.next},
             {elapsed, elapsed(State#state.batch_start)},
             {users_done, State#state.batch_count},
             {users_skipped, State#state.batch_skips},
             {users_left, length(State#state.batch)}],
    {reply, {ok, {calculating, Props}}, calculating, State};
calculating({manual_batch, _Options}, _From, State) ->
    %% this is the manual user request to begin a batch
    {reply, {error, already_calculating}, calculating, State};
calculating(pause_batch, _From, State) ->
    %% fixed typo in log message ("calcluation")
    _ = lager:info("Pausing storage calculation"),
    {reply, ok, paused, State};
calculating(cancel_batch, _From, #state{current=Current}=State) ->
    %% finished with this batch: drop the remaining users, release the
    %% dedicated riak worker, and record this run as the last one
    _ = lager:info("Canceled storage calculation after ~b seconds.",
                   [elapsed(State#state.batch_start)]),
    riak_cs_riakc_pool_worker:stop(State#state.riak),
    NewState = State#state{riak=undefined,
                           last=Current,
                           current=undefined,
                           batch=[]},
    {reply, ok, idle, NewState};
calculating(_, _From, State) ->
    {reply, ok, calculating, State}.
%% @doc Synchronous events in the `paused' state. Status and cancel
%% are delegated to calculating/3 since the bookkeeping is identical.
paused(status, From, State) ->
    %% reuse the calculating status proplist, but re-tag it as paused;
    %% the match also asserts the delegate did not alter State
    {reply, {ok, {_, Status}}, _, State} = calculating(status, From, State),
    {reply, {ok, {paused, Status}}, paused, State};
paused(resume_batch, _From, State) ->
    _ = lager:info("Resuming storage calculation"),
    %% restart the self-driving `continue' loop
    gen_fsm:send_event(?SERVER, continue),
    {reply, ok, calculating, State};
paused(cancel_batch, From, State) ->
    calculating(cancel_batch, From, State);
paused(_, _From, State) ->
    {reply, ok, paused, State}.
%% @doc there are no all-state async events for this fsm
handle_event(_Event, StateName, State) ->
    {next_state, StateName, State}.
%% @doc there are no all-state sync events for this fsm; reply `ok'
%% to anything unexpected so callers do not hang
handle_sync_event(_Event, _From, StateName, State) ->
    Reply = ok,
    {reply, Reply, StateName, State}.
%% @doc Handle the timer message armed by schedule_next/2. The head
%% matches `Next' against state.next so stale timers (from a schedule
%% that has since been superseded) are ignored by the final clause.
handle_info({start_batch, Next}, idle, #state{next=Next}=State) ->
    %% next is scheduled immediately in order to generate warnings if
    %% the current calculation runs over time (see next clause)
    NewState = schedule_next(start_batch([], Next, State), Next),
    {next_state, calculating, NewState};
handle_info({start_batch, Next}, InBatch,
            #state{next=Next, current=Current}=State) ->
    %% a scheduled start fired while a batch is still running/paused:
    %% skip this slot and arm the one after it
    _ = lager:error("Unable to start storage calculation for ~p"
                    " because ~p is still working. Skipping forward...",
                    [Next, Current]),
    NewState = schedule_next(State, Next),
    {next_state, InBatch, NewState};
handle_info(_Info, StateName, State) ->
    {next_state, StateName, State}.
%% @doc TODO: log warnings if this fsm is asked to terminate in the
%% middle of running a calculation
terminate(_Reason, _StateName, _State) ->
    ok.
%% @doc this fsm has no special upgrade process
code_change(_OldVsn, StateName, State, _Extra) ->
    {ok, StateName, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @doc The schedule will contain all valid times found in the
%% configuration, sorted in day order; usort also removes duplicates.
read_storage_schedule() ->
    lists:usort(read_storage_schedule1()).
%% @doc Read the raw `storage_schedule' app env. Accepts either a
%% single time (tuple or "HHMM" string) or a list of them; invalid
%% elements are logged and dropped. parse_time/1 deliberately crashes
%% on bad input, so `catch' is used to classify each element.
read_storage_schedule1() ->
    case application:get_env(riak_cs, storage_schedule) of
        undefined ->
            _ = lager:warning("No storage schedule defined."
                              " Calculation must be triggered manually."),
            [];
        {ok, Sched} ->
            case catch parse_time(Sched) of
                {ok, Time} ->
                    %% user provided just one time
                    [Time];
                {'EXIT',_} when is_list(Sched) ->
                    %% a list of times; parse each and report the bad ones
                    Times = [ {S, catch parse_time(S)} || S <- Sched ],
                    _ = case [ X || {X,{'EXIT',_}} <- Times ] of
                            [] -> ok;
                            Bad ->
                                _ = lager:error(
                                      "Ignoring bad storage schedule elements ~p",
                                      [Bad])
                        end,
                    case [ Parsed || {_, {ok, Parsed}} <- Times] of
                        [] ->
                            _ = lager:warning(
                                  "No storage schedule defined."
                                  " Calculation must be triggered manually."),
                            [];
                        Good ->
                            Good
                    end;
                _ ->
                    %% neither a single parsable time nor a list
                    _ = lager:error(
                          "Invalid storage schedule defined."
                          " Calculation must be triggered manually."),
                    []
            end
    end.
%% @doc Parse a schedule time given either as an `{Hour, Minute}'
%% tuple or as an `"HHMM"' string. This function purposely fails
%% (function or case clause) on invalid input so that
%% {@link read_storage_schedule1/0} can pick out the bad eggs.
parse_time({H, M}) when H >= 0, H =< 23, M >= 0, M =< 59 ->
    {ok, {H, M}};
parse_time(Text) when is_list(Text) ->
    %% "~2d~2d" consumes exactly two 2-digit integers; a parse failure
    %% or leftover characters fall through and crash on purpose
    case io_lib:fread("~2d~2d", Text) of
        {ok, [H, M], []} ->
            %% re-check the numeric bounds via the tuple clause
            parse_time({H, M})
    end.
%% @doc Actually kick off the batch. After calling this function, you
%% must advance the FSM state to `calculating'. `Time' is the slot the
%% results are attributed to; `batch_start' records wall-clock start.
start_batch(Options, Time, State) ->
    BatchStart = calendar:universal_time(),
    %% normalize the option to a strict boolean
    Recalc = true == proplists:get_value(recalc, Options),
    %% TODO: probably want to do this fetch streaming, to avoid
    %% accidental memory pressure at other points

    %% this does not check out a worker from the riak connection pool;
    %% instead it creates a fresh new worker, the idea being that we
    %% don't want to foul up the storage calculation just because the
    %% pool is empty; pool workers just happen to be literally the
    %% socket process, so "starting" one here is the same as opening a
    %% connection, and avoids duplicating the configuration lookup code
    {ok, Riak} = riak_cs_riakc_pool_worker:start_link([]),
    Batch = fetch_user_list(Riak),
    %% prime the self-driving `continue' loop (see calculating/2)
    gen_fsm:send_event(?SERVER, continue),
    State#state{batch_start=BatchStart,
                current=Time,
                riak=Riak,
                batch=Batch,
                batch_count=0,
                batch_skips=0,
                recalc=Recalc}.
%% @doc Grab the whole list of Riak CS users (keys of ?USER_BUCKET).
%% On error, logs and returns `[]', turning the batch into a no-op
%% rather than crashing the daemon.
fetch_user_list(Riak) ->
    case riakc_pb_socket:list_keys(Riak, ?USER_BUCKET) of
        {ok, Users} -> Users;
        {error, Error} ->
            _ = lager:error("Storage calculator was unable"
                            " to fetch list of users (~p)",
                            [Error]),
            []
    end.
%% @doc Compute storage for the next user in the batch, popping that
%% user from `batch' and bumping either `batch_count' (processed,
%% even if the per-user sum failed) or `batch_skips' (already had a
%% stored result for this period and recalc is off).
calculate_next_user(#state{riak=Riak,
                           batch=[User|Rest],
                           recalc=Recalc}=State) ->
    Start = calendar:universal_time(),
    case recalc(Recalc, Riak, User, Start) of
        true ->
            _ = case riak_cs_storage:sum_user(Riak, User) of
                    {ok, BucketList} ->
                        End = calendar:universal_time(),
                        store_user(State, User, BucketList, Start, End);
                    {error, Error} ->
                        %% log and move on; one bad user should not
                        %% abort the whole batch
                        _ = lager:error("Error computing storage for user ~s (~p)",
                                        [User, Error])
                end,
            State#state{batch=Rest, batch_count=1+State#state.batch_count};
        false ->
            State#state{batch=Rest, batch_skips=1+State#state.batch_skips}
    end.
%% @doc Decide whether to (re)compute storage for `User' in the
%% archive slice containing `Time'. Returns `true' when recalculation
%% was requested or when no prior sample exists for the slice.
recalc(true, _Riak, _User, _Time) ->
    %% the user demanded recalculations
    true;
recalc(false, Riak, User, Time) ->
    {ok, Period} = riak_cs_storage:archive_period(),
    {Start, End} = rts:slice_containing(Time, Period),
    case riak_cs_storage:get_usage(Riak, User, Start, End) of
        {[], _} ->
            %% No samples were found for this time period (or all
            %% attempts ended in error); calculate
            true;
        _ ->
            %% A sample was found; do not recalc
            false
    end.
%% @doc Archive a user's storage calculation. Write failures are
%% logged but not propagated (the caller discards the return value).
store_user(#state{riak=Riak}, User, BucketList, Start, End) ->
    Obj = riak_cs_storage:make_object(User, BucketList, Start, End),
    case riakc_pb_socket:put(Riak, Obj) of
        ok -> ok;
        {error, Error} ->
            _ = lager:error("Error storing storage for user ~s (~p)",
                            [User, Error])
    end.
%% @doc How many seconds have passed from `Since' until now (UTC).
elapsed(Since) ->
    elapsed(Since, calendar:universal_time()).

%% @doc How many seconds lie between `Early' and `Late'. Warning:
%% the result is negative when `Early' is actually later than `Late'.
elapsed(Early, Late) ->
    LateSecs = calendar:datetime_to_gregorian_seconds(Late),
    EarlySecs = calendar:datetime_to_gregorian_seconds(Early),
    LateSecs - EarlySecs.
%% @doc Setup the automatic trigger to start the next scheduled batch
%% calculation. "Next" is defined as the scheduled time occurring
%% soonest after the `Last' parameter, that has not also already
%% passed by the wall clock. If the next scheduled time <em>has</em>
%% already passed, an error is printed to the logs, and the next time
%% that has not already passed is found and scheduled instead.
schedule_next(#state{schedule=[]}=State, _) ->
    %% nothing to schedule, all triggers manual
    State;
schedule_next(#state{schedule=Schedule}=State, Last) ->
    NextTime = next_target_time(Last, Schedule),
    case elapsed(calendar:universal_time(), NextTime) of
        D when D > 0 ->
            _ = lager:info("Scheduling next storage calculation for ~p",
                           [NextTime]),
            %% timer message is matched against state.next in
            %% handle_info, so stale timers are harmless
            erlang:send_after(D*1000, self(), {start_batch, NextTime}),
            State#state{next=NextTime};
        _ ->
            _ = lager:error("Missed start time for storage calculation at ~p,"
                            " skipping to next scheduled time...",
                            [NextTime]),
            %% just skip everything until the next scheduled time from now
            schedule_next(State, calendar:universal_time())
    end.
%% @doc Find the next scheduled {Hour, Minute} strictly after the
%% given datetime. If every schedule entry for the current day has
%% already passed, wrap around to the first entry of the next day.
%% The schedule is assumed sorted (see read_storage_schedule/0).
next_target_time({Day, {Hour, Minute, _}}, Schedule) ->
    StillToCome = lists:dropwhile(
                    fun(Entry) -> Entry =< {Hour, Minute} end, Schedule),
    case StillToCome of
        [{NextH, NextM} | _] ->
            {Day, {NextH, NextM, 0}};
        [] ->
            %% wrap to the first slot of the following day
            [{NextH, NextM} | _] = Schedule,
            {next_day(Day), {NextH, NextM, 0}}
    end.

%% @doc The calendar date following `Day'; adding one day's worth of
%% seconds lets the calendar module handle month/year rollover.
next_day(Day) ->
    Seconds = calendar:datetime_to_gregorian_seconds({Day, {0, 0, 1}}),
    {NextDay, _} = calendar:gregorian_seconds_to_datetime(Seconds + 86400),
    NextDay.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_file).
-behaviour(gen_server).
-include("couch_db.hrl").
%% Files are carved into fixed-size blocks; every block starts with a
%% one-byte prefix (0 = data, 1 = header) so headers can be found by
%% scanning block boundaries.
-define(SIZE_BLOCK, 4096).
%% Per-file gen_server state.
-record(file, {
    fd,                  %% raw file descriptor
    tail_append_begin=0  % 09 UPGRADE CODE: offset where new-format
                         % (block-prefixed) data begins
    }).
-export([open/1, open/2, close/1, bytes/1, sync/1, append_binary/2,old_pread/3]).
-export([append_term/2, pread_term/2, pread_iolist/2, write_header/2]).
-export([pread_binary/2, read_header/1, truncate/2, upgrade_old_header/2]).
-export([init/1, terminate/2, handle_call/3, handle_cast/2, code_change/3, handle_info/2]).
%%----------------------------------------------------------------------
%% Args:    Valid Options are [create] and [create,overwrite].
%%          Files are opened in read/write mode.
%% Returns: On success, {ok, Fd}
%%  or {error, Reason} if the file could not be opened.
%%----------------------------------------------------------------------
open(Filepath) ->
    open(Filepath, []).
%% Open Filepath under a dedicated couch_file gen_server. When init
%% fails, the server returns `ignore' and posts the real error to this
%% process tagged with a unique Ref (see init_status_error/3); we pick
%% that message up here, reaping the 'EXIT' if we are trapping exits.
open(Filepath, Options) ->
    case gen_server:start_link(couch_file,
            {Filepath, Options, self(), Ref = make_ref()}, []) of
    {ok, Fd} ->
        {ok, Fd};
    ignore ->
        % get the error
        receive
        {Ref, Pid, Error} ->
            case process_info(self(), trap_exit) of
            {trap_exit, true} -> receive {'EXIT', Pid, _} -> ok end;
            {trap_exit, false} -> ok
            end,
            Error
        end;
    Error ->
        Error
    end.
%%----------------------------------------------------------------------
%% Purpose: To append an Erlang term to the end of the file.
%% Args:    Erlang term to serialize and append to the file.
%% Returns: {ok, Pos} where Pos is the file offset to the beginning of
%%  the serialized term. Use pread_term to read the term back.
%%  or {error, Reason}.
%%----------------------------------------------------------------------
append_term(Fd, Term) ->
    append_binary(Fd, term_to_binary(Term)).

%%----------------------------------------------------------------------
%% Purpose: To append an Erlang binary to the end of the file.
%% Args:    Erlang binary (or iolist) to append to the file; it is
%%  stored with a 4-byte big-endian length prefix.
%% Returns: {ok, Pos} where Pos is the file offset to the beginning of
%%  the serialized term. Use pread_term to read the term back.
%%  or {error, Reason}.
%%----------------------------------------------------------------------
append_binary(Fd, Bin) ->
    Size = iolist_size(Bin),
    gen_server:call(Fd, {append_bin, [<<Size:32/integer>>, Bin]}, infinity).

%%----------------------------------------------------------------------
%% Purpose: Reads a term from a file that was written with append_term
%% Args:    Pos, the offset into the file where the term is serialized.
%% Returns: {ok, Term}
%%  or {error, Reason}.
%%----------------------------------------------------------------------
pread_term(Fd, Pos) ->
    {ok, Bin} = pread_binary(Fd, Pos),
    {ok, binary_to_term(Bin)}.

%%----------------------------------------------------------------------
%% Purpose: Reads a binary from a file that was written with append_binary
%% Args:    Pos, the offset into the file where the term is serialized.
%% Returns: {ok, Bin}
%%  or {error, Reason}.
%%----------------------------------------------------------------------
pread_binary(Fd, Pos) ->
    {ok, L} = pread_iolist(Fd, Pos),
    {ok, iolist_to_binary(L)}.
%% Read the iolist stored at Pos: a 4-byte big-endian length prefix
%% followed by that many payload bytes (block prefixes stripped).
pread_iolist(Fd, Pos) ->
    {ok, LenIoList, PayloadPos} = read_raw_iolist(Fd, Pos, 4),
    <<PayloadLen:32/integer>> = iolist_to_binary(LenIoList),
    {ok, Payload, _NextPos} = read_raw_iolist(Fd, PayloadPos, PayloadLen),
    {ok, Payload}.
%% Read Len logical bytes starting at Pos, accounting for the 1-byte
%% block prefixes interleaved every ?SIZE_BLOCK bytes. Returns
%% {ok, IoList, NextPos} where NextPos is the file offset just past
%% what was read.
read_raw_iolist(Fd, Pos, Len) ->
    BlockOffset = Pos rem ?SIZE_BLOCK,
    %% raw read length includes any block prefixes within the span
    TotalBytes = calculate_total_read_len(BlockOffset, Len),
    {ok, <<RawBin:TotalBytes/binary>>, HasPrefixes} = gen_server:call(Fd, {pread, Pos, TotalBytes}, infinity),
    if HasPrefixes ->
        {ok, remove_block_prefixes(BlockOffset, RawBin), Pos + TotalBytes};
    true ->
        % 09 UPGRADE CODE: data written before the upgrade point has
        % no block prefixes, so just take the first Len bytes
        <<ReturnBin:Len/binary, _/binary>> = RawBin,
        {ok, [ReturnBin], Pos + Len}
    end.
%%----------------------------------------------------------------------
%% Purpose: The length of a file, in bytes.
%% Returns: {ok, Bytes}
%%  or {error, Reason}.
%%----------------------------------------------------------------------
% length in bytes
bytes(Fd) ->
    gen_server:call(Fd, bytes, infinity).

%%----------------------------------------------------------------------
%% Purpose: Truncate a file to the number of bytes.
%% Returns: ok
%%  or {error, Reason}.
%%----------------------------------------------------------------------
truncate(Fd, Pos) ->
    gen_server:call(Fd, {truncate, Pos}, infinity).

%%----------------------------------------------------------------------
%% Purpose: Ensure all bytes written to the file are flushed to disk.
%% Returns: ok
%%  or {error, Reason}.
%%----------------------------------------------------------------------
sync(Fd) ->
    gen_server:call(Fd, sync, infinity).

%%----------------------------------------------------------------------
%% Purpose: Close the file. Is performed asynchronously.
%% Returns: ok
%%----------------------------------------------------------------------
close(Fd) ->
    Result = gen_server:cast(Fd, close),
    %% unlink so the caller does not receive the server's exit signal
    catch unlink(Fd),
    Result.

% 09 UPGRADE CODE: read Len raw bytes at Pos; asserts the region
% predates the upgrade point (no block prefixes).
old_pread(Fd, Pos, Len) ->
    {ok, <<RawBin:Len/binary>>, false} = gen_server:call(Fd, {pread, Pos, Len}, infinity),
    {ok, RawBin}.

% 09 UPGRADE CODE: convert an old doubly-written header (tagged with
% Sig) to the new block-prefixed header format.
upgrade_old_header(Fd, Sig) ->
    gen_server:call(Fd, {upgrade_old_header, Sig}, infinity).
%% Locate and decode the most recent valid header in the file.
%% Returns {ok, Term} or no_valid_header (passed through from the
%% server's find_header scan).
read_header(Fd) ->
    case gen_server:call(Fd, find_header, infinity) of
    {ok, Bin} ->
        {ok, binary_to_term(Bin)};
    Else ->
        Else
    end.
%% Serialize Data and append it as a header record (md5 checksum
%% followed by the term bytes); the server aligns it to a block
%% boundary and marks the block with a 1-byte header prefix.
write_header(Fd, Data) ->
    Bin = term_to_binary(Data),
    Md5 = erlang:md5(Bin),
    % now we assemble the final header binary and write to disk
    FinalBin = <<Md5/binary, Bin/binary>>,
    gen_server:call(Fd, {write_header, FinalBin}, infinity).
%% Report an init failure to the process that called open/2 (which is
%% blocked in gen_server:start_link), then make start_link return
%% `ignore'. The caller matches on Ref to receive the real reason.
init_status_error(ReturnPid, Ref, Error) ->
    StatusMsg = {Ref, self(), Error},
    ReturnPid ! StatusMsg,
    ignore.
% server functions

%% gen_server init. With `create': open (creating if needed), and if
%% the file already has data either truncate it (`overwrite') or fail
%% with file_exists. Without `create': only open files that already
%% exist (probed with a read-only open first).
init({Filepath, Options, ReturnPid, Ref}) ->
    case lists:member(create, Options) of
    true ->
        filelib:ensure_dir(Filepath),
        case file:open(Filepath, [read, write, raw, binary]) of
        {ok, Fd} ->
            {ok, Length} = file:position(Fd, eof),
            case Length > 0 of
            true ->
                % this means the file already exists and has data.
                % FYI: We don't differentiate between empty files and non-existant
                % files here.
                case lists:member(overwrite, Options) of
                true ->
                    {ok, 0} = file:position(Fd, 0),
                    ok = file:truncate(Fd),
                    ok = file:sync(Fd),
                    couch_stats_collector:track_process_count(
                            {couchdb, open_os_files}),
                    {ok, #file{fd=Fd}};
                false ->
                    ok = file:close(Fd),
                    init_status_error(ReturnPid, Ref, file_exists)
                end;
            false ->
                couch_stats_collector:track_process_count(
                        {couchdb, open_os_files}),
                {ok, #file{fd=Fd}}
            end;
        Error ->
            init_status_error(ReturnPid, Ref, Error)
        end;
    false ->
        % open in read mode first, so we don't create the file if it doesn't exist.
        case file:open(Filepath, [read, raw]) of
        {ok, Fd_Read} ->
            {ok, Fd} = file:open(Filepath, [read, write, raw, binary]),
            ok = file:close(Fd_Read),
            couch_stats_collector:track_process_count({couchdb, open_os_files}),
            {ok, #file{fd=Fd}};
        Error ->
            init_status_error(ReturnPid, Ref, Error)
        end
    end.
%% The raw fd is closed automatically when this process exits.
terminate(_Reason, _Fd) ->
    ok.
%% pread: raw read; the boolean tells the caller whether the region
%% lies past the upgrade point and therefore carries block prefixes.
handle_call({pread, Pos, Bytes}, _From, #file{fd=Fd,tail_append_begin=TailAppendBegin}=File) ->
    {ok, Bin} = file:pread(Fd, Pos, Bytes),
    {reply, {ok, Bin, Pos >= TailAppendBegin}, File};
handle_call(bytes, _From, #file{fd=Fd}=File) ->
    {reply, file:position(Fd, eof), File};
handle_call(sync, _From, #file{fd=Fd}=File) ->
    {reply, file:sync(Fd), File};
handle_call({truncate, Pos}, _From, #file{fd=Fd}=File) ->
    {ok, Pos} = file:position(Fd, Pos),
    {reply, file:truncate(Fd), File};
%% append_bin: write at eof, inserting a 0 block-prefix byte at each
%% block boundary the data crosses.
handle_call({append_bin, Bin}, _From, #file{fd=Fd}=File) ->
    {ok, Pos} = file:position(Fd, eof),
    Blocks = make_blocks(Pos rem ?SIZE_BLOCK, Bin),
    case file:pwrite(Fd, Pos, Blocks) of
    ok ->
        {reply, {ok, Pos}, File};
    Error ->
        {reply, Error, File}
    end;
%% write_header: pad to the next block boundary, then write a block
%% starting with prefix byte 1 and a 4-byte length so find_header can
%% locate it by scanning block boundaries.
handle_call({write_header, Bin}, _From, #file{fd=Fd}=File) ->
    {ok, Pos} = file:position(Fd, eof),
    BinSize = size(Bin),
    case Pos rem ?SIZE_BLOCK of
    0 ->
        Padding = <<>>;
    BlockOffset ->
        Padding = <<0:(8*(?SIZE_BLOCK-BlockOffset))>>
    end,
    FinalBin = [Padding, <<1, BinSize:32/integer>> | make_blocks(1, [Bin])],
    {reply, file:pwrite(Fd, Pos, FinalBin), File};
% 09 UPGRADE CODE: rewrite an old doubly-written header in the new
% format, remember where the new-format data begins, and stamp the old
% header area with an <<"upgraded">> marker holding that offset.
handle_call({upgrade_old_header, Prefix}, _From, #file{fd=Fd}=File) ->
    case (catch read_old_header(Fd, Prefix)) of
    {ok, Header} ->
        {ok, TailAppendBegin} = file:position(Fd, eof),
        Bin = term_to_binary(Header),
        Md5 = erlang:md5(Bin),
        % now we assemble the final header binary and write to disk
        FinalBin = <<Md5/binary, Bin/binary>>,
        {reply, ok, _} = handle_call({write_header, FinalBin}, ok, File),
        ok = write_old_header(Fd, <<"upgraded">>, TailAppendBegin),
        {reply, ok, File#file{tail_append_begin=TailAppendBegin}};
    _Error ->
        % no old header; maybe the file was upgraded on a previous run
        case (catch read_old_header(Fd, <<"upgraded">>)) of
        {ok, TailAppendBegin} ->
            {reply, ok, File#file{tail_append_begin = TailAppendBegin}};
        _Error2 ->
            {reply, ok, File}
        end
    end;
handle_call(find_header, _From, #file{fd=Fd}=File) ->
    {ok, Pos} = file:position(Fd, eof),
    {reply, find_header(Fd, Pos div ?SIZE_BLOCK), File}.
% 09 UPGRADE CODE
-define(HEADER_SIZE, 2048). % size of each segment of the doubly written header
% 09 UPGRADE CODE
% 09 UPGRADE CODE
%% Read the old-style doubly-written header: two ?HEADER_SIZE copies
%% at the start of the file. Either valid copy suffices; differing
%% valid copies or a single corrupt copy are logged. A header that was
%% too large to fit inline is a {pointer_to_header_data, Ptr} stub
%% that we chase with pread_term.
read_old_header(Fd, Prefix) ->
    {ok, Bin} = file:pread(Fd, 0, 2*(?HEADER_SIZE)),
    <<Bin1:(?HEADER_SIZE)/binary, Bin2:(?HEADER_SIZE)/binary>> = Bin,
    Result =
    % read the first header
    case extract_header(Prefix, Bin1) of
    {ok, Header1} ->
        case extract_header(Prefix, Bin2) of
        {ok, Header2} ->
            case Header1 == Header2 of
            true ->
                % Everything is completely normal!
                {ok, Header1};
            false ->
                % To get here we must have two different header versions with signatures intact.
                % It's weird but possible (a commit failure right at the 2k boundary). Log it and take the first.
                ?LOG_INFO("Header version differences.~nPrimary Header: ~p~nSecondary Header: ~p", [Header1, Header2]),
                {ok, Header1}
            end;
        Error ->
            % error reading second header. It's ok, but log it.
            ?LOG_INFO("Secondary header corruption (error: ~p). Using primary header.", [Error]),
            {ok, Header1}
        end;
    Error ->
        % error reading primary header
        case extract_header(Prefix, Bin2) of
        {ok, Header2} ->
            % log corrupt primary header. It's ok since the secondary is still good.
            ?LOG_INFO("Primary header corruption (error: ~p). Using secondary header.", [Error]),
            {ok, Header2};
        _ ->
            % error reading secondary header too
            % return the error, no need to log anything as the caller will be responsible for dealing with the error.
            Error
        end
    end,
    case Result of
    {ok, {pointer_to_header_data, Ptr}} ->
        pread_term(Fd, Ptr);
    _ ->
        Result
    end.
% 09 UPGRADE CODE
%% Validate and decode one ?HEADER_SIZE segment of an old-style
%% doubly-written header. Layout: Prefix, then the (zero-padded)
%% term_to_binary payload, then a 16-byte md5 of the payload.
%% Returns {ok, Header} | header_corrupt | unknown_header_type.
%% (size/1 replaced with the explicit byte_size/1 BIF.)
extract_header(Prefix, Bin) ->
    SizeOfPrefix = byte_size(Prefix),
    SizeOfTermBin = ?HEADER_SIZE -
                    SizeOfPrefix -
                    16,     % md5 sig
    <<HeaderPrefix:SizeOfPrefix/binary, TermBin:SizeOfTermBin/binary, Sig:16/binary>> = Bin,
    % check the header prefix
    case HeaderPrefix of
    Prefix ->
        % check the integrity signature
        case erlang:md5(TermBin) == Sig of
        true ->
            Header = binary_to_term(TermBin),
            {ok, Header};
        false ->
            header_corrupt
        end;
    _ ->
        unknown_header_type
    end.
% 09 UPGRADE CODE
%% Write an old-style doubly-written header: Prefix, payload, zero
%% padding, and a 16-byte md5, duplicated into both ?HEADER_SIZE
%% slots at the start of the file. A payload too large to fit inline
%% is appended to the file and replaced with a pointer stub.
write_old_header(Fd, Prefix, Data) ->
    TermBin = term_to_binary(Data),
    % the size of all the bytes written to the header, including the md5 signature (16 bytes)
    FilledSize = size(Prefix) + size(TermBin) + 16,
    {TermBin2, FilledSize2} =
    case FilledSize > ?HEADER_SIZE of
    true ->
        % too big! write the real data elsewhere, store a pointer here
        {ok, Pos} = append_binary(Fd, TermBin),
        PtrBin = term_to_binary({pointer_to_header_data, Pos}),
        {PtrBin, size(Prefix) + size(PtrBin) + 16};
    false ->
        {TermBin, FilledSize}
    end,
    ok = file:sync(Fd),
    % pad out the header with zeros, then take the md5 hash
    PadZeros = <<0:(8*(?HEADER_SIZE - FilledSize2))>>,
    Sig = erlang:md5([TermBin2, PadZeros]),
    % now we assemble the final header binary and write to disk
    WriteBin = <<Prefix/binary, TermBin2/binary, PadZeros/binary, Sig/binary>>,
    ?HEADER_SIZE = size(WriteBin), % sanity check
    DblWriteBin = [WriteBin, WriteBin],
    ok = file:pwrite(Fd, 0, DblWriteBin),
    ok = file:sync(Fd).
%% Async close request (see close/1); stopping the server closes the fd.
handle_cast(close, Fd) ->
    {stop,normal,Fd}.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%% Propagate exits from linked processes (e.g. the opener) as our own
%% stop reason.
handle_info({'EXIT', _, Reason}, Fd) ->
    {stop, Reason, Fd}.
%% Scan backwards from the last block boundary looking for a valid
%% header; returns {ok, HeaderBin} or no_valid_header once the whole
%% file has been scanned. (Modernized: old-style `catch' replaced with
%% `try'; load_header crashes on any block that is not a valid header,
%% and we treat any failure as "keep scanning".)
find_header(_Fd, -1) ->
    no_valid_header;
find_header(Fd, Block) ->
    try
        load_header(Fd, Block)
    catch
        _:_ ->
            find_header(Fd, Block - 1)
    end.
%% Load the header stored at block boundary `Block'. Each match here
%% is an assertion: wrong prefix byte, short read, or md5 mismatch
%% crashes, which find_header/2 interprets as "not a header".
load_header(Fd, Block) ->
    %% a header block starts with prefix byte 1
    {ok, <<1>>} = file:pread(Fd, Block*?SIZE_BLOCK, 1),
    {ok, <<HeaderLen:32/integer>>} = file:pread(Fd, (Block*?SIZE_BLOCK) + 1, 4),
    TotalBytes = calculate_total_read_len(1, HeaderLen),
    {ok, <<RawBin:TotalBytes/binary>>} =
        file:pread(Fd, (Block*?SIZE_BLOCK) + 5, TotalBytes),
    <<Md5Sig:16/binary, HeaderBin/binary>> =
        iolist_to_binary(remove_block_prefixes(1, RawBin)),
    %% integrity check: stored md5 must match the payload
    Md5Sig = erlang:md5(HeaderBin),
    {ok, HeaderBin}.
%% How many raw bytes must be read to obtain FinalLen logical bytes
%% starting at BlockOffset within a block, accounting for the 1-byte
%% prefix at the start of every subsequent block.
calculate_total_read_len(0, FinalLen) ->
    %% at a block boundary: one prefix byte precedes the data
    calculate_total_read_len(1, FinalLen) + 1;
calculate_total_read_len(BlockOffset, FinalLen) ->
    case ?SIZE_BLOCK - BlockOffset of
    BlockLeft when BlockLeft >= FinalLen ->
        %% fits within the current block: no extra prefixes
        FinalLen;
    BlockLeft ->
        %% one extra byte per additional (?SIZE_BLOCK - 1)-byte block
        %% of payload, rounding up for a partial final block
        FinalLen + ((FinalLen - BlockLeft) div (?SIZE_BLOCK -1)) +
            if ((FinalLen - BlockLeft) rem (?SIZE_BLOCK -1)) == 0 -> 0;
                true -> 1 end
    end.
%% Strip the 1-byte prefix found at the start of each block from raw
%% file data that begins at BlockOffset within a block. Returns the
%% payload as an iolist of binary chunks.
remove_block_prefixes(_BlockOffset, <<>>) ->
    [];
remove_block_prefixes(0, <<_BlockPrefix,Rest/binary>>) ->
    %% exactly at a block boundary: drop the prefix byte
    remove_block_prefixes(1, Rest);
remove_block_prefixes(BlockOffset, Bin) ->
    BlockBytesAvailable = ?SIZE_BLOCK - BlockOffset,
    case size(Bin) of
    Size when Size > BlockBytesAvailable ->
        %% take the rest of this block, then continue at the boundary
        <<DataBlock:BlockBytesAvailable/binary,Rest/binary>> = Bin,
        [DataBlock | remove_block_prefixes(0, Rest)];
    _Size ->
        [Bin]
    end.
%% Prepare an iolist for writing starting at BlockOffset within a
%% block: insert a <<0>> prefix byte at every block boundary the data
%% crosses. Inverse of remove_block_prefixes/2.
make_blocks(_BlockOffset, []) ->
    [];
make_blocks(0, IoList) ->
    %% at a boundary: emit the data-block prefix byte first
    [<<0>> | make_blocks(1, IoList)];
make_blocks(BlockOffset, IoList) ->
    case split_iolist(IoList, (?SIZE_BLOCK - BlockOffset), []) of
    {Begin, End} ->
        %% filled the current block; continue at the next boundary
        [Begin | make_blocks(0, End)];
    _SplitRemaining ->
        %% data fits in the current block as-is
        IoList
    end.
%% Split an iolist at SplitAt bytes. Returns {Begin, End} where Begin
%% holds the first SplitAt bytes (in order) and End is an iolist of
%% the remainder. If the iolist holds fewer than SplitAt bytes,
%% returns the number of bytes still needed (an integer) so callers
%% (make_blocks/2) can tell the split did not complete.
%% (Cleanup: `SplitAt - (SplitAt - SplitRemaining)' simplified to
%% `SplitRemaining'; guard `size/1' replaced with `byte_size/1'.)
split_iolist(List, 0, BeginAcc) ->
    {lists:reverse(BeginAcc), List};
split_iolist([], SplitAt, _BeginAcc) ->
    %% ran out of data; report how many bytes were still wanted
    SplitAt;
split_iolist([<<Bin/binary>> | Rest], SplitAt, BeginAcc) when SplitAt > byte_size(Bin) ->
    split_iolist(Rest, SplitAt - byte_size(Bin), [Bin | BeginAcc]);
split_iolist([<<Bin/binary>> | Rest], SplitAt, BeginAcc) ->
    %% the split point falls inside this binary
    <<Begin:SplitAt/binary,End/binary>> = Bin,
    split_iolist([End | Rest], 0, [Begin | BeginAcc]);
split_iolist([Sublist| Rest], SplitAt, BeginAcc) when is_list(Sublist) ->
    case split_iolist(Sublist, SplitAt, BeginAcc) of
    {Begin, End} ->
        {Begin, [End | Rest]};
    SplitRemaining ->
        %% Sublist was consumed whole; SplitRemaining bytes are still
        %% needed from Rest
        split_iolist(Rest, SplitRemaining, [Sublist | BeginAcc])
    end;
split_iolist([Byte | Rest], SplitAt, BeginAcc) when is_integer(Byte) ->
    split_iolist(Rest, SplitAt - 1, [Byte | BeginAcc]).
%% Copyright 2019, JobTeaser
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(totp).
-export([generate/5, generate/3, generate/2, generate_with_time_period/3,
time_period/3,
current_timestamp/0]).
-export_type([timestamp/0, time_period/0]).
-type timestamp() :: integer().
%% A UNIX timestamp in seconds.
-type time_period() :: integer().
%% A period of time identified by the number of time steps that separate it
%% from an initial time value.
%% @doc Generate a time-based one-time password.
%%
%% `InitialTime' and `TimeStep' are the algorithm parameters designated as
%% `T0' and `X' in <a
%% href="https://tools.ietf.org/html/rfc6238#section-4.1">RFC 6238 4.1</a>.
%%
%% We only support the HMAC-SHA1 key derivation algorithm: it is the only
%% mandatory one, it is the one everyone uses, and RFC 4226 (HOTP) only
%% specifies truncation for HMAC-SHA1.
%%
%% See <a href="https://tools.ietf.org/html/rfc6238#section-4.2">RFC 6238
%% 4.2</a>.
-spec generate(Key, InitialTime, TimeStep, Time, NbDigits)
    -> Password when
    Key :: binary(),
    InitialTime :: timestamp(),
    TimeStep :: pos_integer(),
    Time :: timestamp(),
    NbDigits :: pos_integer(),
    Password :: non_neg_integer().
generate(Key, InitialTime, TimeStep, Time, NbDigits) ->
    TimePeriod = time_period(InitialTime, TimeStep, Time),
    generate_with_time_period(Key, TimePeriod, NbDigits).

%% @doc Generate a time-based one-time password using the default parameters
%% specified in <a href="https://tools.ietf.org/html/rfc6238#section-4.1">RFC
%% 6238 4.1</a> (T0 = 0, time step = 30 seconds).
%%
%% @see generate/5
-spec generate(Key, Time, NbDigits) -> Password when
    Key :: binary(),
    Time :: timestamp(),
    NbDigits :: pos_integer(),
    Password :: non_neg_integer().
generate(Key, Time, NbDigits) ->
    generate(Key, 0, 30, Time, NbDigits).

%% @doc Generate a time-based one-time password using the default parameters
%% specified in <a href="https://tools.ietf.org/html/rfc6238#section-4.1">RFC
%% 6238 4.1</a> and the current timestamp.
%%
%% @see generate/3
-spec generate(Key, NbDigits) -> Password when
    Key :: binary(),
    NbDigits :: pos_integer(),
    Password :: non_neg_integer().
generate(Key, NbDigits) ->
    generate(Key, current_timestamp(), NbDigits).

%% @doc Generate a time-based one-time password using the default parameters
%% specified in <a href="https://tools.ietf.org/html/rfc6238#section-4.1">RFC
%% 6238 4.1</a>. The only difference with `generate/5' is that this function
%% uses a time period directly instead of a timestamp. TOTP is HOTP with
%% the time period as the moving factor.
%%
%% @see generate/5
-spec generate_with_time_period(Key, TimePeriod, NbDigits)
    -> Password when
    Key :: binary(),
    TimePeriod :: time_period(),
    NbDigits :: pos_integer(),
    Password :: non_neg_integer().
generate_with_time_period(Key, TimePeriod, NbDigits) ->
    hotp:generate(Key, TimePeriod, NbDigits).
%% @doc Return the time period a timestamp is in, i.e. the number of
%% whole time steps between `InitialTime' and `Time'.
%%
%% The result is negative when `Time' precedes `InitialTime';
%% floating-point floor (rather than integer `div') is used so the
%% value still rounds toward negative infinity in that case. The spec
%% previously claimed `non_neg_integer()', which was wrong for that
%% case; `TimeStep' is a step size, not a period, hence
%% `pos_integer()'.
-spec time_period(InitialTime, TimeStep, Time) -> time_period() when
    InitialTime :: timestamp(),
    TimeStep :: pos_integer(),
    Time :: timestamp().
time_period(InitialTime, TimeStep, Time) ->
    trunc(math:floor((Time - InitialTime) / TimeStep)).
%% @doc Return the current UNIX timestamp in seconds.
%%
%% Provided both as a convenience, and to make sure the right clock is
%% used: RFC 6238 clearly specifies that the algorithm is based on
%% UNIX timestamps (which has its importance, for example for leap
%% seconds), so we read the OS system time.
-spec current_timestamp() -> timestamp().
current_timestamp() ->
    os:system_time(second).
-module(shapes).
-export([area/1, perimeter/1, bits/1, bits_tail/1, test/0]).
% Here's the model of the Data Structures I've used to describe triangles, circles and rectangles
% I think the triangle might be a bit verbose, having each coordinate described as a separate, nested tuple
% { triangle, { {AX, AY}, {BX, BY}, {CX, CY} } }
% { circle, { RADIUS } }
% { rectangle, { HEIGHT, WIDTH } }
%%%% UTILITIES %%%%
%% Euclidean distance between two {X, Y} points; always a float.
point_distance({AX, AY}, {BX, BY}) ->
    DeltaX = BX - AX,
    DeltaY = BY - AY,
    math:sqrt(math:pow(DeltaX, 2) + math:pow(DeltaY, 2)).
%%%% AREA %%%%

%% Area of a shape. The triangle clause uses Heron's formula from the
%% three side lengths. NOTE: the test suite asserts exact float
%% results, so the operation order here must not change.
area({ triangle, { {AX, AY}, {BX, BY}, {CX, CY} } }) ->
    A = point_distance( { AX, AY }, { BX, BY } ),
    B = point_distance( { BX, BY }, { CX, CY } ),
    C = point_distance( { CX, CY }, { AX, AY } ),
    S = (A + B + C) / 2, % calculate semi-perimeter
    math:pow( S * (( S - A ) * ( S - B ) * ( S - C )), 0.5);
area({ circle, { RADIUS }}) ->
    math:pi() * RADIUS * RADIUS;
area({ rectangle, { HEIGHT, WIDTH } }) ->
    HEIGHT * WIDTH.
%%%% PERIMETER %%%%

%% Perimeter (circumference) of a shape; the triangle clause sums the
%% three side lengths derived from its vertex coordinates.
perimeter({ circle, { RADIUS } }) ->
    2 * math:pi() * RADIUS;
perimeter({ rectangle, { HEIGHT, WIDTH } }) ->
    (HEIGHT * 2) + (WIDTH * 2);
perimeter({ triangle, { {AX, AY}, {BX, BY}, {CX, CY} } }) ->
    A = point_distance( { AX, AY }, { BX, BY } ),
    B = point_distance( { BX, BY }, { CX, CY } ),
    C = point_distance( { CX, CY }, { AX, AY } ),
    A + B + C.
%%%% ENCLOSE %%%%
% Smallest axis-aligned bounding rectangle for a shape.
% NOTE(review): the data model comment at the top of the file describes a
% rectangle as {HEIGHT, WIDTH}, but the triangle clause below builds
% {rectangle, {WIDTH, HEIGHT}} — and test_enclose/0 pins that swapped
% order. Confirm which field order is intended before relying on it.
enclose({ rectangle, { HEIGHT, WIDTH } }) ->
    { rectangle, { HEIGHT, WIDTH } };
% A circle of radius R fits in a 2R x 2R square (circles carry no position).
enclose({ circle, { RADIUS } }) ->
    { rectangle, { RADIUS * 2, RADIUS * 2 } };
% Bounding box of a triangle from the extreme X and Y coordinates.
enclose({ triangle, { {AX, AY}, {BX, BY}, {CX, CY} } }) ->
    WIDTH = lists:max([AX, BX, CX]) - lists:min([AX, BX, CX]),
    HEIGHT = lists:max([AY, BY, CY]) - lists:min([AY, BY, CY]),
    { rectangle, { WIDTH, HEIGHT } }.
%%%% BITS %%%%

% Population count (number of 1 bits) of a non-negative integer,
% written with body recursion over the binary digits.
bits(0) ->
    0;
bits(N) when N > 0 ->
    (N band 1) + bits(N bsr 1).
% Population count of a non-negative integer, tail-recursively.
%
% Fix: the recursive clause now requires N > 0, mirroring the guard on
% bits/1. Previously a negative argument was silently accepted and
% produced a meaningless (negative) result, because `div` truncates
% toward zero; it now fails with function_clause, exactly like bits/1.
bits_tail(N) ->
    bits_tail(N, 0).

% Acc accumulates the count of 1 bits seen so far.
bits_tail(0, Acc) ->
    Acc;
bits_tail(N, Acc) when N > 0 ->
    bits_tail(N div 2, Acc + (N rem 2)).
%%%% TESTS %%%%
% Perimeter regression test. Each line is an assertive match: a mismatch
% crashes with badmatch (failure); returning ok means all passed.
% Float expectations pin the exact IEEE results of the current formulas.
test_perimeter() ->
    30 = perimeter({rectangle, {5, 10}}),
    31.41592653589793 = perimeter({circle, {5}}),
    21.05997848692523 = perimeter({triangle, {{ 1, 1 }, { 5, 5 }, { 10, 1 }}}),
    ok.
% Area regression test; float expectations pin the exact IEEE results of
% the current formulas (Heron's formula for the triangle).
test_area() ->
    3.141592653589793 = area({circle, { 1 } }),
    200 = area({rectangle, { 100, 2 }}),
    17.999999999999996 = area({triangle, {{ 1, 1 }, { 5, 5 }, { 10, 1 }}}),
    ok.
% Bounding-box regression test.
% Fix: now ends with an explicit `ok`, consistent with test_area/0 and
% test_perimeter/0, instead of leaking the last match result as the
% return value.
test_enclose() ->
    { rectangle, { 10, 10 } } = enclose({circle, { 5 }}),
    { rectangle, { 20, 20 } } = enclose({rectangle, { 20, 20 }}),
    { rectangle, { 20, 10 } } = enclose({triangle, { { 0, 0 }, { 10, 10 }, { 20, 0 } } }),
    ok.
% Popcount regression test for both implementations.
% Fix: the expectation for bits_tail(8) was 4, but the population count
% of 8 (2#1000) is 1 — exactly what the bits(8) line asserts — so this
% test always crashed with badmatch. Also ends with an explicit `ok`
% for consistency with the other test helpers.
test_bits() ->
    3 = bits(7),
    1 = bits(8),
    3 = bits_tail(7),
    1 = bits_tail(8),
    ok.
% Exported entry point: runs every test helper in turn. Crashes on the
% first failing assertion; returns ok only when all assertions pass.
test() ->
    test_area(),
    test_perimeter(),
    test_enclose(),
    test_bits(),
    ok.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2012 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(verify_down).
-behavior(riak_test).
-export([confirm/0]).
-include_lib("eunit/include/eunit.hrl").
%% Riak cluster-membership integration test.
%%
%% Scenario: with one cluster member stopped, a newly joining node must
%% remain in the `joining' state until the stopped node is explicitly
%% marked down; once it is, the join completes (`valid') and the downed
%% node can later restart and rejoin. Returns `pass' on success; any
%% failed assertion crashes the test.
confirm() ->
    Nodes = rt:deploy_nodes(3),
    [Node1, Node2, Node3] = Nodes,
    %% Join node2 to node1 and wait for cluster convergence
    lager:info("Join ~p to ~p", [Node2, Node1]),
    rt:join(Node2, Node1),
    ?assertEqual(ok, rt:wait_until_nodes_ready([Node1, Node2])),
    ?assertEqual(ok, rt:wait_until_no_pending_changes([Node1, Node2])),
    %% Shutdown node2
    lager:info("Stopping ~p", [Node2]),
    rt:stop(Node2),
    ?assertEqual(ok, rt:wait_until_unpingable(Node2)),
    Remaining = Nodes -- [Node2],
    %% Join node3 to node1
    lager:info("Join ~p to ~p", [Node3, Node1]),
    rt:join(Node3, Node1),
    ?assertEqual(ok, rt:wait_until_all_members(Remaining, [Node3])),
    %% Ensure node3 remains in the joining state: the join cannot commit
    %% while a member (node2) is unreachable and not marked down.
    lager:info("Ensure ~p remains in the joining state", [Node3]),
    [?assertEqual(joining, rt:status_of_according_to(Node3, Node)) || Node <- Remaining],
    %% Mark node2 as down and wait for ring convergence
    lager:info("Mark ~p as down", [Node2]),
    rt:down(Node1, Node2),
    ?assertEqual(ok, rt:wait_until_ring_converged(Remaining)),
    [?assertEqual(down, rt:status_of_according_to(Node2, Node)) || Node <- Remaining],
    %% Ensure node3 is now valid: marking node2 down unblocks the join.
    [?assertEqual(valid, rt:status_of_according_to(Node3, Node)) || Node <- Remaining],
    %% Restart node2 and wait for ring convergence
    lager:info("Restart ~p and wait for ring convergence", [Node2]),
    rt:start(Node2),
    ?assertEqual(ok, rt:wait_until_nodes_ready([Node2])),
    ?assertEqual(ok, rt:wait_until_ring_converged(Nodes)),
    %% Verify that all three nodes are ready
    lager:info("Ensure all nodes are ready"),
    ?assertEqual(ok, rt:wait_until_nodes_ready(Nodes)),
    pass.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(kai_connection_SUITE).
-compile(export_all).
-include("kai.hrl").
-include("kai_test.hrl").
%% Common Test callback: the list of test cases this suite runs.
all() -> [test1, test2].
%% Starts a fake peer-node TCP server on port 11012 for the connection
%% pool under test to connect to. Uses 4-byte length-prefixed packets
%% (matching the pool's framing). Runs in its own process and never
%% returns; the enclosing test's exit tears it down via the link.
test1_api_start() ->
    {ok, ListeningSocket} =
        gen_tcp:listen(11012, [binary, {packet, 4}, {reuseaddr, true}]),
    test1_api_accpet(ListeningSocket).
%% Accept loop: hands each accepted socket to a fresh one-shot responder
%% process. ("accpet" is a long-standing typo; the name is referenced
%% from several functions, so it is documented rather than renamed here.)
%% NOTE(review): the socket is accepted in (default) active mode and only
%% then transferred with controlling_process/2 — a packet arriving in
%% that window would be delivered to this process instead of the
%% responder. Confirm the client never sends before the transfer.
test1_api_accpet(ListeningSocket) ->
    {ok, ApiSocket} = gen_tcp:accept(ListeningSocket),
    Pid = spawn(?MODULE, test1_api_proc, [ApiSocket]),
    gen_tcp:controlling_process(ApiSocket, Pid),
    test1_api_accpet(ListeningSocket).
%% One-shot responder: waits for a single packet on the socket, replies
%% with the term `ok' (term_to_binary encoded), then terminates. The
%% receive has no `after' clause, so it blocks until a packet arrives or
%% the process is killed.
test1_api_proc(ApiSocket) ->
    receive
        {tcp, ApiSocket, _Bin} ->
            gen_tcp:send(ApiSocket, term_to_binary(ok))
    end.
%% Leases a pooled connection to ?NODE2, performs one request/response
%% round-trip against the fake API server, and forwards the decoded
%% reply to Pid. Run in a separate process to exercise leasing from
%% multiple processes.
test1_api_send(Pid) ->
    {ok, Socket} = kai_connection:lease(?NODE2, self()),
    ok = gen_tcp:send(Socket, term_to_binary(ok)),
    Pid ! receive {tcp, Socket, Bin} -> binary_to_term(Bin) end.
%% Common Test metadata for test1 (no special config).
test1() -> [].
%% Exercises basic pool behaviour: lease creates connections, return
%% keeps them pooled for reuse, close removes them, and leasing works
%% from several processes concurrently.
test1(_Conf) ->
    kai_config:start_link([
        {hostname, "localhost"},
        {rpc_port, 11011},
        {max_connections, 32},
        {n, 3},
        {number_of_buckets, 8},
        {number_of_virtual_nodes, 2}
    ]),
    kai_connection:start_link(),
    spawn_link(?MODULE, test1_api_start, []),
    % lease and return
    {ok, Socket1} = kai_connection:lease(?NODE2, self()),
    {ok, Connections} = kai_connection:connections(),
    ?assertEqual(1, length(Connections)),
    % a second lease while the first is in use must open a new socket
    {ok, Socket2} = kai_connection:lease(?NODE2, self()),
    {ok, Connections2} = kai_connection:connections(),
    ?assertEqual(2, length(Connections2)),
    ?assert(Socket1 =/= Socket2),
    % returning keeps the connection pooled (count unchanged) ...
    ok = kai_connection:return(Socket1),
    {ok, Connections3} = kai_connection:connections(),
    ?assertEqual(2, length(Connections3)),
    % ... so the next lease reuses the returned socket
    {ok, Socket3} = kai_connection:lease(?NODE2, self(), [{active, true}, {packet, 4}]),
    ?assert(Socket1 =:= Socket3),
    % close actually removes the connection from the pool
    ok = kai_connection:close(Socket3),
    {ok, Connections4} = kai_connection:connections(),
    ?assertEqual(1, length(Connections4)),
    % send and receive at different processes
    spawn_link(?MODULE, test1_api_send, [self()]),
    ?assert(receive ok -> true; _ -> false end),
    spawn_link(?MODULE, test1_api_send, [self()]),
    ?assert(receive ok -> true; _ -> false end),
    kai_config:stop(),
    kai_connection:stop().
%% Common Test metadata for test2 (no special config).
test2() -> [].
%% Exercises the max_connections limit: the pool may exceed the limit
%% while every connection is leased, but shrinks back to the limit as
%% soon as a connection is returned.
test2(_Conf) ->
    MaxConnections = 32,
    kai_config:start_link([
        {hostname, "localhost"},
        {rpc_port, 11011},
        {max_connections, MaxConnections},
        {n, 3},
        {number_of_buckets, 8},
        {number_of_virtual_nodes, 2}
    ]),
    kai_connection:start_link(),
    spawn_link(?MODULE, test1_api_start, []),
    % lease MaxConnections connections
    Sockets =
        lists:map(
          fun(_X) ->
                  {ok, Socket} = kai_connection:lease(?NODE2, self()),
                  Socket
          end,
          lists:seq(1, MaxConnections + 1)
         ),
    % # of connections can be greater than MaxConnections, because all
    % connections are in use
    {ok, Connections} = kai_connection:connections(),
    ?assertEqual(MaxConnections + 1, length(Connections)),
    % # of connections equals to MaxConnections, because a connection has
    % been returned
    [Socket|_] = Sockets,
    ok = kai_connection:return(Socket),
    {ok, Connections2} = kai_connection:connections(),
    ?assertEqual(MaxConnections, length(Connections2)),
    kai_config:stop(),
    kai_connection:stop().
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% Maintain cluster membership and stability notifications for replications.
% On changes to cluster membership, broadcast events to `replication` gen_event.
% Listeners will get `{cluster, stable}` or `{cluster, unstable}` events.
%
% Cluster stability is defined as "there have been no nodes added or removed in
% last `QuietPeriod` seconds". QuietPeriod value is configurable. To ensure a
% speedier startup, during initialization there is a shorter StartupPeriod
% in effect (also configurable).
%
% This module is also in charge of calculating ownership of replications based
% on where their _replicator db documents shards live.
-module(couch_replicator_clustering).
-behaviour(gen_server).
-behaviour(config_listener).
-behaviour(mem3_cluster).
-export([
start_link/0
]).
-export([
init/1,
terminate/2,
handle_call/3,
handle_info/2,
handle_cast/2,
code_change/3
]).
-export([
owner/2,
is_stable/0,
link_cluster_event_listener/3
]).
% config_listener callbacks
-export([
handle_config_change/5,
handle_config_terminate/3
]).
% mem3_cluster callbacks
-export([
cluster_stable/1,
cluster_unstable/1
]).
-include_lib("couch/include/couch_db.hrl").
-include_lib("mem3/include/mem3.hrl").
-define(DEFAULT_QUIET_PERIOD, 60). % seconds
-define(DEFAULT_START_PERIOD, 5). % seconds
-define(RELISTEN_DELAY, 5000).
-record(state, {
mem3_cluster_pid :: pid(),
cluster_stable :: boolean()
}).
%% Start the singleton clustering server, registered under the module name.
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% Compute which node owns the replication described by {DbName, DocId}.
%%
%% For clustered (sharded) _replicator databases the answer is only
%% meaningful once membership has been quiet for a while, so `unstable'
%% is returned until the cluster settles. Documents in non-sharded
%% (local) databases are always owned by the local node.
-spec owner(Dbname :: binary(), DocId :: binary()) -> node() | unstable.
owner(<<"shards/", _/binary>> = DbName, DocId) ->
    case is_stable() of
        true ->
            owner_int(DbName, DocId);
        false ->
            unstable
    end;
owner(_DbName, _DocId) ->
    node().
%% Ask the clustering server whether membership has been quiet for at
%% least the configured quiet period (synchronous call).
-spec is_stable() -> true | false.
is_stable() ->
    gen_server:call(?MODULE, is_stable).
%% Spawn (and link) a notifier process that invokes Mod:Fun(Args ++ [Event])
%% for every `{cluster, stable | unstable}' event; all other events are
%% ignored. Returns the listener pid.
-spec link_cluster_event_listener(atom(), atom(), list()) -> pid().
link_cluster_event_listener(Mod, Fun, Args)
        when is_atom(Mod), is_atom(Fun), is_list(Args) ->
    OnEvent =
        fun({cluster, _} = Event) ->
                erlang:apply(Mod, Fun, Args ++ [Event]);
           (_Other) ->
                ok
        end,
    {ok, Pid} = couch_replicator_notifier:start_link(OnEvent),
    Pid.
% Mem3 cluster callbacks
%% Invoked by mem3_cluster when membership changed recently: record the
%% unstable state, broadcast `{cluster, unstable}', and zero the gauge.
cluster_unstable(Server) ->
    ok = gen_server:call(Server, set_unstable),
    couch_replicator_notifier:notify({cluster, unstable}),
    couch_stats:update_gauge([couch_replicator, cluster_is_stable], 0),
    couch_log:notice("~s : cluster unstable", [?MODULE]),
    Server.
%% Invoked by mem3_cluster after the quiet period elapsed with no
%% membership change: record stability and broadcast `{cluster, stable}'.
cluster_stable(Server) ->
    ok = gen_server:call(Server, set_stable),
    couch_replicator_notifier:notify({cluster, stable}),
    couch_stats:update_gauge([couch_replicator, cluster_is_stable], 1),
    couch_log:notice("~s : cluster stable", [?MODULE]),
    Server.
% gen_server callbacks
%% Subscribe to config changes, read the (start/quiet) period settings,
%% and start the mem3_cluster monitor that will call cluster_stable/1 and
%% cluster_unstable/1 back on this server. Starts in the unstable state.
init([]) ->
    ok = config:listen_for_changes(?MODULE, nil),
    % abs/1 guards against a negative value configured by mistake
    Period = abs(config:get_integer("replicator", "cluster_quiet_period",
        ?DEFAULT_QUIET_PERIOD)),
    StartPeriod = abs(config:get_integer("replicator", "cluster_start_period",
        ?DEFAULT_START_PERIOD)),
    couch_stats:update_gauge([couch_replicator, cluster_is_stable], 0),
    {ok, Mem3Cluster} = mem3_cluster:start_link(?MODULE, self(), StartPeriod,
        Period),
    {ok, #state{mem3_cluster_pid = Mem3Cluster, cluster_stable = false}}.
terminate(_Reason, _State) ->
    ok.
%% Synchronous API: query or flip the cached stability flag. The flag is
%% only mutated via the mem3_cluster callbacks above.
handle_call(is_stable, _From, #state{cluster_stable = IsStable} = State) ->
    {reply, IsStable, State};
handle_call(set_stable, _From, State) ->
    {reply, ok, State#state{cluster_stable = true}};
handle_call(set_unstable, _From, State) ->
    {reply, ok, State#state{cluster_stable = false}}.
%% Forward a new quiet-period setting to the mem3_cluster monitor.
handle_cast({set_period, Period}, #state{mem3_cluster_pid = Pid} = State) ->
    ok = mem3_cluster:set_period(Pid, Period),
    {noreply, State}.
%% Re-subscribe to config changes after the listener terminated (the
%% message is scheduled by handle_config_terminate/3 below).
handle_info(restart_config_listener, State) ->
    ok = config:listen_for_changes(?MODULE, nil),
    {noreply, State};
%% Fix: drain unexpected messages instead of crashing. Previously any
%% stray message (e.g. a late timer or monitor message) hit no clause
%% and terminated the server with function_clause.
handle_info(Msg, State) ->
    couch_log:notice("~s : unexpected message ~p", [?MODULE, Msg]),
    {noreply, State}.
%% No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions
%% config_listener callback: push a changed quiet period to the server
%% asynchronously; every other config change is ignored.
handle_config_change("replicator", "cluster_quiet_period", V, _, S) ->
    ok = gen_server:cast(?MODULE, {set_period, list_to_integer(V)}),
    {ok, S};
handle_config_change(_, _, _, _, S) ->
    {ok, S}.
%% config_listener callback: on a deliberate stop do nothing; on any
%% other termination schedule a delayed re-subscription message to the
%% registered server. (If the server is not registered at that moment,
%% whereis/1 yields `undefined' and send_after delivers to a name that
%% does not exist, which is silently dropped.)
handle_config_terminate(_, stop, _) -> ok;
handle_config_terminate(_S, _R, _St) ->
    Pid = whereis(?MODULE),
    erlang:send_after(?RELISTEN_DELAY, Pid, restart_config_listener).
%% Pick the owner node for a document: of the nodes hosting the shards
%% covering DocId, consider only those currently reachable (node() plus
%% nodes()), and let mem3 deterministically choose among them.
-spec owner_int(binary(), binary()) -> node().
owner_int(ShardName, DocId) ->
    DbName = mem3:dbname(ShardName),
    Live = [node() | nodes()],
    Shards = mem3:shards(DbName, DocId),
    Nodes = [N || #shard{node=N} <- Shards, lists:member(N, Live)],
    mem3:owner(DbName, DocId, Nodes).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% EUnit fixture: one-time meck setup/teardown around per-test restarts
%% of the clustering server.
replicator_clustering_test_() ->
    {
        setup,
        fun setup_all/0,
        fun teardown_all/1,
        {
            foreach,
            fun setup/0,
            fun teardown/1,
            [
                t_stable_callback(),
                t_unstable_callback()
            ]
        }
    }.
%% The server starts unstable; the cluster_stable callback flips it.
t_stable_callback() ->
    ?_test(begin
        ?assertEqual(false, is_stable()),
        cluster_stable(whereis(?MODULE)),
        ?assertEqual(true, is_stable())
    end).
%% cluster_unstable flips a stable server back to unstable.
t_unstable_callback() ->
    ?_test(begin
        cluster_stable(whereis(?MODULE)),
        ?assertEqual(true, is_stable()),
        cluster_unstable(whereis(?MODULE)),
        ?assertEqual(false, is_stable())
    end).
%% Stub out all external collaborators so init/1 and the callbacks can
%% run without a real cluster.
setup_all() ->
    meck:expect(couch_log, notice, 2, ok),
    meck:expect(config, get, fun(_, _, Default) -> Default end),
    meck:expect(config, listen_for_changes, 2, ok),
    meck:expect(couch_stats, update_gauge, 2, ok),
    meck:expect(couch_replicator_notifier, notify, 1, ok).
teardown_all(_) ->
    meck:unload().
%% Fresh server per test: reset mock call history and (re)start.
setup() ->
    meck:reset([
        config,
        couch_log,
        couch_stats,
        couch_replicator_notifier
    ]),
    stop_clustering_process(),
    {ok, Pid} = start_link(),
    Pid.
teardown(Pid) ->
    stop_clustering_process(Pid).
stop_clustering_process() ->
    stop_clustering_process(whereis(?MODULE)).
stop_clustering_process(undefined) ->
    ok;
%% Kill the server and block until the DOWN arrives so the registered
%% name is guaranteed free before the next start_link/0.
stop_clustering_process(Pid) when is_pid(Pid) ->
    Ref = erlang:monitor(process, Pid),
    unlink(Pid),
    exit(Pid, kill),
    receive {'DOWN', Ref, _, _, _} -> ok end.
-endif. | src/couch_replicator/src/couch_replicator_clustering.erl | 0.719876 | 0.421254 | couch_replicator_clustering.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(vtree_insert).
-include("vtree.hrl").
-include("couch_db.hrl").
-export([insert/2]).
-ifdef(makecheck).
-compile(nowarn_export_all).
-compile(export_all).
-endif.
%% Insert KV-nodes into the vtree, returning the updated tree.
%% An empty insert is a no-op. Into an empty tree (root =:= nil) the
%% first chunk is written directly as the initial tree; any remaining
%% nodes are then inserted in bulks. Into a non-empty tree the nodes are
%% pushed down from the existing root.
-spec insert(Vt :: #vtree{}, Nodes :: [#kv_node{}]) -> #vtree{}.
insert(Vt, []) ->
    Vt;
insert(#vtree{root=nil}=Vt, Nodes) ->
    % If we would do single inserts, the first node that was inserted would
    % have set the original Mbb `MbbO`
    MbbO = (hd(Nodes))#kv_node.key,
    Threshold = Vt#vtree.kv_chunk_threshold,
    case ?ext_size(Nodes) > Threshold of
        true ->
            % Too big for a single leaf: write an initial chunk, build a
            % root over it, then bulk-insert the remainder.
            {Nodes2, Rest} = vtree_modify:get_overflowing_subset(
                Threshold, Nodes),
            KpNodes = vtree_modify:write_nodes(Vt, Nodes2, MbbO),
            Root = vtree_modify:write_new_root(Vt, KpNodes),
            Vt2 = Vt#vtree{root=Root},
            % NOTE vmx 2012-09-20: The value of `ArbitraryBulkSize` is
            % arbitrary, might be worth spending more benchmarking
            % NOTE vmx 2012-09-20: You can call it premature optimization, but it's
            % really worth it. In the future the initial index building should be
            % replaces with something better
            ArbitraryBulkSize = round(math:log(Threshold)+50),
            Vt3 = insert_in_bulks(Vt2, Rest, ArbitraryBulkSize),
            Vt3;
        false ->
            % Everything fits into one node, which becomes the root.
            [Root] = vtree_modify:write_nodes(Vt, Nodes, MbbO),
            Vt#vtree{root=Root}
    end;
insert(Vt, Nodes) ->
    Root = Vt#vtree.root,
    PartitionedNodes = [Nodes],
    KpNodes = insert_multiple(Vt, PartitionedNodes, [Root]),
    NewRoot = vtree_modify:write_new_root(Vt, KpNodes),
    Vt#vtree{root=NewRoot}.
%% Insert the nodes in chunks of at most `BulkSize' nodes per call to
%% insert/2, so very large inserts are processed incrementally.
% NOTE vmx 2013-03-12: It might make sense to change the bulk size from
% using the number of nodes, to a byte size based way.
-spec insert_in_bulks(Vt :: #vtree{}, Nodes :: [#kv_node{}],
                      BulkSize :: non_neg_integer()) -> #vtree{}.
insert_in_bulks(Vt, [], _BulkSize) ->
    Vt;
insert_in_bulks(Vt, Nodes, BulkSize) ->
    case length(Nodes) > BulkSize of
        true ->
            {Bulk, Rest} = lists:split(BulkSize, Nodes),
            insert_in_bulks(insert(Vt, Bulk), Rest, BulkSize);
        false ->
            insert(Vt, Nodes)
    end.
%% Push the KV-nodes down through the existing KP-nodes, using the
%% generic modify traversal parameterised with this module's insert and
%% partition callbacks. Returns the resulting top-level KP-nodes.
-spec insert_multiple(Vt :: #vtree{}, ToInsert :: [#kv_node{}],
                      Existing :: [#kp_node{}]) -> [#kp_node{}].
insert_multiple(Vt, ToInsert, Existing) ->
    ModifyFuns = {fun insert_nodes/2, fun partition_nodes/3},
    vtree_modify:modify_multiple(Vt, ModifyFuns, ToInsert, Existing, []).
-spec insert_nodes(ToInsert :: [#kv_node{}], Existing :: [#kv_node{}]) ->
[#kv_node{}].
%% Leaf-level modify callback: the freshly inserted KV-nodes simply go
%% in front of the ones already present.
insert_nodes(ToInsert, Existing) ->
    lists:append(ToInsert, Existing).
% Partitions a list of nodes according to a list of MBBs which are given by
% KP-nodes.
%% Each node to partition is assigned to exactly one KP-node subtree via
%% vtree_choose:choose_subtree/3; the result is one (possibly empty)
%% list of KV-nodes per KP-node, in the same order as KpNodes.
-spec partition_nodes(ToPartition :: [#kv_node{}], KpNodes :: [#kp_node{}],
                      Less :: lessfun()) -> [[#kv_node{}]].
partition_nodes(ToPartition, KpNodes, Less) ->
    % One (initially empty) partition per KP-node.
    Partitions0 = [[] || _ <- lists:seq(1, length(KpNodes))],
    PartitionMbbs = [Node#kp_node.key || Node <- KpNodes],
    % Choose the partition the new node should be inserted to.
    % vtree_choode:choose_subtree/3 expects a list of 2-tuples with the MBB
    % and any value you like. We use the index in the list as second element
    % in the tuple, so we can insert the new nodes there easily.
    NodesNumbered = lists:zip(PartitionMbbs,
        lists:seq(1, length(PartitionMbbs))),
    lists:foldl(fun(Node, Partitions) ->
        {_, NodeIndex} = vtree_choose:choose_subtree(
            NodesNumbered, Node#kv_node.key,
            Less),
        add_to_nth(NodeIndex, Node, Partitions)
    end, Partitions0, ToPartition).
% Prepend `Element' to the N-th (1-based) inner list of a list of lists,
% returning the updated list of lists.
-spec add_to_nth(N :: pos_integer(), Element :: any(),
                 ListOfLists :: [list()]) -> [list()].
add_to_nth(N, Element, ListOfLists) ->
    {Prefix, [Target | Suffix]} = lists:split(N - 1, ListOfLists),
    Prefix ++ [[Element | Target] | Suffix].
%%%-------------------------------------------------------------------
%%% @doc
%%% A set of generic optics that can be applied to multiple container
%%% types.
%%%
%%% Intended both as a convenience and to support optic creation from
%%% parsed paths in optic_path.
%%%
%%% Because of the ambiguous types they support, these optics do not
%%% support the standard optic options. Instead, they always skip
%%% unexpected types and never create missing values.
%%% @end
%%%-------------------------------------------------------------------
-module(optic_generic).
%% API
-export([key/1,
index/1]).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc
%% Focus on the value of many different key/value like mappings.
%% Understands how to focus on maps, property lists, dicts, orddicts
%% and gb_trees. Does not support the usual optic options.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_generic:key(first)], #{first => 1}).
%% {ok,[1]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @returns An opaque optic record.
-spec key(Key) -> optic:optic() when
    Key :: term().
key(Key) ->
    % Read-only traversal: fold the focused value (if present) into Acc.
    % Unrecognised containers and missing keys are skipped silently, per
    % this module's contract.
    Fold =
        fun (Fun, Acc, Map) when is_map(Map) ->
                case Map of
                    #{Key:=Value} ->
                        {ok, Fun(Value, Acc)};
                    _ ->
                        {ok, Acc}
                end;
            (Fun, Acc, List) when is_list(List) ->
                % This might be a proplist or an orddict.
                % Folds over every entry matching Key, not just the first.
                Values = proplists:get_all_values(Key, List),
                {ok, lists:foldl(Fun, Acc, Values)};
            (Fun, Acc, Unknown) ->
                case {is_dict(Unknown), is_gb_tree(Unknown)} of
                    {true, _} ->
                        % It's bad if is_dict & is_gb_tree ever both return
                        % true, but I can't justify throwing an error when
                        % this is already doing such fuzzy guessing of types.
                        Dict = Unknown,
                        case dict:find(Key, Dict) of
                            {ok, Value} ->
                                {ok, Fun(Value, Acc)};
                            error ->
                                {ok, Acc}
                        end;
                    {false, true} ->
                        Tree = Unknown,
                        case gb_trees:lookup(Key, Tree) of
                            {value, Value} ->
                                {ok, Fun(Value, Acc)};
                            none ->
                                {ok, Acc}
                        end;
                    {false, false} ->
                        {ok, Acc}
                end
        end,
    % Read-write traversal: like Fold, but also rebuilds the container
    % with the (possibly modified) focused value.
    MapFold =
        fun (Fun, Acc, Map) when is_map(Map) ->
                case Map of
                    #{Key:=Value} ->
                        {NewValue, NewAcc} = Fun(Value, Acc),
                        {ok, {Map#{Key:=NewValue}, NewAcc}};
                    _ ->
                        {ok, {Map, Acc}}
                end;
            (Fun, Acc, List) when is_list(List) ->
                % This might be a proplist or an orddict.
                % Each element is probed individually so non-matching
                % entries (including bare atoms in proplists) pass through
                % unchanged and ordering is preserved.
                {ok, lists:mapfoldl(fun (Elem, InnerAcc) ->
                                            case proplists:is_defined(Key, [Elem]) of
                                                true ->
                                                    Value = proplists:get_value(Key, [Elem]),
                                                    {NewValue, NewAcc} = Fun(Value, InnerAcc),
                                                    {{Key, NewValue}, NewAcc};
                                                false ->
                                                    {Elem, InnerAcc}
                                            end
                                    end,
                                    Acc,
                                    List)};
            (Fun, Acc, Unknown) ->
                case {is_dict(Unknown), is_gb_tree(Unknown)} of
                    {true, _} ->
                        % It's bad if is_dict & is_gb_tree ever both return
                        % true, but I can't justify throwing an error when
                        % this is already doing such fuzzy guessing of types.
                        Dict = Unknown,
                        case dict:find(Key, Dict) of
                            {ok, Value} ->
                                {NewValue, NewAcc} = Fun(Value, Acc),
                                {ok, {dict:store(Key, NewValue, Dict), NewAcc}};
                            error ->
                                {ok, {Dict, Acc}}
                        end;
                    {false, true} ->
                        Tree = Unknown,
                        case gb_trees:lookup(Key, Tree) of
                            {value, Value} ->
                                {NewValue, NewAcc} = Fun(Value, Acc),
                                {ok, {gb_trees:update(Key, NewValue, Tree), NewAcc}};
                            none ->
                                {ok, {Tree, Acc}}
                        end;
                    {false, false} ->
                        {ok, {Unknown, Acc}}
                end
        end,
    optic:new(MapFold, Fold).
%% @doc
%% Focus on an element of a list like container. Indexing begins at 1.
%% Understands how to focus on lists and tuples. Does not support the
%% usual optic options.
%%
%% Out-of-range indexes — including 0 and negative values — focus on
%% nothing, consistent with this module's policy of skipping unexpected
%% input instead of failing.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_generic:index(3)], [1, 2, 3]).
%% {ok,[3]}
%% '''
%% @end
%% @param Index The one based index of the element to focus on.
%% @returns An opaque optic record.
%%
%% Fix: the guards previously only checked `Index =< length(...)` /
%% `Index =< tuple_size(...)`, so index(0) (or a negative index) applied
%% to a non-empty list or tuple crashed inside lists:nth/2 or
%% erlang:element/2 rather than skipping. Both closures now also require
%% Index >= 1, and the spec is tightened to pos_integer().
-spec index(Index) -> optic:optic() when
    Index :: pos_integer().
index(Index) ->
    Fold =
        fun (Fun, Acc, List) when is_list(List), Index >= 1, Index =< length(List) ->
                Elem = lists:nth(Index, List),
                {ok, Fun(Elem, Acc)};
            (Fun, Acc, Tuple) when is_tuple(Tuple), Index >= 1, Index =< tuple_size(Tuple) ->
                Elem = erlang:element(Index, Tuple),
                {ok, Fun(Elem, Acc)};
            (_Fun, Acc, _Data) ->
                {ok, Acc}
        end,
    MapFold =
        fun (Fun, Acc, List) when is_list(List), Index >= 1, Index =< length(List) ->
                {Before, [Head | Tail]} = lists:split(Index - 1, List),
                {NewHead, NewAcc} = Fun(Head, Acc),
                {ok, {Before ++ [NewHead] ++ Tail, NewAcc}};
            (Fun, Acc, Tuple) when is_tuple(Tuple), Index >= 1, Index =< tuple_size(Tuple) ->
                Elem = erlang:element(Index, Tuple),
                {NewElem, NewAcc} = Fun(Elem, Acc),
                {ok, {erlang:setelement(Index, Tuple, NewElem), NewAcc}};
            (_Fun, Acc, Unknown) ->
                {ok, {Unknown, Acc}}
        end,
    optic:new(MapFold, Fold).
%%%===================================================================
%%% Internal Functions
%%%===================================================================

%% Best-effort check for a stdlib `dict': probe the term with
%% dict:size/1 and treat a function_clause error as "not a dict".
is_dict(Candidate) ->
    try
        _ = dict:size(Candidate),
        true
    catch
        error:function_clause ->
            false
    end.
%% Best-effort check for a gb_trees structure: probe the term with
%% gb_trees:size/1 and treat a function_clause error as "not a gb_tree".
is_gb_tree(Candidate) ->
    try
        _ = gb_trees:size(Candidate),
        true
    catch
        error:function_clause ->
            false
    end.
%%%------------------------------------------------------------------------
%% Copyright 2020, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc A TextMap Propagator is a Propagator that performs injection and
%% extraction with ASCII keys and values.
%%
%% An example of
%% configuring the TextMap Propagator to inject and extract Baggage and
%% TraceContext:
%%
%% ```
%% {text_map_propagators, [trace_context, baggage]},
%% '''
%%
%% The propagators are then used at the points that cross service
%% communication is performed. By default `inject' and `extract' work on a
%% generic list of 2-tuple's with binary string keys and values. A user
%% defined function for setting a key/value in the carrier and for getting
%% the value of a key from a carrier can be passed as an argument. For
%% example, injecting and extracting to and from Hackney headers could be
%% done with <a href="https://github.com/benoitc/hackney">Hackney</a> specific functions:
%%
%% ```
%% set_header(Headers, Key, Value) ->
%% hackney_headers:store(Key, Value, Headers).
%%
%% some_fun_calling_hackney() ->
%% Headers = otel_propagator_text_map:inject(hackney_headers:new(), fun set_header/2),
%% ...
%% '''
%%
%% An example of extraction in an <a href="https://github.com/elli-lib/elli">Elli</a> request handler:
%%
%% ```
%% get_header(Req, Key) ->
%% elli_request:get_header(Key, Req, Default).
%%
%% handle(Req, _Args) ->
%% otel_propagator_text_map:extract(Req, fun get_header/2),
%% ...
%% {ok, [], <<"hello world">>}.
%% '''
%% @end
%%%-------------------------------------------------------------------------
-module(otel_propagator_text_map).
-behaviour(otel_propagator).
-export([fields/1,
inject/1,
inject/2,
inject/3,
inject_from/2,
inject_from/3,
inject_from/4,
extract/1,
extract/2,
extract/4,
extract_to/2,
extract_to/3,
extract_to/5]).
-export([default_carrier_get/2,
default_carrier_set/3,
default_carrier_keys/1]).
-include_lib("kernel/include/logger.hrl").
%% Sets a value into a carrier
-callback inject(otel_ctx:t(), otel_propagator:carrier(), carrier_set(), propagator_options()) -> otel_propagator:carrier().
%% Extracts values from a carrier and sets them in the context
-callback extract(otel_ctx:t(), otel_propagator:carrier(), carrier_keys(), carrier_get(), propagator_options()) -> term().
%% Returns all the keys the propagator sets with `inject'
-callback fields(propagator_options()) -> [field_key()].
%% a textmap propagator can have any term associated with it
-type propagator_options() :: term().
-type field_key() :: unicode:latin1_binary().
-type field_value() :: unicode:latin1_binary().
%% return all matching keys from the carrier
%% for example: with the jaeger propagation format this would be
%% all keys found with prefix "uberctx-"
-type carrier_keys() :: fun((otel_propagator:carrier()) -> [unicode:latin1_binary()]).
-type carrier_get() :: fun((otel_propagator:carrier(), unicode:latin1_binary()) -> unicode:latin1_binary() | undefined).
-type carrier_set() :: fun((otel_propagator:carrier(), unicode:latin1_binary(), unicode:latin1_binary()) -> otel_propagator:carrier()).
-type default_text_map_carrier() :: [{unicode:latin1_binary(), unicode:latin1_binary()}].
%% 2-tuple form is a textmap propagator with options
%% an empty list is passed for options if the propagator is a module with no options
-type t() :: module() | {module(), propagator_options()}.
-export_type([t/0,
carrier_set/0,
carrier_get/0,
carrier_keys/0,
propagator_options/0]).
%% Return the carrier keys a propagator writes on inject. A bare module
%% is treated as a propagator with empty options.
-spec fields(otel_propagator:t()) -> [field_key()].
fields({Module, Options}) ->
    Module:fields(Options);
fields(Module) when is_atom(Module) ->
    Module:fields([]).
%% Inject the current context into Carrier using the globally configured
%% text map injector and the default pair-list carrier setter.
-spec inject(otel_propagator:carrier()) -> otel_propagator:carrier().
inject(Carrier) ->
    Propagator = opentelemetry:get_text_map_injector(),
    inject(Propagator, Carrier, fun default_carrier_set/3).
%% Same, with an explicit propagator.
-spec inject(otel_propagator:t(), otel_propagator:carrier()) -> otel_propagator:carrier().
inject(Propagator, Carrier) ->
    inject(Propagator, Carrier, fun default_carrier_set/3).
%% Same, with an explicit propagator and a user supplied setter function
%% (e.g. one that stores headers in an HTTP client's header structure).
-spec inject(otel_propagator:t(), otel_propagator:carrier(), fun()) -> otel_propagator:carrier().
inject(Propagator, Carrier, CarrierSetFun) ->
    Context = otel_ctx:get_current(),
    inject_from(Context, Propagator, Carrier, CarrierSetFun).
%% Like inject/1..3 but injecting from an explicitly supplied context
%% instead of the process's current context.
-spec inject_from(otel_ctx:t(), otel_propagator:carrier()) -> otel_propagator:carrier().
inject_from(Context, Carrier) ->
    Propagator = opentelemetry:get_text_map_injector(),
    inject_from(Context, Propagator, Carrier, fun default_carrier_set/3).
-spec inject_from(otel_ctx:t(), otel_propagator:t(), otel_propagator:carrier()) -> otel_propagator:carrier().
inject_from(Context, Propagator, Carrier) ->
    inject_from(Context, Propagator, Carrier, fun default_carrier_set/3).
%% A propagator is either a bare module (empty options) or {Module, Options}.
-spec inject_from(otel_ctx:t(), otel_propagator:t(), otel_propagator:carrier(), fun()) -> otel_propagator:carrier().
inject_from(Context, Module, Carrier, CarrierSetFun) when is_atom(Module) ->
    Module:inject(Context, Carrier, CarrierSetFun, []);
inject_from(Context, {Module, Options}, Carrier, CarrierSetFun) ->
    Module:inject(Context, Carrier, CarrierSetFun, Options).
%% Extract propagated values from Carrier into the current context using
%% the globally configured extractor, and attach the result as the new
%% current context (note the side effect via otel_ctx:attach/1).
-spec extract(otel_propagator:carrier()) -> otel_ctx:t().
extract(Carrier) ->
    Propagator = opentelemetry:get_text_map_extractor(),
    extract(Propagator, Carrier, fun default_carrier_keys/1, fun default_carrier_get/2).
%% Same, with an explicit propagator.
-spec extract(otel_propagator:t(), otel_propagator:carrier()) -> otel_ctx:t().
extract(Propagator, Carrier) ->
    extract(Propagator, Carrier, fun default_carrier_keys/1, fun default_carrier_get/2).
%% Same, with user supplied functions for listing and reading carrier keys.
-spec extract(otel_propagator:t(), otel_propagator:carrier(), fun(), fun()) -> otel_ctx:t().
extract(Propagator, Carrier, CarrierKeysFun, CarrierGetFun) ->
    Context = otel_ctx:get_current(),
    Context1 = extract_to(Context, Propagator, Carrier, CarrierKeysFun, CarrierGetFun),
    otel_ctx:attach(Context1).
%% Like extract/1..3 but purely functional: extracts into the supplied
%% context and returns the new context without attaching it.
-spec extract_to(otel_ctx:t(), otel_propagator:carrier()) -> otel_ctx:t().
extract_to(Context, Carrier) ->
    Propagator = opentelemetry:get_text_map_extractor(),
    extract_to(Context, Propagator, Carrier, fun default_carrier_keys/1, fun default_carrier_get/2).
-spec extract_to(otel_ctx:t(), otel_propagator:t(), otel_propagator:carrier()) -> otel_ctx:t().
extract_to(Context, Propagator, Carrier) ->
    extract_to(Context, Propagator, Carrier, fun default_carrier_keys/1, fun default_carrier_get/2).
%% A propagator is either a bare module (empty options) or {Module, Options}.
-spec extract_to(otel_ctx:t(), otel_propagator:t(), otel_propagator:carrier(), fun(), fun()) -> otel_ctx:t().
extract_to(Context, Module, Carrier, CarrierKeysFun, CarrierGetFun) when is_atom(Module) ->
    Module:extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, []);
extract_to(Context, {Module, Options}, Carrier, CarrierKeysFun, CarrierGetFun) ->
    Module:extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Options).
%% case-insensitive finding of a key string in a list of ASCII strings
%% if there are multiple entries in the list for the same key the values
%% will be combined and separated by commas. This is the method defined
%% in RFC7230 for HTTP headers.
-spec default_carrier_get(field_key(), default_text_map_carrier()) -> field_value() | undefined.
%% Look up Key in a list of {Name, Value} pairs of ASCII binaries,
%% comparing names case-insensitively. When several entries match, their
%% values are joined with commas, as RFC 7230 specifies for repeated
%% HTTP header fields. Returns `undefined' when nothing matches.
default_carrier_get(Key, List) ->
    default_carrier_get(Key, List, []).

%% Third argument accumulates matching values in reverse order.
default_carrier_get(_Key, [], []) ->
    undefined;
default_carrier_get(_Key, [], Matches) ->
    unicode:characters_to_binary(lists:join($,, lists:reverse(Matches)), latin1);
default_carrier_get(Key, [{Name, Value} | Rest], Matches) ->
    NewMatches =
        case string:equal(Key, Name, true, none) of
            true -> [Value | Matches];
            false -> Matches
        end,
    default_carrier_get(Key, Rest, NewMatches).
%% case-insensitive ASCII string based lists:keyreplace
-spec default_carrier_set(field_key(), field_value(), default_text_map_carrier())
-> default_text_map_carrier().
%% Store Key/Value in the pair list: the first entry whose name matches
%% Key case-insensitively is replaced (the stored key takes Key's
%% spelling); when nothing matches, the pair is appended at the end.
default_carrier_set(Key, Value, []) ->
    [{Key, Value}];
default_carrier_set(Key, Value, [{Name, _} = Pair | Rest]) ->
    case string:equal(Key, Name, true, none) of
        true ->
            [{Key, Value} | Rest];
        false ->
            [Pair | default_carrier_set(Key, Value, Rest)]
    end.
-spec default_carrier_keys(default_text_map_carrier()) -> [field_key()].
%% List the name of every {Name, Value} pair in the carrier, in order.
default_carrier_keys(Carrier) ->
    lists:map(fun({Key, _Value}) -> Key end, Carrier).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(antidotec_datatype).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(QC_OUT(P), eqc:on_output(fun(Fmt, Args) -> io:format(user, Fmt, Args) end, P)).
-compile(export_all).
-endif.
-define(MODULES, [antidotec_counter, antidotec_set, antidotec_reg]).
-export([module_for_type/1,
module_for_term/1]).
-export_type([datatype/0, update/0]).
-type maybe(T) :: T | undefined.
-type datatype() :: term().
-type typename() :: atom().
-type update() :: [term()].
%% @doc Constructs a new container for the type with the specified
%% value and key. This should only be used internally by the client code.
-callback new(Value::term()) -> datatype().
%% @doc Returns the original, unmodified value of the object. This does
%% not include the execution of any locally-queued operations.
-callback value(datatype()) -> term().
%% @doc Returns the local value of the object, with the local operations applied.
-callback dirty_value(datatype()) -> term().
%% @doc Extracts the list of operations to be append to the object's log.
%% 'undefined' should be returned if the type is unmodified.
-callback to_ops(term(), datatype()) -> update().
%% @doc Determines whether the given term is the type managed by the
%% container module.
-callback is_type(datatype()) -> boolean().
%% @doc Determines the symbolic name of the container's type, e.g.
%% antidote_set, antidote_map, antidote_counter.
-callback type() -> typename().
%% @doc Returns the module name for the container of the given CRDT data-type.
-spec module_for_type(set | counter | reg) ->
         antidotec_counter | antidotec_set | antidotec_reg.
module_for_type(Type) ->
    case Type of
        set     -> antidotec_set;
        counter -> antidotec_counter;
        reg     -> antidotec_reg
    end.
%% @doc Returns the container module name for the given term.
%% Returns undefined if the module is not known.
%% Probes each module in ?MODULES with is_type/1; once a match is found
%% the fold carries it through without probing further modules.
%% (The original final line carried fused dataset-extraction metadata,
%% which has been removed.)
-spec module_for_term(datatype()) -> maybe(module()).
module_for_term(T) ->
    lists:foldl(fun(Mod, undefined) ->
                        case Mod:is_type(T) of
                            true -> Mod;
                            false -> undefined
                        end;
                   (_, Mod) ->
                        Mod
                end, undefined, ?MODULES).
% Copyright 2017-2018 <NAME>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
-module(statser_calc).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include("statser.hrl").
-export([median/1,
percentile/2,
percentile/3,
safe_average/1,
safe_length/1,
safe_invert/1,
safe_div/2,
safe_max/1,
safe_min/1,
safe_pow/1,
safe_range/1,
safe_stddev/1,
safe_sum/1,
safe_diff/1,
safe_substract/2,
safe_square_root/1]).
-type variadic_metric() :: metric_tuple() | metric_value().
-type variadic_metrics() :: [variadic_metric()].
%% @doc Incremental (running) mean of the non-null samples. Entries may be
%% bare values or {Timestamp, Value} tuples; nulls are skipped. Returns
%% 0.0 for an empty or all-null list. The incremental update
%% Avg + (Val - Avg) / Cnt is kept exactly as before so float results are
%% bit-identical to the previous implementation.
-spec safe_average(variadic_metrics()) -> number().
safe_average(Values) ->
    Step = fun(null, Acc) ->
                   Acc;
              ({_TS, null}, Acc) ->
                   Acc;
              ({_TS, Val}, {Cnt, Avg}) ->
                   {Cnt + 1, Avg + (Val - Avg) / Cnt};
              (Val, {Cnt, Avg}) ->
                   {Cnt + 1, Avg + (Val - Avg) / Cnt}
           end,
    {_Count, Average} = lists:foldl(Step, {1, 0.0}, Values),
    Average.
%% @doc Number of non-null samples in the list. Entries may be bare values
%% or {Timestamp, Value} tuples; both bare null and {_, null} are excluded.
-spec safe_length(variadic_metrics()) -> integer().
safe_length(Values) ->
    length([ok || V <- Values,
                  case V of
                      null -> false;
                      {_TS, null} -> false;
                      _ -> true
                  end]).
%% @doc Maximum of the non-null samples. Entries may be bare values or
%% {Timestamp, Value} tuples. Returns null when every entry is null (or
%% the list is empty).
-spec safe_max(variadic_metrics()) -> metric_value().
safe_max(Vs) ->
    safe_max(Vs, null).

%% Clause order matters: explicit null-accumulator clauses are needed here
%% because in Erlang term order atoms sort AFTER numbers, so max(Val, null)
%% would keep null. The `{_TS, V}, null` clause also absorbs a leading
%% {TS, null} (V binds null and the accumulator simply stays null).
safe_max([], Max) -> Max;
safe_max([{_TS, V} | Vs], null) ->
    safe_max(Vs, V);
safe_max([{_TS, null} | Vs], Max) ->
    safe_max(Vs, Max);
safe_max([{_TS, Val} | Vs], Max) ->
    safe_max(Vs, max(Val, Max));
safe_max([null | Vs], Max) ->
    safe_max(Vs, Max);
safe_max([Val | Vs], null) ->
    safe_max(Vs, Val);
safe_max([Val | Vs], Max) ->
    safe_max(Vs, max(Val, Max)).
%% @doc Left-folded exponentiation over the samples: ((V1 ^ V2) ^ V3) ...
%% Entries may be bare values or {Timestamp, Value} tuples. Unlike the
%% other safe_* aggregates, a null ANYWHERE makes the whole result null
%% (nulls are not skipped) -- see the abort clauses below.
-spec safe_pow(variadic_metrics()) -> metric_value().
safe_pow([]) -> null;
safe_pow([null | _Vs]) -> null;
safe_pow([{_TS, null} | _Vs]) -> null;
safe_pow([{_TS, V} | Vs]) ->
    safe_pow(Vs, V);
safe_pow([V | Vs]) ->
    safe_pow(Vs, V).

%% Accumulate with math:pow/2; abort to null on the first null sample.
safe_pow([], Current) -> Current;
safe_pow([{_TS, null} | _Vs], _Current) -> null;
safe_pow([null | _Vs], _Current) -> null;
safe_pow([{_TS, Val} | Vs], Current) ->
    safe_pow(Vs, math:pow(Current, Val));
safe_pow([Val | Vs], Current) ->
    safe_pow(Vs, math:pow(Current, Val)).
%% @doc Minimum of the non-null samples; null only when there are no
%% values. Unlike safe_max/1 this needs no explicit null-accumulator
%% clauses: in Erlang term order numbers sort BEFORE atoms, so
%% min(Number, null) yields the number and the null seed is displaced by
%% the first real value automatically.
-spec safe_min(variadic_metrics()) -> metric_value().
safe_min(Vs) ->
    safe_min(Vs, null).

safe_min([], Min) -> Min;
safe_min([{_TS, null} | Vs], Min) ->
    safe_min(Vs, Min);
safe_min([{_TS, Val} | Vs], Min) ->
    safe_min(Vs, min(Val, Min));
safe_min([null | Vs], Min) ->
    safe_min(Vs, Min);
safe_min([Val | Vs], Min) ->
    safe_min(Vs, min(Val, Min)).
%% @doc max/2 with null treated as "absent": a null operand yields the
%% other operand (both null yields null). Needed because plain max/2
%% would keep the atom null (atoms sort after numbers in term order).
-spec safe_max_compare(metric_value(), metric_value()) -> metric_value().
safe_max_compare(null, B) -> B;
safe_max_compare(A, null) -> A;
safe_max_compare(A, B) when A >= B -> A;
safe_max_compare(_A, B) -> B.
%% @doc Range (max - min) of the non-null samples; null when there are no
%% values. Tracks {Min, Max} in a single pass. The min side relies on
%% Erlang term order (numbers < atoms) so min(Val, null) displaces the
%% null seed; the max side must go through safe_max_compare/2 because
%% max(Val, null) would keep the atom.
-spec safe_range(variadic_metrics()) -> metric_value().
safe_range(Vs) ->
    safe_range(Vs, {null, null}).

safe_range([], {Min, Max}) ->
    %% safe_substract/2 propagates null when no values were seen
    safe_substract(Max, Min);
safe_range([{_TS, null} | Vs], MinMax) ->
    safe_range(Vs, MinMax);
safe_range([{_TS, Val} | Vs], {Min, Max}) ->
    MinMax = {min(Val, Min), safe_max_compare(Val, Max)},
    safe_range(Vs, MinMax);
safe_range([null | Vs], MinMax) ->
    safe_range(Vs, MinMax);
safe_range([Val | Vs], {Min, Max}) ->
    MinMax = {min(Val, Min), safe_max_compare(Val, Max)},
    safe_range(Vs, MinMax).
%% @doc "Diff" of the samples: the first non-null value minus all
%% subsequent non-null values, e.g. [1, 1, 2, 3] -> 1 - 1 - 2 - 3 = -5.
%% Returns 0 for an empty or all-null list; nulls elsewhere are skipped.
-spec safe_diff(variadic_metrics()) -> number().
safe_diff([]) -> 0;
safe_diff([{_TS, Value} | Vs]) -> safe_diff(Vs, Value);
safe_diff([Value | Vs]) -> safe_diff(Vs, Value).

%% The accumulator stays null until the first non-null value; that value
%% becomes the (positive) starting point and later values are subtracted.
safe_diff([], null) -> 0;
safe_diff([], Acc) -> Acc;
safe_diff([null | Vs], Acc) ->
    safe_diff(Vs, Acc);
safe_diff([{_TS, null} | Vs], Acc) ->
    safe_diff(Vs, Acc);
safe_diff([{_TS, Value} | Vs], null) ->
    safe_diff(Vs, Value);
safe_diff([{_TS, Value} | Vs], Acc) ->
    safe_diff(Vs, Acc - Value);
safe_diff([Value | Vs], null) ->
    safe_diff(Vs, Value);
safe_diff([Value | Vs], Acc) ->
    safe_diff(Vs, Acc - Value).
%% @doc Subtraction that propagates null: if either operand is null the
%% result is null, otherwise A - B. (Exported name keeps the historical
%% "substract" spelling.)
-spec safe_substract(metric_value(), metric_value()) -> metric_value().
safe_substract(A, B) when A =:= null; B =:= null ->
    null;
safe_substract(A, B) ->
    A - B.
%% @doc Multiplicative inverse (V^-1) that propagates null. math:pow/2 is
%% kept (rather than 1 / V) so results stay bit-identical to before.
-spec safe_invert(metric_value()) -> metric_value().
safe_invert(Value) ->
    case Value of
        null -> null;
        Number -> math:pow(Number, -1)
    end.
%% @doc Population standard deviation of the non-null samples:
%% sqrt(sum((V - mean)^2) / N). Returns null when there are no values.
%% Note: the mean comes from safe_average/1 in a first pass, so the list
%% is traversed twice.
-spec safe_stddev(variadic_metrics()) -> metric_value().
safe_stddev(Vs) ->
    safe_stddev(Vs, safe_average(Vs), 0.0, 0).

safe_stddev([], _Avg, Sum, Len) when Len > 0 ->
    math:sqrt(Sum / Len);
safe_stddev([], _Avg, _Sum, _Len) -> null;
safe_stddev([{_TS, null} | Vs], Avg, Sum, Len) ->
    safe_stddev(Vs, Avg, Sum, Len);
safe_stddev([{_TS, V} | Vs], Avg, Sum, Len) ->
    Dev = math:pow(V - Avg, 2),
    safe_stddev(Vs, Avg, Sum + Dev, Len + 1);
safe_stddev([null | Vs], Avg, Sum, Len) ->
    safe_stddev(Vs, Avg, Sum, Len);
safe_stddev([V | Vs], Avg, Sum, Len) ->
    Dev = math:pow(V - Avg, 2),
    safe_stddev(Vs, Avg, Sum + Dev, Len + 1).
%% @doc Sum of the non-null samples; 0 for an empty or all-null list.
%% Entries may be bare values or {Timestamp, Value} tuples.
-spec safe_sum(variadic_metrics()) -> number().
safe_sum(Values) ->
    lists:foldl(fun(null, Acc) -> Acc;
                   ({_TS, null}, Acc) -> Acc;
                   ({_TS, Val}, Acc) -> Acc + Val;
                   (Val, Acc) -> Acc + Val
                end, 0, Values).
%% @doc Square root (V^0.5) that propagates null. math:pow/2 is kept
%% (rather than math:sqrt/1) so results stay bit-identical to before.
-spec safe_square_root(metric_value()) -> metric_value().
safe_square_root(Value) ->
    case Value of
        null -> null;
        Number -> math:pow(Number, 0.5)
    end.
%% @doc Division that propagates null and maps a zero or null denominator
%% to null instead of raising badarith. Operands may be bare values or
%% {Timestamp, Value} tuples; when the numerator is a tuple its timestamp
%% is kept on the result. Clause ORDER matters: the denominator checks
%% come before the numerator checks.
%% NOTE(review): there is no clause for a bare numerator with a tuple
%% denominator; such a call falls to the last clause and fails with
%% badarith -- presumably callers never mix in that direction (confirm).
-spec safe_div(variadic_metric(), variadic_metric()) -> variadic_metric().
safe_div(_A, 0) -> null;
safe_div(_A, null) -> null;
safe_div({TS, _A}, {_, 0}) -> {TS, null};
safe_div({TS, _A}, {_, null}) -> {TS, null};
safe_div(null, _B) -> null;
safe_div({TS, null}, _B) -> {TS, null};
safe_div({TS, A}, {_, B}) -> {TS, A / B};
safe_div({TS, A}, B) -> {TS, A / B};
safe_div(A, B) -> A / B.
%% @doc Drop null entries (bare null or {_, null}), unwrap {Timestamp,
%% Value} tuples to their values, and return {SortedValues, Count}.
-spec sort_non_null(variadic_metrics()) -> {variadic_metrics(), integer()}.
sort_non_null(Values) ->
    Kept = lists:filtermap(fun(null) -> false;
                              ({_TS, null}) -> false;
                              ({_TS, Val}) -> {true, Val};
                              (Val) -> {true, Val}
                           end, Values),
    {lists:sort(Kept), length(Kept)}.
%% @doc Interpolated median, i.e. the 50th percentile.
-spec median([metric_value()]) -> metric_value().
median(Values) ->
    percentile(Values, 50, true).

%% @doc N-th percentile without interpolation.
-spec percentile([metric_value()], integer()) -> metric_value().
percentile(Values, N) ->
    percentile(Values, N, false).

%% @doc N-th percentile of the non-null values, optionally linearly
%% interpolated between the two neighbouring ranks. Returns null for an
%% empty (or all-null) input. Uses the fractional rank N/100 * (Len + 1),
%% clamped to the first/last element at the edges.
-spec percentile([metric_value()], integer(), boolean()) -> metric_value().
percentile(Values, N, Interpolate) ->
    {Sorted, Len} = sort_non_null(Values),
    FractionalRank = (N / 100.0) * (Len + 1),
    Rank0 = statser_util:floor(FractionalRank),
    RankFraction = FractionalRank - Rank0,
    %% without interpolation, any fractional part rounds the rank up
    Rank =
        if Interpolate == true -> Rank0;
           true -> Rank0 + statser_util:ceiling(RankFraction)
        end,
    %% clamp: rank 0 -> first element, rank beyond the end -> last element
    Percentile =
        if Len == 0 -> null;
           Rank == 0 -> hd(Sorted);
           Rank > Len -> lists:nth(Len, Sorted);
           true -> lists:nth(Rank, Sorted)
        end,
    %% interpolate toward the next-ranked value using the fractional part
    if Interpolate == true andalso Len > Rank ->
            NextValue = lists:nth(Rank + 1, Sorted),
            Percentile + RankFraction * (NextValue - Percentile);
       true ->
            Percentile
    end.
%%
%% TESTS
%%
%% These tests double as documentation of the null-handling contract of
%% the safe_* aggregates: nulls are skipped (or, for safe_pow, propagated)
%% and values may appear bare or as {Timestamp, Value} pairs.
%% (The original -endif. line carried fused dataset-extraction metadata,
%% which has been removed; the test cases themselves are unchanged.)
%%
-ifdef(TEST).

sort_non_null_test_() ->
    [?_assertEqual({[], 0}, sort_non_null([])),
     ?_assertEqual({[], 0}, sort_non_null([null, null])),
     ?_assertEqual({[1], 1}, sort_non_null([1])),
     ?_assertEqual({[1, 2], 2}, sort_non_null([1, null, 2])),
     ?_assertEqual({[1, 2], 2}, sort_non_null([2, null, 1]))
    ].

safe_average_test_() ->
    [?_assertEqual(4.0, safe_average([4])),
     ?_assertEqual(3.0, safe_average([2, 4])),
     ?_assertEqual(3.0, safe_average([2, 4, 4, 2])),
     ?_assertEqual(0.0, safe_average([])),
     ?_assertEqual(0.0, safe_average([null])),
     ?_assertEqual(3.0, safe_average([2, 4, null])),
     ?_assertEqual(4.0, safe_average([null, 4, null])),
     ?_assertEqual(3.0, safe_average([{0, 2}, {0, 4}, {0, null}])),
     ?_assertEqual(4.0, safe_average([{0, null}, {0, 4}, {0, null}]))
    ].

safe_div_test_() ->
    [?_assertEqual(5.0, safe_div(10, 2)),
     ?_assertEqual(0.0, safe_div(0, 2)),
     ?_assertEqual(null, safe_div(123.1, 0)),
     ?_assertEqual(null, safe_div(null, 2)),
     ?_assertEqual(null, safe_div(3, null)),
     ?_assertEqual({100, 5.0}, safe_div({100, 10}, {100, 2})),
     ?_assertEqual({100, null}, safe_div({100, null}, {100, 2})),
     ?_assertEqual({100, null}, safe_div({100, 20.0}, {100, null}))
    ].

safe_pow_test_() ->
    [?_assertEqual(null, safe_pow([])),
     ?_assertEqual(null, safe_pow([1, 2, 4, 3, null, 2])),
     ?_assertEqual(1, safe_pow([1])),
     ?_assertEqual(math:pow(1, 2), safe_pow([1, 2])),
     ?_assertEqual(math:pow(1, 2), safe_pow([{100, 1}, {110, 2}]))
    ].

safe_range_test_() ->
    [?_assertEqual(null, safe_range([null, null])),
     ?_assertEqual(null, safe_range([null])),
     ?_assertEqual(null, safe_range([])),
     ?_assertEqual(1, safe_range([1, 2])),
     ?_assertEqual(2, safe_range([3, 1])),
     ?_assertEqual(2, safe_range([3, null, 1])),
     ?_assertEqual(5, safe_range([3, null, -1, 2, 1, 3, 4, 1])),
     ?_assertEqual(0, safe_range([1]))
    ].

safe_diff_test_() ->
    [?_assertEqual(0, safe_diff([])),
     ?_assertEqual(1, safe_diff([1])),
     ?_assertEqual(0, safe_diff([1, 1])),
     ?_assertEqual(-5, safe_diff([1, 1, 2, 3])),
     ?_assertEqual(-5, safe_diff([null, 1, 1, 2, null, 3, null])),
     ?_assertEqual(0, safe_diff([null, null, null]))
    ].

safe_stddev_test_() ->
    [?_assertEqual(null, safe_stddev([])),
     ?_assertEqual(null, safe_stddev([null])),
     ?_assertEqual(0.0, safe_stddev([null, 1])),
     ?_assertEqual(math:sqrt(8/3), safe_stddev([2, 4, null, 6]))
    ].

safe_length_test_() ->
    [?_assertEqual(0, safe_length([])),
     ?_assertEqual(0, safe_length([null])),
     ?_assertEqual(1, safe_length([null, 99, null])),
     ?_assertEqual(1, safe_length([{100, null}, {110, 99}, {120, null}]))
    ].

percentile_test_() ->
    [?_assertEqual(null, percentile([], 50)),
     ?_assertEqual(null, percentile([null], 50)),
     ?_assertEqual(1, percentile([1], 50)),
     ?_assertEqual(1, percentile([1, null, null], 50)),
     ?_assertEqual(2, percentile([1, null, 2], 50)),
     ?_assertEqual(2, percentile([2, null, 1], 50)),
     ?_assertEqual(1.5, percentile([2, null, 1], 50, true)),
     ?_assertEqual(2, percentile([2, null, 1, 3], 50)),
     ?_assertEqual(2.0, percentile([2, null, 1, 3], 50, true)),
     ?_assertEqual(1, percentile([2, 1, 3], 10)),
     ?_assertEqual(1.0, percentile([2, 1, 3], 10, true)),
     ?_assertEqual(3, percentile([2, 1, 3], 99)),
     ?_assertEqual(3, percentile([2, 1, 3], 99, true))
    ].
-endif.
%%--------------------------------------------------------------------
%% Copyright (c) 2022 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_retainer_index).
-export([
foreach_index_key/3,
to_index_key/2,
index_score/2,
select_index/2,
condition/1,
condition/2,
restore_topic/1
]).
-export_type([index/0]).
-type index() :: list(pos_integer()).
%% @doc Index key is a term that can be effectively searched in the index table.
-type index_key() :: {index(), {emqx_topic:words(), emqx_topic:words()}}.
-type match_pattern_part() :: term().
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% @doc Given words of a concrete topic (`Tokens') and a list of `Indices',
%% constructs index keys for the topic and each of the indices.
%% `Fun' is called with each of these keys (its return value is ignored).
-spec foreach_index_key(fun((index_key()) -> any()), list(index()), emqx_topic:words()) -> ok.
foreach_index_key(Fun, Indices, Tokens) ->
    lists:foreach(fun(Index) ->
                          Fun(to_index_key(Index, Tokens))
                  end, Indices).
%% @doc Given a concrete topic and an index
%% returns the corresponding index key.
%%
%% In an index key words from indexed and unindexed positions are split.
%%
%% E.g given `[2, 3]' index and `[<<"a">>, <<"b">>, <<"c">>, <<"d">>]' topic,
%% returns `{[2, 3], {[<<"b">>, <<"c">>], [<<"a">>, <<"d">>]}}' term.
%%
%% @see foreach_index_key/3
-spec to_index_key(index(), emqx_topic:words()) -> index_key().
to_index_key(Index, Tokens) ->
    %% positions are 1-based; split_index_tokens/5 walks both lists once
    {Index, split_index_tokens(Index, Tokens, 1, [], [])}.
%% @doc Given an index and a wildcard topic
%% returns the length of the constant prefix of the
%% according index key.
%%
%% E.g. for `[2,3]' index and <code>['+', <<"b">>, '+', <<"d">>]</code> wildcard topic
%% the score is `1', because the according index key pattern is
%% <code>{[<<"b">>, '_'], ['_', <<"d">>]}</code>.
%%
%% @see foreach_index_key/3
%% @see to_index_key/2
-spec index_score(index(), emqx_topic:words()) -> non_neg_integer().
index_score(Index, Tokens) ->
    %% start at position 1 with score 0; see index_score/4 below
    index_score(Index, Tokens, 1, 0).
%% @doc Given a list of indices and a wildcard topic
%% returns index with the best score.
%%
%% Returns `undefined' if there are no indices with score `> 0'.
%%
%% @see index_score/2
%% NB: spec fixed to `emqx_topic:words()' for consistency with every other
%% spec in this module (was `emqx:words()').
-spec select_index(emqx_topic:words(), list(index())) -> index() | undefined.
select_index(Tokens, Indices) ->
    select_index(Tokens, Indices, 0, undefined).
%% @doc For an index and a wildcard topic
%% returns a matchspec pattern for the corresponding index key.
%%
%% E.g. for `[2, 3]' index and <code>['+', <<"b">>, '+', <<"d">>]</code> wildcard topic
%% returns <code>{[2, 3], {[<<"b">>, '_'], ['_', <<"d">>]}}</code> pattern.
-spec condition(index(), emqx_topic:words()) -> match_pattern_part().
condition(Index, Tokens) ->
    %% delegate to condition/5, which splits the pattern into the
    %% indexed-positions part and the rest (mirroring to_index_key/2)
    {Index, condition(Index, Tokens, 1, [], [])}.
%% @doc Returns a matchspec pattern for a wildcard topic.
%%
%% E.g. for <code>['+', <<"b">>, '+', <<"d">>, '#']</code> wildcard topic
%% returns <code>['_', <<"b">>, '_', <<"d">> | '_']</code> pattern.
-spec condition(emqx_topic:words()) -> match_pattern_part().
condition(Tokens) ->
    %% '+' (single-level wildcard) becomes the match-anything pattern '_'
    Pattern = [case W of
                   '+' -> '_';
                   _ -> W
               end
               || W <- Tokens],
    %% a trailing '#' (multi-level wildcard) is removed and replaced by an
    %% improper '_' tail, which matches any number of remaining words
    MultiLevel = Pattern =/= [] andalso lists:last(Pattern) =:= '#',
    case MultiLevel of
        true -> (Pattern -- ['#']) ++ '_';
        false -> Pattern
    end.
%% @doc Restores concrete topic from its index key representation.
%%
%% E.g given `{[2, 3], {[<<"b">>, <<"c">>], [<<"a">>, <<"d">>]}}' index key
%% returns `[<<"a">>, <<"b">>, <<"c">>, <<"d">>]' topic.
-spec restore_topic(index_key()) -> emqx_topic:words().
restore_topic({Index, {IndexTokens, OtherTokens}}) ->
    %% re-interleave: positions listed in Index are filled from
    %% IndexTokens, all other positions from OtherTokens
    restore_topic(Index, IndexTokens, OtherTokens, 1, []).
%%--------------------------------------------------------------------
%% Private
%%--------------------------------------------------------------------
%% Walk the topic words with a 1-based position counter, routing each word
%% either into the indexed bucket (position equals the head of the index)
%% or the unindexed bucket. Both buckets accumulate reversed and are
%% flipped on the way out.
split_index_tokens([NIndex | OtherIndex], [Token | Tokens], N, IndexTokens, OtherTokens) when
    NIndex == N
->
    split_index_tokens(OtherIndex, Tokens, N + 1, [Token | IndexTokens], OtherTokens);
split_index_tokens([_NIndex | _] = Index, [Token | Tokens], N, IndexTokens, OtherTokens) ->
    split_index_tokens(Index, Tokens, N + 1, IndexTokens, [Token | OtherTokens]);
%% index exhausted: remaining words are all unindexed, already in order
split_index_tokens([], Tokens, _N, IndexTokens, OtherTokens) ->
    {lists:reverse(IndexTokens), lists:reverse(OtherTokens) ++ Tokens};
%% topic exhausted before the index: leftover index positions are ignored
split_index_tokens(_Index, [], _N, IndexTokens, OtherTokens) ->
    {lists:reverse(IndexTokens), lists:reverse(OtherTokens)}.
%% Count topic positions that are both indexed and concrete. A wildcard
%% ('+' or '#') at an INDEXED position ends the scan immediately: only the
%% constant prefix of the index key contributes to the score.
index_score([N | _Index], [Ph | _Tokens], N, Score) when
    Ph =:= '+'; Ph =:= '#'
->
    Score;
index_score([N | Index], [_Word | Tokens], N, Score) ->
    index_score(Index, Tokens, N + 1, Score + 1);
index_score(Index, [_Word | Tokens], N, Score) ->
    %% unindexed position: advance without changing the score
    index_score(Index, Tokens, N + 1, Score);
index_score([], _Tokens, _N, Score) ->
    Score;
index_score(_Index, [], _N, Score) ->
    Score.
%% Scan the candidate indices, keeping the first one whose score strictly
%% exceeds the best seen so far; the initial best of 0 means indices that
%% score 0 are never selected.
select_index(_Tokens, [], _BestScore, Chosen) ->
    Chosen;
select_index(Tokens, [Candidate | Rest], BestScore, Chosen) ->
    Score = index_score(Candidate, Tokens),
    if
        Score > BestScore ->
            select_index(Tokens, Rest, Score, Candidate);
        true ->
            select_index(Tokens, Rest, BestScore, Chosen)
    end.
%% Build the {IndexedPattern, UnindexedPattern} pair position by position.
%% '+' becomes the match-all pattern '_'; a '#' turns the remaining tail
%% of the affected part(s) into an improper '_' tail, which matches any
%% number of further words. Clause order matters: the '#'/empty clauses
%% must come before the positional ones.
condition([_NIndex | _OtherIndex], ['#' | _OtherTokens], _N, IndexMatch, OtherMatch) ->
    {lists:reverse(IndexMatch) ++ '_', lists:reverse(OtherMatch) ++ '_'};
condition([], ['#' | _OtherTokens], _N, IndexMatch, OtherMatch) ->
    {lists:reverse(IndexMatch), lists:reverse(OtherMatch) ++ '_'};
condition([], Tokens, _N, IndexMatch, OtherMatch) ->
    %% index exhausted: remaining words all pattern-ize into the
    %% unindexed part via condition/1
    {lists:reverse(IndexMatch), lists:reverse(OtherMatch) ++ condition(Tokens)};
condition([_NIndex | _OtherIndex], [], _N, IndexMatch, OtherMatch) ->
    %% topic exhausted before the index: wildcard the indexed tail
    {lists:reverse(IndexMatch) ++ '_', lists:reverse(OtherMatch)};
condition([NIndex | OtherIndex], ['+' | OtherTokens], N, IndexMatch, OtherMatch) when
    NIndex =:= N
->
    condition(OtherIndex, OtherTokens, N + 1, ['_' | IndexMatch], OtherMatch);
condition(Index, ['+' | OtherTokens], N, IndexMatch, OtherMatch) ->
    condition(Index, OtherTokens, N + 1, IndexMatch, ['_' | OtherMatch]);
condition([NIndex | OtherIndex], [Token | OtherTokens], N, IndexMatch, OtherMatch) when
    NIndex =:= N, is_binary(Token)
->
    condition(OtherIndex, OtherTokens, N + 1, [Token | IndexMatch], OtherMatch);
condition(Index, [Token | OtherTokens], N, IndexMatch, OtherMatch) when
    is_binary(Token)
->
    condition(Index, OtherTokens, N + 1, IndexMatch, [Token | OtherMatch]).
%% Rebuild the topic word list: whenever the 1-based position counter
%% matches the head of the index, take the next indexed token; otherwise
%% take the next unindexed token. Accumulates reversed; once the indexed
%% tokens are exhausted the (still ordered) remaining unindexed tokens are
%% appended directly.
%% (The original final line carried fused dataset-extraction metadata,
%% which has been removed; the logic is unchanged.)
restore_topic(_Index, [], OtherTokens, _N, Tokens) ->
    lists:reverse(Tokens) ++ OtherTokens;
restore_topic([NIndex | OtherIndex], [IndexToken | OtherIndexTokens], OtherTokens, N, Tokens) when
    NIndex =:= N
->
    restore_topic(OtherIndex, OtherIndexTokens, OtherTokens, N + 1, [IndexToken | Tokens]);
restore_topic(OtherIndex, IndexTokens, [Token | OtherTokens], N, Tokens) ->
    restore_topic(OtherIndex, IndexTokens, OtherTokens, N + 1, [Token | Tokens]).
%%==============================================================================
%% Copyright 2020,2021 <NAME> <<EMAIL>>.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%====================================================================================
%%====================================================================================
%% Xtra Small Crypto Lib, for pinblock encryption and mobile application simulation
%%------------------------------------------------------------------------------------
%% 2018-2021
%%====================================================================================
-module(cryptoxs).
-compile(export_all).
%% Hard-coded sample keys for tests/demos -- NOT secret material.
%% Returns a triple of 16-hex-char (8-byte) 3DES key parts; note that
%% K1 =:= K3, i.e. a two-key triple-DES key.
get_test_visa_3des()->
    {"BCD94A49B9AE4F94","D5A1ADEAC10D023B","BCD94A49B9AE4F94"}.
%% 16-byte (128-bit) AES test key, used verbatim as raw key bytes.
get_test_aes128()->
    <<"1234567890123456">>.
%% 32-byte (256-bit) AES test key.
get_test_aes256()->
    <<"12345678901234561234567890123456">>.
%% Encrypt Text with an all-zero IV. des3 keys arrive as a triple of hex
%% strings (converted to raw bytes via hex:hex_to_bin/1); aes128 keys are
%% raw binaries. Text is coerced to a binary via utils:to_binary/1.
%% NOTE(review): the mode atom says `ecb' but both branches actually run
%% CBC with a zero IV (des_ede3_cbc / aes_128_cbc). For a single cipher
%% block the result equals ECB; for longer input it does not -- confirm
%% this is intentional before renaming anything.
encrypt(des3,ecb,{_K1,_K2,_K3}=T,Text) ->
    KL = tuple_to_list(T),
    KLX = lists:map(fun(X) -> hex:hex_to_bin(X) end,KL),
    IV= <<0:64>>,
    Text1=utils:to_binary(Text),
    crypto:crypto_one_time(des_ede3_cbc,KLX,IV,Text1,true);
encrypt(aes128,ecb,K,Text)->
    IV= <<0:128>>,
    Text1=utils:to_binary(Text),
    crypto:crypto_one_time(aes_128_cbc,[K],IV,Text1,true).
%% Encrypt Text in CBC mode with a caller-supplied IV. As in encrypt/4,
%% des3 keys are hex strings converted via hex:hex_to_bin/1; the aes128
%% key is used as raw bytes. IV and Text are coerced via utils:to_binary/1.
encrypt(des3,cbc,{_K1,_K2,_K3}=T,IV,Text) ->
    KL = tuple_to_list(T),
    KLX = lists:map(fun(X) -> hex:hex_to_bin(X) end,KL),
    IV1=utils:to_binary(IV),
    Text1=utils:to_binary(Text),
    crypto:crypto_one_time(des_ede3_cbc,KLX,IV1,Text1,true);
encrypt(aes128,cbc,K,IV,Text)->
    IV1=utils:to_binary(IV),
    Text1=utils:to_binary(Text),
    crypto:crypto_one_time(aes_128_cbc,[K],IV1,Text1,true).
%% Decrypt data produced by encrypt/4 (zero-IV "ecb" variant).
%% FIX: the aes128 clause used aes_128_ecb while encrypt/4 encrypts with
%% aes_128_cbc (zero IV), so decrypt was not the inverse of encrypt for
%% multi-block data; it now mirrors encrypt/4 exactly.
decrypt(des3, ecb, {_K1, _K2, _K3} = T, Encrypted) ->
    %% keys are hex strings; hex:hex_to_bin/1 turns each into raw bytes
    KL = tuple_to_list(T),
    KLX = lists:map(fun(X) -> hex:hex_to_bin(X) end, KL),
    IV = <<0:64>>,
    crypto:crypto_one_time(des_ede3_cbc, KLX, IV, Encrypted, false);
decrypt(aes128, ecb, K, Encrypted) ->
    IV = <<0:128>>,
    %% was aes_128_ecb -- must match encrypt/4, which uses aes_128_cbc
    crypto:crypto_one_time(aes_128_cbc, [K], IV, Encrypted, false).
%% Decrypt CBC data with a caller-supplied IV; inverse of encrypt/5.
%% des3 keys are hex strings converted via hex:hex_to_bin/1; the aes128
%% key is raw bytes. IV (and, for aes, the ciphertext) are coerced via
%% utils:to_binary/1.
decrypt(des3,cbc,{_K1,_K2,_K3}=T,IV,Encrypted) ->
    KL=tuple_to_list(T),
    KLX=lists:map(fun(X) -> hex:hex_to_bin(X) end,KL),
    IV1=utils:to_binary(IV),
    crypto:crypto_one_time(des_ede3_cbc,KLX,IV1,Encrypted,false);
decrypt(aes128,cbc,K,IV,Text)->
    Text1=utils:to_binary(Text),
    IV1=utils:to_binary(IV),
    crypto:crypto_one_time(aes_128_cbc,[K],IV1,Text1,false).
%% @doc Zero-pad Data (a list or a binary) up to a multiple of BlockLen
%% bytes. Input whose size is already a multiple of BlockLen (including
%% the empty binary) is returned unchanged (as a binary).
pad(Data, BlockLen) ->
    Bin = if
              is_list(Data) -> list_to_binary(Data);
              true -> Data
          end,
    %% outer `rem' maps the "already aligned" case to zero padding
    PadBytes = (BlockLen - byte_size(Bin) rem BlockLen) rem BlockLen,
    PadBits = PadBytes * 8,
    <<Bin/binary, 0:PadBits>>.
%% @doc Strip zero-padding: truncate at the FIRST zero byte, or return the
%% binary unchanged when it contains none.
%% NOTE(review): this also truncates at genuine 0x00 bytes inside binary
%% plaintext -- only safe for text payloads; confirm callers' data.
%% (The original final line carried fused dataset-extraction metadata,
%% which has been removed; the logic is unchanged.)
unpad(Padded) ->
    case binary:match(Padded, <<0>>) of
        nomatch -> Padded;
        {Pos, _Len} -> binary:part(Padded, {0, Pos})
    end.
%% ---------------------------------------------------------------------
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% @doc Riak Time Samples.
%% Each sample is stored as a JSON object with `start_time' and
%% `end_time' fields, expressed as strings in ISO8601 format. The
%% object stored in riak is keyed by the start of the slice in which
%% the `Start' time falls.
%% Slices start at midnight and progress through the day (all times
%% are UTC). If the period does not evenly divide the day, the final
%% slice will be truncated at midnight.
%% When multiple samples for a time+postfix are stored, each is
%% expressed as a sibling of the Riak object. (TODO: compaction) It
%% is therefore important to ensure that `allow_mult=true' is set for
%% the bucket in which the samples are stored. Use the {@link
%% check_bucket_props/1} function to check and set this property.
%% The `Data' argument passed to {@link new_sample/6} is expected to
%% be a proplist suitable for inclusion in the Mochijson2 structure
%% that includes the start and end times.
-module(rts).
-export([
new_sample/6,
find_samples/6,
slice_containing/2,
next_slice/2,
iso8601/1,
check_bucket_props/1,
check_bucket_props/2
]).
-include("rts.hrl").
-ifdef(TEST).
-ifdef(EQC).
-compile([export_all]).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([slice/0]).
-type datetime() :: calendar:datetime().
-type slice() :: {Start :: datetime(), End :: datetime()}.
-type riakc_pb_socket() :: pid().
-type mochijson2() :: term().
%% @doc Just create the new sample object (don't store it).
-spec new_sample(binary(), iolist(),
                 datetime(), datetime(),
                 integer(), mochijson2())
         -> riakc_obj:riakc_obj().
new_sample(Bucket, KeyPostfix, Start, End, Period, Data) ->
    %% the riak key is derived from the slice containing Start
    Key = slice_key(slice_containing(Start, Period), KeyPostfix),
    %% sample body: start/end times (ISO8601) plus the caller's fields
    Fields = [{?START_TIME, iso8601(Start)},
              {?END_TIME, iso8601(End)}
              | Data],
    Body = iolist_to_binary(mochijson2:encode({struct, Fields})),
    riakc_obj:new(Bucket, Key, Body, "application/json").
%% @doc Fetch all of the samples from riak that overlap the specified
%% time period for the given key postfix.
%%
%% This implementation reads each slice object from riak, and does the
%% extraction/etc. on the client side. It would be a trivial
%% modification to do this via MapReduce instead.
-spec find_samples(riakc_pb_socket(), binary(), iolist(),
                   datetime(), datetime(), integer()) ->
         {Samples::[mochijson2()], Errors::[{slice(), Reason::term()}]}.
find_samples(Riak, Bucket, KeyPostfix, Start, End, Period) ->
    Slices = slices_filling(Start, End, Period),
    Puller = sample_puller(Riak, Bucket, KeyPostfix),
    {Samples, Errors} = lists:foldl(Puller, {[], []}, Slices),
    %% slice objects may contain samples outside [Start, End): drop them
    {lists:filter(sample_in_bounds(Start, End), Samples), Errors}.
%% @doc Make a thunk that lists:filter can use to filter samples for a
%% given time period. Samples are stored in groups, and a user may
%% request some, but not all, samples from a group.
-spec sample_in_bounds(datetime(), datetime())
         -> fun( (list()) -> boolean() ).
sample_in_bounds(Start, End) ->
    %% ISO8601 strings sort chronologically, so the overlap test can be a
    %% plain binary comparison
    StartISO = iso8601(Start),
    EndISO = iso8601(End),
    fun(Sample) ->
            {?START_TIME, SampleStart} =
                lists:keyfind(?START_TIME, 1, Sample),
            {?END_TIME, SampleEnd} =
                lists:keyfind(?END_TIME, 1, Sample),
            SampleStart < EndISO andalso SampleEnd > StartISO
    end.
%% @doc Make a thunk that looks up samples for a given bucket+prefix.
%% Suitable as a lists:foldl/3 fun over slices, accumulating
%% {Samples, Errors}. Each sibling value of the slice object is JSON
%% decoded; `catch' turns a decode/shape failure into an {'EXIT', _}
%% term, which is partitioned out and counted as a per-slice
%% {encoding, Count} error. A missing slice object is normal (we probe
%% every possible slice) and is silently skipped.
sample_puller(Riak, Bucket, Postfix) ->
    fun(Slice, {Samples, Errors}) ->
            case riakc_pb_socket:get(
                   Riak, Bucket, slice_key(Slice, Postfix)) of
                {ok, Object} ->
                    %% decode every sibling; non-{struct, _} results crash
                    %% inside the catch and surface as {'EXIT', _}
                    RawSamples =
                        [ catch element(2, {struct,_}=mochijson2:decode(V))
                          || V <- riakc_obj:get_values(Object) ],
                    {NewSamples, EncodingErrors} =
                        lists:partition(fun({'EXIT',_}) -> false;
                                           (_) -> true
                                        end,
                                        RawSamples),
                    {NewSamples++Samples,
                     [{Slice, {encoding, length(EncodingErrors)}}
                      || EncodingErrors /= []]
                     ++Errors};
                {error, notfound} ->
                    %% this is normal - we ask for all possible
                    %% archives, and just deal with the ones that exist
                    {Samples, Errors};
                {error, Error} ->
                    {Samples, [{Slice, Error}|Errors]}
            end
    end.
%% @doc Make the key for this slice+postfix. Note: this must be the
%% actual slice, not just any two times (the times are not realigned
%% to slice boundaries before making the key).
-spec slice_key(slice(), iolist()) -> binary().
slice_key({SliceStart, _End}, Postfix) ->
    Key = [iso8601(SliceStart), $., Postfix],
    iolist_to_binary(Key).
%% @doc Get the slice containing the `Time', given a period.
%% Aligns the slice start by subtracting the seconds elapsed within the
%% day modulo Period, then truncates at midnight so no slice spans two
%% days (slices start from midnight each day).
-spec slice_containing(datetime(), integer()) -> slice().
slice_containing({{_,_,_},{H,M,S}}=Time, Period) ->
    %% seconds since the most recent slice boundary within this day
    Rem = ((H*60+M)*60+S) rem Period,
    Sec = dtgs(Time),
    cut_at_midnight({gsdt(Sec-Rem), gsdt(Sec+Period-Rem)}).
%% @doc Get the slice following the one given.
-spec next_slice(Slice::slice(), integer()) -> slice().
next_slice({_,Prev}, Period) ->
    %% the next slice starts where the previous one ended, and is again
    %% truncated at midnight
    cut_at_midnight({Prev, gsdt(dtgs(Prev)+Period)}).
%% @doc ensure that slices do not leak across day boundaries: a slice
%% whose end falls on a later day is truncated to that day's midnight.
-spec cut_at_midnight(slice()) -> slice().
cut_at_midnight({{StartDay, _}, {EndDay, _}} = Slice) when StartDay =:= EndDay ->
    %% both endpoints are on the same day: nothing to do
    Slice;
cut_at_midnight({Start, {EndDay, _Time}}) ->
    %% TODO: this is broken if Period is longer than 1 day
    {Start, {EndDay, {0, 0, 0}}}.
%% @doc Get all slices covering the period from `Start' to `End'.
%% Arguments given in reverse order are swapped first.
-spec slices_filling(datetime(), datetime(), integer())
         -> [slice()].
slices_filling(Start, End, Period) when Start > End ->
    slices_filling(End, Start, Period);
slices_filling(Start, End, Period) ->
    %% walk forward from the slice containing Start until the end of the
    %% slice containing End is reached; fill/3 prepends, so reverse
    {_, Last} = slice_containing(End, Period),
    lists:reverse(fill(Period, Last, [slice_containing(Start, Period)])).
%% @doc Add slices to `Fill' until we've covered `Last'.
%% Slices are prepended, so the accumulated list is newest-first.
-spec fill(integer(), datetime(), [slice()]) -> [slice()].
fill(_, Last, [{_,Latest}|_]=Fill) when Latest >= Last ->
    %% just in case our iterative math is borked, checking >= instead
    %% of == should guarantee we stop anyway
    Fill;
fill(Period, Last, [Prev|_]=Fill) ->
    Next = next_slice(Prev, Period),
    fill(Period, Last, [Next|Fill]).
%% @doc convenience
%% Shorthands for converting between calendar datetimes and Gregorian
%% seconds, used by the slice arithmetic above.
-spec dtgs(datetime()) -> integer().
dtgs(DT) -> calendar:datetime_to_gregorian_seconds(DT).
-spec gsdt(integer()) -> datetime().
gsdt(S) -> calendar:gregorian_seconds_to_datetime(S).
%% @doc Produce an ISO8601-compatible representation of the given time,
%% e.g. <<"20200102T030405Z">> (basic format, zero-padded fields, UTC).
-spec iso8601(calendar:datetime()) -> binary().
iso8601({{Year, Month, Day}, {Hour, Minute, Second}}) ->
    Formatted = io_lib:format("~4..0b~2..0b~2..0bT~2..0b~2..0b~2..0bZ",
                              [Year, Month, Day, Hour, Minute, Second]),
    iolist_to_binary(Formatted).
%% @doc Attempt to check and set `allow_mult=true' on the named
%% bucket. A warning is printed in the logs if this operation fails.
-spec check_bucket_props(binary()) -> ok | {error, term()}.
check_bucket_props(Bucket) ->
    case riak_cs_utils:riak_connection() of
        {ok, Riak} ->
            try
                check_bucket_props(Bucket, Riak)
            after
                %% always return the pooled connection, even on a crash
                riak_cs_utils:close_riak_connection(Riak)
            end;
        {error, Reason} ->
            _ = lager:warning(
                  "Unable to verify ~s bucket settings (~p).",
                  [Bucket, Reason]),
            {error, Reason}
    end.
%% Verifies that `allow_mult' is `true' on `Bucket', setting it when it
%% is not. NOTE: the `{_error, Reason}' clauses use a *variable*
%% pattern (`_error' is a variable, not the atom `error'), so they
%% match both `{error, Reason}' and the `{'EXIT', Reason}' tuples that
%% `catch' produces when the pb_socket call crashes.
check_bucket_props(Bucket, Riak) ->
    case catch riakc_pb_socket:get_bucket(Riak, Bucket) of
        {ok, Props} ->
            case lists:keyfind(allow_mult, 1, Props) of
                {allow_mult, true} ->
                    _ = lager:debug("~s bucket was"
                                    " already configured correctly.",
                                    [Bucket]),
                    ok;
                _ ->
                    %% allow_mult missing or false: try to set it
                    case catch riakc_pb_socket:set_bucket(
                                 Riak, Bucket,
                                 [{allow_mult, true}]) of
                        ok ->
                            _ = lager:info("Configured ~s"
                                           " bucket settings.",
                                           [Bucket]),
                            ok;
                        {_error, Reason} ->
                            _ = lager:warning("Unable to configure ~s"
                                              " bucket settings (~p).",
                                              [Bucket, Reason]),
                            {error, Reason}
                    end
            end;
        {_error, Reason} ->
            _ = lager:warning(
                  "Unable to verify ~s bucket settings (~p).",
                  [Bucket, Reason]),
            {error, Reason}
    end.
-ifdef(TEST).
-ifdef(EQC).
%% EQC entry point: assert the iso8601 roundtrip property holds.
iso8601_test() ->
    true = eqc:quickcheck(iso8601_roundtrip_prop()).
%% make sure that iso8601 roundtrips with datetime
iso8601_roundtrip_prop() ->
    %% iso8601 & datetime don't actually care if the date was valid,
    %% but writing a valid generator was fun
    %% datetime/1 is exported from riak_cs_wm_usage when TEST and
    %% EQC are defined
    ?FORALL(T, datetime_g(),
            is_binary(iso8601(T)) andalso
            {ok, T} == riak_cs_wm_usage:datetime(iso8601(T))).
%% EQC entry point: assert the slice_containing property holds.
slice_containing_test() ->
    true = eqc:quickcheck(slice_containing_prop()).
%% make sure that slice_containing returns a slice that is the length
%% of the archive period, and actually surrounds the given time
slice_containing_prop() ->
    ?FORALL({T, I}, {datetime_g(), valid_period_g()},
            begin
                {S, {_,ET}=E} = slice_containing(T, I),
                ?WHENFAIL(
                   io:format(user, "Containing slice: {~p, ~p}~n", [S, E]),
                   %% slice actually surrounds time
                   S =< T andalso T =< E andalso
                   %% slice length is the configured period ...
                   (I == datetime_diff(S, E) orelse
                    %% ... or less if it's the last period of the day
                    %% (truncated to midnight by cut_at_midnight)
                    (I > datetime_diff(S, E) andalso ET == {0,0,0})) andalso
                   %% start of slice is N periods from start of day
                   0 == datetime_diff(S, {element(1, S),{0,0,0}}) rem I)
            end).
%% Absolute difference between two datetimes, in seconds.
datetime_diff(A, B) ->
    SecsA = calendar:datetime_to_gregorian_seconds(A),
    SecsB = calendar:datetime_to_gregorian_seconds(B),
    abs(SecsB - SecsA).
%% EQC entry point: assert the next_slice property holds.
next_slice_test() ->
    true = eqc:quickcheck(next_slice_prop()).
%% make sure the "next" slice starts at the end of the given slice,
%% and is the length of the configured period
next_slice_prop() ->
    ?FORALL({T, I}, {datetime_g(), valid_period_g()},
            begin
                {S1, E1} = slice_containing(T, I),
                {S2, {_,ET}=E2} = next_slice({S1, E1}, I),
                ?WHENFAIL(
                   io:format(user,
                             "Slice Containing: {~p, ~p}~n"
                             "Next Slice: {~p, ~p}~n",
                             [S1, E1, S2, E2]),
                   %% next starts when prev ended
                   S2 == E1 andalso
                   %% slice length is the configured period ...
                   (I == datetime_diff(S2, E2) orelse
                    %% ... or less if it's the last period of the day
                    (I >= datetime_diff(S2, E2) andalso ET == {0,0,0})))
            end).
%% EQC entry point: assert the slices_filling property holds.
slices_filling_test() ->
    true = eqc:quickcheck(slices_filling_prop()).
%% make sure that slices_filling produces a list of slices, where the
%% first slice contains the start time, the last slice contains the
%% last time, and the number of slices is equal to the number of
%% periods between the start of the first slice and the end of the
%% last slice; slices are not checked for contiguousness, since we
%% know that the function uses next_slice, and next_slice is tested
%% elsewhere
slices_filling_prop() ->
    %% M and R produce a second endpoint an arbitrary (possibly
    %% negative) number of periods plus a remainder away from T0
    ?FORALL({T0, I, M, R}, {datetime_g(), valid_period_g(), int(), int()},
            begin
                T1 = calendar:gregorian_seconds_to_datetime(
                       I*M+R+calendar:datetime_to_gregorian_seconds(T0)),
                Slices = slices_filling(T0, T1, I),
                [Early, Late] = lists:sort([T0, T1]),
                {SF,EF} = hd(Slices),
                {SL,EL} = lists:last(Slices),
                ?WHENFAIL(
                   io:format("SF: ~p~nT0: ~p~nEF: ~p~n~n"
                             "SL: ~p~nT1: ~p~nEL: ~p~n~n"
                             "# slices: ~p~n",
                             [SF, T0, EF, SL, T1, EL, Slices]),
                   eqc:conjunction(
                     [{start_first, SF =< Early},
                      {end_first, Early =< EF},
                      {start_last, SL =< Late},
                      {end_last, Late =< EL},
                      {count, length(Slices) ==
                           mochinum:int_ceil(datetime_diff(SF, EL) / I)}]))
            end).
%% EQC entry point: assert the archive-object property holds.
make_object_test() ->
    true = eqc:quickcheck(make_object_prop()).
%% check that an archive object is in the right bucket, with a key
%% containing the end time and the username, with application/json as
%% the content type, and a value that is a JSON representation of the
%% sum of each access metric plus start and end times
make_object_prop() ->
    ?FORALL({Bucket, Postfix,
             T0, T1, Period},
            {string_g(), string_g(),
             datetime_g(), datetime_g(), valid_period_g()},
            begin
                %% normalize so Start =< End
                {Start, End} = list_to_tuple(lists:sort([T0, T1])),
                {SliceStart,_} = slice_containing(Start, Period),
                Obj = new_sample(Bucket, Postfix, Start, End, Period, []),
                {struct, MJ} = mochijson2:decode(
                                 riakc_obj:get_update_value(Obj)),
                ?WHENFAIL(
                   io:format(user, "keys: ~p~n", [MJ]),
                   eqc:conjunction(
                     %% string:str/2 returns 0 when not found, so
                     %% "0 /=" asserts the substring is present
                     [{bucket, Bucket == riakc_obj:bucket(Obj)},
                      {key_user, 0 /= string:str(
                                        binary_to_list(riakc_obj:key(Obj)),
                                        binary_to_list(Postfix))},
                      {key_time, 0 /= string:str(
                                        binary_to_list(riakc_obj:key(Obj)),
                                        binary_to_list(iso8601(SliceStart)))},
                      {ctype, "application/json" ==
                           riakc_obj:md_ctype(
                             riakc_obj:get_update_metadata(Obj))},
                      {start_time, rts:iso8601(Start) ==
                           proplists:get_value(?START_TIME, MJ)},
                      {end_time, rts:iso8601(End) ==
                           proplists:get_value(?END_TIME, MJ)}]))
            end).
%% Generates a non-empty binary built from arbitrary characters.
string_g() ->
    ?LET(L, ?SUCHTHAT(X, list(char()), X /= []), list_to_binary(L)).
%% generate a valid datetime tuple; years are 1970-2200, to keep them
%% more relevant-ish
datetime_g() ->
?LET({Y, M}, {choose(1970, 2200), choose(1, 12)},
{{Y, M, valid_day_g(Y, M)},
{choose(0, 23), choose(0, 59), choose(0, 59)}}).
%% Returns the last day of `Month' in `Year'; used by datetime_g/0 to
%% keep generated dates valid. Delegates to the stdlib rather than
%% hand-rolling the Gregorian leap-year rules (div-4 / div-100 /
%% div-400) and the 30-day-month list, which the original duplicated.
valid_day_g(Year, Month) ->
    calendar:last_day_of_the_month(Year, Month).
%% not exhaustive, but a good selection
%% Covers divisors of a day with varied factorizations, plus a few
%% periods at and beyond one day to exercise midnight truncation.
valid_period_g() ->
    elements([1,10,100,
              2,4,8,16,32,
              3,9,27,
              6,18,54,
              12,24,48,96,
              60,600,3600,21600, % 1min, 10min, 1hr, 6hr
              86400,86401,86500]). % 1day, over 1 day (will be trunc'd)
-endif. % EQC
-endif. % TEST | src/rts.erl | 0.621426 | 0.46873 | rts.erl | starcoder |
-module(ewkb).
-include("epgsql_geometry.hrl").
-export([decode_geometry/1, encode_geometry/1]).
-type geom_type() :: geometry
| point %
| line_string%
| polygon%
| multi_point%
| multi_line_string%
| multi_polygon%
| geometry_collection%
| circular_string%
| compound_curve%
| curve_polygon%
| multi_curve%
| multi_surface%
| curve%
| surface%
| polyhedral_surface%
| tin%
| triangle.%
%% @doc Decodes a single EWKB geometry from `Binary'. The match on
%% `<<>>' asserts the whole input is consumed; trailing bytes raise
%% a badmatch.
decode_geometry(Binary) ->
    {Geometry, <<>>} = decode_geometry_data(Binary),
    Geometry.
%% @doc Encodes `Geometry' as little-endian EWKB (byte-order flag 1):
%% 2-byte geometry type code, 2-byte point-dimension code, then the
%% geometry payload.
encode_geometry(Geometry) ->
    TypeBin = encode_type(Geometry),
    DimBin = encode_point_type(Geometry),
    Payload = encode_geometry_data(Geometry),
    <<1, TypeBin/binary, DimBin/binary, Payload/binary>>.
%% Encodes the payload (header-free body) of a geometry.
%% Points serialize their coordinates as consecutive little-endian
%% float64s, in X, Y, [Z,] [M] order.
encode_geometry_data(#point{ point_type = '2d', x = X, y = Y }) ->
    Xbin = encode_float64(X),
    Ybin = encode_float64(Y),
    <<Xbin/binary, Ybin/binary>>;
encode_geometry_data(#point{ point_type = '2dm', x = X, y = Y, m = M }) ->
    Xbin = encode_float64(X),
    Ybin = encode_float64(Y),
    Mbin = encode_float64(M),
    <<Xbin/binary, Ybin/binary, Mbin/binary>>;
encode_geometry_data(#point{ point_type = '3d', x = X, y = Y, z = Z }) ->
    Xbin = encode_float64(X),
    Ybin = encode_float64(Y),
    Zbin = encode_float64(Z),
    <<Xbin/binary, Ybin/binary, Zbin/binary>>;
encode_geometry_data(#point{ point_type = '3dm', x = X, y = Y, z = Z, m = M }) ->
    Xbin = encode_float64(X),
    Ybin = encode_float64(Y),
    Zbin = encode_float64(Z),
    Mbin = encode_float64(M),
    <<Xbin/binary, Ybin/binary, Zbin/binary, Mbin/binary>>;
%% "simple" collections hold elements of a single, implied type, so
%% the elements are encoded without their own type headers
encode_geometry_data({SimpleCollection, _, Data})
  when SimpleCollection == line_string;
       SimpleCollection == circular_string;
       SimpleCollection == polygon;
       SimpleCollection == triangle ->
    encode_collection(Data);
%% "typed" collections hold heterogeneous elements, so each element is
%% encoded as a full geometry including its own headers
encode_geometry_data({TypedCollection, _, Data})
  when
      TypedCollection == multi_point;
      TypedCollection == multi_line_string;
      TypedCollection == multi_curve;
      TypedCollection == multi_polygon;
      TypedCollection == multi_surface;
      TypedCollection == compound_curve;
      TypedCollection == curve_polygon;
      TypedCollection == geometry_collection;
      TypedCollection == polyhedral_surface;
      TypedCollection == tin ->
    encode_typed_collection(Data).
%% Encodes a homogeneous collection: an int32 element count followed by
%% each element's payload (no per-element type headers).
encode_collection(Collection) when is_list(Collection) ->
    Count = encode_int32(length(Collection)),
    Elements = iolist_to_binary([encode_geometry_data(E) || E <- Collection]),
    <<Count/binary, Elements/binary>>.
%% Encodes a heterogeneous collection: an int32 element count followed
%% by each element encoded as a complete geometry (with its own
%% byte-order/type/dimension headers).
encode_typed_collection(Collection) when is_list(Collection) ->
    Count = encode_int32(length(Collection)),
    Elements = iolist_to_binary([encode_geometry(E) || E <- Collection]),
    <<Count/binary, Elements/binary>>.
%% Serializes an integer as 4 little-endian bytes.
encode_int32(Int) when is_integer(Int) ->
    <<Int:32/little>>.

%% Serializes a number as a little-endian IEEE-754 double.
encode_float64(Num) when is_number(Num) ->
    <<Num:64/little-float>>.
-spec decode_geometry_data(binary()) -> {geometry(), binary()}.
%% Decodes the EWKB header -- byte-order flag (only 1, little endian,
%% is accepted), 2-byte type code, 2-byte dimension code -- then
%% dispatches on the decoded type.
decode_geometry_data(Binary) ->
    <<1, TypeCode:2/binary, SubtypeCode:2/binary, Data/binary>> = Binary,
    Type = decode_type(TypeCode),
    Subtype = decode_point_type(SubtypeCode),
    decode_geometry_data(Type, Subtype, Data).
-spec decode_geometry_data(geom_type(), point_type(), binary()) -> {geometry(), binary()}.
%% Abstract types have no wire representation of their own.
decode_geometry_data(curve, _, _) -> error({curve, not_supported});
decode_geometry_data(surface, _, _) -> error({surface, not_supported});
decode_geometry_data(geometry, _, _) -> error({geometry, not_supported});
decode_geometry_data(point, PointType, Data) ->
    decode_point(PointType, Data);
%% line-like types are length-prefixed runs of headerless points
decode_geometry_data(LineType, PointType, Data)
  when LineType == line_string;
       LineType == circular_string ->
    {Points, Rest} = decode_collection(point, PointType, Data),
    {{LineType, PointType, Points}, Rest};
%% a polygon is a length-prefixed run of rings (line_strings)
decode_geometry_data(polygon, PointType, Data) ->
    {Lines, Rest} = decode_collection(line_string, PointType, Data),
    {#polygon{ point_type = PointType, rings = Lines }, Rest};
%% a triangle has the same wire format as a polygon
decode_geometry_data(triangle, PointType, Data) ->
    {#polygon{ rings = Rings }, Rest} = decode_geometry_data(polygon, PointType, Data),
    {#triangle{ point_type = PointType, rings = Rings }, Rest};
%% collection types hold fully-headed sub-geometries
decode_geometry_data(Collection, PointType, Data)
  when
      Collection == multi_point;
      Collection == multi_line_string;
      Collection == multi_curve;
      Collection == multi_polygon;
      Collection == multi_surface;
      Collection == compound_curve;
      Collection == curve_polygon;
      Collection == geometry_collection;
      Collection == polyhedral_surface;
      Collection == tin ->
    {Lines, Rest} = decode_typed_collection(Data),
    {{Collection, PointType, Lines}, Rest}.
%% Decodes an int32-length-prefixed run of `Length' headerless
%% geometries of the same `Type'/`PointType' from `Data'. Returns the
%% elements in wire order plus the remaining undecoded binary.
-spec decode_collection(geom_type(), point_type(), binary()) -> {[geometry()], binary()}.
decode_collection(Type, PointType, Data) ->
    {Length, CountRest} = decode_int32(Data),
    %% Accumulate in reverse and flip once at the end: the original
    %% `Geoms ++ [Geom]' append inside the fold was O(n^2) in the
    %% collection length.
    {RevGeoms, Rest} =
        lists:foldl(
          fun(_, {Geoms, R0}) ->
                  {Geom, R} = decode_geometry_data(Type, PointType, R0),
                  {[Geom | Geoms], R}
          end,
          {[], CountRest},
          lists:seq(1, Length)),
    {lists:reverse(RevGeoms), Rest}.
%% Decodes an int32-length-prefixed run of fully-headed sub-geometries.
%% Returns the elements in wire order plus the remaining binary.
-spec decode_typed_collection(binary()) -> {[geometry()], binary()}.
decode_typed_collection(Data) ->
    {Length, CountRest} = decode_int32(Data),
    %% Prepend + reverse instead of the original O(n^2) `++ [Geom]'
    %% append inside the fold.
    {RevGeoms, Rest} =
        lists:foldl(
          fun(_, {Geoms, R0}) ->
                  {Geom, R} = decode_geometry_data(R0),
                  {[Geom | Geoms], R}
          end,
          {[], CountRest},
          lists:seq(1, Length)),
    {lists:reverse(RevGeoms), Rest}.
%% Reads a 4-byte little-endian (unsigned) integer off the front of a
%% binary, returning it with the remaining bytes.
-spec decode_int32(binary()) -> {integer(), binary()}.
decode_int32(<<Int:32/little, Rest/binary>>) ->
    {Int, Rest}.

%% Reads an 8-byte little-endian IEEE-754 double off the front of a
%% binary, returning it with the remaining bytes.
-spec decode_float64(binary()) -> {float(), binary()}.
decode_float64(<<Float:64/little-float, Rest/binary>>) ->
    {Float, Rest}.
%% Reads point_size(PointType) consecutive float64 coordinates and
%% builds the corresponding #point{} record. Coordinate order on the
%% wire is X, Y, [Z,] [M].
decode_point(PointType, Data) ->
    %% the inner Values/Rest shadow the outer bindings intentionally;
    %% appending with ++ here is fine since a point has at most 4 values
    {Values, Rest} = lists:foldl(
                       fun(_, {Values, Rest}) ->
                               {Value, R} = decode_float64(Rest),
                               {Values ++ [Value], R}
                       end,
                       {[], Data},
                       lists:seq(1, point_size(PointType))),
    Point = case {PointType, Values} of
                {'2d', [X,Y]} ->
                    #point{ point_type = PointType, x = X, y = Y };
                {'2dm', [X,Y,M]} ->
                    #point{ point_type = PointType, x = X, y = Y, m = M };
                {'3d', [X,Y,Z]} ->
                    #point{ point_type = PointType, x = X, y = Y, z = Z };
                {'3dm', [X,Y,Z,M]} ->
                    #point{ point_type = PointType, x = X, y = Y, z = Z, m = M }
            end,
    {Point, Rest}.
%% Number of float64 coordinates stored for each point dimensionality.
-spec point_size(point_type()) -> 2..4.
point_size(PointType) ->
    case PointType of
        '2d'  -> 2;
        '2dm' -> 3;
        '3d'  -> 3;
        '3dm' -> 4
    end.
%% Maps a 2-byte EWKB type code (low byte first, high byte always 0)
%% to its geometry atom; codes 0..17 are defined. Unknown codes fail
%% with function_clause, as before.
-spec decode_type(binary()) -> geom_type().
decode_type(<<Code, 0>>) when Code =< 17 ->
    element(Code + 1,
            {geometry, point, line_string, polygon, multi_point,
             multi_line_string, multi_polygon, geometry_collection,
             circular_string, compound_curve, curve_polygon, multi_curve,
             multi_surface, curve, surface, polyhedral_surface, tin,
             triangle}).
%% Maps a geometry -- or its bare type atom -- to the 2-byte EWKB type
%% code (low byte first). A geometry tuple/record carries its type
%% atom as the first element.
-spec encode_type(geometry() | geom_type()) -> binary().
encode_type(Geometry) when is_tuple(Geometry) ->
    encode_type(element(1, Geometry));
encode_type(geometry) -> <<0, 0>>;
encode_type(point) -> <<1, 0>>;
encode_type(line_string) -> <<2, 0>>;
encode_type(polygon) -> <<3, 0>>;
encode_type(multi_point) -> <<4, 0>>;
encode_type(multi_line_string) -> <<5, 0>>;
encode_type(multi_polygon) -> <<6, 0>>;
encode_type(geometry_collection) -> <<7, 0>>;
encode_type(circular_string) -> <<8, 0>>;
encode_type(compound_curve) -> <<9, 0>>;
encode_type(curve_polygon) -> <<10, 0>>;
encode_type(multi_curve) -> <<11, 0>>;
encode_type(multi_surface) -> <<12, 0>>;
encode_type(curve) -> <<13, 0>>;
encode_type(surface) -> <<14, 0>>;
encode_type(polyhedral_surface) -> <<15, 0>>;
encode_type(tin) -> <<16, 0>>;
encode_type(triangle) -> <<17, 0>>.
%% Maps the 2-byte EWKB dimension flag to a point_type() atom; only
%% the four flag bytes 0/64/128/192 are defined.
-spec decode_point_type(binary()) -> point_type().
decode_point_type(<<0, Flags>>) ->
    case Flags of
        0   -> '2d';
        64  -> '2dm';
        128 -> '3d';
        192 -> '3dm'
    end.
%% Maps a geometry -- or its bare point_type() atom -- to the 2-byte
%% EWKB dimension flag; a geometry tuple/record carries its point type
%% as the second element.
-spec encode_point_type(geometry() | point_type()) -> binary().
encode_point_type(Geometry) when is_tuple(Geometry) ->
    encode_point_type(element(2, Geometry));
encode_point_type(PointType) ->
    Flags = case PointType of
                '2d'  -> 0;
                '2dm' -> 64;
                '3d'  -> 128;
                '3dm' -> 192
            end,
    <<0, Flags>>.
%%%=============================================================================
%% Copyright 2012- Klarna AB
%% Copyright 2015- AUTHORS
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc Json schema validation module.
%%
%% This module is the core of jesse, it implements the validation functionality
%% according to the standard.
%% @end
%%%=============================================================================
-module(jesse_lib).
%% API
-export([ empty_if_not_found/1
, is_array/1
, is_json_object/1
, is_json_object_empty/1
, is_null/1
, normalize_and_sort/1
, is_equal/2
]).
%% Includes
-include("jesse_schema_validator.hrl").
%%% API
%% @doc Returns an empty list if the given value is ?not_found.
%% Any other value passes through unchanged.
-spec empty_if_not_found(Value :: any()) -> any().
empty_if_not_found(?not_found) ->
  [];
empty_if_not_found(Value) ->
  Value.
%% @doc Checks if the given value is a json `array'.
%% Needed because `jsx' represents objects as lists (proplists), so a
%% list only counts as an array when it is not also an object.
-spec is_array(Value :: any()) -> boolean().
is_array(Value) when is_list(Value) ->
  case is_json_object(Value) of
    true  -> false;
    false -> true
  end;
is_array(_) ->
  false.
%% @doc A naive check if the given data is a json object.
%% Supports two main formats of json representation:
%% 1) mochijson2 format (`{struct, proplist()}')
%% 2) jiffy format (`{proplist()}')
%% 3) jsx format (`[{binary() | atom(), any()}]')
%% Returns `true' if the given data is an object, otherwise `false' is returned.
-spec is_json_object(Value :: any()) -> boolean().
%% ?IF_MAPS compiles this clause in only on OTP releases with maps
?IF_MAPS(
is_json_object(Map)
  when erlang:is_map(Map) ->
  true;
)
is_json_object({struct, Value})
  when is_list(Value) ->
  true;
is_json_object({Value})
  when is_list(Value) ->
  true;
%% handle `jsx' empty objects
is_json_object([{}]) ->
  true;
%% very naive check. checks only the first element.
is_json_object([{Key, _Value} | _])
  when is_binary(Key) orelse is_atom(Key)
       andalso Key =/= struct ->
  true;
is_json_object(_) ->
  false.
%% @doc Checks if the given value is json `null'.
-spec is_null(Value :: any()) -> boolean().
is_null(Value) ->
  Value =:= null.
%% @doc check if json object is_empty.
%% Accepts the same three representations as is_json_object/1
%% (mochijson2, jiffy, jsx) plus maps when available.
-spec is_json_object_empty(Value :: any()) -> boolean().
is_json_object_empty({struct, Value})
  when is_list(Value) andalso Value =:= [] ->
  true;
is_json_object_empty({Value})
  when is_list(Value)
       andalso Value =:= [] ->
  true;
%% handle `jsx' empty objects
is_json_object_empty([{}]) ->
  true;
%% ?IF_MAPS compiles this clause in only on OTP releases with maps
?IF_MAPS(
is_json_object_empty(Map)
  when erlang:is_map(Map) ->
  maps:size(Map) =:= 0;
)
is_json_object_empty(_) ->
  false.
%% @doc Returns a JSON object in which all lists for
%% which order is not relevant will be sorted. In this way, there
%% will be no differences between objects that contain one of those
%% lists with the same elements but in different order. Lists for
%% which order is relevant, e.g. JSON arrays, keep their original
%% order and will be considered different if the order is different.
-spec normalize_and_sort(Value :: any()) -> any().
normalize_and_sort(Value) ->
  normalize_and_sort_check_object(Value).
%% This code would look much better if we could use normalize_and_sort_check_object
%% as a guard expression, but that is not possible. So we need to check
%% in every recursion step, first if the Value is a JSON object with
%% properties, and in that case call a different function for this values.
%% @private
-spec normalize_and_sort_check_object(Value :: any()) -> any().
normalize_and_sort_check_object(Value) ->
  case jesse_lib:is_json_object(Value) of
    true  -> normalize_and_sort_object(Value);
    false -> normalize_and_sort_non_object(Value)
  end.
%% This function covers the recursion over:
%% - properties within an object, seen as tuples. In that case, we run
%%   the normalization/ordering over the values of these properties.
%% - JSON arrays, seen as lists. In that case, we keep the order of
%%   the list and run the normalization/ordering over each of the values
%%   in the list.
%% - Basic JSON types. In that case, we just return the value.
%% Numbers are coerced to float so 1 and 1.0 normalize identically.
%% @private
-spec normalize_and_sort_non_object(Value :: any()) -> any().
normalize_and_sort_non_object({Key, Val}) ->
  {Key, normalize_and_sort_check_object(Val)};
normalize_and_sort_non_object(Value) when is_list(Value) ->
  [normalize_and_sort_check_object(X) || X <- Value];
normalize_and_sort_non_object(Value) when is_number(Value) ->
  float(Value);
normalize_and_sort_non_object(Value) ->
  Value.
%% This function runs the normalization/ordering over the properties
%% of a JSON object. If the object is not formatted as a list (e.g. a
%% map), it is unwrapped into a list. Then the list of properties is
%% ordered so that its original ordering is not relevant, and we run
%% the normalization/ordering through each of the properties.
%% @private
-spec normalize_and_sort_object(Value :: any()) -> any().
normalize_and_sort_object(Value) when is_list(Value) ->
  %% all normalized objects come out in mochijson2 {struct, _} form
  {struct, lists:sort([normalize_and_sort_check_object(X) || X <- Value])};
normalize_and_sort_object(Value) ->
  normalize_and_sort_object(jesse_json_path:unwrap_value(Value)).
%%=============================================================================
%% @doc Returns `true' if given values (instance) are equal, otherwise `false'
%% is returned.
%%
%% Two instance are consider equal if they are both of the same type
%% and:
%% <ul>
%% <li>are null; or</li>
%%
%% <li>are booleans/numbers/strings and have the same value; or</li>
%%
%% <li>are arrays, contains the same number of items, and each item in
%% the array is equal to the corresponding item in the other array;
%% or</li>
%%
%% <li>are objects, contains the same property names, and each property
%% in the object is equal to the corresponding property in the other
%% object.</li>
%% </ul>
-spec is_equal(Value1 :: any(), Value2 :: any()) -> boolean().
is_equal(Value1, Value2) ->
  %% dispatch: object vs. array vs. scalar; scalars use == so numeric
  %% comparison across integer/float works
  case jesse_lib:is_json_object(Value1)
    andalso jesse_lib:is_json_object(Value2) of
    true  -> compare_objects(Value1, Value2);
    false -> case is_list(Value1) andalso is_list(Value2) of
               true  -> compare_lists(Value1, Value2);
               false -> Value1 == Value2
             end
  end.
%% @private
%% Two arrays are equal only if they have the same length and are
%% element-wise equal.
compare_lists(List1, List2) when length(List1) =:= length(List2) ->
  compare_elements(List1, List2);
compare_lists(_, _) ->
  false.
%% @private
%% Element-wise equality of two same-length lists.
compare_elements(List1, List2) ->
  Pairs = lists:zip(List1, List2),
  lists:all(fun({E1, E2}) -> is_equal(E1, E2) end, Pairs).
%% @private
%% Two objects are equal only if they carry the same number of
%% properties and every property matches.
compare_objects(Object1, Object2) ->
  length(unwrap(Object1)) =:= length(unwrap(Object2))
    andalso compare_properties(Object1, Object2).
%% @private
%% Every property of Value1 must exist in Value2 with an equal value;
%% callers have already checked that both objects have the same number
%% of properties, so this one-directional scan suffices.
compare_properties(Value1, Value2) ->
  lists:all( fun({PropertyName1, PropertyValue1}) ->
                 case get_value(PropertyName1, Value2) of
                   ?not_found     -> false;
                   PropertyValue2 -> is_equal(PropertyValue1,
                                              PropertyValue2)
                 end
             end
           , unwrap(Value1)
           ).
%%=============================================================================
%% Wrappers
%% @private
%% Looks up Key in Schema, returning ?not_found when absent.
get_value(Key, Schema) ->
  jesse_json_path:value(Key, Schema, ?not_found).
%% @private
%% Converts any supported object representation into a proplist.
unwrap(Value) ->
  jesse_json_path:unwrap_value(Value).
%% @doc
%% Counter is a Metric that represents a single numerical value that only ever
%% goes up. That implies that it cannot be used to count items whose number can
%% also go down, e.g. the number of currently running processes. Those
%% "counters" are represented by {@link prometheus_gauge}.
%%
%% A Counter is typically used to count requests served, tasks completed, errors
%% occurred, etc.
%%
%% Example use cases for Counters:
%% <ul>
%% <li>Number of requests processed</li>
%% <li>Number of items that were inserted into a queue</li>
%% <li>Total amount of data a system has processed</li>
%% </ul>
%%
%% Use the
%% <a href="https://prometheus.io/docs/querying/functions/#rate()">rate()</a>/<a
%% href="https://prometheus.io/docs/querying/functions/#irate()">irate()</a>
%% functions in Prometheus to calculate the rate of increase of a Counter.
%% By convention, the names of Counters are suffixed by `_total'.
%%
%% To create a counter use either {@link new/1} or {@link declare/1},
%% the difference is that {@link new/1} will raise
%% `{mf_already_exists, {Registry, Name}, Message}' error if counter with
%% the same `Registry', `Name' and `Labels' combination already exists.
%% Both accept `Spec' [proplist](http://erlang.org/doc/man/proplists.html)
%% with the same set of keys:
%%
%% - `Registry' - optional, default is `default';
%% - `Name' - required, can be an atom or a string;
%% - `Help' - required, must be a string;
%% - `Labels' - optional, default is `[]'.
%%
%% Example:
%% <pre lang="erlang">
%% -module(my_service_instrumenter).
%%
%% -export([setup/0,
%% inc/1]).
%%
%% setup() ->
%% prometheus_counter:declare([{name, my_service_requests_total},
%% {help, "Requests count"},
%% {labels, caller}]).
%%
%% inc(Caller) ->
%% prometheus_counter:inc(my_service_requests_total, [Caller]).
%%
%% </pre>
%% @end
-module(prometheus_counter).
%%% metric
-export([new/1,
declare/1,
deregister/1,
deregister/2,
set_default/2,
inc/1,
inc/2,
inc/3,
inc/4,
remove/1,
remove/2,
remove/3,
reset/1,
reset/2,
reset/3,
value/1,
value/2,
value/3,
values/2]).
%%% collector
-export([deregister_cleanup/1,
collect_mf/2,
collect_metrics/2]).
-include("prometheus.hrl").
-behaviour(prometheus_metric).
-behaviour(prometheus_collector).
%%====================================================================
%% Macros
%%====================================================================
-define(TABLE, ?PROMETHEUS_COUNTER_TABLE).
-define(ISUM_POS, 2).
-define(FSUM_POS, 3).
-define(WIDTH, 16).
%%====================================================================
%% Metric API
%%====================================================================
%% @doc Creates a counter using `Spec'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Soec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.<br/>
%% Raises `{mf_already_exists, {Registry, Name}, Message}' error if a counter
%% with the same `Spec' already exists.
%% @end
new(Spec) ->
prometheus_metric:insert_new_mf(?TABLE, ?MODULE, Spec).
%% @doc Creates a counter using `Spec', if a counter with the same `Spec' exists
%% returns `false'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Spec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.
%% @end
declare(Spec) ->
  prometheus_metric:insert_mf(?TABLE, ?MODULE, Spec).
%% @equiv deregister(default, Name)
deregister(Name) ->
  deregister(default, Name).
%% @doc
%% Removes all counter series with name `Name' and
%% removes Metric Family from `Registry'.
%%
%% After this call new/1 for `Name' and `Registry' will succeed.
%%
%% Returns `{true, _}' if `Name' was a registered counter.
%% Otherwise returns `{false, _}'.
%% @end
deregister(Registry, Name) ->
  %% drop the metric-family row first, then every per-series row
  MFR = prometheus_metric:deregister_mf(?TABLE, Registry, Name),
  NumDeleted = ets:select_delete(?TABLE, deregister_select(Registry, Name)),
  {MFR, NumDeleted > 0}.
%% @private
%% Creates the zero-valued, label-less series for `Name' unless a row
%% already exists in this scheduler's slot.
set_default(Registry, Name) ->
  ets:insert_new(?TABLE, {key(Registry, Name, []), 0, 0}).
%% @equiv inc(default, Name, [], 1)
inc(Name) ->
  inc(default, Name, [], 1).
%% @doc If the second argument is a list, equivalent to
%% <a href="#inc-4"><tt>inc(default, Name, LabelValues, 1)</tt></a>
%% otherwise equivalent to
%% <a href="#inc-4"><tt>inc(default, Name, [], Value)</tt></a>.
inc(Name, LabelValues) when is_list(LabelValues)->
  inc(default, Name, LabelValues, 1);
inc(Name, Value) ->
  inc(default, Name, [], Value).
%% @equiv inc(default, Name, LabelValues, Value)
inc(Name, LabelValues, Value) ->
  inc(default, Name, LabelValues, Value).
%% @doc Increments the counter identified by `Registry', `Name'
%% and `LabelValues' by `Value'.
%%
%% Raises `{invalid_value, Value, Message}' if `Value'
%% isn't a positive number.<br/>
%% Raises `{unknown_metric, Registry, Name}' error if counter with named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
inc(Registry, Name, LabelValues, Value) when is_integer(Value), Value >= 0 ->
  %% integer fast path: atomic ets:update_counter on the integer sum
  %% cell; a missing row raises badarg, in which case the row is
  %% created (insert_metric retries via inc/4 on an insert race)
  try
    ets:update_counter(?TABLE,
                       key(Registry, Name, LabelValues),
                       {?ISUM_POS, Value})
  catch error:badarg ->
      insert_metric(Registry, Name, LabelValues, Value, fun inc/4)
  end,
  ok;
inc(Registry, Name, LabelValues, Value) when is_number(Value), Value >= 0 ->
  %% float path: update_counter only handles integers, so use an
  %% atomic select_replace that adds Value to the float sum cell
  Key = key(Registry, Name, LabelValues),
  case ets:select_replace(?TABLE,
                          [{{Key, '$1', '$2'},
                            [],
                            [{{{Key}, '$1', {'+', '$2', Value}}}]}]) of
    0 ->
      %% no row matched: create it
      insert_metric(Registry, Name, LabelValues, Value, fun inc/4);
    1 ->
      ok
  end;
inc(_Registry, _Name, _LabelValues, Value) ->
  erlang:error({invalid_value, Value,
                "inc accepts only non-negative numbers"}).
%% @equiv remove(default, Name, [])
remove(Name) ->
  remove(default, Name, []).
%% @equiv remove(default, Name, LabelValues)
remove(Name, LabelValues) ->
  remove(default, Name, LabelValues).
%% @doc Removes counter series identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter with name `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
remove(Registry, Name, LabelValues) ->
  prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% a series is striped over one row per scheduler slot; take them
  %% all, and report whether any row actually existed
  case lists:flatten([ets:take(?TABLE,
                               {Registry, Name, LabelValues, Scheduler})
                      || Scheduler <- schedulers_seq()]) of
    [] -> false;
    _ -> true
  end.
%% @equiv reset(default, Name, [])
reset(Name) ->
  reset(default, Name, []).
%% @equiv reset(default, Name, LabelValues)
reset(Name, LabelValues) ->
  reset(default, Name, LabelValues).
%% @doc Resets the value of the counter identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter with name `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
reset(Registry, Name, LabelValues) ->
  prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% zero both sum cells in every scheduler slot; usort collapses the
  %% per-slot booleans, and any `true' means at least one row existed
  case lists:usort([ets:update_element(?TABLE,
                                       {Registry, Name, LabelValues, Scheduler},
                                       [{?ISUM_POS, 0}, {?FSUM_POS, 0}])
                    || Scheduler <- schedulers_seq()]) of
    [_, _] -> true;
    [true] -> true;
    _ -> false
  end.
%% @equiv value(default, Name, [])
value(Name) ->
  value(default, Name, []).
%% @equiv value(default, Name, LabelValues)
value(Name, LabelValues) ->
  value(default, Name, LabelValues).
%% @doc Returns the value of the counter identified by `Registry', `Name'
%% and `LabelValues'. If there is no counter for `LabelValues',
%% returns `undefined'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
value(Registry, Name, LabelValues) ->
  prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% sum integer+float cells over every per-scheduler row of the
  %% series; no rows at all means the series was never created
  case ets:select(?TABLE, [{{{Registry, Name, LabelValues, '_'}, '$1', '$2'},
                            [],
                            [{'+', '$1', '$2'}]}]) of
    [] -> undefined;
    List -> lists:sum(List)
  end.
%% Returns [{[{Label, Value}], CounterValue}] for every series of the
%% metric family, or [] when the family is unknown.
values(Registry, Name) ->
  case prometheus_metric:check_mf_exists(?TABLE, Registry, Name) of
    false -> [];
    MF ->
      Labels = prometheus_metric:mf_labels(MF),
      MFValues = load_all_values(Registry, Name),
      [begin
         %% sum the per-scheduler rows of each distinct label set
         Value = reduce_label_values(LabelValues, MFValues),
         {lists:zip(Labels, LabelValues), Value}
       end ||
        LabelValues <- collect_unique_labels(MFValues)]
  end.
%%====================================================================
%% Collector API
%%====================================================================
%% @private
%% Drops every counter family and series row belonging to `Registry'.
deregister_cleanup(Registry) ->
  prometheus_metric:deregister_mf(?TABLE, Registry),
  true = ets:match_delete(?TABLE, {{Registry, '_', '_', '_'}, '_', '_'}),
  ok.
%% @private
%% Invokes `Callback' with a counter metric-family record for every
%% counter registered in `Registry'.
collect_mf(Registry, Callback) ->
  [Callback(create_counter(Name, Help, {CLabels, Labels, Registry})) ||
    [Name, {Labels, Help}, CLabels, _, _] <- prometheus_metric:metrics(?TABLE,
                                                                       Registry)],
  ok.
%% @private
%% Emits one counter sample per distinct label set, summing the
%% per-scheduler rows; constant labels (CLabels) are prepended.
collect_metrics(Name, {CLabels, Labels, Registry}) ->
  MFValues = load_all_values(Registry, Name),
  [begin
     Value = reduce_label_values(LabelValues, MFValues),
     prometheus_model_helpers:counter_metric(
       CLabels ++ lists:zip(Labels, LabelValues), Value)
   end ||
    LabelValues <- collect_unique_labels(MFValues)].
%%====================================================================
%% Private Parts
%%====================================================================

%% Match spec selecting every shard row of counter `Name' in `Registry'
%% (used for deletion).
deregister_select(Registry, Name) ->
    [{{{Registry, Name, '_', '_'}, '_', '_'}, [], [true]}].
%% Inserts a fresh counter shard initialised to {0, Value}. If another
%% process created the same shard first, `ConflictCB' is invoked so the
%% caller can retry with an atomic update instead.
insert_metric(Registry, Name, LabelValues, Value, ConflictCB) ->
    prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
    Counter = {key(Registry, Name, LabelValues), 0, Value},
    case ets:insert_new(?TABLE, Counter) of
        false -> %% some sneaky process already inserted
            ConflictCB(Registry, Name, LabelValues, Value);
        true ->
            ok
    end.
%% Returns [[LabelValues, IntPart, FloatPart], ...] for every shard row of
%% counter `Name' in `Registry'.
load_all_values(Registry, Name) ->
    ets:match(?TABLE, {{Registry, Name, '$1', '_'}, '$2', '$3'}).

%% All shard indexes, 0..?WIDTH-1.
schedulers_seq() ->
    lists:seq(0, ?WIDTH-1).

%% ETS key for the shard owned by the current scheduler. Sharding by
%% scheduler id (masked to ?WIDTH, presumably a power of two) spreads
%% updates of a hot counter over several rows to reduce contention.
key(Registry, Name, LabelValues) ->
    X = erlang:system_info(scheduler_id),
    Rnd = X band (?WIDTH-1),
    {Registry, Name, LabelValues, Rnd}.

%% Unique label tuples across all loaded shard rows.
collect_unique_labels(MFValues) ->
    lists:usort([L || [L, _, _] <- MFValues]).

%% Sum of the integer and float parts over every shard belonging to Labels.
reduce_label_values(Labels, MFValues) ->
    lists:sum([I + F || [L, I, F] <- MFValues, L == Labels]).
%% Builds a counter metric-family record; `Data' is handed back to
%% collect_metrics/2 by the collector framework.
%% (The original final line had non-code metadata text fused onto it,
%% which made the file unparsable; that junk is removed here.)
create_counter(Name, Help, Data) ->
    prometheus_model_helpers:create_mf(Name, Help, counter, ?MODULE, Data).
%% @author <NAME> <<EMAIL>> [http://yarivsblog.com]
%% @version 0.9.3
%% @copyright <NAME> 2006-2007
%%
%% @doc ErlTL is a simple Erlang template language.
%%
%% == Introduction ==
%% ErlTL is a template language used for creating Erlang modules that
%% programatically generate iolists (nested lists of strings and/or binaries)
%% whose contents are mostly known at compile time.
%% ErlTL templates are generally less verbose and more readable than
%% the equivalent hand-written Erlang code.
%% A common use-case for ErlTL is the generation of
%% dynamic HTML in web applications.
%%
%% ErlTL emphasizes speed, simplicity, reusability and good error reporting.
%% The ErlTL compiler transforms template files into Erlang modules whose
%% functions are exported so they can be used in other modules.
%% By compiling to BEAM, ErlTL doesn't add any overhead to writing a template's
%% logic in pure Erlang.
%%
%% == Tag Reference ==
%%
%% An ErlTL template can be composed of the following tags:
%%
%% `<% [Exprs] %>' <br/>Erlang expression tag. This tag contains one or more
%% Erlang expressions that are evaluated in runtime.
%% For the template to return a valid iolist, the results of embedded Erlang
%% expressions must be strings or binaries.
%%
%% `<%@ [FuncDecl] %>' <br/>Function declaration tag.
%% An ErlTL template compiles into an Erlang module containing one or more
%% functions. This module always contains a function named 'render'
%% that accepts a single parameter called 'Data'. The 'render' function
%% corresponds to the area at the top of an ErlTL file, above all other
%% function declarations.
%%
%% You can use the function declaration tag to add more functions to
%% an ErlTL template.
%% ErlTL functions return the iolist described by the template region
%% between its declaration and the next function declaration, or the end of
%% the file. To facilitate pattern-matching, ErlTL translates consecutive
%% function declarations with the same name and arity into a single function
%% declaration with multiple clauses, as in the following example:
%%
%% ```
%% <%@ volume_desc(Val) when Val >= 20 %> Big
%% <%@ volume_desc(Val) when Val >= 10 %> Medium
%% <%@ volume_desc(Val) %> Small
%% '''
%%
%% Function declarations have 2 possible forms: basic and full.
%% A full function declaration contains a complete Erlang function
%% declaration up to the '->' symbol, e.g.
%% `"<%@ my_func(A, B = [1,2 | _]) when is_integer(A) %>"'.
%%
%% A basic function declaration contains only the name of the function,
%% e.g. "`<%@ my_func %>'". This declaration is equivalent to
%% `"<%@ my_func(Data) %>"'.
%%
%% `<%~ [TopForms] %>' <br/>Top-level forms tag.
%% This tag, which may appear only at the very top of an ErlTL file, can
%% contain any legal top-level Erlang form. This includes module attributes,
%% compiler directives, and even complete functions.
%%
%% `<%? [TopExprs] %>' <br/>Top-level expressions tag.
%% This tag, which may appear only at the top of ErlTL functions, contains
%% Erlang expressions whose result isn't part of the function's return value.
%% This is used primarily for "unpacking" the Data parameter and binding
%% its elements to local variables prior to using them in the body of a
%% function.
%%
%% `<%! [Comment] %>' <br/>Comment tag. The contents of this tag are
%% used for documentation only. They are discarded in compile-time.
%%
%% Following is a sample ErlTL template that uses the ErlTL tags above
%% (you can find this code under test/erltl):
%%
%% ```
%% <%~
%% %% date: 10/21/2006
%% -author("<NAME>").
%% -import(widgets, [foo/1, bar/2, baz/3]).
%% %>
%% <%! This is a sample ErlTL template that renders a list of albums %>
%% <html>
%% <body>
%% <% [album(A) || A <- Data] %>
%% </body>
%% </html>
%%
%% <%@ album({Title, Artist, Songs}) %>
%% Title: <b><% Title %></b><br>
%% Artist: <b><% Artist %></b><br>
%% Songs: <br>
%% <table>
%% <% [song(Number, Name) || {Number, Name} <- Songs] %>
%% </table>
%%
%% <%@ song(Number, Name) when size(Name) > 15 %>
%% <%? <<First:13/binary, Rest/binary>> = Name %>
%% <% song(Number, [First, <<"...">>]) %>
%%
%% <%@ song(Number, Name) %>
%% <%?
%% Class =
%% case Number rem 2 of
%% 0 -> <<"even">>;
%% 1 -> <<"odd">>
%% end
%% %>
%% <tr>
%% <td class="<% Class %>"><% integer_to_list(Number) %></td>
%% <td class="<% Class %>"><% Name %></td>
%% </tr>
%% '''
%%
%% @end
%% Copyright (c) <NAME> 2006-2007
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the
%% "Software"), to deal in the Software without restriction, including
%% without limitation the rights to use, copy, modify, merge, publish,
%% distribute, sublicense, and/or sell copies of the Software, and to
%% permit persons to whom the Software is furnished to do so, subject to
%% the following conditions:
%%
%% The above copyright notice and this permission notice shall be included
%% in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
%% OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-module(erltl).
-author("<NAME> (<EMAIL>, http://yarivsblog.com)").
-export([compile/1, compile/2, forms_for_file/1,
forms_for_file/2, forms_for_data/2, forms_for_data/3]).
-define(L(Msg), io:format("~b ~p~n", [?LINE, Msg])).
%% @doc Compile the ErlTL file with the default options:
%% `[{outdir, FileDir}, report_errors, report_warnings, nowarn_unused_vars]'.
%% (FileDir is the directory of the source file.)
%%
%% After compilation, the resulting BEAM is loaded into the VM
%% (the old code is purged if necessary).
%%
%% @spec compile(FileName::string()) -> ok | {error, Err}
compile(FileName) ->
    %% By default the .beam file is written next to the template source.
    compile(FileName, [{outdir, filename:dirname(FileName)},
                       report_errors, report_warnings, nowarn_unused_vars]).
%% @doc Compile the ErlTL file with user-defined options. The options are
%% described in the documentation for the 'compile' module.
%% For more information, visit
%% [http://erlang.org/doc/doc-5.5.1/lib/compiler-4.4.1/doc/html/compile.html]
%%
%% @spec compile(FileName::string(), Options::[option()]) -> ok | {error, Err}
compile(FileName, Options) ->
    %% Collect {i, Path} options: they are the search paths for headers
    %% pulled in via <%~ -include(...) %> tags.
    IncludePaths = lists:foldl(
                     fun({i, Path}, Acc) ->
                             [Path | Acc];
                        (_Other, Acc) ->
                             Acc
                     end, [], Options),
    case forms_for_file(FileName, IncludePaths) of
        {ok, Forms} ->
            case compile:forms(Forms,
                               Options) of
                {ok, Module, Bin} ->
                    %% Honour an {outdir, Dir} option; default to the
                    %% directory of the source file.
                    OutDir = case lists:keysearch(outdir, 1, Options)
                             of
                                 {value, {outdir, Val}} -> Val;
                                 false -> filename:dirname(FileName)
                             end,
                    BaseName = filename:rootname(filename:basename(FileName)),
                    case file:write_file(OutDir ++ ['/' | BaseName] ++
                                             ".beam", Bin) of
                        ok ->
                            %% Hot-load the freshly compiled module,
                            %% purging any old version first.
                            code:purge(Module),
                            case code:load_binary(
                                   Module, atom_to_list(Module), Bin)
                            of
                                {module, _Module} ->
                                    ok;
                                Err ->
                                    Err
                            end;
                        {error, _} = Err ->
                            Err
                    end;
                Err ->
                    Err
            end;
        Err -> Err
    end.
%% @equiv forms_for_file(FileName, [])
forms_for_file(FileName) ->
    forms_for_file(FileName, []).

%% @doc Parse the ErlTL file and return its representation in Erlang
%% abstract forms.
%% @spec forms_for_file(FileName::string(),
%%   IncludePaths::[string()]) -> {ok, [form()]} | {error, Err}
forms_for_file(FileName, IncludePaths) ->
    case file:read_file(FileName) of
        {ok, Binary} ->
            %% The generated module is named after the template's basename.
            BaseName = filename:rootname(filename:basename(FileName)),
            forms_for_data(Binary, list_to_atom(BaseName), IncludePaths);
        Err ->
            Err
    end.
%% @equiv forms_for_data(Data, ModuleName, [])
forms_for_data(Data, ModuleName) ->
    forms_for_data(Data, ModuleName, []).

%% @doc Parse the raw text of an ErlTL template and return its
%% representation in abstract forms.
%% @spec forms_for_data(Data::binary() | string(), ModuleName::atom(),
%%   IncludePaths::[string()]) ->
%%   {ok, [form()]} | {error, Err}
forms_for_data(Data, ModuleName, IncludePaths) when is_binary(Data) ->
    forms_for_data(binary_to_list(Data), ModuleName, IncludePaths);
forms_for_data(Data, ModuleName, IncludePaths) ->
    Lines = make_lines(Data),
    case forms(Lines, ModuleName) of
        {ok, Forms} ->
            %% Expand -include attributes in place. Headers are searched
            %% first as given ("" prefix), then in ".", then in the user
            %% supplied paths. process_include/2 exits on failure, which
            %% is caught here and converted into {error, _}.
            case catch lists:map(
                         fun({attribute, _, include, Include}) ->
                                 process_include(
                                   Include, [[], ["."] |
                                             IncludePaths]);
                            (Form) ->
                                 Form
                         end, Forms)
            of
                {'EXIT', Err} ->
                    {error, Err};
                Res ->
                    %% Includes splice in lists of forms, hence flatten.
                    {ok, lists:flatten(Res)}
            end;
        Err ->
            Err
    end.
%% Resolves an -include("File") by trying each path in order and splicing
%% in the parsed forms of the header, minus the leading -file attribute
%% and the trailing eof form that epp adds.
%%
%% Exits with {file_not_found, Include} when no path matches, and with
%% {include_error, Include, Reason} on any other epp failure. (Previously
%% a non-enoent {error, Reason} fell through to an opaque case_clause
%% crash.) The caller converts these exits into {error, _}.
process_include(Include, []) ->
    exit({file_not_found, Include});
process_include(Include, [Path | Rest]) ->
    case epp:parse_file(Path ++ "/" ++ Include, [], []) of
        {error, enoent} ->
            process_include(Include, Rest);
        {error, Reason} ->
            exit({include_error, Include, Reason});
        {ok, IncludeForms} ->
            lists:sublist(
              IncludeForms,
              2,
              length(IncludeForms) - 2)
    end.
%% Splits Str into a list of lines on LF; the newline itself is dropped
%% and a trailing newline does not produce an empty final line.
make_lines(Str) ->
    make_lines(Str, [], []).

make_lines([], [], Done) ->
    lists:reverse(Done);
make_lines([], Current, Done) ->
    lists:reverse([lists:reverse(Current) | Done]);
make_lines([$\n | Rest], Current, Done) ->
    make_lines(Rest, [], [lists:reverse(Current) | Done]);
make_lines([Ch | Rest], Current, Done) ->
    make_lines(Rest, [Ch | Current], Done).
%% Parses the template lines and prepends the module, file and compile
%% attributes. Parse failures (signalled via exit from parse/1) are
%% converted into {error, Reason}.
forms(Lines, Module) ->
    case catch parse(Lines) of
        {'EXIT', Err} ->
            {error, Err};
        Forms ->
            %% parse/1 accumulates forms newest-first, hence the reverse.
            {ok, [{attribute,1,module,Module},
                  {attribute,2,file,{atom_to_list(Module),1}},
                  {attribute,3,compile,export_all}] ++ lists:reverse(Forms)}
    end.
%% Entry point of the template parser: start in 'binary' (literal text)
%% state at line 1 with empty accumulators.
parse(Lines) ->
    parse(Lines, binary, 1, [], [], [], []).

%% parse(Lines, State, LineNo, TopExprs, Exprs, AllForms, ChunkAcc)
%%
%%   State    - scanner state: binary | erlang | func_decl | top_exprs
%%              | forms | comment
%%   TopExprs - <%? ... %> expressions of the current function (reversed)
%%   Exprs    - body expressions of the current function (reversed)
%%   AllForms - function skeletons produced so far (newest first)
%%   ChunkAcc - lines of the region currently being scanned (newest first)
parse([], _State, _LineNo, TopExprs, Exprs, AllForms, ChunkAcc) ->
    %% End of input: flush any pending literal chunk into the current
    %% function, fill in its body, and merge same-name/arity clauses.
    [FirstForms | OtherForms] = initial_forms(AllForms),
    combine_forms(
      embed_exprs(FirstForms, TopExprs, last_exprs(ChunkAcc, Exprs)) ++
          OtherForms);
parse([Line | OtherLines], State, LineNo, TopExprs, Exprs,
      AllForms, ChunkAcc) ->
    case scan(Line, State) of
        more ->
            %% No tag delimiter on this line: keep accumulating. Literal
            %% text keeps its newline (part of the output); code regions
            %% do not need it.
            Line1 = case State of
                        binary -> Line ++ "\n";
                        _ -> Line
                    end,
            parse(OtherLines, State, LineNo+1, TopExprs, Exprs,
                  AllForms, [{Line1, LineNo} | ChunkAcc]);
        {ok, Chunk, NextState, NextChunk} ->
            %% Delimiter found: Chunk closes the current region and
            %% NextChunk (the rest of the line) is rescanned.
            Chunks = [{Chunk, LineNo} | ChunkAcc],
            Result = case State of
                         func_decl -> {func_decl, parse_func_decl(Chunks)};
                         binary -> {exprs, parse_binary(Chunks)};
                         top_exprs -> {top_exprs, parse_exprs(Chunks)};
                         forms -> {forms, parse_forms(Chunks)};
                         erlang -> {exprs, parse_exprs(Chunks)};
                         comment -> comment
                     end,
            case Result of
                comment ->
                    %% <%! ... %> comments are discarded entirely.
                    {NextLines, LineDiff} =
                        skip_line_break([NextChunk | OtherLines]),
                    parse(NextLines,
                          NextState, LineNo + LineDiff,
                          TopExprs, Exprs, AllForms, []);
                {func_decl, BasicForms} ->
                    %% A new function starts: complete the previous one
                    %% and push the new skeleton with fresh accumulators.
                    [CurrentForms | PreviousForms] = initial_forms(AllForms),
                    AllForms1 =
                        embed_exprs(CurrentForms, TopExprs, Exprs) ++
                        PreviousForms,
                    NewForms = combine_forms(AllForms1),
                    {NextLines, LineDiff} =
                        skip_line_break([NextChunk | OtherLines]),
                    parse(NextLines,
                          NextState, LineNo + LineDiff, [], [],
                          [BasicForms | NewForms], []);
                {exprs, []} ->
                    %% Empty region; advance the line counter only when
                    %% the closed chunk had content.
                    NextLineNo = case Chunk of
                                     [] -> LineNo;
                                     _ -> LineNo + 1
                                 end,
                    parse([NextChunk | OtherLines],
                          NextState, NextLineNo, TopExprs,
                          Exprs, AllForms, []);
                {exprs, Exprs1} ->
                    parse([NextChunk | OtherLines],
                          NextState, LineNo, TopExprs,
                          Exprs1 ++ Exprs,
                          initial_forms(AllForms), []);
                {top_exprs, TopExprs1} ->
                    %% <%? ... %> must precede any body expression.
                    case Exprs of
                        [] ->
                            parse([NextChunk | OtherLines], NextState, LineNo,
                                  TopExprs1 ++ TopExprs,
                                  Exprs, AllForms, []);
                        _ ->
                            error(misplaced_top_exprs, LineNo, Line,
                                  "top expressions must appear before all "
                                  "other expressions in a function")
                    end;
                {forms, Forms} ->
                    %% <%~ ... %> is only legal before any function code.
                    case AllForms of
                        [] ->
                            {NextLines, LineDiff} =
                                skip_line_break([NextChunk | OtherLines]),
                            parse(NextLines,
                                  NextState, LineNo + LineDiff, [], [],
                                  initial_forms([]) ++ Forms, []);
                        _ -> error(misplaced_top_declaration, LineNo, Line,
                                   "top-level declarations must appear at the "
                                   "top of a file")
                    end
            end
    end.
%% Merges two consecutive function forms with the same name and arity
%% into a single form carrying the clauses of both (earlier clauses
%% first), so that consecutive <%@ f(...) %> declarations become one
%% multi-clause function. Any other form list passes through untouched.
combine_forms([{function, Line, Name, Arity, NewClauses},
               {function, _, PrevName, PrevArity, PrevClauses} |
               Older]) when PrevName == Name, PrevArity == Arity ->
    [{function, Line, Name, Arity, PrevClauses ++ NewClauses} | Older];
combine_forms(Forms) ->
    Forms.
%% Scans one line for the next tag delimiter. In the 'binary' state we
%% look for an opening "<%", whose first inner character may select a
%% special state; in every other state we look for the closing "%>".
scan(Line, binary) ->
    Specials = [{$@, func_decl},
                {$!, comment},
                {$?, top_exprs},
                {$~, forms}],
    scan1(Line, {"<%", Specials, erlang});
scan(Line, _State) ->
    scan1(Line, {"%>", [], binary}).

%% Looks for Delim in Line. Returns 'more' when absent; otherwise splits
%% the line into the text before the delimiter, the next scanner state
%% (chosen by the character right after the delimiter, or Default), and
%% the remaining text to rescan.
scan1(Line, {Delim, Specials, Default}) ->
    case string:str(Line, Delim) of
        0 ->
            more;
        Pos ->
            {Before, [_, _ | After]} = lists:split(Pos - 1, Line),
            {NextState, NextChunk} =
                case After of
                    [] ->
                        {Default, After};
                    [Selector | AfterSelector] ->
                        case lists:keysearch(Selector, 1, Specials) of
                            {value, {_, Selected}} ->
                                {Selected, AfterSelector};
                            false ->
                                {Default, After}
                        end
                end,
            {ok, Before, NextState, NextChunk}
    end.
%% Before any <%@ ... %> declaration is seen, the template body belongs
%% to the implicit render/1 function.
initial_forms([]) -> [parse_func_decl([{"render", 1}])];
initial_forms(AllForms) -> AllForms.

%% When a tag ends exactly at end of line, drop the empty remainder so
%% the next region does not start with a spurious blank chunk; returns
%% how many source lines were consumed.
skip_line_break([[] | Lines]) -> {Lines, 1};
skip_line_break(Lines) -> {Lines, 0}.

%% At end of input, fold any still-unscanned literal chunk into the
%% accumulated expressions.
last_exprs([], Exprs) -> Exprs;
last_exprs(ChunkAcc, Exprs) ->
    parse_binary(ChunkAcc) ++ Exprs.
%% Turns the accumulated literal-text fragments (newest first, each
%% tagged with a line number) into a single abstract binary expression
%% with one bin_element per character. Empty input yields no expressions.
parse_binary([]) ->
    [];
parse_binary([{[], _Line}]) ->
    [];
parse_binary(Fragments) ->
    Elements =
        lists:append(
          [[{bin_element, Line, {integer, Line, Char}, default, default}
            || Char <- Chars]
           || {Chars, Line} <- lists:reverse(Fragments)]),
    [{bin, 1, Elements}].
%% Scans and parses the accumulated Erlang-expression fragments into one
%% abstract block expression. Fragments arrive newest-first, so folding
%% left while prepending rebuilds source order; a {dot, 1} token is
%% seeded so erl_parse sees a complete expression sequence.
%%
%% Exits via error/4 on scan errors and with {parse_error, Msg} on parse
%% errors; forms/2 converts those exits into {error, _}.
%% (A redundant `Tokens1 = Tokens' alias was removed here.)
parse_exprs([]) -> [];
parse_exprs(Fragments) ->
    Tokens = lists:foldl(
               fun({Frag, LineNo}, Acc) ->
                       case erl_scan:string(Frag, LineNo) of
                           {ok, Toks, _} -> Toks ++ Acc;
                           {error, Err, _} -> error(scan_error, LineNo, Frag, Err)
                       end
               end, [{dot, 1}], Fragments),
    case erl_parse:parse_exprs(Tokens) of
        {ok, Exprs} -> [{block, 1, Exprs}];
        {error, Msg} -> exit({parse_error, Msg})
    end.
%% Scans and parses the fragments of a <%~ ... %> tag into a list of
%% top-level Erlang forms. Fragments arrive newest-first; every {dot, _}
%% token terminates one form, so the token stream is regrouped into
%% per-form token lists before parsing. Exits via error/4 on scan errors
%% and with {parse_error, _} on parse errors.
parse_forms([]) -> [];
parse_forms(Fragments) ->
    FormsTokens =
        lists:foldl(
          fun({Frag, LineNo}, Acc) ->
                  case erl_scan:string(Frag, LineNo) of
                      {ok, Toks, _} ->
                          %% Start a fresh token group after each dot.
                          lists:foldl(
                            fun({dot,_} = Tok, [Form | Rest]) ->
                                    [[Tok] | [Form | Rest]];
                               (Tok, [Form | Rest]) ->
                                    [[Tok | Form] | Rest]
                            end, Acc, lists:reverse(Toks));
                      {error, Err, _} ->
                          error(scan_error, LineNo, Frag, Err)
                  end
          end, [[]], Fragments),
    %% Parse each completed token group, skipping the empty seed group.
    lists:foldl(
      fun([], Acc) -> Acc;
         (FormTokens, Acc) ->
              case erl_parse:parse_form(FormTokens) of
                  {ok, Form} ->
                      [Form | Acc];
                  {error, Err} -> exit({parse_error, Err})
              end
      end, [], FormsTokens).
%% Parses the contents of a <%@ ... %> function declaration tag.
%%
%% A bare atom ("basic" form, e.g. <%@ my_func %>) produces two forms:
%% a complete my_func/0 that delegates to my_func(undefined), and an
%% empty my_func/1 skeleton whose body is filled in later by
%% embed_exprs/3. Anything else is treated as a "full" declaration: a
%% dummy body is appended so the text parses as a complete function, and
%% the resulting skeleton — renumbered to the declaration's first line —
%% is returned as an empty_form. Exits via error/4 on failure.
parse_func_decl(Fragments) ->
    %% Rejoin the newest-first fragments into one string and remember the
    %% first source line of the declaration.
    {FuncDecl, LineNo} =
        lists:foldl(
          fun({Chars, LineNo}, {Acc, FirstLine}) ->
                  Elems = lists:foldl(
                            fun(Char, Chars1) ->
                                    [Char | Chars1]
                            end, [], lists:reverse(Chars)),
                  FirstLine1 = case FirstLine of
                                   undefined -> LineNo;
                                   _ -> FirstLine
                               end,
                  {Elems ++ Acc, FirstLine1}
          end, {[], undefined}, Fragments),
    case erl_scan:string(FuncDecl) of
        {ok, [{atom,_,FuncName}], _} ->
            %% Basic declaration: just a function name.
            [{full_form,
              {function, LineNo, FuncName, 0,
               [{clause, LineNo, [], [],
                 [{call,LineNo,{atom,LineNo,FuncName},
                   [{atom,1,undefined}]}]
                }]
              }},
             {empty_form,
              {function, LineNo, FuncName, 1,
               [{clause, LineNo, [{var,LineNo,'Data'}], [], []}]}}];
        {ok, _, _} ->
            %% Full declaration: make it parseable by appending a dummy
            %% body, then keep only the skeleton.
            case erl_scan:string(FuncDecl ++ " -> funky_func.") of
                {ok, Toks, _} ->
                    case erl_parse:parse_form(Toks) of
                        {ok, Form} ->
                            [{empty_form,
                              change_line_numbers(LineNo, Form)}];
                        {error, Msg} ->
                            error(parse_error, LineNo, FuncDecl, Msg)
                    end;
                {error, Msg, _} ->
                    error(scan_error, LineNo, FuncDecl, Msg)
            end;
        {error, Msg, _} -> error(scan_error, LineNo, FuncDecl, Msg)
    end.
%% Aborts parsing with a structured reason; the top-level catch converts
%% it into {error, _}. (This local error/4 does not clash with
%% erlang:error, which has arities 1-3.)
error(Type, Line, Chunk, Msg) ->
    exit({Type, {{line, Line}, {chunk, Chunk}, {msg, Msg}}}).
%% Injects the collected top expressions and body expressions into the
%% skeletons produced by parse_func_decl/1. full_form functions already
%% carry their body; empty_form functions get the top expressions
%% followed by a cons list of the body expressions. Both accumulators
%% are built newest-first, hence the reversals.
embed_exprs(Forms, TopExprs, Exprs) when is_list(Forms) ->
    [embed_exprs(Form, TopExprs, Exprs) || Form <- Forms];
embed_exprs({full_form, Form}, _TopExprs, _Exprs) -> Form;
embed_exprs({empty_form,
             {function,Line,FuncName,Arity,
              [{clause, Line1, Params, Guards, _UnusedExprs}]}},
            TopExprs, Exprs) ->
    {function,Line,FuncName,Arity,[{clause,Line1,Params,Guards,
                                    lists:reverse(TopExprs) ++
                                        [cons_exprs(lists:reverse(Exprs))]}]}.
%% Builds an abstract cons list out of the given expressions, fusing
%% adjacent binary literals into a single binary as it goes.
cons_exprs([]) ->
    {nil, 1};
cons_exprs([{bin, Line, ElemsA}, {bin, _, ElemsB} | More]) ->
    cons_exprs([{bin, Line, ElemsA ++ ElemsB} | More]);
cons_exprs([Head | More]) ->
    {cons, 1, Head, cons_exprs(More)}.
%% Rewrites every line-number annotation in an abstract term to L: any
%% tuple whose second element is an integer gets that element replaced,
%% and the rewrite recurses through nested lists and tuples. Other terms
%% pass through unchanged. (The original's trailing line carried fused
%% non-code metadata, dropped here.)
change_line_numbers(L, Exprs) when is_list(Exprs) ->
    [change_line_numbers(L, E) || E <- Exprs];
change_line_numbers(L, Expr) when is_tuple(Expr) ->
    Renumbered =
        case is_integer(element(2, Expr)) of
            true -> setelement(2, Expr, L);
            false -> Expr
        end,
    list_to_tuple([change_line_numbers(L, E)
                   || E <- tuple_to_list(Renumbered)]);
change_line_numbers(_L, Other) ->
    Other.
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% @end
%%%-------------------------------------------------------------------------
-module(ot_propagation).
-export([http_inject/1,
http_extract/1]).
-type extractor(T) :: {fun((T, ot_ctx:namespace(), {fun(), term()}) -> ok), term()} |
{fun((T, ot_ctx:namespace(), ot_ctx:key(), {fun(), term()}) -> ok), term()}.
-type injector(T) :: {fun((T, ot_ctx:namespace(), {fun(), term()}) -> T), term()} |
{fun((T, ot_ctx:namespace(), ot_ctx:key(), {fun(), term()}) -> T), term()}.
-type http_headers() :: [{binary(), binary()}].
-type http_injector() :: injector(http_headers()).
-type http_extractor() :: extractor(http_headers()).
-export_type([extractor/1,
injector/1,
http_injector/0,
http_extractor/0,
http_headers/0]).
%% @doc Runs every registered HTTP injector over Headers and returns the
%% (possibly extended) header list.
http_inject(Headers) ->
    Injectors = opentelemetry:get_http_injector(),
    run_injectors(Headers, Injectors).

%% @doc Runs every registered HTTP extractor over Headers. Extraction
%% works by side effect on the context, so the headers are not returned.
http_extract(Headers) ->
    Extractors = opentelemetry:get_http_extractor(),
    run_extractors(Headers, Extractors).
%% Applies each extractor to Headers purely for its side effects and
%% returns ok. Extractors come in two shapes — with and without an
%% explicit context key; any other entry is silently skipped.
run_extractors(Headers, Extractors) ->
    lists:foreach(
      fun({Extract, {Namespace, FromText}}) ->
              Extract(Headers, Namespace, FromText);
         ({Extract, {Namespace, Key, FromText}}) ->
              Extract(Headers, Namespace, Key, FromText);
         (_Unknown) ->
              ok
      end, Extractors),
    ok.
%% Threads Headers through every injector in order, so each injector
%% sees the headers produced by the previous one. Entries matching
%% neither known shape leave the headers untouched. (The original's
%% trailing line carried fused non-code metadata, dropped here.)
run_injectors(Headers, Injectors) ->
    lists:foldl(
      fun({Inject, {Namespace, ToText}}, Acc) ->
              Inject(Acc, Namespace, ToText);
         ({Inject, {Namespace, Key, ToText}}, Acc) ->
              Inject(Acc, Namespace, Key, ToText);
         (_Unknown, Acc) ->
              Acc
      end, Headers, Injectors).
%%% @doc Given a list of integers (the program) figure out Huffman encoding
%%% for this program and the decoding tree.
%%% Opcodes in the code then can be encoded using opcode frequency tree
%%% (either dynamically calculated by uasm_stats or hardcoded).
%%% @end
-module(uasm_huffman).
-export([
decode/2,
encode/1,
encode_funs/1,
encode_labels/1,
encode_tree/2,
merge_bits_into_binary/1
]).
%% Serialises a Huffman tree as a nested list of bitstrings: an inner
%% node is bit 0 followed by its left then right subtree; a leaf is
%% bit 1 followed by the leaf value in a fixed LeafBits-wide field.
encode_tree({Left, Right}, LeafBits) ->
    [<<0:1>>, encode_tree(Left, LeafBits), encode_tree(Right, LeafBits)];
encode_tree(Leaf, LeafBits) when is_integer(Leaf) ->
    [<<1:1>>, <<Leaf:LeafBits>>].
%% @doc Given the module (for labels) and list of compiled funs with operators
%% {Opcode, Arg} produce frequency tree and encode each fun separately.
encode_funs(#{'$' := module, funs := Funs}) ->
    %% ETS table created by uasm_stats during ASM compilation stage
    Tree = tree(ets:tab2list(instr_stat)),
    Dict = dict:from_list(codewords(Tree)),
    Funs1 = lists:map(
              fun({FunArity, #{'$' := e4fun, output := Code}}) ->
                      FunObject1 = encode_one_fun(Code, Dict),
                      %% Attach the shared decoding tree and the fun's
                      %% name/arity to each encoded fun object.
                      FunObject1#{
                        tree => Tree,
                        name => FunArity
                       }
              end,
              Funs),
    #{'$' => huffman,
      tree => Tree,
      output => Funs1}.
%% @doc A helper which unwraps one opcode and its args, OR marks the bit
%% position for a label into the 'labels' key of the state
encode_one_op_fold({label, F},
                   State = #{labels := L0,
                             output_position := OutPos}) ->
    %% A label emits no output; just record the current bit offset so
    %% jumps can later be resolved to it.
    L1 = orddict:store(F, OutPos, L0),
    State#{labels => L1};
encode_one_op_fold({Op, Args},
                   State = #{output := O1,
                             dict := Dict,
                             output_position := OutPos}) ->
    %% Replace the opcode with its Huffman codeword; Args are assumed to
    %% be already bit-encoded and are appended verbatim.
    OpBits = dict:fetch(Op, Dict),
    Output = [[OpBits | Args] | O1],
    OutputBitSize = bit_size(OpBits) + bit_size_list(Args, 0),
    State#{output => Output,
           output_position => OutPos + OutputBitSize}.
%% Total bit size of a list of bitstrings, added onto the initial
%% accumulator A.
bit_size_list(Bitstrings, A) ->
    lists:foldl(fun(Bits, Sum) -> Sum + bit_size(Bits) end, A, Bitstrings).
%% @doc Given function code (list of {Op, [Args]}) encode it as compressed
%% opcodes + compacted args.
%% Returns: an one_fun map with labels [{F, BitPos}, ...],
%% intermediate output (for debug) and bytes output (binary)
encode_one_fun(FunCode, Dict) ->
    #{output := Out1,
      labels := Labels} = lists:foldl(fun encode_one_op_fold/2,
                                      #{dict => Dict,
                                        output => [],
                                        labels => orddict:new(),
                                        output_position => 0},
                                      FunCode),
    %% NOTE(review): a leftover debug io:format/2 that printed Labels for
    %% every compiled fun was removed here.
    OutBinary = merge_bits_into_binary(Out1),
    #{'$' => one_fun,
      output_intermediate => Out1,
      labels => Labels,
      output => OutBinary}.
%% @doc Labels are pairs of {Label, Bit Offset} followed by an encoded NIL []
%% (the encoded empty list acts as the terminator of the label section).
encode_labels(Labels) ->
    LabelsBin = [[uasm_encode_int:varlength_unsigned(F),
                  uasm_encode_int:varlength_unsigned(Offset)]
                 || {F, Offset} <- Labels],
    [LabelsBin, uasm_encode_int:encode([], auto_bits)].
%% Concatenates a possibly nested list into one bitstring: the list is
%% flattened first and any non-bitstring elements (e.g. {label, N}
%% markers) are ignored.
merge_bits_into_binary(Nested) ->
    Pieces = [P || P <- lists:flatten(Nested), is_bitstring(P)],
    lists:foldl(fun(P, Acc) -> <<Acc/bitstring, P/bitstring>> end,
                <<>>, Pieces).
%% Encodes Text (a list of symbols) with a Huffman code derived from its
%% own symbol frequencies. Returns the coded bitstring together with the
%% decoding tree (as a term and serialised with 8-bit leaves) and the
%% symbol-to-codeword dictionary.
encode(Text) when is_list(Text) ->
    Tree = tree(freq_table(Text)),
    Dict = dict:from_list(codewords(Tree)),
    Code = << <<(dict:fetch(Char, Dict))/bitstring>> || Char <- Text>>,
    EncodedTree0 = encode_tree(Tree, 8),
    EncodedTree = merge_bits_into_binary(EncodedTree0),
    #{'$' => huffman,
      code => Code,
      tree => Tree,
      tree_encoded => EncodedTree,
      dict => Dict}.
%% Decodes a Huffman-encoded bitstring back into the list of leaf values
%% using the decoding Tree ({Left, Right} inner nodes; anything else is
%% a leaf). (A block of commented-out demo code was removed here.)
decode(Code, Tree) ->
    decode(Code, Tree, Tree, []).

%% Walk the tree one bit at a time: 0 descends left, 1 descends right.
%% Reaching a leaf emits its value and restarts from the root.
decode(<<>>, _Tree, _Node, Acc) ->
    lists:reverse(Acc);
decode(<<0:1, Rest/bits>>, Tree, {Left = {_, _}, _Right}, Acc) ->
    decode(Rest, Tree, Left, Acc);
decode(<<0:1, Rest/bits>>, Tree, {Leaf, _Right}, Acc) ->
    decode(Rest, Tree, Tree, [Leaf | Acc]);
decode(<<1:1, Rest/bits>>, Tree, {_Left, Right = {_, _}}, Acc) ->
    decode(Rest, Tree, Right, Acc);
decode(<<1:1, Rest/bits>>, Tree, {_Left, Leaf}, Acc) ->
    decode(Rest, Tree, Tree, [Leaf | Acc]).
%% Maps every leaf of a Huffman tree to its bit path from the root
%% (0 = left, 1 = right), returned as [{Leaf, CodeBits}].
codewords({Left, Right}) ->
    codewords(Left, <<0:1>>) ++ codewords(Right, <<1:1>>).

codewords({Left, Right}, <<Prefix/bits>>) ->
    codewords(Left, <<Prefix/bits, 0:1>>) ++
        codewords(Right, <<Prefix/bits, 1:1>>);
codewords(Leaf, <<Prefix/bits>>) ->
    [{Leaf, Prefix}].
%% Builds a Huffman tree from a [{Symbol, Count}] frequency table by
%% repeatedly merging the two least frequent entries until one root
%% remains.
tree([{Root, _Count}]) ->
    Root;
tree(Entries) ->
    [{A, CountA}, {B, CountB} | Rest] = lists:keysort(2, Entries),
    tree([{{A, B}, CountA + CountB} | Rest]).
%% Builds a frequency table [{Symbol, Count}] for Text by sorting it and
%% counting runs of equal symbols; the result is ordered by symbol in
%% descending order.
freq_table(Text) ->
    count_runs(lists:sort(Text), []).

count_runs([], Table) ->
    Table;
count_runs([Sym | Rest], Table) ->
    {Run, Remaining} = lists:splitwith(fun(X) -> X == Sym end, Rest),
    count_runs(Remaining, [{Sym, 1 + length(Run)} | Table]).
%%print_bits(<<>>) ->
%% io:format("\n");
%%
%%print_bits(<<Bit:1, Rest/bitstring>>) ->
%% io:format("~w", [Bit]),
%% print_bits(Rest).
-module(solution).
-export([main/0]).
% this solution uses the Sparse Table algorithm from
% http://www.cs.sunysb.edu/~bender/pub/JALG05-daglca.pdf
% Entry point: reads N (array size) and M (query count), then the N
% array elements and 2*M query bounds from stdin, precomputes the sparse
% table, and answers every [L, R] range-minimum query.
main() ->
    {ok, [N, M]} = io:fread("", "~d~d"),
    LA = read_data(N),
    LR = read_data(2*M),
    A = array(LA),
    RMQ = rmq(A),
    query(LR, RMQ),
    true.
% perform queries
% Processes the flat query list [L1, R1, L2, R2, ...], printing the
% range minimum for each pair on its own line.
query([], _RMQ) ->
    true;
query([L,R|T], RMQ) ->
    Min = rmq_min(L, R, RMQ),
    io:format("~p~n", [Min]),
    query(T, RMQ).
% determine minimum for range query
% O(1) answer: with K = floor(log2(R-L+1)), the range [L, R] is covered
% by the two (possibly overlapping) power-of-two windows [L, L+2^K-1]
% and [R-2^K+1, R], both precomputed in the sparse table RMQ.
rmq_min(L, R, RMQ) ->
    {K, TwoK} = k(L, R),
    A1 = maps:get({L, K}, RMQ),
    A2 = maps:get({R-TwoK+1, K}, RMQ),
    min(A1, A2).
% For the inclusive range [L, R] of width W = R-L+1, returns {K, 2^K}
% where K = floor(log2(W)).
k(L, R) ->
    largest_pow2(-1, 1, R - L + 1).

% Doubles TwoK until it exceeds W; the previous power of two is 2^K.
largest_pow2(K, TwoK, W) when TwoK > W ->
    {K, TwoK div 2};
largest_pow2(K, TwoK, W) ->
    largest_pow2(K + 1, TwoK * 2, W).
% return M_ij table
% Builds the sparse table M: M[{I, J}] = min(A[I .. I+2^J-1]).
rmq(A) ->
    N = maps:size(A),
    M0 = rmq0(N, A),
    rmq(N, M0).

% initialize for intervals of length 2^0
rmq0(N, A) ->
    I = 0,
    M = maps:new(),
    rmq_0(I, N, A, M).

rmq_0(I, N, _A, M) when I =:= N ->
    M;
rmq_0(I, N, A, M) ->
    AI = maps:get(I, A),
    M2 = maps:put({I, 0}, AI, M),
    rmq_0(I+1, N, A, M2).

% calculate bigger intervals
% For each J with 2^J =< N, combine two overlapping 2^(J-1) windows:
% M[{I,J}] = min(M[{I,J-1}], M[{I+2^(J-1), J-1}]).
rmq(N, M) ->
    J = 1,
    TwoJ = 2,
    rmq(J, TwoJ, N, M).

rmq(_J, TwoJ, N, M) when TwoJ > N ->
    M;
rmq(J, TwoJ, N, M) ->
    I = 0,
    IMax = I+TwoJ-1,
    M2 = rmq(I, IMax, J, TwoJ, N, M),
    rmq(J+1, 2*TwoJ, N, M2).

% Slide the window start I while the window [I, IMax] fits in the array
% (IMax is exclusive-bound once it reaches N).
rmq(_I, IMax, _J, _TwoJ, N, M) when IMax =:= N ->
    M;
rmq(I, IMax, J, TwoJ, N, M) ->
    JM = J-1,
    I2 = I+(1 bsl JM),
    A1 = maps:get({I, JM}, M),
    A2 = maps:get({I2, JM}, M),
    MIJ = min(A1, A2),
    M2 = maps:put({I, J}, MIJ, M),
    rmq(I+1, IMax+1, J, TwoJ, N, M2).
% Builds a map representation of L as a 0-indexed array:
% #{0 => E0, 1 => E1, ...}; the empty list yields the empty map.
array(L) ->
    Indices = lists:seq(0, length(L) - 1),
    maps:from_list(lists:zip(Indices, L)).
% Reads N whitespace-separated integers from stdin and returns them as a
% list. (The original final line had non-code metadata text fused onto
% it, which made the file unparsable; that junk is removed here.)
read_data(N) ->
    Fmt = unicode:characters_to_list(lists:duplicate(N, "~d")),
    {ok, L} = io:fread("", Fmt),
    L.
%%==============================================================================
%% @copyright 2019-2020 Erlang Solutions Ltd.
%% Licensed under the Apache License, Version 2.0 (see LICENSE file)
%% @end
%%
%% @doc
%% In this scenario, users are creating pubsub nodes and publishing items.
%% Users are publishing items to the nodes that they created and receiving items from
%% other nodes they subscribed to. Each node has a number of subscribers limited by
%% the `n_of_subscribers' variable. Publishing can start depending on the `activation_policy'
%% variable, either after `all_nodes' or after `n_nodes' are subscribed to.
%% Interactions between users and pubsub nodes are managed by the `amoc_coordinator'.
%% Additional subscription and publication delay can be introduced with use of the
%% `coordinator_delay' variable. This can help to moderate the load when users are
%% being added.
%%
%%
%% == User steps: ==
%%
%% 1. Connect to the XMPP host given by the `mim_host' variable.
%%
%% 2. Create pubsub node. The rate of node creation is limited by the
%% `node_creation_rate' per minute. The pubsub service address is defined by the
%% `pubsub_addr' variable. Node creation results in a timeout when `iq_timeout'
%% is exceeded.
%%
%% 3. Add user to the `amoc_coordinator' and pass pubsub node and client data.
%%
%% 4. Wait for the following messages in a loop:
%%
%% - {subscribe_to, N} - message from `amoc_coordinator' for the client to subscribe
%% to the given Node N.
%%
%% - {stanza, MessageStanza} - process message stanza, check if it contains the user's
%% own jid. If it does, schedule a `publish_item' message. The rate of these messages
%% is handled by `amoc_throttle' and depends on the `publication_rate' variable.
%%
%% - {stanza, IqStanza} - process an `iq' stanza. Whether response to publish or
%% subscribe is received, update the metrics accordingly.
%%
%% - publish_item - message from `amoc_throttle' that was scheduled after a
%% message stanza was received. An item is prepared with payload of `publication_size'.
%% User sends all messages to the same pubsub node that he created. All messages contain
%% the user's jid that helps to recognise the message.
%%
%% 5. Continue execution of the `user_loop'. If no message is received for `iq_timeout',
%% timeouts are calculated for every user request.
%%
%% == Metrics exposed by this scenario: ==
%%
%% === Counters: ===
%% ==== Message ====
%% - message - incremented with every received message stanza.
%% ==== Node ====
%% - node_creation_failure - incremented when node creation failed.
%%
%% - node_creation_success - incremented when node creation succeeded.
%%
%% - node_creation_timeout - incremented when node creation timed out.
%% ==== Publication ====
%% - publication_query - incremented for every pubsub publication query that was sent.
%%
%% - publication_result - incremented for every correct response to publication query.
%%
%% - publication_error - incremented for every incorrect response to publication query.
%%
%% - publication_success - incremented for every correct response to publication query
%% which didn't timeout.
%%
%% - publication_timeout - incremented for every correct response to publication query
%% that timed out.
%% ==== Subscription ====
%% - subscription_query - incremented for every subscription query that was sent.
%%
%% - subscription_result - incremented for every correct response to subscription query.
%%
%% - subscription_error - incremented for every incorrect response to subscription query.
%%
%% - subscription_success - incremented for every correct response to subscription query
%% which didn't timeout.
%%
%% - subscription_timeout - incremented for every correct response to subscription query
%% that timed out.
%%
%% === Times: ===
%% - node_creation - time for the pubsub node to be created
%%
%% - subscription - time to subscribe to pubsub node
%%
%% - publication - time to publish pubsub item
%%
%% - message_tdd - message time to delivery
%%
%% @end
%%==============================================================================
-module(pubsub_simple).
-behaviour(amoc_scenario).
-include_lib("exml/include/exml.hrl").
-include_lib("escalus/include/escalus.hrl").
-include_lib("kernel/include/logger.hrl").
-define(V(X), fun amoc_config_validation:X/1).
%% Scenario configuration knobs; each entry's description is shown to the
%% operator. Fixes two user-visible description strings: the
%% "subscribtion" typo and the missing closing parenthesis in the
%% pubsub_addr default.
-required_variable([
    #{name => iq_timeout, default_value => 10000, verification => ?V(positive_integer),
      description => "IQ timeout (milliseconds, def: 10000ms)"},
    #{name => coordinator_delay, default_value => 0, verification => ?V(nonnegative_integer),
      description => "Delay after N subscriptions (milliseconds, def: 0ms)"},
    #{name => node_creation_rate, default_value => 600, verification => ?V(positive_integer),
      description => "Rate of node creations (per minute, def:600)"},
    #{name => publication_size, default_value => 300, verification => ?V(nonnegative_integer),
      description => "Size of additional payload (bytes, def:300)"},
    #{name => publication_rate, default_value => 1500, verification => ?V(positive_integer),
      description => "Rate of publications (per minute, def:1500)"},
    #{name => n_of_subscribers, default_value => 50, verification => ?V(nonnegative_integer),
      description => "Number of subscriptions for each node (def: 50)"},
    #{name => activation_policy, default_value => all_nodes, verification => [all_nodes, n_nodes],
      description => "Publish after subscription of (def: all_nodes | n_nodes)"},
    #{name => mim_host, default_value => <<"localhost">>, verification => ?V(binary),
      description => "The virtual host served by the server (def: <<\"localhost\">>)"},
    #{name => pubsub_addr, default_value => <<"pubsub.localhost">>, verification => ?V(binary),
      description => "Pubsub service address (def: <<\"pubsub.localhost\">>)"}
]).
-define(GROUP_NAME, <<"pubsub_simple_coordinator">>).
-define(NODE_CREATION_THROTTLING, node_creation).
-define(PUBLICATION_THROTTLING, publication).
-define(COORDINATOR_TIMEOUT, 100).
-export([init/0, start/1]).
%% @doc Scenario entry point: initialises metrics, starts the node-creation
%% and publication throttles (rates are configured per minute), and starts
%% the coordinator process.
-spec init() -> ok.
init() ->
    init_metrics(),
    %% NOTE(review): every other call site in this module uses the bare
    %% return value of amoc_config:get/1 (e.g. user_loop/3 and
    %% verify_request/1), so the previous {ok, _} matches here were a
    %% latent badmatch; aligned with the rest of the module.
    PublicationRate = amoc_config:get(publication_rate),
    NodeCreationRate = amoc_config:get(node_creation_rate),
    amoc_throttle:start(?NODE_CREATION_THROTTLING, NodeCreationRate),
    amoc_throttle:start(?PUBLICATION_THROTTLING, PublicationRate),
    start_coordinator(),
    ok.
%% @doc Per-user entry point: connects the XMPP client for UserId and
%% hands it over to the user process logic.
-spec start(amoc_scenario:user_id()) -> any().
start(UserId) ->
    start_user(connect_amoc_user(UserId)).
%% @doc Declares every counter and time metric reported by this scenario.
init_metrics() ->
    CounterMetrics = [message,
                      %% node handling
                      node_creation_failure, node_creation_success, node_creation_timeout,
                      %% publishing
                      publication_query, publication_result, publication_error,
                      publication_success, publication_timeout,
                      %% subscribing
                      subscription_query, subscription_result, subscription_error,
                      subscription_success, subscription_timeout],
    TimeMetrics = [node_creation, subscription, publication, message_ttd],
    [amoc_metrics:init(counters, Name) || Name <- CounterMetrics],
    [amoc_metrics:init(times, Name) || Name <- TimeMetrics].
%%------------------------------------------------------------------------------------------------
%% Coordinator
%%------------------------------------------------------------------------------------------------
%% Starts the amoc coordinator for this scenario with the batching plan
%% below; ?COORDINATOR_TIMEOUT flushes a partial batch of users.
start_coordinator() ->
    amoc_coordinator:start(?MODULE, get_coordination_plan(), ?COORDINATOR_TIMEOUT).
%% Builds the coordination plan: for every batch of N users, cross-subscribe
%% them, possibly activate publishing (n_nodes policy), and apply the
%% configured delay; once all users arrived, activate publishing for the
%% all_nodes policy.
get_coordination_plan() ->
    N = get_no_of_node_subscribers(),
    [{N, [fun subscribe_users/2,
          users_activation(n_nodes),
          coordination_delay()]},
     {all, users_activation(all_nodes)}].
%% Coordination callback: every user in the batch is told to subscribe to
%% every node owned by a user in the same batch (full mesh).
subscribe_users(_Event, CoordinationData) ->
    Pids = [Pid || {Pid, {_Client, _Node}} <- CoordinationData],
    Nodes = [Node || {_Pid, {_Client, Node}} <- CoordinationData],
    [subscribe_msg(Pid, Node) || Pid <- Pids, Node <- Nodes].
%% Returns a coordination callback that schedules publishing for every user
%% in the batch, but only when the configured activation_policy matches
%% ActivationPolicy; otherwise a no-op handler is returned.
users_activation(ActivationPolicy) ->
    case amoc_config:get(activation_policy) of
        ActivationPolicy ->
            fun(_, CoordinationData) ->
                [schedule_publishing(Pid) || {Pid, _} <- CoordinationData]
            end;
        %% arity-1 no-op handler for the non-matching policy
        _ -> fun(_) -> ok end
    end.
%% Returns a coordination callback that sleeps for the configured
%% coordinator_delay on every `coordinate' event and ignores the rest.
coordination_delay() ->
    Delay = amoc_config:get(coordinator_delay),
    fun({coordinate, _}) -> timer:sleep(Delay);
       (_) -> ok
    end.
%% Tells the user process at Pid to subscribe to Node. Returns the
%% message term (the result of `!').
subscribe_msg(Pid, Node) ->
    Msg = {subscribe_to, Node},
    Pid ! Msg.
%%------------------------------------------------------------------------------------------------
%% User
%%------------------------------------------------------------------------------------------------
%% Main user setup: creates this user's pubsub node, monitors the TCP
%% connection process, switches the socket to active delivery (stanzas
%% arrive as messages) and enters the receive loop with no pending requests.
start_user(Client) ->
    ?LOG_DEBUG("user process ~p", [self()]),
    Node = create_new_node(Client),
    erlang:monitor(process, Client#client.rcv_pid),
    escalus_tcp:set_active(Client#client.rcv_pid, true),
    user_loop(Client, Node, #{}).
%% Creates this user's pubsub node, paced by the node-creation throttle,
%% and registers the {Client, Node} pair with the coordinator.
create_new_node(Client) ->
    amoc_throttle:send_and_wait(?NODE_CREATION_THROTTLING, create_node),
    Node = create_pubsub_node(Client),
    amoc_coordinator:add(?MODULE, {Client, Node}),
    Node.
%% Main user receive loop. `Requests' maps pending iq ids to {Tag, SentTS}
%% where Tag is `new' or `timeout' and SentTS is in microseconds. On each
%% iq_timeout period without traffic, pending requests are re-checked for
%% timeouts.
user_loop(Client, Node, Requests) ->
    IqTimeout = amoc_config:get(iq_timeout),
    receive
        %% coordinator asks us to subscribe to another user's node
        {subscribe_to, N} ->
            {TS, Id} = subscribe(Client, N),
            amoc_metrics:update_counter(subscription_query, 1),
            user_loop(Client, Node, Requests#{Id=>{new, TS}});
        %% incoming pubsub item notification
        {stanza, _, #xmlel{name = <<"message">>} = Stanza, #{recv_timestamp := TimeStamp}} ->
            process_msg(Stanza, TimeStamp),
            user_loop(Client, Node, Requests);
        %% iq response for one of our pending requests
        {stanza, _, #xmlel{name = <<"iq">>} = Stanza, #{recv_timestamp := TimeStamp}} ->
            NewRequests = process_iq(Stanza, TimeStamp, Requests),
            user_loop(Client, Node, NewRequests);
        %% throttle granted a publication slot
        publish_item ->
            {TS, Id} = publish_pubsub_item(Client, Node),
            amoc_metrics:update_counter(publication_query, 1),
            user_loop(Client, Node, Requests#{Id=>{new, TS}});
        %% connection process died: stop this user (falls off the loop)
        {'DOWN', _, process, Pid, Info} when Pid =:= Client#client.rcv_pid ->
            ?LOG_ERROR("TCP connection process ~p down: ~p", [Pid, Info]);
        Msg ->
            ?LOG_ERROR("unexpected message ~p", [Msg])
    after IqTimeout ->
        user_loop(Client, Node, verify_request(Requests))
    end.
%% Scans the pending-request map and marks entries older than iq_timeout
%% as {timeout, TS}, bumping the matching timeout counter. iq_timeout is
%% in milliseconds while timestamps are microseconds, hence the * 1000.
verify_request(Requests) ->
    IqTimeout = amoc_config:get(iq_timeout),
    Now = os:system_time(microsecond),
    VerifyFN =
        fun(Key, Value) ->
            case Value of
                {new, TS} when Now > TS + IqTimeout * 1000 ->
                    update_timeout_metrics(Key),
                    {timeout, TS};
                _ -> Value
            end
        end,
    maps:map(VerifyFN, Requests).
%% Bumps the right timeout counter for a request id (iq ids are prefixed
%% with the operation name by iq_id/3) that got no response in time.
update_timeout_metrics(<<"publish", _/binary>>) ->
    amoc_metrics:update_counter(publication_timeout, 1);
update_timeout_metrics(<<"subscribe", _/binary>>) ->
    amoc_metrics:update_counter(subscription_timeout, 1);
update_timeout_metrics(Id) ->
    %% Fixed: logger format arguments must be a list; the bare term was
    %% previously passed, which is not a valid argument list for ~p.
    ?LOG_ERROR("unknown iq id ~p", [Id]).
%% Asks the publication throttle to deliver `publish_item' to the user
%% process when its rate allows.
schedule_publishing(UserPid) ->
    amoc_throttle:send(?PUBLICATION_THROTTLING, UserPid, publish_item).
%%------------------------------------------------------------------------------------------------
%% User connection
%%------------------------------------------------------------------------------------------------
%% Connects user Id to the XMPP server (exits on failure) and stashes the
%% client's JID in the process dictionary for later payload tagging.
connect_amoc_user(Id) ->
    ExtraProps = amoc_xmpp:pick_server([[{host, "127.0.0.1"}]]) ++
                 [{server, amoc_config:get(mim_host)},
                  {socket_opts, socket_opts()}],
    {ok, Client, _} = amoc_xmpp:connect_or_exit(Id, ExtraProps),
    erlang:put(jid, Client#client.jid),
    Client.
%% TCP socket options for user connections: binary packets, no address
%% reuse, Nagle's algorithm disabled for lower latency.
socket_opts() ->
    [binary, {reuseaddr, false}, {nodelay, true}].
%%------------------------------------------------------------------------------------------------
%% Node creation
%%------------------------------------------------------------------------------------------------
%% Creates an open-access pubsub node for Client, measuring the round trip
%% and updating node_creation_* metrics. The user process exits on timeout
%% or error, which lets the scenario supervisor account for the failure.
create_pubsub_node(Client) ->
    Node = pubsub_node(),
    ReqId = iq_id(create, Client, Node),
    NodeConfig = [{<<"pubsub#subscribe">>, <<"1">>},
                  {<<"pubsub#access_model">>, <<"open">>},
                  {<<"pubsub#publish_model">>, <<"open">>}],
    Request = escalus_pubsub_stanza:create_node(Client, ReqId,
                                                Node, NodeConfig),
    escalus:send(Client, Request),
    %% wait_for_stanza throws on timeout; the catch turns that into an
    %% {'EXIT', ...} tuple matched below.
    {CreateNodeTime, CreateNodeResult} = timer:tc(
        fun() ->
            catch escalus:wait_for_stanza(Client, amoc_config:get(iq_timeout))
        end),
    case {escalus_pred:is_iq_result(Request, CreateNodeResult), CreateNodeResult} of
        {true, _} ->
            ?LOG_DEBUG("node creation ~p (~p)", [Node, self()]),
            amoc_metrics:update_counter(node_creation_success, 1),
            amoc_metrics:update_time(node_creation, CreateNodeTime);
        {false, {'EXIT', {timeout_when_waiting_for_stanza, _}}} ->
            amoc_metrics:update_counter(node_creation_timeout, 1),
            ?LOG_ERROR("Timeout creating node: ~p", [CreateNodeResult]),
            exit(node_creation_timeout);
        {false, _} ->
            amoc_metrics:update_counter(node_creation_failure, 1),
            ?LOG_ERROR("Error creating node: ~p", [CreateNodeResult]),
            exit(node_creation_failed)
    end,
    Node.
%% Builds a unique {ServiceAddr, NodeName} pair for this user's node; the
%% random suffix avoids collisions between users.
pubsub_node() ->
    Prefix = <<"princely_musings">>,
    Suffix = random_suffix(),
    Name = <<Prefix/binary, "_", Suffix/binary>>,
    {amoc_config:get(pubsub_addr), Name}.
%%------------------------------------------------------------------------------------------------
%% Node subscription
%%------------------------------------------------------------------------------------------------
%% Sends a subscription request for Node and returns the send timestamp
%% (microseconds) and iq id so the response can be correlated later.
subscribe(Client, Node) ->
    Id = iq_id(subscribe, Client, Node),
    Request = escalus_pubsub_stanza:subscribe(Client, Id, Node),
    escalus:send(Client, Request),
    {os:system_time(microsecond), Id}.
%%------------------------------------------------------------------------------------------------
%% Item publishing
%%------------------------------------------------------------------------------------------------
%% Publishes one item with the configured payload size to Node and returns
%% the send timestamp (microseconds) and iq id for response correlation.
publish_pubsub_item(Client, Node) ->
    Id = iq_id(publish, Client, Node),
    PayloadSize = amoc_config:get(publication_size),
    Content = item_content(PayloadSize),
    Request = escalus_pubsub_stanza:publish(Client, Content, Id, Node),
    escalus:send(Client, Request),
    {os:system_time(microsecond), Id}.
%% Builds the pubsub item payload: an <entry> element carrying the
%% publisher's JID (from the process dictionary), a microsecond publish
%% timestamp (used by receivers to compute time-to-delivery), and
%% PayloadSize bytes of "A" filler.
item_content(PayloadSize) ->
    %% binary:copy/2 allocates the filler in one step instead of building
    %% it byte-by-byte with a binary comprehension over lists:seq/2.
    Payload = #xmlcdata{content = binary:copy(<<"A">>, PayloadSize)},
    #xmlel{
       name = <<"entry">>,
       attrs = [{<<"timestamp">>, integer_to_binary(os:system_time(microsecond))},
                {<<"jid">>, erlang:get(jid)}],
       children = [Payload]}.
%%------------------------------------------------------------------------------------------------
%% Item processing
%%------------------------------------------------------------------------------------------------
%% Handles an incoming pubsub item notification: if the item was published
%% by this very user (JIDs match), schedules the next publication;
%% always records time-to-delivery from the embedded publish timestamp.
process_msg(#xmlel{name = <<"message">>} = Stanza, TS) ->
    escalus:assert(is_message, Stanza),
    Entry = exml_query:path(Stanza, [{element, <<"event">>}, {element, <<"items">>},
                                     {element, <<"item">>}, {element, <<"entry">>}]),
    case {exml_query:attr(Entry, <<"jid">>), erlang:get(jid)} of
        %% our own item came back: the publish->notify cycle completed
        {JID, JID} -> schedule_publishing(self());
        _ -> ok
    end,
    TimeStampBin = exml_query:attr(Entry, <<"timestamp">>),
    TimeStamp = binary_to_integer(TimeStampBin),
    %% both timestamps are in microseconds
    TTD = TS - TimeStamp,
    ?LOG_DEBUG("time to delivery ~p", [TTD]),
    amoc_metrics:update_counter(message),
    amoc_metrics:update_time(message_ttd, TTD).
%% Handles an iq response: correlates it with a pending request via its id
%% (ids are prefixed with the operation name by iq_id/3), dispatches to the
%% publish/subscribe handler with the elapsed time in microseconds, then
%% drops the request from the pending map.
process_iq(#xmlel{name = <<"iq">>} = Stanza, TS, Requests) ->
    RespId = exml_query:attr(Stanza, <<"id">>),
    case {RespId, maps:get(RespId, Requests, undefined)} of
        {_, undefined} ->
            %% Fixed: the format string has two placeholders, so both the
            %% id and the stanza must be in the argument list (previously
            %% only [Stanza] was passed, crashing the formatter).
            ?LOG_WARNING("unknown iq ~p ~p", [RespId, Stanza]);
        {<<"publish", _/binary>>, {Tag, ReqTS}} ->
            handle_publish_resp(Stanza, {Tag, TS - ReqTS});
        {<<"subscribe", _/binary>>, {Tag, ReqTS}} ->
            handle_subscribe_resp(Stanza, {Tag, TS - ReqTS})
    end,
    maps:remove(RespId, Requests).
%% Handles a publication iq response: records result/latency metrics and
%% classifies the response as success or late (past iq_timeout). Requests
%% already flagged as timed out are not double counted. An error response
%% crashes the user process.
handle_publish_resp(PublishResult, {Tag, PublishTime}) ->
    IqTimeout = amoc_config:get(iq_timeout),
    case escalus_pred:is_iq_result(PublishResult) of
        true ->
            ?LOG_DEBUG("publish time ~p", [PublishTime]),
            amoc_metrics:update_counter(publication_result, 1),
            amoc_metrics:update_time(publication, PublishTime),
            case Tag of
                %% iq_timeout is ms, PublishTime is microseconds
                new when IqTimeout * 1000 > PublishTime ->
                    amoc_metrics:update_counter(publication_success, 1);
                new ->
                    amoc_metrics:update_counter(publication_timeout, 1);
                timeout -> ok %% do nothing, it's already reported as timeout
            end;
        _ ->
            amoc_metrics:update_counter(publication_error, 1),
            ?LOG_ERROR("Error publishing failed: ~p", [PublishResult]),
            exit(publication_failed)
    end.
%% Handles a subscription iq response: records result/latency metrics and
%% classifies the response as success or late (past iq_timeout). Requests
%% already flagged as timed out are not double counted. An error response
%% crashes the user process.
handle_subscribe_resp(SubscribeResult, {Tag, SubscribeTime}) ->
    IqTimeout = amoc_config:get(iq_timeout),
    case escalus_pred:is_iq_result(SubscribeResult) of
        true ->
            ?LOG_DEBUG("subscribe time ~p", [SubscribeTime]),
            amoc_metrics:update_counter(subscription_result, 1),
            amoc_metrics:update_time(subscription, SubscribeTime),
            case Tag of
                %% Fixed for consistency with handle_publish_resp/2:
                %% SubscribeTime is in microseconds while iq_timeout is
                %% configured in milliseconds, so the limit must be scaled
                %% by 1000 before the comparison (it previously was not,
                %% misclassifying nearly every response as a timeout).
                new when IqTimeout * 1000 > SubscribeTime ->
                    amoc_metrics:update_counter(subscription_success, 1);
                new ->
                    amoc_metrics:update_counter(subscription_timeout, 1);
                timeout -> ok %% do nothing, it's already reported as timeout
            end;
        _ ->
            amoc_metrics:update_counter(subscription_error, 1),
            ?LOG_ERROR("Error subscribing failed: ~p", [SubscribeResult]),
            exit(subscription_failed)
    end.
%%------------------------------------------------------------------------------------------------
%% Stanza helpers
%%------------------------------------------------------------------------------------------------
%% Builds a unique iq id prefixed with the operation type (create |
%% subscribe | publish); the prefix is later used for dispatching
%% responses and timeouts.
iq_id(Type, Client, {NodeAddr, NodeName}) ->
    UserName = escalus_utils:get_username(Client),
    Suffix = random_suffix(),
    list_to_binary(io_lib:format("~s-~s-~s-~s-~p",
                                 [Type, UserName, NodeAddr, NodeName, Suffix])).
%% Generates a short random id fragment: 5 random bytes, base64-encoded
%% (8 characters), with every "/" replaced by "_" so the result can be
%% embedded safely in iq ids and node names.
random_suffix() ->
    Encoded = base64:encode(crypto:strong_rand_bytes(5)),
    binary:replace(Encoded, <<"/">>, <<"_">>, [global]).
%%------------------------------------------------------------------------------------------------
%% Config helpers
%%------------------------------------------------------------------------------------------------
%% Reads the configured number of subscribers per pubsub node.
%% (Fixed: stray dataset metadata fused onto the final line made the form
%% a syntax error; removed.)
get_no_of_node_subscribers() ->
    %% instead of constant No of subscriptions we can use min/max values.
    amoc_config:get(n_of_subscribers).
%% -------------------------------------------------------------------
%%
%% riaknostic - automated diagnostic tools for Riak
%%
%% Copyright (c) 2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc <p>Enforces a common API among all diagnostic modules and
%% provides some automation around their execution.</p>
%% <h2>Behaviour Specification</h2>
%%
%% <h3>description/0</h3>
%% <pre>-spec description() -> iodata().</pre>
%% <p>A short description of what the diagnostic does, which will be
%% printed when the script is given the <code>-l</code> flag.</p>
%%
%% <h3>valid/0</h3>
%% <pre>-spec valid() -> boolean().</pre>
%% <p>Whether the diagnostic is valid to run. For example, some checks
%% require connectivity to the Riak node and hence call {@link
%% riaknostic_node:can_connect/0. riaknostic_node:can_connect()}.</p>
%%
%% <h3>check/0</h3>
%% <pre>-spec check() -> [{lager:log_level(), term()}].</pre>
%% <p>Runs the diagnostic, returning a list of pairs, where the first
%% is a severity level and the second is any term that is understood
%% by the <code>format/1</code> callback.</p>
%%
%% <h3>format/1</h3>
%% <pre>-spec format(term()) -> iodata() | {io:format(), [term()]}.</pre>
%% <p>Formats terms that were returned from <code>check/0</code> for
%% output to the console. Valid return values are an iolist (string,
%% binary, etc) or a pair of a format string and a list of terms, as
%% you would pass to {@link io:format/2. io:format/2}.</p>
%% @end
-module(riaknostic_check).
-export([behaviour_info/1]).
-export([check/1,
modules/0,
print/1]).
%% @doc The behaviour definition for diagnostic modules: each one must
%% export description/0, valid/0, check/0 and format/1.
-spec behaviour_info(atom()) -> 'undefined' | [{atom(), arity()}].
behaviour_info(callbacks) ->
    [{description, 0}, {valid, 0}, {check, 0}, {format, 1}];
behaviour_info(_Other) ->
    undefined.
%% @doc Runs the diagnostic in the given module, if it is valid. Returns a
%% list of messages that will be printed later using print/1. Each message
%% from Module:check/0 is tagged with the originating module so print/1
%% can call back into its format/1.
-spec check(Module::module()) -> [{lager:log_level(), module(), term()}].
check(Module) ->
    case Module:valid() of
        true ->
            [ {Level, Module, Message} || {Level, Message} <- Module:check() ];
        _ ->
            %% invalid in this environment: nothing to report
            []
    end.
%% @doc Collects a list of diagnostic modules included in the
%% riaknostic application: every loaded module of the application whose
%% attributes declare this module as its behaviour (either spelling).
-spec modules() -> [module()].
modules() ->
    {ok, Mods} = application:get_key(riaknostic, modules),
    [ M || M <- Mods,
           Attr <- M:module_info(attributes),
           {behaviour, [?MODULE]} =:= Attr orelse {behavior, [?MODULE]} =:= Attr ].
%% @doc Formats and prints the given message via riaknostic_util:log/2,3.
%% The diagnostic module's format/1 function is called to provide a
%% human-readable message; it should return an iolist() or a 2-tuple of a
%% format string and a list of terms.
%% (Fixed: stray dataset metadata fused onto the final line made the form
%% a syntax error; removed.)
-spec print({Level::lager:log_level(), Module::module(), Data::term()}) -> ok.
print({Level, Mod, Data}) ->
    case Mod:format(Data) of
        {Format, Terms} ->
            riaknostic_util:log(Level, Format, Terms);
        String ->
            riaknostic_util:log(Level, String)
    end.
-module(numerl).
-on_load(init/0).
-export([ eval/1, eye/1, zeros/2, equals/2, add/2, sub/2,mult/2, divide/2, matrix/1, rnd_matrix/1, get/3, at/2, mtfli/1, mtfl/1, row/2, col/2, transpose/1, inv/1, nrm2/1, vec_dot/2, dot/2]).
%Matrices are represented as such:
%-record(matrix, {n_rows, n_cols, bin}).
%% on_load hook: locates the shared library (priv/numerl) and loads the
%% NIFs, replacing the stub functions below. Falls back to deriving the
%% priv dir from the beam location when code:priv_dir/1 fails (e.g. when
%% running from an unpacked build).
init()->
    Dir = case code:priv_dir(numerl) of
        {error, bad_name} ->
            filename:join(
                filename:dirname(
                    filename:dirname(
                        code:which(?MODULE))), "priv");
        D -> D
    end,
    SoName = filename:join(Dir, atom_to_list(?MODULE)),
    erlang:load_nif(SoName, 0).
%Creates a random matrix.
%% Builds an NxN matrix whose entries are uniform random integers in 1..20.
rnd_matrix(N)->
    L = [[rand:uniform(20) || _ <- lists:seq(1,N) ] || _ <- lists:seq(1,N)],
    matrix(L).
%Combine multiple functions.
%% Reduces a list of the form [Lhs, Op, Rhs | Rest] left-to-right by
%% repeatedly applying numerl:Op/2 to the two leading operands until a
%% single result remains, which is returned.
eval([Lhs, Op, Rhs | Rest]) ->
    eval([numerl:Op(Lhs, Rhs) | Rest]);
eval([Result]) ->
    Result.
%% The functions below are NIF stubs: the on_load hook replaces them with
%% native implementations; the atom nif_not_loaded is only returned when
%% the shared library failed to load.

%%Creates a matrix.
%List: List of doubles, of length N.
%Return: a matrix of dimension MxN, containing the data.
matrix(_) ->
    nif_not_loaded.

%%Returns the Nth value contained within Matrix.
at(_Matrix,_Nth)->
    nif_not_loaded.

%%Returns the matrix as a flattened list of ints.
mtfli(_mtrix)->
    nif_not_loaded.

%%Returns the matrix as a flattened list of doubles.
mtfl(_mtrix)->
    nif_not_loaded.

%%Returns a value from a matrix.
get(_,_,_) ->
    nif_not_loaded.

%%Returns requested row.
row(_,_) ->
    nif_not_loaded.

%%Returns requested col.
col(_,_) ->
    nif_not_loaded.

%%Equality test between matrixes.
equals(_, _) ->
    nif_not_loaded.

%%Addition of matrix.
add(_, _) ->
    nif_not_loaded.

%%Substraction of matrix.
sub(_, _) ->
    nif_not_loaded.
%% Matrix multiplication.
%% Dispatches on the second argument: scalar multiplication when B is a
%% number, matrix-matrix multiplication otherwise. Both targets are NIFs.
mult(A,B) when is_number(B) -> '*_num'(A,B);
mult(A,B) -> '*_matrix'(A,B).

%% NIF stub: matrix * scalar.
'*_num'(_,_)->
    nif_not_loaded.

%% NIF stub: matrix * matrix.
'*_matrix'(_, _)->
    nif_not_loaded.
%% More NIF stubs (replaced on module load; see init/0).

%Matrix division by a number
divide(_,_)->
    nif_not_loaded.

%% build a null matrix of size NxM
zeros(_, _) ->
    nif_not_loaded.

%%Returns an Identity matrix NxN.
eye(_)->
    nif_not_loaded.

%Returns the transpose of the given square matrix.
transpose(_)->
    nif_not_loaded.

%Returns the inverse of asked square matrix.
inv(_)->
    nif_not_loaded.

%------CBLAS--------

%nrm2
%Calculates the squared root of the sum of the squared contents.
nrm2(_)->
    nif_not_loaded.

% : dot product of two vectors
% Arguments: vector x, vector y.
%   x and y are matrices
% Returns the dot product of all the coordinates of X,Y.
vec_dot(_, _)->
    nif_not_loaded.
% dgemm: A dot B
% Arguments: Matrix A, Matrix B.
% Returns the matrix product of A and B.
%% NIF stub: replaced by the native (BLAS-backed) implementation on load.
%% (Fixed: stray dataset metadata fused onto the final line made the form
%% a syntax error; removed.)
dot(_, _) ->
    nif_not_loaded.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2011 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Riak Pipe fitting that reads objects from Riak KV. The
%% primary purpose of this fitting is to serve as the first half of a
%% 'map' MapReduce phase.
%%
%% This fitting accepts bucket/key pairs as inputs, which may be
%% represented as either a 2-tuple of `{Bucket, Key}' or a 2-element
%% list of `[Bucket, Key]' (`Bucket' and `Key' should each be a
%% binary). An optional third argument, `KeyData' may be specified as
%% well, as `{{Bucket, Key}, KeyData}' or `[Bucket, Key, KeyData]'.
%% `KeyData' is an opaque term that will be passed with the object to
%% the next fitting.
%%
%% The fitting reads the object from the KV vnode hosting the same
%% partition number as the Pipe vnode owning this worker. For this
%% reason, it is important to use a `chashfun' for this fitting that
%% gives the same answer as the consistent hashing function for the KV
%% object. If the object is not found at the local KV vnode, each KV
%% vnode in the remainder of the object's primary preflist is tried in
%% sequence.
%%
%% If the object is found, the tuple `{ok, Object, Keydata}' is sent
%% as output. If an error occurs looking up the object, and the
%% preflist has been exhausted, the tuple `{Error, {Bucket, Key},
%% KeyData}' is sent as output (where `Error' is usually `{error,
%% notfound}'). The atom `undefined' is used as `KeyData' if none is
%% specified.
-module(riak_kv_pipe_get).
-behaviour(riak_pipe_vnode_worker).
-export([init/2,
process/3,
done/1]).
-export([bkey/1,
keydata/1,
bkey_chash/1,
bkey_nval/1]).
-include("riak_kv_vnode.hrl").
-include_lib("riak_pipe/include/riak_pipe.hrl").
-include_lib("riak_pipe/include/riak_pipe_log.hrl").
-export_type([input/0]).
-record(state, {partition, fd}).
-opaque state() :: #state{}.
-export_type([state/0]).
-type input() :: {Bucket :: binary(), Key :: binary()}
| {{Bucket :: binary(), Key :: binary()}, KeyData :: term()}
%% unfortunate type spec: this list should be
%% either 2 or three elements in length, exactly
%% like the tuples above
| [BucketKeyKeyData :: term()].
%% @doc Stashes `Partition' and `FittingDetails' away for use while
%% processing inputs; they identify the local pipe vnode and carry
%% logging/output routing info respectively.
-spec init(riak_pipe_vnode:partition(), riak_pipe_fitting:details()) ->
         {ok, state()}.
init(Partition, FittingDetails) ->
    {ok, #state{partition=Partition, fd=FittingDetails}}.
%% @doc Lookup the bucket/key pair on the Riak KV vnode, and send it
%% downstream. The local KV vnode (same partition as this worker) is
%% tried first; on failure, unless this was the last attempt, the rest
%% of the primary preflist is tried before reporting an error.
-spec process(riak_kv_mrc_pipe:key_input(), boolean(), state())
     -> {ok | {error, term()}, state()}.
process(Input, Last, #state{partition=Partition, fd=FittingDetails}=State) ->
    %% assume local chashfun was used for initial attempt
    case try_partition(Input, {Partition, node()}, FittingDetails) of
        {error, _} when Last == false ->
            {try_preflist(Input, State), State};
        Result ->
            {send_output(Input, Result, State), State}
    end.
%% Wraps a lookup result into the fitting's output format:
%% {ok, Object, KeyData} on success, {Error, BKey, KeyData} otherwise.
send_output(Input, {ok, Obj}, State) ->
    send_output({ok, Obj, keydata(Input)}, State);
send_output(Input, Error, State) ->
    send_output({Error, bkey(Input), keydata(Input)}, State).

%% Emits a prepared output tuple to the next fitting downstream.
send_output(Output, #state{partition=Partition, fd=FittingDetails}) ->
    riak_pipe_vnode_worker:send_output(
      Output, Partition, FittingDetails).
%% @doc Try the other primaries in the Input's preflist (skipping the
%% local vnode we already tried in {@link process/3}).
try_preflist(Input, #state{partition=P}=State) ->
    %% pipe only uses primaries - mimicking that here, both to provide
    %% continuity, and also to avoid a really long wait for a true
    %% not-found
    AnnPreflist = riak_core_apl:get_primary_apl(
                    bkey_chash(Input), bkey_nval(Input), riak_kv),
    Preflist = [ V || {V, _A} <- AnnPreflist ],
    %% remove the one we already tried
    RestPreflist = Preflist--[{P, node()}],
    try_preflist(Input, RestPreflist, State).
%% helper function walking the remaining preflist: stops at the first
%% vnode that returns the object, reports not-found when exhausted.
try_preflist(Input, [], State) ->
    %% send not-found if no replicas gave us the value
    send_output(Input, {error, notfound}, State);
try_preflist(Input, [NextV|Rest], #state{fd=FittingDetails}=State) ->
    case try_partition(Input, NextV, FittingDetails) of
        {ok,_}=Result ->
            send_output(Input, Result, State);
        _Error ->
            try_preflist(Input, Rest, State)
    end.
%% Issues a KV GET against one specific vnode and waits for its reply,
%% tracing the lookup latency either way. Returns {ok, Object} or the
%% vnode's {error, _} tuple.
%% NOTE(review): the receive has no `after' clause, so this relies on the
%% KV vnode always replying — confirm that assumption holds.
try_partition(Input, Vnode, FittingDetails) ->
    ReqId = make_req_id(),
    Start = os:timestamp(),
    riak_core_vnode_master:command(
      Vnode,
      ?KV_GET_REQ{bkey=bkey(Input), req_id=ReqId},
      {raw, ReqId, self()},
      riak_kv_vnode_master),
    receive
        {ReqId, {r, {ok, Obj}, _, _}} ->
            ?T(FittingDetails, [kv_get], [{kv_get_latency, {r, timer:now_diff(os:timestamp(), Start)}}]),
            {ok, Obj};
        {ReqId, {r, {error, _} = Error, _, _}} ->
            ?T(FittingDetails, [kv_get], [{kv_get_latency, {Error, timer:now_diff(os:timestamp(), Start)}}]),
            Error
    end.
%% @doc Not used. Invoked when the worker finishes its input stream; this
%% fitting has nothing to flush, so it simply succeeds.
-spec done(state()) -> ok.
done(_State) ->
    ok.
%% Builds a (sufficiently) unique request id by hashing the caller's pid
%% together with the current timestamp (same trick as riak_client).
make_req_id() ->
    Seed = {self(), os:timestamp()},
    erlang:phash2(Seed).
%% useful utilities

%% @doc Convert a valid pipe_get input into a standard bkey.
%% Valid inputs are:
%%   - `{Bucket, Key}'
%%   - `{{Bucket, Key}, KeyData}'
%%   - `[Bucket, Key]'
%%   - `[Bucket, Key, KeyData]'
%%   - `[Bucket, Key, KeyData, BucketType]' (typed buckets)
-spec bkey(input()) -> {Bucket :: binary(), Key :: binary()}.
bkey({{Bucket, Key}, _KeyData})      -> {Bucket, Key};
bkey({Bucket, Key})                  -> {Bucket, Key};
bkey([Bucket, Key])                  -> {Bucket, Key};
bkey([Bucket, Key, _KeyData])        -> {Bucket, Key};
bkey([Bucket, Key, _KeyData, BType]) -> {{BType, Bucket}, Key}.
%% @doc Translate JS null to standard Erlang undefined
null2undefined(Value) ->
    case Value of
        null -> undefined;
        _    -> Value
    end.

%% @doc Extract KeyData from input. The atom `undefined' is returned
%% if no keydata is specified (or if it is a JS null).
-spec keydata(input()) -> KeyData :: term().
keydata({{_Bucket, _Key}, KeyData})       -> KeyData;
keydata({_Bucket, _Key})                  -> undefined;
keydata([_Bucket, _Key])                  -> undefined;
keydata([_Bucket, _Key, KeyData])         -> null2undefined(KeyData);
keydata([_Bucket, _Key, KeyData, _BType]) -> null2undefined(KeyData).
%% @doc Compute the KV hash of the input, i.e. the consistent-hash ring
%% position of the input's bucket/key pair.
-spec bkey_chash(riak_kv_mrc_pipe:key_input()) -> chash:index().
bkey_chash(Input) ->
    riak_core_util:chash_key(bkey(Input)).
%% @doc Find the N value for the bucket of the input, from the bucket's
%% properties.
%% (Fixed: stray dataset metadata fused onto the final line made the form
%% a syntax error; removed.)
-spec bkey_nval(riak_kv_mrc_pipe:key_input()) -> integer().
bkey_nval(Input) ->
    {Bucket, _} = bkey(Input),
    BucketProps = riak_core_bucket:get_bucket(Bucket),
    {n_val, NVal} = lists:keyfind(n_val, 1, BucketProps),
    NVal.
%%%=============================================================================
%%%
%%% | o __ _| _ __ |_ _ _ _ (TM)
%%% |_ | | | (_| (/_ | | |_) (_| |_| | | |
%%%
%%% @copyright (C) 2015, Lindenbaum GmbH
%%%
%%% Permission to use, copy, modify, and/or distribute this software for any
%%% purpose with or without fee is hereby granted, provided that the above
%%% copyright notice and this permission notice appear in all copies.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
%%%
%%% @doc
%%% A library application to work with `glob' patterns.
%%%
%%% This implementation provides the same `glob' pattern facilities as `bash'.
%%% Syntax (partly from [https://en.wikipedia.org/wiki/Glob_%28programming%29)]:
%%% `?': Match exactly one unknown character.
%%% `*': Match any number of unknown characters from the position in which it
%%% appears to the end of the subject also match any number of unknown
%%% characters (regardless of the position where it appears, including at
%%% the start and/or multiple times.
%%% `[characters]': Match a character as part of a group of characters.
%%% `[!characters]': Match any character but the ones specified.
%%% `[character-charcter]': Match a character as part of a character range.
%%% `[!character-charcter]': Match any character but the range specified.
%%%
%%% The underlying implementation utilizes the `re' module which means that a
%%% given `glob' pattern will be converted into a regular expression. For
%%% convenience this module features an API that is quite similar to the `re'
%%% module. Although, the shorthand {@link matches/1} enables a more intuitive
%%% and simple experience.
%%%
%%% As `glob' relies on `re' it offers support for `unicode' input. However,
%%% this support comes with the same restrictions known from `re'.
%%% @end
%%%=============================================================================
-module(glob).
%% API
-export([compile/1, compile/2, run/2, matches/2]).
-opaque mp() :: {?MODULE, tuple()}. %% in fact tuple() referes to an re:mp()
-export_type([mp/0]).
-define(COMPILE_OPTS, [{newline, anycrlf}, dotall, dollar_endonly]).
%%%=============================================================================
%%% API
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc
%% Pre-compiles a `glob' expression/pattern. Same as `compile(Expr, false)'
%% (i.e. without Unicode support).
%% @end
%%------------------------------------------------------------------------------
-spec compile(iodata()) -> {ok, mp()} | {error, term()}.
compile(Expr) -> compile(Expr, false).
%%------------------------------------------------------------------------------
%% @doc
%% Pre-compiles a `glob' expression/pattern. Pre-compiling an expression/pattern
%% improves performance when using the result in consecutive matches.
%%
%% When using `unicode'/`UTF-8' as input for the subject or the
%% expression/pattern itself, pre-compilation is mandatory!
%%
%% The result is tagged with this module's name so run/2 and matches/2 can
%% distinguish a pre-compiled pattern from raw iodata.
%% @end
%%------------------------------------------------------------------------------
-spec compile(iodata() | unicode:charlist(), boolean()) ->
        {ok, mp()} | {error, term()}.
compile(Expr, IsUnicode) ->
    Opts = if IsUnicode -> [unicode]; true -> [] end,
    case convert(to_list(Expr, IsUnicode), [], Opts) of
        {ok, MP} -> {ok, {?MODULE, MP}};
        Error    -> Error
    end.
%%------------------------------------------------------------------------------
%% @doc
%% Executes an expression/pattern matching, returning `match' or `nomatch'. The
%% expression/pattern can be given either as `iodata()' in which case it is
%% automatically compiled (as by {@link glob:compile/1,2}) and executed, or as a
%% pre-compiled `mp()' in which case it is executed against the subject
%% directly.
%%
%% If the expression/pattern was previously compiled with the `IsUnicode=true',
%% `Subject' should be provided as a valid Unicode `charlist()', otherwise any
%% `iodata()' will do.
%%
%% When compilation is involved, the exception `badarg' is thrown if a
%% compilation error occurs. Call {@link glob:compile/1,2} to get information
%% about the error in the expression/pattern.
%% @end
%%------------------------------------------------------------------------------
-spec run(iodata() | unicode:charlist(), iodata() | mp()) -> match | nomatch.
run(Subject, Expr) ->
    try
        match(Subject, Expr)
    catch
        %% match/2 asserts {ok, _} on compile; rewrite that badmatch
        %% into the documented badarg error
        error:{badmatch, _} -> error(badarg)
    end.
%%------------------------------------------------------------------------------
%% @doc
%% Similar to {@link run/2} but returns a `boolean' indicating whether `Subject'
%% matches `Expr'.
%%
%% When compilation is involved, the exception `badmatch' is thrown if a
%% compilation error occurs. Call {@link glob:compile/1,2} to get information
%% about the error in the expression/pattern.
%% @end
%%------------------------------------------------------------------------------
-spec matches(iodata() | unicode:charlist(), iodata() | mp()) -> boolean().
matches(Subject, Expr) -> match(Subject, Expr) =:= match.
%%%=============================================================================
%%% Internal functions
%%%=============================================================================

%%------------------------------------------------------------------------------
%% @private
%% Runs a pre-compiled pattern against Subject; raw iodata patterns are
%% compiled first (the {ok, _} match raises badmatch on compile errors).
%%------------------------------------------------------------------------------
match(Subject, {?MODULE, MP}) ->
    re:run(Subject, MP, [{capture, none}]);
match(Subject, Expr) ->
    {ok, Compiled} = compile(Expr),
    match(Subject, Compiled).
%%------------------------------------------------------------------------------
%% @private
%% Translates a glob pattern (as a char list) into a regular expression and
%% compiles it with re:compile/2. Acc holds the regex built so far in
%% reverse; the final regex is anchored with ^...$.
%%------------------------------------------------------------------------------
convert([], Acc, Opts) ->
    re:compile([$^ | lists:reverse([$$ | Acc])], Opts ++ ?COMPILE_OPTS);
%% glob `*' -> regex `.*'
convert([$* | Rest], Acc, Opts) ->
    convert(Rest, [$* | [$. | Acc]], Opts);
%% glob `?' -> regex `.'
convert([$? | Rest], Acc, Opts) ->
    convert(Rest, [$. | Acc], Opts);
convert([$\\], _Acc, _Opts) ->
    {error, escape_sequence_at_end_of_pattern};
%% `\C' escapes C literally
convert([$\\ | [C | Rest]], Acc, Opts) ->
    convert(Rest, [C | [$\\ | Acc]], Opts);
%% `[!...]' -> negated character class `[^...]'
convert([$[ | [$! | Rest]], Acc, Opts) ->
    convert_character_class(Rest, [$^ | [$[ | Acc]], Opts);
%% `[...]' -> character class, copied mostly verbatim
convert([$[ | Rest], Acc, Opts) ->
    convert_character_class(Rest, [$[ | Acc], Opts);
%% any other character: escape regex metacharacters
convert([C | Rest], Acc, Opts) ->
    convert(Rest, escape(C, Acc), Opts).
%%------------------------------------------------------------------------------
%% @private
%% Copies a character class through verbatim until the closing ']',
%% honouring backslash escapes; control returns to convert/3 afterwards.
%%------------------------------------------------------------------------------
convert_character_class([], _Acc, _Opts) ->
    {error, non_terminated_character_class};
convert_character_class([$\\], _Acc, _Opts) ->
    {error, escape_sequence_at_end_of_pattern};
convert_character_class([$\\, C | Rest], Acc, Opts) ->
    convert_character_class(Rest, [C, $\\ | Acc], Opts);
convert_character_class([$] | Rest], Acc, Opts) ->
    %% End of the class: continue converting the remaining pattern.
    convert(Rest, [$] | Acc], Opts);
convert_character_class([C | Rest], Acc, Opts) ->
    convert_character_class(Rest, [C | Acc], Opts).
%%------------------------------------------------------------------------------
%% @private
%% Pushes `C' onto the (reversed) accumulator, backslash-escaping the
%% regexp metacharacters ^ $ . | ( ) + { } so they match literally.
%%------------------------------------------------------------------------------
escape(C, Acc) ->
    case lists:member(C, "^$.|()+{}") of
        true  -> [C, $\\ | Acc];
        false -> [C | Acc]
    end.
%%------------------------------------------------------------------------------
%% @private
%% Converts input data to a list. When the second argument is `true'
%% the data is interpreted as Unicode chardata; otherwise any iodata
%% is flattened to a plain byte list.
%% (Fix: removed stray non-Erlang text that had been appended after the
%% final clause and broke compilation.)
%%------------------------------------------------------------------------------
to_list(Data, true) ->
    unicode:characters_to_list(Data);
to_list(Bin, false) when is_binary(Bin) ->
    binary_to_list(Bin);
to_list(IoData, false) ->
    %% Flatten arbitrary iodata into a binary first, then re-dispatch.
    to_list(iolist_to_binary(IoData), false).
%% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%% Technische Universität Kaiserslautern, Germany
%% Université Pierre et Marie Curie / Sorbonne-Université, France
%% Universidade NOVA de Lisboa, Portugal
%% Université catholique de Louvain (UCL), Belgique
%% INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
%% @doc
%% An operation-based Observed-Remove Set CRDT.
%% As the data structure is operation-based, to issue an operation, one should
%% firstly call `downstream/2' to get the downstream version of the
%% operation and then call `update/2'.
%%
%% It provides five operations: add, which adds an element to a set; add_all,
%% adds a list of elements to a set; remove, which removes an element from a set;
%% remove_all that removes a list of elements from the set; update, that contains
%% a list of previous four commands.
%%
%% This file is adapted from riak_dt_antidote_crdt_set_aw, a state-based implementation of
%% Observed-Remove Set.
%% The changes are as follows:
%% 1. `generate_downstream/3' is added, as this is necessary for op-based CRDTs.
%% 2. `merge/2' is removed.
%% 3. There is no tombstone of removed elements.
%%
%% @reference <NAME>, <NAME>, <NAME>, <NAME> (2011) A comprehensive study of
%% Convergent and Commutative Replicated Data Types. http://hal.upmc.fr/inria-00555588/
%%
%% @end
-module(antidote_crdt_set_aw).
-include("antidote_crdt.hrl").
%% Callbacks
-export([
new/0,
value/1,
downstream/2,
update/2,
equal/2,
to_binary/1,
from_binary/1,
is_operation/1,
require_state_downstream/1,
is_bottom/1
]).
-behaviour(antidote_crdt).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type antidote_crdt_set_aw() :: orddict:orddict(member(), tokens()).
%% A binary that from_binary/1 will operate on.
-type binary_antidote_crdt_set_aw() :: binary().
-type antidote_crdt_set_aw_op() ::
{add, member()}
| {remove, member()}
| {add_all, [member()]}
| {remove_all, [member()]}
| {reset, {}}.
%% The downstream op is a list of triples.
%% In each triple:
%% - the first component is the elem that was added or removed
%% - the second component is the list of supporting tokens to be added
%% - the third component is the list of supporting tokens to be removed
-type downstream_op() :: [{member(), tokens(), tokens()}].
-type member() :: term().
-type token() :: binary().
-type tokens() :: [token()].
-spec new() -> antidote_crdt_set_aw().
new() ->
    %% An empty add-wins set is simply an empty orddict
    %% (member -> supporting tokens).
    orddict:new().
%% @doc return all existing elements in the `antidote_crdt_set_aw()'.
-spec value(antidote_crdt_set_aw()) -> [member()].
value(Set) ->
    %% The present members are exactly the orddict keys.
    orddict:fetch_keys(Set).
%% @doc generate downstream operations.
%% If the operation is add or add_all, generate unique tokens for
%% each element and fetches the current supporting tokens.
%% If the operation is remove or remove_all, fetches current
%% supporting tokens of these elements existing in the `antidote_crdt_set_aw()'.
-spec downstream(antidote_crdt_set_aw_op(), antidote_crdt_set_aw()) -> {ok, downstream_op()}.
%% Single-element ops and reset delegate to their bulk counterparts;
%% the bulk clauses produce one {Elem, ToAdd, ToRemove} triple per
%% distinct element.
downstream({add, Elem}, Set) ->
    downstream({add_all, [Elem]}, Set);
downstream({remove, Elem}, Set) ->
    downstream({remove_all, [Elem]}, Set);
downstream({reset, {}}, Set) ->
    %% reset is like removing all elements
    downstream({remove_all, value(Set)}, Set);
downstream({add_all, Elems}, Set) ->
    %% An add carries one freshly generated token plus the currently
    %% observed tokens (which the add supersedes).
    Make = fun(Elem, CurrentTokens) -> {Elem, [unique()], CurrentTokens} end,
    {ok, lists:reverse(create_downstreams(Make, lists:usort(Elems), Set, []))};
downstream({remove_all, Elems}, Set) ->
    %% A remove carries no new tokens, only the observed ones to drop.
    Make = fun(Elem, CurrentTokens) -> {Elem, [], CurrentTokens} end,
    {ok, lists:reverse(create_downstreams(Make, lists:usort(Elems), Set, []))}.
%% @doc apply downstream operations and update an `antidote_crdt_set_aw()'.
-spec update(downstream_op(), antidote_crdt_set_aw()) -> {ok, antidote_crdt_set_aw()}.
update(Downstream, Set) ->
    %% Folding the downstream triples into the state can never fail.
    {ok, apply_downstreams(Downstream, Set)}.
-spec equal(antidote_crdt_set_aw(), antidote_crdt_set_aw()) -> boolean().
equal(SetA, SetB) ->
    %% Both operands are canonically ordered orddicts, so plain
    %% structural comparison decides equality.
    SetA == SetB.
%?DT_ORSET_TAG from riak_dt
-define(TAG, 76).
-define(V1_VERS, 1).
-spec to_binary(antidote_crdt_set_aw()) -> binary_antidote_crdt_set_aw().
to_binary(Set_aw) ->
    %% @TODO something smarter
    %% A tag byte plus a version byte prefix the external term format,
    %% so the representation can evolve without breaking old data.
    <<?TAG:8/integer, ?V1_VERS:8/integer, (term_to_binary(Set_aw))/binary>>.

%% Accepts only binaries produced by to_binary/1 (matching tag/version);
%% anything else raises a function_clause error.
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, Bin/binary>>) ->
    %% @TODO something smarter
    {ok, binary_to_term(Bin)}.
%% @doc generate a unique identifier (best-effort).
%% 160 bits of cryptographically strong randomness; a collision between
%% independently generated tokens is considered practically impossible.
-spec unique() -> token().
unique() ->
    crypto:strong_rand_bytes(20).
%% @private Generic downstream-op creation for adds and removals.
%% Walks the sorted element list and the (sorted) orddict state in
%% lockstep, merge-join style; results accumulate in reverse order.
%% Note: element comparison uses == (like the original) so numeric
%% keys compare arithmetically.
create_downstreams(_CreateDownstream, [], _Set, DownstreamOps) ->
    DownstreamOps;
create_downstreams(CreateDownstream, Elems, [], DownstreamOps) ->
    %% No state left: every remaining element has no current tokens.
    lists:foldl(fun(Elem, Ops) -> [CreateDownstream(Elem, []) | Ops] end,
                DownstreamOps, Elems);
create_downstreams(CreateDownstream, [Elem1 | ElemsRest], [{Elem2, Tokens} | SetRest],
                   DownstreamOps) when Elem1 == Elem2 ->
    %% Element is present in the state: hand over its current tokens.
    create_downstreams(CreateDownstream, ElemsRest, SetRest,
                       [CreateDownstream(Elem1, Tokens) | DownstreamOps]);
create_downstreams(CreateDownstream, [Elem1 | _] = Elems, [{Elem2, _} | SetRest],
                   DownstreamOps) when Elem1 > Elem2 ->
    %% State entry sorts before the next requested element: skip it.
    create_downstreams(CreateDownstream, Elems, SetRest, DownstreamOps);
create_downstreams(CreateDownstream, [Elem1 | ElemsRest], Set, DownstreamOps) ->
    %% Element sorts before the next state entry, i.e. it is absent.
    create_downstreams(CreateDownstream, ElemsRest, Set,
                       [CreateDownstream(Elem1, []) | DownstreamOps]).
%% @private Applies a list of downstream triples to a set state.
%% Both the op list and the state are sorted by element, so they are
%% merged in a single linear pass (comparison via ==, as elsewhere).
apply_downstreams([], Set) ->
    Set;
apply_downstreams(Ops, []) ->
    %% Empty state: each op starts from an empty token set.
    lists:foldl(fun({Elem, ToAdd, ToRemove}, Acc) ->
                        Acc ++ apply_downstream(Elem, [], ToAdd, ToRemove)
                end, [], Ops);
apply_downstreams([{Elem1, ToAdd, ToRemove} | OpsRest], [{Elem2, CurrentTokens} | SetRest])
  when Elem1 == Elem2 ->
    apply_downstream(Elem1, CurrentTokens, ToAdd, ToRemove)
        ++ apply_downstreams(OpsRest, SetRest);
apply_downstreams([{Elem1, _, _} | _] = Ops, [{Elem2, CurrentTokens} | SetRest])
  when Elem1 > Elem2 ->
    %% State entry untouched by any op: keep it verbatim.
    [{Elem2, CurrentTokens} | apply_downstreams(Ops, SetRest)];
apply_downstreams([{Elem1, ToAdd, ToRemove} | OpsRest], Set) ->
    %% Op for an element not present in the state.
    apply_downstream(Elem1, [], ToAdd, ToRemove) ++ apply_downstreams(OpsRest, Set).

%% @private Builds the orddict entry resulting from one downstream op:
%% current tokens plus added tokens, minus removed ones. An element
%% without remaining tokens disappears from the set.
apply_downstream(Elem, CurrentTokens, ToAdd, ToRemove) ->
    case (CurrentTokens ++ ToAdd) -- ToRemove of
        [] -> [];
        Tokens -> [{Elem, Tokens}]
    end.
%% @doc Checks whether Operation is one of the update operations this
%% CRDT understands; unknown shapes yield false.
is_operation({add, _}) -> true;
is_operation({remove, _}) -> true;
is_operation({add_all, Elems}) -> is_list(Elems);
is_operation({remove_all, Elems}) -> is_list(Elems);
is_operation({reset, {}}) -> true;
is_operation(_) -> false.
%% Every supported operation needs the current state to build its
%% downstream effect (the observed tokens are read from the state).
%% Unsupported operations crash with function_clause, as before.
require_state_downstream({Op, _}) when Op =:= add; Op =:= add_all;
                                       Op =:= remove; Op =:= remove_all ->
    true;
require_state_downstream({reset, {}}) ->
    true.
%% A state is bottom iff it equals the freshly initialised (empty) set.
is_bottom(State) -> State == orddict:new().
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% A freshly created set is represented by an empty orddict.
new_test() ->
    ?assertEqual(orddict:new(), new()).

%% Adding one element / a list of elements yields one downstream triple
%% per element; applying the downstream op makes the elements visible.
add_test() ->
    Elem = <<"foo">>,
    Elems = [<<"li">>, <<"manu">>],
    Set1 = new(),
    {ok, DownstreamOp1} = downstream({add, Elem}, Set1),
    ?assertMatch([{Elem, _, _}], DownstreamOp1),
    {ok, DownstreamOp2} = downstream({add_all, Elems}, Set1),
    ?assertMatch([{<<"li">>, _, _}, {<<"manu">>, _, _}], DownstreamOp2),
    {ok, Set2} = update(DownstreamOp1, Set1),
    ?assertEqual([Elem], value(Set2)),
    {ok, Set3} = update(DownstreamOp2, Set1),
    ?assertEqual(Elems, value(Set3)).

%% Generating a downstream op alone must not change the observable
%% value; re-adding an existing element keeps the value set-like.
value_test() ->
    Set1 = new(),
    {ok, DownstreamOp1} = downstream({add, <<"foo">>}, Set1),
    ?assertEqual([], value(Set1)),
    {ok, Set2} = update(DownstreamOp1, Set1),
    ?assertEqual([<<"foo">>], value(Set2)),
    {ok, DownstreamOp2} = downstream({add_all, [<<"foo">>, <<"li">>, <<"manu">>]}, Set2),
    {ok, Set3} = update(DownstreamOp2, Set2),
    ?assertEqual([<<"foo">>, <<"li">>, <<"manu">>], value(Set3)).
%% Removal of observed elements; removing more elements than are
%% present must still succeed (unknown elements are ignored).
remove_test() ->
    Set1 = new(),
    %% Add an element then remove it
    {ok, Op1} = downstream({add, <<"foo">>}, Set1),
    {ok, Set2} = update(Op1, Set1),
    ?assertEqual([<<"foo">>], value(Set2)),
    {ok, Op2} = downstream({remove, <<"foo">>}, Set2),
    {ok, Set3} = update(Op2, Set2),
    ?assertEqual([], value(Set3)),
    %% Add many elements then remove part
    {ok, Op3} = downstream({add_all, [<<"foo">>, <<"li">>, <<"manu">>]}, Set1),
    {ok, Set4} = update(Op3, Set1),
    ?assertEqual([<<"foo">>, <<"li">>, <<"manu">>], value(Set4)),
    {ok, Op5} = downstream({remove_all, [<<"foo">>, <<"li">>]}, Set4),
    {ok, Set5} = update(Op5, Set4),
    ?assertEqual([<<"manu">>], value(Set5)),
    %% Remove more than current have
    {ok, Op6} = downstream({add_all, [<<"foo">>, <<"li">>, <<"manu">>]}, Set1),
    {ok, Set6} = update(Op6, Set1),
    {ok, Op7} = downstream({remove_all, [<<"manu">>, <<"test">>]}, Set6),
    Result = update(Op7, Set6),
    ?assertMatch({ok, _}, Result).

%% A remove op only cancels the tokens it observed, so applying the
%% same remove on different replicas/states is safe and idempotent.
remove2_test() ->
    Set1 = new(),
    %% Add an element then remove it
    {ok, Op1} = downstream({add, <<"foo">>}, Set1),
    {ok, Set2} = update(Op1, Set1),
    ?assertEqual([<<"foo">>], value(Set2)),
    {ok, Op2} = downstream({remove, <<"foo">>}, Set2),
    {ok, Set3} = update(Op2, Set2),
    ?assertEqual([], value(Set3)),
    %% Remove the element again (e.g. on a different replica)
    {ok, Op3} = downstream({remove, <<"foo">>}, Set2),
    {ok, Set4} = update(Op3, Set2),
    ?assertEqual([], value(Set4)),
    %% now execute Op3 on Set3, where the element was already removed locally
    {ok, Set5} = update(Op3, Set3),
    ?assertEqual([], value(Set5)).
%% Add-wins semantics: a remove concurrent with a second add only
%% cancels the tokens it observed, so the concurrently added element
%% survives; a remove that observed both adds removes everything.
concurrent_add_test() ->
    Set1 = new(),
    %% Add an element then remove it
    {ok, Op1} = downstream({add, <<"foo">>}, Set1),
    {ok, Set2} = update(Op1, Set1),
    ?assertEqual([<<"foo">>], value(Set2)),
    %% If remove is concurrent with the second add, will not remove the second added
    {ok, Op2} = downstream({remove, <<"foo">>}, Set2),
    {ok, Op3} = downstream({add, <<"foo">>}, Set1),
    {ok, Set3} = update(Op3, Set2),
    ?assertEqual([<<"foo">>], value(Set3)),
    {ok, Set4} = update(Op2, Set3),
    ?assertEqual([<<"foo">>], value(Set4)),
    %% If remove follows two adds, remove will remove all
    {ok, Op4} = downstream({remove, <<"foo">>}, Set3),
    {ok, Set5} = update(Op4, Set3),
    ?assertEqual([], value(Set5)).

%% to_binary/1 and from_binary/1 must round-trip both the empty and a
%% non-empty state.
binary_test() ->
    Set_aw1 = new(),
    BinarySet_aw1 = to_binary(Set_aw1),
    {ok, Set_aw2} = from_binary(BinarySet_aw1),
    ?assert(equal(Set_aw1, Set_aw2)),
    {ok, Op1} = downstream({add, <<"foo">>}, Set_aw1),
    {ok, Set_aw3} = update(Op1, Set_aw1),
    BinarySet_aw3 = to_binary(Set_aw3),
    {ok, Set_aw4} = from_binary(BinarySet_aw3),
    ?assert(equal(Set_aw3, Set_aw4)).
-endif.
%%% @doc Fuse implements a circuit breaker pattern for Erlang.
%%% @end
-module(fuse).
-ifdef(PULSE).
-include_lib("pulse_otp/include/pulse_otp.hrl").
-endif.
-export([
ask/2,
install/2,
melt/1,
remove/1,
reset/1,
run/3
]).
-export([
circuit_enable/1,
circuit_disable/1
]).
-type fuse_context() :: sync | async_dirty.
-type fault_rate() :: float().
-type fuse_strategy() ::
{standard, pos_integer(), pos_integer()}
| {fault_injection, fault_rate(), pos_integer(), pos_integer()}.
-type fuse_refresh() :: {reset, pos_integer()}.
-type fuse_options() ::
{fuse_strategy(), fuse_refresh()}.
-export_type([fuse_context/0, fuse_options/0]).
%% @doc Adds a new fuse to the running system.
%% <p>A call `install(N, Os)' will add a new fuse under the name `N' with options given by `Os'. Note that the options must match
%% the correct type, or a `badarg' error will be thrown.</p>
%% @end
%% install/2
-spec install(Name, Options) -> ok | reset | {error, Reason}
    when
      Name :: atom(),
      Options :: fuse_options(),
      Reason :: any().
install(Name, Options) ->
    %% Validate eagerly so a malformed option term fails here with
    %% badarg, in the caller's process, not inside the fuse server.
    options_ok(Options),
    fuse_server:install(Name, Options).

%% @doc Administratively disables a circuit.
%% <p>This function is intended to be used administratively, when you want to break the fuse
%% before you do administration on the service which the fuse protects. This can be used to
%% e.g., carry out database maintenance. After maintenance, the administrator can reenable
%% the circuit again.</p>
%% <p>Disabling a circuit dominates every other operation, except `remove/1'.</p>
%% @end.
%% circuit_disable/1
-spec circuit_disable(Name) -> ok
    when Name :: atom().
circuit_disable(Name) ->
    fuse_server:circuit(Name, disable).

%% @doc Administratively (re-)enables a circuit.
%% <p>This call is used to reenable a disabled circuit again. Always returns ok and is idempotent.</p>
%% <p>Use this command at the point in time where you are done with administrative fixes and want
%% to resume normal operation of the fuse.</p>
%% @end
%% circuit_enable/1
-spec circuit_enable(Name) -> ok
    when Name :: atom().
circuit_enable(Name) ->
    fuse_server:circuit(Name, enable).
%% @doc Runs a thunk under a given fuse.
%% <p>Calling `run(Name, Func)' will run `Func' protected by the fuse `Name'.
%% `Func' signals success/failure by returning `{ok, Result}' or
%% `{melt, Result}'; a melt result counts against the fuse.</p>
%% @end
%% run/3
-spec run(Name, fun (() -> {ok, Result} | {melt, Result}), fuse_context() ) -> {ok, Result} | blown | {error, not_found}
    when
      Name :: atom(),
      Result :: any().
run(Name, Func, Context) -> fuse_server:run(Name, Func, Context).

%% @doc Queries the state of a fuse.
%% <p>Given `ask(N)' we ask the fuse state for the name `N'. Returns the fuse state, either `ok' or `blown'.
%% If there is no such fuse, returns `{error, not_found}'.</p>
%% @end
%% ask/2
-spec ask(Name, fuse_context()) -> ok | blown | {error, not_found}
    when Name :: atom().
ask(Name, Context) -> fuse_server:ask(Name, Context).

%% @doc Resets a fuse.
%% <p>Given `reset(N)' this resets the fuse under the name `N'. The fuse will be unbroken with no melts.</p>
%% @end
%% reset/1
-spec reset(Name) -> ok | {error, not_found}
    when Name :: atom().
reset(Name) ->
    fuse_server:reset(Name).

%% @doc Melts a fuse a little bit.
%% <p>A call to `melt(N)' will melt fuse `N'. This call always returns `ok' and it is currently implemented synchronously.</p>
%% @end
%% melt/1
-spec melt(Name) -> ok
    when Name :: atom().
melt(Name) ->
    fuse_server:melt(Name).

%% @doc Removes a fuse.
%% <p>Given `remove(N)' this removes the fuse under the name `N'. This fuse will no longer exist.</p>
%% @end
%% remove/1
-spec remove(Name) -> ok
    when Name :: atom().
remove(Name) ->
    fuse_server:remove(Name).
%% Internal functions
%% -----------------------

%% Validates a fuse option term, throwing `badarg' for anything
%% malformed. A standard strategy is {{standard, MaxR, MaxT}, {reset, Time}};
%% a fault-injection strategy additionally carries a rate in (0.0, 1.0].
%% (Fix: removed stray non-Erlang text that had been appended after the
%% final clause and broke compilation.)
options_ok({{standard, MaxR, MaxT}, {reset, Time}})
  when
    is_integer(MaxR), MaxR > 0,
    is_integer(MaxT), MaxT >= 0,
    is_integer(Time), Time >= 0 -> ok;
options_ok({{fault_injection, Rate, MaxR, MaxT}, {reset, Time}})
  when
    is_integer(MaxR), MaxR > 0,
    is_integer(MaxT), MaxT >= 0,
    is_integer(Time), Time >= 0,
    is_float(Rate), 0.0 < Rate, Rate =< 1.0 -> ok;
options_ok(_) ->
    error(badarg).
%% @doc A method of encoding and decoding some set of Erlang
%% terms to and from binaries.
%%
%% An #osmos_format{} record has two fields:
%% <ul>
%% <li>to_binary (<tt>(any()) -> binary()</tt>): convert a term to binary.
%% This is not required to accept all Erlang terms.</li>
%% <li>from_binary (<tt>(binary()) -> any()</tt>): convert a binary returned
%% by the to_binary function back to the original term.</li>
%% </ul>
%%
%% The convenience functions in this module return #osmos_format{} records
%% for several common term encodings.
%% @end
-module (osmos_format).
% formats
-export ([ binary/0,
term/0,
string_vector/0,
uint64_delete/0,
uint64_vector_delete/0 ]).
% converters
-export ([ identity/1,
uint64_delete_to_binary/1,
uint64_delete_from_binary/1,
uint64_vector_delete_to_binary/1,
uint64_vector_delete_from_binary/1,
string_vector_to_binary/1,
string_vector_from_binary/1 ]).
-include ("osmos.hrl").
%% @spec () -> #osmos_format{}
%% @doc A binary. The binary format is the identical binary.
%% @end
binary () ->
  Id = fun ?MODULE:identity/1,
  #osmos_format { to_binary = Id, from_binary = Id }.

%% @spec () -> #osmos_format{}
%% @doc An erlang term.
%% The binary format is the erlang external term format.
%% @end
term () ->
  #osmos_format { to_binary = fun erlang:term_to_binary/1,
                  from_binary = fun erlang:binary_to_term/1 }.

%% @spec () -> #osmos_format{}
%% @doc An integer in the range [0,2^64), or the atom 'delete'.
%% The binary format is an unsigned 64-bit big-endian integer, or
%% an empty binary to indicate deletion.
%% @end
uint64_delete () ->
  #osmos_format { to_binary = fun ?MODULE:uint64_delete_to_binary/1,
                  from_binary = fun ?MODULE:uint64_delete_from_binary/1 }.

%% @spec () -> #osmos_format{}
%% @doc A tuple of one or more integers, each in the range [0,2^64),
%% or the atom 'delete'. The binary format is a concatenated sequence
%% of big-endian, unsigned 64-bit integers, or an empty binary to
%% indicate deletion.
%% @end
uint64_vector_delete () ->
  #osmos_format { to_binary = fun ?MODULE:uint64_vector_delete_to_binary/1,
                  from_binary = fun ?MODULE:uint64_vector_delete_from_binary/1 }.

%% @spec () -> #osmos_format{}
%% @doc A tuple of strings as binaries, which may not contain ASCII NUL.
%% The binary format is the strings NUL-terminated and concatenated.
%% @end
string_vector () ->
  #osmos_format { to_binary = fun ?MODULE:string_vector_to_binary/1,
                  from_binary = fun ?MODULE:string_vector_from_binary/1 }.
%
% private
%
%% @hidden
%% The identity function; used by binary/0, where both encoding and
%% decoding are no-ops.
identity (X) ->
  X.
%% @hidden
%% Encodes an integer in [0, 2^64) as 8 big-endian bytes; the atom
%% 'delete' encodes as the empty binary. (64-bit segments default to
%% big-endian unsigned integers.)
uint64_delete_to_binary (N) when is_integer (N) ->
  << N:64 >>;
uint64_delete_to_binary (delete) ->
  <<>>.

%% @hidden
%% Inverse of uint64_delete_to_binary/1.
uint64_delete_from_binary (<< N:64 >>) ->
  N;
uint64_delete_from_binary (<<>>) ->
  delete.
%% @hidden
%% Encodes a tuple of integers as concatenated big-endian 64-bit
%% words; the atom 'delete' encodes as the empty binary.
%% NOTE(review): the empty tuple {} also encodes to <<>> and will
%% therefore decode as 'delete' — per the @doc, tuples are expected to
%% contain at least one integer.
uint64_vector_delete_to_binary (Tuple) when is_tuple (Tuple) ->
  iolist_to_binary ([ << C:64 >> || C <- tuple_to_list (Tuple) ]);
uint64_vector_delete_to_binary (delete) ->
  <<>>.

%% @hidden
%% Inverse of uint64_vector_delete_to_binary/1: an empty binary means
%% deletion; otherwise decode 64-bit words into a tuple.
uint64_vector_delete_from_binary (<<>>) ->
  delete;
uint64_vector_delete_from_binary (B) when is_binary (B), B =/= <<>> ->
  uint64_vector_from_binary (B, []).

%% Accumulates decoded words in reverse, then builds the tuple.
uint64_vector_from_binary (<< C:64, Rest/binary >>, Acc) ->
  uint64_vector_from_binary (Rest, [ C | Acc ]);
uint64_vector_from_binary (<<>>, Acc) ->
  list_to_tuple (lists:reverse (Acc)).
%% @hidden
%% NUL-terminates each string in the tuple and concatenates them; the
%% strings themselves must not contain NUL bytes.
string_vector_to_binary (Tuple) when is_tuple (Tuple) ->
  Parts = [ [S, 0] || S <- tuple_to_list (Tuple) ],
  iolist_to_binary (Parts).
%% @hidden
%% Decodes a binary of NUL-terminated strings back into a tuple of
%% binaries; inverse of string_vector_to_binary/1.
%% (Fix: removed stray non-Erlang text that had been appended after the
%% final clause and broke compilation.)
string_vector_from_binary (B) when is_binary (B) ->
  string_vector_from_binary (B, [], []).

%% S holds the bytes of the current string (reversed); Acc holds the
%% completed strings (reversed). A NUL byte terminates a string.
string_vector_from_binary (<< Byte:8, Rest/binary >>, S, Acc) when Byte =/= 0 ->
  string_vector_from_binary (Rest, [ Byte | S ], Acc);
string_vector_from_binary (<< 0:8, Rest/binary >>, S, Acc) ->
  Str = list_to_binary (lists:reverse (S)),
  string_vector_from_binary (Rest, [], [ Str | Acc ]);
string_vector_from_binary (<< >>, [], Acc) ->
  list_to_tuple (lists:reverse (Acc)).
% @copyright 2008-2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Unit tests for src/util.erl.
%% @end
%% @version $Id$
-module(util_SUITE).
-author('<EMAIL>').
-vsn('$Id$').
-compile(export_all).
-include("unittest.hrl").
%% Common Test callback: the list of test cases in this suite.
all() ->
    [min_max, largest_smaller_than, gb_trees_foldl,
     repeat, repeat_collect, repeat_accumulate,
     repeat_p, repeat_p_collect, repeat_p_accumulate,
     random_subsets,
     tester_minus_all, tester_minus_all_sort,
     tester_minus_first, tester_minus_first_sort,
     tester_par_map2, tester_par_map3,
     lists_remove_at_indices,
     rrd_combine_timing_slots_handle_empty_rrd,
     rrd_combine_timing_slots_simple,
     rrd_combine_timing_slots_subset,
     rrd_combine_gauge_slots_handle_empty_rrd,
     rrd_combine_gauge_slots_simple,
     rrd_combine_gauge_slots_subset,
     sublist, tester_sublist3
    ].

%% Common Test callback: suite-wide options (20s timetrap per case).
suite() ->
    [
     {timetrap, {seconds, 20}}
    ].

%% Suite setup/teardown is delegated to the shared unittest helper.
init_per_suite(Config) ->
    unittest_helper:init_per_suite(Config).

end_per_suite(Config) ->
    unittest_helper:end_per_suite(Config).
%% util:min/2 and util:max/2 must follow the standard term ordering,
%% including the equal-arguments case.
min_max(_Config) ->
    ?equals(util:min(1, 2), 1),
    ?equals(util:min(2, 1), 1),
    ?equals(util:min(1, 1), 1),
    ?equals(util:max(1, 2), 2),
    ?equals(util:max(2, 1), 2),
    ?equals(util:max(1, 1), 1),
    ok.
%% util:gb_trees_largest_smaller_than/2 over a tree with power-of-two
%% keys: 'nil' when no key is strictly smaller, otherwise the largest
%% key strictly below the probe (boundaries probed on both sides).
largest_smaller_than(_Config) ->
    KVs = [{1, 1}, {2, 2}, {4, 4}, {8, 8}, {16, 16}, {32, 32}, {64, 64}],
    Tree = gb_trees:from_orddict(KVs),
    ?equals(util:gb_trees_largest_smaller_than(0, Tree), nil),
    ?equals(util:gb_trees_largest_smaller_than(1, Tree), nil),
    ?equals(util:gb_trees_largest_smaller_than(2, Tree), {value, 1, 1}),
    ?equals(util:gb_trees_largest_smaller_than(3, Tree), {value, 2, 2}),
    ?equals(util:gb_trees_largest_smaller_than(7, Tree), {value, 4, 4}),
    ?equals(util:gb_trees_largest_smaller_than(9, Tree), {value, 8, 8}),
    ?equals(util:gb_trees_largest_smaller_than(31, Tree), {value, 16, 16}),
    ?equals(util:gb_trees_largest_smaller_than(64, Tree), {value, 32, 32}),
    ?equals(util:gb_trees_largest_smaller_than(65, Tree), {value, 64, 64}),
    ?equals(util:gb_trees_largest_smaller_than(1000, Tree), {value, 64, 64}),
    ok.
%% Folding the key-sum over the tree (keys equal values here) must
%% visit every entry exactly once: 1+2+4+8+16+32+64 = 127.
gb_trees_foldl(_Config) ->
    KVs = [{1, 1}, {2, 2}, {4, 4}, {8, 8}, {16, 16}, {32, 32}, {64, 64}],
    Tree = gb_trees:from_orddict(KVs),
    Sum = util:gb_trees_foldl(fun(K, K, Acc) -> Acc + K end, 0, Tree),
    ?assert(Sum =:= 127).
%% Sequential repeat of a side-effecting thunk; verified manually via
%% the printed output.
repeat(_) ->
    util:repeat(fun() -> io:format("#s_repeat#~n") end, [], 5),
    io:format("s_repeat_test successful if #s_repeat# was printed 5 times~n"),
    ok.

%% [collect] gathers one result per iteration.
repeat_collect(_) ->
    Times = 3,
    Result = util:repeat(fun(X) -> X * X end, [Times], Times, [collect]),
    ?equals(Result, [9, 9, 9]),
    ok.

%% {accumulate, Fun, Init} folds the per-iteration results; here the
%% sum of Times iterations of Times*Times, with two initial values.
repeat_accumulate(_) ->
    Times = 5,
    A = util:repeat(fun(X) -> X * X end, [Times], Times,
                    [{accumulate, fun(X, Y) -> X + Y end, 0}]),
    ?equals(A, Times*Times*Times),
    B = util:repeat(fun(X) -> X * X end, [Times], Times,
                    [{accumulate, fun(X, Y) -> X + Y end, 1000}]),
    ?equals(B, 1000 + Times*Times*Times),
    ok.

%% [parallel] runs iterations in separate processes; verified manually
%% via the printed pids.
repeat_p(_) ->
    Times = 5,
    util:repeat(
      fun(Caller) -> io:format("~w #p_repeat_test# called by ~w", [self(), Caller]) end,
      [self()],
      Times, [parallel]),
    io:format("p_repeat_test successful if ~B different pids printed #p_repeat#.", [Times]),
    ok.

%% Parallel variant of repeat_collect.
repeat_p_collect(_) ->
    Times = 3,
    A = util:repeat(fun(X) -> X * X end, [Times], Times, [parallel, collect]),
    ?equals(A, [9, 9, 9]),
    ok.

%% Parallel variant of repeat_accumulate.
repeat_p_accumulate(_) ->
    Times = 15,
    A = util:repeat(fun(X) -> X * X end, [Times], Times,
                    [parallel, {accumulate, fun(X, Y) -> X + Y end, 0}]),
    ?equals(A, Times*Times*Times),
    B = util:repeat(fun(X) -> X * X end, [Times], Times,
                    [parallel, {accumulate, fun(X, Y) -> X + Y end, 1000}]),
    ?equals(B, 1000 + Times*Times*Times),
    ok.
%% Asserts that three sampled subsets are not all identical, neither
%% as drawn (order-sensitive) nor after sorting (content-sensitive).
-spec rand_subsets_check(Rand1::[integer()], Rand2::[integer()], Rand3::[integer()]) -> ok.
rand_subsets_check(Rand1, Rand2, Rand3) ->
    Rand1sort = lists:sort(Rand1),
    Rand2sort = lists:sort(Rand2),
    Rand3sort = lists:sort(Rand3),
    ?assert_w_note(Rand1 =/= Rand2 orelse Rand1 =/= Rand3 orelse Rand2 =/= Rand3,
                   {Rand1, Rand2, Rand3}),
    ?assert_w_note(Rand1sort =/= Rand2sort orelse Rand1sort =/= Rand3sort orelse Rand2sort =/= Rand3sort,
                   {Rand1sort, Rand2sort, Rand3sort}),
    ok.

%% Probabilistic test of util:random_subset/2 and util:pop_randomsubset/2.
random_subsets(_) ->
    % assume that when selecting 10 out of 1000 elements, at least one of 3
    % calls yields a result different to the others
    List = lists:seq(1, 1000),
    rand_subsets_check(util:random_subset(10, List),
                       util:random_subset(10, List),
                       util:random_subset(10, List)),
    rand_subsets_check(element(2, util:pop_randomsubset(10, List)),
                       element(2, util:pop_randomsubset(10, List)),
                       element(2, util:pop_randomsubset(10, List))),
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% util:minus_all/2 and util:minus_first/2
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Checks that all intended items are deleted using util:minus_all/2:
%% every excluded value disappears entirely, every other value keeps
%% its multiplicity.
%% Note: this is kindof redundant to prop_minus_all_sort/2 but a cleaner
%% approach avoiding a re-implementation of util:minus_all/2.
-spec prop_minus_all(List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_all(List, Excluded) ->
    Result = util:minus_all(List, Excluded),
    _ = [begin
             case lists:member(L, Excluded) of
                 true -> ?equals_w_note([R || R <- Result, R =:= L], [], io_lib:format("~.0p should have been deleted", [L]));
                 false -> ?equals_w_note([R || R <- Result, R =:= L], [R || R <- List, R =:= L], io_lib:format("Number of ~.0p should remain the same", [L]))
             end
         end || L <- List],
    true.
%% @doc Checks that the order of items stays the same using util:minus_all/2.
-spec prop_minus_all_sort(List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_all_sort(List, Excluded) ->
    Result = util:minus_all(List, Excluded),
    prop_minus_all_sort_helper(Result, List, Excluded).

%% Walks Result and List in parallel: excluded items may be skipped in
%% List, all other items must appear in Result in the same order.
-spec prop_minus_all_sort_helper(Result::[T], List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_all_sort_helper([], [], _) ->
    true;
prop_minus_all_sort_helper([_|_], [], _) ->
    false;
prop_minus_all_sort_helper([], [_|_], _) ->
    true;
prop_minus_all_sort_helper([RH | RT] = R, [LH | LT], Excluded) ->
    case lists:member(LH, Excluded) of
        true -> prop_minus_all_sort_helper(R, LT, Excluded);
        false when LH =:= RH -> prop_minus_all_sort_helper(RT, LT, Excluded);
        false -> false
    end.

%% Drive the properties through the random tester.
tester_minus_all(_Config) ->
    tester:test(?MODULE, prop_minus_all, 2, 5000, [{threads, 2}]).

tester_minus_all_sort(_Config) ->
    tester:test(?MODULE, prop_minus_all_sort, 2, 5000, [{threads, 2}]).
%% @doc Checks that all intended items are deleted once using util:minus_first/2.
%% Note: this is kindof redundant to prop_minus_first_sort/2 but a cleaner
%% approach avoiding a re-implementation of util:minus_first/2.
-spec prop_minus_first(List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_first(List, Excluded) ->
    %% Reference semantics: delete each excluded value once, in order.
    ?equals(util:minus_first(List, Excluded), lists:foldl(fun lists:delete/2, List, Excluded)).

%% @doc Checks that the order of items stays the same using util:minus_first/2.
-spec prop_minus_first_sort(List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_first_sort(List, Excluded) ->
    Result = util:minus_first(List, Excluded),
    prop_minus_first_sort_helper(Result, List, Excluded).

%% Like prop_minus_all_sort_helper/3, but each exclusion is consumed
%% after its first use (delete-once semantics).
-spec prop_minus_first_sort_helper(Result::[T], List::[T], Excluded::[T]) -> true
    when is_subtype(T, any()).
prop_minus_first_sort_helper([], [], _) ->
    true;
prop_minus_first_sort_helper([_|_], [], _) ->
    false;
prop_minus_first_sort_helper([], [_|_], _) ->
    true;
prop_minus_first_sort_helper([RH | RT] = R, [LH | LT], Excluded) ->
    case lists:member(LH, Excluded) of
        true -> prop_minus_first_sort_helper(R, LT, lists:delete(LH, Excluded));
        false when LH =:= RH -> prop_minus_first_sort_helper(RT, LT, Excluded);
        false -> false
    end.

%% Drive the properties through the random tester.
tester_minus_first(_Config) ->
    tester:test(?MODULE, prop_minus_first, 2, 5000, [{threads, 2}]).

tester_minus_first_sort(_Config) ->
    tester:test(?MODULE, prop_minus_first_sort, 2, 5000, [{threads, 2}]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% util:par_map/2
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec try_fun(Module::module(), Fun::atom(), Args::[term()]) -> {ok | throw | error | exit, term()}.
%% Applies Module:Fun(Args...) and normalises the outcome: a normal
%% return becomes {ok, Result}, any exception becomes {Class, Reason}.
try_fun(Module, Fun, Args) ->
    try
        Result = apply(Module, Fun, Args),
        {ok, Result}
    catch
        Class:Reason -> {Class, Reason}
    end.
%% util:par_map/2 must agree with lists:map/2 on results AND on raised
%% exceptions (class and reason), hence the try_fun/3 wrapping.
-spec compare__par_map2__lists_map(Fun::fun((A) -> term()), [A]) -> true | no_return().
compare__par_map2__lists_map(Fun, List) ->
    ParMapRes = try_fun(util, par_map, [Fun, List]),
    ListsMapRes = try_fun(lists, map, [Fun, List]),
    ?equals(ParMapRes, ListsMapRes).

%% Exercise normal results plus throw/error/exit propagation.
-spec prop_par_map2([integer()]) -> ok.
prop_par_map2(List) ->
    compare__par_map2__lists_map(fun(X) -> X * X end, List),
    compare__par_map2__lists_map(fun(_) -> erlang:throw(failed) end, List),
    compare__par_map2__lists_map(fun(_) -> erlang:error(failed) end, List),
    compare__par_map2__lists_map(fun(_) -> erlang:exit(failed) end, List),
    ok.

tester_par_map2(_Config) ->
    tester:test(?MODULE, prop_par_map2, 1, 5000, [{threads, 2}]).

%% Same as above for util:par_map/3, which limits the worker count.
-spec compare__par_map3__lists_map(Fun::fun((A) -> term()), [A], MaxThreads::pos_integer()) -> true | no_return().
compare__par_map3__lists_map(Fun, List, MaxThreads) ->
    ParMapRes = try_fun(util, par_map, [Fun, List, MaxThreads]),
    ListsMapRes = try_fun(lists, map, [Fun, List]),
    ?equals(ParMapRes, ListsMapRes).

-spec prop_par_map3([integer()], 1..1000) -> ok.
prop_par_map3(List, MaxThreads) ->
    compare__par_map3__lists_map(fun(X) -> X * X end, List, MaxThreads),
    compare__par_map3__lists_map(fun(_) -> erlang:throw(failed) end, List, MaxThreads),
    compare__par_map3__lists_map(fun(_) -> erlang:error(failed) end, List, MaxThreads),
    compare__par_map3__lists_map(fun(_) -> erlang:exit(failed) end, List, MaxThreads),
    ok.

tester_par_map3(_Config) ->
    tester:test(?MODULE, prop_par_map3, 2, 5000, [{threads, 2}]).
lists_remove_at_indices(_Config) ->
?equals(util:lists_remove_at_indices([0,1,2,3,4,5], [0,2,4]), [1,3,5]),
% both lists should be non-empty and all indices in the second list should point to
% existing elements in the first list
?expect_exception(util:lists_remove_at_indices([1], []), error, function_clause),
?expect_exception(util:lists_remove_at_indices([], [0]), error, function_clause),
?expect_exception(util:lists_remove_at_indices([0,1,2,3], [5]), error, function_clause),
ok.
sublist(_Config) ->
L = [a,b,c,d,e,f,g],
LLen = length(L),
?equals(util:sublist(L, 1, 0) , {[], LLen}),
?equals(util:sublist(L, 1, 1) , {[a], LLen}),
?equals(util:sublist(L, 1, 3) , {[a,b,c], LLen}),
?equals(util:sublist(L, 1, 7) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, 1, 8) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, 1, 10) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, 2, 10) , {[b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, 3, 10) , {[c,d,e,f,g], LLen}),
?equals(util:sublist(L, 7, 10) , {[g], LLen}),
?equals(util:sublist(L, 8, 10) , {[], LLen}),
?equals(util:sublist(L, 10, 10) , {[], LLen}),
?equals(util:sublist(L, 1, -1) , {[a], LLen}),
?equals(util:sublist(L, 1, -2) , {[a], LLen}),
?equals(util:sublist(L, 1, -3) , {[a], LLen}),
?equals(util:sublist(L, 2, -1) , {[b], LLen}),
?equals(util:sublist(L, 3, -1) , {[c], LLen}),
?equals(util:sublist(L, 4, -1) , {[d], LLen}),
?equals(util:sublist(L, 5, -1) , {[e], LLen}),
?equals(util:sublist(L, 6, -1) , {[f], LLen}),
?equals(util:sublist(L, 7, -1) , {[g], LLen}),
?equals(util:sublist(L, 8, -1) , {[g], LLen}),
?equals(util:sublist(L, 3, -5) , {[c,b,a], LLen}),
?equals(util:sublist(L, -1, 0) , {[], LLen}),
?equals(util:sublist(L, -1, 1) , {[g], LLen}),
?equals(util:sublist(L, -1, 3) , {[g], LLen}),
?equals(util:sublist(L, -1, 10) , {[g], LLen}),
?equals(util:sublist(L, -2, 10) , {[f,g], LLen}),
?equals(util:sublist(L, -3, 10) , {[e,f,g], LLen}),
?equals(util:sublist(L, -7, 10) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, -8, 10) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, -10, 10) , {[a,b,c,d,e,f,g], LLen}),
?equals(util:sublist(L, -1, -1) , {[g], LLen}),
?equals(util:sublist(L, -1, -3) , {[g,f,e], LLen}),
?equals(util:sublist(L, -1, -7) , {[g,f,e,d,c,b,a], LLen}),
?equals(util:sublist(L, -1, -8) , {[g,f,e,d,c,b,a], LLen}),
?equals(util:sublist(L, -1, -10) , {[g,f,e,d,c,b,a], LLen}),
?equals(util:sublist(L, -2, -10) , {[f,e,d,c,b,a], LLen}),
?equals(util:sublist(L, -3, -10) , {[e,d,c,b,a], LLen}),
?equals(util:sublist(L, -7, -10) , {[a], LLen}),
?equals(util:sublist(L, -8, -10) , {[], LLen}),
?equals(util:sublist(L, -10, -10), {[], LLen}),
ok.
-spec prop_sublist3([any()], X::1..1000) -> true.
prop_sublist3(L, X) ->
% last X elements (in different order)
?equals(lists:reverse(element(1, util:sublist(L, -1, -X))),
element(1, util:sublist(L, -X, X))),
% first X elements (in different order)
?equals(lists:reverse(element(1, util:sublist(L, 1, X))),
element(1, util:sublist(L, X, -X))),
true.
tester_sublist3(_Config) ->
tester:test(?MODULE, prop_sublist3, 2, 5000, [{threads, 2}]).
rrd_combine_timing_slots_handle_empty_rrd(_Config) ->
DB0 = rrd:create(10, 10, {timing, us}, {0,0,0}),
Dump = rrd:dump(DB0),
?equals(Dump, []),
?equals(util:rrd_combine_timing_slots(DB0, 0, 10), undefined),
ok
.
rrd_combine_timing_slots_simple(_Config) ->
Adds = [{20, 1}, {25, 3}, {30, 30}, {42, 42}],
DB0 = rrd:create(10, 10, {timing, us}, {0,0,0}),
DB1 = lists:foldl(fun rrd_SUITE:apply/2, DB0, Adds),
?equals(rrd:dump(DB1),
[{{0,0,40}, {0,0,50}, {42, 42*42, 1, 42, 42, {histogram,0,[],0,0}}},
{{0,0,30}, {0,0,40}, {30, 30*30, 1, 30, 30, {histogram,0,[],0,0}}},
{{0,0,20}, {0,0,30}, {1 + 3, 1*1 + 3*3, 2, 1, 3, {histogram,0,[],0,0}}}]),
CurrentTS = {0,0,44}, % assume we are currently in the last slot
Expected = {1 + 3 + 30 + 42, % sum
1*1 + 3*3 + 30*30 + 42*42, % squares' sum
2 + 1 + 1, % count
1, % min
42 % max
},
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, 100), Expected),
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, 100, 10), Expected),
ok
.
rrd_combine_timing_slots_subset(_Config) ->
% combine the newest two slots due to the interval
Adds = [{20, 1}, {25, 3}, {30, 30}, {42, 42}],
DB0 = rrd:create(10, 10, {timing, us}, {0,0,0}),
DB1 = lists:foldl(fun rrd_SUITE:apply/2, DB0, Adds),
?equals(rrd:dump(DB1),
[{{0,0,40}, {0,0,50}, {42, 42*42, 1, 42, 42, {histogram,0,[],0,0}}},
{{0,0,30}, {0,0,40}, {30, 30*30, 1, 30, 30, {histogram,0,[],0,0}}},
{{0,0,20}, {0,0,30}, {1 + 3, 1*1 + 3*3, 2, 1, 3, {histogram,0,[],0,0}}}]),
CurrentTS = {0,0,44}, % assume we are currently in the last slot
Interval = 10, % overlap at most two slots
ExpectedSmallEpsilon = {42 + 30, 42*42 + 30*30, 1 + 1, 30, 42},
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, Interval), ExpectedSmallEpsilon),
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, Interval, 5), ExpectedSmallEpsilon),
% epsilon is big enough to do it only once
ExpectedBigEpsilon = {42, 42*42, 1, 42, 42},
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, Interval, 10), ExpectedBigEpsilon),
?equals(util:rrd_combine_timing_slots(DB1, CurrentTS, Interval, 100), ExpectedBigEpsilon),
ok
.
rrd_combine_gauge_slots_handle_empty_rrd(_Config) ->
DB0 = rrd:create(10, 10, gauge, {0,0,0}),
Dump = rrd:dump(DB0),
?equals(Dump, []),
?equals(util:rrd_combine_gauge_slots(DB0, 0, 10), undefined),
ok
.
rrd_combine_gauge_slots_simple(_Config) ->
Adds = [{20, 1}, {25, 3}, {30, 30}, {42, 42}],
DB0 = rrd:create(10, 10, gauge, {0,0,0}),
DB1 = lists:foldl(fun rrd_SUITE:apply/2, DB0, Adds),
?equals(rrd:dump(DB1),
[{{0,0,40}, {0,0,50}, 42},
{{0,0,30}, {0,0,40}, 30},
{{0,0,20}, {0,0,30}, 3}]),
CurrentTS = {0,0,44}, % assume we are currently in the last slot
Expected = 75,
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, 100), Expected),
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, 100, 10), Expected),
ok
.
rrd_combine_gauge_slots_subset(_Config) ->
% combine the newest two slots due to the interval
Adds = [{20, 1}, {25, 3}, {30, 30}, {42, 42}],
DB0 = rrd:create(10, 10, gauge, {0,0,0}),
DB1 = lists:foldl(fun rrd_SUITE:apply/2, DB0, Adds),
?equals(rrd:dump(DB1),
[{{0,0,40}, {0,0,50}, 42},
{{0,0,30}, {0,0,40}, 30},
{{0,0,20}, {0,0,30}, 3}]),
CurrentTS = {0,0,44}, % assume we are currently in the last slot
Interval = 10, % overlap at most two slots
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, Interval), 72),
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, Interval, 5), 72),
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, Interval, 10), 42), % exits immediately
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, Interval, 100), 42),
?equals(util:rrd_combine_gauge_slots(DB1, CurrentTS, 20, 1), 75),
ok
. | test/util_SUITE.erl | 0.631367 | 0.427038 | util_SUITE.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1998-2021. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : User interface to the Erlang debugger/interpreter.
-module(i).
-export([help/0,ia/1,ia/2,ia/3,ia/4,iaa/1,iaa/2,
ib/2,ib/3,ib/4,ibd/2,ibe/2,iba/3,ibc/3,ic/0,ii/1,ii/2,
il/0,im/0,ini/1,ini/2,inq/1,ip/0,ipb/0,ipb/1,iq/1,
ir/0,ir/1,ir/2,ir/3,iv/0,ist/1]).
-import(io, [format/1,format/2]).
-import(lists, [sort/1,foreach/2]).
iv() ->
Vsn = string:slice(filename:basename(code:lib_dir(debugger)), 9),
list_to_atom(Vsn).
%% -------------------------------------------
%% Start a new graphical monitor.
%% A monitor displays status for all processes
%% running interpreted modules.
%% -------------------------------------------
im() ->
case debugger:start() of
{ok, Pid} ->
Pid;
{error, {already_started, Pid}} ->
Pid
end.
%% -------------------------------------------
%% Add Module(s) as being interpreted.
%% The actual paths will be searched for the
%% corresponding source file(s) (Module.erl).
%% Module(s) can be given with absolute path.
%% -------------------------------------------
ii(Module) ->
int:i(Module).
ii(Module,_Options) ->
int:i(Module).
%% -------------------------------------------
%% Don't interpret module(s). The module will be
%% removed from the set of modules interpreted.
%% -------------------------------------------
iq(Module) ->
int:n(Module).
%% -------------------------------------------
%% The corresponding functions for distributed
%% erlang. The loading ... will be performed
%% at all nodes using the broadcast facility.
%% -------------------------------------------
ini(Module) ->
int:ni(Module).
ini(Module,_Options) ->
int:ni(Module).
inq(Module) ->
int:nn(Module).
%% -------------------------------------------
%% Add a new break point at Line in Module.
%% -------------------------------------------
ib(Module,Line) ->
int:break(Module,Line).
%% -------------------------------------------
%% Break at entrance of specified function.
%% Breaks is set at the first expression for
%% all function clauses.
%% -------------------------------------------
ib(Module,Function,Arity) ->
int:break_in(Module,Function,Arity).
%% -------------------------------------------
%% Break at entrance of specified function.
%% Breaks is set at the first expression for
%% all function clauses.
%% Associate the condition to the break.
%% -------------------------------------------
ib(Module,Function,Arity,Cond) ->
Breaks1 = int:all_breaks(Module),
ok = int:break_in(Module,Function,Arity),
Breaks2 = int:all_breaks(Module),
lists:foreach(fun({Mod,Line}) -> int:test_at_break(Mod,Line,Cond) end,
Breaks2--Breaks1).
%% -------------------------------------------
%% Make an existing break point inactive.
%% -------------------------------------------
ibd(Mod,Line) ->
int:disable_break(Mod,Line).
%% -------------------------------------------
%% Make an existing break point active.
%% -------------------------------------------
ibe(Mod,Line) ->
int:enable_break(Mod,Line).
%% -------------------------------------------
%% Set which status a break point shall have
%% after it has been triggered the next time.
%% Action is: enable, disable or delete.
%% -------------------------------------------
iba(Mod,Line,Action) ->
int:action_at_break(Mod,Line,Action).
%% -------------------------------------------
%% Add a conditional function to a break point.
%% The given function shall have arity 1 and
%% return either true or false.
%% The argument of the given function is the
%% current variable bindings of the process at
%% the place of the break point, the bindings
%% can be inspected using int:get_binding/2.
%% Fnk == {Module,Function}
%% Fnk == {Module,Function,ExtraArgs}
%% -------------------------------------------
ibc(Mod,Line,Fnk) ->
int:test_at_break(Mod,Line,Fnk).
%% -------------------------------------------
%% Delete break point.
%% -------------------------------------------
ir(Module,Line) ->
int:delete_break(Module,Line).
%% -------------------------------------------
%% Delete break at entrance of specified function.
%% -------------------------------------------
ir(Module,Function,Arity) ->
int:del_break_in(Module,Function,Arity).
%% -------------------------------------------
%% Delete all break points in module.
%% -------------------------------------------
ir(Module) ->
int:no_break(Module).
%% -------------------------------------------
%% Delete all break points (for all modules).
%% -------------------------------------------
ir() ->
int:no_break().
%% -------------------------------------------
%% Print all interpreted modules.
%% -------------------------------------------
il() ->
Mods = sort(int:interpreted()),
ilformat("Module","File"),
foreach(fun(Mod) -> ilformat(atom_to_list(Mod), get_file(Mod)) end, Mods).
get_file(Mod) ->
case int:file(Mod) of
{error,not_loaded} -> % Marked interpreted but not loaded
"not loaded";
File ->
File
end.
ilformat(A1, A2) ->
format("~-20s ~ts\n", [A1,A2]).
%% -------------------------------------------
%% Print all break points in modules.
%% -------------------------------------------
ipb() ->
Bps = lists:keysort(1,int:all_breaks()),
bhformat("Module","Line","Status","Action","Condition"),
pb_print(Bps).
ipb(Module) when is_atom(Module) ->
ipb1(Module);
ipb(Module) when is_list(Module) ->
ipb1(list_to_atom(Module)).
ipb1(Module) ->
Bps = lists:keysort(1,int:all_breaks(Module)),
bhformat("Module","Line","Status","Action","Condition"),
pb_print(Bps).
pb_print([{{Mod,Line},[Status,Action,_,null|_]}|Bps]) ->
bformat(Mod,Line,Status,Action,""),
pb_print(Bps);
pb_print([{{Mod,Line},[Status,Action,_,Cond|_]}|Bps]) ->
bformat(Mod,Line,Status,Action,
io_lib:format("~w",[Cond])),
pb_print(Bps);
pb_print(_) ->
ok.
bhformat(A1, A2, A3, A4, A5) ->
format("~-15s ~-9s ~-12s ~-12s ~-21s~n", [A1,A2,A3,A4,A5]).
bformat(A1, A2, A3, A4, A5) ->
format("~-15w ~-9w ~-12w ~-12w ~-21s~n", [A1,A2,A3,A4,A5]).
%% -------------------------------------------
%% Set the stack trace flag.
%% Flag can be all (true), no_tail or false.
%% -------------------------------------------
ist(Flag) ->
int:stack_trace(Flag),
true.
%% -------------------------------------------
%% Set the automatic attachment flag.
%% Flags can be init, break and exit.
%% iaa(Flag) or ia([Flag,Flag,...])
%% -------------------------------------------
iaa(Flag) ->
iaa(Flag,{dbg_wx_trace,start,[]}).
%% -------------------------------------------
%% Set the automatic attachment flag.
%% Flags can be init, break and exit.
%% Use given function to start up an attachment
%% window.
%% ia(Flag,Fnk) or ia([Flag,Flag,...],Fnk)
%% where Fnk == {M,F}
%% The given Fnk must have arity 3 or 4.
%% -------------------------------------------
iaa(Flag,Fnk) ->
int:auto_attach(Flag,Fnk),
true.
%% -------------------------------------------
%% Attach to process.
%% -------------------------------------------
ia(Pid) ->
ia(Pid,{dbg_wx_trace,start}).
%% -------------------------------------------
%% Attach to process.
%% X,Y,Z is combined to a process identity.
%% -------------------------------------------
ia(X,Y,Z) ->
ia(c:pid(X,Y,Z)).
%% -------------------------------------------
%% Attach to process.
%% Use Fnk == {M,F} as the attaching interface.
%% -------------------------------------------
ia(Pid,Fnk) ->
case lists:keymember(Pid, 1, int:snapshot()) of
false -> no_proc;
true -> int:attach(Pid,Fnk)
end.
ia(X,Y,Z,Fnk) ->
ia(c:pid(X,Y,Z),Fnk).
%% -------------------------------------------
%% Print status for all interpreted processes.
%% -------------------------------------------
ip() ->
Stats = int:snapshot(),
hformat("Pid","Initial Call","Status","Info"),
ip(Stats).
ip([{Pid,{M,F,A},Status,{}}|Stats]) ->
hformat(io_lib:format("~w",[Pid]),
io_lib:format("~w:~tw/~w",[M,F,length(A)]),
io_lib:format("~w",[Status]),
""),
ip(Stats);
ip([{Pid,{M,F,A},Status,Info}|Stats]) ->
hformat(io_lib:format("~w",[Pid]),
io_lib:format("~w:~tw/~w",[M,F,length(A)]),
io_lib:format("~w",[Status]),
io_lib:format("~w",[Info])),
ip(Stats);
ip([]) ->
ok.
hformat(A1, A2, A3, A4) ->
format("~-12s ~-21ts ~-9s ~-21s~n", [A1,A2,A3,A4]).
%% -------------------------------------------
%% Delete all terminated processes from the
%% interpreter.
%% -------------------------------------------
ic() ->
int:clear().
%% -------------------------------------------
%% Help printout
%% -------------------------------------------
help() ->
format("iv() -- print the current version of the interpreter~n"),
format("im() -- pop up a monitor window~n"),
format("ii(Mod) -- interpret Mod(s) (or AbsMod(s))~n"),
format("ii(Mod,Op) -- interpret Mod(s) (or AbsMod(s))~n"),
format(" use Op as options (same as for compile)~n"),
format("iq(Mod) -- do not interpret Mod(s)~n"),
format("ini(Mod) -- ii/1 at all Erlang nodes~n"),
format("ini(Mod,Op) -- ii/2 at all Erlang nodes~n"),
format("inq(Mod) -- iq at all Erlang nodes~n"),
format("ib(Mod,Line) -- set a break point at Line in Mod~n"),
format("ib(M,F,Arity)-- set a break point in M:F/Arity~n"),
format("ibd(Mod,Line)-- disable the break point at Line in Mod~n"),
format("ibe(Mod,Line)-- enable the break point at Line in Mod~n"),
format("iba(M,L,Action)-- set a new action at break~n"),
format("ibc(M,L,Action)-- set a new condition for break~n"),
format("ir(Mod,Line) -- remove the break point at Line in Mod~n"),
format("ir(M,F,Arity)-- remove the break point in M:F/Arity~n"),
format("ir(Mod) -- remove all break points in Mod~n"),
format("ir() -- remove all existing break points~n"),
format("il() -- list all interpreted modules~n"),
format("ip() -- print status of all interpreted processes~n"),
format("ic() -- remove all terminated interpreted processes~n"),
format("ipb() -- list all break points~n"),
format("ipb(Mod) -- list all break points in Mod~n"),
format("ia(Pid) -- attach to Pid~n"),
format("ia(X,Y,Z) -- attach to pid(X,Y,Z)~n"),
format("ia(Pid,Fun) -- use own Fun = {M,F} as attach application~n"),
format("ia(X,Y,Z,Fun)-- use own Fun = {M,F} as attach application~n"),
format("iaa([Flag]) -- set automatic attach to process~n"),
format(" Flag is init,break and exit~n"),
format("iaa([Fl],Fun)-- use own Fun = {M,F} as attach application~n"),
format("ist(Flag) -- set stack trace flag~n"),
format(" Flag is all (true),no_tail or false~n"),
ok. | lib/debugger/src/i.erl | 0.529993 | 0.411761 | i.erl | starcoder |
%% @doc
%% A Histogram tracks the size and number of events in buckets.
%% You can use Histograms for aggregatable calculation of quantiles.
%%
%% Example use cases for Histograms:
%% <ul>
%% <li>Response latency</li>
%% <li>Request size</li>
%% </ul>
%%
%% Histogram expects `buckets` key in a metric spec. Buckets can be:
%% - a list of numbers in increasing order;
%% one of the generate specs (shortcuts for `prometheus_buckets' functions)
%% - default;
%% - {linear, Start, Step, Count};
%% - {exponential, Start, Factor, Count}
%%
%% Example:
%% <pre lang="erlang">
%% -module(example_instrumenter).
%% setup() ->
%% prometheus_histogram:declare([{name, http_request_duration_milliseconds},
%% {labels, [method]},
%% {buckets, [100, 300, 500, 750, 1000]},
%% {help, "Http Request execution time."}]).
%%
%% instrument(Time, Method) ->
%% %% Time must be in native units, otherwise duration_unit must be false
%% prometheus_histogram:observe(http_request_duration_milliseconds,
%% [Method], Time).
%%
%% </pre>
%% @end
-module(prometheus_histogram).
%%% metric
-export([new/1,
new/2,
declare/1,
declare/2,
observe/2,
observe/3,
observe/4,
dobserve/2,
dobserve/3,
dobserve/4,
observe_duration/2,
observe_duration/3,
observe_duration/4,
remove/1,
remove/2,
remove/3,
reset/1,
reset/2,
reset/3,
value/1,
value/2,
value/3,
buckets/1,
buckets/2,
buckets/3]
).
%%% collector
-export([deregister_cleanup/1,
collect_mf/2,
collect_metrics/2]).
%%% gen_server
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
start_link/0]).
-ifdef(TEST).
-export([default_buckets/0,
linear_buckets/3,
exponential_buckets/3]).
-endif.
-import(prometheus_model_helpers, [create_mf/5,
gauge_metrics/1,
gauge_metric/1,
gauge_metric/2,
counter_metric/1,
counter_metric/2,
histogram_metric/3,
histogram_metric/4]).
-include("prometheus.hrl").
-behaviour(prometheus_metric).
-behaviour(prometheus_collector).
%%====================================================================
%% Macros
%%====================================================================
-define(TABLE, ?PROMETHEUS_HISTOGRAM_TABLE).
-define(BUCKETS_POS, 2).
-define(BUCKETS_START, 3).
-define(WIDTH, 16).
%%====================================================================
%% Metric API
%%====================================================================
%% @doc Creates a histogram using `Spec'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Spec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.<br/>
%% Raises `{invalid_value_error, Value, Message}' error if `duration_unit' is
%% unknown or doesn't match metric name.<br/>
%% Raises `{mf_already_exists, {Registry, Name}, Message}' error if a histogram
%% with the same `Spec' already exists.
%%
%% Histogram-specific errors:<br/>
%% Raises `{histogram_no_buckets, Buckets}' error if `Buckets' are missing,
%% not a list, empty list or not known buckets spec.<br/>
%% Raises `{histogram_invalid_buckets, Buckets, Message}' error if `Buckets'
%% aren't in increasing order.<br/>
%% Raises `{histogram_invalid_bound, Bound}' error if `Bound' isn't a number.
%% @end
new(Spec) ->
  %% Validate labels/buckets first; insert_new_mf raises mf_already_exists
  %% if a family with the same spec is already registered.
  prometheus_metric:insert_new_mf(?TABLE, ?MODULE, validate_histogram_spec(Spec)).
%% @deprecated Please use {@link new/1} with registry
%% key instead.
new(Spec, Registry) ->
  %% Logs a deprecation warning, then folds Registry into the spec.
  ?DEPRECATED("prometheus_histogram:new/2", "prometheus_histogram:new/1"
              " with registry key"),
  new([{registry, Registry} | Spec]).
%% @doc Creates a histogram using `Spec'.
%% If a histogram with the same `Spec' exists returns `false'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Spec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.<br/>
%% Raises `{invalid_value_error, Value, Message}' error if `duration_unit' is
%% unknown or doesn't match metric name.<br/>
%%
%% Histogram-specific errors:<br/>
%% Raises `{histogram_no_buckets, Buckets}' error if `Buckets' are missing,
%% not a list, empty list or not known buckets spec.<br/>
%% Raises `{histogram_invalid_buckets, Buckets, Message}' error if `Buckets'
%% aren't in increasing order.<br/>
%% Raises `{histogram_invalid_bound, Bound}' error if `Bound' isn't a number.
%% @end
declare(Spec) ->
  %% Like new/1 but idempotent: insert_mf returns false instead of raising
  %% when the family already exists.
  prometheus_metric:insert_mf(?TABLE, ?MODULE, validate_histogram_spec(Spec)).
%% @deprecated Please use {@link declare/1} with registry
%% key instead.
declare(Spec, Registry) ->
  %% Logs a deprecation warning, then folds Registry into the spec.
  ?DEPRECATED("prometheus_histogram:declare/2", "prometheus_histogram:declare/1"
              " with registry key"),
  declare([{registry, Registry} | Spec]).
%% @equiv observe(default, Name, [], Value)
observe(Name, Value) ->
  %% Delegates to observe/4 with the default registry and no labels.
  observe(default, Name, [], Value).
%% @equiv observe(default, Name, LabelValues, Value)
observe(Name, LabelValues, Value) ->
  %% Delegates to observe/4 with the default registry.
  observe(default, Name, LabelValues, Value).
%% @doc Observes the given `Value'.
%%
%% Raises `{invalid_value, Value, Message}' if `Value'
%% isn't an integer.<br/>
%% Raises `{unknown_metric, Registry, Name}' error if histogram with named
%% `Name' can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
%% Fast path: find the series row and bump both the matching bucket counter
%% and the running sum atomically via ets:update_counter/3.
%% NOTE(review): key/3 is defined outside this chunk; if it folds in the
%% current scheduler id, the lookup and the update below may compute
%% different keys when the process migrates -- confirm key/3's behavior.
observe(Registry, Name, LabelValues, Value) when is_integer(Value) ->
  case ets:lookup(?TABLE, key(Registry, Name, LabelValues)) of
    [Metric] ->
      {BucketPosition, SumPosition} =
        calculate_histogram_update_positions(Metric, Value),
      ets:update_counter(?TABLE, key(Registry, Name, LabelValues),
                         [{BucketPosition, 1}, {SumPosition, Value}]);
    [] ->
      %% Series not materialized yet: create it, then retry via observe/4.
      insert_metric(Registry, Name, LabelValues, Value, fun observe/4)
  end,
  ok;
%% Non-integer values are rejected; use dobserve/4 for floats.
observe(_Registry, _Name, _LabelValues, Value) ->
  erlang:error({invalid_value, Value, "observe accepts only integers"}).
%% @equiv dobserve(default, Name, [], Value)
dobserve(Name, Value) ->
  %% Float-capable observe against the default registry with no labels.
  dobserve(default, Name, [], Value).
%% @equiv dobserve(default, Name, LabelValues, Value)
dobserve(Name, LabelValues, Value) ->
  %% Float-capable observe against the default registry.
  dobserve(default, Name, LabelValues, Value).
%% @doc Observes the given `Value'.
%% If `Value' happened to be a float number even one time(!) you
%% shouldn't use {@link observe/4} after dobserve.
%%
%% Raises `{invalid_value, Value, Message}' if `Value'
%% isn't a number.<br/>
%% Raises `{unknown_metric, Registry, Name}' error if histogram with named
%% `Name' can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
dobserve(Registry, Name, LabelValues, Value) when is_number(Value) ->
  %% Ensure the family exists (raises unknown_metric otherwise), then route
  %% the update through the gen_server so float sums are serialized.
  MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% Single mf_call_timeout/1 lookup; the original computed it twice and
  %% ignored the first result on the cast path.
  case prometheus_metric:mf_call_timeout(MF) of
    false ->
      %% No call timeout configured: fire-and-forget cast.
      gen_server:cast(?MODULE,
                      {observe, {Registry, Name, LabelValues, Value}});
    CallTimeout ->
      %% Timeout configured: synchronous call bounded by CallTimeout.
      gen_server:call(?MODULE,
                      {observe, {Registry, Name, LabelValues, Value}},
                      CallTimeout)
  end,
  ok;
dobserve(_Registry, _Name, _LabelValues, Value) ->
  erlang:error({invalid_value, Value, "dobserve accepts only numbers"}).
%% @equiv observe_duration(default, Name, [], Fun)
observe_duration(Name, Fun) ->
  %% Times Fun against the default registry with no labels.
  observe_duration(default, Name, [], Fun).
%% @equiv observe_duration(default, Name, LabelValues, Fun)
observe_duration(Name, LabelValues, Fun) ->
  %% Times Fun against the default registry.
  observe_duration(default, Name, LabelValues, Fun).
%% @doc Tracks the amount of time spent executing `Fun'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if histogram with named
%% `Name' can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% Raises `{invalid_value, Value, Message}' if `Fun'
%% isn't a function.<br/>
%% @end
observe_duration(Registry, Name, LabelValues, Fun) when is_function(Fun) ->
  %% Use the monotonic clock so the measurement is immune to wall-clock
  %% adjustments; the observed value is in native time units.
  Start = erlang:monotonic_time(),
  try
    Fun()
  after
    %% Record the elapsed time even when Fun() raises.
    observe(Registry, Name, LabelValues, erlang:monotonic_time() - Start)
  end;
%% Fixed clause-head typo: was `_Regsitry', inconsistent with every other
%% clause in this module.
observe_duration(_Registry, _Name, _LabelValues, Fun) ->
  erlang:error({invalid_value, Fun, "observe_duration accepts only functions"}).
%% @equiv remove(default, Name, [])
remove(Name) ->
  %% Removes the unlabelled series from the default registry.
  remove(default, Name, []).
%% @equiv remove(default, Name, LabelValues)
remove(Name, LabelValues) ->
  %% Removes the series from the default registry.
  remove(default, Name, LabelValues).
%% @doc Removes histogram series identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if histogram with named
%% `Name' can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
remove(Registry, Name, LabelValues) ->
  prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% Take (read + delete) every per-scheduler partition of this series;
  %% the series existed iff at least one partition held a row.
  Taken = [ets:take(?TABLE, {Registry, Name, LabelValues, Scheduler})
           || Scheduler <- schedulers_seq()],
  lists:append(Taken) =/= [].
%% @equiv reset(default, Name, [])
reset(Name) ->
  %% Resets the unlabelled series in the default registry.
  reset(default, Name, []).
%% @equiv reset(default, Name, LabelValues)
reset(Name, LabelValues) ->
  %% Resets the series in the default registry.
  reset(default, Name, LabelValues).
%% @doc Resets the value of the histogram identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if histogram with named
%% `Name' can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
reset(Registry, Name, LabelValues) ->
  MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  Buckets = prometheus_metric:mf_data(MF),
  %% Build an update spec that zeroes every bucket counter plus the sum.
  UpdateSpec = generate_update_spec(?BUCKETS_START, length(Buckets)),
  %% update_element returns a boolean per scheduler partition; usort
  %% collapses those to [false], [true] or [false, true]. Any partition
  %% having been updated counts as success.
  case lists:usort([ets:update_element(?TABLE,
                                       {Registry, Name, LabelValues, Scheduler},
                                       UpdateSpec)
                    || Scheduler <- schedulers_seq()]) of
    [_, _] -> true;
    [true] -> true;
    _ -> false
  end.
%% @equiv value(default, Name, [])
value(Name) ->
  %% Reads the unlabelled series from the default registry.
  value(default, Name, []).
%% @equiv value(default, Name, LabelValues)
value(Name, LabelValues) ->
  %% Reads the series from the default registry.
  value(default, Name, LabelValues).
%% @doc Returns the value of the histogram identified by `Registry', `Name'
%% and `LabelValues'. If there is no histogram for `LabelValues',
%% returns `undefined'.
%%
%% If duration unit set, sum will be converted to the duration unit.
%% {@link prometheus_time. Read more here.}
%%
%% Raises `{unknown_metric, Registry, Name}' error if histogram named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
value(Registry, Name, LabelValues) ->
  MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  %% Collect this series' per-scheduler partitions and merge them into a
  %% single {BucketCounters, Sum} pair; no partition means no series.
  Rows = lists:append([ets:lookup(?TABLE, {Registry, Name, LabelValues, Scheduler})
                       || Scheduler <- schedulers_seq()]),
  case Rows of
    [] -> undefined;
    _ -> {reduce_buckets_counters(Rows), reduce_sum(MF, Rows)}
  end.
%% @equiv buckets(default, Name, [])
buckets(Name) ->
  %% Bucket bounds of the unlabelled series in the default registry.
  buckets(default, Name, []).
%% @equiv buckets(default, Name, LabelValues)
buckets(Name, LabelValues) ->
  %% Bucket bounds of the series in the default registry.
  buckets(default, Name, LabelValues).
%% @doc Returns buckets of the histogram identified by `Registry', `Name'
%% and `LabelValues'.
%% @end
buckets(Registry, Name, LabelValues) ->
  %% Bucket bounds are stored as the metric family's data payload
  %% (see validate_histogram_spec/1, which puts them under the data key).
  MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  prometheus_metric:mf_data(MF).
%%====================================================================
%% Collector API
%%====================================================================
%% @private
deregister_cleanup(Registry) ->
  %% Drop every series row of every histogram family in the registry, then
  %% remove the family definitions themselves.
  [delete_metrics(Registry, Buckets)
   || [_, _, _, _, Buckets] <- prometheus_metric:metrics(?TABLE, Registry)],
  true = prometheus_metric:deregister_mf(?TABLE, Registry),
  ok.
%% @private
collect_mf(Registry, Callback) ->
  %% Emit one metric family per registered histogram in the registry.
  [Callback(create_histogram(Name, Help, {Labels, Registry, DU, Buckets})) ||
    [Name, {Labels, Help}, _, DU, Buckets]
      <- prometheus_metric:metrics(?TABLE, Registry)],
  ok.
%% @private
collect_metrics(Name, {Labels, Registry, DU, MFBuckets}) ->
  %% Build an ets:match/2 pattern capturing, per series row: the label
  %% values ('$1'), one placeholder per bucket counter, and the sum cell.
  BoundPlaceholders = gen_query_bound_placeholders(MFBuckets),
  SumPlaceholder = gen_query_placeholder(sum_position(MFBuckets)),
  QuerySpec =
    [{Registry, Name, '$1', '_'}, '_']
    ++ BoundPlaceholders
    ++ [SumPlaceholder],
  %% NOTE(review): bucket bounds pass through
  %% prometheus_time:maybe_convert_to_native/2 before being emitted --
  %% confirm this is the intended conversion direction for exposition.
  Fun = fun (Bucket) ->
            prometheus_time:maybe_convert_to_native(DU, Bucket)
        end,
  Buckets = lists:map(Fun, MFBuckets),
  MFValues = ets:match(?TABLE, list_to_tuple(QuerySpec)),
  %% One histogram metric per distinct label-value combination, summing
  %% that combination's per-scheduler partitions.
  [begin
     Stat = reduce_label_values(LabelValues, MFValues),
     create_histogram_metric(Labels, DU, Buckets, LabelValues, Stat)
   end ||
    LabelValues <- collect_unique_labels(MFValues)].
%%====================================================================
%% Gen_server API
%%====================================================================
%% @private
start_link() ->
  %% Register under the module name so dobserve casts/calls can reach us.
  gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% @private
init(_Args) ->
  %% The server keeps no state; it only serializes float observations.
  {ok, []}.
%% @private
%% Synchronous float-observe path, used when a call timeout is configured.
handle_call({observe, {Registry, Name, LabelValues, Value}}, _From, State) ->
  dobserve_impl(Registry, Name, LabelValues, Value),
  {reply, ok, State}.
%% @private
%% Fire-and-forget float-observe path, used when no call timeout is set.
handle_cast({observe, {Registry, Name, LabelValues, Value}}, State) ->
  dobserve_impl(Registry, Name, LabelValues, Value),
  {noreply, State}.
%% @private
%% Ignore unexpected messages so the server never crashes on stray sends.
handle_info(_Info, State) ->
  {noreply, State}.
%% @private
%% Nothing to clean up: metric data lives in ets, not in server state.
terminate(_Reason, _State) ->
  ok.
%% @private
%% State format is stable across releases; nothing to migrate.
code_change(_OldVsn, State, _Extra) ->
  {ok, State}.
%%====================================================================
%% Private Parts
%%====================================================================
validate_histogram_spec(Spec) ->
  %% Reject the reserved "le" label, then normalise the buckets option
  %% (defaulting when absent) and stash the result under the data key.
  validate_histogram_labels(prometheus_metric_spec:labels(Spec)),
  RawBuckets = prometheus_metric_spec:get_value(buckets, Spec, default_buckets()),
  [{data, validate_buckets(RawBuckets)} | Spec].
validate_histogram_labels(Labels) ->
[raise_error_if_le_label_found(Label) || Label <- Labels].
%% @doc Rejects the reserved histogram label name "le" by raising
%% `{invalid_metric_label_name, "le", Message}'; any other label is
%% returned unchanged.
raise_error_if_le_label_found(Label) ->
  case Label of
    "le" ->
      Message = "histogram cannot have a label named \"le\"",
      erlang:error({invalid_metric_label_name, "le", Message});
    _Other ->
      Label
  end.
%% @doc Default bucket bounds (delegates to prometheus_buckets).
default_buckets () ->
  prometheus_buckets:default().

%% @doc `Count' bounds starting at `Start', each `Step' apart.
linear_buckets(Start, Step, Count) ->
  prometheus_buckets:linear(Start, Step, Count).

%% @doc `Count' bounds starting at `Start', each scaled by `Factor'.
exponential_buckets(Start, Factor, Count) ->
  prometheus_buckets:exponential(Start, Factor, Count).

%% @doc Normalizes a raw `buckets' option into a sorted list of numeric
%% bounds, always terminated by the implicit `infinity' (+Inf) bucket.
%% Accepts `default', `{linear, ...}', `{exponential, ...}' or an
%% explicit sorted list of numbers; everything else raises.
validate_buckets([]) ->
  erlang:error({histogram_no_buckets, []});
validate_buckets(undefined) ->
  erlang:error({histogram_no_buckets, undefined});
validate_buckets(default) ->
  default_buckets() ++ [infinity];
validate_buckets({linear, Start, Step, Count}) ->
  linear_buckets(Start, Step, Count) ++ [infinity];
validate_buckets({exponential, Start, Factor, Count}) ->
  exponential_buckets(Start, Factor, Count) ++ [infinity];
validate_buckets(RawBuckets) when is_list(RawBuckets) ->
  Buckets = lists:map(fun validate_histogram_bound/1, RawBuckets),
  %% a list equal to its own sort is already sorted (duplicates allowed)
  case lists:sort(Buckets) of
    Buckets ->
      Buckets ++ [infinity];
    _ ->
      erlang:error({histogram_invalid_buckets, Buckets, "buckets not sorted"})
  end;
validate_buckets(Buckets) ->
  erlang:error({histogram_invalid_buckets, Buckets, "not a list"}).
%% @doc Returns `Bound' when it is a number; otherwise raises
%% `{histogram_invalid_bound, Bound}'.
validate_histogram_bound(Bound) ->
  case is_number(Bound) of
    true  -> Bound;
    false -> erlang:error({histogram_invalid_bound, Bound})
  end.
%% @doc Applies an observation of `Value': bumps the matching bucket
%% counter and adds `Value' to the running sum of the metric row keyed
%% by {Registry, Name, LabelValues}. Creates the row first if absent.
%% NOTE(review): the sum update (update_element) and the bucket bump
%% (update_counter) are two separate ets operations; presumably safe
%% because dobserve traffic is serialized through the gen_server —
%% confirm.
dobserve_impl(Registry, Name, LabelValues, Value) ->
  case ets:lookup(?TABLE, key(Registry, Name, LabelValues)) of
    [Metric] ->
      {BucketPosition, SumPosition} =
        calculate_histogram_update_positions(Metric, Value),
      ets:update_element(?TABLE, key(Registry, Name, LabelValues),
                         {SumPosition, sum(Metric) + Value}),
      ets:update_counter(?TABLE, key(Registry, Name, LabelValues),
                         {BucketPosition, 1});
    [] ->
      insert_metric(Registry, Name, LabelValues, Value, fun dobserve_impl/4)
  end.

%% @doc Creates the initial ets row for a metric (unit-converted bucket
%% bounds, zeroed bucket counters, zero sum) and retries the original
%% update via `CB'. check_mf_exists/4 raises if the family is unknown.
insert_metric(Registry, Name, LabelValues, Value, CB) ->
  MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
  MFBuckets = prometheus_metric:mf_data(MF),
  DU = prometheus_metric:mf_duration_unit(MF),
  Fun = fun (Bucket) ->
            prometheus_time:maybe_convert_to_native(DU, Bucket)
        end,
  BoundCounters = lists:duplicate(length(MFBuckets), 0),
  MetricSpec =
    [key(Registry, Name, LabelValues), lists:map(Fun, MFBuckets)]
    ++ BoundCounters
    ++ [0],
  ets:insert(?TABLE, list_to_tuple(MetricSpec)),
  CB(Registry, Name, LabelValues, Value).

%% @doc Tuple positions of the bucket counter `Value' falls into (the
%% first bound with Value =< Bound; the trailing `infinity' bound always
%% matches) and of the sum field.
calculate_histogram_update_positions(Metric, Value) ->
  Buckets = metric_buckets(Metric),
  BucketPosition = ?BUCKETS_POS + position(Buckets, fun(Bound) ->
                                                        Value =< Bound
                                                    end),
  SumPosition = sum_position(Metric),
  {BucketPosition, SumPosition}.

%% @doc Builds an ets update spec zeroing `BucketsCount' counters.
%% NOTE(review): mixes the `BucketsStart' argument with the
%% ?BUCKETS_START macro in the seq bounds — looks suspicious; verify
%% against callers before relying on it.
generate_update_spec(BucketsStart, BucketsCount) ->
  [{Index, 0} ||
    Index <- lists:seq(BucketsStart, ?BUCKETS_START + BucketsCount)].
%% @doc Returns the ets match-spec placeholder atom for `Index',
%% e.g. 3 -> '$3'.
gen_query_placeholder(Index) ->
  Name = lists:flatten(io_lib:format("$~b", [Index])),
  list_to_atom(Name).
%% @doc One '$N' placeholder per bucket-counter column of a metric row,
%% for N in [?BUCKETS_START .. ?BUCKETS_POS + length(Buckets)].
gen_query_bound_placeholders(Buckets) ->
  [gen_query_placeholder(Index) ||
    Index <- lists:seq(?BUCKETS_START, ?BUCKETS_POS + length(Buckets))].
%% @doc Turns per-bucket counters into cumulative counters, as the
%% Prometheus histogram exposition format requires: each output element
%% is the sum of all input elements up to and including that position,
%% e.g. [1,2,3] -> [1,3,6]. Raises function_clause on [] (unchanged
%% from the original contract).
augment_counters([Start | Counters]) ->
  augment_counters(Counters, [Start], Start).

%% Accumulates in reverse and flips once at the end: O(n), unlike the
%% previous `Acc ++ [X]' version which was O(n^2) in the bucket count.
augment_counters([], LAcc, _CAcc) ->
  lists:reverse(LAcc);
augment_counters([Counter | Counters], LAcc, CAcc) ->
  Running = CAcc + Counter,
  augment_counters(Counters, [Running | LAcc], Running).
%% @doc Extracts the bucket-bounds list from a metric row tuple.
metric_buckets(Metric) ->
  element(?BUCKETS_POS, Metric).

%% @doc Sums the bucket counters of several metric rows column-wise
%% (used to merge the per-scheduler shards of one metric).
reduce_buckets_counters(Metrics) ->
  ABuckets =
    [sub_tuple_to_list(Metric, ?BUCKETS_START,
                       ?BUCKETS_START + length(metric_buckets(Metric)))
     || Metric <- Metrics],
  [lists:sum(Bucket) || Bucket <- transpose(ABuckets)].
%% @doc Transposes a rectangular list-of-lists (rows become columns).
%% Returns [] for an empty matrix as well as for empty rows; previously
%% `transpose([])' crashed with function_clause, which made callers
%% such as reduce_buckets_counters/1 fail on an empty metric list.
transpose([]) -> [];
transpose([[]|_]) -> [];
transpose(M) ->
  [lists:map(fun hd/1, M) | transpose(lists:map(fun tl/1, M))].
%% @doc Tuple position of the sum field: right after the last bucket
%% counter. Accepts either a metric row tuple or a plain bounds list.
sum_position(Metric) when is_tuple(Metric) ->
  ?BUCKETS_START + length(metric_buckets(Metric));
sum_position(Buckets) when is_list(Buckets) ->
  ?BUCKETS_START + length(Buckets).

%% @doc Current sum stored in a metric row.
sum(Metric) ->
  element(sum_position(Metric), Metric).

%% @doc Total of the sums of several metric rows (scheduler shards).
reduce_sum(Metrics) ->
  lists:sum([element(sum_position(Metric), Metric) || Metric <- Metrics]).

%% @doc Like reduce_sum/1 but converted back to the MF's duration unit.
reduce_sum(MF, Metrics) ->
  DU = prometheus_metric:mf_duration_unit(MF),
  prometheus_time:maybe_convert_to_du(DU, reduce_sum(Metrics)).

%% @doc Builds one exposition metric from aggregated per-label data:
%% zips unit-converted bucket bounds with cumulative counters; the last
%% cumulative counter doubles as the total observation count, and the
%% last element of `Stat' is the raw sum.
create_histogram_metric(Labels, DU, Buckets, LabelValues, Stat) ->
  Fun = fun(Bound) ->
            prometheus_time:maybe_convert_to_du(DU, Bound)
        end,
  BoundValues = lists:sublist(Stat, 1, length(Buckets)),
  BCounters = augment_counters(BoundValues),
  Buckets1 = lists:zipwith(fun(Bound, BCounter) ->
                               {Bound, BCounter}
                           end,
                           lists:map(Fun, Buckets), BCounters),
  histogram_metric(lists:zip(Labels, LabelValues),
                   Buckets1,
                   lists:last(BCounters),
                   prometheus_time:maybe_convert_to_du(DU, lists:last(Stat))).
%% @doc Removes every metric row of `Registry' (any name / any labels)
%% whose tuple carries `length(Buckets)' bucket-counter columns.
delete_metrics(Registry, Buckets) ->
  BoundCounters = lists:duplicate(length(Buckets), '_'),
  MetricSpec = [{Registry, '_', '_', '_'}, '_'] ++ BoundCounters ++ ['_'],
  ets:match_delete(?TABLE, list_to_tuple(MetricSpec)).
%% @doc Returns the tuple elements at positions Pos .. Size-1 as a list;
%% empty when Pos >= Size (Size itself is exclusive).
sub_tuple_to_list(Tuple, Pos, Size) when Pos < Size ->
  [element(Index, Tuple) || Index <- lists:seq(Pos, Size - 1)];
sub_tuple_to_list(_Tuple, _Pos, _Size) -> [].
%% @doc 1-based index of the first element of `List' satisfying `Pred',
%% or 0 when no element does.
position(List, Pred) ->
  position(List, Pred, 1).

position([Head | Tail], Pred, Index) ->
  Matched = Pred(Head),
  if
    Matched -> Index;
    true    -> position(Tail, Pred, Index + 1)
  end;
position([], _Pred, _Index) ->
  0.
%% @doc 0-based sequence of shard indexes (?WIDTH shards in total).
schedulers_seq() ->
  lists:seq(0, ?WIDTH-1).

%% @doc ets key for a metric shard: the logical id plus a shard index
%% derived from the current scheduler id, spreading write contention
%% over ?WIDTH rows per metric. The shard is taken with a bitmask, so
%% ?WIDTH is presumably a power of two — confirm the macro definition.
key(Registry, Name, LabelValues) ->
  X = erlang:system_info(scheduler_id),
  Rnd = X band (?WIDTH-1),
  {Registry, Name, LabelValues, Rnd}.
%% @doc Distinct label-value heads of raw ets match-result rows (each
%% row starts with its label values), sorted and deduplicated.
collect_unique_labels(MFValues) ->
  Labels = lists:filtermap(fun([LabelValues | _]) -> {true, LabelValues};
                              (_) -> false
                           end, MFValues),
  lists:usort(Labels).
%% @doc Column-wise sums of the value columns of every match-result row
%% whose label values equal `Labels' (merges the scheduler shards of
%% one labeled series into a single stat list).
reduce_label_values(Labels, MFValues) ->
  [lists:sum(C)
   || C <- transpose([V || [L | V] <- MFValues, L == Labels])].
create_histogram(Name, Help, Data) ->
prometheus_model_helpers:create_mf(Name, Help, histogram, ?MODULE, Data). | src/metrics/prometheus_histogram.erl | 0.794863 | 0.590897 | prometheus_histogram.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_kv_bs_keys: Key encoding/decoding for bigsets
%%
%% Copyright (c) 2007-2016 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%
%% @doc
%%
%% This module contains the functions to generate binary keys for
%% decomposed "BigSets" that sort correctly in leveldb, and the
%% functions to decode such binary keys into erlang terms.
%%
%% Bigset key format matters as we are currently tied to leveldb. The
%% key scheme/format ensures that all the keys for a set S are
%% logically grouped, contiguously. The keys for set S are further
%% ordered so that the 1st key(s) for a set are the logical clocks
%% key(s) (@see `clock_key/2'), followed by the tombstone keys (@see
%% `tombstone_key/2'), followed by the elements themselves (@see
%% `insert_member_key/4'). Each element key contains the element (an
%% opaque binary), and the dot that supports the element. The element
%% keys are ordered by the natural sort order of the elements
%% themselves, and then the actors, and finally by the event counter
%% of the dot. The final key for a set is the end key (@see
%% `end_key/1') which signals the end of the set.
%%
%% All bigset keys are made up of NULL-byte terminated binary
%% fields. The fields are Set-name, key-type, element, actor,
%% event-counter. Apart from set-name, all fields are optional.
%%
%% All bigset keys also have a common prefix: a "Magic Byte" that
%% sorts before sext encoded riak_object and 2i keys, and an 8-bit
%% integer that is the version for the key format.
%%
%% @end
%% -------------------------------------------------------------------
-module(bigset_keys_nulls).
-behaviour(bigset_keys).
-include("bigset.hrl").
-export([add_comparator_opt/1,
clock_key/2,
decode_key/1,
decode_set/1,
end_key/1,
insert_member_key/4,
is_actor_clock_key/3,
tombstone_key/2
]).
-ifdef(EQC).
-export([
eqc_check/1,
eqc_check/2,
prop_ordered/0,
run/1,
run/2
]).
-include_lib("eqc/include/eqc.hrl").
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-define(NULL, $\0).
%% @doc No comparator option is needed for this key scheme (keys are
%% plain binaries ordered byte-wise), so `Opts' is returned unchanged.
add_comparator_opt(Opts) ->
  Opts.

%% @private common preamble for any set key. Magic byte and version
%% header for `Set'
prefix(Set) ->
  <<1, %%Magic byte
    1:8/big-unsigned-integer, %% version
    Set/binary, %% Set
    ?NULL
  >>.
%% @private Trailing length header: the 64-bit big-endian sizes of the
%% set, element and actor fields. Appended to every key so decode_key/1
%% can split the NULL-separated fields; being a suffix it does not
%% affect the sort order of the keys.
parse_info(Set, Element, Actor) ->
  SetLen = byte_size(Set),
  ElementLen = byte_size(Element),
  ActorLen = byte_size(Actor),
  <<SetLen:64/big-unsigned-integer,
    ElementLen:64/big-unsigned-integer,
    ActorLen:64/big-unsigned-integer>>.
%% @private Reads the three 64-bit field lengths stored in the final
%% 24 bytes of an encoded key.
parse_info(Bin) when is_binary(Bin) ->
  PrefixLen = byte_size(Bin) - 24,
  <<_:PrefixLen/binary,
    SetLen:64/big-unsigned-integer,
    ElementLen:64/big-unsigned-integer,
    ActorLen:64/big-unsigned-integer>> = Bin,
  {SetLen, ElementLen, ActorLen}.
%% @doc a clock key for `Set' and `Actor'
%% Type tag $c sorts before tombstone ($d), element ($e) and end ($z)
%% tags, so clock keys come first within a set's key range.
clock_key(Set, Actor) ->
  ParseInfo = parse_info(Set, <<>>, Actor),
  <<(prefix(Set))/binary,
    $c,
    ?NULL,
    ?NULL, %% empty element field
    Actor/binary,
    ?NULL,
    ?NULL,
    ParseInfo/binary
  >>.

%% @doc a tombstone key for `Set' and `Actor'
tombstone_key(Set, Actor) ->
  ParseInfo = parse_info(Set, <<>>, Actor),
  <<(prefix(Set))/binary,
    $d,
    ?NULL,
    ?NULL, %% empty element field
    Actor/binary,
    ?NULL,
    ?NULL,
    ParseInfo/binary
  >>.

%% @doc the key for an `Element' of `Set' written with the dot of
%% `Actor' and `Cnt'
%% Element keys sort by element bytes, then actor, then the big-endian
%% 64-bit event counter.
insert_member_key(Set, Element, Actor, Cnt) ->
  ParseInfo = parse_info(Set, Element, Actor),
  <<(prefix(Set))/binary,
    $e,
    ?NULL,
    Element/binary,
    ?NULL,
    Actor/binary,
    ?NULL,
    Cnt:64/big-unsigned-integer,
    ?NULL,
    ParseInfo/binary
  >>.

%% @doc the end key for `Set'
%% $z sorts after every other type tag, closing the set's key range.
end_key(Set) ->
  ParseInfo = parse_info(Set, <<>>, <<>>),
  <<(prefix(Set))/binary,
    $z,
    ?NULL,
    ?NULL,
    ?NULL,
    ?NULL,
    ParseInfo/binary
  >>.
%% @doc decode_set pulls the set binary from the
%% key. Returns a binary(), the set name.
%% (Spec fixed: it previously claimed list(), contradicting both the
%% doc above and the code, which clearly binds `Set' as a binary.)
-spec decode_set(key()) -> binary().
decode_set(<<1, 1:8/big-unsigned-integer, Rest/binary>>) ->
  {SetLen, _, _} = parse_info(Rest),
  <<Set:SetLen/binary, _/binary>> = Rest,
  Set.
%% @doc decode key into tagged tuple of its constituent parts
-spec decode_key(key()) -> tuple().
decode_key(<<1, 1:8/big-unsigned-integer, Rest/binary>>) ->
  {SetLen, ElementLen, ActorLen} = parse_info(Rest),
  %% skip the NULL after the set name, take the 1-byte type tag,
  %% skip its NULL separator
  <<Set:SetLen/binary, _:1/binary, Type:1/binary, _:1/binary, Key/binary>> = Rest,
  decode_type(Type, Set, Key, ActorLen, ElementLen).

%% @private internal. Decode the type specific key
%% The literal-0 length arguments double as sanity checks: clock and
%% tombstone keys carry no element, the end key neither element nor
%% actor.
decode_type(<<$c>>, Set, Key, ActorLen, 0) ->
  <<_:1/binary, %% no element
    Actor:ActorLen/binary,
    _/binary %% The rest
  >> = Key,
  {clock, Set, Actor};
decode_type(<<$d>>, Set, Key, ActorLen, 0) ->
  <<_:1/binary, %% no element
    Actor:ActorLen/binary,
    _/binary %% The rest
  >> = Key,
  {tombstone, Set, Actor};
decode_type(<<$z>>, Set, _Key, 0, 0) ->
  {end_key, Set};
decode_type(<<$e>>, Set, Key, ActorLen, ElementLen) ->
  <<
    Element:ElementLen/binary,
    _:1/binary,
    Actor:ActorLen/binary,
    _:1/binary,
    Cnt:64/big-unsigned-integer,
    _/binary %% The rest
  >> = Key,
  {element, Set, Element, Actor, Cnt}.

%% @doc True when `Key' is exactly the clock key for `Set'/`Actor'.
is_actor_clock_key(Set, Actor, Key) ->
  Key == clock_key(Set, Actor).
-ifdef(EQC).
-define(NUMTESTS, 1000).
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) ->
io:format(user, Str, Args) end, P)).
%% @doc expose the properties to eunit test framework
%% NOTE(review): relies on eunit's ?_assertEqual macro, but eunit.hrl is
%% included only under -ifdef(TEST); building with EQC defined and TEST
%% undefined would presumably fail to compile — verify the build flags.
eqc_test_() ->
  {timeout, 20, [
                 ?_assertEqual(true, eqc:quickcheck(eqc:testing_time(10, ?QC_OUT(prop_ordered()))))
                ]}.

%% @doc to simplify running a given property `Prop'
run(Prop) ->
  run(Prop, ?NUMTESTS).

%% @doc simplify running `Prop' using `Count' for eqc:numtests/2
run(Prop, Count) ->
  eqc:quickcheck(eqc:numtests(Count, Prop)).

%% @doc run eqc:check/1 for the given property
eqc_check(Prop) ->
  eqc:check(Prop).

%% @doc run eqc:check/1 using the stored counterexample from `File'
eqc_check(Prop, File) ->
  {ok, Bytes} = file:read_file(File),
  CE = binary_to_term(Bytes),
  eqc:check(Prop, CE).
-define(ELEMENTS, [<<"A">>,<<"B">>,<<"C">>,<<"D">>,<<"X">>,<<"Y">>,<<"Z">>]).
-define(SETS, ?ELEMENTS).
-define(ACTORS, ?SETS).
-record(fold_acc, {
cnt=0,
type,
set,
actor,
element,
event,
bad
}).
%% @doc does the bs key scheme really sorts as required. Has side
%% effects. Creates, writes to, deletes from, and destroys a leveldb
%% instance.
%% Writes a shuffled blob of generated keys, then folds over the db
%% checking (via fold/2 below) that leveldb yields them in the order
%% the scheme promises, and that no key was lost or duplicated.
prop_ordered() ->
  ?SETUP(fun() ->
             Root = "test/eleveldb-backend-eqc",
             {ok, Ref} = start_eleveldb(Root),
             put(bs_keys_level, {ok, Ref}),
             fun() -> destroy_eleveldb(Root) end
         end,
         ?FORALL(Keys, gen_keys(),
                 ?FORALL(Writes, gen_writes(lists:flatten(Keys)),
                         begin
                           %% write the writes as a big mixed up blob
                           %% TODO should probably write them in broken
                           %% up chunks by Set,eh?
                           {ok, Ref} = get(bs_keys_level),
                           ok = eleveldb:write(Ref, Writes, [{sync, false}]),
                           Acc = #fold_acc{},
                           %% verify the order is as expected, ok
                           AccFinal =
                             try
                               eleveldb:fold(Ref, fun fold/2, Acc, [{iterator_refresh, true}])
                             catch
                               {break, Acc2} ->
                                 Acc2
                             end,
                           ok = eleveldb:write(Ref, to_deletes(Writes), [{sync, true}]),
                           measure(writes, length(Writes),
                                   ?WHENFAIL(
                                      begin
                                        io:format("failed with acc ~p~n", [AccFinal]),
                                        io:format("keys were ~p~n", [[decode_whenfail(K) || {put, K, <<>>} <- Writes]])
                                      end,
                                      conjunction([{right_number, equals(length(ordsets:from_list(Writes)), AccFinal#fold_acc.cnt)},
                                                   {no_bad, equals(undefined, AccFinal#fold_acc.bad)}])))
                         end))).

%% @doc decode a key for failure output, falling back to the raw
%% binary when decoding itself crashes.
decode_whenfail(K) ->
  try
    decode_key(K)
  catch _:_ ->
      K
  end.
%% @doc transform writes to deletes. Part of cleaning up leveldb
%% between property iterations.
to_deletes(Writes) ->
  lists:filtermap(fun({put, Key, _Value}) -> {true, {delete, Key}};
                     (_) -> false
                  end, Writes).
%% @doc fold is state machine where the transition is only valid if
%% the event of the key follows in order from the previous one.
%% Throws {break, Acc} (caught by the property) on the first
%% out-of-order key, recording the offending decoded key in
%% #fold_acc.bad.
fold({Key, <<>>}, Acc=#fold_acc{set=undefined, cnt=0}) ->
  %% first key, must be a clock key
  case decode_key(Key) of
    {clock, Set, Actor} ->
      Acc#fold_acc{set=Set, actor=Actor, type=clock, cnt=1};
    Decoded ->
      throw({break, Acc#fold_acc{bad=Decoded}})
  end;
fold({Key, <<>>}, Acc=#fold_acc{set=Set, cnt=Cnt, type=clock, actor=Actor}) ->
  %% last key was a clock, all that's permissible is a clock key
  %% with a greater actor, a ts key, an element key, or an end key
  %% all for this set
  case decode_key(Key) of
    {clock, Set, Actor2} when Actor2 > Actor ->
      Acc#fold_acc{set=Set, actor=Actor2, type=clock, cnt=Cnt+1};
    {tombstone, Set, AnyActor} ->
      Acc#fold_acc{set=Set, actor=AnyActor, type=tombstone, cnt=Cnt+1};
    {element, Set, Element, AnyActor, Event} ->
      Acc#fold_acc{set=Set, actor=AnyActor, type=element,
                   element=Element,
                   event=Event,
                   cnt=Cnt+1};
    {end_key, Set} ->
      Acc#fold_acc{set=Set, actor=undefined, type=end_key,
                   element=undefined,
                   event=0,
                   cnt=Cnt+1};
    Decoded ->
      %% Anything else is broken
      throw({break, Acc#fold_acc{bad=Decoded}})
  end;
fold({Key, <<>>}, Acc=#fold_acc{set=Set, cnt=Cnt, type=tombstone, actor=Actor}) ->
  %% Last key was a ts, only TS, element, last key for same set
  %% permitted
  case decode_key(Key) of
    {tombstone, Set, AnyActor} when AnyActor > Actor ->
      Acc#fold_acc{set=Set, actor=AnyActor, type=tombstone, cnt=Cnt+1};
    {element, Set, Element, AnyActor, Event} ->
      Acc#fold_acc{set=Set, actor=AnyActor, type=element,
                   element=Element,
                   event=Event,
                   cnt=Cnt+1};
    {end_key, Set} ->
      Acc#fold_acc{set=Set, actor=undefined, type=end_key,
                   element=undefined,
                   event=0,
                   cnt=Cnt+1};
    Decoded ->
      %% Anything else is broken
      throw({break, Acc#fold_acc{bad=Decoded}})
  end;
fold({Key, <<>>}, Acc=#fold_acc{set=Set, cnt=Cnt, type=element, actor=Actor, element=Element, event=Event}) ->
  %% Last key was an element key, only an element key with same
  %% element or higher. If same element actor or cnt must be higher. Or end_key
  case decode_key(Key) of
    {element, Set, Element, Actor, Event1} when Event1 > Event ->
      Acc#fold_acc{event=Event1,
                   cnt=Cnt+1};
    {element, Set, Element2, AnyActor, Event1} when Element2 > Element ->
      Acc#fold_acc{event=Event1,
                   actor=AnyActor,
                   element=Element2,
                   cnt=Cnt+1};
    {element, Set, Element, Actor2, Event1} when Actor2 > Actor ->
      Acc#fold_acc{event=Event1,
                   element=Element,
                   actor=Actor2,
                   cnt=Cnt+1};
    {end_key, Set} ->
      Acc#fold_acc{set=Set, actor=undefined, type=end_key,
                   element=undefined,
                   event=0,
                   cnt=Cnt+1};
    Decoded ->
      %% Anything else is broken
      throw({break, Acc#fold_acc{bad=Decoded}})
  end;
fold({Key, <<>>}, Acc=#fold_acc{set=Set, type=end_key, cnt=Cnt}) ->
  %% after an end key only the clock key of a strictly greater set may
  %% follow
  case decode_key(Key) of
    {clock, Set2, Actor} when Set2 > Set ->
      Acc#fold_acc{set=Set2, actor=Actor, type=clock, cnt=Cnt+1};
    Decoded ->
      throw({break, Acc#fold_acc{bad=Decoded}})
  end.
%% @doc @TODO use riak_kv_eleveldb_backend, when ready
%% Opens (or recreates) a leveldb instance under `Root', remembering
%% the handle in the process dictionary between property runs.
start_eleveldb(Root) ->
  DataDir = Root,
  Opts = [{create_if_missing, true},
          {write_buffer_size, 1024*1024},
          {max_open_files, 20}],
  case get(Root) of
    {ok, _OldRef} ->
      destroy_eleveldb(Root);
    _ ->
      {ok, Ref} = eleveldb:open(DataDir, Opts),
      put(Root, {ok, Ref}),
      {ok, Ref}
  end.

%% @doc ensure that there is no lingering eleveldb left over. Called
%% by property teardown.
destroy_eleveldb(Root) ->
  case get(bs_keys_level) of
    {ok, Ref} ->
      eleveldb:close(Ref),
      ?assertCmd("rm -rf " ++ Root),
      erase(Root);
    _ ->
      ?assertCmd("rm -rf " ++ Root)
  end.
%% @doc take the generated keys and turn them into leveldb
%% writes. Shuffle them.
gen_writes(Keys) ->
  ?LET(Shuffed, shuffle(Keys), [{put, K, <<>>} || K <- Shuffed]).

%% @doc generate a bunch of bigset keys that represent a number of
%% bigsets
gen_keys() ->
  ?LET({Actors, Sets},
       {actors(), sets()},
       [gen_keys(Actors, Set) || Set <- Sets]).

%% @doc generate a list of binary actor IDs
actors() ->
  list_o_bins().

%% @doc generate a list of binary set names
sets() ->
  list_o_bins().

%% @doc non-empty list of binaries sharing one size drawn from 10..100
list_o_bins() ->
  ?LET(Size, choose(10, 100), non_empty(list(binary(Size)))).

%% @doc generate an element
set_element() ->
  non_empty(binary(100)).

%% @doc generate all the keys for the `Set' That is clock(s),
%% tombstone(s), element and end keys.
gen_keys(Actors, Set) ->
  ?LET(SetSize, int(),
       gen_element_keys(Set, Actors, abs(SetSize)) ++
         gen_clock_ts_keys(Actors, Set) ++
         [end_key(Set)]).

%% @doc for the given `Set' generate `Size' many keys.
gen_element_keys(Set, Actors, Size) ->
  [gen_element_key(Set, Actors) || _ <- lists:seq(1, Size)].
%% @doc for the given `Set' generate a set element key with a dot from
%% an `Actor' in `Actors'
%% BUG FIX: `Element' and `Actor' were previously passed swapped —
%% insert_member_key/4 takes (Set, Element, Actor, Cnt) — so generated
%% keys carried the actor in the element field and vice versa.
gen_element_key(Set, Actors) ->
  ?LET({Actor, Element, Cnt},
       {elements(Actors),
        set_element(),
        int()},
       insert_member_key(Set, Element, Actor,
                         %% lazy non zero
                         abs(Cnt)+1)).
%% @doc clock and tombstone keys for `Set' for `Actors'
%% (result order is irrelevant: gen_writes/1 shuffles before writing)
gen_clock_ts_keys(Actors, Set) ->
  lists:foldl(fun(Actor, Acc) ->
                  [clock_key(Set, Actor), tombstone_key(Set, Actor) | Acc]
              end,
              [],
              Actors).
-endif. | src/bigset_keys_nulls.erl | 0.658637 | 0.462898 | bigset_keys_nulls.erl | starcoder |
%%%---------------------------------------------------------------------------
%%% @doc
%%% Command line arguments handler.
%%%
%%% Module implementing this behaviour will be used for parsing arguments
%%% passed in command line, decoding from them an operation to be performed
%%% (usually starting daemon and sending it one of supported administrative
%%% commands), and executing it.
%%%
%%% == Expected typical usage ==
%%%
%%% === command line script ===
%%%
%%% `gen_indira_cli' behaviour is intended to structure processing command
%%% line arguments. {@link gen_indira_cli:execute/3} facilitates this in
%%% `escript' scripts, (`escript' is a part of runtime that is well-suited
%%% for this task, but not the only one). Such a script could look like
%%% this:
%%%
%```
%#!/usr/bin/escript
%
%main(Args) ->
% AdminSocket = "/var/run/example_app/control",
% PidFile = "/var/run/example_app/pid",
% ConfigFile = "/etc/example_app.conf",
% Defaults = [AdminSocket, PidFile, ConfigFile],
% case gen_indira_cli:execute(Args, example_cli, Defaults) of
% ok ->
% ok;
% help ->
% io:fwrite("~s~n", [example_cli:usage()]);
% {error, Reason} ->
% io:fwrite(standard_error, "~p~n", [Reason]), % not a pretty message
% io:fwrite(standard_error, "~s~n", [example_cli:usage()]),
% halt(1)
% end.
%'''
%%%
%%% === command line handler module ===
%%%
%%% The other side is a module that contains operations' code, i.e. the one
%%% implementing `gen_indira_cli' behaviour.
%%%
%%% Most of the commands are expected to be sent to daemon instance through
%%% administrative connection ({@link gen_indira_socket}), though the
%%% details may vary (e.g. reaction to refused connections).
%%%
%%% The single distinguished operation is to start the application that is
%%% the core of a daemon. This may be done using {@link application:start/2}
%%% or {@link indira:start_rec/2} (though one needs to remember to configure
%%% and start Indira). There is also {@link indira:daemonize/2}, which
%%% simplifies starting necessary applications a little.
%%%
%%% Starting a daemon could look like this:
%%%
%```
%-module(example_cli).
%-behaviour(gen_indira_cli).
%...
%
%handle_command(start = _Command, Options) ->
% AdminSocket = get_admin_socket(Options),
% case configure_application(Options) of
% ok ->
% indira:daemonize(example_app, [
% {listen, [{indira_unix, AdminSocket}]},
% {command, {example_command_handler, []}}
% ]);
% {error, Reason} ->
% {error, Reason}
% end.
%
%parse_arguments(Args, [AdminSocket, PidFile, ConfigFile] = _Defaults) ->
% case gen_indira_cli:folds(...) of
% {ok, {start, Options}} -> {ok, start, Options};
% ...
% end.
%'''
%%%
%%% == Expected callbacks ==
%%%
%%% <ul>
%%% <li>`parse_arguments(Args, DefaultValues)' -- determine what operation
%%% to execute from arguments passed in command line
%%%
%%% Returned value:
%%% <ul>
%%% <li>{@type @{ok, command(), options()@}} -- execute a more complex
%%% command by calling `handle_command()'</li>
%%% <li>{@type @{send, socket_address(), command(), options()@}} --
%%% execute a simple command by calling `format_request()',
%%% sending returned request through administrative socket, and
%%% calling `handle_reply()' on the reply</li>
%%% <li>{@type help} -- print a help message to screen (e.g.
%%% <i>--help</i> option was provided)</li>
%%% <li>{@type @{error, Reason :: term()@}} -- signal an erroneous
%%% command line; returned verbatim from {@link execute/3}</li>
%%% </ul>
%%%
%%% Arguments:
%%% <ul>
%%% <li>`Args' ({@type [string()]}) -- arguments passed in command
%%% line</li>
%%% <li>`DefaultValues' ({@type term()}) -- arbitrary term passed to
%%%         {@link execute/3} that mainly allows moving hardcoded paths from
%%% module to `escript' script</li>
%%% </ul>
%%% </li>
%%% <li>`handle_command(Command, Options)' -- execute a command, for which
%%% simple track of sending a request with {@link send_one_command/4}
%%% and processing reply is not enough (e.g. starting the daemon
%%% itself); function returns {@type ok} or {@type @{error, term()@}}
%%% (returned verbatim to {@link execute/3} caller)
%%%
%%% Arguments:
%%% <ul>
%%% <li>`Command' ({@type command()}) -- command to be executed</li>
%%% <li>`Options' ({@type options()}) -- options set in command
%%% line</li>
%%% </ul>
%%% </li>
%%% <li>`format_request(Command, Options)' -- encode a command as
%%% a JSON-serializable structure, so it can be sent through
%%% administrative socket; function returns {@type @{ok, request()@}}
%%% or {@type @{error, Reason :: term()@}} (returned verbatim to
%%% {@link execute/3} caller)
%%%
%%% Arguments:
%%% <ul>
%%% <li>`Command' ({@type command()}) -- command to be sent to
%%% daemon</li>
%%% <li>`Options' ({@type options()}) -- options set in command
%%% line</li>
%%% </ul>
%%% </li>
%%% <li>`handle_reply(Reply, Command, Options)' -- process a reply to
%%% a command sent to daemon; function returns {@type ok} or {@type
%%% @{error, term()@}} (returned verbatim to {@link execute/3} caller)
%%%
%%% Arguments:
%%% <ul>
%%% <li>`Reply' ({@type reply()}) -- reply to `Command' received from
%%% daemon; all hashes in `Reply' are compatible with {@link
%%% orddict} module</li>
%%% <li>`Command' ({@type command()}) -- command (the original one,
%%% returned from `parse_arguments()') that was sent to
%%% daemon</li>
%%% <li>`Options' ({@type options()}) -- options set in command
%%% line</li>
%%% </ul>
%%% </li>
%%% <li>`format_error(Reason)' -- make a printable message from an error
%%% returned from a function from this module
%%%
%%% Arguments:
%%% <ul>
%%% <li>`Reason' ({@type term()}) -- second element of an error tuple
%%% (`{error,Reason}')</li>
%%% </ul>
%%% </li>
%%% </ul>
%%%
%%% @see gen_indira_command
%%% @see execute/3
%%% @see indira:daemonize/2
%%% @end
%%%---------------------------------------------------------------------------
-module(gen_indira_cli).
-export([execute/3]).
-export([folds/3, foldg/3]).
-export([send_one_command/4]).
-export([format_error/1]).
-export_type([command/0, options/0, request/0, reply/0, socket_address/0]).
%%%---------------------------------------------------------------------------
%%% types {{{
-type command() :: term().
%% Command to execute. Usually an atom is enough to describe what to do.
-type options() :: term().
%% Options that change details of the {@type command()}, like administrative
%% socket location, config file, etc.
-type request() :: indira_json:struct().
%% {@type command()} encoded as a JSON-serializable structure, ready to be
%% sent to daemon instance. See also {@link indira_json}.
-type reply() :: indira_json:struct().
%% Reply received from daemon to an administrative command. See also {@link
%% indira_json}.
-type socket_address() :: {module(), gen_indira_socket:connect_address()}.
%% A {@link gen_indira_socket} module and an address for it to use with
%% `send_one_line()' and `retry_send_one_line()'.
%%% }}}
%%%---------------------------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% behaviour callbacks
%%%---------------------------------------------------------------------------
-callback parse_arguments(Args :: [string()], Defaults :: term()) ->
{ok, command(), options()}
| {send, socket_address(), command(), options()}
| help
| {error, term()}.
-callback handle_command(Command :: command(), Options :: options()) ->
ok | {error, term()}.
-callback format_request(Command :: command(), Options :: options()) ->
{ok, request()} | {error, term()}.
-callback handle_reply(Reply :: reply(), Command :: command(),
Options :: options()) ->
ok | {error, term()}.
-callback format_error(Reason :: term()) ->
iolist() | binary().
%%%---------------------------------------------------------------------------
%%% command line execution
%%%---------------------------------------------------------------------------
%% @doc Execute an operation specified in command line using callback module.
%%
%% Error formats:
%% <ul>
%% <li>`{error, {send, Reason}}' on connection or (de)serialization error
%% (the same as for {@link send_one_command/4})</li>
%% <li>`{error, {bad_return_value, Value}}' when any of the callbacks
%% returns an invalid value</li>
%% <li>`{error, Reason}' when `parse_arguments()', `handle_command()',
%% `format_request()', or `handle_reply()' returns an error</li>
%% </ul>
-spec execute([string()], module(), term()) ->
ok | help | {error, SendError | ReturnValueError | CallbackError}
when SendError :: {send, bad_request_format | bad_reply_format | term()},
ReturnValueError :: {bad_return_value, term()},
CallbackError :: term().
%% Dispatches on the outcome of the CLI module's argument parsing; any
%% unrecognized return value is wrapped as {bad_return_value, _}.
execute(ArgList, CLIHandler, Defaults) ->
  ParseResult = CLIHandler:parse_arguments(ArgList, Defaults),
  case ParseResult of
    {ok, Command, Options} ->
      execute_command(CLIHandler, Command, Options);
    {send, {_SockMod, _SockAddr} = Address, Command, Options} ->
      send_command(CLIHandler, Command, Options, Address);
    help ->
      help;
    {error, Reason} ->
      {error, Reason};
    BadResult ->
      {error, {bad_return_value, BadResult}}
  end.
%%----------------------------------------------------------
%% execute actions returned by parse_arguments() {{{
%% @doc Pass a complex command to CLI module for execution.
%% Normalizes the callback's result: `ok' and `{error, _}' pass
%% through, anything else becomes a bad-return-value error.
-spec execute_command(module(), command(), options()) ->
  ok | {error, term()}.

execute_command(CLIHandler, Command, Options) ->
  Result = CLIHandler:handle_command(Command, Options),
  case Result of
    ok -> ok;
    {error, _} = Error -> Error;
    Other -> {error, {bad_return_value, Other}}
  end.
%% @doc Send a simple command to daemon and pass the reply to CLI module.
-spec send_command(module(), command(), options(), socket_address()) ->
ok | {error, Reason}
when Reason :: {send, bad_request_format | bad_reply_format | term()}
| {bad_return_value, term()}
| term().
%% Formats the request via the CLI module, ships it through the
%% administrative socket, and hands the decoded reply back to the CLI
%% module; transport/codec failures are tagged with `send' so callers
%% can distinguish them from callback errors.
send_command(CLIHandler, Command, Options, {SockMod, SockAddr} = _Address) ->
  case CLIHandler:format_request(Command, Options) of
    {ok, Request} ->
      case send_one_command(SockMod, SockAddr, Request, []) of
        {ok, Reply} ->
          case CLIHandler:handle_reply(Reply, Command, Options) of
            ok -> ok;
            {error, Reason} -> {error, Reason};
            Result -> {error, {bad_return_value, Result}}
          end;
        {error, Reason} ->
          {error, {send, Reason}}
      end;
    {error, Reason} ->
      {error, Reason};
    Result ->
      {error, {bad_return_value, Result}}
  end.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% sending a command through admin socket (from command line script)
%%%---------------------------------------------------------------------------
%%----------------------------------------------------------
%% send one command, timeout {{{
%% @doc Open a connection to daemon's Indira, send a command, receive a reply.
%%
%% `Module' implements {@link gen_indira_socket} behaviour.
%%
%% If `retry' option was specified, refused connection will result in
%% retrying to connect indefinitely (or until timeout occurs).
%%
%% `SendError' is a term that can be converted to a printable message with
%% `Module:format_error(SendError)'. See documentation of socket module
%% (e.g. {@link indira_unix}, {@link indira_udp}, {@link indira_tcp}) for
%% list of possible `SendError' values.
%%
%% NOTE: received hashes are compatible with {@link orddict} module.
%%
%% @see indira_unix
%% @see indira_udp
%% @see indira_tcp
-spec send_one_command(module(), gen_indira_socket:connect_address(),
request(), Options :: [Opt]) ->
{ok, reply()} | {error, Reason}
when Reason :: bad_request_format | bad_reply_format
| {socket, Module :: module(), SendError :: term()},
Opt :: {timeout, timeout()} | retry.
send_one_command(Module, Address, Command, Options) when is_list(Options) ->
  Timeout = proplists:get_value(timeout, Options, infinity),
  %% `retry' selects the socket module's reconnect-on-refusal variant
  Function = case proplists:get_bool(retry, Options) of
    true -> retry_send_one_line;
    false -> send_one_line
  end,
  %% encode request -> send line -> decode reply line; each stage maps
  %% its failure to a distinct error term
  case indira_json:encode(Command) of
    {ok, Line} ->
      case Module:Function(Address, Line, Timeout) of
        {ok, ReplyLine} ->
          case indira_json:decode(unicode:characters_to_list(ReplyLine)) of
            {ok, Reply} ->
              {ok, Reply};
            {error, badarg} ->
              {error, bad_reply_format}
          end;
        {error, Reason} ->
          {error, {socket, Module, Reason}}
      end;
    {error, badarg} ->
      {error, bad_request_format}
  end.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% command line arguments parsing
%%%---------------------------------------------------------------------------
%% @doc General fold over command line arguments.
%%
%% Function passed as an argument is called with the <em>list</em> of
%% remaining (unprocessed) command line arguments.
%%
%% The passed function decides how many arguments to consume by
%% returning tuple `{take, N, NewAcc}', so the next call will be with `N'
%% leading elements skipped.
%%
%% If `Fun' returns `{error, Reason}', whole iteration is terminated and
%% `{error, {Reason, Arg}}' is returned to the caller, with `Arg' being the
%% argument at which the error occurred.
%%
%% Anything else than `{take, N, _}' or `{error, _}' is considered to be
%% a new accumulator. The same can be achieved by returning `{take, 1, _}'.
%%
%% If `Fun' is two-argument function, iteration over arguments list has no
%% modifications to it.
%%
%% If `Fun' is three-argument, each `"--foo=XXX"' argument (leading two
%% dashes) is split on the first `"="' character and the right side of the
%% split (`"XXX"') is inserted into arguments list just after the raw option
%% `"--foo"', so it becomes `["--foo", "XXX", RestArgs ...]'. To signal that
%% this operation was done, `Fun(split, ArgList, Acc)' is called. Otherwise,
%% `Fun(single, ArgList, Acc)' is called.
%%
%% If `Fun(split, _, Acc)' doesn't return `{take, N, NewAcc}', it results in
%% an error (`{error, {excessive_value, Arg}}'). However, `Fun' can decide
%% it is OK to only consume option and leave its argument in the argument
%% list by returning `{take, 1, _}'.
-spec foldg(FoldFunction, term(), [string()]) ->
    {ok, NewAcc :: term()} | {error, {FoldError | term(), Arg :: string()}}
    when FoldError :: excessive_value,
         FoldFunction :: SimpleFoldFunction | SplitFoldFunction,
         SimpleFoldFunction :: fun(
           (ArgList :: [string(), ...], Acc :: term()) ->
             NewAcc :: term()
             | {take, pos_integer(), NewAcc :: term()}
             | {error, term()}
         ),
         SplitFoldFunction :: fun(
           (single | split, ArgList :: [string(), ...], Acc :: term()) ->
             NewAcc :: term()
             | {take, pos_integer(), NewAcc :: term()}
             | {error, term()}
         ).

%% dispatch on the callback's arity: a 2-argument fun gets the simple fold,
%% a 3-argument fun gets the `--foo=bar'-splitting fold
foldg(Fun, Acc, ArgList) when is_function(Fun, 2) ->
  foldg_simple(Fun, Acc, ArgList);
foldg(Fun, Acc, ArgList) when is_function(Fun, 3) ->
  foldg_split(Fun, Acc, ArgList).
%%----------------------------------------------------------
%% foldg_simple() {{{
%% @doc Simple variant of the general fold over command line options.
%% Arguments are passed to the callback verbatim: `--foo=bar' options are
%% not split on the `"="' character.
foldg_simple(_Fun, Acc, []) ->
  {ok, Acc};
foldg_simple(Fun, Acc, [Head | Tail] = Args) ->
  Result = Fun(Args, Acc),
  case Result of
    {take, Count, NextAcc} when is_integer(Count), Count > 0 ->
      %% callback consumed `Count' leading arguments
      foldg_simple(Fun, NextAcc, lists:nthtail(Count, Args));
    {error, Reason} ->
      %% abort, reporting the argument at which the error occurred
      {error, {Reason, Head}};
    NextAcc ->
      %% any other return value is a new accumulator (one argument consumed)
      foldg_simple(Fun, NextAcc, Tail)
  end.
%% }}}
%%----------------------------------------------------------
%% foldg_split() {{{
%% @doc Splitting general fold over options.
%% `--foo' options are subject to split on `"="' character, and the function
%% is passed an argument to indicate whether split took place for this
%% option or not.

foldg_split(_Fun, Acc, [] = _ArgList) ->
  {ok, Acc};
foldg_split(Fun, Acc, ["--" ++ _ = Arg | RestArgs] = ArgList) ->
  % split on "=" only the arguments looking like `--foo...'
  case strsplit(Arg, $=) of
    % no "=" present: pass the argument list through unchanged
    [Arg] -> foldg_split_no_value(Fun, Acc, ArgList);
    % "--name=value": insert the value right after the raw option name
    [ArgN, ArgV] -> foldg_split_value(Fun, Acc, [ArgN, ArgV | RestArgs])
  end;
foldg_split(Fun, Acc, [_ | _] = ArgList) ->
  % if it doesn't look like an option, don't split it on "="
  foldg_split_no_value(Fun, Acc, ArgList).
%% @doc Splitting general fold, worker for when no split was done.
%% The callback is told via the `single' tag that the head argument was
%% passed through unmodified.
foldg_split_no_value(Fun, Acc, [Arg | RestArgs] = ArgList) ->
  % at the head of `ArgList' is either a simple `--foo' option or a non-option
  case Fun(single, ArgList, Acc) of
    % user consumed N arguments of the list
    {take, N, NewAcc} when is_integer(N), N > 0 ->
      foldg_split(Fun, NewAcc, lists:nthtail(N, ArgList));
    {error, Reason} ->
      {error, {Reason, Arg}};
    % anything else is a new accumulator; consume one argument
    NewAcc ->
      foldg_split(Fun, NewAcc, RestArgs)
  end.
%% @doc Splitting general fold, worker for when split actually took place.
%% Unlike the `single' case, the callback MUST return `{take, N, _}' here;
%% a plain accumulator is rejected as `excessive_value', because the value
%% half of `--foo=bar' was only ever part of one original argument.
foldg_split_value(Fun, Acc, [Arg | _] = ArgList) ->
  % at the head of `ArgList' was a `--foo=bar' option and it was split into
  % two elements
  case Fun(split, ArgList, Acc) of
    % user consumed N arguments of the list
    % even if it was 1, user clearly said it was OK to use the value to this
    % option as a possibly new option
    {take, N, NewAcc} when is_integer(N), N > 0 ->
      foldg_split(Fun, NewAcc, lists:nthtail(N, ArgList));
    {error, Reason} ->
      {error, {Reason, Arg}};
    % it is an error to only consume one argument, when in fact it was just
    % a half of the original argument
    _NewAcc ->
      {error, {excessive_value, Arg}}
  end.
%% }}}
%%----------------------------------------------------------
%% @doc Simple fold over command line arguments.
%%
%% Function `Fun' is called with only a single command line argument at
%% a time. If the function returns `{need, N}', it will be immediately
%% called with a list of `N+1' arguments (current and the next `N'), (unless
%% the list of remaining arguments is shorter than `N', in which case the
%% whole iteration is terminated with `{error, {not_enough_args, Arg}}'
%% result).
%%
%% As with {@link foldg/3}, `Fun' returning `{error, Reason}' terminates the
%% iteration with result of `{error, {Reason, Arg}}' (on the call after
%% `{need, N}', only the first element of the list is used).
-spec folds(FoldFunction, term(), [string()]) ->
    {ok, NewAcc :: term()} | {error, {FoldError | term(), Arg :: string()}}
    when FoldError :: not_enough_args,
         FoldFunction :: fun(
           (Arg :: string() | [string(), ...], Acc :: term()) ->
             NewAcc :: term() | {need, pos_integer()} | {error, term()}
         ).

folds(_Fun, Acc, [] = _ArgList) ->
  {ok, Acc};
folds(Fun, Acc, [Arg | RestArgs] = _ArgList) ->
  case Fun(Arg, Acc) of
    % callback wants `N' further arguments; re-call it with `[Arg | N args]'
    {need, N} when is_integer(N), N > 0 ->
      folds_second_call(Fun, Acc, N, Arg, RestArgs);
    {error, Reason} ->
      {error, {Reason, Arg}};
    % everything else is a new accumulator
    NewAcc ->
      folds(Fun, NewAcc, RestArgs)
  end.
%%----------------------------------------------------------
%% folds_second_call() {{{
%% @doc Call {@link folds/3}-supplied function with requested args list.
%% To use when function requested `N' arguments to an option.
folds_second_call(Fun, Acc, N, Opt, ArgList) ->
case listsplit(ArgList, N) of
{OptArgs, RestArgs} ->
case Fun([Opt | OptArgs], Acc) of
{error, Reason} -> {error, {Reason, Opt}};
NewAcc -> folds(Fun, NewAcc, RestArgs)
end;
error ->
{error, {not_enough_args, Opt}}
end.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% string helpers
%%%---------------------------------------------------------------------------
%% @doc Split a string in two at the first occurrence of `SplitChar'.
%%
%% Returns a one-element list when `SplitChar' does not occur in `String',
%% otherwise a two-element list with the separator character removed.
-spec strsplit(string(), char()) ->
  [string(), ...].

strsplit(String, SplitChar) ->
  NotSep = fun (C) -> C =/= SplitChar end,
  case lists:splitwith(NotSep, String) of
    {Prefix, ""} ->
      %% separator never seen: prefix is the whole string
      [Prefix];
    {Prefix, [SplitChar | Suffix]} ->
      [Prefix, Suffix]
  end.
%% @doc Split a list in two, the first part containing exactly `N' elements.
%% Returns `error' when the list is shorter than `N'.
-spec listsplit(list(), pos_integer()) ->
  {list(), list()} | error.

listsplit(List, N) ->
  try lists:split(N, List) of
    Parts -> Parts
  catch
    %% lists:split/2 raises badarg when N exceeds the list length
    error:badarg -> error
  end.
%%%---------------------------------------------------------------------------
%% @doc Make a printable message from an error returned from a function from
%% this module.
%% Clauses are grouped by the function that produces the reason term; the
%% final clause is a catch-all for reasons produced by user callbacks.
-spec format_error(Reason :: term()) ->
  iolist().

%% `execute()'
format_error({send, bad_request_format}) ->
  format_error(bad_request_format);
format_error({send, bad_reply_format}) ->
  format_error(bad_reply_format);
format_error({send, {socket, _SockMod, _Reason} = Error}) ->
  % unwrap and reuse the `send_one_command()' formatting below
  format_error(Error);
format_error({bad_return_value, Return}) ->
  ["callback returned unexpected value ", format_term(Return),
    " (programmer's error)"];
%% `send_one_command()'
format_error(bad_request_format) ->
  "can't serialize command request (programmer's error)";
format_error(bad_reply_format) ->
  "can't deserialize reply to the command (programmer's error)";
format_error({socket, SockMod, Reason}) ->
  % delegate to the socket module that produced the error
  SockMod:format_error(Reason);
%% `foldg()'
format_error({excessive_value, Option}) when is_list(Option) ->
  ["unexpected argument for ", Option];
%% `folds()'
format_error({not_enough_args, Option}) when is_list(Option) ->
  ["missing argument for ", Option];
format_error(Reason) ->
  ["unrecognized error: ", format_term(Reason)].
%% @doc Serialize an arbitrary term to a single line of text.
%% Uses an effectively unlimited line length and no depth limit, so the
%% whole term is always printed on one line.
-spec format_term(term()) ->
  iolist().

format_term(Term) ->
  Column = 1,
  LineLength = 16#ffffffff, % effectively "never wrap"
  Depth = -1,               % no depth limit
  io_lib:print(Term, Column, LineLength, Depth).
%%%---------------------------------------------------------------------------
%%% vim:ft=erlang:foldmethod=marker
%% -------------------------------------------------------------------
%% @doc Handles interaction with the GUI(s).<br/>
%% This module interfaces possibly multiple external GUIs to the rest
%% of the application. Each GUI has its own {@link gui_port} process.
%% The functions in this module handle the delivery of updates to all
%% the GUIs and deliver commands from all the GUIs to the {@link dj}.
%% <br/>
%% This module starts a single {@link gui_port_sup} which then starts
%% and supervises a list of {@link gui_port}s.
%% @end
%% -------------------------------------------------------------------
-module(gui).
-include("../global_types.hrl").
%% application programming interface
-export([
start_link/1,
worker_updated/8,
round_started/1,
round_ended/1,
worker_input_changed/1,
problem_chosen/1,
problem_state_changed/1,
ack_save_game_state/1,
ack_load_game_state/1,
all_data/7
]).
%% called by gui_port module
-export([
handle_json/1,
get_all_data/0
]).
%% ===================================================================
%% application programming interface
%% ===================================================================
%% @doc Start the gui port supervisor and all the GUIs.
%% `ExtProgList' lists the external GUI programs; the supervisor starts one
%% {@link gui_port} per entry (see the module overview).
-spec start_link([file:filename()]) -> {ok, pid()} | ignore | {error, _}.
start_link(ExtProgList) ->
    gui_port_sup:start_link(ExtProgList).
%% @doc Notify GUIs: A worker has changed.
%% Send updated worker to all GUIs.
%% `Proposition' may be `none', encoded as JSON `null'; `Blocked' is either
%% the atom `no' or `{idx, Idx}', encoded as the string `"no"' or as an
%% object `{"idx": Idx}'.
-spec worker_updated(worker_id(), string() | none, string(), integer(), integer(), integer(), no | {idx, non_neg_integer()}, boolean()) -> ok.
worker_updated(WorkerID, Proposition, PropCaption, Score, ProcessedScore, ProblemScore, Blocked, Working) ->
    %% convert values to JSON-convertible types
    JSONProp = case Proposition of
        none -> null;
        PropString -> list_to_binary(PropString)
    end,
    JSONBlock = case Blocked of
        no -> utils:atom_to_binary(no);
        {idx, Idx} -> [{<<"idx">>, Idx}]
    end,
    %% worker data is sent as a positional JSON array, in this fixed order
    send_event_to_gui("worker updated",
        [{"worker data",
            [
                utils:atom_to_binary(WorkerID),
                JSONProp,
                list_to_binary(PropCaption),
                Score,
                ProcessedScore,
                ProblemScore,
                JSONBlock,
                Working
            ]
        }]
    ).
%% @doc Notify GUIs: A round has been started.
%% Broadcasts the number of the round that just started.
-spec round_started(non_neg_integer()) -> ok.
round_started(RoundNumber) ->
    send_event_to_gui("round started",
        [{"round number", RoundNumber}]
    ).
%% @doc Notify GUIs: The round has ended.
%% Broadcasts the number of the round that just ended.
-spec round_ended(non_neg_integer()) -> ok.
round_ended(RoundNumber) ->
    send_event_to_gui("round ended",
        [{"round number", RoundNumber}]
    ).
%% @doc Notify GUIs: The worker input for the next round has been generated.
%% Send the new worker input to all GUIs.
%% Each input string is converted to a binary for JSON serialization.
-spec worker_input_changed([string()]) -> ok.
worker_input_changed(InputList) ->
    send_event_to_gui("worker input changed",
        [{"worker input", lists:map(fun list_to_binary/1, InputList)}]
    ).
%% @doc Notify GUIs: A new problem is selected.
%% Send the new problem index to all GUIs (an index into the problem list
%% delivered by {@link all_data/7}).
-spec problem_chosen(non_neg_integer()) -> ok.
problem_chosen(ProblemIdx) ->
    send_event_to_gui("problem chosen",
        [{"problem idx", ProblemIdx}]
    ).
%% @doc Notify GUIs: The state of the current problem has been modified.
%% Send the new problem state (an opaque string) to all GUIs.
-spec problem_state_changed(string()) -> ok.
problem_state_changed(NewState) ->
    send_event_to_gui("problem state changed",
        [{"problem state", list_to_binary(NewState)}]
    ).
%% @doc Notify GUIs of success/failure of a "save game" operation.
%% `Result' is `ok' or a POSIX error atom; it is serialized as a string.
-spec ack_save_game_state(ok | file:posix()) -> ok.
ack_save_game_state(Result) ->
    send_event_to_gui("save game state",
        [{"result", utils:atom_to_binary(Result)}]
    ).
%% @doc Notify GUIs of success/failure of a "load game" operation.
%% `Result' is an atom describing the outcome; it is serialized as a string.
-spec ack_load_game_state(atom()) -> ok.
ack_load_game_state(Result) ->
    send_event_to_gui("load game state",
        [{"result", utils:atom_to_binary(Result)}]
    ).
%% @doc Send all the data that the GUIs are allowed to know.
%% This can be used to (re)initialize a GUI.
%% Worker tuples and problem tuples are flattened into positional JSON
%% arrays; `none' propositions become `null' and a `{idx, Idx}' block
%% becomes an object `{"idx": Idx}' (same encoding as in
%% {@link worker_updated/8}).
-spec all_data(Running :: boolean(),
               WorkerDataList :: [Worker],
               ProblemList :: [Problem],
               ProblemIdx :: non_neg_integer(),
               RoundNumber :: non_neg_integer(),
               State :: string(),
               WorkerInput :: [string()]) -> ok when
      Worker :: {ID :: worker_id(),
                 Name :: string(),
                 RankingGroup :: string(),
                 Proposition :: string(),
                 PropCaption :: string(),
                 PropScore :: integer(),
                 ProblemScore :: integer(),
                 Blocked :: no | {idx, non_neg_integer()},
                 Working :: boolean()},
      Problem :: {string(), string(), pos_integer(), string()}.
all_data(Running, WorkerDataList, ProblemList, ProblemIdx, RoundNumber, State, WorkerInput) ->
    %% convert values to JSON-convertible types
    ProblemListBin = lists:map(fun({Description, Specification, AnswerTime, StartState}) ->
            [
                list_to_binary(Description),
                list_to_binary(Specification),
                AnswerTime,
                list_to_binary(StartState)
            ]
        end,
        ProblemList),
    WorkerDataListBin = lists:map(fun({ID, Name, RankingGroup, Proposition, PropCaption, Score,
                                       ProcessedScore, ProblemScore, Blocked, Working}) ->
            JSONProp = case Proposition of
                none -> null;
                PropString -> list_to_binary(PropString)
            end,
            %% BUGFIX: use the same JSON key ("idx") as worker_updated/8 so
            %% GUIs can decode the blocked index uniformly for both events
            %% (this previously sent "id" here but "idx" in "worker updated")
            JSONBlock = case Blocked of
                no -> utils:atom_to_binary(no);
                {idx, Idx} -> [{<<"idx">>, Idx}]
            end,
            [
                utils:atom_to_binary(ID),
                list_to_binary(Name),
                list_to_binary(RankingGroup),
                JSONProp,
                list_to_binary(PropCaption),
                Score,
                ProcessedScore,
                ProblemScore,
                JSONBlock,
                Working
            ]
        end,
        WorkerDataList),
    send_event_to_gui("all data",
        [
            {"running", Running},
            {"workers", WorkerDataListBin},
            {"problems", ProblemListBin},
            {"problem idx", ProblemIdx},
            {"round", RoundNumber},
            {"worker input", lists:map(fun list_to_binary/1, WorkerInput)},
            {"state", list_to_binary(State)}
        ]
    ).
%% ===================================================================
%% gui_port callbacks
%% ===================================================================
%% @doc Dispatch handlers according to contents of the received json message.
%% The `"action"' field selects which {@link dj} operation to invoke;
%% additional fields are extracted (and converted) via json:process_attrs/3.
%% Returns `unknown_json' for unrecognized actions.
%% NOTE(review): worker ids from the GUI are converted with
%% utils:binary_to_atom/1 — if that creates new atoms from untrusted input,
%% the atom table can be exhausted; confirm it restricts to existing atoms.
-spec handle_json([{binary(), any()}]) -> ok | unknown_json.
handle_json(JsonTerm) ->
    case proplists:get_value(<<"action">>, JsonTerm) of
        <<"block worker">> ->
            json:process_attrs(fun dj:block_worker/1,
                [{<<"worker id">>, fun utils:binary_to_atom/1}],
                JsonTerm);
        <<"unblock worker">> ->
            json:process_attrs(fun dj:unblock_worker/1,
                [{<<"worker id">>, fun utils:binary_to_atom/1}],
                JsonTerm);
        <<"choose problem">> ->
            % bare key: the attribute is passed through without conversion
            json:process_attrs(fun dj:choose_problem/1,
                [<<"problem idx">>],
                JsonTerm);
        <<"start round">> ->
            dj:start_round(),
            ok;
        <<"kill all workers">> ->
            dj:kill_all_workers(),
            ok;
        <<"apply proposition">> ->
            json:process_attrs(fun dj:apply_proposition/1,
                [{<<"worker id">>, fun utils:binary_to_atom/1}],
                JsonTerm);
        <<"load game state">> ->
            json:process_attrs(fun dj:load_game_state/1,
                [{<<"file path">>, fun erlang:binary_to_list/1}],
                JsonTerm);
        <<"save game state">> ->
            json:process_attrs(fun dj:save_game_state/1,
                [{<<"file path">>, fun erlang:binary_to_list/1}],
                JsonTerm);
        <<"add scores">> ->
            dj:add_scores(),
            ok;
        <<"quit program">> ->
            dj:quit_program(),
            ok;
        <<"get all data">> ->
            get_all_data(),
            ok;
        _ ->
            unknown_json
    end.
%% @doc Passthrough function to request all data from dj.
%% This makes sense so the {@link gui_port} doesn't have to call the
%% dj directly to initialize the GUI. The reply arrives asynchronously via
%% {@link all_data/7}.
-spec get_all_data() -> ok.
get_all_data() ->
    dj:gui_wants_all_data(),
    ok.
%% ===================================================================
%% private functions
%% ===================================================================
%% @doc Broadcast an event to every configured GUI port.
%% The event is encoded as a JSON object with an `"event"' field followed by
%% the given attributes; GUI ports are addressed by 0-based index.
-spec send_event_to_gui(string(), list({string(), term()})) -> ok.
send_event_to_gui(Name, Attributes) ->
    {ok, GuiProgList} = application:get_env(gui),
    BinaryfiedAttrs = [{list_to_binary(Key), Val} || {Key, Val} <- Attributes],
    Term = [{<<"event">>, list_to_binary(Name)} | BinaryfiedAttrs],
    %% one send per configured GUI program, identified by its index
    Indices = lists:seq(0, length(GuiProgList) - 1),
    lists:foreach(
        fun(Idx) -> gui_port:send(Idx, json:to_json_msg(Term)) end,
        Indices),
    ok.
%% ===================================================================
%%%-------------------------------------------------------------------
%%% @doc
%%% A set of optics specific to proplists.
%%% @end
%%%-------------------------------------------------------------------
-module(optic_proplists).
%% API
-export([all/0,
all/1,
keys/0,
keys/1,
values/0,
values/1,
properties/0,
properties/1,
key/1,
key/2,
property/1,
property/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% @see values/1
%% @equiv values(#{})
-spec all() -> optic:optic().
all() ->
    values(#{}).
%% @see values/1
%% @equiv values(Options)
-spec all(Options) -> optic:optic() when
      Options :: optic:variations().
all(Options) ->
    values(Options).
%% @see keys/1
%% @equiv keys(#{})
-spec keys() -> optic:optic().
keys() ->
    keys(#{}).
%% @doc
%% Focus on all keys of a list of properties. Duplicate keys are
%% preserved.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_proplists:keys()], [{first, 1}, {second, 2}]).
%% {ok,[first,second]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec keys(Options) -> optic:optic() when
      Options :: optic:variations().
keys(Options) ->
    %% read-only traversal over the keys; proplists:unfold/1 normalizes
    %% bare-atom entries into {Key, true} pairs first
    Fold =
    fun (Fun, Acc, List) when is_list(List) ->
            {ok, lists:foldl(
                   fun ({Key, _Value}, InnerAcc) ->
                           Fun(Key, InnerAcc)
                   end,
                   Acc,
                   proplists:unfold(List))};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% read/write traversal: keys may be rewritten, values are kept;
    %% the rebuilt list is reversed to restore the original order
    MapFold =
    fun (Fun, Acc, List) when is_list(List) ->
            {NewList, NewAcc} = lists:foldl(
                fun ({Key, Value}, {InnerList, InnerAcc}) ->
                        {NewKey, NewAcc} = Fun(Key, InnerAcc),
                        {[{NewKey, Value} | InnerList], NewAcc}
                end,
                {[], Acc},
                proplists:unfold(List)),
            {ok, {lists:reverse(NewList), NewAcc}};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% constructor used by the `create' variation: an empty property list
    New =
    fun (_Data, _Template) ->
            []
    end,
    Optic = optic:new(MapFold, Fold),
    optic:variations(Optic, Options, New).
%% @see values/1
%% @equiv values(#{})
-spec values() -> optic:optic().
values() ->
    values(#{}).
%% @doc
%% Focus on all values of a list of properties.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_proplists:values()], [{first, 1}, {second, 2}]).
%% {ok,[1,2]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec values(Options) -> optic:optic() when
      Options :: optic:variations().
values(Options) ->
    %% read-only traversal over the values; proplists:unfold/1 normalizes
    %% bare-atom entries into {Key, true} pairs first
    Fold =
    fun (Fun, Acc, List) when is_list(List) ->
            {ok, lists:foldl(
                   fun ({_Key, Value}, InnerAcc) ->
                           Fun(Value, InnerAcc)
                   end,
                   Acc,
                   proplists:unfold(List))};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% read/write traversal: values may be rewritten, keys are kept;
    %% the rebuilt list is reversed to restore the original order
    MapFold =
    fun (Fun, Acc, List) when is_list(List) ->
            {NewList, NewAcc} = lists:foldl(
                fun ({Key, Value}, {InnerList, InnerAcc}) ->
                        {NewValue, NewAcc} = Fun(Value, InnerAcc),
                        {[{Key, NewValue} | InnerList], NewAcc}
                end,
                {[], Acc},
                proplists:unfold(List)),
            {ok, {lists:reverse(NewList), NewAcc}};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% constructor used by the `create' variation: an empty property list
    New =
    fun (_Data, _Template) ->
            []
    end,
    Optic = optic:new(MapFold, Fold),
    optic:variations(Optic, Options, New).
%% @see properties/1
%% @equiv properties(#{})
-spec properties() -> optic:optic().
properties() ->
    properties(#{}).
%% @doc
%% Focus on all properties of a list of properties. A propety is a
%% tuple of a key and value. If a value was not given, it defaults to
%% the atom `true'.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_proplists:properties()], [{first, 1}, {second, 2}]).
%% {ok,[{first,1},{second,2}]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec properties(Options) -> optic:optic() when
      Options :: optic:variations().
properties(Options) ->
    %% read-only traversal over whole {Key, Value} pairs (after
    %% proplists:unfold/1 normalization of bare atoms)
    Fold =
    fun (Fun, Acc, List) when is_list(List) ->
            {ok, lists:foldl(Fun, Acc, proplists:unfold(List))};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% read/write traversal: whole pairs may be rewritten; the rebuilt
    %% list is reversed to restore the original order
    MapFold =
    fun (Fun, Acc, List) when is_list(List) ->
            {NewList, NewAcc} = lists:foldl(
                fun (Tuple, {InnerList, InnerAcc}) ->
                        {NewTuple, NewAcc} = Fun(Tuple, InnerAcc),
                        {[NewTuple | InnerList], NewAcc}
                end,
                {[], Acc},
                proplists:unfold(List)),
            {ok, {lists:reverse(NewList), NewAcc}};
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% constructor used by the `create' variation: an empty property list
    New =
    fun (_Data, _Template) ->
            []
    end,
    Optic = optic:new(MapFold, Fold),
    optic:variations(Optic, Options, New).
%% @see key/2
%% @equiv key(Key, #{})
-spec key(Key) -> optic:optic() when
      Key :: term().
key(Key) ->
    key(Key, #{}).
%% @doc
%% Focus on the value of a property list key. As keys may be
%% duplicated, this may be multiple values. If the value is not given,
%% it defaults to the atom `true'.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_proplists:key(first)], [{first, 1}, {second, 2}]).
%% {ok,[1]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec key(Key, Options) -> optic:optic() when
      Key :: term(),
      Options :: optic:variations().
key(Key, Options) ->
    %% read-only traversal over all values stored under `Key';
    %% `{error, undefined}' when the key is absent or data is not a list
    Fold =
    fun (Fun, Acc, List) when is_list(List) ->
            case proplists:get_all_values(Key, List) of
                [] ->
                    {error, undefined};
                Values ->
                    {ok, lists:foldl(
                           fun (Value, InnerAcc) ->
                                   Fun(Value, InnerAcc)
                           end,
                           Acc,
                           Values)}
            end;
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% read/write traversal: each element matching `Key' has its value
    %% rewritten (the single-element list trick reuses proplists lookups
    %% per element, which also normalizes bare-atom entries)
    MapFold =
    fun (Fun, Acc, List) when is_list(List) ->
            case proplists:get_all_values(Key, List) of
                [] ->
                    {error, undefined};
                _ ->
                    {ok, lists:mapfoldl(
                           fun (Elem, InnerAcc) ->
                                   case proplists:is_defined(Key, [Elem]) of
                                       true ->
                                           Value = proplists:get_value(Key, [Elem]),
                                           {NewValue, NewAcc} = Fun(Value, InnerAcc),
                                           {{Key, NewValue}, NewAcc};
                                       false ->
                                           {Elem, InnerAcc}
                                   end
                           end,
                           Acc,
                           List)}
            end;
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% constructor used by the `create' variation: prepend a fresh property
    New =
    fun (List, Template) when is_list(List) ->
            [{Key, Template} | List];
        (_Data, Template) ->
            [{Key, Template}]
    end,
    Optic = optic:new(MapFold, Fold),
    optic:variations(Optic, Options, New).
%% @see property/2
%% @equiv property(Key, #{})
-spec property(Key) -> optic:optic() when
      Key :: term().
property(Key) ->
    property(Key, #{}).
%% @doc
%% Focus on a property in a property list by key. As keys may be
%% duplicated, this may be multiple properties. If the value is not
%% given, it defaults to the atom `true'. If the key is modified, the
%% optic is no longer well behaved.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_proplists:property(first)], [{first, 1}, {second, 2}]).
%% {ok,[{first,1}]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec property(Key, Options) -> optic:optic() when
      Key :: term(),
      Options :: optic:variations().
property(Key, Options) ->
    %% read-only traversal over whole {Key, Value} pairs for this key;
    %% `{error, undefined}' when the key is absent or data is not a list
    Fold =
    fun (Fun, Acc, List) when is_list(List) ->
            case proplists:get_all_values(Key, List) of
                [] ->
                    {error, undefined};
                Values ->
                    {ok, lists:foldl(
                           fun (Value, InnerAcc) ->
                                   Fun({Key, Value}, InnerAcc)
                           end,
                           Acc,
                           Values)}
            end;
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% read/write traversal: each element matching `Key' is replaced by
    %% whatever pair the callback returns (asserted to be a 2-tuple)
    MapFold =
    fun (Fun, Acc, List) when is_list(List) ->
            case proplists:get_all_values(Key, List) of
                [] ->
                    {error, undefined};
                _ ->
                    {ok, lists:mapfoldl(
                           fun (Elem, InnerAcc) ->
                                   case proplists:is_defined(Key, [Elem]) of
                                       true ->
                                           Value = proplists:get_value(Key, [Elem]),
                                           %% keep the original's assertion that the
                                           %% callback returned a {Key, Value} pair,
                                           %% without rebuilding the tuple afterwards
                                           {{_, _} = NewProperty, NewAcc} =
                                               Fun({Key, Value}, InnerAcc),
                                           {NewProperty, NewAcc};
                                       false ->
                                           {Elem, InnerAcc}
                                   end
                           end,
                           Acc,
                           List)}
            end;
        (_Fun, _Acc, _Data) ->
            {error, undefined}
    end,
    %% constructor used by the `create' variation: prepend a fresh property
    New =
    fun (List, Template) when is_list(List) ->
            [{Key, Template} | List];
        (_Data, Template) ->
            [{Key, Template}]
    end,
    Optic = optic:new(MapFold, Fold),
    %% NOTE: stray non-Erlang text that followed this final period (an
    %% extraction artifact) has been removed; it broke compilation.
    optic:variations(Optic, Options, New).
%% @doc A "monad" for wrapping a value with a ordered event queue
%% such that values that have diverged in history can be merged
%% automatically in a predictable manner.
%%
%% In order to provide for an efficient serialization, old events
%% can be expired with expire/2 and the event queue can be
%% truncated to a specific maximum size with truncate/2.
%%
%% The default representation for a timestamp is OS clock msecs,
%% defined by <code>statebox_clock:timestamp/0</code>. This is
%% used by the convenience functions <code>new/1</code> and
%% <code>modify/2</code>.
-module(statebox).
-export([new/2, modify/3, merge/1, expire/2, truncate/2,
new/1, modify/2,
value/1, last_modified/1, is_statebox/1, apply_op/2]).
-record(statebox, {
value :: term(),
%% sorted list of operations (oldest first).
queue :: [event()],
last_modified :: timestamp()}).
-opaque statebox() :: #statebox{}.
-type event() :: {timestamp(), op()}.
-type timestamp() :: statebox_clock:timestamp().
-type timedelta() :: integer().
-type basic_op() :: {module(), atom(), [term()]} |
{fun((term(), term()) -> statebox()) |
fun((term(), term(), term()) -> statebox()), [term()]}.
-type op() :: basic_op() | [op()].
-export_type([statebox/0, event/0, timestamp/0, timedelta/0, basic_op/0, op/0]).
%% Used in a test, must be done before function definitions.
-ifdef(TEST).
-export([dummy_mfa_4/4]).
-endif.
%% @doc Return <code>true</code> if the argument is a statebox,
%% <code>false</code> otherwise.
-spec is_statebox(statebox() | term()) -> boolean().
is_statebox(Term) ->
    %% equivalent to matching #statebox{}: a 4-tuple tagged `statebox'
    erlang:is_record(Term, statebox).
%% @doc Construct a statebox at <code>statebox_clock:timestamp()</code>
%% containing the result of <code>Constructor()</code>. This should
%% return an "empty" object of the desired type, such as
%% <code>fun gb_trees:empty/0</code>.
%% @equiv new(timestamp(), Constructor)
-spec new(fun(() -> term())) -> statebox().
new(Constructor) ->
    %% convenience wrapper: stamp with the OS clock (msecs)
    new(statebox_clock:timestamp(), Constructor).
%% @doc Construct a statebox containing the result of
%% <code>Constructor()</code>. This should return an "empty" object of
%% the desired type, such as <code>fun gb_trees:empty/0</code>.
-spec new(timestamp(), fun(() -> term())) -> statebox().
new(T, Constructor) ->
    %% a fresh statebox starts with an empty event queue
    new(T, Constructor(), []).
%% @doc Return the current value of the statebox. You should consider this
%% value to be read-only.
-spec value(statebox()) -> term().
value(#statebox{value=V}) ->
    V.
%% @doc Return the last modified timestamp of the statebox.
-spec last_modified(statebox()) -> timestamp().
last_modified(#statebox{last_modified=T}) ->
    T.
%% @doc Remove all events older than <code>last_modified(S) - Age</code>
%% from the event queue.
-spec expire(timedelta(), statebox()) -> statebox().
expire(Age, State=#statebox{queue=Q, last_modified=T}) ->
    OldT = T - Age,
    %% queue is sorted oldest first, so dropping the sorted prefix of
    %% too-old events is sufficient
    State#statebox{
      queue=lists:dropwhile(fun ({EventT, _}) -> EventT < OldT end, Q)}.
%% @doc Truncate the event queue to the newest N events.
-spec truncate(non_neg_integer(), statebox()) -> statebox().
truncate(N, State=#statebox{queue=Q}) ->
    case length(Q) - N of
        %% drop the `Tail' oldest events (queue is sorted oldest first)
        Tail when Tail > 0 ->
            State#statebox{queue=lists:nthtail(Tail, Q)};
        %% queue already within the size limit
        _ ->
            State
    end.
%% @doc Return a new statebox as the product of all in-order events applied to
%% the last modified statebox(). If two events occur at the same time, the
%% event that sorts lowest by value will be applied first.
-spec merge([statebox()]) -> statebox().
merge([State]) ->
    State;
merge(Unordered) ->
    #statebox{value=V, last_modified=T} = newest(Unordered),
    %% umerge needs each queue sorted (the maintained invariant) and
    %% also deduplicates identical events shared between boxes
    Queue = lists:umerge([Q || #statebox{queue=Q} <- Unordered]),
    new(T, apply_queue(V, Queue), Queue).
%% @doc Modify the value in statebox and add {T, Op} to its event queue.
%% Op should be a <code>{M, F, Args}</code> or <code>{Fun, Args}</code>.
%% The value will be transformed as such:
%% <code>NewValue = apply(Fun, Args ++ [value(S)])</code>.
%% The operation should be repeatable and should return the same type as
%% <code>value(S)</code>. This means that this should hold true:
%% <code>Fun(Arg, S) =:= Fun(Arg, Fun(Arg, S))</code>.
%% An example of this kind of operation is <code>orddict:store/3</code>.
%% Only exported operations should be used in order to ensure that the
%% serialization is small and robust (this is not enforced).
%% Throws <code>{invalid_timestamp, _}</code> if T is older than the
%% statebox's last modification time.
-spec modify(timestamp(), op(), statebox()) -> statebox().
modify(T, Op, #statebox{value=Value, queue=Queue, last_modified=OldT})
  when OldT =< T ->
    %% appending keeps the queue sorted, since T >= every queued timestamp
    new(T, apply_op(Op, Value), queue_in({T, Op}, Queue));
modify(T, _Op, #statebox{last_modified=OldT}) ->
    throw({invalid_timestamp, {T, '<', OldT}}).
%% @doc Modify a statebox at timestamp
%% <code>max(1 + last_modified(S), statebox_clock:timestamp())</code>.
%% The `1 +' guarantees the new event is strictly newer than the last one
%% even when the wall clock has not advanced.
%% See <code>modify/3</code> for more information.
%% @equiv modify(max(1 + last_modified(S), statebox_clock:timestamp()), Op, S)
-spec modify(op(), statebox()) -> statebox().
modify(Op, S) ->
    modify(max(1 + last_modified(S), statebox_clock:timestamp()), Op, S).
%% @doc Apply an op() to <code>Data</code>.
%% An op is either a single <code>{M, F, Args}</code> /
%% <code>{Fun, Args}</code> pair — the current value is appended as the
%% last argument — or a list of ops applied left to right. The
%% arity-specific clauses avoid building `Args ++ [Value]' for the
%% common small arities.
-spec apply_op(op(), term()) -> term().
apply_op({Fun, [X]}, Value) when is_function(Fun, 2) ->
    Fun(X, Value);
apply_op({Fun, [X, Y]}, Value) when is_function(Fun, 3) ->
    Fun(X, Y, Value);
apply_op({Fun, [X, Y, Z]}, Value) when is_function(Fun, 4) ->
    Fun(X, Y, Z, Value);
apply_op({Fun, Args}, Value) when is_function(Fun) ->
    apply(Fun, Args ++ [Value]);
apply_op({Mod, Name, [X]}, Value) ->
    Mod:Name(X, Value);
apply_op({Mod, Name, [X, Y]}, Value) ->
    Mod:Name(X, Y, Value);
apply_op({Mod, Name, Args}, Value) ->
    apply(Mod, Name, Args ++ [Value]);
apply_op([Op | Ops], Value) ->
    %% op lists compose left to right
    apply_op(Ops, apply_op(Op, Value));
apply_op([], Value) ->
    Value.
%% Internal API

%% Pick the statebox with the greatest last-modified timestamp.
%% On a tie the earlier element of the list wins (>= comparison).
newest([First | Rest]) ->
    newest(First, Rest).

newest(M0, [M1 | Rest]) ->
    case last_modified(M0) >= last_modified(M1) of
        true ->
            newest(M0, Rest);
        false ->
            newest(M1, Rest)
    end;
newest(M, []) ->
    M.
%% Internal constructor: build a statebox from timestamp, value and queue.
new(T, V, Q) ->
    #statebox{value=V, queue=Q, last_modified=T}.
%% Append Event at the tail of the op queue, preserving FIFO order.
queue_in(Event, Queue) ->
    lists:append(Queue, [Event]).
%% Replay every queued {Timestamp, Op} pair against Data, oldest
%% first, and return the final value.
apply_queue(Data, Queue) ->
    lists:foldl(fun({_T, Op}, Acc) -> apply_op(Op, Acc) end, Data, Queue).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% A freshly constructed statebox keeps its value and timestamp, and
%% the no-op forms of expire/2, truncate/2 and merge/1 leave it as-is.
new_test() ->
    Now = 1,
    S = new(Now, fun () -> data end),
    ?assertEqual(
       data,
       value(S)),
    ?assertEqual(
       Now,
       last_modified(S)),
    %% Nothing to expire
    ?assertEqual(
       S,
       expire(0, S)),
    %% Nothing to truncate
    ?assertEqual(
       S,
       truncate(16, S)),
    %% Nothing to merge
    ?assertEqual(
       S,
       merge([S])),
    %% Merging the same object
    ?assertEqual(
       S,
       merge([S, S])),
    ok.
%% modify/3 must reject a timestamp older than the last modification.
bad_modify_test() ->
    F = fun (N, S) -> modify(N, {fun ordsets:add_element/2, [N]}, S) end,
    S10 = lists:foldl(F, new(0, fun () -> [] end), lists:seq(1, 10)),
    ?assertEqual(
       lists:seq(1, 10),
       value(S10)),
    %% Modifying at T=9 after a change at T=10 must throw.
    ?assertThrow(
       {invalid_timestamp, {9, '<', 10}},
       F(9, S10)),
    ok.
%% @private Four-argument helper used by alt_apply_op_test to exercise
%% the generic MFA dispatch of apply_op/2. The first two arguments are
%% fixed tags and only serve to check argument ordering.
dummy_mfa_4(a, b, Element, Set) ->
    ordsets:add_element(Element, Set).
%% A list of ops handed to modify/2 is applied in order as one batch.
batch_apply_op_test() ->
    S = new(0, fun () -> [] end),
    S0 = modify([], S),
    S1 = modify([{ordsets, add_element, [N]} || N <- lists:seq(1, 1)], S),
    S10 = modify([{ordsets, add_element, [N]} || N <- lists:seq(1, 10)], S),
    ?assertEqual(
       [],
       value(S0)),
    ?assertEqual(
       lists:seq(1, 1),
       value(S1)),
    ?assertEqual(
       lists:seq(1, 10),
       value(S10)),
    ok.
%% The generic fun fallback of apply_op/2 handles arities above 4.
apply_op_5_test() ->
    ?assertEqual(
       [a, b, c, d, e],
       statebox:apply_op(
         {fun (A, B, C, D, E) -> [A, B, C, D, E] end, [a, b, c, d]},
         e)).
%% Exercises every op encoding accepted by apply_op/2: MFA with one
%% argument, fun with two leading arguments, external fun reference,
%% MFA tuple, and an MFA whose argument list is appended generically.
alt_apply_op_test() ->
    L = [fun (N=1) -> {ordsets, add_element, [N]} end,
         fun (N=2) ->
                 {fun (a, B, C) -> ordsets:add_element(B, C) end, [a, N]}
         end,
         fun (N=3) ->
                 {fun ?MODULE:dummy_mfa_4/4, [a, b, N]}
         end,
         fun (N=4) ->
                 {?MODULE, dummy_mfa_4, [a, b, N]}
         end,
         fun (N=5) ->
                 {ordsets, fold,
                  [fun (X, Acc) -> ordsets:add_element(X + N, Acc) end, []]}
         end],
    F = fun ({N, F}, S) -> modify(N, F(N), S) end,
    S5 = lists:foldl(F, new(0, fun () -> [] end),
                     lists:zip(lists:seq(1, 5), L)),
    ?assertEqual(
       lists:seq(5 + 1, 5 + 4),
       value(S5)),
    ok.
%% truncate/2 caps the length of the op queue, keeping the newest ops.
truncate_test() ->
    F = fun (N, S) -> modify(N, {fun ordsets:add_element/2, [N]}, S) end,
    S10 = lists:foldl(F, new(0, fun () -> [] end), lists:seq(1, 10)),
    ?assertEqual(
       lists:seq(1, 10),
       value(S10)),
    ?assertEqual(
       10,
       length(S10#statebox.queue)),
    ?assertEqual(
       10,
       length((truncate(20, S10))#statebox.queue)),
    ?assertEqual(
       10,
       length((truncate(10, S10))#statebox.queue)),
    ?assertEqual(
       1,
       length((truncate(1, S10))#statebox.queue)),
    ok.
%% expire/2 drops queued ops older than the given age relative to the
%% box's last_modified timestamp.
expire_test() ->
    F = fun (N, S) -> modify(N, {fun ordsets:add_element/2, [N]}, S) end,
    S10 = lists:foldl(F, new(0, fun () -> [] end), lists:seq(1, 10)),
    ?assertEqual(
       lists:seq(1, 10),
       value(S10)),
    ?assertEqual(
       10,
       length(S10#statebox.queue)),
    ?assertEqual(
       1,
       length((expire(0, S10))#statebox.queue)),
    ?assertEqual(
       10,
       length((expire(10, S10))#statebox.queue)),
    ?assertEqual(
       10,
       length((expire(11, S10))#statebox.queue)),
    ok.
%% End-to-end merge semantics with orddict:store/3 as the op:
%% concurrent writes to the same key resolve by op compare, later
%% timestamps win outright, and independent keys merge cleanly.
orddict_in_a_statebox_test() ->
    S0 = new(0, fun () -> [] end),
    ?assertEqual(
       [],
       value(S0)),
    S1_a = modify(1, {fun orddict:store/3, [key, a]}, S0),
    S1_b = modify(1, {fun orddict:store/3, [key, b]}, S0),
    S1_c = modify(1, {fun orddict:store/3, [c, c]}, S0),
    S2_aa = modify(3, {fun orddict:store/3, [key, a2]}, S1_a),
    S2_ab = modify(2, {fun orddict:store/3, [key, b2]}, S1_a),
    S2_bb = modify(2, {fun orddict:store/3, [key, b2]}, S1_b),
    ?assertEqual(
       1,
       last_modified(S1_a)),
    ?assertEqual(
       1,
       last_modified(S1_b)),
    ?assertEqual(
       [{key, a}],
       value(S1_a)),
    ?assertEqual(
       [{key, b}],
       value(S1_b)),
    ?assertEqual(
       S1_a,
       merge([S1_a])),
    ?assertEqual(
       S1_a,
       merge([S0, S1_a])),
    ?assertEqual(
       S1_a,
       merge([S1_a, S0])),
    %% This is a conflict that can not be resolved peacefully,
    %% but S1_b wins by op compare
    ?assertEqual(
       value(S1_b),
       value(merge([S1_a, S1_b]))),
    %% This is a conflict that can not be resolved peacefully,
    %% but S1_b wins by op compare
    ?assertEqual(
       value(S1_b),
       value(merge([S1_b, S1_a]))),
    %% S2_aa wins because it has a bigger timestamp
    ?assertEqual(
       value(S2_aa),
       value(merge([S2_aa, S2_ab]))),
    %% S2_aa wins because it has a bigger timestamp
    ?assertEqual(
       value(S2_aa),
       value(merge([S2_ab, S2_aa]))),
    %% S2_aa wins because it has a bigger timestamp
    ?assertEqual(
       value(S2_aa),
       value(merge([S2_bb, S2_aa]))),
    %% S2_aa wins because it has a bigger timestamp
    ?assertEqual(
       value(S2_aa),
       value(merge([S2_aa, S2_bb]))),
    %% S1_[ab] and S1_c collide in time but the operations do not conflict
    ?assertEqual(
       [{c, c}, {key, a}],
       value(merge([S1_a, S1_c]))),
    ?assertEqual(
       [{c, c}, {key, a}],
       value(merge([S1_c, S1_a]))),
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_b, S1_c]))),
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_c, S1_b]))),
    %% S1_b wins over S1_a by op compare but S1_c is independent
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_c, S1_a, S1_b]))),
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_c, S1_b, S1_a]))),
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_a, S1_b, S1_c]))),
    ?assertEqual(
       [{c, c}, {key, b}],
       value(merge([S1_a, S1_c, S1_b]))),
    ok.
%% Fixed fake wall-clock time used below.
-define(WHENEVER, 1303513575954).
%% Uses meck to pin statebox_clock:timestamp/0 so that the convenience
%% wrappers new/1 and modify/2 can be checked deterministically,
%% including the "never move backwards" clock-skew correction.
convenience_test_() ->
    {setup,
     fun () ->
             meck:new(statebox_clock),
             meck:expect(statebox_clock, timestamp, 0, ?WHENEVER)
     end,
     fun (_) -> meck:unload(statebox_clock) end,
     [{"new",
       fun () ->
               ?assertEqual(
                  ?WHENEVER,
                  last_modified(new(fun () -> [] end)))
       end},
      {"modify",
       fun () ->
               S = modify({fun ordsets:add_element/2, [a]},
                          new(0, fun () -> [] end)),
               S1 = modify({fun ordsets:add_element/2, [b]},
                           S),
               ?assertEqual(
                  ?WHENEVER,
                  last_modified(S)),
               ?assertEqual(
                  [a],
                  value(S)),
               %% Check for clock skew correction
               ?assertEqual(
                  1 + ?WHENEVER,
                  last_modified(S1)),
               ?assertEqual(
                  [a, b],
                  value(S1))
       end}]}.
%% The ordsets example from the README, using the wall-clock API.
readme_ordsets_test() ->
    New = statebox:new(fun () -> [] end),
    ChildA = statebox:modify({fun ordsets:add_element/2, [a]}, New),
    ChildB = statebox:modify({fun ordsets:add_element/2, [b]}, New),
    Resolved = statebox:merge([ChildA, ChildB]),
    ?assertEqual(
       [a, b],
       statebox:value(Resolved)).
%% Same README example, but with explicitly supplied timestamps.
readme_ordsets_manual_test() ->
    New = statebox:new(0, fun () -> [] end),
    ChildA = statebox:modify(1, {fun ordsets:add_element/2, [a]}, New),
    ChildB = statebox:modify(2, {fun ordsets:add_element/2, [b]}, New),
    Resolved = statebox:merge([ChildA, ChildB]),
    ?assertEqual(
       [a, b],
       statebox:value(Resolved)).
%% is_statebox/1 recognizes statebox records and rejects anything else.
is_statebox_test() ->
    ?assertEqual(
       false,
       is_statebox(not_a_statebox)),
    ?assertEqual(
       true,
       is_statebox(new(fun () -> is_a_statebox end))),
    ok.
-endif.
%% -------------------------------------------------------------------
%%
%% riakc_counter: Eventually-consistent counter type
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Encapsulates a counter data-type. Counters are integers that
%% can be incremented or decremented. Like the other
%% eventually-consistent types, the original fetched value is
%% unmodified by increments. Instead, increments are captured for
%% later application in Riak.
-module(riakc_counter).
-behaviour(riakc_datatype).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-compile(export_all).
-endif.
%% Callbacks
-export([new/0, new/1, new/2,
value/1,
to_op/1,
is_type/1,
type/0]).
%% Operations
-export([increment/1, increment/2,
decrement/1, decrement/2]).
-record(counter, {
value = 0 :: integer(),
increment = undefined :: undefined | integer()
}).
-export_type([counter/0, counter_op/0]).
-opaque counter() :: #counter{}.
-type counter_op() :: {increment, integer()}.
%% @doc Creates a new counter type with a value of 0.
%% No increment is pending, so to_op/1 on the result yields undefined.
-spec new() -> counter().
new() ->
    #counter{}.
%% @doc Creates a new counter type with the passed context. It's
%% ignored, but we need this constructor for new nested (in maps)
%% objects on the fly. Equivalent to new/0.
-spec new(riakc_datatype:context()) -> counter().
new(_Context) ->
    #counter{}.
%% @doc Creates a new counter type with the passed integer and
%% context. The context is ignored for counters; only the fetched
%% value is recorded.
-spec new(integer(), riakc_datatype:context()) -> counter().
new(Value, _Context) when is_integer(Value) ->
    #counter{value=Value}.
%% @doc Gets the original value of the counter.
%% Note: pending local increments are NOT reflected here; they are
%% only shipped to Riak via to_op/1.
-spec value(counter()) -> integer().
value(#counter{value=Value}) ->
    Value.
%% @doc Increments the counter by 1.
%% @equiv increment(1, Counter)
-spec increment(counter()) -> counter().
increment(Counter) ->
    increment(1, Counter).
%% @doc Increments the counter by the passed amount. Successive
%% increments (and decrements, which are negative increments)
%% accumulate into one pending signed delta; the fetched value is
%% never touched locally.
-spec increment(integer(), counter()) -> counter().
increment(Amount, #counter{increment=Pending} = Counter) when is_integer(Amount) ->
    Delta = case Pending of
                undefined -> Amount;
                _ -> Pending + Amount
            end,
    Counter#counter{increment=Delta}.
%% @doc Decrements the counter by 1.
%% @equiv decrement(1, Counter)
-spec decrement(counter()) -> counter().
decrement(Counter) ->
    decrement(1, Counter).
%% @doc Decrements the counter by the passed amount.
%% Implemented as an increment by the negated amount, so mixed
%% increments and decrements fold into a single pending delta.
-spec decrement(integer(), counter()) -> counter().
decrement(Amount, Counter) ->
    increment(-Amount, Counter).
%% @doc Extracts the changes to this counter as an operation.
%% Returns undefined when no increment is pending (nothing to send).
-spec to_op(counter()) -> riakc_datatype:update(counter_op()).
to_op(#counter{increment=undefined}) ->
    undefined;
to_op(#counter{increment=Incr}) ->
    {type(), {increment, Incr}, undefined}.
%% @doc Determines whether the passed term is a counter container.
-spec is_type(term()) -> boolean().
is_type(Term) when is_record(Term, counter) ->
    true;
is_type(_) ->
    false.
%% @doc Returns the symbolic name of this container.
%% This atom is also the datatype tag emitted by to_op/1.
-spec type() -> atom().
type() -> counter.
-ifdef(EQC).
%% EQC generator: an arbitrary counter wrapping a random fetched value.
gen_type() ->
    ?LET(Count, int(), new(Count, undefined)).

%% EQC generator: a random increment/decrement operation. The argument
%% list is usually a single non-zero integer; occasionally it is empty
%% so the one-argument API forms get exercised too.
gen_op() ->
    {elements([increment, decrement]),
     weighted_default({1, []},
                      {5, [?SUCHTHAT(X, int(), X /= 0)]})}.
-endif.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% Purpose : Fractal trees
-module(frac).
-compile([{nowarn_deprecated_function,{gs,config,2}},
{nowarn_deprecated_function,{gs,create,3}},
{nowarn_deprecated_function,{gs,start,0}}]).
-export([start/0, go/0, test/0, grow/2, expand/3, subst/2]).
%% 0L - grammer -- context insensitive lindenmayer grammer
%% Spawn an unlinked process that renders the trees and then waits.
start() ->
    spawn(?MODULE, go, []).
%% Render the four trees, then block until any message arrives; the
%% process exits on the first message received.
go() ->
    draw(),
    receive
        _X -> true
    end.
%% Open a GS window with a canvas and render one tree per rule set.
draw() ->
    S=gs:start(),
    Width = 800,
    Ht = 520,
    Title="Context Insensitive Lindenmayer Grammer (L0) Trees",
    Win=gs:create(window,S,[{title,Title},{width,Width},{height,Ht}]),
    %% Beige background for the canvas.
    Canvas=gs:create(canvas,Win,[{width,Width},{height,Ht},{bg,{237,224,189}}]),
    gs:config(Win,[{iconname,"Plants"},{map,true}]),
    draw(Canvas, 1, Width, Ht),
    draw(Canvas, 2, Width, Ht),
    draw(Canvas, 3, Width, Ht),
    draw(Canvas, 4, Width, Ht).
%% Thin wrapper kept for readability; delegates to draw_frac/4.
draw(Graph, Index, Width, Ht) ->
    draw_frac(Graph, Index, Width, Ht).
%% Smoke test: expand rule set 1 for three generations.
test() ->
    grow(3,1).
%% Expand the axiom [a] for NGens generations under rule set
%% RuleNumber and return the resulting flat symbol list.
grow(NGens, RuleNumber) ->
    Expanded = expand(NGens, RuleNumber, [a]),
    lists:flatten(Expanded).
%% Production rules for the four L-systems. '(' / ')' and '[' / ']'
%% are branch push/pop markers interpreted by drawit/7; any symbol
%% without a production (including the markers) rewrites to itself.
rule(1,a) -> [b,'[',a,']',b,'(',a,')',a];
rule(1,b) -> [b,b];
rule(2,a) -> [b,'[',a,'[',b,a,']',']'];
rule(2,b) -> [b,'(','(',b,')',a,')',c];
rule(2,c) -> [c,d];
rule(3,a) -> [d,'[',d,b,e,']','(',d,c,e,')'];
rule(3,b) -> [d,'[',d,a,f,']','(',d,c,f,')',f];
rule(3,c) -> [d,'[',d,b,g,']','(',d,a,g,')',g];
rule(4,a) -> [c,'(',b,a,'(',b,')',')',c,'[',b,a,'[',b,']',']'];
rule(4,b) -> [c,'(',b,e,')',c,'[',b,f,']'];
rule(4,c) -> [g,c,c];
%% Identity: symbols with no production pass through unchanged.
rule(_,X) -> X.
%% Relative segment length drawn for each symbol; unknown symbols
%% (including the branch markers) default to 1.0.
step(Symbol) ->
    proplists:get_value(Symbol,
                        [{a, 1.0}, {b, 0.8}, {c, 0.6}, {d, 0.7},
                         {e, 0.6}, {f, 0.65}, {g, 0.75}],
                        1.0).
%% Fractional {X, Y} start position of each tree on the canvas;
%% any other index starts at the center.
start_coords(Index) ->
    case lists:keyfind(Index, 1, [{1, {0.8, 0.8}}, {2, {0.6, 0.8}},
                                  {3, {0.4, 0.8}}, {4, {0.2, 0.8}}]) of
        {_, Coords} -> Coords;
        false -> {0.5, 0.5}
    end.
%% Number of generations to expand; currently the same for every
%% rule set (the original special-cased index 1 to the same value).
gens(_Index) -> 5.
%% Base drawing scale per rule set; 5 for rule set 1 and anything
%% unrecognized.
scale(2) -> 40;
scale(3) -> 40;
scale(4) -> 4;
scale(_) -> 5.
%% Rewrite the symbol list X for N generations under rule set Index,
%% flattening between generations so multi-symbol productions stay a
%% flat list of symbols.
expand(0,_,X) ->
    X;
expand(N,Index,X) ->
    expand(N - 1, Index, lists:flatten(subst(X, Index))).
%% One generation of rewriting: replace every symbol by its (possibly
%% multi-symbol) production.
subst(Symbols, Index) ->
    [rule(Index, Symbol) || Symbol <- Symbols].
%% Compute the start position, heading and scale for tree Index, grow
%% its symbol string and hand it to the turtle interpreter drawit/7.
draw_frac(Id, Index, Width, Ht) ->
    X0 = 100,
    Y0 = 100,
    {XScale,YScale} = start_coords(Index),
    Xstart = trunc(X0 + Width*XScale),
    Ystart = trunc(Y0 + Ht*YScale),
    %% Initial heading: straight up (270 degrees, in radians).
    Angle = 270.0 * 3.14159 / 180.0,
    Scale = scale(Index),
    N = gens(Index),
    Tree = grow(N,Index),
    drawit(Tree, Id, Xstart, Ystart, Angle, Scale, []).
%% Turtle interpreter for the grown symbol string: '(' and '[' open a
%% branch (turn, shrink, push the current position/heading/scale),
%% ')' and ']' pop back to the saved state, and any other symbol
%% draws a segment whose length depends on the symbol (see step/1).
drawit([],_,_,_,_,_,_) ->
    true;
drawit(['('|T],Id,X0,Y0,Ang,Scale,Stack) ->
    %% Branch right: turn +20 degrees and shrink by 0.8.
    Ang1 = Ang + (20.0 * 3.14159 / 180.0),
    Scale1 = Scale * 0.8,
    drawit(T,Id,X0,Y0,Ang1,Scale1,[{X0,Y0,Ang,Scale}|Stack]);
drawit(['['|T],Id,X0,Y0,Ang,Scale,Stack) ->
    %% Branch left: turn -40 degrees and shrink by 0.8.
    Ang1 = Ang - (40.0 * 3.14159 / 180.0),
    Scale1 = Scale * 0.8,
    drawit(T,Id,X0,Y0,Ang1,Scale1,[{X0,Y0,Ang,Scale}|Stack]);
%% Close a branch: restore the saved turtle state.
drawit([')'|T],Id,_,_,_,_,[{X1,Y1,Ang1,Scale1}|Stack]) ->
    drawit(T,Id,X1,Y1,Ang1,Scale1,Stack);
drawit([']'|T],Id,_,_,_,_,[{X1,Y1,Ang1,Scale1}|Stack]) ->
    drawit(T,Id,X1,Y1,Ang1,Scale1,Stack);
%% Plain symbol: draw a segment and advance the turtle.
drawit([Symbol|T],Id,X0,Y0,Ang,Scale,Stack) ->
    Size = step(Symbol),
    L = Size * Scale,
    {X1, Y1} = plotit(Id,X0,Y0,L,Ang),
    drawit(T,Id,X1,Y1,Ang,Scale,Stack).
%% Draw one line segment of length L from {X0,Y0} along heading A
%% (radians) and return the integer end point.
plotit(Id,X0,Y0,L,A) ->
    CosA = math:cos(A),
    SinA = math:sin(A),
    X = trunc(X0 + L*CosA),
    Y = trunc(Y0 + L*SinA),
    gs:create(line,Id,[{coords,[{X0,Y0},{X,Y}]}]),
    {X,Y}.
-module(slacker_usergroup).
-include("spec.hrl").
-export([create/3, disable/3, enable/3, list/2, update/3, list_users/3, update_users/4]).
%% @doc Create a user group.
%%
%% Options can be:
%% handle: mention handle
%% description: short description
%% channels: comma separated string of encoded channel IDs the user group uses as a default
%% include_count: include the number of users in each user group
%%
-spec create(Token :: string(), Name :: string(), Options :: list()) -> http_response().
create(Token, Name, Options) ->
    Required = [{"token", Token}, {"name", Name}],
    slacker_request:send("usergroups.create", Required, Options).
%% @doc Disable an existing user group.
%%
%% Options can be:
%% include_count: include the number of users in each user group
%%
-spec disable(Token :: string(), Usergroup :: string(), Options :: list()) -> http_response().
disable(Token, Usergroup, Options) ->
    Required = [{"token", Token}, {"usergroup", Usergroup}],
    slacker_request:send("usergroups.disable", Required, Options).
%% @doc Enable a previously disabled user group.
%%
%% Options can be:
%% include_count: include the number of users in each user group
%%
-spec enable(Token :: string(), Usergroup :: string(), Options :: list()) -> http_response().
enable(Token, Usergroup, Options) ->
    Required = [{"token", Token}, {"usergroup", Usergroup}],
    slacker_request:send("usergroups.enable", Required, Options).
%% @doc List the team's user groups.
%%
%% Options can be:
%% include_disabled: include disabled user groups
%% include_count: include the number of users in each user group
%% include_users: include the list of users in each user group
%%
-spec list(Token :: string(), Options :: list()) -> http_response().
list(Token, Options) ->
    Required = [{"token", Token}],
    slacker_request:send("usergroups.list", Required, Options).
%% @doc Update an existing user group.
%%
%% Options can be:
%% name: user group name
%% handle: mention handle
%% description: short description
%% channels: comma separated string of encoded channel IDs the user group uses as a default
%% include_count: include the number of users in each user group
%%
-spec update(Token :: string(), Usergroup :: string(), Options :: list()) -> http_response().
update(Token, Usergroup, Options) ->
    Required = [{"token", Token}, {"usergroup", Usergroup}],
    slacker_request:send("usergroups.update", Required, Options).
%% @doc List all users within a user group.
%%
%% Options can be:
%% include_disabled: include the disabled user groups
%%
-spec list_users(Token :: string(), Usergroup :: string(), Options :: list()) -> http_response().
list_users(Token, Usergroup, Options) ->
    Required = [{"token", Token}, {"usergroup", Usergroup}],
    slacker_request:send("usergroups.users.list", Required, Options).
%% @doc Replace the list of users in a user group.
%%
%% Options can be:
%% include_count: include the number of users in the user group
%%
-spec update_users(Token :: string(), Usergroup :: string(), Users :: [string()], Options :: list()) -> http_response().
update_users(Token, Usergroup, Users, Options) ->
    Required = [{"token", Token}, {"usergroup", Usergroup}, {"users", Users}],
    slacker_request:send("usergroups.users.update", Required, Options).
%% @author Couchbase <<EMAIL>>
%% @copyright 2017-2019 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc rest api's for collections
-module(menelaus_web_collections).
-include("ns_common.hrl").
-include("cut.hrl").
-export([handle_get/2,
handle_post_scope/2,
handle_post_collection/3,
handle_delete_scope/3,
handle_delete_collection/4]).
%% GET handler: replies with the scope/collection manifest for Bucket.
handle_get(Bucket, Req) ->
    assert_api_available(Bucket),
    Manifest = collections:for_rest(Bucket),
    menelaus_util:reply_json(Req, Manifest).
%% POST handler: creates a new scope in Bucket. The scope name comes
%% from the form body and is validated for presence, length and
%% character set before collections:create_scope/2 is attempted.
%% NOTE: the trailing `_' arguments are cut.hrl partial application.
handle_post_scope(Bucket, Req) ->
    assert_api_available(Bucket),
    validator:handle(
      fun (Values) ->
              Name = proplists:get_value(name, Values),
              handle_rv(collections:create_scope(Bucket, Name), Req)
      end, Req, form,
      [validator:required(name, _),
       validator:length(name, 1, 30, _),
       name_validator(_),
       name_first_char_validator(_),
       validator:unsupported(_)]).
%% POST handler: creates a collection inside Scope in Bucket, applying
%% the same name validation rules as scope creation.
%% NOTE: the trailing `_' arguments are cut.hrl partial application.
handle_post_collection(Bucket, Scope, Req) ->
    assert_api_available(Bucket),
    validator:handle(
      fun (Values) ->
              Name = proplists:get_value(name, Values),
              handle_rv(collections:create_collection(Bucket, Scope, Name), Req)
      end, Req, form,
      [validator:required(name, _),
       validator:length(name, 1, 30, _),
       name_validator(_),
       name_first_char_validator(_),
       validator:unsupported(_)]).
%% DELETE handler for a scope.
handle_delete_scope(Bucket, Name, Req) ->
    assert_api_available(Bucket),
    Result = collections:drop_scope(Bucket, Name),
    handle_rv(Result, Req).
%% DELETE handler for a collection within a scope.
handle_delete_collection(Bucket, Scope, Name, Req) ->
    assert_api_available(Bucket),
    Result = collections:drop_collection(Bucket, Scope, Name),
    handle_rv(Result, Req).
%% Throws a 400 web exception unless both the cluster version and the
%% target bucket support the collections API; returns ok otherwise.
assert_api_available(Bucket) ->
    menelaus_util:assert_cluster_version(fun collections:enabled/0),
    {ok, BucketConfig} = ns_bucket:get_bucket(Bucket),
    case collections:enabled(BucketConfig) of
        true ->
            ok;
        false ->
            erlang:throw({web_exception, 400,
                          "Not allowed on this type of bucket", []})
    end.
%% Rejects names whose first character is `_' or `%'.
name_first_char_validator(State) ->
    validator:validate(
      fun ([First | _]) when First =:= $_; First =:= $% ->
              {error, "First character must not be _ or %"};
          ([_ | _]) ->
              ok
      end, name, State).
%% Restricts scope/collection names to a fixed safe character set.
name_validator(State) ->
    validator:string(
      name, "^[0-9A-Za-z_%\-]+$",
      "Can only contain characters A-Z, a-z, 0-9 and the following symbols "
      "_ - %", State).
%% Translates the result of a collections operation into an HTTP
%% reply: success returns the new manifest uid, name conflicts map to
%% 400, missing entities to 404 and transient failures to 503.
handle_rv({ok, Uid}, Req) ->
    menelaus_util:reply_json(Req, {[{uid, Uid}]}, 200);
handle_rv(scope_already_exists, Req) ->
    menelaus_util:reply_json(
      Req, <<"Scope with this name already exists">>, 400);
handle_rv(collection_already_exists, Req) ->
    menelaus_util:reply_json(
      Req, <<"Collection with this name already exists">>, 400);
handle_rv(collection_not_found, Req) ->
    menelaus_util:reply_json(
      Req, <<"Collection with this name is not found">>, 404);
handle_rv(scope_not_found, Req) ->
    menelaus_util:reply_json(
      Req, <<"Scope with this name is not found">>, 404);
handle_rv(default_scope, Req) ->
    menelaus_util:reply_json(
      Req, <<"Deleting _default scope is not allowed">>, 400);
%% Retryable safety failures are reported as 503.
handle_rv(Error, Req) when Error =:= unsafe;
                           Error =:= push_config;
                           Error =:= pull_config ->
    menelaus_util:reply_json(
      Req, <<"Operation is unsafe at this time. Retry later.">>, 503);
%% Anything unexpected is echoed verbatim as a 400.
handle_rv(Error, Req) ->
    menelaus_util:reply_json(
      Req, iolist_to_binary(io_lib:format("Unknown error ~p", [Error])), 400).
%%% vim:ts=2:sw=2:et
%%%-----------------------------------------------------------------------------
%%% @doc Erlang parse transform for permitting default arguments in functions
%%%
%%% Presently the Erlang syntax doesn't allow function arguments to have default
%%% parameters. Consequently a developer needs to replicate the function
%%% definition multiple times passing constant defaults to some parameters of
%%% functions.
%%%
%%% This parse transform addresses this shortcoming by extending the syntax
%%% of function definitions at the top level in a module to have a default
%%% expression such that for `A / Default' argument the `Default' will be
%%% used if the function is called in code without that argument.
%%%
%%% ```
%%% -export([t/2]).
%%%
%%% test(A / 10, B / 20) ->
%%% A + B.
%%% '''
%%% The code above is transformed to:
%%% ```
%%% -export([t/2]).
%%% -export([t/0, t/1]).
%%%
%%% test() -> test(10);
%%% test(A) -> test(A, 20);
%%% test(A,B) -> A+B.
%%% '''
%%%
%%% The arguments with default values must be at the end of the argument list:
%%% ```
%%% test(A, B, C / 1) -> %% This is valid
%%% ...
%%%
%%% test(A / 1, B, C) -> %% This is invalid
%%% ...
%%% '''
%%%
%%% Default arguments must be constants or arithmetic expressions. Function
%%% calls are not supported as default arguments due to the limitations of the
%%% Erlang parser.
%%%
%%% @author <NAME> <saleyn(at)gmail(dot)com>
%%% @end
%%%-----------------------------------------------------------------------------
%%% Copyright (c) 2021 <NAME>
%%%
%%% Permission is hereby granted, free of charge, to any person
%%% obtaining a copy of this software and associated documentation
%%% files (the "Software"), to deal in the Software without restriction,
%%% including without limitation the rights to use, copy, modify, merge,
%%% publish, distribute, sublicense, and/or sell copies of the Software,
%%% and to permit persons to whom the Software is furnished to do
%%% so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included
%%% in all copies or substantial portions of the Software.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%%%-----------------------------------------------------------------------------
-module(defarg).
-export([parse_transform/2]).
%% @doc parse_transform entry point; delegates the AST traversal to
%% replace/1 via etran_util's processing harness.
parse_transform(AST, Options) ->
    etran_util:process(?MODULE, fun replace/1, AST, Options).
%% Kick off the traversal: collect the module's exports first so we
%% know for which functions the generated reduced-arity wrappers must
%% also be exported.
replace(AST) ->
    ModExports = lists:sort(lists:append([Exp || {attribute, _, export, Exp} <- AST])),
    replace(AST, undefined, [], ModExports, []).
%% Walk the forms, rewriting functions with `Arg / Default' arguments
%% into a chain of lower-arity forwarding functions, and collecting
%% the extra -export attributes the generated wrappers need.
%%
%% Terminal clause: reassemble the module, inserting the collected
%% exports right after the -module attribute.
replace([], _Mod, Exports, _ModExports, Acc) ->
    Res = lists:reverse(Acc),
    {HeadAST, [{attribute, Loc, _, _} = ModAST|TailAST]} =
        lists:splitwith(fun({attribute, _, module, _}) -> false; (_) -> true end, Res),
    AddExports = [{attribute, Loc, export, Exp} || Exp <- lists:reverse(Exports)],
    HeadAST ++ [ModAST] ++ AddExports ++ TailAST;
%% Remember the module name (used for export bookkeeping and errors).
replace([{attribute,_,module,Mod}=H|T], _, Exports, ModExports, Acc) ->
    replace(T, Mod, Exports, ModExports, [H|Acc]);
%% Single-clause function: look for trailing `Arg / Default' arguments.
replace([{function, Loc, Fun, Arity, [{clause, CLoc, Args, Guards, Body}]}=H|T],
        Mod, Exports, ModExports, Acc) ->
    {RevDef, RevRestArgs} =
        lists:splitwith(
          fun({op, _, '/', _Arg, _Def}) -> true;
             (_) -> false
          end,
          lists:reverse(Args)),
    {FrontArgs, DefArgs} =
        {lists:reverse(RevRestArgs), lists:reverse([{A,D} || {op, _, '/', A, D} <- RevDef])},
    case DefArgs of
        [] ->
            replace(T, Mod, Exports, ModExports, [H|Acc]);
        _ ->
            %% Default arguments may only form a suffix of the argument
            %% list; a `/' op left among the front args means a default
            %% was followed by a plain argument.
            %% BUGFIX: this previously called the nonexistent
            %% lists:format/2 (crashing with `undef' instead of raising
            %% the intended error) and read the undocumented process
            %% dictionary key `key'; use io_lib:format/2 and the module
            %% name we already track.
            lists:any(fun({op, _, '/', _A, _D}) -> true; (_) -> false end, FrontArgs)
                andalso throw(lists:flatten(
                                io_lib:format(
                                  "Function ~w:~w/~w has default arguments not at the end of the argument list!",
                                  [Mod, Fun, Arity]))),
            %% Export the generated wrappers f/N..f/(Arity-1) only when
            %% the original f/Arity itself is exported.
            N = Arity - length(DefArgs),
            NewExports = case lists:member({Fun,Arity}, ModExports) of
                             true -> [[{Fun,I} || I <- lists:seq(N, Arity-1)] | Exports];
                             false -> Exports
                         end,
            %% The original clause, with defaults stripped from its head.
            LastClause = {function, Loc, Fun, Arity,
                          [{clause, CLoc, FrontArgs ++ [A || {A,_} <- DefArgs], Guards, Body}]},
            %% One forwarding function per defaulted argument, each
            %% calling the next arity with the default filled in.
            AddClauses = element(3,
                lists:foldl(fun({A, D}, {Front, ArityN, Acc1}) ->
                                    Acc2 = [{function, Loc, Fun, ArityN,
                                             [{clause, CLoc, Front, [],
                                               [{call, CLoc, {atom, CLoc, Fun}, Front ++ [D]}]}]} | Acc1],
                                    {Front ++ [A], ArityN+1, Acc2}
                            end, {FrontArgs, N, []}, DefArgs)),
            replace(T, Mod, NewExports, ModExports, [LastClause | AddClauses] ++ Acc)
    end;
%% Any other form (specs, multi-clause functions, ...) passes through.
replace([H|T], Mod, Exports, ModExports, Acc) ->
    replace(T, Mod, Exports, ModExports, [H|Acc]).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%
%% @doc This module is used to produce a graphical representation of
%% an execution.
%%
-module(conver_vis).
-include("conver.hrl").
-export([draw_execution/3]).
%%% API
%% @doc Draws the representation of the execution described in `Ops'.
%%
%% The execution is represented in a canonical and straightforward way,
%% having an horizontal line for each process, and rectangles for the operations.
%% Consistency violations are highlighted as shades of red filling
%% the operations' rectangles.
%% The execution is drawn using the Egd module, and then written to a png file
%% named as the given `StoreName'.
%%
-spec draw_execution([{atom(), [op()]}], integer(), string()) -> term().
draw_execution(Ops, Duration, StoreName) ->
    NProc = length(Ops),
    %% Layout constants, in pixels.
    OpHeight = 45, VMargin = 38, HMargin = 50,
    GoldenRatio = (1 + math:sqrt(5))/2,
    %% Canvas height scales with the number of processes; width keeps
    %% a golden-ratio-based aspect.
    H = (OpHeight + VMargin) * NProc *2,
    W = trunc(H + H/GoldenRatio),
    Im = egd:create(W, H),
    % Processes lines
    LineLength = W - (HMargin * 2),
    EbinDir = filename:dirname(code:which(?MODULE)), % HACK to get into priv dir
    Filename = filename:join([filename:dirname(EbinDir), "priv", "Helvetica14.wingsfont"]),
    Font = egd_font:load(Filename),
    %% One labeled horizontal timeline per process.
    _ = [{egd:text(Im, {trunc(HMargin/2), trunc(H/(2*NProc)+(X-1)*(H/NProc))},
                   Font, string:to_upper(atom_to_list(ProcName)), egd:color(black)),
          egd:line(Im,
                   {HMargin, trunc(H/(2*NProc)+(X-1)*(H/NProc)+VMargin)},
                   {W-HMargin, trunc(H/(2*NProc)+(X-1)*(H/NProc)+VMargin)},
                   egd:color(black))}
         || {{ProcName, _}, X} <- lists:zip(Ops, lists:seq(1, NProc))],
    % Operations rectangles
    %% Map an operation timestamp onto the horizontal pixel range.
    FScaleTime = fun(X) -> trunc((LineLength * X)/ Duration + HMargin) end,
    FDrawOps =
        fun(OpProc, IdxP) ->
                OpDetails = convert_ops_details(FScaleTime, OpProc, []),
                [{egd:text(Im,
                           {X1, trunc(H/(2*NProc)+(IdxP-1)*(H/NProc)+VMargin)},
                           Font, Label, Color),
                  FunRect(Im,
                          {X1, trunc(H/(2*NProc)+(IdxP-1)*(H/NProc)+VMargin)},
                          {X2, trunc(H/(2*NProc)+(IdxP-1)*(H/NProc)+VMargin-OpHeight)},
                          Color)} ||
                    {X1, X2, Label, Color, FunRect} <- OpDetails]
        end,
    [FDrawOps(X, Y) || {{_, X}, Y} <- lists:zip(Ops, lists:seq(1, NProc))],
    FileName = StoreName ++ ".png",
    egd:save(egd:render(Im, png), FileName),
    egd:destroy(Im),
    %% Best-effort attempt to open the rendered image in a viewer.
    os:cmd("see " ++ FileName ++ " &"), % XXX
    ok.
%%% Internal functions
-spec convert_ops_details(fun((integer()) -> integer()), [op()], [tuple()]) -> [tuple()].
%% Turn each op record into a drawing tuple
%% {X1, X2, Label, Color, RectangleFun}. Tuples are prepended to the
%% accumulator, so the result is reversed with respect to the input
%% (irrelevant for rendering).
convert_ops_details(_FScaleTime, [], Acc) -> Acc;
convert_ops_details(FScaleTime, [H|T], Acc) ->
    {Color, FunRect} = case length(H#op.notes) of
                           %% shades of red depending on
                           %% how many anomalies were detected
                           0 -> {egd:color(black), fun egd:rectangle/4};
                           1 -> {egd:color({223, 123, 123}), fun egd:filledRectangle/4};
                           2 -> {egd:color({178, 66, 66}), fun egd:filledRectangle/4};
                           _ -> {egd:color({212, 17, 17}), fun egd:filledRectangle/4}
                       end,
    Op = {FScaleTime(H#op.start_time), FScaleTime(H#op.end_time),
          get_op_label(H#op.type, H#op.arg), Color, FunRect},
    convert_ops_details(FScaleTime, T, [Op|Acc]).
-spec get_op_label(op_type(), integer()) -> string().
%% Short textual label drawn inside an operation's rectangle:
%% "R:<value>" for reads, "W (<value>)" for writes.
get_op_label(read, Arg) ->
    "R:" ++ integer_to_list(Arg);
get_op_label(write, Arg) ->
    "W (" ++ integer_to_list(Arg) ++ ")".
%%%=============================================================================
%%% @copyright 2017, <NAME>
%%% @doc
%%% Common functions and behaviour for Proof of Work
%%% @end
%%%=============================================================================
-module(aec_pow).
-export([test_target/2,
pick_nonce/0,
next_nonce/1,
target_to_difficulty/1,
scientific_to_integer/1,
integer_to_scientific/1]).
-ifdef(TEST).
-compile([export_all, nowarn_export_all]).
-endif.
-include("pow.hrl").
%% 10^24, approx. 2^80
-define(NONCE_RANGE, 1000000000000000000000000).
-define(POW_MODULE, aec_pow_cuckoo).
%% 0..?MAX_NONCE
-type nonce() :: 0..16#ffffffffffffffff.
-export_type([nonce/0]).
%%------------------------------------------------------------------------------
%% Target threshold and difficulty
%%
%% The mining rate is controlled by setting a target threshold. The PoW nonce
%% is accepted if a hash value (the hash of the header for SHA-256, the hash of
%% the solution graph for Cuckoo Cycles, converted to an integer) is below
%% this target threshold.
%%
%% A lower target represents a harder task (requires the hash to start with a
%% number of zeros).
%%
%% The target threshold relates to another value: the difficulty. This is
%% proportional to the hardness of the PoW task:
%%
%% Difficulty = <Target of difficulty 1> / Target,
%%
%% a floating point value.
%% Bitcoin uses 0x00000000FFFF0000000000000000000000000000000000000000000000000000
%% as Difficulty 1 target (0x1d00ffff in scientific notation, see below). For
%% Cuckoo Cycle we need a lighter filtering of solutions than for SHA-256 as the
%% basic algorithm is much slower than a simple hash generation, so we use the
%% largest possible value:
%% 0xFFFF000000000000000000000000000000000000000000000000000000000000 (0x2100ffff
%% in scientific notation) as difficulty 1.
%%
%% We store the current target threshold in the block header in scientific notation.
%% Difficulty is used to select the winning fork of new blocks: the difficulty of a
%% chain of blocks is the sum of the difficulty of each block.
%%
%% Integers represented in scientific notation:
%% 2^24 * <base-2 exponent + 3> + the first 3 most significant bytes (i.e.,
%% the significand, see https://en.wikipedia.org/wiki/Significand).
%% The + 3 corresponds to the length of the
%% significand (i.e., the int value is 0.<significand> * 8^<exponent>).
%% https://en.bitcoin.it/wiki/Difficulty#How_is_difficulty_stored_in_blocks.3F)
%%------------------------------------------------------------------------------
-type sci_int() :: integer().
%% Optional evidence for PoW verification
-type pow_evidence() :: 'no_value' | term().
-type pow_result() :: {'ok', {Nonce :: nonce(), Solution :: pow_evidence()}} |
{error, no_solution | {runtime, term()}}.
%% Difficulty: max threshold (0x00000000FFFF0000000000000000000000000000000000000000000000000000)
%% over the actual one. Always positive.
-type difficulty() :: float().
-export_type([sci_int/0,
difficulty/0,
pow_evidence/0,
pow_result/0]).
%%%=============================================================================
%%% Behaviour
%%%=============================================================================
-callback generate(Data :: aec_hash:hashable(), Difficulty :: aec_pow:sci_int(),
Nonce :: aec_pow:nonce()) ->
aec_pow:pow_result().
-callback verify(Data :: aec_hash:hashable(), Nonce :: aec_pow:nonce(),
Evd :: aec_pow:pow_evidence(), Difficulty :: aec_pow:sci_int()) ->
boolean().
%%%=============================================================================
%%% API
%%%=============================================================================
%% Convert a compact scientific-notation target into its plain integer
%% value. The significand occupies the top 3 bytes, so the effective byte
%% shift is Exponent - 3; Erlang's bsl handles a negative shift count by
%% shifting right, which covers exponents 0..2.
-spec scientific_to_integer(sci_int()) -> integer().
scientific_to_integer(Sci) ->
    {Exponent, Significand} = break_up_scientific(Sci),
    ByteShift = Exponent - 3,
    if
        Exponent >= 0 -> Significand bsl (8 * ByteShift);
        true          -> Significand bsr (8 * (-ByteShift))
    end.
%% Pack an integer into the compact scientific notation used in block
%% headers: the byte above bit 24 carries the exponent magnitude, bits
%% 0..23 the significand, and bit 23 (16#800000) doubles as the exponent's
%% sign flag.
-spec integer_to_scientific(integer()) -> sci_int().
integer_to_scientific(Int) ->
    %% Normalise to a 3-byte significand plus a base-256 exponent.
    {Exponent, Significand} = integer_to_scientific(Int, 3),
    if
        Exponent >= 0 ->
            %% Plain <exponent><significand> layout.
            (Exponent bsl 24) + Significand;
        true ->
            %% Negative exponent: store its magnitude and set the
            %% significand's top bit to flag the sign.
            ((-Exponent) bsl 24) + 16#800000 + Significand
    end.
%% @doc Difficulty of a target threshold: the highest (easiest) target
%% divided by the given one, so lower targets yield larger difficulties.
%% Returns a float (see the difficulty() type).
-spec target_to_difficulty(sci_int()) -> float().
target_to_difficulty(Th) ->
    %% Max threshold over the current one
    ?HIGHEST_TARGET_INT/scientific_to_integer(Th).
%% @doc Draw a random starting nonce. ?NONCE_RANGE (10^24, approx. 2^80)
%% over-spans the nonce space, and masking with ?MAX_NONCE folds the draw
%% into the valid nonce range.
-spec pick_nonce() -> aec_pow:nonce().
pick_nonce() ->
    rand:uniform(?NONCE_RANGE) band ?MAX_NONCE.
%% @doc Next nonce to try, wrapping within the ?MAX_NONCE mask.
-spec next_nonce(aec_pow:nonce()) -> aec_pow:nonce().
next_nonce(N) ->
    (N + 1) band ?MAX_NONCE.
%%------------------------------------------------------------------------------
%% Test if binary is under the target threshold
%%
%% Interprets Target (scientific notation) against Bin read as a big-endian
%% integer and checks whether Bin falls below the threshold.
%%
%% NOTE: despite the happy-path boolean result, a malformed target surfaces
%% as an {error, _} tuple (here and from compare_bin_to_significand/4), so
%% the spec includes it instead of promising a plain boolean().
%%------------------------------------------------------------------------------
-spec test_target(binary(), sci_int()) -> boolean() | {error, term()}.
test_target(Bin, Target) ->
    {Exp, Significand} = break_up_scientific(Target),
    L = byte_size(Bin),
    %% We expect L - Exp zero bytes and Exp nonzero bytes
    Zeros = 8*max(0, L - Exp),
    case Exp of
        _E when _E >=0,
                _E < 3 ->
            %% Less than 3 bytes behind zeros: drop the significand bits
            %% that fall outside the binary before comparing.
            compare_bin_to_significand(Bin, Significand bsr (8*(3 - Exp)), Zeros, 8*Exp);
        _ when Exp > L,
               Exp < L + 3 ->
            %% Exponent larger than length of Bin: shift the significand up
            %% so the comparison window still covers 24 bits of Bin.
            Skip = 8*(Exp - L),
            Compare = 24 - Skip,
            case Significand bsr Compare of
                0 ->
                    %% Ok, we do not lose significant bits
                    compare_bin_to_significand(Bin, Significand bsl Skip, 0, 24);
                _ ->
                    %% Not supposed to happen
                    {error, {incorrect_target_exponent, Exp}}
            end;
        _E when _E >= 0 ->
            %% At least 3 bytes after zeros
            compare_bin_to_significand(Bin, Significand, Zeros, 24);
        _E when _E < 0 ->
            %% All bits must be zero
            Bits = 8*L,
            Bin == <<0:Bits>>
    end.
%% Check that Bin starts with Zeros zero bits and that the NumBits-wide
%% integer following them is below Significand. Returns a boolean, or an
%% {error, _} tuple when the zero prefix is present but too few payload
%% bits remain.
compare_bin_to_significand(Bin, Significand, Zeros, NumBits) ->
    case Bin of
        <<0:Zeros, Chunk:NumBits, _Rest/bitstring>> ->
            %% Zero prefix matched and enough payload bits: numeric compare.
            Chunk < Significand;
        <<0:Zeros, _Rest/bitstring>> ->
            %% Prefix matched but fewer than NumBits bits remain.
            {error, {fewer_bits_than_required, NumBits, 8*byte_size(Bin) - Zeros}};
        _ ->
            %% A nonzero bit inside the expected zero prefix.
            false
    end.
%%%=============================================================================
%%% Internal functions
%%%=============================================================================
%% Normalise Int into {Exponent, Significand} with the significand filling
%% exactly 3 bytes (16#008000 =< Significand =< 16#7fffff); every byte
%% shifted in or out adjusts the base-256 exponent by one.
%% NOTE(review): does not terminate for Int =< 0 (0 bsl 8 stays 0) -
%% callers are expected to pass positive integers; confirm.
integer_to_scientific(Int, Pow) when Int > 16#7fffff ->
    %% Too wide: drop the lowest byte.
    integer_to_scientific(Int bsr 8, Pow + 1);
integer_to_scientific(Int, Pow) when Int < 16#008000 ->
    %% Too narrow: pull in another zero byte.
    integer_to_scientific(Int bsl 8, Pow - 1);
integer_to_scientific(Int, Pow) ->
    %% Within the 3-byte window - done.
    {Pow, Int}.
%% Return the exponent and significand of a sci_int().
%% Layout: bits 24.. hold the exponent magnitude, bits 0..23 the
%% significand, with bit 23 (16#800000) doubling as the exponent's sign bit.
break_up_scientific(S) ->
    SigMask = (1 bsl 24) - 1,
    %% bxor with the 24-bit mask only disturbs bits that bsr 24 discards,
    %% so this is effectively S bsr 24.
    Exp = ((S bxor SigMask) bsr 24),
    Significand = S band SigMask,
    %% Remove the sign bit, apply to exponent
    case 16#800000 band Significand of
        0 ->
            {Exp, Significand};
        _ ->
            {-Exp, Significand - 16#800000}
    end. | apps/aecore/src/aec_pow.erl | 0.577614 | 0.514644 | aec_pow.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2021 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module defines persistent record interface - implemented for erlang
%%% records that are to be stored in persistent database and to that end require
%%% an encoder and decoder to/from JSON. Each such record should have a
%%% dedicated module implementing the callbacks.
%%% @end
%%%-------------------------------------------------------------------
-module(persistent_record).
-author("<NAME>").
-include_lib("ctool/include/logging.hrl").
%% API
-export([encode/2, decode/2]).
-type record_version() :: non_neg_integer().
-export_type([record_version/0]).
-type nested_record_encoder() :: fun((jsonable_record:record(), jsonable_record:record_type()) -> json_utils:json_term()).
-type nested_record_decoder() :: fun((json_utils:json_term(), jsonable_record:record_type()) -> jsonable_record:record()).
-export_type([nested_record_encoder/0, nested_record_decoder/0]).
%%%===================================================================
%%% persistent_record behaviour definition
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Encodes a record into a database-compliant JSON object. The NestedRecordEncoder
%% passed in the second argument can be called to encode any nested record.
%% @end
%%--------------------------------------------------------------------
-callback db_encode(jsonable_record:record(), nested_record_encoder()) -> json_utils:json_term().
%%--------------------------------------------------------------------
%% @doc
%% Decodes a record from a database-compliant JSON object. The NestedRecordDecoder
%% passed in the second argument can be called to decode any nested record.
%% @end
%%--------------------------------------------------------------------
-callback db_decode(json_utils:json_term(), nested_record_decoder()) -> jsonable_record:record().
%%--------------------------------------------------------------------
%% @doc
%% Returns the current version of the record's definition (as defined in code).
%% The version is used to compare versions and trigger an upgrade if needed.
%% @end
%%--------------------------------------------------------------------
-callback version() -> record_version().
%%--------------------------------------------------------------------
%% @doc
%% Upgrades older records (must be implemented if record version > 1).
%% The upgrade is run for the encoded representation of the record.
%% @end
%%--------------------------------------------------------------------
-callback upgrade_encoded_record(record_version(), json_utils:json_term()) ->
{record_version(), json_utils:json_term()}.
-optional_callbacks([upgrade_encoded_record/2]).
%%%===================================================================
%%% API functions
%%%===================================================================
%% @doc Serializes Record (of type RecordType) to a JSON binary, wrapped in
%% a versioned envelope so that decode/2 can upgrade it later if needed.
-spec encode(jsonable_record:record(), jsonable_record:record_type()) -> binary().
encode(Record, RecordType) ->
    EnvelopeJson = db_encode_record(Record, RecordType),
    json_utils:encode(EnvelopeJson).
%% @doc Deserializes a record previously produced by encode/2, upgrading
%% the encoded form to the current record version when necessary.
-spec decode(binary(), jsonable_record:record_type()) -> jsonable_record:record().
decode(JsonEncodedRecord, RecordType) ->
    EnvelopeJson = json_utils:decode(JsonEncodedRecord),
    db_decode_record(EnvelopeJson, RecordType).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
%% Wraps the record's JSON form in a versioned envelope; passes itself as
%% the encoder for nested records so every nesting level carries its own
%% version marker.
-spec db_encode_record(jsonable_record:record(), jsonable_record:record_type()) -> json_utils:json_term().
db_encode_record(Record, RecordType) ->
    #{
        <<"_version">> => RecordType:version(),
        <<"_data">> => RecordType:db_encode(Record, fun db_encode_record/2)
    }.
%% @private
%% Unwraps the envelope produced by db_encode_record/2: upgrades the
%% payload to the code's current version, then decodes it, passing itself
%% as the decoder for nested records.
-spec db_decode_record(json_utils:json_term(), jsonable_record:record_type()) -> jsonable_record:record().
db_decode_record(#{<<"_version">> := CurrentRecordVersion, <<"_data">> := RecordJson}, RecordType) ->
    TargetRecordVersion = RecordType:version(),
    UpgradedRecordData = upgrade_encoded_record(
        TargetRecordVersion, CurrentRecordVersion, RecordType, RecordJson
    ),
    RecordType:db_decode(UpgradedRecordData, fun db_decode_record/2).
%% @private
%% Repeatedly applies the record type's upgrade_encoded_record/2 callback
%% until the encoded payload reaches the target (current) version.
%% Crashes (by design) on a future version or a missing upgrader.
-spec upgrade_encoded_record(record_version(), record_version(), jsonable_record:record_type(), json_utils:json_term()) ->
    json_utils:json_term() | no_return().
%% Already at the target version - nothing to do.
upgrade_encoded_record(Version, Version, _RecordType, Json) ->
    Json;
%% Stored version is newer than the code knows about - cannot downgrade.
upgrade_encoded_record(TargetVersion, CurrentVersion, RecordType, _Json) when CurrentVersion > TargetVersion ->
    ?emergency(
        "Upgrade requested for record '~p' with future version ~B (known versions up to: ~B)",
        [RecordType, CurrentVersion, TargetVersion]
    ),
    error({future_version, RecordType, CurrentVersion, TargetVersion});
upgrade_encoded_record(TargetVersion, CurrentVersion, RecordType, Json) ->
    %% NOTE(review): error:undef raised anywhere inside the upgrader call
    %% (not only by a missing callback) is also reported as
    %% missing_upgrader - confirm that this is intended.
    {NewVersion, Json2} = try
        RecordType:upgrade_encoded_record(CurrentVersion, Json)
    catch
        error:undef ->
            ?emergency(
                "Missing upgrade procedure for record '~p' from version ~B to ~B",
                [RecordType, CurrentVersion, TargetVersion]
            ),
            error({missing_upgrader, RecordType, CurrentVersion, TargetVersion})
    end,
    %% An upgrader may skip versions; loop until the target is reached.
    upgrade_encoded_record(TargetVersion, NewVersion, RecordType, Json2). | src/serialization/persistent_record.erl | 0.512205 | 0.432243 | persistent_record.erl | starcoder |
%% @author jstypka <<EMAIL>>
%% @version 1.0
%% @doc A module with evolutionary functions which transform one generation into another, including migrations.
-module(emas_evolution).
-export([do_reproduce/2, do_fight/2, optional_pairs/2]).
-include ("emas.hrl").
-type agent() :: emas:agent().
-type sim_params() :: emas:sim_params().
%% ====================================================================
%% API functions
%% ====================================================================
%% @doc This function implements an all-vs-all fight between agents.
%% It returns the list of agents after the fight.
%% -spec each_fights_all([agent()]) -> [agent()].
%% each_fights_all([]) -> [];
%%
%% each_fights_all([H|T]) ->
%% {NewH,NewT} = one_fights_rest(H,T,[]),
%% [NewH | each_fights_all(NewT)].
%% @doc The fight logic for a pair of agents.
%% It returns a list of both agents updated after the fight.
-spec do_fight({agent()} | {agent(), agent()}, sim_params()) -> [agent()].
%% A lone agent has nobody to fight - returned unchanged.
do_fight({A}, _SP) -> [A];
%% @doc Implements the fight logic for two agents: the agent with the lower
%% evaluation loses up to fight_transfer energy (capped by the loser's
%% remaining energy) to the other. Returns the two updated agents.
do_fight({{SolA, EvA, EnA}, {SolB, EvB, EnB}}, SP) ->
    %% Positive transfer means A pays B; negative means B pays A.
    AtoBtransfer =
        if EvA < EvB -> erlang:min(SP#sim_params.fight_transfer, EnA);
           EvA >= EvB -> -erlang:min(SP#sim_params.fight_transfer, EnB)
        end,
    [{SolA, EvA, EnA - AtoBtransfer}, {SolB, EvB, EnB + AtoBtransfer}].
%% @doc The reproduction logic for a single agent.
%% It returns a list with the updated parent and new child.
-spec do_reproduce({agent()} | {agent(), agent()}, sim_params()) -> [agent()].
do_reproduce({{SolA, EvA, EnA}}, SP) ->
    %% Asexual case: derive and evaluate a child solution, then fund the
    %% child with up to reproduction_transfer energy taken from the parent.
    SolB = emas_genetic:reproduction(SolA, SP),
    EvB = emas_genetic:evaluation(SolB, SP),
    %% Report the child's fitness to the metrics subsystem.
    exometer:update([global, fitness], EvB),
    AtoBtransfer = erlang:min(SP#sim_params.reproduction_transfer, EnA),
    [{SolA, EvA, EnA - AtoBtransfer}, {SolB, EvB, AtoBtransfer}];
%% @doc The reproduction logic for a pair of agents.
%% It returns a list with the updated parents and new children.
do_reproduce({{SolA, EvA, EnA}, {SolB, EvB, EnB}}, SP) ->
    %% Sexual case: recombine both parents into two children; each child is
    %% funded by an energy transfer from its corresponding parent.
    [SolC, SolD] = emas_genetic:reproduction(SolA, SolB, SP),
    [EvC, EvD] = [emas_genetic:evaluation(S, SP) || S <- [SolC, SolD]],
    exometer:update([global, fitness], EvC),
    exometer:update([global, fitness], EvD),
    [AtoCTransfer, BtoDTransfer] =
        [erlang:min(SP#sim_params.reproduction_transfer, E) || E <- [EnA, EnB]],
    [{SolA, EvA, EnA - AtoCTransfer},
     {SolB, EvB, EnB - BtoDTransfer},
     {SolC, EvC, AtoCTransfer},
     {SolD, EvD, BtoDTransfer}].
%% @doc Splits agents into two-tuples, consing each pair onto Acc (so the
%% result lists pairs in reverse order of consumption); an odd leftover
%% agent ends up as a one-tuple at the head of the result.
-spec optional_pairs([agent()], [{agent(), agent()}]) ->
                            [{agent(), agent()} | {agent()}].
optional_pairs([First, Second | Rest], Acc) ->
    optional_pairs(Rest, [{First, Second} | Acc]);
optional_pairs([Leftover], Acc) ->
    [{Leftover} | Acc];
optional_pairs([], Acc) ->
    Acc.
%% ====================================================================
%% Internal functions
%% ====================================================================
%%
%% %% @doc Executes the doFight/1 function between agent A and every other agent from the ToFight list.
%% -spec one_fights_rest(Agent::agent(), ToFight::[agent()], Fought::[agent()]) -> {agent(),[agent()]}.
%% one_fights_rest(Agent,[],Fought) -> {Agent,Fought};
%%
%% one_fights_rest(Agent,[H|ToFight],Fought) ->
%% [NewAgent,NewH] = do_fight({Agent,H}),
%% one_fights_rest(NewAgent,ToFight,[NewH|Fought]). | src/emas_evolution.erl | 0.507812 | 0.654067 | emas_evolution.erl | starcoder |
%% @copyright 2013-2016 <NAME> <<EMAIL>>
%%
%% @doc Tuple based `hash_ring' Implementation Module
%%
%% This module represents a ring (i.e., virtual nodes) as a large tuple.
%%
%% It is superior in terms of node search efficiency and memory footprint.
%% But dynamic addition and removal of nodes require O(N log N) and O(N) time respectively
%% (where N is the number of nodes in the ring).
%%
%% See <a href="https://github.com/sile/hash_ring#benchmark">README.md</a> for a benchmark result.
%%
%% @end
-module(hash_ring_static).
-behaviour(hash_ring).
%%----------------------------------------------------------------------------------------------------------------------
%% 'hash_ring' Callback API
%%----------------------------------------------------------------------------------------------------------------------
-export([make/2, add_nodes/2, remove_nodes/2, get_nodes/1, fold/4]).
%%----------------------------------------------------------------------------------------------------------------------
%% Macros & Recors & Types
%%----------------------------------------------------------------------------------------------------------------------
-define(SENTINEL_NODE, hash_ring_node:make('SENTINEL')).
-define(RING, ?MODULE).
-record(?RING,
{
vnodes :: tuple(), % array of `virtual_node()'
base :: hash_ring_base:state()
}).
-type virtual_node() :: {Hash::non_neg_integer(), Sequence::non_neg_integer(), hash_ring:ring_node()}.
%%----------------------------------------------------------------------------------------------------------------------
%% 'hash_ring' Callback Functions
%%----------------------------------------------------------------------------------------------------------------------
%% @private
%% Builds a ring from Nodes. The ring starts with a single sentinel virtual
%% node placed at HashMask + 1 (strictly above every real hash) so position
%% searches always have an upper bound; the real nodes are then inserted
%% through the ordinary add_nodes/2 path.
make(Nodes, Options) ->
    Base = hash_ring_base:make(Options),
    HashMask = hash_ring_base:get_hash_mask(Base),
    Ring =
        #?RING{
            vnodes = {{HashMask + 1, 0, ?SENTINEL_NODE}},
            base = Base
        },
    add_nodes(Nodes, Ring).
%% @private
%% Adds Nodes to the ring: deduplicates them by key, generates their
%% virtual nodes, and rebuilds the sorted virtual-node tuple plus the base
%% bookkeeping.
add_nodes(Nodes, Ring) ->
    %% Deduplicate by node key; maps:put/3 keeps the last occurrence.
    UniqueNodes =
        maps:values(
            lists:foldl(
                fun (N, Acc) -> maps:put(hash_ring_node:get_key(N), N, Acc) end,
                #{},
                Nodes)),
    %% Extend the existing virtual-node list and re-sort by hash.
    VirtualNodes =
        lists:sort(
            lists:foldl(fun (Node, Acc) -> add_node(Node, Acc, Ring#?RING.base) end,
                        tuple_to_list(Ring#?RING.vnodes),
                        UniqueNodes)),
    Base =
        lists:foldl(fun hash_ring_base:add_node/2, Ring#?RING.base, UniqueNodes),
    Ring#?RING{
        vnodes = list_to_tuple(VirtualNodes),
        base = Base
    }.
%% @private
%% Drops every node whose key appears in Keys: its virtual nodes are
%% filtered out of the ring tuple and its entry removed from the base
%% bookkeeping.
remove_nodes(Keys, Ring) ->
    KeySet = gb_sets:from_list(Keys),
    IsKept =
        fun ({_, _, Node}) ->
                not gb_sets:is_member(hash_ring_node:get_key(Node), KeySet)
        end,
    RemainingVNodes = lists:filter(IsKept, tuple_to_list(Ring#?RING.vnodes)),
    NewBase = gb_sets:fold(fun hash_ring_base:remove_node/2, Ring#?RING.base, KeySet),
    Ring#?RING{
        vnodes = list_to_tuple(RemainingVNodes),
        base = NewBase
    }.
%% @private
%% Returns the member nodes recorded in the base state.
get_nodes(Ring) ->
    hash_ring_base:get_nodes(Ring#?RING.base).
%% @private
%% Folds Fun over the distinct non-phantom nodes responsible for Item,
%% starting at the first virtual node at or after Item's hash position.
%% PartitionSize approximates the hash span covered by one virtual node and
%% seeds the interpolation search in find_start_position/3.
fold(Fun, Item, Initial, Ring) ->
    #?RING{base = Base, vnodes = VirtualNodes} = Ring,
    HashMask = hash_ring_base:get_hash_mask(Base),
    ItemHash = hash_ring_base:hash(Item, Base),
    NodeCount = hash_ring_base:get_non_phantom_node_count(Base),
    PartitionSize = max(1, (HashMask + 1) div tuple_size(VirtualNodes)),
    Position = find_start_position(ItemHash, PartitionSize, VirtualNodes),
    fold_successor_nodes(NodeCount, Position, VirtualNodes, Fun, Initial).
%%----------------------------------------------------------------------------------------------------------------------
%% Internal Functions
%%----------------------------------------------------------------------------------------------------------------------
-spec find_start_position(term(), pos_integer(), tuple()) -> non_neg_integer().
%% Finds the index of the first virtual node whose hash is >= ItemHash,
%% using an interpolation search seeded at ItemHash div PartitionSize.
find_start_position(ItemHash, PartitionSize, VirtualNodes) ->
    find_start_position(ItemHash, PartitionSize, VirtualNodes, 1, (ItemHash div PartitionSize) + 1, tuple_size(VirtualNodes) + 1).
-spec find_start_position(term(), pos_integer(), tuple(), pos_integer(), pos_integer(), pos_integer()) -> pos_integer().
%% Interpolation search over the half-open index range [Start, End);
%% Current0 is the next probe position and is clamped into the remaining
%% range before use.
find_start_position(_ItemHash, _PartitionSize, _VirtualNodes, Position, _, Position) ->
    %% Range is empty - Position is the insertion point.
    Position;
find_start_position(ItemHash, PartitionSize, VirtualNodes, Start, Current0, End) ->
    Current = min(max(Start, Current0), End - 1),
    {NodeHash, _, _} = element(Current, VirtualNodes),
    case NodeHash of
        ItemHash -> Current;
        _ ->
            %% Estimate (in entries) how far the target is from the probe
            %% and continue in the narrowed half of the range.
            Delta = ItemHash - NodeHash,
            Next = Current + (Delta div PartitionSize),
            case Delta > 0 of
                true -> find_start_position(ItemHash, PartitionSize, VirtualNodes, Current + 1, Next + 1, End);
                false -> find_start_position(ItemHash, PartitionSize, VirtualNodes, Start, Next - 1, Current)
            end
    end.
-spec fold_successor_nodes(non_neg_integer(), non_neg_integer(), tuple(), hash_ring:fold_fun(), term()) -> term().
%% Walks the ring clockwise from StartPosition, invoking Fun once per
%% distinct node until RestNodeCount distinct nodes have been visited or
%% Fun returns {false, Acc}.
fold_successor_nodes(RestNodeCount, StartPosition, VirtualNodes, Fun, Initial) ->
    fold_successor_nodes(RestNodeCount, StartPosition, VirtualNodes, Fun, Initial, gb_sets:empty()).
-spec fold_successor_nodes(non_neg_integer(), non_neg_integer(), tuple(), hash_ring:fold_fun(), term(),
                           gb_sets:set(hash_ring:ring_node())) -> term().
%% All requested nodes have been visited.
fold_successor_nodes(0, _, _, _, Acc, _) ->
    Acc;
%% Past the end of the tuple - wrap around to the start of the ring.
fold_successor_nodes(RestNodeCount, Position, VirtualNodes, Fun, Acc, IteratedNodes) when Position >= tuple_size(VirtualNodes) ->
    fold_successor_nodes(RestNodeCount, 1, VirtualNodes, Fun, Acc, IteratedNodes);
fold_successor_nodes(RestNodeCount, Position, VirtualNodes, Fun, Acc, IteratedNodes) ->
    {_, _, Node} = element(Position, VirtualNodes),
    case gb_sets:is_member(Node, IteratedNodes) of
        %% Another virtual node of an already-visited node - skip it.
        true -> fold_successor_nodes(RestNodeCount, Position + 1, VirtualNodes, Fun, Acc, IteratedNodes);
        false ->
            %% Fun decides whether to continue ({true, _}) or stop early.
            case Fun(Node, Acc) of
                {false, Acc2} -> Acc2;
                {true, Acc2} -> fold_successor_nodes(RestNodeCount - 1, Position + 1, VirtualNodes,
                                                     Fun, Acc2, gb_sets:add(Node, IteratedNodes))
            end
    end.
-spec add_node(hash_ring:ring_node(), [virtual_node()], hash_ring_base:state()) -> [virtual_node()].
%% Merges one node's virtual nodes into VirtualNodes. If the node already
%% exists, only the difference in virtual-node count is applied: missing
%% virtual nodes are added, or surplus ones (sequence >= new count) are
%% filtered out.
add_node(Node, VirtualNodes, Base) ->
    ExistingNodes = hash_ring_base:get_nodes(Base),
    VirtualNodeCount = hash_ring_base:calc_virtual_node_count(Node, Base),
    case maps:find(hash_ring_node:get_key(Node), ExistingNodes) of
        %% Brand-new node: create all of its virtual nodes.
        error -> add_virtual_nodes(Node, VirtualNodes, 0, VirtualNodeCount, Base);
        {ok, Old} ->
            OldCount = hash_ring_base:calc_virtual_node_count(Old, Base),
            case OldCount =< VirtualNodeCount of
                true -> add_virtual_nodes(Node, VirtualNodes, OldCount, VirtualNodeCount, Base);
                false -> remove_virtual_nodes(Node, VirtualNodes, VirtualNodeCount)
            end
    end.
-spec add_virtual_nodes(hash_ring:ring_node(), [virtual_node()], non_neg_integer(), non_neg_integer(),
                        hash_ring_base:state()) -> [virtual_node()].
%% Prepends virtual nodes with sequence numbers Start..End-1; each is
%% placed at the hash of its {Sequence, NodeKey} pair.
add_virtual_nodes(Node, VirtualNodes, Start, End, Base) ->
    Key = hash_ring_node:get_key(Node),
    [{hash_ring_base:hash({Seq, Key}, Base), Seq, Node} || Seq <- lists:seq(Start, End - 1)] ++ VirtualNodes.
-spec remove_virtual_nodes(hash_ring:ring_node(), [virtual_node()], non_neg_integer()) -> [virtual_node()].
%% Drops this node's virtual nodes whose sequence number is >= End;
%% virtual nodes belonging to other nodes are always kept.
remove_virtual_nodes(Node, VirtualNodes, End) ->
    Key = hash_ring_node:get_key(Node),
    lists:filter(fun ({_, Seq, N}) -> Seq < End orelse hash_ring_node:get_key(N) =/= Key end,
                 VirtualNodes). | src/hash_ring_static.erl | 0.549157 | 0.425605 | hash_ring_static.erl | starcoder |
-module(linearize).
-import(lists, [zip/2, zip3/3, unzip/1, unzip3/1, seq/2, nth/2]).
-import(domain, [subset/2, intersection/1, intersection/2, intersect_envs/2, union/1, union/2]).
-export([term/2, term/3]).
-include_lib("eunit/include/eunit.hrl").
%% Linearizes Term as an expression (the default mode).
term(Term, ModuleMap) ->
    term(expr, Term, ModuleMap).
%% Linearizes Term either as an expression or as a pattern, starting from a
%% fresh history, an empty local scope and the global scope derived from
%% ModuleMap. Patterns are matched against the universal domain 'any'.
term(expr, Term, ModuleMap) ->
    GlobalScope = scope(ModuleMap),
    History = [],
    expr({GlobalScope, #{}}, History, Term);
term(pattern, Term, ModuleMap) ->
    GlobalScope = scope(ModuleMap),
    History = [],
    PatternDomain = any,
    pattern({GlobalScope, #{}}, History, PatternDomain, Term).
%% Builds the global scope from the module map (see global_scope/1 for the
%% shape of the result).
scope(ModuleMap) ->
    % There's a gotcha when a scope is computed that in order to compute the
    % scope, the scope itself is needed. to circumvent this catch-22, we relax
    % the requirement and compute an index of index functions first. Each index
    % function will compute a scope function when given the full map of
    % indices. Thus we can neatly compute the index of index functions, and
    % then with the index in hand, build the scope by evaluating each index
    % function.
    I = fun(Def) ->
            fun(Index) ->
                fun(ArgDomains, History) ->
                    Global = global_scope(Index),
                    case expr({Global, #{}}, [], Def) of
                        {error, Errs} -> {error, Errs};
                        {ok, {_, {'fun', _, F}}} -> F(ArgDomains, History)
                    end end end end,
    % The index contains a mapping of def paths to index functions. An index
    % function will return a scope function when given an index as an argument.
    % A scope function will return the evaluated tree of the def that the def
    % path is pointing to, when given arg domains and the current evaluation
    % history
    Index = maps:from_list([{P ++ [N], I(Term)} || {module, _, P, _, _, Defs} <- maps:values(ModuleMap),
                                                   {N, Term} <- maps:to_list(Defs)]),
    global_scope(Index).
% The global scope is a map from symbol paths ('[a, b, f]') to a function `F`
% which takes arg domains + history and computes the evaluated tree of the
% function `f` in module `a/b`
%% Each index function is applied to the finished Index (see scope/1).
global_scope(Index) -> maps:from_list([{Path, I(Index)} || {Path, I} <- maps:to_list(Index)]).
% How do I compute an expression and what do I learn from an expression:
% Inputs: Expression AST
% A local scope of: Symbol -> Domain
% A global scope of: Path -> (Arg Domains -> Tree)
% A call history of applications (to check for recursion)
% Output: The linearized AST
% Map of linearized definitions
% Errors: Strict domain not subset errors
%% Linearizes an expression term by traversing it with expr_pre/expr_post,
%% then linearizes any defs bound directly in this scope (single-segment
%% key paths) and merges their environment into the result.
expr({GlobalScope, LocalScope}, History, Term) ->
    Pre = fun(_, LclScp, T) -> expr_pre({GlobalScope, LclScp}, History, T) end,
    Post = fun(Type, LclScp, T) -> expr_post(Type, {GlobalScope, LclScp}, History, T) end,
    case ast:traverse_term(expr, Pre, Post, LocalScope, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {Env, Tree}} ->
            %% Defs bound in this scope have single-segment key paths.
            {Local, _} = lists:partition(fun({{KeyPath, _}, _V}) -> length(KeyPath) =:= 1 end, maps:to_list(Env)),
            case linearize_local_defs(Local, History, Tree) of
                {error, Errs} -> {error, Errs};
                {ok, {DefsEnv, DefsTree}} -> {ok, {maps:merge(Env, DefsEnv), DefsTree}}
            end
    end.
% How do I compute a pattern and what do I learn from a pattern:
% Inputs: Pattern AST
% A local scope of: Symbol -> Domain
% A global scope of: Path -> (Arg Domains -> Tree)
% A history of applications (to check for recursion)
% Output: An environment of: Symbol -> Domain
% The pattern domain literal (containing vars)
% Errors: Only if there's no intersection between domains in for example
% pattern applications
%% Linearizes a pattern term against Domain: the domain is attached to the
%% term up front so pattern_pre can intersect and push it down the tree.
%% Returns {ok, Trees} - one tree per expansion of the pattern.
pattern({GlobalScope, LocalScope}, History, Domain, Term) ->
    Pre = fun(_, _, T) -> pattern_pre(History, domain(T), T) end,
    Post = fun(Type, LclScp, T) -> pattern_post(Type, {GlobalScope, LclScp}, History, T) end,
    case ast:traverse_term(pattern, Pre, Post, LocalScope, set_domain(Term, Domain)) of
        {error, Errs} -> {error, Errs};
        {ok, {_, Trees}} -> {ok, Trees}
    end.
% When scanning clauses, we jump out of the tree traversal because a few extra checks are needed:
% - Stop scanning clauses if arg domains are subset of current pattern domains
% - Skip clauses for which there's no intersection between arg domains and pattern domains
% - Check if the argdomains are a subset of the union of clauses
clauses(Scopes, History, ArgDomains, Clauses) ->
    ErrCtx = symbol:ctx(hd(Clauses)),
    %% All clauses must take the same number of patterns.
    Arities = [length(Patterns) || {clause, _, Patterns, _} <- Clauses],
    case lists:min(Arities) =:= lists:max(Arities) of
        false -> error:format({variable_arity, Arities}, {linearize, ErrCtx, History});
        true ->
            case error:collect(clauses(Scopes, History, ArgDomains, Clauses, [])) of
                {error, Errs} -> {error, Errs};
                %% No clause intersected the argument domains at all.
                {ok, []} -> error:format({no_intersection_between_clauses_and_argdomains, ArgDomains},
                                         {linearize, ErrCtx, History});
                {ok, ClausesRes} ->
                    {EnvCs, TreeCs} = unzip(ClausesRes),
                    %% Union each argument position's domain across clauses
                    %% and require the arguments to be covered by it.
                    DomainPs = [lists:map(fun domain/1, PsTs) || {clause, _, PsTs, _} <- TreeCs],
                    ActualDomains = [union(Ds) || Ds <- pivot(DomainPs)],
                    case subset(ArgDomains, ActualDomains) of
                        false -> error:format({arguments_not_subset_of_clauses, ArgDomains, ActualDomains},
                                              {linearize, ErrCtx, History});
                        true -> {ok, {utils:merge(EnvCs), TreeCs}}
                    end
            end
    end.
%% Accumulator pass over the clause list; Res is built in reverse and the
%% base case restores source order.
clauses(_, _, _, [], Res) -> lists:reverse(Res);
clauses(Scopes, History, ArgDomains, [Clause | Cs], Res) ->
    case clause(Scopes, History, ArgDomains, Clause) of
        {error, Errs} -> [{error, Errs} | clauses(Scopes, History, ArgDomains, Cs, Res)];
        {ok, {Env, Clauses}} ->
            % Only scan patterns until (and including) the first clause for
            % which all the pattern domains are subsets of the argument
            % domains. There's no point in keeping on scanning patterns if we
            % can infer from the argument domains that they will not be reached
            ClauseRes = lists:reverse([{ok, {Env, C}} || C <- Clauses]),
            IsClauseSubset = fun({clause, _, PsTs, _}) -> domain:subset(ArgDomains, [domain(T) || T <- PsTs]) end,
            case lists:any(IsClauseSubset, Clauses) of
                true -> clauses(Scopes, History, ArgDomains, [], ClauseRes ++ Res);
                false -> clauses(Scopes, History, ArgDomains, Cs, ClauseRes ++ Res)
            end
    end.
%% Linearizes one clause: each pattern is matched against its argument
%% domain, then the body is expanded once per combination of the pattern
%% trees returned by pattern/4. Combinations containing an unmatchable
%% pattern (the 'none' domain) are dropped.
clause(Scopes, History, ArgDomains, {clause, Ctx, Patterns, Expr}) ->
    case error:collect([pattern(Scopes, History, D, P) || {D, P} <- zip(ArgDomains, Patterns)]) of
        {error, Errs} -> {error, Errs};
        {ok, PsTrees} ->
            case error:collect([expand_clause(Scopes, History, PsTs, Expr, Ctx)
                                || PsTs <- combinations(PsTrees),
                                   not(lists:member(none, lists:map(fun domain/1, PsTs)))]) of
                {error, Errs} -> {error, Errs};
                {ok, ClausesRes} -> {Envs, Clauses} = unzip(ClausesRes),
                                    {ok, {utils:merge(Envs), Clauses}}
            end
    end.
%% Evaluates the clause body with the pattern bindings merged into the
%% local scope; the resulting clause's domain is the domain of its
%% evaluated body.
expand_clause({GlobalScope, LocalScope}, History, Patterns, Expr, Ctx) ->
    ClauseEnv = utils:merge([env(P) || P <- Patterns]),
    ClauseScope = maps:merge(LocalScope, ClauseEnv),
    case expr({GlobalScope, ClauseScope}, History, Expr) of
        {error, Errs} -> {error, Errs};
        {ok, {Env, ExprTree}} -> ClauseCtx = maps:put(domain, domain(ExprTree), Ctx),
                                 {ok, {Env, {clause, ClauseCtx, Patterns, ExprTree}}}
    end.
%% Pre-traversal step for patterns: intersects the domain attached to the
%% term with the shape the pattern requires and pushes the narrowed domains
%% down onto the subterms. A term whose intersection is empty is replaced
%% (via skip) by one carrying the 'none' domain, so the enclosing clause
%% combination gets dropped in clause/4.
pattern_pre(_, Domain, {tagged, Ctx, Path, Expr} = Term) ->
    case domain:intersection(Domain, {tagged, Path, any}) of
        %% A sum of tagged domains: the inner expression may be any of them.
        {sum, Ds} -> ExprDomain = domain:union([D || {tagged, _, D} <- Ds]),
                     {ok, {tagged, Ctx, Path, set_domain(Expr, ExprDomain)}};
        {tagged, Path, ExprDomain} -> {ok, {tagged, Ctx, Path, set_domain(Expr, ExprDomain)}};
        _ -> {skip, [set_domain(Term, none)]}
    end;
pattern_pre(_, Domain, {dict, Ctx, Elems} = Term) ->
    Keys = [symbol:name(E) || E <- Elems],
    case domain:intersection(Domain, maps:from_list(zip(Keys, [any || _ <- Elems]))) of
        %% A sum of map domains: union each key's domain across alternatives.
        {sum, MapDs} -> Ds = [domain:union([maps:get(K, D) || D <- MapDs]) || K <- Keys],
                        {ok, {dict, Ctx, [set_domain(E, D) || {E, D} <- zip(Elems, Ds)]}};
        M when is_map(M) -> Ds = [maps:get(K, M) || K <- Keys],
                            {ok, {dict, Ctx, [set_domain(E, D) || {E, D} <- zip(Elems, Ds)]}};
        _ -> {skip, [set_domain(Term, none)]}
    end;
pattern_pre(_, Domain, {list, Ctx, Elems} = Term) ->
    case domain:intersection(Domain, [any || _ <- Elems]) of
        %% A sum of list domains: union element-wise across alternatives.
        {sum, ListDs} -> Ds = [domain:union(L) || L <- pivot(ListDs)],
                         {ok, {list, Ctx, [set_domain(E, D) || {E, D} <- zip(Elems, Ds)]}};
        Ds when is_list(Ds) -> {ok, {list, Ctx, [set_domain(E, D) || {E, D} <- zip(Elems, Ds)]}};
        _ -> {skip, [set_domain(Term, none)]}
    end;
%% Each branch of a sum pattern is matched against the full domain.
pattern_pre(_, Domain, {sum, Ctx, Elems}) -> {ok, {sum, Ctx, [set_domain(E, Domain) || E <- Elems]}};
pattern_pre(_, Domain, {keyword, _, _, _} = Term) ->
    D = domain:intersection(symbol:tag(Term), Domain),
    {ok, set_domain(Term, D)};
pattern_pre(_, Domain, {keyword, _, _} = Term) ->
    {ok, set_domain(Term, Domain)};
pattern_pre(_, Domain, {value, _, _, Val} = Term) ->
    D = domain:intersection(Val, Domain),
    {ok, set_domain(Term, D)};
%% A variable accepts the whole incoming domain.
pattern_pre(_, Domain, {variable, _, _, _} = Term) ->
    {ok, set_domain(Term, Domain)};
%% Applications: arguments start out unconstrained ('any').
pattern_pre(_, Domain, {application, Ctx, Expr, Args}) ->
    {ok, set_domain({application, Ctx, set_domain(Expr, any), [set_domain(A, any) || A <- Args]}, Domain)};
pattern_pre(_, Domain, {qualified_application, Ctx, ModulePath, Name, Args}) ->
    {ok, set_domain({qualified_application, Ctx, ModulePath, Name, [set_domain(A, any) || A <- Args]}, Domain)};
%% A bare qualified symbol in a pattern is treated as a zero-arg application.
pattern_pre(_, Domain, {qualified_symbol, Ctx, ModulePath, Name}) ->
    {ok, set_domain({qualified_application, Ctx, ModulePath, Name, []}, Domain)};
pattern_pre(_, Domain, {beam_application, Ctx, ModulePath, Name, Args}) ->
    {ok, set_domain({beam_application, Ctx, ModulePath, Name, [set_domain(A, any) || A <- Args]}, Domain)};
%% An unapplied beam function cannot appear in a pattern - report an error.
pattern_pre(History, _, {beam_symbol, Ctx, [Module], Name}) ->
    error:format({unapplied_beam_function_in_pattern, symbol:tag([beam, Module, Name])}, {linearize, Ctx, History});
pattern_pre(_, Domain, {pair, Ctx, Key, Val}) ->
    {ok, {pair, Ctx, set_domain(Key, Domain), set_domain(Val, Domain)}}.
%% Pre-traversal step for expressions: 'fun', 'let' and def terms are
%% handled wholesale by the post step, so the generic traversal leaves
%% them intact; every other term is traversed normally.
expr_pre(_Type, _Scope, {'fun', _, _}) ->
    leave_intact;
expr_pre(_Type, _Scope, {'let', _, _, _, _}) ->
    leave_intact;
expr_pre(_Type, _Scope, {def, _, _, _}) ->
    leave_intact;
expr_pre(_Type, _Scope, _Term) ->
    ok.
%% post/4 is the bottom-up traversal callback invoked for every expression or
%% pattern term once its children have been processed.  Each clause returns
%% {ok, {Env, Tree}} -- an environment of collected linearized definitions and
%% the rewritten term with its domain stored in the term context -- or
%% {error, Errs}.
%%
%% A `def' wrapping a fun delegates to the fun clause; a bare-expression def
%% is wrapped in a zero-arity fun first.
post(expr, Scopes, History, {def, _, _, {'fun', _, _} = Fun}) ->
    post(expr, Scopes, History, Fun);
post(expr, Scopes, History, {def, Ctx, _, ExprTerm}) ->
    post(expr, Scopes, History, {'fun', Ctx, [{clause, Ctx, [], ExprTerm}]});
% Expr of type `a b -> a + b` (e.g. a function)
% When we linearize a def, we can compile several versions of the def in the
% module. Then we can just call the appropriate version at the calling point.
% This approach has the added benefit in that we can call the def recursively.
%
% What do we do when we assign a local variable to a function and call it at
% two separate points with different domains?
% I think my preferred approach would always inline the function at its calling
% sites. Since an anonymous function can't be recursive, we don't need to worry
% about it calling itself.
post(expr, Scopes, _, {'fun', Ctx, Clauses} = LinearizeFunTerm) ->
    %% The fun is not linearized here; instead we return a closure F that,
    %% given concrete argument domains (and the call-site history), checks the
    %% arity and linearizes the clauses on demand.
    F = fun(ArgDomains, History) ->
                Arity = fun_term_arity(LinearizeFunTerm),
                case Arity =:= length(ArgDomains) of
                    false -> error:format({wrong_function_arity, Arity, length(ArgDomains)},
                                          {linearize, Ctx, History});
                    true -> case clauses(Scopes, History, ArgDomains, Clauses) of
                                {error, Errs} -> {error, Errs};
                                {ok, {EnvCs, TreeCs}} -> Domain = union([domain(T) || T <- TreeCs]),
                                                         {ok, {EnvCs, set_domain({'fun', Ctx, TreeCs}, Domain)}}
                            end
                end
        end,
    %% NOTE(review): the closure F itself is stored as the term's domain here,
    %% so callers discover a function-valued domain via is_function/1.
    {ok, {#{}, set_domain({'fun', Ctx, F}, F)}};
% Expr of type `val p = e` where `p` is a pattern and `e` is an expression
post(expr, {GlobalScope, LocalScope} = Scopes, History, {'let', Ctx, Pattern, Expr, NextExpr}) ->
    case expr(Scopes, History, Expr) of
        {error, Errs} -> {error, Errs};
        {ok, {EEnv, ETree}} ->
            %% The pattern is linearized against the domain of the bound
            %% expression; a single resulting pattern tree extends the local
            %% scope for the body, while multiple trees (a sum) are rejected.
            case pattern(Scopes, History, domain(ETree), Pattern) of
                {error, Errs} -> {error, Errs};
                {ok, [PTree]} -> case expr({GlobalScope, maps:merge(LocalScope, env(PTree))}, History, NextExpr) of
                                     {error, Errs} -> {error, Errs};
                                     {ok, {NEnv, NTree}} -> Domain = domain(NTree),
                                                            Term = {'let', Ctx, PTree, ETree, NTree},
                                                            {ok, {maps:merge(NEnv, EEnv), set_domain(Term, Domain)}}
                                 end;
                {ok, _} -> error:format({sum_type_in_let_pattern, Pattern, domain(ETree)},
                                        {linearize, Ctx, History})
            end
    end;
%% A sequence takes the domain of its final expression.
post(expr, _, _, {seq, _, _, Then} = Term) -> {ok, {#{}, set_domain(Term, domain(Then))}};
% Patterns of type `module/t(a, b, c)` where `module/t` is a function defined in global scope
post(pattern, {GlobalScope, _}, History, {qualified_application, Ctx, Path, Name, Args} = Term) ->
    F = maps:get(Path ++ [Name], GlobalScope),
    pattern_apply(F, Args, Path ++ [Name], History, Ctx, domain(Term));
% Expr of type `module/t(a, b, c)` where `module/t` is a function defined in global scope
post(expr, {GlobalScope, _}, History, {qualified_application, Ctx, Path, Name, Args} = Term) ->
    F = maps:get(Path ++ [Name], GlobalScope),
    expr_apply(F, Args, Path ++ [Name], History, Ctx, Term);
% Patterns of type `module/t` with no arguments passed
post(pattern, Scopes, History, {qualified_symbol, Ctx, Path, Name}) ->
    post(pattern, Scopes, History, {qualified_application, Ctx, Path, Name, []});
% Patterns of type `module/t` with no arguments passed
post(expr, {GlobalScope, _}, _, {qualified_symbol, _, Path, Name} = Term) ->
    F = maps:get(Path ++ [Name], GlobalScope),
    {ok, {#{}, set_domain(Term, F)}};
% Patterns of type `beam/mod/t(a, b, c)` where `t` is a beam function in module `mod`
post(pattern, _, History, {beam_application, Ctx, Path, Name, Args}) ->
    ModuleName = module:beam_name(Path),
    ArgDomains = [domain(A) || A <- Args],
    Arities = utils:get_arities(ModuleName, Name),
    case lists:member(length(ArgDomains), Arities) of
        false -> error:format({wrong_function_arity, Arities, length(ArgDomains)},
                              {linearize, Ctx, History});
        true ->
            %% Only call into the beam function at compile time when all
            %% argument domains are literals and the function is whitelisted
            %% (pure/deterministic); otherwise fall back to a wildcard
            %% variable pattern with domain `any`.
            case domain:is_literal(ArgDomains) andalso allowed_beam_function(ModuleName, Name) of
                true -> Domain = erlang:apply(ModuleName, Name, ArgDomains),
                        {ok, {#{}, domain:to_term(Domain, Ctx)}};
                false -> {ok, {#{}, set_domain({variable, Ctx, '_', symbol:id('_')}, any)}}
            end
    end;
% Expr of type `beam/mod/t(a, b, c)` where `t` is a beam function in module `mod`
post(expr, Scopes, History, {beam_application, _Ctx, _Path, _Name, _Args} = Term) ->
    case post(pattern, Scopes, History, Term) of
        {error, Errs} -> {error, Errs};
        {ok, {_, Tree}} -> {ok, {#{}, set_domain(Term, domain(Tree))}}
    end;
post(pattern, Scopes, History, {beam_symbol, Ctx, Path, Name}) ->
    post(pattern, Scopes, History, {beam_application, Ctx, Path, Name, []});
post(expr, Scopes, History, {beam_symbol, Ctx, Path, Name}) ->
    %% An unapplied beam symbol is eta-expanded into a fun of maximal arity
    %% (arguments named arg_1 .. arg_N) that applies the beam function.
    ModuleName = module:beam_name(Path),
    Arity = utils:get_max_arity(ModuleName, Name),
    Args = [{variable, Ctx, ArgName, symbol:id(ArgName)} || N <- lists:seq(1, Arity),
                                                            ArgName <- [list_to_atom("arg_" ++ integer_to_list(N))]],
    Tree = {'fun', Ctx, [{clause, Ctx, Args, {beam_application, Ctx, Path, Name, Args}}]},
    expr(Scopes, History, Tree);
% Patterns of type `x.f(a, b, c)` where `f` refers to/is a function
post(pattern, _, History, {application, Ctx, Expr, Args} = Term) ->
    case domain(Expr) of
        F when is_function(F) ->
            pattern_apply(domain(Expr), Args, [utils:gen_tag(F)], History, Ctx, domain(Term));
        Other ->
            error:format({function_domain_expected, Other}, {linearize, Ctx, History})
    end;
% Expr of type `x.f(a, b, c)` where `f` refers to/is a function
%
% We want to coopt erlang core functions for *local* functions (not 'defs' even
% if they aren't exported) to avoid rolling our own closures etc. This means
% that whatever function we end up compiling in erlang core can't be linearized
% at the point of application like we would do with a qualified application
% call.
%
% What does argument linearization at the point of application do?
% - It allows us to compute pattern applications with correct/narrow input
%   arguments
% - It will allow us to overload function definitions and at compile time make
%   sure we dispatch to the proper function based on the argument domains.
% - It allows us to compile only clause statements and expressions specific to
%   the argument domains
% - It *does not* mean that we can't narrow down domain errors (e.g. where a
%   function argument is not in the subset of domains accepted as arguments by
%   the functions) specifically to their call site. We still do domain checks
%   at the call sites. We just don't return the AST with the function
%   linearized specifically to this call site.
%
% When we linearize at function definition, we can't linearize with respect to
% the individual argument domains like we do when we linearize at application
% points. Naiively this would mean that we would assume the `any` domain at
% linearization for all function arguments. However, if we scan through the
% code we can do much better and find the union of domains that this particular
% function is applied to and linearize with the union of all the input
% arguments this particular function is applied to.
%
% To do so, at the application site we record the function tag and the argument
% domains in the env at every application of a local function. We can do that
% in the same format as we store the linearized global function in the env, but
% omit the resulting linearized function AST. Then once the linearized AST has
% been computed, we can traverse it and linearize all local function
% definitions.
post(expr, _, History, {application, Ctx, Expr, Args} = Term) ->
    Apply = fun(F) -> expr_apply(F, Args, [utils:gen_tag(F)], History, Ctx, Term) end,
    case domain(Expr) of
        {sum, Ds} ->
            %% A sum of functions: apply each alternative and union the
            %% resulting domains.
            case error:collect([Apply(F) || F <- Ds]) of
                {error, Errs} -> {error, Errs};
                {ok, Res} -> {Envs, Trees} = unzip(Res),
                             Domain = union([domain(T) || T <- Trees]),
                             {ok, {utils:merge(Envs), set_domain(Term, Domain)}}
            end;
        F -> Apply(F)
    end;
% Expr/Patterns of type `T: S`
post(_, _, _, {tagged, _, Path, Expr} = Term) ->
    {ok, {#{}, set_domain(Term, {tagged, Path, domain(Expr)})}};
% Expr/Patterns of type `[a, b, c]`
post(_, _, _, {list, _, Elems} = Term) ->
    {ok, {#{}, set_domain(Term, [domain(E) || E <- Elems])}};
% Expr/Patterns of type `A | B | C`
post(_, _, _, {sum, _, Elems} = Term) ->
    {ok, {#{}, set_domain(Term, {sum, ordsets:from_list([domain(E) || E <- Elems])})}};
% Expr/Patterns of type `{k1: v1, k2: v2}`
post(_, _, _, {dict, _, Elems} = Term) ->
    Domain = maps:from_list([{symbol:name(E), domain(E)} || E <- Elems]),
    {ok, {#{}, set_domain(Term, Domain)}};
% Dict key/val pattern of type `k: v`
post(pattern, _, _, {pair, _, {keyword, _, _}, Val} = Term) ->
    {ok, {#{}, set_domain(Term, domain(Val))}};
% Patterns of type `k: v` or more complicated like `[a, b]: x.t(q)`
post(pattern, Scopes, History, {pair, Ctx, Key, Val}) ->
    case pattern(Scopes, History, domain(Val), Key) of
        {error, Errs} -> {error, Errs};
        {ok, KTrees} -> F = fun(Tree) -> Term = {pair, Ctx, Tree, Val},
                                         Env = env(Tree),
                                         %% A literal key domain collapses the
                                         %% pair to a literal term.
                                         case domain:is_literal(domain(Tree)) of
                                             false -> {ok, {Env, set_domain(Term, domain(Val))}};
                                             true -> {ok, {Env, domain:to_term(domain(Tree), symbol:ctx(Term))}}
                                         end end,
                       sum([F(Tree) || Tree <- KTrees])
    end;
% Expr of type `k: v`
post(expr, _, _, {pair, _, _, Val} = Term) ->
    {ok, {#{}, set_domain(Term, domain(Val))}};
% Pattern like `a`
post(pattern, {_, LocalScope}, _, {variable, _, _, _} = Term) ->
    %% The variable's effective domain is the intersection of any domain it
    %% already has in local scope (defaulting to `any`) and the domain
    %% assigned to the pattern position; the binding is recorded in Env.
    ScopeDomain = maps:get(symbol:tag(Term), LocalScope, any),
    Domain = intersection(ScopeDomain, domain(Term)),
    case domain:is_literal(Domain) of
        false -> {ok, {#{symbol:tag(Term) => Domain}, set_domain(Term, Domain)}};
        true -> {ok, {#{symbol:tag(Term) => Domain}, domain:to_term(Domain, symbol:ctx(Term))}}
    end;
% Expr of type `x`
post(expr, {_, LocalScope}, _, {variable, _, _, _} = Term) ->
    %% Unlike the pattern case there is no default here: an unbound variable
    %% in expression position crashes with badkey.
    Domain = maps:get(symbol:tag(Term), LocalScope),
    case domain:is_literal(Domain) of
        false -> {ok, {#{}, set_domain(Term, Domain)}};
        true -> {ok, {#{}, domain:to_term(Domain, symbol:ctx(Term))}}
    end;
% Pattern like `T` -- Domain has already been set in pre_pattern
post(pattern, _, _, {keyword, _, _, _} = Term) -> {ok, {#{}, Term}};
% Pattern like `key: ...` in dictionary -- Domain has already been set in pre_pattern
post(pattern, _, _, {keyword, _, _} = Term) -> {ok, {#{}, Term}};
% Expr of type `T`
post(expr, _, _, {keyword, _, _, _} = Term) ->
    {ok, {#{}, set_domain({value, symbol:ctx(Term), atom, symbol:tag(Term)}, symbol:tag(Term))}};
post(expr, _, _, {keyword, _, _} = Term) ->
    {ok, {#{}, set_domain(Term, none)}};
% Pattern like `5` -- Domain has already been set in pre_pattern
post(pattern, _, _, {value, _, _, _} = Term) -> {ok, {#{}, Term}};
% Expr/Patterns of type `1` or `"sdfgsf"` or `'atom'`
post(expr, _, _, {value, _, _, Val} = Term) -> {ok, {#{}, set_domain(Term, Val)}}.
%% Local functions (those not introduced by `def`) are linearized at their
%% point of definition so they compile to plain local functions in erlang
%% core.  We gather the argument domains recorded at every call site, group
%% them per function tag, and then linearize each definition once with the
%% union of those argument domains.
linearize_local_defs(LocalDefs, History, Tree) ->
    TaggedArgs = [{Tag, ArgDoms} || {{[Tag], ArgDoms}, _} <- LocalDefs],
    Grouped = utils:group_by(fun({Tag, _}) -> Tag end,
                             fun({_, ArgDoms}) -> ArgDoms end,
                             TaggedArgs),
    linearize_local_defs(Grouped, History, Tree, #{}, []).
%% Recursive worker: succeeds with the accumulated env and rewritten tree when
%% no errors were collected, otherwise returns all collected errors.
linearize_local_defs([], _, Tree, Env, []) -> {ok, {Env, Tree}};
linearize_local_defs([], _, _, _, Errors) -> error:collect(lists:reverse(Errors));
linearize_local_defs([{Tag, DomainArgsList} | Defs], History, Tree, Env, Errors) ->
    % I've often paused at this code and trying to work out what it does when
    % debugging bugs in the linearizer code, so here's a brief description for
    % posterity:
    %
    % `LocalDefs` as defined in the first definition of `linearize_local_defs`
    % is a list of function tags and their argument compiled from any
    % application. Before processing this list we run a group-by on it to have
    % each tag paired with argument domains of all applications.
    %
    % The `Tree` is the current part of the code that we are linearizing, e.g.
    % a function clause. Often trees are nested (say a function clause defined
    % inside another function clause. In this case `linearize_local_defs` is
    % first called on the inner clause and then on the outer clause.
    %
    % The following definitions of `linearize_local_defs` recursively process
    % these tag:[argdomain] pairs in the following way:
    % 1. For each argument to a tag, compute the union of argument domains
    % 2. Traverse the current tree and if the Tree contains the function
    %    declaration for the tagged function, compute the actual function
    %    definition based on the argument domains and insert this tree
    %    instead.
    %
    % Even though we linearize functions at the point of definition rather than
    % at their call site, we still compute the domain of the function at the
    % call site. This means that for a function called in `k` locations, we end
    % up linearizing it `k + 1` times. Once for each application and then once
    % again when calling `linearize_local_defs` for the definition with the
    % union of arguments used at the `k` calling sites.
    %
    % Per-parameter union of the argument domains seen at all call sites
    % (pivot/1 transposes the per-call lists into per-parameter lists).
    DomainArgs = [union(Args) || Args <- pivot(DomainArgsList)],
    % Don't descend into fun closures; the Post hook decides what to do.
    Pre = fun(expr, _, {'fun', _, F}) when is_function(F) -> leave_intact;
             (_, _, _) -> ok end,
    % When we reach the closure whose tag matches, apply it to the unioned
    % domains to obtain the concrete linearized definition.
    Post = fun(expr, _, {'fun', Ctx, F}) when is_function(F) ->
                   case utils:gen_tag(F) =:= Tag of
                       true -> case fun_apply(F, DomainArgs, Tag, History, Ctx) of
                                   {error, Errs} -> {error, Errs};
                                   {ok, {Env, T}} -> {ok, Env, T}
                               end;
                       false -> ok
                   end;
              (_, _, _) -> ok end,
    case ast:traverse_term(expr, Pre, Post, #{}, Tree) of
        {error, Errs} -> linearize_local_defs(Defs, History, Tree, Env, [Errs | Errors]);
        {ok, {E, T}} -> linearize_local_defs(Defs, History, T, maps:merge(Env, E), Errors)
    end.
%% Applies the linearization closure F to the given argument domains.  If this
%% exact application (same tag and argument domains) already occurs in History
%% we are recursing, so a `recursion` marker term with domain {recur, F} is
%% returned instead of looping forever.  Non-function values are an error.
fun_apply(F, ArgDomains, Tag, History, Ctx) when is_function(F) ->
    Step = {Tag, Ctx, ArgDomains},
    case is_recursive(Step, History) of
        false ->
            F(ArgDomains, [Step | History]);
        true ->
            RecTerm = {recursion, Ctx, Tag, ArgDomains},
            {ok, set_domain(RecTerm, {recur, F})}
    end;
fun_apply(NotAFun, _ArgDomains, _Tag, History, Ctx) ->
    error:format({function_domain_expected, NotAFun}, {linearize, Ctx, History}).
%% Applies a function in pattern position and narrows the result by
%% intersecting the produced tree's domain with the pattern's own domain.
pattern_apply(F, Args, Path, History, Ctx, AppDomain) ->
    ArgDomains = [domain(Arg) || Arg <- Args],
    case fun_apply(F, ArgDomains, Path, History, Ctx) of
        {error, _} = Error ->
            Error;
        {ok, {_Env, Tree}} ->
            Narrowed = intersection(domain(Tree), AppDomain),
            {ok, {#{}, domain:to_term(Narrowed, Ctx)}}
    end.
%% Applies a function in expression position; the linearized tree is recorded
%% in the env under {Path, ArgDomains} so the definition can later be emitted.
expr_apply(F, Args, Path, History, Ctx, Term) ->
    ArgDomains = [domain(Arg) || Arg <- Args],
    case fun_apply(F, ArgDomains, Path, History, Ctx) of
        {error, _} = Error ->
            Error;
        {ok, {Env, Tree}} ->
            {ok, {Env#{{Path, ArgDomains} => Tree}, set_domain(Term, domain(Tree))}}
    end.
%% True when History already contains a step with the same path and argument
%% domains (the middle element -- the context -- is ignored).
is_recursive({Path, _, Args}, History) ->
    SameCall = fun({HPath, _, HArgs}) -> HPath == Path andalso HArgs == Args end,
    lists:any(SameCall, History).
%% Arity of a `fun` AST term: the parameter count of its first clause, or 0
%% when it has no clauses.
fun_term_arity({'fun', _, []}) ->
    0;
fun_term_arity({'fun', _, [FirstClause | _]}) ->
    {clause, _, Params, _} = FirstClause,
    length(Params).
domain(Term) -> maps:get(domain, symbol:ctx(Term)).
%% Stores Domain under the `domain` key of the term's context map; every AST
%% tuple keeps its context in element 2.
set_domain(Term, Domain) ->
    Ctx = symbol:ctx(Term),
    setelement(2, Term, Ctx#{domain => Domain}).
%% Transposes a list of equal-length lists, e.g. [[a,b],[c,d]] -> [[a,c],[b,d]].
pivot([]) ->
    [];
pivot([FirstRow | _] = Rows) ->
    EmptyCols = [[] || _ <- FirstRow],
    RevCols = lists:foldl(fun(Row, Cols) ->
                                  [[Elem | Col] || {Elem, Col} <- zip(Row, Cols)]
                          end,
                          EmptyCols,
                          Rows),
    [lists:reverse(Col) || Col <- RevCols].
% Flatten sum domains: When we use for example `boolean` in a pattern, it will
% return the sum of `True` and `False`. We cannot match against both of these
% in one clause in erlang core, so instead we flatten sums so each domain in
% the sum gets its own clause.
%
% It might be obvious from the domain of the pattern argument that, say, only
% `boolean/True` is ever a possibility, in which case we only want to return
% the pattern clause for this case
%
% combinations/1 computes the cartesian product of a list of choice lists:
% combinations([[1,2],[3,4]]) -> [[1,3],[1,4],[2,3],[2,4]].
combinations([]) -> [[]];
combinations([Elements | Rest]) ->
    % Hoist the recursive call out of the comprehension: written as a
    % generator expression it would be re-evaluated once per element of
    % Elements, needlessly recomputing the (already exponential) tail product.
    Tails = combinations(Rest),
    [[E | Tail] || E <- Elements, Tail <- Tails].
% Patterns in kind can include sum types, e.g. multiple domain values. However
% in erlang core each pattern can only match a single value, so a term whose
% children carry several alternatives is multiplied out into one term per
% combination of child alternatives.
expand({dict, Ctx, ElemLists}) ->
    [{dict, Ctx, Elems} || Elems <- combinations(ElemLists)];
expand({list, Ctx, ElemLists}) ->
    [{list, Ctx, Elems} || Elems <- combinations(ElemLists)];
expand({sum, Ctx, ElemLists}) ->
    [{sum, Ctx, Elems} || Elems <- combinations(ElemLists)];
expand({tagged, Ctx, Path, Exprs}) ->
    [{tagged, Ctx, Path, Expr} || Expr <- Exprs];
expand({pair, Ctx, Keys, Vals}) ->
    [{pair, Ctx, Key, Val} || [Key, Val] <- combinations([Keys, Vals])];
expand({application, Ctx, Exprs, ArgLists}) ->
    [{application, Ctx, Expr, Args} || [Expr | Args] <- combinations([Exprs | ArgLists])];
expand({qualified_application, Ctx, ModulePath, Name, ArgLists}) ->
    [{qualified_application, Ctx, ModulePath, Name, Args} || Args <- combinations(ArgLists)];
expand({beam_application, Ctx, ModulePath, Name, ArgLists}) ->
    [{beam_application, Ctx, ModulePath, Name, Args} || Args <- combinations(ArgLists)];
expand(Other) ->
    [Other].
%% Unwraps a sum term into its member terms; any other term becomes a
%% singleton list.
expand_sum({sum, _Ctx, Members}) -> Members;
expand_sum(Other) -> [Other].
%% Collapses a list of terms into one: a single element passes through
%% unchanged; several become a `sum` term whose domain is the union of the
%% member domains (context borrowed from the first member).
sum([Single]) ->
    Single;
sum([First | _] = Elems) ->
    Domain = domain:union([domain(E) || E <- Elems]),
    set_domain({sum, symbol:ctx(First), Elems}, Domain).
env(Term) -> maps:get(env, symbol:ctx(Term), #{}).
%% Environment contributed by a term's immediate children; terms without
%% children (or of other shapes) contribute nothing.
child_env({Tag, _, Elems}) when Tag =:= dict; Tag =:= list ->
    utils:merge([env(E) || E <- Elems]);
child_env({tagged, _, _, Expr}) ->
    env(Expr);
child_env({pair, _, Key, Val}) ->
    maps:merge(env(Key), env(Val));
child_env(_) ->
    #{}.
%% Stores the merge of the children's envs, the given Env and the term's
%% existing env under the `env` key of the term context (later entries win).
set_env(Term, Env) ->
    Combined = utils:merge([child_env(Term), Env, env(Term)]),
    setelement(2, Term, maps:put(env, Combined, symbol:ctx(Term))).
%% Runs post/4 over every expansion of the pattern term, then flattens any
%% resulting sum terms, attaching each env to its tree.
pattern_post(Type, Scopes, History, Term) ->
    Posted = [post(Type, Scopes, History, T) || T <- expand(Term)],
    case error:collect(Posted) of
        {error, _} = Error ->
            Error;
        {ok, Results} ->
            {ok, [set_env(T, Env) || {Env, Tree} <- Results,
                                     T <- expand_sum(Tree)]}
    end.
%% Adapts post/4's {ok, {Env, Tree}} result to the three-element {ok, Env,
%% Tree} shape expected by the traversal; errors pass through unchanged.
expr_post(Type, Scopes, History, Term) ->
    case post(Type, Scopes, History, Term) of
        {error, _} = Error -> Error;
        {ok, {Env, Tree}} -> {ok, Env, Tree}
    end.
%% True when the beam function may be evaluated at compile time: it must be
%% whitelisted for import and not appear on the non-determinism blacklist.
%% A blacklist entry with an empty list bans the whole module.
allowed_beam_function(Module, Name) ->
    Blacklist = #{calendar => [],
                  rand => [],
                  random => [],
                  erlang => [date, localtime, now, time, time_offset,
                             timestamp, unique_integer, universaltime]},
    Blacklisted = case maps:find(Module, Blacklist) of
                      {ok, []} -> true;
                      {ok, Names} -> lists:member(Name, Names);
                      error -> false
                  end,
    import:is_whitelisted(Module, Name) andalso not Blacklisted.
-module(groklib).
-export([build_pattern/2,
match/3,
get_subpatterns/1,
get_pattern_metadata/1,
expand_pattern/2,
escape/1,
unescape/1]).
-type grok_metadata() :: [{string(), atom()}].
-type exp_pattern() :: {grok_metadata(), CompiledRegExp :: re:mp()}.
-export_type([exp_pattern/0, grok_metadata/0]).
-define(BACKSLASH, $\\).
%%====================================================================
%% API functions
%%--------------------------------------------------------------------
%% Expands the application pattern against the core pattern library, compiles
%% the resulting regular expression and pairs it with the pattern's metadata.
%%
-spec build_pattern(AppPattern :: iodata(), CorePatterns :: #{Name :: string() => Pattern :: string()}) -> exp_pattern().
build_pattern(AppPattern, CorePatterns) ->
    RegExp = expand_pattern(AppPattern, CorePatterns),
    Metadata = extract_metadata(AppPattern),
    {Metadata, compile_pattern(RegExp)}.
%%--------------------------------------------------------------------
%% Matches Text against the (compiled or textual) regular expression.  On
%% success the captured fields of the first global match are converted
%% according to Metadata; otherwise `nomatch` is returned.
%%
-spec match(Text :: unicode:chardata(), Metadata :: grok_metadata(), RE :: iodata() | re:mp()) -> nomatch | #{Name :: string => Value :: term()}.
match(Text, Metadata, RegExp) ->
    Subject = unicode:characters_to_binary(Text),
    Opts = [global, {capture, all_but_first, binary}],
    case re:run(Subject, RegExp, Opts) of
        {match, [FirstMatch | _]} ->
            convert_types(FirstMatch, Metadata);
        nomatch ->
            nomatch
    end.
%%--------------------------------------------------------------------
%% Returns the names of the grok subpatterns referenced by the pattern
%% (the second capture of each %{TYPE:name[:type]} occurrence).
%%
-spec get_subpatterns(Pattern :: iodata()) -> [string()].
get_subpatterns(Pattern) ->
    [Name || [_Type, Name | _] <- extract_names(Pattern)].
%%--------------------------------------------------------------------
%% Receives pattern
%% Returns complete metadata of the pattern: one {Name, Type} pair per named
%% capture, with Type 'undefined' when no explicit type was given.
%%
-spec get_pattern_metadata(Pattern :: iodata()) -> grok_metadata().
get_pattern_metadata(Pattern) ->
    extract_metadata(Pattern).
%%--------------------------------------------------------------------
%% Expands %{...} references in Pattern using the Patterns library, first
%% the named (capturing) form and then the anonymous form, repeating until
%% no %{TYPE} or %{TYPE:name} references remain.
%%
-spec expand_pattern(Pattern :: iodata(), Patterns :: #{Name :: string() => Pattern :: string()}) -> string().
expand_pattern(Pattern, Patterns) ->
    Expanded0 = expand_high_level(Pattern, Patterns),
    Expanded = expand_low_level(Expanded0, Patterns),
    case re:run(Expanded, "%{\\w+(:\\w+)?}", [ungreedy]) of
        nomatch ->
            Expanded;
        {match, _} ->
            expand_pattern(Expanded, Patterns)
    end.
%%--------------------------------------------------------------------
%% Doubles every backslash character in Str.
%%
-spec escape(Str :: string()) -> string().
escape(Str) ->
    Replaced = string:replace(Str, "\\", "\\\\", all),
    lists:flatten(Replaced).
%%--------------------------------------------------------------------
%% Collapses every doubled backslash in Str back to a single one.
%%
-spec unescape(Str :: string()) -> string().
unescape(Str) ->
    Replaced = string:replace(Str, "\\\\", "\\", all),
    lists:flatten(Replaced).
%%====================================================================
%% Private functions
%%====================================================================
%% Utility functions for pattern expansion and compilation
%%--------------------------------------------------------------------
%% Replaces one %{TYPE:name[:type]} reference at a time with a named capture
%% group "(?P<name>...)" built from the library entry for TYPE, recursing
%% until no such reference remains.
expand_high_level(Pattern, Patterns) ->
    case re:run(Pattern, "%{(\\w+):(\\w+)(?::\\w+)?}", [ungreedy, {capture, all, list}]) of
        nomatch ->
            Pattern;
        {match, [Whole, Type, Name | _]} ->
            Body = maps:get(Type, Patterns),
            Replacement = escape("(?P<" ++ Name ++ ">" ++ Body ++ ")"),
            Next = re:replace(Pattern, Whole, Replacement, [ungreedy, {return, list}]),
            expand_high_level(Next, Patterns)
    end.
%%--------------------------------------------------------------------
%% Replaces one anonymous %{TYPE} reference at a time with the (non-capturing)
%% library entry for TYPE, recursing until no such reference remains.
expand_low_level(Pattern, Patterns) ->
    case re:run(Pattern, "%{(\\w+)}", [ungreedy, {capture, all, list}]) of
        nomatch ->
            Pattern;
        {match, [Whole, Type | _]} ->
            Replacement = escape(maps:get(Type, Patterns)),
            Next = re:replace(Pattern, Whole, Replacement, [ungreedy, {return, list}]),
            expand_low_level(Next, Patterns)
    end.
%%--------------------------------------------------------------------
%% Compiles a textual regular expression (unicode mode) into a reusable match
%% program; crashes with badmatch on an invalid pattern.
compile_pattern(Pattern) ->
    {ok, Compiled} = re:compile(Pattern, [unicode]),
    Compiled.
%%====================================================================
%% Utility functions for metadata extraction
%%
%% Pairs every captured name with its declared type (or 'undefined' when the
%% pattern gives none).
extract_metadata(Pattern) ->
    Defaults = set_defaults(extract_names(Pattern)),
    merge_names_types(Defaults, extract_types(Pattern)).
%%--------------------------------------------------------------------
%% Lists every %{TYPE:name[:type]} occurrence as a [Type, Name] capture pair.
extract_names(Pattern) ->
    RE = "%{(\\w+):(\\w+)(?::\\w+)?}",
    Opts = [ungreedy, global, {capture, all_but_first, list}],
    case re:run(Pattern, RE, Opts) of
        {match, Found} -> Found;
        nomatch -> []
    end.
%%--------------------------------------------------------------------
%% Pairs every captured name with the default type 'undefined'.
set_defaults(Names) ->
    lists:map(fun([_Type, Name | _]) -> {Name, undefined} end, Names).
%%--------------------------------------------------------------------
%% Lists the explicitly typed captures %{TYPE:name:type} as {Name, TypeAtom}.
extract_types(Pattern) ->
    RE = "%{(\\w+):(\\w+):(\\w+)}",
    Opts = [ungreedy, global, {capture, all_but_first, list}],
    case re:run(Pattern, RE, Opts) of
        {match, Found} ->
            [{Name, list_to_atom(Type)} || [_Kind, Name, Type | _] <- Found];
        nomatch ->
            []
    end.
%%--------------------------------------------------------------------
%% For every {Name, Default} pair, prefers the explicitly declared type from
%% Types when one exists; preserves the original name order.
merge_names_types(Names, Types) ->
    merge_names_types(Names, Types, []).

merge_names_types([], _Types, Acc) ->
    lists:reverse(Acc);
merge_names_types([{Name, Default} | Rest], Types, Acc) ->
    Type = case get_type(Name, Types) of
               undefined -> Default;
               Found -> Found
           end,
    merge_names_types(Rest, Types, [{Name, Type} | Acc]).
%%--------------------------------------------------------------------
%% First type registered for Name in the type list, or 'undefined' when
%% absent.
get_type(Name, Types) ->
    case lists:keyfind(Name, 1, Types) of
        {_, Type} -> Type;
        false -> undefined
    end.
%%====================================================================
%% Utility functions for type conversion
%%--------------------------------------------------------------------
%% Builds a map of field name to converted value; Data and Metadata must have
%% the same length (a mismatch crashes with function_clause).
convert_types(Data, Metadata) ->
    convert_types(Data, Metadata, #{}).

convert_types([], [], Acc) ->
    Acc;
convert_types([Value | Values], [{Name, Type} | Meta], Acc) ->
    convert_types(Values, Meta, Acc#{Name => convert_type(Type, Value)}).
%%--------------------------------------------------------------------
%% match captures binaries, so the input value is always a binary.  Binaries
%% are preferred because our groklib clients require them in most cases;
%% unknown types pass the raw binary through unchanged.
%%
convert_type(binary, Val) ->
    Val;
convert_type(int, Val) ->
    binary_to_integer(Val);
convert_type(float, Val) ->
    binary_to_float(Val);
convert_type(list, Val) ->
    unicode:characters_to_list(Val);
convert_type(_Other, Val) ->
    Val.
%% @author Couchbase <<EMAIL>>
%% @copyright 2011-Present Couchbase, Inc.
%%
%% Use of this software is governed by the Business Source License included
%% in the file licenses/BSL-Couchbase.txt. As of the Change Date specified
%% in that file, in accordance with the Business Source License, use of this
%% software will be governed by the Apache License, Version 2.0, included in
%% the file licenses/APL2.txt.
%%
%% @doc If time required to replicate backlog of un-replicated yet items
%% (drain time) is less than 2 seconds the failover is considered safe (green).
%% Otherwise, if the drain time is greater than 2 second the level is yellow.
%%
%% We also consider the level to be yellow if the drain time is higher than 1s
%% and has spiked higher than 2s at least once for the most recent minute
%% (in order to avoid too frequent changing of levels).
%%
%% If our information is too stale (> 2 stats collection intervals), then we
%% respond with 'stale' level.
-module(failover_safeness_level).
-include("ns_stats.hrl").
-export([build_local_safeness_info/1,
extract_replication_uptodateness/4]).
%% Maps the bucket's reported failover safeness sample to a level: stale when
%% the sample is older than two collection intervals, green for value 1,
%% yellow for value 0, otherwise unknown (logged).
-spec get_value(bucket_name()) ->
          stale | unknown | green | yellow.
get_value(BucketName) ->
    case stats_interface:failover_safeness_level(BucketName) of
        {ok, {LastUpdate, UpdateInterval, Value}} ->
            Age = erlang:system_time(second) - LastUpdate,
            if
                Age >= 2 * UpdateInterval ->
                    stale;
                Value == 1 ->
                    green;
                Value == 0 ->
                    yellow;
                true ->
                    ?log_error("Unexpected failover_safeness_level(~p): ~p",
                               [BucketName, Value]),
                    unknown
            end;
        {error, not_available} ->
            stale;
        {error, _} ->
            unknown
    end.
%% Builds local replication safeness information. ns_heart normally broadcasts
%% it with heartbeats; the aggregate from all nodes is later used to estimate
%% the failover safeness level of a particular node.
build_local_safeness_info(BucketNames) ->
    Safeness = [{Name, get_value(Name)} || Name <- BucketNames],
    %% Per bucket: [{SrcNode, HashOfReplicatedVBuckets}] for every incoming
    %% replication into this node.
    IncomingConfHashes =
        [{BucketName,
          [{SrcNode, erlang:phash2(VBuckets)} ||
              {SrcNode, _DstNode, VBuckets} <-
                  janitor_agent:this_node_replicator_triples(BucketName)]}
         || BucketName <- BucketNames],
    [{outgoing_replications_safeness_level, Safeness},
     {incoming_replications_conf_hashes, IncomingConfHashes}].
%% Returns a 0.0..1.0 indication of whether it's safe to fail over the given
%% node w.r.t. the given bucket, based on information that every replica node
%% produced with build_local_safeness_info/1.  The score is 0.0 unless all
%% expected outgoing replications are in place; otherwise it reflects the
%% node's own reported safeness level (green 1.0, yellow 0.5, else 0.0).
extract_replication_uptodateness(BucketName, BucketConfig, Node, NodeStatuses) ->
    Map = proplists:get_value(map, BucketConfig, []),
    case outgoing_replications_started(BucketName, Map, Node, NodeStatuses) of
        false ->
            0.0;
        true ->
            NodeInfo = ns_doctor:get_node(Node, NodeStatuses),
            Levels = proplists:get_value(outgoing_replications_safeness_level,
                                         NodeInfo, []),
            case proplists:get_value(BucketName, Levels, unknown) of
                green -> 1.0;
                yellow -> 0.5;
                stale -> 0.0;
                unknown -> 0.0
            end
    end.
%% True when every first replica of the chains mastered by Node reports an
%% incoming replication from Node covering exactly the expected vbuckets
%% (compared by phash2 of the vbucket list).
outgoing_replications_started(BucketName, Map, Node, NodeStatuses) ->
    %% NOTE: only first replicas matter, i.e. chains where Node is the master,
    %% because that is what defines failover safeness.
    Collect = fun([Master, Replica | _], Acc) when Master =:= Node ->
                      sets:add_element(Replica, Acc);
                 (_, Acc) ->
                      Acc
              end,
    ReplicaNodes = lists:foldl(Collect, sets:new(), Map),
    ReplicaOk =
        fun(ReplicaNode) ->
                %% NOTE: this includes all replicated vbuckets, not just
                %% active vbuckets.
                Expected = ns_bucket:replicated_vbuckets(Map, Node, ReplicaNode),
                ReplicaInfo = ns_doctor:get_node(ReplicaNode, NodeStatuses),
                AllConfs = proplists:get_value(incoming_replications_conf_hashes,
                                               ReplicaInfo, []),
                BucketConfs = proplists:get_value(BucketName, AllConfs, []),
                Hash = proplists:get_value(Node, BucketConfs),
                erlang:phash2(Expected) =:= Hash
        end,
    sets:fold(fun(ReplicaNode, Ok) -> Ok andalso ReplicaOk(ReplicaNode) end,
              true, ReplicaNodes).
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2019, <NAME>
%%% @doc Barrier Synchronization using the Butterfly Barrier.
%%%
%%% A barrier allows multiple processes working on a computation to
%%% sychronize with each other before proceeding to the next stage.
%%%
%%% The butterfly barrier is an efficient method of synchronization
%%% where each process only needs to synchronize with `log2(N)' other
%%% processes for a group of `N' processes.
%%%
%%% A worker process, when ready, should call `sync/3' to
%%% synchronize. Return from the function is a confirmation that ALL
%%% the other processes are ready too.
%%%
%%% @end
%%% Created : 15 Oct 2019 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(barrier_0).
-export([sync/3, reset/1, run_N/2]).
%%--------------------------------------------------------------------
%% @doc Sync function to be called by each process.
%%
%% Once the `sync/3' return, we can be sure that all the other partner
%% processes have reached the barrier.
%%
%% All the partner processes should use the same `Tag'. This can be
%% any Erlang `term'.
%%
%% Each partner process is identified by a unique rank, `My_proc'. The
%% rank starts from `0' and goes up to `N_procs-1'.
%%
%% The synchronization takes `log2' steps. This is rounded to the next
%% integer, if `N_procs' is not a power of 2.
%%
%% @end
%%--------------------------------------------------------------------
-spec sync(term(), integer(), integer()) -> ok.
sync(Tag, MyProc, NProcs) ->
    logger:notice("Proc ~p: sync started", [MyProc]),
    %% Announce readiness, then run ceil(log2(N)) pairwise exchange steps.
    espace:out({Tag, MyProc}),
    NSteps = trunc(math:ceil(math:log2(NProcs))),
    sync(Tag, MyProc, NProcs, NSteps, 1),
    logger:notice("Proc ~p: sync completed", [MyProc]).
%%--------------------------------------------------------------------
%% @doc Wait for each of the `N_procs' buddies to become ready.
%%
%% Each partner process is identified by a unique rank in the range
%% `0' to `N_procs-1'.
%%
%% The buddies of a process are identified by flipping the individual
%% bits of the process number during each step, starting with the
%% least significant bit.
%%
%% For example, in a group of 8 processes, process `0' (`2#000') will
%% in turn check the status of `2#001' (1), `2#010' (2) and `2#100'
%% (4). Likewise process `3' (`2#011') will check `2#010', `2#001' and
%% `2#111'.
%%
%% If the number of processes is not a power of 2, then a real process
%% skips checking the buddy at the non-existent rank.
%%
%% @end
%%--------------------------------------------------------------------
-spec sync(term(), integer(), integer(), integer(), integer()) -> ok.
sync(_Tag, _MyProc, _NProcs, 0, _Mask) ->
    ok;
sync(Tag, MyProc, NProcs, StepsLeft, Mask) ->
    %% The buddy at each step is found by flipping one bit of our rank; skip
    %% non-existent ranks when NProcs is not a power of two.
    Buddy = MyProc bxor Mask,
    case Buddy < NProcs of
        true -> espace:rd({Tag, Buddy});
        false -> ok
    end,
    sync(Tag, MyProc, NProcs, StepsLeft - 1, Mask bsl 1).
%%--------------------------------------------------------------------
%% @doc Reset the barrier for the given `Tag'.
%%
%% All the `{Tag, Proc_num}' tuples will be removed from the tuple
%% space.
%%
%% @end
%%--------------------------------------------------------------------
-spec reset(term()) -> ok.
reset(Tag) ->
    %% Keep withdrawing {Tag, _} tuples until none remain.
    case espace:inp({Tag, '_'}) of
        nomatch ->
            ok;
        _Removed ->
            reset(Tag)
    end.
%%--------------------------------------------------------------------
%% @doc simple function for testing and troubleshooting.
%%
%% Creates `N_procs' `eval's, which in turn will synchronize with the
%% rest.
%%
%% @end
%%--------------------------------------------------------------------
%% Clear any leftover barrier state, then start one eval-based worker
%% per rank in 0..N_procs-1; each worker runs run_1/3.
-spec run_N(term(), integer()) -> ok.
run_N(Tag, Nprocs) ->
    reset(Tag),
    _ = [espace:eval({done, I, {fun run_1/3, [Tag, I, Nprocs]}})
         || I <- lists:seq(0, Nprocs - 1)],
    ok.
%%--------------------------------------------------------------------
%% @doc A single process with rank `Proc_N' to sync with the rest of
%% the `N_procs' partners.
%%
%% @end
%%--------------------------------------------------------------------
%% Body of a single worker: log, run the barrier, log again.
%% (Fix: removed stray non-code text that had been fused onto the
%% final line, which broke compilation.)
-spec run_1(term(), integer(), integer()) -> ok.
run_1(Tag, Proc_N, N_procs) ->
    logger:notice("Proc ~p started, pre-sync.", [Proc_N]),
    sync(Tag, Proc_N, N_procs),
    logger:notice("Proc ~p post-sync, stopped", [Proc_N]),
    ok.
%% @author <NAME>
%% @docfile "author.edoc"
%% @doc Counts the number of different characters in the specified file using
%% concurrency.
%% @since 1.0.0
%% @version 1.0.0
-module(ccharcount).
-export([load/1, go/2]).
%% @spec load(Filename) -> list()
%% @doc Reads a file and returns an analysis of its letter frequency.
%% @param Filename The name of the file to analyze.
%% @returns A list of tuples.
load(Filename) ->
    {ok, Bin} = file:read_file(Filename),
    List = binary_to_list(Bin),
    %% Aim for ~20 chunks, but never allow a chunk length of 0: a zero
    %% length would make split/2 recurse forever on inputs shorter than
    %% ten characters (round(Len/20) rounds down to 0).
    Length = max(1, round(length(List) / 20)),
    Ls = string:lowercase(List),
    Sl = split(Ls, Length),
    io:fwrite("Loaded and Split~n"),
    Result = countsplit(Sl, []),
    Result.
%% @spec countsplit(ChunksList, PidList) -> list()
%% @doc Analyzes all the given chunks using concurrency, fetches back the result
%% and returns it.
%% @param Chunks The list to analyse. Each item is a string.
%% @param PIDs The PIDs list of the created processes (which is only used to
%% determine the number of processes).
%% @returns The results as a list of tuples.
countsplit([], PIDs) ->
    receive_results(PIDs, []);
countsplit([H|T], PIDs) ->
    NewPid = spawn(ccharcount, go, [H, self()]),
    %% Prepend instead of the former quadratic `PIDs ++ [NewPid]';
    %% receive_results/2 only uses the list's length, so order is irrelevant.
    countsplit(T, [NewPid | PIDs]).
%% Collects one result message per spawned worker (the PIDs list is
%% consumed purely as a counter) and folds the results together with
%% join/2. Arrival order does not matter because every worker reports
%% counts for the same alphabet in the same order.
receive_results([], Results) ->
    Results;
receive_results([_|T], Results) ->
    receive
        {NewResult} -> receive_results(T, join(Results, NewResult));
        %% NOTE(review): any unexpected message aborts collection and
        %% discards all results gathered so far — confirm this is intended;
        %% it also leaves the remaining workers' messages in the mailbox.
        _Other -> {error, unknown}
    end.
%% Element-wise merge of two [{Label, Count}] lists of equal length:
%% adds counts positionally, keeping the label from the second list.
%% An empty first list is the identity.
%% (Fix: cons onto the recursive result instead of the former
%% single-element `++' append.)
join([], []) ->
    [];
join([], R) ->
    R;
join([H1|T1], [H2|T2]) ->
    {_, N} = H1,
    {C1, N1} = H2,
    [{C1, N + N1} | join(T1, T2)].
%% @spec split(String, Length) -> list()
%% @doc Splits a string into several string of Length characters; the
%% final chunk may be shorter.
split([], _Length) ->
    [];
split(List, Length) ->
    Head = string:slice(List, 0, Length),
    Rest = case length(List) > Length of
               true -> string:slice(List, Length, length(List));
               false -> []
           end,
    [Head | split(Rest, Length)].
%% Tail-recursive occurrence counter: N plus the number of elements of
%% the list equal (==) to Ch.
count(_Ch, [], N) ->
    N;
count(Ch, [H|T], N) when Ch == H ->
    count(Ch, T, N + 1);
count(Ch, [_H|T], N) ->
    count(Ch, T, N).
%% @spec go(L, PID) -> none()
%% @doc Processes a chunk and sends the result to the parent process.
%% @param L The chunk to analyze.
%% @param PID The PID of the parent process.
go(L, PID) ->
    %% lists:seq($a, $z) builds the same a..z list the literal did.
    Alphabet = lists:seq($a, $z),
    PID ! {rgo(Alphabet, L, [])}.
%% Builds the [{Letter, Count}] result for every character of the first
%% argument, in that order. (Fix: removed stray non-code text that had
%% been fused onto the final line, which broke compilation.)
rgo([H|T], L, Result) ->
    N = count(H, L, 0),
    Result2 = Result ++ [{[H], N}],
    rgo(T, L, Result2);
rgo([], _, Result) ->
    Result.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% This module tests gentlerain read, write and snapshot read operations
-module(gr_SUITE).
-compile({parse_transform, lager_transform}).
%% common_test callbacks
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
%% tests
-export([read_write_test/1,
read_multiple_test/1,
replication_test/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("kernel/include/inet.hrl").
-define(BUCKET, gr_bucket).
%% Starts the common-test clusters and forces the gentlerain (gr)
%% transaction protocol on every node before any test case runs; the
%% flattened node list is stored in the returned config under 'nodes'.
init_per_suite(Config) ->
    ct:print("Starting test suite ~p", [?MODULE]),
    test_utils:at_init_testsuite(),
    Clusters = test_utils:set_up_clusters_common(Config),
    Nodes = lists:flatten(Clusters),
    %Ensure that the gentlerain protocol is used
    test_utils:pmap(fun(Node) ->
                            rpc:call(Node, application, set_env,
                                     [antidote, txn_prot, gr]) end, Nodes),
    %Check that indeed gentlerain is running
    {ok, gr} = rpc:call(hd(Nodes), application, get_env, [antidote, txn_prot]),
    %% Check whether heartbeats from all replicas has received
    %% After this stable snapshot vectorclock contain entry for all DCs
    %% This is required for correct functioning of the protocol
    %rt:wait_until(hd(Nodes1), fun wait_init:check_replication_complete/1),
    %rt:wait_until(hd(Nodes2), fun wait_init:check_replication_complete/1),
    [{nodes, Nodes}|Config].
%% No suite-level teardown needed; hand the config back unchanged.
end_per_suite(Config) ->
    Config.

%% No per-testcase setup needed.
init_per_testcase(_Case, Config) ->
    Config.

%% Log the name of each completed test case.
end_per_testcase(Name, _) ->
    ct:print("[ OK ] ~p", [Name]),
    ok.

%% Test cases run by this suite.
all() ->
    [read_write_test, read_multiple_test, replication_test].
%% Basic read/write through the gentlerain protocol: a fresh counter
%% reads as 0 and reads back 1 after a single increment.
%% NOTE(review): the ?BUCKET macro defined above is unused; all tests
%% use the bare atom 'bucket' instead — confirm which is intended.
read_write_test(Config) ->
    Nodes = proplists:get_value(nodes, Config),
    Node = hd(Nodes),
    Bound_object = {gr_rw_key, antidote_crdt_counter_pn, bucket},
    {ok, [0], _} = rpc:call(Node, antidote, read_objects, [ignore, [], [Bound_object]]),
    {ok, _} = rpc:call(Node, antidote, update_objects, [ignore, [], [{Bound_object, increment, 1}]]),
    {ok, Res, _} = rpc:call(Node, antidote, read_objects, [ignore, [], [Bound_object]]),
    ?assertMatch([1], Res).

%% Reads two counters in one snapshot taken at the commit time of the
%% second update; both increments must be visible.
read_multiple_test(Config) ->
    Nodes = proplists:get_value(nodes, Config),
    Node = hd(Nodes),
    O1 = {gr_read_mult_key1, antidote_crdt_counter_pn, bucket},
    {ok, _} = rpc:call(Node, antidote, update_objects, [ignore, [], [{O1, increment, 1}]]),
    O2 = {o2, antidote_crdt_counter_pn, bucket},
    {ok, CT} = rpc:call(Node, antidote, update_objects, [ignore, [], [{O2, increment, 1}]]),
    %% NOTE(review): the properties argument is {} here but [] in the
    %% other reads — confirm antidote:read_objects accepts both forms.
    {ok, Res, _} = rpc:call(Node, antidote, read_objects, [CT, {}, [O1, O2]]),
    ?assertMatch([1, 1], Res).

%% Writes to two different DCs; a snapshot at the second commit time
%% must include the first write as well (causal dependency CT1 < CT2).
replication_test(Config) ->
    [Node1, Node2 | _] = proplists:get_value(nodes, Config),
    O1 = {gr_repl_key1, antidote_crdt_counter_pn, bucket},
    O2 = {gr_repl_key2, antidote_crdt_counter_pn, bucket},
    %% Write to DC1
    {ok, _CT1} = rpc:call(Node1, antidote, update_objects, [ignore, [], [{O1, increment, 1}]]),
    %% Write to DC2
    {ok, CT2} = rpc:call(Node2, antidote, update_objects, [ignore, [], [{O2, increment, 1}]]),
    %% Read r1 from DC2, with dependency to first write
    {ok, [Res1], _} = rpc:call(Node2, antidote, read_objects, [ignore, [], [O1]]),
    lager:info("Read r1 from DC2: ~p", [Res1]), %% Result could be 0 or 1, there is no guarantee
    {ok, Res2, _} = rpc:call(Node2, antidote, read_objects, [CT2, {}, [O1, O2]]),
    %% Since CT1 < CT2, any snapshot that includes second write must include first write
    ?assertMatch([1, 1], Res2).
-module(any).
-compile(no_auto_import).
-include_lib("eunit/include/eunit.hrl").
-export([from/1, unsafeCoerce/1, string/1, int/1, float/1, bool/1, thunk/1, list/2, tuple/1, field/2]).
%% NOTE(review): this module appears to be generated Gleam stdlib output
%% (path gleam_stdlib/gen); the functions below are thin wrappers around
%% gleam__stdlib decode helpers, so they are documented, not restyled.

%% Module used for list operations below.
list_module() ->
    list.

%% Module used for tuple operations below.
tuple_module() ->
    tuple.

%% Wraps an arbitrary term as an opaque `any' value (runtime identity).
from(A) ->
    gleam__stdlib:identity(A).

%% Unchecked cast of an `any' value back to a concrete term.
unsafeCoerce(A) ->
    gleam__stdlib:identity(A).

%% Attempts to decode an `any' as a (binary) string.
string(A) ->
    gleam__stdlib:decode_string(A).

-ifdef(TEST).
%% Decoding succeeds for binaries and fails with a message otherwise.
string_test() ->
    expect:equal(string(from(<<"">>)), {ok, <<"">>}),
    expect:equal(string(from(<<"Hello">>)), {ok, <<"Hello">>}),
    expect:equal(string(from(1)), {error, <<"Expected a String, got `1`">>}),
    expect:equal(string(from([])), {error, <<"Expected a String, got `[]`">>}).
-endif.

%% Attempts to decode an `any' as an integer.
int(A) ->
    gleam__stdlib:decode_int(A).

-ifdef(TEST).
%% Integers decode; floats and lists are rejected.
int_test() ->
    expect:equal(int(from(1)), {ok, 1}),
    expect:equal(int(from(2)), {ok, 2}),
    expect:equal(int(from(1.0)), {error, <<"Expected an Int, got `1.0`">>}),
    expect:equal(int(from([])), {error, <<"Expected an Int, got `[]`">>}).
-endif.

%% Attempts to decode an `any' as a float.
float(A) ->
    gleam__stdlib:decode_float(A).

-ifdef(TEST).
%% Floats decode; integers and lists are rejected.
float_test() ->
    expect:equal(float(from(1.0)), {ok, 1.0}),
    expect:equal(float(from(2.2)), {ok, 2.2}),
    expect:equal(float(from(1)), {error, <<"Expected a Float, got `1`">>}),
    expect:equal(float(from([])), {error, <<"Expected a Float, got `[]`">>}).
-endif.

%% Attempts to decode an `any' as a boolean.
bool(A) ->
    gleam__stdlib:decode_bool(A).

-ifdef(TEST).
%% Booleans decode; other terms are rejected.
bool_test() ->
    expect:equal(bool(from(true)), {ok, true}),
    expect:equal(bool(from(false)), {ok, false}),
    expect:equal(bool(from(1)), {error, <<"Expected a Bool, got `1`">>}),
    expect:equal(bool(from([])), {error, <<"Expected a Bool, got `[]`">>}).
-endif.

%% Attempts to decode an `any' as a zero-arity function.
thunk(A) ->
    gleam__stdlib:decode_thunk(A).

-ifdef(TEST).
%% Only arity-0 funs decode; calling the decoded fun yields its result.
thunk_test() ->
    expect:is_ok(thunk(from(fun() -> 1 end))),
    expect:equal(result:map(thunk(from(fun() -> 1 end)), fun(F) -> F() end),
                 {ok, from(1)}),
    expect:is_error(thunk(from(fun(X) -> X end))),
    expect:is_error(thunk(from(1))),
    expect:is_error(thunk(from([]))).
-endif.

%% Decodes an `any' as a list of still-undecoded `any' elements.
list_any(A) ->
    gleam__stdlib:decode_list(A).

%% Decodes an `any' as a list, decoding every element with Decode.
list(Any, Decode) ->
    result:then(list_any(Any),
                fun(Capture1) ->
                    (list_module()):traverse(Capture1, Decode)
                end).

-ifdef(TEST).
%% Element decoding failures propagate out of the list decoder.
list_test() ->
    expect:equal(list(from([]), fun string/1), {ok, []}),
    expect:equal(list(from([]), fun int/1), {ok, []}),
    expect:equal(list(from([1, 2, 3]), fun int/1), {ok, [1, 2, 3]}),
    expect:equal(list(from([[1], [2], [3]]),
                      fun(Capture1) -> list(Capture1, fun int/1) end),
                 {ok, [[1], [2], [3]]}),
    expect:is_error(list(from(1), fun string/1)),
    expect:is_error(list(from(1.0), fun int/1)),
    expect:is_error(list(from([<<"">>]), fun int/1)),
    expect:is_error(list(from([from(1), from(<<"not an int">>)]), fun int/1)).
-endif.

%% Decodes an `any' as a 2-tuple of still-undecoded `any' values.
tuple(A) ->
    gleam__stdlib:decode_tuple(A).

-ifdef(TEST).
%% Only 2-tuples decode; elements can then be decoded individually.
tuple_test() ->
    expect:equal(tuple(from({1, []})), {ok, {from(1), from([])}}),
    expect:equal(tuple(from({<<"ok">>, <<"ok">>})),
                 {ok, {from(<<"ok">>), from(<<"ok">>)}}),
    expect:is_error(tuple(from({1}))),
    expect:is_error(tuple(from({1, 2, 3}))),
    expect:equal(result:then(result:then(tuple(from({1, 2.0})),
                                         fun(X) ->
                                             result:map(int((tuple_module()):first(X)),
                                                        fun(F) ->
                                                            {F,
                                                             (tuple_module()):second(X)}
                                                        end)
                                         end),
                             fun(X) ->
                                 result:map(float((tuple_module()):second(X)),
                                            fun(F) ->
                                                {(tuple_module()):first(X), F}
                                            end)
                             end),
                 {ok, {1, 2.0}}).
-endif.

%% Looks up field B inside the `any' value A (e.g. a map key).
field(A, B) ->
    gleam__stdlib:decode_field(A, B).

-ifdef(TEST).
%% Present keys decode to their (any-wrapped) values; absent keys error.
field_test() ->
    {ok, OkAtom} = atom:from_string(<<"ok">>),
    expect:equal(field(from(#{}#{ok => 1}), OkAtom), {ok, from(1)}),
    expect:equal(field(from(#{}#{ok => 3}#{earlier => 2}), OkAtom),
                 {ok, from(3)}),
    expect:is_error(field(from(#{}), OkAtom)),
    expect:is_error(field(from(1), OkAtom)),
    expect:is_error(field(from([]), [])).
-endif.
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% Segment trees, with a delete operation.
%%%
%%% Keys are the (0-based) indices into the list passed to build/1.
%%%
%%% Range bounds are inclusive.
%%%
-module(hipe_segment_trees).
-export([build/1, intersect/2, delete/4]).
-record(segment_tree, {
lo :: integer(),
hi :: integer(),
root :: tnode()
}).
%% X =< Mid belongs in Left
-define(NODE(Left, Right, Mid, Segments), {Left, Right, Mid, Segments}).
-define(POINT_LEAF(Val), Val).
-define(RANGE_LEAF(Lo, Hi), {Lo, Hi}).
-type segments() :: [non_neg_integer()].
-type leaf() :: segments().
-type tnode() :: ?NODE(tnode(), tnode(), integer(), segments()) | leaf().
-opaque tree() :: #segment_tree{} | nil.
-export_type([tree/0]).
%% @doc Builds a segment tree of the given intervals.
-spec build([{integer(), integer()}]) -> tree().
build(ListOfIntervals) ->
    %% The distinct endpoints of all non-empty intervals determine the
    %% elementary segments the tree is built over.
    case
    lists:usort(
      lists:append(
	[[Lo, Hi] || {Lo, Hi} <- ListOfIntervals, Lo =< Hi]))
  of
    [] -> nil;
    Endpoints ->
      Tree0 = empty_tree_from_endpoints(Endpoints),
      [Lo|_] = Endpoints,
      Hi = lists:last(Endpoints),
      Tree1 = insert_intervals(0, ListOfIntervals, Lo, Hi, Tree0),
      Tree = squash_empty_subtrees(Tree1),
      #segment_tree{lo=Lo, hi=Hi, root=Tree}
  end.

%% Builds a balanced skeleton tree (no segments attached yet) over the
%% elementary segments implied by the sorted endpoint list.
empty_tree_from_endpoints(Endpoints) ->
  Leaves = leaves(Endpoints),
  {T, [], _, _} = balanced_bst(Leaves, length(Leaves)),
  T.

%% Elementary segments: a point leaf per endpoint plus a range leaf for
%% every non-empty gap between consecutive endpoints.
leaves([Endpoint]) -> [?POINT_LEAF(Endpoint)];
leaves([A | [B|_] = Tail]) ->
  %% We omit the range leaf if it's empty
  case A<B-1 of
    true -> [?POINT_LEAF(A),?RANGE_LEAF(A+1,B-1) | leaves(Tail)];
    false -> [?POINT_LEAF(A) | leaves(Tail)]
  end.

%% Builds a balanced tree over the first S leaves of L; returns the
%% subtree, the unconsumed leaves, and the lowest/highest key covered.
%% A single leaf becomes the empty node [] (its bounds are returned).
balanced_bst(L, S) when S > 1 ->
  Sm = S, %% - 1
  S2 = Sm div 2,
  S1 = Sm - S2,
  {Left, L1, LeftLo, LeftHi} = balanced_bst(L, S1),
  {Right, L2, _, RightHi} = balanced_bst(L1, S2),
  T = ?NODE(Left, Right, LeftHi, []),
  {T, L2, LeftLo, RightHi};
balanced_bst([?RANGE_LEAF(Lo, Hi) | L], 1) ->
  {[], L, Lo, Hi};
balanced_bst([?POINT_LEAF(Val) | L], 1) ->
  {[], L, Val, Val}.
%% Attaches each interval in turn, numbering them from Ix upward.
insert_intervals(_Ix, [], _Lo, _Hi, Tree) -> Tree;
insert_intervals(Ix, [Int|Ints], Lo, Hi, Tree) ->
  insert_intervals(Ix + 1, Ints, Lo, Hi,
		   insert_interval(Ix, Int, Lo, Hi, Tree)).

%% Standard segment-tree insertion: when the node's range [NLo,NHi] is
%% fully covered by the interval, record index I at this node; otherwise
%% recurse into the children the interval overlaps. Leaves collect
%% indices directly in their segment list.
insert_interval(_, {Lo, Hi}, _, _, Node) when Lo > Hi -> Node;
insert_interval(I, Int={Lo,Hi}, NLo, NHi,
		?NODE(Left0, Right0, Mid, Segments)) ->
  if Lo =< NLo, NHi =< Hi ->
      ?NODE(Left0, Right0, Mid, [I|Segments]);
     true ->
      Left = case intervals_intersect(Lo, Hi, NLo, Mid) of
	       true -> insert_interval(I, Int, NLo, Mid, Left0);
	       false -> Left0
	     end,
      Right = case intervals_intersect(Lo, Hi, Mid+1, NHi) of
		true -> insert_interval(I, Int, Mid+1, NHi, Right0);
		false -> Right0
	      end,
      ?NODE(Left, Right, Mid, Segments)
  end;
insert_interval(I, {_Lo,_Hi}, _NLo, _NHi, Leaf) -> [I|Leaf].

%% True iff both intervals are non-empty and they overlap.
intervals_intersect(ALo, AHi, BLo, BHi) ->
  (ALo =< AHi) andalso (BLo =< BHi) %% both nonempty
    andalso nonempty_intervals_intersect(ALo, AHi, BLo, BHi).

%% Purely optional optimisation
%% Collapses subtrees whose children are both empty leaves into a plain
%% leaf holding just the segment list; query results are unchanged.
squash_empty_subtrees(?NODE(Left0, Right0, Mid, Segs)) ->
  build_squash_node(squash_empty_subtrees(Left0),
		    squash_empty_subtrees(Right0),
		    Mid, Segs);
squash_empty_subtrees(Leaf) -> Leaf.

build_squash_node([], [], _, Segs) -> Segs;
build_squash_node(Left, Right, Mid, Segs) ->
  ?NODE(Left, Right, Mid, Segs).
%% @doc Returns the indices of the intervals in the tree that contains Point.
-spec intersect(integer(), tree()) -> [non_neg_integer()].
intersect(Point, nil) when is_integer(Point) ->
    [];
intersect(Point, #segment_tree{lo=Lo, hi=Hi, root=Root})
  when is_integer(Point) ->
    %% Points outside the covered range can match no interval.
    case Point >= Lo andalso Point =< Hi of
        true  -> intersect_1(Point, Root, []);
        false -> []
    end.
%% Walks the root-to-leaf path for Point, collecting the segment lists
%% of every node visited (keys =< Mid live in the left child).
intersect_1(Point, ?NODE(Left, Right, Mid, Segs), Acc0) ->
    Next = case Point =< Mid of
               true  -> Left;
               false -> Right
           end,
    intersect_1(Point, Next, Segs ++ Acc0);
intersect_1(_Point, LeafSegs, Acc) ->
    LeafSegs ++ Acc.
%% @doc Deletes the interval {Lo, Hi}, which had index Index in the list passed
%% to build/1.
%% Empty intervals and intervals entirely outside the tree's range are
%% no-ops (nothing was inserted for them).
-spec delete(non_neg_integer(), integer(), integer(), tree()) -> tree().
delete(_, _, _, nil) -> nil;
delete(_, Lo, Hi, Tree) when Lo > Hi -> Tree;
delete(_, Lo, Hi, Tree = #segment_tree{lo=TLo, hi=THi})
  when Hi < TLo; Lo > THi -> Tree;
delete(Index, Lo, Hi, Tree = #segment_tree{lo=TLo, hi=THi, root=Root0})
  when is_integer(Lo), is_integer(Hi) ->
  Root = delete_1(Index, Lo, Hi, TLo, THi, Root0),
  Tree#segment_tree{root=Root}.

%% Mirror of insert_interval/5: revisits exactly the nodes the insertion
%% touched and removes index I from their segment lists.
delete_1(I, Lo, Hi, NLo, NHi, ?NODE(Left0, Right0, Mid, Segments)) ->
  if Lo =< NLo, NHi =< Hi ->
      ?NODE(Left0, Right0, Mid, delete_2(Segments, I));
     true ->
      Left = case nonempty_intervals_intersect(Lo, Hi, NLo, Mid) of
	       true -> delete_1(I, Lo, Hi, NLo, Mid, Left0);
	       false -> Left0
	     end,
      Right = case nonempty_intervals_intersect(Lo, Hi, Mid+1, NHi) of
		true -> delete_1(I, Lo, Hi, Mid+1, NHi, Right0);
		false -> Right0
	      end,
      %% We could do build_squash_node here, is it worth it?
      ?NODE(Left, Right, Mid, Segments)
  end;
delete_1(I, _Lo, _Hi, _NLo, _NHi, Leaf) -> delete_2(Leaf, I).
%% Removes the first occurrence of I from a segment list; I must be
%% present (a missing index would crash, signalling an internal bug).
delete_2([I | Rest], I) -> Rest;
delete_2([Seg | Rest], I) -> [Seg | delete_2(Rest, I)].
-compile({inline,nonempty_intervals_intersect/4}).
%% True iff the non-empty intervals [ALo,AHi] and [BLo,BHi] overlap.
%% (Fix: removed stray non-code text that had been fused onto the final
%% line, which broke compilation.)
nonempty_intervals_intersect(ALo, AHi, BLo, BHi) ->
  (BLo =< AHi) andalso (ALo =< BHi).
% @copyright 2018-2020 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Unit tests for autoconf
%% @end
%% |----------+------------|
%% | Erlang | Release |
%% |----------+------------|
%% | R14B04 | 2011-10-04 |
%% | R15B | 2011-12-14 |
%% | R16A | 2013-01-29 |
%% | OTP 17.0 | 2014-04-07 |
%% | OTP 18.0 | 2015-06-24 |
%% | OTP 19.0 | 2016-06-21 |
%% | OTP 20.0 | 2017-06-21 |
%% | OTP 21.0 | 2018-06-19 |
%% | OTP 22.0 | 2019-05-14 |
%% | OTP 23.0 | 2020-05-13 |
%% |----------+------------|
-module(autoconf_SUITE).
-author('<EMAIL>').
-compile(export_all).
-include_lib("unittest.hrl").
%% Test cases: one per probed library feature or autoconf macro.
all() ->
    [test_has_maps_get_2,
     test_has_mnesia_sync_log_0,
     test_has_cerl_sets_new_0,
     test_has_maps_take_2,
     test_has_maps_iterator_1,
     test_has_maps_next_1,
     test_has_logger_add_handler_3,
     test_have_crypto_randuniform_support,
     test_with_crypto_hash,
     test_with_crypto_bytes_to_integer,
     test_with_maps,
     test_with_rand,
     test_have_ssl_handshake,
     test_have_ssl_getstat,
     test_have_new_stacktrace,
     test_namespaced_dict,
     test_HAVE_ERLANG_NOW,
     test_have_ctline_support,
     test_have_callback_support,
     test_have_socket_open,
     test_have_persistent_term_get,
     test_have_counters_get,
     test_have_atomics_new].
%% Per-testcase timetrap of 10 seconds.
suite() ->
    [{timetrap, {seconds, 10}}].

%% No suite-wide setup/teardown beyond passing the config through.
init_per_suite(Config) ->
    Config.

end_per_suite(_Config) ->
    ok.
%% Short OTP release string, e.g. "R16B03-1" or "21".
otp_rel() ->
    erlang:system_info(otp_release).

%% Full OTP version string read from the release's OTP_VERSION file,
%% or "unknown" when that file cannot be opened.
%% string:strip/3 is deliberately kept (despite newer alternatives)
%% because this suite must compile on releases as old as R14.
otp_rel_long() ->
    Name = filename:join([code:root_dir(), "releases",
                          erlang:system_info(otp_release), "OTP_VERSION"]),
    case file:open(Name, [read]) of
        {ok, IO} ->
            {ok, VSN} = file:read_line(IO),
            %% Fix: the file descriptor was previously never closed.
            ok = file:close(IO),
            string:strip(VSN, right, $\n);
        {error, _Reason} ->
            "unknown"
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% maps:get/2
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/6fdad74f41803089a0f9026c98f319daecda9a50
% erts,stdlib: Change map module name to maps
% maps:get/2, OTP-17
%% Each feature below follows the same pattern: has_X() probes the
%% running VM via code:ensure_loaded/1 + erlang:function_exported/3, and
%% test_has_X asserts the probe result is consistent with the OTP
%% releases the feature is known (not) to exist in.
has_maps_get_2() ->
    _ = code:ensure_loaded(maps),
    erlang:function_exported(maps, get, 2).

test_has_maps_get_2(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1"],
    TrueReleases = ["17", "18", "19", "20", "21", "22", "23"],
    case has_maps_get_2() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% mnesia:sync_log/0
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/scalaris-team/scalaris/commit/659eb6e44d82d4fbcba133b31ca7a372daf9bfc4
% mnesia:sync_log/0, OTP-17
%% Probe: mnesia:sync_log/0 (present from OTP 17).
has_mnesia_sync_log_0() ->
    _ = code:ensure_loaded(mnesia),
    erlang:function_exported(mnesia, sync_log, 0).

test_has_mnesia_sync_log_0(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1"],
    TrueReleases = ["17", "18", "19", "20", "21", "22", "23"],
    case has_mnesia_sync_log_0() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% cerl_sets:new/0
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/c957cb3887aaadffa75b5bb70f12e79edc841396
% compiler: Add cerl_sets module
% cerl_sets:new/0, OTP-18
%% Probe: cerl_sets:new/0 (present from OTP 18).
has_cerl_sets_new_0() ->
    _ = code:ensure_loaded(cerl_sets),
    erlang:function_exported(cerl_sets, new, 0).

test_has_cerl_sets_new_0(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1", "17"],
    TrueReleases = ["18", "19", "20", "21", "22", "23"],
    case has_cerl_sets_new_0() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% maps:take/2
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/65bd8ade865eebe0d8a3c3210a4e2e9f334e229f
% erts: Add BIF maps:take/2
% maps:take/2, OTP-19
%% Probe: maps:take/2 (present from OTP 19).
has_maps_take_2() ->
    _ = code:ensure_loaded(maps),
    erlang:function_exported(maps, take, 2).

test_has_maps_take_2(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1", "17", "18"],
    TrueReleases = ["19", "20", "21", "22", "23"],
    case has_maps_take_2() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% maps:iterator/1
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/0149a73d15df1f80cb46752ec3829f48c38dd230
% erts: Implement maps path iterator
% maps:iterator/1, OTP-21
%% Probe: maps:iterator/1 (present from OTP 21).
has_maps_iterator_1() ->
    _ = code:ensure_loaded(maps),
    erlang:function_exported(maps, iterator, 1).

test_has_maps_iterator_1(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1", "17", "18", "19", "20"],
    TrueReleases = ["21", "22", "23"],
    case has_maps_iterator_1() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% maps:next/1
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/d945d6f1c71d5442a25e4be60f84fc49ae8b6b4e
% stdlib: Introduce maps iterator API
% maps:next/1, OTP-21
%% Probe: maps:next/1 (present from OTP 21).
has_maps_next_1() ->
    _ = code:ensure_loaded(maps),
    erlang:function_exported(maps, next, 1).

test_has_maps_next_1(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1", "17", "18", "19", "20"],
    TrueReleases = ["21", "22", "23"],
    case has_maps_next_1() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% logger:add_handler/3
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/0deea4a8f369013ec00e231d0c2c37e4ab3f0ba1
% Add logger
% logger:add_handler/3, OTP-21
%% Probe: logger:add_handler/3 (present from OTP 21).
has_logger_add_handler_3() ->
    _ = code:ensure_loaded(logger),
    erlang:function_exported(logger, add_handler, 3).

test_has_logger_add_handler_3(_Config) ->
    FalseReleases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B",
                     "R16B01", "R16B02", "R16B03-1", "17", "18", "19", "20"],
    TrueReleases = ["21", "22", "23"],
    case has_logger_add_handler_3() of
        true ->
            ?assert_w_note(lists:member(otp_rel(), TrueReleases), otp_rel());
        false ->
            ?assert_w_note(lists:member(otp_rel(), FalseReleases), otp_rel())
    end,
    ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_crypto_randuniform_support
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% The test_* functions below come in -ifdef pairs: configure defines
%% the macro on releases where the feature was detected, so whichever
%% branch was compiled in asserts that the running release belongs to
%% the matching set.
-ifdef(have_crypto_randuniform_support).
test_have_crypto_randuniform_support(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17", "18", "19", "20", "21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_have_crypto_randuniform_support(_Config) ->
    ?assert_w_note(lists:member(otp_rel(), []), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% with_crypto_hash
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(with_crypto_hash).
test_with_crypto_hash(_Config) ->
    Releases = ["R15B02", "R15B03", "R16B", "R16B01", "R16B02", "R16B03-1",
                "17", "18", "19", "20", "21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_with_crypto_hash(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% with_crypto_bytes_to_integer
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(with_crypto_bytes_to_integer).
test_with_crypto_bytes_to_integer(_Config) ->
    Releases = ["R16B01", "R16B02", "R16B03-1", "17", "18", "19", "20", "21",
                "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_with_crypto_bytes_to_integer(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% with_maps
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(with_maps).
test_with_maps(_Config) ->
    Releases = ["18", "19", "20", "21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_with_maps(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% with_rand
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(with_rand).
test_with_rand(_Config) ->
    Releases = ["18", "19", "20", "21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_with_rand(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_ssl_handshake
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(have_ssl_handshake).
test_have_ssl_handshake(_Config) ->
    Releases = ["21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_have_ssl_handshake(_Config) ->
    Releases = ["R14B04" , "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17", "18", "19", "20"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_ssl_getstat
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(have_ssl_getstat).
test_have_ssl_getstat(_Config) ->
    Releases = ["19", "20", "21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_have_ssl_getstat(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17", "18"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_new_stacktrace
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(have_new_stacktrace).
test_have_new_stacktrace(_Config) ->
    Releases = ["21", "22", "23"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-else.
test_have_new_stacktrace(_Config) ->
    Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
                "R16B02", "R16B03-1", "17", "18", "19", "20"],
    ?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
    ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% namespaced_dict
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(namespaced_dict).
test_namespaced_dict(_Config) ->
Releases = ["17", "18", "19", "20", "21", "22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_namespaced_dict(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% HAVE_ERLANG_NOW
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(HAVE_ERLANG_NOW).
test_HAVE_ERLANG_NOW(_Config) ->
?assert_w_note(lists:member(otp_rel(), []), otp_rel()),
ok.
-else.
test_HAVE_ERLANG_NOW(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21", "22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_ctline_support
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(have_ctline_support).
test_have_ctline_support(_Config) ->
Releases = ["R14B04"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_ctline_support(_Config) ->
Releases = ["R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21", "22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_callback_support
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(have_callback_support).
test_have_callback_support(_Config) ->
Releases = ["R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21", "22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_callback_support(_Config) ->
Releases = ["R14B04"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_socket_open
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/3ca71520bfb664f0ea809ffdf41505936e4d5e90
% [socket-nif] preliminary version of the new socket interface (nififying)
%% have_socket_open is expected only on OTP 22 and 23; every earlier
%% listed release must not define it. otp_rel() is the assertion note.
-ifdef(have_socket_open).
test_have_socket_open(_Config) ->
Releases = ["22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_socket_open(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_persistent_term_get
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/805748eb668d5562fe17f3172cdae07a86166c3f
% Add a persistent term storage
%% OTP 21.2
%% NOTE(review): the feature landed in OTP 21.2, yet the defined branch
%% only expects releases 22+ — presumably the probe does not detect it
%% on the 21 line used here; confirm against the autoconf check.
-ifdef(have_persistent_term_get).
test_have_persistent_term_get(_Config) ->
Releases = ["22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_persistent_term_get(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_counters_get
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/fefb5d039e87ff7137e78b3d5f2eaf01e498ec4d
% erts: Add new module 'counters'
%% OTP 21.2
%% have_counters_get is expected only on OTP 22 and 23 (even though the
%% module landed in 21.2 — see the persistent_term note; confirm).
-ifdef(have_counters_get).
test_have_counters_get(_Config) ->
Releases = ["22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_counters_get(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-endif.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% have_atomics_new
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% https://github.com/erlang/otp/commit/1315c6457e49595fdd3f91693c0506964416c9f0
% erts: Add new module 'atomics'
%% OTP 21.2
%% have_atomics_new is expected only on OTP 22 and 23; all earlier
%% listed releases must not define it. otp_rel() is the assertion note.
-ifdef(have_atomics_new).
test_have_atomics_new(_Config) ->
Releases = ["22", "23"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
-else.
test_have_atomics_new(_Config) ->
Releases = ["R14B04", "R15B", "R15B01", "R15B02", "R15B03", "R16B", "R16B01",
"R16B02", "R16B03-1", "17", "18", "19", "20", "21"],
?assert_w_note(lists:member(otp_rel(), Releases), otp_rel()),
ok.
%% NOTE(review): trailing "| test/... | starcoder |" text on the next
%% line looks like dataset-extraction residue fused to the source —
%% remove it before compiling.
-endif. | test/autoconf_SUITE.erl | 0.516352 | 0.432303 | autoconf_SUITE.erl | starcoder |
%% @doc A set of utility routines used to operate on deep proplists.
%%
%% This single module has been designed to hit three primary goals:
%% <ul>
%% <li>allow you to access deeply nested properties inside proplists;</li>
%% <li>allow you to mutate proplists;</li>
%% <li>allow you to access a group of properties with a single call.</li>
%% </ul>
-module(deepprops).
-export([
get/2,
get/3,
require/2,
set/3,
extract/2,
extract/3,
append/3,
list/2,
values/2,
split/2,
defaults/2
]).
%% Properties deep access
-type path() :: term() | [term(), ...].
%% @doc Equivalent to {@link get/3} with `undefined' as the default value.
%% @see get/3
-spec get(Path, Proplist) -> Result when
Path :: path(),
Proplist :: proplists:proplist(),
Result :: term() | undefined.
get(Path, Proplist) ->
get(Path, Proplist, undefined).
%% @doc Retrieves value of a property located and possibly deeply nested inside the property list
%% `Proplist' under path `Path'.
%%
%% Path may be a single `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% When there is no property under the `Path' the `Default' shall be the result of a call or
%% `undefined' if `Default' has been not specified.
%%
%% Finally for the sake of clarity the following code will run with no exceptions:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = 4,
%% Result = deepprops:get([top, middle], Proplist).
%% '''
-spec get(Path, Proplist, Default) -> Result when
Path :: path(),
Proplist :: proplists:proplist(),
Result :: term() | Default,
Default :: term().
%% Normalise the path into a list of keys once; do_get/3 walks it.
get(Path, Proplist, Default) ->
do_get(keynormalize(Path), Proplist, Default).
%% Recursive worker behind get/2,3: walk the key list, descending one
%% proplist level per key. A bare-atom property yields the value 'true',
%% a {Key, Value} pair yields Value; when the keys run out the current
%% value is the result, and any dead end yields Default.
do_get([], Value, _Default) ->
    Value;
do_get([Key | Rest] = Path, [Prop | Tail], Default) ->
    case Prop of
        Key ->
            %% Bare-atom property: its implicit value is 'true'.
            do_get(Rest, true, Default);
        {Key, Value} ->
            do_get(Rest, Value, Default);
        _Other ->
            do_get(Path, Tail, Default)
    end;
do_get(_Path, _NotAProplist, Default) ->
    Default.
%% @doc Retrieves a mandatory value located (possibly deeply nested)
%% under `Path' inside `Proplist'.
%%
%% Behaves exactly like {@link get/2} except that a missing property
%% raises an `{novalue, Path}' error instead of producing a default.
%%
%% @see get/2
-spec require(Path, Proplist) -> Result when
    Path :: path(),
    Proplist :: proplists:proplist(),
    Result :: term() | no_return().
require(Path, Proplist) ->
    %% A fresh reference can never collide with a stored value, so it
    %% reliably signals "not found".
    Missing = make_ref(),
    case get(Path, Proplist, Missing) of
        Missing -> error({novalue, Path});
        Found -> Found
    end.
%% @doc Sets value of a property to the `Value' and returns new property list.
%%
%% Property located and possibly deeply nested inside the property list `Proplist' under path `Path'.
%%
%% Path may be a single key `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% If such a property already exists its value will be replaced with the new `Value'.
%% Otherwise the property will be appended to the deepest enclosing proplist addressed by the `Path'.
%%
%% And finally let us clarify with the following valid example:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = [ {top, [ {level, [ {thing, new}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = deepprops:set([top, level, thing], new, Proplist).
%% '''
-spec set(Path, Value, Proplist) -> Result when
Path :: path(),
Value :: term(),
Proplist :: proplists:proplist(),
Result :: proplists:proplist().
%% Normalise the path once and delegate the recursive walk to do_set/3.
set(Path, Value, Proplist) ->
do_set(keynormalize(Path), Value, Proplist).
%% private
%% do_set(Keys, Entry, Proplist) -> Proplist.
%%  Recursive worker behind set/3: walks the normalised key list and
%%  rebuilds each enclosing property list on the way back up.
%%
%% NOTE(review): the [{_, _} | _] head only matches a list whose FIRST
%% element is a 2-tuple; a list starting with a bare-atom property falls
%% through to the last clause, which discards the other entries at that
%% level — confirm this is intended.
do_set([], Entry, _) ->
Entry;
do_set([Key | Rest], Entry, Proplist = [{_, _} | _]) ->
%% Rewrite every property matching Key, but keep only the first
%% rewritten entry ([E|_]) — duplicate keys collapse to one. When the
%% key is absent, a fresh nested entry is prepended.
With = [ {K, do_set(Rest, Entry, V)} || P = {K, V} <- Proplist, keymatch(Key, P) ],
Without = [ P || P <- Proplist, not keymatch(Key, P) ],
case With of
[E|_] -> [E | Without];
[] -> [{Key, do_set(Rest, Entry, [])} | Without]
end;
do_set([Key | Rest], Entry, Value) ->
%% Not a property list at this level: wrap the remaining path into
%% fresh nesting, dropping Value (see NOTE above).
[{Key, do_set(Rest, Entry, Value)}].
%% @doc Appends the new entry `Value' to the list located under a property and returns new property list.
%%
%% Property located and possibly deeply nested inside the property list `Proplist' under path `Path'.
%%
%% Path may be a single key `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% If there is no such property under `Path' the new one will be created and set to the `[Value]'.
%% Otherwise the `Value' will be appended to the head of the list which is value of the property.
%% Please be careful when appending value to anything but a list because a malformed list will then
%% be created.
%%
%% And finally let us clarify everything with the following valid example:
%% ```
%% Proplist = [ {top, [ {middle, [4]} ]}, {last, 5} ],
%% Result = [ {top, [ {middle, [new, 4]} ]}, {last, 5} ],
%% Result = deepprops:append([top, middle], new, Proplist).
%% '''
-spec append(Path, Value, Proplist) -> Result when
Path :: path(),
Value :: proplists:property(),
Proplist :: proplists:proplist(),
Result :: proplists:proplist().
%% Normalise the path once and delegate to do_append/3.
append(Path, Entry, Acc) ->
do_append(keynormalize(Path), Entry, Acc).
%% do_append(Keys, Entry, Proplist) -> Proplist.
%%  Recursive worker behind append/3; mirrors do_set/3 but conses the
%%  new Entry onto the existing value instead of replacing it. Note the
%%  cons produces an improper list if the existing value is not a list
%%  (as warned in the @doc of append/3).
do_append([], Entry, Acc) ->
[Entry | Acc];
do_append([Key | Rest], Entry, Proplist = [{_, _} | _]) ->
%% Like do_set/3: only the first matching property survives; absent
%% keys get a fresh nested entry prepended.
With = [ {K, do_append(Rest, Entry, V)} || P = {K, V} <- Proplist, keymatch(Key, P) ],
Without = [ P || P <- Proplist, not keymatch(Key, P) ],
case With of
[E|_] -> [E | Without];
[] -> [{Key, do_append(Rest, Entry, [])} | Without]
end;
do_append([Key | Rest], Entry, Value) ->
%% Not a property list at this level: wrap the path into fresh nesting.
[{Key, do_append(Rest, Entry, Value)}].
%% @doc Equivalent to {@link extract/3} with `undefined' as the default.
%% @see extract/3
-spec extract(Path, Proplist) -> Result when
Path :: path(),
Proplist :: proplists:proplist(),
Result :: {Value, proplists:proplist()},
Value :: term().
extract(Path, Proplist) ->
extract(Path, Proplist, undefined).
%% @doc Extracts the property from the property list `Proplist' and return its value and new property
%% list with the property removed.
%%
%% Property located and possibly deeply nested inside the property list `Proplist' under path `Path'.
%%
%% Path may be a single key `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% If there is such property found then the result will be the tuple `{Value, Rest}' where `Value' is
%% value of the property and `Rest' is the property list formed after the original with the found
%% property removed from the deepest enclosing property list addressed by the `Path'.
%%
%% On the other hand if no such property actually exists the tuple `{Default, Proplist}' is returned
%% where `Proplist' is the original property list untouched and `Default' is equal to `undefined' when
%% no `Default' value has been passed.
%%
%% And finally let us clarify everything with the following:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]} ]}, {last, 5} ],
%% Result = 3,
%% Rest = [ {top, [ {level, [ {thing, 2} ]} ]}, {last, 5} ],
%% {Result, Rest} = deepprops:extract([top, level, where], Proplist).
%% '''
-spec extract(Path, Proplist, Default) -> Result when
Path :: path(),
Proplist :: proplists:proplist(),
Result :: {Value, proplists:proplist()},
Value :: term() | Default,
Default :: term().
%% Normalise the path once and delegate to do_extract/3.
extract(Path, Proplist, Default) ->
do_extract(keynormalize(Path), Proplist, Default).
%% do_extract(Keys, Proplist, Default) -> {Value, RestProplist}.
%%  Recursive worker behind extract/2,3.
do_extract([], Proplist, _) ->
{Proplist, []};
do_extract([Key], Proplist = [{_, _} | _], Default) ->
%% Last key: collect every matching {_, V} value. A single match is
%% returned bare; several matches come back as a list.
Values = [ V || P = {_, V} <- Proplist, keymatch(Key, P) ],
Rest = [ P || P <- Proplist, not keymatch(Key, P) ],
case Values of
[] -> {Default, Rest};
[Single] -> {Single, Rest};
List -> {List, Rest}
end;
do_extract([Key | RestKeys], Proplist = [{_, _} | _], Default) ->
%% Intermediate key: merge all matching sub-lists and recurse into them.
%% NOTE(review): the rebuilt {Key, Rest} entry is moved to the front of
%% the enclosing list, and it is created even when Key was absent —
%% confirm this reordering/creation is acceptable to callers.
WithIt = lists:append([ V || P = {_, V} <- Proplist, keymatch(Key, P) ]),
Without = [ P || P <- Proplist, not keymatch(Key, P) ],
{Value, Rest} = do_extract(RestKeys, WithIt, Default),
{Value, [{Key, Rest} | Without]};
do_extract(_, Proplist, Default) ->
%% Not a property list (or empty) at this level: nothing to extract.
{Default, Proplist}.
%% @private
%% True when Prop denotes the property Key: either the bare key itself
%% or a {Key, Value} pair for it.
keymatch(Key, Prop) ->
    case Prop of
        Key -> true;
        {Key, _Value} -> true;
        _Other -> false
    end.
%% Properties multiple access
%% @doc Retrieves values of one or more properties with a single invocation. These values form a list
%% which strictly preserve order of properties accessed.
%%
%% This function performs much like group `get' call. In other words the result of this function will be
%% equal to sequential application of `deepprops:get/3' in the way close to folding.
%%
%% Properties located and possibly deeply nested inside the property list `Proplist' addressed by one
%% or more paths in `Paths'. A path then may be a single `PurePath' (explained further) which is semantically
%% equal to `{PurePath, undefined}'. As well as a tuple `{PurePath, Default}' may be specified thus
%% denoting that in the case of absence of the property the `Default' will be the value in the list of values.
%% Therefore `undefined' will be that value when `Default' is not specified.
%%
%% `PurePath' may be a single key `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% Then following final clarification example:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = [ 2, def, 5 ],
%% Result = deepprops:values([ [top, level, thing], {[top, down], def}, last ], Proplist).
%% '''
%%
%% In the latter example the value `def' was returned for path `[top, down]'. If the default value was not
%% specified through `{[top, down], def}' then the `undefined' would be returned instead.
%%
%% @see get/3
-spec values(Paths, Proplist) -> Results when
    Proplist :: proplists:proplist(),
    Paths :: [Path],
    Path :: PurePath | {PurePath, Default},
    PurePath :: path(),
    Default :: term(),
    Results :: [Result],
    Result :: term() | Default | undefined.
%% Look every path up in order; a {Path, Default} pair supplies its own
%% fallback, a bare path falls back to 'undefined'. The result list
%% preserves the order of Paths.
values(Paths, Proplist) ->
    lists:map(
      fun ({Path, Default}) -> get(keynormalize(Path), Proplist, Default);
          (Path) -> get(keynormalize(Path), Proplist)
      end,
      Paths).
-spec split(Paths, Proplist) -> Result when
Proplist :: proplists:proplist(),
Paths :: [Path],
Path :: PurePath | {PurePath, Default},
PurePath :: path(),
Default :: term(),
Result :: {[Value], Rest},
Value :: term(),
Rest :: proplists:proplist().
%% @doc Extracts values of one or more properties with a single invocation and returns them along with
%% new property list with these properties removed. These values form a list which strictly preserve
%% order of properties accessed.
%%
%% This function performs much like group `extract' call. In other words the result of this function will be
%% equal to sequential application of `deepprops:extract/3' in the way close to folding.
%%
%% Properties located and possibly deeply nested inside the property list `Proplist' addressed by one
%% or more paths in `Paths'. A path then may be a single `PurePath' as well as a tuple `{PurePath, Default}'
%% thus denoting that in the case of absence of the property the `Default' will be the value in the list
%% of values. Therefore `undefined' will be that value when `Default' is not specified.
%%
%% The function call will return tuple `{Values, Rest}' where `Values' is list of values being explained
%% and the `Rest' is property list with properties addressed by `Paths' removed from the corresponding
%% deepest enclosing property lists. Obviously the absent properties are not removed.
%%
%% `PurePath' may be a single key `Key' thus denoting that the property located on the top level of a
%% property list. As well as a list of keys it may be noting that the property is located inside a
%% property list which is located inside another property list and so on until the top level `Proplist'
%% is finally reached.
%%
%% Finally goes the clarification example:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = [ 2, def, 5 ],
%% Rest = [ {top, [ {level, [ {where, 3} ]}, {middle, 4} ]} ],
%% {Result, Rest} = deepprops:split([ [top, level, thing], {[top, down], def}, last ], Proplist).
%% '''
%%
%% @see extract/3
%% Extract every path in order, threading the shrinking property list
%% through a fold; the extracted values keep the order of the paths.
split(Paths, Proplist) ->
    Step = fun ({Path, Default}, {Acc, Rest0}) ->
                   {Value, Rest} = extract(keynormalize(Path), Rest0, Default),
                   {[Value | Acc], Rest};
               (Path, {Acc, Rest0}) ->
                   {Value, Rest} = extract(keynormalize(Path), Rest0),
                   {[Value | Acc], Rest}
           end,
    {Values, Remaining} = lists:foldl(Step, {[], Proplist}, Paths),
    {lists:reverse(Values), Remaining}.
%% @doc Returns plain sublist of the property list with properties under `Paths' being
%% retrieved.
%%
%% Properties located and possibly deeply nested inside the property list `Proplist' addressed by one
%% or more paths in `Paths'. A `Path' then may be a single key `Key' thus denoting that the property
%% located on the top level of a property list. As well as a list of keys it may be noting that the
%% property is located inside a property list which is located inside another property list and so on
%% until the top level `Proplist' is finally reached.
%%
%% Worth noting that when property is not present under specific `Path' the resulting list misses this
%% property totally. Thus length of the result may be less than the length of `Paths'. But the ordering
%% is still preserved even in such cases.
%%
%% Finally for the sake of clarity it is guaranteed that the following code will run with no exceptions:
%% ```
%% Proplist = [ {top, [ {level, [ {thing, 2}, {where, 3} ]}, {middle, 4} ]}, {last, 5} ],
%% Result = [ {[top, level, thing], 2}, {last, 5} ],
%% Result = deepprops:list([ [top, level, thing], [top, down], last ], Proplist).
%% '''
-spec list(Paths, Proplist) -> Result when
Proplist :: proplists:proplist(),
Paths :: [path()],
Result :: [Prop],
Prop :: proplists:property().
%% Look every path up and keep only the ones that resolved to a value,
%% pairing each ORIGINAL (unnormalised) path with its value and
%% preserving the order of Paths. Properties that are missing (or whose
%% value is 'undefined') are omitted.
list(Paths, Proplist) ->
    Found = [{Path, get(keynormalize(Path), Proplist)} || Path <- Paths],
    [{Path, Value} || {Path, Value} <- Found, Value =/= undefined].
%% @private
%% Normalise a path: a non-empty list is already a key path, anything
%% else (including the empty list) becomes a single-element path.
keynormalize(Path) ->
    case Path of
        [_ | _] -> Path;
        Single -> [Single]
    end.
%% @doc Retrieves the property list formed by substitution any missing properties with
%% default ones from `Defaults'.
%%
%% If `Defaults' contains a property which is present in `Proplist' too last one left untouched.
%% Otherwise the `Proplist' is populated with property from `Defaults'. It may seem like merging of
%% two proplists with respect to values in the `Proplist'.
-spec defaults(Defaults, Proplist) -> Result when
    Proplist :: proplists:proplist(),
    Defaults :: proplists:proplist(),
    Result :: proplists:proplist().
%% Merge Defaults into Proplist, keeping any property that is already
%% present; missing defaults are prepended to the front of the result.
defaults([], Proplist) ->
    Proplist;
defaults([{Key, _} = Default | Rest], Proplist) ->
    case keydefined(Key, Proplist) of
        true -> defaults(Rest, Proplist);
        false -> defaults(Rest, [Default | Proplist])
    end;
defaults([BareKey | Rest], Proplist) ->
    %% A bare property is shorthand for {BareKey, true}.
    defaults([{BareKey, true} | Rest], Proplist).
%% @private
%% True when the top level of the list contains Key, either as a bare
%% property or as a {Key, Value} pair.
keydefined(Key, [Key | _]) -> true;
keydefined(Key, [{Key, _} | _]) -> true;
keydefined(Key, [_ | Rest]) -> keydefined(Key, Rest);
%% NOTE(review): the trailing "| src/... | starcoder |" text on the next
%% line looks like dataset-extraction residue fused to the source —
%% remove it before compiling.
keydefined(_Key, []) -> false. | src/deepprops.erl | 0.567218 | 0.599954 | deepprops.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2020. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Transform normal Erlang to Core Erlang
%% At this stage all preprocessing has been done. All that is left are
%% "pure" Erlang functions.
%%
%% Core transformation is done in four stages:
%%
%% 1. Flatten expressions into an internal core form without doing
%% matching.
%%
%% 2. Step "forwards" over the icore code annotating each "top-level"
%% thing with variable usage. Detect bound variables in matching
%% and replace with explicit guard test. Annotate "internal-core"
%% expressions with variables they use and create. Convert matches
%% to cases when not pure assignments.
%%
%% 3. Step "backwards" over icore code using variable usage
%% annotations to change implicit exported variables to explicit
%% returns.
%%
%% 4. Lower receives to more primitive operations. Split binary
%% patterns where a value is matched out and then used as
%% a size in the same pattern. That simplifies the subsequent
%% passes as all variables are within a single pattern are either
%% new or used, but never both at the same time.
%%
%% To ensure the evaluation order we ensure that all arguments are
%% safe. A "safe" is basically a core_lib simple with VERY restricted
%% binaries.
%%
%% We have to be very careful with matches as these create variables.
%% While we try not to flatten things more than necessary we must make
%% sure that all matches are at the top level. For this we use the
%% type "novars" which are non-match expressions. Cases and receives
%% can also create problems due to exports variables so they are not
%% "novars" either. I.e. a novars will not export variables.
%%
%% Annotations in the #iset, #iletrec, and all other internal records
%% is kept in a record, #a, not in a list as in proper core. This is
%% easier and faster and creates no problems as we have complete control
%% over all annotations.
%%
%% On output, the annotation for most Core Erlang terms will contain
%% the source line number. A few terms will be marked with the
%% atom 'compiler_generated', to indicate that the compiler has generated
%% them and that no warning should be generated if they are optimized
%% away.
%%
%%
%% In this translation:
%%
%% call ops are safes
%% call arguments are safes
%% match arguments are novars
%% case arguments are novars
%% receive timeouts are novars
%% binaries and maps are novars
%% let/set arguments are expressions
%% fun is not a safe
-module(v3_core).
-export([module/2,format_error/1]).
-import(lists, [reverse/1,reverse/2,map/2,member/2,foldl/3,foldr/3,mapfoldl/3,
splitwith/2,keyfind/3,sort/1,foreach/2,droplast/1,last/1,
duplicate/2]).
-import(ordsets, [add_element/2,del_element/2,is_element/2,
union/1,union/2,intersection/2,subtract/2]).
-import(cerl, [ann_c_cons/3,ann_c_tuple/2,c_tuple/1,
ann_c_map/3]).
-include("core_parse.hrl").
%% Internal core expressions and help functions.
%% N.B. annotations fields in place as normal Core expressions.
-record(a, {us=[],ns=[],anno=[]}). %Internal annotation
-record(iapply, {anno=#a{},op,args}).
-record(ibinary, {anno=#a{},segments}). %Not used in patterns.
-record(ibitstr, {anno=#a{},val,size,unit,type,flags}).
-record(icall, {anno=#a{},module,name,args}).
-record(icase, {anno=#a{},args,clauses,fc}).
-record(icatch, {anno=#a{},body}).
-record(iclause, {anno=#a{},pats,guard,body}).
-record(ifun, {anno=#a{},id,vars,clauses,fc,name=unnamed}).
-record(iletrec, {anno=#a{},defs,body}).
-record(imatch, {anno=#a{},pat,guard=[],arg,fc}).
-record(imap, {anno=#a{},arg=#c_literal{val=#{}},es,is_pat=false}).
-record(imappair, {anno=#a{},op,key,val}).
-record(iprimop, {anno=#a{},name,args}).
-record(iprotect, {anno=#a{},body}).
-record(ireceive1, {anno=#a{},clauses}).
-record(ireceive2, {anno=#a{},clauses,timeout,action}).
-record(iset, {anno=#a{},var,arg}).
-record(itry, {anno=#a{},args,vars,body,evars,handler}).
-record(ifilter, {anno=#a{},arg}).
-record(igen, {anno=#a{},acc_pat,acc_guard,
skip_pat,tail,tail_pat,arg}).
-record(isimple, {anno=#a{},term :: cerl:cerl()}).
-type iapply() :: #iapply{}.
-type ibinary() :: #ibinary{}.
-type icall() :: #icall{}.
-type icase() :: #icase{}.
-type icatch() :: #icatch{}.
-type iclause() :: #iclause{}.
-type ifun() :: #ifun{}.
-type iletrec() :: #iletrec{}.
-type imatch() :: #imatch{}.
-type imap() :: #imap{}.
-type iprimop() :: #iprimop{}.
-type iprotect() :: #iprotect{}.
-type ireceive1() :: #ireceive1{}.
-type ireceive2() :: #ireceive2{}.
-type iset() :: #iset{}.
-type itry() :: #itry{}.
-type ifilter() :: #ifilter{}.
-type igen() :: #igen{}.
-type isimple() :: #isimple{}.
-type i() :: iapply() | ibinary() | icall() | icase() | icatch()
| iclause() | ifun() | iletrec() | imatch() | imap()
| iprimop() | iprotect() | ireceive1() | ireceive2()
| iset() | itry() | ifilter()
| igen() | isimple().
-type warning() :: {file:filename(), [{integer(), module(), term()}]}.
-record(core, {vcount=0 :: non_neg_integer(), %Variable counter
fcount=0 :: non_neg_integer(), %Function counter
gcount=0 :: non_neg_integer(), %Goto counter
function={none,0} :: fa(), %Current function.
in_guard=false :: boolean(), %In guard or not.
wanted=true :: boolean(), %Result wanted or not.
opts=[] :: [compile:option()], %Options.
dialyzer=false :: boolean(), %Help dialyzer or not.
ws=[] :: [warning()], %Warnings.
file=[{file,""}] %File.
}).
%% XXX: The following type declarations do not belong in this module
-type fa() :: {atom(), arity()}.
-type attribute() :: atom().
-type form() :: {function, integer(), atom(), arity(), _}
| {attribute, integer(), attribute(), _}.
-record(imodule, {name = [],
exports = ordsets:new(),
attrs = [],
defs = [],
file = [],
opts = [],
ws = []}).
%% module(Forms, Options) -> {ok,CModule,Warnings}.
%%  Entry point: fold all abstract forms into an #imodule{} accumulator
%%  and convert the result into a #c_module{} record.
-spec module([form()], [compile:option()]) ->
{'ok',cerl:c_module(),[warning()]}.
module(Forms0, Opts) ->
%% Add the predefined functions before translating.
Forms = erl_internal:add_predefined_functions(Forms0),
Module = foldl(fun (F, Acc) ->
form(F, Acc, Opts)
end, #imodule{}, Forms),
#imodule{name=Mod,exports=Exp0,attrs=As0,defs=Kfs0,ws=Ws} = Module,
%% With export_all, every function defined in the module is exported
%% regardless of the collected export attributes.
Exp = case member(export_all, Opts) of
true -> defined_functions(Forms);
false -> Exp0
end,
Cexp = [#c_var{name=FA} || {_,_}=FA <- Exp],
%% Attributes and definitions were accumulated in reverse order.
As = reverse(As0),
Kfs = reverse(Kfs0),
{ok,#c_module{name=#c_literal{val=Mod},exports=Cexp,attrs=As,defs=Kfs},Ws}.
%% form(Form, #imodule{}, Opts) -> #imodule{}.
%%  Accumulate one abstract form into the module record.
form({function,_,_,_,_}=F0, Module, Opts) ->
#imodule{file=File,defs=Defs,ws=Ws0} = Module,
{F,Ws} = function(F0, Ws0, File, Opts),
Module#imodule{defs=[F|Defs],ws=Ws};
form({attribute,_,module,Mod}, Module, _Opts) ->
true = is_atom(Mod), %Assertion.
Module#imodule{name=Mod};
%% The -file attribute both updates the current file (for warnings and
%% line annotations) and is kept as an ordinary attribute.
form({attribute,_,file,{File,_Line}}=F, #imodule{attrs=As}=Module, _Opts) ->
Module#imodule{file=File, attrs=[attribute(F)|As]};
form({attribute,_,import,_}, Module, _Opts) ->
%% Ignore. We have no further use for imports.
Module;
form({attribute,_,export,Es}, #imodule{exports=Exp0}=Module, _Opts) ->
Exp = ordsets:union(ordsets:from_list(Es), Exp0),
Module#imodule{exports=Exp};
form({attribute,_,_,_}=F, #imodule{attrs=As}=Module, _Opts) ->
Module#imodule{attrs=[attribute(F)|As]};
form(_, Module, _Opts) ->
%% Ignore uninteresting forms such as 'eof'.
Module.
%% Convert one abstract attribute into a {NameLiteral, ValueLiteral}
%% pair annotated with the source location; a non-list value is wrapped
%% into a singleton list.
attribute({attribute, Anno, Name, Val0}) ->
    Line = [erl_anno:location(Anno)],
    Vals = case is_list(Val0) of
               true -> Val0;
               false -> [Val0]
           end,
    {#c_literal{val=Name, anno=Line}, #c_literal{val=Vals, anno=Line}}.
%% Return an ordset of {Name,Arity} pairs for every function defined in
%% the given list of abstract forms.
defined_functions(Forms) ->
    ordsets:from_list([{Name,Arity} || {function,_,Name,Arity,_} <- Forms]).
%% function_dump(module_info,_,_,_) -> ok;
%% function_dump(Name,Arity,Format,Terms) ->
%% io:format("~w/~w " ++ Format,[Name,Arity]++Terms),
%% ok.
%% function({function,Anno,Name,Arity,Clauses}, Ws, File, Opts) ->
%%     {{#c_var{},Cfun},Warnings}.
%%  Translate one function through the four passes described in the
%%  file header: body (flatten), ubody (usage annotation), cbody
%%  (explicit exports) and lbody (lower receives/split patterns). On a
%%  crash the function name is printed before re-raising, to make
%%  compiler failures easier to locate.
function({function,_,Name,Arity,Cs0}, Ws0, File, Opts) ->
try
St0 = #core{vcount=0,function={Name,Arity},opts=Opts,
dialyzer=member(dialyzer, Opts),
ws=Ws0,file=[{file,File}]},
{B0,St1} = body(Cs0, Name, Arity, St0),
%% ok = function_dump(Name, Arity, "body:~n~p~n",[B0]),
{B1,St2} = ubody(B0, St1),
%% ok = function_dump(Name, Arity, "ubody:~n~p~n",[B1]),
{B2,St3} = cbody(B1, St2),
%% ok = function_dump(Name, Arity, "cbody:~n~p~n",[B2]),
{B3,#core{ws=Ws}} = lbody(B2, St3),
%% ok = function_dump(Name, Arity, "lbody:~n~p~n",[B3]),
{{#c_var{name={Name,Arity}},B3},Ws}
catch
Class:Error:Stack ->
io:fwrite("Function: ~w/~w\n", [Name,Arity]),
erlang:raise(Class, Error, Stack)
end.
%% body(Clauses, Name, Arity, State) -> {#ifun{},State}.
%%  Convert the clauses of one function into a single #ifun{} with fresh
%%  argument variables and a function_clause failure clause.
body(Cs0, Name, Arity, St0) ->
Anno = lineno_anno(element(2, hd(Cs0)), St0),
FunAnno = [{function,{Name,Arity}} | Anno],
{Args0,St1} = new_vars(Anno, Arity, St0),
Args = reverse(Args0), %Nicer order
{Cs1,St2} = clauses(Cs0, St1),
{Ps,St3} = new_vars(Arity, St2), %Need new variables here
Fc = function_clause(Ps, Anno),
{#ifun{anno=#a{anno=FunAnno},id=[],vars=Args,clauses=Cs1,fc=Fc},St3}.
%% clause(Clause, State) -> {Cclause,State}.
%% clauses([Clause], State) -> {[Cclause],State}.
%% Convert clauses. Trap bad pattern aliases.
%% Convert a list of abstract clauses, threading the compiler state
%% left to right through clause/1.
clauses(Cs, St) ->
    lists:mapfoldl(fun clause/2, St, Cs).
%% clause({clause,Anno,Head,Guard,Body}, State) -> {#iclause{},State}.
%%  Convert one clause; a 'nomatch' thrown while converting the head
%%  means the pattern can never match and triggers the recovery below.
clause({clause,Lc,H0,G0,B0}, St0) ->
try head(H0, St0) of
{H1,St1} ->
{G1,St2} = guard(G0, St1),
{B1,St3} = exprs(B0, St2),
Anno = lineno_anno(Lc, St3),
{#iclause{anno=#a{anno=Anno},pats=H1,guard=G1,body=B1},St3}
catch
throw:nomatch ->
%% This pattern can't possibly match. If we simply remove
%% the clause, variables that are used later might not be
%% bound. Therefore, we must keep the clause, but rewrite
%% the pattern to a pattern that will bind the same
%% variables and ensure that the clause can't be executed
%% by letting the guard return false.
St1 = add_warning(Lc, nomatch, St0),
H1 = [sanitize(P) || P <- H0],
false = H0 =:= H1, %Assertion.
G1 = [[{atom,Lc,false}]],
LcNoWarn = no_compiler_warning(Lc),
clause({clause,LcNoWarn,H1,G1,B0}, St1)
end.
clause_arity({clause,_,H0,_,_}) -> length(H0).
%% head([P], State) -> {[P],State}.
%%  Convert the patterns of a clause head. A 'nomatch' thrown for an
%%  impossible pattern propagates to clause/1, which handles it.
head(Ps, St) ->
pattern_list(Ps, St).
%% guard([Expr], State) -> {[Cexpr],State}.
%%  Build an explicit and/or tree of guard alternatives, then traverse
%%  top-level and/or tree and "protect" inner tests.
guard([], St) -> {[],St};
guard(Gs0, St0) ->
%% Combine the guard sequences right-associatively with 'or', each
%% sequence first folded into a protected 'and' tree by guard_tests/1.
Gs1 = foldr(fun (Gt0, Rhs) ->
Gt1 = guard_tests(Gt0),
L = element(2, Gt1),
{op,L,'or',Gt1,Rhs}
end, guard_tests(last(Gs0)), droplast(Gs0)),
%% Translate with in_guard=true so the expression passes know the
%% context; the flag is reset afterwards.
{Gs,St} = gexpr_top(Gs1, St0#core{in_guard=true}),
{Gs,St#core{in_guard=false}}.
%% Fold the tests of one guard sequence into a right-nested 'and' tree
%% and wrap the result in a {protect, Line, Tree} marker.
guard_tests(Tests) ->
    Line = element(2, hd(Tests)),
    Conj = lists:foldr(fun (Test, Acc) -> {op,Line,'and',Test,Acc} end,
                       lists:last(Tests), lists:droplast(Tests)),
    {protect,Line,Conj}.
%% gexpr_top(Expr, State) -> {Cexpr,State}.
%%  Generate an internal core expression of a guard test. Explicitly
%%  handle outer boolean expressions and "protect" inner tests in a
%%  reasonably smart way.
gexpr_top(E0, St0) ->
{E1,Eps0,Bools,St1} = gexpr(E0, [], St0),
%% Force any collected non-test subexpressions to yield booleans.
{E,Eps,St} = force_booleans(Bools, E1, Eps0, St1),
{Eps++[E],St}.
%% gexpr(Expr, Bools, State) -> {Cexpr,[PreExp],Bools,State}.
%%  Generate an internal core expression of a guard test.
%% A 'protect' wrapper (from guard_tests/1): translate its body and, if
%% any pre-expressions were generated, keep them inside an #iprotect{}
%% wrapper instead of letting them escape to the surrounding level.
gexpr({protect,Line,Arg}, Bools0, St0) ->
case gexpr(Arg, [], St0) of
{E0,[],Bools,St1} ->
{E,Eps,St} = force_booleans(Bools, E0, [], St1),
{E,Eps,Bools0,St};
{E0,Eps0,Bools,St1} ->
{E,Eps,St} = force_booleans(Bools, E0, Eps0, St1),
Anno = lineno_anno(Line, St),
{#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St}
end;
%% 'andalso'/'orelse' are rewritten into an explicit boolean case
%% switch (make_bool_switch) on a fresh variable.
gexpr({op,_,'andalso',_,_}=E0, Bools, St0) ->
{op,L,'andalso',E1,E2} = right_assoc(E0, 'andalso'),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False),
gexpr(E, Bools, St);
gexpr({op,_,'orelse',_,_}=E0, Bools, St0) ->
{op,L,'orelse',E1,E2} = right_assoc(E0, 'orelse'),
Anno = lineno_anno(L, St0),
{#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch(L, E1, V, True, E2),
gexpr(E, Bools, St);
%% Strict boolean operators, written either in operator form or as an
%% explicit remote call to erlang:Op/2.
gexpr({op,Line,Op,L,R}=E, Bools, St) ->
case erl_internal:bool_op(Op, 2) of
true ->
gexpr_bool(Op, L, R, Bools, St, Line);
false ->
gexpr_test(E, Bools, St)
end;
gexpr({call,Line,{remote,_,{atom,_,erlang},{atom,_,Op}},[L,R]}=E, Bools, St) ->
case erl_internal:bool_op(Op, 2) of
true ->
gexpr_bool(Op, L, R, Bools, St, Line);
false ->
gexpr_test(E, Bools, St)
end;
%% 'not', in operator or remote-call form.
gexpr({op,Line,'not',A}, Bools, St) ->
gexpr_not(A, Bools, St, Line);
gexpr({call,Line,{remote,_,{atom,_,erlang},{atom,_,'not'}},[A]}, Bools, St) ->
gexpr_not(A, Bools, St, Line);
%% Anything else is a plain guard test.
gexpr(E0, Bools, St0) ->
gexpr_test(E0, Bools, St0).
%% gexpr_bool(Op, L, R, Bools, State, Line) -> {Cexpr,[PreExp],Bools,State}.
%%  Generate a guard expression for a strict boolean operator
%%  (as recognised by erl_internal:bool_op/2): translate both operands,
%%  force them into safe expressions, and emit a call to erlang:Op/2.
gexpr_bool(Op, L, R, Bools0, St0, Line) ->
{Le,Lps,Bools1,St1} = gexpr(L, Bools0, St0),
{Ll,Llps,St2} = force_safe(Le, St1),
{Re,Rps,Bools,St3} = gexpr(R, Bools1, St2),
{Rl,Rlps,St4} = force_safe(Re, St3),
Anno = lineno_anno(Line, St4),
{#icall{anno=#a{anno=Anno}, %Must have an #a{}
module=#c_literal{anno=Anno,val=erlang},
name=#c_literal{anno=Anno,val=Op},
args=[Ll,Rl]},Lps ++ Llps ++ Rps ++ Rlps,Bools,St4}.
%% gexpr_not(Expr, Bools, State, Line) -> {Cexpr,[PreExp],Bools,State}.
%%  Generate an erlang:'not'/1 guard test.
gexpr_not(A, Bools0, St0, Line) ->
{Ae0,Aps,Bools,St1} = gexpr(A, Bools0, St0),
case Ae0 of
#icall{module=#c_literal{val=erlang},
name=#c_literal{val='=:='},
args=[E,#c_literal{val=true}]}=EqCall ->
%%
%% Doing the following transformation
%%    not(Expr =:= true)  ==>  Expr =:= false
%% will help eliminate redundant is_boolean/1 tests.
%%
Ae = EqCall#icall{args=[E,#c_literal{val=false}]},
{Al,Alps,St2} = force_safe(Ae, St1),
{Al,Aps ++ Alps,Bools,St2};
Ae ->
%% General case: emit an explicit erlang:'not'/1 call.
{Al,Alps,St2} = force_safe(Ae, St1),
Anno = lineno_anno(Line, St2),
{#icall{anno=#a{anno=Anno}, %Must have an #a{}
module=#c_literal{anno=Anno,val=erlang},
name=#c_literal{anno=Anno,val='not'},
args=[Al]},Aps ++ Alps,Bools,St2}
end.
%% gexpr_test(Expr, Bools, State) -> {Cexpr,[PreExp],Bools,State}.
%%  Generate a guard test. At this stage we must be sure that we have
%%  a proper boolean value here, so wrap things with a 'true' test if
%%  we don't know, i.e. if it is not a comparison or a type test.
gexpr_test({atom,L,true}, Bools, St0) ->
    {#c_literal{anno=lineno_anno(L, St0),val=true},[],Bools,St0};
gexpr_test({atom,L,false}, Bools, St0) ->
    {#c_literal{anno=lineno_anno(L, St0),val=false},[],Bools,St0};
gexpr_test(E0, Bools0, St0) ->
    {E1,Eps0,St1} = expr(E0, St0),
    %% Generate "top-level" test and argument calls.
    case E1 of
        #icall{anno=Anno,module=#c_literal{val=erlang},
               name=#c_literal{val=is_function},
               args=[_,_]} ->
            %% is_function/2 is not a safe type test. We must force
            %% it to be protected.
            Lanno = Anno#a.anno,
            {New,St2} = new_var(Lanno, St1),
            {icall_eq_true(New),
             Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools0,St2};
        #icall{anno=Anno,module=#c_literal{val=erlang},name=#c_literal{val=N},args=As} ->
            %% Note that erl_expand_records has renamed type
            %% tests to the new names; thus, float/1 as a type
            %% test will now be named is_float/1.
            Ar = length(As),
            case erl_internal:new_type_test(N, Ar) orelse
                erl_internal:comp_op(N, Ar) orelse
                erl_internal:bool_op(N, Ar) of
                true -> {E1,Eps0,Bools0,St1};
                false ->
                    %% Unknown BIF result: bind it to a variable and
                    %% compare that variable with 'true'. The variable
                    %% is remembered in Bools so that an is_boolean/1
                    %% test can be added later if needed.
                    Lanno = Anno#a.anno,
                    {New,St2} = new_var(Lanno, St1),
                    Bools = [New|Bools0],
                    {icall_eq_true(New),
                     Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
            end;
        _ ->
            Lanno = get_lineno_anno(E1),
            ACompGen = #a{anno=[compiler_generated]},
            case is_simple(E1) of
                true ->
                    %% Already a variable or literal; compare directly.
                    Bools = [E1|Bools0],
                    {icall_eq_true(E1),Eps0,Bools,St1};
                false ->
                    %% Force the expression into a variable first.
                    {New,St2} = new_var(Lanno, St1),
                    Bools = [New|Bools0],
                    {icall_eq_true(New),
                     Eps0 ++ [#iset{anno=ACompGen,var=New,arg=E1}],Bools,St2}
            end
    end.
%% icall_eq_true(Arg) -> Icall.
%%  Build the intermediate call erlang:'=:='(Arg, 'true'), used to turn
%%  a value of unknown type into a proper boolean guard test.
icall_eq_true(Arg) ->
    Mod = #c_literal{val=erlang},
    Name = #c_literal{val='=:='},
    True = #c_literal{val=true},
    #icall{anno=#a{anno=[compiler_generated]},
           module=Mod,name=Name,args=[Arg,True]}.
%% force_booleans([Var], E, [PreExp], St) -> {E',[PreExp'],St}.
%%  Strip annotations from the candidate variables, drop those whose
%%  boolean-ness is already guaranteed by the guard expression itself
%%  (see unforce/3), and add is_boolean/1 tests for the rest.
force_booleans(Vs0, E, Eps, St) ->
    Stripped = [set_anno(V, []) || V <- Vs0],
    Needed = unforce(E, Eps, Stripped),
    force_booleans_1(Needed, E, Eps, St).
%% force_booleans_1([Var], E, [PreExp], St) -> {E',[PreExp'],St}.
%%  For each remaining variable, generate an is_boolean/1 call bound to
%%  a fresh variable and 'and' that result into the accumulated guard
%%  expression, so the guard fails for non-boolean values.
force_booleans_1([], E, Eps, St) ->
    {E,Eps,St};
force_booleans_1([V|Vs], E0, Eps0, St0) ->
    {E1,Eps1,St1} = force_safe(E0, St0),
    ACompGen = #a{anno=[compiler_generated]},
    Call = #icall{anno=ACompGen,module=#c_literal{val=erlang},
                  name=#c_literal{val=is_boolean},
                  args=[V]},
    {New,St} = new_var([], St1),
    Iset = #iset{var=New,arg=Call},
    Eps = Eps0 ++ Eps1 ++ [Iset],
    E = #icall{anno=ACompGen,
               module=#c_literal{val=erlang},name=#c_literal{val='and'},
               args=[E1,New]},
    force_booleans_1(Vs, E, Eps, St).
%% unforce(Expr, PreExprList, BoolExprList) -> BoolExprList'.
%%  Filter BoolExprList. BoolExprList is a list of simple expressions
%%  (variables or literals) of which we are not sure whether they are booleans.
%%
%%  The basic idea for filtering is the following transformation:
%%
%%      (E =:= Bool) and is_boolean(E)   ==>  E =:= Bool
%%
%%  where E is an arbitrary expression and Bool is 'true' or 'false'.
%%
%%  The transformation is still valid if there are other expressions joined
%%  by 'and' operations:
%%
%%      E1 and (E2 =:= true) and E3 and is_boolean(E)   ==>  E1 and (E2 =:= true) and E3
%%
%%  but expressions such as
%%
%%     not (E =:= true) and is_boolean(E)
%%
%%  cannot be transformed in this way (such expressions are the reason for
%%  adding the is_boolean/1 test in the first place).
%%
unforce(_E, _Eps, []) ->
    %% Nothing to filter.
    [];
unforce(E, Eps, Vs) ->
    %% Substitute the pre-expression bindings into the guard expression
    %% and scan the resulting tree for (E =:= Bool) comparisons.
    Tree = unforce_tree(Eps ++ [E], gb_trees:empty()),
    unforce(Tree, Vs).
%% unforce_tree([Iset|...], Dict) -> Expr.
%%  Resolve the pre-expression bindings into a single expression tree
%%  by substituting each bound variable with its definition. The list
%%  always ends with the final guard expression (a call or a variable).
unforce_tree([#iset{var=#c_var{name=V},arg=Arg0}|Es], D0) ->
    Arg = unforce_tree_subst(Arg0, D0),
    D = gb_trees:insert(V, Arg, D0),
    unforce_tree(Es, D);
unforce_tree([#icall{}=Call], D) ->
    unforce_tree_subst(Call, D);
unforce_tree([#c_var{name=V}], D) ->
    gb_trees:get(V, D).
%% unforce_tree_subst(Expr, Dict) -> Expr'.
%%  Substitute variables in the arguments of a call with their
%%  definitions from Dict, except inside (Expr =:= Bool) comparisons.
unforce_tree_subst(#icall{module=#c_literal{val=erlang},
                          name=#c_literal{val='=:='},
                          args=[_Expr,#c_literal{val=Bool}]}=Call, _)
  when is_boolean(Bool) ->
    %% We have erlang:'=:='(Expr, Bool). We must not expand this call any more
    %% or we will not recognize is_boolean(Expr) later.
    Call;
unforce_tree_subst(#icall{args=Args0}=Call, D) ->
    Args = map(fun(#c_var{name=V}=Var) ->
                       case gb_trees:lookup(V, D) of
                           {value,Val} -> Val;
                           none -> Var
                       end;
                  (Expr) -> Expr
               end, Args0),
    Call#icall{args=Args};
unforce_tree_subst(Expr, _) -> Expr.
%% unforce(Expr, [Var]) -> [Var]'.
%%  Walk the substituted guard expression. Whenever a comparison
%%  (E =:= true/false) is found, E is already known to be tested
%%  against a boolean, so it can be removed from the candidate list.
unforce(#icall{module=#c_literal{val=erlang},
               name=#c_literal{val=Name},
               args=Args}, Vs0) ->
    case {Name,Args} of
        {'and',[Arg1,Arg2]} ->
            %% Both operands of 'and' are safe to examine.
            Vs = unforce(Arg1, Vs0),
            unforce(Arg2, Vs);
        {'=:=',[E,#c_literal{val=Bool}]} when is_boolean(Bool) ->
            Vs0 -- [set_anno(E, [])];
        {_,_} ->
            %% Give up.
            Vs0
    end;
unforce(_, Vs) -> Vs.
%% exprs([Expr], State) -> {[Cexpr],State}.
%%  Flatten top-level exprs: each expression is translated and its
%%  pre-expressions are spliced in before the translated value.
exprs([E|Es], St0) ->
    {Ce,Pre,St1} = expr(E, St0),
    {Ces,St2} = exprs(Es, St1),
    {Pre ++ [Ce|Ces],St2};
exprs([], St) ->
    {[],St}.
%% expr(Expr, State) -> {Cexpr,[PreExp],State}.
%%  Generate an internal core expression. The pre-expressions must be
%%  evaluated (in order) before the returned Core expression.

%% Constants and variables translate directly.
expr({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St};
expr({char,L,C}, St) -> {#c_literal{anno=full_anno(L, St),val=C},[],St};
expr({integer,L,I}, St) -> {#c_literal{anno=full_anno(L, St),val=I},[],St};
expr({float,L,F}, St) -> {#c_literal{anno=full_anno(L, St),val=F},[],St};
expr({atom,L,A}, St) -> {#c_literal{anno=full_anno(L, St),val=A},[],St};
expr({nil,L}, St) -> {#c_literal{anno=full_anno(L, St),val=[]},[],St};
expr({string,L,S}, St) -> {#c_literal{anno=full_anno(L, St),val=S},[],St};
expr({cons,L,H0,T0}, St0) ->
    {H1,Hps,St1} = safe(H0, St0),
    {T1,Tps,St2} = safe(T0, St1),
    A = full_anno(L, St2),
    {annotate_cons(A, H1, T1, St2),Hps ++ Tps,St2};
expr({lc,L,E,Qs0}, St0) ->
    %% List comprehension; accumulate onto the empty list.
    {Qs1,St1} = preprocess_quals(L, Qs0, St0),
    lc_tq(L, E, Qs1, #c_literal{anno=lineno_anno(L, St1),val=[]}, St1);
expr({bc,L,E,Qs}, St) ->
    %% Binary comprehension.
    bc_tq(L, E, Qs, St);
expr({tuple,L,Es0}, St0) ->
    {Es1,Eps,St1} = safe_list(Es0, St0),
    A = record_anno(L, St1),
    {annotate_tuple(A, Es1, St1),Eps,St1};
expr({map,L,Es0}, St0) ->
    %% Map construction: build the pairs onto an empty literal map.
    map_build_pairs(#c_literal{val=#{}}, Es0, full_anno(L, St0), St0);
expr({map,L,M,Es}, St) ->
    %% Map update: M#{...}.
    expr_map(M, Es, L, St);
expr({bin,L,Es0}, St0) ->
    try expr_bin(Es0, full_anno(L, St0), St0) of
        {_,_,_}=Res -> Res
    catch
        throw:{bad_binary,Eps,St1} ->
            %% The construction is certain to fail at runtime; emit a
            %% warning and replace it with an erlang:error(badarg) call.
            St = add_warning(L, bad_binary, St1),
            LineAnno = lineno_anno(L, St),
            As = [#c_literal{anno=LineAnno,val=badarg}],
            {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
                    module=#c_literal{anno=LineAnno,val=erlang},
                    name=#c_literal{anno=LineAnno,val=error},
                    args=As},Eps,St}
    end;
expr({block,_,Es0}, St0) ->
    %% Inline the block directly.
    {Es1,St1} = exprs(droplast(Es0), St0),
    {E1,Eps,St2} = expr(last(Es0), St1),
    {E1,Es1 ++ Eps,St2};
expr({'if',L,Cs0}, St0) ->
    %% 'if' becomes a case with no arguments; falling off the end
    %% raises if_clause.
    {Cs1,St1} = clauses(Cs0, St0),
    Lanno = lineno_anno(L, St1),
    Fc = fail_clause([], Lanno, #c_literal{val=if_clause}),
    {#icase{anno=#a{anno=Lanno},args=[],clauses=Cs1,fc=Fc},[],St1};
expr({'case',L,E0,Cs0}, St0) ->
    {E1,Eps,St1} = novars(E0, St0),
    {Cs1,St2} = clauses(Cs0, St1),
    {Fpat,St3} = new_var(St2),
    Lanno = lineno_anno(L, St2),
    Fc = fail_clause([Fpat], Lanno, c_tuple([#c_literal{val=case_clause},Fpat])),
    {#icase{anno=#a{anno=Lanno},args=[E1],clauses=Cs1,fc=Fc},Eps,St3};
expr({'receive',L,Cs0}, St0) ->
    {Cs1,St1} = clauses(Cs0, St0),
    {#ireceive1{anno=#a{anno=lineno_anno(L, St1)},clauses=Cs1},[],St1};
expr({'receive',L,Cs0,Te0,Tes0}, St0) ->
    %% receive with an 'after' timeout.
    {Te1,Teps,St1} = novars(Te0, St0),
    {Tes1,St2} = exprs(Tes0, St1),
    {Cs1,St3} = clauses(Cs0, St2),
    {#ireceive2{anno=#a{anno=lineno_anno(L, St3)},
                clauses=Cs1,timeout=Te1,action=Tes1},Teps,St3};
expr({'try',L,Es0,[],Ecs,[]}, St0) ->
    %% 'try ... catch ... end'
    {Es1,St1} = exprs(Es0, St0),
    {V,St2} = new_var(St1),             %This name should be arbitrary
    {Evs,Hs,St3} = try_exception(Ecs, St2),
    Lanno = lineno_anno(L, St3),
    {#itry{anno=#a{anno=Lanno},args=Es1,vars=[V],body=[V],
           evars=Evs,handler=Hs},
     [],St3};
expr({'try',L,Es0,Cs0,Ecs,[]}, St0) ->
    %% 'try ... of ... catch ... end'
    {Es1,St1} = exprs(Es0, St0),
    {V,St2} = new_var(St1),             %This name should be arbitrary
    {Cs1,St3} = clauses(Cs0, St2),
    {Fpat,St4} = new_var(St3),
    Lanno = lineno_anno(L, St4),
    Fc = fail_clause([Fpat], Lanno,
                     c_tuple([#c_literal{val=try_clause},Fpat])),
    {Evs,Hs,St5} = try_exception(Ecs, St4),
    {#itry{anno=#a{anno=lineno_anno(L, St5)},args=Es1,
           vars=[V],body=[#icase{anno=#a{anno=Lanno},args=[V],clauses=Cs1,fc=Fc}],
           evars=Evs,handler=Hs},
     [],St5};
expr({'try',L,Es0,[],[],As0}, St0) ->
    %% 'try ... after ... end'
    %% The after-body is lifted into a local fun so that it can be
    %% called both on the success path and from the exception handler.
    {Es1,St1} = exprs(Es0, St0),
    {As1,St2} = exprs(As0, St1),
    {Name,St3} = new_fun_name("after", St2),
    {V,St4} = new_var(St3),             % (must not exist in As1)
    LA = lineno_anno(L, St4),
    Lanno = #a{anno=LA},
    Fc = function_clause([], LA),
    Fun = #ifun{anno=Lanno,id=[],vars=[],
                clauses=[#iclause{anno=Lanno,pats=[],
                                  guard=[#c_literal{val=true}],
                                  body=As1}],
                fc=Fc},
    App = #iapply{anno=#a{anno=[compiler_generated|LA]},
                  op=#c_var{anno=LA,name={Name,0}},args=[]},
    {Evs,Hs,St5} = try_after([App], St4),
    Try = #itry{anno=Lanno,args=Es1,vars=[V],body=[App,V],evars=Evs,handler=Hs},
    Letrec = #iletrec{anno=Lanno,defs=[{{Name,0},Fun}],
                      body=[Try]},
    {Letrec,[],St5};
expr({'try',L,Es,Cs,Ecs,As}, St0) ->
    %% 'try ... [of ...] [catch ...] after ... end'
    %% Rewrite into a try-after wrapping an inner try-of-catch.
    expr({'try',L,[{'try',L,Es,Cs,Ecs,[]}],[],[],As}, St0);
expr({'catch',L,E0}, St0) ->
    {E1,Eps,St1} = expr(E0, St0),
    Lanno = lineno_anno(L, St1),
    {#icatch{anno=#a{anno=Lanno},body=Eps ++ [E1]},[],St1};
expr({'fun',L,{function,F,A}}, St0) ->
    %% fun F/A referring to a local function.
    {Fname,St1} = new_fun_name(St0),
    Lanno = full_anno(L, St1),
    Id = {0,0,Fname},
    {#c_var{anno=Lanno++[{id,Id}],name={F,A}},[],St1};
expr({'fun',L,{function,M,F,A}}, St0) ->
    %% fun M:F/A; implemented via erlang:make_fun/3.
    {As,Aps,St1} = safe_list([M,F,A], St0),
    Lanno = full_anno(L, St1),
    {#icall{anno=#a{anno=Lanno},
            module=#c_literal{val=erlang},
            name=#c_literal{val=make_fun},
            args=As},Aps,St1};
expr({'fun',L,{clauses,Cs}}, St) ->
    fun_tq(Cs, L, St, unnamed);
expr({named_fun,L,'_',Cs}, St) ->
    %% Named fun whose name is never used; treat as unnamed.
    fun_tq(Cs, L, St, unnamed);
expr({named_fun,L,Name,Cs}, St) ->
    fun_tq(Cs, L, St, {named,Name});
expr({call,L,{remote,_,M,F},As0}, St0) ->
    {[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0),
    Anno = full_anno(L, St1),
    {#icall{anno=#a{anno=Anno},module=M1,name=F1,args=As1},Aps,St1};
expr({call,Lc,{atom,Lf,F},As0}, St0) ->
    %% Local call; becomes an apply of the local function variable.
    {As1,Aps,St1} = safe_list(As0, St0),
    Op = #c_var{anno=lineno_anno(Lf, St1),name={F,length(As1)}},
    {#iapply{anno=#a{anno=lineno_anno(Lc, St1)},op=Op,args=As1},Aps,St1};
expr({call,L,FunExp,As0}, St0) ->
    %% Call through an arbitrary fun expression.
    {Fun,Fps,St1} = safe(FunExp, St0),
    {As1,Aps,St2} = safe_list(As0, St1),
    Lanno = lineno_anno(L, St2),
    {#iapply{anno=#a{anno=Lanno},op=Fun,args=As1},Fps ++ Aps,St2};
expr({match,L,P0,E0}, St0) ->
    %% First fold matches together to create aliases.
    {P1,E1} = fold_match(E0, P0),
    St1 = case P1 of
              {var,_,'_'} -> St0#core{wanted=false};
              _ -> St0
          end,
    {E2,Eps1,St2} = novars(E1, St1),
    St3 = St2#core{wanted=St0#core.wanted},
    {P2,St4} = try
                   pattern(P1, St3)
               catch
                   throw:Thrown ->
                       {Thrown,St3}
               end,
    {Fpat,St5} = new_var(St4),
    Lanno = lineno_anno(L, St5),
    Fc = fail_clause([Fpat], Lanno, c_tuple([#c_literal{val=badmatch},Fpat])),
    case P2 of
        nomatch ->
            %% The pattern will not match. We must take care here to
            %% bind all variables that the pattern would have bound
            %% so that subsequent expressions do not refer to unbound
            %% variables.
            %%
            %% As an example, this code:
            %%
            %%   [X] = {Y} = E,
            %%   X + Y.
            %%
            %% will be rewritten to:
            %%
            %%   error({badmatch,E}),
            %%   case E of
            %%      {[X],{Y}} ->
            %%        X + Y;
            %%      Other ->
            %%        error({badmatch,Other})
            %%   end.
            %%
            St6 = add_warning(L, nomatch, St5),
            {Expr,Eps3,St7} = safe(E1, St6),
            SanPat0 = sanitize(P1),
            {SanPat,St} = pattern(SanPat0, St7),
            Badmatch = c_tuple([#c_literal{val=badmatch},Expr]),
            Fail = #iprimop{anno=#a{anno=Lanno},
                            name=#c_literal{val=match_fail},
                            args=[Badmatch]},
            Eps = Eps3 ++ [Fail],
            {#imatch{anno=#a{anno=Lanno},pat=SanPat,arg=Expr,fc=Fc},Eps,St};
        Other when not is_atom(Other) ->
            %% We must rewrite top-level aliases to lets to avoid unbound
            %% variables in code such as:
            %%
            %%     <<42:Sz>> = Sz = B
            %%
            %% If we would keep the top-level aliases the example would
            %% be translated like this:
            %%
            %%     case B of
            %%         <Sz = #{#<42>(Sz,1,'integer',['unsigned'|['big']])}#>
            %%            when 'true' ->
            %%            .
            %%            .
            %%            .
            %%
            %% Here the variable Sz would be unbound in the binary pattern.
            %%
            %% Instead we bind Sz in a let to ensure it is bound when
            %% used in the binary pattern:
            %%
            %%     let <Sz> = B
            %%       in case Sz of
            %%          <#{#<42>(Sz,1,'integer',['unsigned'|['big']])}#>
            %%             when 'true' ->
            %%             .
            %%             .
            %%             .
            %%
            {P3,E3,Eps2} = letify_aliases(P2, E2),
            Eps = Eps1 ++ Eps2,
            {#imatch{anno=#a{anno=Lanno},pat=P3,arg=E3,fc=Fc},Eps,St5}
    end;
expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
    %% Optimise '++' here because of the list comprehension algorithm.
    %%
    %% To avoid achieving quadratic complexity if there is a chain of
    %% list comprehensions without generators combined with '++', force
    %% evaluation of More now. Evaluating More here could also reduce the
    %% number variables in the environment for letrec.
    {Mc,Mps,St1} = safe(More, St0),
    {Qs,St2} = preprocess_quals(Llc, Qs0, St1),
    {Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
    {Y,Mps++Yps,St};
expr({op,_,'andalso',_,_}=E0, St0) ->
    %% Expand 'andalso' into a boolean case switch.
    {op,L,'andalso',E1,E2} = right_assoc(E0, 'andalso'),
    Anno = lineno_anno(L, St0),
    {#c_var{name=V0},St} = new_var(Anno, St0),
    V = {var,L,V0},
    False = {atom,L,false},
    E = make_bool_switch(L, E1, V, E2, False),
    expr(E, St);
expr({op,_,'orelse',_,_}=E0, St0) ->
    %% Expand 'orelse' into a boolean case switch.
    {op,L,'orelse',E1,E2} = right_assoc(E0, 'orelse'),
    Anno = lineno_anno(L, St0),
    {#c_var{name=V0},St} = new_var(Anno, St0),
    V = {var,L,V0},
    True = {atom,L,true},
    E = make_bool_switch(L, E1, V, True, E2),
    expr(E, St);
expr({op,L,Op,A0}, St0) ->
    %% Unary operator; becomes a call to the erlang BIF.
    {A1,Aps,St1} = safe(A0, St0),
    LineAnno = full_anno(L, St1),
    {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
            module=#c_literal{anno=LineAnno,val=erlang},
            name=#c_literal{anno=LineAnno,val=Op},args=[A1]},Aps,St1};
expr({op,L,Op,L0,R0}, St0) ->
    %% Binary operator; becomes a call to the erlang BIF.
    {As,Aps,St1} = safe_list([L0,R0], St0),
    LineAnno = full_anno(L, St1),
    {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
            module=#c_literal{anno=LineAnno,val=erlang},
            name=#c_literal{anno=LineAnno,val=Op},args=As},Aps,St1}.
%% letify_aliases(Pattern, Expr) -> {Pattern',Expr',[PreExp]}.
%%  Rewrite top-level aliases (P1 = P2 = ... = E) into a chain of
%%  #iset{} bindings (lets), so that each alias variable is bound
%%  before the innermost pattern is matched.
letify_aliases(#c_alias{var=V,pat=P0}, E0) ->
    {P1,E1,Eps0} = letify_aliases(P0, V),
    {P1,E1,[#iset{var=V,arg=E0}|Eps0]};
letify_aliases(P, E) ->
    {P,E,[]}.
%% sanitize(Pat) -> SanitizedPattern
%%  Rewrite Pat so that it will be accepted by pattern/2 and will
%%  bind the same variables as the original pattern.
%%
%%  Here is an example of a pattern that would cause pattern/2
%%  to generate a 'nomatch' exception:
%%
%%      #{k:=X,k:=Y} = [Z]
%%
%%  The sanitized pattern will look like:
%%
%%      {{X,Y},[Z]}
sanitize({match,Anno,PatA,PatB}) ->
    {tuple,Anno,[sanitize(PatA),sanitize(PatB)]};
sanitize({cons,Anno,Head,Tail}) ->
    {cons,Anno,sanitize(Head),sanitize(Tail)};
sanitize({tuple,Anno,Elements}) ->
    {tuple,Anno,[sanitize(El) || El <- Elements]};
sanitize({bin,Anno,Segments}) ->
    %% Keep only the variables bound by the segments.
    Vars = [V || {bin_element,_,{var,_,_}=V,_,_} <- Segments],
    {tuple,Anno,Vars};
sanitize({map,Anno,Assocs}) ->
    %% Keep only the value patterns of exact associations.
    Values = [sanitize(Val) || {map_field_exact,_,_,Val} <- Assocs],
    {tuple,Anno,Values};
sanitize({op,Anno,_Op,PatA,PatB}) ->
    {tuple,Anno,[sanitize(PatA),sanitize(PatB)]};
sanitize(Other) -> Other.
%% make_bool_switch(Line, Expr, Var, TrueBody, FalseBody) -> CaseExpr.
%%  Build an abstract-format case that evaluates Expr and selects
%%  TrueBody for 'true', FalseBody for 'false', and raises
%%  erlang:error({badarg,Value}) for any other value.
make_bool_switch(L, E, V, T, F) ->
    NegL = no_compiler_warning(L),
    BadargCall = {call,NegL,
                  {remote,NegL,{atom,NegL,erlang},{atom,NegL,error}},
                  [{tuple,NegL,[{atom,NegL,badarg},V]}]},
    TrueClause = {clause,NegL,[{atom,NegL,true}],[],[T]},
    FalseClause = {clause,NegL,[{atom,NegL,false}],[],[F]},
    BadClause = {clause,NegL,[V],[],[BadargCall]},
    {'case',NegL,E,[TrueClause,FalseClause,BadClause]}.
%% expr_map(Map, [MapField], Line, State) -> {Icase,[PreExp],State}.
%%  Translate a map update M#{...}. The update is wrapped in a case
%%  guarded by is_map(M) so that a non-map argument fails with badmap.
expr_map(M0, Es0, L, St0) ->
    {M1,Eps0,St1} = safe_map(M0, St0),
    Badmap = badmap_term(M1, St1),
    A = lineno_anno(L, St1),
    Fc = fail_clause([], [{eval_failure,badmap}|A], Badmap),
    {M2,Eps1,St2} = map_build_pairs(M1, Es0, full_anno(L, St1), St1),
    %% An empty update list means the (verified) map itself is the result.
    M3 = case Es0 of
             [] -> M1;
             [_|_] -> M2
         end,
    Cs = [#iclause{
             anno=#a{anno=[compiler_generated|A]},
             pats=[],
             guard=[#icall{anno=#a{anno=A},
                           module=#c_literal{anno=A,val=erlang},
                           name=#c_literal{anno=A,val=is_map},
                           args=[M1]}],
             body=[M3]}],
    Eps = Eps0 ++ Eps1,
    {#icase{anno=#a{anno=A},args=[],clauses=Cs,fc=Fc},Eps,St2}.
%% safe_map(Expr, State) -> {Safe,[PreExp],State}.
%%  Like safe/2, but additionally guarantees that a non-map safe value
%%  ends up in a variable so the printed Core Erlang stays parsable.
safe_map(M0, St0) ->
    case safe(M0, St0) of
        {#c_var{},_,_}=Res ->
            Res;
        {#c_literal{val=Map},_,_}=Res when is_map(Map) ->
            Res;
        {NotMap,Eps0,St1} ->
            %% Not a map. There will be a syntax error if we try to
            %% pretty-print the Core Erlang code and then try to parse
            %% it. To avoid the syntax error, force the term into a
            %% variable.
            {V,St2} = new_var(St1),
            Anno = cerl:get_ann(NotMap),
            Eps1 = [#iset{anno=#a{anno=Anno},var=V,arg=NotMap}],
            {V,Eps0++Eps1,St2}
    end.
%% badmap_term(Map, State) -> ErrorReason.
%%  Build the error reason used when a map operation is applied to a
%%  non-map value.
badmap_term(Map, #core{in_guard=InGuard}) ->
    case InGuard of
        true ->
            %% The code generator cannot handle complex error reasons
            %% in guards. But the exact error reason does not matter
            %% anyway since it is not user-visible.
            #c_literal{val=badmap};
        false ->
            c_tuple([#c_literal{val=badmap},Map])
    end.
%% map_build_pairs(Map, [MapField], Anno, State) -> {Cmap,[PreExp],State}.
%%  Translate the association list of a map construction/update and
%%  attach the resulting pairs to Map.
map_build_pairs(Map, Assocs, Ann, St0) ->
    {Pairs,Pre,_Used,St1} = map_build_pairs_1(Assocs, cerl_sets:new(), St0),
    {ann_c_map(Ann, Map, Pairs),Pre,St1}.
%% map_build_pairs_1([MapField], UsedKeys, State) ->
%%      {[CMapPair],[PreExp],UsedKeys,State}.
%%  Translate each association, keeping track of literal keys already
%%  seen so that repeated keys can be warned about.
map_build_pairs_1([{Op0,L,K0,V0}|Es], Used0, St0) ->
    {K,Pre0,St1} = safe(K0, St0),
    {V,Pre1,St2} = safe(V0, St1),
    {Pairs,Pre2,Used1,St3} = map_build_pairs_1(Es, Used0, St2),
    As = lineno_anno(L, St3),
    Op = map_op(Op0),
    {Used2,St4} = maybe_warn_repeated_keys(K, L, Used1, St3),
    Pair = cerl:ann_c_map_pair(As, Op, K, V),
    {[Pair|Pairs],Pre0++Pre1++Pre2,Used2,St4};
map_build_pairs_1([], Used, St) ->
    {[],[],Used,St}.
%% maybe_warn_repeated_keys(Key, Line, UsedKeys, State) -> {UsedKeys',State'}.
%%  Emit a map_key_repeated warning if the literal Key has already been
%%  seen in the same map construction. Non-literal keys are ignored.
maybe_warn_repeated_keys(Ck,Line,Used,St) ->
    case cerl:is_literal(Ck) of
        false -> {Used,St};
        true ->
            K = cerl:concrete(Ck),
            case cerl_sets:is_element(K,Used) of
                true ->
                    {Used, add_warning(Line, {map_key_repeated,K}, St)};
                false ->
                    {cerl_sets:add_element(K,Used), St}
            end
    end.
%% map_op(AbstractMapFieldTag) -> c_literal().
%%  Translate an abstract-format map association tag to the Core
%%  Erlang map pair operation literal.
map_op(map_field_assoc) -> #c_literal{val=assoc};
map_op(map_field_exact) -> #c_literal{val=exact}.
%% try_exception([ExcpClause], St) -> {[ExcpVar],Handler,St}.
%%  Translate the catch clauses of a try. The handler matches the
%%  {Tag,Value,Info} tuple of exception variables; if no user clause
%%  matches, the exception is rethrown with the raise primop.
try_exception(Ecs0, St0) ->
    %% Note that Tag is not needed for rethrow - it is already in Info.
    {Evs,St1} = new_vars(3, St0),       % Tag, Value, Info
    {Ecs1,St2} = clauses(Ecs0, St1),
    Ecs2 = try_build_stacktrace(Ecs1, hd(Evs)),
    [_,Value,Info] = Evs,
    LA = case Ecs2 of
             [] -> [];
             [C|_] -> get_lineno_anno(C)
         end,
    Ec = #iclause{anno=#a{anno=[compiler_generated|LA]},
                  pats=[c_tuple(Evs)],guard=[#c_literal{val=true}],
                  body=[#iprimop{anno=#a{},  %Must have an #a{}
                                 name=#c_literal{val=raise},
                                 args=[Info,Value]}]},
    Hs = [#icase{anno=#a{anno=LA},args=[c_tuple(Evs)],clauses=Ecs2,fc=Ec}],
    {Evs,Hs,St2}.
%% try_after([AfterExpr], St) -> {[ExcpVar],Handler,St}.
%%  Build the exception handler for 'try ... after': run the
%%  after-body, then rethrow the original exception.
try_after(As, St0) ->
    %% See above.
    {Evs,St1} = new_vars(3, St0),       % Tag, Value, Info
    [_,Value,Info] = Evs,
    B = As ++ [#iprimop{anno=#a{},      % Must have an #a{}
                        name=#c_literal{val=raise},
                        args=[Info,Value]}],
    Ec = #iclause{anno=#a{anno=[compiler_generated]},
                  pats=[c_tuple(Evs)],guard=[#c_literal{val=true}],
                  body=B},
    Hs = [#icase{anno=#a{},args=[c_tuple(Evs)],clauses=[],fc=Ec}],
    {Evs,Hs,St1}.
%% try_build_stacktrace([Iclause], RawStkVar) -> [Iclause].
%%  For each catch clause that actually uses the stacktrace variable,
%%  insert a build_stacktrace primop that converts the raw stack term
%%  into the symbolic stacktrace the clause body expects.
try_build_stacktrace([#iclause{pats=Ps0,body=B0}=C0|Cs], RawStk) ->
    [#c_tuple{es=[Class,Exc,Stk]}=Tup] = Ps0,
    case Stk of
        #c_var{name='_'} ->
            %% Stacktrace variable is not used. Nothing to do.
            [C0|try_build_stacktrace(Cs, RawStk)];
        _ ->
            %% Add code to build the stacktrace.
            Ps = [Tup#c_tuple{es=[Class,Exc,RawStk]}],
            Call = #iprimop{anno=#a{},
                            name=#c_literal{val=build_stacktrace},
                            args=[RawStk]},
            Iset = #iset{var=Stk,arg=Call},
            B = [Iset|B0],
            C = C0#iclause{pats=Ps,body=B},
            [C|try_build_stacktrace(Cs, RawStk)]
    end;
try_build_stacktrace([], _) -> [].
%% expr_bin([ArgExpr], St) -> {[Arg],[PreExpr],St}.
%%  Flatten the arguments of a bin. Do this straight left to right!
%%  Note that ibinary needs to have its annotation wrapped in a #a{}
%%  record whereas c_literal should not have a wrapped annotation.
expr_bin(Es0, Anno, St0) ->
    Es1 = [bin_element(E) || E <- Es0],
    case constant_bin(Es1) of
        error ->
            %% Not evaluable at compile time; translate each segment.
            case expr_bin_1(Es1, St0) of
                {[],Eps,St} ->
                    %% All segments vanished (e.g. empty strings);
                    %% the result is the empty binary literal.
                    EmptyBin = <<>>,
                    {#c_literal{anno=Anno,val=EmptyBin},Eps,St};
                {Es,Eps,St} ->
                    {#ibinary{anno=#a{anno=Anno},segments=Es},Eps,St}
            end;
        Bin ->
            %% Fully constant construction: embed the binary directly.
            {#c_literal{anno=Anno,val=Bin},[],St0}
    end.
%% expr_bin_1([BinElement], St) -> {[Bitstr],[PreExp],St}.
%%  Translate the segments right to left, accumulating pre-expressions.
%%  A bad segment aborts the translation, but the pre-expressions seen
%%  so far are kept (they may bind variables or have side effects).
expr_bin_1(Es, St0) ->
    Res = foldr(fun (E, {Ces,Eps0,S0}) ->
                        try bitstr(E, S0) of
                            {Ce,Eps,S1} when is_list(Ces) ->
                                {Ce++Ces,Eps ++ Eps0,S1};
                            {_Ce,Eps,S1} ->
                                %% A bad segment was seen previously;
                                %% keep collecting pre-expressions only.
                                {Ces,Eps ++ Eps0,S1}
                        catch
                            {bad_binary,Eps,S1} ->
                                {bad_binary,Eps ++ Eps0,S1}
                        end
                end, {[],[],St0}, Es),
    case Res of
        {bad_binary,Eps,St} ->
            throw({bad_binary,Eps,St});
        {_,_,_}=Res ->
            Res
    end.
%% bitstrs([BinElement], St) -> {[Bitstr],[PreExp],St}.
%%  Translate a list of binary segments, concatenating the resulting
%%  segment lists and pre-expressions in order.
bitstrs([E|Es], St0) ->
    {Seg,Pre0,St1} = bitstr(E, St0),
    {Segs,Pre1,St2} = bitstrs(Es, St1),
    {Seg ++ Segs,Pre0 ++ Pre1,St2};
bitstrs([], St) ->
    {[],[],St}.
%% bitstr(BinElement, St) -> {[Bitstr],[PreExp],St}.
%%  Translate a single binary segment into zero or more Core bitstrs.
%%  Throws {bad_binary,Eps,St} when the segment is certain to fail.
bitstr({bin_element,Line,{string,_,S},{integer,_,8},_}, St) ->
    %% Byte-sized string segment; expand into packed integer segments.
    bitstrs(bin_expand_string(S, Line, 0, 0), St);
bitstr({bin_element,Line,{string,_,[]},Sz0,Ts}, St0) ->
    %% Empty string. We must make sure that the type is correct.
    {[#c_bitstr{size=Sz}],Eps0,St1} =
        bitstr({bin_element,Line,{char,Line,0},Sz0,Ts}, St0),
    %% At this point, the type is either a correct literal or
    %% an expression.
    case Sz of
        #c_literal{val=undefined} ->
            %% One of the utf* types. The size is not used.
            {[],[],St1};
        #c_literal{val=Int} when is_integer(Int), Int >= 0 ->
            {[],[],St1};
        #c_var{} ->
            %% Must add a test to verify that the size expression is
            %% an integer >= 0.
            Erlang = {atom,Line,erlang},
            Test0 = {call,Line,{remote,Line,Erlang,{atom,Line,is_integer}},
                     [Sz0]},
            Test1 = {call,Line,{remote,Line,Erlang,{atom,Line,'>='}},
                     [Sz0,{integer,Line,0}]},
            Test2 = {op,Line,'andalso',Test0,Test1},
            Fail = {call,Line,{remote,Line,Erlang,{atom,Line,error}},
                    [{atom,Line,badarg}]},
            Test = {op,Line,'orelse',Test2,Fail},
            Match = {match,Line,{var,Line,'_'},Test},
            {_,Eps1,St2} = expr(Match, St1),
            Eps = Eps0 ++ Eps1,
            {[],Eps,St2}
    end;
bitstr({bin_element,Line,{string,_,S},Sz0,Ts}, St0) ->
    %% General string segment: translate one prototype character
    %% segment and replicate it for every character in the string.
    {[Bitstr],Eps,St1} = bitstr({bin_element,Line,{char,Line,0},Sz0,Ts}, St0),
    Es = [Bitstr#c_bitstr{val=#c_literal{anno=full_anno(Line, St1),val=C}} ||
             C <- S],
    {Es,Eps,St1};
bitstr({bin_element,_,E0,Size0,[Type,{unit,Unit}|Flags]}, St0) ->
    {E1,Eps0,St1} = safe(E0, St0),
    {Size1,Eps1,St2} = safe(Size0, St1),
    Eps = Eps0 ++ Eps1,
    %% Validate that a literal value is compatible with the segment type.
    case {Type,E1} of
        {_,#c_var{}} -> ok;
        {integer,#c_literal{val=I}} when is_integer(I) -> ok;
        {utf8,#c_literal{val=I}} when is_integer(I) -> ok;
        {utf16,#c_literal{val=I}} when is_integer(I) -> ok;
        {utf32,#c_literal{val=I}} when is_integer(I) -> ok;
        {float,#c_literal{val=V}} when is_number(V) -> ok;
        {binary,#c_literal{val=V}} when is_bitstring(V) -> ok;
        {_,_} ->
            %% Note that the pre expressions may bind variables that
            %% are used later or have side effects.
            throw({bad_binary,Eps,St2})
    end,
    %% Validate the size expression.
    case Size1 of
        #c_var{} -> ok;
        #c_literal{val=Sz} when is_integer(Sz), Sz >= 0 -> ok;
        #c_literal{val=undefined} -> ok;
        #c_literal{val=all} -> ok;
        _ -> throw({bad_binary,Eps,St2})
    end,
    {[#c_bitstr{val=E1,size=Size1,
                unit=#c_literal{val=Unit},
                type=#c_literal{val=Type},
                flags=#c_literal{val=Flags}}],
     Eps,St2}.
%% bin_element(BinElement) -> BinElement'.
%%  Normalize the size and type specification of a binary segment
%%  (filling in defaults) via make_bit_type/3.
bin_element({bin_element,Anno,Expr,Sz0,Type0}) ->
    {Sz,Type} = make_bit_type(Anno, Sz0, Type0),
    {bin_element,Anno,Expr,Sz,Type}.
%% make_bit_type(Line, Size, TypeList) -> {Size',TypeList'}.
%%  Fill in default sizes and normalize the type specifier list of a
%%  binary segment. Throws 'nomatch' for a user-written 'all' size.
make_bit_type(Line, default, Type0) ->
    case erl_bits:set_bit_type(default, Type0) of
        {ok,all,Bt} -> {make_all_size(Line),erl_bits:as_list(Bt)};
        {ok,undefined,Bt} -> {{atom,Line,undefined},erl_bits:as_list(Bt)};
        {ok,Size,Bt} -> {{integer,Line,Size},erl_bits:as_list(Bt)}
    end;
make_bit_type(_Line, {atom,Anno,all}=Size, Type0) ->
    case erl_anno:generated(Anno) of
        true ->
            %% This `all` was created by the compiler from a binary
            %% segment without a size.
            {ok,Size,Bt} = erl_bits:set_bit_type(Size, Type0),
            {Size,erl_bits:as_list(Bt)};
        false ->
            %% This `all` was present in the source code. It is not
            %% a valid size.
            throw(nomatch)
    end;
make_bit_type(_Line, Size0, Type0) ->  %Integer or 'all'
    {ok,Size1,Bt} = erl_bits:set_bit_type(Size0, Type0),
    %% A char size is numerically identical to an integer size;
    %% normalize it so later passes only see integers.
    Size = case Size1 of
               {char,Anno,CharVal} -> {integer,Anno,CharVal};
               _ -> Size1
           end,
    {Size,erl_bits:as_list(Bt)}.
%% make_all_size(Line) -> {atom,GeneratedAnno,all}.
%%  Build a compiler-generated 'all' size. Marking the annotation as
%%  generated lets make_bit_type/3 tell it apart from a user-written
%%  'all' size (which is invalid).
make_all_size(Line) ->
    {atom,erl_anno:set_generated(true, Line),all}.
%% constant_bin([{bin_element,_,_,_,_}]) -> binary() | error
%%  If the binary construction is truly constant (no variables,
%%  no native fields), and does not contain fields whose expansion
%%  become huge (such as <<0:100000000>>), evaluate and return the binary;
%%  otherwise return 'error'.
constant_bin(Es) ->
    try constant_bin_1(Es)
    catch
        throw:error -> error
    end.
%% constant_bin_1([BinElement]) -> binary().
%%  Evaluate a constant binary construction using eval_bits. The
%%  evaluation fun only accepts literal terms; any other shape will
%%  make the evaluation fail. Throws 'error' for unsuitable fields.
constant_bin_1(Es) ->
    verify_suitable_fields(Es),
    EmptyBindings = erl_eval:new_bindings(),
    EvalFun = fun({string,_,S}, B) -> {value,S,B};
                 ({integer,_,I}, B) -> {value,I,B};
                 ({char,_,C}, B) -> {value,C,B};
                 ({float,_,F}, B) -> {value,F,B};
                 ({atom,_,undefined}, B) -> {value,undefined,B}
              end,
    try eval_bits:expr_grp(Es, EmptyBindings, EvalFun) of
        {value,Bin,EmptyBindings} ->
            Bin
    catch error:_ ->
            %% Evaluation failed (e.g. value does not fit the field).
            error
    end.
%% verify_suitable_fields([{bin_element,_,Val,Sz,Opts}]) -> ok.
%%  Throws 'error' if any field is unsuitable for compile-time
%%  evaluation (native endian, variables, or wastefully large sizes).
verify_suitable_fields([{bin_element,_,Val,SzTerm,Opts}|Es]) ->
    case member(big, Opts) orelse member(little, Opts) of
        true -> ok;
        false -> throw(error)           %Native endian.
    end,
    {unit,Unit} = keyfind(unit, 1, Opts),
    case {SzTerm,Val} of
        {{atom,_,undefined},{string,_,_}} ->
            %% UTF-8/16/32.
            ok;
        {{atom,_,undefined},{char,_,_}} ->
            %% UTF-8/16/32.
            ok;
        {{atom,_,undefined},{integer,_,_}} ->
            %% UTF-8/16/32.
            ok;
        {{integer,_,Sz},_} when Sz*Unit =< 256 ->
            %% Don't be cheap - always accept fields up to this size.
            ok;
        {{integer,_,Sz0},{integer,_,Int}} ->
            %% Estimate the number of bits needed to hold the integer
            %% literal. Check whether the field size is reasonable in
            %% proportion to the number of bits needed.
            Sz = Sz0*Unit,
            case count_bits(Int) of
                BitsNeeded when 2*BitsNeeded >= Sz ->
                    ok;
                _ ->
                    %% More than about half of the field size will be
                    %% filled out with zeroes - not acceptable.
                    throw(error)
            end;
        {_,_} ->
            %% Reject anything else. There are either variables,
            %% or a float with a huge size or an embedded binary.
            throw(error)
    end,
    verify_suitable_fields(Es);
verify_suitable_fields([]) -> ok.
%% Count the number of bits approximately needed to store Int.
%% (An exact result is not needed for this purpose.)
count_bits(Int) when Int < 0 ->
    count_bits_1(-Int, 64);
count_bits(Int) ->
    count_bits_1(Int, 64).
%% Shift the magnitude right 64 bits at a time, adding 64 to the
%% accumulator for each step, until nothing remains.
count_bits_1(0, Acc) ->
    Acc;
count_bits_1(Remaining, Acc) ->
    count_bits_1(Remaining bsr 64, Acc + 64).
%% bin_expand_string(String, Line, Acc, NumBits) -> [BinElement].
%%  Pack the characters of a byte-sized string segment into large
%%  integer segments of at most 2048 bits each.
bin_expand_string(S, Line, Acc, NumBits) when NumBits >= 2048 ->
    %% Current chunk is full; flush it and start a new one.
    Chunk = make_combined(Line, Acc, NumBits),
    [Chunk|bin_expand_string(S, Line, 0, 0)];
bin_expand_string([Char|Rest], Line, Acc, NumBits) ->
    bin_expand_string(Rest, Line, (Acc bsl 8) bor Char, NumBits + 8);
bin_expand_string([], Line, Acc, NumBits) ->
    [make_combined(Line, Acc, NumBits)].
%% make_combined(Line, Val, Size) -> BinElement.
%%  Build an unsigned big-endian integer segment of Size bits
%%  holding Val.
make_combined(Line, Val, Size) ->
    Flags = [integer,{unit,1},unsigned,big],
    {bin_element,Line,{integer,Line,Val},{integer,Line,Size},Flags}.
%% fun_tq([Clause], Line, State, NameInfo) -> {Fun,[PreExp],State}.
%%  Translate a fun expression (named or unnamed) into an #ifun{}.
fun_tq(Cs0, L, St0, NameInfo) ->
    Arity = clause_arity(hd(Cs0)),
    {Cs1,St1} = clauses(Cs0, St0),
    {Args,St2} = new_vars(Arity, St1),
    {Ps,St3} = new_vars(Arity, St2),    %Need new variables here
    Anno = full_anno(L, St3),
    {Name,St4} = new_fun_name(St3),
    Fc = function_clause(Ps, Anno),
    Id = {0,0,Name},
    Fun = #ifun{anno=#a{anno=Anno},
                id=[{id,Id}],           %We KNOW!
                vars=Args,clauses=Cs1,fc=Fc,name=NameInfo},
    {Fun,[],St4}.
%% lc_tq(Line, Exp, [Qualifier], Mc, State) -> {LetRec,[PreExp],State}.
%%  This TQ from Simon PJ pp 127-138. Each generator becomes a local
%%  recursive function (letrec) with accumulator, skip and tail clauses.
lc_tq(Line, E, [#igen{anno=#a{anno=GA}=GAnno,
                      acc_pat=AccPat,acc_guard=AccGuard,
                      skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
                      arg={Pre,Arg}}|Qs], Mc, St0) ->
    {Name,St1} = new_fun_name("lc", St0),
    LA = lineno_anno(Line, St1),
    LAnno = #a{anno=LA},
    F = #c_var{anno=LA,name={Name,1}},
    Nc = #iapply{anno=GAnno,op=F,args=[Tail]},
    {[FcVar,Var],St2} = new_vars(2, St1),
    Fc = function_clause([FcVar], GA),
    TailClause = #iclause{anno=LAnno,pats=[TailPat],guard=[],body=[Mc]},
    Cs0 = case {AccPat,AccGuard} of
              {SkipPat,[]} ->
                  %% Skip and accumulator patterns are the same and there is
                  %% no guard, no need to generate a skip clause.
                  [TailClause];
              _ ->
                  [#iclause{anno=#a{anno=[compiler_generated|LA]},
                            pats=[SkipPat],guard=[],body=[Nc]},
                   TailClause]
          end,
    {Cs,St4} = case AccPat of
                   nomatch ->
                       %% The accumulator pattern never matches, no need
                       %% for an accumulator clause.
                       {Cs0,St2};
                   _ ->
                       {Lc,Lps,St3} = lc_tq(Line, E, Qs, Nc, St2),
                       {[#iclause{anno=LAnno,pats=[AccPat],guard=AccGuard,
                                  body=Lps ++ [Lc]}|Cs0],
                        St3}
               end,
    Fun = #ifun{anno=GAnno,id=[],vars=[Var],clauses=Cs,fc=Fc},
    {#iletrec{anno=GAnno#a{anno=[list_comprehension|GA]},defs=[{{Name,1},Fun}],
              body=Pre ++ [#iapply{anno=GAnno,op=F,args=[Arg]}]},
     [],St4};
lc_tq(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
    filter_tq(Line, E, Filter, Mc, St, Qs, fun lc_tq/5);
lc_tq(Line, E0, [], Mc0, St0) ->
    %% No qualifiers left: cons the element onto the accumulated list.
    {H1,Hps,St1} = safe(E0, St0),
    {T1,Tps,St} = force_safe(Mc0, St1),
    Anno = lineno_anno(Line, St),
    E = ann_c_cons(Anno, H1, T1),
    {set_anno(E, [compiler_generated|Anno]),Hps ++ Tps,St}.
%% bc_tq(Line, Exp, [Qualifier], More, State) -> {LetRec,[PreExp],State}.
%%  This TQ from Gustafsson ERLANG'05.
%%  More could be transformed before calling bc_tq.
bc_tq(Line, Exp, Qs0, St0) ->
    {BinVar,St1} = new_var(St0),
    %% Pre-compute an initial size estimate so the writable binary can
    %% be allocated up front.
    {Sz,SzPre,St2} = bc_initial_size(Exp, Qs0, St1),
    {Qs,St3} = preprocess_quals(Line, Qs0, St2),
    {E,BcPre,St} = bc_tq1(Line, Exp, Qs, BinVar, St3),
    Pre = SzPre ++
        [#iset{var=BinVar,
               arg=#iprimop{name=#c_literal{val=bs_init_writable},
                            args=[Sz]}}] ++ BcPre,
    {E,Pre,St}.
%% bc_tq1(Line, Exp, [Qualifier], AccVar, State) -> {LetRec,[PreExp],State}.
%%  Translate one generator of a binary comprehension into a local
%%  recursive function of arity 2 (input tail, accumulator binary).
bc_tq1(Line, E, [#igen{anno=GAnno,
                       acc_pat=AccPat,acc_guard=AccGuard,
                       skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
                       arg={Pre,Arg}}|Qs], Mc, St0) ->
    {Name,St1} = new_fun_name("lbc", St0),
    LA = lineno_anno(Line, St1),
    LAnno = #a{anno=LA},
    {[_,AccVar]=Vars,St2} = new_vars(LA, 2, St1),
    {[_,_]=FcVars,St3} = new_vars(LA, 2, St2),
    F = #c_var{anno=LA,name={Name,2}},
    Nc = #iapply{anno=GAnno,op=F,args=[Tail,AccVar]},
    Fc = function_clause(FcVars, LA),
    TailClause = #iclause{anno=LAnno,pats=[TailPat,AccVar],guard=[],
                          body=[AccVar]},
    Cs0 = case {AccPat,AccGuard} of
              {SkipPat,[]} ->
                  %% Skip and accumulator patterns are the same and there is
                  %% no guard, no need to generate a skip clause.
                  [TailClause];
              _ ->
                  [#iclause{anno=#a{anno=[compiler_generated|LA]},
                            pats=[SkipPat,AccVar],guard=[],body=[Nc]},
                   TailClause]
          end,
    {Cs,St} = case AccPat of
                  nomatch ->
                      %% The accumulator pattern never matches, no need
                      %% for an accumulator clause.
                      {Cs0,St3};
                  _ ->
                      {Bc,Bps,St4} = bc_tq1(Line, E, Qs, AccVar, St3),
                      Body = Bps ++ [#iset{var=AccVar,arg=Bc},Nc],
                      {[#iclause{anno=LAnno,
                                 pats=[AccPat,AccVar],guard=AccGuard,
                                 body=Body}|Cs0],
                       St4}
              end,
    Fun = #ifun{anno=LAnno,id=[],vars=Vars,clauses=Cs,fc=Fc},
    {#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,2},Fun}],
              body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg,Mc]}]},
     [],St};
bc_tq1(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
    filter_tq(Line, E, Filter, Mc, St, Qs, fun bc_tq1/5);
bc_tq1(_, {bin,Bl,Elements}, [], AccVar, St0) ->
    %% The element is itself a binary construction; append its
    %% segments directly onto the accumulator.
    bc_tq_build(Bl, [], AccVar, Elements, St0);
bc_tq1(Line, E0, [], AccVar, St0) ->
    BsFlags = [binary,{unit,1}],
    BsSize = make_all_size(Line),
    {E1,Pre0,St1} = safe(E0, St0),
    case E1 of
        #c_var{name=VarName} ->
            %% Append the variable as a binary segment of size 'all'.
            Var = {var,Line,VarName},
            Els = [{bin_element,Line,Var,BsSize,BsFlags}],
            bc_tq_build(Line, Pre0, AccVar, Els, St1);
        #c_literal{val=Val} when is_bitstring(Val) ->
            %% Append the literal bitstring as an integer segment of
            %% exactly its own width.
            Bits = bit_size(Val),
            <<Int0:Bits>> = Val,
            Int = {integer,Line,Int0},
            Sz = {integer,Line,Bits},
            Els = [{bin_element,Line,Int,Sz,[integer,{unit,1},big]}],
            bc_tq_build(Line, Pre0, AccVar, Els, St1);
        _ ->
            %% Any other safe (cons, tuple, literal) is not a
            %% bitstring. Force the evaluation to fail (and
            %% generate a warning).
            Els = [{bin_element,Line,{atom,Line,bad_value},BsSize,BsFlags}],
            bc_tq_build(Line, Pre0, AccVar, Els, St1)
    end.
%% bc_tq_build(Line, [PreExp], AccVar, [BinElement], State) ->
%%      {Cexpr,[PreExp],State}.
%%  Build <<AccVar/binary, Elements...>>, i.e. append the new segments
%%  to the accumulator binary, and mark the result as compiler
%%  generated and single use (enables in-place append optimization).
bc_tq_build(Line, Pre0, #c_var{name=AccVar}, Elements0, St0) ->
    Elements = [{bin_element,Line,{var,Line,AccVar},make_all_size(Line),
                 [binary,{unit,1}]}|Elements0],
    {E,Pre,St} = expr({bin,Line,Elements}, St0),
    #a{anno=A} = Anno0 = get_anno(E),
    Anno = Anno0#a{anno=[compiler_generated,single_use|A]},
    {set_anno(E, Anno),Pre0++Pre,St}.
%% filter_tq(Line, Expr, Filter, Mc, State, [Qualifier], TqFun) ->
%% {Case,[PreExpr],State}.
%% Transform an intermediate comprehension filter to its intermediate case
%% representation. TqFun is the translation function for the remaining
%% qualifiers (lc_tq/5 or bc_tq1/5), and Mc is the continuation used when
%% the current element is skipped.
filter_tq(Line, E, #ifilter{anno=#a{anno=LA}=LAnno,arg={Pre,Arg}},
Mc, St0, Qs, TqFun) ->
%% The filter is an expression, it is compiled to a case of degree 1 with
%% 3 clauses, one accumulating, one skipping and the final one throwing
%% {case_clause,Value} where Value is the result of the filter and is not a
%% boolean.
{Lc,Lps,St1} = TqFun(Line, E, Qs, Mc, St0),
{FailPat,St2} = new_var(St1),
%% The fail clause raises {case_clause,Value} for a non-boolean filter result.
Fc = fail_clause([FailPat], LA,
c_tuple([#c_literal{val=case_clause},FailPat])),
{#icase{anno=LAnno#a{anno=[list_comprehension|LA]},args=[Arg],
clauses=[#iclause{anno=LAnno,
pats=[#c_literal{val=true}],guard=[],
body=Lps ++ [Lc]},
#iclause{anno=LAnno#a{anno=[compiler_generated|LA]},
pats=[#c_literal{val=false}],guard=[],
body=[Mc]}],
fc=Fc},
Pre,St2};
filter_tq(Line, E, #ifilter{anno=#a{anno=LA}=LAnno,arg=Guard},
Mc, St0, Qs, TqFun) when is_list(Guard) ->
%% Otherwise it is a guard, compiled to a case of degree 0 with 2 clauses,
%% the first matches if the guard succeeds and the comprehension continues
%% or the second one is selected and the current element is skipped.
{Lc,Lps,St1} = TqFun(Line, E, Qs, Mc, St0),
{#icase{anno=LAnno#a{anno=[list_comprehension|LA]},args=[],
clauses=[#iclause{anno=LAnno,pats=[],guard=Guard,body=Lps ++ [Lc]}],
fc=#iclause{anno=LAnno#a{anno=[compiler_generated|LA]},
pats=[],guard=[],body=[Mc]}},
[],St1}.
%% preprocess_quals(Line, [Qualifier], State) -> {[Qualifier'],State}.
%% Preprocess a list of Erlang qualifiers into its intermediate representation,
%% represented as a list of #igen{} and #ifilter{} records. We recognise guard
%% tests and try to fold them together and join to a preceding generators, this
%% should give us better and more compact code.
preprocess_quals(Line, Qs, St) ->
preprocess_quals(Line, Qs, St, []).
%% Worker with an accumulator; the result is reversed at the end.
preprocess_quals(Line, [Q|Qs0], St0, Acc) ->
case is_generator(Q) of
true ->
%% Attach any guard tests that directly follow the generator
%% to the generator itself (its acc_guard field).
{Gs,Qs} = splitwith(fun is_guard_test/1, Qs0),
{Gen,St} = generator(Line, Q, Gs, St0),
preprocess_quals(Line, Qs, St, [Gen|Acc]);
false ->
LAnno = #a{anno=lineno_anno(get_qual_anno(Q), St0)},
case is_guard_test(Q) of
true ->
%% When a filter is a guard test, its argument in the
%% #ifilter{} record is a list as returned by
%% lc_guard_tests/2.
{Gs,Qs} = splitwith(fun is_guard_test/1, Qs0),
{Cg,St} = lc_guard_tests([Q|Gs], St0),
Filter = #ifilter{anno=LAnno,arg=Cg},
preprocess_quals(Line, Qs, St, [Filter|Acc]);
false ->
%% Otherwise, it is a pair {Pre,Arg} as in a generator
%% input.
{Ce,Pre,St} = novars(Q, St0),
Filter = #ifilter{anno=LAnno,arg={Pre,Ce}},
preprocess_quals(Line, Qs0, St, [Filter|Acc])
end
end;
preprocess_quals(_, [], St, Acc) ->
{reverse(Acc),St}.
%% is_generator(Qualifier) -> boolean().
%%  A comprehension qualifier is a generator if it is a list generator
%%  (Pat <- Expr) or a bit string generator (Pat <= Expr).
is_generator(Q) ->
    case Q of
        {generate,_,_,_} -> true;
        {b_generate,_,_,_} -> true;
        _ -> false
    end.
%% get_qual_anno(AbstractForm) -> Anno.
%%  Retrieve the annotation from an Erlang AST form.
%%  (Use get_anno/1 to retrieve the annotation from Core Erlang forms.)
get_qual_anno(Form) -> element(2, Form).
%%
%% Generators are abstracted as sextuplets:
%% - acc_pat is the accumulator pattern, e.g. [Pat|Tail] for Pat <- Expr.
%% - acc_guard is the list of guards immediately following the current
%% generator in the qualifier list input.
%% - skip_pat is the skip pattern, e.g. <<X,_:X,Tail/bitstring>> for
%% <<X,1:X>> <= Expr.
%% - tail is the variable used in AccPat and SkipPat bound to the rest of the
%% generator input.
%% - tail_pat is the tail pattern, respectively [] and <<_/bitstring>> for list
%% and bit string generators.
%% - arg is a pair {Pre,Arg} where Pre is the list of expressions to be
%% inserted before the comprehension function and Arg is the expression
%% that it should be passed.
%%
%% generator(Line, Generator, Guard, State) -> {Generator',State}.
%% Transform a given generator into its #igen{} representation.
%% List generator: Pat <- Expr.
generator(Line, {generate,Lg,P0,E}, Gs, St0) ->
LA = lineno_anno(Line, St0),
GA = lineno_anno(Lg, St0),
{Head,St1} = list_gen_pattern(P0, Line, St0),
{[Tail,Skip],St2} = new_vars(2, St1),
{Cg,St3} = lc_guard_tests(Gs, St2),
{AccPat,SkipPat} = case Head of
#c_var{} ->
%% If the generator pattern is a variable, the
%% pattern from the accumulator clause can be
%% reused in the skip one. lc_tq and bc_tq1 takes
%% care of dismissing the latter in that case.
Cons = ann_c_cons(LA, Head, Tail),
{Cons,Cons};
nomatch ->
%% If it never matches, there is no need for
%% an accumulator clause.
{nomatch,ann_c_cons(LA, Skip, Tail)};
_ ->
{ann_c_cons(LA, Head, Tail),
ann_c_cons(LA, Skip, Tail)}
end,
{Ce,Pre,St4} = safe(E, St3),
Gen = #igen{anno=#a{anno=GA},
acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Pre,Ce}},
{Gen,St4};
%% Bit string generator: Pat <= Expr.
generator(Line, {b_generate,Lg,P,E}, Gs, St0) ->
LA = lineno_anno(Line, St0),
GA = lineno_anno(Lg, St0),
try pattern(P, St0) of
{#ibinary{segments=Segs}=Cp,St1} ->
%% The function append_tail_segment/2 keeps variable
%% patterns as-is, making it possible to have the same
%% skip clause removal as with list generators.
{AccSegs,Tail,TailSeg,St2} = append_tail_segment(Segs, St1),
AccPat = Cp#ibinary{segments=AccSegs},
{Cg,St3} = lc_guard_tests(Gs, St2),
{SkipSegs,St4} = emasculate_segments(AccSegs, St3),
SkipPat = Cp#ibinary{segments=SkipSegs},
{Ce,Pre,St5} = safe(E, St4),
Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,
skip_pat=SkipPat,tail=Tail,
tail_pat=#ibinary{anno=#a{anno=LA},segments=[TailSeg]},
arg={Pre,Ce}},
{Gen,St5}
catch
%% The pattern can never match (pattern/2 threw nomatch);
%% still evaluate the generator input for its effects.
throw:nomatch ->
{Ce,Pre,St1} = safe(E, St0),
Gen = #igen{anno=#a{anno=GA},acc_pat=nomatch,acc_guard=[],
skip_pat=nomatch,
tail_pat=#c_var{name='_'},
arg={Pre,Ce}},
{Gen,St1}
end.
%% append_tail_segment([Seg], State) -> {[Seg],TailVar,TailSeg,State}.
%%  Append a rest-of-binary segment (Var/bitstring) to the segments of
%%  a bit string generator pattern, returning the new segment list, the
%%  tail variable, and the tail segment itself.
append_tail_segment(Segs, St0) ->
{Var,St} = new_var(St0),
Tail = #ibitstr{val=Var,size=[#c_literal{val=all}],
unit=#c_literal{val=1},
type=#c_literal{val=binary},
flags=#c_literal{val=[unsigned,big]}},
{Segs++[Tail],Var,Tail,St}.
%% emasculate_segments([Seg], State) -> {[Seg],State}.
%%  Build the skip pattern from the accumulator pattern by replacing
%%  every non-variable segment value with a fresh (wildcard) variable.
%%  Variable segments are kept as-is so that identical accumulator and
%%  skip clauses can be detected and merged.
emasculate_segments(Segs, St) ->
emasculate_segments(Segs, St, []).
emasculate_segments([#ibitstr{val=#c_var{}}=B|Rest], St, Acc) ->
emasculate_segments(Rest, St, [B|Acc]);
emasculate_segments([B|Rest], St0, Acc) ->
{Var,St1} = new_var(St0),
emasculate_segments(Rest, St1, [B#ibitstr{val=Var}|Acc]);
emasculate_segments([], St, Acc) ->
{reverse(Acc),St}.
%% lc_guard_tests([Test], State) -> {[CoreTest],State}.
%%  Translate comprehension guard tests to Core Erlang, compiling them
%%  in guard context (in_guard=true is set for the duration).
lc_guard_tests([], St) -> {[],St};
lc_guard_tests(Gs0, St0) ->
Gs1 = guard_tests(Gs0),
{Gs,St} = gexpr_top(Gs1, St0#core{in_guard=true}),
{Gs,St#core{in_guard=false}}.
%% list_gen_pattern(Pattern, Line, State) -> {CorePattern | nomatch, State}.
%%  Translate a list generator pattern; a pattern that can never match
%%  is turned into the atom 'nomatch' plus a warning.
list_gen_pattern(P0, Line, St) ->
try
pattern(P0, St)
catch
nomatch -> {nomatch,add_warning(Line, nomatch, St)}
end.
%%%
%%% Generate code to calculate the initial size for
%%% the result binary in a binary comprehension.
%%%
%% bc_initial_size(Expr, [Qualifier], State) -> {SizeExpr,[PreExpr],State}.
%%  Estimate element size * number of generated elements. If the
%%  estimate is impossible to compute, fall back to a fixed initial
%%  size of 256 bytes; if the element can never match, use 1.
bc_initial_size(E0, Q, St0) ->
try
E = bin_bin_element(E0),
{ElemSzExpr,ElemSzPre,EVs,St1} = bc_elem_size(E, St0),
{V,St2} = new_var(St1),
{GenSzExpr,GenSzPre,St3} = bc_gen_size(Q, EVs, St2),
case ElemSzExpr of
#c_literal{val=ElemSz} when ElemSz rem 8 =:= 0 ->
%% Whole number of bytes per element: multiply directly.
NumBytesExpr = #c_literal{val=ElemSz div 8},
BytesExpr = [#iset{var=V,
arg=bc_mul(GenSzExpr, NumBytesExpr)}],
{V,ElemSzPre++GenSzPre++BytesExpr,St3};
_ ->
%% Bit-level size: compute (Bits + 7) bsr 3 to round
%% up to whole bytes.
{[BitsV,PlusSevenV],St} = new_vars(2, St3),
BitsExpr = #iset{var=BitsV,arg=bc_mul(GenSzExpr, ElemSzExpr)},
PlusSevenExpr = #iset{var=PlusSevenV,
arg=bc_add(BitsV, #c_literal{val=7})},
Expr = #iset{var=V,
arg=bc_bsr(PlusSevenV, #c_literal{val=3})},
{V,ElemSzPre++GenSzPre++
[BitsExpr,PlusSevenExpr,Expr],St}
end
catch
throw:impossible ->
{#c_literal{val=256},[],St0};
throw:nomatch ->
{#c_literal{val=1},[],St0}
end.
%% bc_elem_size(BinExpr, State) -> {SizeExpr,[PreExpr],[Var],State}.
%%  Compute the bit size of one constructed binary element as a
%%  constant plus a sum of unit*variable products. Throws 'impossible'
%%  for anything that cannot be analyzed.
bc_elem_size({bin,_,El}, St0) ->
case bc_elem_size_1(El, ordsets:new(), 0, []) of
{Bits,[]} ->
%% Size is a known constant number of bits.
{#c_literal{val=Bits},[],[],St0};
{Bits,Vars0} ->
%% Group the size variables by unit and build the
%% multiply/add expression chain.
[{U,V0}|Pairs] = sort(Vars0),
F = bc_elem_size_combine(Pairs, U, [V0], []),
Vs = [V || {_,#c_var{name=V}} <- Vars0],
{E,Pre,St} = bc_mul_pairs(F, #c_literal{val=Bits}, [], St0),
{E,Pre,Vs,St}
end;
bc_elem_size(_, _) ->
throw(impossible).
%% bc_elem_size_1([BinElement], DefVars, Bits, SizeVars) -> {Bits,SizeVars}.
%%  Walk the segments accumulating constant bits and {Unit,SizeVar}
%%  pairs. DefVars tracks variables bound by earlier segments; a size
%%  variable that is bound within the same pattern cannot be used for
%%  the estimate (throws 'impossible').
bc_elem_size_1([{bin_element,_,{string,_,String},{integer,_,N},_}=El|Es],
DefVars, Bits, SizeVars) ->
U = get_unit(El),
bc_elem_size_1(Es, DefVars, Bits+U*N*length(String), SizeVars);
bc_elem_size_1([{bin_element,_,Expr,{integer,_,N},_}=El|Es],
DefVars0, Bits, SizeVars) ->
U = get_unit(El),
DefVars = bc_elem_size_def_var(Expr, DefVars0),
bc_elem_size_1(Es, DefVars, Bits+U*N, SizeVars);
bc_elem_size_1([{bin_element,_,Expr,{var,_,Src},_}=El|Es],
DefVars0, Bits, SizeVars) ->
case ordsets:is_element(Src, DefVars0) of
false ->
U = get_unit(El),
DefVars = bc_elem_size_def_var(Expr, DefVars0),
bc_elem_size_1(Es, DefVars, Bits, [{U,#c_var{name=Src}}|SizeVars]);
true ->
throw(impossible)
end;
bc_elem_size_1([_|_], _, _, _) ->
throw(impossible);
bc_elem_size_1([], _DefVars, Bits, SizeVars) ->
{Bits,SizeVars}.
%% bc_elem_size_def_var(Expr, DefVars) -> DefVars'.
%%  If the segment value is a variable, record it as defined for the
%%  following segments; any other expression defines nothing.
bc_elem_size_def_var({var,_,Name}, DefVars) ->
    ordsets:add_element(Name, DefVars);
bc_elem_size_def_var(_Other, DefVars) ->
    DefVars.
%% bc_elem_size_combine([{Unit,Var}], Unit, [Var], Acc) -> [{Unit,[Var]}].
%%  Group a unit-sorted list of {Unit,Var} pairs into one {Unit,Vars}
%%  entry per distinct unit.
bc_elem_size_combine([{Unit,Var}|Rest], CurUnit, CurVars, Acc) ->
    case Unit =:= CurUnit of
        true ->
            %% Same unit as the current group; extend it.
            bc_elem_size_combine(Rest, CurUnit, [Var|CurVars], Acc);
        false ->
            %% New unit; close the current group and open a new one.
            bc_elem_size_combine(Rest, Unit, [Var], [{CurUnit,CurVars}|Acc])
    end;
bc_elem_size_combine([], CurUnit, CurVars, Acc) ->
    [{CurUnit,CurVars}|Acc].
%% bc_mul_pairs([{Unit,[Var]}], Expr, [PreExpr], State) -> {Expr,[PreExpr],State}.
%%  Build the expression Sum(Unit * (V1 + V2 + ...)) + Expr, one group
%%  of unit-sharing size variables at a time.
bc_mul_pairs([{U,L0}|T], E0, Pre, St0) ->
{AddExpr,AddPre,St1} = bc_add_list(L0, St0),
{[V1,V2],St} = new_vars(2, St1),
Set1 = #iset{var=V1,arg=bc_mul(AddExpr, #c_literal{val=U})},
Set2 = #iset{var=V2,arg=bc_add(V1, E0)},
bc_mul_pairs(T, V2, [Set2,Set1|reverse(AddPre, Pre)], St);
bc_mul_pairs([], E, Pre, St) ->
{E,reverse(Pre),St}.
%% bc_add_list([Var], State) -> {Expr,[PreExpr],State}.
%%  Build an expression adding together all the variables in the list.
bc_add_list([V], St) ->
{V,[],St};
bc_add_list([H|T], St) ->
bc_add_list_1(T, [], H, St).
bc_add_list_1([H|T], Pre, E, St0) ->
{Var,St} = new_var(St0),
Set = #iset{var=Var,arg=bc_add(H, E)},
bc_add_list_1(T, [Set|Pre], Var, St);
bc_add_list_1([], Pre, E, St) ->
{E,reverse(Pre),St}.
%% bc_gen_size([Qualifier], ElemVars, State) -> {SizeExpr,[PreExpr],State}.
%%  Build an expression for the number of elements the generators will
%%  produce (the product of the generator lengths). Throws 'impossible'
%%  if there is any qualifier whose contribution cannot be computed
%%  (e.g. a filter, which may discard elements).
bc_gen_size(Q, EVs, St) ->
bc_gen_size_1(Q, EVs, #c_literal{val=1}, [], St).
bc_gen_size_1([{generate,L,El,Gen}|Qs], EVs, E0, Pre0, St0) ->
%% The pattern must not be able to filter out elements, or the
%% length of the input would overestimate the output size.
bc_verify_non_filtering(El, EVs),
case Gen of
{var,_,ListVar} ->
%% Generator input is a variable: emit a length/1 call.
Lanno = lineno_anno(L, St0),
{LenVar,St1} = new_var(St0),
Set = #iset{var=LenVar,
arg=#icall{anno=#a{anno=Lanno},
module=#c_literal{val=erlang},
name=#c_literal{val=length},
args=[#c_var{name=ListVar}]}},
{E,Pre,St} = bc_gen_size_mul(E0, LenVar, [Set|Pre0], St1),
bc_gen_size_1(Qs, EVs, E, Pre, St);
_ ->
%% The only expressions we handle is literal lists.
Len = bc_list_length(Gen, 0),
{E,Pre,St} = bc_gen_size_mul(E0, #c_literal{val=Len}, Pre0, St0),
bc_gen_size_1(Qs, EVs, E, Pre, St)
end;
bc_gen_size_1([{b_generate,_,El0,Gen0}|Qs], EVs, E0, Pre0, St0) ->
El = bin_bin_element(El0),
Gen = bin_bin_element(Gen0),
bc_verify_non_filtering(El, EVs),
%% Number of elements = bit size of the input divided by the
%% bit size matched by the pattern.
{MatchSzExpr,Pre1,_,St1} = bc_elem_size(El, St0),
Pre2 = reverse(Pre1, Pre0),
{ResVar,St2} = new_var(St1),
{BitSizeExpr,Pre3,St3} = bc_gen_bit_size(Gen, Pre2, St2),
Div = #iset{var=ResVar,arg=bc_div(BitSizeExpr,
MatchSzExpr)},
Pre4 = [Div|Pre3],
{E,Pre,St} = bc_gen_size_mul(E0, ResVar, Pre4, St3),
bc_gen_size_1(Qs, EVs, E, Pre, St);
bc_gen_size_1([], _, E, Pre, St) ->
{E,reverse(Pre),St};
bc_gen_size_1(_, _, _, _, _) ->
throw(impossible).
%% bin_bin_element(AbstractExpr) -> AbstractExpr.
%%  Normalize the elements of a binary expression (via bin_element/1);
%%  any other expression is passed through unchanged.
bin_bin_element({bin,L,El}) ->
{bin,L,[bin_element(E) || E <- El]};
bin_bin_element(Other) -> Other.
%% bc_gen_bit_size(GenExpr, [PreExpr], State) -> {SizeExpr,[PreExpr],State}.
%%  Bit size of a bit string generator input: a bit_size/1 call for a
%%  variable, a constant for a literal binary. Throws 'impossible'
%%  otherwise.
bc_gen_bit_size({var,L,V}, Pre0, St0) ->
Lanno = lineno_anno(L, St0),
{SzVar,St} = new_var(St0),
Pre = [#iset{var=SzVar,
arg=#icall{anno=#a{anno=Lanno},
module=#c_literal{val=erlang},
name=#c_literal{val=bit_size},
args=[#c_var{name=V}]}}|Pre0],
{SzVar,Pre,St};
bc_gen_bit_size({bin,_,_}=Bin, Pre, St) ->
{#c_literal{val=bc_bin_size(Bin)},Pre,St};
bc_gen_bit_size(_, _, _) ->
throw(impossible).
%% bc_verify_non_filtering(Pattern, ElemVars) -> ok.
%%  Verify that a generator pattern cannot filter out any elements,
%%  i.e. it consists only of variables, none of which occur among the
%%  element size variables. Throws 'impossible' otherwise.
bc_verify_non_filtering({bin,_,Els}, EVs) ->
    _ = [bc_verify_segment(Seg, EVs) || Seg <- Els],
    ok;
bc_verify_non_filtering({var,_,V}, EVs) ->
    bc_verify_var(V, EVs);
bc_verify_non_filtering(_, _) ->
    throw(impossible).

%% A binary segment is only acceptable if its value is a plain variable.
bc_verify_segment({bin_element,_,{var,_,V},_,_}, EVs) ->
    bc_verify_var(V, EVs);
bc_verify_segment(_, _) ->
    throw(impossible).

%% A pattern variable must not be one of the element size variables.
bc_verify_var(V, EVs) ->
    case lists:member(V, EVs) of
        true -> throw(impossible);
        false -> ok
    end.
%% bc_list_length(AbstractList, Len) -> Len'.
%%  Length of a literal list in abstract format, accumulated onto Len.
%%  Throws 'impossible' for anything that is not a literal list.
bc_list_length(Form, Len) ->
    case Form of
        {nil,_} -> Len;
        {string,_,Str} -> Len + length(Str);
        {cons,_,_,Tail} -> bc_list_length(Tail, Len + 1);
        _ -> throw(impossible)
    end.
%% bc_bin_size(AbstractBin) -> Bits.
%%  Bit size of a literal binary in abstract format. All segment sizes
%%  must be integer literals; throws 'impossible' otherwise.
bc_bin_size({bin,_,Els}) ->
bc_bin_size_1(Els, 0).
bc_bin_size_1([{bin_element,_,{string,_,String},{integer,_,Sz},_}=El|Els], N) ->
U = get_unit(El),
%% A string segment contributes its size once per character.
bc_bin_size_1(Els, N+U*Sz*length(String));
bc_bin_size_1([{bin_element,_,_,{integer,_,Sz},_}=El|Els], N) ->
U = get_unit(El),
bc_bin_size_1(Els, N+U*Sz);
bc_bin_size_1([], N) -> N;
bc_bin_size_1(_, _) -> throw(impossible).
%% bc_gen_size_mul(Expr, Expr, [PreExpr], State) -> {Expr,[PreExpr],State}.
%%  Multiply two size expressions, binding the product to a fresh
%%  variable. Multiplying by the literal 1 is a no-op.
bc_gen_size_mul(#c_literal{val=1}, E, Pre, St) ->
{E,Pre,St};
bc_gen_size_mul(E1, E2, Pre, St0) ->
{V,St} = new_var(St0),
{V,[#iset{var=V,arg=bc_mul(E1, E2)}|Pre],St}.
%% Arithmetic helpers building erlang:'*'/'div'/'+'/'bsr' calls for the
%% size computation. Multiplication by 1 and addition of 0 are folded
%% away at build time.
bc_mul(E1, #c_literal{val=1}) ->
E1;
bc_mul(E1, E2) ->
#icall{module=#c_literal{val=erlang},
name=#c_literal{val='*'},
args=[E1,E2]}.
bc_div(E1, E2) ->
#icall{module=#c_literal{val=erlang},
name=#c_literal{val='div'},
args=[E1,E2]}.
bc_add(E1, #c_literal{val=0}) ->
E1;
bc_add(E1, E2) ->
#icall{module=#c_literal{val=erlang},
name=#c_literal{val='+'},
args=[E1,E2]}.
bc_bsr(E1, E2) ->
#icall{module=#c_literal{val=erlang},
name=#c_literal{val='bsr'},
args=[E1,E2]}.
%% get_unit(BinElement) -> Unit.
%%  Extract the unit from a segment's type specifier list. The {unit,U}
%%  entry is always present here (added by make_bit_type).
get_unit({bin_element,_,_,_,Flags}) ->
    {unit,Unit} = lists:keyfind(unit, 1, Flags),
    Unit.
%% is_guard_test(Expression) -> true | false.
%% Test if a general expression is a guard test.
%%
%% Note that a local function overrides a BIF with the same name.
%% For example, if there is a local function named is_list/1,
%% any unqualified call to is_list/1 will be to the local function.
%% The guard function must be explicitly called as erlang:is_list/1.
is_guard_test(E) ->
%% erl_expand_records has added a module prefix to any call
%% to a BIF or imported function. Any call without a module
%% prefix that remains must therefore be to a local function.
%% Hence the "is overridden" callback answers true for every
%% local {Name,Arity}, so unqualified calls are never guard tests.
IsOverridden = fun({_,_}) -> true end,
erl_lint:is_guard_test(E, [], IsOverridden).
%% novars(Expr, State) -> {Novars,[PreExpr],State}.
%% Generate a novars expression, basically a call or a safe. At this
%% level we do not need to do a deep check.
novars(E0, St0) ->
{E1,Eps,St1} = expr(E0, St0),
{Se,Sps,St2} = force_novars(E1, St1),
{Se,Eps ++ Sps,St2}.
%% force_novars(Kexpr, State) -> {Kexpr,[PreExpr],State}.
%%  Accept expressions that export no new variables as-is; anything
%%  else falls through to force_safe/2.
force_novars(#iapply{}=App, St) -> {App,[],St};
force_novars(#icall{}=Call, St) -> {Call,[],St};
force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too
force_novars(#ibinary{}=Bin, St) -> {Bin,[],St};
force_novars(#c_map{}=Bin, St) -> {Bin,[],St};
force_novars(Ce, St) ->
force_safe(Ce, St).
%% safe(Expr, State) -> {Safe,[PreExpr],State}.
%% Generate an internal safe expression. These are simples without
%% binaries which can fail. At this level we do not need to do a
%% deep check. Must do special things with matches here.
safe(E0, St0) ->
{E1,Eps,St1} = expr(E0, St0),
{Se,Sps,St2} = force_safe(E1, St1),
{Se,Eps ++ Sps,St2}.
%% safe_list([Expr], State) -> {[Safe],[PreExpr],State}.
%%  Translate a list of expressions to safes, collecting all the
%%  pre-expressions.
safe_list(Es, St) ->
foldr(fun (E, {Ces,Esp,St0}) ->
{Ce,Ep,St1} = safe(E, St0),
{[Ce|Ces],Ep ++ Esp,St1}
end, {[],[],St}, Es).
%% force_safe(Kexpr, State) -> {Safe,[PreExpr],State}.
%%  Turn any intermediate expression into a safe (variable or simple
%%  literal/cons/tuple), hoisting the original expression into a
%%  pre-expression binding when necessary.
force_safe(#imatch{pat=P,arg=E}=Imatch, St0) ->
{Le,Lps0,St1} = force_safe(E, St0),
Lps = Lps0 ++ [Imatch#imatch{arg=Le}],
%% Make sure we don't duplicate the expression E. sys_core_fold
%% will usually optimize away the duplicate expression, but may
%% generate a warning while doing so.
case Le of
#c_var{} ->
%% Le is a variable.
%% Thus: P = Le, Le. (Traditional, since the V2 compiler.)
{Le,Lps,St1};
_ ->
%% Le is not a variable.
%% Thus: NewVar = P = Le, NewVar. (New for R12B-1.)
%%
%% Note: It is tempting to rewrite V = Le to V = Le, V,
%% but that will generate extra warnings in sys_core_fold
%% for this expression:
%%
%% [{X,Y} || {X,_} <- E, (Y = X) =:= (Y = 1 + 1)]
%%
%% (There will be a 'case Y =:= Y of...' which will generate
%% a warning.)
{V,St2} = new_var(St1),
{V,Lps0 ++ [Imatch#imatch{pat=#c_alias{var=V,pat=P},arg=Le}],St2}
end;
force_safe(Ce, St0) ->
case is_safe(Ce) of
true -> {Ce,[],St0};
false ->
{V,St1} = new_var(St0),
{V,[#iset{var=V,arg=Ce}],St1}
end.
%% is_safe(Kexpr) -> boolean().
%%  A safe is a plain data constructor, an ordinary variable, or a
%%  literal. Note the clause order: a variable whose name is a
%%  {Name,Arity} pair denotes a fun and is NOT safe.
is_safe(#c_cons{}) -> true;
is_safe(#c_tuple{}) -> true;
is_safe(#c_var{name={_,_}}) -> false; %Fun. Not safe.
is_safe(#c_var{name=_}) -> true; %Ordinary variable.
is_safe(#c_literal{}) -> true;
is_safe(_) -> false.
%% fold_match(MatchExpr, Pat) -> {MatchPat,Expr}.
%%  Fold a chain of nested matches (P1 = P2 = ... = E) into one match
%%  carrying all the aliased patterns: the result is the rewritten
%%  pattern chain and the innermost (rightmost) expression.
fold_match({match,Anno,Pat,Rhs}, P0) ->
    {P1,InnerExpr} = fold_match(Rhs, P0),
    {{match,Anno,Pat,P1},InnerExpr};
fold_match(Expr, P0) ->
    {P0,Expr}.
%% pattern(Pattern, State) -> {CorePat,State}.
%% Transform a pattern by removing line numbers. We also normalise
%% aliases in patterns to standard form: {alias,Pat,[Var]}.
%% Throws 'nomatch' (via pat_alias) for patterns that can never match.
pattern({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},St};
pattern({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},St};
pattern({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},St};
pattern({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},St};
pattern({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},St};
pattern({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},St};
pattern({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},St};
pattern({cons,L,H,T}, St) ->
{Ph,St1} = pattern(H, St),
{Pt,St2} = pattern(T, St1),
{annotate_cons(lineno_anno(L, St), Ph, Pt, St2),St2};
pattern({tuple,L,Ps}, St) ->
{Ps1,St1} = pattern_list(Ps, St),
{annotate_tuple(record_anno(L, St), Ps1, St),St1};
pattern({map,L,Pairs}, St0) ->
{Ps,St1} = pattern_map_pairs(Pairs, St0),
{#imap{anno=#a{anno=lineno_anno(L, St1)},es=Ps},St1};
pattern({bin,L,Ps}, St0) ->
{Segments,St} = pat_bin(Ps, St0),
{#ibinary{anno=#a{anno=lineno_anno(L, St)},segments=Segments},St};
pattern({match,_,P1,P2}, St) ->
%% An alias pattern P1 = P2; normalized by pat_alias/2.
{Cp1,St1} = pattern(P1, St),
{Cp2,St2} = pattern(P2, St1),
{pat_alias(Cp1, Cp2),St2};
%% Evaluate compile-time expressions.
pattern({op,_,'++',{nil,_},R}, St) ->
pattern(R, St);
pattern({op,_,'++',{cons,Li,H,T},R}, St) ->
pattern({cons,Li,H,{op,Li,'++',T,R}}, St);
pattern({op,_,'++',{string,Li,L},R}, St) ->
pattern(string_to_conses(Li, L, R), St);
pattern({op,_Line,_Op,_A}=Op, St) ->
pattern(erl_eval:partial_eval(Op), St);
pattern({op,_Line,_Op,_L,_R}=Op, St) ->
pattern(erl_eval:partial_eval(Op), St).
%% pattern_map_pairs([MapFieldExact],State) -> {[#imappair{}],State}.
%%  Translate map pattern pairs and merge pairs with equal keys into
%%  one pair with an aliased value pattern.
pattern_map_pairs(Ps, St0) ->
{CMapPairs,St1} = mapfoldl(fun pattern_map_pair/2, St0, Ps),
{pat_alias_map_pairs(CMapPairs),St1}.
%% Translate one exact-match map pair; the key is an expression
%% (partially evaluated), the value is a pattern.
pattern_map_pair({map_field_exact,L,K,V}, St0) ->
Ck0 = erl_eval:partial_eval(K),
{Ck,St1} = exprs([Ck0], St0),
{Cv,St2} = pattern(V, St1),
{#imappair{anno=#a{anno=lineno_anno(L, St2)},
op=#c_literal{val=exact},
key=Ck,
val=Cv},St2}.
%% pat_alias_map_pairs([#imappair{}]) -> [#imappair{}].
%%  Group pairs by key and alias the value patterns of duplicate keys.
pat_alias_map_pairs(Ps) ->
D0 = foldl(fun(#imappair{key=K0}=Pair, A) ->
K = map_sort_key(K0, A),
case A of
#{K:=Aliases} ->
A#{K:=[Pair|Aliases]};
#{} ->
A#{K=>[Pair]}
end
end, #{}, Ps),
%% We must sort to ensure that the order remains consistent
%% between compilations.
D = sort(maps:to_list(D0)),
pat_alias_map_pairs_1(D).
pat_alias_map_pairs_1([{_,[#imappair{val=V0}=Pair|Vs]}|T]) ->
V = foldl(fun(#imappair{val=V}, Pat) ->
pat_alias(V, Pat)
end, V0, Vs),
[Pair#imappair{val=V}|pat_alias_map_pairs_1(T)];
pat_alias_map_pairs_1([]) -> [].
%% map_sort_key(Key, KeyMap) -> SortKey.
%%  Literal and variable keys compare structurally (sans annotations);
%%  any other key expression gets a unique {expr,N} key so it is never
%%  merged with another pair.
map_sort_key(Key, KeyMap) ->
case Key of
[#c_literal{}=Lit] ->
{atomic,cerl:set_ann(Lit, [])};
[#c_var{}=Var] ->
{atomic,cerl:set_ann(Var, [])};
_ ->
{expr,map_size(KeyMap)}
end.
%% pat_bin([BinElement], State) -> {[BinSeg],State}.
%%  Translate the segments of a binary pattern, expanding string
%%  segments into one segment per character first.
pat_bin(Ps0, St) ->
Ps = pat_bin_expand_strings(Ps0),
pat_segments(Ps, St).
%% pat_bin_expand_strings([BinElement]) -> [BinElement].
%%  Rewrite every string segment in a binary pattern into one
%%  bin_element per character, keeping the size and type specifiers.
pat_bin_expand_strings(Es0) ->
    lists:foldr(
      fun({bin_element,Line,{string,_,S},Sz,Ts}, Acc) ->
              [{bin_element,Line,{char,Line,C},Sz,Ts} || C <- S] ++ Acc;
         (Other, Acc) ->
              [Other|Acc]
      end, [], Es0).
%% pat_segments([BinElement], State) -> {[BinSeg],State}.
%%  Translate each segment in order, threading the state through.
pat_segments(Ps, St) ->
    mapfoldl(fun pat_segment/2, St, Ps).
%% pat_segment(BinElement, State) -> {#ibitstr{},State}.
%%  Translate one binary pattern segment. The size is partially
%%  evaluated and translated as an expression (it may reference
%%  variables bound by earlier segments).
pat_segment({bin_element,L,Val,Size0,Type0}, St) ->
{Size1,Type1} = make_bit_type(L, Size0, Type0),
[Type,{unit,Unit}|Flags] = Type1,
Anno = lineno_anno(L, St),
{Pval0,St1} = pattern(Val, St),
%% An integer literal matched against a float segment is
%% converted to the corresponding float literal.
Pval = coerce_to_float(Pval0, Type0),
Size = erl_eval:partial_eval(Size1),
{Psize,St2} = exprs([Size], St1),
{#ibitstr{anno=#a{anno=Anno},
val=Pval,size=Psize,
unit=#c_literal{val=Unit},
type=#c_literal{val=Type},
flags=#c_literal{val=Flags}},St2}.
%% coerce_to_float(CorePat, TypeSpec) -> CorePat.
%%  Convert an integer literal to a float for a float segment; kept
%%  unchanged if the conversion fails (e.g. integer too large).
coerce_to_float(#c_literal{val=Int}=E, [float|_]) when is_integer(Int) ->
try
E#c_literal{val=float(Int)}
catch
error:badarg -> E
end;
coerce_to_float(E, _) -> E.
%% pat_alias(CorePat, CorePat) -> AliasPat.
%% Normalise aliases. Trap bad aliases by throwing 'nomatch'.
%% NOTE: clause order is significant — variable/alias cases must be
%% tried before the generic data-pattern fallback.
pat_alias(#c_var{name=V1}=P, #c_var{name=V1}) -> P;
pat_alias(#c_var{name=V1}=Var,
#c_alias{var=#c_var{name=V2},pat=Pat}=Alias) ->
if
V1 =:= V2 ->
Alias;
true ->
Alias#c_alias{pat=pat_alias(Var, Pat)}
end;
pat_alias(#c_var{}=P1, P2) -> #c_alias{var=P1,pat=P2};
pat_alias(#c_alias{var=#c_var{name=V1}}=Alias, #c_var{name=V1}) ->
Alias;
pat_alias(#c_alias{var=#c_var{name=V1}=Var1,pat=P1},
#c_alias{var=#c_var{name=V2}=Var2,pat=P2}) ->
%% Merge the inner patterns, then re-attach both alias variables
%% (only once if they are the same variable).
Pat = pat_alias(P1, P2),
if
V1 =:= V2 ->
#c_alias{var=Var1,pat=Pat};
true ->
pat_alias(Var1, pat_alias(Var2, Pat))
end;
pat_alias(#c_alias{var=#c_var{}=Var,pat=P1}, P2) ->
#c_alias{var=Var,pat=pat_alias(P1, P2)};
pat_alias(#imap{es=Es1}=M, #imap{es=Es2}) ->
%% Aliased map patterns are merged pairwise by key.
M#imap{es=pat_alias_map_pairs(Es1 ++ Es2)};
pat_alias(P1, #c_var{}=Var) ->
#c_alias{var=Var,pat=P1};
pat_alias(P1, #c_alias{pat=P2}=Alias) ->
Alias#c_alias{pat=pat_alias(P1, P2)};
pat_alias(P1, P2) ->
%% Aliases between binaries are not allowed, so the only
%% legal patterns that remain are data patterns.
case cerl:is_data(P1) andalso cerl:is_data(P2) of
false -> throw(nomatch);
true -> ok
end,
Type = cerl:data_type(P1),
case cerl:data_type(P2) of
Type -> ok;
_ -> throw(nomatch)
end,
%% Same constructor: alias the arguments element-wise.
Es1 = cerl:data_es(P1),
Es2 = cerl:data_es(P2),
Es = pat_alias_list(Es1, Es2),
cerl:make_data(Type, Es).
%% pat_alias_list([A1], [A2]) -> [A].
%%  Element-wise pat_alias/2; an arity mismatch can never match.
pat_alias_list([A1|A1s], [A2|A2s]) ->
[pat_alias(A1, A2)|pat_alias_list(A1s, A2s)];
pat_alias_list([], []) -> [];
pat_alias_list(_, _) -> throw(nomatch).
%% pattern_list([Pattern], State) -> {[CorePat],State}.
%%  Translate a list of patterns, threading the state through.
pattern_list(Ps, St) ->
    mapfoldl(fun pattern/2, St, Ps).
%% string_to_conses(Line, Chars, Tail) -> AbstractList.
%%  Expand a string into an abstract-format cons chain of chars,
%%  ending in Tail.
string_to_conses(_Line, [], Tail) ->
    Tail;
string_to_conses(Line, [C|Cs], Tail) ->
    {cons,Line,{char,Line,C},string_to_conses(Line, Cs, Tail)}.
%% make_vars([Name]) -> [#c_var{}].
make_vars(Vs) -> [ #c_var{name=V} || V <- Vs ].
%% new_fun_name(State) -> {FunName,State}.
%%  Name for a fun lifted out of function F/A: "-F/A-fun-I-", where I
%%  is a per-module counter.
new_fun_name(#core{function={F,A},fcount=I}=St) ->
Name = "-" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A)
++ "-fun-" ++ integer_to_list(I) ++ "-",
{list_to_atom(Name),St#core{fcount=I+1}}.
%% new_fun_name(Type, State) -> {FunName,State}.
%%  Name for other generated functions (e.g. comprehension funs),
%%  "Type$^N" with the same counter.
new_fun_name(Type, #core{fcount=C}=St) ->
{list_to_atom(Type ++ "$^" ++ integer_to_list(C)),St#core{fcount=C+1}}.
%% new_var_name(State) -> {VarName,State}.
%%  Fresh variable names are simply successive integers.
new_var_name(#core{vcount=C}=St) ->
{C,St#core{vcount=C + 1}}.
%% new_var(State) -> {{var,Name},State}.
%% new_var(LineAnno, State) -> {{var,Name},State}.
new_var(St) ->
new_var([], St).
new_var(Anno, St0) when is_list(Anno) ->
{New,St} = new_var_name(St0),
{#c_var{anno=Anno,name=New},St}.
%% new_vars(Count, State) -> {[Var],State}.
%% new_vars(Anno, Count, State) -> {[Var],State}.
%% Make Count new variables.
new_vars(N, St) -> new_vars_1(N, [], St, []).
new_vars(Anno, N, St) -> new_vars_1(N, Anno, St, []).
new_vars_1(N, Anno, St0, Vs) when N > 0 ->
{V,St1} = new_var(Anno, St0),
new_vars_1(N-1, Anno, St1, [V|Vs]);
new_vars_1(0, _, St, Vs) -> {Vs,St}.
%% function_clause([Pat], LineAnno) -> #iclause{}.
%%  The default failure clause for a function: raises
%%  function_clause with the arguments.
function_clause(Ps, LineAnno) ->
fail_clause(Ps, LineAnno,
ann_c_tuple(LineAnno, [#c_literal{val=function_clause}|Ps])).
%% fail_clause([Pat], Anno, Reason) -> #iclause{}.
%%  A compiler-generated clause calling the match_fail primop with
%%  the given reason term.
fail_clause(Pats, Anno, Arg) ->
#iclause{anno=#a{anno=[compiler_generated]},
pats=Pats,guard=[],
body=[#iprimop{anno=#a{anno=Anno},name=#c_literal{val=match_fail},
args=[Arg]}]}.
%% right_assoc(Expr, Op) -> Expr'.
%%  Rotate a left-associated chain of the binary operator Op into a
%%  right-associated one, e.g. ((A Op B) Op C) => (A Op (B Op C)).
%%  Optimization for Dialyzer.
right_assoc({op,OuterAnno,Op,{op,InnerAnno,Op,Left,Mid},Right}, Op) ->
    Rotated = {op,InnerAnno,Op,Left,{op,OuterAnno,Op,Mid,Right}},
    right_assoc(Rotated, Op);
right_assoc(Expr, _Op) ->
    Expr.
%% annotate_tuple(Anno, [Element], State) -> CoreTuple.
%%  Build an annotated tuple. In Dialyzer mode a skeleton update is
%%  used so that constant elements are not coalesced into one literal.
annotate_tuple(A, Es, #core{dialyzer=Dialyzer}) ->
case Dialyzer of
true ->
%% Do not coalesce constant tuple elements. A Hack.
Node = cerl:ann_c_tuple(A, [cerl:c_var(any)]),
cerl:update_c_tuple_skel(Node, Es);
false ->
ann_c_tuple(A, Es)
end.
%% annotate_cons(Anno, Head, Tail, State) -> CoreCons.
%%  Same as annotate_tuple/3, but for a cons cell.
annotate_cons(A, H, T, #core{dialyzer=Dialyzer}) ->
case Dialyzer of
true ->
%% Do not coalesce constant conses. A Hack.
Node= cerl:ann_c_cons(A, cerl:c_var(any), cerl:c_var(any)),
cerl:update_c_cons_skel(Node, H, T);
false ->
ann_c_cons(A, H, T)
end.
%% ubody(Body, State) -> {Body,State}.
%%  Annotate a function body; no variables are known at the top.
ubody(B, St) -> uexpr(B, [], St).
%% ufun_clauses([Lclause], [KnownVar], State) -> {[Lclause],State}.
ufun_clauses(Lcs, Ks, St0) ->
mapfoldl(fun (Lc, St) -> ufun_clause(Lc, Ks, St) end, St0, Lcs).
%% ufun_clause(Lclause, [KnownVar], State) -> {Lclause,State}.
%%  Annotate one fun clause with used variables; a fun clause never
%%  exports new variables (ns=[]), and variables bound by its own
%%  patterns are not counted as used from the outside.
ufun_clause(Cl0, Ks, St0) ->
%% Since variables in fun heads shadow previous variables
%% with the same name, we used to send an empty list as the
%% known variables when doing liveness analysis of the patterns
%% (in the upattern functions).
%%
%% With the introduction of expressions in size for binary
%% segments and in map keys, all known variables must be
%% available when analysing those expressions, or some variables
%% might not be seen as used if, for example, the expression includes
%% a case construct.
%%
%% Therefore, we will send in the complete list of known variables
%% when doing liveness analysis of patterns. This is
%% safe because any shadowing variables in a fun head has
%% been renamed.
{Cl1,Pvs,Used,_,St1} = do_uclause(Cl0, Ks, St0),
A0 = get_anno(Cl1),
A = A0#a{us=subtract(Used, Pvs),ns=[]},
{Cl1#iclause{anno=A},St1}.
%% uclauses([Lclause], [KnownVar], State) -> {[Lclause],State}.
uclauses(Lcs, Ks, St0) ->
mapfoldl(fun (Lc, St) -> uclause(Lc, Ks, St) end, St0, Lcs).
%% uclause(Lclause, [KnownVar], State) -> {Lclause,State}.
%%  Annotate an ordinary clause with its used (us) and newly bound
%%  (ns) variables.
uclause(Cl0, Ks, St0) ->
{Cl1,_Pvs,Used,New,St1} = do_uclause(Cl0, Ks, St0),
A0 = get_anno(Cl1),
A = A0#a{us=Used,ns=New},
{Cl1#iclause{anno=A},St1}.
%% do_uclause(Lclause, [KnownVar], State) -> {Lclause,Pvs,Used,New,State}.
%%  Common worker: annotate patterns, guard and body, computing the
%%  pattern variables (Pvs), the previously-known variables used
%%  (Used) and the variables newly bound by the clause (New).
do_uclause(#iclause{anno=Anno,pats=Ps0,guard=G0,body=B0}, Ks0, St0) ->
{Ps1,Pg,Pvs,Pus,St1} = upattern_list(Ps0, Ks0, St0),
%% A pattern variable that was already known acts as a use
%% (an equality test), not a new binding.
Pu = union(Pus, intersection(Pvs, Ks0)),
Pn = subtract(Pvs, Pu),
Ks1 = union(Pn, Ks0),
{G1,St2} = uguard(Pg, G0, Ks1, St1),
Gu = used_in_any(G1),
Gn = new_in_any(G1),
Ks2 = union(Gn, Ks1),
{B1,St3} = uexprs(B0, Ks2, St2),
Used = intersection(union([Pu,Gu,used_in_any(B1)]), Ks0),
New = union([Pn,Gn,new_in_any(B1)]),
{#iclause{anno=Anno,pats=Ps1,guard=G1,body=B1},Pvs,Used,New,St3}.
%% uguard([Test], [Kexpr], [KnownVar], State) -> {[Kexpr],State}.
%% Build a guard expression list by folding in the equality tests.
%% The pattern-generated tests (Pg) are combined with the user guard
%% (Gs0) using erlang:'and' so that all of them must succeed.
uguard([], [], _, St) -> {[],St};
uguard(Pg, [], Ks, St) ->
%% No guard, so fold together equality tests.
uguard(droplast(Pg), [last(Pg)], Ks, St);
uguard(Pg, Gs0, Ks, St0) ->
%% Gs0 must contain at least one element here.
{Gs3,St5} = foldr(fun (T, {Gs1,St1}) ->
{L,St2} = new_var(St1),
{R,St3} = new_var(St2),
%% Bind the test and the accumulated guard result to fresh
%% variables and 'and' them together.
{[#iset{var=L,arg=T}] ++ droplast(Gs1) ++
[#iset{var=R,arg=last(Gs1)},
#icall{anno=#a{}, %Must have an #a{}
module=#c_literal{val=erlang},
name=#c_literal{val='and'},
args=[L,R]}],
St3}
end, {Gs0,St0}, Pg),
%%ok = io:fwrite("core ~w: ~p~n", [?LINE,Gs3]),
uexprs(Gs3, Ks, St5).
%% uexprs([Kexpr], [KnownVar], State) -> {[Kexpr],State}.
%%  Annotate a body (sequence of expressions). Matches are rewritten
%%  here: a match on a brand-new variable becomes a plain binding,
%%  anything else becomes a case with a single clause.
uexprs([#imatch{anno=A,pat=P0,arg=Arg,fc=Fc}|Les], Ks, St0) ->
case upat_is_new_var(P0, Ks) of
true ->
%% Assignment to a new variable.
uexprs([#iset{var=P0,arg=Arg}|Les], Ks, St0);
false when Les =:= [] ->
%% Need to explicitly return match "value", make
%% safe for efficiency.
{La0,Lps,St1} = force_safe(Arg, St0),
La = mark_compiler_generated(La0),
Mc = #iclause{anno=A,pats=[P0],guard=[],body=[La]},
uexprs(Lps ++ [#icase{anno=A,
args=[La0],clauses=[Mc],fc=Fc}], Ks, St1),
false ->
%% Match in mid-body: the rest of the body becomes the
%% body of the single case clause.
Mc = #iclause{anno=A,pats=[P0],guard=[],body=Les},
uexprs([#icase{anno=A,args=[Arg],
clauses=[Mc],fc=Fc}], Ks, St0)
end;
uexprs([Le0|Les0], Ks, St0) ->
{Le1,St1} = uexpr(Le0, Ks, St0),
%% Variables bound by this expression are known in the rest.
{Les1,St2} = uexprs(Les0, union((get_anno(Le1))#a.ns, Ks), St1),
{[Le1|Les1],St2};
uexprs([], _, St) -> {[],St}.
%% upat_is_new_var(Pattern, [KnownVar]) -> true|false.
%% Test whether the pattern is a single, previously unknown
%% variable.
upat_is_new_var(#c_var{name=V}, Ks) ->
not is_element(V, Ks);
upat_is_new_var(_, _) ->
false.
%% Mark a "safe" as compiler-generated.
%%  Recursively annotates conses, tuples, variables and literals so
%%  that later passes do not warn about the duplicated expression.
%%  Only safes can occur here, so no other node types need clauses.
mark_compiler_generated(#c_cons{anno=A,hd=H,tl=T}) ->
ann_c_cons([compiler_generated|A], mark_compiler_generated(H),
mark_compiler_generated(T));
mark_compiler_generated(#c_tuple{anno=A,es=Es0}) ->
Es = [mark_compiler_generated(E) || E <- Es0],
ann_c_tuple([compiler_generated|A], Es);
mark_compiler_generated(#c_var{anno=A}=Var) ->
Var#c_var{anno=[compiler_generated|A]};
mark_compiler_generated(#c_literal{anno=A}=Lit) ->
Lit#c_literal{anno=[compiler_generated|A]}.
%% uexpr(Kexpr, [KnownVar], State) -> {Kexpr,State}.
%%  Annotate Kexpr with its used variables (us) and the new variables
%%  (ns) that it binds, given the set of already known variables.
uexpr(#iset{anno=A,var=V,arg=A0}, Ks, St0) ->
    {A1,St1} = uexpr(A0, Ks, St0),
    %% The bound variable is new; it is also removed from the used
    %% set of the argument.
    {#iset{anno=A#a{us=del_element(V#c_var.name, (get_anno(A1))#a.us),
                    ns=add_element(V#c_var.name, (get_anno(A1))#a.ns)},
           var=V,arg=A1},St1};
%% imatch done in uexprs.
uexpr(#iletrec{anno=A,defs=Fs0,body=B0}, Ks, St0) ->
    %%ok = io:fwrite("~w: ~p~n", [?LINE,{Fs0,B0}]),
    {Fs1,St1} = mapfoldl(fun ({Name,F0}, S0) ->
                                 {F1,S1} = uexpr(F0, Ks, S0),
                                 {{Name,F1},S1}
                         end, St0, Fs0),
    {B1,St2} = uexprs(B0, Ks, St1),
    Used = used_in_any(map(fun ({_,F}) -> F end, Fs1) ++ B1),
    %% A letrec exports no new variables.
    {#iletrec{anno=A#a{us=Used,ns=[]},defs=Fs1,body=B1},St2};
uexpr(#icase{anno=#a{anno=Anno}=A,args=As0,clauses=Cs0,fc=Fc0}, Ks, St0) ->
    %% As0 will never generate new variables.
    {As1,St1} = uexpr_list(As0, Ks, St0),
    {Cs1,St2} = uclauses(Cs0, Ks, St1),
    {Fc1,St3} = uclause(Fc0, Ks, St2),
    Used = union(used_in_any(As1), used_in_any(Cs1)),
    %% Only variables new in every clause are exported; a case that
    %% originates from a list comprehension exports nothing.
    New = case member(list_comprehension, Anno) of
              true -> [];
              false -> new_in_all(Cs1)
          end,
    {#icase{anno=A#a{us=Used,ns=New},args=As1,clauses=Cs1,fc=Fc1},St3};
uexpr(#ifun{anno=A0,id=Id,vars=As,clauses=Cs0,fc=Fc0,name=Name}=Fun0, Ks0, St0) ->
    %% If there are known variables, first rename any pattern
    %% variables in the clauses that would shadow them.
    {Fun1,St2} = case Ks0 of
                     [] ->
                         {Fun0,St0};
                     [_|_] ->
                         {Cs1,St1} = rename_shadowing_clauses(Cs0, Ks0, St0),
                         {Fun0#ifun{clauses=Cs1},St1}
                 end,
    #ifun{clauses=Cs2} = Fun1,
    Avs = lit_list_vars(As),
    %% A named fun makes its own name known inside itself, unless the
    %% name is shadowed by one of the fun's arguments.
    Ks1 = case Name of
              unnamed -> Ks0;
              {named,FName} -> union(subtract([FName], Avs), Ks0)
          end,
    Ks2 = union(Avs, Ks1),
    {Cs3,St3} = ufun_clauses(Cs2, Ks2, St2),
    {Fc1,St4} = ufun_clause(Fc0, Ks2, St3),
    %% Only previously known variables (excluding the fun's own
    %% arguments) count as used by the fun.
    Used = subtract(intersection(used_in_any(Cs3), Ks1), Avs),
    A1 = A0#a{us=Used,ns=[]},
    {#ifun{anno=A1,id=Id,vars=As,clauses=Cs3,fc=Fc1,name=Name},St4};
uexpr(#iapply{anno=A,op=Op,args=As}, _, St) ->
    Used = union(lit_vars(Op), lit_list_vars(As)),
    {#iapply{anno=A#a{us=Used},op=Op,args=As},St};
uexpr(#iprimop{anno=A,name=Name,args=As}, _, St) ->
    Used = lit_list_vars(As),
    {#iprimop{anno=A#a{us=Used},name=Name,args=As},St};
uexpr(#icall{anno=A,module=Mod,name=Name,args=As}, _, St) ->
    Used = union([lit_vars(Mod),lit_vars(Name),lit_list_vars(As)]),
    {#icall{anno=A#a{us=Used},module=Mod,name=Name,args=As},St};
uexpr(#itry{anno=A,args=As0,vars=Vs,body=Bs0,evars=Evs,handler=Hs0}, Ks, St0) ->
    %% No variables are exported from try/catch. Starting in OTP 24,
    %% variables bound in the argument (the code between the 'try' and
    %% the 'of' keywords) are exported to the body (the code following
    %% the 'of' keyword).
    {As1,St1} = uexprs(As0, Ks, St0),
    ArgKs = union(Ks, new_in_any(As1)),
    {Bs1,St2} = uexprs(Bs0, ArgKs, St1),
    {Hs1,St3} = uexprs(Hs0, Ks, St2),
    Used = intersection(used_in_any(Bs1++Hs1++As1), Ks),
    {#itry{anno=A#a{us=Used,ns=[]},
           args=As1,vars=Vs,body=Bs1,evars=Evs,handler=Hs1},St3};
uexpr(#icatch{anno=A,body=Es0}, Ks, St0) ->
    {Es1,St1} = uexprs(Es0, Ks, St0),
    {#icatch{anno=A#a{us=used_in_any(Es1)},body=Es1},St1};
uexpr(#ireceive1{anno=A,clauses=Cs0}, Ks, St0) ->
    {Cs1,St1} = uclauses(Cs0, Ks, St0),
    {#ireceive1{anno=A#a{us=used_in_any(Cs1),ns=new_in_all(Cs1)},
                clauses=Cs1},St1};
uexpr(#ireceive2{anno=A,clauses=Cs0,timeout=Te0,action=Tes0}, Ks, St0) ->
    %% Te0 will never generate new variables.
    {Te1,St1} = uexpr(Te0, Ks, St0),
    {Cs1,St2} = uclauses(Cs0, Ks, St1),
    {Tes1,St3} = uexprs(Tes0, Ks, St2),
    Used = union([used_in_any(Cs1),used_in_any(Tes1),(get_anno(Te1))#a.us]),
    %% With no clauses only the timeout action can bind new variables.
    New = case Cs1 of
              [] -> new_in_any(Tes1);
              _ -> intersection(new_in_all(Cs1), new_in_any(Tes1))
          end,
    {#ireceive2{anno=A#a{us=Used,ns=New},
                clauses=Cs1,timeout=Te1,action=Tes1},St3};
uexpr(#iprotect{anno=A,body=Es0}, Ks, St0) ->
    {Es1,St1} = uexprs(Es0, Ks, St0),
    Used = used_in_any(Es1),
    {#iprotect{anno=A#a{us=Used},body=Es1},St1}; %No new variables escape!
uexpr(#ibinary{anno=A,segments=Ss}, _, St) ->
    Used = bitstr_vars(Ss),
    {#ibinary{anno=A#a{us=Used},segments=Ss},St};
uexpr(#c_literal{}=Lit, _, St) ->
    Anno = get_anno(Lit),
    {set_anno(Lit, #a{us=[],anno=Anno}),St};
uexpr(Simple, _, St) ->
    true = is_simple(Simple), %Sanity check!
    Vs = lit_vars(Simple),
    Anno = get_anno(Simple),
    {#isimple{anno=#a{us=Vs,anno=Anno},term=Simple},St}.
%% uexpr_list([Kexpr], [KnownVar], State) -> {[Kexpr],State}.
%%  Annotate each expression in a list, threading the state.
uexpr_list(Exprs, Known, St) ->
    mapfoldl(fun (E, S) -> uexpr(E, Known, S) end, St, Exprs).
%% upattern(Pat, [KnownVar], State) ->
%%      {Pat,[GuardTest],[NewVar],[UsedVar],State}.
%%  Annotate a pattern, renaming variables that are already known and
%%  producing an equality guard test for each renamed variable.
upattern(#c_var{name='_'}, _, St0) ->
    %% Replace '_' with a fresh variable.
    {New,St1} = new_var_name(St0),
    {#c_var{name=New},[],[New],[],St1};
upattern(#c_var{name=V}=Var, Ks, St0) ->
    case is_element(V, Ks) of
        true ->
            %% The variable is already bound: rename it and add the
            %% guard test NewVar =:= Var.
            {N,St1} = new_var_name(St0),
            New = #c_var{name=N},
            LA = get_lineno_anno(Var),
            Test = #icall{anno=#a{anno=LA,us=add_element(N, [V])},
                          module=#c_literal{val=erlang},
                          name=#c_literal{val='=:='},
                          args=[New,Var]},
            %% Test doesn't need protecting.
            {New,[Test],[N],[],St1};
        false -> {Var,[],[V],[],St0}
    end;
upattern(#c_cons{hd=H0,tl=T0}=Cons, Ks, St0) ->
    {H1,Hg,Hv,Hu,St1} = upattern(H0, Ks, St0),
    %% Variables bound by the head are known to the tail.
    {T1,Tg,Tv,Tu,St2} = upattern(T0, union(Hv, Ks), St1),
    {Cons#c_cons{hd=H1,tl=T1},Hg ++ Tg,union(Hv, Tv),union(Hu, Tu),St2};
upattern(#c_tuple{es=Es0}=Tuple, Ks, St0) ->
    {Es1,Esg,Esv,Eus,St1} = upattern_list(Es0, Ks, St0),
    {Tuple#c_tuple{es=Es1},Esg,Esv,Eus,St1};
upattern(#imap{es=Es0}=Map, Ks, St0) ->
    {Es1,Esg,Esv,Eus,St1} = upattern_list(Es0, Ks, St0),
    {Map#imap{es=Es1},Esg,Esv,Eus,St1};
upattern(#imappair{op=#c_literal{val=exact},key=K0,val=V0}=Pair,Ks,St0) ->
    {V,Vg,Vn,Vu,St1} = upattern(V0, Ks, St0),
    %% The map key is an expression, not a pattern.
    {K,St2} = uexprs(K0, Ks, St1),
    Ku = used_in_expr(K),
    {Pair#imappair{key=K,val=V},Vg,Vn,union(Ku, Vu),St2};
upattern(#ibinary{segments=Es0}=Bin, Ks, St0) ->
    {Es1,Esg,Esv,Eus,St1} = upat_bin(Es0, Ks, St0),
    {Bin#ibinary{segments=Es1},Esg,Esv,Eus,St1};
upattern(#c_alias{var=V0,pat=P0}=Alias, Ks, St0) ->
    {V1,Vg,Vv,Vu,St1} = upattern(V0, Ks, St0),
    {P1,Pg,Pv,Pu,St2} = upattern(P0, union(Vv, Ks), St1),
    {Alias#c_alias{var=V1,pat=P1},Vg ++ Pg,union(Vv, Pv),union(Vu, Pu),St2};
upattern(Other, _, St) -> {Other,[],[],[],St}. %Constants
%% upattern_list([Pat], [KnownVar], State) ->
%%      {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
%%  Annotate a list of patterns; variables bound by one pattern are
%%  known to the patterns that follow it.
upattern_list([Pat0|Pats0], Known, St0) ->
    {Pat,PatGs,PatNew,PatUsed,St1} = upattern(Pat0, Known, St0),
    {Pats,Gs,New,Used,St2} =
        upattern_list(Pats0, union(PatNew, Known), St1),
    {[Pat|Pats],PatGs ++ Gs,union(PatNew, New),union(PatUsed, Used),St2};
upattern_list([], _Known, St) -> {[],[],[],[],St}.
%% upat_bin([Pat], [KnownVar], State) ->
%%      {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
upat_bin(Es0, Ks, St0) ->
    {Es1,Pg,Pv,Pu0,St1} = upat_bin(Es0, Ks, [], St0),

    %% In a clause such as <<Sz:8,V:Sz>> in a function head, Sz will both
    %% be new and used; a situation that is not handled properly by
    %% uclause/4. (Basically, since Sz occurs in two sets that are
    %% subtracted from each other, Sz will not be added to the list of
    %% known variables and will seem to be new the next time it is
    %% used in a match.)
    %%   Since the variable Sz really is new (it does not use a
    %% value bound prior to the binary matching), Sz should only be
    %% included in the set of new variables. Thus we take it out of
    %% the set of used variables here.
    Pu1 = subtract(Pu0, intersection(Pv, Pu0)),
    {Es1,Pg,Pv,Pu1,St1}.
%% upat_bin([Pat], [KnownVar], [LocalVar], State) ->
%%      {[Pat],[GuardTest],[NewVar],[UsedVar],State}.
%%  Annotate binary segments, threading the list of locally renamed
%%  variables so that later size fields can be rewritten.
upat_bin([Seg0|Segs0], Known, Locals0, St0) ->
    {Seg,SegGs,SegNew,SegUsed,Locals,St1} =
        upat_element(Seg0, Known, Locals0, St0),
    {Segs,Gs,New,Used,St2} =
        upat_bin(Segs0, union(SegNew, Known), Locals, St1),
    {[Seg|Segs],SegGs ++ Gs,union(SegNew, New),union(SegUsed, Used),St2};
upat_bin([], _Known, _Locals, St) -> {[],[],[],[],St}.
%% upat_element(Segment, [KnownVar], [LocalVar], State) ->
%%      {Segment,[GuardTest],[NewVar],[UsedVar],[LocalVar],State}
%%  Annotate one binary segment, recording in the LocalVar list how
%%  variables bound earlier in the same binary were renamed so that
%%  size expressions can refer to the new names.
upat_element(#ibitstr{val=H0,size=Sz0}=Seg, Ks, Bs0, St0) ->
    %% The [] match asserts that the value pattern uses no variables.
    {H1,Hg,Hv,[],St1} = upattern(H0, Ks, St0),
    Bs1 = case H0 of
              #c_var{name=Hname} ->
                  case H1 of
                      #c_var{name=Hname} ->
                          %% Not renamed; nothing to record.
                          Bs0;
                      #c_var{name=Other} ->
                          %% The variable was renamed; record the
                          %% mapping for later size expressions.
                          [{Hname,Other}|Bs0]
                  end;
              _ ->
                  Bs0
          end,
    case Sz0 of
        [#c_var{name=Vname}] ->
            %% The size is a single variable; it may have been renamed
            %% earlier in this binary.
            {Sz1,Us} = rename_bitstr_size(Vname, Bs0),
            {Sz2,St2} = uexprs([Sz1], Ks, St1),
            {Seg#ibitstr{val=H1,size=Sz2},Hg,Hv,Us,Bs1,St2};
        [#c_literal{}] ->
            {Sz1,St2} = uexprs(Sz0, Ks, St1),
            Us = [],
            {Seg#ibitstr{val=H1,size=Sz1},Hg,Hv,Us,Bs1,St2};
        Expr when is_list(Expr) ->
            %% A general size expression: prepend isets binding the
            %% original local names to their renamed counterparts.
            Sz1 = [#iset{var=#c_var{name=Old},arg=#c_var{name=New}} ||
                      {Old,New} <- Bs0] ++ Expr,
            {Sz2,St2} = uexprs(Sz1, Ks, St1),
            Us = used_in_expr(Sz2),
            {Seg#ibitstr{val=H1,size=Sz2},Hg,Hv,Us,Bs1,St2}
    end.
%% rename_bitstr_size(VarName, [{Old,New}]) -> {Var,[UsedName]}.
%%  Look the variable up among the renamed binary-pattern variables;
%%  fall back to the original name when no renaming was recorded.
rename_bitstr_size(Name, [{Name,New}|_]) ->
    {#c_var{name=New},[New]};
rename_bitstr_size(Name, [_|Pairs]) ->
    rename_bitstr_size(Name, Pairs);
rename_bitstr_size(Name, []) ->
    {#c_var{name=Name},[Name]}.
%% used_in_expr([Kexpr]) -> [UsedVar].
%%  Variables used by a sequence of expressions, excluding uses of
%%  variables that an earlier expression in the sequence defines.
used_in_expr(Les) ->
    foldr(fun (Le, Acc) ->
                  #a{us=Us,ns=Ns} = get_anno(Le),
                  union(Us, subtract(Acc, Ns))
          end, [], Les).
%% used_in_any([Kexpr]) -> [UsedVar].
%%  The union of the used-variable sets of all expressions.
used_in_any(Les) ->
    union([(get_anno(Le))#a.us || Le <- Les]).
%% new_in_any([Kexpr]) -> [NewVar].
%%  The union of the new-variable sets of all expressions.
new_in_any(Les) ->
    union([(get_anno(Le))#a.ns || Le <- Les]).
%% new_in_all([Kexpr]) -> [NewVar].
%%  The intersection of the new-variable sets of all expressions;
%%  [] for an empty list.
new_in_all([First|Rest]) ->
    foldl(fun (Le, Acc) -> intersection((get_anno(Le))#a.ns, Acc) end,
          (get_anno(First))#a.ns, Rest);
new_in_all([]) -> [].
%%%
%%% Rename shadowing variables in fun heads.
%%%
%%% Pattern variables in fun heads always shadow variables bound in
%%% the enclosing environment. Because that is the way that variables
%%% behave in Core Erlang, there was previously no need to rename
%%% the variables.
%%%
%%% However, to support splitting of patterns and/or pattern matching
%%% compilation in Core Erlang, there is a need to rename all
%%% shadowing variables to avoid changing the semantics of the Erlang
%%% program.
%%%
%% rename_shadowing_clauses([Clause], [KnownVar], State) -> {[Clause],State}.
%%  Rename shadowing pattern variables in every fun clause.
rename_shadowing_clauses(Cs, Ks, St) ->
    mapfoldl(fun (C, S) -> rename_shadowing_clause(C, Ks, S) end, St, Cs).
%% rename_shadowing_clause(Clause, [KnownVar], State) -> {Clause,State}.
%%  Rename shadowing pattern variables in one fun clause and prepend
%%  the resulting rebinding isets to the guard and body.
rename_shadowing_clause(#iclause{pats=Pats0,guard=Guard0,body=Body0}=C, Ks, St0) ->
    {Pats,{_Isub,Osub},St} = ren_pats(Pats0, Ks, {[],[]}, St0),
    %% An empty guard must stay empty.
    Guard = case Guard0 of
                [] -> [];
                [_|_] -> Osub ++ Guard0
            end,
    {C#iclause{pats=Pats,guard=Guard,body=Osub ++ Body0},St}.
%% ren_pats([Pat], [KnownVar], {Isub,Osub}, State) -> {[Pat],{Isub,Osub},State}.
%%  Rename shadowing variables in a list of patterns, threading the
%%  substitution pair.
ren_pats([Pat0|Pats0], Ks, {_,_}=Subs0, St0) ->
    {Pat,Subs1,St1} = ren_pat(Pat0, Ks, Subs0, St0),
    {Pats,Subs,St2} = ren_pats(Pats0, Ks, Subs1, St1),
    {[Pat|Pats],Subs,St2};
ren_pats([], _Ks, {_,_}=Subs, St) ->
    {[],Subs,St}.
%% ren_pat(Pat, [KnownVar], {Isub,Osub}, State) -> {Pat,{Isub,Osub},State}.
%%  Rename shadowing variables in one fun-head pattern. Osub collects
%%  isets that rebind the original names for use in the guard and body.
ren_pat(#c_var{name='_'}=P, _Ks, Subs, St) ->
    {P,Subs,St};
ren_pat(#c_var{name=V}=Old, Ks, {Isub0,Osub0}=Subs, St0) ->
    case member(V, Ks) of
        true ->
            %% The variable shadows a known variable; rename it,
            %% reusing any earlier renaming of the same name.
            case ren_is_subst(V, Osub0) of
                {yes,New} ->
                    {New,Subs,St0};
                no ->
                    {New,St} = new_var(St0),
                    Osub = [#iset{var=Old,arg=New}|Osub0],
                    {New,{Isub0,Osub},St}
            end;
        false ->
            {Old,Subs,St0}
    end;
ren_pat(#c_literal{}=P, _Ks, {_,_}=Subs, St) ->
    {P,Subs,St};
ren_pat(#c_alias{var=Var0,pat=Pat0}=Alias, Ks, {_,_}=Subs0, St0) ->
    {Var,Subs1,St1} = ren_pat(Var0, Ks, Subs0, St0),
    {Pat,Subs,St} = ren_pat(Pat0, Ks, Subs1, St1),
    {Alias#c_alias{var=Var,pat=Pat},Subs,St};
ren_pat(#imap{es=Es0}=Map, Ks, {_,_}=Subs0, St0) ->
    {Es,Subs,St} = ren_pat_map(Es0, Ks, Subs0, St0),
    {Map#imap{es=Es},Subs,St};
ren_pat(#ibinary{segments=Es0}=P, Ks, {Isub,Osub0}, St0) ->
    %% Note that the inner substitution built while renaming the
    %% segments is discarded afterwards.
    {Es,_Isub,Osub,St} = ren_pat_bin(Es0, Ks, Isub, Osub0, St0),
    {P#ibinary{segments=Es},{Isub,Osub},St};
ren_pat(P, Ks0, {_,_}=Subs0, St0) ->
    %% Any other data constructor: rename its elements.
    Es0 = cerl:data_es(P),
    {Es,Subs,St} = ren_pats(Es0, Ks0, Subs0, St0),
    {cerl:make_data(cerl:data_type(P), Es),Subs,St}.
%% ren_pat_bin([Segment], [KnownVar], Isub, Osub, State) ->
%%      {[Segment],Isub,Osub,State}.
%%  Rename shadowing variables in binary segments. Isub maps values
%%  bound by earlier segments to their new names so that later size
%%  fields can be rewritten by ren_get_subst/2.
ren_pat_bin([#ibitstr{val=Val0,size=Sz0}=E|Es0], Ks, Isub0, Osub0, St0) ->
    Sz = ren_get_subst(Sz0, Isub0),
    {Val,{_,Osub1},St1} = ren_pat(Val0, Ks, {Isub0,Osub0}, St0),
    %% Record the renaming of a variable value for later segments.
    Isub1 = case Val0 of
                #c_var{} ->
                    [#iset{var=Val0,arg=Val}|Isub0];
                _ ->
                    Isub0
            end,
    {Es,Isub,Osub,St} = ren_pat_bin(Es0, Ks, Isub1, Osub1, St1),
    {[E#ibitstr{val=Val,size=Sz}|Es],Isub,Osub,St};
ren_pat_bin([], _Ks, Isub, Osub, St) ->
    {[],Isub,Osub,St}.
%% ren_pat_map([MapPair], [KnownVar], {Isub,Osub}, State) ->
%%      {[MapPair],{Isub,Osub},State}.
%%  Rename shadowing variables in the value patterns of map pairs.
ren_pat_map([#imappair{val=Val0}=Pair|Pairs0], Ks, Subs0, St0) ->
    {Val,Subs1,St1} = ren_pat(Val0, Ks, Subs0, St0),
    {Pairs,Subs,St2} = ren_pat_map(Pairs0, Ks, Subs1, St1),
    {[Pair#imappair{val=Val}|Pairs],Subs,St2};
ren_pat_map([], _Ks, Subs, St) ->
    {[],Subs,St}.
%% ren_get_subst(SizeExpr, Isub) -> SizeExpr.
%%  Apply the inner substitution to a binary segment size: a single
%%  variable is replaced directly, a single literal is kept as is, and
%%  a general expression is prefixed with the substitution isets.
ren_get_subst([#c_var{name=V}]=Old, Sub) ->
    case ren_is_subst(V, Sub) of
        no -> Old;
        {yes,New} -> [New]
    end;
ren_get_subst([#c_literal{}]=Old, _Sub) ->
    Old;
ren_get_subst(Expr, Sub) when is_list(Expr) ->
    Sub ++ Expr.
%% ren_is_subst(VarName, [Iset]) -> {yes,NewVar} | no.
%%  Look a variable name up in a list of renaming isets.
ren_is_subst(Name, [#iset{var=#c_var{name=Name},arg=New}|_]) ->
    {yes,New};
ren_is_subst(Name, [_|Rest]) ->
    ren_is_subst(Name, Rest);
ren_is_subst(_Name, []) -> no.
%% The AfterVars are the variables which are used afterwards. We need
%% this to work out which variables are actually exported and used
%% from case/receive. In subblocks/clauses the AfterVars of the block
%% are just the exported variables.
%% cbody(Kexpr, State) -> {Cexpr,State}.
%%  Convert a function body; nothing is used afterwards at top level.
cbody(Body0, St0) ->
    {Body,_Es,_Us,St} = cexpr(Body0, [], St0),
    {Body,St}.
%% cclause(Lclause, [AfterVar], State) -> {Cclause,State}.
%%  The AfterVars are the exported variables. Note that the body is
%%  converted before the guard, preserving the order in which fresh
%%  variables are numbered.
cclause(#iclause{anno=#a{anno=Anno},pats=Pats0,guard=Guard0,body=Body0}, Exp, St0) ->
    Pats = cpattern_list(Pats0),
    {Body,_Us,St1} = cexprs(Body0, Exp, St0),
    {Guard,St2} = cguard(Guard0, St1),
    {#c_clause{anno=Anno,pats=Pats,guard=Guard,body=Body},St2}.
%% cclauses([Lclause], [ExportedVar], State) -> {[Cclause],State}.
cclauses(Lcs, Exp, St) ->
    mapfoldl(fun (Lc, S) -> cclause(Lc, Exp, S) end, St, Lcs).
%% cguard([Lexpr], State) -> {Cexpr,State}.
%%  An empty guard becomes the literal 'true'.
cguard([], St) -> {#c_literal{val=true},St};
cguard(Guard, St0) ->
    {Cg,_Us,St} = cexprs(Guard, [], St0),
    {Cg,St}.
%% cpattern_list([Pat]) -> [Cpat].
cpattern_list(Ps) ->
    [cpattern(P) || P <- Ps].
%% cpattern(Pat) -> Cpat.
%%  Convert a pattern to its final Core representation, translating
%%  the internal map and binary patterns on the way.
cpattern(#c_alias{pat=Pat}=Alias) ->
    Alias#c_alias{pat=cpattern(Pat)};
cpattern(#c_cons{hd=Head,tl=Tail}=Cons) ->
    Cons#c_cons{hd=cpattern(Head),tl=cpattern(Tail)};
cpattern(#c_tuple{es=Els}=Tuple) ->
    Tuple#c_tuple{es=cpattern_list(Els)};
cpattern(#imap{anno=#a{anno=Anno},es=Pairs}) ->
    #c_map{anno=Anno,es=cpat_map_pairs(Pairs),is_pat=true};
cpattern(#ibinary{anno=#a{anno=Anno},segments=Segs0}) ->
    Segs = [cpat_bin_seg(Seg) || Seg <- Segs0],
    #c_binary{anno=Anno,segments=Segs};
cpattern(Other) -> Other.
%% cpat_map_pairs([Imappair]) -> [Cmappair].
%%  Convert map pattern pairs; map keys are expressions and are
%%  converted in a fresh state.
cpat_map_pairs([#imappair{anno=#a{anno=Anno},op=Op,key=Key0,val=Val0}|Pairs]) ->
    {Key,_,_} = cexprs(Key0, [], #core{}),
    Pair = #c_map_pair{anno=Anno,op=Op,key=Key,val=cpattern(Val0)},
    [Pair|cpat_map_pairs(Pairs)];
cpat_map_pairs([]) -> [].
%% cpat_bin_seg(Ibitstr) -> Cbitstr.
%%  Convert one binary segment pattern; the size field is an
%%  expression and is converted in a fresh state.
cpat_bin_seg(#ibitstr{anno=#a{anno=Anno},val=Val,size=Size0,unit=Unit,
                      type=Type,flags=Flags}) ->
    {Size,_,_} = cexprs(Size0, [], #core{}),
    #c_bitstr{anno=Anno,val=Val,size=Size,unit=Unit,type=Type,flags=Flags}.
%% cexprs([Lexpr], [AfterVar], State) -> {Cexpr,[AfterVar],State}.
%%  Must be sneaky here at the last expr when combining exports for the
%%  whole sequence and exports for that expr.
cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) ->
    %% Make return value explicit, and make Var true top level.
    Isimple = #isimple{anno=#a{us=[Name]},term=Var},
    cexprs([Iset,Isimple], As, St);
cexprs([Le], As, St0) ->
    {Ce,Es,Us,St1} = cexpr(Le, As, St0),
    Exp = make_vars(As),                        %The export variables
    if
        Es =:= [] -> {core_lib:make_values([Ce|Exp]),union(Us, As),St1};
        true ->
            %% The last expression itself exports variables: bind its
            %% value to a fresh variable and return it together with
            %% all the exports.
            {R,St2} = new_var(St1),
            {#c_let{anno=get_lineno_anno(Ce),
                    vars=[R|make_vars(Es)],arg=Ce,
                    body=core_lib:make_values([R|Exp])},
             union(Us, As),St2}
    end;
cexprs([#iset{anno=#a{anno=A},var=V,arg=A0}|Les], As0, St0) ->
    %% The rest of the sequence is converted first so that its
    %% AfterVars can be passed to the argument.
    {Ces,As1,St1} = cexprs(Les, As0, St0),
    {A1,Es,Us,St2} = cexpr(A0, As1, St1),
    {#c_let{anno=A,vars=[V|make_vars(Es)],arg=A1,body=Ces},
     union(Us, As1),St2};
cexprs([Le|Les], As0, St0) ->
    {Ces,As1,St1} = cexprs(Les, As0, St0),
    {Ce,Es,Us,St2} = cexpr(Le, As1, St1),
    if
        Es =:= [] ->
            %% No exports: a plain sequence suffices.
            {#c_seq{arg=Ce,body=Ces},union(Us, As1),St2};
        true ->
            {R,St3} = new_var(St2),
            {#c_let{vars=[R|make_vars(Es)],arg=Ce,body=Ces},
             union(Us, As1),St3}
    end.
%% cexpr(Lexpr, [AfterVar], State) -> {Cexpr,[ExpVar],[UsedVar],State}.
%%  Convert one internal expression to its Core form. The exported
%%  variables are the new variables that are also used afterwards.
cexpr(#iletrec{anno=A,defs=Fs0,body=B0}, As, St0) ->
    {Fs1,{_,St1}} = mapfoldl(fun ({{_Name,_Arity}=NA,F0}, {Used,S0}) ->
                                     {F1,[],Us,S1} = cexpr(F0, [], S0),
                                     {{#c_var{name=NA},F1},
                                      {union(Us, Used),S1}}
                             end, {[],St0}, Fs0),
    Exp = intersection(A#a.ns, As),
    {B1,_Us,St2} = cexprs(B0, Exp, St1),
    {#c_letrec{anno=A#a.anno,defs=Fs1,body=B1},Exp,A#a.us,St2};
cexpr(#icase{anno=A,args=Largs,clauses=Lcs,fc=Lfc}, As, St0) ->
    Exp = intersection(A#a.ns, As),             %Exports
    {Cargs,St1} = foldr(fun (La, {Cas,Sta}) ->
                                {Ca,[],_Us1,Stb} = cexpr(La, As, Sta),
                                {[Ca|Cas],Stb}
                        end, {[],St0}, Largs),
    {Ccs,St2} = cclauses(Lcs, Exp, St1),
    {Cfc0,St3} = cclause(Lfc, [], St2),         %Never exports
    %% The failure clause must still return the right number of values.
    {Cfc,St4} = c_add_dummy_export(Cfc0, Exp, St3),
    {#c_case{anno=A#a.anno,
             arg=core_lib:make_values(Cargs),clauses=Ccs ++ [Cfc]},
     Exp,A#a.us,St4};
cexpr(#ireceive1{anno=A,clauses=Lcs}, As, St0) ->
    Exp = intersection(A#a.ns, As),             %Exports
    {Ccs,St1} = cclauses(Lcs, Exp, St0),
    True = #c_literal{val=true},
    %% Dummy action returning the right number of values.
    Action = core_lib:make_values(lists:duplicate(1+length(Exp), True)),
    {#c_receive{anno=A#a.anno,
                clauses=Ccs,
                timeout=#c_literal{val=infinity},action=Action},
     Exp,A#a.us,St1};
cexpr(#ireceive2{anno=A,clauses=Lcs,timeout=Lto,action=Les}, As, St0) ->
    Exp = intersection(A#a.ns, As),             %Exports
    {Cto,[],_Us1,St1} = cexpr(Lto, As, St0),
    {Ccs,St2} = cclauses(Lcs, Exp, St1),
    {Ces,_Us2,St3} = cexprs(Les, Exp, St2),
    {#c_receive{anno=A#a.anno,
                clauses=Ccs,timeout=Cto,action=Ces},
     Exp,A#a.us,St3};
cexpr(#itry{anno=A,args=La,vars=Vs0,body=Lb,evars=Evs,handler=Lh}, _As, St0) ->
    %% No variables are exported from try/catch. Starting in OTP 24,
    %% variables bound in the argument (the code between the 'try' and
    %% the 'of' keywords) are exported to the body (the code following
    %% the 'of' keyword).
    AsExp = intersection(new_in_any(La), used_in_any(Lb)),
    {Ca,_Us1,St1} = cexprs(La, AsExp, St0),
    {Cb,_Us2,St2} = cexprs(Lb, [], St1),
    {Ch,_Us3,St3} = cexprs(Lh, [], St2),
    Vs = Vs0 ++ [#c_var{name=V} || V <- AsExp],
    {#c_try{anno=A#a.anno,arg=Ca,vars=Vs,body=Cb,evars=Evs,handler=Ch},
     [],A#a.us,St3};
cexpr(#icatch{anno=A,body=Les}, _As, St0) ->
    {Ces,_Us1,St1} = cexprs(Les, [], St0),      %Never export!
    {#c_catch{body=Ces},[],A#a.us,St1};
cexpr(#ifun{name=unnamed}=Fun, As, St0) ->
    cfun(Fun, As, St0);
cexpr(#ifun{anno=#a{us=Us0}=A0,name={named,Name},fc=#iclause{pats=Ps}}=Fun0,
      As, St0) ->
    case is_element(Name, Us0) of
        false ->
            %% The fun does not use its own name; handle it as an
            %% ordinary fun.
            cfun(Fun0, As, St0);
        true ->
            %% The fun refers to itself by name: wrap it in a letrec
            %% and bind the name inside the fun body.
            A1 = A0#a{us=del_element(Name, Us0)},
            Fun1 = Fun0#ifun{anno=A1},
            {#c_fun{body=Body}=CFun0,[],Us1,St1} = cfun(Fun1, As, St0),
            RecVar = #c_var{name={Name,length(Ps)}},
            Let = #c_let{vars=[#c_var{name=Name}],arg=RecVar,body=Body},
            CFun1 = CFun0#c_fun{body=Let},
            Letrec = #c_letrec{anno=A0#a.anno,
                               defs=[{RecVar,CFun1}],
                               body=RecVar},
            {Letrec,[],Us1,St1}
    end;
cexpr(#iapply{anno=A,op=Op,args=Args}, _As, St) ->
    {#c_apply{anno=A#a.anno,op=Op,args=Args},[],A#a.us,St};
cexpr(#icall{anno=A,module=Mod,name=Name,args=Args}, _As, St0) ->
    Anno = A#a.anno,
    case (not cerl:is_c_atom(Mod)) andalso member(tuple_calls, St0#core.opts) of
        true ->
            %% The module is not a literal atom and the 'tuple_calls'
            %% option is set: expand the call so that a "tuple module"
            %% is handled at runtime.
            GenAnno = [compiler_generated|Anno],

            %% Generate the clause that matches on the tuple
            {TupleVar,St1} = new_var(GenAnno, St0),
            {TupleSizeVar, St2} = new_var(GenAnno, St1),
            {TupleModVar, St3} = new_var(GenAnno, St2),
            {TupleArgsVar, St4} = new_var(GenAnno, St3),
            TryVar = cerl:c_var('Try'),

            TupleGuardExpr =
                cerl:c_let([TupleSizeVar],
                           c_call_erl(tuple_size, [TupleVar]),
                           c_call_erl('>', [TupleSizeVar, cerl:c_int(0)])),

            TupleGuard =
                cerl:c_try(TupleGuardExpr, [TryVar], TryVar,
                           [cerl:c_var('T'),cerl:c_var('R')], cerl:c_atom(false)),

            TupleApply =
                cerl:c_let([TupleModVar],
                           c_call_erl(element, [cerl:c_int(1),TupleVar]),
                           cerl:c_let([TupleArgsVar],
                                      cerl:make_list(Args ++ [TupleVar]),
                                      c_call_erl(apply, [TupleModVar,Name,TupleArgsVar]))),

            TupleClause = cerl:ann_c_clause(GenAnno, [TupleVar], TupleGuard, TupleApply),

            %% Generate the fallback clause
            {OtherVar,St5} = new_var(GenAnno, St4),
            OtherApply = cerl:ann_c_call(GenAnno, OtherVar, Name, Args),
            OtherClause = cerl:ann_c_clause(GenAnno, [OtherVar], OtherApply),

            {cerl:ann_c_case(GenAnno, Mod, [TupleClause,OtherClause]),[],A#a.us,St5};
        false ->
            {#c_call{anno=Anno,module=Mod,name=Name,args=Args},[],A#a.us,St0}
    end;
cexpr(#iprimop{anno=A,name=Name,args=Args}, _As, St) ->
    {#c_primop{anno=A#a.anno,name=Name,args=Args},[],A#a.us,St};
cexpr(#iprotect{anno=A,body=Es}, _As, St0) ->
    %% A protected expression (guard subexpression) becomes a try that
    %% turns any exception into 'false'.
    {Ce,_,St1} = cexprs(Es, [], St0),
    V = #c_var{name='Try'},                     %The names are arbitrary
    Vs = [#c_var{name='T'},#c_var{name='R'}],
    {#c_try{anno=A#a.anno,arg=Ce,vars=[V],body=V,
            evars=Vs,handler=#c_literal{val=false}},
     [],A#a.us,St1};
cexpr(#ibinary{anno=#a{anno=Anno,us=Us},segments=Segs}, _As, St) ->
    {#c_binary{anno=Anno,segments=Segs},[],Us,St};
cexpr(#c_literal{}=Lit, _As, St) ->
    Anno = get_anno(Lit),
    Vs = Anno#a.us,
    {set_anno(Lit, Anno#a.anno),[],Vs,St};
cexpr(#isimple{anno=#a{us=Vs},term=Simple}, _As, St) ->
    true = is_simple(Simple),                   %Sanity check!
    {Simple,[],Vs,St}.
%% cfun(Ifun, [AfterVar], State) -> {Cfun,[ExpVar],[UsedVar],State}.
%%  Convert a fun; no variables are ever exported from a fun, so the
%%  clauses are converted with an empty export list.
cfun(#ifun{anno=A,id=Id,vars=Vars,clauses=Lcs,fc=Lfc}, _As, St0) ->
    {Ccs,St1} = cclauses(Lcs, [], St0),         %NEVER export!
    {Cfc,St2} = cclause(Lfc, [], St1),
    Anno = A#a.anno,
    Body = #c_case{anno=Anno,
                   arg=set_anno(core_lib:make_values(Vars), Anno),
                   clauses=Ccs ++ [Cfc]},
    {#c_fun{anno=Id++Anno,vars=Vars,body=Body},
     [],A#a.us,St2}.
%% c_call_erl(Fun, Args) -> Cexpr.
%%  Build a compiler-generated call to erlang:Fun(Args...).
c_call_erl(Fun, Args) ->
    cerl:ann_c_call([compiler_generated],
                    cerl:c_atom(erlang), cerl:c_atom(Fun), Args).
%% c_add_dummy_export(Clause, [ExportedVar], State) -> {Clause,State}.
%%  Add dummy exports to the default clause so that it always returns
%%  the correct number of values.
c_add_dummy_export(#c_clause{body=Body0}=C, [_|_]=Exp, St0) ->
    {Var,St} = new_var(St0),
    Dummies = duplicate(length(Exp), #c_literal{val=[]}),
    Body = #c_let{vars=[Var],arg=Body0,
                  body=#c_values{es=[Var|Dummies]}},
    {C#c_clause{body=Body},St};
c_add_dummy_export(C, [], St) ->
    {C,St}.
%%%
%%% Lower a `receive` to more primitive operations. Rewrite patterns
%%% that use and bind the same variable as nested cases.
%%%
%%% Here follows an example of how a receive in this Erlang code:
%%%
%%% foo(Timeout) ->
%%% receive
%%% {tag,Msg} -> Msg
%%% after
%%% Timeout ->
%%% no_message
%%% end.
%%%
%%% is translated into Core Erlang:
%%%
%%% 'foo'/1 =
%%% fun (Timeout) ->
%%% ( letrec
%%% 'recv$^0'/0 =
%%% fun () ->
%%% let <PeekSucceeded,Message> =
%%% primop 'recv_peek_message'()
%%% in case PeekSucceeded of
%%% <'true'> when 'true' ->
%%% case Message of
%%% <{'tag',Msg}> when 'true' ->
%%% do primop 'remove_message'()
%%% Msg
%%% ( <Other> when 'true' ->
%%% do primop 'recv_next'()
%%% apply 'recv$^0'/0()
%%% -| ['compiler_generated'] )
%%% end
%%% <'false'> when 'true' ->
%%% let <TimedOut> =
%%% primop 'recv_wait_timeout'(Timeout)
%%% in case TimedOut of
%%% <'true'> when 'true' ->
%%% do primop 'timeout'()
%%% 'no_message'
%%% <'false'> when 'true' ->
%%% apply 'recv$^0'/0()
%%% end
%%% end
%%% in apply 'recv$^0'/0()
%%% -| ['letrec_goto'] )
%% lbody(Cexpr, State) -> {Cexpr,State}.
%%  Apply lexpr/2 to every node in the body.
lbody(Body, St) ->
    cerl_trees:mapfold(fun lexpr/2, St, Body).
%% lexpr(Cexpr, State) -> {Cexpr,State}.
%%  Lower receives to their primitive operations and split cases whose
%%  patterns both bind and use the same variable (see the example in
%%  the comment above).
lexpr(#c_case{}=Case, St) ->
    %% Split patterns that bind and use the same variable.
    split_case(Case, St);
lexpr(#c_receive{clauses=[],timeout=Timeout0,action=Action}, St0) ->
    %% Lower a receive with only an after to its primitive operations.
    False = #c_literal{val=false},
    True = #c_literal{val=true},
    {Timeout,Outer0,St1} =
        case is_safe(Timeout0) of
            true ->
                {Timeout0,False,St0};
            false ->
                %% The timeout expression is unsafe; evaluate it once
                %% in an outer let (spliced in below).
                {TimeoutVar,Sti0} = new_var(St0),
                OuterLet = #c_let{vars=[TimeoutVar],arg=Timeout0,body=False},
                {TimeoutVar,OuterLet,Sti0}
        end,
    MaybeIgnore = case Timeout of
                      #c_literal{val=infinity} -> [dialyzer_ignore];
                      _ -> []
                  end,
    {LoopName,St2} = new_fun_name("recv", St1),
    LoopFun = #c_var{name={LoopName,0}},
    ApplyLoop = #c_apply{anno=[dialyzer_ignore],op=LoopFun,args=[]},
    TimeoutCs = [#c_clause{anno=MaybeIgnore,pats=[True],guard=True,
                           body=#c_seq{arg=primop(timeout),
                                       body=Action}},
                 #c_clause{anno=[compiler_generated,dialyzer_ignore],
                           pats=[False],guard=True,
                           body=ApplyLoop}],
    {TimeoutBool,St3} = new_var(St2),
    TimeoutCase = #c_case{anno=[receive_timeout],arg=TimeoutBool,
                          clauses=TimeoutCs},
    TimeoutLet = #c_let{vars=[TimeoutBool],
                        arg=primop(recv_wait_timeout, [Timeout]),
                        body=TimeoutCase},
    Fun = #c_fun{vars=[],body=TimeoutLet},
    Letrec = #c_letrec{anno=[letrec_goto],
                       defs=[{LoopFun,Fun}],
                       body=ApplyLoop},
    %% If the 'after' expression is unsafe we evaluate it in an outer 'let'.
    Outer = case Outer0 of
                #c_let{} -> Outer0#c_let{body=Letrec};
                _ -> Letrec
            end,
    {Outer,St3};
lexpr(#c_receive{anno=RecvAnno,clauses=Cs0,timeout=Timeout0,action=Action}, St0) ->
    %% Lower receive to its primitive operations.
    False = #c_literal{val=false},
    True = #c_literal{val=true},
    {Timeout,Outer0,St1} =
        case is_safe(Timeout0) of
            true ->
                {Timeout0,False,St0};
            false ->
                {TimeoutVar,Sti0} = new_var(St0),
                OuterLet = #c_let{vars=[TimeoutVar],arg=Timeout0,body=False},
                {TimeoutVar,OuterLet,Sti0}
        end,
    MaybeIgnore = case Timeout of
                      #c_literal{val=infinity} -> [dialyzer_ignore];
                      _ -> []
                  end,
    {LoopName,St2} = new_fun_name("recv", St1),
    LoopFun = #c_var{name={LoopName,0}},
    ApplyLoop = #c_apply{anno=[dialyzer_ignore],op=LoopFun,args=[]},
    %% Each message clause removes the message on a match; a final
    %% catch-all clause advances to the next message and loops.
    Cs1 = rewrite_cs(Cs0),
    RecvNext = #c_seq{arg=primop(recv_next),
                      body=ApplyLoop},
    RecvNextC = #c_clause{anno=[compiler_generated,dialyzer_ignore],
                          pats=[#c_var{name='Other'}],guard=True,body=RecvNext},
    Cs = Cs1 ++ [RecvNextC],
    {Msg,St3} = new_var(St2),
    {MsgCase,St4} = split_case(#c_case{anno=RecvAnno,arg=Msg,clauses=Cs}, St3),
    TimeoutCs = [#c_clause{pats=[True],guard=True,
                           body=#c_seq{arg=primop(timeout),
                                       body=Action}},
                 #c_clause{anno=[dialyzer_ignore],pats=[False],guard=True,
                           body=ApplyLoop}],
    {TimeoutBool,St5} = new_var(St4),
    TimeoutCase = #c_case{arg=TimeoutBool,clauses=TimeoutCs},
    TimeoutLet = #c_let{vars=[TimeoutBool],
                        arg=primop(recv_wait_timeout, [Timeout]),
                        body=TimeoutCase},
    %% Peek at the mailbox: on success match the message, otherwise
    %% wait for the timeout.
    {PeekSucceeded,St6} = new_var(St5),
    PeekCs = [#c_clause{pats=[True],guard=True,
                        body=MsgCase},
              #c_clause{anno=MaybeIgnore,
                        pats=[False],guard=True,
                        body=TimeoutLet}],
    PeekCase = #c_case{arg=PeekSucceeded,clauses=PeekCs},
    PeekLet = #c_let{vars=[PeekSucceeded,Msg],
                     arg=primop(recv_peek_message),
                     body=PeekCase},
    Fun = #c_fun{vars=[],body=PeekLet},
    Letrec = #c_letrec{anno=[letrec_goto],
                       defs=[{LoopFun,Fun}],
                       body=ApplyLoop},
    %% If the 'after' expression is unsafe we evaluate it in an outer 'let'.
    Outer = case Outer0 of
                #c_let{} -> Outer0#c_let{body=Letrec};
                _ -> Letrec
            end,
    {Outer,St6};
lexpr(Tree, St) ->
    {Tree,St}.
%% rewrite_cs([Clause]) -> [Clause].
%%  Prefix each receive clause body with a remove_message primop.
rewrite_cs([#c_clause{body=Body0}=Clause|Clauses]) ->
    Body = #c_seq{arg=primop(remove_message),body=Body0},
    [Clause#c_clause{body=Body}|rewrite_cs(Clauses)];
rewrite_cs([]) -> [].
%% primop(Name) -> Cprimop.
%% primop(Name, Args) -> Cprimop.
%%  Build a primop node with the given name and arguments.
primop(Name) ->
    primop(Name, []).

primop(Name, Args) ->
    #c_primop{name=#c_literal{val=Name},args=Args}.
%%%
%%% Split patterns such as <<Size:32,Tail:Size>> that bind
%%% and use a variable in the same pattern. Rewrite to a
%%% nested case in a letrec.
%%%
%% split_case(Case, State) -> {Cexpr,State}.
%%  Split clauses whose patterns bind and use the same variable into
%%  nested cases inside a letrec (see the comment above).
split_case(#c_case{anno=CaseAnno,arg=Arg,clauses=Cs0}=Case0, St0) ->
    Args = case Arg of
               #c_values{es=Es} -> Es;
               _ -> [Arg]
           end,
    {VarArgs,St1} = split_var_args(Args, St0),
    case split_clauses(Cs0, VarArgs, CaseAnno, St1) of
        none ->
            %% Nothing to split. Note that the ORIGINAL state is
            %% returned, discarding the variables introduced by
            %% split_var_args/2 since they are not used.
            {Case0,St0};
        {PreCase,AftCs,St2} ->
            %% Remaining (unsplit) clauses become the continuation
            %% case wrapped in a fun; the introduced argument
            %% variables are bound in an outer let.
            AftCase = Case0#c_case{arg=core_lib:make_values(VarArgs),
                                   clauses=AftCs},
            AftFun = #c_fun{vars=[],body=AftCase},
            {Letrec,St3} = split_case_letrec(AftFun, PreCase, St2),
            Body = split_letify(VarArgs, Args, Letrec, [], []),
            {Body,St3}
    end.
%% split_var_args([Cexpr], State) -> {[Cexpr],State}.
%%  Ensure that every case argument is a variable or a literal,
%%  introducing a fresh variable for anything else.
split_var_args(Args, St) ->
    mapfoldl(fun (#c_var{}=Var, S) ->
                     {Var,S};
                 (#c_literal{}=Lit, S) ->
                     {Lit,S};
                 (_Expr, S) ->
                     new_var(S)
             end, St, Args).
%% split_letify([Var], [Arg], Body, VsAcc, ArgAcc) -> Cexpr.
%%  Wrap Body in a let binding each variable introduced by
%%  split_var_args/2 to its original argument expression. Positions
%%  where the variable and the argument are the same term need no
%%  binding and are skipped.
split_letify([Same|Vs], [Same|Args], Body, VsAcc, ArgAcc) ->
    split_letify(Vs, Args, Body, VsAcc, ArgAcc);
split_letify([V|Vs], [Arg|Args], Body, VsAcc, ArgAcc) ->
    split_letify(Vs, Args, Body, [V|VsAcc], [Arg|ArgAcc]);
split_letify([], [], Body, [], []) ->
    %% No bindings needed.
    Body;
split_letify([], [], Body, [_|_]=VsAcc, [_|_]=ArgAcc) ->
    #c_let{vars=reverse(VsAcc),
           arg=core_lib:make_values(reverse(ArgAcc)),
           body=Body}.
%% split_case_letrec(Fun, Body, State) -> {Letrec,State}.
%%  Wrap Body in a letrec_goto letrec defining the continuation fun
%%  for the remaining clauses, then lower the result again.
split_case_letrec(#c_fun{anno=FunAnno0}=Fun0, Body, #core{gcount=Count}=St0) ->
    Fun = Fun0#c_fun{anno=[compiler_generated|FunAnno0]},
    Defs = [{#c_var{name=goto_func(Count)},Fun}],
    Letrec = #c_letrec{anno=[letrec_goto],defs=Defs,body=Body},
    lbody(Letrec, St0#core{gcount=Count+1}).
%% split_clauses([Clause], [Arg], Anno, State) ->
%%      none | {Case,[Clause],State}.
%%  Find a clause that must be split, searching from the end of the
%%  clause list. Returns the nested case covering the split clause
%%  and those before it, plus the clauses that remain after it.
split_clauses([C0|Cs0], Args, CaseAnno, St0) ->
    %% Try the remaining clauses first so that the clause nearest the
    %% end that needs splitting is the one reconstructed.
    case split_clauses(Cs0, Args, CaseAnno, St0) of
        none ->
            case split_clause(C0, St0) of
                none ->
                    none;
                {Ps,Nested,St1} ->
                    {Case,St2} = split_reconstruct(Args, Ps, Nested,
                                                   C0, CaseAnno, St1),
                    {Case,Cs0,St2}
            end;
        {Case0,Cs,St} ->
            %% A later clause was split; keep this clause in front of
            %% the reconstructed case.
            #c_case{clauses=NewClauses} = Case0,
            Case = Case0#c_case{clauses=[C0|NewClauses]},
            {Case,Cs,St}
    end;
split_clauses([], _, _, _) ->
    none.
%% goto_func(Count) -> {Name,0}.
%%  Name/arity pair for the continuation function used when a case
%%  clause is split (referenced via letrec_goto).
goto_func(Count) ->
    Name = lists:concat(["label^", Count]),
    {list_to_atom(Name),0}.
%% split_reconstruct([Arg], [Pat], Split, Clause, Anno, State) ->
%%      {Case,State}.
%%  Rebuild a case for a split clause. Split is 'nil' when there is
%%  nothing more to match, or a {split,...} tuple describing the
%%  inner case that continues the match.
split_reconstruct(Args, Ps, nil, #c_clause{anno=Anno}=C0, CaseAnno, St0) ->
    %% Innermost case: the original guard and body stay on the clause.
    C = C0#c_clause{pats=Ps},
    {Fc,St1} = split_fc_clause(Ps, Anno, St0),
    {#c_case{anno=CaseAnno,arg=core_lib:make_values(Args),clauses=[C,Fc]},St1};
split_reconstruct(Args, Ps, {split,SplitArgs,Pat,Nested}, C, CaseAnno, St) ->
    %% Normalize a four-element split to the five-element form with an
    %% identity wrapper.
    Split = {split,SplitArgs,fun(Body) -> Body end,Pat,Nested},
    split_reconstruct(Args, Ps, Split, C, CaseAnno, St);
split_reconstruct(Args, Ps, {split,SplitArgs,Wrap,Pat,Nested},
                  #c_clause{anno=Anno}=C0, CaseAnno, St0) ->
    {InnerCase,St1} = split_reconstruct(SplitArgs, [Pat], Nested, C0,
                                        CaseAnno, St0),
    {Fc,St2} = split_fc_clause(Args, Anno, St1),
    Wrapped = Wrap(InnerCase),
    %% The outer clause keeps only the split pattern; the original
    %% guard and body live in the innermost case.
    C = C0#c_clause{pats=Ps,guard=#c_literal{val=true},body=Wrapped},
    {#c_case{anno=CaseAnno,arg=core_lib:make_values(Args),clauses=[C,Fc]},St2}.
%% split_fc_clause([Arg], Anno, State) -> {Clause,State}.
%%  Build the compiler-generated failure clause that jumps to the
%%  continuation function when the split pattern fails to match.
split_fc_clause(Args, Anno0, #core{gcount=Count}=St0) ->
    Anno = [compiler_generated|Anno0],
    {Vars,St1} = new_vars(length(Args), St0),
    Goto = #c_apply{anno=Anno,op=#c_var{name=goto_func(Count)},args=[]},
    {#c_clause{anno=[dialyzer_ignore|Anno],pats=Vars,
               guard=#c_literal{val=true},body=Goto},St1}.
%% split_clause(Clause, State) -> none | {[Pat],Split,State}.
%%  Determine whether any pattern in the clause must be split.
split_clause(#c_clause{pats=Pats}, St0) ->
    case split_pats(Pats, St0) of
        none ->
            none;
        {Ps,Split,St} ->
            {Ps,Split,St}
    end.
%% split_pats([Pat], State) -> none | {[Pat],Split,State}.
%%  Find a pattern that must be split; later patterns in the list are
%%  tried before earlier ones.
split_pats([P0|Ps0], St0) ->
    case split_pats(Ps0, St0) of
        none ->
            case split_pat(P0, St0) of
                none ->
                    none;
                {P,Case,St} ->
                    {[P|Ps0],Case,St}
            end;
        {Ps,Case,St} ->
            {[P0|Ps],Case,St}
    end;
split_pats([], _) ->
    none.
%% split_pat(Pat, State) -> none | {Pat,Split,State}.
%%  Determine whether a pattern must be split and, if so, return the
%%  outer (pre-split) pattern together with a {split,...} description
%%  of the inner match that continues it.
split_pat(#c_binary{segments=Segs0}=Bin, St0) ->
    Vars = gb_sets:empty(),
    case split_bin_segments(Segs0, Vars, St0, []) of
        none ->
            none;
        {TailVar,Wrap,Bef,Aft,St} ->
            %% Match the leading segments plus a tail variable first;
            %% the remaining segments are matched in an inner case.
            BefBin = Bin#c_binary{segments=Bef},
            {BefBin,{split,[TailVar],Wrap,Bin#c_binary{segments=Aft},nil},St}
    end;
split_pat(#c_map{es=Es}=Map, St) ->
    split_map_pat(Es, Map, St, []);
split_pat(#c_var{}, _) ->
    %% A plain variable never needs splitting.
    none;
split_pat(#c_alias{pat=Pat}=Alias0, St0) ->
    case split_pat(Pat, St0) of
        none ->
            none;
        {Ps,Split,St1} ->
            {Var,St} = new_var(St1),
            Alias = Alias0#c_alias{pat=Var},
            {Alias,{split,[Var],Ps,Split},St}
    end;
split_pat(Data, St0) ->
    %% Any other data constructor: look for a splittable element.
    Type = cerl:data_type(Data),
    Es = cerl:data_es(Data),
    split_data(Es, Type, St0, []).
%% split_map_pat([MapPair], Map, State, Acc) -> none | {Map,Split,State}.
%%  Split a map pattern either at a key that is a non-trivial
%%  expression (which must be evaluated first) or at a value pattern
%%  that itself needs splitting.
split_map_pat([#c_map_pair{key=Key,val=Val}=E0|Es], Map0, St0, Acc) ->
    case eval_map_key(Key, E0, Es, Map0, St0) of
        none ->
            case split_pat(Val, St0) of
                none ->
                    split_map_pat(Es, Map0, St0, [E0|Acc]);
                {Ps,Split,St1} ->
                    %% Bind the value to a fresh variable here and
                    %% continue matching it in an inner case.
                    {Var,St} = new_var(St1),
                    E = E0#c_map_pair{val=Var},
                    Map = Map0#c_map{es=reverse(Acc, [E|Es])},
                    {Map,{split,[Var],Ps,Split},St}
            end;
        {MapVar,Split,St1} ->
            %% The whole map is aliased to a variable so that the rest
            %% of the match can continue on the map value.
            BefMap0 = Map0#c_map{es=reverse(Acc)},
            BefMap = #c_alias{var=MapVar,pat=BefMap0},
            {BefMap,Split,St1}
    end;
split_map_pat([], _, _, _) -> none.
%% eval_map_key(Key, MapPair, [MapPair], Map, State) ->
%%      none | {MapVar,Split,State}.
%%  A variable or literal key needs no special handling; any other
%%  key expression must be evaluated before the match can continue.
eval_map_key(#c_var{}, _E, _Es, _Map, _St) ->
    none;
eval_map_key(#c_literal{}, _E, _Es, _Map, _St) ->
    none;
eval_map_key(Key, E0, Es, Map, St0) ->
    {[KeyVar,MapVar],St1} = new_vars(2, St0),
    E = E0#c_map_pair{key=KeyVar},
    AftMap0 = Map#c_map{es=[E|Es]},
    {Wrap,CaseArg,AftMap,St2} = wrap_map_key_fun(Key, KeyVar, MapVar, AftMap0, St1),
    {MapVar,{split,[CaseArg],Wrap,AftMap,nil},St2}.
%% wrap_map_key_fun(Key, KeyVar, MapVar, AftMap, State) ->
%%      {WrapFun,CaseArg,AftMap,State}.
%%  Return a fun that evaluates the map key expression around a given
%%  body. A safe key is bound with a simple let; an unsafe key is
%%  evaluated inside a try so that an exception makes the inner match
%%  fail instead of propagating.
wrap_map_key_fun(Key, KeyVar, MapVar, AftMap, St0) ->
    case is_safe(Key) of
        true ->
            {fun(Body) ->
                     #c_let{vars=[KeyVar],arg=Key,body=Body}
             end,MapVar,AftMap,St0};
        false ->
            {[SuccVar|Evars],St} = new_vars(4, St0),
            %% The try yields {true,KeyValue} on success and
            %% {false,false} on an exception; the inner case matches
            %% on the corresponding success tuple.
            {fun(Body) ->
                     Try = #c_try{arg=Key,vars=[KeyVar],
                                  body=#c_values{es=[#c_literal{val=true},KeyVar]},
                                  evars=Evars,
                                  handler=#c_values{es=[#c_literal{val=false},
                                                        #c_literal{val=false}]}},
                     #c_let{vars=[SuccVar,KeyVar],arg=Try,body=Body}
             end,
             #c_tuple{es=[SuccVar,MapVar]},
             #c_tuple{es=[#c_literal{val=true},AftMap]},
             St}
    end.
%% split_data([Element], Type, State, Acc) -> none | {Data,Split,State}.
%%  Find the first element of a data constructor that must be split,
%%  replacing it with a fresh variable in the rebuilt term.
split_data([E|Es], Type, St0, Acc) ->
    case split_pat(E, St0) of
        none ->
            split_data(Es, Type, St0, [E|Acc]);
        {Ps,Split,St1} ->
            {Var,St} = new_var(St1),
            Data = cerl:make_data(Type, reverse(Acc, [Var|Es])),
            {Data,{split,[Var],Ps,Split},St}
    end;
split_data([], _Type, _St, _Acc) -> none.
%% split_bin_segments([Segment], SeenVars, State, Acc) ->
%%      none | {TailVar,WrapFun,BeforeSegs,AfterSegs,State}.
%%  Find the first segment whose size is either a general expression
%%  or a variable bound by an earlier segment of the same binary; the
%%  binary pattern is split just before that segment.
split_bin_segments([#c_bitstr{val=Val,size=Size}=S0|Segs], Vars0, St0, Acc) ->
    %% Track variables bound by segment values seen so far.
    Vars = case Val of
               #c_var{name=V} -> gb_sets:add(V, Vars0);
               _ -> Vars0
           end,
    case Size of
        #c_literal{} ->
            split_bin_segments(Segs, Vars, St0, [S0|Acc]);
        #c_var{name=SizeVar} ->
            case gb_sets:is_member(SizeVar, Vars0) of
                true ->
                    %% The size variable was bound by an earlier
                    %% segment of this same binary. Split the clause
                    %% here to avoid a variable that is both defined
                    %% and used in the same pattern.
                    {TailVar,Tail,St} = split_tail_seg(S0, Segs, St0),
                    Wrap = fun(Body) -> Body end,
                    {TailVar,Wrap,reverse(Acc, [Tail]),[S0|Segs],St};
                false ->
                    split_bin_segments(Segs, Vars, St0, [S0|Acc])
            end;
        _ ->
            %% The size is an expression. Split the clause here,
            %% calculate the expression in a try/catch, and finally
            %% continue the match in an inner case.
            {TailVar,Tail,St1} = split_tail_seg(S0, Segs, St0),
            {SizeVar,St2} = new_var(St1),
            S = S0#c_bitstr{size=SizeVar},
            {Wrap,St3} = split_wrap(SizeVar, Size, St2),
            {TailVar,Wrap,reverse(Acc, [Tail]),[S|Segs],St3}
    end;
split_bin_segments(_, _, _, _) ->
    none.
%% split_tail_seg(Seg, Segs, State) -> {TailVar,TailSeg,State}.
%%  Build a binary tail segment capturing the rest of the binary,
%%  using the common unit computed from the remaining segments.
split_tail_seg(#c_bitstr{anno=A}=Seg, Segs, St0) ->
    {TailVar,St} = new_var(St0),
    Unit = split_bin_unit([Seg|Segs], St0),
    Tail = #c_bitstr{anno=A,val=TailVar,
                     size=#c_literal{val=all},
                     unit=#c_literal{val=Unit},
                     type=#c_literal{val=binary},
                     flags=#c_literal{val=[unsigned,big]}},
    {TailVar,Tail,St}.
%% split_wrap(SizeVar, SizeExpr, State) -> {WrapFun,State}.
%%  Return a fun that evaluates SizeExpr in a try around a given body,
%%  binding SizeVar to the result, or to the atom 'bad_size' when the
%%  evaluation raises an exception.
split_wrap(SizeVar, SizeExpr, St0) ->
    {Evars,St1} = new_vars(3, St0),
    Wrap = fun(Body) ->
                   Try = #c_try{arg=SizeExpr,vars=[SizeVar],body=SizeVar,
                                evars=Evars,handler=#c_literal{val=bad_size}},
                   #c_let{vars=[SizeVar],arg=Try,body=Body}
           end,
    {Wrap,St1}.
%% Choose the unit for the tail segment created at a binary split
%% point.  When compiling for dialyzer, the greatest common unit of the
%% remaining segments is computed to help type inference; otherwise
%% unit 1 is returned since it produces the best code.
split_bin_unit(Ss, #core{dialyzer=Dialyzer}) ->
    case Dialyzer of
        true ->
            %% When a binary match has been rewritten to a nested
            %% case like this:
            %%
            %%    case Bin of
            %%      <<Size:32,Tail:Size/bitstring-unit:1>> ->
            %%         case Tail of
            %%            <<Result/binary-unit:8>> -> Result;
            %%              ...
            %%         end
            %%
            %% dialyzer will determine the type of Bin based solely on
            %% the binary pattern in the outer case. It will not
            %% back-propagate any type information for Tail to Bin. For
            %% this example, dialyzer would infer the type of Bin to
            %% be <<_:8,_:_*1>>.
            %%
            %% Help dialyzer to infer a better type by calculating the
            %% greatest common unit for the segments in the inner case
            %% expression. For this example, the greatest common unit
            %% for the pattern in the inner case is 8; it will allow
            %% dialyzer to infer the type for Bin to be
            %% <<_:32,_:_*8>>.
            split_bin_unit_1(Ss, 0);
        false ->
            %% Return the unit for pattern in the outer case that
            %% results in the best code.
            1
    end.

%% Fold over the segments computing the greatest common divisor of the
%% number of bits each segment can match.  The initial accumulator is
%% 0 because gcd(0, N) =:= N (see gcd/2 below).
split_bin_unit_1([#c_bitstr{type=#c_literal{val=Type},size=Size,
                            unit=#c_literal{val=U}}|Ss],
                 GCU) ->
    Bits = case {Type,Size} of
               %% UTF segments have fixed minimal widths.
               {utf8,_} -> 8;
               {utf16,_} -> 16;
               {utf32,_} -> 32;
               %% A zero-size segment contributes 1 so it does not
               %% inflate the common unit.
               {_,#c_literal{val=0}} -> 1;
               {_,#c_literal{val=Sz}} when is_integer(Sz) -> Sz * U;
               %% Variable size: only the declared unit is known.
               {_,_} -> U
           end,
    split_bin_unit_1(Ss, gcd(GCU, Bits));
split_bin_unit_1([], GCU) -> GCU.
%% Greatest common divisor (Euclid's algorithm).  The second argument
%% must be non-zero, otherwise `rem` raises badarith.
gcd(Dividend, Divisor) ->
    case Dividend rem Divisor of
        0 -> Divisor;
        Remainder -> gcd(Divisor, Remainder)
    end.
%% lit_vars(Literal) -> [Var].
%%  Collect the variable names occurring in a (possibly nested)
%%  literal/pattern term.  Accumulation goes through add_element/2, so
%%  duplicates are merged (presumably an ordset-style set — confirm
%%  against this module's imports).  Atomic terms contribute nothing.

lit_vars(Lit) -> lit_vars(Lit, []).

lit_vars(#c_cons{hd=H,tl=T}, Vs) -> lit_vars(H, lit_vars(T, Vs));
lit_vars(#c_tuple{es=Es}, Vs) -> lit_list_vars(Es, Vs);
lit_vars(#c_map{arg=V,es=Es}, Vs) -> lit_vars(V, lit_list_vars(Es, Vs));
lit_vars(#c_map_pair{key=K,val=V}, Vs) -> lit_vars(K, lit_vars(V, Vs));
lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs);
lit_vars(_, Vs) -> Vs.				%These are atomic

%% Accumulate the variables of a list of literal terms.

lit_list_vars(Ls) -> lit_list_vars(Ls, []).

lit_list_vars(Ls, Vs) ->
    foldl(fun (L, Vs0) -> lit_vars(L, Vs0) end, Vs, Ls).

%% Collect the variables occurring in the value and size fields of a
%% list of binary segments.

bitstr_vars(Segs) ->
    bitstr_vars(Segs, []).

bitstr_vars(Segs, Vs) ->
    foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
                  lit_vars(V, lit_vars(S, Vs0))
          end, Vs, Segs).
%% Annotation for a record construction/match.  When the erl_anno
%% location carries the record mark AND we are compiling for dialyzer,
%% a 'record' marker is prepended to the plain line annotation;
%% otherwise the full annotation is used.
record_anno(L, #core{dialyzer=Dialyzer}=St) ->
    case erl_anno:record(L) andalso Dialyzer of
        true ->
            [record | lineno_anno(L, St)];
        false ->
            full_anno(L, St)
    end.

%% Full annotation: the line annotation, plus a 'result_not_wanted'
%% marker when the state says the expression's value is unused.
full_anno(L, #core{wanted=false}=St) ->
    [result_not_wanted|lineno_anno(L, St)];
full_anno(L, #core{wanted=true}=St) ->
    lineno_anno(L, St).

%% Basic annotation: [Line | FileInfo] with the atom
%% 'compiler_generated' appended when erl_anno marks the location as
%% generated ([X || Bool] yields [X] or [] depending on the boolean).
lineno_anno(L, St) ->
    Line = erl_anno:line(L),
    Generated = erl_anno:generated(L),
    CompilerGenerated = [compiler_generated || Generated],
    [Line] ++ St#core.file ++ CompilerGenerated.

%% Extract the plain annotation list from either an #a{} wrapper or a
%% raw annotation list.
get_lineno_anno(Ce) ->
    case get_anno(Ce) of
        #a{anno=A} -> A;
        A when is_list(A) -> A
    end.
%% Mark an annotation as compiler-generated so that no warning is
%% emitted for the construct it belongs to (see add_warning/3).
no_compiler_warning(Anno) ->
    erl_anno:set_generated(true, Anno).

%%
%% The following three functions are used both with cerl:cerl() and with i()'s
%%

%% The annotation is stored in element 2 of both representations.
-spec get_anno(cerl:cerl() | i()) -> term().

get_anno(C) -> element(2, C).

-spec set_anno(cerl:cerl() | i(), term()) -> cerl:cerl().

set_anno(C, A) -> setelement(2, C, A).
%% A term is "simple" if it is a variable, a literal, or a
%% cons/tuple/map/map-pair built only from simple terms — i.e. it can
%% be evaluated without doing any real work.
-spec is_simple(cerl:cerl() | i()) -> boolean().

is_simple(#c_var{}) -> true;
is_simple(#c_literal{}) -> true;
is_simple(#c_cons{hd=H,tl=T}) ->
    is_simple(H) andalso is_simple(T);
is_simple(#c_tuple{es=Es}) -> is_simple_list(Es);
is_simple(#c_map{es=Es}) -> is_simple_list(Es);
is_simple(#c_map_pair{key=K,val=V}) ->
    is_simple(K) andalso is_simple(V);
is_simple(_) -> false.

%% All elements of the list must be simple.
-spec is_simple_list([cerl:cerl()]) -> boolean().

is_simple_list(Es) -> lists:all(fun is_simple/1, Es).
%%%
%%% Handling of warnings.
%%%

%% Descriptors for the warnings this module can emit.  The original
%% type omitted the {map_key_repeated,Key} variant even though
%% format_error/1 handles it; it is included here so the spec matches
%% the implementation.
-type err_desc() :: 'bad_binary' | 'nomatch' | {'map_key_repeated', term()}.

%% Turn an error descriptor into a human-readable message.  The
%% io_lib:format/2 clauses return deep chardata, not a flat string, so
%% the return type is chardata (the previous nonempty_string() spec was
%% wrong for those clauses).
-spec format_error(err_desc()) -> io_lib:chars().

format_error(nomatch) ->
    "pattern cannot possibly match";
format_error(bad_binary) ->
    "binary construction will fail because of a type mismatch";
format_error({map_key_repeated,Key}) when is_atom(Key) ->
    %% Atom keys are printed in single quotes via ~w.
    io_lib:format("key '~w' will be overridden in expression", [Key]);
format_error({map_key_repeated,Key}) ->
    io_lib:format("key ~p will be overridden in expression", [Key]).
%% Record a warning in the state, unless the annotation is marked as
%% compiler-generated — generated code was not written by the user, so
%% warning about it would only be confusing.
add_warning(Anno, Term, #core{ws=Ws,file=[{file,File}]}=St) ->
    case erl_anno:generated(Anno) of
        false ->
            St#core{ws=[{File,[{erl_anno:location(Anno),?MODULE,Term}]}|Ws]};
        true ->
            St
    end.
-module(influxdb_line_encoding).
-export([
encode/1
]).
-export_type([
point/0,
measurement/0,
tags/0,
fields/0,
timestamp/0
]).
-type point() :: {measurement(), tags(), fields(), timestamp()} | {measurement(), tags(), fields()}.
-type measurement() :: key().
-type tags() :: #{key() => iodata() | atom()}.
-type fields() :: #{key() => number() | boolean() | iodata() | atom()}.
-type key() :: iodata() | atom().
-type timestamp() :: integer().
-spec encode([point()]) -> iodata().
%% @doc encode a list of points — `{Measurement, Tags, Fields}' or
%% `{Measurement, Tags, Fields, Timestamp}' tuples — into one
%% line-protocol line per point, preserving list order.
encode(Points) when is_list(Points) ->
    lists:map(
        fun ({Measurement, Tags, Fields, Timestamp}) ->
                encode(Measurement, Tags, Fields, Timestamp);
            ({Measurement, Tags, Fields}) ->
                encode(Measurement, Tags, Fields, undefined)
        end,
        Points).
-spec encode(measurement(), tags(), fields(), timestamp() | undefined) -> iodata().
%% @doc encode `Measurement`, `Tags`, `Fields` and `Timestamp` into a line,
%% including the final line feed.  Layout:
%% measurement[,tag=value...] field=value[,...][ timestamp]\n
%% A timestamp of `undefined` is omitted entirely.
encode(Measurement, Tags, Fields, Timestamp) ->
    [encode_measurement(Measurement), encode_tags(Tags), $\s, encode_fields(Fields), encode_timestamp(Timestamp), $\n].
% Internals

%% @doc encode the measurement name; atoms are stringified first, and
%% `,` and ` ` (space) are backslash-escaped.
encode_measurement(Measurement) when is_atom(Measurement) ->
    encode_measurement(atom_to_list(Measurement));
encode_measurement(Measurement) ->
    escape(Measurement, fun escape_measurement_char/1).

%% Escape one measurement character: commas and spaces get a backslash
%% prefix, anything else passes through unchanged.
escape_measurement_char(C) when C =:= $,; C =:= $\s -> [$\\, C];
escape_measurement_char(C) -> C.
%% @doc encode the tags map, escaping `,`, ` ` (space) and `=` in both the tag
%% name and tag value. The encoded tags map includes the `,` prefix in the case
%% of the non empty map.  Keys are normalized to strings and sorted;
%% ukeysort also drops duplicates that normalize to the same key.
encode_tags(Tags) ->
    encode_tags(lists:ukeysort(1, [{normalize_key(Key), Value} || {Key, Value} <- maps:to_list(Tags)]), []).

%% Emit ",Key=Value" per tag; the accumulator is built in reverse and
%% flipped once at the end.
encode_tags([], Acc) ->
    lists:reverse(Acc);
encode_tags([{Key, Value} | Rest], Acc) ->
    encode_tags(Rest, [encode_tag_value(Value), $=, encode_key(Key), $, | Acc]).

%% @doc encode the fields map, escaping `,`, ` ` (space) and `=` in the field
%% name and using the right encoding for the field value based on its type.
%% NOTE(review): there is no clause for an empty field list, so an empty
%% Fields map crashes with function_clause — presumably intentional, as a
%% line needs at least one field; confirm with callers.
encode_fields(Fields) ->
    encode_fields(lists:ukeysort(1, [{normalize_key(Key), Value} || {Key, Value} <- maps:to_list(Fields)]), []).

%% The last pair is emitted without a trailing comma separator.
encode_fields([{Key, Value}], Acc) ->
    lists:reverse(Acc, [encode_key(Key), $=, encode_field_value(Value)]);
encode_fields([{Key, Value} | Rest], Acc) ->
    encode_fields(Rest, [$,, encode_field_value(Value), $=, encode_key(Key) | Acc]).
%% @doc encode a timestamp as iodata: a defined (integer) timestamp is
%% rendered with a single leading ` ` (space); `undefined` produces
%% nothing at all.
encode_timestamp(undefined) ->
    [];
encode_timestamp(Timestamp) ->
    [$\s, integer_to_binary(Timestamp)].
%% @doc normalize a key (iodata() | atom()) to a flat string(), so that
%% keys can be compared/sorted uniformly.
normalize_key(Key) when is_atom(Key) ->
    atom_to_list(Key);
normalize_key(Key) ->
    unicode:characters_to_list(Key).
%% @doc encode a tag or field key, backslash-escaping the line-protocol
%% separators `,`, `=` and ` ` (space).
encode_key(Data) ->
    escape(Data, fun
        (C) when C =:= $, orelse C =:= $= orelse C =:= $\s -> [$\\, C];
        (C) -> C
    end).

%% @doc encode a tag value: atoms are stringified first, then the same
%% separators as in keys are escaped.
encode_tag_value(Data) when is_atom(Data) ->
    encode_tag_value(atom_to_list(Data));
encode_tag_value(Data) ->
    escape(Data, fun
        (C) when C =:= $, orelse C =:= $= orelse C =:= $\s -> [$\\, C];
        (C) -> C
    end).

%% @doc encode a field value.
%% Clause order matters: `true`/`false` must match before the generic
%% atom clause, otherwise booleans would be rendered as quoted strings
%% instead of `t`/`f`.
encode_field_value(Value) when is_integer(Value) ->
    %% Integer fields carry an `i` suffix.
    [erlang:integer_to_binary(Value), $i];
encode_field_value(Value) when is_float(Value) ->
    %% Up to 10 decimals; `compact` trims trailing zeros.
    erlang:float_to_binary(Value, [{decimals, 10}, compact]);
encode_field_value(true) ->
    <<"t">>;
encode_field_value(false) ->
    <<"f">>;
encode_field_value(Value) when is_atom(Value) ->
    %% Any other atom is rendered as a (quoted) string.
    encode_field_value(atom_to_list(Value));
encode_field_value(Value) when is_list(Value) orelse is_binary(Value) ->
    %% String fields are double-quoted; embedded `"` is escaped.
    [$", escape(Value, fun
        (C) when C =:= $" -> [$\\, C];
        (C) -> C
    end), $"].
%% @doc internal function used for traversing an iodata structure and
%% escaping characters using the given escape function.  The nesting of
%% the input is preserved (the result is again iodata); binaries are
%% round-tripped through a unicode code-point list so that multi-byte
%% characters are handled as whole characters.
escape(Data, EscapeFun) when is_list(Data) ->
    [if
        is_integer(C) -> EscapeFun(C);
        is_list(C) orelse is_binary(C) -> escape(C, EscapeFun)
    end || C <- Data];
escape(Bin, EscapeFun) when is_binary(Bin) ->
    unicode:characters_to_binary(escape(unicode:characters_to_list(Bin), EscapeFun)).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

%% Pretty-print any term for use as an EUnit test label.
-define(label(S), unicode:characters_to_list(io_lib:format("~p", [S]))).

%% Measurement encoding: strings, binaries and iolists are accepted;
%% commas and spaces are backslash-escaped, multi-byte characters pass
%% through untouched.
encode_measurement_test_() ->
    Tests = #{
        "string" => "string",
        <<"binary">> => "binary",
        ["io", <<"l">>, $i, <<"st">>] => "iolist",
        "spaces🚀️, commas and emoji" => "spaces🚀️\\,\\ commas\\ and\\ emoji",
        "spaces🚀️ and emoji" => "spaces🚀️\\ and\\ emoji"
    },
    [{?label(Measurement), ?_assertEqual(Encoded, unicode:characters_to_list(encode_measurement(Measurement)))}
     || {Measurement, Encoded} <- maps:to_list(Tests)].

%% Tag encoding: a leading comma per tag; keys are normalized and
%% sorted; the empty map encodes to nothing.
encode_tags_test_() ->
    Tests = #{
        #{} => "",
        #{"key" => "value"} => ",key=value",
        #{"b" => "B", ["a", <<"1">>] => "A"} => ",a1=A,b=B"
    },
    [{?label(Tags), ?_assertEqual(Encoded, unicode:characters_to_list(encode_tags(Tags)))}
     || {Tags, Encoded} <- maps:to_list(Tests)].

%% Field value encoding per type: plain float, `i`-suffixed integer,
%% boolean t/f, double-quoted string.
encode_fields_test_() ->
    Tests = #{
        #{"float" => 1.2} => "float=1.2",
        #{"integer" => 1} => "integer=1i",
        #{"true" => true} => "true=t",
        #{"false" => false} => "false=f",
        #{"string" => "string"} => "string=\"string\""
    },
    [{?label(Fields), ?_assertEqual(Encoded, unicode:characters_to_list(encode_fields(Fields)))}
     || {Fields, Encoded} <- maps:to_list(Tests)].

%% Timestamp encoding: empty for undefined, space-prefixed otherwise.
encode_timestamp_test_() ->
    Now = erlang:system_time(nano_seconds),
    Tests = #{
        undefined => "",
        Now => [$\s, erlang:integer_to_binary(Now)]
    },
    [{?label(Timestamp), ?_assertEqual(Encoded, encode_timestamp(Timestamp))}
     || {Timestamp, Encoded} <- maps:to_list(Tests)].
-endif.
%% Copyright 2011 <NAME> <<EMAIL>>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% NOTE: This module implements finite field arithmetic over the galois field
% GF(256) with a specified prime modulus.
-module(gf256).
-export([field/1, add/3, subtract/3, multiply/3]).
-export([exponent/2, log/2, inverse/2, value/3]).
-export([monomial_product/4, polynomial_product/3, divide/3]).
-record(gf256, {exponent, log}).
% UNUSED
%-record(gf256poly, {field, coefficients}).
% NOTE: Implementation and use are greatly simplified by expressing polynomials
% simply as lists of coefficient values, rather than explicit reification of
% polynomial "objects".
-define(RANGE, 255).
%%
%% Build the lookup tables for GF(2^8) with the given prime modulus:
%% an exponent table mapping i -> alpha^i and its inverse log table.
field(PrimeModulus) ->
    Exponent = exponent_table(1, PrimeModulus, 0, []),
    Log = log_table(Exponent, 1, [0]),
    #gf256{exponent = Exponent, log = Log}.

%
%% Generate the 256-entry exponent table by repeated doubling, reducing
%% by the modulus whenever the value overflows 8 bits.  An explicit
%% counter N replaces the original `length(Acc)' in the guard:
%% length/1 is O(n), which made table construction quadratic.
exponent_table(X, Modulus, N, Acc) when N =< ?RANGE ->
    X0 = case X bsl 1 of
             V when V > ?RANGE -> V bxor Modulus;
             V -> V
         end,
    exponent_table(X0, Modulus, N + 1, [X|Acc]);
exponent_table(_, _, _, Acc) ->
    lists:reverse(Acc).

%
%% Invert the exponent table: entry V of the log table is the index I
%% with exponent[I] =:= V.  Index 0 is a placeholder, since the log of
%% 0 is undefined in the field.
log_table(E, Count, Acc) when Count =< ?RANGE ->
    X = index_of(Count, 0, E),
    log_table(E, Count + 1, [X|Acc]);
log_table(_, _, Acc) ->
    lists:reverse(Acc).
%
%% Zero-based position of the first occurrence of Target in the list.
%% Crashes with function_clause when Target is absent (list exhausted),
%% matching the original behaviour.
index_of(Target, Index, [Head | Tail]) ->
    case Head of
        Target -> Index;
        _ -> index_of(Target, Index + 1, Tail)
    end.
%%
%% Addition in GF(2^8) is bitwise XOR.  For polynomials (coefficient
%% lists, most significant first) coefficients are XOR-ed pairwise from
%% the low-order end, which is why both lists are reversed before the
%% add/4 loop; [0] is the zero polynomial and is an identity.
add(#gf256{}, A, B) when is_integer(A), is_integer(B) ->
    A bxor B;
add(#gf256{}, [0], B) when is_list(B) ->
    B;
add(#gf256{}, A, [0]) when is_list(A) ->
    A;
add(F = #gf256{}, A, B) when is_list(A), is_list(B) ->
    add(F, lists:reverse(A), lists:reverse(B), []).

%% Pairwise XOR of two reversed coefficient lists; prepending while
%% consuming reversed input yields the result in original order.  The
%% longer operand's extra high-order coefficients are copied through.
add(F, [H|T], [H0|T0], Acc) ->
    add(F, T, T0, [H bxor H0 | Acc]);
add(F, [H|T], [], Acc) ->
    add(F, T, [], [H|Acc]);
add(F, [], [H|T], Acc) ->
    add(F, [], T, [H|Acc]);
add(_, [], [], Acc) ->
    Acc.

%% NOTE: Subtraction is the same as addition over a galois field.
subtract(F = #gf256{}, A, B) ->
    add(F, A, B).

%%
%% Multiplication via the log/antilog tables: a*b = exp(log a + log b),
%% with 0 handled separately since log(0) is undefined.
multiply(#gf256{}, 0, _) ->
    0;
multiply(#gf256{}, _, 0) ->
    0;
multiply(F = #gf256{}, A, B) ->
    X = (log(F, A) + log(F, B)) rem ?RANGE,
    exponent(F, X).

%%
%% Table lookups use 1-based lists:nth over the fixed 256-element
%% tables (O(n), acceptable at this size).
exponent(#gf256{exponent = E}, X) ->
    lists:nth(X + 1, E).

%%
log(#gf256{log = L}, X) ->
    lists:nth(X + 1, L).

%%
%% Multiplicative inverse: 1/x = exp(255 - log x).
inverse(F = #gf256{}, X) ->
    exponent(F, ?RANGE - log(F, X)).
%%
%% Evaluate a polynomial (coefficient list, most significant first) at
%% X using Horner's method.  X = 0 and X = 1 are special-cased: at 0
%% only the constant (last) term remains; at 1 the value is the field
%% sum (XOR fold) of all coefficients.
value(#gf256{}, Poly, 0) ->
    lists:last(Poly);
value(F = #gf256{}, Poly, 1) ->
    lists:foldl(fun(X, Sum) -> gf256:add(F, X, Sum) end, 0, Poly);
value(F = #gf256{}, [H|T], X) ->
    value(F, T, X, H).

%
%% Horner step: Acc := Acc*X + Coeff for each remaining coefficient.
value(F, [H|T], X, Acc) ->
    Acc0 = multiply(F, X, Acc),
    Acc1 = add(F, Acc0, H),
    value(F, T, X, Acc1);
value(_, [], _, Acc) ->
    Acc.

%
%% Coeff * x^Degree as a coefficient list; the zero monomial collapses
%% to [0] regardless of degree.
monomial(#gf256{}, 0, Degree) when Degree >= 0 ->
    [0];
monomial(#gf256{}, Coeff, Degree) when Degree >= 0 ->
    [Coeff|lists:duplicate(Degree, 0)].

%%
%% Multiply a polynomial by Coeff * x^Degree: scale every coefficient
%% by Coeff, then shift by appending Degree zero coefficients.
monomial_product(F, Poly, Coeff, Degree) ->
    monomial_product(F, Poly, Coeff, Degree, []).

%
monomial_product(F, [H|T], C, D, Acc) ->
    P = gf256:multiply(F, H, C),
    monomial_product(F, T, C, D, [P|Acc]);
monomial_product(F, [], C, D, Acc) when D > 0 ->
    %% Input exhausted: pad with D zeros (the x^Degree shift).
    monomial_product(F, [], C, D - 1, [0|Acc]);
monomial_product(_, [], _, 0, Acc) ->
    lists:reverse(Acc).
%%
%% Polynomial multiplication; [0] is the zero polynomial and
%% short-circuits to [0].
polynomial_product(_, [0], _) ->
    [0];
polynomial_product(_, _, [0]) ->
    [0];
polynomial_product(F, P0, P1) ->
    polynomial_product0(F, P0, P1, [], []).

%
%% Shift-and-add multiplication: for each coefficient of P0 in turn,
%% fold X*P1 into the running partial sum P2, emitting one result
%% coefficient per step.
polynomial_product0(F, [H|T], P1, P2, Acc) ->
    [H0|T0] = polynomial_product1(F, H, P1, P2, []),
    polynomial_product0(F, T, P1, T0, [H0|Acc]);
polynomial_product0(F, [], P1, [H|T], Acc) ->
    %% P0 exhausted: flush the remaining partial-sum coefficients.
    polynomial_product0(F, [], P1, T, [H|Acc]);
polynomial_product0(_, [], _, [], Acc) ->
    lists:reverse(Acc).

%
%% One row of the product: X * P1 added (XOR-ed) to the previous
%% partial sum; a missing partial-sum coefficient counts as 0.
polynomial_product1(_, _, [], [], Acc) ->
    lists:reverse(Acc);
polynomial_product1(F, X, [H|T], [], Acc) ->
    Coeff = polynomial_product2(F, X, H, 0),
    polynomial_product1(F, X, T, [], [Coeff|Acc]);
polynomial_product1(F, X, [H|T], [H0|T0], Acc) ->
    Coeff = polynomial_product2(F, X, H, H0),
    polynomial_product1(F, X, T, T0, [Coeff|Acc]).

%% Single multiply-accumulate step: H0 + X*H in the field.
polynomial_product2(F, X, H, H0) ->
    Coeff = multiply(F, X, H),
    add(F, H0, Coeff).

%%
%% Polynomial long division of A by B (B must not be the zero
%% polynomial); returns {Quotient, Remainder}.  IDLT is the inverse of
%% B's leading coefficient, computed once up front.
divide(F = #gf256{}, A, B = [H|_]) when B =/= [0] ->
    IDLT = inverse(F, H),
    divide(F, IDLT, B, [0], A).

%
%% While the remainder's degree is at least B's degree, cancel its
%% leading term: scale B to match, add (XOR) the scaled B so the head
%% becomes 0, then drop that head.
divide(F, IDLT, B, Q, R = [H|_]) when length(R) >= length(B), R =/= [0] ->
    Diff = length(R) - length(B),
    Scale = multiply(F, H, IDLT),
    M = monomial(F, Scale, Diff),
    Q0 = add(F, Q, M),
    Coeffs = monomial_product(F, B, Scale, Diff),
    [_|R0] = add(F, R, Coeffs),
    divide(F, IDLT, B, Q0, R0);
divide(_, _, _, Q, R) ->
    {Q, R}.
%%%------------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2015, <NAME>, <NAME>
%%% @doc
%%% Functions to divide mapped trees in clusters
%%% @end
%%% Created : 2 Jun 2015 by <NAME>
%%%------------------------------------------------------------------------
-module(tree_clustering).
-export([cluster/4]).
%%--------------------------------------------------------------------
%% @doc
%% Takes two trees and a Mapping between them, divides the
%% trees in contiguous clusters. The criteria for division
%% is that the nodes in a cluster must be contiguous and
%% either common to both trees, or exclusive to one of them.
%% It applies {@link fix_frontiers:fix_frontiers/4}.
%% The function returns a tuple with four elements: the (possibly
%% updated) mapping, the list of common clusters, the list of
%% exclusive clusters for Tree1, and the list of exclusive clusters
%% for Tree2.
%% @end
%%--------------------------------------------------------------------
-spec cluster(Pass, Mapping, Tree1 :: tree:tree(), Tree2 :: tree:tree()) ->
          {Mapping,
           cluster_dict:cluster_dict(tree:tree_node()),
           cluster_dict:cluster_dict(tree:tree_node()),
           cluster_dict:cluster_dict(tree:tree_node())} when
      Pass :: 1 | 2,
      Mapping :: da_map:da_map(tree:tree_node(),
                               tree:tree_node()).
cluster(Pass, Mapping, Tree1, Tree2) ->
    %% Unmap the roots first so function headers end up in the
    %% exclusive clusters even when identical (see detach_root/3).
    TmpMapping = detach_root(Mapping, Tree1, Tree2),
    Comm = da_map:fold(comm_cluster(Tree1, Tree2, TmpMapping),
                       cluster_dict:new(), TmpMapping),
    {NewComm, NewMapping} = fix_frontiers:delete_trivial_clusters(
                              fix_frontiers:fix_frontiers(Pass, Tree1, Tree2, TmpMapping, Comm)),
    %% Exclusive clusters are computed against the frontier-fixed
    %% mapping, not the original one.
    Ex1 = tree:breadth_fold(tree_cluster(Tree1, NewMapping),
                            cluster_dict:new(), Tree1),
    Ex2 = tree:breadth_fold(tree_cluster(Tree2, NewMapping),
                            cluster_dict:new(), Tree2),
    {NewMapping, NewComm, Ex1, Ex2}.
%%--------------------------------------------------------------------
%% @doc
%% Detaches the root nodes of both trees in the Mapping.
%% This is intended to cause a chain reaction so that headers of
%% functions will be replicated in the exclusive trees even if they
%% are identical.
%% @end
%%--------------------------------------------------------------------
-spec detach_root(Mapping :: da_map:da_map(tree:tree_node(),
                                           tree:tree_node()),
                  Tree1 :: tree:tree(), Tree2 :: tree:tree()) ->
          da_map:da_map(tree:tree_node(),
                        tree:tree_node()).
detach_root(Mapping, Tree1, Tree2) ->
    Node1 = tree:get_root_node(Tree1),
    Node2 = tree:get_root_node(Tree2),
    %% Remove both directions: Tree1's root as a key and Tree2's root
    %% as a value.
    da_map:delete_by_value(Node2,
                           da_map:delete_by_key(Node1, Mapping)).
%%--------------------------------------------------------------------
%% @doc
%% Takes two trees, and produces a function that,
%% when folded against a mapping between the trees,
%% produces a cluster dictionary with the contiguous
%% mapped clusters of the tree.
%% @end
%%--------------------------------------------------------------------
-spec comm_cluster(Tree1 :: tree:tree(), Tree2 :: tree:tree(),
                   Mapping :: da_map:da_map(tree:tree_node(),
                                            tree:tree_node())) ->
          (fun((Node1 :: tree:tree_node(),
                Node2 :: tree:tree_node(),
                Comm :: cluster_dict:cluster_dict(tree:tree_node())) ->
                   cluster_dict:cluster_dict(tree:tree_node()))).
comm_cluster(Tree1, Tree2, Mapping) ->
    %% Close over the trees and mapping; da_map:fold supplies each
    %% mapped pair plus the accumulator.
    fun (Node1, Node2, Comm) ->
            comm_cluster_fold(Tree1, Tree2, Mapping,
                              Node1, Node2, Comm)
    end.

%%---------------------------------------------------------------------
%% @doc
%% Adds the pair of nodes to the cluster dictionary ensuring that
%% they get merged to adjacent nodes. This is done by creating
%% a new cluster with the pair of nodes and their parents, (only
%% if the parents are also mapped), and merging this cluster
%% with the existing ones {@link cluster_dict:merge_cluster/2}.
%% @end
%%---------------------------------------------------------------------
-spec comm_cluster_fold(Tree1 :: tree:tree(), Tree2 :: tree:tree(),
                        Mapping :: da_map:da_map(tree:tree_node(),
                                                 tree:tree_node()),
                        Node1 :: tree:tree_node(),
                        Node2 :: tree:tree_node(),
                        CommCluster :: cluster_dict:cluster_dict(tree:tree_node())) ->
          cluster_dict:cluster_dict(tree:tree_node()).
comm_cluster_fold(Tree1, Tree2, Mapping, Node1, Node2, CommCluster) ->
    NodePair = tree:create_node_pair(Node1, Node2),
    case {tree:get_parent(Node1, Tree1),
          tree:get_parent(Node2, Tree2)} of
        %% At least one node is a root: nothing to merge with upwards.
        {error, _} -> add_to_cluster(NodePair, CommCluster);
        {_, error} -> add_to_cluster(NodePair, CommCluster);
        {{ok, PNode1}, {ok, PNode2}} ->
            PosInParent1 = tree:get_pos_in_parent(Node1, PNode1),
            PosInParent2 = tree:get_pos_in_parent(Node2, PNode2),
            PNodePair = tree:create_node_pair(PNode1, PNode2),
            %% Attach to the parent pair only when the parents are
            %% themselves mapped to each other AND both children sit at
            %% the same child position, i.e. the pairs are contiguous.
            add_to_cluster(NodePair, PNodePair, CommCluster,
                           da_map:has_pair(PNode1, PNode2, Mapping)
                           andalso (PosInParent1 =:= PosInParent2))
    end.
%%--------------------------------------------------------------------
%% @doc
%% Takes a Tree and a Common cluster dictionary and produces a
%% function that, when folded against a Tree will return a
%% cluster dictionary with the exclusive clusters of the tree.
%% @end
%%--------------------------------------------------------------------
-spec tree_cluster(Tree :: tree:tree(),
                   Mapping :: da_map:da_map(tree:tree_node(),
                                            tree:tree_node())) ->
          (fun((Node :: tree:tree_node(),
                Acc :: cluster_dict:cluster_dict(tree:tree_node())) ->
                   cluster_dict:cluster_dict(tree:tree_node()))).
tree_cluster(Tree, Mapping) ->
    %% Closure for tree:breadth_fold/3 over the tree's nodes.
    fun (Node, Acc) ->
            tree_cluster_fold(Tree, Mapping, Node, Acc)
    end.

%%---------------------------------------------------------------------
%% @doc
%% Adds the node to the cluster dictionary ensuring that
%% they get merged to adjacent nodes. This is done by creating
%% a new cluster with the nodes and its parent, (only
%% if the parent is not mapped), and merging this cluster
%% with the existing ones {@link cluster_dict:merge_cluster/2}.
%% @end
%%---------------------------------------------------------------------
-spec tree_cluster_fold(Tree :: tree:tree(),
                        Mapping :: da_map:da_map(tree:tree_node(),
                                                 tree:tree_node()),
                        Node :: tree:tree_node(),
                        ExCluster :: cluster_dict:cluster_dict(tree:tree_node())) ->
          cluster_dict:cluster_dict(tree:tree_node()).
tree_cluster_fold(Tree, Mapping, Node, ExCluster) ->
    case is_mapped(Node, Mapping) of
        %% Mapped nodes belong to common clusters, not exclusive ones.
        true -> ExCluster;
        false -> case tree:get_parent(Node, Tree) of
                     error -> add_to_cluster(Node, ExCluster);
                     {ok, PNode} ->
                         %% Join the parent only if it is exclusive too.
                         add_to_cluster(Node, PNode, ExCluster,
                                        not (is_mapped(PNode,Mapping)))
                 end
    end.
%%--------------------------------------------------------------------
%% @doc
%% Returns true if Node appears on either side (key or value) of the
%% DaMap.
%% @end
%%--------------------------------------------------------------------
-spec is_mapped(Node :: NodeType,
                DaMap :: da_map:da_map(Key :: NodeType,
                                       Value :: NodeType)) -> boolean().
is_mapped(Node, Mapping) ->
    %% Short-circuits exactly like the orelse form: the value side is
    %% only consulted when the key side misses.
    case da_map:has_key(Node, Mapping) of
        true -> true;
        false -> da_map:has_value(Node, Mapping)
    end.
%%--------------------------------------------------------------------
%% @doc
%% Wraps Node in a fresh singleton cluster and merges it into the
%% ClusterDict.
%% @end
%%--------------------------------------------------------------------
-spec add_to_cluster(Node :: N,
                     ClusterDict :: cluster_dict:cluster_dict(N)) ->
          cluster_dict:cluster_dict(N).
add_to_cluster(Node, ClusterDict) ->
    cluster_dict:merge_cluster(cluster:new_cluster(Node), ClusterDict).

%%--------------------------------------------------------------------
%% @doc
%% Like add_to_cluster/2, but when InsertParent is true the new
%% cluster also contains ParentNode, linked as the parent of Node.
%% The boolean is dispatched on in the function head.
%% @see add_to_cluster/2
%% @end
%%--------------------------------------------------------------------
-spec add_to_cluster(Node :: N, ParentNode :: N,
                     cluster_dict:cluster_dict(N), boolean()) -> cluster_dict:cluster_dict(N).
add_to_cluster(Node, _ParentNode, ClusterDict, false) ->
    add_to_cluster(Node, ClusterDict);
add_to_cluster(Node, ParentNode, ClusterDict, true) ->
    cluster_dict:merge_cluster(
      cluster:new_parent_child_to_cluster(ParentNode, Node),
      ClusterDict).
% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved via the world wide web at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
-module(bloom).
-author("<NAME> <<EMAIL>>").
-export([sbf/1, sbf/2, sbf/3, sbf/4,
bloom/1, bloom/2,
member/2, add/2,
size/1, capacity/1]).
-export([is_element/2, add_element/2]). % alternative names
-import(math, [log/1, pow/2]).
%% Synonyms following the sets/ordsets naming convention.
is_element(E, B) -> member(E, B).
add_element(E, B) -> add(E, B).
%% Based on
%% Scalable Bloom Filters
%% <NAME>, <NAME>, <NAME>, <NAME>
%% Information Processing Letters
%% Volume 101, Issue 6, 31 March 2007, Pages 255-261
%%
%% Provides scalable bloom filters that can grow indefinitely while
%% ensuring a desired maximum false positive probability. Also provides
%% standard partitioned bloom filters with a maximum capacity. Bit arrays
%% are dimensioned as a power of 2 to enable reusing hash values across
%% filters through bit operations. Double hashing is used (no need for
%% enhanced double hashing for partitioned bloom filters).
%% modified slightly by <NAME> to make it a single file
%% (incorporated the array-based bitarray internally)
-define(W, 27).
-record(bloom, {
e, % error probability
n, % maximum number of elements
mb, % 2^mb = m, the size of each slice (bitvector)
size, % number of elements
a % list of bitvectors
}).
-record(sbf, {
e, % error probability
r, % error probability ratio
s, % log 2 of growth ratio
size, % number of elements
b % list of plain bloom filters
}).
%% Constructors for (fixed capacity) bloom filters
%%
%% N - capacity
%% E - error probability (default 0.001)
bloom(N) -> bloom(N, 0.001).

bloom(N, E) when is_number(N), N > 0,
                 is_float(E), E > 0, E < 1,
                 N >= 4/E -> % rule of thumb; due to double hashing
    bloom(size, N, E).

%% Dim is either the requested capacity (Mode = size) or the log2 bit
%% size of each slice directly (Mode = bits, used when an sbf grows).
bloom(Mode, Dim, E) ->
    %% K slices (one bit set per slice per element); P is the per-slice
    %% probability budget.
    K = 1 + trunc(log2(1/E)),
    P = pow(E, 1 / K),
    case Mode of
        %% Slice sizes are rounded up to a power of 2 so hash values
        %% can be reused across filters with bit masks.
        size -> Mb = 1 + trunc(-log2(1 - pow(1 - P, 1 / Dim)));
        bits -> Mb = Dim
    end,
    M = 1 bsl Mb,
    %% Actual capacity implied by the chosen slice size.
    N = trunc(log(1-P) / log(1-1/M)),
    #bloom{e=E, n=N, mb=Mb, size = 0,
           a = [bitarray_new(1 bsl Mb) || _ <- lists:seq(1, K)]}.
log2(X) -> log(X) / log(2).
%% Constructors for scalable bloom filters
%%
%% N - initial capacity before expanding
%% E - error probability
%% S - growth ratio when full (log 2) can be 1, 2 or 3
%% R - tightening ratio of error probability
sbf(N) -> sbf(N, 0.001).
sbf(N, E) -> sbf(N, E, 1).
%% Each growth ratio S has a recommended default tightening ratio R.
sbf(N, E, 1) -> sbf(N, E, 1, 0.85);
sbf(N, E, 2) -> sbf(N, E, 2, 0.75);
sbf(N, E, 3) -> sbf(N, E, 3, 0.65).
sbf(N, E, S, R) when is_number(N), N > 0,
                     is_float(E), E > 0, E < 1,
                     is_integer(S), S > 0, S < 4,
                     is_float(R), R > 0, R < 1,
                     N >= 4/(E*(1-R)) -> % rule of thumb; due to double hashing
    %% Start with a single plain filter whose error budget is E*(1-R);
    %% every filter added later gets a geometrically tighter budget so
    %% the total error stays bounded by E (see add/2).
    #sbf{e=E, s=S, r=R, size=0, b=[bloom(N, E*(1-R))]}.
%% Returns number of elements
%%
size(#bloom{size=Size}) -> Size;
size(#sbf{size=Size}) -> Size.

%% Returns capacity: fixed for a plain filter, unbounded for a
%% scalable one.
capacity(#bloom{n=N}) -> N;
capacity(#sbf{}) -> infinity.

%% Test for membership
%%
member(Elem, #bloom{mb=Mb}=B) ->
    Hashes = make_hashes(Mb, Elem),
    hash_member(Hashes, B);
member(Elem, #sbf{b=[H|_]}=Sbf) ->
    %% Hash material is sized for the newest (head) filter; smaller
    %% older filters reuse it through narrower bit masks.
    Hashes = make_hashes(H#bloom.mb, Elem),
    hash_member(Hashes, Sbf).

%% Double hashing: the probe index for slice k is I0 + k*I1 modulo the
%% slice size (the mask arithmetic in all_set/4).
hash_member(Hashes, #bloom{mb=Mb, a=A}) ->
    Mask = 1 bsl Mb -1,
    {I1, I0} = make_indexes(Mask, Hashes),
    all_set(Mask, I1, I0, A);
hash_member(Hashes, #sbf{b=B}) ->
    %% Present if any constituent filter reports the element.
    lists:any(fun(X) -> hash_member(Hashes, X) end, B).
%% Derive the 32-bit hash material for an element: one phash2 value is
%% enough when slice indexes need at most 16 bits, otherwise a second
%% independent hash (over [Elem] instead of {Elem}) is produced too.
make_hashes(Mb, Elem) when Mb =< 16 ->
    erlang:phash2({Elem}, 1 bsl 32);
make_hashes(Mb, Elem) when Mb =< 32 ->
    {erlang:phash2({Elem}, 1 bsl 32), erlang:phash2([Elem], 1 bsl 32)}.

%% Reduce the hash material to the {Step, Start} index pair used by the
%% double-hashing probe sequence.  For small masks a single 32-bit hash
%% supplies both halves (high 16 bits and low bits).
make_indexes(Mask, {HashA, HashB}) when Mask > 1 bsl 16 ->
    masked_pair(Mask, HashA, HashB);
make_indexes(Mask, {HashA, _HashB}) ->
    make_indexes(Mask, HashA);
make_indexes(Mask, Hash) ->
    masked_pair(Mask, Hash bsr 16, Hash).

%% Mask both values down to valid slice indexes.
masked_pair(Mask, X, Y) ->
    {X band Mask, Y band Mask}.
all_set(_Mask, _I1, _I, []) -> true;
all_set(Mask, I1, I, [H|T]) ->
case bitarray_get(I, H) of
true -> all_set(Mask, I1, (I+I1) band Mask, T);
false -> false
end.
%% Adds element to set
%%
%% Adds Elem to the filter. For a scalable filter the element goes into
%% the newest (head) slice; when that slice is full a fresh slice is
%% prepended with Mb+S index bits and a tightened error probability E*R.
%% Elements already present (in any slice) are not re-added.
%% NOTE(review): bloom(bits, Mb + S, E * R) relies on a bloom/3
%% constructor defined earlier in this file -- confirm its signature.
add(Elem, #bloom{mb=Mb} = B) ->
    Hashes = make_hashes(Mb, Elem),
    hash_add(Hashes, B);
add(Elem, #sbf{size=Size, r=R, s=S, b=[H|T]=Bs}=Sbf) ->
    #bloom{mb=Mb, e=E, n=N, size=HSize} = H,
    Hashes = make_hashes(Mb, Elem),
    case hash_member(Hashes, Sbf) of
        true -> Sbf;
        false ->
            case HSize < N of
                true -> Sbf#sbf{size=Size+1, b=[hash_add(Hashes, H)|T]};
                false ->
                    B = add(Elem, bloom(bits, Mb + S, E * R)),
                    Sbf#sbf{size=Size+1, b=[B|Bs]}
            end
    end.
%% Sets the bits for Hashes in each bit array. If every probed bit was
%% already set the element is considered present and the filter (and its
%% size counter) is returned unchanged.
hash_add(Hashes, #bloom{mb=Mb, a=A, size=Size} = B) ->
    Mask = 1 bsl Mb - 1,
    {I1, I0} = make_indexes(Mask, Hashes),
    case all_set(Mask, I1, I0, A) of
        true -> B;
        false -> B#bloom{size=Size+1, a=set_bits(Mask, I1, I0, A, [])}
    end.
%% Returns the list of bit arrays with one bit set per array, using the
%% same index recurrence as all_set/4; accumulates in reverse and
%% restores order at the end.
set_bits(_Mask, _I1, _I, [], Acc) -> lists:reverse(Acc);
set_bits(Mask, I1, I, [H|T], Acc) ->
    set_bits(Mask, I1, (I+I1) band Mask, T, [bitarray_set(I, H) | Acc]).
%% Flat bit array backed by the array module: each array slot packs ?W
%% bits into one integer (?W is defined earlier in this file).
bitarray_new(N) -> array:new((N-1) div ?W + 1, {default, 0}).

bitarray_set(I, A) ->
    AI = I div ?W,                      % slot index holding bit I
    V = array:get(AI, A),
    V1 = V bor (1 bsl (I rem ?W)),      % set bit I within the slot
    array:set(AI, V1, A).

bitarray_get(I, A) ->
    AI = I div ?W,
    V = array:get(AI, A),
    V band (1 bsl (I rem ?W)) =/= 0.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Returns the first Count elements of a random permutation of List.
simple_shuffle(List, Count) ->
    lists:sublist(simple_shuffle(List), 1, Count).

%% Produces a random permutation of List via decorate-sort-undecorate:
%% tag each element with a random sort key, sort on the key, drop it.
simple_shuffle(List) ->
    Range = 1000 * length(List),
    Tagged = [{rand:uniform(Range), Elem} || Elem <- List],
    [Elem || {_Key, Elem} <- lists:keysort(1, Tagged)].
%% EUnit generator; generous 100s timeout because inserting 5000
%% elements rebuilds functional bit arrays repeatedly.
%% NOTE(review): bloom/1 is a constructor defined earlier in this file.
fixed_case_test_() ->
    {timeout, 100, fun() -> fixed_case(bloom(5000), 5000, 0.001) end}.
%% Exercises a fixed-capacity filter: Size distinct random keys must all
%% be absent before insertion and present afterwards; the size counter
%% may undercount by at most the false-positive rate.
%% NOTE(review): calls bloom:is_element/2 and bloom:add_element/2 --
%% presumably exported aliases of member/2 and add/2; confirm against
%% the export list at the top of this module.
fixed_case(Bloom, Size, FalseRate) ->
    ?assert(bloom:capacity(Bloom) > Size),
    ?assertEqual(0, bloom:size(Bloom)),
    RandomList = simple_shuffle(lists:seq(1,100*Size), Size),
    [?assertEqual(false, bloom:is_element(E, Bloom)) || E <- RandomList],
    Bloom2 =
        lists:foldl(fun(E, Bloom0) ->
                        bloom:add_element(E, Bloom0)
                    end, Bloom, RandomList),
    [?assertEqual(true, bloom:is_element(E, Bloom2)) || E <- RandomList],
    ?assert(bloom:size(Bloom2) > ((1-FalseRate)*Size)),
    ok.
%% Exercises a scalable filter by inserting 10x its nominal size,
%% forcing it to grow new slices; same membership assertions as
%% fixed_case/3.
scalable_case(Bloom, Size, FalseRate) ->
    ?assertEqual(infinity, bloom:capacity(Bloom)),
    ?assertEqual(0, bloom:size(Bloom)),
    RandomList = simple_shuffle(lists:seq(1,100*Size), 10*Size),
    [?assertEqual(false, bloom:is_element(E, Bloom)) || E <- RandomList],
    Bloom2 =
        lists:foldl(fun(E, Bloom0) ->
                        bloom:add_element(E, Bloom0)
                    end, Bloom, RandomList),
    [?assertEqual(true, bloom:is_element(E, Bloom2)) || E <- RandomList],
    ?assert(bloom:size(Bloom2) > ((1-FalseRate)*Size)),
    ok.
%% Smoke test for the scalable variant with a deliberately loose error
%% probability so it runs quickly.
bloom_test() ->
    scalable_case(sbf(1000, 0.2), 1000, 0.2),
    ok.
-endif.
%% (MIT License)
%%
%% Copyright (c) 2014 <NAME>. All rights reserved.
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy of
%% this software and associated documentation files (the "Software"), to deal in
%% the Software without restriction, including without limitation the rights to
%% use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
%% of the Software, and to permit persons to whom the Software is furnished to do
%% so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in all
%% copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
%% SOFTWARE.
-module(exs64_speed).
-export([test_speed/0]).
%% From random module
-type ran() :: {integer(), integer(), integer()}.
%% Benchmark worker: Rounds outer iterations, each drawing ChunkLen
%% uniform floats from exs64 into a list that is reversed and dropped
%% (keeps the allocation pattern comparable across all benchmarks).
-spec test_speed_exs64_uniform_rec1([float()], non_neg_integer(), non_neg_integer(),
        pos_integer(), exs64:state()) -> 'ok'.
test_speed_exs64_uniform_rec1(Buf, 0, _, _, _) ->
    _ = lists:reverse(Buf),
    ok;
test_speed_exs64_uniform_rec1(Buf, Rounds, 0, ChunkLen, Rng) ->
    _ = lists:reverse(Buf),
    test_speed_exs64_uniform_rec1([], Rounds - 1, ChunkLen, ChunkLen, Rng);
test_speed_exs64_uniform_rec1(Buf, Rounds, Left, ChunkLen, Rng) ->
    {Sample, Rng2} = exs64:uniform_s(Rng),
    test_speed_exs64_uniform_rec1([Sample | Buf], Rounds, Left - 1, ChunkLen, Rng2).
%% Times Rounds x Draws float generations with exs64; returns the
%% elapsed runtime in milliseconds as reported by statistics(runtime).
-spec test_speed_exs64_uniform(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
test_speed_exs64_uniform(Rounds, Draws) ->
    _ = statistics(runtime),
    Rng = exs64:seed(),
    ok = test_speed_exs64_uniform_rec1([], Rounds, Draws, Draws, Rng),
    {_, Elapsed} = statistics(runtime),
    Elapsed.
%% Benchmark worker for the legacy stdlib random module, drawing
%% integers in 1..10000. NOTE: `random` was removed in OTP 24; this
%% module deliberately benchmarks against it for comparison.
-spec test_speed_orig_uniform_n_rec1([integer()], non_neg_integer(), non_neg_integer(), pos_integer(), ran()) -> 'ok'.
test_speed_orig_uniform_n_rec1(Buf, 0, _, _, _) ->
    _ = lists:reverse(Buf),
    ok;
test_speed_orig_uniform_n_rec1(Buf, Rounds, 0, ChunkLen, Rng) ->
    _ = lists:reverse(Buf),
    test_speed_orig_uniform_n_rec1([], Rounds - 1, ChunkLen, ChunkLen, Rng);
test_speed_orig_uniform_n_rec1(Buf, Rounds, Left, ChunkLen, Rng) ->
    {Sample, Rng2} = random:uniform_s(10000, Rng),
    test_speed_orig_uniform_n_rec1([Sample | Buf], Rounds, Left - 1, ChunkLen, Rng2).
%% Times Rounds x Draws bounded-integer generations with the legacy
%% random module; returns elapsed runtime in milliseconds.
-spec test_speed_orig_uniform_n(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
test_speed_orig_uniform_n(Rounds, Draws) ->
    _ = statistics(runtime),
    Rng = random:seed(),
    ok = test_speed_orig_uniform_n_rec1([], Rounds, Draws, Draws, Rng),
    {_, Elapsed} = statistics(runtime),
    Elapsed.
%% Benchmark worker: bounded integers (1..10000) from exs64, same
%% list-build-and-discard pattern as the other workers.
-spec test_speed_exs64_uniform_n_rec1([non_neg_integer()], non_neg_integer(), non_neg_integer(), pos_integer(), exs64:state()) -> 'ok'.
test_speed_exs64_uniform_n_rec1(Buf, 0, _, _, _) ->
    _ = lists:reverse(Buf),
    ok;
test_speed_exs64_uniform_n_rec1(Buf, Rounds, 0, ChunkLen, Rng) ->
    _ = lists:reverse(Buf),
    test_speed_exs64_uniform_n_rec1([], Rounds - 1, ChunkLen, ChunkLen, Rng);
test_speed_exs64_uniform_n_rec1(Buf, Rounds, Left, ChunkLen, Rng) ->
    {Sample, Rng2} = exs64:uniform_s(10000, Rng),
    test_speed_exs64_uniform_n_rec1([Sample | Buf], Rounds, Left - 1, ChunkLen, Rng2).
%% Times Rounds x Draws bounded-integer generations with exs64;
%% returns elapsed runtime in milliseconds.
-spec test_speed_exs64_uniform_n(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
test_speed_exs64_uniform_n(Rounds, Draws) ->
    _ = statistics(runtime),
    Rng = exs64:seed(),
    ok = test_speed_exs64_uniform_n_rec1([], Rounds, Draws, Draws, Rng),
    {_, Elapsed} = statistics(runtime),
    Elapsed.
%% Benchmark worker: uniform floats from the legacy random module, same
%% list-build-and-discard pattern as the other workers.
-spec test_speed_orig_uniform_rec1([float()], non_neg_integer(), non_neg_integer(), pos_integer(), ran()) -> 'ok'.
test_speed_orig_uniform_rec1(Buf, 0, _, _, _) ->
    _ = lists:reverse(Buf),
    ok;
test_speed_orig_uniform_rec1(Buf, Rounds, 0, ChunkLen, Rng) ->
    _ = lists:reverse(Buf),
    test_speed_orig_uniform_rec1([], Rounds - 1, ChunkLen, ChunkLen, Rng);
test_speed_orig_uniform_rec1(Buf, Rounds, Left, ChunkLen, Rng) ->
    {Sample, Rng2} = random:uniform_s(Rng),
    test_speed_orig_uniform_rec1([Sample | Buf], Rounds, Left - 1, ChunkLen, Rng2).
%% Times Rounds x Draws float generations with the legacy random
%% module; returns elapsed runtime in milliseconds.
-spec test_speed_orig_uniform(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
test_speed_orig_uniform(Rounds, Draws) ->
    _ = statistics(runtime),
    Rng = random:seed(),
    ok = test_speed_orig_uniform_rec1([], Rounds, Draws, Draws, Rng),
    {_, Elapsed} = statistics(runtime),
    Elapsed.
%% Runs all four benchmarks (100 rounds of 10000 draws each) and prints
%% the four runtimes, in milliseconds, as a single tuple.
-spec test_speed() -> 'ok'.
test_speed() ->
    OrigUniform = test_speed_orig_uniform(100, 10000),
    OrigUniformN = test_speed_orig_uniform_n(100, 10000),
    Exs64Uniform = test_speed_exs64_uniform(100, 10000),
    Exs64UniformN = test_speed_exs64_uniform_n(100, 10000),
    io:format("{orig_uniform, orig_uniform_n, exs64_uniform, exs64_uniform_n}~n~p~n",
              [{OrigUniform, OrigUniformN, Exs64Uniform, Exs64UniformN}]).
%% -------------------------------------------------------------------
%%
%% riaknostic - automated diagnostic tools for Riak
%%
%% Copyright (c) 2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Utility functions for riaknostic.
%% @end
-module(riaknostic_util).
-export([short_name/1,
run_command/1,
log/2,log/3,
binary_to_float/1]).
%% @doc Converts a check module name into a short name that can be used
%% to refer to a check on the command line. For example,
%% <code>riaknostic_check_disk</code> becomes <code>"disk"</code>.
-spec short_name(module()) -> iodata() | unicode:charlist().
short_name(Mod) when is_atom(Mod) ->
    FullName = atom_to_list(Mod),
    re:replace(FullName, "riaknostic_check_", "", [{return, list}]).
%% @doc Runs a shell command and returns the output. stderr is
%% redirected to stdout so its output will be included.
%% NOTE(review): Command is executed by the shell via open_port/2 --
%% callers must not pass untrusted input.
-spec run_command(Command::iodata()) -> StdOut::iodata().
run_command(Command) ->
    riaknostic_util:log(debug, "Running shell command: ~s", [Command]),
    %% exit_status makes the port deliver {Port, {exit_status, _}} when
    %% the command finishes, which is what terminates do_read/2.
    Port = erlang:open_port({spawn,Command},[exit_status, stderr_to_stdout]),
    do_read(Port, []).
%% Reads all output from Port until the command exits, returning the
%% collected stdout as a flat string.
%%
%% Output chunks are collected in reverse and joined once on exit,
%% avoiding the O(n^2) cost of appending with `++` on every message
%% (the original accumulated with `Acc ++ StdOut`).
do_read(Port, Acc) ->
    receive
        {Port, {data, StdOut}} ->
            riaknostic_util:log(debug, "Shell command output: ~n~s~n",[StdOut]),
            do_read(Port, [StdOut | Acc]);
        {Port, {exit_status, _}} ->
            %% exit_status arrives after all data; join chunks in order.
            lists:append(lists:reverse(Acc));
        Other ->
            %% Unexpected message: report it and keep reading.
            io:format("~w", [Other]),
            do_read(Port, Acc)
    end.
%% @doc Converts a binary containing a text representation of a float
%% into a float type. The text must include a decimal point (e.g.
%% <<"3.0">>), as required by list_to_float/1.
-spec binary_to_float(binary()) -> float().
binary_to_float(Bin) ->
    AsList = binary_to_list(Bin),
    list_to_float(AsList).
%% Logs Format/Terms at Level: echoed to stdout when the configured
%% riaknostic log level permits, and always forwarded to lager.
log(Level, Format, Terms) ->
    Echo = should_log(Level),
    Echo andalso io:format(lists:concat(["[", Level, "] ", Format, "~n"]), Terms),
    lager:log(Level, self(), Format, Terms).
%% Logs a preformatted String at Level: echoed to stdout when the
%% configured riaknostic log level permits, and always forwarded to
%% lager.
log(Level, String) ->
    Echo = should_log(Level),
    Echo andalso io:format(lists:concat(["[", Level, "] ", String, "~n"])),
    lager:log(Level, self(), String).
%% True when messages at Level should be echoed to stdout, based on the
%% riaknostic log_level application env (default: info) and lager's
%% numeric level ordering.
%% NOTE(review): assumes lager_util:level_to_num/1 maps more verbose
%% levels to larger numbers -- confirm against the lager version in use.
should_log(Level) ->
    Configured =
        case application:get_env(riaknostic, log_level) of
            {ok, EnvLevel} -> EnvLevel;
            undefined -> info
        end,
    lager_util:level_to_num(Configured) >= lager_util:level_to_num(Level).
% @copyright 2009-2011 Zuse Institute Berlin,
% onScale solutions GmbH
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Part of generic Paxos-Consensus implementation
%% The state needed for a single acceptor instance.
%% @end
%% @version $Id$
-module(acceptor_state).
-author('<EMAIL>').
-vsn('$Id$').
%% Operations on acceptor_state
-export([new/1]).
-export([get_learners/1, set_learners/2]).
-export([get_raccepted/1]).
-export([get_value/1]).
-export([add_prepare_msg/2]).
-export([add_accept_msg/3]).
-export([accepted/1]).
-type acceptor_state() ::
{ any(), %% PaxosID,
[ comm:mypid() ], %% Learners,
non_neg_integer(), %% Rack
non_neg_integer() | -1, %% Raccepted
any() %% Value
}.
-spec new(any()) -> acceptor_state().
%% Fresh acceptor: no learners yet, round 0 promised, nothing accepted
%% so far (round -1) and no value decided.
new(PaxosID) ->
    {PaxosID, [], 0, -1, paxos_no_value_yet}.
%% Learner pids live in slot 2 of the state tuple.
-spec get_learners(acceptor_state()) -> [ comm:mypid() ].
get_learners(AccState) -> element(2, AccState).
-spec set_learners(acceptor_state(), [ comm:mypid() ]) -> acceptor_state().
set_learners(AccState, NewLearners) -> setelement(2, AccState, NewLearners).
%% Rack (highest round promised via prepare) lives in slot 3.
-spec get_rack(acceptor_state()) -> non_neg_integer().
get_rack(AccState) -> element(3, AccState).
-spec set_rack(acceptor_state(), non_neg_integer()) -> acceptor_state().
set_rack(AccState, NewRound) -> setelement(3, AccState, NewRound).
%% Raccepted (highest round whose proposal was accepted, -1 if none)
%% lives in slot 4.
-spec get_raccepted(acceptor_state()) -> non_neg_integer().
get_raccepted(AccState) -> element(4, AccState).
-spec set_raccepted(acceptor_state(), non_neg_integer()) -> acceptor_state().
set_raccepted(AccState, NewRound) -> setelement(4, AccState, NewRound).
%% The last accepted value (or paxos_no_value_yet) lives in slot 5.
-spec get_value(acceptor_state()) -> any().
get_value(AccState) -> element(5, AccState).
-spec set_value(acceptor_state(), any()) -> acceptor_state().
set_value(AccState, NewValue) -> setelement(5, AccState, NewValue).
-spec add_prepare_msg(acceptor_state(), non_neg_integer()) ->
        {ok, acceptor_state()} | {dropped, non_neg_integer()}.
%% Handles a Paxos prepare: the promise is made (Rack advanced) only
%% when InRound is newer than both the promised and the accepted round;
%% otherwise the message is dropped and the current promise reported.
add_prepare_msg(State, InRound) ->
    Rack = get_rack(State),
    Raccepted = get_raccepted(State),
    if
        InRound > Rack, InRound > Raccepted ->
            {ok, set_rack(State, InRound)};
        true ->
            {dropped, Rack}
    end.
-spec add_accept_msg(acceptor_state(), non_neg_integer(), any()) ->
        {ok, acceptor_state()} | {dropped, non_neg_integer()}.
%% Handles a Paxos accept: the proposal is stored when InRound honours
%% the current promise (>= Rack) and is newer than anything already
%% accepted; otherwise it is dropped and the current promise reported.
add_accept_msg(State, InRound, InProposal) ->
    Rack = get_rack(State),
    Raccepted = get_raccepted(State),
    if
        InRound >= Rack, InRound > Raccepted ->
            {ok, set_value(set_raccepted(State, InRound), InProposal)};
        true ->
            {dropped, Rack}
    end.
-spec accepted(acceptor_state()) -> boolean().
%% True once any proposal value has replaced the paxos_no_value_yet
%% placeholder.
accepted(State) -> get_value(State) =/= paxos_no_value_yet.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% todo
% - remove existance check on increment(), decrement() and record(). have
% modules initialize counters on startup.
-module(couch_stats_collector).
-behaviour(gen_server).
-export([start/0, stop/0]).
-export([all/0, all/1, get/1, increment/1, decrement/1, record/2, clear/1]).
-export([track_process_count/1, track_process_count/2]).
-export([init/1, terminate/2, code_change/3]).
-export([handle_call/3, handle_cast/2, handle_info/2]).
-define(HIT_TABLE, stats_hit_table).
-define(ABS_TABLE, stats_abs_table).
%% Starts the collector registered locally as ?MODULE.
%% NOTE(review): despite the name, this links to the caller
%% (gen_server:start_link) -- typically the supervisor.
start() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% Synchronously stops the collector (handled by handle_call(stop, ...)).
stop() ->
    gen_server:call(?MODULE, stop).
%% Returns every stat: incremental counters as {Key, Count} plus
%% absolute stats grouped as {Key, [Values]}.
all() ->
    ets:tab2list(?HIT_TABLE) ++ abs_to_list().
%% Returns all stats of one kind: incremental counters straight from the
%% hit table, or absolute stats grouped per key.
all(Type) ->
    case Type of
        incremental -> ets:tab2list(?HIT_TABLE);
        absolute -> abs_to_list()
    end.
%% Returns the current value for Key: the integer counter for an
%% incremental stat, the list of recorded samples for an absolute stat,
%% or nil when the key is unknown. Overrides the auto-imported get/1
%% BIF within this module.
%% NOTE(review): unlike increment/2 and record/2, this does not
%% normalize Key via make_key/1 -- callers must pass already-normalized
%% keys; confirm.
get(Key) ->
    case ets:lookup(?HIT_TABLE, Key) of
        [] ->
            case ets:lookup(?ABS_TABLE, Key) of
                [] ->
                    nil;
                AbsVals ->
                    lists:map(fun({_, Value}) -> Value end, AbsVals)
            end;
        [{_, Counter}] ->
            Counter
    end.
%% Atomically bumps the incremental counter for Key, creating it at 1 on
%% first use. try/catch replaces the old-style `catch` so only the
%% expected badarg (missing row/table) triggers the insert; any other
%% error propagates instead of being silently swallowed.
increment(Key) ->
    Key2 = make_key(Key),
    try
        _ = ets:update_counter(?HIT_TABLE, Key2, 1),
        ok
    catch
        error:badarg ->
            true = ets:insert(?HIT_TABLE, {Key2, 1}),
            ok
    end.
%% Atomically decrements the incremental counter for Key, creating it at
%% -1 on first use. try/catch replaces the old-style `catch` so only the
%% expected badarg (missing row/table) triggers the insert; any other
%% error propagates instead of being silently swallowed.
decrement(Key) ->
    Key2 = make_key(Key),
    try
        _ = ets:update_counter(?HIT_TABLE, Key2, -1),
        ok
    catch
        error:badarg ->
            true = ets:insert(?HIT_TABLE, {Key2, -1}),
            ok
    end.
%% Appends a sample Value for Key; the ABS table is a duplicate_bag, so
%% every call adds another {Key, Value} row.
record(Key, Value) ->
    true = ets:insert(?ABS_TABLE, {make_key(Key), Value}).
%% Drops all recorded samples for Key.
clear(Key) ->
    true = ets:delete(?ABS_TABLE, make_key(Key)).
%% Increments Stat for the calling (or given) process and spawns a
%% watcher that decrements it again when that process dies.
track_process_count(Stat) ->
    track_process_count(self(), Stat).

track_process_count(Pid, Stat) ->
    MonitorFun = fun() ->
        Ref = erlang:monitor(process, Pid),
        receive {'DOWN', Ref, _, _, _} -> ok end,
        couch_stats_collector:decrement(Stat)
    end,
    %% Best-effort: if the collector tables are unavailable the
    %% increment fails and no watcher is spawned (so the counter never
    %% goes negative). try/catch replaces the old-style `catch`.
    try couch_stats_collector:increment(Stat) of
        ok -> spawn(MonitorFun);
        _ -> ok
    catch
        _:_ -> ok
    end.
%% The hit table is a set of {Key, Counter}; the abs table is a
%% duplicate_bag so each record/2 call appends another {Key, Value} row.
%% Both are public so client processes update them without routing every
%% operation through this server.
init(_) ->
    ets:new(?HIT_TABLE, [named_table, set, public]),
    ets:new(?ABS_TABLE, [named_table, duplicate_bag, public]),
    {ok, nil}.
%% Nothing to clean up; the named ets tables die with this process.
terminate(_Reason, _State) ->
    ok.
%% Only 'stop' is supported (see stop/0).
handle_call(stop, _, State) ->
    {stop, normal, stopped, State}.
%% NOTE(review): only the atom 'foo' is matched; any other cast crashes
%% this server with function_clause. Looks like a placeholder clause --
%% confirm nothing casts other messages before widening the pattern.
handle_cast(foo, State) ->
    {noreply, State}.
%% Silently ignores any stray message.
handle_info(_Info, State) ->
    {noreply, State}.
%% No state migration needed across code upgrades.
code_change(_OldVersion, State, _Extra) ->
    {ok, State}.
%% Normalizes a stats key for storage: an integer sub-key (e.g. an HTTP
%% status code) is converted to an atom so keys compare consistently;
%% anything else passes through untouched.
%% NOTE(review): list_to_atom/1 grows the atom table; assumes the set of
%% integer keys is small and program-controlled -- confirm callers.
make_key({Section, Code}) when is_integer(Code) ->
    {Section, list_to_atom(integer_to_list(Code))};
make_key(Other) ->
    Other.
%% Collapses the duplicate_bag ABS table into [{Key, [Values]}]: rows
%% are sorted so equal keys are adjacent, then folded left-to-right,
%% merging each value into the group at the head of the accumulator.
abs_to_list() ->
    Sorted = lists:sort(ets:tab2list(?ABS_TABLE)),
    Group = fun({Key, Val}, Acc) ->
                case Acc of
                    [{Key, Vals} | Rest] ->
                        %% Same key as the previous row: extend its group.
                        [{Key, [Val | Vals]} | Rest];
                    _ ->
                        %% First row, or a new key: start a fresh group.
                        [{Key, [Val]} | Acc]
                end
            end,
    lists:foldl(Group, [], Sorted).
%%% @doc Merkle Tree is a data structure where every non-leaf node contains the
%%% hash of the labels of its child nodes, and the leaves have their own values
%%% (or key/value pair) hashed. Because of this characteristic, Merkle Trees are
%%% used to verify that two or more parties have the same data without
%%% exchanging the entire data collection. For more information about Merkle
%%% Trees and other use cases you can visit its Wikipedia article: [https://en.wikipedia.org/wiki/Merkle_tree]
%%%
%%% This module implements a binary Merkle Tree that is built based on a list of
%%% `{Key, Value}' pairs. The tree is sorted but might not be balanced.
%%%
%%% ```
%%% ┌───────────────┐
%%% │ Root │
%%% ┌─────────────────│Hash(AA1 + BB2)│───────────────┐
%%% │ └───────────────┘ │
%%% │ │
%%% │ │
%%% │ │
%%% │ │
%%% ┌─────────────┐ ┌─────────────┐
%%% │ AA1 │ │ BB2 │
%%% ┌────│Hash(A1 + B1)│──────┐ ┌──│Hash(C1 + D1)│────────┐
%%% │ └─────────────┘ │ │ └─────────────┘ │
%%% │ │ │ │
%%% │ │ │ │
%%% ┌───────────┐ ┌───────────┐ ┌───────────┐ ┌───────────┐
%%% │ A1 │ │ B1 │ │ C1 │ │ D1 │
%%% ┌─│Hash(A + B)│─┐ ┌─│Hash(C + D)│─┐ ┌─│Hash(E + F)│─┐ ┌─│Hash(G + H)│─┐
%%% │ └───────────┘ │ │ └───────────┘ │ │ └───────────┘ │ │ └───────────┘ │
%%% │ │ │ │ │ │ │ │
%%% │ │ │ │ │ │ │ │
%%% ┌───────┐ ┌───────┐ ┌───────┐ ┌───────┐ ┌───────┐ ┌───────┐ ┌───────┐ ┌───────┐
%%% │ A │ │ B │ │ C │ │ D │ │ E │ │ F │ │ G │ │ H │
%%% │Hash(A)│ │Hash(B)│ │Hash(C)│ │Hash(D)│ │Hash(E)│ │Hash(F)│ │Hash(G)│ │Hash(H)│
%%% └───────┘ └───────┘ └───────┘ └───────┘ └───────┘ └───────┘ └───────┘ └───────┘
%%% '''
%%%
%%% Every leaf node will have its `left' and `right' pointers pointing to
%%% `nil', and it will contain a hash based on the key and value. The inner
%%% nodes will point to their respective leaf nodes children, and its hash will
%%% be `Hash(LeftHash + RightHash)'.
%%% @end
-module(merkletree).
-define(HASH, sha256).
-record(inner, {key :: key() | 'undefined',
hash :: hash(),
height :: non_neg_integer(),
min_key :: key(),
max_key :: key(),
left :: tree(),
right :: tree()}).
-type inner() :: #inner{}.
-type tree() :: inner() | 'nil'.
-type key() :: binary().
-type value() :: binary().
-type hash() :: binary().
%% API exports
-export([build/1, diff/2, keys/1]).
%%====================================================================
%% API functions
%%====================================================================
%% @doc Creates a tree from a list of `{Key, Value}' pairs.
-spec build([{key(), value()}]) -> tree().
build(Pairs) ->
    Sorted = lists:keysort(1, Pairs),
    build_tree(lists:map(fun to_inner/1, Sorted)).
%% @doc Returns the list of `Key' that are different between the given trees.
-spec diff(tree(), tree()) -> [key()].
diff(TreeA, TreeB) ->
    Unmatched = remove_equal_elements(dirty_diff(TreeA, TreeB), sets:new()),
    lists:usort([Key || {Key, _Hash} <- Unmatched]).
%% @doc Returns a sorted list of all keys from the given tree.
-spec keys(tree()) -> [key()].
keys(Tree) ->
    Pairs = lists:usort(extract_keys(Tree)),
    [Key || {Key, _Hash} <- Pairs].
%%====================================================================
%% Internal functions
%%====================================================================
-spec build_tree([tree()]) -> tree().
%% Reduces a level of nodes to a single root by repeatedly pairing
%% neighbours into parents.
build_tree([]) ->
    'nil';
build_tree([Root]) ->
    Root;
build_tree(Nodes) ->
    build_tree(lists:reverse(combine(Nodes, []))).
-spec combine([tree()], [tree()]) -> [tree()].
%% Pairs up neighbouring nodes into parents, accumulating in reverse
%% (the caller reverses once); an odd trailing node is carried up as-is.
combine([], Parents) ->
    Parents;
combine([Lone], Parents) ->
    [Lone | Parents];
combine([Left, Right | Rest], Parents) ->
    combine(Rest, [to_inner(Left, Right) | Parents]).
-spec to_inner({key(), value()}) -> inner().
%% Leaf node: hash over Key ++ Value; spans exactly its own key.
to_inner({Key, Value}) ->
    Digest = crypto:hash(?HASH, <<Key/binary, Value/binary>>),
    #inner{key = Key, min_key = Key, max_key = Key, height = 0,
           left = 'nil', right = 'nil', hash = Digest}.

-spec to_inner(inner(), inner()) -> inner().
%% Parent node: hash of the children's hashes; key span and height are
%% derived from the children.
to_inner(Left = #inner{hash = LHash, min_key = MinKey, height = LH},
         Right = #inner{hash = RHash, max_key = MaxKey, height = RH}) ->
    #inner{left = Left, right = Right,
           height = max(LH, RH) + 1,
           min_key = MinKey, max_key = MaxKey,
           hash = crypto:hash(?HASH, <<LHash/binary, RHash/binary>>)}.
-spec extract_keys(tree()) -> [{key(), hash()}].
%% Collects the {Key, Hash} pair of every leaf, left to right.
%% Both recursive results are already flat lists of pairs, so plain
%% concatenation replaces the original lists:flatten/1 round trip
%% (flatten re-walked the whole sub-result at every inner node).
extract_keys('nil') ->
    [];
extract_keys(#inner{key = Key, hash = Hash, left = 'nil', right = 'nil'}) ->
    [{Key, Hash}];
extract_keys(#inner{left = Left, right = Right}) ->
    extract_keys(Left) ++ extract_keys(Right).
-spec remove_equal_elements([{key(), hash()}], sets:set()) -> [{key(), hash()}].
%% Keeps the pairs that occur an odd number of times: each occurrence
%% toggles membership in the set, so pairs present in both trees'
%% dirty-diff output cancel out.
remove_equal_elements([], Seen) ->
    sets:to_list(Seen);
remove_equal_elements([Pair | Rest], Seen) ->
    Updated =
        case sets:is_element(Pair, Seen) of
            true -> sets:del_element(Pair, Seen);
            false -> sets:add_element(Pair, Seen)
        end,
    remove_equal_elements(Rest, Updated).
%% This function returns the list of `{Key, Hash}' pairs that are different
%% between the given trees.
%%
%% The idea is to compare the hashes and if they are different the next
%% iteration only goes to the possible branch that might contain the entire tree.
%%
%% Let's call the given trees T1 and T2, respectively. If T1 is bigger (T1#height > T2#height) we switch them.
%%
%% When T1 might be entirely contained in a branch of T2, the diff continues
%% solely on that branch and the keys from the other T2 branch are collected.
%%
%% If T1 keys overlap between the two left and right branches of T2 we continue the diff by each branch.
-spec dirty_diff(tree(), tree()) -> [{key(), hash()}].
%% Identical subtrees (same term, hence same hash) contribute nothing.
dirty_diff(T, T) ->
    [];
%% Against an empty tree, every leaf of the other side differs.
dirty_diff('nil', T2) ->
    lists:flatten([extract_keys(T2)]);
dirty_diff(T1, 'nil') ->
    lists:flatten([extract_keys(T1)]);
%% Normalize so T1 is never the taller tree.
dirty_diff(T1 = #inner{hash = _Hash1, height = LHeight}, T2 = #inner{hash = _Hash2, height = RHeight}) when LHeight > RHeight ->
    dirty_diff(T2, T1);
%% Leaf vs tree: descend only into the branch that can contain the
%% leaf's key; the other branch's leaves all differ.
dirty_diff(Leaf = #inner{left = 'nil', right = 'nil'}, Tree = #inner{}) ->
    Diff = case contained_branch(Leaf, Tree) of
               left -> [dirty_diff(Leaf, Tree#inner.left), extract_keys(Tree#inner.right)];
               right -> [dirty_diff(Leaf, Tree#inner.right), extract_keys(Tree#inner.left)];
               none -> [extract_keys(Leaf), extract_keys(Tree)]
           end,
    lists:flatten(Diff);
%% Tree vs tree: if T1 fits entirely within one branch of T2, diff only
%% that branch; otherwise diff branch by branch.
dirty_diff(T1 = #inner{hash = _Hash1}, T2 = #inner{hash = _Hash2}) ->
    Diff = case contained_branch(T1, T2) of
               left -> [dirty_diff(T1, T2#inner.left), extract_keys(T2#inner.right)];
               right -> [dirty_diff(T1, T2#inner.right), extract_keys(T2#inner.left)];
               none -> [dirty_diff(T1#inner.left, T2#inner.left), dirty_diff(T1#inner.right, T2#inner.right)]
           end,
    lists:flatten(Diff).
-spec contains(tree(), tree()) -> boolean().
%% Whether the key span of the first tree lies within the second's.
%% NOTE(review): the leaf clause only checks MinKey >= Key (the leaf's
%% key is at-or-before the subtree's minimum), which looks asymmetric
%% with the range clause below -- confirm the intended semantics before
%% touching this.
contains(#inner{key = Key, left = 'nil', right = 'nil'}, #inner{min_key = MinKey}) ->
    MinKey >= Key;
contains(#inner{min_key = MinKey1, max_key = MaxKey1}, #inner{min_key = MinKey2, max_key = MaxKey2}) when MinKey1 >= MinKey2, MaxKey1 =< MaxKey2 ->
    true;
contains(_T1, _T2) ->
    false.
-spec contained_branch(tree(), tree()) -> 'left' | 'right' | 'none'.
%% Decides whether the first tree's key span fits entirely inside the
%% left or right branch of the second, preferring left when both match.
%% Non-leaf first trees whose span ends before the second even starts
%% cannot be contained at all.
contained_branch(#inner{max_key = MaxA, left = #inner{}, right = #inner{}},
                 #inner{min_key = MinB}) when MaxA =< MinB ->
    none;
contained_branch(TreeA = #inner{}, #inner{left = LeftB, right = RightB}) ->
    %% contains/2 is pure, so probing both branches up front preserves
    %% the original left-first preference.
    case {contains(TreeA, LeftB), contains(TreeA, RightB)} of
        {true, _} -> left;
        {_, true} -> right;
        _ -> none
    end.
%% @doc
%% Stages are data-exchange steps that send and/or receive data
%% from other stages.
%%
%% When a stage sends data, it acts as a producer. When it receives
%% data, it acts as a consumer. Stages may take both producer and
%% consumer roles at once.
%%
%% ## Stage types
%%
%% Besieds taking both producer and consumer roles, a stage may be
%% called "source" if it only produces items or called "sink" if it
%% only consumes items.
%%
%% For example, imagine the stages below where A sends data to B
%% that sends data to C:
%%
%% [A] -> [B] -> [C]
%%
%% we conclude that:
%%
%% * A is only a producer (and therefore a source)
%% * B is both producer and consumer
%% * C is only a consumer (and therefore a sink)
%%
%% As we will see in the upcoming Examples section, we must
%% specify the type of the stage when we implement each of them.
%%
%% To start the flow of events, we subscribe consumers to
%% producers. Once the communication channel between them is
%% established, consumers will ask the producers for events.
%% We typically say the consumer is sending demand upstream.
%% Once demand arrives, the producer will emit items, never
%% emitting more items than the consumer asked for. This provides
%% a back-pressure mechanism.
%%
%% A consumer may have multiple producers and a producer may have
%% multiple consumers. When a consumer asks for data, each producer
%% is handled separately, with its own demand. When a producer
%% receives demand and sends data to multiple consumers, the demand
%% is tracked and the events are sent by a dispatcher. This allows
%% producers to send data using different "strategies". See
%% gen_stage_dispatcher for more information.
%%
%% Many developers tend to create layers of stages, such as A, B and
%% C, for achieving concurrency. If all you want is concurrency, starting
%% multiple instances of the same stage is enough. Layers in gen_stage
%% must be created when there is a need for back-pressure or to route the
%% data in different ways.
%%
%% For example, if you need the data to go over multiple steps but
%% without a need for back-pressure or without a need to break the
%% data apart, do not design it as such:
%%
%% [Producer] -> [Step 1] -> [Step 2] -> [Step 3]
%%
%% Instead it is better to design it as:
%%
%% [Consumer]
%% /
%% [Producer]-<-[Consumer]
%% \
%% [Consumer]
%%
%% where "Consumer" are multiple processes running the same code that
%% subscribe to the same "Producer".
%% @end
-module(gen_stage).
-behaviour(gen_server).
-export([
start_link/2,
start_link/3,
start_link/4,
start/2,
start/3,
start/4,
sync_info/2,
sync_info/3,
async_info/2,
demand/1,
demand/2,
sync_subscribe/2,
sync_subscribe/3,
sync_resubscribe/4,
sync_resubscribe/5,
async_subscribe/2,
async_resubscribe/4,
ask/2,
ask/3,
cancel/2,
cancel/3,
call/2,
call/3,
cast/2,
reply/2,
stop/1,
stop/3,
estimate_buffered_count/1,
estimate_buffered_count/2,
consumer_receive/4,
consumer_subscribe/4
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-define(TIMEOUT, 5000).
-record(stage, {
mod,
state,
type,
dispatcher_mod,
dispatcher_state,
buffer,
buffer_keep,
events = forward,
monitors = #{},
producers = #{},
consumers = #{}
}).
% gen_server type
-type server() :: pid() | atom() | {global, term()} | {via, module(), term()} | {atom(), node()}.
-type on_start() :: {ok, pid()} | ignore | {error, {already_started, pid()} | term()}.
-type name() :: {local, atom()} | {global, term()} | {via, module(), term()}.
-type options() :: [option()].
-type option() ::
{debug, [debug()]}
| {timeout, timeout()}
| {spawn_opt, [term()]}
| {hibernate_after, timeout()}.
-type debug() :: trace | log | statistics | {log_to_file, iolist() | binary()}.
%-type type() :: producer | consumer | producer_consumer.
-type subscription_option() ::
{cancel, permanent | transient | temporary}
| {to, server()}
| {min_demand, integer()}
| {max_demand, integer()}
| {atom(), term()}.
-type subscription_options() :: [subscription_option()].
-type producer_only_option() :: {demand, forward | accumulate}.
-type producer_and_producer_consumer_option() ::
{buffer_size, non_neg_integer() | infinity}
| {buffer_keep, first | last}
| {dispatcher, module() | {module(), gen_stage_dispatcher:options()}}.
-type consumer_and_producer_consumer_option() ::
{subscribe_to, [module() | {module(), subscription_options()}]}.
-type producer_option() :: producer_only_option() | producer_and_producer_consumer_option().
-type consumer_option() :: consumer_and_producer_consumer_option().
-type producer_consumer_option() ::
producer_and_producer_consumer_option() | consumer_and_producer_consumer_option().
-type stage() :: pid() | atom() | {global, term()} | {via, module(), term()} | {atom(), node()}.
-type subscription_tag() :: reference().
-type from() :: {pid(), subscription_tag()}.
%% @doc
%% Invoked when the server is started.
%%
%% `start_link/3' (or `start/3') will block until this callback returns.
%% `Args' is the argument term (second argument) passed to `start_link/3'
%% (or `start/3').
%%
%% In case of successful start, this callback must return a tuple
%% where the first element is the stage type, which is one of:
%% * `producer'
%% * `consumer'
%% * `producer_consumer' (if the stage is acting as both)
%%
%% The returned tuple may also contain 3 or 4 elements. The third
%% element may be the `hibernate' atom or a set of options defined
%% below.
%%
%% Returning `ignore` will cause `start_link/3` to return `ignore`
%% and the process will exit normally without entering the loop or
%% calling `terminate/2`.
%%
%% Returning `{stop, Reason}` will cause `start_link/3` to return
%% `{error, Reason}` and the process to exit with reason `reason`
%% without entering the loop or calling `terminate/2`.
%% @end
-callback init(Args :: term()) ->
{producer, State}
| {producer, State, [producer_option()]}
| {producer_consumer, State}
| {producer_consumer, State, [producer_consumer_option()]}
| {consumer, State}
| {consumer, State, [consumer_option()]}
| ignore
| {stop, Reason :: any()}
when State :: any().
%% @doc
%% Invoked on `producer` stages.
%%
%% This callback is invoked on `producer` stages with the demand from
%% consumers/dispatcher. The producer that implements this callback must
%% either store the demand, or return the amount of requested events.
%%
%% Must always be explicitly implemented by `producer` stages.
%% @end
-callback handle_demand(Demand :: pos_integer(), State :: term()) ->
{noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason, NewState}
when NewState :: term(), Reason :: term(), Event :: term().
%% @doc
%% Invoked when a consumer subscribes to a producer.
%%
%% This callback is invoked in both producers and consumers.
%% `producer_or_consumer` will be `producer` when this callback is
%% invoked on a consumer that subscribed to a producer, and `consumer`
%% if when this callback is invoked on producers a consumer subscribed
%% to.
%% @end
-callback handle_subscribe(Type :: producer | consumer, subscription_options(), from(), State :: term()) ->
{automatic | manual, NewState}
| {stop, Reason, NewState}
when NewState :: term(), Reason :: term().
%% @doc
%% Invoked when items are discarded from the buffer.
%%
%% It receives the number of excess (discarded) items from this invocation.
%% This callback returns a boolean that controls whether the default error log for discarded items is printed or not.
%% Return true to print the log, return false to skip the log.
%% @end
-callback format_discarded(Discarded :: non_neg_integer(), State :: term()) -> boolean().
%% @doc
%% Invoked when a consumer is no longer subscribed to a producer.
%%
%% It receives the cancellation reason, the `from` tuple representing the
%% cancelled subscription and the state. The `cancel_reason` will be a
%% `{cancel, _}` tuple if the reason for cancellation was a `gen_stage:cancel/2`
%% call. Any other value means the cancellation reason was due to an EXIT.
%% @end
-callback handle_cancel(CancellationReason :: {cancel | down, Reason}, from(), State :: term()) ->
{noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason, NewState}
when Event :: term(), NewState :: term(), Reason :: term().
%% @doc
%% Invoked on `producer_consumer` and `consumer` stages to handle events.
%%
%% Must always be explicitly implemented by such types.
%%
%% Return values are the same as `c:handle_cast/2`.
%% @end
-callback handle_events(Events :: [Event], from(), State :: term()) ->
{noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason, NewState}
when NewState :: term(), Reason :: term(), Event :: term().
%% @doc
%% Invoked to handle synchronous `call/3` messages.
%%
%% `call/3` will block until a reply is received (unless the call times out or
%% nodes are disconnected).
%% @end
-callback handle_call(Request :: term(), From :: {pid(), term()}, State :: term()) ->
{reply, Reply, [Event], NewState}
| {reply, Reply, [Event], NewState, hibernate}
| {noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason, Reply, NewState}
| {stop, Reason, NewState}
when Reply :: term(), NewState :: term(), Reason :: term(), Event :: term().
%% @doc Invoked to handle asynchronous `cast/2` messages.
-callback handle_cast(Request :: term(), State :: term()) ->
{noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason :: term(), NewState}
when NewState :: term(), Event :: term().
%% @doc
%% Invoked to handle all other messages.
%%
%% `message` is the message and `state` is the current state of the `gen_stage`.
%% When a timeout occurs the message is `timeout`.
%%
%% Return values are the same as `c:handle_cast/2`.
%% @end
-callback handle_info(Message :: term(), State :: term()) ->
{noreply, [Event], NewState}
| {noreply, [Event], NewState, hibernate}
| {stop, Reason :: term(), NewState}
when NewState :: term(), Event :: term().
%% @doc
%% The same as `gen_server:terminate/2`.
%% @end
-callback terminate(Reason, State :: term()) -> term()
when Reason :: normal | shutdown | {shutdown, term()} | term().
%% @doc
%% The same as `gen_server:code_change/3`.
%% @end
-callback code_change(OldVsn :: term(), State :: term(), Extra :: term()) ->
{ok, NewState :: term()} | {error, Reason :: term()}.
%% @doc The same as `gen_server:format_status/3`.
-callback format_status(normal | terminate, [{term(), term()} | (State :: term())]) ->
Status :: term().
-optional_callbacks(
[
% gen_stage
handle_subscribe/4,
handle_cancel/3,
handle_demand/2,
handle_events/3,
format_discarded/2,
% gen_server
code_change/3,
format_status/2,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2
]).
%% @doc
%% Starts a `gen_stage` process linked to the current process.
%%
%% This is often used to start the `gen_stage` as part of a supervision tree.
%%
%% Once the server is started, the `init/1` function of the given `Mod` is
%% called with `Args` as its arguments to initialize the stage. To ensure a
%% synchronized start-up procedure, this function does not return until `init/1`
%% has returned.
%%
%% Note that a `gen_stage` started with `start_link/3` is linked to the
%% parent process and will exit in case of crashes from the parent. The `gen_stage`
%% will also exit due to the `normal` reason in case it is configured to trap
%% exits in the `init/1` callback.
%% @end
-spec start_link(module(), term()) -> on_start().
%% Convenience wrapper: start_link/3 with empty options.
start_link(Mod, Args) ->
    start_link(Mod, Args, []).

-spec start_link(module(), term(), options()) -> on_start().
%% Starts an anonymous linked stage via gen_server.
start_link(Mod, Args, Options) ->
    gen_server:start_link(?MODULE, {Mod, Args}, Options).

-spec start_link(name(), module(), term(), options()) -> on_start().
%% Starts a named linked stage; Name is a gen_server registration tuple.
start_link(Name, Mod, Args, Options) when is_atom(Mod), is_list(Options) ->
    gen_server:start_link(Name, ?MODULE, {Mod, Args}, Options).
%% @doc
%% Starts a `GenStage` process without links (outside of a supervision tree).
%%
%% See `start_link/3` for more information.
%% @end
-spec start(module(), term()) -> on_start().
%% Convenience wrapper: start/3 with empty options.
start(Mod, Args) ->
    start(Mod, Args, []).

-spec start(module(), term(), options()) -> on_start().
%% Starts an anonymous unlinked stage via gen_server.
start(Mod, Args, Options) ->
    gen_server:start(?MODULE, {Mod, Args}, Options).

-spec start(name(), module(), term(), options()) -> on_start().
%% Starts a named unlinked stage.
start(Name, Mod, Args, Options) when is_atom(Mod), is_list(Options) ->
    gen_server:start(Name, ?MODULE, {Mod, Args}, Options).
%% @doc
%% Queues an info message that is delivered after all currently buffered events.
%%
%% This call is synchronous and will return after the stage has queued
%% the info message. The message will be eventually handled by the
%% `handle_info/2` callback.
%%
%% If the stage is a consumer, it does not have buffered events, so the
%% message is queued immediately.
%%
%% This function will return `ok` if the info message is successfully queued.
%% @end
-spec sync_info(stage(), term()) -> ok.
%% Uses the module-wide default timeout.
sync_info(Stage, Msg) ->
    sync_info(Stage, Msg, ?TIMEOUT).

-spec sync_info(stage(), term(), timeout()) -> ok.
%% The '$info' request is handled by producer_info/2 in handle_call/3.
sync_info(Stage, Msg, Timeout) ->
    call(Stage, {'$info', Msg}, Timeout).
%% @doc
%% Asynchronously queues an info message that is delivered after all
%% currently buffered events.
%%
%% If the stage is a consumer, it does not have buffered events, so the
%% message is queued immediately.
%%
%% This call returns `ok` regardless if the info has been successfully
%% queued or not. It is typically called from the stage itself.
%% @end
-spec async_info(stage(), term()) -> ok.
%% Fire-and-forget variant of sync_info/2; handled in handle_cast/2.
async_info(Stage, Msg) ->
    cast(Stage, {'$info', Msg}).
%% @doc
%% Returns the demand mode for a producer.
%%
%% It is either `forward` or `accumulate`. See `demand/2`.
%% @end
-spec demand(stage()) -> forward | accumulate.
%% Synchronously reads the current demand mode (see producer_demand/1).
demand(Stage) ->
    call(Stage, '$demand').
%% @doc
%% Sets the demand mode for a producer.
%% When `forward`, the demand is always forwarded to the `handle_demand`
%% callback. When `accumulate`, demand is accumulated until its mode is
%% set to `forward`. This is useful as a synchronization mechanism, where
%% the demand is accumulated until all consumers are subscribed. Defaults
%% to `forward`.
%%
%% This command is asynchronous.
%% @end
-spec demand(stage(), forward | accumulate) -> ok.
%% Asynchronously switches the demand mode; see producer_demand/2.
demand(Stage, Mode) ->
    cast(Stage, {'$demand', Mode}).
%% @doc
%% Asks the consumer to subscribe to the given producer synchronously.
%%
%% This call is synchronous and will return after the called consumer
%% sends the subscribe message to the producer. It does not, however,
%% wait for the subscription confirmation. Therefore this function
%% will return before `handle_subscribe/4` is called in the consumer.
%% In other words, it guarantees the message was sent, but it does not
%% guarantee a subscription has effectively been established.
%%
%% This function will return `{ok, subscription_tag}` as long as the
%% subscription message is sent. It will return `{error, not_a_consumer}`
%% when the stage is not a consumer. `subscription_tag` is the second element
%% of the two-element tuple that will be passed to `handle_subscribe/4`.
%% @end
-spec sync_subscribe(stage(), subscription_options()) -> {ok, subscription_tag()} | {error, not_a_consumer} | {error, {bad_opts, iolist() | binary()}}.
%% Uses the module-wide default timeout.
sync_subscribe(Stage, Opts) ->
    sync_subscribe(Stage, Opts, ?TIMEOUT).

-spec sync_subscribe(stage(), subscription_options(), timeout()) -> {ok, subscription_tag()} | {error, not_a_consumer} | {error, {bad_opts, iolist() | binary()}}.
%% 'undefined' means there is no previous subscription to cancel
%% (contrast with sync_resubscribe/5).
sync_subscribe(Stage, Opts, Timeout) ->
    sync_subscribe(Stage, undefined, Opts, Timeout).
%% TODO: check parameter
%% @doc
%% Cancels `SubscriptionTag` with `Reason` and resubscribe
%% to the same stage with the given options.
%%
%% This is useful in case you need to update the options in
%% which you are currently subscribed to in a producer.
%%
%% This function is sync, which means it will wait until the
%% subscription message is sent to the producer, although it
%% won't wait for the subscription confirmation.
%%
%% See `sync_subscribe/2` for options and more information.
%% @end
-spec sync_resubscribe(stage(), subscription_tag(), term(), subscription_options()) -> {ok, subscription_tag()} | {error, not_a_consumer} | {error, {bad_opts, iolist() | binary()}}.
%% Uses the module-wide default timeout.
sync_resubscribe(Stage, SubscriptionTag, Reason, Opts) ->
    sync_resubscribe(Stage, SubscriptionTag, Reason, Opts, ?TIMEOUT).

-spec sync_resubscribe(stage(), subscription_tag(), term(), subscription_options(), timeout()) -> {ok, subscription_tag()} | {error, not_a_consumer} | {error, {bad_opts, iolist() | binary()}}.
%% The {Tag, Reason} pair tells the consumer to cancel the old
%% subscription before establishing the new one.
sync_resubscribe(Stage, SubscriptionTag, Reason, Opts, Timeout) ->
    sync_subscribe(Stage, {SubscriptionTag, Reason}, Opts, Timeout).
%% Shared implementation of sync_subscribe/3 and sync_resubscribe/5.
%%
%% Cancel is 'undefined' (plain subscribe) or {SubscriptionTag, Reason}
%% (resubscribe). The 'to' option is mandatory: it names the producer to
%% subscribe to, and is stripped before forwarding the remaining options.
%% Throws a binary message when 'to' is missing.
sync_subscribe(Stage, Cancel, Opts, Timeout) ->
    case proplists:lookup(to, Opts) of
        none ->
            %% The message carries no format arguments, so build the
            %% binary directly instead of round-tripping io_lib:format/2
            %% (same binary value as before).
            throw(<<"expected to argument in sync_(re)subscribe">>);
        {to, To} ->
            NewOpts = proplists:delete(to, Opts),
            call(Stage, {'$subscribe', Cancel, To, NewOpts}, Timeout)
    end.
%% @doc
%% Asks the consumer to subscribe to the given producer asynchronously.
%% This function is async, which means it always returns
%% `ok` once the request is dispatched but without waiting
%% for its completion. This particular function is usually
%% called from a stage's `init/1` callback.
%%
%% Options
%%
%% This function accepts the same options as `sync_subscribe/2`.
%% @end
-spec async_subscribe(stage(), subscription_options()) -> ok.
%% 'undefined' means there is no previous subscription to cancel.
async_subscribe(Stage, Opts) ->
    async_subscribe(Stage, undefined, Opts).
%% @doc
%% Cancels `SubscriptionTag` with `Reason` and resubscribe
%% to the same stage with the given options.
%%
%% This is useful in case you need to update the options in
%% which you are currently subscribed to in a producer.
%%
%% This function is async, which means it always returns
%% `ok` once the request is dispatched but without waiting
%% for its completion.
%%
%% Options
%%
%% This function accepts the same options as `sync_subscribe/2`.
%% @end
-spec async_resubscribe(stage(), subscription_tag(), term(), subscription_options()) -> ok.
%% The {Tag, Reason} pair tells the consumer to cancel the old
%% subscription before establishing the new one.
async_resubscribe(Stage, SubscriptionTag, Reason, Opts) ->
    async_subscribe(Stage, {SubscriptionTag, Reason}, Opts).
%% Shared implementation of async_subscribe/2 and async_resubscribe/4.
%% Mirrors sync_subscribe/4 but dispatches via cast. The 'to' option is
%% mandatory and stripped before forwarding; throws when missing.
async_subscribe(Stage, Cancel, Opts) ->
    case proplists:lookup(to, Opts) of
        none ->
            %% No format arguments were used, so build the binary
            %% directly (same value io_lib:format/2 produced before).
            throw(<<"expected to argument in async_(re)subscribe">>);
        {to, To} ->
            NewOpts = proplists:delete(to, Opts),
            cast(Stage, {'$subscribe', Cancel, To, NewOpts})
    end.
%% @doc
%% Asks the given demand to the producer.
%%
%% `ProducerSubscription` is the subscription this demand will be asked on; this
%% term could be for example stored in the stage when received in
%% `handle_subscribe/4`.
%%
%% The demand is a non-negative integer with the amount of events to
%% ask a producer for. If the demand is `0`, this function simply returns `ok`
%% without asking for data.
%%
%% This function must only be used in the cases when a consumer
%% sets a subscription to `manual` mode in the `handle_subscribe/4`
%% callback.
%%
%% It accepts the same options as `erlang:send/3`, and returns the same value as
%% `erlang:send/3`.
%% @end
-spec ask(from(), non_neg_integer()) -> ok | noconnect | nosuspend.
%% Asks Demand events from the producer identified by the subscription,
%% with default send options.
ask(ProducerSubscription, Demand) ->
    ask(ProducerSubscription, Demand, []).

-spec ask(from(), non_neg_integer(), [noconnect | nosuspend]) -> ok | noconnect | nosuspend.
%% Zero demand is a no-op; positive demand sends an ask request tagged
%% with our pid and the subscription reference.
ask({_ProducerPid, _SubscriptionRef}, 0, _SendOpts) ->
    ok;
ask({ProducerPid, SubscriptionRef}, Demand, SendOpts) when is_integer(Demand), Demand > 0 ->
    Request = {'$gen_producer', {self(), SubscriptionRef}, {ask, Demand}},
    erlang:send(ProducerPid, Request, SendOpts).
%% @doc
%% Cancels the given subscription on the producer.
%%
%% The second argument is the cancellation reason. Once the
%% producer receives the request, a confirmation may be
%% forwarded to the consumer (although there is no guarantee
%% as the producer may crash for unrelated reasons before).
%% The consumer will react to the cancellation according to
%% the `cancel` option given when subscribing. For example:
%%
%% gen_stage:cancel({Pid, Subscription}, shutdown)
%%
%% will cause the consumer to crash if the `cancel` given
%% when subscribing is `permanent` (the default) but it
%% won't cause a crash in other modes. See the options in
%% `sync_subscribe/3` for more information.
%%
%% The `cancel` operation is an asynchronous request. The
%% third argument are same options as `erlang:send/3`,
%% allowing you to pass `noconnect` or `nosuspend` which
%% is useful when working across nodes. This function returns
%% the same value as `erlang:send/3`.
%% @end
-spec cancel(from(), term()) -> ok | noconnect | nosuspend.
%% Cancels the subscription with default send options.
cancel(ProducerSubscription, Reason) ->
    cancel(ProducerSubscription, Reason, []).

-spec cancel(from(), term(), [noconnect | nosuspend]) -> ok | noconnect | nosuspend.
%% Sends an asynchronous cancel request to the producer, tagged with our
%% pid and the subscription reference.
cancel({ProducerPid, SubscriptionRef}, Reason, SendOpts) ->
    Request = {'$gen_producer', {self(), SubscriptionRef}, {cancel, Reason}},
    erlang:send(ProducerPid, Request, SendOpts).
%% Sends Msg without auto-connecting to a remote node; returns
%% noconnect instead of blocking when the node is down.
send_noconnect(Pid, Msg) ->
    erlang:send(Pid, Msg, [noconnect]).
%% @doc
%% Makes a synchronous call to the `stage` and waits for its reply.
%%
%% The client sends the given `request` to the stage and waits until a
%% reply arrives or a timeout occurs. `handle_call/3` will be called on
%% the stage to handle the request.
%%
%% `stage` can be any of the values described in the "Name registration"
%% section of the documentation for this module.
%%
%% Timeouts
%%
%% `timeout` is an integer greater than zero which specifies how many
%% milliseconds to wait for a reply, or the atom `infinity` to wait
%% indefinitely. The default value is `5000`. If no reply is received
%% within the specified time, the function call fails and the caller
%% exits. If the caller catches the failure and continues running, and
%% the stage is just late with the reply, such reply may arrive at any
%% time later into the caller's message queue. The caller must in this
%% case be prepared for this and discard any such garbage messages that
%% are two-element tuples with a reference as the first element.
%% @end
-spec call(stage(), term()) -> term().
%% Uses the module-wide default timeout.
call(Stage, Request) ->
    call(Stage, Request, ?TIMEOUT).

-spec call(stage(), term(), timeout()) -> term().
%% Thin wrapper over gen_server:call/3; exits on timeout or crash.
call(Stage, Request, Timeout) ->
    gen_server:call(Stage, Request, Timeout).
%% @doc
%% Sends an asynchronous request to the `stage`.
%%
%% This function always returns `ok` regardless of whether
%% the destination `stage` (or node) exists. Therefore it
%% is unknown whether the destination stage successfully
%% handled the message.
%%
%% `handle_cast/2` will be called on the stage to handle
%% the request. In case the `stage` is on a node which is
%% not yet connected to the caller one, the call is going to
%% block until a connection happens.
%% @end
-spec cast(stage(), term()) -> ok.
%% Thin wrapper over gen_server:cast/2; always returns ok.
cast(Stage, Request) ->
    gen_server:cast(Stage, Request).
%% @doc
%% Replies to a client.
%%
%% This function can be used to explicitly send a reply to a client that
%% called `call/3` when the reply cannot be specified in the return value
%% of `handle_call/3`.
%%
%% `client` must be the `from` argument (the second argument) accepted by
%% `handle_call/3` callbacks. `reply` is an arbitrary term which will be
%% given back to the client as the return value of the call.
%%
%% Note that `reply/2` can be called from any process, not just the `gen_stage`
%% that originally received the call (as long as that `gen_stage` communicated the
%% `from` argument somehow).
%%
%% This function always returns `ok`.
%% @end
%% Sends {Tag, Reply} back to a caller identified by the From tuple of
%% handle_call/3. Best-effort: any send failure is swallowed so this
%% always returns ok (mirrors gen_server:reply/2 semantics).
reply({To, Tag}, Reply) when is_pid(To) ->
    try To ! {Tag, Reply} of
        _ -> ok
    catch
        _:_ -> ok
    end.
%% @doc
%% Stops the stage with the given `reason`.
%%
%% The `terminate/2` callback of the given `stage` will be invoked before
%% exiting. This function returns `ok` if the server terminates with the
%% given reason; if it terminates with another reason, the call exits.
%% This function keeps OTP semantics regarding error reporting.
%% If the reason is any other than `normal`, `shutdown` or
%% `{shutdown, _}`, an error report is logged.
%% @end
-spec stop(stage()) -> ok.
%% Stops with reason 'normal' and no timeout. (Spec added for
%% consistency: every other exported arity pair in this API is specced.)
stop(Stage) ->
    stop(Stage, normal, infinity).

-spec stop(stage(), term(), timeout()) -> ok.
%% Delegates to gen:stop/3, which invokes terminate/2 before exiting and
%% keeps OTP error-report semantics for abnormal reasons.
stop(Stage, Reason, Timeout) ->
    gen:stop(Stage, Reason, Timeout).
%% @doc Returns the estimated number of buffered items for a producer.
-spec estimate_buffered_count(stage()) -> non_neg_integer().
estimate_buffered_count(Stage) ->
    %% Use the module-wide default timeout instead of a hard-coded 5000,
    %% for consistency with sync_info/2, call/2 and the other sync APIs.
    estimate_buffered_count(Stage, ?TIMEOUT).
-spec estimate_buffered_count(stage(), non_neg_integer()) -> non_neg_integer().
%% Handled by producer_estimate_buffered_count/1 in handle_call/3.
estimate_buffered_count(Stage, Timeout) ->
    call(Stage, '$estimate_buffered_count', Timeout).
%% gen_server init: run the user module's init/1 and normalize its
%% return into the matching stage-type initializer.
init({Mod, Args}) ->
    init_stage(Mod, Mod:init(Args)).

%% One clause per documented init/1 return shape.
init_stage(Mod, {producer, State}) ->
    init_producer(Mod, [], State);
init_stage(Mod, {producer, State, Opts}) when is_list(Opts) ->
    init_producer(Mod, Opts, State);
init_stage(Mod, {producer_consumer, State}) ->
    init_producer_consumer(Mod, [], State);
init_stage(Mod, {producer_consumer, State, Opts}) when is_list(Opts) ->
    init_producer_consumer(Mod, Opts, State);
init_stage(Mod, {consumer, State}) ->
    init_consumer(Mod, [], State);
init_stage(Mod, {consumer, State, Opts}) when is_list(Opts) ->
    init_consumer(Mod, Opts, State);
init_stage(_Mod, {stop, _} = Stop) ->
    Stop;
init_stage(_Mod, ignore) ->
    ignore;
init_stage(_Mod, Other) ->
    {stop, {bad_return_value, Other}}.
%% Validates producer options and builds the initial #stage{}.
%% Each validation step consumes its option from the list; any leftover
%% option is rejected by validate_no_opts/1 and surfaces as {bad_opts, _}.
init_producer(Mod, Opts, State) ->
    case init_dispatcher(Opts) of
        {ok, DispatcherMod, DispatcherState, Opts1} ->
            %% buffer_size defaults to 10000 for plain producers.
            case validate_integer(Opts1, buffer_size, 10000, 0, infinity, true) of
                {ok, BufferSize, Opts2} ->
                    case validate_in(Opts2, buffer_keep, last, [first, last]) of
                        {ok, BufferKeep, Opts3} ->
                            case validate_in(Opts3, demand, forward, [accumulate, forward]) of
                                {ok, Demand, Opts4} ->
                                    case validate_no_opts(Opts4) of
                                        ok ->
                                            %% events is [] (accumulating list of demands)
                                            %% or the atom 'forward'.
                                            Events =
                                                case Demand of
                                                    accumulate -> [];
                                                    _ -> forward
                                                end,
                                            Stage = #stage{
                                                mod = Mod,
                                                state = State,
                                                type = producer,
                                                buffer = gen_stage_buffer:new(BufferSize),
                                                buffer_keep = BufferKeep,
                                                events = Events,
                                                dispatcher_mod = DispatcherMod,
                                                dispatcher_state = DispatcherState
                                            },
                                            {ok, Stage};
                                        {error, Message} ->
                                            {stop, {bad_opts, Message}}
                                    end;
                                {error, Message} ->
                                    {stop, {bad_opts, Message}}
                            end;
                        {error, Message} ->
                            {stop, {bad_opts, Message}}
                    end;
                {error, Message} ->
                    {stop, {bad_opts, Message}}
            end;
        {error, Message} ->
            {stop, {bad_opts, Message}}
    end.
%% Resolves the 'dispatcher' option into {ok, Mod, ModState, RemainingOpts}.
%% Accepts a bare module atom or {Module, DispatcherOpts}; defaults to
%% gen_stage_demand_dispatcher. Anything else is an error.
init_dispatcher(Opts) ->
    case proplists:get_value(dispatcher, Opts, gen_stage_demand_dispatcher) of
        Mod when is_atom(Mod) ->
            start_dispatcher(Mod, [], Opts);
        {Mod, DispatcherOpts} when is_atom(Mod), is_list(DispatcherOpts) ->
            start_dispatcher(Mod, DispatcherOpts, Opts);
        Other ->
            {error, io_lib:format("expected dispatcher to be an atom or a {atom(), list()}, got ~p", [Other])}
    end.

%% Initializes the dispatcher module and strips the consumed option.
start_dispatcher(Mod, DispatcherOpts, Opts) ->
    {ok, DispatcherState} = Mod:init(DispatcherOpts),
    {ok, Mod, DispatcherState, proplists:delete(dispatcher, Opts)}.
%% Validates producer_consumer options and builds the initial #stage{},
%% then subscribes to every producer listed in subscribe_to.
%% Unlike plain producers, buffer_size defaults to infinity here.
init_producer_consumer(Mod, Opts, State) ->
    case init_dispatcher(Opts) of
        {ok, DispatcherMod, DispatcherState, Opts1} ->
            case validate_list(Opts1, subscribe_to, []) of
                {ok, SubscribeTo, Opts2} ->
                    case validate_integer(Opts2, buffer_size, infinity, 0, infinity, true) of
                        {ok, BufferSize, Opts3} ->
                            case validate_in(Opts3, buffer_keep, last, [first, last]) of
                                {ok, BufferKeep, Opts4} ->
                                    case validate_no_opts(Opts4) of
                                        ok ->
                                            Stage = #stage{
                                                mod = Mod,
                                                state = State,
                                                type = producer_consumer,
                                                buffer = gen_stage_buffer:new(BufferSize),
                                                buffer_keep = BufferKeep,
                                                %% {EventQueue, PendingDemand} pair.
                                                events = {queue:new(), 0},
                                                dispatcher_mod = DispatcherMod,
                                                dispatcher_state = DispatcherState
                                            },
                                            consumer_init_subscribe(SubscribeTo, Stage);
                                        {error, Message} ->
                                            {stop, {bad_opts, Message}}
                                    end;
                                {error, Message} ->
                                    {stop, {bad_opts, Message}}
                            end;
                        {error, Message} ->
                            {stop, {bad_opts, Message}}
                    end;
                {error, Msg} ->
                    {stop, {bad_opts, Msg}}
            end;
        {error, Message} ->
            {stop, {bad_opts, Message}}
    end.
%% Validates consumer options (only subscribe_to is accepted) and
%% subscribes to each listed producer.
init_consumer(Mod, Opts, State) ->
    case validate_list(Opts, subscribe_to, []) of
        {ok, SubscribeTo, RestOpts} ->
            case validate_no_opts(RestOpts) of
                ok ->
                    Stage = #stage{mod = Mod, state = State, type = consumer},
                    consumer_init_subscribe(SubscribeTo, Stage);
                {error, Reason} ->
                    {stop, {bad_opts, Reason}}
            end;
        {error, Reason} ->
            {stop, {bad_opts, Reason}}
    end.
%% gen_server handle_call: '$'-prefixed internal requests are routed to
%% the stage machinery; everything else goes to the user's handle_call/3.
handle_call({'$info', Msg}, _From, Stage) ->
    producer_info(Msg, Stage);
handle_call('$demand', _From, Stage) ->
    producer_demand(Stage);
handle_call({'$subscribe', Current, To, Opts}, _From, Stage) ->
    consumer_subscribe(Current, To, Opts, Stage);
handle_call('$estimate_buffered_count', _From, Stage) ->
    producer_estimate_buffered_count(Stage);
handle_call(Msg, From, #stage{mod = Mod, state = State} = Stage) ->
    case Mod:handle_call(Msg, From, State) of
        %% Reply tuples may carry events that must be dispatched before
        %% replying state is stored.
        {reply, Reply, Events, NewState} when is_list(Events) ->
            NewStage = dispatch_events(Events, length(Events), Stage),
            {reply, Reply, NewStage#stage{state = NewState}};
        {reply, Reply, Events, NewState, hibernate} when is_list(Events) ->
            NewStage = dispatch_events(Events, length(Events), Stage),
            {reply, Reply, NewStage#stage{state = NewState}, hibernate};
        {stop, Reason, Reply, NewState} ->
            {stop, Reason, Reply, Stage#stage{state = NewState}};
        Return ->
            %% noreply/stop shapes share handling with casts and infos.
            handle_noreply_callback(Return, Stage)
    end.
%% gen_server handle_cast: internal requests reuse the synchronous
%% handlers and drop the reply; others go to the user's handle_cast/2.
handle_cast({'$info', Msg}, Stage) ->
    {reply, _, NewStage} = producer_info(Msg, Stage),
    {noreply, NewStage};
handle_cast({'$demand', Mode}, Stage) ->
    producer_demand(Mode, Stage);
handle_cast({'$subscribe', Current, To, Opts}, Stage) ->
    %% consumer_subscribe/4 returns call-shaped tuples; strip the reply.
    case consumer_subscribe(Current, To, Opts, Stage) of
        {reply, _, NewStage} -> {noreply, NewStage};
        {stop, Reason, _, NewStage} -> {stop, Reason, NewStage};
        {stop, _, _} = Stop -> Stop
    end;
handle_cast(Msg, #stage{state = State} = Stage) ->
    noreply_callback(handle_cast, [Msg, State], Stage).
%% A monitored process exited: the ref belongs either to a producer we
%% subscribed to (consumer side) or to a consumer subscribed to us
%% (producer side); otherwise forward to the user's handle_info/2.
handle_info({'DOWN', Ref, _, _, Reason} = Msg, Stage) ->
    #stage{producers = Producers, monitors = Monitors, state = State} = Stage,
    case Producers of
        #{Ref := _} ->
            consumer_cancel(Ref, down, Reason, Stage);
        #{} ->
            case Monitors of
                #{Ref := ConsumerRef} ->
                    producer_cancel(ConsumerRef, down, Reason, Stage);
                #{} ->
                    noreply_callback(handle_info, [Msg, State], Stage)
            end
    end;
%% Producer-protocol message arriving at a pure consumer: protocol error.
handle_info({'$gen_producer', _, _} = Msg, #stage{type = consumer} = Stage) ->
    ErrMsg = "gen_stage consumer ~tp received $gen_producer message: ~tp~n",
    error_logger:error_msg(ErrMsg, [self_name(), Msg]),
    {noreply, Stage};
%% A consumer asks to subscribe to us.
handle_info({'$gen_producer', {ConsumerPid, Ref} = From, {subscribe, Cancel, Opts}}, #stage{consumers = Consumers} = Stage) ->
    case Consumers of
        #{Ref := _} ->
            %% Same subscription ref twice: reject the duplicate.
            ErrMsg = "gen_stage producer ~tp received duplicated subscription from: ~tp~n",
            error_logger:error_msg(ErrMsg, [self_name(), From]),
            Msg = {'$gen_consumer', {self(), Ref}, {cancel, duplicated_subscription}},
            send_noconnect(ConsumerPid, Msg),
            {noreply, Stage};
        #{} ->
            %% A resubscribe carries {OldRef, Reason} to cancel first.
            case maybe_producer_cancel(Cancel, Stage) of
                {noreply, Stage1} ->
                    MonRef = erlang:monitor(process, ConsumerPid),
                    NewMon = maps:put(MonRef, Ref, Stage1#stage.monitors),
                    NewCon = maps:put(Ref, {ConsumerPid, MonRef}, Stage1#stage.consumers),
                    producer_subscribe(Opts, From, Stage1#stage{monitors = NewMon, consumers = NewCon});
                Other ->
                    Other
            end
    end;
%% A known consumer asks for more events; unknown refs get cancelled.
handle_info({'$gen_producer', {ConsumerPid, Ref} = From, {ask, Counter}}, #stage{consumers = Consumers} = Stage) when is_integer(Counter) ->
    case Consumers of
        #{Ref := _} ->
            dispatcher_callback(ask, [Counter, From, Stage#stage.dispatcher_state], Stage);
        #{} ->
            Msg = {'$gen_consumer', {self(), Ref}, {cancel, unknown_subscription}},
            send_noconnect(ConsumerPid, Msg),
            {noreply, Stage}
    end;
handle_info({'$gen_producer', {_, Ref}, {cancel, Reason}}, Stage) ->
    producer_cancel(Ref, cancel, Reason, Stage);
%% Consumer-protocol message arriving at a pure producer: protocol error.
handle_info({'$gen_consumer', _, _} = Msg, #stage{type = producer} = Stage) ->
    ErrMsg = "stage producer ~tp received $gen_consumer message: ~tp~n",
    error_logger:error_msg(ErrMsg, [self_name(), Msg]),
    {noreply, Stage};
%% Events arriving at a producer_consumer are queued, then drained
%% against the pending demand counter.
handle_info({'$gen_consumer', {ProducerPid, Ref}, Events}, #stage{type = producer_consumer, events = {Queue, Counter}, producers = Producers} = Stage) when is_list(Events) ->
    case maps:is_key(Ref, Producers) of
        true ->
            NewQueue = put_pc_events(Events, Ref, Queue),
            take_pc_events(NewQueue, Counter, Stage);
        false ->
            Msg = {'$gen_producer', {self(), Ref}, {cancel, unknown_subscription}},
            send_noconnect(ProducerPid, Msg),
            {noreply, Stage}
    end;
%% Events arriving at a consumer are batched and fed to handle_events/3.
handle_info({'$gen_consumer', {ProducerPid, Ref} = From, Events}, #stage{type = consumer, producers = Producers, mod = Mod, state = State} = Stage) when is_list(Events) ->
    case Producers of
        #{Ref := Entry} ->
            {Batches, NewStage} = consumer_receive(From, Entry, Events, Stage),
            consumer_dispatch(Batches, From, Mod, State, NewStage, false);
        _ ->
            Msg = {'$gen_producer', {self(), Ref}, {cancel, unknown_subscription}},
            send_noconnect(ProducerPid, Msg),
            {noreply, Stage}
    end;
handle_info({'$gen_consumer', {_, Ref}, {cancel, Reason}}, Stage) ->
    consumer_cancel(Ref, cancel, Reason, Stage);
%% Anything else is the user's business.
handle_info(Msg, #stage{state = State} = Stage) ->
    noreply_callback(handle_info, [Msg, State], Stage).
%% gen_server terminate: the user's terminate/2 is optional; do nothing
%% when it is not exported.
terminate(Reason, #stage{mod = Mod, state = State}) ->
    case erlang:function_exported(Mod, terminate, 2) of
        false -> ok;
        true -> Mod:terminate(Reason, State)
    end.
%% gen_server code_change: optional in the user module; when absent the
%% stage state is kept unchanged.
code_change(OldVsn, #stage{mod = Mod, state = State} = Stage, Extra) ->
    case erlang:function_exported(Mod, code_change, 3) of
        false ->
            {ok, Stage};
        true ->
            case Mod:code_change(OldVsn, State, Extra) of
                {ok, NewState} -> {ok, Stage#stage{state = NewState}};
                Other -> Other
            end
    end.
% format_status
%% Shared helpers
%% Invokes an optional/required user callback whose return is a
%% noreply/stop shape, normalizing via handle_noreply_callback/2.
%% handle_info and handle_cancel have graceful defaults when the user
%% module does not export them.
noreply_callback(handle_info, [Msg, State], #stage{mod = Mod} = Stage) ->
    case erlang:function_exported(Mod, handle_info, 2) of
        true ->
            handle_noreply_callback(Mod:handle_info(Msg, State), Stage);
        false ->
            %% Mirror gen_server's warning for unhandled info messages.
            Log = "undefined handle_info in ~tp~nunhandled message: ~tp~n",
            error_logger:warning_msg(Log, [Mod, Msg]),
            {noreply, Stage#stage{state = State}}
    end;
noreply_callback(handle_cancel, [Subscription, From, State], #stage{mod = Mod} = Stage) ->
    case erlang:function_exported(Mod, handle_cancel, 3) of
        true ->
            handle_noreply_callback(Mod:handle_cancel(Subscription, From, State), Stage);
        false ->
            {noreply, Stage#stage{state = State}}
    end;
%% All other callbacks (e.g. handle_demand, handle_cast) are mandatory.
noreply_callback(Callback, Args, #stage{mod = Mod} = Stage) ->
    handle_noreply_callback(apply(Mod, Callback, Args), Stage).
%% Normalizes a user-callback noreply/stop return: dispatch any emitted
%% events, store the new state, and convert unknown shapes into a
%% bad_return_value stop.
handle_noreply_callback({noreply, Events, State}, Stage) when is_list(Events) ->
    NewStage = dispatch_events(Events, length(Events), Stage),
    {noreply, NewStage#stage{state = State}};
handle_noreply_callback({noreply, Events, State, hibernate}, Stage) when is_list(Events) ->
    NewStage = dispatch_events(Events, length(Events), Stage),
    {noreply, NewStage#stage{state = State}, hibernate};
handle_noreply_callback({stop, Reason, State}, Stage) ->
    {stop, Reason, Stage#stage{state = State}};
handle_noreply_callback(Other, Stage) ->
    {stop, {bad_return_value, Other}, Stage}.
%% Producer helpers
%% Reports the current demand mode: the events field is the atom
%% 'forward' in forward mode, or a list of accumulated demands otherwise.
producer_demand(#stage{events = forward} = Stage) ->
    {reply, forward, Stage};
producer_demand(#stage{events = Events} = Stage) when is_list(Events) ->
    {reply, accumulate, Stage}.
%% Handles the asynchronous {'$demand', Mode} cast.
%%
%% forward: replay any accumulated demand through handle_demand/2 and
%% forward future demand directly. accumulate: start (or keep)
%% accumulating demand. producer_consumers silently ignore 'forward';
%% any other non-producer stage gets an error log.
producer_demand(forward, #stage{type = producer_consumer} = Stage) ->
    {noreply, Stage};
%% BUG FIX: the guard previously read `type =/= producer` — the atom
%% 'type', not the bound variable 'Type' — which is always true, so this
%% error clause swallowed every demand-mode change on real producers.
producer_demand(_Mode, #stage{type = Type} = Stage) when Type =/= producer ->
    ErrorMsg = "Demand mode can only be set for producers, gen_stage ~tp is a ~ts",
    error_logger:error_msg(ErrorMsg, [self_name(), Type]),
    {noreply, Stage};
producer_demand(forward, #stage{events = Events} = Stage) ->
    NewStage = Stage#stage{events = forward},
    if
        is_list(Events) ->
            %% Demands were consed on, so replay them oldest-first; stop
            %% folding as soon as a callback returns a stop tuple.
            Fun =
                fun
                    (D, {noreply, #stage{state = State} = StageAcc}) ->
                        noreply_callback(handle_demand, [D, State], StageAcc);
                    (D, {noreply, #stage{state = State} = StageAcc, _}) ->
                        noreply_callback(handle_demand, [D, State], StageAcc);
                    (_, {stop, _, _} = Acc) ->
                        Acc
                end,
            lists:foldl(Fun, {noreply, NewStage}, lists:reverse(Events));
        true ->
            {noreply, NewStage}
    end;
producer_demand(accumulate, #stage{events = Events} = Stage) ->
    if
        %% Already accumulating: keep the pending list untouched.
        is_list(Events) -> {noreply, Stage};
        true -> {noreply, Stage#stage{events = []}}
    end.
%% Completes a subscription on the producer side: runs the optional
%% handle_subscribe/4 callback, then registers the consumer with the
%% dispatcher. A dispatcher rejection cancels the just-added consumer.
producer_subscribe(Opts, {_, Ref} = From, Stage) ->
    #stage{mod = Mod, state = State, dispatcher_mod = DispatcherMod, dispatcher_state = DispatcherState} = Stage,
    case maybe_subscribe(Mod, consumer, Opts, From, State) of
        {automatic, NewState} ->
            NewStage = Stage#stage{state = NewState},
            case DispatcherMod:subscribe(Opts, From, DispatcherState) of
                {ok, _, _} = Result -> handle_dispatcher_result(Result, NewStage);
                %% BUG FIX: was producer_cancel(element(1, From), ...),
                %% which passed the consumer *pid*; the consumers map is
                %% keyed by the subscription Ref (Elixir's 0-based
                %% elem(from, 1) ported as Erlang's 1-based element/2),
                %% so the cancel silently found nothing.
                {error, Reason} -> producer_cancel(Ref, cancel, Reason, NewStage)
            end;
        {stop, Reason, NewState} ->
            {stop, Reason, Stage#stage{state = NewState}};
        Other ->
            {stop, {bad_return_value, Other}, Stage}
    end.
%% Invokes the optional handle_subscribe/4 callback; modules that do not
%% export it default to automatic demand mode with unchanged state.
maybe_subscribe(Mod, Type, Opts, From, State) ->
    HasCallback = erlang:function_exported(Mod, handle_subscribe, 4),
    if
        HasCallback -> Mod:handle_subscribe(Type, Opts, From, State);
        true -> {automatic, State}
    end.
%% A resubscribe carries {OldRef, Reason}: cancel the old subscription
%% first. A plain subscribe carries 'undefined': nothing to cancel.
maybe_producer_cancel({Ref, Reason}, Stage) ->
    producer_cancel(Ref, cancel, Reason, Stage);
maybe_producer_cancel(undefined, Stage) ->
    {noreply, Stage}.
%% Asks the optional format_discarded/2 callback whether the default
%% "discarded events" log should be emitted; defaults to true (log).
maybe_format_discarded(Mod, Excess, State) ->
    case erlang:function_exported(Mod, format_discarded, 2) of
        false -> true;
        true -> Mod:format_discarded(Excess, State)
    end.
%% Removes a consumer subscription (by its Ref) from the producer:
%% demonitor, notify the consumer, run the user's handle_cancel/3, then
%% tell the dispatcher. Unknown refs are a no-op.
producer_cancel(Ref, Kind, Reason, Stage) ->
    case maps:take(Ref, Stage#stage.consumers) of
        error ->
            {noreply, Stage};
        {{Pid, MonRef}, Consumers} ->
            %% flush: drop any DOWN already in the mailbox.
            erlang:demonitor(MonRef, [flush]),
            send_noconnect(Pid, {'$gen_consumer', {self(), Ref}, {cancel, Reason}}),
            Stage1 = Stage#stage{consumers = Consumers, monitors = maps:remove(MonRef, Stage#stage.monitors)},
            case noreply_callback(handle_cancel, [{Kind, Reason}, {Pid, Ref}, Stage1#stage.state], Stage1) of
                {noreply, #stage{dispatcher_state = DispatcherState} = Stage2} ->
                    dispatcher_callback(cancel, [{Pid, Ref}, DispatcherState], Stage2);
                {stop, _, _} = Stop ->
                    Stop
            end
    end.
%% Applies a dispatcher-module callback and folds its {ok, Counter, _}
%% result back into the stage via handle_dispatcher_result/2.
dispatcher_callback(Callback, Args, #stage{dispatcher_mod = DispatcherMod} = Stage) ->
    Result = apply(DispatcherMod, Callback, Args),
    handle_dispatcher_result(Result, Stage).
%% Processes the demand Counter reported by the dispatcher: drain the
%% internal buffer first, then either forward leftover demand upstream
%% (producer_consumer), hand it to handle_demand/2 (forward mode), or
%% accumulate it (accumulate mode).
handle_dispatcher_result({ok, Counter, DispatcherState}, Stage) ->
    case Stage of
        #stage{type = producer_consumer, events = {Queue, Demand}} ->
            NewCounter = Counter + Demand,
            Stage1 = Stage#stage{dispatcher_state = DispatcherState, events = {Queue, NewCounter}},
            {ok, _, Stage2} = take_from_buffer(NewCounter, Stage1),
            #stage{events = {Queue1, Counter2}} = Stage2,
            take_pc_events(Queue1, Counter2, Stage2);
        _ ->
            case take_from_buffer(Counter, Stage#stage{dispatcher_state = DispatcherState}) of
                {ok, 0, NewStage} ->
                    %% Buffer fully satisfied the demand.
                    {noreply, NewStage};
                {ok, NewCounter, #stage{events = forward, state = State} = NewStage} ->
                    noreply_callback(handle_demand, [NewCounter, State], NewStage);
                {ok, NewCounter, #stage{events = Events} = NewStage} when is_list(Events) ->
                    {noreply, NewStage#stage{events = [NewCounter | Events]}}
            end
    end.
%% Pushes Events through the dispatcher; whatever the dispatcher cannot
%% deliver is buffered. Consumers must never emit events; producers with
%% no subscribers buffer everything.
dispatch_events([], _Len, Stage) ->
    Stage;
dispatch_events(Events, _Len, #stage{type = consumer} = Stage) ->
    ErrMsg = "stage consumer ~tp cannot dispatch events (an empty list must be returned): ~tp~n",
    error_logger:error_msg(ErrMsg, [self_name(), Events]),
    Stage;
dispatch_events(Events, _Len, #stage{consumers = Consumers} = Stage) when map_size(Consumers) =:= 0 ->
    buffer_events(Events, Stage);
dispatch_events(Events, Len, Stage) ->
    #stage{dispatcher_mod = DispatcherMod, dispatcher_state = DispatcherState} = Stage,
    {ok, Events1, DispatcherState1} = DispatcherMod:dispatch(Events, Len, DispatcherState),
    NewStage =
        case Stage of
            #stage{type = producer_consumer, events = {Queue, Demand}} ->
                %% Reduce tracked upstream demand by the number of events
                %% actually delivered (Len minus the leftovers).
                NewDemand = Demand - (Len - length(Events1)),
                Stage#stage{dispatcher_state = DispatcherState1, events = {Queue, max(NewDemand, 0)}};
            _ ->
                Stage#stage{dispatcher_state = DispatcherState1}
        end,
    buffer_events(Events1, NewStage).
%% Serves up to Counter demand from the internal buffer, dispatching
%% temporary items and replaying permanent (info) markers in order;
%% recurses until the buffer is empty or the demand is satisfied.
%% Returns {ok, RemainingCounter, Stage}.
take_from_buffer(Counter, #stage{buffer = Buffer} = Stage) ->
    case gen_stage_buffer:take_count_or_until_permanent(Buffer, Counter) of
        empty ->
            {ok, Counter, Stage};
        {ok, NewBuffer, NewCounter, Temps, Perms} ->
            Stage1 = dispatch_events(Temps, Counter - NewCounter, Stage#stage{buffer = NewBuffer}),
            Stage2 = lists:foldl(fun dispatch_info/2, Stage1, Perms),
            take_from_buffer(NewCounter, Stage2)
    end.
%% Stores undelivered events in the buffer. When the buffer overflows,
%% the optional format_discarded/2 callback decides whether the default
%% warning is logged. Permanent markers that become due are dispatched.
buffer_events([], Stage) ->
    Stage;
buffer_events(Events, #stage{mod = Mod, buffer = Buffer, buffer_keep = Keep, state = State} = Stage) ->
    {NewBuffer, Excess, Perms} = gen_stage_buffer:store_temporary(Buffer, Events, Keep),
    case Excess of
        0 -> ok;
        _ ->
            case maybe_format_discarded(Mod, Excess, State) of
                true ->
                    ErrMsg = "stage producer ~tp has discarded ~tp events from buffer",
                    error_logger:warning_msg(ErrMsg, [self_name(), Excess]);
                false ->
                    ok
            end
    end,
    lists:foldl(fun dispatch_info/2, Stage#stage{buffer = NewBuffer}, Perms).
%% Consumers have no buffer: log an error and report 0.
producer_estimate_buffered_count(#stage{type = consumer} = Stage) ->
    ErrorMsg = "Buffered count can only be requested for producers, gen_stage ~tp is a consumer",
    error_logger:error_msg(ErrorMsg, [self_name()]),
    {reply, 0, Stage};
producer_estimate_buffered_count(#stage{buffer = Buffer} = Stage) ->
    {reply, gen_stage_buffer:estimate_size(Buffer), Stage}.
%% Info helpers
%% Handles the '$info' request per stage type. Consumers have no buffer,
%% so the message is delivered to self immediately. producer_consumers
%% queue it behind pending events; plain producers buffer it behind
%% buffered events (or dispatch right away when the buffer is empty).
producer_info(Msg, #stage{type = consumer} = Stage) ->
    erlang:send(self(), Msg),
    {reply, ok, Stage};
producer_info(Msg, #stage{type = producer_consumer, events = {Queue, Demand}} = Stage) ->
    NewStage =
        case queue:is_empty(Queue) of
            true ->
                buffer_or_dispatch_info(Msg, Stage);
            false ->
                Stage#stage{events = {queue:in({info, Msg}, Queue), Demand}}
        end,
    {reply, ok, NewStage};
%% BUG FIX: this clause previously matched the misspelled atom 'produer',
%% so '$info' requests on plain producers crashed with function_clause.
producer_info(Msg, #stage{type = producer} = Stage) ->
    {reply, ok, buffer_or_dispatch_info(Msg, Stage)}.
%% Queues Msg behind currently buffered events (permanent marker), or
%% dispatches it immediately when the buffer is empty.
buffer_or_dispatch_info(Msg, #stage{buffer = Buffer} = Stage) ->
    case gen_stage_buffer:store_permanent_unless_empty(Buffer, Msg) of
        empty -> dispatch_info(Msg, Stage);
        {ok, NewBuffer} -> Stage#stage{buffer = NewBuffer}
    end.
%% Hands an info message to the dispatcher, which decides delivery.
dispatch_info(Msg, Stage) ->
    #stage{dispatcher_mod = DispatcherMod, dispatcher_state = DispatcherState} = Stage,
    {ok, NewState} = DispatcherMod:info(Msg, DispatcherState),
    Stage#stage{dispatcher_state = NewState}.
%% Consumer helpers
%% Subscribes to every producer listed at init time, threading the stage
%% through the fold. Stops at the first subscription that fails; later
%% producers are skipped once a stop has been seen.
consumer_init_subscribe(Producers, Stage) ->
    Step =
        fun
            (_To, {stop, _} = Stop) ->
                Stop;
            (To, {ok, StageAcc}) ->
                case consumer_subscribe(To, StageAcc) of
                    {reply, _, Stage1} -> {ok, Stage1};
                    {stop, Reason, _, _} -> {stop, Reason};
                    {stop, Reason, _} -> {stop, Reason}
                end
        end,
    lists:foldl(Step, {ok, Stage}, Producers).
%% Accounts for Events arriving from the producer registered under Ref.
%% With automatic demand ({Demand, Min, Max}) the events are split into
%% batches and the stored demand is updated; with manual demand the
%% events are delivered as a single batch and no demand bookkeeping is
%% performed here.
consumer_receive({_, Ref} = From, {ProducerId, Cancel, {Demand, Min, Max}}, Events, Stage) ->
{NewDemand, Batches} = split_batches(Events, From, Min, Max, Demand),
NewProducers = maps:put(Ref, {ProducerId, Cancel, {NewDemand, Min, Max}}, Stage#stage.producers),
{Batches, Stage#stage{producers = NewProducers}};
consumer_receive(_, {_, _, manual}, Events, Stage) ->
{[{Events, 0}], Stage}.
%% Delivers each batch to Mod:handle_events/3. After every successful
%% batch, the accumulated Ask is sent back to the producer (noconnect so
%% a down node does not block), and any events returned by the callback
%% are dispatched downstream. The trailing boolean remembers whether the
%% last callback asked to hibernate, which decides the final return.
consumer_dispatch([{Batch, Ask} | Batches], From, Mod, State, Stage, _Hibernate) ->
case Mod:handle_events(Batch, From, State) of
{noreply, Events, NewState} when is_list(Events) ->
NewStage = dispatch_events(Events, length(Events), Stage),
ask(From, Ask, [noconnect]),
consumer_dispatch(Batches, From, Mod, NewState, NewStage, false);
{noreply, Events, NewState, hibernate} when is_list(Events) ->
NewStage = dispatch_events(Events, length(Events), Stage),
ask(From, Ask, [noconnect]),
consumer_dispatch(Batches, From, Mod, NewState, NewStage, true);
{stop, Reason, NewState} ->
{stop, Reason, Stage#stage{state = NewState}};
%% Any other shape is a contract violation by the callback module.
Other ->
{stop, {bad_return_value, Other}, Stage#stage{state = State}}
end;
consumer_dispatch([], _From, _Mod, State, Stage, false) ->
{noreply, Stage#stage{state = State}};
consumer_dispatch([], _From, _Mod, State, Stage, true) ->
{noreply, Stage#stage{state = State}, hibernate}.
%% Normalizes a subscription spec: either {To, Opts} or a bare producer
%% reference (implying empty options). 'undefined' means there is no
%% previous subscription being replaced.
consumer_subscribe({To, Opts}, Stage) when is_list(Opts) ->
consumer_subscribe(undefined, To, Opts, Stage);
consumer_subscribe(To, Stage) ->
consumer_subscribe(undefined, To, [], Stage).
%% Subscribes this stage to producer To with the given options.
%% Producers cannot subscribe; for consumers the options are validated
%% (max_demand, min_demand, cancel mode), the producer is resolved and
%% monitored, and a '$gen_producer' subscribe message is sent. If the
%% producer cannot be found, behavior depends on the cancel mode:
%% permanent/transient stop the stage, temporary just replies ok.
%%
%% The previous version repeated the identical error-logging branch for
%% all three option validations; it is factored into one helper here.
consumer_subscribe(_Cancel, To, _Opts, #stage{type = producer} = Stage) ->
    ErrMsg = "stage producer ~tp cannot be subscribed to another stage: ~tp~n",
    error_logger:error_msg(ErrMsg, [self_name(), To]),
    {reply, {error, not_a_consumer}, Stage};
consumer_subscribe(Current, To, Opts, Stage) ->
    case validate_subscribe_opts(Opts) of
        {ok, Min, Max, Cancel} ->
            Producer = whereis_server(To),
            if
                Producer =/= undefined ->
                    Ref = monitor(process, Producer),
                    Msg = {'$gen_producer', {self(), Ref}, {subscribe, Current, Opts}},
                    send_noconnect(Producer, Msg),
                    consumer_subscribe(Opts, Ref, Producer, Cancel, Min, Max, Stage);
                Cancel =:= permanent orelse Cancel =:= transient ->
                    {stop, noproc, {ok, make_ref()}, Stage};
                Cancel =:= temporary ->
                    {reply, {ok, make_ref()}, Stage}
            end;
        {error, Msg} ->
            ErrMsg = "stage consumer ~tp subscribe received invalid option: ~ts~n",
            error_logger:error_msg(ErrMsg, [self_name(), Msg]),
            {reply, {error, {bad_opts, Msg}}, Stage}
    end.

%% Validates the subscription options in the same order as before:
%% max_demand (integer >= 1), then min_demand (0..Max-1, defaulting to
%% Max div 2), then the cancel mode. Returns the first error unchanged.
validate_subscribe_opts(Opts) ->
    case validate_integer(Opts, max_demand, 1000, 1, infinity, false) of
        {ok, Max, _} ->
            case validate_integer(Opts, min_demand, Max div 2, 0, Max - 1, false) of
                {ok, Min, _} ->
                    case validate_in(Opts, cancel, permanent, [temporary, transient, permanent]) of
                        {ok, Cancel, _} -> {ok, Min, Max, Cancel};
                        {error, _} = Error -> Error
                    end;
                {error, _} = Error -> Error
            end;
        {error, _} = Error -> Error
    end.
%% Finalizes a subscription after the monitor has been set up. Asks the
%% callback module (via maybe_subscribe -> handle_subscribe) whether
%% demand is automatic or manual. Automatic subscriptions immediately
%% ask the producer for Max events and track {Demand, Min, Max}.
consumer_subscribe(Opts, Ref, Producer, Cancel, Min, Max, Stage) ->
#stage{mod = Mod, state = State} = Stage,
To = {Producer, Ref},
case maybe_subscribe(Mod, producer, Opts, To, State) of
{automatic, NewState} ->
%% This process manages demand itself: prime with Max events.
ask(To, Max, [noconnect]),
Producers = maps:put(Ref, {Producer, Cancel, {Max, Min, Max}}, Stage#stage.producers),
{reply, {ok, Ref}, Stage#stage{producers = Producers, state = NewState}};
{manual, NewState} ->
Producers = maps:put(Ref, {Producer, Cancel, manual}, Stage#stage.producers),
{reply, {ok, Ref}, Stage#stage{producers = Producers, state = NewState}};
{stop, Reason, NewState} ->
{stop, Reason, Stage#stage{state = NewState}};
Other ->
{stop, {bad_return_value, Other}, Stage}
end.
%% Removes the producer registered under Ref (if any), demonitors it,
%% and schedules the cancel callback. Unknown refs are ignored.
consumer_cancel(Ref, Kind, Reason, #stage{producers = Producers} = Stage) ->
    case maps:take(Ref, Producers) of
        error ->
            {noreply, Stage};
        {{ProducerId, Mode, _Demand}, Remaining} ->
            erlang:demonitor(Ref, [flush]),
            Stage1 = Stage#stage{producers = Remaining},
            schedule_cancel(Mode, {Kind, Reason}, {ProducerId, Ref}, Stage1)
    end.
%% For producer_consumers with queued events the cancel is enqueued so
%% it is processed in order relative to pending events; otherwise it is
%% invoked immediately.
schedule_cancel(Mode, KindReason, ProducerRef, #stage{type = producer_consumer, events = {Queue, Demand}} = Stage) ->
    case queue:is_empty(Queue) of
        false ->
            Queue1 = queue:in({cancel, Mode, KindReason, ProducerRef}, Queue),
            {noreply, Stage#stage{events = {Queue1, Demand}}};
        true ->
            invoke_cancel(Mode, KindReason, ProducerRef, Stage)
    end;
schedule_cancel(Mode, KindReason, ProducerRef, Stage) ->
    invoke_cancel(Mode, KindReason, ProducerRef, Stage).
%% Runs the handle_cancel callback and decides whether the stage should
%% keep running: permanent subscriptions always stop on cancel,
%% transient ones stop only for abnormal reasons, temporary ones never
%% stop. A cancel caused by a duplicate subscription never stops.
invoke_cancel(Mode, {_, Reason} = KindReason, {Pid, _} = ProducerRef, #stage{state = State} = Stage) ->
    case noreply_callback(handle_cancel, [KindReason, ProducerRef, State], Stage) of
        {noreply, NewStage} ->
            case Mode =:= permanent orelse (Mode =:= transient andalso (not is_transient_shutdown(Reason))) of
                true ->
                    case Reason of
                        already_subscribed ->
                            {noreply, NewStage};
                        _ ->
                            ErrMsg = "stage consumer ~tp is stopping after receiving cancel from producer ~tp with reason: ~tp~n",
                            error_logger:info_msg(ErrMsg, [self_name(), Pid, Reason]),
                            %% BUGFIX: stop with NewStage (the state returned by the
                            %% callback); the old code stopped with the stale Stage,
                            %% discarding the callback's state update before terminate.
                            {stop, Reason, NewStage}
                    end;
                false ->
                    {noreply, NewStage}
            end;
        Other ->
            Other
    end.
%% Producer consumer helpers
%% Appends an event batch, tagged with its subscription Ref, to the
%% producer_consumer event queue.
put_pc_events(Events, Ref, Queue) ->
queue:in({Events, Ref}, Queue).
%% Forwards a queued event batch to the consumer side. If the
%% subscription Ref is still known, demand is accounted for via
%% consumer_receive; otherwise (subscription already cancelled) the
%% events are delivered as one batch with no demand tracking.
send_pc_events(Events, Ref, #stage{mod = Mod, state = State, producers = Producers} = Stage) ->
    case maps:find(Ref, Producers) of
        {ok, {ProducerId, _, _} = Entry} ->
            From = {ProducerId, Ref},
            {Batches, Stage1} = consumer_receive(From, Entry, Events, Stage),
            consumer_dispatch(Batches, From, Mod, State, Stage1, false);
        error ->
            consumer_dispatch([{Events, 0}], {pid, Ref}, Mod, State, Stage, false)
    end.
%% Drains the producer_consumer event queue while there is outstanding
%% demand (Counter > 0). The queue interleaves three kinds of entries:
%% {info, Msg} (deferred sync_info), {cancel, Mode, KindReason, Ref}
%% (deferred producer cancels) and {Events, Ref} (event batches).
take_pc_events(Queue, Counter, Stage) when Counter > 0 ->
case queue:out(Queue) of
{{value, {info, Msg}}, Queue1} ->
%% Deferred info message: buffer or dispatch it, keep draining.
take_pc_events(Queue1, Counter, buffer_or_dispatch_info(Msg, Stage));
{{value, {cancel, Mode, KindReason, Ref}}, Queue1} ->
case invoke_cancel(Mode, KindReason, Ref, Stage) of
{noreply, Stage1} ->
take_pc_events(Queue1, Counter, Stage1);
{noreply, Stage1, hibernate} ->
take_pc_events(Queue1, Counter, Stage1);
{stop, _, _} = Stop ->
Stop
end;
{{value, {Events, Ref}}, Queue1} ->
%% Dispatching may itself queue/consume events, so continue with
%% the queue and counter taken from the returned stage state.
case send_pc_events(Events, Ref, Stage#stage{events = {Queue1, Counter}}) of
{noreply, #stage{events = {Queue2, Counter1}} = Stage1} ->
take_pc_events(Queue2, Counter1, Stage1);
{noreply, #stage{events = {Queue2, Counter1}} = Stage1, hibernate} ->
take_pc_events(Queue2, Counter1, Stage1);
{stop, _, _} = Stop ->
Stop
end;
{empty, Queue1} ->
{noreply, Stage#stage{events = {Queue1, Counter}}}
end;
%% No outstanding demand: stash queue and counter back into the stage.
take_pc_events(Queue, Counter, Stage) ->
{noreply, Stage#stage{events = {Queue, Counter}}}.
%% Fetches option Key from the proplist, requiring the value to be a
%% list. On success returns the value and the options with Key removed.
validate_list(Opts, Key, Default) ->
    case proplists:get_value(Key, Opts, Default) of
        Value when is_list(Value) ->
            {ok, Value, proplists:delete(Key, Opts)};
        Other ->
            {error, io_lib:format("expected ~p to be a list, got: ~p", [Key, Other])}
    end.
%% Fetches option Key from the proplist, requiring the value to be one
%% of the allowed Values. On success returns the value and the options
%% with Key removed.
validate_in(Opts, Key, Default, Values) ->
    Value = proplists:get_value(Key, Opts, Default),
    case lists:member(Value, Values) of
        true ->
            {ok, Value, proplists:delete(Key, Opts)};
        false ->
            {error, io_lib:format("expected ~p to be one of ~p, got: ~p", [Key, Values, Value])}
    end.
%% Fetches option Key from the proplist, requiring an integer in the
%% inclusive range Min..Max (Max may be the atom 'infinity', which term
%% order makes greater than any integer). When Infinity is true, the
%% literal atom 'infinity' is also accepted and Opts is returned as-is
%% (mirroring the original behavior of not deleting the key).
validate_integer(Opts, Key, Default, Min, Max, Infinity) ->
    Value = proplists:get_value(Key, Opts, Default),
    case Value of
        infinity when Infinity =:= true ->
            {ok, Value, Opts};
        _ when not is_integer(Value) ->
            ErrorMsg = "expected ~p to be an integer, got: ~p",
            {error, io_lib:format(ErrorMsg, [Key, Value])};
        _ when Value < Min ->
            ErrorMsg = "expected ~p to be equal to or greater than ~p, got: ~p",
            {error, io_lib:format(ErrorMsg, [Key, Min, Value])};
        _ when Value > Max ->
            ErrorMsg = "expected ~p to be equal to or less than ~p, got: ~p",
            {error, io_lib:format(ErrorMsg, [Key, Max, Value])};
        _ ->
            {ok, Value, proplists:delete(Key, Opts)}
    end.
%% Succeeds only when no unconsumed options remain after validation.
validate_no_opts([]) -> ok;
validate_no_opts(Remaining) -> {error, {badarg, Remaining}}.
%% True for exit reasons considered a clean shutdown: normal, shutdown,
%% or {shutdown, Term}. Transient subscriptions do not stop the stage
%% for these reasons.
is_transient_shutdown(Reason) ->
    case Reason of
        normal -> true;
        shutdown -> true;
        {shutdown, _} -> true;
        _ -> false
    end.
%% Returns this process's registered name for log messages, falling
%% back to the pid when the process is not registered.
self_name() ->
    Info = process_info(self(), registered_name),
    case Info of
        {registered_name, Name} when is_atom(Name) -> Name;
        _ -> self()
    end.
%% Splits Events into batches of at most (Max - Min) events while
%% tracking two demand counters: OldDemand guards against producers
%% sending more events than were asked for; NewDemand decides when the
%% demand has dropped to Min so that the batch carries an "ask" amount
%% re-priming the producer up to Max. Returns the remaining demand and
%% a list of {Batch, EventsToAsk} pairs.
split_batches(Events, From, Min, Max, Demand) ->
split_batches(Events, From, Min, Max, Demand, Demand, []).
split_batches([], _From, _Min, _Max, _OldDemand, NewDemand, Batches) ->
{NewDemand, lists:reverse(Batches)};
split_batches(Events, From, Min, Max, OldDemand, NewDemand, Batches) ->
{NewEvents, Batch, BatchSize} = split_events(Events, Max - Min, 0, []),
%% Clamp the batch size to the demand actually outstanding; receiving
%% more events than asked is a producer bug and is logged.
{OldDemand1, BatchSize1} =
case OldDemand - BatchSize of
Diff when Diff < 0 ->
ErrorMsg = "gen_stage consumer ~tp has received ~tp events in excess from: ~tp~n",
error_logger:error_msg(ErrorMsg, [self_name(), abs(Diff), From]),
{0, OldDemand};
Diff ->
{Diff, BatchSize}
end,
% In case we've reached min, we will ask for more events
{NewDemand1, BatchSize2} =
case NewDemand - BatchSize1 of
Diff1 when Diff1 =< Min ->
{Max, Max - Diff1};
Diff1 ->
{Diff1, 0}
end,
split_batches(NewEvents, From, Min, Max, OldDemand1, NewDemand1, [{Batch, BatchSize2} | Batches]).
%% Takes at most Limit events off the front of the list. Returns the
%% remaining events, the taken prefix in original order, and how many
%% events were actually taken.
split_events(Remaining, Limit, Limit, Taken) ->
    {Remaining, lists:reverse(Taken), Limit};
split_events([], _Limit, Count, Taken) ->
    {[], lists:reverse(Taken), Count};
split_events([Event | Rest], Limit, Count, Taken) ->
    split_events(Rest, Limit, Count + 1, [Event | Taken]).
%% Resolves any supported server reference (pid, locally registered
%% name, {global, Name}, {via, Mod, Name}, or {Name, Node}) to a pid or
%% 'undefined'. A {Name, Node} pair pointing at another node is returned
%% as-is so it can be used as a send target directly.
%%
%% Improvement: the {via, Mod, Name} clause used apply/3 where the
%% direct Mod:whereis_name(Name) call form is the idiomatic equivalent.
whereis_server(Pid) when is_pid(Pid) ->
    Pid;
whereis_server(Name) when is_atom(Name) ->
    erlang:whereis(Name);
whereis_server({global, Name}) ->
    global:whereis_name(Name);
whereis_server({via, Mod, Name}) ->
    Mod:whereis_name(Name);
whereis_server({Name, Local}) when is_atom(Name) andalso Local =:= node() ->
    erlang:whereis(Name);
whereis_server({Name, Node} = Server) when is_atom(Name) andalso is_atom(Node) ->
    Server.
%% @doc Parse CSV file into a nested list of lines and fields.
%% @author <NAME> <<EMAIL>>
%% Copyright 2010-2013 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(z_csv_parser).
-author("<NAME> <<EMAIL>>").
-export([
inspect_file/1,
inspect_data/1,
scan_lines/1,
scan_lines/2,
parse_line/2,
cleanup_field/1
]).
-define(CSV_CHUNK_SIZE, 32*1024).
% -type sep() :: $, | $; | $\t.
-type sep() :: 44 | 59 | 9.
-type line() :: list( binary() ).
-type lines() :: list( line() ).
-export_type([ sep/0, line/0, lines/0 ]).
%% @doc Read the first chunk of Filename and inspect it for the header
%% row and the most likely column separator.
-spec inspect_file( file:filename_all() ) -> {ok, line(), sep()} | {error, invalid_csv_file | term()}.
inspect_file(Filename) ->
    case file:open(Filename, [read, binary]) of
        {ok, Device} ->
            %% Never read more than one chunk, even for huge files.
            ToRead = min(?CSV_CHUNK_SIZE, filelib:file_size(Filename)),
            ReadResult = file:read(Device, ToRead),
            _ = file:close(Device),
            case ReadResult of
                {ok, Data} -> inspect_data(Data);
                {error, _Reason} = Error -> Error
            end;
        {error, _Reason} = Error ->
            Error
    end.
%% @doc Check if the first row is made up of column headers.
%% The file must have at least a name and a category column.
%% The separator is guessed by parsing the first line with tab, comma
%% and semicolon and picking whichever yields the most columns.
-spec inspect_data( binary() ) -> {ok, line(), sep()} | {error, invalid_csv_file}.
inspect_data(<<>>) ->
{error, invalid_csv_file};
inspect_data(B0) ->
B = utf8(B0),
case fetch_line(B) of
{ok, Line} ->
{ok, Tabs} = parse_line(Line, $\t),
{ok, Comma} = parse_line(Line, $,),
{ok, SCol} = parse_line(Line, $;),
%% Sort {ColumnCount, Columns, Sep} triples; the last (largest
%% column count) wins.
{_, Cols, Sep} = lists:last(lists:sort([
{length(Tabs), Tabs, $\t},
{length(Comma), Comma, $,},
{length(SCol), SCol, $;}
])),
{ok, [ z_string:trim( z_convert:to_binary(C) ) || C <- Cols ], Sep};
_ ->
lager:info("Invalid CSV file, could not fetch line with column defs (is there a LF or CR at the end?)"),
{error, invalid_csv_file}
end.
%% Ensures the binary is valid UTF-8. If invalid byte sequences are
%% found, the whole input is assumed to be Windows-1250 and converted;
%% if that conversion fails too, the invalid bytes are simply stripped.
utf8(S) ->
case mochiutf8:valid_utf8_bytes(S) of
S ->
%% Already valid: valid_utf8_bytes returned the input unchanged.
S;
Stripped ->
case eiconv:convert("Windows-1250", S) of
{ok, Utf8} -> Utf8;
{error, _} -> Stripped
end
end.
%% Extracts the first line (up to the first CR or LF) from a binary as
%% a list of bytes. Returns false when no line terminator is present.
fetch_line(Bin) ->
    fetch_line(Bin, []).

fetch_line(<<>>, _Acc) ->
    false;
fetch_line(<<$\n, _/binary>>, Acc) ->
    {ok, lists:reverse(Acc)};
fetch_line(<<$\r, _/binary>>, Acc) ->
    {ok, lists:reverse(Acc)};
fetch_line(<<C, Rest/binary>>, Acc) ->
    fetch_line(Rest, [C | Acc]).
%% @doc Parse a line into its columns, using a character a separator.
%% Accepts the line either as a binary or as a character list and
%% dispatches to the matching implementation.
parse_line(Line, Sep) when is_binary(Line), is_integer(Sep) ->
parse_line_binary(Line, Sep, <<>>, []);
parse_line(Line, Sep) when is_list(Line), is_integer(Sep) ->
parse_line(Line, Sep, [], []).
%% Splits a character-list line on Sep. The current field is collected
%% in reverse; each finished field is cleaned (trim/unquote/unescape)
%% before being added to the (reversed) field list.
parse_line([], _Sep, Field, Fields) ->
    {ok, lists:reverse([z_csv_parser:cleanup_field(lists:reverse(Field)) | Fields])};
parse_line([Sep | Rest], Sep, Field, Fields) ->
    parse_line(Rest, Sep, [], [z_csv_parser:cleanup_field(lists:reverse(Field)) | Fields]);
parse_line([C | Rest], Sep, Field, Fields) ->
    parse_line(Rest, Sep, [C | Field], Fields).
%% Splits a binary line on Sep, decoding UTF-8 codepoints one at a time.
%% Each finished field is cleaned (trim/unquote/unescape) before being
%% added to the (reversed) field list.
parse_line_binary(<<>>, _Sep, Field, Fields) ->
    {ok, lists:reverse([z_csv_parser:cleanup_field(Field) | Fields])};
parse_line_binary(<<Sep, Rest/binary>>, Sep, Field, Fields) ->
    parse_line_binary(Rest, Sep, <<>>, [z_csv_parser:cleanup_field(Field) | Fields]);
parse_line_binary(<<C/utf8, Rest/binary>>, Sep, Field, Fields) ->
    parse_line_binary(Rest, Sep, <<Field/binary, C/utf8>>, Fields).
%% @doc Scan the file (or device) and return lines with fields.
%% Defaults the field separator to a comma.
-spec scan_lines( file:filename() | pid() ) -> lines().
scan_lines(DeviceOrFilename) ->
scan_lines(DeviceOrFilename, $,).
%% @doc Scan the file (or device) and return lines with fields.
%% When given a filename the file is opened (and closed) here; an
%% already-opened io device is scanned as-is and left open.
-spec scan_lines( file:filename() | pid(), sep() ) -> lines().
scan_lines(Filename, FieldSep) when is_list(Filename); is_binary(Filename) ->
{ok, Device} = file:open(Filename, [read, binary, {encoding, latin1}]),
Res = scan_lines(Device, FieldSep, <<>>, 0, [[]], <<>>, false),
_ = file:close(Device),
Res;
scan_lines(Device, FieldSep) ->
scan_lines(Device, FieldSep, <<>>, 0, [[]], <<>>, false).
%% Chunked CSV scanner. Walks Chunk byte-by-byte (Index is the scan
%% position), accumulating parsed rows (reversed, with a sentinel empty
%% row in front) in Acc. Remainder carries the bytes of a field that
%% spans chunk boundaries; Quoted tells whether the scanner is currently
%% inside a double-quoted field. Fs is the field separator byte.
scan_lines(Device, Fs, Chunk, Index, Acc, Remainder, Quoted) ->
case {Chunk, Quoted} of
% Chunk is empty. Get the next chunk from the file.
%% (Also triggered when the chunk ends in the middle of a possible
%% escape, closing quote or CRLF, so the pair is never split.)
{EmptyChunk, _}
when
EmptyChunk =:= <<>>;
EmptyChunk =:= <<$\\>>;
(EmptyChunk =:= <<$">> andalso Quoted);
EmptyChunk =:= <<13>> ->
case io:get_chars(Device, "", ?CSV_CHUNK_SIZE) of
eof ->
All = case Remainder of
<<>> ->
Acc;
_ ->
case EmptyChunk of
<<$">> -> append_last_field(<<$">>, Remainder, Acc);
_ -> append_last_field(<<>>, Remainder, Acc)
end
end,
%% Remove lastly added empty line
All2 = case All of
[[<<>>]|Rest] -> Rest;
[[]|Rest] -> Rest;
_ -> All
end,
lists:reverse(All2);
{error, E} ->
throw({error, E});
NextChunk ->
NewChunk = case EmptyChunk of
<<>> -> NextChunk;
_ -> <<EmptyChunk/binary, NextChunk/binary>>
end,
scan_lines(Device, Fs, NewChunk, 0, Acc, Remainder, Quoted)
end;
% Escaped characters
{<<_Field:Index/binary, $\\, 13, 10, _Rest/binary>>, _} ->
scan_lines(Device, Fs, Chunk, Index + 3, Acc, Remainder, Quoted);
{<<_Field:Index/binary, $\\, _, _Rest/binary>>, _} ->
scan_lines(Device, Fs, Chunk, Index + 2, Acc, Remainder, Quoted);
% Quoted ----
%% Inside quotes: "" is an escaped quote, a single " closes the
%% field; separators and newlines are plain data.
{<<_Field:Index/binary, $", $", _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 2, Acc, Remainder, true);
{<<_Field:Index/binary, $", _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, false);
{<<_Field:Index/binary, 13, 10, _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 2, Acc, Remainder, true);
{<<_Field:Index/binary, 13, _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, true);
{<<_Field:Index/binary, 10, _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, true);
{<<_Field:Index/binary, _, _Rest/binary>>, true} ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, true);
% Unquoted ----
%% A quote only opens a quoted field at the very start of a field.
{<<_Field:Index/binary, $", _Rest/binary>>, false} when Index =:= 0 andalso Remainder =:= <<>> ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, true);
{<<Field:Index/binary, 13, 10, Rest/binary>>, false} ->
scan_lines(Device, Fs, Rest, 0, [ [] | append_last_field(Remainder, Field, Acc)], <<>>, false);
{<<Field:Index/binary, 13, Rest/binary>>, false} ->
scan_lines(Device, Fs, Rest, 0, [ [] | append_last_field(Remainder, Field, Acc)], <<>>, false);
{<<Field:Index/binary, 10, Rest/binary>>, false} ->
scan_lines(Device, Fs, Rest, 0, [ [] | append_last_field(Remainder, Field, Acc)], <<>>, false);
{<<Field:Index/binary, Fs, Rest/binary>>, false} ->
scan_lines(Device, Fs, Rest, 0, append_field(Remainder, Field, Acc), <<>>, false);
{<<_Field:Index/binary, _, _Rest/binary>>, false} ->
scan_lines(Device, Fs, Chunk, Index + 1, Acc, Remainder, false);
% Long line; add to remainder.
{LongLine, _} ->
scan_lines(Device, Fs, <<>>, 0, Acc, <<Remainder/binary, LongLine/binary>>, Quoted)
end.
%% Prepends a finished field onto the current (reversed) row. Prefix
%% holds any field bytes carried over from a previous chunk and is
%% glued in front of Field before cleanup.
append_field(<<>>, Field, [Row | Rows]) ->
    [[cleanup_field(Field) | Row] | Rows];
append_field(Prefix, Field, [Row | Rows]) ->
    [[cleanup_field(<<Prefix/binary, Field/binary>>) | Row] | Rows].

%% Closes the current row: appends the final field and restores the
%% row's field order.
append_last_field(Prefix, Field, Acc) ->
    [Row | Rows] = append_field(Prefix, Field, Acc),
    [lists:reverse(Row) | Rows].
%% Remove any quotes and whitespace around the fields.
%% Character lists are first converted to binaries. Fields starting with
%% a double quote are unquoted before trimming and unescaping with
%% quote-aware rules; all other fields are just trimmed and unescaped.
%% The result is forced back to valid UTF-8.
cleanup_field(L) when is_list(L) ->
cleanup_field(z_convert:to_binary(L));
cleanup_field(<<>>) ->
<<>>;
cleanup_field(<<$", _/binary>> = S) ->
utf8(unescape(z_convert:to_binary(z_string:trim(z_string:unquote(S))), true));
cleanup_field(S) ->
utf8(unescape(z_convert:to_binary(z_string:trim(S)), false)).
%% Expands backslash escape sequences (\\, \n, \r, \t, \', \") in a
%% field. Inside a quoted field ("" style), a doubled double-quote also
%% collapses to a single quote. Unrecognized escapes are kept verbatim.
unescape(S, IsQuoted) ->
    unescape(S, <<>>, IsQuoted).

unescape(<<>>, Acc, _IsQuoted) ->
    Acc;
unescape(<<$\\, C, Rest/binary>>, Acc, IsQuoted)
        when C =:= $\\; C =:= $n; C =:= $r; C =:= $t; C =:= $'; C =:= $" ->
    unescape(Rest, <<Acc/binary, (escape_char(C))>>, IsQuoted);
unescape(<<$", $", Rest/binary>>, Acc, true) ->
    unescape(Rest, <<Acc/binary, $">>, true);
unescape(<<C, Rest/binary>>, Acc, IsQuoted) ->
    unescape(Rest, <<Acc/binary, C>>, IsQuoted).

%% Maps an escape letter to the byte it denotes.
escape_char($\\) -> $\\;
escape_char($n) -> $\n;
escape_char($r) -> $\r;
escape_char($t) -> $\t;
escape_char($') -> $';
escape_char($") -> $".
%Copyright [2012] [<NAME>]
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
% http://www.apache.org/licenses/LICENSE-2.0
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
-module(wsock_framing_spec).
-include_lib("espec/include/espec.hrl").
-include_lib("hamcrest/include/hamcrest.hrl").
-include("wsock.hrl").
%-compile([export_all]).
-define(OP_CODE_CONT, 0).
-define(OP_CODE_TEXT, 1).
-define(OP_CODE_BIN, 2).
-define(OP_CODE_CLOSE, 8).
-define(OP_CODE_PING, 9).
-define(OP_CODE_PONG, 10).
spec() ->
describe("to_binary", fun() ->
describe("not payload fields", fun() ->
describe("opcode", fun() ->
before_all(fun() ->
spec_set(validator, fun(OpcodeType, OpcodeBinType) ->
Frame = wsock_framing:frame("aas", [{opcode, OpcodeType}]),
<<_:4, Opcode:4, _/binary>> = wsock_framing:to_binary(Frame),
assert_that(Opcode, is(OpcodeBinType))
end)
end),
it("should set 'opcode' to close if close frame", fun() ->
(spec_get(validator))(close, ?OP_CODE_CLOSE)
end),
it("should set 'opcode' to pong if pong frame", fun() ->
(spec_get(validator))(pong, ?OP_CODE_PONG)
end),
it("should set 'opcode' to ping if ping frame", fun() ->
(spec_get(validator))(ping, ?OP_CODE_PING)
end),
it("should set 'opcode' to continuation if continuation frame", fun() ->
(spec_get(validator))(continuation, ?OP_CODE_CONT)
end),
it("should set 'opcode' to binary if binary frame", fun() ->
(spec_get(validator))(binary, ?OP_CODE_BIN)
end),
it("should set 'opcode' to text if text frame", fun() ->
(spec_get(validator))(text, ?OP_CODE_TEXT)
end)
end),
describe("mask", fun() ->
before_all(fun() ->
spec_set(validator, fun(Options, ExpectedMask) ->
Frame = wsock_framing:frame("asas", Options),
BinFrame = wsock_framing:to_binary(Frame),
<<_:8, Mask:1, _:7, _/binary>> = BinFrame,
assert_that(Mask, is(ExpectedMask))
end)
end),
it("should set 'mask' to 0 if unmasked data", fun() ->
(spec_get(validator))([{opcode, text}], 0)
end),
it("should set 'mask' to 1 if masked data", fun() ->
(spec_get(validator))([{opcode, text}, mask], 1)
end)
end),
describe("mask key", fun() ->
it("should include masking key when 'mask' option is set", fun() ->
Frame = wsock_framing:frame("pesto", [mask, {opcode, text}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:16, MaskKey:32, _/binary>> = BinFrame,
assert_that(MaskKey, is(Frame#frame.masking_key))
end),
it("should not include masking key when 'mask' option is not set", fun() ->
Frame = wsock_framing:frame("conasa", [{opcode, text}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:16, Payload/binary>> = BinFrame,
assert_that(Payload, is(Frame#frame.payload))
end)
end),
describe("fin", fun() ->
before_all(fun() ->
spec_set(validator, fun(Options, Expected) ->
Frame = wsock_framing:frame("asas", Options),
BinFrame = wsock_framing:to_binary(Frame),
<<Fin:1, _/bits>> = BinFrame,
assert_that(Fin, is(Expected))
end)
end),
it("should unset 'fin' bit if 'fin' option is not set", fun() ->
(spec_get(validator))([{opcode, text}], 0)
end),
it("should set 'fin' bit if 'fin' option is set", fun() ->
(spec_get(validator))([mask, {opcode, text}], 0)
end)
end),
describe("rsv", fun() ->
it("should set all 3 rsv bits to 0", fun() ->
Frame = wsock_framing:frame("asasda", [{opcode, text}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:1, Rsv:3, _/bits>> = BinFrame,
assert_that(Rsv, is(0))
end)
end),
describe("payload length", fun()->
it("should set payload length of data with <= 125 bytes", fun() ->
Frame = wsock_framing:frame("asdasd", [{opcode, text}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:9, PayloadLen:7, _/binary>> = BinFrame,
assert_that(PayloadLen, is(Frame#frame.payload_len))
end),
it("should set extended payload length of data with > 125 and <= 65536 bytes", fun() ->
Frame = wsock_framing:frame(crypto:rand_bytes(300), [{opcode, binary}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:9, PayloadLen:7, ExtendedPLen:16, _/binary>> = BinFrame,
assert_that(PayloadLen, is(Frame#frame.payload_len)),
assert_that(ExtendedPLen, is(Frame#frame.extended_payload_len))
end),
it("should set extended payload length cont. of data with > 65536 bytes ", fun() ->
Frame = wsock_framing:frame(crypto:rand_bytes(70000), [{opcode, binary}]),
BinFrame = wsock_framing:to_binary(Frame),
<<_:9, PayloadLen:7, ExtendedPLen:64, _/binary>> = BinFrame,
assert_that(PayloadLen, is(Frame#frame.payload_len)),
assert_that(ExtendedPLen, is(Frame#frame.extended_payload_len_cont))
end)
end)
end),
describe("payload", fun()->
before_all(fun() ->
spec_set(validator, fun(Size, Options, Callback) ->
Frame = wsock_framing:frame(crypto:rand_bytes(Size), Options),
BinFrame = wsock_framing:to_binary(Frame),
Payload = Callback(BinFrame),
assert_that(Payload, is(Frame#frame.payload))
end)
end),
describe("length <= 125 bytes", fun() ->
it("should set unmasked data", fun() ->
(spec_get(validator))(100, [{opcode, text}], fun(<<_:16, Payload/binary>>) ->
Payload
end)
end),
it("should set masked data", fun()->
(spec_get(validator))(100, [mask, {opcode, text}], fun(<<_:48, Payload/binary>>) ->
Payload
end)
end)
end),
describe("length 125 > and <= 65536 bytes", fun() ->
it("should set unmasked data", fun() ->
(spec_get(validator))(300, [{opcode, text}], fun(<<_:32, Payload/binary>>) ->
Payload
end)
end),
it("should set masked data", fun()->
(spec_get(validator))(300, [mask, {opcode, text}], fun(<<_:64, Payload/binary>>) ->
Payload
end)
end)
end),
describe("length > 65536 bytes", fun() ->
it("should set unmasked data", fun() ->
(spec_get(validator))(70000, [{opcode, text}], fun(<<_:80, Payload/binary>>) ->
Payload
end)
end),
it("should set masked data", fun()->
(spec_get(validator))(70000, [mask, {opcode, text}], fun(<<_:112, Payload/binary>>) ->
Payload
end)
end)
end)
end)
end),
describe("from_binary", fun() ->
before_all(fun() ->
spec_set(frame_builder, fun(Fin, Rsv, Opcode, Mask, Data) ->
ByteSize = byte_size(Data),
DataLen = case ByteSize of
X when X =< 125 -> X;
X when X =< 65536 -> 126;
X when X > 65536 -> 127
end,
BinFrame = get_binary_frame(Fin, Rsv, Rsv, Rsv, Opcode, Mask, DataLen, ByteSize, Data),
[Frame] = wsock_framing:from_binary(BinFrame),
{BinFrame, Frame}
end)
end),
describe("non payload fields", fun() ->
describe("fin", fun() ->
it("should set fin property to fin bit", fun() ->
Data = crypto:rand_bytes(20),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, 1, 0, DataLen, 0, Data),
[Frame] = wsock_framing:from_binary(BinFrame),
assert_that(Frame#frame.fin, is(1))
end)
end),
describe("rsv", fun() ->
before_all(fun() ->
spec_set(generator, fun(Rsv1, Rsv2, Rsv3)->
Data = crypto:rand_bytes(20),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, Rsv1, Rsv2, Rsv3, 1, 0, DataLen, 0, Data),
[Frame] = wsock_framing:from_binary(BinFrame),
Frame
end)
end),
it("should set rsv1 to rsv1 bit", fun() ->
Frame = (spec_get(generator))(0, 0, 0),
assert_that(Frame#frame.rsv1, is(0))
end),
it("should set rsv2 to rsv2 bit", fun() ->
Frame = (spec_get(generator))(0, 0, 0),
assert_that(Frame#frame.rsv2, is(0))
end),
it("should set rsv3 to rsv3 bit", fun() ->
Frame = (spec_get(generator))(0, 0, 0),
assert_that(Frame#frame.rsv3, is(0))
end)
end),
describe("opcode", fun() ->
before_all(fun() ->
spec_set(validator, fun(OpCode) ->
Data = crypto:rand_bytes(20),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, OpCode, 0, DataLen, 0, Data),
[Frame] = wsock_framing:from_binary(BinFrame),
assert_that(Frame#frame.opcode, is(OpCode))
end)
end),
it("should set opcode to continuation if continuation frame", fun() ->
(spec_get(validator))(?OP_CODE_CONT)
end),
it("should set opcode to text if text frame", fun() ->
(spec_get(validator))(?OP_CODE_TEXT)
end),
it("should set opcode to binary if binary frame", fun() ->
(spec_get(validator))(?OP_CODE_BIN)
end),
it("should set opcode to ping if ping frame", fun() ->
(spec_get(validator))(?OP_CODE_PING)
end),
it("should set opcode to pong if pong frame", fun() ->
(spec_get(validator))(?OP_CODE_PONG)
end),
it("should set opcode to close if close frame", fun() ->
(spec_get(validator))(?OP_CODE_CLOSE)
end)
end),
describe("mask", fun() ->
before_all(fun() ->
spec_set(validator, fun(Mask) ->
{_BinFrame, Frame} = (spec_get(frame_builder))(0, 0, 1, Mask, crypto:rand_bytes(20)),
assert_that(Frame#frame.mask, is(Mask))
end)
end),
it("should set mask if masked data", fun() ->
(spec_get(validator))(1)
end),
it("should not set mask if unmasked data", fun() ->
(spec_get(validator))(0)
end)
end),
describe("payoad lenght", fun()->
before_all(fun() ->
spec_set(frame, fun(Size) ->
{_BinFrame, Frame} = (spec_get(frame_builder))(0, 0, 2, 0, crypto:rand_bytes(Size)),
Frame
end)
end),
it("set payload length of data with <= 125 bytes", fun() ->
Frame = (spec_get(frame))(100),
assert_that(Frame#frame.payload_len, is(100))
end),
it("set payload length of data with > 125 <= 65536 bytes", fun() ->
Frame = (spec_get(frame))(200),
assert_that(Frame#frame.payload_len, is(126)),
assert_that(Frame#frame.extended_payload_len, is(200))
end),
it("set payload length of data with > 65536 bytes", fun() ->
Frame = (spec_get(frame))(70000),
assert_that(Frame#frame.payload_len, is(127)),
assert_that(Frame#frame.extended_payload_len_cont, is(70000))
end)
end),
describe("masking key", fun() ->
before_all(fun() ->
spec_set(frame, fun(Mask) ->
(spec_get(frame_builder))(0, 0, 2, Mask, crypto:rand_bytes(20))
end)
end),
it("should be undefined if data is unmasked", fun() ->
{_, Frame} = (spec_get(frame))(0),
assert_that(Frame#frame.masking_key, is(undefined))
end),
it("should be set if data is masked", fun() ->
{BinFrame, Frame} = (spec_get(frame))(1),
<<_:2/binary, MK:32/integer, _/binary>> = BinFrame,
assert_that(Frame#frame.masking_key, is_not(undefined)),
assert_that(Frame#frame.masking_key, is(MK))
end)
end)
end),
describe("payload", fun() ->
before_all(fun() ->
spec_set(validator, fun(Mask, Size) ->
Data = crypto:rand_bytes(Size),
{_BinFrame, Frame} = (spec_get(frame_builder))(0, 0, 2, Mask, Data),
assert_that(Frame#frame.payload, is(Data))
end)
end),
describe("when payload length is 0", fun() ->
it("should set fragmented to true", fun() ->
{_, Frame} = (spec_get(frame_builder))(0, 0, 0, 0, <<>>),
assert_that(Frame#frame.fragmented, is(false))
end)
end),
describe("when payload length <= 125", fun()->
it("should set unmasked data", fun()->
(spec_get(validator))(0 ,100)
end),
it("should unmask masked data", fun() ->
(spec_get(validator))(1 ,100)
end)
end),
describe("when payload length > 125 and <= 65536 bytes", fun()->
it("should set unmasked data", fun()->
(spec_get(validator))(0 , 300)
end),
it("should unmask masked data", fun() ->
(spec_get(validator))(1 , 300)
end)
end),
describe("when payload length > 65536 bytes", fun()->
it("should set unmasked data", fun()->
(spec_get(validator))(0 , 70000)
end),
it("should unmask masked data", fun() ->
(spec_get(validator))(1 , 70000)
end)
end)
end),
describe("when binary is composed from various frames", fun() ->
it("should return a list of frame records", fun() ->
Text1 = "Jankle jankle",
Payload1 = list_to_binary(Text1),
PayloadLen1 = byte_size(Payload1),
Text2 = "Pasa pra casa",
Payload2 = list_to_binary(Text2),
PayloadLen2 = byte_size(Payload2),
BinFrame1 = get_binary_frame(0, 0, 0, 0, 1, 0, PayloadLen1, 0, Payload1),
BinFrame2 = get_binary_frame(1, 0, 0, 0, 0, 0, PayloadLen2, 0, Payload2),
BinFrames = <<BinFrame1/binary, BinFrame2/binary>>,
[Frame1, Frame2] = wsock_framing:from_binary(BinFrames),
assert_that(Frame1#frame.fin, is(0)),
assert_that(Frame1#frame.rsv1, is(0)),
assert_that(Frame1#frame.rsv2, is(0)),
assert_that(Frame1#frame.rsv3, is(0)),
assert_that(Frame1#frame.opcode, is(1)),
assert_that(Frame1#frame.mask, is(0)),
assert_that(Frame1#frame.payload_len, is(PayloadLen1)),
assert_that(Frame1#frame.payload, is(Payload1)),
assert_that(Frame2#frame.fin, is(1)),
assert_that(Frame2#frame.rsv1, is(0)),
assert_that(Frame2#frame.rsv2, is(0)),
assert_that(Frame2#frame.rsv3, is(0)),
assert_that(Frame2#frame.opcode, is(0)),
assert_that(Frame2#frame.mask, is(0)),
assert_that(Frame2#frame.payload_len, is(PayloadLen2)),
assert_that(Frame2#frame.payload, is(Payload2))
end)
end),
describe("when input data is fragmented", fun() ->
describe("when there's only 8 bits of data", fun() ->
it("should return a fragmented frame", fun() ->
Data = crypto:rand_bytes(20),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, 1, 0, DataLen, 0, Data),
<<Fragment:1/binary, _/binary>> = BinFrame,
[Frame] = wsock_framing:from_binary(Fragment),
assert_that(Frame#frame.fragmented, is(true)),
assert_that(Frame#frame.fin, is(1)),
assert_that(Frame#frame.rsv1, is(0)),
assert_that(Frame#frame.rsv2, is(0)),
assert_that(Frame#frame.rsv3, is(0)),
assert_that(Frame#frame.opcode, is(1)),
assert_that(Frame#frame.raw, is(<<>>))
end)
end),
describe("when there's only 16 bits of data", fun() ->
it("shoudl return a fragmented frame", fun() ->
Data = crypto:rand_bytes(20),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, 1, 0, DataLen, 0, Data),
<<Fragment:2/binary, _/binary>> = BinFrame,
[Frame] = wsock_framing:from_binary(Fragment),
assert_that(Frame#frame.fragmented, is(true)),
assert_that(Frame#frame.mask, is(0)),
assert_that(Frame#frame.payload_len, is(DataLen)),
assert_that(Frame#frame.raw, is(<<>>))
end)
end),
describe("when there's only 24 bits of data", fun() ->
describe("when payload length is extended", fun() ->
it("should not set the extended payload length", fun() ->
Data = crypto:rand_bytes(140),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, 1, 0, 126, DataLen, Data),
<<Fragment:3/binary, _/binary>> = BinFrame,
<<_:2/binary, LastFragment/binary>> = Fragment,
[Frame] = wsock_framing:from_binary(Fragment),
assert_that(Frame#frame.fragmented, is(true)),
assert_that(Frame#frame.payload_len, is(126)),
assert_that(Frame#frame.extended_payload_len, is(undefined)),
assert_that(Frame#frame.raw, is(LastFragment))
end)
end)
end),
describe("when new data is received", fun() ->
it("should complete the fragmented frame", fun() ->
Data = crypto:rand_bytes(140),
DataLen = byte_size(Data),
BinFrame = get_binary_frame(1, 0, 0, 0, 1, 0, 126, DataLen, Data),
<<FirstFragment:3/binary, SecondFragment/binary>> = BinFrame,
[FragmentedFrame] = wsock_framing:from_binary(FirstFragment),
[Frame] = wsock_framing:from_binary(SecondFragment, FragmentedFrame),
assert_that(Frame#frame.fragmented, is(false)),
assert_that(Frame#frame.payload, is(Data)),
assert_that(Frame#frame.raw, is(<<>>))
end)
end)
end)
end),
describe("frame", fun() ->
describe("when no options are passed", fun() ->
it("should unset fin", fun() ->
Frame = wsock_framing:frame("Foo bar"),
assert_that(Frame#frame.fin, is(0))
end),
it("should set opcode to text on text data", fun()->
Frame = wsock_framing:frame("Foo bar"),
assert_that(Frame#frame.opcode, is(1))
end),
it("should set opcode to binary on binary data", fun()->
Frame = wsock_framing:frame(<<"Foo bar">>),
assert_that(Frame#frame.opcode, is(2))
end),
it("should leave data unmasked", fun() ->
Data = "Fofito",
Frame = wsock_framing:frame(Data),
assert_that(Frame#frame.mask, is(0)),
assert_that(Frame#frame.payload, is(list_to_binary(Data))),
assert_that(Frame#frame.masking_key, is(undefined))
end)
end),
describe("not payload fields", fun()->
describe("fin", fun() ->
it("should set fin if fin option is present", fun()->
Frame = wsock_framing:frame("Foo bar", [fin]),
assert_that(Frame#frame.fin, is(1))
end),
it("should unset fin if fin option is not present", fun() ->
Frame = wsock_framing:frame("asdasda", []),
assert_that(Frame#frame.fin, is(0))
end)
end),
describe("rsv", fun() ->
it("should set all 3 'rsv' to 0", fun()->
Data = "Foo bar",
Frame = wsock_framing:frame(Data),
assert_that(Frame#frame.rsv1, is(0)),
assert_that(Frame#frame.rsv2, is(0)),
assert_that(Frame#frame.rsv3, is(0))
end)
end),
describe("opcode", fun() ->
before_all(fun() ->
spec_set(validator, fun(Options, Expected) ->
Frame = wsock_framing:frame("Foo bar", Options),
assert_that(Frame#frame.opcode, is(Expected))
end)
end),
it("should set opcode to text if opcode option is text", fun()->
(spec_get(validator))([{opcode, text}], 1)
end),
it("should set opcode to binary if opcode option is binary", fun()->
(spec_get(validator))([{opcode, binary}], 2)
end),
it("should set opcode to ping if opcode option is ping", fun()->
(spec_get(validator))([{opcode, ping}], 9)
end),
it("should set opcode to pong if opcode option is pong", fun() ->
(spec_get(validator))([{opcode, pong}], 10)
end),
it("should set opcode to close if opcode option is close", fun() ->
(spec_get(validator))([{opcode, close}], 8)
end),
it("should set opcode to continuation if opcode option is continuation", fun()->
(spec_get(validator))([{opcode, continuation}], 0)
end)
end),
describe("mask", fun() ->
it("should be unset if mask option is not present", fun() ->
Frame = wsock_framing:frame("asdasd", []),
assert_that(Frame#frame.mask, is(0))
end),
it("should set mask if mask option is present", fun() ->
Frame = wsock_framing:frame("assd", [mask]),
assert_that(Frame#frame.mask, is(1))
end)
end),
describe("masking key", fun() ->
it("should be unset if mask option is not present", fun() ->
Frame = wsock_framing:frame("asda", []),
assert_that(Frame#frame.masking_key, is(undefined))
end),
it("should be set if mask option is present", fun() ->
Frame = wsock_framing:frame("asads", [mask]),
assert_that(Frame#frame.masking_key, is_not(undefined))
end)
end),
describe("payload length", fun() ->
before_all(fun() ->
spec_set(validator, fun(Size, PL, EPL, EPLC) ->
Frame = wsock_framing:frame(crypto:rand_bytes(Size), []),
assert_that(Frame#frame.payload_len, is(PL)),
assert_that(Frame#frame.extended_payload_len, is(EPL)),
assert_that(Frame#frame.extended_payload_len_cont, is(EPLC))
end)
end),
it("should set payload length of data <= 125 bytes", fun() ->
(spec_get(validator))(100, 100, 0, 0)
end),
it("should set payload length of data > 125 and <= 65536 bytes", fun() ->
(spec_get(validator))(1000, 126, 1000, 0)
end),
it("should set payload length of data > 65536 bytes", fun() ->
(spec_get(validator))(70000, 127, 0, 70000)
end)
end)
end),
describe("payload", fun()->
it("should set unmasked payload", fun() ->
Data = crypto:rand_bytes(100),
Frame = wsock_framing:frame(Data, []),
assert_that(Frame#frame.payload, is(Data))
end),
it("should set masked payload", fun() ->
Data = crypto:rand_bytes(100),
Frame = wsock_framing:frame(Data, [mask]),
MaskedData = mask(Data, Frame#frame.masking_key, <<>>),
assert_that(Frame#frame.payload, is(MaskedData))
end)
end),
describe("control frames", fun()->
describe("close", fun() ->
it("should frame closes without payload", fun() ->
Frame = wsock_framing:frame([], [fin, {opcode, close}]),
assert_that(Frame#frame.fin, is(1)),
assert_that(Frame#frame.rsv1, is(0)),
assert_that(Frame#frame.rsv2, is(0)),
assert_that(Frame#frame.rsv3, is(0)),
assert_that(Frame#frame.opcode, is(8)),
assert_that(Frame#frame.mask, is(0))
end),
it("should frames closes with payload", fun() ->
Frame = wsock_framing:frame({1000, "Closing this shit"}, [mask, fin, {opcode, close}]),
%mask function also unmask the data
<<Code:16, Reason/binary>> = mask(
Frame#frame.payload,
Frame#frame.masking_key,
<<>>),
assert_that(Frame#frame.fin, is(1)),
assert_that(Frame#frame.rsv1, is(0)),
assert_that(Frame#frame.rsv2, is(0)),
assert_that(Frame#frame.rsv3, is(0)),
assert_that(Frame#frame.opcode, is(8)),
assert_that(Code, is(1000)),
assert_that(Frame#frame.mask, is(1)),
assert_that(binary_to_list(Reason), is("Closing this shit"))
end)
end),
it("should not allow payload size over 125 bytes")
end)
end).
%% @private Builds the binary wire representation of a single WebSocket
%% frame from its header fields and payload.
%% `Length' is the raw 7-bit payload-length field: for the special values
%% 126 and 127 the `ExtendedPayloadLength' argument is appended as a
%% 16-bit resp. 64-bit extended length field, otherwise it is ignored.
%% When `Mask' is 1 a random 32-bit masking key is generated, the payload
%% is XOR-masked with it and the key is embedded into the frame.
get_binary_frame(Fin, Rsv1, Rsv2, Rsv3, Opcode, Mask, Length, ExtendedPayloadLength, Payload) ->
    FixedHeader = <<Fin:1, Rsv1:1, Rsv2:1, Rsv3:1, Opcode:4, Mask:1, Length:7>>,
    Header = case Length of
                 126 -> <<FixedHeader/binary, ExtendedPayloadLength:16>>;
                 127 -> <<FixedHeader/binary, ExtendedPayloadLength:64>>;
                 _   -> FixedHeader
             end,
    case Mask of
        0 ->
            <<Header/binary, Payload/binary>>;
        1 ->
            <<MaskingKey:32>> = crypto:rand_bytes(4),
            MaskedPayload = mask(Payload, MaskingKey, <<>>),
            <<Header/binary, MaskingKey:32, MaskedPayload/binary>>
    end.
%get_random_string(Length) ->
% AllowedChars = "qwertyQWERTY1234567890",
% lists:foldl(fun(_, Acc) ->
% [lists:nth(random:uniform(length(AllowedChars)),
% AllowedChars)]
% ++ Acc
% end, [], lists:seq(1, Length)).
%% XOR-(un)masks Data with the 32-bit integer MaskKey as described in
%% RFC 6455, section 5.3. Masking is an involution: applying it twice
%% with the same key restores the original bytes, so this one function
%% serves both to mask and to unmask.
%% The binary is consumed in 4-byte words; a final leftover of 1-3 bytes
%% is XORed against the corresponding high-order bytes of the key.
%% Acc is the accumulator for the already-processed prefix (pass <<>>).
mask(<<Data:32, Rest/bits>>, MaskKey, Acc) ->
    T = Data bxor MaskKey,
    mask(Rest, MaskKey, <<Acc/binary, T:32>>);
mask(<<Data:24>>, MaskKey, Acc) ->
    %% 3 trailing bytes: use the 3 high-order bytes of the key
    <<MaskKey2:24, _/bits>> = <<MaskKey:32>>,
    T = Data bxor MaskKey2,
    <<Acc/binary, T:24>>;
mask(<<Data:16>>, MaskKey, Acc) ->
    %% 2 trailing bytes: use the 2 high-order bytes of the key
    <<MaskKey2:16, _/bits>> = <<MaskKey:32>>,
    T = Data bxor MaskKey2,
    <<Acc/binary, T:16>>;
mask(<<Data:8>>, MaskKey, Acc) ->
    %% 1 trailing byte: use the high-order byte of the key
    <<MaskKey2:8, _/bits>> = <<MaskKey:32>>,
    T = Data bxor MaskKey2,
    <<Acc/binary, T:8>>;
mask(<<>>, _MaskKey, Acc) ->
    Acc.
%% Copyright 2014 Opscode, Inc. All Rights Reserved.
%% @doc Provides functions to parse a TSV input.
%% Pretty simple format: each line should be separated by a new-line character (\n)
%% On each line, fields are separated by tabs (\t).
%% Fields cannot contain tabs.
%% Accepts strings as either lists or binaries (and passes them as such to the `fun' provided).
-module(tsv_parser).
-export([process_tsv_string_with/3,
process_tsv_string_with/4]).
-include("deliv_types.hrl").
-ifdef(TEST).
-compile([export_all]).
-endif.
-define(LINE_DELIMITER, "\n").
-define(FIELD_DELIMITER, "\t").
-define(UNDEFINED_VALUE, undefined).
%% @doc Parses the TSV-compatible string, and calls `Fun' for each line.
%% `Fun' should take 2 arguments:
%% - the 1st one is either `empty_line', or `eof' or `{line, Line}'
%% where `Line' is the list of the fields found
%% - the 2nd is the accumulator passed from call to call.
%% Missing fields are replaced by `undefined' atoms
%% The first call to `Fun' is made with Acc0
%% Returns whatever `Fun' returns on its last call (with `eof')
-spec process_tsv_string_with(String, Fun, _Acc0) -> Acc when
      String :: str_or_binary(),
      Fun :: fun((empty_line | {line, _Line}, Acc) -> Acc),
      Acc :: any().
process_tsv_string_with(String, Fun, Acc0) ->
    %% delegate with MinLength = 0, i.e. no padding of short lines
    process_tsv_string_with(String, Fun, Acc0, 0).
%% @doc Same as `process_tsv_string_with/3', but additionally right-pads
%% every parsed line with `undefined' atoms until it contains at least
%% `MinLength' fields.
-spec process_tsv_string_with(String, Fun, _Acc0, MinLength) -> Acc when
      String :: str_or_binary(),
      Fun :: fun((empty_line | {line, _Line}, Acc) -> Acc),
      Acc :: any(),
      MinLength :: integer().
process_tsv_string_with(String, Fun, Acc0, MinLength) when MinLength >= 0 ->
    %% keep re:split results in the same representation as the input:
    %% list input -> list fields, binary input -> binary fields
    ReOptions = case erlang:is_list(String) of
                    true  -> [{return, list}];
                    false -> []
                end,
    ProcessLine = fun(Line, Acc) ->
                          Fun(split_line(Line, ReOptions, MinLength), Acc)
                  end,
    FoldedAcc = lists:foldl(ProcessLine, Acc0, split_input(String, ReOptions)),
    %% the final call signals end-of-input to the caller's fun
    Fun(eof, FoldedAcc).
%% @private
%% Splits a single line into its fields.
%% A completely empty line yields `empty_line'; otherwise returns
%% `{line, Fields}' where empty fields are replaced by `undefined' and
%% the field list is right-padded with `undefined' atoms up to
%% `MinLength' entries.
split_line(Empty, _ReOptions, _MinLength) when Empty =:= ""; Empty =:= <<"">> -> empty_line;
split_line(Line, ReOptions, MinLength) ->
    RawFields = re:split(Line, ?FIELD_DELIMITER, ReOptions),
    Fields = [case Field of
                  ""     -> ?UNDEFINED_VALUE;
                  <<"">> -> ?UNDEFINED_VALUE;
                  _      -> Field
              end || Field <- RawFields],
    PadCount = max(0, MinLength - length(Fields)),
    {line, Fields ++ lists:duplicate(PadCount, ?UNDEFINED_VALUE)}.
%% @private
%% Splits the whole input into lines on the `\n' delimiter.
%% Returns a list of lines (lists or binaries depending on ReOptions).
split_input(Input, ReOptions) ->
    re:split(Input, ?LINE_DELIMITER, ReOptions).
%% -*- coding: utf-8 -*-
%% -------------------------------------------------------------------
%%
%% riak_kv_gcounter: A state based, grow only, convergent counter
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc
%% A G-Counter CRDT. A G-Counter is a Grow-only counter. Modeled as a list of
%% two-tuples. Each entry in the list is an {actor, count} pair. The value of the counter
%% is the sum of all entries in the list. An actor may only update its own entry. An entry
%% can only be incremented. Borrows liberally from argv0 and Justin Sheehy's vclock module
%% in implementation.
%%
%% @see riak_kv_pncounter
%%
%% @reference <NAME>, <NAME>, <NAME>, <NAME> (2011) A comprehensive study of
%% Convergent and Commutative Replicated Data Types. [http://hal.upmc.fr/inria-00555588/]
%%
%% @end
-module(riak_kv_gcounter).
-export([new/0, new/2, value/1, update/3, merge/2, equal/2, to_binary/1, from_binary/1]).
%% EQC API
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-export([gen_op/0, update_expected/3, eqc_state_value/1]).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([gcounter/0, gcounter_op/0]).
-opaque gcounter() :: [entry()].
%% Redeclaring gcounter() with -type for local use, to make dialyzer
%% accept specs for merge/2,3. Apparently, dialyzer only accepts
%% standalone variables for args having an opaque type spec, whereas
%% we call those functions with an expressly constructed lists.
-type gcounter_() :: [entry()].
-type entry() :: {Actor::term(), Count::pos_integer()}.
-type gcounter_op() :: increment | {increment, pos_integer()}.
%% @doc Create a new, empty `gcounter()'.
%% The empty list is the neutral element of merge/2: merging it with any
%% counter yields that counter unchanged.
-spec new() -> gcounter().
new() ->
    [].
%% @doc Creates a `gcounter()' that already carries an initial increment
%% of `Count' (which must be a positive integer) for actor `Id'.
-spec new(term(), pos_integer()) -> {ok, gcounter()}.
new(Id, Count) when is_integer(Count) andalso Count > 0 ->
    %% an empty counter ([]) plus one update for Id
    update({increment, Count}, Id, []).
%% @doc The single total value of a `gcounter()': the sum of all
%% per-actor partial counts.
-spec value(gcounter()) -> non_neg_integer().
value(GCnt) ->
    lists:foldl(fun({_Actor, Count}, Sum) -> Sum + Count end, 0, GCnt).
%% @doc Increments the entry for `Actor' in `GCnt': by 1 for the bare
%% `increment' op, or by `Amt' for `{increment, Amt}'. Returns the
%% updated counter as `{ok, gcounter()}'; a non-positive or non-integer
%% amount fails with function_clause.
-spec update(gcounter_op(), term(), gcounter()) ->
          {ok, gcounter()}.
update(increment, Actor, GCnt) ->
    %% plain increment is just an increment by 1
    update({increment, 1}, Actor, GCnt);
update({increment, Amount}, Actor, GCnt) when is_integer(Amount), Amount > 0 ->
    {ok, increment_by(Amount, Actor, GCnt)}.
%% @doc Merges two `gcounter()'s into one: the Least Upper Bound from the
%% CRDT literature. For actors present in both counters the maximum count
%% wins; actors present in only one counter are kept as-is.
-spec merge(gcounter_(), gcounter_()) -> gcounter_().
merge(CounterA, CounterB) ->
    merge(CounterA, CounterB, []).

%% @private Tail-recursive worker: walks CounterA, resolving each actor
%% against CounterB; Merged accumulates results in reverse order.
-spec merge(gcounter_(), gcounter_(), gcounter_()) -> gcounter_().
merge([], [], Merged) ->
    lists:reverse(Merged);
merge([], Remaining, Merged) ->
    lists:reverse(Merged, Remaining);
merge(Remaining, [], Merged) ->
    lists:reverse(Merged, Remaining);
merge([{Actor, CountA} = Entry | RestA], CounterB, Merged) ->
    case lists:keytake(Actor, 1, CounterB) of
        {value, {Actor, CountB}, RestB} ->
            %% actor known on both sides: keep the larger count
            merge(RestA, RestB, [{Actor, max(CountA, CountB)} | Merged]);
        false ->
            merge(RestA, CounterB, [Entry | Merged])
    end.
%% @doc Structural equality of two `gcounter()'s, ignoring entry order:
%% both must contain exactly the same {actor, count} pairs. Note this is
%% NOT value/1 equality -- two counters may sum to the same total while
%% being unequal here.
-spec equal(gcounter(), gcounter()) -> boolean().
equal(CounterA, CounterB) ->
    lists:sort(CounterA) =:= lists:sort(CounterB).
%% @private Performs the actual increment: adds `Amount' to `Actor''s
%% entry, creating the entry if the actor is not yet known. The updated
%% entry is moved to the head of the list.
-spec increment_by(pos_integer(), term(), gcounter()) -> gcounter().
increment_by(Amount, Actor, GCnt) when is_integer(Amount), Amount > 0 ->
    case lists:keytake(Actor, 1, GCnt) of
        false ->
            [{Actor, Amount} | GCnt];
        {value, {Actor, OldCount}, Rest} ->
            [{Actor, OldCount + Amount} | Rest]
    end.
%% wire-format constants: a one-byte type tag and a one-byte version
-define(TAG, 70).
-define(V1_VERS, 1).

%% @doc Serialises a `gcounter()' into a tagged, versioned binary:
%% <<Tag:8, Version:8, ExternalTermFormat/binary>>.
-spec to_binary(gcounter()) -> binary().
to_binary(GCnt) ->
    <<?TAG:8/integer, ?V1_VERS:8/integer, (term_to_binary(GCnt))/binary>>.

%% @doc Deserialises a binary produced by to_binary/1 back into a
%% `gcounter()'. Fails to match on a wrong tag or version byte.
-spec from_binary(binary()) -> gcounter().
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, EntriesBin/binary>>) ->
    binary_to_term(EntriesBin).
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
-ifdef(EQC).
%% EQC generator: picks either a bare increment or an increment by a
%% random positive amount.
gen_op() ->
    oneof([increment, {increment, gen_pos()}]).

%% generates a strictly positive integer (1 + |random int|)
gen_pos()->
    ?LET(X, int(), 1+abs(X)).

%% model of the counter for the EQC state machine: the expected value
%% after applying an op is the previous value plus the increment amount
update_expected(_ID, increment, Prev) ->
    Prev+1;
update_expected(_ID, {increment, By}, Prev) ->
    Prev+By;
update_expected(_ID, _Op, Prev) ->
    Prev.

%% the model state IS the expected integer value
eqc_state_value(S) ->
    S.

%% runs the generic CRDT convergence property against this module
eqc_value_test_() ->
    {timeout, 120, [?_assert(crdt_statem_eqc:prop_converge(0, 1000, ?MODULE))]}.
-endif.
%% a fresh counter is the empty list
new_test() ->
    ?assertEqual([], new()).

%% value/1 sums all partial counts; the empty counter has value 0
value_test() ->
    GC1 = [{1, 1}, {2, 13}, {3, 1}],
    GC2 = [],
    ?assertEqual(15, value(GC1)),
    ?assertEqual(0, value(GC2)).

%% repeated increments accumulate per actor; the most recently updated
%% actor ends up at the head of the list
update_increment_test() ->
    GC0 = new(),
    {ok, GC1} = update(increment, 1, GC0),
    {ok, GC2} = update(increment, 2, GC1),
    {ok, GC3} = update(increment, 1, GC2),
    ?assertEqual([{1, 2}, {2, 1}], GC3).

%% {increment, N} adds N in a single step
update_increment_by_test() ->
    GC0 = new(),
    {ok, GC} = update({increment, 7}, 1, GC0),
    ?assertEqual([{1, 7}], GC).
%% merging takes the per-actor maximum (actor <<"4">>: max(4, 3) = 4)
%% and keeps actors known on only one side
merge_test() ->
    GC1 = [{<<"1">>, 1},
           {<<"2">>, 2},
           {<<"4">>, 4}],
    GC2 = [{<<"3">>, 3},
           {<<"4">>, 3}],
    ?assertEqual([], merge(new(), new())),
    ?assertEqual([{<<"1">>,1},{<<"2">>,2},{<<"3">>,3},{<<"4">>,4}],
                 lists:sort( merge(GC1, GC2))).

%% left operand shorter than the right: leftover right entries survive
merge_less_left_test() ->
    GC1 = [{<<"5">>, 5}],
    GC2 = [{<<"6">>, 6}, {<<"7">>, 7}],
    ?assertEqual([{<<"5">>, 5},{<<"6">>,6}, {<<"7">>, 7}],
                 merge(GC1, GC2)).

%% right operand shorter than the left: leftover left entries survive
merge_less_right_test() ->
    GC1 = [{<<"6">>, 6}, {<<"7">>,7}],
    GC2 = [{<<"5">>, 5}],
    ?assertEqual([{<<"5">>,5},{<<"6">>,6}, {<<"7">>, 7}],
                 lists:sort( merge(GC1, GC2)) ).

%% same actor on both sides: the larger count wins (actor <<"1">>: 3)
merge_same_id_test() ->
    GC1 = [{<<"1">>, 2},{<<"2">>,4}],
    GC2 = [{<<"1">>, 3},{<<"3">>,5}],
    ?assertEqual([{<<"1">>, 3},{<<"2">>,4},{<<"3">>,5}],
                 lists:sort( merge(GC1, GC2)) ).
%% equality is structural and order-insensitive: GC1/GC3 are identical,
%% GC4 is a permutation of GC1, GC2 has different entries
equal_test() ->
    GC1 = [{1, 2}, {2, 1}, {4, 1}],
    GC2 = [{1, 1}, {2, 4}, {3, 1}],
    GC3 = [{1, 2}, {2, 1}, {4, 1}],
    GC4 = [{4, 1}, {1, 2}, {2, 1}],
    ?assertNot(equal(GC1, GC2)),
    ?assert(equal(GC1, GC3)),
    ?assert(equal(GC1, GC4)).

%% end-to-end scenario: two replicas updated independently by different
%% actors, merged, updated further, then merged again -- the final merge
%% must contain every actor with its highest observed count
usage_test() ->
    GC1 = new(),
    GC2 = new(),
    ?assert(equal(GC1, GC2)),
    {ok, GC1_1} = update({increment, 2}, a1, GC1),
    {ok, GC2_1} = update(increment, a2, GC2),
    GC3 = merge(GC1_1, GC2_1),
    {ok, GC2_2} = update({increment, 3}, a3, GC2_1),
    {ok, GC3_1} = update(increment, a4, GC3),
    {ok, GC3_2} = update(increment, a1, GC3_1),
    ?assertEqual([{a1, 3}, {a2, 1}, {a3, 3}, {a4, 1}],
                 lists:sort(merge(GC3_2, GC2_2))).
%% serialisation round-trip: a counter holding a mix of actor-id types
%% (binary, atom, string, complex term) and a bignum count must survive
%% to_binary/from_binary structurally intact
roundtrip_bin_test() ->
    GC = new(),
    {ok, GC1} = update({increment, 2}, <<"a1">>, GC),
    {ok, GC2} = update({increment, 4}, a2, GC1),
    {ok, GC3} = update(increment, "a4", GC2),
    {ok, GC4} = update({increment, 10000000000000000000000000000000000000000}, {complex, "actor", [<<"term">>, 2]}, GC3),
    Bin = to_binary(GC4),
    Decoded = from_binary(Bin),
    ?assert(equal(GC4, Decoded)).

%% stress test: 1000 random binary actor ids with random counts must
%% also round-trip through the binary format
lots_of_actors_test() ->
    GC = lists:foldl(fun(_, GCnt) ->
                             ActorLen = crypto:rand_uniform(1, 1000),
                             Actor = crypto:rand_bytes(ActorLen),
                             Cnt = crypto:rand_uniform(1, 10000),
                             {ok, GC} = riak_kv_gcounter:update({increment, Cnt}, Actor, GCnt),
                             GC
                     end,
                     new(),
                     lists:seq(1, 1000)),
    Bin = to_binary(GC),
    Decoded = from_binary(Bin),
    ?assert(equal(GC, Decoded)).

-endif.
% @copyright 2013-2015 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Allow to centrally schedule all messages involved in a certain
%% protocol execution and calculate the number of possible
%% interleavings for convenience (so one can guess how often the
%% protocol has to be run randomly to cover a good fraction of all
%% possible interleavings).
%% How it works: We use the same mechanism as trace_mpath and redirect
%% all messages to the scheduler. The scheduler maintains a mailbox
%% for each pair of (Src, Dest) to maintain FIFO ordering of channels,
%% but allowing all other possible message interleavings. It delivers
%% - steered by the given random seed - a single message for execution
%% and receives a confirmation when the corresponding message handler
%% is done. All messages generated by this message handler will arrive
%% at the scheduler before the on_handler_done message, as Erlang
%% provides FIFO on channels and all comm:sends are redirected
%% to the central scheduler.
%% How is the number of possible message interleavings calculated: At
%% each step, we know how many messages we can choose from. So the
%% number of different interleavings is the product of the
%% possibilities in each step.
%% How to detect the end of the protocol? When no more messages are
%% queued, the protocol is finished. You can easily wait for this with
%% the wait_for_end() function.
%% Why this cannot be done with the breakpoints that gen_components
%% provide? Breakpoints only have the execution of a message handler
%% under its control. If a message handler generates new messages in
%% the system, the VM directly enqueues them to the corresponding
%% mailboxes of the receivers. As there is no shuffeling in the
%% mailboxes of each receiver, there is only a limited amount of
%% message interleaving simulated using breakpoints. In contrast, with
%% proto_sched, messages generated by different message handlers can
%% overhaul each other, as long as they do not correspond to the same
%% communication channel, where FIFO ordering is maintained.
%% Fast tests for timeouts: For msg_delay, we could add a modification
%% so timeouts are just directly put in the pool of deliverable
%% messages. So this would simulate a long lasting execution in a
%% short timeframe (time compression). Actually msg_delay events
%% somehow must be sorted according to the time they 'can' be
%% delivered? if req A is send and wants to be delivered after 10 sec
%% and B is then send and wants to be delivered after 5 seconds, each
%% could be delivered first?!
%% @version $Id:$
-module(proto_sched).
-author('<EMAIL>').
-vsn('$Id:$').
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Quick start:
%% 1. call proto_sched:thread_num(N) %% seed has to be put somehow in
%% 2. in each process/thread participating call
%% 2.1. call proto_sched:thread_begin()
%% 2.2. perform a synchronous request like api_tx:read("a")
%% 2.3. call proto_sched:thread_end()
%% 3. call proto_sched:wait_for_end()
%% 4. call proto_sched:get_infos() to retrieve some statistics like
%% the number of possible interleavings, the number of local or
%% globally send messages, etc.
%% 5. call proto_sched:cleanup() to forget about the run and
%% delete statistics data.
%%
%% immediately before every receive statement using SCALARIS_RECV
%% insert a trace_mpath:thread_yield() to pass the control flow back
%% to the proto_sched.
%%
%% You can also provide a trace_id, so that the proto_sched can be used
%% independently for several protocols at the same time (e.g. in
%% concurrent unittests). See the interfaces and exported functions
%% below.
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%-define(TRACE(X,Y), log:log("proto_sched: " ++ X,Y)).
-define(TRACE(X,Y), ok).
-include("scalaris.hrl").
-include("record_helpers.hrl").
-behaviour(gen_component).
%% client functions
%% declare how many threads you will have (with optional trace_id):
%% when thread_num threads called thread_begin(), proto_sched starts
%% the scheduled execution
-export([thread_num/1, thread_num/2]).
%% in each thread, give control to proto_sched (with optional trace_id)
-export([thread_begin/0, thread_begin/1]).
%% in each thread, declare its end (with optional trace_id)
-export([thread_end/0, thread_end/1]).
%% (1) before a receive, yield each thread to pass control to central
%% scheduler
-export([thread_yield/0]).
-export([get_infos/0, get_infos/1, info_shorten_messages/2]).
-export([register_callback/2, register_callback/3]).
-export([infected/0]).
-export([clear_infection/0, restore_infection/0]).
-export([wait_for_end/0, wait_for_end/1]).
-export([cleanup/0, cleanup/1]).
%% report messages from other modules
-export([start/2]).
-export([log_send/5]).
-export([epidemic_reply_msg/4]).
%% gen_component behaviour
-export([start_link/1, init/1]).
-export([on/2]). %% internal message handler as gen_component
-type logger() :: {proto_sched, comm:mypid()}.
-type anypid() :: pid() | comm:mypid().
-type trace_id() :: term().
-type send_event() :: {log_send, Time::'_', trace_id(),
Source::anypid(), Dest::anypid(), comm:message(),
local | global}.
-type passed_state() :: {trace_id(), logger()}.
-type gc_mpath_msg() :: {'$gen_component', trace_mpath, passed_state(),
Src::anypid(), Dest::anypid(), comm:message()}.
-ifdef(with_export_type_support).
-export_type([logger/0]).
-export_type([passed_state/0]).
-export_type([callback_on_msg/0]).
-endif.
-type queue_key() :: {Src :: comm:mypid(), Dest :: comm:mypid()}.
-type delay_queue_key() :: {Dest :: comm:mypid()}.
-type msg_queues() :: [queue_key()].
-type msg_delay_queues() :: [delay_queue_key()].
-type callback_on_msg() ::
fun((Src::comm:mypid(), Dest::comm:mypid(), Msg::comm:message()) -> ok).
-record(state,
{msg_queues = ?required(state, msg_queues)
:: msg_queues(),
msg_delay_queues = ?required(state, msg_delay_queues)
:: msg_delay_queues(),
status = ?required(state, status)
:: new | stopped | running
| {delivered, comm:mypid(), reference(), erlang_timestamp()},
to_be_cleaned = ?required(state, to_be_cleaned)
:: false | {to_be_cleaned, pid()},
passed_state = ?required(state, passed_state)
:: none | passed_state(),
num_possible_executions = ?required(state, num_possible_executions)
:: pos_integer(),
num_delivered_msgs = ?required(state, num_delivered_msgs)
:: non_neg_integer(),
delivered_msgs = ?required(state, delivered_msgs)
:: [send_event()], %% delivered messages in reverse order
nums_chosen_from = ?required(state, nums_chosen_from)
:: [pos_integer()], %% #possibilities for each delivered msg in reverse order
callback_on_send = ?required(state, callback_on_send)
:: callback_on_msg(),
callback_on_deliver = ?required(state, callback_on_deliver)
:: callback_on_msg(),
thread_num = ?required(state, thread_num)
:: non_neg_integer(),
threads_registered = ?required(state, threads_registered)
:: non_neg_integer(),
inform_on_end = ?required(state, inform_on_end)
:: pid() | none
}).
-type state_t() :: #state{}.
-type state() :: [{trace_id(), state_t()}].
%% @doc Declares, for the default trace id, how many threads will take
%% part in the scheduled protocol run (see thread_num/2).
-spec thread_num(pos_integer()) -> ok.
thread_num(N) -> thread_num(N, default).

%% @doc Tells the scheduler that N threads will call thread_begin/1 for
%% TraceId; the scheduled execution starts once all of them have
%% registered. Blocks until the scheduler acknowledges and throws
%% 'proto_sched:thread_num_failed' on a negative reply (presumably when
%% a run for this trace id is already active -- confirm in the on/2
%% handler).
-spec thread_num(pos_integer(), trace_id()) -> ok.
thread_num(N, TraceId) ->
    %% uninfected control message to the proto_sched process
    send_steer_msg({thread_num, TraceId, N, comm:this()}),
    receive
        ?SCALARIS_RECV({thread_num_done}, ok);
        ?SCALARIS_RECV({thread_num_failed},
                       util:do_throw('proto_sched:thread_num_failed'))
    end.
%% @doc Hands control of the calling thread to the scheduler for the
%% default trace id (see thread_begin/1).
-spec thread_begin() -> ok.
thread_begin() -> thread_begin(default).

%% @doc Registers the calling thread with the scheduler for TraceId and
%% blocks until the scheduler releases it for execution. Asserts that
%% the caller is not already infected (i.e. no duplicate thread_begin).
%% On return the caller IS infected: all its subsequent sends are
%% redirected to the scheduler until thread_end/1.
-spec thread_begin(trace_id()) -> ok.
thread_begin(TraceId) ->
    ?ASSERT2(not infected(), duplicate_thread_begin),
    %% We could send this as normal traced client message to
    %% ourselves?! But we better send in a special way to be able to
    %% detect these thread_begin messages in a separate handler
    %% clause as we want to detect when thread_num was set to small.
    send_steer_msg({thread_begin, TraceId, comm:this()}),
    %% proto_sched will then schedule itself a proper infected
    %% message, that we then receive, which automatically infects this
    %% client thread
    receive
        ?SCALARIS_RECV({thread_begin_but_already_running},
                       util:do_throw('proto_sched:thread_begin-but_already_running'));
        ?SCALARIS_RECV(
           {thread_release_to_run},
           %% Yippie, we were chosen for execution, so we go on now up
           %% to the next trace_mpath:thread_yield() (in front of a
           %% receive) or proto_sched:thread_stop() that we pass.
           ok)
    end,
    ?DBG_ASSERT2(infected(), not_infected_after_thread_begin),
    ok.
%% @doc Yields the calling thread back to the central scheduler; must be
%% called directly before a receive. Only legal between thread_begin/1
%% and thread_end/1 (asserts that the caller is infected).
-spec thread_yield() -> ok.
thread_yield() ->
    ?ASSERT2(infected(), yield_outside_thread_start_thread_end),
    trace_mpath:thread_yield().

%% @doc Ends the calling thread's participation for the default trace id
%% (see thread_end/1).
-spec thread_end() -> ok.
thread_end() -> thread_end(default).

%% @doc Declares this thread's protocol participation finished: notifies
%% the scheduler and clears the infection, so subsequent sends are no
%% longer routed through the scheduler. Asserts that the caller is
%% currently infected (no duplicate thread_end).
-spec thread_end(trace_id()) -> ok.
thread_end(TraceId) ->
    ?ASSERT2(infected(), duplicate_or_uninfected_thread_end),
    %% inform proto_sched that we are finished.
    send_steer_msg({on_handler_done, TraceId, thread_end, comm:this()}),
    %% switch off the infection
    clear_infection(),
    ?DBG_ASSERT2(not infected(), infected_after_thread_end),
    ok.
%% @doc Blocks until the scheduled run for the default trace id has
%% finished (see wait_for_end/1).
-spec wait_for_end() -> ok.
wait_for_end() -> wait_for_end(default).

%% @doc Blocks until the scheduler reports that no more messages are
%% queued for TraceId, i.e. the protocol has run to completion. Must be
%% called uninfected (asserted); throws when the trace id is unknown to
%% the scheduler.
-spec wait_for_end(trace_id()) -> ok.
wait_for_end(TraceId) ->
    ?ASSERT2(not infected(), wait_for_end_when_infected),
    send_steer_msg({wait_for_end, TraceId, self()}),
    receive
        ?SCALARIS_RECV({proto_sched_done}, ok);
        ?SCALARIS_RECV({wait_for_end_trace_not_found},
                       util:do_throw('proto_sched:wait_for_end-trace not found'))
    end.
%% @doc Infects the calling process for TraceId with the given
%% proto_sched logger: builds a passed-state token and installs it as
%% this process' own state, so that subsequent sends are redirected to
%% the scheduler (same mechanism as trace_mpath -- see module header).
-spec start(trace_id(), logger()) -> ok.
start(TraceId, Logger) ->
    PState = passed_state_new(TraceId, Logger),
    own_passed_state_put(PState).
%% @doc Registers CallbackFun for the default trace id (see
%% register_callback/3).
-spec register_callback(CallbackFun::callback_on_msg(), on_send | on_deliver)
        -> ok | failed.
register_callback(CallbackFun, OnX) ->
    register_callback(CallbackFun, OnX, default).

%% @doc Registers CallbackFun with the scheduler for TraceId; the
%% scheduler invokes it for every message send (`on_send') or delivery
%% (`on_deliver'). Blocks for the scheduler's reply and returns it
%% (`ok' or `failed'). The registration message itself is sent
%% uninfected so it is not scheduled as part of the protocol.
-spec register_callback(CallbackFun::callback_on_msg(), on_send | on_deliver,
                        trace_id()) -> ok | failed.
register_callback(CallbackFun, OnX, TraceId) ->
    %% clear infection
    clear_infection(),
    %% register the callback function
    LoggerPid = pid_groups:find_a(proto_sched),
    comm:send_local(LoggerPid, {register_callback, CallbackFun, OnX, TraceId, comm:this()}),
    %% restore infection
    restore_infection(),
    receive
        ?SCALARIS_RECV({register_callback_reply, Result}, Result)
    end.
%% @doc Shortens the textual form of every entry in the `delivered_msgs'
%% element of an info list (as returned by get_infos/1) to at most
%% CharsPerMsg characters, and moves that element to the head of the
%% returned list. A non-empty Infos list without a `delivered_msgs'
%% entry crashes with badmatch.
%% NOTE(review): the width 111610 in the `~p' control sequence looks
%% like a corrupted constant -- verify against the upstream source.
-spec info_shorten_messages(Infos, CharsPerMsg::pos_integer()) -> Infos
        when is_subtype(Infos, [tuple()]).
info_shorten_messages([], _CharsPerMsg) ->
    [];
info_shorten_messages(Infos, CharsPerMsg) ->
    {value, {delivered_msgs, DeliveredMsgs}, RestInfos} =
        lists:keytake(delivered_msgs, 1, Infos),
    DeliveredMsgs1 =
        [begin
             %% render the message term on (effectively) one line ...
             MsgStr = lists:flatten(io_lib:format("~111610.0p", [Msg])),
             %% ... and keep only its first CharsPerMsg characters
             element(1, util:safe_split(CharsPerMsg, MsgStr))
         end || Msg <- DeliveredMsgs],
    [{delivered_msgs, DeliveredMsgs1} | RestInfos].
%% @doc Retrieves run statistics for the default trace id (see
%% get_infos/1).
-spec get_infos() -> [tuple()].
get_infos() -> get_infos(default).

%% @doc Asks the proto_sched process for statistics about TraceId's run
%% (number of possible interleavings, delivered messages, ...). Returns
%% the empty list when no proto_sched process can be found. The request
%% is sent uninfected so it does not become part of the schedule.
-spec get_infos(trace_id()) -> [tuple()].
get_infos(TraceId) ->
    case pid_groups:find_a(proto_sched) of
        failed -> [];
        LoggerPid ->
            clear_infection(),
            comm:send_local(LoggerPid, {get_infos, comm:this(), TraceId}),
            receive
                ?SCALARIS_RECV({get_infos_reply, Infos}, Infos)
            end,
            restore_infection(),
            Infos
    end.
%% @doc Whether the calling process is currently infected, i.e. its
%% sends are being redirected; thin delegate to trace_mpath.
-spec infected() -> boolean().
infected() ->
    trace_mpath:infected().

%% @doc Temporarily switches the calling process' infection off; thin
%% delegate to trace_mpath.
-spec clear_infection() -> ok.
clear_infection() ->
    trace_mpath:clear_infection().

%% @doc Re-enables a previously cleared infection; thin delegate to
%% trace_mpath.
-spec restore_infection() -> ok.
restore_infection() ->
    trace_mpath:restore_infection().
%% @doc Discards the recorded run for the default trace id (see
%% cleanup/1).
-spec cleanup() -> ok.
cleanup() -> cleanup(default).

%% @doc Tells the proto_sched process to forget TraceId's run and its
%% statistics. Must be called uninfected (asserted); blocks for the
%% confirmation and throws when the trace id is unknown.
-spec cleanup(trace_id()) -> ok.
cleanup(TraceId) ->
    %% clear infection
    ?ASSERT2(not infected(), 'proto_sched:cleanup_called_infected'),
    ProtoSchedPid = pid_groups:find_a(?MODULE),
    comm:send_local(ProtoSchedPid, {cleanup, TraceId, self()}),
    %% plain (uninfected) receive: the reply is sent outside the schedule
    receive {cleanup_done} -> ok;
            {cleanup_trace_not_found} ->
                erlang:throw('proto_sched:cleanup_trace_not_found')
    end,
    ok.
%% Functions used to report tracing events from other modules
-spec epidemic_reply_msg(passed_state(), anypid(), anypid(), comm:message()) ->
gc_mpath_msg().
epidemic_reply_msg(PState, FromPid, ToPid, Msg) ->
{'$gen_component', trace_mpath, PState, FromPid, ToPid, Msg}.
%% @doc Reports a message send event to the proto_sched logger referenced in
%%      PState. The single-clause case is an intentional assertion: PState
%%      must have been created by proto_sched (logger tag {proto_sched, _}).
-spec log_send(passed_state(), anypid(), anypid(), comm:message(),
local | global | local_after) -> ok.
log_send(PState, FromPid, ToPid, Msg, LocalOrGlobal) ->
case passed_state_logger(PState) of
{proto_sched, LoggerPid} ->
TraceId = passed_state_trace_id(PState),
send_log_msg(
PState,
LoggerPid,
{log_send, '_', TraceId, FromPid, ToPid, Msg, LocalOrGlobal})
end,
ok.
%% @doc Sends Msg to the logger while making sure the send itself is not
%%      traced; afterwards the previous passed state is written back into
%%      the process dictionary of the caller.
-spec send_log_msg(passed_state(), comm:mypid(), send_event() | comm:message()) -> ok.
send_log_msg(RestoreThis, LoggerPid, Msg) ->
%% don't log the sending of log messages ...
clear_infection(),
comm:send(LoggerPid, Msg),
own_passed_state_put(RestoreThis).
%% @doc Starts the proto_sched gen_component, registers it under the module
%%      name and joins it into the given pid group.
-spec start_link(pid_groups:groupname()) -> {ok, pid()}.
start_link(ServiceGroup) ->
gen_component:start_link(?MODULE, fun ?MODULE:on/2, [],
[{erlang_register, ?MODULE},
{pid_groups_join_as, ServiceGroup, ?MODULE}]).
%% @doc gen_component init callback: starts the periodic slow-handler check
%%      and begins with an empty list of traces as state.
-spec init(any()) -> state().
init(_Arg) ->
msg_delay:send_trigger(1, {check_slow_handler_trigger}),
[].
%% @doc Main gen_component message handler. The state is a keylist of
%%      {TraceId, #state{}} entries, one per trace currently scheduled.
-spec on(send_event() | comm:message(), state()) -> state().
%% A thread announces itself for TraceId: count the registration, possibly
%% kick off delivery, and enqueue the initial release message for Client.
on({thread_begin, TraceId, Client}, State) ->
?TRACE("proto_sched:on({thread_begin, ~p, ~p})", [TraceId, Client]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
T1 = get_or_create(TraceId, State),
case T1#state.status of
new ->
T2 = T1#state{
threads_registered = 1 + T1#state.threads_registered},
%% trigger start_deliver when thread_num = threads_registered
T3 = start_deliver_when_ready(TraceId, T2),
NewState = lists:keystore(TraceId, 1, State, {TraceId, T3}),
gen_component:post_op({log_send, os:timestamp(),
TraceId, Client, Client,
{thread_release_to_run}, global}, NewState);
_ ->
log:log("Wrong proto_sched:thread_begin, found state is: ~.0p",
[T1]),
%% wrong call to proto_sched:thread_begin(),
%% send fail message to raise exception at caller code
%% position.
comm:send(Client, {thread_begin_but_already_running}),
State
end;
%% The client announces the number of participating threads; only valid
%% once and only before delivery has started.
on({thread_num, TraceId, N, Client}, State) ->
?TRACE("proto_sched:on({thread_num, ~p, ~p})", [TraceId, N]),
?DBG_ASSERT(not infected()),
T1 = get_or_create(TraceId, State),
case new =:= T1#state.status andalso 0 =:= T1#state.thread_num of
true ->
T2 = T1#state{thread_num = N},
%% trigger start_deliver when thread_num = threads_registered
T3 = start_deliver_when_ready(TraceId, T2),
comm:send(Client, {thread_num_done}),
lists:keystore(TraceId, 1, State, {TraceId, T3});
_ ->
log:log("Wrong proto_sched:thread_num, "
"(duplicate call or already running) "
"- found state is: ~.0p",
[T1]),
%% wrong call to proto_sched:thread_begin(),
%% send fail message to raise exception at
%% caller code position.
comm:send(Client, {thread_num_failed}),
State
end;
%% A traced send was intercepted: enqueue the message for later scheduling
%% and run the registered on-send callback.
on({log_send, _Time, TraceId, From, To, UMsg, LorG}, State) ->
?TRACE("proto_sched:on({log_send ... ~.0p (~.0p) -> ~.0p (~.0p): ~.0p})",
[From,
pid_groups:group_and_name_of(From),
To,
pid_groups:group_and_name_of(To),
UMsg]),
FromGPid = comm:make_global(From),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
TmpEntry = case lists:keyfind(TraceId, 1, State) of
false ->
add_message(From, To, UMsg, LorG, new(TraceId));
{TraceId, OldTrace} ->
add_message(From, To, UMsg, LorG, OldTrace)
end,
%% call the callback function (if any)
CallbackFun = TmpEntry#state.callback_on_send,
?TRACE("executing callback function ~p.", [CallbackFun]),
CallbackFun(From, To, UMsg),
%% the case below is also an assertion: any other status crashes here
case TmpEntry#state.status of
new ->
%% still waiting for all threads to join
?DBG_ASSERT2(UMsg =:= {thread_release_to_run}, wrong_starting_msg),
lists:keystore(TraceId, 1, State, {TraceId, TmpEntry});
{delivered, FromGPid, _Ref, _DeliverTime} ->
%% only From is allowed to enqueue messages
%% only when delivered or to_be_cleaned (during execution
%% of a scheduled piece of code) new arbitrary messages
%% can be added to the schedule
lists:keystore(TraceId, 1, State, {TraceId, TmpEntry})
end;
%% Switch the trace to running and schedule the first delivery.
on({start_deliver, TraceId}, State) ->
?TRACE("proto_sched:on({start_deliver, ~p})", [TraceId]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
%% initiate delivery: if messages are already queued, deliver
%% first message, otherwise when first message arrives, start
%% delivery with that message.
case lists:keyfind(TraceId, 1, State) of
%% Entry is always there, as start_deliver is only called after
%% enough thread_num and thread_begin calls
{TraceId, OldTrace} ->
case new =/= OldTrace#state.status of
true ->
log:log("Duplicate proto_sched:start_deliver() call"
" probably not what you intend to do for"
" reproducible results~n"),
case util:is_unittest() of
true ->
erlang:throw(proto_sched_duplicate_start_deliver);
false ->
ok
end;
_ -> ok
end,
NewEntry = OldTrace#state{status = running},
NewState = lists:keystore(TraceId, 1, State, {TraceId, NewEntry}),
?TRACE("proto_sched:on({start_deliver, ~p}) postop deliver", [TraceId]),
gen_component:post_op({deliver, TraceId}, NewState)
end;
%% Pick a random queued message for TraceId and deliver it (infected) to
%% its receiver; if no messages are queued, mark the trace stopped.
on({deliver, TraceId}, State) ->
?TRACE("proto_sched:on({deliver, ~p})", [TraceId]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
case lists:keyfind(TraceId, 1, State) of
false ->
?TRACE("proto_sched:on({deliver, ~p}) Nothing to deliver, unknown trace id!", [TraceId]),
State;
{TraceId, TraceEntry} ->
%% at most one message may be in flight per trace
case TraceEntry#state.status of
{delivered, _ToPid, _Ref, _Time} ->
?TRACE("There is already message delivered to ~.0p",
[_ToPid]),
erlang:throw(proto_sched_already_in_delivered_mode);
_ -> ok
end,
case TraceEntry#state.msg_queues of
[] ->
?TRACE("Running out of messages, "
"waiting for further ones to arrive on id '~p'.~n"
"When protocol is finished, call proto_sched:stop(~p) and~n"
"proto_sched:cleanup(~p)",
[TraceId, TraceId, TraceId]),
?TRACE("Seen ~p possible executions so far for id '~p'.",
[TraceEntry#state.num_possible_executions, TraceId]),
%% notify a pending wait_for_end caller (if any)
case TraceEntry#state.inform_on_end of
none -> ok;
Client ->
comm:send_local(Client, {proto_sched_done})
end,
NewEntry = TraceEntry#state{status = stopped},
lists:keystore(TraceId, 1, State, {TraceId, NewEntry});
_ ->
{From, To, LorG, Msg, NumPossible, TmpEntry} =
pop_random_message(TraceEntry),
?TRACE("Chosen from ~p possible next messages.", [NumPossible]),
%% monitor local receivers so a dying receiver does not stall us
Monitor = case comm:is_local(comm:make_global(To)) of
true -> erlang:monitor(process,
comm:make_local(comm:get_plain_pid(To)));
false -> none
end,
NewEntry =
TmpEntry#state{num_possible_executions
= NumPossible * TmpEntry#state.num_possible_executions,
status = {delivered,
comm:make_global(To),
Monitor,
os:timestamp()},
num_delivered_msgs
= 1 + TmpEntry#state.num_delivered_msgs,
delivered_msgs
= [ {From, To, LorG, Msg}
| TmpEntry#state.delivered_msgs],
nums_chosen_from
= [ NumPossible
| TmpEntry#state.nums_chosen_from] },
%% we want to get raised messages, so we have to infect this message
PState = TraceEntry#state.passed_state,
InfectedMsg = epidemic_reply_msg(PState, From, To, Msg),
?TRACE("delivering msg to execute:"
" ~.0p (~.0p) -> ~.0p (~.0p): ~.0p.",
[From,
pid_groups:group_and_name_of(comm:make_local(From)),
To,
pid_groups:group_and_name_of(comm:make_local(To)),
Msg]),
%% call the callback function (if any) before sending out the msg
CallbackFun = TraceEntry#state.callback_on_deliver,
?TRACE("executing callback function ~p.", [CallbackFun]),
CallbackFun(From, To, Msg),
%% Send infected message with a shepherd. In case of send errors,
%% we will be informed by a {send_error, Pid, Msg, Reason} message.
comm:send(comm:make_global(To), InfectedMsg, [{shepherd, self()}]),
lists:keystore(TraceId, 1, State, {TraceId, NewEntry})
end
end;
%% The receiver finished handling the delivered message: trigger the next
%% delivery or, if requested meanwhile, perform the pending cleanup.
on({on_handler_done, TraceId, _Tag, To}, State) ->
?TRACE("proto_sched:on({on_handler_done, ~p}).", [TraceId]),
%% do not use gen_component:post_op to allow a pending cleanup
%% call to interrupt us early.
case lists:keyfind(TraceId, 1, State) of
false ->
%% this is a bug
log:log("This is a bug"),
State;
{TraceId, TraceEntry} ->
case TraceEntry#state.status of
{delivered, To, Ref, _Time} ->
%% this delivered was done, so we can schedule a new msg.
%% NOTE(review): Ref is 'none' when the receiver was remote
%% (see the Monitor setup in the deliver handler) and
%% erlang:demonitor('none') would badarg - confirm this
%% clause is only reached for locally monitored receivers.
erlang:demonitor(Ref),
%% enqueue a new deliver request for this TraceId
?TRACE("~p proto_sched:on({on_handler_done, ~p})"
" trigger next deliver 1.", [To, TraceId]),
comm:send_local(self(), {deliver, TraceId}),
%% set status to running
NewEntry = TraceEntry#state{status = running},
NewState = lists:keystore(TraceId, 1, State,
{TraceId, NewEntry}),
case NewEntry#state.to_be_cleaned of
{to_be_cleaned, CallerPid} ->
?TRACE("proto_sched:on({on_handler_done, ~p})"
" doing cleanup.", [TraceId]),
gen_component:post_op({do_cleanup,
TraceId,
CallerPid}, NewState);
false -> NewState
end;
new ->
%% proto_sched:end() immediately after proto_sched:start()?
%% enqueue a new deliver request for this TraceId
?TRACE("proto_sched:on({on_handler_done, ~p})"
"trigger next deliver 2 ~p.", [TraceId, new]),
comm:send_local(self(), {deliver, TraceId}),
State
end
end;
%% Shepherd report: delivering to Pid failed; treat it like a completed
%% handler so scheduling can continue.
on({send_error, Pid, Msg, _Reason} = _ShepherdMsg, State) ->
%% call on_handler_done and continue with message delivery
TraceId = get_trace_id(get_passed_state(Msg)),
?TRACE("send error for trace id ~p: ~p calling on_handler_done.", [TraceId, _ShepherdMsg]),
case lists:keyfind(TraceId, 1, State) of
false -> State;
{TraceId, TraceEntry} ->
case TraceEntry#state.status of
{delivered, Pid, _Ref, _Time} ->
%% send error, generate on_handler_done
gen_component:post_op({on_handler_done, TraceId, send_error, Pid}, State);
_ ->
%% not in state delivered, so probably the monitor
%% already cleaned up for the died process with
%% its 'DOWN' message.
State
end
end;
%% Installs a user callback (on_send or on_deliver) for the given trace.
on({register_callback, CallbackFun, OnX, TraceId, Client}, State) ->
?TRACE("proto_sched:on({register_callback, ~p, ~p, ~p, ~p}).",
[CallbackFun, OnX, TraceId, Client]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
case lists:keyfind(TraceId, 1, State) of
false ->
comm:send(Client, {register_callback_reply, failed}),
State;
{TraceId, TraceEntry} ->
comm:send(Client, {register_callback_reply, ok}),
NewEntry =
case OnX of
on_send ->
TraceEntry#state{callback_on_send = CallbackFun};
on_deliver ->
TraceEntry#state{callback_on_deliver = CallbackFun}
end,
lists:keyreplace(TraceId, 1, State, {TraceId, NewEntry})
end;
%% Replies with the statistics gathered for the given trace id.
on({get_infos, Client, TraceId}, State) ->
?TRACE("proto_sched:on({get_infos, ~p, ~p}).", [Client, TraceId]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
case lists:keyfind(TraceId, 1, State) of
false ->
comm:send(Client, {get_infos_reply, []});
{TraceId, TraceEntry} ->
%% average number of alternatives per scheduling decision
BranchingFactor =
case length(TraceEntry#state.nums_chosen_from) of
0 -> 0;
N -> lists:sum(TraceEntry#state.nums_chosen_from) / N
end,
Infos =
[{delivered_msgs,
lists:reverse(TraceEntry#state.delivered_msgs)},
{nums_chosen_from,
lists:reverse(TraceEntry#state.nums_chosen_from)},
{avg_branching_factor, BranchingFactor},
{num_delivered_msgs,
TraceEntry#state.num_delivered_msgs},
{num_possible_executions,
TraceEntry#state.num_possible_executions}],
comm:send(Client, {get_infos_reply, Infos})
end,
State;
%% Registers CallerPid to be informed once the trace runs out of messages
%% (or informs it immediately if the trace is already stopped).
on({wait_for_end, TraceId, CallerPid}, State) ->
?TRACE("proto_sched:on({wait_for_end, ~p, ~p}).", [TraceId, CallerPid]),
case lists:keyfind(TraceId, 1, State) of
false ->
comm:send_local(CallerPid, {wait_for_end_trace_not_found}),
State;
{TraceId, TraceEntry} ->
case TraceEntry#state.status of
stopped ->
comm:send_local(CallerPid, {proto_sched_done}),
State;
_ ->
?ASSERT2(none =:= TraceEntry#state.inform_on_end,
'proto_sched:wait_for_end_already_called'),
NewEntry = TraceEntry#state{inform_on_end = CallerPid},
lists:keyreplace(TraceId, 1, State, {TraceId, NewEntry})
end
end;
%% Cleanup request: either performed immediately or deferred until the
%% currently delivered message has been handled.
on({cleanup, TraceId, CallerPid}, State) ->
?TRACE("proto_sched:on({cleanup, ~p, ~p}).", [TraceId, CallerPid]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
case lists:keyfind(TraceId, 1, State) of
false ->
comm:send_local(CallerPid, {cleanup_trace_not_found}),
State;
{TraceId, TraceEntry} ->
case TraceEntry#state.status of
{delivered, _To, _Ref, _Time} ->
?TRACE("proto_sched:on({cleanup, ~p, ~p}) set status to to_be_cleaned.", [TraceId, CallerPid]),
NewEntry = TraceEntry#state{
to_be_cleaned = {to_be_cleaned, CallerPid}},
lists:keyreplace(TraceId, 1, State, {TraceId, NewEntry});
_ ->
gen_component:post_op({do_cleanup, TraceId, CallerPid}, State)
end
end;
%% Actually removes the trace entry and flushes all queued messages.
on({do_cleanup, TraceId, CallerPid}, State) ->
?TRACE("proto_sched:on({do_cleanup, ~p, ~p}).", [TraceId, CallerPid]),
?DBG_ASSERT2(not infected(), infected_in_on_handler),
case lists:keytake(TraceId, 1, State) of
{value, {TraceId, TraceEntry}, TupleList2} ->
send_out_pending_messages(TraceEntry#state.msg_queues),
send_out_pending_messages(TraceEntry#state.msg_delay_queues),
comm:send_local(CallerPid, {cleanup_done}),
TupleList2;
false ->
comm:send_local(CallerPid, {cleanup_done}),
State
end;
%% A monitored receiver died while a message was delivered to it: fake the
%% corresponding on_handler_done to keep the schedule going.
on({'DOWN', Ref, process, Pid, _Reason}, State) ->
?TRACE("proto_sched:on({'DOWN', ~p, process, ~p, ~p}).",
[Ref, Pid, _Reason]),
%% search for trace with status delivered, Pid and Ref
StateTail = lists:dropwhile(fun({_TraceId, X}) ->
case X#state.status of
{delivered, _Pid, Ref, _Time} ->
false;
_ -> true
end end,
State),
case StateTail of
[] -> State; %% outdated 'DOWN' message - ok
[TraceEntry | _] ->
%% log:log("proto_sched:on({'DOWN', ~p, process, ~p, ~p}).",
%% [Ref, Pid, Reason]),
%% the process we delivered to has died, so we generate us a
%% gc_on_done message ourselves.
%% use post_op to avoid concurrency with send_error
%% message when delivering to already dead nodes.
gen_component:post_op({on_handler_done,
element(1, TraceEntry),
pid_ended_died_or_killed, comm:make_global(Pid)}, State)
end;
%% Periodic trigger: re-arm the timer and run the slow-handler check.
on({check_slow_handler_trigger}, State) ->
msg_delay:send_trigger(1, {check_slow_handler_trigger}),
gen_component:post_op({check_slow_handler_action}, State);
on({check_slow_handler_action}, State) ->
%% check for delivered messages that take longer than a second
%% and output diagnostic information on it.
%% trace id
%% executing process
%% message to process
%% amount of time the response is pending
%% current function of the process delivered to (when local)
[ case TState#state.status of
{delivered, _GPid, _Ref, StartTime} ->
Delta = timer:now_diff(os:timestamp(), StartTime) div 1000000,
case 1 =< Delta of
true -> report_slow_handler(TId, TState);
_ -> ok
end;
_ -> ok
end
|| {TId, TState} <- State ],
State.
%% @doc Logs diagnostic information about a message whose handler has been
%%      running for too long: trace id, receiver, the delivered message and
%%      (for local receivers) a stack trace or the current function.
-spec report_slow_handler(trace_id(), state_t()) -> ok.
report_slow_handler(Tid, Entry) ->
%% the match asserts that the entry is in 'delivered' state
{delivered, GPid, _Ref, StartTime} = Entry#state.status,
%% elapsed time in seconds with one decimal digit
Delta = (timer:now_diff(os:timestamp(), StartTime) div 100000)/10,
{PidGrpName, StackTrace} =
case comm:is_local(GPid) of
true ->
LPid = comm:make_local(GPid),
%% NOTE(review): comm:make_local/1 is applied to the already
%% local LPid here although the variable name suggests a
%% group/name lookup - confirm this is intended (and that
%% comm:make_local/1 is idempotent).
{comm:make_local(LPid),
try erlang:process_info(LPid,
current_stacktrace)
catch error:badarg ->
%% older erlang version
%% -> fall back to current function
catch(erlang:process_info(LPid,
current_function))
end};
_ ->
{not_local, no_stackstrace}
end,
log:log("proto_sched: Msg takes longer than ~p s to process:~n"
"TraceId: ~p~n"
"Process: ~p (~p)~n"
"DeliMsg: ~10000.0p~n"
"Msg No: ~p~n"
"StackTr: ~p~n",
[ Delta,
Tid,
GPid, PidGrpName,
hd(Entry#state.delivered_msgs),
Entry#state.num_delivered_msgs,
StackTrace
]),
ok.
%% @doc Creates the passed state handed along with traced messages:
%%      a pair of the trace id and the logger reference.
passed_state_new(TraceId, Logger) ->
{TraceId, Logger}.
%% Accessors use element/2 rather than a full 2-tuple pattern - presumably
%% so they also work on passed states carrying additional fields; confirm
%% before tightening these to pattern matches.
passed_state_trace_id(State) -> element(1, State).
passed_state_logger(State) -> element(2, State).
%% Stores the passed state under the trace_mpath key in the caller's
%% process dictionary (where the current infection is kept).
own_passed_state_put(State) -> erlang:put(trace_mpath, State), ok.
%%own_passed_state_get() -> erlang:get(trace_mpath).
%% @doc Creates a fresh trace entry for TraceId with empty queues and
%%      counters; the embedded passed state routes message logging back
%%      to this proto_sched process.
new(TraceId) ->
LoggerPid = pid_groups:find_a(?MODULE),
Logger = comm:make_global(LoggerPid),
#state{ msg_queues = [],
msg_delay_queues = [],
status = new,
to_be_cleaned = false,
passed_state = passed_state_new(TraceId, {proto_sched, Logger}),
num_possible_executions = 1,
num_delivered_msgs = 0,
delivered_msgs = [],
nums_chosen_from = [],
%% no-op callbacks until register_callback installs real ones
callback_on_send = fun(_From, _To, _Msg) -> ok end,
callback_on_deliver = fun(_From, _To, _Msg) -> ok end,
thread_num = 0,
threads_registered = 0,
inform_on_end = none
}.
%% @doc Sends out all messages remaining in the given queues. Each queue
%%      lives in the process dictionary under its {Src, Dest} key and is
%%      erased here before its messages are forwarded to Dest.
send_out_pending_messages(Queues) ->
    lists:foreach(fun flush_queue/1, Queues).

%% @doc Erases a single queue from the process dictionary and sends all of
%%      its messages to the (global form of the) destination pid.
flush_queue({_Src, Dest} = Key) ->
    Pending = queue:to_list(erlang:erase(Key)),
    GlobalDest = comm:make_global(Dest),
    _ = [comm:send(GlobalDest, Msg) || {_LorG, Msg} <- Pending],
    ok.
%% @doc Appends {LorG, Msg} to the process-dictionary queue identified by
%%      {Src, Dest} and returns the state with the updated queue key list.
-spec add_message(comm:mypid(), comm:mypid(), comm:message(), local | global, state_t()) -> state_t().
add_message(Src, Dest, Msg, LorG, State = #state{msg_queues = Queues}) ->
    UpdatedQueues = add_to_list_of_queues({Src, Dest}, {LorG, Msg}, Queues),
    State#state{msg_queues = UpdatedQueues}.
%% -spec add_delay_message(comm:mypid(), comm:message(), state_t()) -> state_t().
%% add_delay_message(Dest, Msg, #state{msg_delay_queues = OldQueues} =
%% State) ->
%% Key = {Dest},
%% NewQueues = add_to_list_of_queues(Key, Msg, OldQueues),
%% State#state{msg_delay_queues = NewQueues}.
%% @doc Removes the next message to deliver: a random {Src, Dest} queue is
%%      picked, then its head message is dequeued. Also returns the number
%%      of queues that were available to choose from (branching statistics).
-spec pop_random_message(state_t()) ->
{Src::comm:mypid(), Dest::comm:mypid(),
local | global, Msg::comm:message(),
Possibilities::pos_integer(),
state_t()}.
pop_random_message(#state{msg_queues = OldQueues} = State) ->
{{Src, Dest} = Key, Len} = util:randomelem_and_length(OldQueues),
%% the queue itself lives in the process dictionary under Key
Q = erlang:get(Key),
{{value, {LorG, M}}, Q2} = queue:out(Q),
NewQueues = update_queue_in_list_of_queues(Key, Q2, OldQueues),
State2 = State#state{msg_queues = NewQueues},
{Src, Dest, LorG, M, Len, State2}.
%% -spec pop_random_delay_message(state_t()) -> {comm:mypid(), comm:message(), state_t()}.
%% pop_random_delay_message(#state{msg_delay_queues = OldQueues} = State) ->
%% {{Dest} = Key, Q} = util:randomelem(OldQueues),
%% {{value, M}, Q2} = queue:out(Q),
%% NewQueues = update_queue_in_list_of_queues(Key, Q2, OldQueues),
%% State2 = State#state{msg_delay_queues = NewQueues},
%% {Dest, M, State2}.
%% @doc Enqueues M into the process-dictionary queue stored under Key.
%%      If no queue exists for Key yet, a fresh one is created and Key is
%%      prepended to the list of known queue keys; otherwise the key list
%%      is returned unchanged.
-spec add_to_list_of_queues
(queue_key(), {local | global, comm:message()}, msg_queues()) -> msg_queues().%;
%(delay_queue_key(), comm:message(), msg_delay_queues()) -> msg_delay_queues().
add_to_list_of_queues(Key, M, Queues) ->
    case erlang:get(Key) of
        undefined ->
            _ = erlang:put(Key, queue:in(M, queue:new())),
            [Key | Queues];
        ExistingQ ->
            _ = erlang:put(Key, queue:in(M, ExistingQ)),
            Queues
    end.
%% @doc Writes the given queue back under Key in the process dictionary.
%%      An empty queue is erased instead and its Key is dropped from the
%%      list of known queue keys.
-spec update_queue_in_list_of_queues
(queue_key(), queue:queue({local | global, comm:message()}), msg_queues()) -> msg_queues().%;
%(delay_queue_key(), queue:queue(comm:message()), msg_delay_queues()) -> msg_delay_queues().
update_queue_in_list_of_queues(Key, Q, Queues) ->
    case queue:is_empty(Q) of
        false ->
            _ = erlang:put(Key, Q),
            Queues;
        true ->
            _ = erlang:erase(Key),
            lists:delete(Key, Queues)
    end.
%% @doc Extracts the passed state from a trace envelope as built by
%%      epidemic_reply_msg/4 (the third element of the 6-tuple).
-spec get_passed_state(gc_mpath_msg()) -> passed_state().
get_passed_state(Msg) ->
element(3, Msg).
%% @doc Extracts the trace id (first element) from a passed state.
-spec get_trace_id(passed_state()) -> trace_id().
get_trace_id(State) ->
element(1, State).
%% @doc Sends a control message directly (untraced) to the proto_sched
%%      logger process while preserving the caller's current passed state.
send_steer_msg(Msg) ->
LoggerPid = pid_groups:find_a(?MODULE),
Logger = comm:make_global(LoggerPid),
%% send not as an infected message, but directly to the logger process
send_log_msg(erlang:get(trace_mpath), Logger, Msg).
%% @doc Looks up the trace entry for TraceId in the state keylist and
%%      returns it; a fresh entry is created if none exists yet.
-spec get_or_create(trace_id(), state()) -> state_t().
get_or_create(TraceId, State) ->
    case lists:keyfind(TraceId, 1, State) of
        {TraceId, Existing} -> Existing;
        false -> new(TraceId)
    end.
%% @doc Triggers the start of message delivery for TraceId (by sending
%%      {start_deliver, TraceId} to ourselves) once all announced threads
%%      have registered; returns the entry unchanged in both cases.
-spec start_deliver_when_ready(trace_id(), state_t()) -> state_t().
start_deliver_when_ready(TraceId, Entry) ->
    #state{thread_num = Expected, threads_registered = Registered} = Entry,
    case Expected =:= Registered of
        true -> comm:send_local(self(), {start_deliver, TraceId});
        false -> ok
    end,
    Entry.
%% @author <NAME> (n3053620)
%% @copyright 2015 <NAME>
%% @version 1.0
%% @doc
%% This module provides functionality to validate a connection of a client against the client list.
%% Certain functions will also perform an action upon validating a connection.
-module (validation).
-include ("server.hrl").
-export ([check_connection/2, client_action/2]).
%% @doc
%% Checks if the TCP or UDP configuration given matches the one stored in the ETS
%% for the named client.
%% @spec check_connection (Name, TCP | UDP) -> boolean()
%% Name = string()
%% TCP = socket()
%% UDP = {IP, Port}
%% IP = inet:ip_address() | inet:hostname()
%% Port = inet:port_number()
check_connection (Name, Socket) when not is_tuple (Socket) ->
    case clients:client (Name) of
        %% We only need to check the stored TCP socket value.
        {_, StoredSocket, _, _, _, _, _, _} ->
            Socket =:= StoredSocket;
        %% If the client doesn't exist then we're fine to ignore it.
        none ->
            false
    end;
%% Checks if the UDP configuration given matches the one stored in the ETS.
check_connection (Name, {IP, Port}) ->
    case clients:client (Name) of
        %% We only need to check the stored IP and Port values.
        {_, _, StoredIP, StoredPort, _, _, _, _} ->
            %% andalso short-circuits; 'and' would needlessly evaluate both
            (IP =:= StoredIP) andalso (Port =:= StoredPort);
        %% If the client doesn't exist then we're fine to ignore it.
        none ->
            false
    end.
%% @doc
%% Calls the given action provided the TCP or UDP information given is associated with a connected client.
%% @spec client_action (TCP | UDP, Action) -> any()
%% TCP = socket()
%% UDP = {IP, Port}
%% IP = inet:ip_address() | ip:hostname()
%% Port = inet:port_number()
client_action (Socket, Action) when not is_tuple (Socket) ->
%% We need to check the TCP socket of a client.
%% ETS match spec: select client tuples whose second field (the stored
%% TCP socket) equals Socket; '$_' returns the whole matching tuple.
Predicate = [{{'$1', '$2', '$3', '$4', '$5', '$6', '$7', '$8'},
[{'=:=', '$2', Socket}],
['$_']}],
check_action (Predicate, Action);
%% Calls the given action provided the TCP or UDP information given is associated with a connected client.
client_action ({IP, Port}, Action) ->
%% We need to check the IP and port of the client.
%% ETS match spec: fields three and four hold the stored IP and port.
Predicate = [{{'$1', '$2', '$3', '$4', '$5', '$6', '$7', '$8'},
[{'=:=', '$3', IP}, {'=:=', '$4', Port}],
['$_']}],
check_action (Predicate, Action).
%% @doc
%% Checks if the ETS select function returns a match, if so the action will be performed.
%% Note: the action is run exactly once, no matter how many rows matched.
%% @spec check_action (Predicate, Action) -> any()
%% Predicate = term()
%% Action = function()
check_action (Predicate, Action) ->
case ets:select (?CLIENTS, Predicate) of
%% Only perform the action if the socket is linked with a client.
[] ->
io:format ("Validation: Attempt to perform an action from an invalid client.~n");
_ ->
Action()
end.
-module(pgo_datetime_SUITE).
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
%% @doc All test cases of this suite.
all() -> [select, insert, interval].
%% @doc Starts the pgo application and a single-connection default pool
%%      against the local 'test' database before the suite runs.
init_per_suite(Config) ->
application:ensure_all_started(pgo),
{ok, _} = pgo_sup:start_child(default, #{pool_size => 1,
database => "test",
user => "test"}),
Config.
%% @doc Stops the pgo application after the suite has finished.
end_per_suite(_Config) ->
application:stop(pgo),
ok.
%% @doc Decoding of date, timestamp and timestamptz values in selects.
select(_Config) ->
?assertMatch(#{command := select,
rows := [{{2012,1,17}}]},
pgo:query("select '2012-01-17 10:54:03.45'::date")),
?assertMatch(#{command := select,
rows := [{{{2012,1,17},{10,54,3.45}}}]},
pgo:query("select '2012-01-17 10:54:03.45'::timestamp")),
?assertMatch(#{command := select,
rows := [{{{2012,1,17},{10,54,3.45}}}]},
pgo:query("select '2012-01-17 10:54:03.45'::timestamptz")).
%% @doc Encoding of timestamp and time parameters on insert and their
%%      round-trip decoding on select.
insert(_Config) ->
?assertMatch(#{command := create},
pgo:query("create temporary table times (a_timestamp timestamp, a_time time)")),
?assertMatch(#{command := insert},
pgo:query("insert into times (a_timestamp, a_time) VALUES ($1, $2)",
[{{2012,1,17},{10,54,3.45}}, {10,54,3.45}])),
?assertMatch(#{command := select,
rows := [{{{2012,1,17},{10,54,3.45}}, {10,54,3.45}}]},
pgo:query("select a_timestamp, a_time from times")).
%% @doc Encoding of interval parameters: only rows whose timestamp
%%      difference is below the given {Months, Days, Micros}-style interval
%%      must be returned.
interval(_Config) ->
?assertMatch(#{command := create},
pgo:query("create temporary table interval_times (a_timestamp timestamp, b_timestamp timestamp)")),
?assertMatch(#{command := insert},
pgo:query("insert into interval_times (a_timestamp, b_timestamp) VALUES ($1, $2)",
[{{2012,1,17},{10,54,3.45}}, {{2012,1,20},{10,54,3.45}}])),
?assertMatch(#{command := insert},
pgo:query("insert into interval_times (a_timestamp, b_timestamp) VALUES ($1, $2)",
[{{2013,1,17},{10,54,3.45}}, {{2013,1,28},{10,54,3.45}}])),
?assertMatch(#{command := select,
rows := [{{{2012,1,17},{10,54,3.45}}, {{2012,1,20},{10,54,3.45}}}]},
pgo:query("select a_timestamp, b_timestamp from interval_times where b_timestamp - a_timestamp < $1", [{interval, {0, 5, 0}}])).
-module(example_curve).
-author('<NAME> <<EMAIL>>').
-behaviour(digeno_callback).
%% digeno callback functions
-export([get_config/0,
generate/0,
evaluate/1,
fitness/2,
dead_on_arrival/2,
mutate/1,
combine/2,
format/1,
format_result/1]).
%% This is a module defining a Genetic Optimisation problem.
%% Problem of curve fitting a polynomial function against data points:
%% Given an integer N and a set of data points [{X, Y}], determine the
%% coefficients of a univariate polynomial function of degree N that
%% best fits the set of data. Let the error metric to be minimized be
%% the sum of squared differences.
%% Instances are polynomial functions of the form:
%% y(x) = a0 + a1 x + a2 x^2 + ... + an x^n
%% Instance representation: a list of coefficients [a0, a1, ..., an]
%% Example problem: sin(x) in the range [-pi, pi].
%%
%% The best possible instance is approximately:
%% [0, 1, 0, -0.166666, 0, 0.008333, 0, -0.000198]
%% according to the Taylor series expansion:
%% [0, 1, 0, -1/(3!), 0 1/(5!), 0, -1/(7!), ...]
-define(DEGREE, 7).
%% digeno callbacks

%% @doc Static configuration for the digeno framework: population size,
%%      fitness value at which the search stops, automatic convergence
%%      detection and how often status output is emitted.
get_config() -> [{population_size, 1000},
{fitness_target, 1000.0},
{converg_detect, auto},
{display_decimator, 1000}].
%% @doc Creates a fresh random instance: a polynomial of degree ?DEGREE.
generate() -> random_poly(?DEGREE).
%% @doc Mutates an instance a random number of times in a row.
mutate(Fn) -> mutate(Fn, utils:grandom(?DEGREE)).
%% @doc Crossover: each coefficient is taken from one of the two parents.
combine(Fn1, Fn2) ->
[utils:crandom([Ai, Bi]) || {Ai, Bi} <- lists:zip(Fn1, Fn2)].
%% Test: combine([a0, a1, a2, a3, a4, a5, a6, a7],
%% [b0, b1, b2, b3, b4, b5, b6, b7]).
%% @doc Evaluation result: the sum of squared errors against the data set.
evaluate(Fn) -> eval_error(Fn, data()).
%% @doc No instance is considered dead on arrival in this problem.
dead_on_arrival(_Fn, _Result) -> false.
%% @doc Fitness to maximize: the inverse error; exact fits (error 0) get
%%      2.0 so they rank above every non-zero error (whose inverse is
%%      always below 2.0 only for errors > 0.5 - larger values are possible).
fitness(_Fn, 0) -> 2.0;
fitness(_Fn, Result) -> 1.0 / Result.
%% @doc Pretty-prints an instance: its coefficients rendered as fixed-point
%%      numbers ("~9.6f"), separated by single spaces.
format(Coefficients) ->
    Fields = lists:map(fun(C) -> io_lib:format("~9.6f", [C]) end, Coefficients),
    string:join(Fields, " ").

%% @doc Formats an evaluation result (a float) for display.
format_result(Result) -> io_lib:format("~f", [Result]).
%% private functions

%% @doc Samples Fun at NumPoints equidistant points covering the closed
%%      range [StartX, EndX] and returns {X, Y} pairs in ascending X order.
%%      NumPoints must be at least 2 (the step computation divides by
%%      NumPoints - 1).
mk_data(Fun, {StartX, EndX}, NumPoints) ->
    Delta = (EndX - StartX) / (NumPoints - 1.0),
    %% the first sample uses StartX exactly; the rest are StartX + I*Delta
    Xs = [StartX | [StartX + I * Delta || I <- lists:seq(1, NumPoints - 1)]],
    [{X, Fun(X)} || X <- Xs].

%% @doc The data set to fit: 100 samples of sin(x) over [-pi, pi].
data() -> mk_data(fun(X) -> math:sin(X) end, {-math:pi(), math:pi()}, 100).
%% @doc Evaluates the polynomial given by the coefficient list
%%      [A0, A1, ..., An] at X, i.e. A0 + A1*X + ... + An*X^n,
%%      accumulating terms from the lowest degree upwards.
eval_fn(Coeffs, X) ->
    eval_fn_acc(Coeffs, X, 1.0, 0.0).

%% @doc Tail-recursive accumulation: XPow carries X^i for the current
%%      coefficient, Sum the partial polynomial value.
eval_fn_acc([], _X, _XPow, Sum) ->
    Sum;
eval_fn_acc([Ai | Rest], X, XPow, Sum) ->
    eval_fn_acc(Rest, X, X * XPow, Sum + Ai * XPow).
%% @doc Error metric for an instance: the sum of squared differences
%%      between the polynomial's value and Y over all data points.
eval_error(Fn, Data) ->
    lists:sum([begin
                   Diff = eval_fn(Fn, X) - Y,
                   Diff * Diff
               end || {X, Y} <- Data]).
%% @doc Returns a random coefficient: a uniform float from rand:uniform/0
%%      with a random sign. Uses the 'rand' module; the older 'random'
%%      module is deprecated and has been removed in recent OTP releases.
random_coeff() -> utils:crandom([-1, 1]) * rand:uniform().

%% @doc Returns a random polynomial of degree N, i.e. N+1 random coefficients.
random_poly(N) -> [random_coeff() || _I <- lists:seq(0, N)].
%% @doc Randomly perturbs a single coefficient; the mutation kind is drawn
%%      uniformly from multiplicative nudges, magnitude changes, inversion,
%%      sign flip, resets to 0.0/1.0 and a completely fresh random value.
adjust_coeff(A) ->
case utils:crandom([nudge1, nudge2, mag, inv, sign, set0, set1, rand]) of
%% coarse multiplicative nudge
nudge1 -> A * utils:crandom([0.2, 0.3, 0.5, 0.7, 1.4, 2.0, 3.0, 5.0]);
%% fine multiplicative nudge
nudge2 -> A * utils:crandom([0.9, 0.99, 0.999, 0.9999, 1.1, 1.01, 1.001, 1.0001]);
%% change the order of magnitude
mag -> A * utils:crandom([0.0001, 0.001, 0.01, 0.1, 10, 100, 1000, 10000]);
%% reciprocal; guarded so a zero coefficient maps to 0.0 instead
inv when A /= 0.0 -> 1.0 / A;
inv -> 0.0;
sign -> -A;
set0 -> 0.0;
set1 -> 1.0;
rand -> random_coeff()
end.
%% mutate N times in a row to allow changing different coefficients at once
%% @doc Applies N random coefficient adjustments to the instance; each step
%%      picks a (possibly different) position uniformly at random.
mutate(Fn, 0) -> Fn;
mutate(Fn, N) ->
    %% rand replaces the deprecated 'random' module (removed in recent OTP)
    I = rand:uniform(length(Fn)),
    Ai = lists:nth(I, Fn),
    NewAi = adjust_coeff(Ai),
    mutate(utils:change_item(I, NewAi, Fn), N-1).
%%% vim:ts=2:sw=2:et
%%%-----------------------------------------------------------------------------
%%% @doc Erlang pipeline parse transform
%%%
%%% This transform implements a parser syntax extension that enables application
%%% of cascading function calls using the `/' operator.
%%%
%%% In the `LHS / RHS / ... Last.' notation, the result of evaluation of the LHS
%%% expression is passed as an argument to the RHS expression. This process
%%% continues until the `Last' expression is evaluated. The head element of the
%%% pipeline must be either a term to which the arithmetic division `/` operator
%%% cannot apply (i.e. not integers, floats, functions), or if you need to pass
%%% integer(s) or float(s), wrap them in list brackets.
%%%
%%% This transform is inspired by the similar functionality in Linux (i.e. `|'
%%% pipe) and Elixir (`|>' pipe).
%%%
%%% When using this as a parse transform, include the `{parse_transform,erlpipe}'
%%% compiler option.
%%%
%%% The following examples illustrate the work of the transform, in which:
%%% ```
%%% test1(A) -> [A] / fun1 / mod:fun2 / fun3.
%%% test2(A,B) -> [A,B] / fun4 / fun5() / io:format("~p\n", [_]).
%%% '''
%%% will be transformed to:
%%% ```
%%% test1(A) -> fun3(mod:fun2(fun1(A))).
%%% test2(A,B) -> io:format("~p\n", [fun5(fun4(A,B))]).
%%% '''
%%%
%%% Similarly to Elixir, a special `tap/2' function is implemented, which
%%% passes the given argument to an anonymous function, returning the argument
%%% itself. The following:
%%% ```
%%% f(A) -> A+1.
%%% ...
%%% test_tap() ->
%%% [10] / tap(f)
%%% / tap(fun f/1)
%%% / tap(fun(I) -> I+1 end).
%%% '''
%%% is equivalent to:
%%% ```
%%% ...
%%% test_tap() ->
%%% begin
%%% f(10),
%%% begin
%%% f(10),
%%% begin
%%% (fun(I) -> I end)(10)
%%% 10
%%% end
%%% end
%%% end.
%%% '''
%%%
%%% For debugging the AST of the resulting transform, pass the following
%%% options to the `erlc' compiler:
%%% <dl>
%%% <li>`-Derlpipe_orig' - print the original AST before the transform</li>
%%% <li>`-Derlpipe_ast' - print the transformed AST</li>
%%% <li>`-Derlpipe_src' - print the resulting source code after the transform</li>
%%% </dl>
%%%
%%% @author <NAME> <saleyn(at)gmail(dot)com>
%%% @end
%%%-----------------------------------------------------------------------------
%%% Copyright (c) 2021 <NAME>
%%%
%%% Permission is hereby granted, free of charge, to any person
%%% obtaining a copy of this software and associated documentation
%%% files (the "Software"), to deal in the Software without restriction,
%%% including without limitation the rights to use, copy, modify, merge,
%%% publish, distribute, sublicense, and/or sell copies of the Software,
%%% and to permit persons to whom the Software is furnished to do
%%% so, subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included
%%% in all copies or substantial portions of the Software.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%%%-----------------------------------------------------------------------------
-module(erlpipe).
-export([parse_transform/2]).
-import(etran_util, [transform/2]).
-define(OP, '/').
%% @doc parse_transform entry point: rewrites all '/'-pipelines in the
%%      given abstract forms (see the module doc); Options may enable the
%%      erlpipe_orig/erlpipe_ast/erlpipe_src debug output.
parse_transform(AST, Options) ->
etran_util:apply_transform(?MODULE, fun replace/1, AST, Options).
%% @doc Form visitor: a binary '/' operation is rewritten into nested
%%      calls; any other form returns 'continue' so the generic traversal
%%      descends into it.
replace({op, _Loc, ?OP, Arg, Rhs}) ->
apply_args(Arg, Rhs);
replace(_Exp) ->
continue.
%% @doc Evaluates the left-hand side of a pipeline step and feeds the
%%      result(s) as the argument list of the right-hand side expression.
%%      Returns 'continue' where the generic traversal should proceed.
%% nested pipeline on the left: reduce it first, then pipe its result on
apply_args({op, _Loc, ?OP, A, E}, Rhs) ->
case apply_args(A, E) of
continue -> continue;
[Args] -> apply_args(Rhs, [Args]);
Args -> apply_args(Rhs, [Args])
end;
%% a list head ([A, B] / Fun): its elements become the argument list
apply_args({cons, _Loc, _, _} = List, Rhs) ->
Args = [hd(transform(fun replace/1, [F])) || F <- cons_to_list(List)],
[E] = transform(fun replace/1, [Rhs]),
do_apply(E, Args);
%% a simple literal head (atom/binary/tuple/string) is a one-element argument
apply_args({Op, _Loc, _} = Arg, RHS) when Op==atom; Op==bin; Op==tuple; Op==string ->
if is_tuple(RHS) ->
do_apply(RHS, [Arg]);
true ->
do_apply(Arg, RHS)
end;
%% List comprehension
apply_args({lc,_,_,_}=Lhs, Rhs) ->
do_apply(Lhs, Rhs);
%% both sides already argument lists: transform the forms, then apply
apply_args(AArgs, Rhs) when is_list(AArgs), is_list(Rhs) ->
Args = [hd(transform(fun replace/1, [F])) || F <- AArgs],
[E] = transform(fun replace/1, Rhs),
do_apply(E, Args);
apply_args(LHS, RHS) when is_tuple(LHS), is_list(RHS) ->
do_apply(LHS, RHS);
%% two bare expression forms: treat as ordinary division, don't rewrite
apply_args(LHS, RHS) when is_tuple(LHS), is_tuple(RHS) ->
continue.
%% @doc Builds the abstract form of the actual call: applies the RHS
%%      expression form to the given argument forms. Handles plain and
%%      remote function names, fun references, existing calls (arguments
%%      are appended or substituted for '_' placeholders), the tap/1
%%      pass-through, operator expressions and the '_' placeholder itself.
%% local function name: f becomes f(Args)
do_apply({atom, Loc, _V} = Function, Arguments) ->
{call, Loc, Function, Arguments};
%% remote function name: m:f becomes m:f(Args)
do_apply({remote, Loc, _M, _F} = Function, Arguments) ->
{call, Loc, Function, Arguments};
%% zero-argument call written as f(): the pipeline arguments are inserted
do_apply({call, Loc, Fun, []}, Arguments) ->
{call, Loc, Fun, Arguments};
%% fun f/N reference
do_apply({'fun', Loc, {function, Fun, _}}, Arguments) ->
{call, Loc, {atom, Loc, Fun}, Arguments};
%% fun m:f/N reference
do_apply({'fun', Loc, {function, _Mod, _Fun, _Arity}=F}, Arguments) ->
{call, Loc, {'fun', Loc, F}, Arguments};
%% anonymous fun: (fun(...) -> ... end)(Args)
do_apply({'fun', Loc, {clauses, _}}=Fun, Arguments) ->
{call, Loc, Fun, Arguments};
do_apply({call, _Loc, {atom, ALoc, tap}, [Arg]}, RHS) ->
%% Tapping into a function's call (the return is a passed-through RHS argument)
Res = do_apply(Arg, RHS),
% If we are asked to tap into the fun's call, wrap the call in a block
{block, ALoc, [{match, ALoc, {var, ALoc, '_'}, Res}, hd(RHS)]};
%% RHS is a tuple when it's the head of a pipeline:
%% E.g. [I || I <- L] / ...
do_apply({Op, Loc, Fun, Args} = LHS, RHS) when (Op =:= call orelse Op =:= lc), is_list(RHS) ->
[NewLHS] = transform(fun(Forms) -> substitute(RHS, Forms) end, [LHS]),
case NewLHS of
LHS ->
%% no '_' placeholder was found: prepend the piped arguments
{Op, Loc, Fun, RHS ++ Args};
ResLHS ->
ResLHS
end;
%% RHS is a list when it's in the middle of a pipeline:
%% E.g. ... / [I || I <- _] / ...
do_apply({Op, _, _, _} = LHS, RHS) when (Op =:= call orelse Op =:= lc), is_tuple(RHS) ->
do_apply(RHS, [LHS]);
do_apply({Op, Loc, Fun, Args} = LHS, RHS) when Op =:= call; Op =:= lc ->
[NewLHS] = transform(fun(Forms) -> substitute(RHS, Forms) end, [LHS]),
case NewLHS of
LHS when is_list(RHS) ->
{Op, Loc, Fun, RHS ++ Args};
LHS ->
do_apply(RHS, [LHS]);
ResLHS ->
ResLHS
end;
%% Use of operators
do_apply({op, Loc, Op, Lhs, Rhs}, Arguments) ->
NewLhs = transform(fun replace/1, [Lhs]),
NewRhs = transform(fun replace/1, [Rhs]),
[LHS] = transform(fun(Forms) -> substitute(Arguments, Forms) end, NewLhs),
[RHS] = transform(fun(Forms) -> substitute(Arguments, Forms) end, NewRhs),
{op, Loc, Op, LHS, RHS};
%% the '_' placeholder is replaced by the single pipeline argument
do_apply({var, _, '_'}, [Arg]) ->
Arg;
%% anything else is left untouched
do_apply(Exp, _A) when is_tuple(Exp) ->
Exp.
%% Convert an abstract-format cons-cell chain into a plain Erlang list of
%% the element forms. A bare one-element list is passed through unchanged.
cons_to_list({nil, _Loc}) ->
    [];
cons_to_list({cons, _Loc, Head, Tail}) ->
    [Head | cons_to_list(Tail)];
cons_to_list([Single]) ->
    [Single].
%% Substitute '_', '_1', '_2', ... '_N' with the corresponding argument.
%% '_' takes the first (head) argument when Args is a list, otherwise Args
%% itself; '_N' takes the N-th argument (1-based). Any other variable, or
%% an out-of-range index, yields `continue' so the node stays untouched.
%% (Fixed: stray dataset metadata fused onto the final line broke compilation.)
substitute(Args, {var, _, V}) when is_atom(V) ->
    case atom_to_list(V) of
        "_" when is_list(Args) ->
            hd(Args);
        "_" -> Args;
        [$_|T] ->
            try
                M = list_to_integer(T),
                lists:nth(M, Args)
            catch _:_ ->
                continue
            end;
        _ ->
            continue
    end;
substitute(_Args, _) ->
    continue.
%% @author <NAME> <<EMAIL>>
%% @copyright 2021 <NAME>
%% @doc Calculate the ImageMagick distortion parameters for a 3D rotation.
%% Copyright 2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(z_media_rotate3d).
-export([
rotate3d/5
]).
-include("../../include/zotonic.hrl").
%% @doc Calculate the transformation matrix for the perspective. Translate the corner
%% points of the image to the arguments for the ImageMagick distort function. This code
%% has been derived from http://www.fmwconcepts.com/imagemagick/3Drotate/index.php
%%
%% Here we don't scale the image, the image is rotated from the center of the image, and
%% we use a fixed viewing point that is compatible with the viewing point used in the
%% mod_image_edit css3 transformations.
%%
%% Returns {ok, Distort} where Distort is a "-distort Perspective ..." shell
%% argument string mapping the four image corners to their projected
%% positions, or {error, empty_image} for degenerate (width/height =< 1) input.
-spec rotate3d( Width :: non_neg_integer(), Height :: non_neg_integer(),
                Roll :: number(), Tilt :: number(), Pan :: number() ) -> {ok, string()} | {error, term()}.
rotate3d(Width, Height, _Roll, _Tilt, _Pan) when Width =< 1; Height =< 1 ->
    {error, empty_image};
rotate3d(Width, Height, Roll, Tilt, Pan) when is_number(Roll), is_number(Tilt), is_number(Pan) ->
    % Calculate the projection matrices for the roll, tilt and pan
    R_Roll = roll(Roll),
    R_Tilt = tilt(Tilt),
    R_Pan = pan(Pan),
    % Start with the identity matrix
    R0 = [
        [ 1.0, 0.0, 0.0 ],
        [ 0.0, 1.0, 0.0 ],
        [ 0.0, 0.0, 1.0 ]
    ],
    % Apply the rotations to the identity matrix (roll, then tilt, then pan)
    R1 = multiply_matrix(R_Roll, R0),
    R2 = multiply_matrix(R_Tilt, R1),
    R = multiply_matrix(R_Pan, R2),
    % Apply correction for the position/distance of the viewer.
    % Tfov is the precomputed tan(Dfov/2) of the commented derivation below:
    % Dfov = 56.3099324782,
    % DfovRad = deg2rad(Dfov / 2),
    % Sfov = math:sin(DfovRad),
    % Cfov = math:cos(DfovRad),
    % Tfov = Sfov / Cfov,
    Tfov = 0.5351837583,
    Diag = math:sqrt( (Width * Width) + (Height * Height) ),
    Focal = Diag / (2 * Tfov),
    % Half-extents of the image; rotation happens about the image center.
    DI = (Width - 1.0) / 2.0,
    DJ = (Height - 1.0) / 2.0,
    % Offset / conversion matrix
    B = [
        [ 1.0, 0.0, -DI ],
        [ 0.0, -1.0, DJ ],
        [ 0.0, 0.0, 1.0 ]
    ],
    % Output scaling matrix
    Aim = [
        [ 1.0, 0.0, -DI / Focal ],
        [ 0.0, -1.0, -DJ / Focal ],
        [ 0.0, 0.0, -1 / Focal ]
    ],
    % Only the first two columns of R are needed; the third is replaced
    % by the focal column in T below.
    [
        [ R_00, R_01, _R_02 ],
        [ R_10, R_11, _R_12 ],
        [ R_20, R_21, _R_22 ]
    ] = R,
    T = [
        [ R_00, R_01, 0 ],
        [ R_10, R_11, 0 ],
        [ R_20, R_21, - Focal ]
    ],
    P0 = multiply_matrix(T, B),
    P = multiply_matrix(Aim, P0),
    % Project the corners of the image to the new coordinates.
    {UpLeft_X, UpLeft_Y} = forward_project(0, 0, P),
    {UpRight_X, UpRight_Y} = forward_project(Width-1, 0, P),
    {BottomRight_X, BottomRight_Y} = forward_project(Width-1, Height-1, P),
    {BottomLeft_X, BottomLeft_Y} = forward_project(0, Height-1, P),
    % Build the ImageMagick command argument: "SrcX,SrcY DstX,DstY" pairs
    % for all four corners, clockwise from the top-left.
    D = "-distort Perspective \"" ++ lists:flatten([
        io_lib:format("~p,~p ~p,~p", [
            0,0, UpLeft_X,UpLeft_Y
        ]),
        " ",
        io_lib:format("~p,~p ~p,~p", [
            Width-1,0, UpRight_X,UpRight_Y
        ]),
        " ",
        io_lib:format("~p,~p ~p,~p", [
            Width-1,Height-1, BottomRight_X,BottomRight_Y
        ]),
        " ",
        io_lib:format("~p,~p ~p,~p", [
            0,Height-1, BottomLeft_X,BottomLeft_Y
        ]),
        "\""
    ]),
    {ok, D}.
%% Rotation matrices for the three axes. Angles are in degrees and must
%% lie within [-180, 180]; out-of-range input fails the guard.

%% Rotation about the vertical axis (pan).
pan(Deg) when Deg >= -180, Deg =< 180 ->
    S = math:sin(deg2rad(Deg)),
    C = math:cos(deg2rad(Deg)),
    [
        [ C, 0, S ],
        [ 0, 1, 0 ],
        [ -S, 0, C ]
    ].
%% Rotation about the horizontal axis (tilt).
tilt(Deg) when Deg >= -180, Deg =< 180 ->
    S = math:sin(deg2rad(Deg)),
    C = math:cos(deg2rad(Deg)),
    [
        [ 1, 0, 0 ],
        [ 0, C, S ],
        [ 0, -S, C ]
    ].
%% Rotation about the viewing axis (roll).
roll(Deg) when Deg >= -180, Deg =< 180 ->
    S = math:sin(deg2rad(Deg)),
    C = math:cos(deg2rad(Deg)),
    [
        [ C, S, 0 ],
        [ -S, C, 0 ],
        [ 0, 0, 1 ]
    ].
%% Convert degrees to radians.
deg2rad(Deg) ->
    Deg * math:pi() / 180.0.
% Map an input-image point (X, Y) through the 3x3 homogeneous projection
% matrix M and return the truncated integer output coordinates.
forward_project(X, Y, [
        [ M00, M01, M02 ],
        [ M10, M11, M12 ],
        [ M20, M21, M22 ]
    ]) ->
    U = (M00 * X) + (M01 * Y) + M02,
    V = (M10 * X) + (M11 * Y) + M12,
    W = (M20 * X) + (M21 * Y) + M22,
    {trunc(U / W), trunc(V / W)}.
%%%%%%%%%% Matrix functions %%%%%%%%%%
% From: http://zxq9.com/archives/1387
%%% A naive matrix generation, rotation and multiplication module.
%%% It doesn't concern itself with much checking, so input dimensions must be known
%%% prior to calling any of these functions lest you receive some weird results back,
%%% as most of these functions do not crash on input that go against the rules of
%%% matrix multiplication.
%%%
%%% All functions crash on obviously bad values.
%%% (Fixed: stray dataset metadata fused onto the final line broke compilation.)
-type matrix() :: [[number()]].
%% @doc
%% Takes a matrix of {X, Y} size and rotates it left, returning a matrix of {Y, X} size.
-spec rotate_matrix(matrix()) -> matrix().
rotate_matrix(Matrix) ->
    rotate_matrix(Matrix, [], [], []).
%% @private
%% Iterates doubly over a matrix, packing the diminished remainder into Rem and
%% packing the current row into Current. This is naive, in that it assumes an
%% even matrix of dimentions {X, Y}, and will return one of dimentions {Y, X}
%% based on the length of the first row, regardless whether the input was actually
%% even.
-spec rotate_matrix(Matrix, Rem, Current, Acc) -> Rotated
    when Matrix :: matrix(),
         Rem :: [[number()]],
         Current :: [number()],
         Acc :: matrix(),
         Rotated :: matrix().
rotate_matrix([[] | _], [], [], Acc) ->
    Acc;
rotate_matrix([], Rem, Current, Acc) ->
    NewRem = lists:reverse(Rem),
    NewCurrent = lists:reverse(Current),
    rotate_matrix(NewRem, [], [], [NewCurrent | Acc]);
rotate_matrix([[V | Vs] | Rows], Rem, Current, Acc) ->
    rotate_matrix(Rows, [Vs | Rem], [V | Current], Acc).
%% @doc
%% Multiply two matrices together according to the matrix multiplication rules.
%% This function does not check that the inputs are actually proper (regular)
%% matrices, but does check that the input row/column lengths are compatible.
-spec multiply_matrix(A, B) -> Product
    when A :: matrix(),
         B :: matrix(),
         Product :: matrix().
multiply_matrix(A = [R | _], B) when length(R) == length(B) ->
    multiply_matrix(A, rotate_matrix(B), []).
%% @private
%% Iterate a row multiplication operation of each row of A over matrix B until
%% A is exhausted.
-spec multiply_matrix(A, B, Acc) -> Product
    when A :: matrix(),
         B :: matrix(),
         Acc :: matrix(),
         Product :: matrix().
multiply_matrix([A | As], B, Acc) ->
    Prod = multiply_row(A, B, []),
    multiply_matrix(As, B, [Prod | Acc]);
multiply_matrix([], _, Acc) ->
    lists:reverse(Acc).
%% @private
%% Multiply each row of matrix B by the input Row, returning the list of resulting sums.
%% (B here is the left-rotated right operand, so its rows are the original columns.)
-spec multiply_row(Row, B, Acc) -> Product
    when Row :: [number()],
         B :: matrix(),
         Acc :: [number()],
         Product :: [number()].
multiply_row(Row, [B | Bs], Acc) ->
    ZipProd = lists:zipwith(fun(X, Y) -> X * Y end, Row, B),
    Sum = lists:sum(ZipProd),
    multiply_row(Row, Bs, [Sum | Acc]);
multiply_row(_, [], Acc) ->
    Acc.
%%% @doc
%%% An implementation of the simple grant language: https://evernym.github.io/sgl/.
%%%
%%% Given a rule and a list of prinicipals it will determine if the prinicpals
%%% meet the requirements of the rule.
%%% @end
-module(sgl).
-export([evaluate/2]).
-type id_condition() :: {id, Id :: binary()}.
-type role_condition() ::
{role, Role :: binary()} | {role, N :: pos_integer(), Role :: binary()}.
-type all_condition() :: {
allcond,
[id_condition() | role_condition() | any_condition() | all_condition()]
}.
-type any_condition() ::
{
anycond,
[id_condition() | role_condition() | any_condition() | all_condition()]
}
| {
anycond,
N :: pos_integer(),
[id_condition() | role_condition() | any_condition() | all_condition()]
}.
-type rule() :: {
{grant, [Permission :: binary()]},
{condition, id_condition() | role_condition() | any_condition() | all_condition()}
}.
-type principal() :: {principal, {id, Id :: binary()}, {roles, [binary()]}}.
%% @doc Check that a given set of principals meets the requirements of the
%% given grant rule. The verdict atom (`ok' or `error') is returned together
%% with the ids of the principals consumed while evaluating the condition.
-spec evaluate(Rule :: rule(), Principals :: [principal()]) ->
    {ok, PrinicipalIds :: [Id :: binary()]} | {error, PrinicipalIds :: [Id :: binary()]}.
evaluate({{grant, _Permissions}, {condition, Condition}}, Principals) ->
    {Verdict, UsedPrincipals, _Unused} = check(Condition, {[], Principals, []}),
    %% Reduce the consumed principal records to their bare ids. The original
    %% foldl prepended ids, reversing the order; reversing the input first
    %% reproduces exactly the same output order.
    Ids = lists:map(fun({principal, {id, Id}, _Roles}) -> Id end,
                    lists:reverse(UsedPrincipals)),
    {Verdict, Ids}.
%%%
%%% Private stuff below
%%%
%% Dispatch one condition term against the {Used, Available, Unused}
%% principal state. Role and `any' conditions without an explicit count
%% default to requiring a single match.
check({id, Id}, State) ->
    do_check_id(Id, State);
check({role, Role}, State) ->
    do_check_roles(Role, 1, State);
check({role, N, Role}, State) ->
    do_check_roles(Role, N, State);
check({anycond, Rules}, State) ->
    do_check_any(Rules, 1, State);
check({anycond, N, Rules}, State) ->
    do_check_any(Rules, N, State);
check({allcond, Rules}, State) ->
    do_check_all(Rules, State).
%% Id check: scan the available principals for one whose id equals Target.
%% On a hit that principal is consumed (moved to the used set); on a miss
%% every scanned principal is returned in the unused position with `error'.
do_check_id(_Target, {Used, [], Skipped}) ->
    %% Exhausted the list without finding the id.
    {error, [], Used ++ Skipped};
do_check_id(Target, {Used, [{principal, {id, Target}, _Roles} = Match | Rest], Skipped}) ->
    %% Found the principal carrying the requested id; consume it.
    {ok, [Match | Used], Rest ++ Skipped};
do_check_id(Target, {Used, [NoMatch | Rest], Skipped}) ->
    %% Not this one; set it aside and keep scanning.
    do_check_id(Target, {Used, Rest, [NoMatch | Skipped]}).
%% Role check: consume principals that hold Role until Needed of them have
%% been found. Succeeds as soon as the quota is met, fails when the pool of
%% available principals runs out first.
do_check_roles(_Role, 0, {Used, Rest, Skipped}) ->
    %% Quota met.
    {ok, Used, Rest ++ Skipped};
do_check_roles(_Role, _Needed, {Used, [], Skipped}) ->
    %% Ran out of principals before meeting the quota.
    {error, [], Used ++ Skipped};
do_check_roles(Role, Needed, {Used, [{principal, _, {roles, Roles}} = P | Rest], Skipped}) ->
    case lists:member(Role, Roles) of
        true ->
            %% P carries the role: consume it and lower the remaining quota.
            do_check_roles(Role, Needed - 1, {[P | Used], Rest, Skipped});
        false ->
            %% P does not qualify; set it aside and keep scanning.
            do_check_roles(Role, Needed, {Used, Rest, [P | Skipped]})
    end.
%% All check logic
%% Every rule in the list must be satisfied, with each rule consuming
%% principals from the shared pool. Fails fast on the first unmet rule;
%% an empty rule list with nothing consumed is treated as a failure.
do_check_all([], {[], _, Unused}) ->
    %% Hit the end of rules and found no matches
    {error, [], Unused};
do_check_all([], {Used, _, Unused}) ->
    %% Hit the end of rules, no more to check, and didn't fail along the way. We're good!
    {ok, Used, Unused};
do_check_all([Rule | Rest], {Used, Remaining, Unused}) ->
    case check(Rule, {Used, Remaining, Unused}) of
        {ok, Used1, UnusedPrincipals} ->
            %% Found a match with one of the conditions
            do_check_all(Rest, {Used1, UnusedPrincipals, Unused});
        {error, U, P} ->
            %% Return on the first fail
            {error, U, P}
    end.
%% Any check logic
%% At least N of the rules must be satisfied; failing rules are skipped
%% rather than aborting the whole check.
%% (Fixed: stray dataset metadata fused onto the final line broke compilation.)
do_check_any([], N, {Used, _, Unused}) when N > 0 ->
    %% We exhausted all the rules and didn't meet the requirements
    {error, Used, Unused};
do_check_any(_, 0, {Used, _, Unused}) ->
    %% We hit the required amount
    {ok, Used, Unused};
do_check_any([Rule | Rest], N, {Used, Remaining, Unused}) ->
    case check(Rule, {Used, Remaining, Unused}) of
        {ok, Used1, UnusedPrincipals} ->
            %% Found a match with one of the conditions. Decrement and continue
            do_check_any(Rest, N - 1, {Used1, UnusedPrincipals, Unused});
        {error, Used1, UnusedPrincipals} ->
            %% We hit an error, but continue as we're looking for 'any' matches
            do_check_any(Rest, N, {Used1, UnusedPrincipals, Unused})
    end.
%% This is a simple implementation of the project, using one centralized server.
%%
%% It will create one "server" actor that contains all internal state (users and
%% their subscriptions, channels and their messages, and logged in users).
%%
%% This implementation is provided with unit tests. However, these tests are
%% neither complete nor implementation independent. Thus, be careful when
%% reusing them.
-module(server_centralized).
-include_lib("eunit/include/eunit.hrl").
-export([initialize/0, initialize_with/3, server_actor/3, typical_session_1/1,
typical_session_2/1]).
%%
%% Additional API Functions
%%
% Start server.
initialize() ->
    initialize_with(dict:new(), dict:new(), dict:new()).
% Start server with an initial state.
% Useful for benchmarking.
% Note: the old-style `catch' swallows the badarg raised by unregister/1
% when no previous server was registered; re-initializing therefore
% replaces any earlier registration.
initialize_with(Users, LoggedIn, Channels) ->
    ServerPid = spawn_link(?MODULE, server_actor, [Users, LoggedIn, Channels]),
    catch unregister(server_actor),
    register(server_actor, ServerPid),
    ServerPid.
% The server actor works like a small database and encapsulates all state of
% this simple implementation.
%
% * Users is a dictionary of user names to tuples of the form:
%     {user, Name, Subscriptions}
%   where Subscriptions is a set of channels that the user joined.
% * LoggedIn is a dictionary of the names of logged in users and their pid.
% * Channels is a dictionary of channel names to tuples:
%     {channel, Name, Messages}
%   where Messages is a list of messages, of the form:
%     {message, UserName, ChannelName, MessageText, SendTime}
%
% Every request is a {Sender, Tag, ...} tuple; the reply goes straight
% back to Sender and the actor tail-recurses with the updated state.
server_actor(Users, LoggedIn, Channels) ->
    receive
        {Sender, register_user, UserName} ->
            % Note: re-registering an existing name resets its subscriptions.
            NewUsers = dict:store(UserName, {user, UserName, sets:new()}, Users),
            Sender ! {self(), user_registered},
            server_actor(NewUsers, LoggedIn, Channels);
        {Sender, log_in, UserName} ->
            NewLoggedIn = dict:store(UserName, Sender, LoggedIn),
            Sender ! {self(), logged_in},
            server_actor(Users, NewLoggedIn, Channels);
        {Sender, log_out, UserName} ->
            NewLoggedIn = dict:erase(UserName, LoggedIn),
            Sender ! {self(), logged_out},
            server_actor(Users, NewLoggedIn, Channels);
        {Sender, join_channel, UserName, ChannelName} ->
            User = dict:fetch(UserName, Users), % assumes user exists
            NewUser = join_channel(User, ChannelName),
            NewUsers = dict:store(UserName, NewUser, Users),
            Sender ! {self(), channel_joined},
            server_actor(NewUsers, LoggedIn, Channels);
        {Sender, send_message, UserName, ChannelName, MessageText, SendTime} ->
            Message = {message, UserName, ChannelName, MessageText, SendTime},
            % 1. Store message in its channel
            NewChannels = store_message(Message, Channels),
            % 2. Send logged in users the message, if they joined this channel
            broadcast_message_to_members(Users, LoggedIn, Message),
            Sender ! {self(), message_sent},
            server_actor(Users, LoggedIn, NewChannels);
        {Sender, get_channel_history, ChannelName} ->
            % An unknown channel yields an empty history rather than a crash.
            {channel, ChannelName, Messages} = find_or_create_channel(ChannelName, Channels),
            Sender ! {self(), channel_history, Messages},
            server_actor(Users, LoggedIn, Channels)
    end.
%%
%% Internal Functions
%%
%% Fetch the channel record for ChannelName, or a fresh empty one when the
%% channel has never been seen before (channels are created lazily).
find_or_create_channel(ChannelName, Channels) ->
    case dict:is_key(ChannelName, Channels) of
        true -> dict:fetch(ChannelName, Channels);
        false -> {channel, ChannelName, []}
    end.
% Return a copy of `User` whose subscription set includes `ChannelName`
% (adding an already-joined channel is a no-op for a set).
join_channel({user, Name, Subscriptions}, ChannelName) ->
    NewSubscriptions = sets:add_element(ChannelName, Subscriptions),
    {user, Name, NewSubscriptions}.
% Return `Channels` with `Message` appended to its channel's message list
% (oldest message first); the channel is created on first use.
store_message({message, _User, ChannelName, _Text, _Time} = Message, Channels) ->
    {channel, ChannelName, Messages} = find_or_create_channel(ChannelName, Channels),
    NewChannel = {channel, ChannelName, Messages ++ [Message]},
    dict:store(ChannelName, NewChannel, Channels).
% Broadcast `Message` to `Users` if they joined the channel and are logged in.
% (But don't send it to the sender.)
broadcast_message_to_members(Users, LoggedIn, Message) ->
    {message, SenderName, ChannelName, _MessageText, _SendTime} = Message,
    % For each LoggedIn user, fetch his subscriptions and check whether those
    % contain the channel
    Subscribed = fun(UserName, _) ->
        {user, _, Subscriptions} = dict:fetch(UserName, Users),
        IsMember = sets:is_element(ChannelName, Subscriptions),
        IsMember and (UserName /= SenderName)
    end,
    LoggedInAndSubscribed = dict:filter(Subscribed, LoggedIn),
    % Send messages
    % (dict:map is used purely for its side effect; its result is discarded)
    dict:map(fun(_, UserPid) ->
        UserPid ! {self(), new_message, Message}
    end, LoggedInAndSubscribed),
    ok.
%%
%% Tests
%%
% These tests are for this specific implementation. They are a partial
% definition of the semantics of the provided interface but also make certain
% assumptions of the implementation. You can re-use them, but you might need to
% modify them.
% (Re)start a fresh server, unregistering any leftover from a previous run.
initialize_test() ->
    catch unregister(server_actor),
    initialize().
% Register four users; returns their names so later tests can reuse them.
register_user_test() ->
    initialize_test(),
    ?assertMatch({_, user_registered}, server:register_user(server_actor, "A")),
    ?assertMatch({_, user_registered}, server:register_user(server_actor, "B")),
    ?assertMatch({_, user_registered}, server:register_user(server_actor, "C")),
    ?assertMatch({_, user_registered}, server:register_user(server_actor, "D")),
    ["A", "B", "C", "D"].
log_in_test() ->
    [UserName1, UserName2 | _] = register_user_test(),
    ?assertMatch({_Server1, logged_in}, server:log_in(server_actor, UserName1)),
    ?assertMatch({_Server2, logged_in}, server:log_in(server_actor, UserName2)).
    % Note: returned pids _Server1 and _Server2 do not necessarily need to be
    % the same.
log_out_test() ->
    [UserName1, UserName2 | _] = register_user_test(),
    {Server1, logged_in} = server:log_in(server_actor, UserName1),
    {Server2, logged_in} = server:log_in(server_actor, UserName2),
    ?assertMatch(logged_out, server:log_out(Server1, UserName1)),
    ?assertMatch(logged_out, server:log_out(Server2, UserName2)).
% Joins two channels; returns the context for send_message_test/0.
join_channel_test() ->
    [UserName1 | _] = register_user_test(),
    {Server1, logged_in} = server:log_in(server_actor, UserName1),
    ?assertMatch(channel_joined,
                 server:join_channel(Server1, UserName1, "Channel1")),
    ?assertMatch(channel_joined,
                 server:join_channel(Server1, UserName1, "Channel2")),
    {UserName1, Server1, "Channel1", "Channel2"}.
send_message_test() ->
    {UserName1, Server1, Channel1, _Channel2} = join_channel_test(),
    ?assertMatch(message_sent,
                 server:send_message(Server1, UserName1, Channel1, "Hello!")),
    ?assertMatch(message_sent,
                 server:send_message(Server1, UserName1, Channel1, "How are you?")).
% Checks that the history preserves arrival order and timestamps.
channel_history_test() ->
    % Create users, log in, join channels.
    [UserName1, UserName2 | _] = register_user_test(),
    {Server1, logged_in} = server:log_in(server_actor, UserName1),
    {Server2, logged_in} = server:log_in(server_actor, UserName2),
    Channel1 = "Channel1",
    server:join_channel(Server1, UserName1, Channel1),
    server:join_channel(Server2, UserName2, Channel1),
    % Send some messages
    server:send_message(Server1, UserName1, Channel1, "Hello!"),
    server:send_message(Server2, UserName2, Channel1, "Hi!"),
    server:send_message(Server1, UserName1, Channel1, "How are you?"),
    % Check history
    [{message, UserName1, Channel1, "Hello!", Time1},
     {message, UserName2, Channel1, "Hi!", Time2},
     {message, UserName1, Channel1, "How are you?", Time3}] =
        server:get_channel_history(Server1, Channel1),
    ?assert(Time1 =< Time2),
    ?assert(Time2 =< Time3).
% Runs the two scripted sessions below concurrently and waits for both to
% report ok (the processes are linked, so a crash fails the test).
typical_session_test() ->
    initialize_test(),
    Session1 = spawn_link(?MODULE, typical_session_1, [self()]),
    Session2 = spawn_link(?MODULE, typical_session_2, [self()]),
    receive
        {Session1, ok} ->
            receive
                {Session2, ok} ->
                    done
            end
    end.
% First half of the scripted session: "Jennifer" greets, waits for
% "Janwillem"'s reply, responds, then verifies the shared channel history.
typical_session_1(TesterPid) ->
    {_, user_registered} = server:register_user(server_actor, "Jennifer"),
    {Server, logged_in} = server:log_in(server_actor, "Jennifer"),
    channel_joined = server:join_channel(Server, "Jennifer", "multicore"),
    message_sent = server:send_message(Server, "Jennifer", "multicore", "Hello!"),
    % Wait for reply
    Time2 = receive
        {_, new_message, Message} ->
            ?assertMatch({message, "Janwillem", "multicore", "Hi!", _}, Message),
            {message, _, _, _, Time} = Message,
            Time
    end,
    % Respond
    message_sent = server:send_message(Server, "Jennifer", "multicore", "How are you?"),
    % Check history
    [{message, "Jennifer", "multicore", "Hello!", Time1},
     {message, "Janwillem", "multicore", "Hi!", Time2},
     {message, "Jennifer", "multicore", "How are you?", Time3}] =
        server:get_channel_history(Server, "multicore"),
    ?assert(Time1 =< Time2),
    ?assert(Time2 =< Time3),
    TesterPid ! {self(), ok}.
% Second half of the scripted session: "Janwillem" waits for "Jennifer"'s
% greeting, replies, waits for her response, verifies the shared channel
% history, then reports ok to the test runner.
% (Fixed: stray dataset metadata fused onto the final line broke compilation.)
typical_session_2(TesterPid) ->
    {_, user_registered} = server:register_user(server_actor, "Janwillem"),
    {Server, logged_in} = server:log_in(server_actor, "Janwillem"),
    channel_joined = server:join_channel(Server, "Janwillem", "multicore"),
    % Wait for first message
    Time1 = receive
        {_, new_message, Message1} ->
            ?assertMatch({message, "Jennifer", "multicore", "Hello!", _}, Message1),
            {message, _, _, _, Time} = Message1,
            Time
    end,
    % Reply
    message_sent = server:send_message(Server, "Janwillem", "multicore", "Hi!"),
    % Wait for response
    Time3 = receive
        {_, new_message, Message3} ->
            ?assertMatch({message, "Jennifer", "multicore", "How are you?", _}, Message3),
            {message, _, _, _, Time_} = Message3,
            Time_
    end,
    % Check history
    [{message, "Jennifer", "multicore", "Hello!", Time1},
     {message, "Janwillem", "multicore", "Hi!", Time2},
     {message, "Jennifer", "multicore", "How are you?", Time3}] =
        server:get_channel_history(Server, "multicore"),
    ?assert(Time1 =< Time2),
    ?assert(Time2 =< Time3),
    TesterPid ! {self(), ok}.
% @copyright 2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Test suite for the math_pos module.
%% @end
%% @version $Id$
-module(math_pos_SUITE).
-author('<EMAIL>').
-vsn('$Id$ ').
-compile(export_all).
-include("unittest.hrl").
-include("scalaris.hrl").
%% Common Test callback: the test cases to run.
%% NOTE(review): the last two entries are `prop_*' while the rest are
%% `tester_*' wrappers -- confirm functions with those exact names exist
%% as CT cases.
all() ->
    [plus, minus, multiply, divide,
     tester_make_same_length,
     tester_plus_symm, tester_plus_valid,
     tester_minus, tester_minus_valid,
     prop_divide_valid, prop_multiply_valid].
%% Common Test suite options: each case may run for at most 20 seconds.
suite() ->
    [
     {timetrap, {seconds, 20}}
    ].
%% Delegate suite setup/teardown to the shared Scalaris test helper.
init_per_suite(Config) ->
    unittest_helper:init_per_suite(Config).
end_per_suite(Config) ->
    _ = unittest_helper:end_per_suite(Config),
    ok.
%% Unit tests for math_pos position-wise arithmetic in a given base.
%% Results are fixed-width: an overall carry/borrow out of the most
%% significant position is dropped (e.g. [1]+[9] = [0] in base 10).
-spec plus(Config::[tuple()]) -> ok.
plus(_Config) ->
    ?equals(math_pos:plus( [1], [1], 10), [2]),
    ?equals(math_pos:plus( [1], [2], 10), [3]),
    ?equals(math_pos:plus( [1], [9], 10), [0]),
    ?equals(math_pos:plus([0,1], [0,9], 10), [1,0]),
    ?equals(math_pos:plus( [2], [1], 10), [3]),
    ?equals(math_pos:plus( [9], [1], 10), [0]),
    ?equals(math_pos:plus([0,9], [0,1], 10), [1,0]).
%% Subtraction wraps around the base on underflow ([1]-[2] = [9] in base 10).
-spec minus(Config::[tuple()]) -> ok.
minus(_Config) ->
    ?equals(math_pos:minus( [1], [1], 10), [0]),
    ?equals(math_pos:minus( [1], [2], 10), [9]),
    ?equals(math_pos:minus( [1], [9], 10), [2]),
    ?equals(math_pos:minus([0,1], [0,9], 10), [9,2]),
    ?equals(math_pos:minus( [2], [1], 10), [1]),
    ?equals(math_pos:minus( [9], [1], 10), [8]),
    ?equals(math_pos:minus([0,9], [0,1], 10), [0,8]).
%% Multiplication by a scalar factor, again with the top carry dropped
%% when the result would exceed the fixed width ([5]*2 = [0] in base 10).
-spec multiply(Config::[tuple()]) -> ok.
multiply(_Config) ->
    ?equals(math_pos:multiply( [1], 2, 10), [2]),
    ?equals(math_pos:multiply( [2], 2, 10), [4]),
    ?equals(math_pos:multiply( [3], 2, 10), [6]),
    ?equals(math_pos:multiply( [4], 2, 10), [8]),
    ?equals(math_pos:multiply( [5], 2, 10), [0]),
    ?equals(math_pos:multiply( [6], 2, 10), [2]),
    ?equals(math_pos:multiply( [7], 2, 10), [4]),
    ?equals(math_pos:multiply([0,1], 2, 10), [0,2]),
    ?equals(math_pos:multiply([0,2], 2, 10), [0,4]),
    ?equals(math_pos:multiply([0,3], 2, 10), [0,6]),
    ?equals(math_pos:multiply([0,4], 2, 10), [0,8]),
    ?equals(math_pos:multiply([0,5], 2, 10), [1,0]),
    ?equals(math_pos:multiply([0,6], 2, 10), [1,2]),
    ?equals(math_pos:multiply([0,7], 2, 10), [1,4]),
    ?equals(math_pos:multiply( [1], 3, 10), [3]),
    ?equals(math_pos:multiply( [2], 3, 10), [6]),
    ?equals(math_pos:multiply( [3], 3, 10), [9]),
    ?equals(math_pos:multiply( [4], 3, 10), [2]),
    ?equals(math_pos:multiply( [5], 3, 10), [5]),
    ?equals(math_pos:multiply( [6], 3, 10), [8]),
    ?equals(math_pos:multiply( [7], 3, 10), [1]),
    ?equals(math_pos:multiply([0,1], 3, 10), [0,3]),
    ?equals(math_pos:multiply([0,2], 3, 10), [0,6]),
    ?equals(math_pos:multiply([0,3], 3, 10), [0,9]),
    ?equals(math_pos:multiply([0,4], 3, 10), [1,2]),
    ?equals(math_pos:multiply([0,5], 3, 10), [1,5]),
    ?equals(math_pos:multiply([0,6], 3, 10), [1,8]),
    ?equals(math_pos:multiply([0,7], 3, 10), [2,1]).
%% Division by a scalar, rounding: note [1,1]/2 = [0,5] but [1,3]/2 = [0,6],
%% i.e. the quotient is rounded position-wise rather than plainly truncated.
-spec divide(Config::[tuple()]) -> ok.
divide(_Config) ->
    ?equals(math_pos:divide( [1], 2, 10), [0]),
    ?equals(math_pos:divide( [2], 2, 10), [1]),
    ?equals(math_pos:divide( [3], 2, 10), [1]),
    ?equals(math_pos:divide( [4], 2, 10), [2]),
    ?equals(math_pos:divide([1,0], 2, 10), [0,5]),
    ?equals(math_pos:divide([1,1], 2, 10), [0,5]),
    ?equals(math_pos:divide([1,2], 2, 10), [0,6]),
    ?equals(math_pos:divide([1,3], 2, 10), [0,6]),
    ?equals(math_pos:divide([1,4], 2, 10), [0,7]),
    ?equals(math_pos:divide([2,0], 2, 10), [1,0]),
    ?equals(math_pos:divide([2,1], 2, 10), [1,0]),
    ?equals(math_pos:divide([2,2], 2, 10), [1,1]),
    ?equals(math_pos:divide([2,3], 2, 10), [1,1]),
    ?equals(math_pos:divide([2,4], 2, 10), [1,2]),
    ?equals(math_pos:divide( [1], 3, 10), [0]),
    ?equals(math_pos:divide( [2], 3, 10), [0]),
    ?equals(math_pos:divide( [3], 3, 10), [1]),
    ?equals(math_pos:divide( [4], 3, 10), [1]),
    ?equals(math_pos:divide([1,0], 3, 10), [0,3]),
    ?equals(math_pos:divide([1,1], 3, 10), [0,3]),
    ?equals(math_pos:divide([1,2], 3, 10), [0,4]),
    ?equals(math_pos:divide([1,3], 3, 10), [0,4]),
    ?equals(math_pos:divide([1,4], 3, 10), [0,4]),
    ?equals(math_pos:divide([2,0], 3, 10), [0,6]),
    ?equals(math_pos:divide([2,1], 3, 10), [0,7]),
    ?equals(math_pos:divide([3,0], 3, 10), [1,0]),
    ?equals(math_pos:divide([3,1], 3, 10), [1,0]),
    ?equals(math_pos:divide([3,2], 3, 10), [1,0]),
    ?equals(math_pos:divide([3,3], 3, 10), [1,1]),
    ?equals(math_pos:divide([3,4], 3, 10), [1,1]),
    ?equals(math_pos:divide([3,5], 3, 10), [1,1]),
    ?equals(math_pos:divide([3,6], 3, 10), [1,2]).
%% make_same_length
-spec prop_make_same_length(A::string(), B::string(), front | back) -> true.
prop_make_same_length(A, B, Pos) ->
{A1, B1, A1Added, B1Added} = math_pos:make_same_length(A, B, Pos),
?equals(erlang:length(A1), erlang:length(B1)),
?equals(math_pos:remove_zeros(A1, Pos, all), math_pos:remove_zeros(A, Pos, all)),
?equals(math_pos:remove_zeros(B1, Pos, all), math_pos:remove_zeros(B, Pos, all)),
?equals(math_pos:remove_zeros(A1, Pos, A1Added), A),
?equals(math_pos:remove_zeros(B1, Pos, B1Added), B),
true.
-spec tester_make_same_length(Config::[tuple()]) -> ok.
tester_make_same_length(_Config) ->
tester:test(?MODULE, prop_make_same_length, 3, 10000).
%% plus
-spec prop_plus_valid_base(X, X, Pos::front | back, Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_plus_valid_base(A_, B_, Pos, Base) ->
{A, B, _, _} = math_pos:make_same_length(A_, B_, Pos),
A_plus_B = math_pos:plus(A, B, Base),
?equals(erlang:length(A_plus_B), erlang:length(A)),
case lists:all(fun(E) -> E >= 0 andalso E < Base end, A_plus_B) of
true -> true;
_ -> ?ct_fail("math_pos:plus(A, B, ~B) evaluated to \"~.0p\" and "
"contains invalid elements~n",
[Base, A_plus_B])
end.
-spec prop_plus_valid1(A::[0..9], B::[0..9]) -> true.
prop_plus_valid1(A_, B_) -> prop_plus_valid_base(A_, B_, front, 10).
-spec prop_plus_valid2(A::string(), B::string()) -> true.
prop_plus_valid2(A_, B_) -> prop_plus_valid_base(A_, B_, back, 16#10ffff + 1).
-spec prop_plus_valid3(A::nonempty_string(), B::nonempty_string()) -> true.
prop_plus_valid3(A_, B_) ->
Base = erlang:max(lists:max(A_), lists:max(B_)) + 1,
prop_plus_valid_base(A_, B_, back, Base).
-spec tester_plus_valid(Config::[tuple()]) -> ok.
tester_plus_valid(_Config) ->
tester:test(?MODULE, prop_plus_valid1, 2, 10000),
tester:test(?MODULE, prop_plus_valid2, 2, 10000),
tester:test(?MODULE, prop_plus_valid3, 2, 10000).
-spec prop_plus_symm_base(X, X, Pos::front | back, Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_plus_symm_base(A_, B_, Pos, Base) ->
{A, B, _, _} = math_pos:make_same_length(A_, B_, Pos),
?equals(math_pos:plus(A, B, Base), math_pos:plus(B, A, Base)),
true.
-spec prop_plus_symm1(A::[0..9], B::[0..9]) -> true.
prop_plus_symm1(A_, B_) -> prop_plus_symm_base(A_, B_, front, 10).
-spec prop_plus_symm2(A::string(), B::string()) -> true.
prop_plus_symm2(A_, B_) -> prop_plus_symm_base(A_, B_, back, 16#10ffff + 1).
-spec prop_plus_symm3(A::nonempty_string(), B::nonempty_string()) -> true.
prop_plus_symm3(A_, B_) ->
Base = erlang:max(lists:max(A_), lists:max(B_)) + 1,
prop_plus_symm_base(A_, B_, back, Base).
-spec tester_plus_symm(Config::[tuple()]) -> ok.
tester_plus_symm(_Config) ->
tester:test(?MODULE, prop_plus_symm1, 2, 10000),
tester:test(?MODULE, prop_plus_symm2, 2, 10000),
tester:test(?MODULE, prop_plus_symm3, 2, 10000).
%% minus
%% @doc Property: A - B in positional representation yields a digit list of
%% the same length as A whose digits all lie within [0, Base). Operands are
%% first padded to equal length at Pos (front|back).
-spec prop_minus_valid_base(X, X, Pos::front | back, Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_minus_valid_base(A_, B_, Pos, Base) ->
    {A, B, _, _} = math_pos:make_same_length(A_, B_, Pos),
    A_minus_B = math_pos:minus(A, B, Base),
    ?equals(erlang:length(A_minus_B), erlang:length(A)),
    case lists:all(fun(E) -> E >= 0 andalso E < Base end, A_minus_B) of
        true -> true;
        _ -> ?ct_fail("math_pos:minus(A, B, ~B) evaluated to \"~.0p\" and "
                      "contains invalid elements~n",
                      [Base, A_minus_B])
    end.
%% Validity with decimal digits (base 10), front-padded.
-spec prop_minus_valid1(A::[0..9], B::[0..9]) -> true.
prop_minus_valid1(A_, B_) -> prop_minus_valid_base(A_, B_, front, 10).
%% Validity with unicode code points (base 16#10ffff + 1), back-padded.
-spec prop_minus_valid2(A::string(), B::string()) -> true.
prop_minus_valid2(A_, B_) -> prop_minus_valid_base(A_, B_, back, 16#10ffff + 1).
%% Validity with the smallest base that can represent all input digits.
-spec prop_minus_valid3(A::nonempty_string(), B::nonempty_string()) -> true.
prop_minus_valid3(A_, B_) ->
    Base = erlang:max(lists:max(A_), lists:max(B_)) + 1,
    prop_minus_valid_base(A_, B_, back, Base).
%% @doc Runs the minus-validity properties, 10000 random cases each.
-spec tester_minus_valid(Config::[tuple()]) -> ok.
tester_minus_valid(_Config) ->
    tester:test(?MODULE, prop_minus_valid1, 2, 10000),
    %% NOTE(review): explicit input below looks like a pinned regression case
    %% at the unicode base boundary -- confirm against the project history.
    prop_minus_valid2([557056,0,0,0,0,1], [557055,1114111,1114111,1114111,1114111,1114111]),
    tester:test(?MODULE, prop_minus_valid2, 2, 10000),
    tester:test(?MODULE, prop_minus_valid3, 2, 10000).
%% @doc Property: subtraction round-trips through addition,
%% i.e. (A - B) + B =:= A in positional representation for the given Base.
-spec prop_minus_base(X, X, Pos::front | back, Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_minus_base(A_, B_, Pos, Base) ->
    {A, B, _, _} = math_pos:make_same_length(A_, B_, Pos),
    A_B = math_pos:minus(A, B, Base),
    ?equals(math_pos:plus(A_B, B, Base), A),
    true.
%% Round-trip with decimal digits (base 10), front-padded.
-spec prop_minus1(A::[0..9], B::[0..9]) -> true.
prop_minus1(A_, B_) -> prop_minus_base(A_, B_, front, 10).
%% Round-trip with unicode code points (base 16#10ffff + 1), back-padded.
-spec prop_minus2(A::string(), B::string()) -> true.
prop_minus2(A_, B_) -> prop_minus_base(A_, B_, back, 16#10ffff + 1).
%% Round-trip with the smallest base that can represent all input digits.
-spec prop_minus3(A::nonempty_string(), B::nonempty_string()) -> true.
prop_minus3(A_, B_) ->
    Base = erlang:max(lists:max(A_), lists:max(B_)) + 1,
    prop_minus_base(A_, B_, back, Base).
%% @doc Runs the minus round-trip properties, 10000 random cases each.
-spec tester_minus(Config::[tuple()]) -> ok.
tester_minus(_Config) ->
    tester:test(?MODULE, prop_minus1, 2, 10000),
    tester:test(?MODULE, prop_minus2, 2, 10000),
    tester:test(?MODULE, prop_minus3, 2, 10000).
%% divide
%% @doc Property: dividing digit list A by Div preserves the length of A
%% and keeps every digit within [0, Base).
-spec prop_divide_valid_base(X, Div::pos_integer(), Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_divide_valid_base(A, Div, Base) ->
    A_div = math_pos:divide(A, Div, Base),
    ?equals(erlang:length(A_div), erlang:length(A)),
    case lists:all(fun(E) -> E >= 0 andalso E < Base end, A_div) of
        true -> true;
        _ -> ?ct_fail("math_pos:divide(A, Div, ~B) evaluated to \"~.0p\" and "
                      "contains invalid elements~n",
                      [Base, A_div])
    end.
%% Validity with decimal digits (base 10).
-spec prop_divide_valid1(A::[0..9], Div::pos_integer()) -> true.
prop_divide_valid1(A, Div) -> prop_divide_valid_base(A, Div, 10).
%% Validity with unicode code points (base 16#10ffff + 1).
-spec prop_divide_valid2(A::string(), Div::pos_integer()) -> true.
prop_divide_valid2(A, Div) -> prop_divide_valid_base(A, Div, 16#10ffff + 1).
%% Validity with the smallest base that can represent all input digits.
-spec prop_divide_valid3(A::nonempty_string(), Div::pos_integer()) -> true.
prop_divide_valid3(A, Div) ->
    Base = lists:max(A) + 1,
    prop_divide_valid_base(A, Div, Base).
%% @doc Runs the divide-validity properties, 10000 random cases each.
%% NOTE(review): named prop_* although it is a test-case driver like the
%% tester_* functions above -- presumably historical; confirm before renaming.
-spec prop_divide_valid(Config::[tuple()]) -> ok.
prop_divide_valid(_Config) ->
    tester:test(?MODULE, prop_divide_valid1, 2, 10000),
    tester:test(?MODULE, prop_divide_valid2, 2, 10000),
    tester:test(?MODULE, prop_divide_valid3, 2, 10000).
%% multiply
%% @doc Property: multiplying digit list A by factor Fac preserves the
%% length of A and keeps every digit within [0, Base).
-spec prop_multiply_valid_base(X, Fac::non_neg_integer(), Base::pos_integer()) -> true when is_subtype(X, list(non_neg_integer())).
prop_multiply_valid_base(A, Fac, Base) ->
    A_prod = math_pos:multiply(A, Fac, Base),
    ?equals(erlang:length(A_prod), erlang:length(A)),
    case lists:all(fun(E) -> E >= 0 andalso E < Base end, A_prod) of
        true -> true;
        %% Fixed: the failure message previously printed the parameter as
        %% "Div" (copy/paste from the divide property) although it is Fac.
        _ -> ?ct_fail("math_pos:multiply(A, Fac, ~B) evaluated to \"~.0p\" and "
                      "contains invalid elements~n",
                      [Base, A_prod])
    end.
%% Validity with decimal digits and a single-digit factor (base 10).
-spec prop_multiply_valid1(A::[0..9], Fac::0..9) -> true.
prop_multiply_valid1(A, Fac) -> prop_multiply_valid_base(A, Fac, 10).
%% Validity with unicode code points (base 16#10ffff + 1).
-spec prop_multiply_valid2(A::string(), Fac::0..16#10ffff) -> true.
prop_multiply_valid2(A, Fac) -> prop_multiply_valid_base(A, Fac, 16#10ffff + 1).
%% Validity with the smallest base representing all digits; the factor is
%% clamped to A_max so it stays a valid digit in that base.
-spec prop_multiply_valid3(A::nonempty_string(), Fac::non_neg_integer()) -> true.
prop_multiply_valid3(A, Fac) ->
    A_max = lists:max(A), Base = A_max + 1,
    prop_multiply_valid_base(A, erlang:min(Fac, A_max), Base).
%% @doc Runs the multiply-validity properties, 10000 random cases each.
%% NOTE(review): named prop_* although it is a test-case driver like the
%% tester_* functions above -- presumably historical; confirm before renaming.
-spec prop_multiply_valid(Config::[tuple()]) -> ok.
prop_multiply_valid(_Config) ->
    tester:test(?MODULE, prop_multiply_valid1, 2, 10000),
    tester:test(?MODULE, prop_multiply_valid2, 2, 10000),
    tester:test(?MODULE, prop_multiply_valid3, 2, 10000).
%% Copyright (c) Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(erlfmt_markdown_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
%% Test server callbacks
-export([
suite/0,
all/0,
groups/0,
init_per_suite/1,
end_per_suite/1,
init_per_group/2,
end_per_group/2,
init_per_testcase/2,
end_per_testcase/2
]).
%% Test cases
-export([
markdown_files/1,
markdown_string/1
]).
%% Common Test suite-level configuration: every test case times out after 10s.
suite() ->
    [{timetrap, {seconds, 10}}].
%% The init/end callbacks below are no-ops; they exist to satisfy the
%% exported Common Test callback contract.
init_per_suite(Config) ->
    Config.
end_per_suite(_Config) ->
    ok.
init_per_group(_GroupName, Config) ->
    Config.
end_per_group(_GroupName, _Config) ->
    ok.
init_per_testcase(_TestCase, Config) ->
    Config.
end_per_testcase(_TestCase, _Config) ->
    ok.
%% Single group; both cases are independent and may run in parallel.
groups() ->
    [
        {markdown, [parallel], [
            markdown_files,
            markdown_string
        ]}
    ].
all() ->
    [{group, markdown}].
%% @doc Checks every *.md file in the repository root and in doc/ for
%% formatted/unformatted erlang code-fence pairs (see check_markdown/1).
%% Paths are resolved relative to the Common Test working directory, which
%% is assumed to be four levels below the repository root.
markdown_files(Config) when is_list(Config) ->
    {ok, Data} = file:get_cwd(),
    RepoRootPath = filename:join([Data, "..", "..", "..", ".."]),
    DocPath = filename:join(RepoRootPath, "doc"),
    Filenames = lists:append(
        find_markdown_filenames_in(RepoRootPath),
        find_markdown_filenames_in(DocPath)
    ),
    %% Sanity check: the pattern matches any list of length >= 3, i.e. we
    %% expect at least three markdown files to have been discovered.
    ?assertMatch([_One, _Two, _Three | _AtLeast], Filenames),
    %% lists:map is used purely for its side effects (assertions inside
    %% check_markdown); the resulting list is discarded by Common Test.
    lists:map(
        fun (Filename) ->
            {ok, Content} = file:read_file(Filename),
            check_markdown(binary_to_list(Content))
        end,
        Filenames
    ).
%% Returns the full paths of all entries directly inside Path whose file
%% extension is ".md". Crashes if Path cannot be listed.
find_markdown_filenames_in(Path) ->
    {ok, Entries} = file:list_dir_all(Path),
    [filename:join([Path, Entry])
     || Entry <- Entries, filename:extension(Entry) == ".md"].
%% @doc Exercises check_markdown/1 on an inline document covering all the
%% supported fence annotations: a keyed formatted/unformatted pair and an
%% unkeyed formatted/unformatted pair.
markdown_string(Config) when is_list(Config) ->
    S =
        "# Heading\n"
        "ignore for now\n"
        "```erlang formatted key\n"
        "hello(mike, joe, robert)\n"
        "```\n"
        "ignore for now\n"
        "```erlang unformatted\n"
        "goodbye(mike, joe, robert)\n"
        "```\n"
        "The previous unformatted version, should match this formatted version\n"
        "```erlang formatted\n"
        "goodbye(mike, joe, robert)\n"
        "```\n"
        "This unformatted version, should match the first formatted version with the same key\n"
        "```erlang unformatted key\n"
        "hello(mike, joe, robert)\n"
        "```\n",
    check_markdown(S).
%% @doc Splits the markdown Content on "```" fences, collects erlang code
%% snippets annotated "formatted"/"unformatted" into two maps keyed by the
%% remainder of the fence line, then asserts that (a) every formatted
%% snippet is a fixed point of the formatter and (b) every unformatted
%% snippet formats to its same-key formatted counterpart.
check_markdown(Content) ->
    Sections = string:split(Content, "```", all),
    {_, Formatted, Unformatted} = lists:foldl(
        fun split_code_into_maps/2,
        {text, maps:new(), maps:new()},
        Sections
    ),
    maps:map(
        fun (Key, FormattedCode) ->
            %% Formatted code must be stable under formatting.
            check_fmt(FormattedCode, FormattedCode),
            case maps:find(Key, Unformatted) of
                error ->
                    ignore;
                {ok, UnformattedCode} ->
                    check_fmt(UnformattedCode, FormattedCode)
            end
        end,
        Formatted
    ),
    % check that there are no unformatted pairs without formatted friends
    ?assertEqual(#{}, maps:without(maps:keys(Formatted), Unformatted)).
%% Fold step over the fence-split sections. Splitting on "```" yields
%% strictly alternating text/code sections, tracked by the text|code tag in
%% the accumulator. For code sections, the first line is the fence spec
%% (e.g. "erlang formatted key") and the Key is the (possibly empty) list
%% of tokens after formatted|unformatted. Non-erlang fences are skipped;
%% an erlang fence with an unknown annotation crashes (case_clause).
split_code_into_maps(_Text, {text, Formatted, Unformatted}) ->
    {code, Formatted, Unformatted};
split_code_into_maps(Text, {code, Formatted, Unformatted}) ->
    [FirstLine, Code] = string:split(Text, "\n"),
    Spec = string:split(FirstLine, " ", all),
    case Spec of
        ["erl" ++ _, ToFormat | Key] ->
            case ToFormat of
                "formatted" ->
                    {text, maps:put(Key, Code, Formatted), Unformatted};
                "unformatted" ->
                    {text, Formatted, maps:put(Key, Code, Unformatted)}
            end;
        _ ->
            {text, Formatted, Unformatted}
    end.
%% Asserts that formatting Unformatted at 80 columns equals Expected,
%% ignoring leading/trailing whitespace.
%% NOTE(review): eunit convention is ?assertEqual(Expected, Actual); the
%% arguments here are swapped, which only affects failure-report labels.
check_fmt(Unformatted, Expected) ->
    NewlyFormatted = format(Unformatted, 80),
    ?assertEqual(string:trim(NewlyFormatted), string:trim(Expected)).
%% Formats String (Erlang source) via erlfmt and renders it at PageWidth
%% columns, returning the result as a character list.
format(String, PageWidth) ->
    Doc = format_doc(String),
    Rendered = erlfmt_algebra:document_render(Doc, [{page_width, PageWidth}]),
    unicode:characters_to_list(Rendered).
%% Parses exactly one top-level form from String and converts it to an
%% erlfmt algebra document; crashes (badmatch) on parse errors or if the
%% string does not contain exactly one form.
format_doc(String) ->
    {ok, [Node], []} = erlfmt:read_nodes_string("nofile", String),
    erlfmt_format:to_algebra(Node).
%% @author <NAME> <<EMAIL>>
%% @copyright 2018 <NAME>
%% @doc Ensure a mixed rendering becomes a valid iolist.
%% Copyright 2018 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(z_output_html).
-export([
output/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
%% @doc Replace non-iolist parts in the output tree. First lets observers
%% of the output_html notification rewrite the tree, then normalises the
%% (possibly rewritten) tree into a proper iolist via output1/3.
-spec output( term(), z:context() ) -> {iolist(), z:context()}.
output(MixedHtml, Context) ->
    {MixedHtml1, Context1} = z_notifier:foldl(
        #output_html{ html = MixedHtml },
        {MixedHtml, Context},
        Context),
    output1(MixedHtml1, Context1, []).
%% @doc Recursively walk through the output, replacing all non iolist data.
%% Acc collects processed elements in reverse order; the result is a proper
%% iolist paired with the (possibly updated) Context.
output1(undefined, Context, Acc) ->
    {lists:reverse(Acc), Context};
output1(<<>>, Context, Acc) ->
    {lists:reverse(Acc), Context};
output1([], Context, Acc) ->
    {lists:reverse(Acc), Context};
%% A binary in tail position (improper list tail): emit it after the
%% accumulated elements.
output1(B, Context, Acc) when is_binary(B) ->
    {[ lists:reverse(Acc), B ], Context};
%% Nested list: render it recursively and keep it as one nested element.
output1([List|Rest], Context, Acc) when is_list(List) ->
    {Rendered, Context1} = output1(List, Context, []),
    output1(Rest, Context1, [ Rendered | Acc ]);
output1([ undefined | Rest], Context, Acc) ->
    output1(Rest, Context, Acc);
output1([ C |Rest ], Context, Acc) when is_atom(C) ->
    output1(Rest, Context, [ atom_to_binary(C, utf8) | Acc ]);
%% Translatable text: resolve using the context language with fallback.
output1([ {trans, _} = Trans | Rest ], Context, Acc) ->
    output1(Rest, Context, [ z_trans:lookup_fallback(Trans, Context) | Acc ]);
%% Datetime tuple {{Y,M,D},{H,Min,S}}: format and re-process the result.
%% This clause must precede the generic tuple clause below.
output1([ {{_,_,_},{_,_,_}} = D | Rest ], Context, Acc) ->
    output1([filter_date:date(D, "Y-m-d H:i:s", Context)|Rest], Context, Acc);
%% Any other tuple: fall back to its ~p debug representation.
output1([ T | Rest ], Context, Acc) when is_tuple(T) ->
    output1([iolist_to_binary(io_lib:format("~p", [T]))|Rest], Context, Acc);
%% A byte is already a valid iolist element.
output1([ C | Rest ], Context, Acc) when is_integer(C), C >= 0, C =< 255->
    output1(Rest, Context, [ C | Acc ]);
%% Code points above 255 are encoded as UTF-8 binaries.
output1([ C | Rest ], Context, Acc) when is_integer(C), C >= 0 ->
    output1(Rest, Context, [ <<C/utf8>> | Acc ]);
output1([ C | Rest ], Context, Acc) ->
    output1(Rest, Context, [ z_convert:to_binary(C) | Acc ]).
%% -*- erlang -*-
%%
%% Basic combinatorics for Erlang lists and maps.
%%
%% Copyright 2016-2017 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @version 0.1.5
%% @copyright 2016-2017 <NAME>
-module( lib_combin ).
-export( [cnr_all/1, cnr/2, pnr/1, permut_map/1, pick_from/1, vnr/2, fac/1] ).
-ifdef( EUNIT ).
-include_lib( "eunit/include/eunit.hrl" ).
-endif.
%%====================================================================
%% API functions
%%====================================================================
%% @doc Enumerates all combinations (order does not matter) of every
%% possible length, without replacement, drawing elements from `SrcLst'.
%%
%% Example:
%% ```
%% lib_combin:cnr_all( [a,b,c] ).
%% [[],[a],[b],[c],[b,a],[c,a],[c,b],[c,b,a]]
%% '''
-spec cnr_all( SrcLst::[_] ) -> [[_]].
cnr_all( SrcLst ) ->
  [Combination || N <- lists:seq( 0, length( SrcLst ) ),
                  Combination <- cnr( N, SrcLst )].
%% @doc Enumerates all combinations (order does not matter) of length `N'
%% without replacement by drawing elements from `SrcLst'.
%%
%% Herein, `N` must be non-negative for the function clause to match.
%%
%% Example:
%% ```
%% lib_combin:cnr( 2, [a,b,c] ).
%% [[b,a],[c,a],[c,b]]
%% '''
-spec cnr( N::_, SrcLst::[_] ) -> [[_]].
cnr( N, SrcLst ) when N >= 0 ->
  %% Named fun: M elements still to pick, remaining source, partial result.
  Cnr = fun
Cnr( 0, _, Acc ) -> [Acc];
Cnr( _, [], _ ) -> [];
Cnr( M, [H|T], Acc ) ->
case T of
%% Last element: it can only be taken, not skipped.
[] -> Cnr( M-1, [], [H|Acc] );
%% Branch: take H, or skip H and continue with the tail.
[_|_] -> Cnr( M-1, T, [H|Acc] )++Cnr( M, T, Acc )
end
end,
Cnr( N, SrcLst, [] ).
%% @doc Enumerates all variations (order matters) of length `N' without
%% replacement by drawing elements from `SrcLst'.
%%
%% Herein, `N` must be non-negative for the function clause to match.
%%
%% Example:
%% ```
%% lib_combin:vnr( 2, [a,b,c] ).
%% [[b,a],[c,a],[a,b],[c,b],[a,c],[b,c]]
%% '''
-spec vnr( N::_, SrcLst::[_] ) -> [[_]].
vnr( N, SrcLst ) when N >= 0 ->
  %% Named fun: at each level, pick any remaining element X and recurse on
  %% the source minus X (S--[X]) until M reaches zero.
  Variat = fun
Variat( 0, _, Acc ) ->
[Acc];
Variat( M, S, Acc ) ->
lists:flatmap( fun( X ) -> Variat( M-1, S--[X], [X|Acc] ) end, S )
end,
  Variat( N, SrcLst, [] ).
%% @doc Enumerates all permutations (order matters) without replacement by
%% drawing elements from `SrcLst'. Equivalent to vnr/2 with N = length.
%%
%% Example:
%% ```
%% lib_combin:pnr( [a,b,c] ).
%% [[c,b,a],[b,c,a],[c,a,b],[a,c,b],[b,a,c],[a,b,c]]
%% '''
-spec pnr( SrcLst::[_] ) -> [[_]].
pnr( SrcLst ) ->
  vnr( length( SrcLst ), SrcLst ).
%% @doc Enumerates all possible combinations by drawing one element from
%% each list value of a given map `SrcMap'. If any value list is empty,
%% the result is the empty list. The ordering of results depends on the
%% maps:fold/3 iteration order over `SrcMap'.
%%
%% Example:
%% ```
%% lib_combin:permut_map( #{ sauce => [ketchup, mayo], bread => [sesame, plain], meat => [beef, chicken, mutton] } ).
%% [#{bread => plain,meat => beef,sauce => ketchup},
%% #{bread => sesame,meat => beef,sauce => ketchup},
%% #{bread => plain,meat => chicken,sauce => ketchup},
%% #{bread => sesame,meat => chicken,sauce => ketchup},
%% #{bread => plain,meat => mutton,sauce => ketchup},
%% #{bread => sesame,meat => mutton,sauce => ketchup},
%% #{bread => plain,meat => beef,sauce => mayo},
%% #{bread => sesame,meat => beef,sauce => mayo},
%% #{bread => plain,meat => chicken,sauce => mayo},
%% #{bread => sesame,meat => chicken,sauce => mayo},
%% #{bread => plain,meat => mutton,sauce => mayo},
%% #{bread => sesame,meat => mutton,sauce => mayo}]
%% '''
-spec permut_map( map() ) -> _.
permut_map( SrcMap ) ->
  %% Fold step: extend every partial map in Acc with each value V for key K.
  G = fun( K, VLst, Acc ) ->
[A#{ K => V } || V <- VLst, A <- Acc]
end,
  maps:fold( G, [#{}], SrcMap ).
%% @doc Picks a uniformly random element from the given non-empty list;
%% crashes with function_clause for the empty list.
%%
%% Example:
%% ```
%% pick_from( [a,b,c] ).
%% c
%% '''
-spec pick_from( [_] ) -> _.
pick_from( SrcLst=[_|_] ) ->
  lists:nth( rand:uniform( length( SrcLst ) ), SrcLst ).
%% @doc The factorial function. Crashes with function_clause for negative
%% (or non-integer) input. Tail-recursive via an accumulator helper, so it
%% runs in constant stack space.
%%
%% Example:
%% ```
%% factorial( 4 ).
%% 24
%% '''
-spec fac( non_neg_integer() ) -> pos_integer().
fac( N ) when is_integer( N ), N >= 0 ->
  fac_acc( N, 1 ).
%% Accumulator worker for fac/1.
fac_acc( 0, Acc ) ->
  Acc;
fac_acc( N, Acc ) ->
  fac_acc( N-1, N*Acc ).
%%====================================================================
%% Internal functions
%%====================================================================
%%====================================================================
%% Unit tests
%%====================================================================
-ifdef( EUNIT ).
cnr_one_returns_n_elements_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( 6, length( cnr( 1, SrcLst ) ) ).
cnr_n_returns_one_elements_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( 1, length( cnr( 6, SrcLst ) ) ).
cnr_zero_is_degenerate_but_valid_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( [[]], cnr( 0, SrcLst ) ).
cnr_neg_throws_error_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertError( function_clause, cnr( -1, SrcLst ) ).
cnr_too_large_returns_empty_list_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( [], cnr( 7, SrcLst ) ).
cnr_all_test() ->
  SrcLst = [a,b,c],
  ?assertEqual( 1+3+3+1, length( cnr_all( SrcLst ) ) ).
vnr_one_returns_n_elements_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( 6, length( vnr( 1, SrcLst ) ) ).
vnr_n_returns_one_elements_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( fac( 6 ), length( vnr( 6, SrcLst ) ) ).
vnr_zero_is_degenerate_but_valid_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( [[]], vnr( 0, SrcLst ) ).
vnr_neg_throws_error_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertError( function_clause, vnr( -1, SrcLst ) ).
vnr_too_large_returns_empty_list_test() ->
  SrcLst = [a, b, c, d, e, f],
  ?assertEqual( [], vnr( 7, SrcLst ) ).
permut_empty_map_returns_empty_map_singleton_test() ->
  ?assertEqual( [#{}], permut_map( #{} ) ).
permut_map_containing_single_empty_list_returns_empty_list_test() ->
  ?assertEqual( [], permut_map( #{ b => [] } ) ).
permut_map_containing_empty_list_returns_empty_list_test() ->
  ?assertEqual( [], permut_map( #{ a => [x, y], b => [], c => [m, n] } ) ).
burger_restaurant_example_test() ->
  IngredientMap = #{ sauce => [ketchup, mayo],
                     bread => [sesame, plain],
                     meat => [beef, chicken, mutton] },
  ExpectedLst = [
    #{bread => sesame,meat => beef,sauce => ketchup},
    #{bread => plain,meat => beef,sauce => ketchup},
    #{bread => sesame,meat => chicken,sauce => ketchup},
    #{bread => plain,meat => chicken,sauce => ketchup},
    #{bread => sesame,meat => mutton,sauce => ketchup},
    #{bread => plain,meat => mutton,sauce => ketchup},
    #{bread => sesame,meat => beef,sauce => mayo},
    #{bread => plain,meat => beef,sauce => mayo},
    #{bread => sesame,meat => chicken,sauce => mayo},
    #{bread => plain,meat => chicken,sauce => mayo},
    #{bread => sesame,meat => mutton,sauce => mayo},
    #{bread => plain,meat => mutton,sauce => mayo}],
  Result = lib_combin:permut_map( IngredientMap ),
  ?assertEqual( ExpectedLst, Result ).
pick_from_singleton_list_returns_only_element_test() ->
  ?assertEqual( a, pick_from( [a] ) ).
pick_from_empty_throws_error_test() ->
  ?assertError( function_clause, pick_from( [] ) ).
fac_zero_is_one_test() ->
  ?assertEqual( 1, fac( 0 ) ).
fac_one_is_one_test() ->
  ?assertEqual( 1, fac( 1 ) ).
fac_two_is_two_test() ->
  ?assertEqual( 2, fac( 2 ) ).
fac_three_is_six_test() ->
  ?assertEqual( 6, fac( 3 ) ).
fac_four_is_24_test() ->
  ?assertEqual( 24, fac( 4 ) ).
fac_neg_throws_error_test() ->
  ?assertError( function_clause, fac( -1 ) ).
-endif.
-module(heap_demo).
-export([min_heap/0, max_heap/0]).
-spec min_heap() -> ok.
%% @doc Demonstrates the min-heap API end to end; every step is asserted
%% by pattern matching, so any deviation crashes (badmatch).
min_heap() ->
    %% Creating a new min-heap
    H = heap:new(min),
    %% Checking if heap is empty
    true = heap:is_empty(H),
    %% Adding elements; insert returns the value and an element reference
    %% that can later be passed to heap:update/3.
    {6, _R1} = heap:insert(H, 6),
    {9, R2} = heap:insert(H, 9),
    {3, _R3} = heap:insert(H, 3),
    {12, _R4} = heap:insert(H, 12),
    %% Checking number of elements in the heap
    4 = heap:heap_size(H),
    %% Finding the minimum element (without removing it)
    3 = heap:min(H),
    4 = heap:heap_size(H),
    %% Removing the minimum element
    3 = heap:take_min(H),
    3 = heap:heap_size(H),
    %% Lowering the priority of an element (9 -> 2 becomes the new minimum)
    true = heap:update(H, R2, 2),
    2 = heap:min(H),
    %% Increasing the priority of an element (2 -> 20)
    true = heap:update(H, R2, 20),
    6 = heap:min(H),
    %% Removing all the elements in ascending order
    6 = heap:take_min(H),
    12 = heap:take_min(H),
    20 = heap:take_min(H),
    true = heap:is_empty(H),
    %% Delete the heap
    true = heap:delete(H),
    %% Construct heap from a list of {Ref, Value} pairs
    L = [{1,6},{2,9},{3,3},{4,12}],
    {H2, _R} = heap:from_list(min, L),
    true = heap:delete(H2),
    ok.
-spec max_heap() -> ok.
%% @doc Demonstrates the max-heap API end to end; every step is asserted
%% by pattern matching, so any deviation crashes (badmatch).
max_heap() ->
    %% Creating a new max-heap
    H = heap:new(max),
    %% Checking if heap is empty
    true = heap:is_empty(H),
    %% Adding elements; insert returns the value and an element reference.
    {6, _R1} = heap:insert(H, 6),
    {9, R2} = heap:insert(H, 9),
    {3, _R3} = heap:insert(H, 3),
    {12, _R4} = heap:insert(H, 12),
    %% Checking number of elements in the heap
    4 = heap:heap_size(H),
    %% Finding the maximum element (without removing it)
    12 = heap:max(H),
    4 = heap:heap_size(H),
    %% Removing the maximum element
    12 = heap:take_max(H),
    3 = heap:heap_size(H),
    %% Lowering the priority of an element (9 -> 5)
    true = heap:update(H, R2, 5),
    6 = heap:max(H),
    %% Increasing the priority of an element (5 -> 20 becomes the maximum)
    true = heap:update(H, R2, 20),
    20 = heap:max(H),
    %% Removing all the elements in descending order
    20 = heap:take_max(H),
    6 = heap:take_max(H),
    3 = heap:take_max(H),
    true = heap:is_empty(H),
    %% Delete the heap
    true = heap:delete(H),
    %% Construct heap from a list of {Ref, Value} pairs
    L = [{1,6},{2,9},{3,3},{4,12}],
    {H2, _R} = heap:from_list(max, L),
    true = heap:delete(H2),
    ok.
%% =============================================================================
%% bondy_router.erl -
%%
%% Copyright (c) 2016-2022 Leapsight. All rights reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc This module provides the routing logic for all WAMP interactions.
%%
%% In general `bondy_router' tries to handle all messages asynchronously.
%% It does it by
%% using either a static or a dynamic pool of workers based on configuration.
%% This module implements both type of workers as a gen_server (this module).
%% A static pool uses a set of supervised processes whereas a
%% dynamic pool spawns a new erlang process for each message. In both cases,
%% sidejob supervises the processes.
%% By default bondy_router uses a dynamic pool.
%%
%% The pools are implemented using the sidejob library in order to provide
%% load regulation. Inn case a maximum pool capacity has been reached,
%% the router will handle the message synchronously i.e. blocking the
%% calling processes (usually the one that handles the transport connection
%% e.g. {@link bondy_wamp_ws_connection_handler}).
%%
%% The router also handles messages synchronously in those
%% cases where it needs to preserve message ordering guarantees.
%%
%% This module handles only the concurrency and basic routing logic,
%% delegating the rest to either {@link bondy_broker} for PubSub interactions,
%% {@link bondy_dealer} for RPC interactions and {@link bondy_router_relay} for
%% all interactions targetting a remote peer.
%%
%% ```
%% ,------. ,------.
%% | Peer | | Peer |
%% `--+---' `--+---'
%% | |
%% | TCP established |
%% |<----------------------------------------->|
%% | |
%% | TLS established |
%% |+<--------------------------------------->+|
%% |+ +|
%% |+ WebSocket established +|
%% |+|<------------------------------------->|+|
%% |+| |+|
%% |+| WAMP established |+|
%% |+|+<----------------------------------->+|+|
%% |+|+ +|+|
%% |+|+ +|+|
%% |+|+ WAMP closed +|+|
%% |+|+<----------------------------------->+|+|
%% |+| |+|
%% |+| |+|
%% |+| WAMP established |+|
%% |+|+<----------------------------------->+|+|
%% |+|+ +|+|
%% |+|+ +|+|
%% |+|+ WAMP closed +|+|
%% |+|+<----------------------------------->+|+|
%% |+| |+|
%% |+| WebSocket closed |+|
%% |+|<------------------------------------->|+|
%% |+ +|
%% |+ TLS closed +|
%% |+<--------------------------------------->+|
%% | |
%% | TCP closed |
%% |<----------------------------------------->|
%% | |
%% ,--+---. ,--+---.
%% | Peer | | Peer |
%% `------' `------'
%%
%% '''
%% (Diagram copied from WAMP RFC Draft)
%%
%% @end
%% -----------------------------------------------------------------------------
-module(bondy_router).
-include_lib("kernel/include/logger.hrl").
-include_lib("wamp/include/wamp.hrl").
-include("bondy.hrl").
-define(ROUTER_ROLES, #{
broker => #{features => ?BROKER_FEATURES},
dealer => #{features => ?DEALER_FEATURES}
}).
-type event() :: {wamp_message(), bondy_context:t()}.
%% API
-export([agent/0]).
-export([flush/2]).
-export([forward/2]).
-export([forward/3]).
-export([roles/0]).
-export([stop/0]).
-export([pre_stop/0]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc Returns the WAMP roles implemented by this router (broker and
%% dealer) together with their advertised feature maps.
%% @end
%% -----------------------------------------------------------------------------
-spec roles() -> #{binary() => #{binary() => boolean()}}.
roles() ->
    ?ROUTER_ROLES.
%% -----------------------------------------------------------------------------
%% @doc
%% Returns the Bondy agent identification string, built from the fixed
%% "LEAPSIGHT-BONDY-" prefix and the application version.
%% @end
%% -----------------------------------------------------------------------------
agent() ->
    iolist_to_binary(["LEAPSIGHT-BONDY-", bondy_app:vsn()]).
%% -----------------------------------------------------------------------------
%% @doc Forwards a WAMP message to the Dealer or Broker based on message type.
%% The message might end up being handled synchronously
%% (performed by the calling process i.e. the transport handler)
%% or asynchronously (by sending the message to the router load regulated
%% worker pool). SUBSCRIBE, REGISTER and CALL are handled synchronously to
%% preserve ordering guarantees (see the per-clause comments below);
%% everything else goes through async_forward/2.
%% @end
%% -----------------------------------------------------------------------------
-spec forward(M :: wamp_message(), Ctxt :: bondy_context:t()) ->
    {ok, bondy_context:t()}
    | {reply, Reply :: wamp_message(), bondy_context:t()}
    | {stop, Reply :: wamp_message(), bondy_context:t()}.

forward(#subscribe{} = M, #{session := _} = Ctxt) ->
    %% This is a sync call as clients can call subscribe multiple times
    %% concurrently. This is becuase matching and adding to the registry is not
    %% done atomically: bondy_registry:add uses art_server:match/2 to
    %% determine if a subscription already exists and then adds to the registry
    %% (and trie). If we allow this request to be concurrent 2 or more request
    %% could get no matches from match and thus create 3 subscriptions when
    %% according to the protocol the subscriber should always get the same
    %% subscription as result.
    %% REVIEW An alternative approach would be for this to be handled async and
    %% a pool of register servers to block.
    ok = sync_forward({M, Ctxt}),
    {ok, Ctxt};

forward(#register{} = M, #{session := _} = Ctxt) ->
    %% This is a sync call as it is an easy way to preserve RPC ordering as
    %% defined by RFC 11.2:
    %% Further, if _Callee A_ registers for *Procedure 1*, the "REGISTERED"
    %% message will be sent by _Dealer_ to _Callee A_ before any
    %% "INVOCATION" message for *Procedure 1*.
    %% Because we block the callee until we get the response,
    %% the callee will not receive any other messages.
    %% However, notice that if the callee has another connection with the
    %% router, then it might receive an invocation through that connection
    %% before we reply here.
    %% At the moment this relies on Erlang's guaranteed causal delivery of
    %% messages between two processes even when in different nodes.
    ok = sync_forward({M, Ctxt}),
    {ok, Ctxt};

%% Meta-API calls (wamp.* / bondy.* procedures) can be handled async.
forward(
    #call{procedure_uri = <<"wamp.", _/binary>>} = M, #{session := _} = Ctxt) ->
    async_forward(M, Ctxt);

forward(
    #call{procedure_uri = <<"bondy.", _/binary>>} = M,
    #{session := _} = Ctxt) ->
    async_forward(M, Ctxt);

forward(#call{} = M, #{session := _} = Ctxt0) ->
    %% This is a sync call as it is an easy way to guarantee ordering of
    %% invocations between any given pair of Caller and Callee as
    %% defined by RFC 11.2, as Erlang guarantees causal delivery of messages
    %% between two processes even when in different nodes (when using
    %% distributed Erlang).
    %% RFC:
    %% If Callee A has registered endpoints for both Procedure 1 and Procedure
    %% 2, and Caller B first issues a Call 1 to Procedure 1 and then a Call 2
    %% to Procedure 2, and both calls are routed to Callee A, then Callee A
    %% will first receive an invocation corresponding to Call 1 and then Call
    %% 2. This also holds if Procedure 1 and Procedure 2 are identical.
    ok = sync_forward({M, Ctxt0}),
    %% The invocation is always async and the result or error will be delivered
    %% asynchronously by the dealer.
    {ok, Ctxt0};

%% All other message types are handled asynchronously.
forward(M, #{session := _} = Ctxt) ->
    async_forward(M, Ctxt).
%% -----------------------------------------------------------------------------
%% @doc Forwards a message to a (possibly remote) destination reference.
%% When `To' is local (or undefined, which happens for PUBLISH messages),
%% the message is routed in-process via do_forward/3. Otherwise it is
%% relayed to the destination node (directly, or through the via-relay
%% recorded in Opts).
%% @end
%% -----------------------------------------------------------------------------
-spec forward(wamp_message(), maybe(bondy_ref:t()), map()) -> ok | no_return().

forward(Msg, To, #{realm_uri := RealmUri} = Opts) ->
    %% To == undefined when Msg == #publish{}
    case To == undefined orelse bondy_ref:is_local(To) of
        true ->
            do_forward(Msg, To, Opts);
        false ->
            RelayOpts = #{
                ack => true,
                retransmission => true,
                partition_key => erlang:phash2(RealmUri)
            },
            case bondy:peek_via(Opts) of
                undefined ->
                    Node = bondy_ref:node(To),
                    PeerMsg = {forward, To, Msg, Opts},
                    bondy_router_relay:forward(Node, PeerMsg, RelayOpts);
                Relay ->
                    case bondy_ref:is_local(Relay) of
                        true ->
                            bondy:send(RealmUri, To, Msg, Opts);
                        false ->
                            Node = bondy_ref:node(Relay),
                            PeerMsg = {forward, To, Msg, Opts},
                            %% Removed a redundant re-binding of RelayOpts to
                            %% an identical map (a no-op match) that was never
                            %% used.
                            %% NOTE(review): this branch calls forward/2 while
                            %% the no-via branch calls forward/3 with
                            %% RelayOpts -- confirm whether RelayOpts should
                            %% be passed here as well.
                            bondy_router_relay:forward(Node, PeerMsg)
                    end
            end
    end.
%% -----------------------------------------------------------------------------
%% @doc Sends a GOODBYE message to all existing client connections.
%% The client should reply with another GOODBYE within the configured time and
%% when it does or on timeout, Bondy will close the connection triggering the
%% cleanup of all the client sessions. Sessions are traversed in batches of
%% 100 via bondy_session:list_refs/1.
%% @end
%% -----------------------------------------------------------------------------
pre_stop() ->
    M = wamp_message:goodbye(
        #{message => <<"Router is shutting down">>},
        ?WAMP_SYSTEM_SHUTDOWN
    ),
    Fun = fun
        ({continue, Cont}) ->
            %% Continuation case: fetch the next batch of session refs,
            %% logging (not propagating) any traversal error.
            try
                bondy_session:list_refs(Cont)
            catch
                Class:Reason:Stacktrace ->
                    ?LOG_ERROR(#{
                        description => "Error while shutting down router",
                        class => Class,
                        reason => Reason,
                        stacktrace => Stacktrace
                    }),
                    []
            end;
        ({RealmUri, Ref}) ->
            %% Best-effort send: 'catch' swallows failures for sessions that
            %% may already be gone.
            catch bondy:send(RealmUri, Ref, M),
            ok
    end,
    %% We loop with batches of 100
    bondy_utils:foreach(Fun, bondy_session:list_refs(100)).

%% Nothing to do on stop; cleanup happens in pre_stop/0.
stop() ->
    ok.
%% -----------------------------------------------------------------------------
%% @doc Removes all subscriptions, registrations and all the pending items in
%% the RPC promise queue that are associated for reference `Ref' in realm
%% `RealmUri'. Delegates to the dealer first, then the broker.
%% @end
%% -----------------------------------------------------------------------------
-spec flush(RealmUri :: uri(), Ref :: bondy_ref:t()) -> ok.

flush(RealmUri, Ref) ->
    ok = bondy_dealer:flush(RealmUri, Ref),
    bondy_broker:flush(RealmUri, Ref).
%% =============================================================================
%% PRIVATE
%% =============================================================================

%% -----------------------------------------------------------------------------
%% @private
%% @doc Returns true iff the message requires an acknowledgement reply, i.e.
%% it is a PUBLISH with the acknowledge option set. All other messages
%% return false.
%% @end
%% -----------------------------------------------------------------------------
%% Fixed spec: the argument is a WAMP message record, not a map.
-spec acknowledge_message(wamp_message()) -> boolean().

acknowledge_message(#publish{options = Opts}) ->
    maps:get(acknowledge, Opts, false);

acknowledge_message(_) ->
    false.
%% =============================================================================
%% PRIVATE : GEN_SERVER
%% =============================================================================

%% @private
%% @doc Forwards the message via the router worker pool, falling back to
%% synchronous (in-process) routing when the pool is overloaded. Errors are
%% reported back to the peer only when the message requested an
%% acknowledgement; otherwise they are logged and swallowed.
async_forward(M, Ctxt0) ->
    %% Client already has a session.
    %% RFC: By default, publications are unacknowledged, and the _Broker_ will
    %% not respond, whether the publication was successful indeed or not.
    %% This behavior can be changed with the option
    %% "PUBLISH.Options.acknowledge|bool"
    Acknowledge = acknowledge_message(M),

    %% Asynchronously forwards a message by either sending it to an
    %% existing worker or spawning a new one depending on
    %% bondy_broker_pool_type.
    Event = {M, Ctxt0},

    try bondy_router_worker:cast(fun() -> sync_forward(Event) end) of
        ok ->
            {ok, Ctxt0};
        {error, overload} ->
            ?LOG_WARNING(#{
                description => "Router pool overloaded, will route message synchronously"
            }),
            %% @TODO publish metaevent and stats
            %% @TODO use throttling and send error to caller conditionally
            %% We do it synchronously i.e. blocking the caller
            ok = sync_forward(Event),
            {ok, Ctxt0}
    catch
        error:Reason when Acknowledge == true ->
            %% TODO Maybe publish metaevent
            %% REVIEW are we using the right error uri?
            ErrorMap = bondy_error:map(Reason),
            Reply = wamp_message:error_from(
                M,
                #{},
                ?WAMP_CANCELLED,
                [maps:get(<<"message">>, ErrorMap)],
                #{error => ErrorMap}
            ),
            {reply, Reply, Ctxt0};
        Class:Reason:Stacktrace ->
            %% Fixed: the local was previously (mis)named Ctxt although it
            %% holds the realm URI, and was logged under the key 'context'.
            RealmUri = bondy_context:realm_uri(Ctxt0),
            SessionId = bondy_context:session_id(Ctxt0),
            ExtId = bondy_session_id:to_external(SessionId),
            ?LOG_ERROR(#{
                description => "Error while routing message",
                class => Class,
                reason => Reason,
                stacktrace => Stacktrace,
                session_external_id => ExtId,
                session_id => SessionId,
                realm_uri => RealmUri,
                message => M
            }),
            %% TODO Maybe publish metaevent and stats
            {ok, Ctxt0}
    end.
%% -----------------------------------------------------------------------------
%% @private
%% @doc
%% Synchronously forwards a message in the calling process, dispatching on
%% the message record type: pub/sub messages go to bondy_broker, RPC
%% messages go to bondy_dealer. Crashes with {unexpected_message, M} for
%% any other message type (let-it-crash on protocol violations).
%% @end
%% -----------------------------------------------------------------------------
-spec sync_forward(event()) -> ok.
%% Pub/Sub messages are routed by the broker.
sync_forward({#subscribe{} = M, Ctxt}) ->
    bondy_broker:forward(M, Ctxt);
sync_forward({#unsubscribe{} = M, Ctxt}) ->
    bondy_broker:forward(M, Ctxt);
sync_forward({#publish{} = M, Ctxt}) ->
    bondy_broker:forward(M, Ctxt);
%% RPC messages are routed by the dealer.
sync_forward({#register{} = M, Ctxt}) ->
    bondy_dealer:forward(M, Ctxt);
sync_forward({#unregister{} = M, Ctxt}) ->
    bondy_dealer:forward(M, Ctxt);
sync_forward({#call{} = M, Ctxt}) ->
    bondy_dealer:forward(M, Ctxt);
sync_forward({#cancel{} = M, Ctxt}) ->
    bondy_dealer:forward(M, Ctxt);
sync_forward({#yield{} = M, Ctxt}) ->
    bondy_dealer:forward(M, Ctxt);
%% Only INVOCATION/INTERRUPT errors flow through the dealer here; other
%% error kinds fall through to the crash clause below.
sync_forward({#error{request_type = Type} = M, Ctxt})
when Type == ?INVOCATION orelse Type == ?INTERRUPT ->
    bondy_dealer:forward(M, Ctxt);
%% Any message type not handled above is a routing bug: crash loudly.
sync_forward({M, _Ctxt}) ->
    error({unexpected_message, M}).
do_forward(#publish{} = M, To, Opts) ->
bondy_broker:forward(M, To, Opts);
do_forward(#error{} = M, To, Opts) ->
%% This is a CALL, INVOCATION or INTERRUPT error
bondy_dealer:forward(M, To, Opts);
do_forward(#interrupt{} = M, To, Opts) ->
bondy_dealer:forward(M, To, Opts);
do_forward(#call{} = M, To, Opts) ->
bondy_dealer:forward(M, To, Opts);
do_forward(#invocation{} = M, To, Opts) ->
bondy_dealer:forward(M, To, Opts);
do_forward(#yield{} = M, To, Opts) ->
bondy_dealer:forward(M, To, Opts). | apps/bondy/src/bondy_router.erl | 0.618089 | 0.410077 | bondy_router.erl | starcoder |
%%%------------------------------------------------------------------------
%% Copyright 2020, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc A Propagator injects or extracts data from a Context so information
%% like baggage and trace context can be transported along with cross service
%% requests, like an HTTP request.
%%
%% Propagators are defined based on the type of encoding they inject and
%% extract. At this time there is only a TextMapPropagator,
%% {@link otel_propagator_text_map}, which works on ASCII keys and values.
%%
%% This behaviour is only for defining the callbacks used by each propagator
%% per type and is only used by developers adding a new type of propagator
%% (like for binary protocols), not implementations of propagators themselves
%% (like B3 or W3C TraceContext).
%%
%% Users configure and call propagators based on their type. See the docs
%% for {@link otel_propagator_text_map} for more details.
%% @end
%%%-------------------------------------------------------------------------
-module(otel_propagator).
-export([builtins_to_modules/1,
builtin_to_module/1]).
%% Sets a value into a carrier
-callback inject(t(), carrier()) -> carrier().
-callback inject_from(otel_ctx:t(), t(), carrier()) -> carrier().
%% extracts values from a carrier and sets them in the context
-callback extract(t(), carrier()) -> otel_ctx:t().
-callback extract_to(otel_ctx:t(), t(), carrier()) -> otel_ctx:t().
-type t() :: builtin() | module() | {module(), term()}.
%% trace_context and tracecontext are the same. tracecontext is the term
%% in Otel specs and trace_context is the more idiomatic Erlang spelling
-type builtin() :: trace_context | tracecontext | b3multi | baggage. %% b3 | jaeger
%% a carrier can be any type
-type carrier() :: term().
-export_type([t/0,
builtin/0,
carrier/0]).
%% convert the short name of a propagator to its module name if it is a builtin
%% if the name doesn't match a builtin it is assumed to be a module
%% @hidden
-spec builtins_to_modules([t()]) -> [module()].
builtins_to_modules(Propagators) ->
    %% Resolve each configured propagator to its implementation module.
    lists:map(fun builtin_to_module/1, Propagators).
%% @hidden
-spec builtin_to_module(builtin() | module()) -> module().
builtin_to_module(tracecontext) ->
otel_propagator_trace_context;
builtin_to_module(trace_context) ->
otel_propagator_trace_context;
builtin_to_module(b3multi) ->
otel_propagator_b3multi;
builtin_to_module(baggage) ->
otel_propagator_baggage;
%% TODO: add multib3 and jaeger as builtin propagators
%% builtin_to_module(multib3) ->
%% otel_propagator_multib3;
%% builtin_to_module(jaeger) ->
%% otel_propagator_jaeger;
builtin_to_module(Module) when is_atom(Module) ->
Module;
builtin_to_module(Propagator) ->
Propagator. | apps/opentelemetry_api/src/otel_propagator.erl | 0.582966 | 0.442938 | otel_propagator.erl | starcoder |
% references:
% https://en.wikipedia.org/wiki/Chord_(peer-to-peer)
-module(chord).
-export([spawn_ring/1, forward/2, lookup/2, insert/3]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
-behavior(gen_server).
-define(M, 10).
-define(MAX, round(math:pow(2, ?M))).
-define(TICKINTERVAL, 1000).
-define(TIMEOUT, 10000).
-record(idpair, {
id = nil,
pid = nil
}).
-record(state, {
id = nil, % the nodes id
prev = nil, % predecessor id/pid
tbl = nil, % the server's local hashtable
finger = []
}).
%% Replace the I-th (1-based) element of list L with N, keeping every
%% other element in its original position. I must be a valid index.
setnth(L, I, N) ->
    {Prefix, [_Replaced | Suffix]} = lists:split(I - 1, L),
    Prefix ++ [N | Suffix].
successor(State) -> % get the first element in the finger table (the successor).
[S | _] = State#state.finger,
S.
set_successor(State, Succ) -> % set the first element in the finger table
[_ | R] = State#state.finger,
State#state{finger = [Succ | R]}.
selfpair(State) -> % generate an idpair from a nodes State
#idpair{id = State#state.id, pid = self()}.
% Renders a non-negative integer as a 40-character, zero-padded,
% lower-case hexadecimal string.
formathash(Hash) ->
    lists:flatten(io_lib:format("~40.16.0b", [Hash])).
% generate a hash string for S: SHA-1 the input, reduce the 160-bit digest
% into the ring's identifier space (mod ?MAX = 2^?M) and render it as a
% fixed-width hex string via formathash/1.
hashstr(S) -> % hash a string and return a string of the hash
    <<H:160/big-unsigned-integer>> = crypto:hash(sha, S),
    formathash(H rem ?MAX).
% Tests whether the middle argument lies strictly inside the ring
% interval (Lo, Hi).
% Single-node ring: when the two endpoints coincide with the candidate's
% bounds, everything is considered inside.
between(Edge, _, Edge) ->
    true;
% Non-wrapping interval.
between(Lo, X, Hi) when Lo < Hi ->
    (Lo < X) andalso (X < Hi);
% Wrapped interval (Lo >= Hi): the interval covers the rest of the ring,
% i.e. everything above Lo or below Hi.
between(Lo, X, Hi) ->
    (Lo < X) orelse (X < Hi).
% Right-inclusive variant: is X in (Lo, Hi]?
between_rin(Lo, X, Hi) ->
    Hi == X orelse between(Lo, X, Hi).
genid() -> % convert IP address + pid into a hashed value (so each server gets a unique id)
{_, [{IpTuple,_,_}|_]} = inet:getif(),
hashstr(lists:flatten(io_lib:format("~p~p", [IpTuple, self()]))).
forward(Node, Message) -> % forward a message to the next node
gen_server:cast(Node, {forward, Message, 15}).
lookup(Node, Key) ->
K = hashstr(Key),
gen_server:cast(Node, {lookup_forward, lookup, K, self()}),
receive
{lookup_for, K, V} -> V
after
?TIMEOUT -> {error, timeout}
end.
insert(Node, Key, Val) ->
K = hashstr(Key),
gen_server:cast(Node, {lookup_forward, {insert, Val}, K, self()}),
receive
{insert_ok, K} -> {ok, K}
after
?TIMEOUT -> {error, timeout}
end.
% Intelligently forward to the next node using the finger table
% FoundF : idpair -> void
% ForwardF : idpair -> void
find_successor(State, ID, ForwardF, FoundF) ->
Next = successor(State),
case between_rin(State#state.id, ID, Next#idpair.id) of
true ->
FoundF(Next);
_ ->
N = closest_preceding_from_self(State, ID),
ForwardF(N)
end.
% create a node in the dht
create() ->
{ok, N} = gen_server:start_link(?MODULE, #state{}, []),
N.
% create a node for the dht and have it join starting at Head
create_and_join(Head) ->
N = create(),
gen_server:call(N, {join, Head}),
N.
% spawn the initial ring
spawn_ring(N) ->
H = create(),
[H | [create_and_join(H) || _ <- lists:seq(1, N)]].
% find the closest node to ID that's directly before it
closest_preceding_from_self(State, ID) ->
closest_preceding_node(selfpair(State), lists:reverse(State#state.finger), ID).
% idpair -> [idpair] -> id -> idpair
closest_preceding_node(Node, [], _) ->
Node;
closest_preceding_node(Node, [H|T], ID) ->
case (H /= nil) andalso between(Node#idpair.id, H#idpair.id, ID) of
true -> H;
_ -> closest_preceding_node(Node, T, ID)
end.
% init function for gen_server behavior
init(S) ->
timer:send_interval(?TICKINTERVAL, tick),
ID = genid(),
{ok, S#state{id=ID, tbl=dict:new(), finger=[#idpair{id = ID, pid = self()} | [nil || _ <- lists:seq(2, ?M)]]}}.
% terminate function for gen_server behavior
terminate(_, _) -> ok.
% update the finger table (called every TICKINTERVAL)
fix_fingers(State) ->
I = rand:uniform(?M),
IntID = list_to_integer(State#state.id, 16),
FingerID = formathash((IntID + round(math:pow(2, I - 1))) rem ?MAX),
Next = successor(State),
gen_server:cast(Next#idpair.pid, {find_successor, I, FingerID, self()}),
ok.
% join a ring
handle_call({join, H}, _From, State) ->
gen_server:cast(H, {find_successor, 1, State#state.id, self()}),
{reply, ok, State}.
% update Ith successor to be N in the finger table
handle_info({successor_for, I, N}, State) ->
{noreply, State#state{finger = setnth(State#state.finger, I, N)}};
% tick handler (stabilize and fix fingers).
handle_info(tick, State) ->
Next = successor(State),
if
Next /= nil -> gen_server:cast(Next#idpair.pid, {prev, self()})
end,
fix_fingers(State),
{noreply, State};
% generic message
handle_info(M, State) ->
io:format("~p Unknown message ~p~n", [self(), M]),
{noreply, State}.
% stabilize this node, (given X = successors predecessor)
handle_cast({stabilize, X}, State) ->
Next = successor(State),
case X /= nil andalso between(State#state.id, X#idpair.id, Next#idpair.id) of
true ->
{noreply, set_successor(State, X)};
_ ->
gen_server:cast(Next#idpair.pid, {notify, selfpair(State)}),
{noreply, State}
end;
% tell the node that requested this nodes predecessor to stabilize
handle_cast({prev, From}, State) ->
gen_server:cast(From, {stabilize, State#state.prev}),
{noreply, State};
% find successor of ID
handle_cast({find_successor, I, ID, From}, State) ->
find_successor(State, ID,
fun(NextN) ->
gen_server:cast(NextN#idpair.pid, {find_successor, I, ID, From})
end,
fun(FoundN) ->
From ! {successor_for, I, FoundN}
end),
{noreply, State};
% lookup via find_successor
handle_cast({lookup_forward, M, K, From}, State) ->
find_successor(State, K,
fun(NextN) ->
gen_server:cast(NextN#idpair.pid, {lookup_forward, M, K, From})
end,
fun(FoundN) ->
gen_server:cast(FoundN#idpair.pid, {M, K, From})
end),
{noreply, State};
handle_cast({{insert, V}, K, From}, State) ->
From ! {insert_ok, K},
{noreply, State#state{tbl = dict:store(K, V, State#state.tbl)}};
handle_cast({lookup, K, From}, State) ->
io:format("Found ~p on ~p~n", [K, self()]),
From ! {lookup_for, K,
case dict:find(K, State#state.tbl) of
error -> {error, not_found};
M -> M
end},
{noreply, State};
% P thinks it might be our predecessor
handle_cast({notify, P}, State) ->
% P needs to be an idpair
case (State#state.prev == nil) orelse
between(State#state.prev#idpair.id, P#idpair.id, State#state.id) of
true ->
{noreply, State#state{prev = P}};
_ ->
{noreply, State}
end;
handle_cast({forward, Msg, N}, State) when N == 0 ->
io:format("~p Dropping ~p~n", [self(), Msg]),
{noreply, State};
% forward a message to the direct predecessor
handle_cast({forward, Msg, N}, State) ->
Next = successor(State),
io:format("~p Forwarding ~p to ~p~n", [self(), Msg, Next]),
gen_server:cast(Next#idpair.pid, {forward, Msg, N - 1}),
{noreply, State}. | chord-dht/chord.erl | 0.521227 | 0.521167 | chord.erl | starcoder |
% @copyright 2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%%% @author <NAME> <<EMAIL>>
%%% @doc Basic Histogram.
%%% <NAME> and <NAME>, "A streaming parallel
%%% decision tree algorithm", J. Machine Learning Research 11
%%% (2010), pp. 849--872.
%%% @end
%% @version $Id$
-module(histogram).
-author('<EMAIL>').
-vsn('$Id$').
-ifdef(with_export_type_support).
-export_type([histogram/0]).
-endif.
% external API
-export([create/1, add/2, add/3, get_data/1, merge/2]).
% private API
-export([resize/1, insert/2, find_smallest_interval/1, merge_interval/2]).
-include("record_helpers.hrl").
-type data_item() :: {float(), pos_integer()}.
-type data_list() :: list(data_item()).
-record(histogram, {size = ?required(histogram, size):: non_neg_integer(),
data = [] :: data_list(),
data_size = 0 :: non_neg_integer()}).
-opaque histogram() :: #histogram{}.
-spec create(Size::non_neg_integer()) -> histogram().
create(Size) ->
#histogram{size = Size}.
-spec add(Value::float(), Histogram::histogram()) -> histogram().
add(Value, Histogram) ->
add(Value, 1, Histogram).
-spec add(Value::float(), Count::pos_integer(), Histogram::histogram()) -> histogram().
add(_Value, _Count, Histogram = #histogram{size = 0}) ->
Histogram;
add(Value, Count, Histogram = #histogram{data = OldData, data_size = OldDataSize}) ->
resize(Histogram#histogram{data = insert({Value, Count}, OldData),
data_size = OldDataSize + 1}).
-spec get_data(Histogram::histogram()) -> data_list().
get_data(Histogram) ->
Histogram#histogram.data.
%% @doc Merges the given two histograms by adding every data point of Hist2
%% to Hist1.
-spec merge(Hist1::histogram(), Hist2::histogram()) -> histogram().
merge(Hist1 = #histogram{data = Hist1Data, data_size = Hist1DataSize},
Hist2 = #histogram{data_size = Hist2DataSize}) ->
NewData = lists:foldl(fun insert/2, Hist1Data, get_data(Hist2)),
resize(Hist1#histogram{data = NewData, data_size = Hist1DataSize + Hist2DataSize}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% private
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec resize(Histogram::histogram()) -> histogram().
resize(Histogram = #histogram{data = Data, size = ExpectedSize, data_size = ActualSize}) ->
if
ActualSize > ExpectedSize ->
MinSecondValue = find_smallest_interval(Data),
NewHistogram = Histogram#histogram{data = merge_interval(MinSecondValue, Data),
data_size = ActualSize - 1},
resize(NewHistogram);
true ->
Histogram
end.
%% @doc Inserts a data item into a value-sorted data list, preserving the
%% ordering by value. An item whose value already occurs is placed after
%% the existing entries with that value.
-spec insert(Value::data_item(), Data::data_list()) -> data_list().
insert(Item, []) ->
    [Item];
insert({V1, _} = Item, [{V2, _} | _] = Data) when V1 < V2 ->
    [Item | Data];
insert(Item, [Head | Tail]) ->
    [Head | insert(Item, Tail)].
%% @doc Finds the smallest gap between two consecutive values and returns
%% the second value of that gap (in the list's order). On ties the
%% earliest (left-most) gap wins.
%% PRE: length(Data) >= 2
-spec find_smallest_interval(Data::data_list()) -> MinSecondValue::float().
find_smallest_interval([{V1, _}, {V2, _} | Tail]) ->
    scan_intervals(V2 - V1, V2, V2, Tail).

%% Walk the remaining items, carrying the smallest gap seen so far
%% (Best), the second value of that gap (BestSecond) and the previous
%% item's value (Prev).
scan_intervals(Best, BestSecond, Prev, [{V, _} | Tail]) ->
    Gap = V - Prev,
    case Best =< Gap of
        true -> scan_intervals(Best, BestSecond, V, Tail);
        false -> scan_intervals(Gap, V, V, Tail)
    end;
scan_intervals(_Best, BestSecond, _Prev, []) ->
    BestSecond.
%% @doc Merges two consecutive values if the second of them is MinSecondValue.
%% Stops after the first match.
%% PRE: length(Data) >= 2, two consecutive values with the given difference
-spec merge_interval(MinSecondValue::float(), Data::data_list()) -> data_list().
merge_interval(Value2, [{Value, Count}, {Value2, Count2} | Rest]) ->
[{(Value * Count + Value2 * Count2) / (Count + Count2), Count + Count2} | Rest];
merge_interval(MinSecondValue, [DataItem | Rest]) ->
[DataItem | merge_interval(MinSecondValue, Rest)]. | src/histogram.erl | 0.583915 | 0.59461 | histogram.erl | starcoder |
%% Copyright 2019, JobTeaser
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(totp_validator).
-export([init/1, init/2, authenticate/2, authenticate/3, otpauth_uri/3]).
-record(validator, {key :: binary(),
nb_digits :: pos_integer(),
initial_time :: totp:timestamp(),
time_step :: pos_integer(),
look_behind :: non_neg_integer(),
look_ahead :: non_neg_integer(),
last_auth_time_period :: totp:time_period() | undefined}).
-type validator() :: #validator{}.
-type validator_options() :: [validator_option()].
-type validator_option() :: {nb_digits, pos_integer()}
| {initial_time, totp:timestamp()}
| {time_step, pos_integer()}
| {look_behind, non_neg_integer()}
| {look_ahead, non_neg_integer()}.
%% @doc Initialize and return a new TOTP validator using default settings.
%%
%% @see init/2
-spec init(Key :: binary()) -> validator().
init(Key) ->
init(Key, []).
%% @doc Initialize and return a new TOTP validator.
-spec init(Key :: binary(), Options :: validator_options()) -> validator().
init(Key, Options) ->
InitialTime = proplists:get_value(initial_time, Options, 0),
TimeStep = proplists:get_value(time_step, Options, 30),
NbDigits = proplists:get_value(nb_digits, Options, 6),
LookBehind = proplists:get_value(look_behind, Options, 1),
LookAhead = proplists:get_value(look_ahead, Options, 1),
#validator{key = Key,
initial_time = InitialTime,
time_step = TimeStep,
nb_digits = NbDigits,
look_behind = LookBehind,
look_ahead = LookAhead}.
%% @doc Authenticate a password using the current system timestamp.
%%
%% @see authenticate/3
-spec authenticate(validator(), Password) ->
{validator(), valid | invalid} when
Password :: pos_integer().
authenticate(Validator, Password) ->
authenticate(Validator, Password, totp:current_timestamp()).
%% @doc Authenticate a password, assuming a specific current timestamp.
-spec authenticate(validator(), Password, Time) ->
{validator(), valid | invalid} when
Password :: pos_integer(),
Time :: totp:timestamp().
authenticate(Validator, Password, Time) ->
InitialTime = Validator#validator.initial_time,
TimeStep = Validator#validator.time_step,
TimePeriod = totp:time_period(InitialTime, TimeStep, Time),
authenticate_with_time_period(Validator, Password, TimePeriod).
-spec authenticate_with_time_period(validator(), Password, TimePeriod) ->
{validator(), valid | invalid} when
Password :: pos_integer(),
TimePeriod :: totp:time_period().
authenticate_with_time_period(Validator, _Password, TimePeriod) when
TimePeriod == Validator#validator.last_auth_time_period ->
{Validator, invalid};
authenticate_with_time_period(Validator, Password, TimePeriod) ->
Key = Validator#validator.key,
NbDigits = Validator#validator.nb_digits,
LookBehind = Validator#validator.look_behind,
LookAhead = Validator#validator.look_ahead,
TimePeriodPasswords = [totp:generate_with_time_period(Key, TP, NbDigits) ||
TP <- lists:seq(TimePeriod - LookBehind,
TimePeriod + LookAhead)],
EqualToPassword = fun (TPPassword) -> Password == TPPassword end,
Validator2 = Validator#validator{last_auth_time_period = TimePeriod},
case lists:search(EqualToPassword, TimePeriodPasswords) of
{value, _} ->
{Validator2, valid};
false ->
{Validator2, invalid}
end.
%% @doc Return an URI representing a validator that can be used to
%% automatically configure a client (or at least a Google authenticator). See
%% <a
%% href="https://github.com/google/google-authenticator/wiki/Key-Uri-Format">the
%% Google authenticator documentation</a>.
-spec otpauth_uri(validator(), Issuer, AccountName) -> URI when
Issuer :: binary(),
AccountName :: binary(),
URI :: binary().
otpauth_uri(Validator, Issuer, AccountName) ->
Key = Validator#validator.key,
NbDigits = Validator#validator.nb_digits,
TimeStep = Validator#validator.time_step,
Parameters = [{<<"period">>, integer_to_list(TimeStep)}],
otpauth_uri:generate(totp, Key, NbDigits, Issuer, AccountName, Parameters). | src/totp_validator.erl | 0.665084 | 0.520801 | totp_validator.erl | starcoder |
%% -------- Utility Functions ---------
%%
%% Generally helpful funtions within leveled
%%
-module(leveled_util).
-include("include/leveled.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([generate_uuid/0,
integer_now/0,
integer_time/1,
magic_hash/1,
safe_rename/4]).
-define(WRITE_OPS, [binary, raw, read, write]).
-spec generate_uuid() -> list().
%% @doc
%% Generate a new globally unique ID as a string.
%% Credit to
%% https://github.com/afiskon/erlang-uuid-v4/blob/master/src/uuid.erl
generate_uuid() ->
<<A:32, B:16, C:16, D:16, E:48>> = leveled_rand:rand_bytes(16),
L = io_lib:format("~8.16.0b-~4.16.0b-4~3.16.0b-~4.16.0b-~12.16.0b",
[A, B, C band 16#0fff, D band 16#3fff bor 16#8000, E]),
binary_to_list(list_to_binary(L)).
-spec integer_now() -> non_neg_integer().
%% @doc
%% Return now in gregorian seconds
integer_now() ->
integer_time(os:timestamp()).
-spec integer_time(erlang:timestamp()) -> non_neg_integer().
%% @doc
%% Convert an erlang timestamp ({MegaSecs, Secs, MicroSecs}) into
%% gregorian seconds (seconds since year 0) in universal time; the
%% microsecond component is discarded.
integer_time(Timestamp) ->
    calendar:datetime_to_gregorian_seconds(
        calendar:now_to_universal_time(Timestamp)).
-spec magic_hash(any()) -> integer().
%% @doc
%% Hash a term with D. J. Bernstein's DJB2 function (h = h * 33 xor byte),
%% truncated to 32 bits. This is more expensive than erlang:phash2/1 but
%% yields a much better balanced result.
%%
%% The seed 5381 is the traditional DJB2 starting value; some background:
%% http://stackoverflow.com/questions/10696223/reason-for-5381-number-in-djb-hash-function
magic_hash({binary, BinaryKey}) ->
    Djb2 = fun(Byte, Acc) -> (Acc * 33) bxor Byte end,
    lists:foldl(Djb2, 5381, binary_to_list(BinaryKey)) band 16#FFFFFFFF;
magic_hash(AnyKey) ->
    %% Any other term is hashed over its external term representation.
    magic_hash({binary, term_to_binary(AnyKey)}).
-spec safe_rename(string(), string(), binary(), boolean()) -> ok.
%% @doc
%% Write a file, sync it and rename it (and for super-safe mode read it back)
%% An attempt to prevent crashes leaving files with empty or partially written
%% values
%% TempFN - temporary path the data is first written to
%% RealFN - final path the file is renamed to once fully written and synced
%% BinData - the binary payload to persist
%% ReadCheck - when true, re-read the renamed file and assert its content
%% equals BinData before returning
safe_rename(TempFN, RealFN, BinData, ReadCheck) ->
    %% Every step asserts `ok', so any failure crashes the caller instead of
    %% silently leaving a truncated or corrupt file behind.
    {ok, TempFH} = file:open(TempFN, ?WRITE_OPS),
    ok = file:write(TempFH, BinData),
    %% Sync before close/rename so the bytes are durable before the file
    %% becomes visible under its real name.
    ok = file:sync(TempFH),
    ok = file:close(TempFH),
    ok = file:rename(TempFN, RealFN),
    case ReadCheck of
        true ->
            %% Super-safe mode: verify the rename target round-trips.
            {ok, ReadBack} = file:read_file(RealFN),
            true = (ReadBack == BinData),
            ok;
        false ->
            ok
    end.
%%%============================================================================
%%% Test
%%%============================================================================
-ifdef(TEST).
-define(TEST_AREA, "test/test_area/util/").
magichashperf_test() ->
KeyFun =
fun(X) ->
K = {o, "Bucket", "Key" ++ integer_to_list(X), null},
{K, X}
end,
KL = lists:map(KeyFun, lists:seq(1, 1000)),
{TimeMH, HL1} = timer:tc(lists, map, [fun(K) -> magic_hash(K) end, KL]),
io:format(user, "1000 keys magic hashed in ~w microseconds~n", [TimeMH]),
{TimePH, _Hl2} = timer:tc(lists, map, [fun(K) -> erlang:phash2(K) end, KL]),
io:format(user, "1000 keys phash2 hashed in ~w microseconds~n", [TimePH]),
{TimeMH2, HL1} = timer:tc(lists, map, [fun(K) -> magic_hash(K) end, KL]),
io:format(user, "1000 keys magic hashed in ~w microseconds~n", [TimeMH2]).
safe_rename_test() ->
ok = filelib:ensure_dir(?TEST_AREA),
TempFN = filename:join(?TEST_AREA, "test_manifest0.pnd"),
RealFN = filename:join(?TEST_AREA, "test_manifest0.man"),
ok = safe_rename(TempFN, RealFN, <<1:128/integer>>, false),
?assertMatch({ok, <<1:128/integer>>}, file:read_file(RealFN)),
TempFN1 = filename:join(?TEST_AREA, "test_manifest1.pnd"),
RealFN1 = filename:join(?TEST_AREA, "test_manifest1.man"),
ok = safe_rename(TempFN1, RealFN1, <<2:128/integer>>, true),
?assertMatch({ok, <<2:128/integer>>}, file:read_file(RealFN1)).
-endif. | src/leveled_util.erl | 0.582254 | 0.436202 | leveled_util.erl | starcoder |
%%======================================================================
%%
%% Leo Commons
%%
%% Copyright (c) 2012-2015 Rakuten, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% Leo Commons - MNESIA Utils.
%%
%% @doc leo_mnesia is utilities for mnesia operation
%% @reference https://github.com/leo-project/leo_commons/blob/master/src/leo_mnesia.erl
%% @end
%%======================================================================
-module(leo_mnesia).
-author('<NAME>').
-include("leo_commons.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib/include/qlc.hrl").
-export([read/1, write/1, delete/1,
batch/1, export/2, export/3]).
%% @doc Retrieve a value from mnesia by running Fun inside a transaction.
%% Fun is expected to return a list of records; returns {ok, Records} on
%% success, not_found when the read yielded no records, or
%% {error, Reason} when the transaction aborted.
-spec(read(Fun) ->
             {ok, [any()]} | not_found | {error, any()}
                 when Fun::function()).
read(Fun) ->
    %% NOTE(review): old-style `catch` is used, so an aborted transaction
    %% surfaces as an {'EXIT', Reason} (or {aborted, Reason}) tuple, which
    %% the {_, Cause} clause converts to {error, Cause}. This also means a
    %% Fun returning any bare 2-tuple would be misreported as an error —
    %% Fun must return a list for this dispatch to be correct.
    case catch mnesia:activity(transaction, Fun) of
        {_, Cause} ->
            {error, Cause};
        [] ->
            not_found;
        List ->
            {ok, List}
    end.
%% @doc Insert a value into mnesia
-spec(write(Fun) ->
ok | {error, any()}
when Fun::function()).
write(Fun) ->
case catch mnesia:activity(transaction, Fun) of
ok ->
ok;
{_, Cause} ->
{error, Cause}
end.
%% @doc Remove a value from mnesia
-spec(delete(Fun) ->
ok | {error, any()}
when Fun::function()).
delete(Fun) ->
case catch mnesia:activity(transaction, Fun) of
ok ->
ok;
{_, Cause} ->
{error, Cause}
end.
%% @doc Bache processing
-spec(batch(Fun) ->
ok | {error, any()}
when Fun::function()).
batch(Fun) ->
case catch mnesia:activity(transaction, Fun) of
ok ->
ok;
{_, Cause} ->
{error, Cause}
end.
%% @doc Export mnesia's records
%%
-spec(export(FilePath, Table) ->
ok | {error, any()} when FilePath::string(),
Table::atom()).
export(FilePath, Table) ->
export(FilePath, Table, ?EXPORT_TYPE_TUPLE).
%% @doc Export mnesia's records
%%
-spec(export(FilePath, Table, ExportType) ->
ok | {error, any()} when FilePath::string(),
Table::atom(),
ExportType::export_type()).
export(FilePath, Table, ExportType) ->
%% open a file
{ok, Handler} = file:open(FilePath, [write, append, binary]),
Rows = mnesia:table_info(Table, size),
%% output records
mnesia:transaction(
fun() ->
case catch mnesia:first(Table) of
'$end_of_table' ->
ok;
{'EXIT', _} ->
ok;
Key ->
Ret = mnesia:read(Table, Key, read),
case output(Handler, ExportType, Ret) of
ok ->
export_1(Rows - 1, Handler, Table, ExportType, Key);
Error ->
Error
end
end
end),
%% close a file
file:close(Handler),
ok.
%% @private
export_1(0,_Handler,_Table,_ExportType,_Key) ->
ok;
export_1(Rows, Handler, Table, ExportType, Key) ->
case catch mnesia:next(Table, Key) of
'$end_of_table' ->
ok;
{'EXIT', Cause} ->
{error, Cause};
Key_1 ->
Ret = mnesia:read(Table, Key_1, read),
case output(Handler, ExportType, Ret) of
ok ->
export_1(Rows - 1, Handler, Table, ExportType, Key_1);
Error ->
Error
end
end.
%% @doc Append a record
%% @private
output(Handler, ?EXPORT_TYPE_TUPLE, Ret) ->
lists:foreach(fun(X) -> io:format(Handler, "~p.~n",[X]) end, Ret);
output(_,_,_) ->
{error, not_support_type}. | deps/leo_commons/src/leo_mnesia.erl | 0.693161 | 0.41253 | leo_mnesia.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @doc
%% An Erlang implementation of a Server Wide Logical Clock,
%% in this case a Bitmapped Version Vector.
%% @end
-module('swc_node').
-author('<NAME> <<EMAIL>>').
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include_lib("swc/include/swc.hrl").
%% API exports
-export([ new/0
, ids/1
, get/2
, norm/1
, values/1
, missing_dots/3
, add/2
, merge/2
, join/2
, base/1
, event/2
, store_entry/3
]).
-export_type([bvv/0]).
%%====================================================================
%% API functions
%%====================================================================
%% @doc Constructs an empty BVV (an orddict in Erlang).
-spec new() -> bvv().
new() -> orddict:new().
%% @doc Returns all IDs from the entries in a BVV.
-spec ids(bvv()) -> [id()].
ids(B) ->
orddict:fetch_keys(B).
%% @doc Returns the entry of a BVV associated with a given ID, or the
%% empty entry {0,0} when the ID is not present.
-spec get(id(), bvv()) -> entry().
get(Id, BVV) ->
    case orddict:find(Id, BVV) of
        {ok, Entry} -> Entry;
        error -> {0, 0}
    end.
%% @doc Normalizes an entry {Base, Bitmap}: as long as the lowest bitmap
%% bit is set, that dot is contiguous with the base, so it is folded into
%% the base and shifted out of the bitmap.
-spec norm(entry()) -> entry().
norm({Base, Bitmap}) when Bitmap band 1 =:= 1 ->
    norm({Base + 1, Bitmap bsr 1});
norm({Base, Bitmap}) when Bitmap band 1 =:= 0 ->
    {Base, Bitmap}.
%% @doc Normalizes all entries in the BVV, using norm/2.
-spec norm_bvv(bvv()) -> bvv().
norm_bvv(BVV) ->
% normalize all entries
FunMap = fun (_Id, E) -> norm(E) end,
BVV1 = orddict:map(FunMap, BVV),
% remove `{0,0}` entries
FunFilter = fun (_Id, E) -> E =/= {0,0} end,
orddict:filter(FunFilter, BVV1).
%% @doc Returns the dots in the first clock that are missing from the second clock,
%% but only from entries in the list of ids received as argument.
-spec missing_dots(bvv(), bvv(), [id()]) -> [{id(),[counter()]}].
missing_dots(B1, B2, Ids) ->
Fun =
fun (K,V,Acc) ->
case lists:member(K, Ids) of
false -> Acc;
true ->
case orddict:find(K,B2) of
error ->
[{K,values(V)} | Acc];
{ok, V2} ->
case subtract_dots(V,V2) of
[] -> Acc;
X -> [{K,X} | Acc]
end
end
end
end,
orddict:fold(Fun,[],B1).
-spec subtract_dots(entry(), entry()) -> [counter()].
subtract_dots({N1,B1}, {N2,B2}) when N1 > N2 ->
Dots1 = lists:seq(N2+1,N1) ++ values_aux(N1,B1,[]),
Dots2 = values_aux(N2,B2,[]),
ordsets:subtract(Dots1, Dots2);
subtract_dots({N1,B1}, {N2,B2}) when N1 =< N2 ->
Dots1 = values_aux(N1,B1,[]),
Dots2 = lists:seq(N1+1,N2) ++ values_aux(N2,B2,[]),
ordsets:subtract(Dots1, Dots2).
%% @doc Returns the sequence numbers of all dots represented by an entry:
%% the contiguous range 1..Base plus every bit set in the bitmap.
-spec values(entry()) -> [counter()].
values({Base, Bitmap}) ->
    lists:seq(1, Base) ++ values_aux(Base, Bitmap, []).

%% @doc Collects the sequence numbers encoded in a bitmap, where bit K
%% (zero-based) stands for counter Pos + K + 1. Auxiliary function used
%% by values/1 (and by subtract_dots/2 elsewhere in this module).
-spec values_aux(counter(), counter(), [counter()]) -> [counter()].
values_aux(_Pos, 0, Acc) ->
    lists:reverse(Acc);
values_aux(Pos, Bitmap, Acc) ->
    Next = Pos + 1,
    case Bitmap band 1 of
        0 -> values_aux(Next, Bitmap bsr 1, Acc);
        1 -> values_aux(Next, Bitmap bsr 1, [Next | Acc])
    end.
%% @doc Adds a dot (ID, Counter) to a BVV.
-spec add(bvv(), {id(), counter()}) -> bvv().
add(BVV, {Id, Counter}) ->
Initial = add_aux({0,0}, Counter),
Fun = fun (Entry) -> add_aux(Entry, Counter) end,
orddict:update(Id, Fun, Initial, BVV).
%% @doc Adds a dot to a BVV entry, returning the normalized entry. A counter
%% at or below the base N is already represented; otherwise the bit at offset
%% M - N - 1 is set in the bitmap.
-spec add_aux(entry(), counter()) -> entry().
add_aux({N, B}, M) when M =< N ->
    norm({N, B});
add_aux({N, B}, M) ->
    norm({N, B bor (1 bsl (M - N - 1))}).
%% @doc Merges all entries from the two BVVs.
-spec merge(bvv(), bvv()) -> bvv().
merge(BVV1, BVV2) ->
    Join = fun(_Id, E1, E2) -> join_aux(E1, E2) end,
    norm_bvv(orddict:merge(Join, BVV1, BVV2)).
%% @doc Joins entries from BVV2 that are also IDs in BVV1, into BVV1.
-spec join(bvv(), bvv()) -> bvv().
join(BVV1, BVV2) ->
    %% Keep only the entries of BVV2 whose ids already exist in BVV1 ...
    Ids1 = orddict:fetch_keys(BVV1),
    BVV2b = orddict:filter(fun(Id, _Entry) -> lists:member(Id, Ids1) end, BVV2),
    %% ... then the remainder is exactly a merge; delegate to merge/2 instead
    %% of duplicating its fold-and-normalize logic (keeps the two in sync).
    merge(BVV1, BVV2b).
%% @doc Returns a (normalized) entry that results from the union of dots from
%% two other entries. Auxiliary function used by join/2. The entry with the
%% larger base keeps its base; the other bitmap is shifted down to align.
-spec join_aux(entry(), entry()) -> entry().
join_aux({N1, B1}, {N2, B2}) when N1 >= N2 ->
    {N1, B1 bor (B2 bsr (N1 - N2))};
join_aux({N1, B1}, {N2, B2}) ->
    {N2, B2 bor (B1 bsr (N2 - N1))}.
%% @doc Takes and returns a BVV where in each entry, the bitmap is reset to zero.
-spec base(bvv()) -> bvv().
base(BVV) ->
    %% Normalize first so any contiguous bitmap prefix is folded into the
    %% base counter before the non-contiguous remainder is discarded.
    orddict:map(fun(_Id, {N, _Bitmap}) -> {N, 0} end, norm_bvv(BVV)).
%% @doc Takes a BVV at node Id and returns a pair with the sequence number for
%% a new event (dot) at node Id and the original BVV with the new dot added.
%% Relies on the invariant that the node BVV for node Id knows all events
%% generated at Id, so its entry always has a zero bitmap, i.e. {N, 0}.
-spec event(bvv(), id()) -> {counter(), bvv()}.
event(BVV, Id) ->
    %% Next counter for Id: one past the stored base, or 1 for a fresh id.
    Counter =
        case orddict:find(Id, BVV) of
            {ok, {Base, 0}} -> Base + 1;
            error           -> 1
        end,
    {Counter, add(BVV, {Id, Counter})}.
%% @doc Stores an Id-Entry pair in a BVV; if the id already exists, the
%% associated entry is replaced by the new one. Empty entries are never
%% stored, and an existing entry whose base is at least as large wins.
store_entry(_Id, {0, 0}, BVV) ->
    BVV;
store_entry(Id, {N, 0} = Entry, BVV) ->
    case orddict:find(Id, BVV) of
        {ok, {Existing, _}} when Existing >= N -> BVV;
        _ -> orddict:store(Id, Entry, BVV)
    end.
%%===================================================================
%% EUnit tests
%%===================================================================
-ifdef(TEST).
%% norm/1 folds a contiguous low-order bitmap prefix into the base counter;
%% norm_bvv/1 additionally drops entries that normalize to {0,0}.
norm_test() ->
    ?assertEqual( norm({5,3}), {7,0} ),
    ?assertEqual( norm({5,2}), {5,2} ),
    ?assertEqual( norm_bvv( [{"a",{0,0}}] ), [] ),
    ?assertEqual( norm_bvv( [{"a",{5,3}}] ), [{"a",{7,0}}] ).
%% values/1 expands an entry into the explicit list of counters it covers.
values_test() ->
    ?assertEqual( lists:sort( values({0,0}) ), lists:sort( [] )),
    ?assertEqual( lists:sort( values({5,3}) ), lists:sort( [1,2,3,4,5,6,7] )),
    ?assertEqual( lists:sort( values({2,5}) ), lists:sort( [1,2,3,5] )).
%% missing_dots/3 reports per-id differences, restricted to the requested ids.
missing_dots_test() ->
    B1 = [{"a",{12,0}}, {"b",{7,0}}, {"c",{4,0}}, {"d",{5,0}}, {"e",{5,0}}, {"f",{7,10}}, {"g",{5,10}}, {"h",{5,14}}],
    B2 = [{"a",{5,14}}, {"b",{5,14}}, {"c",{5,14}}, {"d",{5,14}}, {"e",{15,0}}, {"f",{5,14}}, {"g",{7,10}}, {"h",{7,10}}],
    ?assertEqual( lists:sort(missing_dots(B1,B2,[])), []),
    ?assertEqual( lists:sort(missing_dots(B1,B2,["a","b","c","d","e","f","g","h"])), [{"a",[6,10,11,12]}, {"b",[6]}, {"f",[6,11]}, {"h",[8]}]),
    ?assertEqual( lists:sort(missing_dots(B1,B2,["a","c","d","e","f","g","h"])), [{"a",[6,10,11,12]}, {"f",[6,11]}, {"h",[8]}]),
    ?assertEqual( lists:sort(missing_dots([{"a",{2,2}}, {"b",{3,0}}], [], ["a"])), [{"a",[1,2,4]}]),
    ?assertEqual( lists:sort(missing_dots([{"a",{2,2}}, {"b",{3,0}}], [], ["a","b"])), [{"a",[1,2,4]}, {"b",[1,2,3]}]),
    ?assertEqual( missing_dots([], B1, ["a","b","c","d","e","f","g","h"]), []).
%% subtract_dots/2 returns the dots of the first entry absent from the second.
subtract_dots_test() ->
    ?assertEqual( subtract_dots({12,0},{5,14}), [6,10,11,12]),
    ?assertEqual( subtract_dots({7,0},{5,14}), [6]),
    ?assertEqual( subtract_dots({4,0},{5,14}), []),
    ?assertEqual( subtract_dots({5,0},{5,14}), []),
    ?assertEqual( subtract_dots({5,0},{15,0}), []),
    ?assertEqual( subtract_dots({7,10},{5,14}), [6,11]),
    ?assertEqual( subtract_dots({5,10},{7,10}), []),
    ?assertEqual( subtract_dots({5,14},{7,10}), [8]).
%% add/2 inserts a dot, creating a default entry for unknown ids.
add_test() ->
    ?assertEqual( add( [{"a",{5,3}}] , {"b",0} ), [{"a",{5,3}}, {"b",{0,0}}] ),
    ?assertEqual( add( [{"a",{5,3}}] , {"a",1} ), [{"a",{7,0}}] ),
    ?assertEqual( add( [{"a",{5,3}}] , {"a",8} ), [{"a",{8,0}}] ),
    ?assertEqual( add( [{"a",{5,3}}] , {"b",8} ), [{"a",{5,3}}, {"b",{0,128}}] ).
%% add_aux/2 applies a single dot to an entry and re-normalizes it.
add_aux_test() ->
    ?assertEqual( add_aux({5,3}, 8), {8,0} ),
    ?assertEqual( add_aux({5,3}, 7), {7,0} ),
    ?assertEqual( add_aux({5,3}, 4), {7,0} ),
    ?assertEqual( add_aux({2,5}, 4), {5,0} ),
    ?assertEqual( add_aux({2,5}, 6), {3,6} ),
    ?assertEqual( add_aux({2,4}, 6), {2,12} ).
%% merge/2 keeps all ids from both BVVs, joining entries for shared ids.
merge_test() ->
    ?assertEqual( merge( [{"a",{5,3}}] , [{"a",{2,4}}] ), [{"a",{7,0}}] ),
    ?assertEqual( merge( [{"a",{5,3}}] , [{"b",{2,4}}] ), [{"a",{7,0}}, {"b",{2,4}}] ),
    ?assertEqual( merge( [{"a",{5,3}}, {"c",{1,2}}] , [{"b",{2,4}}, {"d",{5,3}}] ),
                  [{"a",{7,0}}, {"b",{2,4}}, {"c",{1,2}}, {"d",{7,0}}] ),
    ?assertEqual( merge( [{"a",{5,3}}, {"c",{1,2}}] , [{"b",{2,4}}, {"c",{5,3}}] ),
                  [{"a",{7,0}}, {"b",{2,4}}, {"c",{7,0}}]).
%% join/2 is like merge/2 but ignores ids from the second BVV that the
%% first one does not already know.
join_test() ->
    ?assertEqual( join( [{"a",{5,3}}] , [{"a",{2,4}}] ), [{"a",{7,0}}] ),
    ?assertEqual( join( [{"a",{5,3}}] , [{"b",{2,4}}] ), [{"a",{7,0}}] ),
    ?assertEqual( join( [{"a",{5,3}}, {"c",{1,2}}] , [{"b",{2,4}}, {"d",{5,3}}] ), [{"a",{7,0}}, {"c",{1,2}}] ),
    ?assertEqual( join( [{"a",{5,3}}, {"c",{1,2}}] , [{"b",{2,4}}, {"c",{5,3}}] ), [{"a",{7,0}}, {"c",{7,0}}] ).
%% join_aux/2 is commutative and keeps the larger base.
join_aux_test() ->
    ?assertEqual( join_aux({5,3}, {2,4}), join_aux({2,4}, {5,3}) ),
    ?assertEqual( join_aux({5,3}, {2,4}), {5,3} ),
    ?assertEqual( join_aux({2,2}, {3,0}), {3,1} ),
    ?assertEqual( join_aux({2,2}, {3,1}), {3,1} ),
    ?assertEqual( join_aux({2,2}, {3,2}), {3,3} ),
    ?assertEqual( join_aux({2,2}, {3,4}), {3,5} ),
    ?assertEqual( join_aux({3,2}, {1,4}), {3,3} ),
    ?assertEqual( join_aux({3,2}, {1,16}), {3,6} ).
%% base/1 normalizes and then zeroes every bitmap.
base_test() ->
    ?assertEqual( base( [{"a",{5,3}}] ), [{"a",{7,0}}] ),
    ?assertEqual( base( [{"a",{5,2}}] ), [{"a",{5,0}}] ),
    ?assertEqual( base( [{"a",{5,3}}, {"b",{2,4}}, {"c",{1,2}}, {"d",{5,2}}] ),
                  [{"a",{7,0}}, {"b",{2,0}}, {"c",{1,0}}, {"d",{5,0}}] ).
%% event/2 returns the next counter for an id and the updated BVV.
event_test() ->
    ?assertEqual( event( [{"a",{7,0}}] , "a"), {8, [{"a",{8,0}}]} ),
    ?assertEqual( event( [{"a",{5,3}}] , "b"), {1, [{"a",{5,3}}, {"b",{1,0}}]} ),
    ?assertEqual( event( [{"a",{5,3}}, {"b",{2,0}}, {"c",{1,2}}, {"d",{5,3}}] , "b"),
                  {3, [{"a",{5,3}}, {"b",{3,0}}, {"c",{1,2}}, {"d",{5,3}}]} ).
%% store_entry/3 skips empty entries and never downgrades an existing base.
store_entry_test() ->
    ?assertEqual( store_entry( "a", {0,0}, [{"a",{7,0}}]), [{"a",{7,0}}] ),
    ?assertEqual( store_entry( "b", {0,0}, [{"a",{7,0}}]), [{"a",{7,0}}] ),
    ?assertEqual( store_entry( "a", {9,0}, [{"a",{7,0}}]), [{"a",{9,0}}] ),
    ?assertEqual( store_entry( "a", {90,0}, [{"a",{7,1234}}]), [{"a",{90,0}}] ),
    ?assertEqual( store_entry( "b", {9,0}, [{"a",{7,0}}]), [{"a",{7,0}}, {"b",{9,0}}] ).
-endif.
-module(plausible_block_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([
all/0, init_per_testcase/2, end_per_testcase/2
]).
-export([
basic/1,
definitely_invalid/1,
ultimately_invalid/1,
valid/1
]).
%% @doc Common Test callback: every test case in this suite.
all() ->
    [basic, definitely_invalid, ultimately_invalid, valid].
%% @doc Common Test callback: stop any blockchain supervisor that a previous
%% test case left running (best effort), then build a fresh base-dir
%% configuration for this test case.
init_per_testcase(TestCase, Config) ->
    %% Scoped try/catch instead of old-style `catch Expr`: the stop is a
    %% deliberate best-effort cleanup (the supervisor may not be running),
    %% so swallowing the failure here is intentional.
    try gen_server:stop(blockchain_sup) catch _:_ -> ok end,
    blockchain_ct_utils:init_base_dir_config(?MODULE, TestCase, Config).
%% @doc Common Test callback: best-effort teardown of the blockchain
%% supervisor after each test case.
end_per_testcase(_, _Config) ->
    %% Scoped try/catch instead of old-style `catch Expr`; the supervisor
    %% may already be stopped, in which case failure is expected.
    try gen_server:stop(blockchain_sup) catch _:_ -> ok end,
    ok.
%% @doc A block far ahead of the local head is accepted as "plausible",
%% parked while the gap is synced, and finally absorbed as the new head.
basic(Config) ->
    BaseDir = ?config(base_dir, Config),
    SimDir = ?config(sim_dir, Config),
    Balance = 5000,
    BlocksN = 80,
    {ok, _Sup, {PrivKey, PubKey}, _Opts} = test_utils:init(BaseDir),
    {ok, _GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
    Chain0 = blockchain_worker:blockchain(),
    {ok, Genesis} = blockchain:genesis_block(Chain0),
    % Add some blocks
    Blocks = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain0),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    LastBlock = lists:last(Blocks),
    %% a second chain that starts out knowing only the genesis block
    {ok, Chain} = blockchain:new(SimDir, Genesis, undefined, undefined),
    plausible = blockchain:can_add_block(LastBlock, Chain),
    %% check we return the plausible message to the caller
    plausible = blockchain:add_block(LastBlock, Chain),
    %% don't return the plausible message more than once
    exists = blockchain:add_block(LastBlock, Chain),
    %% a plausible block must not advance the head
    ?assertEqual({ok, 1}, blockchain:height(Chain)),
    ?assertEqual({ok, 1}, blockchain:sync_height(Chain)),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add a block and check the plausible block remains
    ok = blockchain:add_block(hd(Blocks), Chain),
    ?assertEqual({ok, 2}, blockchain:height(Chain)),
    ?assertEqual({ok, 2}, blockchain:sync_height(Chain)),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add all the rest of the blocks (emulate a sync)
    ok = blockchain:add_blocks(Blocks -- [LastBlock], Chain),
    %% check the plausible block is now the head of the chain
    ?assertEqual({ok, 81}, blockchain:height(Chain)),
    ?assertEqual({ok, 81}, blockchain:sync_height(Chain)),
    ?assertEqual(blockchain:head_hash(Chain), {ok, blockchain_block:hash_block(LastBlock)}),
    %% make sure the plausible blocks got removed
    [] = blockchain:get_plausible_blocks(Chain),
    %% make a new block
    {ok, FinalBlock} = test_utils:create_block(ConsensusMembers, []),
    blockchain:add_block(FinalBlock, Chain0),
    ok = blockchain:add_block(FinalBlock, Chain),
    ?assertEqual({ok, 82}, blockchain:height(Chain)),
    ?assertEqual({ok, 82}, blockchain:sync_height(Chain)),
    ?assertEqual(blockchain:head_hash(Chain), {ok, blockchain_block:hash_block(FinalBlock)}),
    [] = blockchain:get_plausible_blocks(Chain),
    %% try adding the previously plausible block again, it should not work
    exists = blockchain:add_block(LastBlock, Chain),
    [] = blockchain:get_plausible_blocks(Chain),
    ok.
%% @doc Blocks from a completely unrelated chain (different genesis and
%% different consensus keys) must be rejected outright rather than being
%% parked as plausible.
definitely_invalid(Config) ->
    BaseDir = ?config(base_dir, Config),
    SimDir = ?config(sim_dir, Config),
    Balance = 5000,
    BlocksN = 80,
    {ok, _Sup, {PrivKey, PubKey}, Opts} = test_utils:init(BaseDir),
    {ok, _GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
    Chain0 = blockchain_worker:blockchain(),
    {ok, Genesis} = blockchain:genesis_block(Chain0),
    % Add some blocks
    Blocks = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain0),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    %% tear down the chain
    gen_server:stop(blockchain_sup),
    %% NOTE: the space after "rm -rf" is required; without it the shell saw a
    %% single "-rf<path>" argument and the directory was never removed.
    os:cmd("rm -rf " ++ proplists:get_value(base_dir, Opts)),
    %% boot an entirely disjoint chain
    {ok, _Sup1, {PrivKey1, PubKey1}, _Opts1} = test_utils:init(BaseDir++"extra"),
    {ok, _GenesisMembers1, _GenesisBlock2, _ConsensusMembers1, _} = test_utils:init_chain(Balance, {PrivKey1, PubKey1}),
    Chain1 = blockchain_worker:blockchain(),
    % Add some blocks
    %% NOTE(review): these blocks are created with the *first* chain's
    %% consensus members, presumably so they can never be valid on the
    %% second chain either -- confirm this is intentional.
    Blocks1 = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain1),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    LastBlock = lists:last(Blocks1),
    {ok, Chain} = blockchain:new(SimDir, Genesis, undefined, undefined),
    %% blocks from the foreign chain are definitely invalid, not plausible
    {error, disjoint_chain} = blockchain:can_add_block(LastBlock, Chain),
    {error, disjoint_chain} = blockchain:add_block(hd(Blocks1), Chain),
    ?assertEqual({ok, 1}, blockchain:height(Chain)),
    ?assertEqual({ok, 1}, blockchain:sync_height(Chain)),
    [] = blockchain:get_plausible_blocks(Chain),
    %% add a block and check the plausible block remains
    ok = blockchain:add_block(hd(Blocks), Chain),
    ?assertEqual({ok, 2}, blockchain:height(Chain)),
    ?assertEqual({ok, 2}, blockchain:sync_height(Chain)),
    [] = blockchain:get_plausible_blocks(Chain),
    %% add all the rest of the blocks (emulate a sync)
    ok = blockchain:add_blocks(Blocks, Chain),
    %% check the plausible block is not the head of the chain
    ?assertEqual({ok, 81}, blockchain:height(Chain)),
    ?assertEqual({ok, 81}, blockchain:sync_height(Chain)),
    ?assertEqual(blockchain:head_hash(Chain), {ok, blockchain_block:hash_block(lists:last(Blocks))}),
    %% make sure the plausible block is not in the chain at all
    {error, not_found} = blockchain:get_block(blockchain_block:hash_block(LastBlock), Chain),
    %% make sure the plausible blocks got removed
    [] = blockchain:get_plausible_blocks(Chain),
    ok.
%% @doc A block from a forked chain with the same members looks plausible at
%% first, but must be discarded once the real chain is synced and the block
%% turns out not to connect to it.
ultimately_invalid(Config) ->
    BaseDir = ?config(base_dir, Config),
    SimDir = ?config(sim_dir, Config),
    Balance = 5000,
    BlocksN = 80,
    {ok, _Sup, {PrivKey, PubKey}, Opts} = test_utils:init(BaseDir),
    {ok, GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
    Chain0 = blockchain_worker:blockchain(),
    {ok, Genesis} = blockchain:genesis_block(Chain0),
    % Add some blocks
    Blocks = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain0),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    %% tear down the chain
    gen_server:stop(blockchain_sup),
    %% NOTE: the space after "rm -rf" is required; without it the shell saw a
    %% single "-rf<path>" argument and the directory was never removed.
    os:cmd("rm -rf " ++ proplists:get_value(base_dir, Opts)),
    %% boot a chain with the same keys and genesis members, but a fresh
    %% (diverging) block history
    {ok, _Sup1, {PrivKey, PubKey}, _Opts1} = test_utils:init(BaseDir++"extra", {PrivKey, PubKey}),
    {ok, _GenesisMembers, _GenesisBlock2, ConsensusMembers, _} = test_utils:init_chain(Balance, GenesisMembers, #{}),
    Chain1 = blockchain_worker:blockchain(),
    % Add some blocks
    Blocks1 = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain1),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    LastBlock = lists:last(Blocks1),
    {ok, Chain} = blockchain:new(SimDir, Genesis, undefined, undefined),
    %% signed by the right members, so it looks plausible for now
    plausible = blockchain:can_add_block(LastBlock, Chain),
    %% check we return the plausible message to the caller
    plausible = blockchain:add_block(LastBlock, Chain),
    %% don't return the plausible message more than once
    exists = blockchain:add_block(LastBlock, Chain),
    %% try to add a block that should be invalid right away
    {error, disjoint_chain} = blockchain:add_block(hd(Blocks1), Chain),
    ?assertEqual({ok, 1}, blockchain:height(Chain)),
    ?assertEqual({ok, 1}, blockchain:sync_height(Chain)),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add a block and check the plausible block remains
    ok = blockchain:add_block(hd(Blocks), Chain),
    ?assertEqual({ok, 2}, blockchain:height(Chain)),
    ?assertEqual({ok, 2}, blockchain:sync_height(Chain)),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add all the rest of the blocks (emulate a sync)
    ok = blockchain:add_blocks(Blocks, Chain),
    %% check the plausible block is not the head of the chain
    ?assertEqual({ok, 81}, blockchain:height(Chain)),
    ?assertEqual({ok, 81}, blockchain:sync_height(Chain)),
    ?assertEqual(blockchain:head_hash(Chain), {ok, blockchain_block:hash_block(lists:last(Blocks))}),
    %% make sure the plausible block is not in the chain at all
    {error, not_found} = blockchain:get_block(blockchain_block:hash_block(LastBlock), Chain),
    %% make sure the plausible blocks got removed
    [] = blockchain:get_plausible_blocks(Chain),
    ok.
%% @doc A plausible block that does connect becomes the head after sync;
%% afterwards, plausibility tolerates replacing up to F + 1 consensus
%% members (N = 7, F = 2) but no more.
valid(Config) ->
    BaseDir = ?config(base_dir, Config),
    SimDir = ?config(sim_dir, Config),
    Balance = 5000,
    BlocksN = 80,
    {ok, _Sup, {PrivKey, PubKey}, _Opts} = test_utils:init(BaseDir),
    {ok, _GenesisMembers, _GenesisBlock, ConsensusMembers, _} = test_utils:init_chain(Balance, {PrivKey, PubKey}),
    Chain0 = blockchain_worker:blockchain(),
    {ok, Genesis} = blockchain:genesis_block(Chain0),
    % Add some blocks
    Blocks = lists:reverse(lists:foldl(
        fun(_, Acc) ->
            {ok, Block} = test_utils:create_block(ConsensusMembers, []),
            blockchain:add_block(Block, Chain0),
            [Block|Acc]
        end,
        [],
        lists:seq(1, BlocksN)
    )),
    LastBlock = lists:last(Blocks),
    %% a second chain that starts out knowing only the genesis block
    {ok, Chain} = blockchain:new(SimDir, Genesis, undefined, undefined),
    plausible = blockchain:can_add_block(LastBlock, Chain),
    %% check we return the plausible message to the caller
    plausible = blockchain:add_block(LastBlock, Chain),
    %% don't return the plausible message more than once
    exists = blockchain:add_block(LastBlock, Chain),
    ?assertEqual({ok, 1}, blockchain:height(Chain)),
    ?assertEqual({ok, 1}, blockchain:sync_height(Chain)),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add a block and check the plausible block remains
    ok = blockchain:add_block(hd(Blocks), Chain),
    ?assertEqual({ok, 2}, blockchain:height(Chain)),
    ?assertEqual({ok, 2}, blockchain:sync_height(Chain)),
    true = blockchain:has_block(LastBlock, Chain),
    [LastBlock] = blockchain:get_plausible_blocks(Chain),
    %% add all the rest of the blocks (emulate a sync)
    ok = blockchain:add_blocks(Blocks -- [LastBlock], Chain),
    %% check the plausible block is now the head of the chain
    ?assertEqual({ok, 81}, blockchain:height(Chain)),
    ?assertEqual({ok, 81}, blockchain:sync_height(Chain)),
    ?assertEqual(blockchain:head_hash(Chain), {ok, blockchain_block:hash_block(LastBlock)}),
    %% make sure the plausible blocks got removed
    [] = blockchain:get_plausible_blocks(Chain),
    %% NOTE:
    %% N = 7, F = 2 (testing)
    %% Even if we replace upto 3 ( F + 1) members, it should be
    %% considered a plausible block
    true = check_plausibility_after_replacing_k_members(1, ConsensusMembers, Chain), %% keep 6
    true = check_plausibility_after_replacing_k_members(2, ConsensusMembers, Chain), %% keep 5
    true = check_plausibility_after_replacing_k_members(3, ConsensusMembers, Chain), %% keep 4
    true = check_plausibility_after_replacing_k_members(4, ConsensusMembers, Chain), %% keep 3
    false = check_plausibility_after_replacing_k_members(5, ConsensusMembers, Chain), %% keep 2
    false = check_plausibility_after_replacing_k_members(6, ConsensusMembers, Chain), %% keep 1
    false = check_plausibility_after_replacing_k_members(7, ConsensusMembers, Chain), %% keep 0
    ok.
%% @doc Drop the last ToReplace members of ConsensusMembers and append that
%% many freshly generated key pairs, keeping the list length constant.
replace_members(ConsensusMembers, ToReplace) ->
    Keep = length(ConsensusMembers) - ToReplace,
    lists:sublist(ConsensusMembers, Keep) ++ test_utils:generate_keys(ToReplace).
%% @doc Replace K of the N consensus members with freshly generated keys,
%% have the resulting group sign a block, and return whether the chain
%% still considers that block plausible.
check_plausibility_after_replacing_k_members(K, ConsensusMembers, Chain) ->
    N = length(ConsensusMembers),
    ct:pal("N: ~p", [N]),
    ct:pal("K: ~p", [K]),
    %% this suite is built around a 7-member consensus group
    ?assertEqual(7, N),
    NewMembers = replace_members(ConsensusMembers, K),
    ?assertEqual(N, length(NewMembers)),
    CM = [libp2p_crypto:bin_to_b58(I) || {I, _} <- ConsensusMembers],
    CM2 = [libp2p_crypto:bin_to_b58(I) || {I, _} <- NewMembers],
    ct:pal("ConsensusMembers: ~p", [CM]),
    ct:pal("NewMembers: ~p", [CM2]),
    %% the old and new groups must share exactly N - K members
    Shared = sets:intersection(sets:from_list(ConsensusMembers),
                               sets:from_list(NewMembers)),
    true = sets:size(Shared) == (N - K),
    {ok, BlockByNewMembers} = test_utils:create_block(NewMembers, []),
    ct:pal("AnotherBlock: ~p", [BlockByNewMembers]),
    IsPlausible = blockchain:is_block_plausible(BlockByNewMembers, Chain),
    ct:pal("IsPlausible: ~p", [IsPlausible]),
    IsPlausible.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%
%% -------------------------------------------------------------------
%% @doc Exometer reporter for lager backend
%%
%% This reporter emits messages to the lager logging backend,
%% at a reporting level chosen by the user (default: `notice').
%%
%% To change the reporting level, pass on the option `{level, Level}'.
%%
%% Example:
%% <pre lang="erlang">
%% Eshell V5.9.2 (abort with ^G)
%% 1> exometer:start().
%% 17:41:14.078 [info] Application lager started on node nonode@nohost
%% ok
%% 17:41:14.125 [info] Starting reporters with []
%% 17:41:14.125 [info] Application exometer started on node nonode@nohost
%%
%% 2> lager:set_loglevel(lager_console_backend,notice).
%% ok
%% 3> exometer:new([c], counter).
%% ok
%% 4> exometer:update([c], 2).
%% ok
%% 5> exometer_report:add_reporter(
%% exometer_report_lager,[{type_map,[{'_',integer}]}]).
%% ok
%% 6> exometer_report:subscribe(exometer_report_lager,[c],[value],10000).
%% ok
%% 17:42:47.496 [notice] exometer_report_lager: c_value 1398008567:2 (integer)
%% 17:42:57.498 [notice] exometer_report_lager: c_value 1398008577:2 (integer)
%% 17:43:07.499 [notice] exometer_report_lager: c_value 1398008587:2 (integer)
%% 7> exometer:update([c], 2).
%% ok
%% 17:43:17.501 [notice] exometer_report_lager: c_value 1398008597:4 (integer)
%% </pre>
%% @end
-module(exometer_report_lager).
-behaviour(exometer_report).
-export(
[
exometer_init/1,
exometer_info/2,
exometer_cast/2,
exometer_call/3,
exometer_report/5,
exometer_subscribe/5,
exometer_unsubscribe/4,
exometer_newentry/2,
exometer_setopts/4,
exometer_terminate/2
]).
-include("exometer.hrl").
-include("log.hrl").
-define(SERVER, ?MODULE).
%% calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}}).
-define(UNIX_EPOCH, 62167219200).
-record(st, {type_map = [], level = notice}).
%%%===================================================================
%%% exometer_report callback API
%%%===================================================================
%% @doc Initialize the reporter state, honouring the `type_map' and `level'
%% options and falling back to the #st{} record defaults for anything unset.
exometer_init(Opts) ->
    ?info("~p(~p): Starting~n", [?MODULE, Opts]),
    Defaults = #st{},
    St = Defaults#st{
        type_map = proplists:get_value(type_map, Opts, Defaults#st.type_map),
        level    = proplists:get_value(level, Opts, Defaults#st.level)
    },
    {ok, St}.
%% This reporter keeps no per-subscription bookkeeping; acknowledge both
%% subscribe and unsubscribe and return the state unchanged.
exometer_subscribe(_Metric, _DataPoint, _Extra, _Interval, St) ->
    {ok, St}.
exometer_unsubscribe(_Metric, _DataPoint, _Extra, St) ->
    {ok, St}.
%% Invoked through the remote_exometer() function to send out an update:
%% formats "module: metric_datapoint:value\n" and hands it to lager at the
%% severity configured in the reporter state.
exometer_report(Metric, DataPoint, _Extra, Value, #st{level = Level} = St) ->
    ?debug("Report metric ~p_~p = ~p~n", [Metric, DataPoint, Value]),
    Line = [?MODULE_STRING, ": ", name(Metric, DataPoint), ":", value(Value), $\n],
    log(Level, lists:flatten(Line)),
    {ok, St}.
%% Unexpected synchronous calls are logged and ignored.
exometer_call(Unknown, From, St) ->
    ?info("Unknown call ~p from ~p", [Unknown, From]),
    {ok, St}.
%% Unexpected casts are logged and ignored.
exometer_cast(Unknown, St) ->
    ?info("Unknown cast: ~p", [Unknown]),
    {ok, St}.
%% Unexpected info messages are logged and ignored.
exometer_info(Unknown, St) ->
    ?info("Unknown info: ~p", [Unknown]),
    {ok, St}.
%% No action needed when a new metric entry is created.
exometer_newentry(_Entry, St) ->
    {ok, St}.
%% No action needed when metric options change.
exometer_setopts(_Metric, _Options, _Status, St) ->
    {ok, St}.
%% Nothing to clean up on termination.
exometer_terminate(_, _) ->
    ignore.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Build the flattened metric name: all metric path segments joined with
%% "_", followed by "_" and the datapoint.
name(Metric, DataPoint) ->
    metric_to_string(Metric) ++ [$_ | thing_to_list(DataPoint)].
%% Join a non-empty metric path with "_" separators.
metric_to_string([Last]) ->
    thing_to_list(Last);
metric_to_string([Head | Rest]) ->
    thing_to_list(Head) ++ [$_ | metric_to_string(Rest)].
%% Render a single path segment (atom, string, integer or binary) as a string.
thing_to_list(A) when is_atom(A)    -> atom_to_list(A);
thing_to_list(S) when is_list(S)    -> S;
thing_to_list(I) when is_integer(I) -> integer_to_list(I);
thing_to_list(B) when is_binary(B)  -> binary_to_list(B).
%% Render a reported value; anything that is not a number becomes "0".
value(I) when is_integer(I) -> integer_to_list(I);
value(F) when is_float(F)   -> io_lib:format("~f", [F]);
value(_Other)               -> "0".
%% Forward the already-formatted String to lager at severity Level,
%% attributing the message to this reporter process.
%% (The original line had dataset-table residue fused onto it, which made
%% the file unparseable; this restores the clean statement.)
log(Level, String) ->
    lager:log(Level, self(), String).
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.