%% NOTE: the two lines below were a dataset-export table header, not
%% Erlang source; preserved only as a comment.
%% code | path | quality_prob | learning_prob | filename | kind
%% ===================================================================
%% @author <NAME> <<EMAIL>>
%% @copyright 2011-2016 <NAME>
%%
%% @doc e2 option validation utility.
%%
%% For examples of how this facility can be used to validate options
%% lists, see test/e2_opt_tests.erl
%% [https://github.com/gar1t/e2/blob/master/test/e2_opt_tests.erl].
%% @end
%% ===================================================================
-module(e2_opt).
-export([validate/2, validate/3, value/2, value/3]).
-define(NO_DEFAULT, '$e2_opt_nodefault').
-record(schema, {implicit, constraints}).
-record(constraint,
{values,
type,
min,
max,
pattern,
validate,
implicit=false,
optional=false,
default=?NO_DEFAULT}).
-define(is_type(T), (T == int orelse
T == float orelse
T == string orelse
T == number orelse
T == atom orelse
T == list orelse
T == boolean orelse
T == binary orelse
T == iolist orelse
T == function)).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Validates an options list against a raw schema. The schema is
%% compiled first; validated options accumulate in a fresh dict.
validate(Options, Schema) ->
    validate(Options, compile_schema(Schema), dict:new()).
%% Clause 1: all options consumed -- apply defaults / check required.
%% Clause 2: validate the next option against the compiled schema.
%% Clause 3: Schema is not yet a #schema{} record, so compile it first
%% (lets callers pass either a raw or a compiled schema).
validate([], #schema{}=Schema, Opts0) ->
    apply_missing(Schema, Opts0);
validate([Opt|Rest], #schema{}=Schema, Opts0) ->
    validate(Rest, Schema, apply_opt(Opt, Schema, Opts0));
validate(MoreOptions, Schema, Opts0) ->
    validate(MoreOptions, compile_schema(Schema), Opts0).
%% @doc Fetches a validated option by name; crashes (badarg from
%% dict:fetch/2) when the option is absent.
value(Name, Opts) ->
    dict:fetch(Name, Opts).
%% @doc Fetches a validated option by name, returning Default when the
%% option is not present in Opts.
value(Name, Opts, Default) ->
    case dict:find(Name, Opts) of
        error -> Default;
        {ok, Stored} -> Stored
    end.
%% Compiles a raw schema (list of names / {Name, Opts}) into a
%% #schema{} record holding the parsed constraints plus an index of
%% implicit values to option names.
compile_schema(Schema) ->
    Constraints = [compile_constraint(C) || C <- Schema],
    #schema{implicit=index_implicit(Constraints), constraints=Constraints}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% A bare atom is an option with no constraints; {Name, Opts} parses
%% Opts into a #constraint{} record.
compile_constraint(Name) when is_atom(Name) ->
    {Name, #constraint{}};
compile_constraint({Name, Opts}) ->
    {Name, apply_constraint_options(Opts, #constraint{})}.
%% Builds a dict from each implicit value to the option name that owns
%% it. An implicit constraint must declare a values list (otherwise
%% {values_required, Name} is raised); duplicate values across implicit
%% constraints are rejected by index_implicit_vals/3.
index_implicit(Constraints) ->
    index_implicit(Constraints, dict:new()).

index_implicit([], Imp) -> Imp;
index_implicit([{_, #constraint{implicit=false}}|Rest], Imp) ->
    index_implicit(Rest, Imp);
index_implicit([{Name, #constraint{implicit=true, values=undefined}}|_], _) ->
    error({values_required, Name});
index_implicit([{Name, #constraint{implicit=true, values=Vals}}|Rest], Imp) ->
    index_implicit(Rest, index_implicit_vals(Name, Vals, Imp)).
%% Stores Name under each value Val in the index, raising
%% {duplicate_implicit_value, Val} when a value is already mapped.
index_implicit_vals(_Name, [], Index) ->
    Index;
index_implicit_vals(Name, [Val | Rest], Index) ->
    case dict:is_key(Val, Index) of
        true ->
            error({duplicate_implicit_value, Val});
        false ->
            index_implicit_vals(Name, Rest, dict:store(Val, Name, Index))
    end.
-define(constraint_val(Field, Val, C), C#constraint{Field=Val}).
%% Folds a list of constraint options into a #constraint{} record.
%% Unknown options raise {badarg, Option}. Note: a bare type atom
%% (e.g. int) and {type, int} are both accepted.
apply_constraint_options([], C) -> C;
apply_constraint_options([{values, Values}|Rest], C) when is_list(Values) ->
    apply_constraint_options(Rest, ?constraint_val(values, Values, C));
apply_constraint_options([{type, Type}|Rest], C) when ?is_type(Type) ->
    apply_constraint_options(Rest, ?constraint_val(type, Type, C));
apply_constraint_options([Type|Rest], C) when ?is_type(Type) ->
    apply_constraint_options(Rest, ?constraint_val(type, Type, C));
apply_constraint_options([{min, Min}|Rest], C) ->
    apply_constraint_options(Rest, ?constraint_val(min, Min, C));
apply_constraint_options([{max, Max}|Rest], C) ->
    apply_constraint_options(Rest, ?constraint_val(max, Max, C));
apply_constraint_options([{pattern, Pattern}|Rest], C) ->
    %% Patterns are compiled once here, not on each check.
    apply_constraint_options(
      Rest, ?constraint_val(pattern, compile_pattern(Pattern), C));
apply_constraint_options([{validate, F}|Rest], C) when is_function(F) ->
    %% Validators must be arity-1 funs; check_validate/1 enforces this.
    apply_constraint_options(
      Rest, ?constraint_val(validate, check_validate(F), C));
apply_constraint_options([optional|Rest], C) ->
    apply_constraint_options(Rest, ?constraint_val(optional, true, C));
apply_constraint_options([{optional, B}|Rest], C) when is_boolean(B) ->
    apply_constraint_options(Rest, ?constraint_val(optional, B, C));
apply_constraint_options([{default, Default}|Rest], C) ->
    apply_constraint_options(Rest, ?constraint_val(default, Default, C));
apply_constraint_options([implicit|Rest], C) ->
    apply_constraint_options(Rest, ?constraint_val(implicit, true, C));
apply_constraint_options([{Name, _}|_], _) ->
    error({badarg, Name});
apply_constraint_options([Other|_], _) ->
    error({badarg, Other}).
%% Compiles a regular expression with re:compile/1, converting an
%% invalid pattern into a {badarg, pattern} error.
compile_pattern(Pattern) ->
    case re:compile(Pattern) of
        {error, _Reason} -> error({badarg, pattern});
        {ok, Compiled} -> Compiled
    end.
%% Returns F unchanged when it is an arity-1 fun; any other arity is a
%% schema error reported as {badarg, validate}.
check_validate(F) ->
    {arity, Arity} = erlang:fun_info(F, arity),
    case Arity of
        1 -> F;
        _ -> error({badarg, validate})
    end.
%% Validates a single option against the schema and stores it; a name
%% seen twice in the input raises {duplicate, Name}.
apply_opt(Opt, Schema, Opts) ->
    {Name, Value} = validate_opt(Opt, Schema),
    case dict:find(Name, Opts) of
        {ok, _} -> error({duplicate, Name});
        error -> dict:store(Name, Value, Opts)
    end.
%% Resolves an option to a {Name, Value} pair:
%%  - {Name, Value} pairs are checked against Name's constraint;
%%  - a bare value is first tried as an implicit value (yielding the
%%    owning option's name), otherwise treated as the flag {Option, true}.
%% Any validation failure is reported as {badarg, Name}.
validate_opt({Name, Value}, Schema) ->
    case find_constraint(Name, Schema) of
        {ok, Constraint} ->
            case check_value(Value, Constraint) of
                ok -> {Name, Value};
                error -> error({badarg, Name})
            end;
        error -> error({badarg, Name})
    end;
validate_opt(Option, Schema) ->
    case implicit_option(Option, Schema) of
        {ok, Name} -> {Name, Option};
        error ->
            validate_opt({Option, true}, Schema)
    end.
%% Looks up the constraint for Name in the compiled schema.
%% Returns {ok, Constraint} | error.
find_constraint(Name, #schema{constraints=Constraints}) ->
    case lists:keyfind(Name, 1, Constraints) of
        {Name, Constraint} -> {ok, Constraint};
        false -> error
    end.
%% Looks up Value in the schema's implicit-value index. Returns
%% {ok, Name} when Value identifies an implicit option, else error.
implicit_option(Value, #schema{implicit=Implicit}) ->
    %% dict:find/2 already returns {ok, Name} | error, so the previous
    %% case expression that re-wrapped both results was redundant.
    dict:find(Value, Implicit).
%% Runs the full pipeline of checks for a value; each check returns
%% ok | error and the pipeline short-circuits on the first error.
check_value(Val, Constraint) ->
    apply_checks(Val, Constraint,
                 [fun check_enum/2,
                  fun check_type/2,
                  fun check_range/2,
                  fun check_pattern/2,
                  fun apply_validate/2]).
%% Applies each check fun in order, stopping at the first error.
apply_checks(_Val, _Constraint, []) ->
    ok;
apply_checks(Val, Constraint, [Check | Remaining]) ->
    case Check(Val, Constraint) of
        error -> error;
        ok -> apply_checks(Val, Constraint, Remaining)
    end.
%% When a values list is declared, the value must be a member of it;
%% no values list means any value passes this check.
check_enum(_Val, #constraint{values=undefined}) -> ok;
check_enum(Val, #constraint{values=Values}) ->
    case lists:member(Val, Values) of
        true -> ok;
        false -> error
    end.
%% Tests whether a term is valid iodata by attempting to compute its
%% size. NOTE: despite the is_* name this is NOT guard-safe (it uses
%% try/catch), so it may only be used in ordinary expressions.
%% Fix: the macro body previously referenced the call-site variable
%% `Val` instead of its own parameter `T` (unhygienic -- it only worked
%% because the sole call site happened to pass a variable named Val).
-define(is_iolist(T),
        try erlang:iolist_size(T) of
            _ -> true
        catch
            error:badarg -> false
        end).
%% Checks a value against the declared type; undefined type passes.
%% NOTE(review): the string and iolist branches are identical (both
%% just test iolist_size validity) -- presumably intentional since
%% Erlang "strings" are chardata, but confirm.
check_type(_Val, #constraint{type=undefined}) -> ok;
check_type(Val, #constraint{type=int}) when is_integer(Val) -> ok;
check_type(Val, #constraint{type=float}) when is_float(Val) -> ok;
check_type(Val, #constraint{type=number}) when is_number(Val) -> ok;
check_type(Val, #constraint{type=string}) ->
    case ?is_iolist(Val) of
        true -> ok;
        false -> error
    end;
check_type(Val, #constraint{type=boolean}) when is_boolean(Val) -> ok;
check_type(Val, #constraint{type=list}) when is_list(Val) -> ok;
check_type(Val, #constraint{type=atom}) when is_atom(Val) -> ok;
check_type(Val, #constraint{type=binary}) when is_binary(Val) -> ok;
check_type(Val, #constraint{type=function}) when is_function(Val) -> ok;
check_type(Val, #constraint{type=iolist}) ->
    try iolist_size(Val) of
        _ -> ok
    catch
        error:badarg -> error
    end;
%% Declared type with a non-matching value.
check_type(_, _) -> error.
%% Enforces optional min/max bounds; an unset bound is not checked.
check_range(_Val, #constraint{min=undefined, max=undefined}) -> ok;
check_range(Val, #constraint{min=undefined, max=Max}) when Val =< Max -> ok;
check_range(Val, #constraint{min=Min, max=undefined}) when Val >= Min -> ok;
check_range(Val, #constraint{min=Min, max=Max}) when Val =< Max,
                                                     Val >= Min -> ok;
check_range(_, _) -> error.
%% Matches the value against the pre-compiled regex, if one was given.
check_pattern(_Val, #constraint{pattern=undefined}) -> ok;
check_pattern(Val, #constraint{pattern=Regex}) ->
    case re:run(Val, Regex, [{capture, none}]) of
        match -> ok;
        nomatch -> error
    end.
%% Runs the user-supplied validator fun, if any. The fun must return
%% ok | error; any other result is a programming error and raises
%% {validate_result, Other}.
apply_validate(_Val, #constraint{validate=undefined}) -> ok;
apply_validate(Val, #constraint{validate=Validate}) ->
    case Validate(Val) of
        ok -> ok;
        error -> error;
        Other -> error({validate_result, Other})
    end.
%% Fills in defaults for (and checks the presence of) every constraint
%% once all supplied options have been processed.
apply_missing(#schema{constraints=Constraints}, Opts0) ->
    lists:foldl(fun apply_default/2, Opts0, Constraints).
%% Ensures the named option is present: when missing, stores the
%% constraint's default; with no default and a non-optional constraint,
%% raises {required, Name}.
%% Fixes: removed dataset-export metadata that was fused onto the final
%% line, and flattened the triple-nested case into guarded clauses.
apply_default({Name, #constraint{default=Default, optional=Optional}}, Opts) ->
    case dict:find(Name, Opts) of
        {ok, _} ->
            Opts;
        error when Default =/= ?NO_DEFAULT ->
            dict:store(Name, Default, Opts);
        error when Optional =:= true ->
            Opts;
        error ->
            error({required, Name})
    end.
%% @author <NAME> <<EMAIL>> [http://ferd.ca/]
%% @doc Erlpass is a simple wrapper library trying to abstract away common
%% password operations using safe algorithms, in this case, bcrypt.
-module(erlpass).
-export([hash/1, hash/2, match/2, change/3, change/4]).
-define(DEFAULT_WORK_FACTOR, 12).
%% @type password() = iodata(). A password, supports valid unicode.
-type password() :: iodata().
%% @type work_factor() = 4..31. Work factor of the bcrypt algorithm
-type work_factor() :: 4..31.
%% @type hash() = binary(). The hashed password with a given work factor.
-type hash() :: binary().
-export_type([password/0, work_factor/0, hash/0]).
%% @doc Similar to {@link hash/2. <code>hash(Password, 12)</code>} --
%% hashes with the default work factor (?DEFAULT_WORK_FACTOR = 12).
-spec hash(password()) -> hash().
hash(S) when is_binary(S); is_list(S) -> hash(S, ?DEFAULT_WORK_FACTOR).
%% @doc Hashes a given {@link password(). <code>password</code>} with a given
%% {@link work_factor(). work factor}. Bcrypt will be used to create
%% a {@link hash(). hash} of the password to be stored by the application.
%% Compare the password to the hash by using {@link match/2. <code>match/2</code>}.
%% Bcrypt takes care of salting the hashes for you so this does not need to be
%% done. The higher the work factor, the longer the password will take to be
%% hashed and checked.
-spec hash(password(), work_factor()) -> hash().
hash(Str, Factor) ->
    %% Assert salt generation succeeded instead of blindly taking
    %% element 2 of the result: element(2, {error, Reason}) would have
    %% silently passed an error Reason to hashpw as the "salt".
    {ok, Salt} = bcrypt:gen_salt(Factor),
    {ok, Hash} = bcrypt:hashpw(format_pass(Str), Salt),
    list_to_binary(Hash).
%% @doc Compares a given password to a hash. Returns <code>true</code> if
%% the password matches, and <code>false</code> otherwise. The comparison
%% is done in constant time (based on the hash length).
-spec match(password(), hash()) -> boolean().
match(Pass, Hash) ->
    %% bcrypt:hashpw/2 re-hashes Pass with the salt embedded in Hash;
    %% equal output strings mean the password matched.
    LHash = binary_to_list(Hash),
    {ok, ResHash} = bcrypt:hashpw(format_pass(Pass), LHash),
    verify_in_constant_time(LHash, ResHash).
%% @doc If a given {@link password(). password} matches a given
%% {@link hash(). hash}, the password is re-hashed again using
%% the new {@link work_factor(). work factor}. This allows to update a
%% given work factor to something stronger.
%% Equivalent to {@link change/4. <code>change(Pass, Hash, Pass, Factor)</code>}.
-spec change(password(), hash(), work_factor()) -> hash() | {error, bad_password}.
change(Pass, Hash, Factor) ->
    change(Pass, Hash, Pass, Factor).
%% @doc If a given old {@link password(). password} matches a given old
%% {@link hash(). hash}, a new {@link password(). password} is hashed using the
%% {@link work_factor(). work factor} passed in as an argument.
%% Allows to safely change a password, only if the previous one was given
%% with it.
-spec change(password(), hash(), password(), work_factor()) -> hash() | {error, bad_password}.
change(OldPass, Hash, NewPass, Factor) ->
    case match(OldPass, Hash) of
        true -> hash(NewPass, Factor);
        false -> {error, bad_password}
    end.
%%% PRIVATE
%% This 'list_to_binary' stuff is risky -- no idea what the implementation
%% is like.
%% We have to support unicode
%% @doc Normalizes a password to a binary the bcrypt library can
%% consume. Lists are converted as UTF-8 when possible, falling back to
%% a plain byte conversion for data that is not valid unicode; binaries
%% pass through untouched.
-spec format_pass(iodata()) -> binary().
format_pass(Bin) when is_binary(Bin) ->
    Bin;
format_pass(Str) when is_list(Str) ->
    case unicode:characters_to_binary(Str) of
        Converted when is_binary(Converted) -> Converted;
        %% {error, _, _} or {incomplete, _, _}: not valid unicode input
        _Failure -> list_to_binary(Str)
    end.
%% @doc Verifies two hashes for matching purpose, in constant time. That allows
%% a safer verification as no attacker can use the time it takes to compare hash
%% values to find an attack vector (past figuring out the complexity)
%% Folds pairwise XORs of the two equal-length strings into an
%% accumulator; true only when every character pair matched. Using
%% bor/bxor (instead of early exit) keeps the running time independent
%% of where the first mismatch occurs.
verify_in_constant_time([], [], Acc) ->
    Acc =:= 0;
verify_in_constant_time([X | RestX], [Y | RestY], Acc) ->
    verify_in_constant_time(RestX, RestY, Acc bor (X bxor Y)).
%% Entry point: hashes must be same-length lists; anything else cannot
%% match. Length comparison itself is not secret (hash length is public).
%% Fixes: removed dataset-export metadata fused onto the final line;
%% `and` -> comma (short-circuit) guard; `==` -> `=:=`.
verify_in_constant_time(X, Y) when is_list(X), is_list(Y) ->
    case length(X) =:= length(Y) of
        true -> verify_in_constant_time(X, Y, 0);
        false -> false
    end;
verify_in_constant_time(_X, _Y) -> false.
-module(tagged_gen).
-export([term/1]).
-include_lib("eunit/include/eunit.hrl").
-include("test/macros.hrl").
%% Entry point: for a tagged term, generate the def for the wrapped
%% value and re-apply the tag around it; otherwise dispatch on the
%% term's symbol name directly.
term({tagged, Ctx, Path, Val} = Term) -> tagged(Path, Ctx, term(symbol:name(Term), Val));
term(Term) -> term(symbol:name(Term), Term).
%% Links and keywords pass through untouched; any other term becomes a
%% def whose body is the generated constructor (see sub/2).
term(_, {link, _, _} = Term) -> Term;
term(_Tag, {keyword, _, _, _} = Term) -> Term;
term(Tag, Term) ->
    Ctx = element(2, Term),
    {def, Ctx, Tag, sub(Ctx, Term)}.
%% Builds the constructor body for a term:
%%  - lists/dicts: replace each non-literal element with a fresh
%%    argument and emit a single-clause fun over those arguments;
%%  - sums: emit one clause per member, each pattern-matching the
%%    single input argument against that member;
%%  - anything else: a one-argument fun that pattern-matches the input
%%    against the term and returns it.
sub(Ctx, {list, ListCtx, Elems}) ->
    {Args, Patterns, Terms} = substitute_domains(Elems),
    {'fun', Ctx, [{clause, Ctx, patterns(Args, Patterns, Ctx), {list, ListCtx, Terms}}]};
sub(Ctx, {dict, DictCtx, Elems}) ->
    {Args, Patterns, Terms} = substitute_domains(Elems),
    {'fun', Ctx, [{clause, Ctx, patterns(Args, Patterns, Ctx), {dict, DictCtx, Terms}}]};
sub(Ctx, {sum, SumCtx, Elems}) ->
    Arg = {symbol, Ctx, variable, arg(1)},
    Defs = [{[Arg], [E], Arg} || E <- Elems],
    MakeClause = fun(Args, Patterns, Body) ->
                         {clause, SumCtx, patterns(Args, Patterns, Ctx), Body}
                 end,
    Clauses = [MakeClause(Args, Patterns, Body) || {Args, Patterns, Body} <- Defs],
    {'fun', Ctx, Clauses};
sub(Ctx, Term) ->
    TermCtx = element(2, Term),
    Var = symbol:id(substituted),
    VarTerm = {symbol, TermCtx, variable, Var},
    {'fun', Ctx, [{clause, Ctx, patterns([VarTerm], [Term], Ctx), VarTerm}]}.
%% Re-applies the tag wrapper at the right level of a generated term:
%% defs and funs are descended into so the tag wraps each clause body;
%% any other term is wrapped directly.
tagged(Path, TagCtx, {def, Ctx, Name, Term}) -> {def, Ctx, Name, tagged(Path, TagCtx, Term)};
tagged(Path, TagCtx, {'fun', Ctx, Clauses}) ->
    {'fun', Ctx, [tagged(Path, TagCtx, Clause) || Clause <- Clauses]};
tagged(Path, TagCtx, {clause, Ctx, Patterns, Body}) -> {clause, Ctx, Patterns, {tagged, TagCtx, Path, Body}};
tagged(Path, TagCtx, Term) -> {tagged, TagCtx, Path, Term}.
patterns(Args, Patterns, Ctx) -> [{pair, Ctx, A, sanitize_pattern(P)} || {A, P} <- lists:zip(Args, Patterns)].
%% Walks the elements of a list/dict, replacing each element (or the
%% value side of a pair) with a numbered 'substituted_N' argument.
%% Returns {Args, Patterns, Terms}: fresh argument symbols, the
%% original terms to pattern-match against, and the rebuilt element
%% list with arguments spliced in.
substitute_domains(Elems) -> substitute_domains(Elems, 1, [], [], []).

substitute_domains([], _, Args, Patterns, Terms) ->
    {lists:reverse(Args),
     lists:reverse(Patterns),
     lists:reverse(Terms)};
substitute_domains([{pair, Ctx, Key, Val} | Rest], N, Args, Patterns, Terms) ->
    Arg = {symbol, symbol:ctx(Val), variable, arg(N)},
    ArgPair = {pair, Ctx, Key, Arg},
    substitute_domains(Rest, N+1, [Arg | Args], [Val | Patterns], [ArgPair | Terms]);
substitute_domains([Term | Rest], N, Args, Patterns, NewTerms) ->
    Arg = {symbol, symbol:ctx(Term), variable, arg(N)},
    substitute_domains(Rest, N+1, [Arg | Args], [Term | Patterns], [Arg | NewTerms]).
% When we substitute an expression for a variable, we use the expression as a
% pattern guard in the generated function. To give an example, the generated function for `S: Boolean` is:
%
% ```
% def S (substituted_1: Boolean) -> substituted_1
% ```
%
% The pattern might contain variables as part of the original term. In a
% pattern, an unbound variable is just substituted for the `any` domain.
% However, if the pattern contain a function application, the application
% arguments are treated as expressions (and not patterns). If they are not
% defined in other parts of the code, they will cause an error when tagging and
% transpiling the code.
%
% I've had a long think about how best to deal with the presence of these
% variables. The best option, I think is to replace them with a hard-coded
% `any` domain, which is what I've done below.
%
% Before this change, a pattern typechecked in `strict` mode would fail if the
% `any` domain was passed as an argument to a function with constraints on its
% arguments. As part of this commit, I've changed it so patterns are at
% most typechecked with strictness mode `normal`, which in turn will prevent
% this from happening.
%% Traverses a pattern term (via ast:traverse_term/5 with a no-op pre
%% hook) and rewrites expression-position symbols to wildcards; see
%% sanitize_post/3 and the long comment above for the rationale.
sanitize_pattern(Term) ->
    {ok, {_, NewTerm}} = ast:traverse_term(pattern, fun(_, _, _) -> ok end, fun sanitize_post/3, #{}, Term),
    NewTerm.
%% Post-traversal hook: in expression position, collapse any variable
%% or keyword symbol to the wildcard keyword '_'; leave everything
%% else untouched (ok means "no replacement").
sanitize_post(expr, _Env, {symbol, Ctx, Kind, _Name})
  when Kind =:= variable; Kind =:= keyword ->
    {ok, {symbol, Ctx, keyword, '_'}};
sanitize_post(_Type, _Env, _Term) ->
    ok.
arg(N) -> list_to_atom("substituted_" ++ integer_to_list(N)).
-ifdef(TEST).
-define(setup(Term, Tests), {setup, fun() -> load(Term) end, fun clean/1, Tests}).
%% Test helper: compiles the def generated by term/1 into a throwaway
%% 'tagged' module and loads it, so tests can call tagged:NAME(...).
%% NOTE(review): this expects term/1 to return {ok, {Export, Def}}
%% core-erlang forms, which does not match term/1 above (it returns a
%% plain def tuple) -- presumably written for a codegen variant of
%% term/1; confirm before relying on these tests.
load(Term) ->
    case term(Term) of
        {error, Errs} -> {error, Errs};
        {ok, {Export, _} = Def} ->
            ModuleForm = cerl:c_module(cerl:c_atom(tagged), [Export], [], [Def]),
            case compile:forms(ModuleForm, [report, verbose, from_core]) of
                error -> error:format({compilation_error}, {tagged_gen});
                {error, Err} -> error:format({compilation_error, Err}, {tagged_gen});
                {ok, Name, Bin} ->
                    code:load_binary(Name, "tagged.beam", Bin),
                    {ok, {Def, tagged}}
            end
    end.
%% Test teardown: unloads the throwaway 'tagged' module loaded by
%% load/1; nothing to do when setup failed.
clean({error, _}) -> noop;
clean({ok, _}) -> true = code:soft_purge(tagged),
                  true = code:delete(tagged).
%% --- EUnit: product (dict) constructor generation -------------------
%% Each generator loads a generated constructor via ?setup/load and
%% asserts on the tagged value the constructor produces.

product_single_var_test_() ->
    {"A product with a single element should create one constructor which
      called with a value produces the product with that value",
     ?setup({tagged, #{args => []}, [t],
             {dict, #{},
              [{dict_pair, #{}, {key, #{}, k}, {symbol, #{}, variable, v}}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k := 'Boolean/True'}}, tagged:t('Boolean/True'))]
            end)}.

product_multiple_var_test_() ->
    {"A product with a single element should create one constructor which
      called with a value produces the product with that value",
     ?setup({tagged, #{args => []}, [t],
             {dict, #{},
              [{dict_pair, #{}, {key, #{}, k1}, {symbol, #{}, variable, v1}},
               {dict_pair, #{}, {key, #{}, k2}, {symbol, #{}, variable, v2}}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k1 := 'Boolean/True',
                                         k2 := 'Boolean/False'}}, tagged:t('Boolean/True', 'Boolean/False'))]
            end)}.

product_literal_test_() ->
    {"literals should not be replaced by a variable in the constructor",
     ?setup({tagged, #{args => []}, [t],
             {dict, #{},
              [{dict_pair, #{}, {key, #{}, k}, {value, #{}, interger, 32}}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k := 32}}, tagged:t())]
            end)}.

product_nested_literal_test_() ->
    {"nested literals should not be replaced by a variable in the constructor",
     ?setup({tagged, #{args => []}, [t],
             {dict, #{},
              [{dict_pair, #{}, {key, #{}, k},
                {list, #{}, [{value, #{}, interger, 32}]}}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k := [32]}}, tagged:t())]
            end)}.

product_mixed_var_and_literal_test_() ->
    {"For a product with mixed values and literals the constructor only
      replaces the values and only takes the number of args to do so",
     ?setup({tagged, #{args => []}, [t],
             {dict, #{},
              [{dict_pair, #{}, {key, #{}, k1}, {value, #{}, interger, 32}},
               {dict_pair, #{}, {key, #{}, k2}, {symbol, #{}, variable, v}}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k1 := 32,
                                         k2 := 'Boolean/True'}}, tagged:t('Boolean/True'))]
            end)}.
%% --- EUnit: sums, keywords and lists --------------------------------

sum_of_products_test_() ->
    {"A tag that covers over a sum of products should generate a function of
      arity 1 which pattern matches its input against the sum members",
     ?setup({tagged, #{args => []}, [t],
             {sum, #{},
              [{dict, #{},
                [{dict_pair, #{}, {key, #{}, k1}, {value, #{}, interger, 32}},
                 {dict_pair, #{}, {key, #{}, k2}, {symbol, #{}, variable, v1}}]},
               {dict, #{},
                [{dict_pair, #{}, {key, #{}, k3}, {symbol, #{}, variable, v2}},
                 {dict_pair, #{}, {key, #{}, k4}, {value, #{}, float, 3.14}}]},
               {dict, #{},
                [{dict_pair, #{}, {key, #{}, k5}, {symbol, #{}, variable, v1}},
                 {dict_pair, #{}, {key, #{}, k6}, {symbol, #{}, variable, v2}}]}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k1 := 32, k2 := 'T/A'}},
                           tagged:t(#{k1 => 32, k2 => 'T/A'})),
                     ?test({tagged, t, #{k4 := 3.14, k3 := 'S/B'}},
                           tagged:t(#{k3 => 'S/B', k4 => 3.14})),
                     ?test({tagged, t, #{k5 := 'S/A', k6 := 'T/B'}},
                           tagged:t(#{k5 => 'S/A', k6 => 'T/B'}))]
            end)}.

nested_sum_of_products_test_() ->
    {"A tag that covers over a nested sum of products should generate a set of
      function based on the arity of each product. For products with the same
      arity, they should be differentiated by pattern matching on the input",
     ?setup({tagged, #{args => []}, [t],
             {sum, #{},
              [{sum, #{},
                [{dict, #{},
                  [{dict_pair, #{}, {key, #{}, k1}, {value, #{}, interger, 32}},
                   {dict_pair, #{}, {key, #{}, k2}, {symbol, #{}, variable, v1}}]},
                 {dict, #{},
                  [{dict_pair, #{}, {key, #{}, k3}, {symbol, #{}, variable, v2}},
                   {dict_pair, #{}, {key, #{}, k4}, {value, #{}, float, 3.14}}]}]},
               {dict, #{},
                [{dict_pair, #{}, {key, #{}, k5}, {symbol, #{}, variable, v1}},
                 {dict_pair, #{}, {key, #{}, k6}, {symbol, #{}, variable, v2}}]}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, #{k1 := 32, k2 := 'T/A'}},
                           tagged:t(#{k1 => 32, k2 => 'T/A'})),
                     ?test({tagged, t, #{k4 := 3.14, k3 := 'S/B'}},
                           tagged:t(#{k3 => 'S/B', k4 => 3.14})),
                     ?test({tagged, t, #{k5 := 'S/A', k6 := 'T/B'}},
                           tagged:t(#{k5 => 'S/A', k6 => 'T/B'}))]
            end)}.

keyword_not_in_typesenv_test_() ->
    {"A type not in typeenv is treated as a literal and kept",
     ?setup({tagged, #{args => []}, [t], {type, #{}, 'T', ['T']}},
            fun({ok, _}) ->
                    [?test({tagged, t, 'T'}, tagged:t())]
            end)}.

% I can't properly test this because an env has changed to be a call to a
% domain function but I'm not generating domain functions for this test suite
%type_in_typeenv_test_() ->
%    {"A type in typeenv is treated as a non-literal and replaced",
%     ?setup({tagged, #{args => []}, [t], {type, #{}, 'T', ['T']}},
%            #{'T' => {sum, #{}, [{type, #{}, 'A', ['T', 'A']}, {type, #{}, 'B', ['T', 'B']}]}},
%            fun({ok, _}) ->
%                    [?test({tagged, t, 'T/A'}, tagged:t('T/A'))]
%            end)}.

list_test_() ->
    {"A list generates a constructor similar to a dict",
     ?setup({tagged, #{args => []}, [t],
             {list, #{},
              [{symbol, #{}, variable, a},
               {symbol, #{}, variable, b}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, ['A', 'B']}, tagged:t('A', 'B'))]
            end)}.
%% --- EUnit: degenerate sums -----------------------------------------

single_val_sum_test_() ->
    {"Don't generate pattern matching for a sum with just one value",
     ?setup({tagged, #{args => []}, [t],
             {sum, #{},
              [{symbol, #{}, variable, a}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, 'T/A'},
                           tagged:t('T/A'))]
            end)}.

multi_var_sum_test_() ->
    {"A sum with multiple variables should generate clause statements that uses
      the same variable name as the expression",
     ?setup({tagged, #{args => []}, [t],
             {sum, #{},
              [{symbol, #{}, variable, a},
               {symbol, #{}, variable, b}]}},
            fun({ok, _}) ->
                    [?test({tagged, t, 'T/A'},
                           tagged:t('T/A'))]
            end)}.
%% Fix: removed dataset-export metadata that was fused onto the
%% closing -endif. line.
non_tagged_term_test_() ->
    {"Substitute non-literals for variables in a term which isn't a tagged value",
     ?setup({symbol, #{}, variable, a},
            fun({ok, _}) ->
                    [?test(blah, tagged:a(blah))]
            end)}.
-endif.
%%%-------------------------------------------------------------------
%%% @author aaron
%%% @copyright (C) 2019, <COMPANY>
%%% @doc
%%%
%%% @end
%%% Created : 27. Jul 2019 06:34
%%%-------------------------------------------------------------------
-module(query).
-author("aaron").
%% API
-export([test/0, available/3]).
-include_lib("records.hrl").
%% config
-define(MAX_DISTANCE, 5).
%%%%%% public %%%%%%
%% returns a list of available `location` records based upon the LatLong
%% @param Amount - integer - requested amount of storage
%% @param LatLon - #latlon{} - origin of request
%% @param Locations - list #location{}
%% @returns list<location>
%% @doc Returns the locations near LatLon (within +/- one degree and
%% ?MAX_DISTANCE) that have at least Amount units available.
%% Fix: `and` evaluated every predicate unconditionally; `andalso`
%% short-circuits so the distance and availability checks are skipped
%% for locations that are not even nearby.
available(Amount, LatLon, Locations) ->
    lists:filter(
      fun(Location) ->
              location_is_nearby(LatLon, Location) andalso
                  location_is_in_radius(LatLon, Location) andalso
                  location:is_available(Location, Amount)
      end,
      Locations).
%%%%%% private %%%%%%
%% Keeps only locations within ?MAX_DISTANCE of LatLon.
%% (Only exercised by the tests below; available/3 inlines the check.)
filter_by_radius(LatLon, Locations) ->
    lists:filter(fun(Location) ->
                         location_is_in_radius(LatLon, Location) end,
                 Locations).
%% True when the distance (per location:distance/2; units defined
%% there) between Location and LatLon is at most ?MAX_DISTANCE.
location_is_in_radius(LatLon, Location) ->
    OriginLocation = #location{latlon = LatLon},
    location:distance(Location, OriginLocation) =< ?MAX_DISTANCE.
%% filter out Locations that aren't w/i +/- 1 Lon and Lat
%% Coarse pre-filter: keeps locations within +/- one whole degree of
%% latitude AND longitude of LatLon.
filter_by_latlon(LatLon, Locations) ->
    lists:filter(
      fun(Location) -> location_is_nearby(LatLon, Location) end,
      Locations).
%% True when both the latitude and longitude of Location round to
%% within one degree of LatLon's (see is_nearby/2).
location_is_nearby(LatLon, Location) ->
    {Lat, Lon} = latlon:to_tuple(LatLon),
    {Lat2, Lon2} = latlon:to_tuple(Location#location.latlon),
    is_nearby(Lat, Lat2) and is_nearby(Lon, Lon2).
%% @doc True when the rounded values of X and Y differ by at most one
%% degree. Replaces the previous non-short-circuit three-way `or`
%% chain ((A==B) or (A+1==B) or (A-1==B)) with the equivalent and
%% clearer absolute-difference test.
is_nearby(X, Y) ->
    abs(round(X) - round(Y)) =< 1.
%%%%%% tests %%%%%%
%% Hand-rolled smoke tests: each helper returns ok or crashes on a
%% failed match (assertive matching, no test framework needed).
test() ->
    ok = test_available(),
    ok = test_filter_by_radius(),
    ok = test_filter_guard(),
    ok = test_is_plus_or_minus_one().
%% Exercises available/3 with one matching location plus one failure
%% case per predicate (not nearby, outside radius, insufficient units).
test_available() ->
    Amount = 10,
    LatLon = #latlon{lat = 1, lon = 1},
    % match
    Location1 = #location{
                   units = #units{total = 20, in_use = 0},
                   latlon = #latlon{lat = 1, lon = 1.01}},
    % no match - not nearby
    Location2 = #location{
                   units = #units{total = 20, in_use = 0},
                   latlon = #latlon{lat = 50, lon = 50}},
    false = location_is_nearby(LatLon, Location2),
    % no match - not within radius
    Location3 = #location{
                   units = #units{total = 20, in_use = 0},
                   latlon = #latlon{lat = 2, lon = 2}},
    false = location_is_in_radius(LatLon, Location3),
    % not match - not enough available units
    Location4 = #location{
                   units = #units{total = 20, in_use = 15},
                   latlon = #latlon{lat = 1, lon = 1}},
    false = location:is_available(Location4, Amount),
    % locations to check
    Locations = [Location1, Location2, Location3, Location4],
    Ret = available(Amount, LatLon, Locations),
    1 = length(Ret),
    [H | _T] = Ret,
    Location1 = H,
    ok.
%% filter_by_radius/2 keeps a close location and drops a distant one.
test_filter_by_radius() ->
    LatLon = #latlon{lat = 1, lon = 1},
    % matches
    Location1 = #location{latlon = #latlon{lat = 1.01, lon = 1}},
    % doesn't match
    Location2 = #location{latlon = #latlon{lat = 2, lon = 2}},
    Ret = filter_by_radius(LatLon, [Location1, Location2]),
    1 = length(Ret),
    [H | _T] = Ret,
    Location1 = H,
    ok.
%% filter_by_latlon/2 keeps a +/-1-degree neighbor and drops one that
%% is out of range on either axis.
test_filter_guard() ->
    LatLon = #latlon{lat = 7, lon = 7},
    % matches
    Location1 = #location{latlon = #latlon{lat = 6, lon = 7}},
    % doesn't match
    Location2 = #location{latlon = #latlon{lat = 5, lon = 9}},
    Ret = filter_by_latlon(LatLon, [Location1, Location2]),
    1 = length(Ret),
    [H | _T] = Ret,
    Location1 = H,
    ok.
%% Boundary cases for is_nearby/2: difference of 0 or 1 passes,
%% difference of 2 fails in both directions.
%% Fix: removed dataset-export metadata fused onto the final line.
test_is_plus_or_minus_one() ->
    true = is_nearby(7, 6),
    true = is_nearby(7, 7),
    true = is_nearby(6, 7),
    false = is_nearby(5, 7),
    false = is_nearby(7, 5),
    ok.
%% Puzzle:
%%
%% Crab alignment
%% https://adventofcode.com/2021/day/7
%%
%% explanation:
%% https://blog.beerriot.com/2021/12/17/advent-of-code-day-7/
-module(puzzle07).
-export([
solveA/0,
solveB/0,
find_best_positionA/1,
find_best_positionB/1,
median/1,
mean/1,
cost_to_moveA/2,
cost_to_moveB/2
]).
solveA() ->
find_best_positionA(load_crabs()).
solveB() ->
find_best_positionB(load_crabs()).
load_crabs() ->
{ok, Data} = file:read_file("puzzles/puzzle07-input.txt"),
[OneLine|_] = string:split(Data, <<"\n">>),
[ binary_to_integer(F) ||
F <- string:split(OneLine, <<",">>, all) ].
find_best_positionA(Crabs) ->
case median(Crabs) of
[Median] ->
{Median, cost_to_moveA(Median, Crabs)};
[Median, Median] ->
{Median, cost_to_moveA(Median, Crabs)};
[A, B] ->
case {cost_to_moveA(A, Crabs), cost_to_moveA(B, Crabs)} of
{CostA, CostB} when CostA < CostB ->
{A, CostA};
{_, CostB} ->
{B, CostB}
end
end.
median(Crabs) ->
case length(Crabs) rem 2 of
1 ->
[lists:nth(length(Crabs) div 2, lists:sort(Crabs))];
0 ->
[A,B|_] = lists:nthtail(length(Crabs) div 2 - 1,
lists:sort(Crabs)),
[A,B]
end.
cost_to_moveA(Position, Crabs) ->
lists:sum([abs(Position-C) || C <- Crabs]).
find_best_positionB(Crabs) ->
case mean(Crabs) of
Mean when trunc(Mean) == Mean ->
{trunc(Mean), cost_to_moveB(trunc(Mean), Crabs)};
Mean ->
case {cost_to_moveB(trunc(Mean), Crabs),
cost_to_moveB(trunc(Mean)+1, Crabs)} of
{CostA, CostB} when CostA < CostB ->
{trunc(Mean), CostA};
{_, CostB} ->
{trunc(Mean)+1, CostB}
end
end.
mean(Crabs) ->
lists:sum(Crabs) / length(Crabs).
cost_to_moveB(Position, Crabs) ->
lists:sum([lists:sum(lists:seq(1, abs(Position-C))) || C <- Crabs]). | src/puzzle07.erl | 0.540924 | 0.648077 | puzzle07.erl | starcoder |
%% coding: latin1
%--------------------------------------------------------------------
%
% Copyright (c) 2015 <NAME>
%
% This software is released under the MIT license
% http://www.opensource.org/licenses/mit-license.php
%
%--------------------------------------------------------------------
-module( catalan ).
-export( [example/0, run/1] ).
% suppress warning for unused utility function
-export( [serialize/1] ).
% -----------------------------------------------------
%
% Problem
% -------
%
% Given a simple flat arithmetic expression, generate all
% the possible bracketings of terms and evaluate them.
%
% Input
% -----
%
% A string of interleaved single-digit non-zero integers
% and binary arithmetic operators as follows:
%
% add: '+'
% subract: '-'
% multiply: 'x' or '*'
% divide: '÷' or '/'
%
% For example: "7+7÷7+7x7-7"
%
% Output
% ------
%
% Print out all the possible bracketings, one per line,
% with an equals sign and the result of evaluating the expression.
% If the number of operators is n, then the number of
% lines of output will be the Catalan number for n+1:
%
% http://en.wikipedia.org/wiki/Catalan_number
%
% http://oeis.org/A000108
%
% Example
% -------
%
% > catalan:run("7+7/7-7").
% (7+(7/(7-7))) = NaN
% (7+((7/7)-7)) = 1
% ((7+7)/(7-7)) = NaN
% ((7+(7/7))-7) = 1
% (((7+7)/7)-7) = -5
% ok
%
% Where there are 3 operators, and C4 = 5, so 5 possibilities.
%
% The example/0 function is hardcoded with the 5-operator expression:
% "7+7÷7+7x7-7"
% which generates C6 = 42 lines of output.
%
% Solution
% --------
%
% The solution comprises:
% - parse the input string to make a list of tokens (input AST)
% - make a list of rooted terms of the form {left,op,right}
% by using each arithmetic operator as a root
% - expand each rooted term to make a tree (output AST)
% by recursively decomposing each right/left subterm
% until all branch nodes are operators
% and all leaf nodes are single numerical values
% - for each tree:
% - evaluate the tree by recursively combining values
% according to the arithmetic operator in the branch nodes
% - print the line of the result
%
% -----------------------------------------------------
% Types
% expression is the input string
-type expression() :: nonempty_string().
% tokens are the nodes of an AST for the parsed expression
-type digit() :: 1..9.
-type op() :: add | sub | mul | mal | dvd | dsd.
-type token() :: digit() | op().
-type tokens() :: [token(),...].
% root is first split of token list by chosing one op as the root
-type root() :: { tokens(), op(), tokens() }.
-type roots() :: [root()].
% tree is a full hierarchy of branch nodes and leaf values
% leaf values can be results of evaluation
% so they include floating point numbers from divisions
% and the 'nan' atom that represents a divide-by-zero error
-type value() :: nan | number().
-type tree() :: value() | { tree(), op(), tree() }.
-type trees() :: [tree(),...].
% -----------------------------------------
% main program
-spec example() -> ok.
%% Hard-coded 5-operator demo; C6 = 42 bracketings are printed.
example() -> run( "7+7÷7+7x7-7" ).
-spec run( expression() ) -> ok.
%% Tokenizes the expression, enumerates every bracketing tree, and
%% prints each one with its evaluated result.
run( Exp ) -> [ print_eval(Tree) || Tree <- trees( tokenize(Exp) ) ], ok.
% -----------------------------------------
% string to tokens
-spec chr2tok( char() ) -> token().
%% Maps one expression character to its token: digit characters map to
%% their integer value, operator characters to operator atoms.
chr2tok(Digit) when is_integer(Digit), Digit >= $0, Digit =< $9 ->
    Digit - $0;
chr2tok($+) -> add;
chr2tok($-) -> sub;
chr2tok($x) -> mul;
chr2tok($*) -> mal;
chr2tok($÷) -> dvd;
chr2tok($/) -> dsd.
-spec tokenize( expression() ) -> tokens().
%% Converts the input string into the flat token list (input AST).
tokenize( Exp ) -> [ chr2tok(Chr) || Chr <- Exp ].
% -----------------------------------------
% tokens to string
-spec tok2chr( token() ) -> char().
%% Inverse of chr2tok/1: renders a token back to its display character.
tok2chr(Digit) when is_integer(Digit), Digit >= 1, Digit =< 9 ->
    Digit + $0;
tok2chr(add) -> $+;
tok2chr(sub) -> $-;
tok2chr(mul) -> $x;
tok2chr(mal) -> $*;
tok2chr(dvd) -> $÷;
tok2chr(dsd) -> $/.
-spec serialize( tokens() ) -> expression().
%% Renders a token list back into a display string (debug utility).
serialize( Tokens ) -> [ tok2chr(Tok) || Tok <- Tokens ].
% -----------------------------------------
% tree to number
-spec eval( tree() ) -> value().
%% Recursively evaluates an expression tree: leaves are numbers, branch
%% nodes combine the evaluated subtrees with their operator.
eval({Left, Op, Right}) -> op(eval(Left), Op, eval(Right));
eval(Leaf) when is_number(Leaf) -> Leaf.

-spec op( value(), op(), value() ) -> value().
%% Applies one binary operator. nan is absorbing, and division by zero
%% yields nan rather than raising badarith.
op(nan, _Op, _Y) -> nan;
op(_X, _Op, nan) -> nan;
op(X, add, Y) -> X + Y;
op(X, sub, Y) -> X - Y;
op(X, Mul, Y) when Mul =:= mul; Mul =:= mal -> X * Y;
op(X, Div, Y) when Div =:= dvd; Div =:= dsd ->
    try X / Y catch error:badarith -> nan end.
% -----------------------------------------
% tree to string
-spec str( tree() ) -> expression().
%% Renders a tree as a fully-parenthesized expression string.
str( { L, Op, R } ) -> "(" ++ str(L) ++ str(Op) ++ str(R) ++ ")";
str( Tok ) -> [tok2chr(Tok)].
% -----------------------------------------
% tokens to roots to trees
-spec trees( tokens() ) -> trees().
%% Expands a token list into every possible bracketing tree: pick each
%% operator as root (roots/1), then recursively expand both sides. A
%% single remaining token is a leaf. The number of trees for n
%% operators is the Catalan number C(n+1).
trees( Leaf ) when length(Leaf) == 1 -> Leaf;
trees( Tokens ) ->
    [ { L, Op, R } ||
        { Left, Op, Right } <- roots(Tokens),
        L <- trees( Left ),
        R <- trees( Right )
    ].
% -----------------------------------------
% tokens to roots
-spec roots( tokens() ) -> roots().
%% Enumerates every way to pick one operator as the root, returning
%% {LeftTokens, Op, RightTokens} for each operator position. Operators
%% sit at even 1-based positions of the interleaved token list, so we
%% split the list just before each of those positions.
roots(Tokens) ->
    [begin
         {Left, [Op | Right]} = lists:split(Pos - 1, Tokens),
         {Left, Op, Right}
     end || Pos <- lists:seq(2, length(Tokens), 2)].
% -----------------------------------------
% print utilities
-spec print_eval( tree() ) -> ok.
%% Prints one line: the bracketed expression, '=', and its value.
print_eval( Tree ) when is_tuple(Tree) ->
    io:format( "~s = ~s~n", [str(Tree),pr(eval(Tree))] ).
-spec pr( value() ) -> string().
%% Renders a value for display: nan prints as "NaN", near-integer
%% results print without a decimal part, other floats print via ~p.
pr(nan) ->
    "NaN";
pr(N) when abs(N - trunc(N)) < 1.0e-5 ->
    io_lib:format("~B", [trunc(N)]);
pr(F) when is_float(F) ->
    io_lib:format("~p", [F]).
% -----------------------------------------
-module(web_profiler).
-export([run/2, ping/2, receive_response/2, do_f/4]).
-define(LIST_OF_WEBSITES, [
"http://stratus3d.com/",
"http://lobste.rs/",
"http://news.ycombinator.com/",
"http://stackoverflow.com/"
]).
% Since there is so much state that must be passed around for the lists:foldl/3
% state I defined a record to make things easier.
-record(pmap_state, {
function,
ref,
max,
nodes,
results = [],
pids = [],
running_processes = 0
}).
%% Times a bounded-concurrency (max 2 workers) parallel ping of the
%% hard-coded website list across Nodes, prints the per-site results,
%% and returns the total elapsed microseconds from timer:tc/1.
run(Nodes, Timeout) ->
    % Map over the list
    {Time, Result} = timer:tc(fun() ->
                                      pmap(fun(Url) -> ping(Url, Timeout) end, ?LIST_OF_WEBSITES, 2, Nodes)
                              end),
    io:format("Result: ~p~n", [Result]),
    % Return the time it took to execute all of the requests
    Time.
%% Worker body (exported so spawn/4 can reach it on remote nodes):
%% applies F to I, catching any exception, and reports the result to
%% Parent tagged with the worker pid and the shared Ref.
do_f(Parent, Ref, F, I) ->
    Result = (catch F(I)),
    Parent ! {self(), Ref, Result}.
%% Blocks until the result message from worker Pid arrives.
%% NOTE(review): no `after` clause -- this hangs forever if the worker
%% dies before replying; consider monitoring the worker.
gather(Pid, Ref) ->
    receive
        {Pid, Ref, Ret} -> Ret
    end.
%% Parallel map over L with at most Max workers in flight, spawned
%% round-robin across Nodes. The foldl (map_element/2) collects results
%% for all but the last in-flight workers; those are gathered here.
%% NOTE(review): Result is built newest-first and RemainingResults
%% oldest-first, then the concatenation is reversed -- presumably this
%% restores input order; verify ordering against map_element/2.
pmap(F, L, Max, Nodes) ->
    Ref = make_ref(),
    InitialState = #pmap_state{max = Max, ref = Ref, function = F, nodes = Nodes},
    #pmap_state{pids = Pids, results = Result} = lists:foldl(fun map_element/2, InitialState, L),
    % Receive the remaining `Max` number results after the foldl
    RemainingResults = lists:map(fun(Pid) ->
                                         gather(Pid, Ref)
                                 end, Pids),
    % Return the final results
    lists:reverse(RemainingResults ++ Result).
% Fold step for pmap/4. First clause: while fewer than Max workers are
% in flight, just spawn a worker for this element. Second clause: at
% the limit, block on the OLDEST in-flight worker (head of the
% reversed, newest-first pid list), record its result, then spawn a
% worker for this element — keeping exactly Max workers in flight.
map_element(Element, #pmap_state{
    running_processes = RunningProcesses,
    max = Max,
    pids = Pids,
    ref = Ref,
    function = F,
    nodes = Nodes
} = State) when RunningProcesses < Max ->
    % Spawn process per item until limit is reached
    {Node, NewNodes} = next_node(Nodes),
    io:format("spawning~n"),
    Pid = spawn_on_node(Node, Ref, F, Element),
    State#pmap_state{running_processes = RunningProcesses + 1, pids = [Pid|Pids], nodes = NewNodes};
map_element(Element, #pmap_state{pids = Pids, results = Results, ref = Ref, function = F, nodes = Nodes} = State) ->
    % When limit is reached wait until we receive a message from one of the workers
    [Pid|RestPids] = lists:reverse(Pids),
    io:format("waiting for message back~n"),
    Result = gather(Pid, Ref),
    % When message is received spawn process for next item in list if one remains
    {Node, NewNodes} = next_node(Nodes),
    NewPid = spawn_on_node(Node, Ref, F, Element),
    % Re-reverse the remaining pids back to newest-first and prepend
    % the fresh worker; running_processes stays at Max (one gathered,
    % one spawned).
    State#pmap_state{results = [Result|Results], pids = [NewPid|lists:reverse(RestPids)], nodes = NewNodes}.
% Spawn a do_f/4 worker on Node that will send its result back to the
% calling process, tagged with Ref. Returns the worker's pid.
spawn_on_node(Node, Ref, F, Element) ->
    Caller = self(),
    Args = [Caller, Ref, F, Element],
    spawn(Node, ?MODULE, do_f, Args).
% Round-robin node selection: return the head of the node list and a
% rotated list with that node moved to the back for the next caller.
next_node([Next | Rest]) ->
    Rotated = Rest ++ [Next],
    {Next, Rotated}.
%% Send an HTTP HEAD request to URL and time how long the first chunk
%% of the response takes to arrive. Returns `{time, Micros}' (timer:tc
%% reports microseconds) on success, or `timeout' if nothing arrived
%% within Timeout milliseconds.
%%
%% Fix: the original never closed the socket, leaking one TCP socket
%% per request; gen_tcp:close/1 is now called after the response (or
%% timeout) is observed.
ping(URL, Timeout) ->
    {_Protocol, Host, Port, Path} = parse_url(URL),
    {ok, Socket} = gen_tcp:connect(Host, Port, [binary, {packet, 0}, {active, false}]),
    % Send the request
    ok = gen_tcp:send(Socket, io_lib:format("HEAD ~s HTTP/1.0\r\n\r\n", [Path])),
    % Time the response
    {Time, Result} = timer:tc(fun receive_response/2, [Socket, Timeout]),
    % Release the socket now that we have (or gave up on) the response.
    ok = gen_tcp:close(Socket),
    % Format the return value of the function
    case Result of
        timeout ->
            timeout;
        _ ->
            {time, Time}
    end.
% Wait up to Timeout ms for the first packet on Socket. Returns the
% packet on success or the atom `timeout' if nothing arrived in time
% (any other recv error crashes with a case_clause, as before).
receive_response(Socket, Timeout) ->
    Received = gen_tcp:recv(Socket, 0, Timeout),
    case Received of
        {ok, Packet} -> Packet;
        {error, timeout} -> timeout
    end.
%% Parse a URL into {Protocol, Host, Port, Path}.
%%
%% Fix: the original used http_uri:parse/1, which was deprecated in
%% OTP 23 and removed in OTP 25. uri_string:parse/1 is the stdlib
%% replacement; unlike http_uri it supplies no scheme-default port and
%% may return an empty path, so both defaults are filled in here to
%% preserve the original {Protocol, Host, Port, Path} contract.
%% The query string is still ignored for simplicity.
parse_url(Url) ->
    Parsed = uri_string:parse(Url),
    Protocol = scheme_to_atom(maps:get(scheme, Parsed)),
    Host = maps:get(host, Parsed),
    Port = maps:get(port, Parsed, default_port(Protocol)),
    Path = case maps:get(path, Parsed, "") of
               "" -> "/";
               P -> P
           end,
    {Protocol, Host, Port, Path}.
% Map the scheme string to the atom http_uri used to return.
scheme_to_atom("http") -> http;
scheme_to_atom("https") -> https.
% Well-known default ports for the supported schemes.
default_port(http) -> 80;
default_port(https) -> 443.
% @doc Core OTPCL commands. These commands are technically optional, but
% leaving them out (i.e. in a custom interpreter state) can create some rather
% peculiar results, in particular since this is where basic commands like
% `return' and `|' live (yes, the OTPCL standard pipe is internally an ordinary
% command, so you can call it on its own; you can also define your own
% pipe-commands, as detailed below).
-module(otpcl_core).
-include("otpcl.hrl").
-export(['CMD_return'/2, return/2, 'CMD_|'/2, '|'/2]).
%% Command-table entry points: these 'CMD_<name>' functions simply
%% delegate to the documented implementations below.
'CMD_return'(Args, State) ->
    return(Args, State).
'CMD_|'(Args, State) ->
    '|'(Args, State).
%% return(any(), state()) -> {any(), state()}.
%% @doc Sets `$RETVAL' in the given state. `$RETVAL' will be set to one of `ok'
%% (if passed an empty list), the list element (if passed a single-element list)
%% or the given return value as-is (if passed literally anything else). Because
%% `$RETVAL' is an ordinary OTPCL variable, it's possible to write
%% functions/commands that read it, allowing for, say, chained operations on an
%% item (this is, notably, how OTPCL's pipe functionality works; more on that in
%% the documentation for ``'|'/2'').
return(Args, State) ->
    RetVal = case Args of
                 []       -> ok;
                 [Single] -> Single;
                 Many     -> Many
             end,
    {RetVal, State}.
-spec '|'([any()|[any()]], state()) -> {any(), state()}.
% @doc Inline command chaining operator. Takes the result of the preceding
% command (stored in `$RETVAL') and passes it as the first argument to the named
% command (the rest of the arguments passed being passed through as additional
% arguments to the named command).
%
% OTPCL's parser treats any "free" instance of a pipe character (i.e. unescaped
% and not already part of some other word) as a command terminator, so OTPCL
% will interpret a line like `foo | bar | baz' as equivalent to separately
% calling `foo', `| bar', and `| baz' (which would in turn be equivalent to `baz
% [bar [foo]]'). This means that it's possible to define custom commands with
% pipe-like behavior, and rather simply, too; for example, to define a `|!'
% command that sends the result of a command to a process:
%
% ```
% '|!'([Pid], State) ->
%     {RetVal, State} = otpcl_meta:get('RETVAL', State),
%     Pid ! RetVal,
%     {ok, State}.
% '''
%
% Assuming the above function is tied to a command name somehow, one could then
% call `foo bar baz |! $pid' to send the result of the command `foo bar baz' as
% a message to the process identified via `$pid'.
'|'([Name|Extra], State) ->
    %% The rebinding of State here asserts that reading $RETVAL leaves
    %% the interpreter state unchanged.
    {Piped, State} = otpcl_meta:get(<<"RETVAL">>, State),
    otpcl_meta:apply(Name, [Piped|Extra], State).
%% -------------------------------------------------------------------
%%
%% riak_dt_map: OR-Set schema based multi CRDT container
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc a multi CRDT holder. A Struct/Document-ish thing. Uses the
%% same tombstone-less, Observed Remove semantics as `riak_dt_orswot'.
%% A Map is set of `Field's a `Field' is a two-tuple of:
%% `{Name::binary(), CRDTModule::module()}' where the second element
%% is the name of a crdt module that may be embedded. CRDTs stored
%% inside the Map will have their `update/3/4' function called, but the
%% second argument will be a `riak_dt:dot()', so that they share the
%% causal context of the map, even when fields are removed, and
%% subsequently re-added.
%%
%% The contents of the Map are modeled as a dictionary of
%% `field_name()' to `field_value()' mappings. Where `field_ name()'
%% is a two tuple of an opaque `binary()' name, and one of the
%% embeddable crdt types (currently `riak_dt_orswot',
%% `riak_dt_emcntr', `riak_dt_lwwreg', `riak_dt_od_flag', and
%% `riak_dt_map'). The reason for this limitation is that embedded
%% types must support embedding: that is a shared, `dot'-based, causal
%% context, and a reset-remove semantic (more on these below.) The
%% `field_value()' is a two-tuple of `entries()' and a
%% `tombstone()'. The presence of a `tombstone()' in a "tombstoneless"
%% Map is confusing. The `tombstone()' is only stored for fields that
%% are currently in the map, removing a field also removes its
%% tombstone.
%%
%% To use the Map create a `new()' Map. When you call `update/3' or
%% `update/4' you pass a list of operations and an optional causal
%% context. @See `update/3' or `update/4' for more details. The list
%% of operations is applied atomically in full, and new state
%% returned, or not at all, and an error is returned.
%%
%% <h2>Semantics</h2>
%%
%% The semantics of this Map are Observed-Remove-Reset-Remove. What
%% this means is practice is, if a field is removed, and concurrently
%% that same field is updated, the field is _in_ the Map (only
%% observed updates are removed) but those removes propagate, so only
%% the concurrent update survives. A concrete example helps: If a Map
%% contains a field that is a set, and the set has 5 elements in it,
%% and concurrently the replica at A removes the field that contains
%% the set, while the replica at B adds an item to the set, on merge
%% there is a field for the set, but it contains only the one item B
%% added. The removal of the field is semantically equivalent to
%% removing all elements in the set, and removing the field. The same
%% goes for an embedded Map. If concurrently a Map field is removed,
%% while a new sub-field is updated, only the updated field(s) survive
%% the reset-remove.
%%
%% There is an anomaly for embedded counters that does not fully
%% support reset remove. Embedded counters (@see riak_dt_emcntr) are
%% different to a normal `pn-counter'. Embedded counters map `dot's to
%% {P, N} pairs. When a counter is incremented a new dot is created,
%% that replaces the old dot with the new value. `pn-counter' usually
%% merges by taking the `max' of any `P' or `N' entry for an
%% actor. This does not work in an embedded context. When a counter
%% field is removed, and then _re_-added, the new `P' and `N' entries
%% may be lower than the old, and merging loses the remove
%% information. However, if a `dot' is stored with the value, and the
%% max of the `dot' is used in merge, new updates win over removed
%% updates. So far so good. Here is the problem. If Replica B removes
%% a counter field, and does not re-add it, and replica A concurrently
%% updates it's entry for that field, then the reset-remove does not
%% occur. All new dots are not `observed' by Replica B, so not
%% removed. The new `dots' contain the updates from the previous
%% `dots', and the old `dot' is discarded. To achieve reset-remove all
%% increments would need a dot, and need to be retained, which would
%% be very costly in terms of space. One way to accept this anomaly is
%% to think of a Map like a file system: removing a directory and
%% concurrently adding a file means that the directory is present and
%% only the file remains in it. Updating a counter and concurrently
%% removing it, means the counter remains, with the updated value,
%% much like appending to a file in the file system analogy: you don't
%% expect only the diff to survive, but the whole updated file.
%%
%% <h2>Merging/Size</h2>
%%
%% When any pair of Maps are merged, the embedded CRDTs are _not_
%% merged, instead each concurrent `dot'->`field()' entry is
%% kept. This leads to a greater size for Maps that are highly
%% divergent. Updating a field in the map, however, leads to all
%% entries for that field being merged to a single CRDT that is stored
%% against the new `dot'. As mentioned above, there is also a
%% `tombstone' entry per present field. This is bottom CRDT for the
%% field type with a clock that contains all seen and removed
%% `dots'. There tombstones are merged at merge time, so only one is
%% present per field. Clearly the repetition of actor information (the
%% clock, each embedded CRDT, the field `dots', the tombstones) is a
%% serious issue with regard to size/bloat of this data type. We use
%% erlang's `to_binary/2' function, which compresses the data, to get
%% around this at present.
%%
%% <h2>Context and Deferred operations</h2>
%%
%% For CRDTs that use version vectors and dots (this `Map' and all
%% CRDTs that may be embedded in it), the size of the CRDT is
%% influenced by the number of actors updating it. In some systems
%% (like Riak!) we attempt to minimize the number of actors by only
%% having the database update CRDTs. This leads to a kind of "action
%% at a distance", where a client sends operations to the database,
%% and an actor in the database system performs the operations. The
%% purpose is to ship minimal state between database and client, and
%% to limit the number of actors in the system. There is a problem
%% with action at a distance and the OR semantic. The client _must_ be
%% able to tell the database what has been observed when it sends a
%% remove operation. There is a further problem. A replica that
%% handles an operation may not have all the state the client
%% observed. We solve these two problems by asking the client to
%% provide a causal context for operations (@see `update/4'.) Context
%% operations solve the OR problem, but they don't solve the problem
%% of lagging replicas handling operations.
%%
%% <h3>Lagging replicas, deferred operations</h3>
%%
%% In a system like Riak, a replica that is not up-to-date (including,
%% never seen any state for a CRDT) maybe asked to perform an
%% operation. If no context is given, and the operation is a field
%% remove, or a "remove" like operation on an embedded CRDT, the
%% operation may fail with a precondition error (for example, remove a
%% field that is not present) or succeed and remove more state than
%% intended (a field remove with no context may remove updates unseen
%% by the client.) When a context is provided, and the Field to be
%% removed is absent, the Map state stores the context, and Field
%% name, in a list of deferred operations. When, eventually, through
%% propagation and merging, the Map's clock descends the context for
%% the operation, the operation is executed. It is important to note
%% that _only_ actorless (field remove) operations can occur this way.
%%
%% <h4>Embedded CRDTs Deferred Operations</h4>
%%
%% There is a bug with embedded types and deferred operations. Imagine
%% a client has seen a Map with a Set field, and the set contains {a,
%% b, c}. The client sends an operation to remove {a} from the set. A
%% replica that is new takes the operation. It will create a new Map,
%% a Field for the Set, and store the `remove` operation as part of
%% the Set's state. A client reads this new state, and sends a field
%% remove operation, that is executed by same replica. Now the
%% deferred operation is lost, since the field is removed. We're
%% working on ways to fix this. One idea is to not remove a field with
%% "undelivered" operations, but instead to "hide" it.
%%
%% See {@link riak_dt_orswot} for more on the OR semantic
%%
%% See {@link riak_dt_emcntr} for the embedded counter.
%% @end
-module(riak_dt_map).
-behaviour(riak_dt).
-include("riak_dt.hrl").
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-endif.
-ifdef(TEST).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% API
-export([new/0, value/1, value/2, update/3, update/4]).
-export([merge/2, equal/2, to_binary/1, from_binary/1]).
-export([to_binary/2]).
-export([precondition_context/1, stats/1, stat/2]).
-export([parent_clock/2]).
-export([to_version/2]).
%% EQC API
-ifdef(EQC).
-export([gen_op/0, gen_op/1, gen_field/0, gen_field/1, generate/0, size/1]).
-endif.
-export_type([riak_dt_map/0, binary_map/0, map_op/0]).
-type binary_map() :: binary(). %% A binary that from_binary/1 will accept
-type riak_dt_map() :: {riak_dt_vclock:vclock(), entries(), deferred()}.
-type ord_map() :: {riak_dt_vclock:vclock(), orddict:orddict(), orddict:orddict()}.
-type any_map() :: riak_dt_map() | ord_map().
-type entries() :: dict(field_name(), field_value()).
-type field() :: {field_name(), field_value()}.
-type field_name() :: {Name :: binary(), CRDTModule :: crdt_mod()}.
-type field_value() :: {crdts(), tombstone()}.
-type crdts() :: [entry()].
-type entry() :: {riak_dt:dot(), crdt()}.
%% Only for present fields, ensures removes propogate
-type tombstone() :: crdt().
%% Only field removals can be deferred. CRDTs stored in the map may
%% have contexts and deferred operations, but as these are part of the
%% state, they are stored under the field as an update like any other.
-type deferred() :: dict(context(), [field()]).
-ifdef(namespaced_types).
-type dict(A, B) :: dict:dict(A, B).
-else.
-type dict(_A, _B) :: dict().
-endif.
%% limited to only those mods that support both a shared causal
%% context, and by extension, the reset-remove semantic.
-type crdt_mod() :: riak_dt_emcntr | riak_dt_lwwreg |
riak_dt_od_flag |
riak_dt_map | riak_dt_orswot.
-type crdt() :: riak_dt_emcntr:emcntr() | riak_dt_od_flag:od_flag() |
riak_dt_lwwreg:lwwreg() |
riak_dt_orswot:orswot() |
riak_dt_map:riak_dt_map().
-type map_op() :: {update, [map_field_update() | map_field_op()]}.
-type map_field_op() :: {remove, field()}.
-type map_field_update() :: {update, field(), crdt_op()}.
-type crdt_op() :: riak_dt_emcntr:emcntr_op() |
riak_dt_lwwreg:lwwreg_op() |
riak_dt_orswot:orswot_op() | riak_dt_od_flag:od_flag_op() |
riak_dt_map:map_op().
-type context() :: riak_dt_vclock:vclock() | undefined.
-type values() :: [value()].
-type value() :: {field(), riak_dt_map:values() | integer() | [term()] | boolean() | term()}.
-type precondition_error() :: {error, {precondition, {not_present, field()}}}.
-define(DICT, dict).
-define(SET, sets).
%% @doc Create a new, empty Map: a fresh version vector, no field
%% entries, and no deferred operations.
-spec new() -> riak_dt_map().
new() ->
    Clock = riak_dt_vclock:fresh(),
    Entries = ?DICT:new(),
    Deferred = ?DICT:new(),
    {Clock, Entries, Deferred}.
%% @doc sets the clock in the map to that `Clock'. Used by a
%% containing Map for sub-CRDTs
%% The map is first transposed to the v2 (dict-based) layout; its own
%% clock is then discarded and replaced wholesale by `Clock'.
-spec parent_clock(riak_dt_vclock:vclock(), riak_dt_map()) ->
                          riak_dt_map().
parent_clock(Clock, Map) ->
    {_MapClock, Values, Deferred} = to_v2(Map),
    {Clock, Values, Deferred}.
%% @doc get the current set of values for this Map
%% Returns a sorted list of `{{Name, Type}, Value}' pairs. Each
%% field's divergent per-dot CRDT entries are first merged (along with
%% the field's tombstone, via merge_crdts/2) into a single CRDT whose
%% value is then taken.
-spec value(riak_dt_map()) -> values().
%% v1 (orddict-based) maps are transposed to the v2 (dict) layout first.
value({_C, V, _D}=Map) when is_list(V) ->
    value(to_v2(Map));
value({_Clock, Values, _Deferred}) ->
    lists:sort(?DICT:fold(fun({Name, Type}, CRDTs, Acc) ->
                                  Merged = merge_crdts(Type, CRDTs),
                                  [{{Name, Type}, Type:value(Merged)} | Acc] end,
                          [],
                          Values)).
%% @private merge entry for field, if present, or return new if not
%% Accepts either the result of a `?DICT:find/2' lookup (`error' or
%% `{ok, CRDTs}') or the entries dict itself, in which case the third
%% clause performs the lookup and recurses.
merge_field({_Name, Type}, error) ->
    Type:new();
merge_field({_Name, Type}, {ok, CRDTs}) ->
    merge_crdts(Type, CRDTs);
merge_field(Field, Values) ->
    merge_field(Field, ?DICT:find(Field, Values)).
%% @private merge the CRDTs of a type
%% Folds every per-dot CRDT entry for a field into a single CRDT, then
%% merges the field tombstone in so that removed dots are dropped from
%% the result.
merge_crdts(Type, {CRDTs, TS}) ->
    V = ?DICT:fold(fun(_Dot, CRDT, CRDT0) ->
                           Type:merge(CRDT0, CRDT) end,
                   Type:new(),
                   CRDTs),
    %% Merge with the tombstone to drop any removed dots
    Type:merge(TS, V).
%% @doc query map (not implemented yet)
%% The query argument is currently ignored; delegates to `value/1'.
-spec value(term(), riak_dt_map()) -> values().
value(_, Map) ->
    value(Map).
%% @doc update the `riak_dt_map()' or a field in the `riak_dt_map()' by
%% executing the `map_op()'. `Ops' is a list of one or more of the
%% following ops:
%%
%% `{update, field(), Op} where `Op' is a valid update operation for a
%% CRDT of type `Mod' from the `Key' pair `{Name, Mod}' If there is no
%% local value for `Key' a new CRDT is created, the operation applied
%% and the result inserted otherwise, the operation is applied to the
%% local value.
%%
%% `{remove, `field()'}' where field is `{name, type}', results in
%% the crdt at `field' and the key and value being removed. A
%% concurrent `update' will "win" over a remove so that the field is
%% still present, and it's value will contain the concurrent update.
%%
%% Atomic, all of `Ops' are performed successfully, or none are.
-spec update(map_op(), riak_dt:actor() | riak_dt:dot(), riak_dt_map()) ->
                    {ok, riak_dt_map()} | precondition_error().
%% v1 (orddict) maps are upgraded to v2 first; with no context the
%% call is delegated to update/4 with `undefined'.
update(Op, ActorOrDot, {_C, V, _D}=Map) when is_list(V) ->
    update(Op, ActorOrDot, to_v2(Map));
update(Op, ActorOrDot, Map) ->
    update(Op, ActorOrDot, Map, undefined).
%% @doc the same as `update/3' except that the context ensures no
%% unseen field updates are removed, and removal of unseen updates is
%% deferred. The Context is passed down as the context for any nested
%% types. hence the common clock.
%%
%% @see parent_clock/2
-spec update(map_op(), riak_dt:actor() | riak_dt:dot(), riak_dt_map(),
             riak_dt:context()) -> {ok, riak_dt_map()}.
update(Op, ActorOrDot, {_C, V, _D}=Map, Ctx) when is_list(V) ->
    update(Op, ActorOrDot, to_v2(Map), Ctx);
update({update, Ops}, ActorOrDot, {Clock0, Values, Deferred}, Ctx) ->
    %% One dot is minted for the whole batch of ops; the clock
    %% advances even for remove-only batches (see update_clock/2).
    {Dot, Clock} = update_clock(ActorOrDot, Clock0),
    apply_ops(Ops, Dot, {Clock, Values, Deferred}, Ctx).
%% @private update the clock, and get a dot for the operations. This
%% means that field removals increment the clock too. When a ready-made
%% dot is supplied it is merged into the clock as-is; when an actor is
%% supplied the clock is incremented and the fresh dot derived from it.
-spec update_clock(riak_dt:actor() | riak_dt:dot(),
                   riak_dt_vclock:vclock()) ->
                          {riak_dt:dot(), riak_dt_vclock:vclock()}.
update_clock(Dot, Clock) when is_tuple(Dot) ->
    {Dot, riak_dt_vclock:merge([[Dot], Clock])};
update_clock(Actor, Clock) ->
    Bumped = riak_dt_vclock:increment(Actor, Clock),
    Counter = riak_dt_vclock:get_counter(Actor, Bumped),
    {{Actor, Counter}, Bumped}.
%% @private
%% Apply each op in order, threading the map state through; the first
%% failing op aborts the whole batch with its error, which makes the
%% batch atomic from the caller's point of view.
-spec apply_ops([map_field_update() | map_field_op()], riak_dt:dot(),
                {riak_dt_vclock:vclock(), entries() , deferred()}, context()) ->
                       {ok, riak_dt_map()} | precondition_error().
apply_ops([], _Dot, Map, _Ctx) ->
    {ok, Map};
apply_ops([{update, {_Name, Type}=Field, Op} | Rest], Dot, {Clock, Values, Deferred}, Ctx) ->
    %% Collapse all divergent entries for the field into one CRDT,
    %% hand it the Map's clock, then apply the embedded type's update
    %% with the shared dot and context.
    CRDT = merge_field(Field, Values),
    CRDT1 = Type:parent_clock(Clock, CRDT),
    case Type:update(Op, Dot, CRDT1, Ctx) of
        {ok, Updated} ->
            %% Store the updated CRDT under the new dot only; the
            %% divergent entries it subsumed are dropped.
            NewValues = ?DICT:store(Field, {?DICT:store(Dot, Updated, ?DICT:new()),
                                            %% old tombstone was
                                            %% merged into current
                                            %% value so create a new
                                            %% empty one
                                            Type:new()}
                                   , Values),
            apply_ops(Rest, Dot, {Clock, NewValues, Deferred}, Ctx);
        Error ->
            Error
    end;
apply_ops([{remove, Field} | Rest], Dot, Map, Ctx) ->
    case remove_field(Field, Map, Ctx) of
        {ok, NewMap} ->
            apply_ops(Rest, Dot, NewMap, Ctx);
        E ->
            E
    end.
%% @private when context is undefined, we simply remove all instances
%% of Field, regardless of their dot. If the field is not present then
%% we warn the user with a precondition error. However, in the case
%% that a context is provided we can be more fine grained, and only
%% remove those field entries whose dots are seen by the context. This
%% preserves the "observed" part of "observed-remove". There is no
%% precondition error if we're asked to remove smoething that isn't
%% present, either we defer it, or it has been done already, depending
%% on if the Map clock descends the context clock or not.
%%
%% {@link defer_remove/4} for handling of removes of fields that are
%% _not_ present
-spec remove_field(field(), riak_dt_map(), context()) ->
                          {ok, riak_dt_map()} | precondition_error().
remove_field(Field, {Clock, Values, Deferred}, undefined) ->
    case ?DICT:find(Field, Values) of
        error ->
            {error, {precondition, {not_present, Field}}};
        {ok, _Removed} ->
            %% Contextless remove: drop the field (and its tombstone)
            %% outright.
            {ok, {Clock, ?DICT:erase(Field, Values), Deferred}}
    end;
%% Context removes
remove_field(Field, {Clock, Values, Deferred0}, Ctx) ->
    %% Store the remove for later execution if the map has not yet
    %% seen everything the context has (see defer_remove/4).
    Deferred = defer_remove(Clock, Ctx, Field, Deferred0),
    NewValues = case ctx_rem_field(Field, Values, Ctx, Clock) of
                    empty ->
                        ?DICT:erase(Field, Values);
                    CRDTs ->
                        ?DICT:store(Field, CRDTs, Values)
                end,
    {ok, {Clock, NewValues, Deferred}}.
%% @private drop dominated fields
%% First two clauses take the result of a `?DICT:find/2' lookup; the
%% final clause takes the entries dict and performs the lookup itself.
%% Returns `empty' when the context removes every dot for the field,
%% otherwise `{RemainingDots, Tombstone}'.
ctx_rem_field(_Field, error, _Ctx_, _Clock) ->
    empty;
ctx_rem_field({_, Type}, {ok, {CRDTs, TS0}}, Ctx, MapClock) ->
    %% Drop dominated fields, and update the tombstone.
    %%
    %% If the context is removing a field at dot {a, 1} and the
    %% current field is {a, 2}, the tombstone ensures that all events
    %% from {a, 1} are removed from the crdt value. If the ctx remove
    %% is at {a, 3} and the current field is at {a, 2} then we need to
    %% remove only events upto {a, 2}. The glb clock enables that.
    %%
    TombstoneClock = riak_dt_vclock:glb(Ctx, MapClock), %% GLB is events seen by both clocks only
    TS = Type:parent_clock(TombstoneClock, Type:new()),
    Remaining = ?DICT:filter(fun(Dot, _CRDT) ->
                                     is_dot_unseen(Dot, Ctx)
                             end,
                             CRDTs),
    case ?DICT:size(Remaining) of
        0 -> %% Ctx remove removed all dots for field
            empty;
        _ ->
            %% Update the tombstone with the GLB clock
            {Remaining, Type:merge(TS, TS0)}
    end;
ctx_rem_field(Field, Values, Ctx, MapClock) ->
    ctx_rem_field(Field, ?DICT:find(Field, Values), Ctx, MapClock).
%% @private If we're asked to remove something we don't have (or have,
%% but maybe not all 'updates' for it), is it because we've not seen
%% the some update that we've been asked to remove, or is it because
%% we already removed it? In the former case, we can "defer" this
%% operation by storing it, with its context, for later execution. If
%% the clock for the Map descends the operation clock, then we don't
%% need to defer the op, its already been done. It is _very_ important
%% to note, that only _actorless_ operations can be saved. That is
%% operations that DO NOT need to increment the clock. In a Map this
%% means field removals only. Contexts for update operations do not
%% result in deferred operations on the parent Map. This simulates
%% causal delivery, in that an `update' must be seen before it can be
%% `removed'.
-spec defer_remove(riak_dt_vclock:vclock(), riak_dt_vclock:vclock(), field(), deferred()) ->
                          deferred().
defer_remove(Clock, Ctx, Field, Deferred) ->
    case riak_dt_vclock:descends(Clock, Ctx) of
        %% no need to save this remove, we're done
        true -> Deferred;
        %% Accumulate the field under its context; several fields may
        %% share one deferred context entry.
        false -> ?DICT:update(Ctx,
                              fun(Fields) ->
                                      ordsets:add_element(Field, Fields) end,
                              ordsets:add_element(Field, ordsets:new()),
                              Deferred)
    end.
%% @doc merge two `riak_dt_map()'s.
%% v1 (orddict) inputs are upgraded to v2 first; identical maps short
%% circuit. Otherwise: fields unique to one side are kept only where
%% their dots are unseen by the other side's clock, common fields are
%% merged dot-wise, deferred operations are unioned, and finally any
%% now-satisfiable deferred removes are applied.
-spec merge(riak_dt_map(), riak_dt_map()) -> riak_dt_map().
merge({_LHSC, LHSE, _LHSD}=LHS, {_RHSC, RHSE, _RHSD}=RHS) when is_list(LHSE);
                                                               is_list(RHSE) ->
    merge(to_v2(LHS), to_v2(RHS));
merge(Map, Map) ->
    Map;
%% @TODO is there a way to optimise this, based on clocks maybe?
merge({LHSClock, LHSEntries, LHSDeferred}, {RHSClock, RHSEntries, RHSDeferred}) ->
    Clock = riak_dt_vclock:merge([LHSClock, RHSClock]),
    {CommonKeys, LHSUnique, RHSUnique} = key_sets(LHSEntries, RHSEntries),
    Acc0 = filter_unique(LHSUnique, LHSEntries, RHSClock, ?DICT:new()),
    Acc1 = filter_unique(RHSUnique, RHSEntries, LHSClock, Acc0),
    Entries = merge_common(CommonKeys, LHSEntries, RHSEntries, LHSClock, RHSClock, Acc1),
    Deferred = merge_deferred(RHSDeferred, LHSDeferred),
    apply_deferred(Clock, Entries, Deferred).
%% @private filter the set of fields that are on one side of a merge
%% only.
%% `Clock' is the OTHER side's clock: a dot it has seen must have been
%% removed over there, so only unseen (concurrent) dots survive. A
%% field with any surviving dot is stored with a tombstone built from
%% that clock so the removed dots propagate on later merges.
-spec filter_unique(riak_dt_set(), entries(), riak_dt_vclock:vclock(), entries()) -> entries().
filter_unique(FieldSet, Entries, Clock, Acc) ->
    ordsets:fold(fun({_Name, Type}=Field, Keep) ->
                         {Dots, TS} = ?DICT:fetch(Field, Entries),
                         KeepDots = ?DICT:filter(fun(Dot, _CRDT) ->
                                                         is_dot_unseen(Dot, Clock)
                                                 end,
                                                 Dots),
                         case ?DICT:size(KeepDots) of
                             0 ->
                                 Keep;
                             _ ->
                                 %% create a tombstone since the
                                 %% otherside does not have this field,
                                 %% it either removed it, or never had
                                 %% it. If it never had it, the removing
                                 %% dots in the tombstone will have no
                                 %% impact on the value, if the otherside
                                 %% removed it, then the removed dots
                                 %% will be propogated by the tombstone.
                                 Tombstone = Type:merge(TS, Type:parent_clock(Clock, Type:new())),
                                 ?DICT:store(Field, {KeepDots, Tombstone}, Keep)
                         end
                 end,
                 Acc,
                 FieldSet).
%% @private predicate function, `true' if the provided `dot()' is
%% concurrent with the clock, `false' if the clock has seen the dot.
-spec is_dot_unseen(riak_dt:dot(), riak_dt_vclock:vclock()) -> boolean().
is_dot_unseen(Dot, Clock) ->
    case riak_dt_vclock:descends(Clock, [Dot]) of
        true  -> false; %% clock covers the dot
        false -> true   %% dot is concurrent / unseen
    end.
%% @doc Get the keys from an ?DICT as a ?SET (an ordset of field names).
-spec key_set(riak_dt_dict()) -> riak_dt_set().
key_set(Dict) ->
    Keys = ?DICT:fetch_keys(Dict),
    ordsets:from_list(Keys).
%% @doc break the keys from an two ?DICTs out into three ?SETs, the
%% common keys, those unique to one, and those unique to the other.
-spec key_sets(riak_dt_dict(), riak_dt_dict()) -> {riak_dt_set(), riak_dt_set(), riak_dt_set()}.
key_sets(LHS, RHS) ->
    LKeys = key_set(LHS),
    RKeys = key_set(RHS),
    Common = ordsets:intersection(LKeys, RKeys),
    OnlyLeft = ordsets:subtract(LKeys, RKeys),
    OnlyRight = ordsets:subtract(RKeys, LKeys),
    {Common, OnlyLeft, OnlyRight}.
%% @private for a set of dots (that are unique to one side) decide
%% whether to keep, or drop each.
%% A dot the other side's `Clock' has seen was removed there; only
%% dots unseen by `Clock' (concurrent updates) survive.
-spec filter_dots(riak_dt_set(), riak_dt_dict(), riak_dt_vclock:vclock()) -> entries().
filter_dots(Dots, CRDTs, Clock) ->
    DotsToKeep = ordsets:filter(fun(Dot) ->
                                        is_dot_unseen(Dot, Clock)
                                end,
                                Dots),
    ?DICT:filter(fun(Dot, _CRDT) ->
                         ordsets:is_element(Dot, DotsToKeep)
                 end,
                 CRDTs).
%% @private merge the common fields into a set of surviving dots and a
%% tombstone per field. If a dot is on both sides, keep it. If it is
%% only on one side, drop it if dominated by the otherside's clock.
%% A field whose dots are all dropped disappears from the result; the
%% per-field tombstones from both sides are always merged together.
merge_common(FieldSet, LHS, RHS, LHSClock, RHSClock, Acc) ->
    ordsets:fold(fun({_, Type}=Field, Keep) ->
                         {LHSDots, LHTS} = ?DICT:fetch(Field, LHS),
                         {RHSDots, RHTS} = ?DICT:fetch(Field, RHS),
                         {CommonDots, LHSUniqe, RHSUnique} = key_sets(LHSDots, RHSDots),
                         TS = Type:merge(RHTS, LHTS),
                         %% Dots present on both sides survive unconditionally
                         %% (the LHS entry is taken for each).
                         CommonSurviving = ordsets:fold(fun(Dot, Common) ->
                                                                L = ?DICT:fetch(Dot, LHSDots),
                                                                ?DICT:store(Dot, L, Common)
                                                        end,
                                                        ?DICT:new(),
                                                        CommonDots),
                         %% One-sided dots survive only if unseen by the
                         %% other side's clock.
                         LHSSurviving = filter_dots(LHSUniqe, LHSDots, RHSClock),
                         RHSSurviving = filter_dots(RHSUnique, RHSDots, LHSClock),
                         Dots = ?DICT:from_list(lists:merge([?DICT:to_list(CommonSurviving),
                                                             ?DICT:to_list(LHSSurviving),
                                                             ?DICT:to_list(RHSSurviving)])),
                         case ?DICT:size(Dots) of
                             0 ->
                                 Keep;
                             _ ->
                                 ?DICT:store(Field, {Dots, TS}, Keep)
                         end
                 end,
                 Acc,
                 FieldSet).
%% @private union the deferred-operation stores of two maps; when both
%% sides hold field sets for the same context, the sets are unioned.
-spec merge_deferred(deferred(), deferred()) -> deferred().
merge_deferred(LHS, RHS) ->
    Union = fun(_Ctx, LFields, RFields) ->
                    ordsets:union(LFields, RFields)
            end,
    ?DICT:merge(Union, LHS, RHS).
%% @private apply those deferred field removals, if they're
%% preconditions have been met, that is.
%% Re-runs every stored context remove against the merged map;
%% remove_field/3 either executes it or re-defers it, so the deferred
%% store is rebuilt from empty here.
-spec apply_deferred(riak_dt_vclock:vclock(), entries(), deferred()) ->
                            {riak_dt_vclock:vclock(), entries(), deferred()}.
apply_deferred(Clock, Entries, Deferred) ->
    ?DICT:fold(fun(Ctx, Fields, Map) ->
                       remove_all(Fields, Map, Ctx)
               end,
               {Clock, Entries, ?DICT:new()},
               Deferred).
%% @private apply remove_field/3 to every field in `Fields' under the
%% given context. Context removes always return `{ok, _}' (they defer
%% rather than fail), so the match inside the fold is safe.
-spec remove_all([field()], riak_dt_map(), context()) -> riak_dt_map().
remove_all(Fields, Map, Ctx) ->
    Remove = fun(Field, Acc) ->
                     {ok, Acc1} = remove_field(Field, Acc, Ctx),
                     Acc1
             end,
    lists:foldl(Remove, Map, Fields).
%% @doc compare two `riak_dt_map()'s for equality of structure Both
%% schemas and value list must be equal. Performs a pariwise equals for
%% all values in the value lists
%% v1 (orddict) inputs are upgraded to v2 first. Clocks, deferred
%% stores, and the sorted field lists must all agree.
-spec equal(riak_dt_map(), riak_dt_map()) -> boolean().
equal({_LHSC, LHSE, _LHSD}=LHS, {_RHSC, RHSE, _RHSD}=RHS) when is_list(LHSE);
                                                               is_list(RHSE) ->
    equal(to_v2(LHS), to_v2(RHS));
equal({Clock1, Values1, Deferred1}, {Clock2, Values2, Deferred2}) ->
    riak_dt_vclock:equal(Clock1, Clock2) andalso
        lists:sort(?DICT:to_list(Deferred1)) ==
        lists:sort(?DICT:to_list(Deferred2)) andalso
        pairwise_equals(lists:sort(?DICT:to_list(Values1)),
                        lists:sort(?DICT:to_list(Values2))).
%% @private walk two sorted field lists in lockstep: names, types, dot
%% sets and tombstones must all match; any mismatch (including a
%% length difference, caught by the final clause) is `false'.
-spec pairwise_equals([field()], [field()]) -> boolean().
pairwise_equals([], []) ->
    true;
pairwise_equals([{{Name, Type}, {Dots1, TS1}}| Rest1], [{{Name, Type}, {Dots2, TS2}}|Rest2]) ->
    %% Tombstones don't need to be equal. When we merge with a map
    %% where one side is absent, we take the absent sides clock, when
    %% we merge where both sides have a field, we merge the
    %% tombstones, and apply deferred. The deferred remove uses a glb
    %% of the context and the clock, meaning we get a smaller
    %% tombstone. Both are correct when it comes to determining the
    %% final value. As long as tombstones are not conflicting (that is
    %% A == B | A > B | B > A)
    case {?DICT:fetch_keys(Dots1) == ?DICT:fetch_keys(Dots2), Type:equal(TS1, TS2)} of
        {true, true} ->
            pairwise_equals(Rest1, Rest2);
        _ ->
            false
    end;
pairwise_equals(_, _) ->
    false.
%% @doc an opaque context that can be passed to `update/4' to ensure
%% that only seen fields are removed. If a field removal operation has
%% a context that the Map has not seen, it will be deferred until
%% causally relevant.
%% The context is simply the Map's version vector.
-spec precondition_context(riak_dt_map()) -> riak_dt:context().
precondition_context({Clock, _Field, _Deferred}) ->
    Clock.
%% @doc stats on internal state of Map.
%% A proplist of `{StatName :: atom(), Value :: integer()}'. Stats exposed are:
%% `actor_count': The number of actors in the clock for the Map.
%% `field_count': The total number of fields in the Map (including divergent field entries).
%% `duplication': The number of duplicate entries in the Map across all fields.
%% basically `field_count' - ( unique fields)
%% `deferred_length': How many operations on the deferred list, a reasonable expression
%% of lag/staleness.
-spec stats(riak_dt_map()) -> [{atom(), integer()}].
%% The map is upgraded to v2 once, then each stat is computed by stat/2.
stats(Map) ->
    [ {S, stat(S, to_v2(Map))} || S <- [actor_count, field_count, duplication, deferred_length]].
%% @doc compute a single named stat (see stats/1 for the available
%% names); unknown names return `undefined'. v1 maps are upgraded
%% first.
-spec stat(atom(), riak_dt_map()) -> number() | undefined.
stat(Stat, {_, E, _D}=Map) when is_list(E) ->
    stat(Stat, to_v2(Map));
stat(actor_count, {Clock, _, _}) ->
    length(Clock);
stat(field_count, {_, Fields, _}) ->
    ?DICT:size(Fields);
stat(duplication, {_, Fields, _}) ->
    %% Number of duplicated fields
    %% (total per-dot entries minus the number of distinct fields)
    {FieldCnt, Duplicates} = ?DICT:fold(fun(_Field, {Dots ,_}, {FCnt, DCnt}) ->
                                                {FCnt+1, DCnt + ?DICT:size(Dots)}
                                        end,
                                        {0, 0},
                                        Fields),
    Duplicates - FieldCnt;
stat(deferred_length, {_, _, Deferred}) ->
    ?DICT:size(Deferred);
stat(_,_) -> undefined.
-include("riak_dt_tags.hrl").
-define(TAG, ?DT_MAP_TAG).
-define(V1_VERS, 1).
-define(V2_VERS, 2).
%% @doc returns a binary representation of the provided `riak_dt_map()'.
%% The resulting binary is tagged and versioned for ease of future
%% upgrade. Calling `from_binary/1' with the result of this function
%% will return the original map. Use the application env var
%% `binary_compression' to turn t2b compression on (`true') and off
%% (`false')
%%
%% @see from_binary/1
-spec to_binary(riak_dt_map()) -> binary_map().
to_binary(Map) ->
    %% Always write the current (v2) on-disk/wire format.
    {ok, B} = to_binary(?V2_VERS, Map),
    B.

%% @private encode v1 maps as v2, and vice versa. The first argument
%% is the target binary type. Returns `{ok, Binary}' for known
%% versions and an ?UNSUPPORTED_VERSION error term otherwise.
-spec to_binary(Vers :: pos_integer(), riak_dt_map()) -> {ok, binary_map()} | ?UNSUPPORTED_VERSION.
to_binary(?V1_VERS, Map0) ->
    Map = to_v1(Map0),
    {ok, <<?TAG:8/integer, ?V1_VERS:8/integer, (riak_dt:to_binary(Map))/binary>>};
to_binary(?V2_VERS, Map0) ->
    Map = to_v2(Map0),
    {ok, <<?TAG:8/integer, ?V2_VERS:8/integer, (riak_dt:to_binary(Map))/binary>>};
to_binary(Vers, _Map) ->
    ?UNSUPPORTED_VERSION(Vers).
%% Convert a map term to the representation used by the requested
%% version; unknown version numbers leave the term untouched.
-spec to_version(pos_integer(), any_map()) -> any_map().
to_version(Version, Map) ->
    case Version of
        2 -> to_v2(Map);
        1 -> to_v1(Map);
        _ -> Map
    end.
%% @private transpose a v1 map (orddicts) to a v2 (dicts). Terms that
%% are not in the list-based v1 shape are returned unchanged.
-spec to_v2(any_map()) -> riak_dt_map().
to_v2({Clock, Fields0, Deferred0}) when is_list(Fields0),
                                        is_list(Deferred0) ->
    %% Upgrade each field entry and re-key the top level dicts.
    Fields = ?DICT:from_list([ field_to_v2(Key, Value) || {Key, Value} <- Fields0]),
    Deferred = ?DICT:from_list(Deferred0),
    {Clock, Fields, Deferred};
to_v2(S) ->
    S.

%% @private transpose a v2 map (dicts) to a v1 (orddicts). A term that
%% already has list-based fields/deferred is assumed to be v1 already.
-spec to_v1(any_map()) -> ord_map().
to_v1({_Clock, Fields0, Deferred0}=S) when is_list(Fields0),
                                           is_list(Deferred0) ->
    S;
to_v1({Clock, Fields0, Deferred0}) ->
    %% Must be dicts, there is no is_dict test though
    %% should we use error handling as logic here??
    Fields = orddict:map(fun field_to_v1/2, riak_dt:dict_to_orddict(Fields0)),
    Deferred = riak_dt:dict_to_orddict(Deferred0),
    {Clock, Fields, Deferred}.
%% @doc When the argument is a `binary_map()' produced by
%% `to_binary/1' will return the original `riak_dt_map()'.
%%
%% @see to_binary/1
-spec from_binary(binary_map()) -> {ok, riak_dt_map()} | ?UNSUPPORTED_VERSION | ?INVALID_BINARY.
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, B/binary>>) ->
    Map = riak_dt:from_binary(B),
    %% upgrade ondisk v1 structure to v2 term. This will also handle
    %% the horrid riak-2.0.4 map that has lists for entries/deferred,
    %% but dict elsewhere, and v2 types nested!
    {ok, to_v2(Map)};
from_binary(<<?TAG:8/integer, ?V2_VERS:8/integer, B/binary>>) ->
    %% Only fully v2 maps are written as v2, calling to_v2 a paranoid
    %% waste?
    Map = riak_dt:from_binary(B),
    {ok, to_v2(Map)};
%% Known tag but a version byte we do not understand.
from_binary(<<?TAG:8/integer, Vers:8/integer, _B/binary>>) ->
    ?UNSUPPORTED_VERSION(Vers);
%% Anything without our tag byte was not produced by to_binary/1.
from_binary(_B) ->
    ?INVALID_BINARY.
%% @private Upgrade a single field entry to the v2 representation:
%% recursively upgrade the per-dot CRDTs and the tombstone via the field
%% type's `to_version/2', and turn the dot->CRDT list into a dict.
field_to_v2({Name, Type}, {CRDTs0, Tombstone0}) when is_list(CRDTs0) ->
    Tombstone = Type:to_version(2, Tombstone0),
    CRDTs = dict:from_list([ {Dot, Type:to_version(2, CRDT)} || {Dot, CRDT} <- CRDTs0 ]),
    {{Name, Type}, {CRDTs, Tombstone}};
field_to_v2(FieldName, FieldValue) ->
    %% this is a messed up half v1 half v2 map from the ill fated
    %% riak2.0.4 release. The top level `fields' and `deferred' were
    %% written to disk/wire as lists to be backwards compatible with
    %% v1, but internally it is all v2 still, it doesn't need
    %% recursing over internally.
    {FieldName, FieldValue}.

%% @private Downgrade a single field value to v1: the per-dot CRDTs and
%% the tombstone are converted with the field type's own `to_version/2'.
field_to_v1({_Name, Type}, {CRDTs0, Tombstone0}) ->
    Tombstone = Type:to_version(1, Tombstone0),
    CRDTs = orddict:map(fun(_Dot, CRDT) ->
                                Type:to_version(1, CRDT)
                        end, riak_dt:dict_to_orddict(CRDTs0)),
    {CRDTs, Tombstone}.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% This fails on previous version of riak_dt_map
%% Checks that merge is associative/commutative for a history with a
%% concurrent element remove and field remove.
assoc_test() ->
    Field = {'X', riak_dt_orswot},
    {ok, A} = update({update, [{update, Field, {add, 0}}]}, a, new()),
    {ok, B} = update({update, [{update, Field, {add, 0}}]}, b, new()),
    {ok, B2} = update({update, [{update, Field, {remove, 0}}]}, b, B),
    C = A,
    {ok, C3} = update({update, [{remove, Field}]}, c, C),
    ?assertEqual(merge(A, merge(B2, C3)), merge(merge(A, B2), C3)),
    ?assertEqual(value(merge(merge(A, C3), B2)), value(merge(merge(A, B2), C3))),
    ?assertEqual(merge(merge(A, C3), B2), merge(merge(A, B2), C3)).

%% A field removed and re-added on one replica must keep concurrent
%% additions made on the other replica after merging.
clock_test() ->
    Field = {'X', riak_dt_orswot},
    {ok, A} = update({update, [{update, Field, {add, 0}}]}, a, new()),
    B = A,
    {ok, B2} = update({update, [{update, Field, {add, 1}}]}, b, B),
    {ok, A2} = update({update, [{update, Field, {remove, 0}}]}, a, A),
    {ok, A3} = update({update, [{remove, Field}]}, a, A2),
    {ok, A4} = update({update, [{update, Field, {add, 2}}]}, a, A3),
    AB = merge(A4, B2),
    ?assertEqual([{Field, [1, 2]}], value(AB)).

%% Removing a field and re-adding it must not resurrect elements that
%% were observed-removed before the field removal.
remfield_test() ->
    Field = {'X', riak_dt_orswot},
    {ok, A} = update({update, [{update, Field, {add, 0}}]}, a, new()),
    B = A,
    {ok, A2} = update({update, [{update, Field, {remove, 0}}]}, a, A),
    {ok, A3} = update({update, [{remove, Field}]}, a, A2),
    {ok, A4} = update({update, [{update, Field, {add, 2}}]}, a, A3),
    AB = merge(A4, B),
    ?assertEqual([{Field, [2]}], value(AB)).
%% Bug found by EQC, not dropping dots in merge when an element is
%% present in both Maps leads to removed items remaining after merge.
present_but_removed_test() ->
    F = {'X', riak_dt_lwwreg},
    %% Add Z to A
    {ok, A} = update({update, [{update, F, {assign, <<"A">>}}]}, a, new()),
    %% Replicate it to C so A has 'Z'->{a, 1}
    C = A,
    %% Remove Z from A
    {ok, A2} = update({update, [{remove, F}]}, a, A),
    %% Add Z to B, a new replica
    {ok, B} = update({update, [{update, F, {assign, <<"B">>}}]}, b, new()),
    %% Replicate B to A, so now A has a Z, the one with a Dot of
    %% {b,1} and clock of [{a, 1}, {b, 1}]
    A3 = merge(B, A2),
    %% Remove the 'Z' from B replica
    {ok, B2} = update({update, [{remove, F}]}, b, B),
    %% Both C and A have a 'Z', but when they merge, there should be
    %% no 'Z' as C's has been removed by A and A's has been removed by
    %% C.
    Merged = lists:foldl(fun(Set, Acc) ->
                                 merge(Set, Acc) end,
                         %% the order matters, the two replicas that
                         %% have 'Z' need to merge first to provoke
                         %% the bug. You end up with 'Z' with two
                         %% dots, when really it should be removed.
                         A3,
                         [C, B2]),
    ?assertEqual([], value(Merged)).

%% A bug EQC found where dropping the dots in merge was not enough if
%% you then store the value with an empty clock (derp).
no_dots_left_test() ->
    F = {'Z', riak_dt_lwwreg},
    {ok, A} = update({update, [{update, F, {assign, <<"A">>}}]}, a, new()),
    {ok, B} = update({update, [{update, F, {assign, <<"B">>}}]}, b, new()),
    C = A, %% replicate A to empty C
    {ok, A2} = update({update, [{remove, F}]}, a, A),
    %% replicate B to A, now A has B's 'Z'
    A3 = merge(A2, B),
    %% Remove B's 'Z'
    {ok, B2} = update({update, [{remove, F}]}, b, B),
    %% Replicate C to B, now B has A's old 'Z'
    B3 = merge(B2, C),
    %% Merge everything, without the fix You end up with 'Z' present,
    %% with no dots
    Merged = lists:foldl(fun(Set, Acc) ->
                                 merge(Set, Acc) end,
                         A3,
                         [B3, C]),
    ?assertEqual([], value(Merged)).
%% A reset-remove bug eqc found where dropping a superseded dot lost
%% field remove merge information the dropped dot contained, adding
%% the tombstone fixed this.
tombstone_remove_test() ->
    F = {'X', riak_dt_orswot},
    A=B=new(),
    {ok, A1} = update({update, [{update, F, {add, 0}}]}, a, A),
    %% Replicate!
    B1 = merge(A1, B),
    {ok, A2} = update({update, [{remove, F}]}, a, A1),
    {ok, B2} = update({update, [{update, F, {add, 1}}]}, b, B1),
    %% Replicate
    A3 = merge(A2, B2),
    %% that remove of F from A means remove the 0 A added to F
    ?assertEqual([{F, [1]}], value(A3)),
    {ok, B3} = update({update, [{update, F, {add, 2}}]}, b, B2),
    %% replicate to A
    A4 = merge(A3, B3),
    %% final values
    Final = merge(A4, B3),
    %% before adding the tombstone, the dropped dots were simply
    %% merged with the surviving field. When the second update to B
    %% was merged with A, that information contained in the superseded
    %% field in A at {b,1} was lost (since it was merged into the
    %% _VALUE_). This caused the [0] from A's first dot to
    %% resurface. By adding the tombstone, the superseded field merges
    %% it's tombstone with the surviving {b, 2} field so the remove
    %% information is preserved, even though the {b, 1} value is
    %% dropped. Pro-tip, don't alter the CRDTs' values in the merge!
    ?assertEqual([{F, [1,2]}], value(Final)).
%% This test is a regression test for a counter example found by eqc.
%% The previous version of riak_dt_map used the `dot' from the field
%% update/creation event as key in `merge_left/3'. Of course multiple
%% fields can be added/updated at the same time. This means they get
%% the same `dot'. When merging two replicas, it is possible that one
%% has removed one or more of the fields added at a particular `dot',
%% which meant a function clause error in `merge_left/3'. The
%% structure was wrong, it didn't take into account the possibility
%% that multiple fields could have the same `dot', when clearly, they
%% can. This test fails with `dot' as the key for a field in
%% `merge_left/3', but passes with the current structure, of
%% `{field(), dot()}' as key.
dot_key_test() ->
    {ok, A} = update({update, [{update, {'X', riak_dt_orswot}, {add, <<"a">>}}, {update, {'X', riak_dt_od_flag}, enable}]}, a, new()),
    B = A,
    {ok, A2} = update({update, [{remove, {'X', riak_dt_od_flag}}]}, a, A),
    ?assertEqual([{{'X', riak_dt_orswot}, [<<"a">>]}], value(merge(B, A2))).

%% Sanity checks for stats/1 and stat/2 over a small history of
%% concurrent updates and merges producing divergent (duplicate) fields.
stat_test() ->
    Map = new(),
    {ok, Map1} = update({update, [{update, {c, riak_dt_emcntr}, increment},
                                  {update, {s, riak_dt_orswot}, {add, <<"A">>}},
                                  {update, {m, riak_dt_map}, {update, [{update, {ss, riak_dt_orswot}, {add, 0}}]}},
                                  {update, {l, riak_dt_lwwreg}, {assign, <<"a">>, 1}},
                                  {update, {l2, riak_dt_lwwreg}, {assign, <<"b">>, 2}}]}, a1, Map),
    {ok, Map2} = update({update, [{update, {l, riak_dt_lwwreg}, {assign, <<"foo">>, 3}}]}, a2, Map1),
    {ok, Map3} = update({update, [{update, {l, riak_dt_lwwreg}, {assign, <<"bar">>, 4}}]}, a3, Map1),
    Map4 = merge(Map2, Map3),
    ?assertEqual([{actor_count, 0}, {field_count, 0}, {duplication, 0}, {deferred_length, 0}], stats(Map)),
    ?assertEqual(3, stat(actor_count, Map4)),
    ?assertEqual(5, stat(field_count, Map4)),
    ?assertEqual(undefined, stat(waste_pct, Map4)),
    ?assertEqual(1, stat(duplication, Map4)),
    {ok, Map5} = update({update, [{update, {l3, riak_dt_lwwreg}, {assign, <<"baz">>, 5}}]}, a3, Map4),
    ?assertEqual(6, stat(field_count, Map5)),
    ?assertEqual(1, stat(duplication, Map5)),
    %% Updating field {l, riak_dt_lwwreg} merges the duplicates to a single field
    %% {@link apply_ops}
    {ok, Map6} = update({update, [{update, {l, riak_dt_lwwreg}, {assign, <<"bim">>, 6}}]}, a2, Map5),
    ?assertEqual(0, stat(duplication, Map6)),
    {ok, Map7} = update({update, [{remove, {l, riak_dt_lwwreg}}]}, a1, Map6),
    ?assertEqual(5, stat(field_count, Map7)).

%% equal/2 must be reflexive and merge order must not affect equality.
equals_test() ->
    {ok, A} = update({update, [{update, {'X', riak_dt_orswot}, {add, <<"a">>}}, {update, {'X', riak_dt_od_flag}, enable}]}, a, new()),
    {ok, B} = update({update, [{update, {'Y', riak_dt_orswot}, {add, <<"a">>}}, {update, {'Z', riak_dt_od_flag}, enable}]}, b, new()),
    ?assert(not equal(A, B)),
    C = merge(A, B),
    D = merge(B, A),
    ?assert(equal(C, D)),
    ?assert(equal(A, A)).
%% to_binary/2 and from_binary/1 must reject version numbers they do
%% not know. crypto:rand_bytes/1 was deprecated in OTP 19 and removed
%% in OTP 20, so use crypto:strong_rand_bytes/1 for the random payload.
unsupported_version_test() ->
    ?assertMatch(?UNSUPPORTED_VERSION(12), to_binary(12, new())),
    ?assertMatch(?UNSUPPORTED_VERSION(8), from_binary(<<?TAG:8/integer, 8:8/integer, (crypto:strong_rand_bytes(22))/binary>>)).
%% Arbitrary bytes lacking the ?TAG header must be rejected.
%% crypto:rand_bytes/1 was removed in OTP 20; use strong_rand_bytes/1.
invalid_binary_test() ->
    ?assertMatch(?INVALID_BINARY, from_binary(<<(crypto:strong_rand_bytes(187))/binary>>)).
-ifdef(EQC).
-define(NUMTESTS, 1000).
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) ->
io:format(user, Str, Args) end, P)).
%% ===================================
%% crdt_statem_eqc callbacks
%% ===================================
%% Rough "size" of a Map for EQC shrinking purposes: the external term
%% encoding size in bytes, scaled down by 10.
%% NOTE(review): defining size/1 clashes with the auto-imported BIF
%% erlang:size/1 — presumably resolved locally / via no_auto_import;
%% confirm against the full module header.
size(Map) ->
    %% How big is a Map? Maybe number of fields and depth matter? But
    %% then the number of fields in sub maps too?
    byte_size(term_to_binary(Map)) div 10.
%% EQC generator: build a Map by folding a random list of ops over
%% new(), each op applied by a randomly chosen actor. Ops that the map
%% rejects are simply skipped.
%%
%% crypto:rand_uniform/2 was deprecated in OTP 20 and removed in OTP
%% 23, so pick the actor with rand:uniform/1 instead. This also fixes
%% an off-by-one: crypto:rand_uniform(1, N) returned values in [1, N),
%% so the last actor in the list could never be chosen; rand:uniform/1
%% covers the whole list and makes the length-1 special case redundant.
generate() ->
    ?LET({Ops, Actors}, {non_empty(list(gen_op())), non_empty(list(bitstring(16*8)))},
         lists:foldl(fun(Op, Map) ->
                             Actor = lists:nth(rand:uniform(length(Actors)), Actors),
                             case update(Op, Actor, Map) of
                                 {ok, M} -> M;
                                 _ -> Map
                             end
                     end,
                     new(),
                     Ops)).
%% Add depth parameter
%% Generators for map operations. Size is threaded through so nested
%% maps get smaller sub-operations and generation terminates.
gen_op() ->
    ?SIZED(Size, gen_op(Size)).

gen_op(Size) ->
    %% An op is an update batch of one or more field updates/removes.
    ?LET(Ops, non_empty(list(gen_update(Size))), {update, Ops}).

gen_update(Size) ->
    %% Either remove a field or apply a type-specific op to it; the
    %% field op shrinks the size budget by half.
    ?LET(Field, gen_field(Size),
         oneof([{remove, Field},
                {update, Field, gen_field_op(Field, Size div 2)}])).

gen_field() ->
    ?SIZED(Size, gen_field(Size)).

gen_field(Size) ->
    %% A field is a {Name, Type} pair; nested riak_dt_map fields are
    %% only allowed while there is size budget left.
    {growingelements(['A', 'B', 'C', 'X', 'Y', 'Z']) %% Macro? Bigger?
    , elements([
                riak_dt_emcntr,
                riak_dt_orswot,
                riak_dt_lwwreg,
                riak_dt_od_flag
               ] ++ [riak_dt_map || Size > 0])}.

%% Delegate op generation to the field's own CRDT type.
gen_field_op({_Name, Type}, Size) ->
    Type:gen_op(Size).
%% EUnit wrapper running the downgrade round-trip property with a
%% generous timeout (property runs ?NUMTESTS cases).
v1_downgrade_roundtrip_test_() ->
    {timeout,
     120,
     fun() ->
             quickcheck(numtests(?NUMTESTS, ?QC_OUT(prop_v1_downgrade_roundtrip())))
     end}.

%% Property: downgrading a generated map to v1, serialising and reading
%% it back yields an equal map, and the read-back map is already in the
%% v2 representation (to_v2/1 is a no-op on it).
prop_v1_downgrade_roundtrip() ->
    ?FORALL(Map, generate(),
            begin
                {ok, ConvertedMap} = from_binary(to_binary(to_version(1, Map))),
                conjunction([{equal, equal(Map, ConvertedMap)},
                             {not_v1, equals(to_v2(ConvertedMap), ConvertedMap)}])
            end).
-endif.
-endif.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_ejson_size).
-export([encoded_size/1]).
%% Estimate the size in bytes of an EJSON term once encoded as JSON.
%% Sizes are exact for the compound structure, integers, binaries
%% (ignoring JSON string escaping) and the special atoms; float sizes
%% are deliberately rough order-of-magnitude estimates.

%% Compound objects
encoded_size({[]}) ->
    2; % opening { and closing }
encoded_size({KVs}) ->
    %% 2 for the opening { and closing }, but each pair below counts a
    %% ":" plus a trailing ",", which is one separator too many, so
    %% start from 2 - 1 = 1.
    1 + lists:sum([encoded_size(K) + encoded_size(V) + 2 || {K, V} <- KVs]);
encoded_size([]) ->
    2; % opening [ and closing ]
encoded_size(List) when is_list(List) ->
    %% 2 for [ and ] minus 1 for the extra trailing "," counted below.
    1 + lists:sum([encoded_size(V) + 1 || V <- List]);

%% Floats. Rough estimates only, keyed off the magnitude.
encoded_size(0.0) ->
    3;
encoded_size(1.0) ->
    3;
encoded_size(Float) when is_float(Float), Float < 0.0 ->
    encoded_size(-Float) + 1; % one extra byte for the leading "-"
encoded_size(Float) when is_float(Float), Float < 1.0 ->
    if
        Float =< 1.0e-300 -> 3; % close enough to 0.0
        Float =< 1.0e-100 -> 6; % Xe-YYY
        Float =< 1.0e-10 -> 5;  % Xe-YY
        Float =< 0.01 -> 4;     % Xe-Y, 0.0X
        true -> 3               % 0.X
    end;
encoded_size(Float) when is_float(Float) ->
    if
        Float >= 1.0e100 -> 5;  % XeYYY
        Float >= 1.0e10 -> 4;   % XeYY
        true -> 3               % XeY, X.Y
    end;

%% Integers encode as their decimal digits (plus a leading "-" for
%% negatives), so count them exactly with integer_to_binary/1. The
%% previous trunc(math:log10(Integer)) + 1 estimate could be off by
%% one near powers of ten due to float rounding, and raised badarith
%% for bignums beyond the float range (> ~1.8e308) because math:log10/1
%% converts its argument to a float.
encoded_size(Integer) when is_integer(Integer) ->
    byte_size(integer_to_binary(Integer));

%% Strings: two bytes for the surrounding quotes. JSON escape
%% sequences inside the string are not accounted for, so this is a
%% lower bound for strings containing escapable characters.
encoded_size(Binary) when is_binary(Binary) ->
    2 + byte_size(Binary);

%% Special terminal symbols as atoms
encoded_size(null) ->
    4;
encoded_size(true) ->
    4;
encoded_size(false) ->
    5;

%% Other atoms encode like strings of their UTF-8 name
encoded_size(Atom) when is_atom(Atom) ->
    encoded_size(atom_to_binary(Atom, utf8)).
%% @doc
%% Measuring time intervals with Prometheus.erl.
%% Measuring time intervals is trivial - you just have to be sure you are using
%% monotonic time source. Basically interval is a difference between
%% start time and end time.
%% Erlang has standard `erlang:monotonic_time' function that returns
%% so called native time units. Native time units are meaningless
%% and have to be converted to seconds (or other units)
%% using `erlang:convert_time_unit'.
%% However as `erlang:convert_time_unit' documentation
%% [warns](http://erlang.org/doc/man/erlang.html#convert_time_unit-3):
%%
%% ```
%% You may lose accuracy and precision when converting between time units.
%% In order to minimize such loss, collect all data at native time unit and
%% do the conversion on the end result.
%% '''
%%
%% and because Prometheus mandates support for floats,
%% `set_duration/observe_duration` functions always work with
%% native time units and conversion is delayed until scraping/retrieving value.
%% To implement this, metric needs to know desired time unit.
%% Users can specify time unit explicitly via `duration_unit'
%% or implicitly via metric name (preferred, since prometheus best practices
%% guide insists on `<name>_duration_<unit>' metric name format).
%%
%% Possible units:
%% - microseconds;
%% - milliseconds;
%% - seconds;
%% - minutes;
%% - hours;
%% - days;
%%
%% Histogram also converts buckets bounds to native units if
%% duration_unit is provided. It converts it back when scraping or
%% retrieving value.
%%
%% If values already converted to a 'real' unit, conversion can be disabled
%% by setting `duration_unit' to `false'.
%%
%% ## Examples
%%
%% Example where duration unit derived from name:
%% <pre lang="erlang">
%% prometheus_histogram:new([{name, fun_duration_seconds},
%% {buckets, [0.5, 1.1]}, %% in seconds
%% {help, ""}]),
%% prometheus_histogram:observe_duration(fun_duration_seconds,
%% fun () ->
%% timer:sleep(1000)
%% end),
%% prometheus_histogram:value(fun_duration_seconds).
%% {[0,1,0],1.001030886}
%% </pre>
%%
%% Example where duration unit set explicitly:
%% <pre lang="erlang">
%% prometheus_histogram:new([{name, fun_duration_histogram},
%% {buckets, [500, 1100]}, %% in milliseconds
%% {help, ""},
%% {duration_unit, milliseconds}]),
%%
%% prometheus_histogram:observe_duration(fun_duration_histogram,
%% fun () ->
%% timer:sleep(1000)
%% end),
%%
%% prometheus_histogram:value(fun_duration_histogram).
%% {[0,1,0],1001.885302}
%% </pre>
%%
%% Example where value is in seconds already:
%% <pre lang="erlang">
%% prometheus_histogram:new([{name, duration_seconds},
%% {buckets, [0.5, 1.1]}, %% in seconds
%% {help, ""},
%% {duration_unit, false}]),
%%
%% prometheus_histogram:dobserve(duration_seconds, 1.2),
%%
%% prometheus_histogram:value(duration_seconds).
%% {[0,0,1],1.2}
%% </pre>
%% @end
-module(prometheus_time).
-export([duration_unit_from_string/1,
validate_duration_unit/1,
maybe_convert_to_native/2,
maybe_convert_to_du/2]).
-ifdef(TEST).
-export([from_native/2,
to_native/2]).
-endif.
%%====================================================================
%% Macros
%%====================================================================
-define(DURATION_UNITS, [{"microseconds", microseconds},
{"milliseconds", milliseconds},
{"seconds", seconds},
{"minutes", minutes},
{"hours", hours},
{"days", days}]).
%%====================================================================
%% Public API
%%====================================================================
%% @private
%% Guess the duration unit implied by a metric name string by searching
%% it for a known unit name (e.g. "..._duration_seconds"); returns
%% `undefined' when no known unit name occurs in the string.
duration_unit_from_string(Str) ->
    duration_unit_from_string(Str, ?DURATION_UNITS).
%% @private
%% Validate a user-supplied duration unit: `false' (conversion
%% disabled) and `undefined' (no unit) pass through; any other value
%% must be one of the known unit atoms, otherwise an
%% `{invalid_value, ...}' error is raised.
validate_duration_unit(false) ->
    false;
validate_duration_unit(undefined) ->
    undefined;
validate_duration_unit(SDU) ->
    %% The unit atoms are the second element of each ?DURATION_UNITS pair.
    case lists:keymember(SDU, 2, ?DURATION_UNITS) of
        true ->
            SDU;
        false ->
            erlang:error({invalid_value, SDU, "unknown duration unit"})
    end.
%% @private
%% Convert `Value' (expressed in duration unit `DU') to native time
%% units. `infinity' and values with no known duration unit
%% (`DU' = undefined) pass through untouched.
maybe_convert_to_native(_DU, infinity) ->
    infinity;
maybe_convert_to_native(undefined, Value) ->
    Value;
maybe_convert_to_native(DU, Value) ->
    to_native(Value, DU).
%% @private
%% Convert `Value' from native time units to the duration unit `DU'.
%% `infinity' and values with no known duration unit
%% (`DU' = undefined) pass through untouched.
maybe_convert_to_du(_DU, infinity) ->
    infinity;
maybe_convert_to_du(undefined, Value) ->
    Value;
maybe_convert_to_du(DU, Value) ->
    from_native(Value, DU).
%%====================================================================
%% Private Parts
%%====================================================================

%% Walk the known unit names and return the first one that occurs as a
%% substring of Str; `undefined' when none match.
%% NOTE(review): string:rstr/2 is obsolete (legacy string API, marked
%% obsolete since OTP 21). Only zero vs non-zero is checked here, so a
%% plain any-occurrence search (string:find/2) would suffice — consider
%% migrating when the minimum OTP allows.
duration_unit_from_string(Str, [{SDU, DU}|Rest]) ->
    case string:rstr(Str, SDU) of
        0 -> duration_unit_from_string(Str, Rest);
        _ -> DU
    end;
duration_unit_from_string(_, []) ->
    undefined.

%% @private Truncate to an integer amount of native units and convert
%% to nanoseconds.
from_native(Value) ->
    erlang:convert_time_unit(trunc(Value), native, nano_seconds).
%% @private Convert `Value' native time units to the requested duration
%% unit, returning a float.
from_native(Value, Unit) ->
    from_native(Value) / from_native_divisor(Unit).

%% Nanoseconds contained in one unit of each supported duration unit.
from_native_divisor(microseconds) -> 1000;
from_native_divisor(milliseconds) -> 1000000;
from_native_divisor(seconds)      -> 1000000000;
from_native_divisor(minutes)      -> 60000000000;
from_native_divisor(hours)        -> 3600000000000;
from_native_divisor(days)         -> 86400000000000.
%% @private Truncate a nanosecond amount to an integer and convert it
%% to native time units.
to_native(Value) ->
    erlang:convert_time_unit(trunc(Value), nano_seconds, native).
%% @private Convert `Value' expressed in the duration unit `Unit' to
%% native time units.
to_native(Value, Unit) ->
    to_native(Value * to_native_multiplier(Unit)).

%% Nanoseconds contained in one unit of each supported duration unit.
to_native_multiplier(microseconds) -> 1000;
to_native_multiplier(milliseconds) -> 1000000;
to_native_multiplier(seconds)      -> 1000000000;
to_native_multiplier(minutes)      -> 60000000000;
to_native_multiplier(hours)        -> 3600000000000;
to_native_multiplier(days)         -> 86400000000000.
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2014, <NAME>
%%% @doc
%%% Interface to the extraction tools.
%%% <p>This module collects the most important functions that
%%% can be used to extract information from eqc models using
%%% symbolic execution.</p>
%%% @end
%%% Created : 4 Feb 2014 by <NAME>
%%%-------------------------------------------------------------------
-module(see).
-export([scan_func/3, scan_func_dbg/3, scan_func_str_args/3,
scan_func_str_args_dbg/3, scan_model/1,
get_arg_var_name/1, get_field_var_name/1,
get_result_var_name/0, scan_and_print_model/1,
idiomize_model_info/1, print_model_info/1,
print_model_info_to_str/1, print_exp_iface/1,
print_exp_iface_to_str/1, scan_and_print_model_to_str/1,
rename_vars_in_model/2]).
-include("records.hrl").
-type syntaxTree() :: any(). %%% as used in the syntax tools
%%% @doc
%%% Extracts possibilities from one particular function.
%%% Arguments must represent Erlang expressions, (they may be
%%% complex Erlang expressions as tuples or records, but not patterns),
%%% and they may include unbound variables at any part. Unbound variable
%%% names should not match variables existing in the function
%%% to analyse. The function will try to expand the functions
%%% called within the function to analyse as long as the calls
%%% belong to functions in the module provided. Infinitely
%%% recursive functions may hang the analyser.
%%% @param FileName is an string or atom representing the
%%% relative path to the source where the function is defined
%%% @param FuncName is an atom representing the function to
%%% analyse
%%% @param Args is a list of the ASTs of the arguments that
%%% should be used to symbolically execute the function.
%%% @return a list of #exp_iface records, each representing
%%% a possibility. A tuple with the atom 'not_expandable' as
%%% is first element may be returned if the function cannot
%%% be analysed.
-spec scan_func(atom() | string(), atom(), [syntaxTree()]) -> [#exp_iface{} | {'not_expandable', _}].
scan_func(FileName, FuncName, Args) ->
    %% Symbolically execute, strip the internal record fields, then
    %% prune the expansions.
    RawExpansions = scan_func_aux(FileName, FuncName, Args),
    Decluttered = declutter_exp_list(RawExpansions),
    clean_nestcond:clean_expansions(Decluttered).
%%% @doc
%%% Equivalent to {@link scan_func/3}, but it does not prune
%%% the result for debugging purposes.
%%% @see scan_func/3
-spec scan_func_dbg(atom() | string(), atom(), [syntaxTree()]) -> [#exp_iface{} | {'not_expandable', _}].
scan_func_dbg(FileName, FuncName, Args) ->
    %% Same as scan_func/3 minus the clean_nestcond pruning step.
    declutter_exp_list(scan_func_dbg_aux(FileName, FuncName, Args)).

%%% @doc
%%% Calls {@link scan_func/3}, but it takes strings as arguments
%%% and converts them to ASTs before calling it.
%%% @see scan_func/3
-spec scan_func_str_args(atom() | string(), atom(), [string()]) -> [#exp_iface{} | {'not_expandable', _}].
scan_func_str_args(FileName, FuncName, Args) ->
    clean_nestcond:clean_expansions(declutter_exp_list(scan_func_str_args_aux(FileName, FuncName, Args))).

%%% @doc
%%% Calls {@link scan_func_dbg/3}, but it takes strings as arguments
%%% and converts them to ASTs before calling it.
%%% @see scan_func_dbg/3
-spec scan_func_str_args_dbg(atom() | string(), atom(), [string()]) -> [#exp_iface{} | {'not_expandable', _}].
scan_func_str_args_dbg(FileName, FuncName, Args) ->
    declutter_exp_list(scan_func_str_args_dbg_aux(FileName, FuncName, Args)).
%%% @doc
%%% Takes a eqc statem model and extracts possibilities for all
%%% preconditions, postconditions and next state definitions. It
%%% does so by providing as arguments to those calls unbounded
%%% variables defined by the functions {@link get_arg_var_name/1},
%%% {@link get_field_var_name/1}, and {@link get_result_var_name/0}.
%%% Assumes that record information is in the same file,
%%% and that the state is a record.
%%% @param FileName is an string or atom representing the
%%% relative path to the source where the model is defined
%%% @see scan_func/3
%%% @see get_arg_var_name/1
%%% @see get_field_var_name/1
%%% @see get_result_var_name/0
-spec scan_model(atom() | string()) -> #module_iface{call_list::[#call_iface{pre_exp::[#exp_iface{}],next_exp::[#exp_iface{}],post_exp::[#exp_iface{}]}]}.
scan_model(FileName) ->
    %% Extract raw model info, strip internal record fields, then prune.
    RawModelInfo = model_info:model_info(FileName),
    Decluttered = declutter_mif(FileName, RawModelInfo),
    clean_nestcond:clean_expansions_from_model(Decluttered).
%%% @doc
%%% Returns the name used for the unbound variables passed
%%% as arguments to the functions when using {@link scan_model/1}.
%%% Argument numbers start with 1.
%%% @param ArgNum the number of the argument whose variable name to
%%% obtain
%%% @return an atom with the corresponding variable name
%%% @see scan_model/1
-spec get_arg_var_name(ArgNum::integer()) -> atom().
get_arg_var_name(ArgNum) when is_integer(ArgNum) ->
    %% Name generation is delegated to model_info so the convention
    %% stays in one place.
    model_info:create_arg_name(arg, ArgNum).

%%% @doc
%%% Returns the name used for the unbound variables passed
%%% as fields of the state record to the functions when
%%% using {@link scan_model/1}.
%%% @param FieldName the name of the field whose variable
%%% name to obtain
%%% @return an atom with the corresponding variable name
%%% @see scan_model/1
-spec get_field_var_name(FieldName::atom()) -> atom().
get_field_var_name(FieldName) when is_atom(FieldName) ->
    model_info:create_arg_name(state, FieldName).

%%% @doc
%%% Returns the name used for the unbound variable passed
%%% as result of the call to the functions when
%%% using {@link scan_model/1}.
%%% @return an atom with the corresponding variable name
%%% @see scan_model/1
-spec get_result_var_name() -> atom().
get_result_var_name() ->
    model_info:create_arg_name(result, result).
%%% @doc
%%% Extracts and prints nicely information from a model.
%%% Is a combination of {@link scan_model/1},
%%% {@link idiomize_model_info/1}, and
%%% {@link print_model_info/1}).
%%% @param FileName is an string or atom representing the
%%% relative path to the source where the model is defined.
%%% @see scan_model/1
%%% @see idiomize_model_info/1
%%% @see print_model_info/1
%%% @see scan_and_print_model_to_str/1
-spec scan_and_print_model(atom() | string()) -> 'ok'.
scan_and_print_model(FileName) ->
    %% scan -> idiomize -> pretty-print to stdout
    Model = scan_model(FileName),
    IdiomizedModel = idiomizer:idiomize_module_info(Model),
    print_model_info(IdiomizedModel).

%%% @doc
%%% Renames a list of variables inside a model.
%%% @param RenamingList list of variables to
%%% rename and their renamings.
%%% @param ModelInfo record with the information extracted
%%% from a model.
%%% @return record with the information from the model
%%% updated
-spec rename_vars_in_model(RenamingList :: [{OriginalName :: atom(), FinalName :: atom()}],
                           ModelInfo :: #module_iface{}) -> #module_iface{}.
rename_vars_in_model(RenamingList, ModelInfo) ->
    see_logic:rename_vars_in_model(RenamingList, ModelInfo).

%%% @doc
%%% Extracts and prints nicely to a string information from a model.
%%% Is a combination of {@link scan_model/1},
%%% {@link idiomize_model_info/1}, and
%%% {@link print_model_info_to_str/1}).
%%% @param FileName is an string or atom representing the
%%% relative path to the source where the model is defined.
%%% @return a string with the pretty-printed information
%%% @see scan_model/1
%%% @see idiomize_model_info/1
%%% @see print_model_info_to_str/1
%%% @see scan_and_print_model/1
-spec scan_and_print_model_to_str(atom() | string()) -> string().
scan_and_print_model_to_str(FileName) ->
    %% scan -> idiomize -> pretty-print to a string
    Model = scan_model(FileName),
    IdiomizedModel = idiomizer:idiomize_module_info(Model),
    print_model_info_to_str(IdiomizedModel).

%%% @doc
%%% Adds information about idioms to a #module_iface{} record.
%%% @param ModelInfo record with the information extracted
%%% from a model.
%%% @see scan_model/1
-spec idiomize_model_info(ModelInfo::#module_iface{}) -> #module_iface{}.
idiomize_model_info(ModelInfo) ->
    idiomizer:idiomize_module_info(ModelInfo).
%%% @doc
%%% Prints a #module_iface{} record in a nicer way.
%%% @param ModelInfo record with the information extracted
%%% from a model.
%%% @see scan_model/1
%%% @see print_model_info_to_str/1
-spec print_model_info(ModelInfo::#module_iface{}) -> 'ok'.
print_model_info(ModelInfo) ->
    %% Render to a string first, then write it out verbatim.
    io:format("~s", [print_model_info_to_str(ModelInfo)]),
    ok.

%%% @doc
%%% Prints to a string a #module_iface{} record in a nicer way.
%%% @param ModelInfo record with the information extracted
%%% from a model.
%%% @return a string with the pretty-printed information
%%% @see scan_model/1
%%% @see print_model_info/1
-spec print_model_info_to_str(ModelInfo::#module_iface{}) -> string().
print_model_info_to_str(ModelInfo) ->
    %% The pretty-printer expects the internal (cluttered) records.
    lists:flatten(model_info:ppr_callinfos(clutter_mif(ModelInfo))).

%%% @doc
%%% Prints an #exp_iface{} record or a list of them in
%%% a nicer way.
%%% @param Exp_iface possibility or list of possibilities
%%% @see scan_func/3
%%% @see print_exp_iface_to_str/1
-spec print_exp_iface([#exp_iface{}] | #exp_iface{}) -> 'ok'.
print_exp_iface(ExpIface) ->
    io:format("~s", [print_exp_iface_to_str(ExpIface)]),
    ok.

%%% @doc
%%% Prints to a string an #exp_iface{} record or a list of them in
%%% a nicer way.
%%% @param Exp_iface possibility or list of possibilities
%%% @return a string with the pretty-printed information
%%% @see scan_func/3
%%% @see print_exp_iface/1
-spec print_exp_iface_to_str([#exp_iface{}] | #exp_iface{}) -> string().
%% A single record is wrapped in a list before pretty-printing.
print_exp_iface_to_str(#exp_iface{} = ExpIface) ->
    nestcond:ppr_expansions([clutter_exp(ExpIface)]);
print_exp_iface_to_str(ExpIfaceList) when is_list(ExpIfaceList) ->
    nestcond:ppr_expansions(clutter_exp_list(ExpIfaceList)).
%%%-------------------------------------------------------------------
%%% Auxiliar functions
%%%-------------------------------------------------------------------
scan_func_aux(FileName, FuncName, Args) ->
scan_func_dbg_aux(FileName, FuncName, Args).
scan_func_dbg_aux(FileName, FuncName, Args) ->
see_logic:generate_logical_function({FuncName, length(Args)},
Args,
FileName).
scan_func_str_args_aux(FileName, FuncName, Args) ->
scan_func_aux(FileName, FuncName, see_logic:parse_args(Args)).
scan_func_str_args_dbg_aux(FileName, FuncName, Args) ->
scan_func_dbg_aux(FileName, FuncName, see_logic:parse_args(Args)).
declutter_exp_list(List) when is_list(List) ->
lists:map(fun declutter_exp/1, List).
%% Strip one internal #expansion{} down to the public #exp_iface{} view;
%% anything else (e.g. an error term) is passed through untouched.
declutter_exp(#expansion{applys = Applys,
                         conds = Conds,
                         result = Res,
                         idioms = Idioms}) ->
    #exp_iface{var_defs = Applys,
               conds = Conds,
               result = Res,
               idioms = Idioms};
declutter_exp(Other) ->
    Other.
%% Inverse of declutter_exp_list/1: rebuild internal #expansion{} records
%% from a list of public #exp_iface{} records.
clutter_exp_list(ExpIfaces) when is_list(ExpIfaces) ->
    [clutter_exp(ExpIface) || ExpIface <- ExpIfaces].
%% Inverse of declutter_exp/1: rebuild an internal #expansion{} from a
%% public #exp_iface{}. Unlike declutter_exp/1 there is no pass-through
%% clause - a non-record argument is a programming error and should crash.
clutter_exp(#exp_iface{var_defs = Defs,
                       conds = Conds,
                       result = Res,
                       idioms = Idioms}) ->
    #expansion{applys = Defs,
               conds = Conds,
               result = Res,
               idioms = Idioms}.
%% Convert one internal #call_info{} to the public #call_iface{},
%% decluttering each of the three expansion lists along the way.
declutter_cif(#call_info{name = Name,
                         num_args = Arity,
                         pre_exp = Pre,
                         next_exp = Next,
                         post_exp = Post}) ->
    #call_iface{name = Name,
                num_args = Arity,
                pre_exp = declutter_exp_list(Pre),
                next_exp = declutter_exp_list(Next),
                post_exp = declutter_exp_list(Post)}.
%% Inverse of declutter_cif/1: rebuild an internal #call_info{} from the
%% public #call_iface{}, re-cluttering each expansion list.
clutter_cif(#call_iface{name = Name,
                        num_args = Arity,
                        pre_exp = Pre,
                        next_exp = Next,
                        post_exp = Post}) ->
    #call_info{name = Name,
               num_args = Arity,
               pre_exp = clutter_exp_list(Pre),
               next_exp = clutter_exp_list(Next),
               post_exp = clutter_exp_list(Post)}.
%% Build a public #module_iface{} for FileName: the state fields come from
%% model_info, the call list from decluttering each #call_info{} given.
declutter_mif(FileName, CallInfos) when is_list(CallInfos) ->
    StateFields = model_info:get_state_fields(FileName),
    CallIfaces = [declutter_cif(CallInfo) || CallInfo <- CallInfos],
    #module_iface{state_fields = StateFields,
                  call_list = CallIfaces}.
%% Inverse of declutter_mif/2 (modulo state_fields, which has no internal
%% counterpart): re-clutter every call in the module interface.
clutter_mif(#module_iface{call_list = CallIfaces}) ->
    [clutter_cif(CallIface) || CallIface <- CallIfaces].
%% This module transforms a CPL xml script into a graph
%% representation (during script parsing), that can be used to test
%% various graph properties like; is the graph acyclical, does
%% it contain unreachable states ...
%%
%% The graph can either be interpreted directly as a FSM (finite
%% state machine) when processing SIP request or the graph can be
%% transformed into a FSM implemented in source code (.erl) - each
%% script will then be handled by a specific .beam file, which should
%% execute faster than when run in an interpreter.
%%
%% Note: all xml CPL tags are represented with atoms ('...' may be
%% needed in some cases) without any changes - this makes it
%% easy to match the specification to the code.
%% Note: all graph nodes have a unique id, ids are list() of int()
%%       identifying which branches were selected to reach the
%% node, for example [1,1], [1,2], [1,1,1] ...
%% The incoming tag id is always = [1] and outgoing always =
%% [2], so it's easy to find the start node. Subactions
%% are numbered as [3], [4], ...
%% Note: subactions don't do anything except point to another node,
%%       therefore they aren't stored in the graph and the references
%% to [N] becomes a reference to the action which subaction [N]
%% contained / pointed to.
%%
%% To do:
%% * add more descriptive error messages, include line number,
%% tag/attribute name and value encountered
%% * the current implementation doesn't reject all unexpected sub tags
%%   and attributes - they are simply ignored. An error should be
%%   generated when they are encountered so that misspelt and
%%   unsupported tags/attributes can be detected - this is mainly a
%%   problem if scripts are hand coded or if they use extensions to
%%   RFC 3880
%% - code checks if sub tags name is allowed as element of a tag
%% - check for otherwise as last element (if otherwise sub tag is
%% used) is done
%% - checks are done for sub tags that occur multiple time when they
%% should only occur 0-1 times
%% * add time zone and summer/winter time support
%% * there is no check for overlapping time intervals - the cost (in
%% time) of this feature is high compared to gain received from
%% implementing it
%% * all atoms could be changed into macros so that the compiler
%% catches misspelt names - "undefined macro used"
%%--------------------------------------------------------------------
-module(xml_parse).
%%--------------------------------------------------------------------
%% External exports
%%--------------------------------------------------------------------
-export([
cpl_script_to_graph/1,
test/0
]).
%%--------------------------------------------------------------------
%% Internal exports
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Include files
%%--------------------------------------------------------------------
-include("cpl.hrl").
-include("xml_parse.hrl").
-include_lib("xmerl/include/xmerl.hrl").
%%--------------------------------------------------------------------
%% Records
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Macros
%%--------------------------------------------------------------------
%%====================================================================
%% External functions
%%====================================================================
%%--------------------------------------------------------------------
%% @spec (CPLscript) ->
%% [{VertexId, Label}]
%%
%% CPLscript = string() "xml data"
%%
%% VertexId = [integer()] "a unique node id"
%% Label = term() "the 'code' of the node"
%% Error = bad_xml |
%% cycle_detected_in_graph |
%% unreachable_nodes_detected |
%% atom()
%%
%% @throws {error, Error}
%%
%% @doc parse and validate CPLscript - check ranges and that there
%% are no cycles, return a graph that models the flow of the
%%      script - the graph will be used as an FSM by an interpreter
%% (and possibly to generate FSM .erl code) Note : Error =
%% bad_xml (xmerl_scan:string failed) |
%% cycle_detected_in_graph (add_edge/3 failed) |
%% unreachable_nodes_detected | .... and various other parse
%% error
%% @end
%%--------------------------------------------------------------------
cpl_script_to_graph(CPLscript) ->
    %% xmerl_scan:string/1 does not return {error, _} on malformed input -
    %% it raises exit({fatal, ...}) - so the previous {error, _} and
    %% {'EXIT', _} case clauses could never match, and a bad script crashed
    %% the caller instead of producing the documented {error, bad_xml}.
    %% Trap any scan failure here and turn it into the documented throw.
    ScanResult = try
                     xmerl_scan:string(CPLscript)
                 catch
                     _Class:_Reason -> scan_failed
                 end,
    case ScanResult of
        %% XXX Rest in {XMLtag, Rest} appears to be empty list, this
        %% is probably so, expect if partial parsing is done.
        {XMLtag, []} ->
            ParseState = parse_xml_main(XMLtag),
            G = ParseState#parse_state.current_graph,
            Vs = digraph:vertices(G),
            Nodes = [digraph:vertex(G, V) || V <- Vs],
            %% throws {error, unreachable_nodes_detected} on failure
            check_for_unreachable_nodes(Vs, G),
            Nodes;
        %% xmerl_scan failure, or a scan result with unparsed rest
        _ ->
            throw({error, bad_xml})
    end.
%%====================================================================
%% Behaviour functions
%%====================================================================
%%====================================================================
%% Internal functions
%%====================================================================
%%--------------------------------------------------------------------
%% @spec (Vs, G) -> term()
%%
%% Vs = [vertex()] "a list of node ids"
%% G = digraph() "the graph for the CPL script"
%%
%% @throws {error, unreachable_nodes_detected}
%%
%% @doc verify that there are no unreachable nodes in the final
%% graph. Note : this code only checks if all CPL script
%% nodes (tags) are reachable through graph edges, it
%% doesn't check if switch conditions may be ordered in such
%% a way that certain nodes can never be reached.
%% @end
%%--------------------------------------------------------------------
check_for_unreachable_nodes(Vs, G) ->
    %% Gather every node reachable from the <incoming/> start node [1]
    %% and from the <outgoing/> start node [2].
    FromIncoming = digraph_utils:reachable([[1]], G),
    FromOutgoing = digraph_utils:reachable([[2]], G),
    %% digraph_utils:reachable/2 always includes the start vertex itself,
    %% even when it is not a member of the graph. A proper <incoming/> or
    %% <outgoing/> subgraph contains at least a terminator node besides the
    %% start node ([[1],[1,1]] resp. [[2],[2,1]]), so a singleton result
    %% means "this entry point is not used by the script" and is dropped.
    Reachable = case {FromIncoming, FromOutgoing} of
                    {[_], [_]} -> [];
                    {[_], _}   -> FromOutgoing;
                    {_, [_]}   -> FromIncoming;
                    {_, _}     -> FromIncoming ++ FromOutgoing %% ids are unique, so plain append is safe
                end,
    %% every vertex in the graph must be reachable: compare the two vertex
    %% sets after sorting them into the same order
    case lists:sort(Vs) == lists:sort(Reachable) of
        true  -> ok;
        false -> throw({error, unreachable_nodes_detected})
    end.
%%--------------------------------------------------------------------
%% @spec (CPLscript) ->
%% #parse_state{}
%%
%% CPLscript = #xmlElement{} "the toplevel tag of the xml code"
%%
%% Reason = no_incoming_or_outgoing_tag | atom()
%%
%% @throws {error, Reason}
%%
%% @doc the cpl tag can contain subaction tags and an incoming
%% and/or outgoing tag Note : CPL expects tags in order:
%% ancillary, subaction, incoming, outgoing - the order of
%% incoming/outgoing isn't clearly defined in RFC 3880 and
%% not enforced in the code Note : the 'ancillary' tag is
%% ignored - it isn't defined to do anything in RFC 3880
%% @end
%%--------------------------------------------------------------------
parse_xml_main(#xmlElement{name = cpl} = CPLscript) ->
    %% there may be 0+ subactions.
    %% This must run before the incoming/outgoing parsing below, so that
    %% all subaction names are registered in
    %% #parse_state.subaction_name_id_mapping before any <sub> tag tries
    %% to look one up.
    SubActions = get_elements(CPLscript, subaction),
    ParseState = parse_xml_subactions(SubActions, initial_parse_state()),
    %% look for a incoming and outgoing xml tag, they are the two
    %% possible starting points (incoming / outgoing id = [1] / [2]),
    %% when a CPL script is run.
    %% Note: get_elements/2 is assumed to return at most one element for
    %% incoming/outgoing here - a script with duplicates would fail the
    %% [In] / [Out] match below (TODO confirm against get_elements/2).
    case {get_elements(CPLscript, incoming),
          get_elements(CPLscript, outgoing) } of
        {[], []} ->
            %% at least one entry point is mandatory
            throw({error, no_incoming_or_outgoing_tag});
        {[In], []} ->
            parse_xml(In, ParseState#parse_state{current_id = [1]});
        {[], [Out]} ->
            parse_xml(Out, ParseState#parse_state{current_id = [2]});
        {[In], [Out]} ->
            %% parse both subgraphs; the accumulated graph from the
            %% incoming branch is threaded into the outgoing branch
            PState = parse_xml(In, ParseState#parse_state{current_id = [1]}),
            parse_xml(Out, PState#parse_state{current_id = [2]})
    end.
%% @doc Build the #parse_state{} used as the starting point for parsing:
%%      an empty graph and an empty subaction-name -> node-id mapping.
initial_parse_state() ->
    #parse_state{
      %% subaction numbering starts here: parse_xml_subactions/2
      %% increments this, so the first subaction gets id [3], the next
      %% [4], and so on. The value is later overwritten with [1] / [2]
      %% before the incoming / outgoing subtree is parsed.
      current_id = [2],
      current_graph = xml_parse_graph:new_graph(),
      %% initiate the subaction name -> node id mapping DB (a plain
      %% key-value list, see parse_xml_subactions/2)
      subaction_name_id_mapping = []
     }.
%%--------------------------------------------------------------------
%% @spec (SubActionXmlParseTree, ParseState) -> #parse_state{}
%%
%% @doc parse the sub action xml parse tree, ParseState is used to
%% store the currently accumulated graph Note : On the
%% subject of allowing 'sub' tags to only reference already
%% defined subactions (see RFC 3880): - This ensured by two
%% parser features, the
%% #parse_state.subaction_name_id_mapping (see below) only
%% contains the subaction names of currently parsed
%% subactions, so lookup of later subactions will fail,
%% while self references in a sub tag will fail du to the
%% cycle detection (see xml_parse_graph.erl)
%% @end
%%--------------------------------------------------------------------
%% return the accumulated parse state when all tags have been processed
%% return the accumulated parse state when all tags have been processed
parse_xml_subactions([], ParseState) ->
    ParseState;
%% subactions can only be members of the cpl tag, this means that
%% they will have ids = [3], [4] .... [N]
parse_xml_subactions([#xmlElement{name = subaction, content = Content} = E | R], ParseState) ->
    %% the id attribute is mandatory - it is the name <sub ref="..."/>
    %% tags use to refer to this subaction
    SubactionName = case get_attribute(E, id) of
                        '#no_value' -> throw({error, subaction_tag_is_missing_the_id_attribute});
                        ID -> ID
                    end,
    %% NOTE(review): the callee name "unqiue" is misspelled ("unique") -
    %% it is defined elsewhere in this module, so the call is kept as is;
    %% renaming should be done at the definition and all call sites at once.
    case is_subaction_name_unqiue(SubactionName, ParseState) of
        true ->
            %% current_id is always a single-element list at this point
            %% ([2] initially, [N] after the previous subaction), so the
            %% next subaction simply gets [N + 1]
            [Index] = ParseState#parse_state.current_id,
            NewId = [Index + 1],
            %% register the name before parsing the content, so lookups of
            %% later (not yet parsed) subactions still fail as required
            Mapping = [{SubactionName, NewId} | ParseState#parse_state.subaction_name_id_mapping],
            %% the subaction doesn't do anything so the node isn't stored in the graph
            %% parse content of subaction
            ParseState2 = parse_xml(get_next_element(subaction, Content),
                                    ParseState#parse_state{current_id = NewId,
                                                           subaction_name_id_mapping = Mapping}),
            %% then parse the remaining subactions - current_id is reset to
            %% NewId so the next subaction continues the [N] numbering
            parse_xml_subactions(R, ParseState2#parse_state{current_id = NewId});
        false ->
            throw({error, subaction_tag_id_attribute_value_is_not_unique})
    end.
%%--------------------------------------------------------------------
%% @spec (TagElement, ParseState) -> #parse_state{}
%%
%% TagElement = #xmlElement{}
%% ParseState = #parse_state{}
%%
%% @doc Processes TagElement tag and it's containing tags. Results
%% are accumulated in ParseState while descending through
%% the xml parse tree
%% @end
%%--------------------------------------------------------------------
%% there are cases when a tag may be empty, and some kind of default
%% action is supposed to be taken, the terminator node signifies this.
%% a tag body may legally be empty - some default action is then taken at
%% run time; the terminator node marks such a dead end in the graph
parse_xml(empty, ParseState) ->
    ParseState2 = xml_parse_graph:add_node(ParseState, terminator),
    %% nothing more to parse in this branch, so return current parse state
    ParseState2;
%% --------------------- top level entry points
parse_xml(#xmlElement{name = incoming, content = Content}, ParseState) ->
    %% add node to graph, this doesn't do anything but it's needed as a start node
    ParseState2 = xml_parse_graph:add_node(ParseState, incoming),
    {ParseState3, _NextId} = next_id(ParseState2),
    parse_xml(get_next_element(incoming, Content), ParseState3);
%% top level action
parse_xml(#xmlElement{name = outgoing, content = Content}, ParseState) ->
    %% add node to graph, this doesn't do anything but it's needed as a start node
    ParseState2 = xml_parse_graph:add_node(ParseState, outgoing),
    {ParseState3, _NextId} = next_id(ParseState2),
    parse_xml(get_next_element(outgoing, Content), ParseState3);
%% --------------------- switches
parse_xml(#xmlElement{name = 'address-switch', content = Content} = E, ParseState) ->
    Field = attribute_to_atom(get_attribute(E, field)),
    %% attribute field is mandatory so it can't be = '#no_value'
    xml_parse_util:legal_value(Field, [origin, destination, 'original-destination']),
    %% 'tel' is a legal value in RFC 3880, but currently unsupported
    SubField = attribute_to_atom(get_attribute(E, subfield)),
    xml_parse_util:legal_value(SubField, ['address-type', user, host, port, display, password, '#no_value']),
    %% get the branching rules and the code for those nodes, so that they can be parsed
    {Conds, Targets} = get_cond(Content, 'address-switch', SubField, ParseState),
    %% node stores: type, request index, cond op + match val
    ParseState2 = xml_parse_graph:add_node(ParseState, 'address-switch', {Field, SubField}, Conds),
    %% process the destination nodes
    process_targets('address-switch', Targets, ParseState2);
parse_xml(#xmlElement{name = 'language-switch', content = Content}, ParseState) ->
    %% language-switch takes no attributes, only its sub tag conditions
    {Conds, Targets} = get_cond(Content, 'language-switch', no_args, ParseState),
    %% node stores: type, request index, cond op + match val
    ParseState2 = xml_parse_graph:add_node(ParseState, 'language-switch', Conds),
    process_targets('language-switch', Targets, ParseState2);
parse_xml(#xmlElement{name = 'priority-switch', content = Content}, ParseState) ->
    {Conds, Targets} = get_cond(Content, 'priority-switch', no_args, ParseState),
    %% node stores: type, request index, cond op + match val
    ParseState2 = xml_parse_graph:add_node(ParseState, 'priority-switch', Conds),
    process_targets('priority-switch', Targets, ParseState2);
parse_xml(#xmlElement{name = 'string-switch', content = Content} = E, ParseState) ->
    %% 'display' is also a legal value in RFC 3880, but it's not supported by SIP
    Field = normalize_string_switch__field(get_attribute(E, field)),
    %% attribute field is mandatory so it can't be = '#no_value'
    xml_parse_util:legal_value(Field, [subject, organization, 'user-agent']),
    {Conds, Targets} = get_cond(Content, 'string-switch', no_args, ParseState),
    %% node stores: type, request index, cond op + match val
    ParseState2 = xml_parse_graph:add_node(ParseState, 'string-switch', Field, Conds),
    process_targets('string-switch', Targets, ParseState2);
parse_xml(#xmlElement{name = 'time-switch', content = Content} = E, ParseState) ->
    %% If a script is uploaded with a "tzid" and "tzurl" which the CPL
    %% server does not recognize or cannot resolve, it SHOULD diagnose and
    %% reject this at script upload time. If neither "tzid" nor "tzurl" are
    %% present, all non-UTC times within this time switch should be
    %% interpreted as being "floating" times, i.e., that they are specified
    %% in the local time zone of the CPL server.
    %%
    %% Because of daylight-savings-time changes over the course of a
    %% year, it is necessary to specify time switches in a given
    %% time zone. UTC offsets are not sufficient, or a time-of-day
    %% routing rule which held between 9 am and 5 pm in the eastern
    %% United States would start holding between 8 am and 4 pm at the end
    %% of October.
    %% - RFC 3880
    %% XXX time zones are currently unsupported
    Tzid = get_attribute(E, tzid),
    Tzurl = get_attribute(E, tzurl),
    case {Tzid, Tzurl} of
        {'#no_value', '#no_value'} ->
            {InitialConds, Targets} = get_cond(Content, 'time-switch', no_args, ParseState),
            %% node stores: type, request index, cond op + match val
            TimeZone = #time_zone{tzid = Tzid, tzurl = Tzurl},
            %% check that no date-time or time values use leap seconds
            validate_no_leap_second_in_datetime_values(InitialConds),
            %% check that all dtstart < dtend values
            validate_dtstart_dtend(TimeZone, InitialConds),
            %% check that no intervals in reoccurrences can overlap
            validate_duration(InitialConds),
            %% check that dtstart, dtend and until date-time are in floating format
            validate_usage_of_floating_time_with_byxxx(InitialConds),
            %% check that bysetpos and lowest level of byxxx / freq can't generate to large selection sets
            validate_usage_of_bysetpos(InitialConds),
            %% preprocess "count" attribute in "time" tag
            Conds = preprocess_count_in_timeswitch(TimeZone, InitialConds),
            ParseState2 = xml_parse_graph:add_node(ParseState, 'time-switch', TimeZone, Conds),
            process_targets('time-switch', Targets, ParseState2);
        _ ->
            throw({error, time_switch_tag_tzid_and_tzurl_currently_unsupported})
    end;
%% --------------------- modifier
parse_xml(#xmlElement{name = location, content = Content} = E, ParseState) ->
    %% url is mandatory; priority and clear are optional, each combination
    %% of present/absent attributes gets its own record shape (record
    %% defaults fill in the absent ones)
    Location = case {get_attribute(E,url), get_attribute(E,priority), get_attribute(E,clear)} of
                   {'#no_value', _, _} ->
                       throw({error, location_tag_url_attribute_is_mandatory});
                   {URL, '#no_value', '#no_value'} ->
                       #location__attrs{url = check_url(URL)};
                   {URL, Prio, '#no_value'} ->
                       #location__attrs{url = check_url(URL), priority = check_prio_value(Prio)};
                   {URL, '#no_value', Clear} ->
                       #location__attrs{url = check_url(URL), clear = get_clear_value(Clear)};
                   {URL, Prio, Clear} ->
                       #location__attrs{url = check_url(URL), priority = check_prio_value(Prio),
                                        clear = get_clear_value(Clear)}
               end,
    NextId = ParseState#parse_state.current_id ++ [1],
    ParseState2 = xml_parse_graph:add_node(ParseState, location, {Location, NextId}),
    ParseState3 = ParseState2#parse_state{current_id = NextId},
    parse_xml(get_next_element(location, Content), ParseState3);
parse_xml(#xmlElement{name = lookup, content = Content} = E, ParseState) ->
    %% source is mandatory; timeout and clear are optional
    Lookup = case {get_attribute(E,source), get_attribute(E,timeout), get_attribute(E,clear)} of
                 {'#no_value', _, _} ->
                     throw({error, lookup_tag_source_attribute_is_mandatory});
                 {Source, '#no_value', '#no_value'} ->
                     #lookup__attrs{source = Source};
                 {Source, Timeout, '#no_value'} ->
                     #lookup__attrs{source = Source, timeout = list_to_integer(Timeout)};
                 {Source, '#no_value', Clear} ->
                     #lookup__attrs{source = Source, clear = get_clear_value(Clear)};
                 {Source, Timeout, Clear} ->
                     #lookup__attrs{source = Source, timeout = list_to_integer(Timeout),
                                    clear = get_clear_value(Clear)}
             end,
    %% XXX currently only "registration" is supported, but URIs (http)
    %% are also supported (RFC 3880 chapter 5.2 p23)
    xml_parse_util:legal_value(Lookup#lookup__attrs.source, ["registration"]),
    {Conds, Targets} = get_cond(Content, lookup, no_args, ParseState),
    %% node stores: type, lookup index, cond ops
    ParseState2 = xml_parse_graph:add_node(ParseState, lookup, Lookup ,Conds),
    process_targets(lookup, Targets, ParseState2);
parse_xml(#xmlElement{name = 'remove-location', content = Content} = E, ParseState) ->
    %% without a location attribute, all locations are removed (record default)
    RmLocation = case get_attribute(E, location) of
                     '#no_value' ->
                         #remove_location__attrs{};
                     URL ->
                         #remove_location__attrs{location = check_url(URL)}
                 end,
    NextId = ParseState#parse_state.current_id ++ [1],
    ParseState2 = xml_parse_graph:add_node(ParseState, 'remove-location', {RmLocation, NextId}),
    ParseState3 = ParseState2#parse_state{current_id = NextId},
    parse_xml(get_next_element('remove-location', Content), ParseState3);
%% --------------------- sub
parse_xml(#xmlElement{name = sub} = E, ParseState) ->
    %% a sub node only points at the node of the referenced subaction -
    %% referencing a not-yet-parsed subaction fails in get_subaction_id/2,
    %% self references fail in the cycle detection (xml_parse_graph)
    NextId = case get_attribute(E, ref) of
                 '#no_value' ->
                     throw({error, sub_tag_is_missing_ref_attribute});
                 Ref ->
                     get_subaction_id(ParseState, Ref)
             end,
    ParseState2 = xml_parse_graph:add_node(ParseState, sub, NextId),
    %% nothing more to parse in this branch, so return current parse state
    ParseState2;
%% --------------------- action
%% empty Comment strings are allowed
parse_xml(#xmlElement{name = log, content = Content} = E, ParseState) ->
    Log = case {get_attribute(E, name), get_attribute(E, comment)} of
              {'#no_value', '#no_value'} ->
                  throw({error, log_tag_contained_no_name_or_comment_attribute});
              {'#no_value', Comment} ->
                  #log__attrs{name = is_log_dest(default), comment = Comment};
              {_Name, '#no_value'} ->
                  throw({error, log_tag_used_without_comment_attribute});
              {Name, Comment} ->
                  #log__attrs{name = is_log_dest(Name), comment = Comment}
          end,
    NextId = ParseState#parse_state.current_id ++ [1],
    ParseState2 = xml_parse_graph:add_node(ParseState, log, {Log, NextId}),
    ParseState3 = ParseState2#parse_state{current_id = NextId},
    parse_xml(get_next_element(log, Content), ParseState3);
%% XXX check for empty / incorrect email url (UrlStr) ?
parse_xml(#xmlElement{name = mail, content = Content} = E, ParseState) ->
    Mail = case get_attribute(E, url) of
               '#no_value' ->
                   throw({error, mail_tag_contained_no_url_attribute});
               UrlStr ->
                   UrlStr
           end,
    NextId = ParseState#parse_state.current_id ++ [1],
    ParseState2 = xml_parse_graph:add_node(ParseState, mail, {Mail, NextId}),
    ParseState3 = ParseState2#parse_state{current_id = NextId},
    parse_xml(get_next_element(mail, Content), ParseState3);
parse_xml(#xmlElement{name = proxy, content = Content} = E, ParseState) ->
    Recurse = case get_attribute(E,recurse) of
                  '#no_value' -> yes; % default
                  "yes" -> yes;
                  "no" -> no;
                  _ -> throw({error, proxy_tag_recurse_attribute_set_to_a_non_legal_value})
              end,
    Ordering = case get_attribute(E,ordering) of
                   '#no_value' -> parallel;
                   "parallel" -> parallel;
                   "sequential" -> sequential;
                   "first-only" -> 'first-only';
                   _ -> throw({error, proxy_tag_ordering_attribute_set_to_a_non_legal_value})
               end,
    {Conds, Targets} = get_cond(Content, proxy, no_args, ParseState),
    %% Note that if the value of "recurse" is "yes", the "redirection"
    %% output to the script is never taken. -RFC 3880 chapter 6.1 p27
    case {lists:keysearch(redirection, 1, Conds), Recurse} of
        {false, _} -> ok;
        {_, no} -> ok;
        {_, yes} -> throw({error, proxy_tag_recurse_attribute_is_yes_can_not_be_used_with_sub_tag_redirect})
    end,
    %% If this parameter [timeout] is not specified, the default value is 20
    %% seconds if the "proxy" node has a "noanswer" or "default" output
    %% specified; otherwise the server SHOULD allow the call to ring for a
    %% reasonably long period of time (to the maximum extent that server
    %% policy allows).
    %% - RFC 3880 chapter 6.1 p27
    Timeout = case get_attribute(E,timeout) of
                  '#no_value' ->
                      %% bug fix: this previously tested the misspelled atom
                      %% 'noanswear'. The output name defined by RFC 3880 is
                      %% "noanswer" (the tag-name atom produced by the xml
                      %% parser), so the misspelled atom could never match
                      %% and a proxy with a noanswer output (and no default)
                      %% incorrectly got the server_max timeout instead of
                      %% the RFC-mandated 20 second default.
                      case {lists:keymember(noanswer, 1, Conds), lists:keymember(default, 1, Conds)} of
                          {true, false} -> 20;
                          {false, true} -> 20;
                          {true, true} -> 20;
                          {false, false} -> server_max
                      end;
                  TimeoutStr ->
                      try
                          list_to_integer(TimeoutStr)
                      catch
                          %% run time error
                          error: _ -> throw({error, proxy_tag_timeout_attribute_not_a_number})
                      end
              end,
    ProxyAttrs = #proxy__attrs{timeout = Timeout, recurse = Recurse, ordering = Ordering},
    %% node stores: type, lookup index, cond ops
    ParseState2 = xml_parse_graph:add_node(ParseState, proxy, ProxyAttrs ,Conds),
    process_targets(proxy, Targets, ParseState2);
parse_xml(#xmlElement{name = redirect} = E, ParseState) ->
    Permanent = case get_attribute(E, permanent) of
                    '#no_value' -> no; % default
                    "yes" -> yes;
                    "no" -> no;
                    _ -> throw({error, redirect_tag_permanent_attribute_set_to_a_non_legal_value})
                end,
    ParseState2 = xml_parse_graph:add_node(ParseState, redirect, {Permanent, terminate}),
    %% nothing more to parse in this branch, so return current parse state
    ParseState2;
parse_xml(#xmlElement{name = reject} = E, ParseState) ->
    StatusReason =
        case {get_attribute(E, status), get_attribute(E, reason)} of
            {'#no_value', _} ->
                throw({error, reject_tag_status_attribute_is_mandatory});
            {Status, '#no_value'} ->
                #reject__attrs{status = xml_parse_util:status_code_to_sip_error_code(Status), reason = ""};
            {Status, Reason} ->
                #reject__attrs{status = xml_parse_util:status_code_to_sip_error_code(Status), reason = Reason}
        end,
    ParseState2 = xml_parse_graph:add_node(ParseState, reject, {StatusReason, terminate}),
    %% nothing more to parse in this branch, so return current parse state
    ParseState2;
%% redundant, should never occur - as the current code checks its destination tag
parse_xml(_E, _ParseState) ->
    %% note: the (misspelled) reason atom 'unkown_cpl_tag_encountered' is
    %% kept as is, since callers may match on it
    throw({error, unkown_cpl_tag_encountered}).
%%--------------------------------------------------------------------
%% @spec (Conds) ->
%% ok
%%
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc the DATE-TIME elements used in CPL (RFC 3880)
%% "time-switch" elements support the usage of leap seconds
%% (see RFC 2445 chapter 4.3.12) i.e. times like 23:59:60.
%% This is for various reasons - simplicity of CPL
%% interpreter code and lack of proper erlang/OTP support,
%% impractical to support as script input, this function is
%% therefor used to reject any time values containing leap
%% seconds.
%% @end
%%--------------------------------------------------------------------
validate_no_leap_second_in_datetime_values(Conds) ->
    %% Walk every {Cond, _Dest} branch; no_leap_sec/1 throws on the first
    %% date-time that uses a leap second. The destinations are irrelevant.
    CheckOne = fun({Cond, _Dest}) ->
                       no_leap_sec(Cond)
               end,
    lists:foreach(CheckOne, Conds).
%% Reject leap-second times (23:59:60 style) in one time-switch condition;
%% non time-switch conditions pass through unchecked. Throws
%% {error, leap_second_input_not_supported} via no_leap_sec_datetime/1.
no_leap_sec(Cond) ->
    %% dtstart is always a date-time (or absent for non time-switch conds)
    no_leap_sec_datetime(time_cond_dtstart(Cond)),
    %% only the dtend form carries a date-time; durations cannot hold one
    case time_cond_dtend_duration(Cond) of
        {dtend, DTEnd} -> no_leap_sec_datetime(DTEnd);
        {duration, _} -> ok;
        not_time_switch_cond -> ok
    end,
    %% an "until" may be a full date-time or a bare {Y,M,D} date; only the
    %% former can contain a (leap) second. Record clause must come first:
    %% a #date_time{} is itself a tuple and could match a tuple pattern.
    case time_cond_until_count(Cond) of
        {until, Until} when is_record(Until, date_time) ->
            no_leap_sec_datetime(Until);
        {until, {_Year, _Month, _Day}} -> ok;
        {count, _Count} -> ok;
        repeat_forever -> ok;
        undef -> ok;
        not_time_switch_cond -> ok
    end.

%% dtstart field of any time_switch__cond_N record.
time_cond_dtstart(#time_switch__cond_2{dtstart = DT}) -> DT;
time_cond_dtstart(#time_switch__cond_5{dtstart = DT}) -> DT;
time_cond_dtstart(#time_switch__cond_7{dtstart = DT}) -> DT;
time_cond_dtstart(#time_switch__cond_8{dtstart = DT}) -> DT;
time_cond_dtstart(_Other) -> not_time_switch_cond.

%% dtend_duration field of any time_switch__cond_N record.
time_cond_dtend_duration(#time_switch__cond_2{dtend_duration = DD}) -> DD;
time_cond_dtend_duration(#time_switch__cond_5{dtend_duration = DD}) -> DD;
time_cond_dtend_duration(#time_switch__cond_7{dtend_duration = DD}) -> DD;
time_cond_dtend_duration(#time_switch__cond_8{dtend_duration = DD}) -> DD;
time_cond_dtend_duration(_Other) -> not_time_switch_cond.

%% until_count field of the time_switch__cond_N records that have one;
%% cond_2 has no such field and yields 'undef'.
time_cond_until_count(#time_switch__cond_2{}) -> undef;
time_cond_until_count(#time_switch__cond_5{until_count = UC}) -> UC;
time_cond_until_count(#time_switch__cond_7{until_count = UC}) -> UC;
time_cond_until_count(#time_switch__cond_8{until_count = UC}) -> UC;
time_cond_until_count(_Other) -> not_time_switch_cond.
%% Throw if a #date_time{} uses second = 60 (a leap second); accept the
%% 'not_time_switch_cond' marker produced by no_leap_sec/1. Any second
%% outside 0..60 is invalid input and crashes, as intended.
no_leap_sec_datetime(not_time_switch_cond) ->
    ok;
no_leap_sec_datetime(#date_time{time = {_Hour, _Min, Sec}}) ->
    case Sec of
        60 -> throw({error, leap_second_input_not_supported});
        _ when Sec >= 0, Sec =< 59 -> ok
    end.
%%--------------------------------------------------------------------
%% @spec (TimeZone, Conds) ->
%% ok
%%
%% TimeZone = term() "currently not supported"
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc     examine all "time" elements in a "time-switch" with
%%          ts_datetime:dtstart_lt_dtend/3, to see if all dtstart -
%%          dtend pairs have the property; dtstart `<' dtend. Throw an
%%          exception if they don't conform. Note : see cpl/README
%% about date-time format limitations, in regard to floating
%% date-time values without time zone settings.
%% @end
%%--------------------------------------------------------------------
%% Verify dtstart < dtend for every {DTStart, DTEnd} pair found in the
%% time-switch conditions; ok when none exist (lists:all/2 on [] is
%% true, so no special empty case is needed).
validate_dtstart_dtend(TimeZone, Conds) ->
    StartBeforeEnd =
        fun({Start, End}) ->
                ts_datetime:dtstart_lt_dtend(TimeZone, Start, End)
        end,
    case lists:all(StartBeforeEnd, get_dtstart_and_dtend(Conds)) of
        true ->
            ok;
        false ->
            throw({error, dtstart_not_less_than_dtend_attribute_in_time_switch_tag})
    end.
%% get_dtstart_and_dtend(Conds)
%%
%% Purpose : collect the {DTStart, DTEnd} pair of every time-switch
%%           condition that uses a "dtend" attribute. Conditions using
%%           "duration" instead, and non time-switch conditions,
%%           contribute nothing.
%% Returns : [{DTStart, DTEnd}] (in reverse order of Conds; callers
%%           only run order-independent checks on the result)
get_dtstart_and_dtend(Conds) ->
    F = fun({Cond, _Dest}, Acc) when is_record(Cond, time_switch__cond_2) ->
                DTStart = Cond#time_switch__cond_2.dtstart,
                case Cond#time_switch__cond_2.dtend_duration of
                    {dtend, Time} -> [{DTStart, Time} | Acc];
                    {duration, _} -> Acc
                end;
           ({Cond, _Dest}, Acc) when is_record(Cond, time_switch__cond_5) ->
                DTStart = Cond#time_switch__cond_5.dtstart,
                case Cond#time_switch__cond_5.dtend_duration of
                    {dtend, Time} -> [{DTStart, Time} | Acc];
                    {duration, _} -> Acc
                end;
           ({Cond, _Dest}, Acc) when is_record(Cond, time_switch__cond_7) ->
                DTStart = Cond#time_switch__cond_7.dtstart,
                case Cond#time_switch__cond_7.dtend_duration of
                    {dtend, Time} -> [{DTStart, Time} | Acc];
                    {duration, _} -> Acc
                end;
           ({Cond, _Dest}, Acc) when is_record(Cond, time_switch__cond_8) ->
                DTStart = Cond#time_switch__cond_8.dtstart,
                case Cond#time_switch__cond_8.dtend_duration of
                    {dtend, Time} -> [{DTStart, Time} | Acc];
                    {duration, _} -> Acc
                end;
           %% not a time-switch condition - skip
           (_, Acc) ->
                Acc
        end,
    lists:foldl(F, [], Conds).
%%--------------------------------------------------------------------
%% @spec (Conds) ->
%% ok
%%
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc checks that all durations / "dtend - dtstart" periods are
%% short enough to never overlap
%% @end
%%--------------------------------------------------------------------
%% Ensure every time-switch condition has a duration / dtend-dtstart
%% period short enough to never overlap (checked by
%% ts_duration:valid_duration/1); other conditions always pass.
validate_duration(Conds) ->
    DurationOk =
        fun({Cond, _Dest}) when is_record(Cond, time_switch__cond_2);
                                is_record(Cond, time_switch__cond_5);
                                is_record(Cond, time_switch__cond_7);
                                is_record(Cond, time_switch__cond_8) ->
                ts_duration:valid_duration(Cond);
           (_) ->
                true
        end,
    case lists:all(DurationOk, Conds) of
        true ->
            ok;
        false ->
            throw({error, duration_too_long})
    end.
%%--------------------------------------------------------------------
%% @spec (InitialConds) ->
%% ok
%%
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc check that dtstart and date-time values are in floating
%% format if byxxx parameters are used in a time tag as
%% specified by iCalendar; "When used with a recurrence
%% rule, the 'DTSTART' and 'DTEND' properties MUST be
%% specified in local time ..." - RFC 2445 chapter 4.8.5.4
%%          page 117. This constraint is mainly to make
%% calculations of reoccurring periods with dtstart as
%% offset unambiguous, as the byxxx parameters processes
%% floating (local) wall clock time and use dtstart
%% date-time to initiate undefined time values in their
%% reoccurrence calculations Note : iCalendar require this
%% for _all_ reoccurrences and for both dtstart and dtend,
%% but this implementation only requires this for dtstart
%% used with byxxx parameters
%% @end
%%--------------------------------------------------------------------
%% Conditions that use byxxx parameters (cond_7 / cond_8 records) must
%% have a floating (local time) dtstart; all other conditions pass.
validate_usage_of_floating_time_with_byxxx(Conds) ->
    DtstartFloating =
        fun({Cond, _Dest}) when is_record(Cond, time_switch__cond_7);
                                is_record(Cond, time_switch__cond_8) ->
                all_floating(Cond);
           (_) ->
                true
        end,
    case lists:all(DtstartFloating, Conds) of
        true ->
            ok;
        false ->
            throw({error, time_tag_using_byxxx_can_not_use_utc_date_time_values_in_dtstart})
    end.
%% true when a #date_time{} is in floating (local wall clock) format,
%% false when it is an explicit UTC time; any other type value crashes.
is_floating(#date_time{type = floating}) -> true;
is_floating(#date_time{type = utc}) -> false.
%% Check the floating-time requirement for one time-switch condition.
%% Only dtstart is inspected - this implementation deliberately does
%% not check dtend (see the @doc note above about relaxing iCalendar's
%% "all reoccurrences" requirement).
all_floating(TimeSwitchCond) ->
    is_floating(time_switch:get_dtstart(TimeSwitchCond)).
%%--------------------------------------------------------------------
%% @spec (InitialConds) ->
%% ok
%%
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc
%% @end
%%--------------------------------------------------------------------
%% bysetpos only exists in time_switch__cond_8 records; for those,
%% interpret_time:is_bysetpos_usable/1 decides whether the combination
%% of bysetpos / byxxx / freq is acceptable. Other conditions pass.
validate_usage_of_bysetpos(Conds) ->
    BysetposOk =
        fun({Cond, _Dest}) when is_record(Cond, time_switch__cond_8) ->
                interpret_time:is_bysetpos_usable(Cond);
           (_) ->
                true
        end,
    case lists:all(BysetposOk, Conds) of
        true ->
            ok;
        false ->
            throw({error, bysetpos_combined_with_lowest_byxxx_or_freq_yields_to_large_selection_set})
    end.
%%--------------------------------------------------------------------
%% @spec (TimeZone, Conds) ->
%% NewList
%%
%% TimeZone = term() "currently not supported"
%% Conds = [{Cond, Dest}] "from CondVal in {CondVal, Targets} return value of get_cond/4"
%%
%% NewList = [{Cond, Dest}] "an updated version of Conds"
%%
%% @doc check if a "time-switch" tag contains the "count"
%% attribute in any of it's "time" conditions, if it does -
%% interpret_time:get_count_ranges_X/2 will be used to
%% calculate all the intervals as specified. This would
%% otherwise need to be done each time a "count" is
%% processed by the interpreter which is a O(N) procedure -
%% all possible intervals between dtstart - current need to
%% be checked.
%% @end
%%--------------------------------------------------------------------
preprocess_count_in_timeswitch(TimeZone, Conds) ->
    %% For each time-switch condition limited by {count, N}, precompute
    %% its time_ranges field with the matching
    %% interpret_time:get_count_ranges_N/2 helper, so the interpreter
    %% does not have to redo the O(N) interval scan on every request.
    %% time_switch__cond_2 has no until_count field, so it has no
    %% clause here and falls through the final pass-through clause.
    F = fun({Cond, Dest}) when is_record(Cond, time_switch__cond_5) ->
                case Cond#time_switch__cond_5.until_count of
                    {count, _} ->
                        NewCond = Cond#time_switch__cond_5{time_ranges =
                                                           interpret_time:get_count_ranges_5(TimeZone, Cond)},
                        {NewCond, Dest};
                    _ ->
                        {Cond, Dest}
                end;
           ({Cond, Dest}) when is_record(Cond, time_switch__cond_7) ->
                case Cond#time_switch__cond_7.until_count of
                    {count, _} ->
                        NewCond = Cond#time_switch__cond_7{time_ranges =
                                                           interpret_time:get_count_ranges_7(TimeZone, Cond)},
                        {NewCond, Dest};
                    _ ->
                        {Cond, Dest}
                end;
           ({Cond, Dest}) when is_record(Cond, time_switch__cond_8) ->
                case Cond#time_switch__cond_8.until_count of
                    {count, _} ->
                        NewCond = Cond#time_switch__cond_8{time_ranges =
                                                           interpret_time:get_count_ranges_8(TimeZone, Cond)},
                        {NewCond, Dest};
                    _ ->
                        {Cond, Dest}
                end;
           %% ignore non-count time-switch elements
           ({Cond, Dest}) ->
                {Cond, Dest}
        end,
    lists:map(F, Conds).
%%--------------------------------------------------------------------
%% @spec (ParentSwitchName, Targets, ParseState) -> #parse_state{}
%%
%% ParentSwitchName = term()
%% Targets = term() "a list of #xmlElement{} contained inside a switch tag"
%% ParseState = #parse_state{}
%%
%% @doc This function takes Targets - the xml code for the
%% possible action a certain xml rule (graph node) can take,
%% and parses them into nodes. ParseState contains the
%% currently parsed graph, the return value will contain
%% ParseState + nodes found in Targets.
%% @end
%%--------------------------------------------------------------------
%% Parse each destination element in Targets into graph nodes. Child
%% node N of the current node gets id = current_id ++ [N] (1-based);
%% the parse state is threaded through every child in order and the
%% final accumulated state is returned.
process_targets(ParentSwitchName, Targets, ParseState) ->
    BaseId = ParseState#parse_state.current_id,
    HandleTarget =
        fun(Target, {StateAcc, Index}) ->
                StateWithId = StateAcc#parse_state{current_id = BaseId ++ [Index]},
                NextElement = get_next_element(ParentSwitchName, Target#xmlElement.content),
                {parse_xml(NextElement, StateWithId), Index + 1}
        end,
    {ResultState, _NextIndex} = lists:foldl(HandleTarget, {ParseState, 1}, Targets),
    ResultState.
%%--------------------------------------------------------------------
%% @spec (ParentTagName, Content) ->
%% #xmlElement{} |
%% empty
%%
%% ParentTagName = atom()
%% Content = ParentContent | SubTagContent
%% ParentContent = term() "xml parse tree inside parent"
%% SubTagContent = term() "xml parse tree inside parents (switch) sub tag"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc return the next node (tag) for a tag type that has a
%% single destination - SubTagContent of sub tags of
%% switches are also handled by this function Note :
%% ParentTagName are listed one by one, to simplify handling
%% them individually - there could be cases where extension
%% tags don't support the same destination node (tag) set
%% @end
%%--------------------------------------------------------------------
%% Dispatch on the parent tag type: plain single-destination tags hand
%% their content straight to get_next_element/1, switch tags (whose
%% Content is the content of a switch sub tag) go through
%% get_next_switch_element/1, anything else is rejected.
%% The tag names are listed explicitly to keep per-tag handling easy
%% to change later (e.g. extension tags with other destination sets).
get_next_element(Action, Content) ->
    SingleDestTags = [subaction, incoming, outgoing, location,
                      'remove-location', log, mail],
    SwitchTags = ['address-switch', 'language-switch', 'priority-switch',
                  'string-switch', 'time-switch', lookup, proxy],
    case lists:member(Action, SingleDestTags) of
        true ->
            get_next_element(Content);
        false ->
            case lists:member(Action, SwitchTags) of
                true ->
                    get_next_switch_element(Content);
                false ->
                    throw({error, tag_is_not_a_single_destination_node})
            end
    end.
%% get_next_element(ParentContent)
%%
%% Purpose : from the child content of a single-destination tag, return
%%           the single destination #xmlElement{}, or the atom 'empty'
%%           when there is none. Only xmlElement records are
%%           considered; other parse data (e.g. xmlText whitespace) is
%%           ignored.
%% Returns : #xmlElement{} | empty
%% Throws  : {error, tag_contains_ilegal_tag} |
%%           {error, tag_can_only_contain_a_single_tag}
get_next_element(ParentContent) ->
    %% ignore non xmlElement parse data
    %% XXX xmlText is probably the only parse data that should be ignored
    Elements = [E || E <- ParentContent, is_record(E, xmlElement)],
    case Elements of
        [] ->
            empty;
        [Element] ->
            %% check that destination is legal, some nodes like incoming,
            %% subaction, cpl and outgoing are not allowed as sub tags (destination nodes)
            NextTagType = Element#xmlElement.name,
            LegalDestinations =
                ['address-switch', 'language-switch', 'priority-switch', 'string-switch',
                 'time-switch', location, lookup, 'remove-location', sub, log, mail, proxy,
                 redirect, reject],
            case lists:member(NextTagType, LegalDestinations) of
                true ->
                    Element;
                false ->
                    %% fix: wrap the reason in {error, _} like every other
                    %% parse error thrown by this module (was a bare
                    %% {tag_contains_ilegal_tag} tuple)
                    throw({error, tag_contains_ilegal_tag})
            end;
        _ ->
            throw({error, tag_can_only_contain_a_single_tag})
    end.
%% Destination content of a switch sub tag is checked by the same rules
%% as a single-destination tag; kept as a separate name so switch
%% handling can diverge later without touching callers.
get_next_switch_element(SubTagContent) ->
    get_next_element(SubTagContent).
%%--------------------------------------------------------------------
%% @spec (Element, AttrName) -> '#no_value' | string()
%%
%% Element = #xmlElement{}
%% AttrName = atom()
%%
%% @doc return the value of a attribute in a xml tag, e.g.
%% get_attribute(E,bar) in <foo bar="..."> ... </foo> Note :
%% xmlElement attributes can be = IOlist() (see erlang
%% module OTP docs in R10B) | atom() | integer()
%% @end
%%--------------------------------------------------------------------
%% get_attribute(Element, AttrName)
%%
%% Purpose : fetch the value of attribute AttrName from xml tag
%%           Element, normalized to a flat string - xmerl may store
%%           the raw value as an atom(), integer() or iolist().
%% Returns : string() | '#no_value' (attribute not present)
get_attribute(Element, AttrName) when is_record(Element, xmlElement), is_atom(AttrName) ->
    Attributes = Element#xmlElement.attributes,
    case lists:keyfind(AttrName, #xmlAttribute.name, Attributes) of
        false ->
            '#no_value';
        Attr ->
            Value = Attr#xmlAttribute.value,
            if
                is_atom(Value) -> atom_to_list(Value);
                is_integer(Value) -> integer_to_list(Value);
                is_list(Value) -> xml_parse_util:iolist_to_str(Value)
            end
    end.
%%--------------------------------------------------------------------
%% Function:
%% Descrip.:
%% Returns :
%%--------------------------------------------------------------------
%% Map an address-switch field/subfield attribute value (string) to its
%% atom form. '#no_value' (attribute absent) passes through unchanged;
%% any unknown string crashes.
attribute_to_atom('#no_value') ->
    '#no_value';
attribute_to_atom(Str) when is_list(Str) ->
    Mapping = [{"address-type", 'address-type'},
               {"user", user},
               {"host", host},
               {"port", port},
               {"tel", tel},
               {"display", display},
               {"password", password},
               {"origin", origin},
               {"destination", destination},
               {"original-destination", 'original-destination'}],
    {_, Atom} = lists:keyfind(Str, 1, Mapping),
    Atom.
%%--------------------------------------------------------------------
%% @spec (Element, SubElementName) -> [#xmlElement{}]
%%
%% Element = #xmlElement{}
%% SubElementName = atom() "name of the tag/s contained in Element"
%%
%% @doc retrieve all xml elements named SubElementName from the
%% contents of Element (for example a switch condition for a
%% address-switch tag)
%% @end
%%--------------------------------------------------------------------
%% Return every xmlElement named SubElementName found directly in the
%% content of Element (e.g. all switch conditions of a switch tag);
%% non-element parse data (xmlText etc.) is skipped.
get_elements(Element, SubElementName) when is_record(Element, xmlElement), is_atom(SubElementName) ->
    IsWanted = fun(E) ->
                       is_record(E, xmlElement) andalso
                           E#xmlElement.name =:= SubElementName
               end,
    lists:filter(IsWanted, Element#xmlElement.content).
%%--------------------------------------------------------------------
%% @spec (ParseState, Ref) ->
%% NodeId
%%
%% ParseState = #parse_state{}
%% Ref = string() "the symbolic name of a subaction used in a ``<sub ref ...>'' tag"
%%
%% NodeId = term()
%% Reason = integer()
%%
%% @throws {error, Reason}
%%
%% @doc find the node id of the subaction named Ref
%% @end
%%--------------------------------------------------------------------
%% Look up the node id of the subaction whose symbolic name is Ref
%% (from a <sub ref="..."> tag); throws when Ref is unknown, which
%% also covers forward references - the mapping only holds subactions
%% already parsed.
get_subaction_id(ParseState, Ref) ->
    Mapping = ParseState#parse_state.subaction_name_id_mapping,
    case lists:keyfind(Ref, 1, Mapping) of
        {Ref, NodeId} ->
            NodeId;
        false ->
            throw({error, sub_tag_ref_attribute_referenced_unkown_or_later_defined_subaction})
    end.
%%--------------------------------------------------------------------
%% @spec (Conditions, SwitchName, ExtraArgs, ParseState) ->
%% {CondVal, Targets}
%%
%% Conditions = term() "#xmlElement.content"
%% SwitchName = 'address-switch' |
%% 'language-switch' |
%% 'priority-switch' |
%% 'string-switch' |
%% 'time-switch' |
%% lookup |
%% proxy
%% ExtraArgs = term() "includes any other switch specific arguments"
%% ParseState = #parse_state{}
%%
%% Targets = #xmlElement{} "from Conditions - the process_targets/2 call does the checking of"
%% CondVal = term() "value returned"
%%
%% @doc retrieve the match operator and value (to compare request
%% against), as well as the destination of a successful
%% match. Note : CondVal depends on the switch type
%% (SwitchName), as seen below:
%% * address-switch CondVal = list() of {{address__is, Val},
%% Dest} | {{address__contains, Val}, Dest} |
%% {{'address__subdomain-of', Val}, Dest} | {'not-present',
%% Dest} | {otherwise, Dest} * language-switch CondVal =
%% list() of {{language__matches, Val}, Dest} |
%% {'not-present', Dest} | {otherwise, Dest} *
%% priority-switch CondVal = list() of {{priority__less,
%% Val}, Dest} | {{priority__greater, Val}, Dest} |
%% {{priority__equal, Val}, Dest} | {otherwise, Dest} *
%% string-switch CondVal = list() of {{string__is, Val},
%% Dest} | {{string__contains, Val}, Dest} | {'not-present',
%% Dest} | {otherwise, Dest} * time-switch CondVal = list()
%% of {time_switch__cond_8 record(), Dest} |
%% {time_switch__cond_7 record(), Dest} |
%% {time_switch__cond_5 record(), Dest} |
%% {time_switch__cond_4 record(), Dest} |
%% {time_switch__cond_2 record(), Dest} | {otherwise, Dest}
%% * lookup CondVal = list() of {success, Dest} | {notfound,
%% Dest} | {failure, Dest} * proxy CondVal = list() of
%% {busy, Dest} | {noanswer, Dest} | {redirection, Dest} |
%% {failure, Dest} | {default, Dest}
%% @end
%%--------------------------------------------------------------------
%% Entry point: collect {CondVal, NodeId} pairs and their target
%% elements from the children of a switch tag, then run the structural
%% checks ("otherwise" must be last, unique sub tags must not repeat).
get_cond(Conditions, SwitchName, ExtraArgs, ParseState) ->
    {CondList, _Targets} = Result =
        get_cond(Conditions, SwitchName, ExtraArgs, ParseState, 1, {[], []}),
    ok = is_otherwise_cond_last(CondList),
    ok = check_for_duplicates(SwitchName, CondList),
    Result.
%% "The output "otherwise", which MUST be the last output specified if it
%% is present" - RFC 3880 chapter 4 p8
%% throw an error if an otherwise tag is found and it isn't the last element in Conds
%% ok when no {otherwise, _} entry is followed by further conditions;
%% otherwise throws (RFC 3880 chapter 4 p8: "otherwise" MUST be last).
is_otherwise_cond_last([{otherwise, _Dest} | [_ | _]]) ->
    throw({error, otherwise_sub_tag_is_not_last_in_switch_tag});
is_otherwise_cond_last([_Cond | Rest]) ->
    is_otherwise_cond_last(Rest);
is_otherwise_cond_last([]) ->
    ok.
%% throw an exception if a switch tag (multiple destination tag) contains
%% multiple instances of a sub tag, that should only occur once in the
%% switch - e.g. "<proxy> <busy/> <busy/> </proxy>"
%% For the sub tags that may occur at most once in the given switch
%% type, verify none of them appears twice in Conds. is_duplicate/2 is
%% pure, so short-circuiting with lists:any/2 gives the same result as
%% checking every tag.
check_for_duplicates(SwitchName, Conds) ->
    UniqueSubTags =
        case SwitchName of
            proxy -> [busy, noanswer, redirection, failure, default];
            lookup -> [success, notfound, failure];
            'time-switch' -> [otherwise];
            'string-switch' -> ['not-present', otherwise];
            'priority-switch' -> [otherwise];
            'language-switch' -> ['not-present', otherwise];
            'address-switch' -> ['not-present', otherwise]
        end,
    HasDuplicate = lists:any(fun(SubTag) -> is_duplicate(SubTag, Conds) end,
                             UniqueSubTags),
    case HasDuplicate of
        false -> ok;
        true -> throw({error, switch_tag_contained_multiple_instances_of_a_tag_that_should_only_occur_once})
    end.
%% is_duplicate(SubTag, Conds)
%% true when SubTag occurs two or more times among the conditions
is_duplicate(SubTag, Conds) ->
    count_subtags(SubTag, Conds) >= 2.
%% count_subtags(SubTag, Conds)
%%
%% Purpose : count how many entries in Conds are {SubTag, _Dest} pairs.
%%           Generalized from the previous clause-per-tag version: any
%%           term can be counted, not just the fixed set of
%%           proxy/lookup/switch sub tag atoms (existing callers only
%%           pass those atoms, so behavior for them is unchanged).
%%           Entries that are not 2-tuples, or whose tag differs,
%%           are not counted.
%% Returns : integer() >= 0
count_subtags(SubTag, Conds) ->
    lists:foldl(fun({Tag, _Dest}, Acc) when Tag =:= SubTag ->
                        Acc + 1;
                   (_Other, Acc) ->
                        Acc
                end, 0, Conds).
%% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
%% get_cond/6: accumulator core of get_cond/4. Walks the children of a
%% switch tag; each recognised sub tag adds a {CondVal, NodeId} pair
%% (NodeId = current_id ++ [Count]) to Conds and the raw #xmlElement{}
%% to Targets (destinations are parsed later by process_targets/3).
%% Both accumulators are built reversed and flipped when done.
get_cond([], _SwitchName, _ExtraArgs, _ParseState, _Count, {Conds, Targets}) ->
    {lists:reverse(Conds), lists:reverse(Targets)};
%% "address" sub tag of address-switch: exactly one of the attributes
%% is / contains / subdomain-of must be present; "contains" is only
%% legal when the switch examines the display subfield, and
%% "subdomain-of" only for tel / host subfields (ExtraArgs carries the
%% address-switch subfield here)
get_cond([#xmlElement{name = address} = Cond | R],
         'address-switch' = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    CondVal = case {get_attribute(Cond, is),
                    get_attribute(Cond, contains),
                    get_attribute(Cond, 'subdomain-of')} of
                  {'#no_value', '#no_value', '#no_value'} ->
                      throw({error, address_tag_without_expected_attribute});
                  {IS, '#no_value', '#no_value'} ->
                      {address__is, IS};
                  {'#no_value', Contains, '#no_value'} when ExtraArgs == display ->
                      {address__contains, Contains};
                  {'#no_value', '#no_value', SubDomainOf} when ExtraArgs == tel; ExtraArgs == host ->
                      {'address__subdomain-of', SubDomainOf};
                  _ ->
                      throw({error, address_tag_subdomain_attribute_can_only_be_used_when_adress_switch_subfield_attribute_is_tel_or_host})
              end,
    NewCT = {[{CondVal, ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% "language" sub tag of language-switch: needs a "matches" attribute,
%% validated by xml_parse_util:is_language_tag/1
get_cond([#xmlElement{name = language} = Cond | R],
         'language-switch' = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    CondVal = case get_attribute(Cond, matches) of
                  '#no_value' ->
                      throw({error, langauge_tag_without_matches_attribute});
                  Val ->
                      {language__matches, xml_parse_util:is_language_tag(Val)}
              end,
    NewCT = {[{CondVal, ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% "priority" sub tag of priority-switch: exactly one of less /
%% greater / equal, value normalized by xml_parse_util:normalize_prio/1
get_cond([#xmlElement{name = priority} = Cond | R],
         'priority-switch' = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    CondVal = case {get_attribute(Cond, less),
                    get_attribute(Cond, greater),
                    get_attribute(Cond, equal) }
              of
                  {'#no_value', '#no_value', '#no_value'} ->
                      throw({error, priority_tag_without_expected_attribute});
                  {Less, '#no_value', '#no_value'} ->
                      {priority__less, xml_parse_util:normalize_prio(Less)};
                  {'#no_value', Greater, '#no_value'} ->
                      {priority__greater, xml_parse_util:normalize_prio(Greater)};
                  {'#no_value', '#no_value', Equal} ->
                      {priority__equal, xml_parse_util:normalize_prio(Equal)}
              end,
    NewCT = {[{CondVal, ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% "string" sub tag of string-switch: exactly one of is / contains
get_cond([#xmlElement{name = string} = Cond | R],
         'string-switch' = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    CondVal = case {get_attribute(Cond, is),
                    get_attribute(Cond, contains)} of
                  {'#no_value', '#no_value'} ->
                      throw({error, string_tag_without_expected_attribute});
                  {Is, '#no_value'} ->
                      {string__is, Is};
                  {'#no_value', Contains} ->
                      {string__contains, Contains}
              end,
    NewCT = {[{CondVal, ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
%% "time" sub tag of time-switch: builds one of the
%% time_switch__cond_N records depending on which attributes are used
get_cond([#xmlElement{name = time} = Cond | R],
         'time-switch' = SwitchName, ExtraArgs, ParseState, IndexCount, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    %% dtstart is mandatory; bound_dtstart/1 rejects dates the OTP
    %% calendar module can not handle
    Dtstart = case get_attribute(Cond, dtstart) of
                  '#no_value' ->
                      throw({error, time_tag_no_dtstart_attribute_supplied});
                  Res ->
                      DateTime = xml_parse_util:time(Res),
                      bound_dtstart(DateTime),
                      DateTime
              end,
    %% exactly one of dtend / duration is required
    Dtend_Duration =
        case {get_attribute(Cond, dtend), get_attribute(Cond, duration)} of
            {'#no_value', '#no_value'} ->
                throw({error, time_tag_no_duration_or_dtend_attribute_supplied});
            {Dtend, '#no_value'} ->
                {dtend, xml_parse_util:time(Dtend)};
            {'#no_value', Duration} ->
                {duration, xml_parse_util:duration(Duration)};
            {_Dtend, _Duration} ->
                throw({error, time_tag_both_duration_and_dtend_attribute_supplied})
        end,
    %% may not need to be calculated here, but simplifies code logic
    Until_Count = case {get_attribute(Cond, until), get_attribute(Cond, count)} of
                      {'#no_value', '#no_value'} ->
                          repeat_forever;
                      {Until, '#no_value'} ->
                          UntilTime = xml_parse_util:parse_until(Until),
                          {until, UntilTime};
                      {'#no_value', Count} ->
                          CountVal = list_to_integer(Count),
                          case CountVal >= 1 of
                              true ->
                                  %% bound_count/1 throws when CountVal
                                  %% exceeds the configured maximum
                                  bound_count(CountVal),
                                  {count, CountVal};
                              false ->
                                  throw({error, time_tag_count_attribute_is_zero_or_less})
                          end;
                      {_Until, _Count} ->
                          throw({error, time_tag_both_until_and_count_attribute_supplied})
                  end,
    %% interval defaults to 1 (iCalendar default)
    Interval = case get_attribute(Cond, interval) of
                   '#no_value' ->
                       1;
                   Val ->
                       list_to_integer(Val)
               end,
    %% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    %% Notes on usage of records based on which attributes the time tag
    %% includes:
    %%
    %% dtstart - required
    %% dtend or duration - required (but not both)
    %% freq - all parameters mentioned later require freq to be defined
    %% --------------------------------------------
    %% interval - always include in record (it has the default value 1)
    %% until or count - cant be used at the same time (existence of these
    %%                  attributes imply a delimited time period, interval alone
    %%                  result in a infinite repeat)
    %% --------------------------------------------
    %% byxxx - optional, record contains this field if there is at least
    %%         one byxxx attribute used
    %% wkst - always include this in record (if there is a byxxx attr)
    %%        as it has a default value ('mo')
    %% bysetpos - only included in record if byxxx values are used - as it
    %%            works on byxxx attributes
    %% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    %% "Parameters other than "dtstart", "dtend", and "duration" SHOULD NOT
    %% be specified unless "freq" is present, though CPL servers SHOULD
    %% accept scripts with such parameters present, and ignore the other
    %% parameters." - RFC 3880 chapter 4.4 p16
    CondVal = case get_attribute(Cond, freq) of
                  '#no_value' ->
                      %% {time, Dtstart, Dtend_Duration};
                      #time_switch__cond_2{ dtstart = Dtstart,
                                            dtend_duration = Dtend_Duration
                                          };
                  FreqStr ->
                      Freq = freq_str_to_atom(FreqStr),
                      case get_by_values(Cond) of
                          [] ->
                              %% {time, Dtstart, Dtend_Duration, Freq, Interval, Until_Count};
                              #time_switch__cond_5{ dtstart = Dtstart,
                                                    dtend_duration = Dtend_Duration,
                                                    freq = Freq,
                                                    interval = Interval,
                                                    until_count = Until_Count
                                                  };
                          ByValues ->
                              Wkst = first_work_day(Cond),
                              case get_attribute(Cond, bysetpos) of
                                  '#no_value' ->
                                      %% {time, Dtstart, Dtend_Duration, Freq,
                                      %%  Interval, Until_Count, Wkst};
                                      #time_switch__cond_7{ dtstart = Dtstart,
                                                            dtend_duration = Dtend_Duration,
                                                            freq = Freq,
                                                            interval = Interval,
                                                            until_count = Until_Count,
                                                            by_values = ByValues,
                                                            wkst = Wkst
                                                          };
                                  BysetposStr ->
                                      Bysetpos = parse_bysetpos(BysetposStr),
                                      %% {time, Dtstart, Dtend_Duration, Freq,
                                      %%  Interval, Until_Count, Wkst, Bysetpos}
                                      #time_switch__cond_8{ dtstart = Dtstart,
                                                            dtend_duration = Dtend_Duration,
                                                            freq = Freq,
                                                            interval = Interval,
                                                            until_count = Until_Count,
                                                            by_values = ByValues,
                                                            wkst = Wkst,
                                                            bysetpos = Bysetpos
                                                          }
                              end
                      end
              end,
    NewCT = {[{CondVal, ParseState#parse_state.current_id ++ [IndexCount]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, IndexCount + 1, NewCT);
%% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
%% lookup sub tags: success / notfound / failure only
get_cond([#xmlElement{name = Name} = Cond | R],
         lookup = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    case lists:member(Name, [success, notfound, failure]) of
        true ->
            NewCT = {[{Name, ParseState#parse_state.current_id ++ [Count]} | Conds],
                     [Cond | Targets]},
            get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
        false ->
            throw({error, lookup_tag_has_ilegal_condition})
    end;
%% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
%% proxy sub tags: busy / noanswer / redirection / failure / default only
get_cond([#xmlElement{name = Name} = Cond | R],
         proxy = SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    case lists:member(Name, [busy, noanswer, redirection, failure, default]) of
        true ->
            NewCT = {[{Name, ParseState#parse_state.current_id ++ [Count]} | Conds],
                     [Cond | Targets]},
            get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
        false ->
            throw({error, proxy_tag_has_ilegal_condition})
    end;
%% - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
%% 'not-present' will never be matched in a 'time-switch', so don't add it to the graph
%% NOTE(review): this clause does NOT increment Count while the
%% priority-switch twin below does - looks inconsistent, confirm which
%% id-numbering behavior is intended before changing either
get_cond([#xmlElement{name = 'not-present'} = Cond | R],
         'time-switch' = SwitchName, ExtraArgs, ParseState, Count, CT)
  when is_record(Cond, xmlElement) ->
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count, CT);
%% 'not-present' will never be matched in a 'priority-switch', so don't add it to the graph
get_cond([#xmlElement{name = 'not-present'} = Cond | R],
         'priority-switch' = SwitchName, ExtraArgs, ParseState, Count, CT)
  when is_record(Cond, xmlElement) ->
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, CT);
%% all other switches use not-present
get_cond([#xmlElement{name = 'not-present'} = Cond | R],
         SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    NewCT = {[{'not-present', ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% "otherwise" is accepted in any switch; ordering is verified later by
%% is_otherwise_cond_last/1
get_cond([#xmlElement{name = otherwise} = Cond | R],
         SwitchName, ExtraArgs, ParseState, Count, {Conds, Targets})
  when is_record(Cond, xmlElement) ->
    NewCT = {[{otherwise, ParseState#parse_state.current_id ++ [Count]} | Conds],
             [Cond | Targets]},
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count + 1, NewCT);
%% ignore (Cond) tags that aren't xmlElement record()
%% XXX this filter should probably only ignore xmlText record() and yield a error otherwise
get_cond([_Cond | R], SwitchName, ExtraArgs, ParseState, Count, CT) ->
    get_cond(R, SwitchName, ExtraArgs, ParseState, Count, CT).
%%--------------------------------------------------------------------
%% @spec (Start) ->
%% ok
%%
%% Start = #date_time{}
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc limit the minimum that dtstart can be set to, this is done
%% to guard against limitations in the (OTP) calender module
%% - "Date must refer to a local date after Jan 1, 1970"
%% @end
%%--------------------------------------------------------------------
%% Reject dtstart values the OTP calendar module can not handle
%% ("Date must refer to a local date after Jan 1, 1970").
bound_dtstart(Start) when is_record(Start, date_time) ->
    %% 1970-01-03 is used to ensure that Start will always be a legal
    %% utc value handled properly by OTP
    EarliestAllowed = {{1970, 1, 3}, {0, 0, 0}},
    case {Start#date_time.date, Start#date_time.time} < EarliestAllowed of
        true ->
            throw({error, dtstart_attribute_in_time_sub_tag_in_time_switch_set_to_a_to_early_date_time});
        false ->
            ok
    end.
%%--------------------------------------------------------------------
%% @spec (CountVal) ->
%% ok
%%
%% CountVal = integer() ">= 1"
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc to limit storage space used by
%% #time_switch__cond_x.time_ranges when storing scripts, as
%% well as to some extent, limit CPU load when processing
%% sip request with interpret_time.erl - this functions
%% limits the maximum value for the "count" attribute in the
%% "time" subtag used with "time-switch"
%% @end
%%--------------------------------------------------------------------
%% Enforce the configured upper limit (cpl_time_switch_count_max) on
%% the "count" attribute, bounding time_ranges storage and CPU cost.
bound_count(CountVal) ->
    {ok, ConfiguredMax} = yxa_config:get_env(cpl_time_switch_count_max),
    case CountVal =< ConfiguredMax of
        true ->
            ok;
        false ->
            throw({error, count_in_time_sub_tag_in_time_switch_tag_set_to_high})
    end.
%%--------------------------------------------------------------------
%% return list of keyed values; {AttrName, Value}
%% Collect all byxxx attributes of a time tag as {AttrName, Value}
%% pairs; each attribute's raw string is parsed/range-checked by its
%% parser fun, duplicates are removed with usort, and value lists are
%% unfolded to one pair per value.
get_by_values(Cond) ->
    Specs = [{bysecond,   fun(Val) -> parse_by_range(Val, {0, 59}) end},
             {byminute,   fun(Val) -> parse_by_range(Val, {0, 59}) end},
             {byhour,     fun(Val) -> parse_by_range(Val, {0, 23}) end},
             {byday,      fun(Val) -> xml_parse_util:parse_byday(Val) end},
             {bymonthday, fun(Val) -> parse_by_range(Val, [{1, 31}, {-1, -31}]) end},
             {byyearday,  fun(Val) -> parse_by_range(Val, [{1, 366}, {-1, -366}]) end},
             {byweekno,   fun(Val) -> parse_by_range(Val, [{1, 53}, {-1, -53}]) end},
             {bymonth,    fun(Val) -> parse_by_range(Val, {1, 12}) end}],
    Collected = lists:append([get_by_attribute(Cond, Attr, Parser)
                              || {Attr, Parser} <- Specs]),
    %% remove duplicates
    unfold_by_values(lists:usort(Collected)).
%% convert list of [{Attr, ValList}, ...] to list of [{Attr, Val}, ...]
%% Expand [{Attr, [V1, V2, ...]}, ...] into [{Attr, V1}, {Attr, V2}, ...].
%% Groups end up in reverse input order (the original foldl prepended
%% each expanded group); that ordering is preserved here.
unfold_by_values(AttrList) ->
    ExpandedGroups = [[{Attr, Val} || Val <- Vals] || {Attr, Vals} <- AttrList],
    lists:append(lists:reverse(ExpandedGroups)).
%% return : [] | [{AttrName, Val}]
%% AttrNamn = atom(), name of a byxxx field in a time tag (used by time-switch)
%% Val = usually a list of values, see time_switch__cond_8 record() in cpl.hrl
%% Fetch one byxxx attribute from a time tag; returns [] when the
%% attribute is absent, otherwise a single {AttrName, ParsedValue}
%% where ParsedValue = PostFun(RawString).
get_by_attribute(Cond, AttrName, PostFun) ->
    case get_attribute(Cond, AttrName) of
        '#no_value' ->
            [];
        RawValue ->
            [{AttrName, PostFun(RawValue)}]
    end.
%% parse_by_range(Str, Range | Ranges)
%%
%% Purpose : parse a comma separated integer list (a byxxx attribute
%%           value) and verify every integer falls inside at least one
%%           of the allowed ranges. A single {Min, Max} tuple is
%%           shorthand for [{Min, Max}]. A range tuple may be written
%%           ascending ({1, 31}) or descending ({-1, -31}), matching
%%           the existing call sites for negative byxxx offsets.
%%           Generalized from the previous two clauses (one range /
%%           exactly two ranges) to any number of ranges; both old
%%           argument forms behave as before.
%% Returns : sorted [integer()] |
%%           throw({error, byxxx_attribute_out_of_range})
parse_by_range(Str, {Min, Max}) ->
    parse_by_range(Str, [{Min, Max}]);
parse_by_range(Str, Ranges) when is_list(Ranges) ->
    Ints = [list_to_integer(Token) || Token <- string:tokens(Str, ",")],
    InAnyRange =
        fun(Int) ->
                lists:any(fun({Lo, Hi}) when Lo =< Hi ->
                                  (Int >= Lo) andalso (Int =< Hi);
                             ({Hi, Lo}) ->
                                  %% descending form, e.g. {-1, -31}
                                  (Int =< Hi) andalso (Int >= Lo)
                          end, Ranges)
        end,
    case lists:all(InAnyRange, Ints) of
        true ->
            lists:sort(Ints);
        false ->
            throw({error, byxxx_attribute_out_of_range})
    end.
%% return: mo | tu | we | th | fr | sa | su
%% Read the "wkst" (week start) attribute of a time tag; defaults to
%% 'mo' when absent, matching is case insensitive.
first_work_day(Cond) ->
    case get_attribute(Cond, wkst) of
        '#no_value' ->
            mo;
        Wkst ->
            wkst_str_to_atom(string:to_lower(Wkst))
    end.

%% map a lowercased two-letter day name to its atom form
wkst_str_to_atom("mo") -> mo;
wkst_str_to_atom("tu") -> tu;
wkst_str_to_atom("we") -> we;
wkst_str_to_atom("th") -> th;
wkst_str_to_atom("fr") -> fr;
wkst_str_to_atom("sa") -> sa;
wkst_str_to_atom("su") -> su;
wkst_str_to_atom(_) -> throw({error, wkst_attribute_value_not_a_day}).
%% convert freq value to a atom
%% @doc Convert a freq attribute value (case insensitive) to the
%% corresponding atom. Throws
%% {error, freq_attribute_value_not_legal} for unknown values.
freq_str_to_atom(Str) ->
    freq_str_to_atom2(string:to_lower(Str)).

%% Dispatch on the lowercased string via a lookup table.
freq_str_to_atom2(Freq) ->
    Known = [{"secondly", secondly},
             {"minutely", minutely},
             {"hourly", hourly},
             {"daily", daily},
             {"weekly", weekly},
             {"monthly", monthly},
             {"yearly", yearly}],
    case lists:keyfind(Freq, 1, Known) of
        {_, Atom} -> Atom;
        false -> throw({error, freq_attribute_value_not_legal})
    end.
%%--------------------------------------------------------------------
%% @spec (BysetposStr) -> [integer()]
%%
%% BysetposStr = string() "comma separated integers"
%%
%% @doc ensure that all bysetpos values are in the [1,366] or
%% [-1,-366] range (days in year)
%% @end
%%--------------------------------------------------------------------
parse_bysetpos(BysetposStr) ->
    %% split "1,-2,3" style input into integer tokens
    TokenList = string:tokens(BysetposStr,","),
    Ints = [list_to_integer(E) || E <- TokenList],
    %% NOTE(review): the range argument here uses list-pair syntax
    %% {[1,366], [-1,-366]}, while other callers in this file pass
    %% plain lists/tuples — confirm against
    %% xml_parse_util:check_range/2's expected argument shape.
    ValidInts = [xml_parse_util:check_range(E, {[1,366], [-1,-366]}) || E <- Ints],
    lists:sort(ValidInts).
%%--------------------------------------------------------------------
%% Function:
%% Descrip.:
%% Returns :
%%--------------------------------------------------------------------
%% @doc Map the clear attribute string to an atom: "yes" -> yes,
%% "no" -> no. Any other value throws
%% {error, clear_attribute_not_set_to_yes_or_no}.
get_clear_value(Value) ->
    case Value of
        "yes" -> yes;
        "no" -> no;
        _Other -> throw({error, clear_attribute_not_set_to_yes_or_no})
    end.
%%--------------------------------------------------------------------
%% Function:
%% Descrip.:
%% Returns :
%%--------------------------------------------------------------------
%% Validate a prio attribute value: parse it as a float and ensure it
%% lies in [0.0, 1.0]. Returns the original string (not the float) on
%% success so the caller keeps the exact textual representation;
%% throws {error, prio_value_out_of_range} otherwise.
check_prio_value(PrioStr) ->
    Float = sipparse_util:str_to_float(PrioStr),
    case xml_parse_util:check_range(Float, [0.0, 1.0]) of
        true -> PrioStr;
        false -> throw({error, prio_value_out_of_range})
    end.
%%--------------------------------------------------------------------
%% @spec (FieldStr) ->
%% subject | organization | 'user-agent' | display
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc convert field value used by string-switch in the attribute
%% field, to a standard atom() format
%% @end
%%--------------------------------------------------------------------
%% @doc Convert the field attribute value of a string-switch tag to
%% its canonical atom form. Throws
%% {error, not_a_legal_string_switch_tag_field_attribute_value} for
%% anything outside the four legal field names.
normalize_string_switch__field("subject") -> subject;
normalize_string_switch__field("organization") -> organization;
normalize_string_switch__field("user-agent") -> 'user-agent';
normalize_string_switch__field("display") -> display;
normalize_string_switch__field(_Other) ->
    throw({error, not_a_legal_string_switch_tag_field_attribute_value}).
%%--------------------------------------------------------------------
%% Function:
%% Descrip.: "If a basic location node specifies a location which
%% isn't supported by the underlying signaling protocol,
%% the script server SHOULD detect this and report it to
%% the user at the time the script is submitted." - RFC 3880
%% chapter 5.1 p22
%% Returns : URL |
%% throw({error, Reason})
%% Reason = atom()
%% Note : the current yxa implementation only handles sip urls,
%% when this changes there may be a need to update this
%% function.
%%--------------------------------------------------------------------
check_url(URL) ->
    %% sipurl:parse/1 throws {yxa_unparsable, url, _} on bad input;
    %% translate that into this module's error convention. The parsed
    %% result itself is discarded — only the original string is
    %% returned on success.
    try sipurl:parse(URL) of
        _ -> URL
    catch
        throw: {yxa_unparsable, url, _Error} ->
            throw({error, url_attribute_not_set_to_proper_url})
    end.
%%--------------------------------------------------------------------
%% @spec (ParseState) ->
%% {NewParseState, NextId}
%%
%% ParseState = #parse_state{}
%%
%% NewParseState = #parse_state{} "the updated one"
%% NextId = term() "the id of the next node"
%%
%% @doc This function is used to update the current_id in a
%% parse_state for a CPL rule that has a single possible
%% destination, e.g. location and remove-location
%% @end
%%--------------------------------------------------------------------
%% @doc Advance ParseState to the id of the (single) next node by
%% appending 1 to the current id path, for CPL rules with exactly one
%% destination (e.g. location, remove-location). Returns the updated
%% parse state together with the new id.
next_id(#parse_state{current_id = Current} = ParseState) ->
    Next = Current ++ [1],
    {ParseState#parse_state{current_id = Next}, Next}.
%%--------------------------------------------------------------------
%% @spec (LogName) ->
%% LogName
%%
%% LogName = default | string()
%%
%% Reason = atom()
%%
%% @throws {error, Reason}
%%
%% @doc check if the name attribute in the log tag, refers to a
%% log that can be used by cpl.
%% @end
%%--------------------------------------------------------------------
is_log_dest(LogName) ->
    %% give the local override hook first say; undefined means the
    %% hook declined to decide, so fall through to the default
    case local:cpl_is_log_dest(LogName) of
        undefined ->
            is_log_dest2(LogName);
        Res ->
            Res
    end.

%% default policy: no log destination is accepted unless the local
%% hook above explicitly approves it
is_log_dest2(_) ->
    throw({error, log_tag_attribute_name_is_not_a_legal_log}).
%%--------------------------------------------------------------------
%% @spec (SubactionName, ParseState) -> true | false
%%
%% @doc determine if SubactionName can be used as id attribute by
%% a subaction tag - all names must be unique
%% @end
%%--------------------------------------------------------------------
%% @doc Determine whether SubactionName is free to be used as the id
%% attribute of a subaction tag — all subaction names must be unique.
%% Returns true when the name is not yet present as a key in the
%% subaction_name_id_mapping of ParseState.
%% (Name keeps the historical "unqiue" spelling; callers depend on it.)
is_subaction_name_unqiue(SubactionName, ParseState) ->
    Mapping = ParseState#parse_state.subaction_name_id_mapping,
    %% lists:keymember/3 answers the membership question directly;
    %% the previous keysearch + case was an indirect boolean test
    not lists:keymember(SubactionName, 1, Mapping).
%%====================================================================
%% Test functions
%%====================================================================
%%--------------------------------------------------------------------
%% @spec () -> ok
%%
%% @doc autotest callback Note : moved test cases to
%% xml_parse_test, to keep file size manageable
%% @hidden
%% @end
%%--------------------------------------------------------------------
-ifdef( YXA_NO_UNITTEST ).
test() ->
{error, "Unit test code disabled at compile time"}.
-else.
test() ->
xml_parse_test:test(),
ok.
-endif. | src/cpl/xml_parse.erl | 0.564339 | 0.694018 | xml_parse.erl | starcoder |
%% This file is a copy of http_uri.erl from the R13B-1 Erlang/OTP
%% distribution with several modifications.
%% All modifications are Copyright (c) 2009-2020 VMware, Inc. or its affiliates.
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved via the world wide web at https://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%% See https://tools.ietf.org/html/rfc3986
-module(uri_parser).
-export([parse/2]).
%%%=========================================================================
%%% API
%%%=========================================================================
%% Returns a key list of elements extracted from the URI. Note that
%% only 'scheme' is guaranteed to exist. Key-Value pairs from the
%% Defaults list will be used absence of a non-empty value extracted
%% from the URI. The values extracted are strings, except for 'port'
%% which is an integer, 'userinfo' which is a list of strings (split
%% on $:), and 'query' which is a list of strings where no $= char
%% found, or a {key,value} pair where a $= char is found (initial
%% split on $& and subsequent optional split on $=). Possible keys
%% are: 'scheme', 'userinfo', 'host', 'port', 'path', 'query',
%% 'fragment'.
-spec parse(AbsURI, Defaults :: list())
-> [{atom(), string()}] | {error, no_scheme | {malformed_uri, AbsURI, any()}}
when AbsURI :: string() | binary().
parse(AbsURI, Defaults) ->
    %% accept both binaries and strings; internal parsing works on lists
    AbsUriString = rabbit_data_coercion:to_list(AbsURI),
    case parse_scheme(AbsUriString) of
        {error, Reason} ->
            {error, Reason};
        {Scheme, Rest} ->
            %% old-style catch: a throw (e.g. {invalid_port, _} from
            %% parse_host_port) or a runtime error ({'EXIT', _}) falls
            %% through to the E branch and is wrapped as malformed_uri;
            %% a successful parse always yields a non-empty keylist
            case (catch parse_uri_rest(Rest, true)) of
                [_|_] = List ->
                    merge_keylists([{scheme, Scheme} | List], Defaults);
                E ->
                    {error, {malformed_uri, AbsURI, E}}
            end
    end.
%%%========================================================================
%%% Internal functions
%%%========================================================================
%% The scheme is everything before the first ":"; without one the
%% URI is rejected.
parse_scheme(AbsURI) ->
    split_uri(AbsURI, ":", {error, no_scheme}).

parse_uri_rest("//" ++ URIPart, true) ->
    %% we have an authority: split it from the path/query/fragment at
    %% the first "/", "?" or "#" (SkipRight = 0 keeps that character
    %% with the remainder)
    {Authority, PathQueryFrag} =
        split_uri(URIPart, "/|\\?|#", {URIPart, ""}, 1, 0),
    AuthorityParts = parse_authority(Authority),
    parse_uri_rest(PathQueryFrag, false) ++ AuthorityParts;
parse_uri_rest(PathQueryFrag, _Bool) ->
    %% no authority, just a path and maybe query/fragment
    {PathQuery, Frag} = split_uri(PathQueryFrag, "#", {PathQueryFrag, ""}),
    {Path, QueryString} = split_uri(PathQuery, "\\?", {PathQuery, ""}),
    QueryPropList = split_query(QueryString),
    [{path, Path}, {'query', QueryPropList}, {fragment, Frag}].

%% authority = [userinfo "@"] host [":" port]; the userinfo part is
%% split on ":" into a list of strings (empty userinfo yields []).
parse_authority(Authority) ->
    {UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}),
    UserInfoSplit = case re:split(UserInfo, ":", [{return, list}]) of
                        [""] -> [];
                        UIS -> UIS
                    end,
    [{userinfo, UserInfoSplit} | parse_host_port(HostPort)].
%% IPv6 literal: the host is enclosed in "[...]"; anything after the
%% closing "]" must be ":Port" or empty, otherwise the port part is
%% rejected with a throw (caught by parse/2 as malformed_uri).
parse_host_port("[" ++ HostPort) -> %ipv6
    {Host, ColonPort} = split_uri(HostPort, "\\]", {HostPort, ""}),
    [{host, Host} | case split_uri(ColonPort, ":", not_found, 0, 1) of
                        not_found -> case ColonPort of
                                         [] -> [];
                                         _ -> throw({invalid_port, ColonPort})
                                     end;
                        {_, Port} -> [{port, list_to_integer(Port)}]
                    end];
%% hostname / IPv4: optional ":Port" suffix; a missing port simply
%% yields no port entry (the caller's Defaults may supply one)
parse_host_port(HostPort) ->
    {Host, Port} = split_uri(HostPort, ":", {HostPort, not_found}),
    [{host, Host} | case Port of
                        not_found -> [];
                        _ -> [{port, list_to_integer(Port)}]
                    end].
%% Split a query string on "&"; each parameter containing "=" becomes
%% a {Key, Value} pair, parameters without "=" stay as plain strings.
%% An empty query string yields [].
split_query(Query) ->
    case re:split(Query, "&", [{return, list}]) of
        [""] -> [];
        QParams -> [split_uri(Param, "=", Param) || Param <- QParams]
    end.
%% Split UriPart at the first match of the SplitChar regexp, returning
%% {Before, After}. SkipLeft/SkipRight control how many characters
%% adjacent to the match position are dropped on each side (1/1, the
%% default, drops the separator itself). Returns NoMatchResult when
%% the regexp does not match.
split_uri(UriPart, SplitChar, NoMatchResult) ->
    split_uri(UriPart, SplitChar, NoMatchResult, 1, 1).

split_uri(UriPart, SplitChar, NoMatchResult, SkipLeft, SkipRight) ->
    case re:run(UriPart, SplitChar) of
        nomatch ->
            NoMatchResult;
        {match, [{MatchPos, _Len}]} ->
            Before = string:substr(UriPart, 1, MatchPos + 1 - SkipLeft),
            After = string:substr(UriPart, MatchPos + 1 + SkipRight,
                                  length(UriPart)),
            {Before, After}
    end.
%% Merge two keylists with the priority: entries from A with a
%% non-empty value beat B, which beats entries from A whose value is
%% []. The result has unique keys and is sorted on the key.
merge_keylists(A, B) ->
    IsEmptyValue = fun({_Key, Value}) -> Value =:= [] end,
    {AEmpty, ANonEmpty} = lists:partition(IsEmptyValue, A),
    Sort = fun(KL) -> lists:ukeysort(1, KL) end,
    Preferred = lists:ukeymerge(1, Sort(ANonEmpty), Sort(B)),
    lists:ukeymerge(1, Preferred, Sort(AEmpty)).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2012 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(yz_extractor).
-compile(export_all).
-include("yokozuna.hrl").
-type register_opts() :: [overwrite].
-type get_def_opts() :: [check_default].
%% NOTE: The map is treated as an orddict so these entries must be
%% ordered correctly or `get_def' may report no extractor is
%% registered when there is one.
-define(DEFAULT_MAP, [{default, yz_noop_extractor},
{"application/json",yz_json_extractor},
{"application/riak_counter", yz_dt_extractor},
{"application/riak_map", yz_dt_extractor},
{"application/riak_set", yz_dt_extractor},
{"application/xml",yz_xml_extractor},
{"text/plain",yz_text_extractor},
{"text/xml",yz_xml_extractor}
]).
-define(META_EXTRACTOR_MAP, yokozuna_extractor_map).
%% @doc Get the extractor definition registered for the given
%% `MimeType'. Return `none' if there is nothing registered.
-spec get_def(mime_type()) -> extractor_def() | none.
get_def(MimeType) ->
    get_def(MimeType, [check_default]).

%% @doc The same as `get_def/1' but allows options to be passed.
%%
%% Options:
%%
%%   `check_default' - If no entry can be found for the given
%%                     `MimeType' then check for a default extractor.
%%
-spec get_def(mime_type(), get_def_opts()) -> extractor_def() | none.
get_def(MimeType, Opts) ->
    %% look up against the cluster-wide extractor map stored in ring
    %% metadata (falls back to ?DEFAULT_MAP when none is registered)
    Map = get_map(),
    get_def(Map, MimeType, Opts).
%% @doc The same as `get_def/2' but takes the extractor `Map' as
%% argument instead of fetching it.
-spec get_def(extractor_map(), mime_type(), get_def_opts()) ->
extractor_def() | none.
get_def(Map, MimeType, Opts) ->
    case orddict:find(MimeType, Map) of
        {ok, Def} ->
            %% a def is either a bare module or a {Module, Opts} pair;
            %% both shapes are returned unchanged
            Def;
        error ->
            case proplists:get_bool(check_default, Opts) of
                true -> get_default(Map);
                false -> none
            end
    end.
%% @doc Get the default entry from the extractor `Map' or return
%% `none' if there isn't one.
-spec get_default(extractor_map()) -> extractor_def() | none.
%% @doc Look up the `default' extractor entry in the extractor `Map',
%% returning `none' when no default is registered.
get_default(Map) ->
    case orddict:find(default, Map) of
        error -> none;
        {ok, Def} -> Def
    end.
%% @doc Get the extractor map.
-spec get_map() -> extractor_map().
get_map() ->
    get_map(yz_misc:get_ring(transformed)).

%% @doc Like `get_map/0' but takes the `Ring' as argument instead of
%%      fetching it.
-spec get_map(ring()) -> extractor_map().
get_map(Ring) ->
    %% the map lives in ring metadata; an unmodified cluster has no
    %% entry, in which case the compiled-in default map applies
    case riak_core_ring:get_meta(?META_EXTRACTOR_MAP, Ring) of
        {ok, Map} -> Map;
        undefined -> ?DEFAULT_MAP
    end.
%% @doc Check if there is an entry for the given extractor `Name'.
-spec is_registered(extractor_name()) -> boolean().
%% @doc Check whether any entry in the extractor map refers to the
%% extractor module `ExtractorName'. Entries are either a bare module
%% name or a {Module, Opts} pair; both shapes are inspected.
is_registered(ExtractorName) ->
    SameName = fun({_MimeType, {Name, _Opts}}) -> Name == ExtractorName;
                  ({_MimeType, Name}) -> Name == ExtractorName
               end,
    lists:any(SameName, get_map()).
%% @doc Register an extractor `Def' under the given `MimeType'. If
%% there is already an entry then return `already_registered'.
%% The `register/2' call can be used to overwrite an entry.
-spec register(mime_type(), extractor_def()) ->
extractor_map() | already_registered.
register(MimeType, Def) ->
    %% fully qualified call: `register/2' would otherwise clash with
    %% the auto-imported BIF erlang:register/2
    ?MODULE:register(MimeType, Def, []).

%% @doc The same as `register/2' but allows options to be passed.
%%
%% Options:
%%
%%   `overwrite' - Overwrite the entry if one already exists.
%%
-spec register(mime_type(), extractor_def(), register_opts()) ->
                      extractor_map() | already_registered.
register(MimeType, Def, Opts) ->
    %% register_map/2 is applied to the current map inside the ring
    %% metadata update; it returns `ignore' (reported back here as
    %% not_changed) when an entry exists and overwrite was not asked for
    case yz_misc:set_ring_meta(?META_EXTRACTOR_MAP,
                               ?DEFAULT_MAP,
                               fun register_map/2,
                               {MimeType, Def, Opts}) of
        {ok, Ring} ->
            get_map(Ring);
        not_changed ->
            already_registered
    end.
%% Update function run against the current extractor map: store the
%% new def unless an entry for MimeType already exists and overwrite
%% was not requested, in which case `ignore' leaves the map untouched.
register_map(Map, {MimeType, Def, Opts}) ->
    Existing = get_def(Map, MimeType, []),
    Overwrite = proplists:get_bool(overwrite, Opts),
    case {Existing, Overwrite} of
        {none, _} -> orddict:store(MimeType, Def, Map);
        {_, true} -> orddict:store(MimeType, Def, Map);
        {_, false} -> ignore
    end.
%% @doc Run the extractor def against the `Value' to produce a list of
%% fields.
-spec run(binary(), extractor_def()) -> fields() | {error, any()}.
run(Value, {Module, Opts}) ->
Module:extract(Value, Opts);
run(Value, Module) ->
Module:extract(Value). | deps/yokozuna/src/yz_extractor.erl | 0.67694 | 0.453201 | yz_extractor.erl | starcoder |
-module(rebar3_bsp_uri).
-export([ file/1
, dir/1
, profile/1, profile/2
, sanitize/1
, compose/1
, extract/3
, normalize/1
, normalize/2
, parse/1
]).
-type uri_string() :: uri_string:uri_string().
-type uri_map() :: uri_string:uri_map().
-define(PROFILE_SCHEME, <<"profile">>).
-define(FILE_SCHEME, <<"file">>).
-spec file(file:name_all()) -> binary().
%% @doc Build a file:// URI for a regular file path.
%% Filenames must never have a trailing slash (sanitize/1 strips
%% redundant separators).
file(Filename) ->
  Sanitized = sanitize(Filename),
  compose(#{ scheme => ?FILE_SCHEME, path => Sanitized }).

-spec dir(file:name_all()) -> binary().
%% @doc Build a file:// URI for a directory path.
%% Dirnames must always end with exactly one slash; the empty path
%% and the root path "/" are special-cased and passed through as-is.
dir(Dirname) ->
  Sanitized = case sanitize(Dirname) of
                <<"">> ->
                  <<"">>;
                <<"/">> ->
                  <<"/">>;
                Else ->
                  <<Else/binary, "/">>
              end,
  compose(#{ scheme => ?FILE_SCHEME, path => Sanitized }).
-spec profile(atom() | unicode:chardata()) -> binary().
%% @doc Build a profile: URI for a rebar3 profile name (no query
%% parameters).
profile(Profile) ->
  profile(Profile, []).

-spec profile(atom() | unicode:chardata(), [{unicode:chardata(), unicode:chardata() | true}]) -> binary().
%% @doc Like profile/1 but with query parameters. Params are encoded
%% via uri_string:compose_query/1; the query component is omitted
%% entirely when Params is empty.
profile(Profile, Params) ->
  BaseMap = #{ scheme => ?PROFILE_SCHEME, path => rebar3_bsp_util:to_binary(Profile) },
  UriMap = case Params of
             [] ->
               BaseMap;
             Params ->
               BaseMap#{ query => uri_string:compose_query(Params) }
           end,
  compose(UriMap).
-spec sanitize(file:name_all()) -> binary().
%% @doc Normalize a path: flatten deep chardata, split it into
%% components and re-join them (collapsing redundant separators and
%% trailing slashes), returning the result as a binary. An empty
%% path yields <<"">> since filename:join/1 rejects empty lists.
sanitize(Filename) ->
  Flattened = filename:flatten(Filename),
  Parts = filename:split(Flattened),
  Rejoined = case Parts of
               [] ->
                 "";
               Parts ->
                 filename:join(Parts)
             end,
  rebar3_bsp_util:to_binary(Rejoined).
-spec compose(uri_map()) -> binary().
%% @doc Recompose a uri_map into a URI string, always returned as a
%% binary.
compose(UriMap) ->
  Uri = uri_string:recompose(UriMap),
  rebar3_bsp_util:to_binary(Uri).

-spec extract(atom(), uri_string() | uri_map(), uri_map()) -> binary().
%% @doc Extract one component (e.g. path) from Uri after asserting
%% that all components listed in Checks match the normalized URI.
%% The repeated bind of NormalizedChecks is an intentional assertive
%% match: it crashes with badmatch when any check does not hold.
extract(Key, Uri, Checks) ->
  NormalizedUri = normalize(Uri, [return_map]),
  %% Checks might not be a full uri_map, so a regular normalize call might barf
  %% - just ensure the values are binaries
  NormalizedChecks = maps:map(fun(_K, V) -> rebar3_bsp_util:to_binary(V) end, Checks),
  %% Verify that the requested checks match
  NormalizedChecks = maps:with(maps:keys(NormalizedChecks), NormalizedUri),
  %% Uri checks out, extract the requested part
  rebar3_bsp_util:to_binary(maps:get(Key, NormalizedUri)).

-spec normalize(uri_string() | uri_map()) -> uri_string().
%% @doc Normalize a URI, returning it as a binary string.
normalize(Uri) ->
  normalize(Uri, []).

-spec normalize(uri_string() | uri_map(), [] | [return_map]) -> uri_string() | uri_map().
%% @doc Normalize a URI; with the [return_map] option the normalized
%% URI is re-parsed and returned as a uri_map instead of a binary.
normalize(Uri, Opts) ->
  NormalizedUri = uri_string:normalize(Uri),
  BinaryUri = rebar3_bsp_util:to_binary(NormalizedUri),
  case Opts of
    [] ->
      BinaryUri;
    [return_map] ->
      uri_string:parse(BinaryUri)
  end.
-spec parse(uri_string()) -> uri_map().
parse(Uri) ->
normalize(Uri, [return_map]). | src/rebar3_bsp_uri.erl | 0.52074 | 0.404684 | rebar3_bsp_uri.erl | starcoder |
-module(solver).
-export([solve/1]).
-include_lib("eunit/include/eunit.hrl").
% Solve the puzzle by placing tiles onto a grid in appropriate places.
% Entry point: Tiles maps tile id -> tile. The puzzle is assumed to
% be square, so the grid side is sqrt(map_size(Tiles)). Crashes with
% badmatch if the backtracking search fails (it shouldn't on valid
% puzzle input).
solve(Tiles) ->
    Adjs = build_adjacencies(Tiles),
    Size = trunc(math:sqrt(map_size(Tiles))),
    {ok, Grid} = solve(Tiles, Adjs, 1, 1, Size, grid:new(Size)),
    Grid.
% The top left corner: find the corner tiles and pick one that we don't have
% to reorient because there happens to be one of those in my input data. :)
% solve/6 walks the grid in row-major order (X across, Y down),
% choosing and orienting one tile per cell and backtracking through
% the solve_* helpers on failure. Returns {ok, Grid} on success.
%
% The top left corner: find the corner tiles and pick one that we don't have
% to reorient because there happens to be one of those in my input data. :)
solve(Tiles, Adjs, X, Y, Size, Grid) when X == 1 andalso Y == 1 ->
    Corners = find_corners(Adjs),
    % a corner whose unmatched sides are left+top fits the top-left
    % cell without rotation; this assumes exactly one such tile exists
    [{TL, _}] = lists:filter(fun ({_, V}) ->
        maps:is_key(left, V) andalso maps:is_key(top, V)
    end, Corners),
    NewGrid = grid:put(X, Y, TL, maps:get(TL, Tiles), Grid),
    % io:format("solve(1, 1): ~p~n", [TL]),
    solve(maps:remove(TL, Tiles), Adjs, X+1, Y, Size, NewGrid);
% The rest of the top row: pick the only tile that can be adjacent to the
% right edge of the previous tile (as luck has it there is only one), and
% orient it appropriately.
solve(Tiles, Adjs, X, Y, Size, Grid) when X =< Size andalso Y == 1 ->
    Prev = grid:tile_at(X-1, 1, Grid),
    Right = tile:right(Prev),
    Options = maps:get(Right, Adjs),
    solve_right(Tiles, Adjs, X, Y, Size, Grid, Options);
% Off the edge of a row: start solving the next row.
solve(Tiles, Adjs, X, Y, Size, Grid) when X > Size ->
    solve(Tiles, Adjs, 1, Y+1, Size, Grid);
% Down the left column of the grid: pick a tile that can be adjacent to the
% bottom edge of the row above.
solve(Tiles, Adjs, X, Y, Size, Grid) when X == 1 andalso Y =< Size ->
    Prev = grid:tile_at(X, Y-1, Grid),
    Bottom = tile:bottom(Prev),
    Options = maps:get(Bottom, Adjs),
    solve_down(Tiles, Adjs, X, Y, Size, Grid, Options);
% The rest of the tiles: match both the tile to the left AND the tile above.
solve(Tiles, Adjs, X, Y, Size, Grid) when X =< Size andalso Y =< Size ->
    Prev = grid:tile_at(X-1, Y, Grid),
    Right = tile:right(Prev),
    Options = maps:get(Right, Adjs),
    solve_both(Tiles, Adjs, X, Y, Size, Grid, Options);
% Off the bottom edge of the grid: we're all finished!
solve(_, _, _, Y, Size, Grid) when Y > Size -> {ok, Grid}.
% Try each candidate {ID, Side, Flip} option in turn for the current
% cell, matching the left edge; backtrack to the next option when a
% placement leads the recursive search into a dead end.
solve_right(Tiles, Adjs, X, Y, Size, Grid, [Option | Rest]) ->
    case solve_right_helper(Tiles, Adjs, X, Y, Size, Grid, Option) of
        {ok, Ret} -> {ok, Ret};
        error -> solve_right(Tiles, Adjs, X, Y, Size, Grid, Rest)
    end;
solve_right(_, _, _, _, _, _, []) -> error.

% Place one candidate tile (oriented so the matching edge becomes its
% left edge) and recurse. Candidates whose tile id is already placed
% (removed from Tiles) are rejected immediately.
solve_right_helper(Tiles, Adjs, X, Y, Size, Grid, {ID, Side, Flip}) ->
    case maps:find(ID, Tiles) of
        {ok, T} ->
            Tile = orient_left(T, Side, Flip),
            NewGrid = grid:put(X, Y, ID, Tile, Grid),
            % io:format("solve(~p, ~p): ~p~n", [X, Y, ID]),
            case solve(maps:remove(ID, Tiles), Adjs, X+1, Y, Size, NewGrid) of
                {ok, Ret} -> {ok, Ret};
                error -> error
            end;
        error -> error
    end.
% Same backtracking scheme as solve_right/7, but candidates are
% matched against the bottom edge of the tile above (used for the
% left-most column).
solve_down(Tiles, Adjs, X, Y, Size, Grid, [Option | Rest]) ->
    case solve_down_helper(Tiles, Adjs, X, Y, Size, Grid, Option) of
        {ok, Ret} -> {ok, Ret};
        error -> solve_down(Tiles, Adjs, X, Y, Size, Grid, Rest)
    end;
solve_down(_, _, _, _, _, _, []) -> error.

% Place one candidate (oriented so the matching edge becomes its top
% edge) and recurse; already-placed tile ids are rejected.
solve_down_helper(Tiles, Adjs, X, Y, Size, Grid,{ID, Side, Flip}) ->
    case maps:find(ID, Tiles) of
        {ok, T} ->
            Tile = orient_up(T, Side, Flip),
            NewGrid = grid:put(X, Y, ID, Tile, Grid),
            % io:format("solve(~p, ~p): ~p~n", [X, Y, ID]),
            case solve(maps:remove(ID, Tiles), Adjs, X+1, Y, Size, NewGrid) of
                {ok, Ret} -> {ok, Ret};
                error -> error
            end;
        error -> error
    end.
% Backtracking over candidates that must match the tile to the left
% AND the tile above (used for all interior cells).
solve_both(Tiles, Adjs, X, Y, Size, Grid, [Option | Rest]) ->
    case solve_both_helper(Tiles, Adjs, X, Y, Size, Grid, Option) of
        {ok, Ret} -> {ok, Ret};
        error -> solve_both(Tiles, Adjs, X, Y, Size, Grid, Rest)
    end;
solve_both(_, _, _, _, _, _, []) -> error.

% Orient the candidate against the left neighbour first, then check
% that its resulting top edge also matches the bottom edge of the
% tile above before committing and recursing.
solve_both_helper(Tiles, Adjs, X, Y, Size, Grid, {ID, Side, Flip}) ->
    case maps:find(ID, Tiles) of
        {ok, T} ->
            Tile = orient_left(T, Side, Flip),
            Above = grid:tile_at(X, Y-1, Grid),
            Top = tile:top(Tile),
            Match = tile:bottom(Above),
            case Top of
                Match ->
                    NewGrid = grid:put(X, Y, ID, Tile, Grid),
                    % io:format("solve(~p, ~p): ~p~n", [X, Y, ID]),
                    case solve(maps:remove(ID, Tiles), Adjs, X+1, Y, Size, NewGrid) of
                        {ok, Ret} -> {ok, Ret};
                        error -> error
                    end;
                _ -> error
            end;
        error -> error
    end.
% Reorient Tile so that the edge recorded in the adjacency index as
% (Side, Flip) ends up as the tile's LEFT edge.
% NOTE(review): this flip/rotate table was derived for the tile
% module's rotate/flip conventions — confirm against tile.erl before
% changing any entry.
orient_left(Tile, Side, Flip) ->
    case {Side, Flip} of
        {left, false} -> Tile;
        {left, true} -> tile:rotate(tile:flip(Tile), 180);
        {right, false} -> tile:flip(Tile);
        {right, true} -> tile:rotate(Tile, 180);
        {top, false} -> tile:rotate(tile:flip(Tile), 270);
        {top, true} -> tile:rotate(Tile, 270);
        {bottom, false} -> tile:rotate(Tile, 90);
        {bottom, true} -> tile:rotate(tile:flip(Tile), 90)
    end.
% Reorient Tile so that the edge recorded in the adjacency index as
% (Side, Flip) ends up as the tile's TOP edge.
% NOTE(review): as with orient_left/3, the table depends on
% tile:rotate/flip semantics — verify against tile.erl.
orient_up(Tile, Side, Flip) ->
    case {Side, Flip} of
        {left, false} -> tile:rotate(tile:flip(Tile), 270);
        {left, true} -> tile:rotate(Tile, 90);
        {right, false} -> tile:rotate(Tile, 270);
        {right, true} -> tile:rotate(tile:flip(Tile), 90);
        {top, false} -> Tile;
        {top, true} -> tile:flip(Tile);
        {bottom, false} -> tile:rotate(tile:flip(Tile), 180);
        {bottom, true} -> tile:rotate(Tile, 180)
    end.
% Builds an index of which tiles/orientations are able to match a given pattern.
% Build the adjacency index over all tiles: for every edge string
% (and its reversal, i.e. the flipped edge) record which tile, which
% side, and whether a flip is needed to produce that string.
build_adjacencies(Tiles) ->
    maps:fold(fun (ID, Tile, Acc) ->
        build_adjacencies(ID, Tile, Acc)
    end, #{}, Tiles).

% Index all four edges of a single tile.
build_adjacencies(ID, Tile, Map) ->
    Dirs = [
        {top, tile:top(Tile)},
        {left, tile:left(Tile)},
        {right, tile:right(Tile)},
        {bottom, tile:bottom(Tile)}
    ],
    lists:foldl(fun ({Side, String}, Acc) ->
        build_adjacencies(ID, Side, String, Acc)
    end, Map, Dirs),

% Index one edge under both its string and its reversed string; the
% Flip flag records which variant a later match corresponds to.
build_adjacencies(ID, Side, String, Map) ->
    lists:foldl(fun ({Flip, Key}, Acc) ->
        Existing = maps:get(Key, Acc, []),
        Acc#{ Key => [{ID, Side, Flip} | Existing] }
    end, Map, [{false, String}, {true, lists:reverse(String)}]).
% Find corner pieces by looking for tiles with two edges that can't be adjacent
% to any other edges.
find_corners(Adjs) ->
    %% an edge pattern owned by exactly one tile has no possible
    %% partner, so it must lie on the outer border of the puzzle
    UnmatchedEdges = maps:filter(fun (_, V) -> length(V) == 1 end, Adjs),
    ByID = partition_by_id(UnmatchedEdges),
    %% tiles with exactly two unmatched sides are the corners
    fold_out_corners(ByID).
% Group unmatched edges by tile id: the result maps each tile id to
% a set (a map of Side => true) of its sides that matched nothing.
partition_by_id(Edges) ->
    maps:fold(
      fun(_EdgeKey, [{ID, Side, _Flip}], Acc) ->
              Sides = maps:get(ID, Acc, #{}),
              Acc#{ ID => Sides#{ Side => true } }
      end, #{}, Edges).
% Keep only the entries whose side-set has exactly two members —
% those are the corner tiles (two border sides meeting at a corner).
fold_out_corners(ByID) ->
    maps:fold(
      fun(ID, Sides, Acc) when map_size(Sides) =:= 2 ->
              [{ID, Sides} | Acc];
         (_ID, _Sides, Acc) ->
              Acc
      end, [], ByID).
-ifdef(TEST).
test_data() -> tile:load("test.txt").
find_corners_test() ->
Tiles = test_data(),
Adj = build_adjacencies(Tiles),
Corners = find_corners(Adj),
?assertEqual(4, length(Corners)).
solve_test() ->
Soln = solve(test_data()),
?assertEqual("", Soln).
-endif. | day20/solver.erl | 0.605799 | 0.733273 | solver.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% cuttlefish_flag: datatype for simple boolean settings with
%% customizable names and values
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_flag).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
parse/1,
parse/2,
to_string/2
]).
-define(FMT(F, A), lists:flatten(io_lib:format(F, A))).
%% @doc Parse a flag value using the default on/off flag definition
%% (on -> true, off -> false).
parse(Value) ->
    cuttlefish_enum:parse(Value, to_enum(flag)).

%% @doc Parse a flag value against a custom flag definition; see
%% to_enum/1 for the accepted flag shapes.
parse(Value, Flag) ->
    cuttlefish_enum:parse(Value, to_enum(Flag)).

%% @doc Render a flag's effective value back to its config string by
%% delegating to the enum datatype.
to_string(Value, Flag) ->
    cuttlefish_enum:to_string(Value, to_enum(Flag)).
%% Translate a flag datatype into the equivalent enum datatype:
%%   flag                               -> on/off mapped to booleans
%%   {flag, On, Off}                    -> custom names, boolean values
%%   {flag, {On, OnVal}, {Off, OffVal}} -> custom names and values
to_enum(flag) ->
    to_enum({flag, on, off});
to_enum({flag, {OnName, OnValue}, {OffName, OffValue}}) ->
    {enum, [{OnName, OnValue}, {OffName, OffValue}]};
to_enum({flag, OnName, OffName}) ->
    {enum, [{OnName, true}, {OffName, false}]}.
-ifdef(TEST).
parse_test() ->
?assertEqual(true, parse("on")),
?assertEqual(false, parse("off")),
?assertEqual(true, parse("enabled", {flag, enabled, disabled})),
?assertEqual(false, parse("disabled", {flag, enabled, disabled})),
?assertEqual(tyk, parse("on", {flag, {on, tyk}, {off, torp}})),
?assertEqual(torp, parse("off", {flag, {on, tyk}, {off, torp}})),
?assertEqual({long, tuple, value},
parse("foo", {flag, {simple, ok},
{foo, {long, tuple, value}}})),
?assertEqual(ok,
parse("simple", {flag, {simple, ok},
{foo, {long, tuple, value}}})).
to_string_test() ->
?assertEqual(to_string(true, flag), "on"),
?assertEqual(to_string(on, flag), "on"),
?assertEqual(to_string(false, flag), "off"),
?assertEqual(to_string(off, flag), "off"),
?assertEqual(to_string(true, {flag, enabled, disabled}), "enabled"),
?assertEqual(to_string(enabled, {flag, enabled, disabled}), "enabled"),
?assertEqual(to_string(false, {flag, enabled, disabled}), "disabled"),
?assertEqual(to_string(disabled, {flag, enabled, disabled}), "disabled"),
?assertEqual(to_string(tyk, {flag, {on, tyk}, {off, torp}}), "on"),
?assertEqual(to_string(on, {flag, {on, tyk}, {off, torp}}), "on"),
?assertEqual(to_string(torp, {flag, {on, tyk}, {off, torp}}), "off"),
?assertEqual(to_string(off, {flag, {on, tyk}, {off, torp}}), "off"),
?assertEqual(to_string({long, tuple, value}, {flag, {simple, ok},
{foo, {long, tuple, value}}}),
"foo"),
?assertEqual(to_string(foo, {flag, {simple, ok},
{foo, {long, tuple, value}}}),
"foo"),
?assertEqual(to_string(ok, {flag, {simple, ok},
{foo, {long, tuple, value}}}),
"simple"),
?assertEqual(to_string(simple, {flag, {simple, ok},
{foo, {long, tuple, value}}}),
"simple").
-endif. | src/cuttlefish_flag.erl | 0.612194 | 0.514217 | cuttlefish_flag.erl | starcoder |
-module(multihash).
-export([digest/2, hash/1, code/1]).
-include("multihash.hrl").
-type code() :: non_neg_integer().
-type hash() ::
identity |
sha1 |
sha2_256 |
sha2_512 |
sha3_224 |
sha3_256 |
sha3_384 |
sha3_512 |
keccak224 |
keccak256 |
keccak384 |
keccak512 |
blake2b256 |
blake2b512 |
blake2s128 |
blake2s256.
-export_types([hash/0, code/0]).
-spec digest(binary(), hash() | code()) -> {ok, binary()} | {error, term()}.
%% @doc Compute the multihash digest of Bin. The hash may be named
%% either by its atom name or by its numeric multihash code; atoms
%% are mapped to codes first (hash_to_code/1 errors with badarg on
%% unknown names).
digest(Bin, Hash) when is_integer(Hash) ->
    multihash_nif:digest(Bin, Hash);
digest(Bin, Hash) when is_atom(Hash) ->
    digest(Bin, hash_to_code(Hash)).

-spec hash(binary()) -> {ok, hash()} | {error, term()}.
%% @doc Read the hash algorithm (as an atom) out of an encoded
%% multihash binary; propagates the NIF's error on malformed input.
hash(Bin) ->
    case code(Bin) of
        {ok, Code} -> {ok, code_to_hash(Code)};
        {error, _} = Error -> Error
    end.

-spec code(binary()) -> {ok, non_neg_integer()} | {error, term()}.
%% @doc Read the numeric multihash code out of an encoded multihash
%% binary.
code(Bin) ->
    multihash_nif:code(Bin).
%% Map a hash name to its numeric multihash code (code macros come
%% from multihash.hrl). An unknown name is a caller error -> badarg.
hash_to_code(identity) -> ?identity;
hash_to_code(sha1) -> ?sha1;
hash_to_code(sha2_256) -> ?sha2_256;
hash_to_code(sha2_512) -> ?sha2_512;
hash_to_code(sha3_224) -> ?sha3_224;
hash_to_code(sha3_256) -> ?sha3_256;
hash_to_code(sha3_384) -> ?sha3_384;
hash_to_code(sha3_512) -> ?sha3_512;
hash_to_code(keccak224) -> ?keccak224;
hash_to_code(keccak256) -> ?keccak256;
hash_to_code(keccak384) -> ?keccak384;
hash_to_code(keccak512) -> ?keccak512;
hash_to_code(blake2b256) -> ?blake2b256;
hash_to_code(blake2b512) -> ?blake2b512;
hash_to_code(blake2s128) -> ?blake2s128;
hash_to_code(blake2s256) -> ?blake2s256;
hash_to_code(_) -> error(badarg).
%% Inverse of hash_to_code/1: map a numeric multihash code back to
%% the hash name. A code outside the supported set raises
%% invalid_code (it came from data, not from the caller's source,
%% hence a distinct reason from badarg).
code_to_hash(?identity) -> identity;
code_to_hash(?sha1) -> sha1;
code_to_hash(?sha2_256) -> sha2_256;
code_to_hash(?sha2_512) -> sha2_512;
code_to_hash(?sha3_224) -> sha3_224;
code_to_hash(?sha3_256) -> sha3_256;
code_to_hash(?sha3_384) -> sha3_384;
code_to_hash(?sha3_512) -> sha3_512;
code_to_hash(?keccak224) -> keccak224;
code_to_hash(?keccak256) -> keccak256;
code_to_hash(?keccak384) -> keccak384;
code_to_hash(?keccak512) -> keccak512;
code_to_hash(?blake2b256) -> blake2b256;
code_to_hash(?blake2b512) -> blake2b512;
code_to_hash(?blake2s128) -> blake2s128;
code_to_hash(?blake2s256) -> blake2s256;
code_to_hash(_) -> error(invalid_code).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
hash_test() ->
Bin = <<"foo">>,
Hashes = [
identity,
sha1,
sha2_256,
sha2_512,
sha3_224,
sha3_256,
sha3_384,
sha3_512,
keccak224,
keccak256,
keccak384,
keccak512,
blake2b256,
blake2b512,
blake2s128,
blake2s256
],
lists:foreach(
fun(Hash) ->
{ok, Digest} = digest(Bin, Hash),
?assertEqual({ok, Hash}, hash(Digest))
end,
Hashes
),
ok.
invalid_hash_test() ->
?assertEqual({error, invalid_code}, digest(<<"foo">>, 16#FF)),
?assertError(badarg, digest(<<"foo">>, unknown)),
?assertEqual({error, invalid_code}, code(<<"foo">>)),
?assertEqual({error, invalid_code}, hash(<<"foo">>)),
ok.
-endif. | src/multihash.erl | 0.528047 | 0.505798 | multihash.erl | starcoder |
%%%
%%% Asynchronous Game of Life
%%% http://en.wikipedia.org/wiki/Conway's_Game_of_Life
%%% http://en.wikipedia.org/wiki/Asynchronous_cellular_automaton
%%%
%%% This implementation can be run step by step, controlled from
%%% the shell, in which case it preserves the classic game behaviour.
%%% Or it can be run completely asynchronously until it's stopped by
%%% 'stop' message. In this case the behaviour of the game is
%%% eventually consistent, meaning that at any particular point
%%% there might be cells on the grid from different generations,
%%% which you wouldn't expect in the classic game. However,
%%% since cells "synchronize" with each other, the results of the
%%% async game "on average" will be the same as in the classic one.
%%%
%%% In either case, the default state of the grid can be examined
%%% by running 'snapshot' command.
%%%
%%% To run the game forever, uncomment line 153.
%%%
%%% See also:
%%% life.erl - standard game implementation,
%%% life_async - async implementation with synchronization process.
%%%
-module(life_async_grid).
-author("<NAME> <<EMAIL>>").
%% Published API
-export([new_game/1, make_alive/2, start/1, snapshot/1, stop/1]).
%% Internal functions for process spawning
-export([cell/1]).
%% Cell state as a convenient data holder
-record(cell, {coord,
gen = 0,
state = dead,
prev_state = dead,
response_count = 0,
alive_count = 0,
neighbours = []}).
%% ------------------------------------------------------------------
%% Game Logic
%%
%% Grid = life_async_grid:new_game(10).
%% life_async_grid:make_alive([{3,2},{3,3},{3,4}], Grid).
%% life_async_grid:snapshot(Grid).
%% life_async_grid:start(Grid).
%% life_async_grid:snapshot(Grid).
%% life_async_grid:stop(Grid).
%% ------------------------------------------------------------------
%% @doc Creates a new grid of connected dead cells.
%% Callers should keep the returned grid reference to drive the game.
new_game(Size) ->
    Cells = new_grid(Size),
    connect_all(Cells),
    Cells.
%% @doc Builds a new grid of disconnected dead cells.
%% The grid is a map from {X,Y} coordinate to the pid of a freshly
%% spawned cell process.
new_grid(Size) ->
    SpawnCell =
        fun(Coord, Grid) ->
                Pid = spawn(?MODULE, cell, [#cell{coord = Coord}]),
                add(Coord, Pid, Grid)
        end,
    lists:foldl(SpawnCell, new_grid(), cartesian_plane(Size)).
%% @doc Sends 'connect' message to all cells.
%% It's done once during the initialization of the game.
%% Upon receiving this message, cells will discover their
%% neighbours and save references to them.
connect_all(Grid) ->
    ok = send(Grid, coordinates(Grid), {connect, Grid}).

%% @doc Sends 'alive' message to specific cells.
%% Upon receiving this message, cells will change their state to alive.
%% Used to seed the initial live pattern before starting the game.
make_alive(Coords, Grid) ->
    ok = send(Grid, Coords, alive).

%% @doc Sends 'start' message to all cells.
%% That will initiate the first step of the game.
start(Grid) ->
    ok = send(Grid, coordinates(Grid), start).
%% @doc Sends 'current_status' message to all cells, requesting
%% their status. Blocks until receiving responses from all cells.
%% Returns coordinates of all live cells together with their
%% generation numbers.
snapshot(Grid) ->
    All = coordinates(Grid),
    ok = send(Grid, All, {current_status, self()}),
    lists:foldl(
        fun(C, Acc) ->
                %% Selective receive: wait specifically for cell C's
                %% reply; replies from other cells stay queued in the
                %% mailbox until their turn in the fold.
                receive
                    {current_status, C, alive, Gen} -> [{C,Gen}|Acc];
                    {current_status, C, dead, _} -> Acc
                end
        end, [], All).
%% @doc Sends 'stop' message to all cells, forcing them to leave the
%% game; each cell process terminates normally on receipt.
stop(Grid) ->
    ok = send(Grid, coordinates(Grid), stop).
%% ------------------------------------------------------------------
%% Cell process
%% ------------------------------------------------------------------
%% @doc Main loop of a cell is to respond to messages from shell
%% and other cells.
%%
%% CurrentGen/PrevGen are bound before the receive so the
%% {your_status, Gen, Sender} clauses can match on the requester's
%% generation: a requester at our own generation gets our current
%% state; one at our generation minus one (i.e. we have already
%% advanced) gets our previous state.
cell(State) ->
    CurrentGen = State#cell.gen,
    PrevGen = CurrentGen - 1,
    receive
        %% One-off wiring step: resolve the pids of our neighbours.
        {connect, Grid} ->
            NCoords = neighbours(State#cell.coord, grid_size(Grid)),
            NewNeighbours = [cell(C, Grid) || C <- NCoords],
            cell(State#cell{neighbours = NewNeighbours});
        %% Seed command from the shell (see make_alive/2).
        alive ->
            cell(State#cell{state = alive});
        %% Status request from snapshot/1.
        {current_status, Sender} ->
            Sender ! {current_status, State#cell.coord, State#cell.state, State#cell.gen},
            cell(State);
        {your_status, CurrentGen, Sender} ->
            Sender ! {my_status, State#cell.state},
            cell(State);
        {your_status, PrevGen, Sender} ->
            Sender ! {my_status, State#cell.prev_state},
            cell(State);
        start ->
            self() ! step,
            cell(State);
        %% Ask every neighbour for its state at our generation.
        step ->
            lists:foreach(
                fun(N) -> N ! {your_status, State#cell.gen, self()} end,
                State#cell.neighbours),
            cell(State);
        %% Tally one neighbour's answer, then post a completion check.
        {my_status, NStatus} ->
            NewState = new_state(State, NStatus),
            self() ! {transition, NewState#cell.response_count, NewState#cell.alive_count},
            cell(NewState);
        %% All 8 neighbours have answered: apply the game rules and
        %% advance one generation, resetting the tallies.
        {transition, 8, NAlive} ->
            NewState = State#cell{state = new_status(State#cell.state, NAlive),
                                  prev_state = State#cell.state,
                                  gen = State#cell.gen + 1,
                                  response_count = 0,
                                  alive_count = 0},
            ok = print_transition(NewState),
            %% uncomment to disable global clock
            %self() ! step,
            cell(NewState);
        %% Not all answers are in yet; keep waiting.
        {transition, _, _} ->
            cell(State);
        stop -> ok
    end.
%% @doc Record one neighbour's reply: bump the response counter, and
%% when the neighbour reported itself alive, bump the live tally too.
new_state(#cell{response_count = R, alive_count = A} = Cell, alive) ->
    Cell#cell{response_count = R + 1, alive_count = A + 1};
new_state(#cell{response_count = R} = Cell, dead) ->
    Cell#cell{response_count = R + 1}.
%% @doc Conway's rules: exactly three live neighbours makes (or keeps)
%% a cell alive; a live cell with two live neighbours survives; any
%% other combination leaves the cell dead.
new_status(_CurrentState, NAlive) when NAlive =:= 3 -> alive;
new_status(alive, NAlive) when NAlive =:= 2 -> alive;
new_status(_CurrentState, _NAlive) -> dead.
%% @doc Print state transition.
%% Only the two real transitions (alive->dead, dead->alive) are
%% printed; an unchanged state is silently ignored.
print_transition(#cell{prev_state = alive, state = dead} = State) ->
    pretty_print(State);
print_transition(#cell{prev_state = dead, state = alive} = State) ->
    pretty_print(State);
print_transition(_) -> ok.

%% Emit "Gen: Coord -> State" for a cell that just changed state.
pretty_print(#cell{state = State, coord = Coord, gen = Gen}) ->
    io:format("~p: ~p -> ~p~n", [Gen, Coord, State]).
%% ------------------------------------------------------------------
%% Helper functions
%% ------------------------------------------------------------------
%% @doc All {X,Y} coordinates of a square plane with side length Size,
%% zero-based.
cartesian_plane(Size) ->
    Range = seq(Size),
    [{X, Y} || X <- Range, Y <- Range].

%% @doc Zero-based index sequence of length Size.
seq(Size) -> lists:seq(0, Size - 1).
grid_size(Grid) -> round(math:sqrt(population(Grid))).
%% @doc The game grid is modelled as a torus: coordinates wrap around,
%% so every cell has exactly eight neighbours, computed modulo the
%% grid size.
neighbours({X, Y}, Size) ->
    Offsets = [-1, 0, 1],
    [{mod(X + DX, Size), mod(Y + DY, Size)}
     || DX <- Offsets,
        DY <- Offsets,
        {DX, DY} =/= {0, 0}].

%% Mathematical modulo: result is non-negative for positive Y.
mod(X, Y) -> ((X rem Y) + Y) rem Y.
%% @doc Sends Message to the cell process at each of Coords on Grid.
%% Always returns ok; message delivery is asynchronous.
send(Grid, Coords, Message) ->
    Deliver = fun(Coord) -> cell(Coord, Grid) ! Message end,
    lists:foreach(Deliver, Coords),
    ok.
%% ------------------------------------------------------------------
%% Data Abstractions
%%
%% The mapping {X,Y} coordinate -> cell pid is kept behind these
%% accessors so the underlying container can be swapped out.
%% Currently dict is used. With Erlang 17 we can use frames.
%% ------------------------------------------------------------------
%% Create an empty grid.
new_grid() -> dict:new().
%% Total number of cells on the grid.
population(Grid) -> dict:size(Grid).
%% Register process Cell under coordinate Coord.
add(Coord, Cell, Grid) -> dict:store(Coord, Cell, Grid).
%% All coordinates present on the grid (order unspecified).
coordinates(Grid) ->
    [Coord || {Coord, _} <- dict:to_list(Grid)].
cell(Coord, Grid) -> dict:fetch(Coord, Grid). | src/life_async_grid.erl | 0.608827 | 0.530784 | life_async_grid.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_task_status).
-behaviour(gen_server).
% This module is used to track the status of long-running tasks.
% Long-running tasks register (add_task/3) and then update their status
% (update/1), and the task and its status are added to the tasks list. When
% a tracked task dies it will be automatically removed from tracking. To get
% the tasks list, use the all/0 function.
-export([start_link/0,init/1,terminate/2,handle_call/3,handle_cast/2,handle_info/2,
code_change/3,add_task/3,update/1,update/2,all/0,set_update_frequency/1]).
-include("couch_db.hrl").
%% Start the task-status server registered locally as ?MODULE.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% Coerce a task label to a binary; binaries pass through unchanged,
%% lists are converted.
to_binary(B) when is_binary(B) ->
    B;
to_binary(L) when is_list(L) ->
    ?l2b(L).
%% Register the calling process as a long-running task. Type, TaskName
%% and StatusText may be strings or binaries. Also primes the
%% process-dictionary rate limiter used by update/1,2 with a zero
%% frequency, so every update is sent until set_update_frequency/1
%% is called.
add_task(Type, TaskName, StatusText) ->
    put(task_status_update, {{0,0,0}, 0}),
    gen_server:call(?MODULE, {add_task, to_binary(Type),
        to_binary(TaskName), to_binary(StatusText)}).

%% Throttle status updates from this process to at most one per Msecs
%% milliseconds (stored in microseconds to match timer:now_diff/2).
set_update_frequency(Msecs) ->
    put(task_status_update, {{0,0,0}, Msecs * 1000}).

%% Set the task's status to the given text verbatim.
update(StatusText) ->
    update("~s", [StatusText]).
%% Update this task's status text, rate-limited by the frequency set
%% via set_update_frequency/1. Format/Data are rendered with
%% io_lib:format/2 and sent to the status server asynchronously.
%% Updates arriving before the configured interval has elapsed are
%% silently dropped.
update(Format, Data) ->
    {LastUpdateTime, Frequency} = get(task_status_update),
    %% os:timestamp/0 replaces the deprecated erlang:now/0; it returns
    %% the same {MegaSecs, Secs, MicroSecs} triple that
    %% timer:now_diff/2 expects, without now/0's global-ordering cost.
    Now = os:timestamp(),
    case timer:now_diff(Now, LastUpdateTime) >= Frequency of
    true ->
        put(task_status_update, {Now, Frequency}),
        gen_server:cast(?MODULE,
            {update_status, self(), ?l2b(io_lib:format(Format, Data))});
    false ->
        ok
    end.
% returns a list of proplists. Each proplist describes a running task.
all() ->
    [[{type,Type},
      {task,Task},
      {status,Status},
      {pid,?l2b(pid_to_list(Pid))}] ||
        {Pid, {Type,Task,Status}} <- ets:tab2list(tasks_by_pid)].

%% gen_server callback: create the named ETS table mapping
%% task pid -> {Type, TaskName, StatusText}.
init([]) ->
    % read configuration settings and register for configuration changes
    ets:new(tasks_by_pid, [ordered_set, protected, named_table]),
    {ok, nil}.

terminate(_Reason,_State) ->
    ok.

%% Register a task for the calling process; a process may register at
%% most once. The entry is cleaned up automatically via the monitor
%% when the process dies (see handle_info/2).
handle_call({add_task,Type,TaskName,StatusText}, {From, _}, Server) ->
    case ets:lookup(tasks_by_pid, From) of
    [] ->
        true = ets:insert(tasks_by_pid, {From,{Type,TaskName,StatusText}}),
        erlang:monitor(process, From),
        {reply, ok, Server};
    [_] ->
        {reply, {add_task_error, already_registered}, Server}
    end.
%% Replace the status text of a registered task; the single-element
%% match asserts the task exists.
handle_cast({update_status, Pid, StatusText}, Server) ->
    [{Pid, {Type,TaskName,_StatusText}}] = ets:lookup(tasks_by_pid, Pid),
    true = ets:insert(tasks_by_pid, {Pid, {Type,TaskName,StatusText}}),
    {noreply, Server}.

%% A monitored task process died: drop its entry from the table.
handle_info({'DOWN', _MonitorRef, _Type, Pid, _Info}, Server) ->
    ets:delete(tasks_by_pid, Pid),
    {noreply, Server}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}. | src/couchdb/couch_task_status.erl | 0.641535 | 0.426023 | couch_task_status.erl | starcoder |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Copyright 2018 Pentland Edge Ltd.
%%
%% Licensed under the Apache License, Version 2.0 (the "License"); you may not
%% use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
%% License for the specific language governing permissions and limitations
%% under the License.
%%
-module(haversine).
-export([distance/2, initial_bearing/2, destination/3, fmod/2]).
%% Mean radius of the earth used for haversine
-define(EARTH_MEAN_RAD, 6371000).
%% Great circle distance (in metres) between two {Lat, Lon} points
%% given in degrees, using the haversine formula.
%% Reference http://www.movable-type.co.uk/scripts/latlong.html.
distance({Lat1, Lon1}, {Lat2, Lon2}) ->
    Phi1 = coords:deg_to_rad(Lat1),
    Phi2 = coords:deg_to_rad(Lat2),
    DeltaPhi = Phi2 - Phi1,
    DeltaLambda = coords:deg_to_rad(Lon2) - coords:deg_to_rad(Lon1),
    SinHalfDPhi = math:sin(DeltaPhi / 2),
    SinHalfDLambda = math:sin(DeltaLambda / 2),
    A = SinHalfDPhi * SinHalfDPhi +
        math:cos(Phi1) * math:cos(Phi2) * SinHalfDLambda * SinHalfDLambda,
    C = 2 * math:atan2(math:sqrt(A), math:sqrt(1 - A)),
    ?EARTH_MEAN_RAD * C.
%% Initial bearing (degrees, 0..360 clockwise from north) on the great
%% circle path from point 1 to point 2.
%% θ = atan2(sin Δλ ⋅ cos φ2, cos φ1 ⋅ sin φ2 − sin φ1 ⋅ cos φ2 ⋅ cos Δλ)
%% where λ = Lon, φ = Lat, all angles in radians; the +-180 degree
%% atan2 result is shifted into the 0..360 range.
initial_bearing({Lat1, Lon1}, {Lat2, Lon2}) ->
    Phi1 = coords:deg_to_rad(Lat1),
    Phi2 = coords:deg_to_rad(Lat2),
    DeltaLambda = coords:deg_to_rad(Lon2) - coords:deg_to_rad(Lon1),
    Y = math:sin(DeltaLambda) * math:cos(Phi2),
    X = math:cos(Phi1) * math:sin(Phi2) -
        math:sin(Phi1) * math:cos(Phi2) * math:cos(DeltaLambda),
    Bearing = coords:rad_to_deg(math:atan2(Y, X)),
    if
        Bearing >= 0.0 -> Bearing;
        true -> Bearing + 360.0
    end.
%% Destination point given a start point, an initial bearing and a
%% distance along a great circle arc.
%% From http://www.movable-type.co.uk/scripts/latlong.html:
%% φ2 = asin( sin φ1 ⋅ cos δ + cos φ1 ⋅ sin δ ⋅ cos θ )
%% λ2 = λ1 + atan2( sin θ ⋅ sin δ ⋅ cos φ1, cos δ − sin φ1 ⋅ sin φ2 )
%% where φ is latitude, λ is longitude, θ is the bearing (clockwise
%% from north) and δ = d/R is the angular distance for distance d and
%% earth radius R. The resulting longitude is normalised to +-180.
destination({StartLat, StartLon}, Bearing, Distance) ->
    Phi1 = coords:deg_to_rad(StartLat),
    Lambda1 = coords:deg_to_rad(StartLon),
    Theta = coords:deg_to_rad(Bearing),
    Delta = Distance / ?EARTH_MEAN_RAD,
    Phi2 = math:asin(math:sin(Phi1) * math:cos(Delta) +
                     math:cos(Phi1) * math:sin(Delta) * math:cos(Theta)),
    Lambda2 = Lambda1 + math:atan2(
                  math:sin(Theta) * math:sin(Delta) * math:cos(Phi1),
                  math:cos(Delta) - math:sin(Phi1) * math:sin(Phi2)),
    Lat = coords:rad_to_deg(Phi2),
    Lon = fmod(coords:rad_to_deg(Lambda2) + 540.0, 360.0) - 180.0,
    {Lat, Lon}.
%% Floating point remainder of X/Y using truncated division (result
%% carries the sign of X). Referenced rvirding's luerl.
fmod(X, Y) ->
    X - Y * float(trunc(X / Y)).
%% Copyright (c) 2012-2015 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(lighthouse_node).
-behaviour(gen_server).
-export([
start_link/0,
start_link/1,
start_link/2,
start_link/3,
name/1,
samples/1,
samples/2,
values/1,
created/1,
updated/1,
properties/1,
property/3,
property/2,
edges/1,
edge/2,
add_edge/3,
edges_to/1,
edges_to_with_label/2,
stop/1,
subscribe/1,
subscribers/1,
node_ref/1,
where_is/1,
root/0,
counter/1
]).
-export([
init/1,
code_change/3,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2
]).
-define(ROOT, root).
%% Start the root node (registered under the ?ROOT name).
start_link() ->
    start_link(?ROOT).

%% Start a node with the given name and an empty property map.
start_link(Name) ->
    start_link(Name, #{}).

%% Start a node with properties but no outgoing edges.
start_link(Name, Properties) ->
    start_link(Name, Properties, #{}).

%% Start a node registered via gproc under Name, with the given
%% property map and a map of Label => TargetNode edges (edges are
%% attached asynchronously from init/1).
start_link(Name, Properties, Edges) ->
    gen_server:start_link(node_ref(Name), ?MODULE, [Name, Properties, Edges], []).
%% ------------------------------------------------------------------
%% Synchronous client API: each call is served by the node's
%% handle_call/3 clause of the same tag.
%% ------------------------------------------------------------------
%% Name the node was registered under.
name(Node) ->
    gen_server:call(Node, name).

%% Number of historical property snapshots the node retains.
samples(Node) ->
    gen_server:call(Node, samples).

%% Set the number of snapshots to retain.
samples(Node, Samples) ->
    gen_server:call(Node, {samples, Samples}).

%% Retained {Counter, Properties} history snapshots.
values(Node) ->
    gen_server:call(Node, values).

%% Creation timestamp (universal time).
created(Node) ->
    gen_server:call(Node, created).

%% Last-modification timestamp (universal time).
updated(Node) ->
    gen_server:call(Node, updated).

%% Full property map of the node.
properties(Node) ->
    gen_server:call(Node, properties).

%% Set (or overwrite) a single property.
property(Node, Key, Value) ->
    gen_server:call(Node, {property, Key, Value}).

%% Fetch a single property; returns {ok, Value} | error.
property(Node, Key) ->
    gen_server:call(Node, {property, Key}).

%% Map of Label => node_ref for all outgoing edges.
edges(Node) ->
    gen_server:call(Node, edges).

%% Add a labelled edge from FromNode to ToNode.
add_edge(FromNode, Label, ToNode) ->
    gen_server:call(FromNode, {add_edge, Label, ToNode}).

%% Resolve the node at the end of the edge with the given Label.
edge(Node, Label) ->
    gen_server:call(Node, {edge, Label}).
%% All {FromNodePid, Label} pairs of edges pointing at Node, found via
%% the gproc properties registered by the source nodes.
edges_to(Node) ->
    gproc:lookup_local_properties({?MODULE, edge, lighthouse_node:name(Node)}).

%% As edges_to/1, but only edges whose label compares equal (==) to Label.
edges_to_with_label(ToNode, Label) ->
    [FromNode || {FromNode, L} <- edges_to(ToNode), L == Label].

%% Stop the node process (asynchronous).
stop(Node) ->
    gen_server:cast(Node, stop).

%% Subscribe the calling process to this node's change notifications
%% (delivered as map messages; see notify/3).
subscribe(Node) ->
    gproc:reg({p, l, {?MODULE, node, lighthouse_node:name(Node)}}).

%% Pids currently subscribed to Node.
subscribers(Node) ->
    [Pid || {Pid, _} <- gproc:lookup_local_properties({?MODULE, node, lighthouse_node:name(Node)})].

%% gen_server reference of the root node.
root() ->
    node_ref(?ROOT).

%% gproc 'via' reference for the node registered under Name.
node_ref(Name) ->
    {via, gproc, {n, l, {?MODULE, node, Name}}}.
%% gen_server callback: post the initial edges back to self (they can
%% only be registered once the process is running and registered),
%% create the per-node gproc message counter, and build the state map.
init([Name, Properties, Edges]) ->
    lists:foreach(fun
            ({Edge, ToNode}) ->
                self() ! {add_edge, Edge, ToNode}
        end,
        maps:to_list(Edges)),
    Now = calendar:universal_time(),
    true = gproc:add_local_counter(counter(messages), 0),
    {ok, #{name => Name, properties => Properties, created => Now, updated => Now, counter => 0, values => [], samples => 0}}.
%% Look up the outgoing edge with the given label via a gproc select
%% over this process's edge properties; 'error' when absent.
handle_call({edge, Label}, _, S) ->
    case gproc:select([{{{p, l, {?MODULE, edge, '$1'}}, self(), Label}, [], ['$1']}]) of
        [Node] ->
            {reply, node_ref(Node), S};
        [] ->
            {reply, error, S}
    end;
handle_call(name, _, #{name := Name} = S) ->
    {reply, Name, S};
handle_call(samples, _, #{samples := Samples} = S) ->
    {reply, Samples, S};
handle_call({samples, Samples}, _, S) ->
    {reply, S#{samples => Samples}, S};
handle_call(values, _, #{values := Values} = S) ->
    {reply, Values, S};
handle_call(created, _, #{created := Created} = S) ->
    {reply, Created, S};
handle_call(updated, _, #{updated := Updated} = S) ->
    {reply, Updated, S};
handle_call(properties, _, #{properties := Properties} = S) ->
    {reply, maps:find(Key, Properties), S};
%% Set a property with history disabled (samples == 0): no snapshot is
%% recorded. Setting an unchanged value is a no-op (no notification).
handle_call({property, Key, Value}, _, #{properties := Properties, counter := Counter, samples := 0} = S) ->
    case maps:find(Key, Properties) of
        {ok, Value} ->
            {reply, ok, S};
        _ ->
            notify({change, Key}, Value, S),
            {reply, ok, S#{properties := maps:put(Key, Value, Properties), updated => calendar:universal_time(), counter := Counter+1}}
    end;
%% Set a property with history enabled: the previous property map is
%% prepended to 'values', truncated to the configured sample count.
handle_call({property, Key, Value}, _, #{properties := Properties, counter := Counter, values := Values, samples := Samples} = S) ->
    case maps:find(Key, Properties) of
        {ok, Value} ->
            {reply, ok, S};
        _ ->
            notify({change, Key}, Value, S),
            {reply, ok, S#{properties := maps:put(Key, Value, Properties), updated => calendar:universal_time(), counter := Counter+1, values := [{Counter, Properties} | lists:sublist(Values, Samples)]}}
    end;
%% Collect all outgoing edges as a Label => node_ref map.
handle_call(edges, _, S) ->
    {reply, maps:from_list([{Edge, lighthouse_node:node_ref(Node)} || {Edge, Node} <- gproc:select([{{{p, l, {?MODULE, edge, '$2'}}, self(), '$1'}, [], [{{'$1', '$2'}}]}])]), S};
%% Register an outgoing edge as a gproc property keyed on the target
%% node's name, valued with the edge label.
handle_call({add_edge, Label, ToNode}, _, S) ->
    gproc:add_local_property({?MODULE, edge, lighthouse_node:name(ToNode)}, Label),
    {reply, ok, S#{updated => calendar:universal_time()}}.
handle_cast(stop, S) ->
    {stop, normal, S}.

%% Deferred edge registration requested from init/1.
handle_info({add_edge, Label, ToNode}, S) ->
    gproc:add_local_property({?MODULE, edge, lighthouse_node:name(ToNode)}, Label),
    {noreply, S}.

code_change(_, State, _) ->
    {ok, State}.

%% Release all gproc registrations held by this process.
terminate(_, _) ->
    gproc:goodbye().
%% Bump the shared message counter and broadcast an event map to every
%% subscriber of this node via gproc.
notify(Event, Value, #{name := Name, counter := Counter, created := Created}) ->
    gproc:update_counter({c, l, counter(messages)}, 1),
    gproc:send({p, l, {?MODULE, node, Name}}, #{module => ?MODULE, from => self(), event => Event, value => Value, counter => Counter, created => Created}).

%% Resolve the pid of the node registered under Label, if any.
where_is(Label) ->
    lighthouse_util:where_is(node_ref(Label)).
counter(Type) ->
{?MODULE, Type}. | src/lighthouse_node.erl | 0.681303 | 0.47457 | lighthouse_node.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2007-2010 Basho Technologies, Inc. All Rights Reserved.
%%
%% -------------------------------------------------------------------
-module(riak_search_op_proximity).
-export([
extract_scoring_props/1,
preplan/2,
chain_op/4
]).
-include("riak_search.hrl").
-ifdef(TEST).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-define(INDEX_DOCID(Term), ({element(1, Term), element(2, Term)})).
%%% Conduct a proximity match over the terms. The #proximity operator
%%% expects to be handed a list of #term operators. The #term operator
%%% reads a sorted list of results from the index along with
%%% positional data, plus a "maximum distance" between the terms.
%%%
%%% The select_fun, when combining results, creates a list of the
%%% positions in order, in other words, if we search for ("see spot
%%% run"~10), then we'd have a list containing three sublists, the
%%% first is the positions for "see", the second is the positions for
%%% "spot", and the third is the positions for "run".
%%%
%%% A final iterator compares the positions to ensure that we find the
%%% terms all within the right distance from eachother. This acts
%%% differently depending on whether we are doing an exact phrase search vs. a proximity search.
%%%
%%% - For exact phrase search, the terms should be within N words from
%%% eachother, where N is the number of words in the phrase
%%% (accounting for stopwords). The sublists should line up so that
%%% some combination of word positions is M, M+1, M+2, etc.
%%%
%%% - For proximity search, the terms should be within N words from
%%% eachother, where N is specified by the user, ie: ("spot see
%%% run"~5). This works by continually peeling off the smallest value
%%% that we find in a sublist, and then check the min and maximum
%%% value across *all* sublists. If max-min < N then we have a match.
%% Delegate scoring-property extraction to this operator's child ops.
extract_scoring_props(Op) ->
    riak_search_op:extract_scoring_props(Op#proximity.ops).

%% Preplan the child operators and store the result in the op record.
preplan(Op, State) ->
    ChildOps = riak_search_op:preplan(Op#proximity.ops, State),
    Op#proximity { ops=ChildOps }.
%% Build the iterator pipeline for a phrase/proximity query and stream
%% its results to OutputPid, tagged with OutputRef. Returns {ok, 1}
%% (one result stream).
chain_op(Op, OutputPid, OutputRef, State) ->
    %% Create an iterator chain...
    OpList = Op#proximity.ops,
    Iterator1 = riak_search_op_utils:iterator_tree(fun select_fun/2, OpList, State),

    %% Wrap the iterator depending on whether this is an exact match
    %% or proximity search.
    case Op#proximity.proximity of
        exact ->
            Iterator2 = make_exact_match_iterator(Iterator1);
        Proximity ->
            Iterator2 = make_proximity_iterator(Iterator1, Proximity)
    end,

    %% Spawn up pid to gather and send results...
    F = fun() ->
                %% Link to the query coordinator so this worker dies
                %% together with the query.
                erlang:link(State#search_state.parent),
                riak_search_op_utils:gather_iterator_results(OutputPid, OutputRef, Iterator2())
        end,
    erlang:spawn_link(F),

    %% Return.
    {ok, 1}.
%% Given a result iterator, only return results that are an exact
%% match to the provided phrase.
make_exact_match_iterator(Iterator) ->
    fun() -> exact_match_iterator(Iterator()) end.

%% Filtering iterator: advance the underlying iterator, skipping any
%% result whose term positions do not contain a consecutive run.
exact_match_iterator({Term, PositionLists, Iterator}) ->
    case is_exact_match(PositionLists) of
        true ->
            %% It's a match! Return the result...
            NewIterator = fun() -> exact_match_iterator(Iterator()) end,
            {Term, PositionLists, NewIterator};
        false ->
            %% No match, so skip.
            exact_match_iterator(Iterator())
    end;
exact_match_iterator({eof, _}) ->
    {eof, ignore}.
%% Return true if some combination of term positions forms a strictly
%% consecutive run M, M+1, M+2, ... - i.e. the terms occur in the
%% document as an exact phrase.
is_exact_match(Positions) ->
    case get_position_heads(Positions) of
        undefined ->
            %% We've come to the end of a position list. Not an exact match.
            false;
        Heads ->
            %% This position list represents an exact match if it is
            %% sequential, ie: it's in ascending order with no gaps.
            case is_sequential(Heads) of
                true ->
                    true;
                false ->
                    %% Drop the earliest candidate position and retry.
                    NewPositions = remove_next_term_position(Positions),
                    is_exact_match(NewPositions)
            end
    end.
%% Return true if the provided list of integers is sequential, i.e.
%% each element is exactly one more than its predecessor. Empty and
%% singleton lists count as sequential.
is_sequential([A, B | Rest]) when B =:= A + 1 ->
    is_sequential([B | Rest]);
is_sequential([_, _ | _]) ->
    false;
is_sequential([_]) ->
    true;
is_sequential([]) ->
    true.
%% Given a result iterator, only return results that are within a
%% certain proximity of eachother.
make_proximity_iterator(Iterator, Proximity) ->
    fun() -> proximity_iterator(Iterator(), Proximity) end.

%% Filtering iterator: pass through only results whose term positions
%% can all fit inside a window of Proximity words.
proximity_iterator({Term, PositionLists, Iterator}, Proximity) ->
    case within_proximity(Proximity, PositionLists) of
        true ->
            %% It's a match! Return the result...
            NewIterator = fun() -> proximity_iterator(Iterator(), Proximity) end,
            {Term, PositionLists, NewIterator};
        false ->
            %% No match, so skip.
            proximity_iterator(Iterator(), Proximity)
    end;
proximity_iterator({eof, _}, _) ->
    {eof, ignore}.
%% Return true if all of the terms exist within Proximity words from
%% eachother.
within_proximity(Proximity, Positions) ->
    case get_position_heads(Positions) of
        undefined ->
            %% We've come to the end of a position list. Not a proximity match.
            false;
        Heads ->
            %% This position list represents a phrase match if the Max
            %% minus the Min value is less than our Proximity target.
            IsInProximity = abs(lists:max(Heads) - lists:min(Heads)) < Proximity,
            case IsInProximity of
                true ->
                    true;
                false ->
                    %% Window too wide: drop the earliest position and
                    %% slide forward through the document.
                    NewPositions = remove_next_term_position(Positions),
                    within_proximity(Proximity, NewPositions)
            end
    end.
%% Given a list of term positions, remove a single term position
%% according to a set of rules, and return the new list.
%%
%% The term position to remove is either the smallest duplicated
%% position, or if there are no duplicates, the smallest position
%% overall.
%%
%% This essentially mimics walking through the document from start to
%% finish, continually removing the earliest occurring term from
%% consideration. The functions that call this one then check to see
%% if the new set of positions represent a successfull proximity match
%% or exact phrase match, depending upon the search query.
remove_next_term_position([]) ->
    undefined;
remove_next_term_position(Positions) ->
    case get_position_heads(Positions) of
        undefined ->
            %% Can't calculate position heads. That means one of the
            %% lists is empty, so return undefined.
            undefined;
        List ->
            %% Figure out which value to remove. This is the smallest
            %% duplicated value, or if no values are duplicated, then the
            %% smallest value.
            ToRemove = get_smallest_duplicate_or_not(List),
            %% The position lists are walked in reverse; the /3 clause
            %% below re-reverses via its accumulator, so the overall
            %% list order is preserved and (for duplicates) the *last*
            %% occurrence is the one removed.
            remove_next_term_position(ToRemove, lists:reverse(Positions), [])
    end.

remove_next_term_position(ToRemove, [[ToRemove|Ps]|Rest], NextPass) ->
    %% We've found the value to remove, so toss it and don't remove
    %% anything else.
    remove_next_term_position(undefined, [Ps|Rest], NextPass);
remove_next_term_position(_, [[]|_], _) ->
    %% Reached the end of a position list, no way to continue.
    undefined;
remove_next_term_position(ToRemove, [Ps|Rest], NextPass) ->
    %% Just loop...
    remove_next_term_position(ToRemove, Rest, [Ps|NextPass]);
remove_next_term_position(_ToRemove, [], NextPass) ->
    %% We've processed all the position lists for this pass, so continue.
    NextPass.
%% First element of each position list, in order. Returns 'undefined'
%% when the input is undefined or when any sub-list is exhausted.
get_position_heads(undefined) ->
    undefined;
get_position_heads(Lists) ->
    get_position_heads(Lists, []).

get_position_heads([[] | _], _Acc) ->
    %% One term has run out of positions; no complete head set exists.
    undefined;
get_position_heads([[Head | _] | Rest], Acc) ->
    get_position_heads(Rest, [Head | Acc]);
get_position_heads([], Acc) ->
    lists:reverse(Acc).
%% Given a list of integers, return the smallest duplicated value; if
%% nothing is duplicated, the smallest value; for [], 'undefined'.
get_smallest_duplicate_or_not(Values) ->
    get_smallest_duplicate_or_not(lists:sort(Values), undefined, undefined).

get_smallest_duplicate_or_not([V, V | Rest], Dup, Min)
  when Dup == undefined orelse V < Dup ->
    %% Adjacent equal values in the sorted list: first (or smaller)
    %% duplicate found so far.
    get_smallest_duplicate_or_not(Rest, V, Min);
get_smallest_duplicate_or_not([V | Rest], Dup, Min)
  when Min == undefined orelse V < Min ->
    %% New smallest single value.
    get_smallest_duplicate_or_not(Rest, Dup, V);
get_smallest_duplicate_or_not([_ | Rest], Dup, Min) ->
    %% Nothing notable about this element; keep scanning.
    get_smallest_duplicate_or_not(Rest, Dup, Min);
get_smallest_duplicate_or_not([], Dup, Min) ->
    %% Duplicates take precedence over the plain minimum.
    if
        Dup /= undefined -> Dup;
        Min /= undefined -> Min;
        true -> undefined
    end.
%% Given a pair of iterators, combine into a single iterator returning
%% results of the form {Term, PositionLists, NewIterator}. Apart from
%% the PositionLists, this is very similar to the #intersection
%% operator logic, except it doesn't need to worry about any #negation
%% operators.
%% Raw leaf results carry a #term op; these clauses normalize them by
%% extracting and sorting the 'p' (positions) proplist entry.
select_fun({{Index, DocID, Props}, Op, Iterator}, I2) when is_record(Op, term) ->
    %% Normalize the first iterator result, replacing Op with a list of positions.
    Positions = proplists:get_value(p, Props, []),
    Positions1 = lists:sort(Positions),
    select_fun({{Index, DocID, Props}, [Positions1], Iterator}, I2);

select_fun(I1, {{Index, DocID, Props}, Op, Iterator}) when is_record(Op, term) ->
    %% Normalize the second iterator result, replacing Op with a list of positions.
    Positions = proplists:get_value(p, Props, []),
    Positions1 = lists:sort(Positions),
    select_fun(I1, {{Index, DocID, Props}, [Positions1], Iterator});

select_fun({Term1, Positions1, Iterator1}, {Term2, Positions2, Iterator2}) when ?INDEX_DOCID(Term1) == ?INDEX_DOCID(Term2) ->
    %% If terms are equal, then combine the terms, concatenate the
    %% position list, and return the result.
    NewTerm = riak_search_utils:combine_terms(Term1, Term2),
    {NewTerm, Positions1 ++ Positions2, fun() -> select_fun(Iterator1(), Iterator2()) end};

%% Both sides are sorted by {Index, DocID}, so advance whichever side
%% is behind until they meet (classic merge-intersection).
select_fun({Term1, _, Iterator1}, {Term2, Positions2, Iterator2}) when ?INDEX_DOCID(Term1) < ?INDEX_DOCID(Term2) ->
    %% Terms not equal, so iterate one of them...
    select_fun(Iterator1(), {Term2, Positions2, Iterator2});

select_fun({Term1, Positions1, Iterator1}, {Term2, _, Iterator2}) when ?INDEX_DOCID(Term1) > ?INDEX_DOCID(Term2) ->
    %% Terms not equal, so iterate one of them...
    select_fun({Term1, Positions1, Iterator1}, Iterator2());

select_fun({eof, _}, _) ->
    %% Hit an eof, no more results...
    {eof, []};

select_fun(_, {eof, _}) ->
    %% Hit an eof, no more results...
    {eof, []}.
-ifdef(TEST).
%% EUnit tests for remove_next_term_position/1.
%% Note: ?assertEqual takes the *expected* value as its first
%% argument; the original calls had the arguments reversed, which
%% swaps "expected" and "actual" in failure reports.

remove_next_term_position_1_test() ->
    %% No duplicated heads: the smallest head overall (1) is removed.
    Input =
        [[1,2,3],
         [4,5,6],
         [7,8,9]],
    Expected =
        [[2,3],
         [4,5,6],
         [7,8,9]],
    ?assertEqual(Expected, remove_next_term_position(Input)).

remove_next_term_position_2_test() ->
    %% The smallest head sits in the middle list.
    Input =
        [[4,5,6],
         [1,2,3],
         [7,8,9]],
    Expected =
        [[4,5,6],
         [2,3],
         [7,8,9]],
    ?assertEqual(Expected, remove_next_term_position(Input)).

remove_next_term_position_3_test() ->
    %% The smallest head sits in the last list.
    Input =
        [[4,5,6],
         [7,8,9],
         [1,2,3]],
    Expected =
        [[4,5,6],
         [7,8,9],
         [2,3]],
    ?assertEqual(Expected, remove_next_term_position(Input)).

remove_next_term_position_4_test() ->
    %% Duplicated smallest head: only the last occurrence is removed.
    Input =
        [[1,2,3],
         [4,5,6],
         [7,8,9],
         [1,2,3]],
    Expected =
        [[1,2,3],
         [4,5,6],
         [7,8,9],
         [2,3]],
    ?assertEqual(Expected, remove_next_term_position(Input)).

remove_next_term_position_5_test() ->
    %% The smallest *duplicated* head (1) wins over the smallest
    %% overall head (0); again only the last occurrence is removed.
    Input =
        [[0],
         [1,2,3],
         [4,5,6],
         [7,8,9],
         [1,2,3]],
    Expected =
        [[0],
         [1,2,3],
         [4,5,6],
         [7,8,9],
         [2,3]],
    ?assertEqual(Expected, remove_next_term_position(Input)).

remove_next_term_position_6_test() ->
    %% An exhausted position list means no further progress is possible.
    Input =
        [[1,2,3],
         [4,5,6],
         [],
         [1,2,3]],
    Expected =
        undefined,
    ?assertEqual(Expected, remove_next_term_position(Input)).
-ifdef(EQC).
is_sequential_prop() ->
?FORALL(L, non_empty(list(int())),
begin
A = lists:min(L),
Z = lists:max(L),
?assertEqual(L =:= lists:seq(A, Z),
is_sequential(L)),
true
end).
is_sequential_test() ->
true = eqc:quickcheck(eqc:numtests(5000, is_sequential_prop())).
-endif. % EQC
-endif. | src/riak_search_op_proximity.erl | 0.572723 | 0.548432 | riak_search_op_proximity.erl | starcoder |
% -*- indent-tabs-mode:nil; -*-
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2019, <NAME>
%%% @doc
%%%
%%% This module handles the functions for the Discrete Fourier
%%% Transforms (DFT).
%%%
%%% All the computation here is based on a freely available DSP ebook
%%% <a href="http://www.dspguide.com/pdfbook.htm">here</a>.
%%%
%%% The main equations used are from
%%% <a href="http://www.dspguide.com/ch8.htm">chapter 8</a>.
%%%
%%% For equations 8-2 and 8-3 see
%%% <a href="http://www.dspguide.com/ch8/5.htm">this section</a>
%%%
%%% For equations 8-4 see
%%% <a href="http://www.dspguide.com/ch8/6.htm">this section</a>
%%%
%%% @end
%%% Created : 22 Sep 2019 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(tsefft_dft).
-export([sum/1, do_sum/3, dft_by_corr/1, inverse_dft/1]).
%%--------------------------------------------------------------------
%% @doc Initiate the worker process to perform the sum.
%%
%% Compute the sum of a set of data points. The data points are
%% expected to be in the TS tagged with `Tag'.
%%
%% The `Tag' can be any term, although, the best choices are either an
%% atom unique to this data set, or perhaps `{tag,''_'`}', where the
%% second element may identify the index of the data element within
%% the set. But we should use the '_' pattern as wild card to capture
%% all the data.
%%
%% There should be two types of tuples: the count of data,
%% `{Tag,count,Count}', and the `Count' data values: `{Tag, value,
%% Value}'.
%%
%% The sum will be performed within a single process, during which,
%% the `count' and `value' tuples will be consumed.
%%
%% Once complete, the sum will be added to the TS as `{Tag, sum, Sum}'
%%
%% @end
%%--------------------------------------------------------------------
-spec sum(term()) -> ok.
sum(Tag) ->
    %% Consume the {Tag, count, N} tuple first, then hand off to a
    %% worker process that folds the N {Tag, value, V} tuples into a
    %% single {Tag, sum, Sum} tuple.
    {[Count], _} = espace:in({Tag, count, '$1'}),
    espace:worker({?MODULE, do_sum, [Tag, Count, 0]}),
    ok.
%%--------------------------------------------------------------------
%% @doc Worker loop that consumes the data points and produces their sum.
%%
%% Consumes `{Tag, value, _}' tuples one at a time until `Remaining'
%% values have been taken, then publishes `{Tag, sum, Total}'.
%%
%% @end
%%--------------------------------------------------------------------
-spec do_sum(term(), integer(), number()) -> ok.
do_sum(Tag, 0, Total) ->
    espace:out({Tag, sum, Total}),
    ok;
do_sum(Tag, Remaining, Total) ->
    {[Value], _} = espace:in({Tag, value, '$1'}),
    do_sum(Tag, Remaining - 1, Total + Value).
%%--------------------------------------------------------------------
%% @doc Compute the DFT by Correlation.
%%
%% Compute the `Re' and `Im' frequency components of `Signal' using
%% the equations defined in equation 8-4 of chapter 8 of the the DSP
%% Guide, see http://www.dspguide.com/ch8/6.htm.
%%
%% For each of the N input data points we generate 2 x N/2 tuples,
%% which are then summed up to form the N/2 {Re_x,K} and {Im_x,K} data sets.
%%
%% The function returns N/2 complex pairs.
%%
%% @end
%%--------------------------------------------------------------------
-spec dft_by_corr([float()]) -> [{float(), float()}].
dft_by_corr([]) ->
    [];
dft_by_corr(Signal) ->
    NN = length(Signal),
    Pi = math:pi(),
    % Correlation basis functions of eqn 8-4 (K: frequency, I: sample index)
    S = fun (K, I, N) -> math:sin(2*Pi*K*I/N) end,
    C = fun (K, I, N) -> math:cos(2*Pi*K*I/N) end,
    % load the per-sample product tuples (and their counts) into the TS
    dft_by_corr_data(Signal, 0, S, NN, im_x),
    dft_by_corr_data(Signal, 0, C, NN, re_x),
    % sum up the data points, the Im sums will need to be negated afterwards
    % (eqn 8-4 defines Im_X[k] with a leading minus sign)
    lists:foreach(fun (K) -> tsefft_dft:sum({re_x, K}) end, lists:seq(0, NN div 2)),
    lists:foreach(fun (K) -> tsefft_dft:sum({im_x, K}) end, lists:seq(0, NN div 2)),
    % extract the frequency coordinate pairs; espace:in/1 blocks until each
    % sum tuple has been produced, so this also acts as the synchronisation
    Ext_sum = fun (Tag) -> {[Sum], _} = espace:in({Tag, sum, '$1'}), Sum end,
    [ {Ext_sum({re_x, K}), -Ext_sum({im_x, K})} || K <- lists:seq(0, NN div 2) ].
%%--------------------------------------------------------------------
%% @doc Generate the data points for the Re_X_k and Im_X_k values.
%%
%% For every sample in the signal, one product tuple per frequency
%% index `K' (0..N/2) is evaluated concurrently via `espace:eval',
%% tagged `{Tag, K}' where `Tag' is `re_x' or `im_x'. Once all samples
%% are emitted, a `{{Tag, K}, count, N}' tuple is published for each
%% `K' so that the subsequent sum/1 workers know how many values to
%% consume. In total 2 x N x (N/2+1) value tuples are produced.
%%
%% @end
%%--------------------------------------------------------------------
-spec dft_by_corr_data([float()], integer(), function(), integer(), term()) -> ok.
dft_by_corr_data([], _I, _C_S, N, Tag) ->
    Publish = fun(K) -> espace:out({{Tag, K}, count, N}) end,
    lists:foreach(Publish, lists:seq(0, N div 2)),
    ok;
dft_by_corr_data([Sample | Rest], I, C_S, N, Tag) ->
    Emit = fun(K) ->
        espace:eval({{Tag, K}, value, fun() -> Sample * C_S(K, I, N) end})
    end,
    lists:foreach(Emit, lists:seq(0, N div 2)),
    dft_by_corr_data(Rest, I + 1, C_S, N, Tag).
%%--------------------------------------------------------------------
%% @doc Calculate the inverse DFT from the cosine and sine amplitudes.
%%
%% The input is a list of N/2 complex pairs.
%%
%% We are applying equations 8-2 and 8-3 from chapter 8 of the the DSP
%% Guide, see http://www.dspguide.com/ch8/5.htm.
%%
%% From the N/2 complex pairs, we generate the 2 x `N/2' scaled, eqn
%% 8-3, `{re_x_s,K}' and `{im_x_s,K}'.
%%
%% From the `{re_x_s,K}' and `{im_x_s,K}' we generate the 2 x N x N/2
%% products, `{x_re,I}' and `{x_im,I}', which when summed up, eqn
%% 8-2, produce the N x `{x,I}' values for the result.
%%
%% @end
%%--------------------------------------------------------------------
-spec inverse_dft([ {float(), float()} ]) -> [float()].
inverse_dft([]) ->
    [];
inverse_dft(Re_Im_x) ->
    N2 = length(Re_Im_x)-1, %% this is N/2 in the eqns, note index is 0..(N/2)
    NN = 2*N2, %% this is N in the eqns, index is 0..(N-1)
    % split and add the individual complex pairs into the TS
    split_out(Re_Im_x, 0),
    % apply the scale factors to the tuples (concurrently, eqn 8-3)
    scale_coeff(N2),
    Pi = math:pi(),
    % basis functions of eqn 8-2 (K: frequency index, I: sample index)
    C = fun (K, I) -> math:cos(2*Pi*K*I/NN) end,
    S = fun (K, I) -> math:sin(2*Pi*K*I/NN) end,
    % generate the 2 x N x N/2 elements that will be summed up
    lists:foreach(fun (I) -> calc_x_I_K(I, N2, C, re) end, lists:seq(0, NN-1)),
    lists:foreach(fun (I) -> calc_x_I_K(I, N2, S, im) end, lists:seq(0, NN-1)),
    % sum up the results
    lists:foreach(fun (I) -> tsefft_dft:sum({x_re, I}) end, lists:seq(0, NN-1)),
    lists:foreach(fun (I) -> tsefft_dft:sum({x_im, I}) end, lists:seq(0, NN-1)),
    % generate the x_i set (each worker consumes the two sums for its index)
    lists:foreach(fun (I) -> espace:eval({{x,I}, {fun calc_x_I/1, [I]}}) end, lists:seq(0, NN-1)),
    % flush the input data, as they're no longer needed
    lists:foreach(fun (K) ->
                          espace:in({{re_x_s,K}, '_'}),
                          espace:in({{im_x_s,K}, '_'})
                  end,
                  lists:seq(0,N2)),
    % extract the calculated x_I into an ordered sequence; the blocking
    % espace:in/1 calls synchronise on the completion of each worker
    Ext_x_i = fun (I) -> {[X_i], _} = espace:in({{x,I}, '$1'}), X_i end,
    Signal = [ Ext_x_i(I) || I <- lists:seq(0,NN-1)],
    Signal.
%%--------------------------------------------------------------------
%% @doc Apply scaling to all Re/Im coefficients.
%%
%% For every `K' in 0..N2, an `espace:eval' worker is started that
%% replaces the `{{re_x,K},Value}' tuple with `{{re_x_s,K},Scaled}',
%% and likewise `{{im_x,K},Value}' with `{{im_x_s,K},Scaled}'. The
%% scaling itself (eqn 8-3) is performed concurrently by scale_re/2
%% and scale_im/2.
%%
%% @end
%%--------------------------------------------------------------------
-spec scale_coeff(integer()) -> ok.
scale_coeff(N2) ->
    Indices = lists:seq(0, N2),
    lists:foreach(
      fun(K) -> espace:eval({{re_x_s, K}, {fun scale_re/2, [K, N2]}}) end,
      Indices),
    lists:foreach(
      fun(K) -> espace:eval({{im_x_s, K}, {fun scale_im/2, [K, N2]}}) end,
      Indices),
    ok.
%%--------------------------------------------------------------------
%% @doc Given a list of {Re, Im} pairs, dump them as indexed tuples.
%%
%% Each pair at position `K' (starting from the given initial index)
%% is written to the tuple space as `{{re_x,K}, Re}' and
%% `{{im_x,K}, Im}'. This is the first stage of the inverse DFT.
%%
%% @end
%%--------------------------------------------------------------------
-spec split_out(list(), integer()) -> ok.
split_out(Pairs, K0) ->
    lists:foldl(
      fun({Re, Im}, K) ->
              espace:out({{re_x, K}, Re}),
              espace:out({{im_x, K}, Im}),
              K + 1
      end,
      K0, Pairs),
    ok.
%%--------------------------------------------------------------------
%% @doc Apply scaling to one input Im coefficient and return the
%% scaled value (eqn 8-3: divide by N/2).
%%
%% The scaling is only needed once per coefficient, so the source
%% tuple is consumed (removed) from the tuple space.
%%
%% @end
%%--------------------------------------------------------------------
-spec scale_im(integer(), integer()) -> float().
scale_im(K, N2) ->
    {[Coeff], _} = espace:in({{im_x, K}, '$1'}),
    Coeff / N2.
%%--------------------------------------------------------------------
%% @doc Apply scaling to one input Re coefficient and return the
%% scaled value (eqn 8-3: the first and last coefficients are divided
%% by N, all others by N/2).
%%
%% The scaling is only needed once per coefficient, so the source
%% tuple is consumed (removed) from the tuple space.
%%
%% @end
%%--------------------------------------------------------------------
-spec scale_re(integer(), integer()) -> float().
scale_re(K, N2) ->
    {[Coeff], _} = espace:in({{re_x, K}, '$1'}),
    Divisor = if
                  K =:= 0 orelse K =:= N2 -> 2 * N2;
                  true -> N2
              end,
    Coeff / Divisor.
%%--------------------------------------------------------------------
%% @doc initiate calculation of individual x_i products.
%%
%% For a given `I' we start N/2+1 concurrent product calculations
%% (one per frequency index `K'), each producing a value tuple tagged
%% `{x_re, I}' or `{x_im, I}'. A matching count tuple of `N2+1' is
%% published so that the later sum/1 pass knows how many values to
%% consume. These are the individual terms of the sums in eqn 8-2.
%%
%% `C_S' is either the cosine or sine basis function; the fourth
%% argument selects the `re' or `im' coefficient set.
%%
%% @end
%%--------------------------------------------------------------------
-spec calc_x_I_K(integer(), integer(), function(), term()) -> ok.
calc_x_I_K(I, N2, C_S, re) ->
    emit_products(I, N2, C_S, re, x_re);
calc_x_I_K(I, N2, C_S, im) ->
    emit_products(I, N2, C_S, im, x_im).

%% Start the per-K product workers and publish the value count.
emit_products(I, N2, C_S, ReIm, OutTag) ->
    lists:foreach(
      fun(K) ->
              espace:eval({{OutTag, I}, value, {fun calc_product/4, [ReIm, C_S, I, K]}})
      end,
      lists:seq(0, N2)),
    espace:out({{OutTag, I}, count, N2 + 1}),
    ok.
%%--------------------------------------------------------------------
%% @doc calculate the product of a given Re/Im coefficient and its
%% corresponding cosine/sine function.
%%
%% These are the individual products in the sums in eqn 8-2.
%%
%% Note that unlike the rest of this module, here we use
%% `espace:rd/1' (non-destructive read) instead of `espace:in/1',
%% since each Re/Im coefficient is needed by all `x_i'. The
%% coefficient tuples are flushed by inverse_dft/1 once the entire
%% computation is complete.
%%
%% @end
%%--------------------------------------------------------------------
-spec calc_product(re|im, function(), integer(), integer()) -> float().
calc_product(re, C_S, I, K) ->
    {[Coeff], _} = espace:rd({{re_x_s, K}, '$1'}),
    Coeff * C_S(I, K);
calc_product(im, C_S, I, K) ->
    {[Coeff], _} = espace:rd({{im_x_s, K}, '$1'}),
    Coeff * C_S(I, K).
%%--------------------------------------------------------------------
%% @doc calculate the x_i as the sum of the two Re and Im sums.
%%
%% This is the final stage of calculation of an x_i: it consumes the
%% `{{x_re,I}, sum, _}' and `{{x_im,I}, sum, _}' tuples produced by
%% the sum/1 workers and returns their total.
%%
%% @end
%%--------------------------------------------------------------------
-spec calc_x_I(integer()) -> float().
calc_x_I(I) ->
    {[ReSum], _} = espace:in({{x_re, I}, sum, '$1'}),
    {[ImSum], _} = espace:in({{x_im, I}, sum, '$1'}),
    ReSum + ImSum.
%% @doc Basic GIF encoder
%%
%% [https://www.w3.org/Graphics/GIF/spec-gif89a.txt]
%% Originally based on [https://github.com/huacnlee/rucaptcha/tree/master/ext/rucaptcha]
-module(ecaptcha_gif).
-export([encode/4]).
%% For tests
-export([min_bits_to_fit/1]).
-spec encode(
    binary(),
    pos_integer(),
    pos_integer(),
    ecaptcha:color_name() | ecaptcha:color_rgb()
) -> iodata().
%% Encode an 8-bit-per-pixel image as a GIF89a iodata stream.
%% A named color is first resolved to its RGB value; the pixel buffer
%% must contain exactly Width * Height bytes.
encode(Pixels, Width, Height, ColorName) when is_atom(ColorName) ->
    RGB = ecaptcha_color:by_name(ColorName),
    encode(Pixels, Width, Height, RGB);
encode(Pixels, Width, Height, Color) when byte_size(Pixels) =:= Width * Height ->
    Palette = ecaptcha_color:new_palette(Pixels, Color),
    [
        header(Width, Height, Palette),
        encode_raster_data(Pixels, Palette),
        trailer()
    ].
%% Header

%% GIF header: logical screen descriptor, global color table, image descriptor.
header(Width, Height, Palette) ->
    ColorCount = ecaptcha_color:palette_size(Palette),
    [
        screen_descriptor(Width, Height, ColorCount),
        palette(Palette),
        image_descriptor(Width, Height)
    ].
%% erlfmt-ignore
%% GIF89a signature plus the logical screen descriptor. The 3-bit
%% GCT-size field encodes a table of 2^(Bits+1) entries, matching the
%% power-of-two palette emitted by palette/1.
screen_descriptor(Width, Height, PaletteSize) ->
    Bits = min_bits_to_fit(PaletteSize) - 1,
    <<"GIF89a",
      Width:16/little,   % Logical screen width
      Height:16/little,  % Logical screen height
      1:1,               % GCTFlag: Global Color Table exists
      Bits:3,            % ColorResolution
      0:1,               % SortFlag: not sorted
      Bits:3,            % GCTSize: table holds 2^(Bits+1) colors
      0,                 % BackgroundColor index in palette
      0>>.               % Aspect ratio
%% Emit the global color table as 3-byte RGB entries. The GIF color
%% table size must be a power of two, so when the palette has fewer
%% colors the remaining slots are padded with a dummy color (white).
palette(Palette) ->
    NumColors = ecaptcha_color:palette_size(Palette),
    ColorsRGB = ecaptcha_color:palette_colors_by_frequency(Palette),
    Capacity = 1 bsl min_bits_to_fit(NumColors),  % 2 ^ bits
    Filler = lists:duplicate(Capacity - NumColors, ecaptcha_color:by_name(white)),
    [ecaptcha_color:bin_3b(Color) || Color <- ColorsRGB ++ Filler].
%% Smallest number of bits (1..8) whose value range can index `Size'
%% palette entries; fails with `size_overflow' above 256 entries.
min_bits_to_fit(Size) ->
    bits_to_fit(Size, 1, 2).

bits_to_fit(Size, Bits, Capacity) when Size =< Capacity ->
    Bits;
bits_to_fit(_Size, 8, _Capacity) ->
    error(size_overflow);
bits_to_fit(Size, Bits, Capacity) ->
    bits_to_fit(Size, Bits + 1, Capacity * 2).
%% Image descriptor: "," separator, origin (0,0), the image dimensions,
%% then a zero flags byte (no local color table, not interlaced,
%% reserved bits and local-table size all clear).
image_descriptor(Width, Height) ->
    Origin = <<0:16/little, 0:16/little>>,
    Size = <<Width:16/little, Height:16/little>>,
    <<",", Origin/binary, Size/binary, 0>>.
%% Body

%% LZW-compress the palette-indexed pixels and wrap them into GIF data
%% sub-blocks, prefixed with the LZW minimum code size byte (at least 2,
%% as required by the GIF spec even for tiny palettes).
encode_raster_data(Pixels, Palette) ->
    Indexed = map_to_palette(Pixels, Palette),
    CodeSize = max(min_bits_to_fit(ecaptcha_color:palette_size(Palette)), 2),
    Compressed = ecaptcha_gif_lzw:compress(Indexed, CodeSize),
    [CodeSize | encode_chunks(Compressed)].
%% Translate each raw pixel byte into its index within the palette.
map_to_palette(Pixels, Palette) ->
    Index = fun(Pixel) -> ecaptcha_color:palette_get_index(Pixel, Palette) end,
    << <<(Index(Pixel))>> || <<Pixel>> <= Pixels >>.
%% Split a binary into GIF data sub-blocks: each block is a length byte
%% (max 255) followed by that many data bytes; a zero byte terminates
%% the block stream.
encode_chunks(<<>>) ->
    [0];
encode_chunks(Data) when byte_size(Data) >= 255 ->
    <<Block:255/binary, Rest/binary>> = Data,
    [255, Block | encode_chunks(Rest)];
encode_chunks(Data) ->
    [byte_size(Data), Data, 0].
%% Trailer

%% GIF trailer byte (";") marking the end of the data stream.
trailer() ->
    <<$;>>.
-module(mapz).
% API
-export([deep_find/2]).
-export([deep_get/2]).
-export([deep_get/3]).
-export([deep_put/3]).
-export([deep_update/3]).
-export([deep_update_with/3]).
-export([deep_update_with/4]).
-export([deep_remove/2]).
-export([deep_merge/1]).
-export([deep_merge/2]).
-export([deep_merge/3]).
-export([deep_merge_with/2]).
-export([deep_merge_with/3]).
-export([deep_iterator/1]).
-export([deep_next/1]).
-export([inverse/1]).
-export([format_error/2]).
% We must inline this so that the stack trace points to the correct function.
-compile({inline, [error_info/2]}).
%--- Types ---------------------------------------------------------------------
-export_type([path/0]).
-export_type([iterator/0]).
-export_type([combiner/0]).
-type path() :: [term()].
% A list of keys that are used to iterate deeper into a map of maps.
-opaque iterator() :: {?MODULE, none | maps:iterator(_, _) | {_, _, maps:iterator(_, _)}, path(), [maps:iterator(_, _)]}.
% An iterator representing the associations in a map with keys of type Key and values of type Value.
%
% Created using {@link deep_iterator/1}.
%
% Consumed by {@link deep_next/1}.
-type combiner() :: fun((Path::path(), Old::term(), New::term()) -> term()).
% A combiner function that takes a path, and its two conflicting old values and
% returns a new value.
%--- API ----------------------------------------------------------------------
% @doc Looks up `Path' in `Map'.
%
% Returns `{ok, Value}', where `Value' is the value associated with
% `Path', or `error' if no value is associated with `Path' (either a
% key is missing or a non-map value is hit before the path is
% exhausted).
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% </ul>
-spec deep_find(path(), map()) -> {ok, term()} | error.
deep_find(Path, Map) ->
    check(Path, Map),
    Found = fun(Value) -> {ok, Value} end,
    Missing = fun(_FoundOrError, _At) -> error end,
    search(Map, Path, Found, Missing).
% @doc Returns the value associated with `Path' if `Map' contains `Path'.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% <li>`{badkey,Path}' if no value is associated with path `Path'</li>
% </ul>
-spec deep_get(path(), map()) -> term().
deep_get(Path, Map) ->
    check(Path, Map),
    Fail = fun
        ({ok, _Existing}, At) -> error({badvalue, At});
        (error, At) -> error({badkey, At})
    end,
    search(Map, Path, fun(Value) -> Value end, Fail).
% @doc Returns the value associated with `Path' if `Map' contains `Path'.
% If no value is associated with `Path', `Default' is returned.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% </ul>
-spec deep_get(path(), map(), term()) -> term().
deep_get(Path, Map, Default) ->
    check(Path, Map),
    % Note: unlike deep_get/2, both the missing-key and the
    % intermediate-non-map cases fall back to Default here.
    Fallback = fun(_FoundOrError, _At) -> Default end,
    search(Map, Path, fun(Value) -> Value end, Fallback).
% @doc Associates `Path' with value `Value' and inserts the association into
% map `Map2'. If path `Path' already exists in map `Map1', the old associated
% value is replaced by value `Value'. Missing intermediate maps along the
% path are created. The function returns a new map `Map2' containing the new
% association and the old associations in `Map1'.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map1' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% </ul>
-spec deep_put(path(), term(), map()) -> map().
deep_put(Path, Value, Map1) ->
    check(Path, Map1),
    OnMissing = fun(At, Rest, Found) ->
        badvalue_and_create(At, Rest, Found, Value)
    end,
    update(Map1, Path, fun(_Existing) -> Value end, OnMissing).
% @doc If `Path' exists in `Map1', the old associated value is replaced by
% value `Value'. The function returns a new map `Map2' containing the new
% associated value.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map1' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% <li>`{badkey,Path}' if no value is associated with path `Path'</li>
% </ul>
-spec deep_update(path(), term(), map()) -> map().
deep_update(Path, Value, Map1) ->
    check(Path, Map1),
    % Unlike deep_put/3, a missing path is an error here.
    update(Map1, Path, fun(_Existing) -> Value end, fun badvalue_and_badkey/3).
% @doc Update a value in a `Map1' associated with `Path' by calling `Fun' on
% the old value to get a new value.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map1' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% <li>`{badkey,Path}' if no value is associated with path `Path'</li>
% <li>`badarg' if `Fun' is not a function of arity 1</li>
% </ul>
-spec deep_update_with(path(), fun((term()) -> term()), map()) -> map().
deep_update_with(Path, Fun, Map1) ->
    deep_update_with_1(Path, Fun, Map1, fun badvalue_and_badkey/3).

% @doc Update a value in a `Map1' associated with `Path' by calling `Fun' on
% the old value to get a new value. If `Path' is not present in `Map1' then
% `Init' will be associated with `Path'.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map1' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% <li>`{badvalue,P}' if a term that is not a map exists as a intermediate key at
% the path `P'</li>
% <li>`badarg' if `Fun' is not a function of arity 1</li>
% </ul>
-spec deep_update_with(path(), fun((term()) -> term()), any(), map()) -> map().
deep_update_with(Path, Fun, Init, Map1) ->
    OnMissing = fun(At, Rest, Found) ->
        badvalue_and_create(At, Rest, Found, Init)
    end,
    deep_update_with_1(Path, Fun, Map1, OnMissing).

% Shared implementation: validate the arguments, then let update/4 apply
% `Fun' to the existing value (or `Default' when the path is missing).
deep_update_with_1(Path, Fun, Map, Default) ->
    check(Path, Map),
    check_fun(Fun, 1),
    update(Map, Path, Fun, Default).
% @doc Removes the last existing key of `Path', and its associated value
% from `Map1' and returns a new map `Map2' without that key. Any deeper
% non-existing keys are ignored.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map' is not a map</li>
% <li>`{badpath,Path}' if `Path' is not a path</li>
% </ul>
-spec deep_remove(path(), map()) -> map().
deep_remove(Path, Map) ->
    check_path(Path),
    check_map(Map),
    remove(Map, Path).
% @doc Merges a list of maps recursively into a single map. If a path exists
% in several maps, the value in the first nested map is superseded by the
% value in a following nested map ("last one wins").
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' exception if any of the maps is not a map</li>
% </ul>
%
% @equiv deep_merge(fun (_, V) -> V end, #{}, Maps)
-spec deep_merge([map()]) -> map().
deep_merge(Maps) when is_list(Maps) ->
    TakeNew = fun(_Path, _Old, New) -> New end,
    deep_merge_with(TakeNew, Maps).

% @equiv deep_merge([Map1, Map2])
-spec deep_merge(map(), map()) -> map().
deep_merge(Map1, Map2) when is_map(Map1), is_map(Map2) ->
    deep_merge([Map1, Map2]).
% @doc Merges `Maps' (a single map or a list of maps) recursively into
% `Target'. If a path exists in several maps, the function `Fun' is called
% with the previous and the conflicting value to resolve the conflict. The
% return value from the function is put into the resulting map.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' exception if any of the maps is not a map</li>
% </ul>
% map.
%
% @deprecated Please use the module {@link deep_merge_with/3} instead.
-spec deep_merge(fun((Old::term(), New::term()) -> term()), map(), map() | [map()]) -> map().
deep_merge(Fun, Target, Maps) ->
    % Call the arity-4 worker directly: it accepts both a single map and a
    % list of maps as its third argument, matching this function's spec.
    % (Routing through deep_merge_with/3 would crash with function_clause
    % for a list argument, since that function only accepts two maps.)
    deep_merge_with1(fun(_Path, Old, New) -> Fun(Old, New) end, Target, Maps, []).
% @doc Merges a list of maps `Maps' recursively into a single map. If a path
% exists in several maps, the function `Fun' is called with the path, the
% previous and the conflicting value to resolve the conflict. The return
% value from the function is put into the resulting map.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' exception if any of the maps is not a map</li>
% </ul>
% map.
-spec deep_merge_with(Fun::combiner(), Maps::[map()]) -> map().
deep_merge_with(Fun, [Target | Rest]) ->
    deep_merge_with1(Fun, Target, Rest, []).

% @doc Merges two maps recursively into a single map. If a path exists in
% both maps, the function `Fun' is called with the path, the previous and
% the conflicting value to resolve the conflict. The return value from the
% function is put into the resulting map.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' exception if any of the maps is not a map</li>
% </ul>
% map.
-spec deep_merge_with(Fun::combiner(), Map1::map(), Map2::map()) -> map().
deep_merge_with(Fun, Map1, Map2) when is_map(Map1), is_map(Map2) ->
    deep_merge_with(Fun, [Map1, Map2]).
% Recursive worker behind the deep_merge* API. `Path' tracks the keys
% leading to the current nesting level so the conflict callback can
% report where a collision happened.
deep_merge_with1(_Fun, Acc, [], _Path) when is_map(Acc) ->
    Acc;
deep_merge_with1(Fun, Acc, [From | Rest], Path) ->
    Merged = deep_merge_with1(Fun, Acc, From, Path),
    deep_merge_with1(Fun, Merged, Rest, Path);
deep_merge_with1(Fun, Acc, From, Path) when is_map(From) ->
    check_map(Acc),
    check_map(From),
    MergeKey = fun(Key, New, Target) ->
        case maps:find(Key, Target) of
            {ok, Old} when is_map(Old), is_map(New) ->
                % Both sides are maps: recurse one level deeper.
                Target#{Key => deep_merge_with1(Fun, Old, [New], Path ++ [Key])};
            {ok, Old} ->
                % Collision on a non-map value: let the combiner decide.
                Target#{Key => Fun(Path ++ [Key], Old, New)};
            error ->
                Target#{Key => New}
        end
    end,
    maps:fold(MergeKey, Acc, From).
% @doc Returns a map iterator that can be used by {@link deep_next/1} to
% recursively traverse the path-value associations in a deep map structure.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map.
-spec deep_iterator(map()) -> iterator().
deep_iterator(Map) when is_map(Map) ->
    % Start from the top-level map iterator with an empty key trail and
    % an empty stack of parent iterators.
    {?MODULE, maps:next(maps:iterator(Map)), [], []};
deep_iterator(NotAMap) ->
    error_info({badmap, NotAMap}, [NotAMap]).
% @doc Returns the next path-value association in Iterator and a new iterator
% for the remaining associations in the iterator.
%
% If the value is another map the iterator will first return the map as a value
% with its path. Only on the next call the inner value with its path is
% returned. That is, first `{Path, map(), iterator()}' and then
% `{InnerPath, Value, iterator()}'.
%
% If there are no more associations in the iterator, `none' is returned.
-spec deep_next(iterator()) -> {path(), term(), iterator()} | none.
deep_next({?MODULE, I, Trail, Stack}) ->
    case {I, Stack} of
        % Current level exhausted and no parents left: traversal done.
        {none, []} ->
            none;
        % Current level exhausted: pop the parent iterator and drop the
        % last key from the trail to continue one level up.
        {none, [Prev|Rest]} ->
            deep_next({?MODULE, maps:next(Prev), lists:droplast(Trail), Rest});
        % Nested map: yield the map itself, then descend into it on the
        % next call (parent's remaining iterator is pushed on the stack).
        {{K, V, I2}, Stack} when is_map(V) ->
            Path = Trail ++ [K],
            {Path, V, {?MODULE, maps:next(maps:iterator(V)), Path, [I2|Stack]}};
        % Plain value: yield it and keep iterating at the same level.
        {{K, V, I2}, Stack} ->
            Path = Trail ++ [K],
            {Path, V, {?MODULE, I2, Trail, Stack}}
    end;
deep_next(Iter) ->
    error_info(badarg, [Iter]).
% @doc Inverts `Map' by inserting each value as the key with its
% corresponding key as the value. If two keys have the same value, one of
% the keys will be overwritten by the other in an undefined order.
%
% The call can raise the following exceptions:
% <ul>
% <li>`{badmap,Map}' if `Map' is not a map</li>
% </ul>
-spec inverse(map()) -> map().
inverse(Map) ->
    Swap = fun(Key, Value, Inverted) -> Inverted#{Value => Key} end,
    maps:fold(Swap, #{}, Map).
% @hidden
% OTP error_info callback: invoked by the shell/error formatter to produce
% per-argument hints for exceptions raised via error_info/2. Only the top
% stack frame's function name and argument list are consulted.
format_error(_Reason, [{_M, F, As, _Info}|_]) ->
    error_args(F, As).
%--- Internal Functions -------------------------------------------------------

% Validate a path/map argument pair; raises {badpath, _} or {badmap, _}.
check(Path, Map) ->
    check_path(Path),
    check_map(Map).

% A path is any list; raises {badpath, Path} otherwise.
check_path(Path) when is_list(Path) -> ok;
check_path(Path) -> error_info({badpath, Path}, [Path]).

% Raises {badmap, Map} when the argument is not a map.
check_map(Map) when is_map(Map) -> ok;
check_map(Map) -> error_info({badmap, Map}, [Map]).
% Validate that `Fun' is a function of the given arity.
%
% Raises `badarg' as an error (not an exit): the deep_update_with/3,4 docs
% list `badarg' among the raised exceptions alongside error-class reasons
% like {badmap, _}, and the rest of this module raises errors via
% error_info/2 — `exit(badarg)' here was inconsistent with both.
check_fun(Fun, Arity) when is_function(Fun, Arity) -> ok;
check_fun(Fun, Arity) -> error(badarg, [Fun, Arity]).
% Walk `Path' through nested maps. When the full path resolves, the final
% value is passed through `Wrap'. Otherwise `Default' is called with either
% `error' (missing key) or `{ok, Value}' (non-map hit mid-path) plus the
% path walked so far.
search(Map, Path, Wrap, Default) ->
    search(Map, Path, Wrap, Default, []).

search(Element, [], Wrap, _Default, _Visited) ->
    Wrap(Element);
search(Map, [Key | Rest], Wrap, Default, Visited) when is_map(Map) ->
    case maps:find(Key, Map) of
        {ok, Next} -> search(Next, Rest, Wrap, Default, [Key | Visited]);
        error -> Default(error, lists:reverse([Key | Visited]))
    end;
search(Value, [_Key | _Rest], _Wrap, Default, Visited) ->
    Default({ok, Value}, lists:reverse(Visited)).
% Walk `Path' through nested maps and rebuild the structure with the value
% at the end of the path replaced by `Wrap(Existing)'. When the path cannot
% be fully resolved, `Default(WalkedPath, RemainingPath, {ok, Value} | error)'
% supplies the value to store at the last reachable key.
update(Map, Path, Wrap, Default) ->
    update(Map, Path, Wrap, Default, []).

update(Map, [], Wrap, _Default, _Visited) ->
    Wrap(Map);
update(Map, [Key | Rest], Wrap, Default, Visited) ->
    Trail = [Key | Visited],
    NewValue =
        case maps:find(Key, Map) of
            {ok, Sub} when is_map(Sub) ->
                % Descend; the recursion handles Rest =:= [] via Wrap.
                update(Sub, Rest, Wrap, Default, Trail);
            {ok, Leaf} when Rest =:= [] ->
                Wrap(Leaf);
            {ok, Leaf} ->
                % Non-map hit before the path was exhausted.
                Default(lists:reverse(Trail), Rest, {ok, Leaf});
            error ->
                Default(lists:reverse(Trail), Rest, error)
        end,
    maps:put(Key, NewValue, Map).
% Remove the deepest existing key along `Path'. If the path runs into a
% missing key or a non-map value early, the last key that did resolve is
% the one removed; a completely unresolvable first key leaves the map
% untouched.
remove(Map, []) ->
    Map;
remove(Map, [Last]) ->
    maps:remove(Last, Map);
remove(Map, [First, Second | Rest]) when is_map(Map) ->
    case maps:find(First, Map) of
        {ok, Sub} when is_map(Sub) ->
            case maps:is_key(Second, Sub) of
                true ->
                    % The path continues to exist: recurse deeper.
                    maps:update(First, remove(Sub, [Second | Rest]), Map);
                false ->
                    % Deeper key missing: `First' is the last existing key.
                    maps:remove(First, Map)
            end;
        {ok, _Leaf} ->
            maps:remove(First, Map);
        error ->
            Map
    end.
% Build a nested-map chain for `Path' with `Value' at the deepest level,
% e.g. create([a, b], v) =:= #{a => #{b => v}}.
create([], Value) -> Value;
create([Key | Rest], Value) -> #{Key => create(Rest, Value)}.

% Default callbacks for update/4: fail on both unresolved-path cases.
badvalue_and_badkey(At, _Rest, {ok, _Existing}) -> error({badvalue, At});
badvalue_and_badkey(At, _Rest, error) -> error({badkey, At}).

% Default callback for update/4: fail when a non-map blocks the path, but
% create the missing sub-structure (holding `Init') for an absent key.
badvalue_and_create(At, _Rest, {ok, _Existing}, _Init) -> error({badvalue, At});
badvalue_and_create(_At, Rest, error, Init) -> create(Rest, Init).
% Raise `Reason' with the failing call's arguments and an error_info map so
% that format_error/2 is consulted when the exception is formatted. Inlined
% (see the -compile attribute at the top of the module) so the reported
% stack frame is the caller, not error_info itself.
error_info(Reason, Args) ->
    erlang:error(Reason, Args, [{error_info, #{module => ?MODULE}}]).
% Map the failing function (from the top stack frame) and its arguments to
% per-argument error hints for format_error/2.
%
% Because error_info/2 is inlined, the frame seen by format_error/2 carries
% the public function name: the badmap raised in deep_iterator/1 arrives as
% `deep_iterator', so the previous `iterator' clause here could never match
% and its "not a map" hint was unreachable.
error_args(deep_iterator, [_Map]) ->
    #{1 => <<"not a map">>};
error_args(deep_next, [_Iter]) ->
    #{1 => <<"not a valid iterator">>}.
-module(rstar_insert).
-export([insert/3]).
-ifdef(TEST).
-compile(export_all).
-endif.
-include("../include/rstar.hrl").
% ChooseSubtree (R*-tree insertion):
% CS1: Set N to be the root
% CS2: If N is a leaf, return it (with the path of ancestors walked so far)
% CS2b: If N points to leaves, select the child on
%   1) Minimal Overlap 2) Minimal Area Change 3) Smallest Area
% CS2c: If N points to interior nodes, select the child on
%   1) Minimal Area Change 2) Smallest Area
choose_subtree(Node = #geometry{value = Value}, _Geo, Path) when is_record(Value, leaf) ->
    [Node | Path];
choose_subtree(Node, Geo, Path) ->
    Children = Node#geometry.value#node.children,
    [FirstChild | _] = Children,
    % The first child's type tells us which level the children are at.
    Candidates =
        case FirstChild#geometry.value of
            #leaf{} ->
                Narrowed = minimal_overlap_delta(Geo, Children),
                minimal_area(minimal_area_delta(Geo, Narrowed));
            #node{} ->
                minimal_area(minimal_area_delta(Geo, Children))
        end,
    % Ties after all criteria are broken by taking the first candidate.
    [Winner | _] = Candidates,
    choose_subtree(Winner, Geo, [Node | Path]).
% Computes the total overlap (sum of intersection areas) between a given
% geometry and a list of other geometries. The geometry itself is skipped
% if it appears in the list.
overlap(Geo, Others) ->
    overlap_r(Geo, Others, 0).

% Skip the original geometry itself.
overlap_r(Geo, [Geo | Rest], Total) ->
    overlap_r(Geo, Rest, Total);
overlap_r(Geo, [Other | Rest], Total) ->
    case rstar_geometry:intersect(Geo, Other) of
        undefined ->
            % Disjoint: contributes nothing.
            overlap_r(Geo, Rest, Total);
        Intersection ->
            overlap_r(Geo, Rest, Total + rstar_geometry:area(Intersection))
    end;
overlap_r(_Geo, [], Total) ->
    Total.
% Returns the list of geometry objects
% that will have minimal change in overlap given the
% addition of a new geometry. Ties are preserved: every geometry sharing
% the minimal delta is returned.
minimal_overlap_delta(_Geo, L=[_X]) -> L;
minimal_overlap_delta(Geo, OtherGeo) ->
    strip_delta(minimal_overlap_delta_helper(Geo, OtherGeo)).

minimal_overlap_delta_helper(Geo, OtherGeo) ->
    Overlap = lists:map(fun (G) ->
        % Get a geometry that encompasses this entry plus the new one
        Union = rstar_geometry:bounding_box([Geo, G]),
        % Compute the change in overlap caused by the enlargement
        Delta = overlap(Union, lists:delete(G, OtherGeo)) - overlap(G, OtherGeo),
        % Make a tuple with the delta and the Geo
        {Delta, G}
    end, OtherGeo),
    % Sort on the delta
    SortedOverlap = lists:keysort(1, Overlap),
    % Grab the head element
    [{FirstDelta, _FirstGeo} | _Tail] = SortedOverlap,
    % Keep all entries tied with the minimal delta (list is sorted, so
    % takewhile stops at the first larger value)
    lists:takewhile(fun ({Delta, _}) ->
        Delta == FirstDelta
    end, SortedOverlap).
% Returns the list of geometry objects
% that will have minimal change in area given the
% addition of a geometry object. Ties are preserved: every geometry
% sharing the minimal enlargement is returned.
minimal_area_delta(_Geo, L=[_X]) -> L;
minimal_area_delta(Geo, OtherGeo) ->
    strip_delta(minimal_area_delta_helper(Geo, OtherGeo)).

minimal_area_delta_helper(Geo, OtherGeo) ->
    Areas = lists:map(fun (G) ->
        % Get a geometry that encompasses this entry plus the new one
        Union = rstar_geometry:bounding_box([Geo, G]),
        % Compute the area enlargement the addition would cause
        Delta = rstar_geometry:area(Union) - rstar_geometry:area(G),
        % Make a tuple with the delta and the Geo
        {Delta, G}
    end, OtherGeo),
    % Sort on the delta
    SortedArea = lists:keysort(1, Areas),
    % Grab the head element
    [{FirstDelta, _FirstGeo} | _Tail] = SortedArea,
    % Keep all entries tied with the minimal area delta
    lists:takewhile(fun ({Delta, _}) ->
        Delta == FirstDelta
    end, SortedArea).
% Returns the list of geometry objects
% that have the minimal area. Ties are preserved: every geometry sharing
% the minimal area is returned.
minimal_area(L=[_X]) -> L;
minimal_area(OtherGeo) ->
    strip_delta(minimal_area_helper(OtherGeo)).

minimal_area_helper(OtherGeo) ->
    Areas = lists:map(fun (G) ->
        % Compute the area
        Area = rstar_geometry:area(G),
        % Make a tuple with the area and the Geo
        {Area, G}
    end, OtherGeo),
    % Sort on the area
    SortedArea = lists:keysort(1, Areas),
    % Grab the head element
    [{FirstArea, _FirstGeo} | _Tail] = SortedArea,
    % Keep all entries tied with the minimal area
    lists:takewhile(fun ({Area, _}) ->
        Area == FirstArea
    end, SortedArea).
% Drops the score (delta or area) that the minimal_* helpers pair with
% each geometry, returning just the geometries.
strip_delta(Scored) ->
    lists:map(fun({_Score, Geo}) -> Geo end, Scored).
% Split (R*-tree overflow handling):
% S1: Invoke ChooseSplitAxis to determine axis to split on
% S2: Invoke ChooseSplitIndex to split into 2 groups along axis
% S3: Distribute into 2 groups
split(Params, Node) ->
    Axis = choose_split_axis(Params, Node),
    {GroupA, GroupB} = choose_split_index(Params, Node, Axis),
    % Create a bounding box for each distribution
    G1Geo = rstar_geometry:bounding_box(GroupA),
    G2Geo = rstar_geometry:bounding_box(GroupB),
    % Assign the children. The new nodes are a leaf type
    % only if we are splitting a leaf, otherwise they are
    % interior nodes. (G1/G2 are bound in both case branches,
    % so they are safe to use after the case.)
    case Node#geometry.value of
        #leaf{} ->
            G1 = G1Geo#geometry{value=#leaf{entries=GroupA}},
            G2 = G2Geo#geometry{value=#leaf{entries=GroupB}};
        #node{} ->
            G1 = G1Geo#geometry{value=#node{children=GroupA}},
            G2 = G2Geo#geometry{value=#node{children=GroupB}}
    end,
    % Return the two new nodes
    {G1, G2}.
% ChooseSplitAxis
% CSA1: score every axis by the summed margin values over all of its
% candidate distributions.
% CSA2: pick the axis with the minimum score (lowest axis wins ties).
choose_split_axis(Params, Node) ->
    Dims = Node#geometry.dimensions,
    Best = lists:min([{axis_split_score(Params, Node, Axis), Axis}
                      || Axis <- lists:seq(1, Dims)]),
    element(2, Best).
% Computes the score S for splitting on a given axis: the sum, over
% every candidate distribution, of the two groups' bounding-box margins.
axis_split_score(Params, Node, Axis) ->
    Margins = [rstar_geometry:margin(rstar_geometry:bounding_box(GroupA)) +
               rstar_geometry:margin(rstar_geometry:bounding_box(GroupB))
               || {GroupA, GroupB} <- axis_distributions(Params, Node, Axis)],
    lists:sum(Margins).
% Returns a list of all the possible distributions
% along the given axis. Children are ordered by their lower bound on
% the axis (upper bound breaks ties); every legal split point K, where
% each side keeps at least Min entries, then yields one {GroupA, GroupB}
% pair.
axis_distributions(Params, Node, Axis) ->
    % Ignore the record type, get the children
    {_, Children} = Node#geometry.value,
    % Sort the children on the appropriate axis
    Sorted = lists:sort(fun (A, B) ->
        {MinA, MaxA} = lists:nth(Axis, A#geometry.mbr),
        {MinB, MaxB} = lists:nth(Axis, B#geometry.mbr),
        % Sort on the lower and then upper part of the axis
        if
            MinA < MinB -> true;
            MinA == MinB -> MaxA =< MaxB;
            true -> false
        end
    end, Children),
    % Extract the tree parameters
    Min = Params#rt_params.min,
    Max = Params#rt_params.max,
    % Build the distribution: lists:split(K, _) puts the first K sorted
    % children into GroupA and the rest into GroupB
    [lists:split(K, Sorted) || K <- lists:seq(Min, Max - Min + 1)].
% ChooseSplitIndex
% CSI1: Along chosen axis, choose the distribution with minimum overlap value, resolve tie with minimum area value
choose_split_index(Params, Node, Axis) ->
    % Compute the distributions
    Distributions = axis_distributions(Params, Node, Axis),
    % Compute the overlap and area scores
    Scored = lists:map(fun(D={GroupA, GroupB}) ->
        BoundA = rstar_geometry:bounding_box(GroupA),
        BoundB = rstar_geometry:bounding_box(GroupB),
        Overlap = overlap(BoundA, [BoundB]),
        Area = rstar_geometry:area(BoundA) + rstar_geometry:area(BoundB),
        % Store with the overlap and area; the tuple order makes the
        % later term sort compare overlap first and area second
        {Overlap, Area, D}
    end, Distributions),
    % Sort to get the lowest score. This will first sort on overlap,
    % and then on area
    Sorted = lists:sort(Scored),
    % Returns the best distribution
    [{_, _, Distrib} | _] = Sorted,
    Distrib.
% InsertData:
% I1: Invoke Insert with the leaf level as a param to add new geometry.
% One round of forced reinsertion is allowed; the entries it evicts are
% then added back with reinsertion disabled, so the process terminates.
insert(Params, Root, Geo) ->
    {NewRoot, Evicted} = insert_internal(Params, true, Root, Geo),
    reinsert_evicted(Params, NewRoot, Evicted).

% Sequentially re-add each evicted entry; no further eviction occurs.
reinsert_evicted(_Params, Root, []) ->
    Root;
reinsert_evicted(Params, Root, [Entry | Rest]) ->
    {Root1, []} = insert_internal(Params, false, Root, Entry),
    reinsert_evicted(Params, Root1, Rest).
% Insert:
% I1: Invoke ChooseSubtree with level as param to find node N for E
% Single insertion pass; returns {NewRoot, EntriesToReinsert}, where the
% second element is non-empty only when forced reinsertion evicted
% entries (i.e. AllowReinsert was true).
insert_internal(Params, AllowReinsert, Root, Geo) ->
    % Determine the traversal path, from root to leaf
    Path = choose_subtree(Root, Geo, []),
    RootToLeaf = lists:reverse(Path),
    % Perform a recursive insert and tree repair, since all
    % nodes on the path need to be adjusted
    {NewRoot, ReInsert} = insert_recursive(Params, AllowReinsert, 0, RootToLeaf, Geo),
    % Check if the root was split and needs to be reconstructed
    ReturnRoot = case NewRoot of
        {R1, R2} ->
            % Root split: grow the tree one level by making a new root
            % whose children are the two halves
            Children = [R1, R2],
            RootGeo = rstar_geometry:bounding_box(Children),
            RootGeo#geometry{value=#node{children=Children}};
        R -> R
    end,
    % Return the new root along with any nodes to be re-inserted
    {ReturnRoot, ReInsert}.
% Insert:
% I2: If N has < M entries, insert E. If N has M entries, invoke OverflowTreatment with the level of N as param.
% I3: If OverflowTreatment is called and a split performed, propagate OverflowTreatment upward. If OFT cause split of root, create new root.
% Base case: the path has been walked down to the target leaf.
insert_recursive(Params, AllowReinsert, Level, [Leaf], Geo) ->
    % Add the entry to the leaf
    NewEntries = [Geo | Leaf#geometry.value#leaf.entries],
    NewGeo = rstar_geometry:bounding_box(NewEntries),
    NewLeaf = NewGeo#geometry{value=#leaf{entries=NewEntries}},
    if
        % Check if there is space for this entry
        length(NewEntries) =< Params#rt_params.max ->
            {NewLeaf, []};
        % Overflow case, check for split or re-insert. Reinsertion is
        % never done at the root level (Level == 0).
        (Level == 0) or (not AllowReinsert) ->
            {N1, N2} = split(Params, NewLeaf),
            {{N1, N2}, []};
        % Allow reinsert in the special case: evict the farthest
        % entries and hand them back for re-insertion by the caller
        true ->
            ReInserted = reinsert(Params, NewLeaf),
            ReducedChild = NewEntries -- ReInserted,
            ReducedGeo = rstar_geometry:bounding_box(ReducedChild),
            ReducedNode = ReducedGeo#geometry{value=#leaf{entries=ReducedChild}},
            {ReducedNode, ReInserted}
    end;
% I4: Adjust all the MBR in the insertion path
insert_recursive(Params, AllowReinsert, Level, [Parent | Tail=[Child| _]], Geo) ->
    {NewChild, ReInsert} = insert_recursive(Params, AllowReinsert, Level + 1, Tail, Geo),
    % Get the new children
    {_, Children} = Parent#geometry.value,
    % A tuple result means the child was split into two replacements
    NewChildren = case NewChild of
        {N1, N2} -> [N1, N2 | lists:delete(Child, Children)];
        NewNode -> [NewNode | lists:delete(Child, Children)]
    end,
    % Update the bounding geometry and propagate
    NewGeo = rstar_geometry:bounding_box(NewChildren),
    NewParent = NewGeo#geometry{value=#node{children=NewChildren}},
    % Check for the split case
    AdjustedParent = case length(NewChildren) of
        % OFT: It is possible to handle overflow here with a reinsert or split,
        % however we only perform a split for simplicity
        L when L > Params#rt_params.max -> split(Params, NewParent);
        _ -> NewParent
    end,
    % Return the reconstructed level
    {AdjustedParent, ReInsert}.
% ReInsert:
% RI1: For all M+1 entries of N, compute distance from center of N to center of entry
% RI2: Sort the entries in decreasing order of distance
% RI3: Remove the first P entries from N and adjust MBR of N
% RI4: Invoke Insert with the removed entries (p= 30% of M), starting with farthest or closest
% Selects the P children farthest from the node's center for reinsertion.
reinsert(Params, Node) ->
    % Get the center
    Center = rstar_geometry:center(Node),
    % Compute the distances
    {_, Children} = Node#geometry.value,
    Distances = lists:map(fun(G) ->
        {rstar_geometry:distance(Center, rstar_geometry:center(G)), G}
    end, Children),
    % Sort on distance (ascending), so the farthest entries are last
    Sorted = lists:sort(Distances),
    % Get the number P of nodes to reinsert
    P = Params#rt_params.reinsert,
    % Determine what the nth tail would be: the overflowing node holds
    % max + 1 entries here, so dropping LastP leaves exactly P entries
    LastP = Params#rt_params.max - P + 1,
    % Return the LastP nodes
    [N || {_Distance, N} <- lists:nthtail(LastP, Sorted)].
%% @author <NAME> (<EMAIL>)
%% @copyright <NAME> 2007
%% @doc This module implements the Mnesia driver for ErlyDB.
%%
%% This is an internal ErlyDB module that you normally shouldn't have to
%% use directly. For most situations, all you have to know
%% about this module is the options you can pass to {@link start/1}, which
%% is called by {@link erlydb:start/2}. Currently (Erlyweb 0.6), no options are
%% recognized/used.
%%
%%
%% == Contents ==
%%
%% {@section Introduction}<br/>
%% {@section Conventions}<br/>
%% {@section Types}<br/>
%% {@section Example}<br/>
%% {@section What's Not Supported}<br/>
%%
%%
%% == Introduction ==
%%
%% Mnesia is Erlang's distributed DataBase Management System (DBMS). Please read the
%% Mnesia Reference Manual for more information about Mnesia.
%%
%% This driver executes Erlsql queries against Mnesia. Most Erlsql queries are
%% dynamically converted into Query List Comprehension (QLC) expressions before
%% execution. Please see the qlc module documentation for more information on QLC.
%% Please read the Erlsql documentation for more information on Erlsql.
%%
%% This driver does not add relational support to Mnesia (constraints,
%% cascades, etc). Some relational support for Mnesia has been implemented by
%% Ulf Wiger in the user contribution rdbms (see http://erlang.org/user.html).
%% For more information visit http://ulf.wiger.net/rdbms/doc/rdbms.html. You can download a more
%% recent version of rdbms at http://ulf.wiger.net/rdbms/download/.
%%
%%
%% == Conventions ==
%%
%% The driver uses a table named 'counter' for auto-incrementing (identity) primary key columns.
%% (only valid for set or ordered-set tables). The 'counter' table must be created
%% using the following:
%%
%% mnesia:create_table(counter, [{disc_copies, [node()]}, {attributes, [key, counter]}])
%%
%% The key column will contain table names of the mnesia tables utilizing identity columns. The
%% counter contains the value of the last used identity (serial integer). The counter is updated
%% using:
%%
%% mnesia:dirty_update_counter(counter, Table, 1)
%%
%% You can initialize/start the identity of a particular table by executing the above statement
%% with an arbitrary number (greater than 0). The above operation is atomic (the function name
%% is misleading). Please read the Mnesia docs for more information. The use of the 'counter'
%% table is currently not customizable but that will hopefully change soon.
%%
%% All columns named 'id' or ending with 'id' are treated as integers. If the column named 'id'
%% is the first attribute (column) in the mnesia table, then it is also treated as an
%% auto-incrementing identity column.
%%
%%
%% == Types ==
%%
%% This driver stores all fields as binary unless the field name ends with id and in that
%% case the field is treated as an integer (as discussed above). This can be customized
%% by utilizing the user_properties for a mnesia table. The driver will do a limited
%% amount of type conversion utilizing these properties. The driver will recognize
%% user_properties for a field if defined in the following format:
%%
%% {Field, {Type, Modifier}, Null, Key, Default, Extra, MnesiaType}
%%
%% where Field is an atom and must be the same as the field (attribute) name,
%% Type through Extra are as defined in erlydb_field:new/6
%% MnesiaType is the type to store the field as in mnesia.
%%
%% Currently, only the following values for MnesiaType are recognized:
%%
%% atom, list, binary, integer, float, datetime, date, time, undefined
%%
%% The erlydb_mnesia driver will attempt to convert field values into
%% the specified type before insertion/update/query of the record in
%% mnesia... If the MnesiaType has a value of undefined then no type
%% conversion is attempted for the field.
%%
%%
%% == Example ==
%%
%% Given the following record:
%%
%% -record(person, {myid, type, name, age, country, office, department, genre, instrument, created_on})
%%
%% Create a Mnesia table with types for the driver using:
%%
%% {atomic, ok} = mnesia:create_table(person, [
%% {disc_copies, [node()]},
%% {attributes, record_info(fields, person)},
%% {user_properties, [{myid, {integer, undefined}, false, primary, undefined, identity, integer},
%% {type, {varchar, undefined}, false, undefined, undefined, undefined, atom},
%% {age, {integer, undefined}, true, undefined, undefined, undefined, integer},
%% {created_on, {datetime, undefined}, true, undefined, undefined, undefined, undefined}]}])
%%
%% Note the following:
%% 1) The primary key column is called myid and is an auto-incrementing integer column. This is the
%% same as if the column had been named 'id'.
%% 2) The type and age columns have customized types. The driver will try to convert all values
%% inserted into the table into the specified types.
%% 3) The created_on column is defined as a datetime for Erlyweb but is of type undefined for the
%% Mnesia driver. This means that no type conversion will be attempted for the created_on
%% column resulting in a Erlang datetime tuple to be stored in the column
%% {{Year, Month, Day}, {Hour, Minute, Second}} or {datetime, {{Year, Month, Day},{Hour,Minute,Second}}}
%% depending on how you create the record (creating a record from strings will result in the
%% tuple beginning with datetime).
%% 4) Changing the user property for the created_on column to specify a mnesia type of datetime like
%% {created_on, {datetime, undefined}, true, undefined, undefined, undefined, datetime}
%% will result in the erlang date time tuple {{Year,Month,Day},{Hour,Minute,Second}}
%% to be stored regardless of how the record was created (i.e. it will strip the redundant
%% datetime atom from the tuple).
%%
%%
%% See test/erlydb/erlydb_mnesia_schema for more examples of how to create mnesia tables
%% with user_properties...
%%
%%
%% == What's Not Supported ==
%%
%% This driver is very much still alpha quality. Much is not supported but the most glaring
%% are unions and sub-queries.
%%
%% For license information see LICENSE.txt
-module(erlydb_mnesia).
-author("<NAME> (<EMAIL>)").
-export([start/0,
start/1,
get_metadata/1,
q/1,
q/2,
transaction/2,
select/2,
select_as/3,
update/2,
get_last_insert_id/2]).
%% Useful for debugging
-define(L(Msg), io:format("~p:~b ~p ~n", [?MODULE, ?LINE, Msg])).
-define(S(Obj), io:format("LOG ~w ~s\n", [?LINE, Obj])).
-record(qhdesc, {expressions = [],
generators = [],
filters = [],
bindings = erl_eval:new_bindings(),
options = [],
evalfun = fun qlc:e/2,
postqh = fun postqh/2,
posteval = fun posteval/1,
metadata = dict:new()}).
%% @doc Ensure the mnesia application is running. All four states
%% reported by mnesia:system_info(is_running) are handled explicitly:
%% 'yes' and 'starting' need no action, while 'no' and 'stopping'
%% trigger a (re)start. The previous version only handled 'no' and
%% silently returned ok while mnesia was shutting down (old FIXME).
start() ->
    case mnesia:system_info(is_running) of
        yes -> ok;
        starting -> ok;
        %% 'no' | 'stopping': ask for a start. NOTE(review): a start
        %% issued during 'stopping' may still return an error, which
        %% the caller (start/1) asserts on -- confirm desired semantics.
        _ -> mnesia:start()
    end.
%% @doc Start the Mnesia driver using the options property list. Currently, no options are recognized.
%%
%% @spec start(StartOptions::proplist()) -> ok | {error, Error}
start(_Options) ->
    %% Options are accepted for interface compatibility with the other
    %% ErlyDB drivers but ignored; crash here on anything but ok.
    ok = start().
%% @doc Get the table names and fields for the database, as a gb_tree
%% keyed on table name; the mnesia 'schema' table is excluded. Note the
%% result is wrapped: {ok, Tree}.
%%
%% @spec get_metadata(Options::proplist()) -> {ok, gb_trees()}
get_metadata(_Options) ->
    % NOTE Integration with mnesia_rdbms would be interesting...
    UserTables = mnesia:system_info(tables) -- [schema],
    Build = fun(Table, Acc) ->
                gb_trees:enter(Table, get_metadata(Table, table_fields(Table)), Acc)
            end,
    {ok, lists:foldl(Build, gb_trees:empty(), UserTables)}.
% Metadata for a list of fields (one erlydb_field per field) or for a
% single field: look up the (possibly defaulted) user properties and
% build the erlydb_field from everything but the mnesia storage type.
get_metadata(Table, Fields) when is_list(Fields) ->
    [get_metadata(Table, Field) || Field <- Fields];
get_metadata(Table, Field) ->
    {Field, {Type, Modifier}, Null, Key, Default, Extra, _MnesiaType} = get_user_properties(Table, Field),
    erlydb_field:new(Field, {Type, Modifier}, Null, Key, Default, Extra).
%% Execute an Erlsql statement with no driver options.
q(Statement) ->
    q(Statement, undefined).
%% Execute a parsed Erlsql statement. Raw binary/list SQL strings are
%% not supported by this driver and exit.
q({esql, Statement}, Options) ->
    ?L(["In q with: ", Statement]),
    q2(Statement, Options);
q(Statement, Options) when is_binary(Statement); is_list(Statement) ->
    ?L(["Unhandled binary or list query", Statement, Options]),
    exit("Unhandled binary or list query").
%% q2/2 dispatches each parsed Erlsql statement shape onto the matching
%% mnesia operation. Select variants are normalized and delegated to
%% select/6; insert/update/delete run inside mnesia transactions.
%%
%% COUNT(*) over a single whole table: answered from table_info without
%% scanning.
q2({select, {call, count, _What}, {from, Table}, {where, undefined}, undefined}, _Options)
  when is_list(Table) == false ->
    {ok, [{table_size(Table)}]};
q2({select, Table}, Options) when is_list(Table) == false ->
    q2({select, [Table]}, Options);
% Full-table select: match every record of every listed table against
% its wild pattern inside one transaction.
q2({select, Tables}, Options) when is_list(Tables) ->
    {atomic, Results} = transaction(fun() -> lists:foldl(
        fun(Table, Acc) -> Acc ++ mnesia:match_object(mnesia:table_info(Table, wild_pattern)) end,
        [], Tables) end, Options),
    {data, Results};
q2({select, '*', {from, Tables}}, Options) ->
    q2({select, Tables}, Options);
q2({select, '*', {from, Tables}, {where, undefined}, undefined}, Options) ->
    q2({select, Tables}, Options);
%% QLC queries: the remaining select shapes are normalized into a
%% (Modifier, Fields, Tables, Where, Extras) call to select/6.
q2({select, Fields, {from, Tables}}, Options) ->
    select(undefined, Fields, Tables, undefined, undefined, Options);
q2({select, Fields, {from, Tables}, {where, WhereExpr}}, Options) ->
    select(undefined, Fields, Tables, WhereExpr, undefined, Options);
q2({select, Fields, {from, Tables}, {where, WhereExpr}, Extras}, Options) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Options);
q2({select, Fields, {from, Tables}, WhereExpr, Extras}, Options) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Options);
q2({select, Fields, {from, Tables}, Extras}, Options) ->
    select(undefined, Fields, Tables, undefined, Extras, Options);
q2({select, Tables, {where, WhereExpr}}, Options) ->
    select(undefined, undefined, Tables, WhereExpr, undefined, Options);
q2({select, Tables, WhereExpr}, Options) ->
    select(undefined, undefined, Tables, WhereExpr, undefined, Options);
q2({select, Modifier, Fields, {from, Tables}}, Options) ->
    select(Modifier, Fields, Tables, undefined, undefined, Options);
q2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}}, Options) ->
    select(Modifier, Fields, Tables, WhereExpr, undefined, Options);
q2({select, Modifier, Fields, {from, Tables}, Extras}, Options) ->
    select(Modifier, Fields, Tables, undefined, Extras, Options);
q2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}, Extras}, Options) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Options);
q2({select, Modifier, Fields, {from, Tables}, WhereExpr, Extras}, Options) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Options);
%% Unions are not supported (see module doc); the commented-out clauses
%% below sketch what a translation would look like.
%% q2({Select1, union, Select2}, Options) ->
%%     [$(, q2(Select1, Options), <<") UNION (">>, q2(Select2, Options), $)];
%% q2({Select1, union, Select2, {where, WhereExpr}}, Options) ->
%%     [q2({Select1, union, Select2}, Options), where(WhereExpr, Options)];
%% q2({Select1, union, Select2, Extras}, Options) ->
%%     [q2({Select1, union, Select2}, Options), extra_clause(Extras, Options)];
%% q2({Select1, union, Select2, {where, _} = Where, Extras}, Options) ->
%%     [q2({Select1, union, Select2, Where}, Options), extra_clause(Extras, Options)];
% Normalize {Field, Value} pairs into the Fields/ValuesList insert form.
q2({insert, Table, Params}, Options) ->
    {Fields, Values} = lists:unzip(Params),
    q2({insert, Table, Fields, [Values]}, Options);
% Insert: if the table has an identity (auto-increment) first column,
% reserve a block of ids from the 'counter' table, prepend one id to
% each value row, and remember the last id for get_last_insert_id.
q2({insert, Table, Fields, ValuesList}, _Options) ->
    {Fields1, ValuesList1} =
        case get_identity_field(Table) of
            undefined -> {Fields, ValuesList};
            Field -> NewFields = [Field | Fields],
                MaxId = mnesia:dirty_update_counter(counter, Table, length(ValuesList)),
                put(mnesia_last_insert_id, MaxId),
                {NewValuesList, _} =
                    lists:mapfoldr(fun(Values, Id) -> {[Id | Values], Id-1} end, MaxId, ValuesList),
                {NewFields, NewValuesList}
        end,
    QLCData = get_qlc_metadata(Table),
    lists:foreach(fun(Values) ->
        ok = write(dict:fetch({new_record, Table}, QLCData), Fields1, Values, QLCData)
    end, ValuesList1),
    {ok, length(ValuesList1)};
% Unconditional update: rewrite every record of the table.
q2({update, Table, Params}, _Options) ->
    {Fields, Values} = lists:unzip(Params),
    QLCData = get_qlc_metadata(Table),
    TraverseFun = fun(Record, {Fields1, Values1, QLCData1}) ->
        write(Record, Fields1, Values1, QLCData1),
        {Fields1, Values1, QLCData1}
    end,
    {atomic, _} = traverse(TraverseFun, {Fields, Values, QLCData}, Table),
    {ok, table_size(Table)};
q2({update, Table, Params, {where, Where}}, Options) ->
    q2({update, Table, Params, Where}, Options);
% Conditional update: select the matching records in a transaction and
% rewrite each of them.
q2({update, Table, Params, Where}, Options) ->
    QHDesc = #qhdesc{metadata = get_qlc_metadata(Table)},
    {Fields, Values} = lists:unzip(Params),
    QLCData = QHDesc#qhdesc.metadata,
    {atomic, Num} = mnesia:transaction(
        fun() ->
            {data, Records} = select(undefined, undefined, Table, Where, undefined, Options, QHDesc),
            lists:foreach(fun(Record) -> write(Record, Fields, Values, QLCData) end, Records),
            length(Records)
        end),
    {ok, Num};
q2({delete, {from, Table}, {where, undefined}}, Options) ->
    q2({delete, Table}, Options);
q2({delete, {from, Table}}, Options) ->
    q2({delete, Table}, Options);
% Unconditional delete: remove every record, counting as we go.
q2({delete, Table}, _Options) ->
    % cannot use mnesia:clear_table(Table) here because sometimes this gets called inside a transaction...
    TraverseFun = fun(Record, Num) ->
        mnesia:delete_object(Record),
        Num + 1
    end,
    {atomic, Num} = traverse(TraverseFun, 0, Table),
    {ok, Num};
q2({delete, {from, Table}, {where, Where}}, Options) ->
    q2({delete, Table, Where}, Options);
q2({delete, Table, {where, Where}}, Options) ->
    q2({delete, Table, Where}, Options);
% Conditional delete: select the matching records in a transaction and
% delete each of them.
q2({delete, Table, Where}, Options) ->
    {atomic, Num} = mnesia:transaction(
        fun() ->
            {data, Records} = q2({select, Table, Where}, Options),
            lists:foreach(fun(Record) -> mnesia:delete_object(Record) end, Records),
            length(Records)
        end),
    {ok, Num};
q2(Statement, Options) ->
    ?L(["Unhandled statement and options: ", Statement, Options]),
    exit("Unhandled statement").
% Build the default QLC metadata for the tables involved, then run the
% seven-argument select with a fresh query-handle description.
select(Modifier, Fields, Tables, WhereExpr, Extras, Options) ->
    QHDesc = #qhdesc{metadata = get_qlc_metadata(Tables)},
    select(Modifier, Fields, Tables, WhereExpr, Extras, Options, QHDesc).
% Core select pipeline: each clause handler (modifier, fields, tables,
% where, extras) refines the #qhdesc{}, then a QLC string of the form
% "[Exprs || Generators, Filters]." is assembled and evaluated inside a
% transaction. The postqh/evalfun/posteval hooks in the descriptor let
% the handlers add sorting, cursor-based limits and result reshaping.
select(Modifier, Fields, Tables, WhereExpr, Extras, Options, QHDesc) ->
    QHDesc1 = modifier(Modifier, QHDesc),
    QHDesc2 = fields(Fields, QHDesc1),
    QHDesc3 = tables(Tables, QHDesc2),
    QHDesc4 = where(WhereExpr, QHDesc3),
    QHDesc5 = extras(Extras, QHDesc4),
    Desc = QHDesc5,
    % Multiple select expressions are wrapped in a tuple
    QLC = if length(Desc#qhdesc.expressions) > 1 -> "[{" ++ comma(Desc#qhdesc.expressions) ++ "}";
        true -> "[" ++ comma(Desc#qhdesc.expressions)
    end,
    QLC1 = QLC ++ " || " ++ comma(Desc#qhdesc.generators ++ lists:reverse(Desc#qhdesc.filters)) ++ "].",
    ?L(["About to execute QLC: ", QLC1]),
    {atomic, Results} = transaction(
        fun() ->
            QHOptions = Desc#qhdesc.options,
            QH = qlc:string_to_handle(QLC1, QHOptions, Desc#qhdesc.bindings),
            PostQH = Desc#qhdesc.postqh,
            QH1 = PostQH(QH, QHOptions),
            EvalFun = Desc#qhdesc.evalfun,
            EvalFun(QH1, QHOptions)
        end, Options),
    ?L(["Found Results: ", Results]),
    PostEval = Desc#qhdesc.posteval,
    PostEval(Results).
% Apply a select modifier to the query-handle description. Only
% 'distinct' (mapped onto qlc's unique_all option) and the absence of a
% modifier are supported; anything else exits.
modifier(undefined, #qhdesc{} = Desc) ->
    Desc;
modifier(distinct, #qhdesc{options = Opts} = Desc) ->
    Desc#qhdesc{options = [{unique_all, true} | Opts]};
modifier(Unsupported, _Desc) ->
    ?L(["Unhandled modifier: ", Unsupported]),
    exit("Unhandled modifier").
% fields/2 adds the select expressions to the #qhdesc{}. Plain fields
% become element/2 accessor strings; aggregate calls (avg/count/max/min)
% additionally install post-processing hooks.
fields(undefined, QHDesc) ->
    fields('*', QHDesc);
fields('*', #qhdesc{expressions = Fields, metadata = QLCData} = QHDesc) ->
    % Select whole records: one expression per table alias
    QHDesc#qhdesc{expressions = dict:fetch(aliases, QLCData) ++ Fields};
fields({call, avg, Field}, #qhdesc{metadata = QLCData} = QHDesc) when is_atom(Field) ->
    % Unqualified avg: assume the first table in the query
    [Table | _Tables] = dict:fetch(tables, QLCData),
    fields({call, avg, {Table, Field}}, QHDesc);
fields({call, avg, {Table, Field}}, #qhdesc{metadata = QLCData} = QHDesc) ->
    Index = dict:fetch({index,Table,Field}, QLCData),
    % Fetch whole records, then average the target element afterwards
    QHDesc1 = QHDesc#qhdesc{posteval =
        fun(Results) ->
            Total = lists:foldl(fun(Record, Sum) -> element(Index, Record) + Sum end, 0, Results),
            {ok, [{Total/length(Results)}]}
        end},
    fields('*', QHDesc1);
%% Count functions
fields({call, count, What}, #qhdesc{metadata = QLCData} = QHDesc) when is_atom(What) ->
    % "count(distinct field)" arrives as a single atom. {return, list}
    % is required here: re:split/2 defaults to binary parts, so the
    % [[], Field] match below could never succeed (and list_to_atom/1
    % needs a list anyway). When detected, deduplicate on the field via
    % a unique keysort before counting.
    QHDesc1 = case re:split(atom_to_list(What), "distinct ", [{return, list}]) of
        [[], Field] ->
            [Table | _] = resolve_field(Field, QLCData),
            QHDesc#qhdesc{
                expressions = [dict:fetch({alias, Table}, QLCData)],
                postqh = fun(QH, _QHOptions) ->
                    qlc:keysort(dict:fetch({index,Table,list_to_atom(Field)}, QLCData), QH, [{unique, true}])
                end};
        _Other -> fields('*', QHDesc)
    end,
    QHDesc1#qhdesc{posteval = fun count/1};
fields({call, count, _What}, QHDesc) ->
    fields('*', QHDesc#qhdesc{posteval = fun count/1});
%% Max/Min functions
fields({call, max, Field}, QHDesc) ->
    min_max(Field, QHDesc, [{order, descending}, {unique, true}]);
fields({call, min, Field}, QHDesc) ->
    min_max(Field, QHDesc, [{unique, true}]);
fields([Field | Fields], QHDesc) ->
    fields(Fields, fields(Field, QHDesc));
fields([], QHDesc) ->
    QHDesc#qhdesc{expressions = lists:reverse(QHDesc#qhdesc.expressions)};
fields(Field, #qhdesc{metadata = QLCData} = QHDesc) when is_tuple(Field) == false ->
    % Bare field atom: qualify it with the first table
    [Table | _Tables] = dict:fetch(tables, QLCData),
    fields({Table,Field}, QHDesc);
fields({_,_} = Field, #qhdesc{expressions = Fields, metadata = QLCData} = QHDesc) ->
    % {Table, Field}: push the precomputed element/2 accessor string
    QHDesc#qhdesc{expressions = [dict:fetch(Field, QLCData) | Fields]}.
% Add a QLC generator string ("Alias <- mnesia:table(T)") for every
% table, preserving the order the tables were given in.
tables(Table, Desc) when not is_list(Table) ->
    tables([Table], Desc);
tables(Tables, #qhdesc{generators = Gens, metadata = QLCData} = Desc) ->
    Fetched = [dict:fetch(T, QLCData) || T <- Tables],
    Desc#qhdesc{generators = lists:reverse(Gens) ++ Fetched}.
%% where/2 translates an Erlsql where-expression into QLC filter
%% strings (accumulated in #qhdesc.filters) plus erl_eval bindings for
%% literal comparison values. Logical operators recurse with an empty
%% filter list and recombine the sub-filters with andalso/orelse.
where({Where1, 'and', Where2}, QHDesc) ->
    QHDesc1 = where(Where1, QHDesc),
    where(Where2, QHDesc1);
where({'or', Where}, #qhdesc{filters = Filters} = QHDesc) when is_list(Where) ->
    QHDesc1 = where(Where, QHDesc#qhdesc{filters = []}),
    OrFilter = "(" ++ combinewith(" orelse ", QHDesc1#qhdesc.filters) ++ ")",
    QHDesc1#qhdesc{filters = [OrFilter | Filters]};
where({'not', Where}, #qhdesc{filters = Filters} = QHDesc) ->
    QHDesc1 = where(Where, QHDesc#qhdesc{filters = []}),
    NotFilter = "false == (" ++ combinewith(" andalso ", QHDesc1#qhdesc.filters) ++ ")",
    QHDesc1#qhdesc{filters = [NotFilter | Filters]};
where({'and', Where}, #qhdesc{filters = Filters} = QHDesc) ->
    QHDesc1 = where(Where, QHDesc#qhdesc{filters = []}),
    AndFilter = "(" ++ combinewith(" andalso ", QHDesc1#qhdesc.filters) ++ ")",
    QHDesc1#qhdesc{filters = [AndFilter | Filters]};
where([Where | Rest], QHDesc) ->
    where(Rest, where(Where, QHDesc));
where([], QHDesc) ->
    QHDesc;
where({From, Op, To}, #qhdesc{metadata = QLCData} = QHDesc) when is_tuple(From) == false ->
    % Qualify a bare field with the table it resolves to
    [Table | _] = resolve_field(From, QLCData),
    where({{Table,From}, Op, To}, QHDesc);
where({{_,_} = From, 'is', 'null'}, #qhdesc{filters = Filters, metadata = QLCData} = QHDesc) ->
    % NULL is represented by the atom 'undefined' in mnesia records
    QHDesc#qhdesc{filters = [dict:fetch(From, QLCData) ++ " == undefined" | Filters]};
where({{_,_} = From, 'like', To}, QHDesc) when is_binary(To) ->
    where({From, 'like', erlang:binary_to_list(To)}, QHDesc);
where({{Table,Field} = From, 'like', To}, #qhdesc{filters = Filters, metadata = QLCData} = QHDesc) ->
    % Translate the SQL pattern into an anchored regex: % becomes .*
    To1 = re:replace(To, "%", ".*", [{return,list}, global]),
    To2 = "\"^" ++ To1 ++ "$\"",
    % Binary-stored fields are converted to lists before matching
    Filter = case mnesia_type(Table, Field) of
        binary -> "erlang:binary_to_list(" ++ dict:fetch(From, QLCData) ++ ")";
        _Other -> dict:fetch(From, QLCData)
    end,
    QHDesc#qhdesc{filters = ["re:run(" ++ Filter ++ ", " ++ To2 ++ ") /= nomatch" | Filters]};
where({{_, _} = From, '=', To}, QHDesc) ->
    % SQL '=' is Erlang '=='
    where({From, "==", To}, QHDesc);
where({{_, _} = From, Op, To}, QHDesc) when is_atom(Op) ->
    where({From, atom_to_list(Op), To}, QHDesc);
where({{_, _} = From, Op, {Table, Field} = To}, #qhdesc{filters = Filters, metadata = QLCData} = QHDesc)
  when is_atom(Table), is_atom(Field) ->
    % Field-to-field comparison: both sides are element/2 accessors
    QHDesc#qhdesc{filters = [lists:concat([dict:fetch(From, QLCData), " ", Op, " ",
        dict:fetch(To, QLCData)]) | Filters]};
where({{Table, Field} = From, Op, To}, #qhdesc{filters = Filters, bindings = Bindings, metadata = QLCData} = QHDesc) ->
    case resolve_field(To, QLCData) of
        [ToTable | _] -> where({From, Op, {ToTable, To}}, QHDesc);
        [] ->
            % Literal comparison: bind the (type-converted) value to a
            % fresh QLC variable. The variable name is derived from the
            % number of existing bindings, which is collision-free
            % within a query and keeps atom usage bounded. The previous
            % random:uniform/1 scheme used the deprecated 'random'
            % module (removed in OTP 26) and could repeat names.
            Var = list_to_atom("Var" ++ integer_to_list(length(erl_eval:bindings(Bindings)))),
            QHDesc#qhdesc{
                filters = [lists:concat([dict:fetch(From, QLCData), " ", Op, " ", Var]) | Filters],
                bindings = erl_eval:add_binding(Var, convert(Table, Field, To), Bindings)}
    end;
where(undefined, QHDesc) ->
    QHDesc;
where(Where, _QHDesc) ->
    ?L(["Unhandled where: ", Where]),
    exit("Unhandled where").
% extras/2 handles trailing query clauses (order_by, limit). Sorting is
% applied with qlc:keysort after handle creation; limits use a QLC
% cursor so only the requested window of answers is fetched.
extras([Extra | Extras], QHDesc) ->
    QHDesc1 = extras(Extra, QHDesc),
    extras(Extras, QHDesc1);
extras([], QHDesc) ->
    QHDesc;
extras({order_by, {Field, Order}}, #qhdesc{metadata = QLCData} = QHDesc) when is_atom(Field) ->
    % Sort on the field's tuple position within the first table
    QHDesc#qhdesc{postqh =
        fun(QH, QHOptions) ->
            [Table | _Rest] = dict:fetch(tables, QLCData),
            SortOptions = [{order, translate_order(Order)} | QHOptions],
            qlc:keysort(dict:fetch({index,Table,Field}, QLCData), QH,
                SortOptions)
        end};
extras({limit, Limit}, QHDesc) ->
    QHDesc#qhdesc{evalfun =
        fun(QH, QHOptions) ->
            QHCursor = qlc:cursor(QH, QHOptions),
            Results = qlc:next_answers(QHCursor, Limit),
            qlc:delete_cursor(QHCursor),
            Results
        end};
extras({limit, 0, Limit}, QHDesc) ->
    extras({limit, Limit}, QHDesc);
extras({limit, From, Limit}, QHDesc) ->
    % Offset + limit: skip the first From answers, keep the next Limit
    QHDesc#qhdesc{evalfun =
        fun(QH, QHOptions) ->
            QHCursor = qlc:cursor(QH, QHOptions),
            qlc:next_answers(QHCursor, From),
            Results = qlc:next_answers(QHCursor, Limit),
            qlc:delete_cursor(QHCursor),
            Results
        end};
extras(undefined, QHDesc) ->
    QHDesc;
extras(Extras, _QHDesc) ->
    ?L(["Unhandled extras: ", Extras]),
    exit("Unhandled extras").
% Map Erlsql sort-direction atoms onto qlc keysort order options.
translate_order(Direction) ->
    case Direction of
        asc -> ascending;
        desc -> descending
    end.
% Default post-query-handle hook: leave the handle untouched.
postqh(QueryHandle, _QHOptions) ->
    QueryHandle.
% Default post-evaluation hook: wrap the result rows as {data, Rows}.
posteval(Results) ->
    {data, Results}.
% Post-evaluation hook for COUNT(...): wrap the number of result rows
% in the driver's standard {ok, [{N}]} shape.
count(Rows) ->
    N = length(Rows),
    {ok, [{N}]}.
% Shared implementation for min()/max(): sort the query handle on the
% field (Options choose direction/uniqueness) and post-evaluate by
% extracting the field from the first -- i.e. best -- record.
min_max(Field, #qhdesc{metadata = QLCData} = QHDesc, Options) ->
    [Table | _] = resolve_field(Field, QLCData),
    QHDesc1 = QHDesc#qhdesc{postqh =
        fun(QH, _QHOptions) ->
            qlc:keysort(dict:fetch({index,Table,Field}, QLCData), QH, Options)
        end},
    QHDesc2 = QHDesc1#qhdesc{posteval =
        fun(Results) ->
            % Note: hd/1 crashes on an empty result set
            {ok, [{element(dict:fetch({index,Table,Field}, QLCData), hd(Results))}]}
        end},
    QHDesc2#qhdesc{expressions = [dict:fetch({alias, Table}, QLCData)]}.
% Build the QLC metadata dictionary for one table or a list of tables.
% Seeds empty 'tables' and 'aliases' accumulators, then records, per
% table, its alias / row-variable generator ("ALIAS <- mnesia:table(T)")
% and the element/2 accessor data for each of its fields.
get_qlc_metadata(Table) when not is_list(Table) ->
    get_qlc_metadata([Table]);
get_qlc_metadata(Tables) ->
    Seed = lists:foldl(fun(Key, D) -> dict:store(Key, [], D) end,
                       dict:new(), [tables, aliases]),
    get_qlc_metadata(Tables, Seed).
% Normalize each table spec to {Table, 'as', Alias} form; the default
% alias is the table name upper-cased so it is a legal QLC variable.
get_qlc_metadata([Table | Tables], QLCData) when is_tuple(Table) == false ->
    % Create an alias for the table (table name in all caps)
    get_qlc_metadata({Table, 'as', string:to_upper(atom_to_list(Table))}, Tables, QLCData);
get_qlc_metadata([{_, 'as', _} = Table | Tables], QLCData) ->
    get_qlc_metadata(Table, Tables, QLCData);
get_qlc_metadata([], QLCData) ->
    QLCData.
% For each table store the following key => value pairs:
% {alias, Table} => Alias where Table is atom and Alias is string
% {table, Alias} => Table where Table is atom and Alias is string
% {new_record, Table} => {Table, undefined, undefined...} where Table is atom and value is a tuple
% Table => MnesiaTable where Table is atom and MnesiaTable is the string "Alias <- mnesia:table(Table)"
%
% Also store:
% tables => [Tables] where Tables is a list of all tables in query
% aliases => [Aliases] where Aliases is a list of all table aliases in query
get_qlc_metadata({Table, 'as', Alias}, Tables, QLCData) ->
    QLCData1 = dict:store({alias, Table}, Alias, QLCData),
    QLCData2 = dict:store({table, Alias}, Table, QLCData1),
    % Seed the record prototype with just the record tag; one
    % 'undefined' slot per field is appended while indexing below
    QLCData3 = dict:store({new_record, Table}, {Table}, QLCData2),
    QLCData4 = get_qlc_metadata(table_fields(Table), 2, Table, Alias, QLCData3),
    MnesiaTable = lists:concat([Alias, " <- mnesia:table(", Table, ")"]),
    QLCData5 = dict:store(Table, MnesiaTable, QLCData4),
    QLCData6 = dict:store(tables, dict:fetch(tables, QLCData5) ++ [Table], QLCData5),
    QLCData7 = dict:store(aliases, dict:fetch(aliases, QLCData6) ++ [Alias], QLCData6),
    get_qlc_metadata(Tables, QLCData7).
% Record per-field QLC metadata for Table/Alias starting at FieldIndex:
%   {Table, Field} and {Alias, Field} => "element(Index, Alias)" string
%   {index, Table, Field}             => the field's tuple position
% and the record prototype under {new_record, Table} grows one
% 'undefined' slot per field.
get_qlc_metadata([], _FieldIndex, _Table, _Alias, QLCData) ->
    QLCData;
get_qlc_metadata([Field | Rest], FieldIndex, Table, Alias, QLCData) ->
    Access = lists:concat(["element(", FieldIndex, ", ", Alias, ")"]),
    Proto = dict:fetch({new_record, Table}, QLCData),
    Stores = [{{Table, Field}, Access},
              {{Alias, Field}, Access},
              {{index, Table, Field}, FieldIndex},
              {{new_record, Table}, erlang:append_element(Proto, undefined)}],
    QLCData1 = lists:foldl(fun({K, V}, D) -> dict:store(K, V, D) end, QLCData, Stores),
    get_qlc_metadata(Rest, FieldIndex + 1, Table, Alias, QLCData1).
%% User_properties for field is defined as:
%% {Field, {Type, Modifier}, Null, Key, Default, Extra, MnesiaType}
%% where Field is an atom,
%% Type through Extra are as defined in erlydb_field:new/6
%% MnesiaType is the type to store the field as in mnesia.
%%
%% Currently the driver tries to do a limited bit of conversion of types. For example, you may want
%% to store strings as binaries in mnesia. Erlydb may pass in strings during querying, updates, etc
%% and the string will need to be converted to/from a binary.
get_user_properties(Table, Field) ->
    % Fall back to positional defaults when the table declares no
    % user_properties entry of the expected 7-tuple shape for Field
    case lists:keysearch(Field, 1, mnesia:table_info(Table, user_properties)) of
        {value, {Field, {_Type, _Modifier}, _Null, _Key, _Default, _Extra, _MnesiaType} = UserProperties} -> UserProperties;
        false -> get_default_user_properties(table_type(Table), Field, field_index(Table, Field))
    end.
%% Heuristic column properties when a table declares none.
%% The first column (Index =:= 1) becomes the primary key; a name ending in
%% "id" is treated as an integer key (auto-incrementing 'identity' unless the
%% table is a bag), anything else as a varchar stored as a binary.
%% Later columns are nullable; "..id" names map to integers, others to
%% varchar/binary.
get_default_user_properties(TableType, Field, 1) ->
    case is_id_field(Field) of
        true when TableType =:= bag ->
            {Field, {integer, undefined}, false, primary, undefined, undefined, integer};
        true ->
            {Field, {integer, undefined}, false, primary, undefined, identity, integer};
        false ->
            {Field, {varchar, undefined}, false, primary, undefined, undefined, binary}
    end;
get_default_user_properties(_TableType, Field, Index) when Index > 1 ->
    case is_id_field(Field) of
        true ->
            {Field, {integer, undefined}, true, undefined, undefined, undefined, integer};
        false ->
            {Field, {varchar, undefined}, true, undefined, undefined, undefined, binary}
    end.

%% True when the field name ends in "id" (e.g. id, user_id).
is_id_field(Field) ->
    lists:suffix("id", atom_to_list(Field)).
%% @doc Return the first field of the given table if it is an identity field (auto-incrementing)
%% or return undefined
%% Only the first column is inspected; the 'identity' marker is the 6th
%% (Extra) element of the user-properties tuple.
get_identity_field(Table) ->
    % table_fields/1 returns mnesia's attribute list; a table always has at
    % least one attribute, so the match cannot fail for a valid table.
    [Field | _Rest] = table_fields(Table),
    case get_user_properties(Table, Field) of
        {Field, {_Type, _Modifier}, _Null, _Key, _Default, identity, _MnesiaType} -> Field;
        _Other -> undefined
    end.
%% @doc Find the field's position in the given table
%% Positions are 1-based; 0 is returned when the field is not an attribute
%% of the table (see field_index/3).
field_index(Table, Field) ->
    field_index(Field, 1, table_fields(Table)).
%% 1-based position of Field within the attribute list, or 0 when absent.
field_index(_Field, _Position, []) ->
    0;
field_index(Field, Position, [Field | _Rest]) ->
    Position;
field_index(Field, Position, [_Other | Rest]) ->
    field_index(Field, Position + 1, Rest).
% mnesia table metadata shortcuts: storage semantics (set/ordered_set/bag),
% the ordered attribute (column) list, and the current record count.
table_type(Table) ->
    mnesia:table_info(Table, type).
table_fields(Table) ->
    mnesia:table_info(Table, attributes).
table_size(Table) ->
    mnesia:table_info(Table, size).
% Storage type for a field, taken from the last element of its
% user-properties tuple (see get_user_properties/2).
mnesia_type(Table, Field) ->
    {Field, {_Type, _Modifier}, _Null, _Key, _Default, _Extra, MnesiaType} =
        get_user_properties(Table, Field),
    MnesiaType.
%% Convert Value to the type of the given Table Field. No conversion takes place if there is
%% no defined type for Field.
convert(Table, Field, Value) ->
    convert(Value, mnesia_type(Table, Field)).
% FIXME there has to be some utility out there to do this conversion stuff...
% Convert Value to the given target type. Clause order matters: the
% 'undefined' value and 'undefined' type pass through first, then clauses
% are grouped by the runtime type of Value (integer, float, list, atom,
% binary), then a binary catch-all, then date/time tuples.
convert(undefined, _Type) ->
    undefined;
convert(Value, undefined) ->
    Value;
% --- integer values ---
convert(Value, integer) when is_integer(Value) ->
    Value;
% NOTE(review): an integer is returned unchanged for target type 'float'
% (no int-to-float widening) — presumably intentional, confirm if exact
% float storage is required.
convert(Value, float) when is_integer(Value) ->
    Value;
convert(Value, list) when is_integer(Value) ->
    integer_to_list(Value);
convert(Value, atom) when is_integer(Value) ->
    list_to_atom(integer_to_list(Value));
convert(Value, binary) when is_integer(Value) ->
    list_to_binary(integer_to_list(Value));
% --- float values ---
convert(Value, integer) when is_float(Value) ->
    trunc(Value);
convert(Value, float) when is_float(Value) ->
    Value;
convert(Value, list) when is_float(Value) ->
    float_to_list(Value);
convert(Value, atom) when is_float(Value) ->
    list_to_atom(float_to_list(Value));
convert(Value, binary) when is_float(Value) ->
    list_to_binary(float_to_list(Value));
% --- list (string) values ---
convert(Value, integer) when is_list(Value) ->
    list_to_integer(Value);
convert(Value, float) when is_list(Value) ->
    list_to_float(Value);
convert(Value, list) when is_list(Value) ->
    Value;
convert(Value, atom) when is_list(Value) ->
    list_to_atom(Value);
convert(Value, binary) when is_list(Value) ->
    list_to_binary(Value);
% --- atom values ---
convert(Value, integer) when is_atom(Value) ->
    list_to_integer(atom_to_list(Value));
convert(Value, float) when is_atom(Value) ->
    list_to_float(atom_to_list(Value));
convert(Value, list) when is_atom(Value) ->
    atom_to_list(Value);
convert(Value, atom) when is_atom(Value) ->
    Value;
convert(Value, binary) when is_atom(Value) ->
    list_to_binary(atom_to_list(Value));
% --- binary values ---
convert(Value, integer) when is_binary(Value) ->
    list_to_integer(binary_to_list(Value));
convert(Value, float) when is_binary(Value) ->
    list_to_float(binary_to_list(Value));
convert(Value, list) when is_binary(Value) ->
    binary_to_list(Value);
convert(Value, atom) when is_binary(Value) ->
    list_to_atom(binary_to_list(Value));
convert(Value, binary) when is_binary(Value) ->
    Value;
convert(Value, binary) ->
    % catch all
    Value;
% --- calendar tuples: strip an erlydb {datetime|date|time, _} tag, or pass
% an already-untagged value through unchanged ---
convert({datetime, Value}, datetime) ->
    Value;
convert(Value, datetime) ->
    Value;
convert({date, Value}, date) ->
    Value;
convert(Value, date) ->
    Value;
convert({time, Value}, time) ->
    Value;
convert(Value, time) ->
    Value.
%% Tables (from the 'tables' metadata entry) that define the given field.
%% The result lists matching tables in reverse metadata order, mirroring
%% the original foldl accumulation.
resolve_field(From, QLCData) ->
    resolve_field(From, dict:fetch(tables, QLCData), QLCData).

%% NOTE(review): a string field name is interned with list_to_atom/1 —
%% fine for trusted schema names, unsafe for untrusted input (atom table
%% is never garbage collected).
resolve_field(From, Tables, QLCData) when is_list(From) ->
    resolve_field(list_to_atom(From), Tables, QLCData);
resolve_field(From, Tables, QLCData) ->
    [Table || Table <- lists:reverse(Tables),
              dict:is_key({Table, From}, QLCData)].
%% Set one or more fields on Record (each value converted to the field's
%% mnesia storage type via convert/3) and write the result to mnesia.
%% A single non-list Field/Value pair is normalised to singleton lists.
%% Returns ok, crashing on any mnesia write failure.
%%
%% Fix: replaced the unidiomatic guard `is_list(Field) == false` with
%% `not is_list(Field)` (same truth table, standard Erlang style).
write(Record, Field, Value, QLCData) when not is_list(Field) ->
    write(Record, [Field], [Value], QLCData);
write(Record, [Field | Fields], [Value | Values], QLCData) ->
    % element(1, Record) is the record tag, i.e. the table name.
    Table = element(1, Record),
    FieldIndex = dict:fetch({index, Table, Field}, QLCData),
    Record1 = setelement(FieldIndex, Record, convert(Table, Field, Value)),
    write(Record1, Fields, Values, QLCData);
write(Record, [], [], _QLCData) ->
    ok = mnesia:write(Record).
%% @doc Traverse the table executing the given function with each record. The function must
%% accept the record followed by the given arguments and return its arguments which will be
%% supplied in the next call. For example: sum(Record, Num) -> Num + 1.
%% The whole traversal runs inside one mnesia transaction and yields
%% {atomic, {ok, FinalArgs}} on success.
traverse(Fun, Args, Table) ->
    mnesia:transaction(fun() -> traverse(Fun, Args, Table, mnesia:first(Table)) end).
traverse(_Fun, Args, _Table, '$end_of_table') ->
    {ok, Args};
traverse(Fun, Args, Table, Key) ->
    % for set and ordered_set tables this will execute once, for bag tables this could execute many times...
    % Records are read with a write lock so Fun may safely update them.
    Args2 = lists:foldl(fun(Record, Args1) -> Fun(Record, Args1) end, Args, mnesia:read(Table, Key, write)),
    traverse(Fun, Args2, Table, mnesia:next(Table, Key)).
%% @doc Join the elements of List into a single flat string with ", "
%% between consecutive elements, e.g. comma([a, 1]) -> "a, 1".
comma(List) ->
    combinewith(", ", List).

%% @doc Join List into a flat string with Separator between consecutive
%% elements. Elements and the separator may be atoms, integers, floats or
%% strings (anything lists:concat/1 accepts). Returns "" for an empty list.
%%
%% Fix: the previous lists:foldl implementation returned its raw accumulator
%% tuple {1, ""} for the empty list instead of a string, and relied on the
%% accumulator changing type on the final element. lists:join/2 handles all
%% lengths uniformly.
combinewith(Separator, List) ->
    lists:concat(lists:join(Separator, List)).
%% @doc Execute a group of statements in a transaction.
%% Fun is the function that implements the transaction.
%% Fun can contain an arbitrary sequence of calls to
%% the erlydb_mnesia's query functions. If Fun crashes or returns
%% or throws 'error' or {error, Err}, the transaction is automatically
%% rolled back.
%%
%% @spec transaction(Fun::function(), Options::options()) ->
%%   {atomic, Result} | {aborted, Reason}
% Options are accepted for driver-interface compatibility but ignored;
% the call delegates directly to mnesia's transaction machinery.
transaction(Fun, _Options) ->
    mnesia:transaction(Fun).
%% @doc Execute a statement against Mnesia.
%%
%% @spec select(Statement::statement(), Options::options()) ->
%%   {ok, Rows::list()} | {error, Error}
select(Statement, Options) ->
    %% Fix: this previously passed [] as FixedVals. An empty list matches
    %% neither {data, _} clause of get_select_result/2 (one requires
    %% 'undefined', the other a non-empty [Table | _]), so raw {data, Rows}
    %% leaked out instead of the {ok, Rows} promised by the @spec.
    select2(Statement, Options, undefined).

%% @doc Execute a statement for records belonging to the given module,
%% returning all rows with additional data to support
%% higher-level ErlyDB features.
%%
%% @spec select_as(Module::atom(), Statement::statement(), Options::options()) ->
%%   {ok, Rows} | {error, Error}
select_as(Module, Statement, Options) ->
    select2(Statement, Options, [Module, false]).

%% Shared helper: run the statement via q/2 and post-process the result.
select2(Statement, Options, FixedVals) ->
    get_select_result(q(Statement, Options), FixedVals).
%% Post-process a query result. With FixedVals =:= undefined every result
%% tuple becomes a plain list; with FixedVals = [Table | _] the fixed values
%% are prepended to each row (dropping the record tag when the tuple is a
%% whole record of Table). Anything other than {data, _} passes through.
get_select_result({data, Data}, undefined) ->
    {ok, [tuple_to_list(DataTuple) || DataTuple <- Data]};
get_select_result({data, Data}, [Table | _Rest] = FixedVals) ->
    {ok, [fixed_row(Table, FixedVals, DataTuple) || DataTuple <- Data]};
get_select_result(Other, _) ->
    Other.

%% Build one row: some data tuples are records themselves with the
%% table/record name as the first element — strip that tag before
%% prepending the fixed values.
fixed_row(Table, FixedVals, DataTuple) ->
    [First | Fields] = DataList = tuple_to_list(DataTuple),
    Tail = if
               Table == First -> Fields;
               true -> DataList
           end,
    list_to_tuple(FixedVals ++ Tail).
%% @doc Execute a update to Mnesia.
%%
%% @spec update(Statement::statement(), Options::options()) ->
%%	{ok, NumAffected} | {error, Err}
% Thin alias over the statement executor q/2; the result shape is whatever
% q/2 produces for the given statement.
update(Statement, Options) ->
    q(Statement, Options).
%% @doc Get the id of the last inserted record.
%%
%% The value is read from the caller's process dictionary under the key
%% 'mnesia_last_insert_id'; it is {ok, undefined} when this process has not
%% recorded an insert. (Fix: stray non-code dataset residue removed from
%% the end of this definition.)
%%
%% @spec get_last_insert_id(Table::atom(), Options::options()) -> term()
get_last_insert_id(_Table, _Options) ->
    {ok, get(mnesia_last_insert_id)}.
%% Copyright ProcessOne 2006-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%% @author <NAME> <<EMAIL>>
%% @doc
%% The module <strong>{@module}</strong> provides functions which
%% doesn't deserve a dedicated module.
-module(exmpp_utils).
%% Binary and string helpers.
-export([
any_to_list/1,
any_to_binary/1,
strip/1,
strip/2
]).
%% Utils.
-export([
random_id/0,
random_id/1
]).
%% --------------------------------------------------------------------
%% Binary and string helpers.
%% --------------------------------------------------------------------
%% @spec (Any) -> String
%%     Any = binary() | string() | atom() | integer()
%%     String = string()
%% @doc Convert any supported type to its `string()' form.
%%
%% Binaries go through {@link erlang:binary_to_list/1}, atoms through
%% {@link erlang:atom_to_list/1}, integers through
%% {@link erlang:integer_to_list/1}; a string is returned as is.
-spec(any_to_list/1 ::
  (binary() | string() | integer() | atom()) -> string()).

any_to_list(Binary) when is_binary(Binary) ->
    binary_to_list(Binary);
any_to_list(String) when is_list(String) ->
    String;
any_to_list(Atom) when is_atom(Atom) ->
    atom_to_list(Atom);
any_to_list(Integer) when is_integer(Integer) ->
    integer_to_list(Integer).
%% @spec (Any) -> Binary
%%     Any = binary() | string() | atom() | integer()
%%     Binary = binary()
%% @doc Convert any supported type to its `binary()' form.
%%
%% Atoms and integers are first rendered as strings, then converted with
%% {@link erlang:list_to_binary/1}; a binary is returned as is.
-spec(any_to_binary/1 ::
  (binary() | string() | integer() | atom()) -> binary()).

any_to_binary(Binary) when is_binary(Binary) ->
    Binary;
any_to_binary(String) when is_list(String) ->
    list_to_binary(String);
any_to_binary(Atom) when is_atom(Atom) ->
    list_to_binary(atom_to_list(Atom));
any_to_binary(Integer) when is_integer(Integer) ->
    list_to_binary(integer_to_list(Integer)).
%% @spec strip(Stream) -> Stripped
%%     Stream = binary() | string()
%%     Stripped = binary() | string()
%% @doc Strip leading and trailing blanks.
%%
%% @see strip/2.
-spec(strip/1 ::
  (binary()) -> binary();
  (string()) -> string()
).

strip(Stream) ->
    strip(Stream, both).

%% @spec strip(Stream, Direction) -> Stripped
%%     Stream = binary() | string()
%%     Direction = left | right | both
%%     Stripped = binary() | string()
%% @doc Strip leading and/or trailing blanks, depending on `Direction'.
%%
%% Blank characters are `\s', `\t', `\n' and `\r'.
-spec(strip/2 ::
  (binary(), left | right | both) -> binary();
  (string(), left | right | both) -> string()
).

strip(Stream, left) ->
    drop_leading(Stream);
strip(Stream, right) ->
    drop_trailing(Stream);
strip(Stream, both) ->
    drop_trailing(drop_leading(Stream)).

%% Remove leading blanks from a binary or a string; anything else is
%% returned unchanged once the first non-blank is reached.
drop_leading(<<C:8, Rest/binary>>) when C == $\s; C == $\t; C == $\n; C == $\r ->
    drop_leading(Rest);
drop_leading([C | Rest]) when C == $\s; C == $\t; C == $\n; C == $\r ->
    drop_leading(Rest);
drop_leading(Stripped) ->
    Stripped.

%% Remove trailing blanks. Binaries are sliced with binary:part/3 after
%% scanning back from the end; lists are handled by reversing and reusing
%% the leading-blank logic.
drop_trailing(Bin) when is_binary(Bin) ->
    binary:part(Bin, 0, trimmed_size(Bin, byte_size(Bin)));
drop_trailing(List) when is_list(List) ->
    lists:reverse(drop_leading(lists:reverse(List))).

%% Index just past the last non-blank byte of Bin, scanning from position N
%% towards the front; 0 means the binary is all blanks (or empty).
trimmed_size(_Bin, 0) ->
    0;
trimmed_size(Bin, N) ->
    case binary:at(Bin, N - 1) of
        C when C == $\s; C == $\t; C == $\n; C == $\r ->
            trimmed_size(Bin, N - 1);
        _ ->
            N
    end.
%% --------------------------------------------------------------------
%% Utils.
%% --------------------------------------------------------------------
%% @spec () -> ID
%%     ID = string()
%% @doc Generate a random ID with the default `exmpp' prefix.
%%
%% @see random_id/1.
-spec(random_id/0 :: () -> string()).

random_id() ->
    random_id("exmpp").

%% @spec (Prefix) -> ID
%%     Prefix = string()
%%     ID = string()
%% @doc Generate a random stanza ID, "Prefix-Digits" (or bare digits when
%% Prefix is `undefined' or "").
%%
%% Fix: now uses rand:uniform/1 — the `random' module is deprecated and no
%% longer shipped with recent Erlang/OTP releases. `rand' seeds itself per
%% process, so callers no longer need to seed the generator.
%%
%% The ID is not guaranteed to be unique.
-spec(random_id/1 :: (string() | undefined) -> string()).

random_id(undefined) ->
    integer_to_list(rand:uniform(65536 * 65536));
random_id("") ->
    random_id(undefined);
random_id(Prefix) when is_atom(Prefix) ->
    random_id(atom_to_list(Prefix));
random_id(Prefix) when is_list(Prefix) ->
    Prefix ++ "-" ++ random_id(undefined).
-module(day2).
-include_lib("eunit/include/eunit.hrl").
-export([run/1]).
-behaviour(aoc).
-type direction() :: forward | down | up.
-type increment() :: pos_integer().
-type command() :: {direction(), increment()}.
-type position1() :: #{
horizontal := non_neg_integer(),
depth := non_neg_integer()
}.
-type position2() :: #{
horizontal := non_neg_integer(),
depth := non_neg_integer(),
aim := non_neg_integer()
}.
% Entry point: read the puzzle input, compute the final submarine position
% for the requested part (1 or 2), then print the position and the product
% of horizontal position and depth (the puzzle answer).
-spec run(argparse:part()) -> ok.
run(Part) ->
    Commands = read_input(),
    FinalPosition =
        case Part of
            1 -> calculate_final_position1(Commands);
            2 -> calculate_final_position2(Commands)
        end,
    io:format("Final position: ~p~n", [FinalPosition]),
    #{horizontal := Horizontal, depth := Depth} = FinalPosition,
    io:format("Product: ~p~n", [Horizontal * Depth]).
%% Part 1: fold the commands over a starting position of {0, 0}.
-spec calculate_final_position1([command()]) -> position1().
calculate_final_position1(Commands) ->
    lists:foldl(fun move1/2, #{horizontal => 0, depth => 0}, Commands).

%% Part-1 semantics: forward moves horizontally, down/up change depth.
-spec move1(command(), position1()) -> position1().
move1({forward, N}, Position) ->
    maps:update_with(horizontal, fun(H) -> H + N end, Position);
move1({down, N}, Position) ->
    maps:update_with(depth, fun(D) -> D + N end, Position);
move1({up, N}, Position) ->
    maps:update_with(depth, fun(D) -> D - N end, Position).
% EUnit: part 1 over the worked example from the puzzle statement.
calculate_final_position1_test() ->
    ?assertEqual(
        #{horizontal => 15, depth => 10},
        calculate_final_position1(example_commands())
    ).
%% Part 2: same fold, but the position also carries an 'aim'.
-spec calculate_final_position2([command()]) -> position2().
calculate_final_position2(Commands) ->
    Start = #{horizontal => 0, depth => 0, aim => 0},
    lists:foldl(fun move2/2, Start, Commands).

%% Part-2 semantics: down/up change aim only; forward moves horizontally
%% and dives by aim * increment.
-spec move2(command(), position2()) -> position2().
move2({forward, N}, #{horizontal := H, depth := D, aim := A} = Position) ->
    Position#{horizontal := H + N, depth := D + A * N};
move2({down, N}, #{aim := A} = Position) ->
    Position#{aim := A + N};
move2({up, N}, #{aim := A} = Position) ->
    Position#{aim := A - N}.
% EUnit: part 2 over the worked example from the puzzle statement.
calculate_final_position2_test() ->
    ?assertEqual(
        #{horizontal => 15, depth => 60, aim => 10},
        calculate_final_position2(example_commands())
    ).
% The six-command example used throughout the Advent of Code day 2 text.
-spec example_commands() -> [command()].
example_commands() ->
    [
        {forward, 5},
        {down, 5},
        {forward, 8},
        {up, 3},
        {down, 8},
        {forward, 2}
    ].
% Read and parse "input/day2": one "direction amount" command per line.
% Crashes (badmatch) if the file is missing, which is fine for a puzzle
% runner.
-spec read_input() -> Input :: [command()].
read_input() ->
    {ok, Binary} = file:read_file("input/day2"),
    StringList = string:lexemes(binary_to_list(Binary), "\n"),
    lists:map(fun string_to_command/1, StringList).
%% Parse one "direction amount" line, e.g. "forward 3" -> {forward, 3}.
%% Exactly two space-separated tokens are required (badmatch otherwise).
%% NOTE(review): list_to_atom/1 interns the direction token — safe for the
%% fixed puzzle vocabulary, but not for untrusted input.
-spec string_to_command(string()) -> command().
string_to_command(Line) ->
    [Direction, Amount] = string:lexemes(Line, " "),
    {list_to_atom(Direction), list_to_integer(Amount)}.
% EUnit: sanity check for the line parser. (Fix: stray non-code dataset
% residue removed from the end of this definition.)
string_to_command_test() ->
    ?assertEqual({forward, 3}, string_to_command("forward 3")).
-module(bo_missing_operations).
-behaviour(bo_task).
-export([description/0, spec/0, score/0, timeout/0, tests/0]).
%%==============================================================================
%% API
%%==============================================================================
% Task description shown to the player (bo_task behaviour callback).
-spec description() -> binary().
description() -> <<"Missing operations: You are tasked with finding the correct"
                   " combination of operators to solve a simple problem. Create"
                   " a function that receives a list of integers and a solution"
                   " (another integer). Return a list of operators (the '+' and"
                   " '-' atoms) that will solve the equation.\n"
                   "For example, given ([2, 3, 1], 4) return ['+', '-'] because"
                   " 2 + 3 - 1 = 4. If there's no valid list of operators, just"
                   " return the atom 'notfound'.">>.
% Human-readable input/output signature for the expected solution.
-spec spec() -> bo_task:spec().
spec() ->
  #{ input => [<<"[pos_integer()]">>, <<"pos_integer()">>]
   , output => <<"notfound | ['+', '-']">>
   }.
% Points awarded for solving this task.
-spec score() -> 250.
score() -> 250.
% Per-test time limit in milliseconds.
-spec timeout() -> 1000.
timeout() -> 1000.
%% One checker fun per generated case (bo_task behaviour callback).
-spec tests() -> [bo_task:test()].
tests() ->
    lists:map(fun build_test/1, cases()).
% Wrap one {Operands, Solution} case in a checker fun: the player's Fun is
% called with the case; 'notfound' is verified by brute force, any other
% answer is evaluated, and an exception becomes an {error, #{...}} verdict.
build_test({Operands, Solution}) ->
  fun(Fun) ->
      try Fun(Operands, Solution) of
        notfound -> check_impossible(Operands, Solution);
        Answer -> check_answer(Operands, Solution, Answer)
      catch
        _:Error ->
          {error, #{ input => [Operands, Solution]
                   , output => Error
                   , expected => <<"Not an error, that's for sure.">>}
          }
      end
  end.

%%==============================================================================
%% Utils
%%==============================================================================
% Eight random cases of growing size (2, 4, ... 16 operands).
cases() ->
  [make_case(N) || N <- lists:seq(1, 8)].

% Build one case: 2*Index random operands in 1..256. With probability ~0.5
% the solution is derived from a random operator list (guaranteed solvable);
% otherwise the solution is a fresh random integer, which may or may not be
% reachable — check_impossible/2 verifies 'notfound' answers by brute force.
make_case(Index) ->
  L = [rand:uniform(256) || _ <- lists:seq(1, 2 * Index)],
  case rand:uniform() < 0.5 of
    true ->
      Operators = [case rand:uniform() < 0.5 of
                     true -> '+';
                     false -> '-'
                   end || _ <- lists:seq(1, length(L) - 1)],
      {L, test_result(['+' | Operators], L, 0)};
    false ->
      {L, rand:uniform(256)}
  end.
%% Grade a concrete operator list: evaluate it over the operands and compare
%% with the expected solution, reporting the specific failure mode.
check_answer(Operands, Solution, Answer) ->
    Fail = fun(Expected) ->
               {error, #{ input => [Operands, Solution]
                        , output => Answer
                        , expected => Expected}}
           end,
    case test_result(['+' | Answer], Operands, 0) of
        bad_list ->
            Fail(<<"A list of the right length.">>);
        bad_operator ->
            Fail(<<"A list with the right operators.">>);
        Solution ->
            ok;
        Result ->
            Fail({ <<"A list of operators that result in">>
                 , Solution
                 , <<"not in">>
                 , Result
                 })
    end.

%% Evaluate an operator list over the operands left to right.
%% Returns the total, 'bad_operator' when a non '+'/'-' element is hit (or
%% when operators outlive the operands), or 'bad_list' when the operand
%% list is longer than the operator list.
test_result([Op | Ops], [H | T], Acc) when Op =:= '+'; Op =:= '-' ->
    NewAcc = case Op of
                 '+' -> Acc + H;
                 '-' -> Acc - H
             end,
    test_result(Ops, T, NewAcc);
test_result([_Unknown | _Ops], _Operands, _Acc) ->
    bad_operator;
test_result([], [], Total) ->
    Total;
test_result([], _LeftoverOperands, _Acc) ->
    bad_list;
test_result(_LeftoverOps, [], _Acc) ->
    bad_list.
%% Verify a 'notfound' answer: brute-force every '+'/'-' combination; if any
%% of them reaches Solution, the player was wrong to give up.
check_impossible(Operands, Solution) ->
    Combinations = get_operator_combinations(length(Operands) - 1),
    case lists:filter(fun(C) ->
                          Solution =:= test_result(['+' | C], Operands, 0)
                      end, Combinations) of
        [] -> ok;
        [_ | _] -> {error, #{ input => [Operands, Solution]
                            , output => notfound
                            , expected => <<"Come on, there's a solution.">>
                            }}
    end.

%% All 2^N operator lists of length N over '+' and '-'.
%%
%% Fix: the base case previously returned [['+'], ['_']] — the atom
%% underscore instead of '-'. That made every brute-forced combination end
%% in an invalid operator for its last slot, so solvable cases whose final
%% operator had to be '-' were wrongly accepted as impossible.
get_operator_combinations(1) ->
    [['+'], ['-']];
get_operator_combinations(N) ->
    C = get_operator_combinations(N - 1),
    [['+' | O] || O <- C] ++ [['-' | O] || O <- C].
%%--------------------------------------------------------------------
%% Copyright (c) 2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_misc).
-include("types.hrl").
-export([ merge_opts/2
, maybe_apply/2
, run_fold/2
, run_fold/3
, pipeline/3
, start_timer/2
, start_timer/3
, cancel_timer/1
, proc_name/2
, proc_stats/0
, proc_stats/1
, index_of/2
]).
-export([ drain_deliver/0
, drain_deliver/1
, drain_down/1
]).
-compile({inline,
[ start_timer/2
, start_timer/3
]}).
%% @doc Merge options: entries from Options override same-keyed entries in
%% Defaults ({Key, Value} pairs are keystored; bare flag atoms are added
%% with lists:usort/1, which also sorts and dedups the accumulator).
-spec(merge_opts(Opts, Opts) -> Opts when Opts :: proplists:proplist()).
merge_opts(Defaults, Options) ->
    Merge = fun({Key, _Value} = Pair, Acc) ->
                    lists:keystore(Key, 1, Acc, Pair);
               (Flag, Acc) ->
                    lists:usort([Flag | Acc])
            end,
    lists:foldl(Merge, Defaults, Options).
%% @doc Apply Fun to Arg, propagating 'undefined' untouched (the "maybe"
%% pattern: absent values short-circuit instead of reaching the fun).
-spec(maybe_apply(fun((maybe(A)) -> maybe(A)), maybe(A))
      -> maybe(A) when A :: any()).
maybe_apply(_Fun, undefined) ->
    undefined;
maybe_apply(Fun, Arg) when is_function(Fun) ->
    Fun(Arg).
%% Thread Acc through a list of unary funs, left to right.
run_fold(Funs, Acc) ->
    lists:foldl(fun(Fun, Acc1) -> Fun(Acc1) end, Acc, Funs).

%% @doc RunFold
%% Same as run_fold/2 but each fun also receives a constant State.
run_fold(Funs, Acc, State) ->
    lists:foldl(fun(Fun, Acc1) -> Fun(Acc1, State) end, Acc, Funs).
%% @doc Pipeline
%% Run each fun over the current {Input, State}. A fun may return:
%%   ok                   -> keep input and state
%%   {ok, NState}         -> keep input, replace state
%%   {ok, NInput, NState} -> replace both
%%   {error, Reason}      -> abort with the current state
%%   {error, Reason, NState} -> abort with the given state
pipeline([], Input, State) ->
    {ok, Input, State};
pipeline([Fun | Rest], Input, State) ->
    case Fun(Input, State) of
        ok ->
            pipeline(Rest, Input, State);
        {ok, NewState} ->
            pipeline(Rest, Input, NewState);
        {ok, NewInput, NewState} ->
            pipeline(Rest, NewInput, NewState);
        {error, Reason} ->
            {error, Reason, State};
        {error, Reason, NewState} ->
            {error, Reason, NewState}
    end.
% Start a timer that sends {timeout, Ref, Msg} to the caller after Interval ms.
-spec(start_timer(integer(), term()) -> reference()).
start_timer(Interval, Msg) ->
    start_timer(Interval, self(), Msg).

% Same, but deliver to an explicit pid or registered name.
-spec(start_timer(integer(), pid() | atom(), term()) -> reference()).
start_timer(Interval, Dest, Msg) ->
    erlang:start_timer(Interval, Dest, Msg).

% Cancel a timer started with start_timer/2,3. If the cancel races with the
% timer firing (cancel_timer/1 returns false), the already-delivered
% {timeout, Timer, _} message is flushed from the mailbox so callers never
% see a stale timeout. Non-reference arguments are ignored.
-spec(cancel_timer(maybe(reference())) -> ok).
cancel_timer(Timer) when is_reference(Timer) ->
    case erlang:cancel_timer(Timer) of
        false ->
            receive {timeout, Timer, _} -> ok after 0 -> ok end;
        _ -> ok
    end;
cancel_timer(_) -> ok.

% Build a per-instance process name, e.g. proc_name(emqx_pool, 2) ->
% 'emqx_pool_2'. Creates a new atom; intended for a bounded set of ids.
-spec(proc_name(atom(), pos_integer()) -> atom()).
proc_name(Mod, Id) ->
    list_to_atom(lists:concat([Mod, "_", Id])).
%% Stats for the calling process.
-spec(proc_stats() -> list()).
proc_stats() ->
    proc_stats(self()).

%% Selected process_info/2 stats for Pid, with message_queue_len renamed to
%% mailbox_len. Returns [] when the process is dead (process_info ->
%% undefined).
-spec(proc_stats(pid()) -> list()).
proc_stats(Pid) ->
    Keys = [message_queue_len, heap_size, total_heap_size, reductions, memory],
    case process_info(Pid, Keys) of
        undefined ->
            [];
        [{message_queue_len, Len} | Rest] ->
            [{mailbox_len, Len} | Rest]
    end.
%% @doc Drain all pending {deliver, Topic, Msg} messages from the caller's
%% mailbox without blocking, preserving arrival order. Other messages are
%% left in place (selective receive).
drain_deliver() ->
    drain_deliver([]).

drain_deliver(Acc) ->
    receive
        {deliver, _Topic, _Msg} = Deliver ->
            drain_deliver([Deliver | Acc])
    after 0 ->
        lists:reverse(Acc)
    end.
%% @doc Collect up to Cnt pending 'DOWN' monitor messages from the mailbox
%% without blocking, returning the downed pids in arrival order. Fewer than
%% Cnt pids are returned when the mailbox runs dry.
-spec(drain_down(pos_integer()) -> list(pid())).
drain_down(Cnt) when Cnt > 0 ->
    lists:reverse(collect_down(Cnt, [])).

collect_down(0, Acc) ->
    Acc;
collect_down(Remaining, Acc) ->
    receive
        {'DOWN', _MRef, process, Pid, _Reason} ->
            collect_down(Remaining - 1, [Pid | Acc])
    after 0 ->
        Acc
    end.
%% lists:index_of/2
%% 1-based position of the first occurrence of E in L; raises badarg when
%% E is not a member.
index_of(E, L) ->
    locate(E, L, 1).

locate(E, [E | _Rest], At) ->
    At;
locate(E, [_Other | Rest], At) ->
    locate(E, Rest, At + 1);
locate(_E, [], _At) ->
    error(badarg).
-module(common_eunit).
-export([test/1,
test/2,
test/3,
test_generator/2]).
%% This is called Config in Common Test. It is the return type of the
%% init_per_* functions.
-type fixtures() :: [proplists:property()].
%% A representation of a test set that EUnit understands.
-type eu_test_rep() :: {{atom(), atom(), arity()}, fun()}
| {'setup', eu_setup(), eu_instant()}
| {'setup', eu_setup(), eu_cleanup(), eu_instant()}
| eu_control().
%% http://www.erlang.org/doc/apps/eunit/chapter.html#Fixtures
-type eu_setup() :: fun(() -> fixtures()).
-type eu_cleanup() :: fun((fixtures()) -> _).
%% EUnit test instantiator.
-type eu_instant() :: fun((fixtures()) -> eu_test_rep() | [eu_test_rep()]).
%% http://www.erlang.org/doc/apps/eunit/chapter.html#Control
-type eu_control() :: {atom(), eu_test_rep()}
| {atom(), term(), eu_test_rep()}.
%% This is an input type for us, which we transform into eu_control().
-type prop() :: atom() % E.g. 'parallel'
| {atom(), term()}. % E.g. {timetrap, {seconds, 30}}
%% Test case identification.
-type case_id() :: atom() % Name of a test function
| {group, atom()}. % Name of a test group
-type level() :: 'suite' | 'group' | 'testcase'.
%%%
%%% Interface
%%%
% Run every case returned by Module:all() (Common-Test-style suite).
-spec test(atom()) -> 'ok' | 'error'.
test(Module) ->
    test(Module, Module:all()).

% Run an explicit list of cases/groups with no EUnit options.
-spec test(atom(), [case_id()]) -> 'ok' | 'error'.
test(Module, Cases) ->
    test(Module, Cases, []).

% Run the cases through EUnit; Module may be given with or without the
% "_tests" suffix (see test_module_name/1).
test(Module, Cases, Options) ->
    eunit:test(test_generator(test_module_name(Module), Cases), Options).

% Build the full EUnit test representation for a suite: expand cases and
% groups into instantiators, then wrap with suite-level properties and the
% init_per_suite/end_per_suite fixtures (when exported).
-spec test_generator(atom(), [case_id()]) -> eu_test_rep().
test_generator(Module, Cases) ->
    Props = case erlang:function_exported(Module, suite, 0) of
        true -> Module:suite();
        false -> []
    end,
    Instant0 = expand_cases(Module, Cases),
    Instant1 = wrap(Module, Instant0, Props, suite, []),
    % Start with no fixtures, return the full test representation.
    Instant1([]).
%%%
%%% Implementation
%%%
% Names of the Common-Test-style init/end callbacks for each level.
-spec fixture_callbacks(level()) -> {atom(), atom()}.
fixture_callbacks(suite)    -> {init_per_suite, end_per_suite};
fixture_callbacks(group)    -> {init_per_group, end_per_group};
fixture_callbacks(testcase) -> {init_per_testcase, end_per_testcase}.

%% Valid test execution properties, in the order they should be applied.
%%
%% The first group is applied before the init_per_ call, the second group
%% between init_per_ and the underlying test cases.
%%
-spec props(level()) -> {[atom()], [atom()]}.
props(suite)    -> {[timetrap, node, spawn], [parallel, inorder, repeat]};
props(group)    -> {[node, spawn, timetrap], [parallel, inorder, repeat]};
props(testcase) -> {[node, spawn, timetrap], [parallel, inorder, repeat]}.
%% Add test properties and calls to the appropriate init_per_ and end_per_.
%%
% Layering (outermost first): pre-properties, setup fixture, post-properties,
% then the wrapped instantiator — so e.g. a suite timetrap also covers its
% init_per_suite, while 'parallel' only affects the cases inside it.
-spec wrap(atom(), eu_instant(), [prop()], level(), list())-> eu_instant().
wrap(Module, Instant0, Props, Level, Args) ->
    {InitPer, EndPer} = fixture_callbacks(Level),
    {PreProps, PostProps} = partition_props(Props, Level),
    Instant1 = add_props(PostProps, Instant0),
    Instant2 = add_setup(Module, Instant1, InitPer, EndPer, Args),
    add_props(PreProps, Instant2).

%% Recurse down through groups and test cases and return fully wrapped
%% instantiators.
%%
-spec expand_cases(atom(), [case_id()]) -> eu_instant().
expand_cases(Module, Cases) ->
    Instants = [ expand_case(Module, Case) || Case <- Cases ],
    fun(Fixtures) -> [ F(Fixtures) || F <- Instants ] end.

% A bare atom is a test function (its zero-arity namesake, if exported,
% supplies per-case properties); {group, Name} recurses into the group.
-spec expand_case(atom(), case_id()) -> eu_instant().
expand_case(Module, Case) when is_atom(Case) ->
    Props = case erlang:function_exported(Module, Case, 0) of
        true -> Module:Case();
        false -> []
    end,
    Instant = fun(Fixtures) ->
        % There are several ways to represent a test case to EUnit. We use:
        %     {{M, F, A}, Fun/0}
        % It has the advantage that it lets EUnit give good feedback about
        % exactly which test function is being executed.
        %
        % EUnit applies a rather short default timeout to all tests.
        % You can override it with a *shorter* timeout on a higher level,
        % but if you want a *longer* timeout you must do it directly around
        % the actual test representation (where we are now). So we'll set
        % a really long timeout here to give us more flexibility.
        {timeout, 3600,
            {{Module, Case, 1}, fun() -> apply(Module, Case, [Fixtures]) end}}
    end,
    wrap(Module, Instant, Props, testcase, [Case]);
expand_case(Module, {group, Group}) ->
    {Props, Cases} = group_specification(Module, Group),
    Instant = expand_cases(Module, Cases),
    wrap(Module, Instant, Props, group, [Group]).

%% See props/1 and http://www.erlang.org/doc/apps/eunit/chapter.html#Control
%%
% Wrap an instantiator in one EUnit control tuple per property; recursion on
% the tail first means the list's head becomes the outermost wrapper.
-spec add_props([prop()], eu_instant()) -> eu_instant().
add_props([Prop|T], Instant0) ->
    Instant1 = add_props(T, Instant0),
    case Prop of
        parallel ->
            fun(Fixtures) ->
                {inparallel, Instant1(Fixtures)}
            end;
        {parallel, N} ->
            fun(Fixtures) ->
                {inparallel, N, Instant1(Fixtures)}
            end;
        inorder ->
            fun(Fixtures) ->
                {inorder, Instant1(Fixtures)}
            end;
        {timetrap, Time} ->
            % Common Test time units -> EUnit's seconds. A bare number is
            % taken as milliseconds (the division may yield a float).
            Seconds = case Time of
                {hours, Hs} -> Hs * 60 * 60;
                {minutes, Ms} -> Ms * 60;
                {seconds, Ss} -> Ss;
                MSs -> MSs / 1000
            end,
            fun(Fixtures) ->
                {timeout, Seconds, Instant1(Fixtures)}
            end;
        {repeat, N} ->
            fun(Fixtures) ->
                [ Instant1(Fixtures) || _ <- lists:seq(1, N) ]
            end;
        {node, Name} ->
            start_node_wrapper(Name, Instant1);
        {spawn, Name} ->
            spawn_on_node_wrapper(Name, Instant1)
    end;
add_props([], Instant) ->
    Instant.

%% Partition a list of execution properties into two groups to be applied
%% before and after the appropriate init_per_ callback. Also orders the
%% individual properties correctly.
%%
% Any property not valid for this level aborts with {illegal_property, Key}.
-spec partition_props([prop()], level()) -> {[prop()], [prop()]}.
partition_props(Props, Level) ->
    {PreKeys, PostKeys} = props(Level),
    Unfolded = proplists:unfold(Props),
    Pre = filter_props(PreKeys, Unfolded),
    Post = filter_props(PostKeys, Unfolded),
    case Unfolded -- (Pre ++ Post) of
        [] ->
            {proplists:compact(Pre), proplists:compact(Post)};
        [{Illegal, _}|_] ->
            exit({illegal_property, Illegal})
    end.
%% Select the first {Key, _} pair from Props for each requested key, in
%% key order; keys without a match are skipped.
-spec filter_props([atom()], [{atom(), term()}]) -> [{atom(), term()}].
filter_props(Keys, Props) ->
    lists:filtermap(
      fun(Key) ->
          case lists:keyfind(Key, 1, Props) of
              false -> false;
              Found -> {true, Found}
          end
      end, Keys).
%% Wrap an instantiator in an EUnit setup tuple with callbacks to init_per_ and
%% end_per_ functions.
%% See http://www.erlang.org/doc/apps/eunit/chapter.html#Fixtures
%%
% Only wraps when Module exports InitPer with the right arity; EndPer is
% optional (3- vs 4-element setup tuple). The init callback's return value
% becomes the fixtures passed to the inner instantiator and to EndPer.
-spec add_setup(atom(), eu_instant(), atom(), atom(), list()) -> eu_instant().
add_setup(Module, Instant, InitPer, EndPer, Args) ->
    Arity = length(Args) + 1,
    case erlang:function_exported(Module, InitPer, Arity) of
        true ->
            case erlang:function_exported(Module, EndPer, Arity) of
                true ->
                    fun(Fixtures0) ->
                        {setup,
                            fun() ->
                                apply(Module, InitPer, Args ++ [Fixtures0])
                            end,
                            fun(Fixtures1) ->
                                apply(Module, EndPer, Args ++ [Fixtures1])
                            end,
                            Instant}
                    end;
                false ->
                    fun(Fixtures) ->
                        {setup,
                            fun() ->
                                apply(Module, InitPer, Args ++ [Fixtures])
                            end,
                            Instant}
                    end
            end;
        false ->
            Instant
    end.

%% Call Module:groups() and extract the specification for the given group.
%%
%% Return both a list of any group specific test properties as well as a
%% list of all the group's members (test cases and other groups).
%%
-spec group_specification(atom(), atom()) -> {[prop()], [case_id()]}.
group_specification(Module, Group) ->
    case lists:keyfind(Group, 1, Module:groups()) of
        {_, Props, Cases} when is_list(Props), is_list(Cases) ->
            {Props, Cases};
        {_, Cases} when is_list(Cases) ->
            {[], Cases};
        false ->
            exit({missing_group, Group});
        _ ->
            exit({bad_group_spec, Group})
    end.
% Setup fixture that boots a slave node named Name on the local host,
% mirrors the code path, preloads this module there, and records the node
% under Name in the fixtures; the teardown stops the node.
% NOTE(review): the 'slave' module is deprecated in modern OTP (replaced by
% 'peer') — consider migrating when the minimum OTP version allows.
-spec start_node_wrapper(atom(), eu_instant()) -> eu_instant().
start_node_wrapper(Name, Instant) ->
    % Assumes the current node is distributed ("name@host"); the match
    % crashes otherwise.
    [_, Host] = string:tokens(atom_to_list(node()), "@"),
    fun(Fixtures0) ->
        {setup,
            fun() ->
                {ok, Node} = slave:start_link(Host, Name),
                true = rpc:call(Node, code, set_path, [code:get_path()]),
                {module, _} = rpc:call(Node, code, load_file, [?MODULE]),
                [{Name, Node}|Fixtures0]
            end,
            fun(Fixtures1) ->
                Node = proplists:get_value(Name, Fixtures1),
                ok = slave:stop(Node)
            end,
            Instant}
    end.
%% Wrap an instantiator so its tests run ({spawn, Node, Tests}) on the node
%% previously registered under Name in the fixtures (see
%% start_node_wrapper/2).
-spec spawn_on_node_wrapper(atom(), eu_instant()) -> eu_instant().
spawn_on_node_wrapper(Name, Instant) ->
    fun(Fixtures) ->
        {spawn, proplists:get_value(Name, Fixtures), Instant(Fixtures)}
    end.
%% Add a "_tests" suffix to the module name unless it already has one,
%% so callers may pass either 'foo' or 'foo_tests'.
-spec test_module_name(atom()) -> atom().
test_module_name(Module) ->
    Name = atom_to_list(Module),
    case lists:suffix("_tests", Name) of
        true  -> Module;
        false -> list_to_atom(Name ++ "_tests")
    end.
%%%------------------------------------------------------------------------
%% Copyright 2017, OpenCensus Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc oc_transform provides a parse transform for wrapping a function in
%% a start and a finish of a span.
%% @end
%%%-----------------------------------------------------------------------
-module(oc_transform).
-export([parse_transform/2,
format_error/1]).
%% Entry point for the parse transform.  Walks every form of the module
%% AST, threading a {SpanPending, Module, SpanArgs} state through
%% form/2.  Forms that the transform consumed (the -span attributes,
%% replaced by the atom 'nil') are filtered out afterwards, and the
%% result is flattened in case any step produced a nested form list.
%% NOTE(review): nothing in this module visibly throws, so the
%% throw-clause appears defensive -- confirm before removing it.
parse_transform(Ast, _Options)->
    try lists:mapfoldl(fun form/2, {false, [], []}, Ast) of
        {Ast1, _} ->
            lists:flatten(lists:filter(fun(Node) -> Node =/= nil end, Ast1))
    catch
        throw:E ->
            E
    end.
%% Per-form step for mapfoldl.  State is {SpanPending, Module, SpanArgs}:
%% - the -module attribute records the module name;
%% - a -span attribute arms the state and is deleted from the AST by
%%   returning 'nil' in its place;
%% - the first function form after an armed -span is wrapped by trace/3
%%   and the pending flag is cleared;
%% - every other form passes through unchanged.
form(Node={attribute, _Line, module, Module}, _) ->
    {Node, {false, Module, []}};
form({attribute, _Line, span, Args}, {_, Module, _}) ->
    {nil, {true, Module, Args}};
form(Node={function, _Line, _FuncName, _Arity, _Clauses}, {false, Module, []}) ->
    {Node, {false, Module, []}};
form(Node={function, _Line, _FuncName, _Arity, _Clauses}, {true, Module, Args}) ->
    {trace(Node, Module, Args), {false, Module, []}};
form(Node, Trace) ->
    {Node, Trace}.
%% Wrap every clause of a function form in span start/finish calls.
%% The span name defaults to "Module:Function/Arity" (as an iolist)
%% unless a 'name' entry was supplied in the -span attribute arguments.
%% Bad attribute arguments are reported by returning an error marker
%% form {error, {Line, Module, Reason}}, which the compiler renders via
%% format_error/1.
trace({function, Line, Name, Arity, Clauses}, Module, Args) ->
    case args_proplist(Args) of
        {error, Reason} ->
            {error, {Line, ?MODULE, Reason}};
        ArgsPropList ->
            SpanName = proplists:get_value(name, ArgsPropList,
                                           io_lib:format("~s:~s/~w", [Module, Name, Arity])),
            Clauses1 = trace_clauses(Clauses, SpanName),
            {function, Line, Name, Arity, Clauses1}
    end.
%% Rewrite each clause body B into (in abstract-format terms):
%%
%%     CurrentSpanN = ocp:current_span_ctx(),
%%     ocp:with_child_span(<<"Name">>),
%%     try B after ocp:finish_span(), ocp:with_span_ctx(CurrentSpanN) end
%%
%% so the original body runs inside a child span and the previous span
%% context is restored even when the body raises.
trace_clauses([], _) ->
    [];
trace_clauses([{clause, Line, H, G, B} | Cs], Name) ->
    %% Line-derived variable name to avoid clashing with user variables.
    CurrentSpan = make_varname("CurrentSpan", Line),
    StartSpan = [{match, Line, {var, Line, CurrentSpan},
                  {call, Line,
                   {remote, Line, {atom, Line, ocp}, {atom, Line, current_span_ctx}},
                   []}},
                 {call, Line,
                  {remote, Line, {atom, Line, ocp}, {atom, Line, with_child_span}},
                  [{string, Line, to_binary(Name)}]}],
    FinishSpan = [{call, Line,
                   {remote, Line, {atom, Line, ocp}, {atom, Line, finish_span}},
                   []},
                  {call, Line,
                   {remote, Line, {atom, Line, ocp}, {atom, Line, with_span_ctx}},
                   [{var, Line, CurrentSpan}]}],
    %% {'try', Line, Body, CaseClauses, CatchClauses, AfterBody}
    Trace = StartSpan ++ [{'try', Line, B, [], [], FinishSpan}],
    [{clause, Line, H, G, Trace} | trace_clauses(Cs, Name)].
%% Build a variable name unlikely to collide with user code by appending
%% the module name and source line number to Prefix.
%% NOTE(review): this reads 'module' from the process dictionary, but no
%% put(module, ...) is visible anywhere in this module; get/1 would then
%% return 'undefined' and every name gets an "...undefined<Line>"
%% suffix.  Still unique per line, but confirm whether a put was lost.
make_varname(Prefix, Line) ->
    list_to_atom(Prefix ++ atom_to_list(get(module)) ++ integer_to_list(Line)).
%% Coerce a span name (binary, character list, or atom) to a UTF-8 binary.
to_binary(Bin) when is_binary(Bin) -> Bin;
to_binary(Str) when is_list(Str) -> list_to_binary(Str);
to_binary(Atom) when is_atom(Atom) -> atom_to_binary(Atom, utf8).
%% Normalize -span attribute arguments into a proplist.  A bare binary
%% or atom is treated as the span name; a non-empty list is first tried
%% as a character string, falling back to using it as a proplist.
args_proplist(A) when is_binary(A) ->
    [{name, A}];
args_proplist(A) when is_atom(A) ->
    [{name, A}];
args_proplist([]) ->
    [];
args_proplist(A) when is_list(A) ->
    try unicode:characters_to_nfc_binary(A) of
        B ->
            [{name, B}]
    catch
        %% must not be a string, use as a proplist
        %% NOTE(review): this relies on characters_to_nfc_binary raising
        %% error:function_clause for proplists; verify it does not
        %% instead return an {error, ...} tuple for some inputs, which
        %% would be wrongly accepted as a name by the try-of above.
        error:function_clause ->
            A
    end;
args_proplist(A) ->
    {error, {bad_trace_args, A}}.
%% Compiler callback: render the error terms produced by trace/3 and
%% args_proplist/1 as human-readable diagnostics.
format_error({bad_trace_args, Args}) ->
    io_lib:format(
      "Bad trace arguments. Must be binary, atom, list string or proplist. Got: ~p",
      [Args]);
format_error({bad_name, Args}) ->
    io_lib:format(
      "Bad span name. Name must be an atom, binary or printable list. Got: ~p",
      [Args]).
%% @doc This module contains code for the Pi to which the sensor is connected
%% (the sender) and the base station.
%% The idea is to start execution on the base station, which will spawn the
%% sender process on the Pi before getting ready to receive its messages.
%%
%% @type measurement() = {Time::integer(), {Temperature::float(),
%% Humidity::float()} | failure}.
%% A temperature and humidity measurement with a seconds timestamp in UTC.
-module(weather).
-export([sender/2, start/2, server/0]).
-import(util, [get_average_measurement/3, format_time/1, format_csv_line/1]).
%% @spec start(SenderNode::node(), Pin::string()) -> no_return()
%% @doc Spawns the sender process on the sender node and starts receiving.
start(SenderNode, Pin) ->
    % Make sure unicode works even in -noshell mode
    io:setopts(standard_io, [{encoding, unicode}]),
    % Start and register server process (used by the ESI handlers)
    register(weatherserver, spawn(?MODULE, server, [])),
    % Start inets httpd server for mod_esi on port 8081
    inets:start(),
    inets:start(
      httpd, [{port, 8081}, {server_name, "weather"}, {document_root, "."},
              {modules, [mod_esi]}, {server_root, "."},
              {erl_script_alias, {"/weather", [esi]}}]
     ),
    % Spawn sender process on the node with the sensor (Raspberry Pi).
    % NOTE(review): spawned unlinked and unmonitored, so a sender crash
    % on the remote node goes unnoticed here.
    spawn(SenderNode, ?MODULE, sender, [self(), Pin]),
    % Start receiving messages from sender (never returns)
    receiver().
%% @spec sender(Receiver::pid(), Pin::string()) -> no_return()
%% @doc Reads data from sensor in some interval and sends the measurements
%% to the receiver process.
%% Start the send loop with an empty retry queue.
sender(ReceiverPid, SensorPin) ->
    sender(ReceiverPid, SensorPin, []).
%% @spec sender(Receiver::pid(), Pin::string(), Queue::[measurement()]) ->
%% no_return()
%% @doc Reads data from sensor in some interval and sends the measurements
%% to the receiver process. In case the receiver can't be reached, the
%% measurements are kept in the queue to be sent later. The queue is in reverse
%% chronological order, because new measurements are appended to the head to
%% prevent having to iterate over a possibly very long list every time.
sender(Receiver, Pin, Queue) ->
    % Check if previous send has been acknowledged; wait up to 1s for it
    receive
        ack ->
            % If so, empty the queue
            CurrentQueue = []
    after
        1000 ->
            % If not, keep the queue to send its contents again.
            % NOTE(review): the queue grows without bound while the
            % receiver stays unreachable.
            CurrentQueue = Queue
    end,
    % Take measurement and add it to the head of the queue
    % (args 5/60000 presumably mean 5 samples over a 60s window --
    % confirm against util:get_average_measurement/3)
    NewQueue = [get_average_measurement(Pin, 5, 60000) | CurrentQueue],
    % Send the whole (newest-first) queue to the receiver
    Receiver ! {self(), NewQueue},
    sender(Receiver, Pin, NewQueue).
%% @spec receiver() -> no_return()
%% @doc Receives messages and logs the data to file.
%% Entry point: no measurement has been logged yet, so timestamp 0 acts
%% as the "everything is newer than this" sentinel.
receiver() ->
    receiver(0).
%% @spec receiver(LastTime::integer()) -> no_return()
%% @doc Receives messages and logs the data to file. `LastTime' is the
%% timestamp of the last measurement that has been received and is used
%% to ensure that no duplicates are logged, even if there has been duplication
%% due to network issues.
%% Wait for a measurement batch from the sender, log anything new,
%% acknowledge, and loop with the timestamp of the newest entry.
receiver(LastTime) ->
    receive
        {Sender, Measurements} ->
            %% Oldest-first for logging; entries at or before LastTime
            %% are dropped inside treat_measurements/2.
            Lines = treat_measurements(lists:reverse(Measurements), LastTime),
            file:write_file("history.csv", Lines, [append]),
            %% The queue is newest-first, so its head carries the most
            %% recent timestamp.
            {NewLast, _} = lists:nth(1, Measurements),
            %% Tell the sender it may drop its retry queue.
            Sender ! ack,
            receiver(NewLast)
    end.
%% @spec treat_measurements(Measurements::[measurement()],
%% LastTime::integer) -> string()
%% @doc Iterates over the measurements, shows them in stdout, sends them to
%% the server process and returns formatted lines for the CSV file.
%% `LastTime' is used to make sure that no duplicates are logged, even if
%% there has been duplication due to network issues.
treat_measurements([], _) ->
    [];
treat_measurements([{SecondsUTC, failure} | R], LastTime)
  when SecondsUTC > LastTime ->
    io:format("~s: Failure~n", [format_time(SecondsUTC)]),
    % Don't add failure to CSV, continue with next measurement
    treat_measurements(R, LastTime);
treat_measurements([{SecondsUTC, {Temp, Hum}} | R], LastTime)
  when SecondsUTC > LastTime ->
    % Send to server.
    % NOTE(review): this raises badarg if 'weatherserver' is no longer
    % registered (e.g. the server process died).
    weatherserver ! {SecondsUTC, {Temp, Hum}},
    % Print data to output nicely (\x{b0} is the degree sign)
    io:format("~s: ~p \x{b0}C, ~p%~n", [format_time(SecondsUTC), Temp, Hum]),
    % Format the data for CSV; the result is iodata suitable for
    % file:write_file/3
    Line = format_csv_line({SecondsUTC, {Temp, Hum}}),
    % Continue with next measurement
    [Line | treat_measurements(R, LastTime)];
treat_measurements([_ | R], LastTime) ->
    % This happens when the measurement is a duplicate, meaning that
    % its time is less than the time of the latest measurement and has
    % therefore been logged already. Continue with the next one.
    treat_measurements(R, LastTime).
%% @spec server() -> no_return()
%% @doc Starts the weatherserver, which will be used by the ESI functions
%% to get the data that the client requested. On startup, it will attempt to
%% read any existing measurements from file, so it has the complete history.
%% Boot the server seeded with any measurements already on disk so a
%% restart does not lose previously-logged history.
server() ->
    Seed = util:read_history("history.csv"),
    server(Seed).
%% @spec server(Measurements::[measurement()]) -> no_return()
%% @doc The weatherserver, which will be used by the ESI functions
%% to get the data that the client requested.
%% Whenever a new measurement is received, it will get a message containing it
%% to add it to its own list of measurements. The measurements list is in
%% reverse chronological order to quickly add new items to the head. This is
%% also better for looking up measurements, because the recent data is most
%% often requested.
%% Main server loop; Measurements is kept newest-first.
server(Measurements) ->
    receive
        {latest, Pid} ->
            %% Head of the list is the most recent measurement.
            Pid ! lists:nth(1, Measurements),
            server(Measurements);
        {history, SecondsUTC, Pid} ->
            %% Everything strictly newer than the requested timestamp.
            Recent = lists:filter(fun({T, _}) -> T > SecondsUTC end, Measurements),
            Pid ! Recent,
            server(Measurements);
        Measurement ->
            %% Anything else is a fresh measurement from the receiver.
            server([Measurement | Measurements])
    end.
%%%-------------------------------------------------------------------
%%% File: principe_table.erl
%%% @author <NAME> <<EMAIL>>
%%% @copyright Copyright (c) 2009, <NAME>. All Rights Reserved.
%%%
%%% @doc
%%% An extension to the principe module that handles tables. See the
%%% principe module docs for a note about Tyrant and server byte-order
%%% issues. When using tyrant in table mode this matters far less than
%%% it does for Tyrant in other modes; in most cases Tyrant will encode
%%% table column values internally as strings. The only place that this
%%% matters is using addint or adddouble in conjunction with a row in
%%% which you manually added a magic "_num" column. For this case you
%%% will need to do a bit of magic on your own to properly encode the
%%% float or int using the put() function. See the @see principe module
%%% for examples (use the "bigendian" property from a stat() call to
%%% figure out what your server expects.)
%%% @end
%%%-------------------------------------------------------------------
-module(principe_table).
-export([connect/0, connect/1, put/3, putkeep/3, putcat/3, update/3, out/2,
get/2, mget/2, vsiz/2, iterinit/1, iternext/1, fwmkeys/3, sync/1, optimize/2,
vanish/1, rnum/1, size/1, stat/1, copy/2, restore/3, addint/3, adddouble/3,
adddouble/4, setmst/3, setindex/3, query_limit/3, query_limit/4, query_add_condition/5,
query_order/4, search/2, genuid/1, searchcount/2, searchout/2]).
-include("principe.hrl").
%%====================================================================
%% The Tokyo Tyrant access functions
%%====================================================================
%% @spec connect() -> {ok, port()} | error()
%%
%% @doc
%% Establish a connection to the tyrant service.
%% @end
%% Uses the module defaults for host/port/options (see connect/1).
connect() ->
    connect([]).
%% @spec connect(ConnectProps::proplist()) -> {ok, port()} | error()
%%
%% @doc
%% Establish a connection to the tyrant service using properties in the
%% ConnectProps proplist to determine the hostname, port number and tcp
%% socket options for the connection. Any missing parameters are filled
%% in using the module defaults.
%% @end
connect(ConnectProps) ->
    {ok, Socket} = principe:connect(ConnectProps),
    % make sure we are connected to a tyrant server in table mode
    case proplists:get_value(type, principe:stat(Socket)) of
        "table" ->
            {ok, Socket};
        _ ->
            %% NOTE(review): the socket stays open on this branch --
            %% consider closing it before returning the error.
            {error, no_table_server}
    end.
%%====================================================================
%% Standard tyrant functions (straight pass-through to principe.erl)
%%====================================================================
%% @spec mget(Socket::port(),
%% KeyList::keylist()) -> [{Key::binary(), Value::proplist()}] | error()
%%
%% @doc
%% Get the values for a list of keys. Due to the way that columns are returned
%% via the tyrant protocol a null seperator is used to break
mget(Socket, KeyList) ->
    case principe:mget(Socket, KeyList) of
        {error, Reason} ->
            {error, Reason};
        MgetResults ->
            %% Each value is a null-separated column-name/value blob;
            %% unpack it into a {Name, Value} proplist per key.
            lists:keymap(fun(BinaryToSplit) ->
                                 columnize_values(binary_to_list(BinaryToSplit), [], [])
                         end, 2, MgetResults)
    end.
%% @spec columnize_values(ColumnValues::list(),
%%                        Current::list(),
%%                        Stack::[binary()]) -> [{ColumnName::binary(), ColumnValue::binary()}]
%%
%% @private
%% Split a null-separated byte list (as returned by tyrant) into a
%% proplist of column names and values.  A column name containing a
%% null byte cannot be represented and will corrupt the result.
%% @end
columnize_values([], Current, Stack) ->
    Columns = lists:reverse([finish_column(Current) | Stack]),
    return_column_vals(Columns, []);
columnize_values([0 | Rest], Current, Stack) ->
    %% Null terminates the field being accumulated (possibly empty).
    columnize_values(Rest, [], [finish_column(Current) | Stack]);
columnize_values([Byte | Rest], Current, Stack) ->
    columnize_values(Rest, [Byte | Current], Stack).

%% Current is accumulated in reverse; restore order and binarize.
finish_column(ReversedBytes) ->
    list_to_binary(lists:reverse(ReversedBytes)).

%% @spec return_column_vals(ValuesToParse::list(),
%%                          FinalResult::proplist()) -> proplist()
%%
%% @private Pair up an even-length [Name, Value, ...] list.  The pairs
%% come out in reverse order relative to the input (accumulator build).
return_column_vals([], Cols) ->
    Cols;
return_column_vals([Name, Value | Rest], Cols) ->
    return_column_vals(Rest, [{Name, Value} | Cols]).
%% @spec addint(Socket::port(),
%% Key::key(),
%% Int::integer()) -> integer() | error()
%%
%% @doc Add an integer value to the _num column of a given a key. The
%% _num column will be created if it does not already exist.
%%
%% NOTE: Something truly wierd about this _num column setup is that tc/tyrant
%% expects the column to be a string() value internally. I am assuming this
%% is because for table databases a null is used as a column separator, if the
%% _num value was stored as an integer then differing server byte order (which TC
%% suffers from) would confuse the server. If you put() an integer() value to
%% the _num column it will get overwritten by an addint() call, but if you write
%% a integer_to_list(integer()) value to the num column via a normal put() call
%% things will work correctly.
%% @end
addint(Sock, Key, Int) ->
    principe:addint(Sock, Key, Int).
%% @spec adddouble(Socket::port(),
%% Key::key(),
%% Double::float()) -> {Integral::integer(), Fractional::integer()} | error()
%%
%% @doc Add an float value to the _num column of a given a key. The
%% _num column will be created if it does not already exist.
%% @end
adddouble(Sock, Key, Double) ->
    principe:adddouble(Sock, Key, Double).
%% @spec adddouble(Socket::port(),
%% Key::key(),
%% Integral::integer(),
%% Fractional::integer()) -> {Integral::integer(), Fractional::integer()} | error()
%%
%% @doc The raw adddouble function for those who need a bit more control on float adds.
adddouble(Sock, Key, IntPart, FracPart) ->
    principe:adddouble(Sock, Key, IntPart, FracPart).
%% @spec iterinit(Socket::port()) -> ok | error()
%%
%% @doc Start iteration protocol. WARNING: The tyrant iteration protocol has no
%% concurrency controls whatsoever, so if multiple clients try to do iteration
%% they will stomp all over each other!
%% @end
iterinit(Sock) ->
    principe:iterinit(Sock).
%% @spec iternext(Socket::port()) -> {Key::binary(), Value::binary()} | error()
%%
%% @doc Get the next key/value pair in the iteration protocol.
iternext(Sock) ->
    principe:iternext(Sock).
%% @spec fwmkeys(Socket::port(),
%% Prefix::iolist(),
%% MaxKeys::integer()) -> [binary()]
%%
%% @doc Return a number of keys that match a given prefix.
fwmkeys(Sock, Prefix, MaxKeys) ->
    principe:fwmkeys(Sock, Prefix, MaxKeys).
%% @spec vsiz(Socket::port(),
%% Key::key()) -> integer()
%%
%% @doc
%% Get the size of the value for a given key. The value returned for
%% a key will be the total of the column size values, and each column
%% size will be the size of the column name (in bytes), the size of the
%% column value (in bytes), plus one for the internal null seperator
%% between column name and value plus one for the null terminator for
%% the column (i.e. length(ColumnName) + length(ColumnValue) + 2 for each
%% column.)
%% @end
vsiz(Sock, Key) ->
    principe:vsiz(Sock, Key).
%% @spec sync(Socket::port()) -> ok | error()
%%
%% @doc Call sync() on the remote database.
sync(Sock) ->
    principe:sync(Sock).
%% @spec vanish(Socket::port()) -> ok | error()
%%
%% @doc Remove all records from the remote database.
vanish(Sock) ->
    principe:vanish(Sock).
%% @spec optimize(Socket::port(),
%% Params::list()) -> ok | error()
%%
%% @doc Change the remote database tuning parameters
optimize(Sock, TuningParams) ->
    principe:optimize(Sock, TuningParams).
%% @spec rnum(Socket::port()) -> integer() | error()
%%
%% @doc Get the number of records in the remote database.
rnum(Socket) ->
    principe:rnum(Socket).
%% @spec size(Socket::port()) -> integer() | error()
%%
%% @doc Get the size in bytes of the remote database.
size(Sock) ->
    principe:size(Sock).
%% @spec stat(Socket::port()) -> proplist() | error()
%%
%% @doc Get the status string of a remote database.
stat(Sock) ->
    principe:stat(Sock).
%% @spec copy(Socket::port(),
%% iolist()) -> ok | error()
%%
%% @doc Make a copy of the database file of the remote database.
copy(Sock, PathName) ->
    principe:copy(Sock, PathName).
%% @spec restore(Socket::port(),
%% PathName::iolist(),
%% TimeStamp::integer) -> ok | error()
%%
%% @doc Restore the database to a particular point in time from the update log.
restore(Sock, PathName, TimeStamp) ->
    principe:restore(Sock, PathName, TimeStamp).
%% @spec setmst(Socket::port(),
%% HostName::iolist(),
%% Port::integer) -> ok | error()
%%
%% @doc Set the replication master of a remote database server.
setmst(Sock, HostName, Port) ->
    principe:setmst(Sock, HostName, Port).
%%====================================================================
%% Table functions
%%====================================================================
%% @spec put(Socket::port(),
%% Key::key(),
%% Cols::coldata()) -> [] | error()
%%
%% @doc
%% Call the Tyrant server to store a new set of column values for the given key.
%% @end
put(Socket, Key, Cols) ->
    %% Flatten the {Name, Value} proplist to [Key, Name1, Val1, ...]
    %% and issue a "put" through the ?TSimple request macro from
    %% principe.hrl (which presumably uses the Socket binding -- note
    %% Socket is never referenced explicitly here).
    Data = encode_table(Cols),
    ?TSimple(<<"put">>, [Key | Data]).
%% @spec putkeep(Socket::port(),
%% Key::key(),
%% Cols::coldata()) -> [] | error()
%%
%% @doc
%% Call the Tyrant server to add a set of column values for a given key. Will
%% return an error if Key is already in the remote database.
%% @end
putkeep(Socket, Key, Cols) ->
    %% Same wire format as put/3 but with the "putkeep" opcode, which
    %% the server rejects when Key already exists.
    Data = encode_table(Cols),
    ?TSimple(<<"putkeep">>, [Key | Data]).
%% @spec putcat(Socket::port(),
%% Key::key(),
%% Cols::coldata()) -> [] | error()
%%
%% @doc
%% Concatenate a set of column values to the existing value of Key (or
%% create a new entry for Key with the given column values if Key is not
%% in the remote database.) If any columns in Cols already have values
%% for the given key then the entries provided in the Cols parameter for
%% those specific columns will be ignored by the remote database. Use the
%% update() function to overwrite existing column values.
%% @end
putcat(Socket, Key, Cols) ->
    %% "putcat": existing column values win; only columns not yet
    %% present for Key are added (use update/3 to overwrite).
    Data = encode_table(Cols),
    ?TSimple(<<"putcat">>, [Key | Data]).
%% @spec update(Socket::port(),
%% Key::key(),
%% Cols::coldata()) -> [] | error()
%%
%% @doc
%% Update a table entry by merging Cols into existing data for given key. The
%% end result of this function should be to create a new entry for Key whose
%% column values are the new data from the Cols parameter as well as any previous
%% columns for Key that were not in the Cols proplist.
%% @end
%%
%% TODO: better way would be to use a lua server script to perform the merge?
update(Socket, Key, Cols) ->
    %% Read-modify-write: NOT atomic.  A concurrent writer to the same
    %% key between the get and the put below can be silently lost.
    case principe:misc(Socket, <<"get">>, [Key]) of
        {error, _Reason} ->
            %% Key absent (or fetch failed): write the new columns as-is.
            UpdatedProps = Cols;
        ExistingData ->
            OldProps = decode_table(ExistingData),
            %% Normalize list/atom column names to binaries so they
            %% compare equal to the binary names returned by the server;
            %% new values are prepended, so proplists:get_value/2 below
            %% prefers them over the old ones.
            NewProps = lists:foldl(fun({K, V}, AccIn) when is_list(K) ->
                                           [{list_to_binary(K), V} | AccIn];
                                      ({K, V}, AccIn) when is_atom(K) ->
                                           [{list_to_binary(atom_to_list(K)), V} | AccIn];
                                      (Other, AccIn) -> [Other | AccIn]
                                   end, OldProps, Cols),
            UpdatedProps = [{K, proplists:get_value(K, NewProps)} || K <- proplists:get_keys(NewProps)]
    end,
    Data = encode_table(UpdatedProps),
    ?TSimple(<<"put">>, [Key | Data]).
%% @spec out(Socket::port(),
%% Key::key()) -> ok | error()
%%
%% @doc
%% Remove a key from the remote database. Will return an error if Key is
%% not in the database.
%% @end
out(Socket, Key) ->
    %% Delete the whole record; the server errors if Key is absent.
    ?TSimple(<<"out">>, [Key]).
%% @spec get(Socket::port(),
%% Key::key()) -> proplist() | error()
%%
%% @doc Get the value for a given key. Table data is returned in a proplist of
%% {ColumnName, ColumnValue} tuples.
%% @end
get(Socket, Key) ->
    case ?TRaw(<<"get">>, [Key]) of
        {error, Reason} ->
            {error, Reason};
        RecList ->
            %% Raw reply alternates column names and values; pair them.
            decode_table(RecList)
    end.
%% @spec setindex(Socket::port(),
%% ColName::index_col(),
%% Type::index_type()) -> ok | error()
%%
%% @doc
%% Tell the tyrant server to build an index for a column. The ColName
%% should be either the atom "primary" (to index on the primary key) or a
%% iolist() that names the column to be indexed. Type should be an atom
%% selected from decimal (index column as decimal data), lexical (index as
%% character/string data) or void (remove an existing index for ColName).
%% @end
setindex(Socket, primary, Type) when is_atom(Type) ->
    %% The primary key is addressed by an empty (?NULL) column name.
    ?TSimple(<<"setindex">>, [?NULL, setindex_request_val(Type)]);
setindex(Socket, ColName, Type) when is_atom(Type) ->
    ?TSimple(<<"setindex">>, [ColName, setindex_request_val(Type)]).
%% @spec genuid(Socket::port()) -> binary() | error()
%%
%% @doc Generate a unique id within the set of primary keys
genuid(Socket) ->
    case ?TRaw(<<"genuid">>, []) of
        [NewId] ->
            NewId;
        Error ->
            %% Anything other than a single-element list passes through
            %% unchanged (e.g. an {error, Reason} tuple).
            Error
    end.
%% @spec query_add_condition(Query::proplist(),
%% ColName::iolist(),
%% Op::query_opcode(),
%% ExprList::query_expr()) -> proplist()
%%
%% @doc
%% Add a condition for a query. ExprList should be a list of one or more
%% values where each value is either a binary, string, or integer. Op can be
%% either an atom or a tuple of atoms describing the operation. If the first
%% atom in an Op tuple is "no" then the condition is a negation query and if
%% the last atom is no_index an existing index on the remote database server will
%% be bypassed.
%% @end
query_add_condition(_Sock, Query, ColName, Op, ExprList) when is_list(ExprList) ->
    %% Each query entry is {DescriptiveKey, WireRequest}; only the wire
    %% request part is extracted and sent by search/2 and friends.
    [{{add_cond, ColName, Op, ExprList},
      ["addcond",
       ?NULL,
       ColName,
       ?NULL,
       integer_to_list(add_condition_op_val(Op)),
       ?NULL,
       convert_query_exprlist(ExprList)]
     } | Query].
%% @spec query_limit(Query::proplist(),
%% Max::integer(),
%% Skip::integer()) -> proplist()
%%
%% @doc Set a limit on the number of returned values for Query, skip the first Skip records.
query_limit(_Sock, Query, Max, Skip) when is_integer(Max), Max > 0, is_integer(Skip), Skip >= 0 ->
    LimitKey = {set_limit, Max, Skip},
    LimitValue = ["setlimit",
                  ?NULL,
                  integer_to_list(Max),
                  ?NULL,
                  integer_to_list(Skip)],
    %% A query may carry at most one limit clause: replace any
    %% previously-set {set_limit, _, _} entry.
    case lists:keysearch(set_limit, 1, proplists:get_keys(Query)) of
        false ->
            [{LimitKey, LimitValue} | Query];
        {value, ExistingKey} ->
            [{LimitKey, LimitValue} | proplists:delete(ExistingKey, Query)]
    end.

%% @spec query_limit(Query::proplist(),
%%       Max::integer()) -> proplist()
%%
%% @doc Set a limit on the number of returned values for Query.
%%
%% XXX: should the missing skip be 0 or -1 (protocol ref and perl versions seem to disagree)
query_limit(Sock, Query, Max) ->
    %% Sock is genuinely used here (passed through), so it must not
    %% carry an underscore prefix.
    query_limit(Sock, Query, Max, 0).
%% @spec query_order(Query::proplist(),
%% ColName::index_col(),
%% Type::order_type()) -> proplist()
%%
%% @doc Set the order for returned values in Query.
query_order(_Sock, Query, primary, Type) when is_atom(Type) ->
    %% The primary key is addressed by an empty column name on the wire.
    store_order(Query, {set_order, primary, Type}, "", Type);
query_order(_Sock, Query, ColName, Type) when is_atom(Type) ->
    store_order(Query, {set_order, ColName, Type}, ColName, Type).

%% Build the "setorder" request entry and install it in the query,
%% replacing any previously-set {set_order, _, _} entry (a query can
%% carry at most one active ordering).  Shared by both query_order/4
%% clauses, which previously duplicated this body.
store_order(Query, OrderKey, WireCol, Type) ->
    OrderValue = ["setorder",
                  ?NULL,
                  WireCol,
                  ?NULL,
                  integer_to_list(order_request_val(Type))],
    case lists:keysearch(set_order, 1, proplists:get_keys(Query)) of
        false ->
            [{OrderKey, OrderValue} | Query];
        {value, ExistingKey} ->
            [{OrderKey, OrderValue} | proplists:delete(ExistingKey, Query)]
    end.
%% @spec search(Socket::port,
%% Query::proplist()) -> keylist() | error()
%%
%% @doc Run a prepared query against the table and return matching keys.
search(Socket, Query) ->
    %% The {_K, V} generator pattern already selects exactly the
    %% 2-tuples from Query, so the former is_tuple/erlang:size filters
    %% were redundant (and erlang:size/1 is discouraged anyway).
    ?TRaw(<<"search">>, [V || {_K, V} <- Query]).
%% @spec searchcount(Socket::port,
%% Query::proplist()) -> [integer()] | error()
%%
%% @doc Run a prepared query against the table and get the count of matching keys.
searchcount(Socket, Query) ->
    %% Append the "count" opcode; the generator pattern keeps only the
    %% 2-tuple request entries (redundant is_tuple/size filters removed).
    case ?TRaw(<<"search">>, [V || {_K, V} <- Query] ++ ["count"]) of
        {error, Reason} ->
            {error, Reason};
        [] ->
            0;
        [Count] ->
            list_to_integer(binary_to_list(Count))
    end.
%% @spec searchout(Socket::port,
%% Query::proplist()) -> ok | error()
%%
%% @doc Run a prepared query against the table and remove the matching records.
searchout(Socket, Query) ->
    %% Append the "out" opcode to delete every matching record; the
    %% generator pattern keeps only the 2-tuple request entries
    %% (redundant is_tuple/size filters removed).
    ?TSimple(<<"search">>, [V || {_K, V} <- Query] ++ ["out"]).
%% %% Run a prepared query against the table and get the matching records. Due
%% %% to protocol restraints, the returned result cannot include columns whose
%% %% name or value include the null (0x0) character.
%% tblsearchget(Socket, TblQuery) ->
%% void.
%% tblrescols(Socket, TblQuery) ->
%% void.
%%====================================================================
%% Table utility functions
%%====================================================================
%% @spec add_condition_op_val(query_op()) -> integer()
%%
%% @private Decode add_contition operation tag
%% Map a symbolic query operator onto the Tyrant wire opcode (macros
%% from principe.hrl).  The 'no' marker negates the condition and
%% 'no_index' bypasses any server-side index; both are flag bits OR-ed
%% onto the base opcode.  An unrecognised atom raises case_clause.
add_condition_op_val({no, Op}) when is_atom(Op) ->
    ?QCNEGATE bor add_condition_op_val(Op);
add_condition_op_val({Op, no_index}) when is_atom(Op) ->
    ?QCNOIDX bor add_condition_op_val(Op);
add_condition_op_val({no, Op, no_index}) when is_atom(Op)->
    ?QCNEGATE bor ?QCNOIDX bor add_condition_op_val(Op);
add_condition_op_val({Op}) when is_atom(Op) ->
    add_condition_op_val(Op);
add_condition_op_val(Op) when is_atom(Op) ->
    case Op of
        str_eq ->
            ?QCSTREQ;
        str_inc ->
            ?QCSTRINC;
        str_begin ->
            ?QCSTRBW;
        str_end ->
            ?QCSTREW;
        str_and ->
            ?QCSTRAND;
        str_or ->
            ?QCSTROR;
        str_in_list ->
            ?QCSTROREQ;
        str_regex ->
            ?QCSTRRX;
        num_eq ->
            ?QCNUMEQ;
        num_gt ->
            ?QCNUMGT;
        num_ge ->
            ?QCNUMGE;
        num_lt ->
            ?QCNUMLT;
        num_le ->
            ?QCNUMLE;
        num_between ->
            ?QCNUMBT;
        num_in_list ->
            ?QCNUMOREQ
    end.
%% @spec setindex_request_val(index_type()) -> integer()
%%
%% @private Decode set_index request tag
setindex_request_val(IndexType) ->
    case IndexType of
        lexical   -> ?ITLEXICAL;
        decimal   -> ?ITDECIMAL;
        optimized -> ?ITOPT;
        void      -> ?ITVOID
    end.
%% @spec order_request_val(order_type()) -> integer()
%%
%% @private Decode a result-order tag into its wire value;
%% an unknown atom raises case_clause.
order_request_val(OrderType) ->
    case OrderType of
        str_ascending  -> ?QOSTRASC;
        str_descending -> ?QOSTRDESC;
        num_ascending  -> ?QONUMASC;
        num_descending -> ?QONUMDESC
    end.
%% @spec convert_query_exprlist(query_expr()) -> [string()]
%%
%% @private
%% Convert a query expression list into a comma-separated list of string
%% values: integers become decimal strings, binaries become character
%% lists, and anything else passes through unchanged.
convert_query_exprlist(ExprList) ->
    convert_query_exprlist(ExprList, []).

convert_query_exprlist([], Acc) ->
    lists:reverse(Acc);
convert_query_exprlist([Expr | Rest], Acc) ->
    Str = if
        is_integer(Expr) -> integer_to_list(Expr);
        is_binary(Expr) -> binary_to_list(Expr);
        true -> Expr
    end,
    %% No separator before the very first element.
    case Acc of
        [] -> convert_query_exprlist(Rest, [Str]);
        _ -> convert_query_exprlist(Rest, [Str, "," | Acc])
    end.
%% @spec encode_table(proplist()) -> [value_or_num()]
%%
%% @private Flatten a proplist into an alternating [Key, Value, ...] list,
%% preserving the original pair order.
encode_table(Data) when is_list(Data) ->
    encode_table(Data, []).

encode_table([{Key, Val} | Rest], Acc) ->
    %% Prepend reversed (Val before Key) so the final reverse yields
    %% Key, Val pairs in order.
    encode_table(Rest, [Val, Key | Acc]);
encode_table([], Acc) ->
    lists:reverse(Acc).
%% @spec decode_table([value_or_num()]) -> proplist() | error()
%%
%% @private Convert an alternating [Key, Value, ...] list into a proplist.
%% An {error, Code} tuple (as returned by the wire layer) passes through.
decode_table({error, _} = Error) ->
    Error;
decode_table(Data) when is_list(Data) ->
    decode_table(Data, []).

decode_table([Key, Val | Rest], Acc) ->
    decode_table(Rest, [{Key, Val} | Acc]);
decode_table([], Acc) ->
    lists:reverse(Acc).
%% Some standard types for edoc
%%
%% @type endian() = big | little
%% @type key() = iolist()
%% @type value() = iolist()
%% @type value_or_num() = iolist() | integer() | float()
%% @type keylist() = [key()]
%% @type coldata() = [{key(), value_or_num()}]
%% @type error() = {error, term()}
%% @type index_col() = primary | iolist()
%% @type index_type() = lexical | decimal | void
%% @type query_opcode() = atom() | tuple()
%% @type query_expr() = [binary() | string() | integer()]
%% @type order_type() = str_ascending | str_descending | num_ascending | num_descending
%% EUnit tests
%%
-ifdef(EUNIT).

%% Connect to the table server under test and empty the table so each
%% test case starts from a clean state. Returns the open socket.
test_setup() ->
    {ok, Socket} = ?MODULE:connect(),
    ok = ?MODULE:vanish(Socket),
    Socket.

%% Like test_setup/0, but pre-populates the table with five well-known
%% records (rec1..rec5) that the query/search tests below rely on.
test_setup_with_data() ->
    Socket = test_setup(),
    ColData = [{"rec1", [{"name", "alice"}, {"sport", "baseball"}]},
               {"rec2", [{"name", "bob"}, {"sport", "basketball"}]},
               {"rec3", [{"name", "carol"}, {"age", "24"}]},
               {"rec4", [{"name", "trent"}, {"age", "33"}, {"sport", "football"}]},
               {"rec5", [{"name", "mallet"}, {"sport", "tennis"}, {"fruit", "apple"}]}
              ],
    lists:foreach(fun({Key, ValProplist}) ->
                          ok = ?MODULE:put(Socket, Key, ValProplist)
                  end, ColData),
    Socket.
%% Round-trip records through put/3 and get/2; also checks that an
%% integer column value comes back as a 32-bit binary.
put_get_test() ->
    Socket = test_setup_with_data(),
    ?assertMatch([{<<"age">>, <<"24">>}, {<<"name">>, <<"carol">>}], lists:sort(?MODULE:get(Socket, "rec3"))),
    ok = ?MODULE:put(Socket, <<"put_get1">>, [{"num", 32}]),
    % Note that by default integers go over to Tyrant in network byte-order
    ?assertMatch([{<<"num">>, <<32:32>>}], lists:sort(?MODULE:get(Socket, <<"put_get1">>))),
    ok.

%% putkeep/3 must only create a record when the key is absent: writing to
%% an existing key returns an error and leaves the stored value untouched.
putkeep_test() ->
    Socket = test_setup(),
    ok = ?MODULE:put(Socket, "putkeep1", [{"col1", "testval1"}]),
    ?assertMatch([{<<"col1">>, <<"testval1">>}], ?MODULE:get(Socket, "putkeep1")),
    ?assertMatch({error, _}, ?MODULE:putkeep(Socket, <<"putkeep1">>, [{"col1", "testval2"}])),
    ?assertMatch([{<<"col1">>, <<"testval1">>}], ?MODULE:get(Socket, "putkeep1")),
    ok = ?MODULE:putkeep(Socket, <<"putkeep2">>, [{"col1", "testval2"}]),
    ?assertMatch([{<<"col1">>, <<"testval2">>}], ?MODULE:get(Socket, "putkeep2")),
    ok.

%% putcat/3 appends new columns to an existing record.
putcat_test() ->
    Socket = test_setup_with_data(),
    ?assertMatch([{<<"age">>, <<"24">>}, {<<"name">>, <<"carol">>}],
                 lists:sort(?MODULE:get(Socket, "rec3"))),
    ok = ?MODULE:putcat(Socket, "rec3", [{"sport", "golf"}]),
    ?assertMatch([{<<"age">>, <<"24">>}, {<<"name">>, <<"carol">>}, {<<"sport">>, <<"golf">>}],
                 lists:sort(?MODULE:get(Socket, "rec3"))),
    ok.

%% update/3 merges new columns into an existing record: existing columns
%% are overwritten and new ones are added.
update_test() ->
    Socket = test_setup_with_data(),
    ?assertMatch([{<<"name">>, <<"alice">>}, {<<"sport">>, <<"baseball">>}], ?MODULE:get(Socket, "rec1")),
    ok = ?MODULE:update(Socket, "rec1", [{"sport", "swimming"}, {"pet", "dog"}]),
    ?assertMatch([{<<"name">>, <<"alice">>}, {<<"pet">>, <<"dog">>}, {<<"sport">>, <<"swimming">>}],
                 lists:sort(?MODULE:get(Socket, "rec1"))),
    ok.
%% out/2 removes a record; a subsequent get/2 must report an error.
out_test() ->
    Socket = test_setup_with_data(),
    ok = ?MODULE:out(Socket, <<"rec1">>),
    ?assertMatch({error, _}, ?MODULE:get(Socket, <<"rec1">>)),
    ok.

%% vsiz/2 reports the stored record size: each column contributes
%% name + value plus two null separator bytes.
vsiz_test() ->
    Socket = test_setup(),
    ColName = "col1",
    ColVal = "vsiz test",
    ok = ?MODULE:put(Socket, "vsiz1", [{ColName, ColVal}]),
    %% size = col + null sep + val + null column stop
    ExpectedLength = length(ColName) + length(ColVal) + 2,
    ?assert(?MODULE:vsiz(Socket, "vsiz1") =:= ExpectedLength),
    ColName2 = "another col",
    ColVal2 = "more bytes",
    ok = ?MODULE:put(Socket, "vsiz2", [{ColName, ColVal}, {ColName2, ColVal2}]),
    ExpectedLength2 = ExpectedLength + length(ColName2) + length(ColVal2) + 2,
    ?assert(?MODULE:vsiz(Socket, "vsiz2") =:= ExpectedLength2),
    ok.

%% vanish/1 empties the whole table.
vanish_test() ->
    Socket = test_setup(),
    ok = ?MODULE:put(Socket, "vanish1", [{"col1", "going away"}]),
    ok = ?MODULE:vanish(Socket),
    ?assertMatch({error, _}, ?MODULE:get(Socket, "vanish1")),
    ok.

%% addint/3 creates or increments the special "_num" column and returns
%% the new counter value.
addint_test() ->
    Socket = test_setup(),
    ?assert(?MODULE:addint(Socket, "addint1", 100) =:= 100),
    ok = ?MODULE:put(Socket, "addint2", [{"_num", "10"}]), % see ?MODULE:addint edoc for why a string() is used
    ?assert(?MODULE:addint(Socket, "addint2", 10) =:= 20),
    ?assertMatch([{<<"_num">>, <<"100">>}], ?MODULE:get(Socket, "addint1")),
    ?assertMatch([{<<"_num">>, <<"20">>}], ?MODULE:get(Socket, "addint2")),
    ok.

%% sync/1 flushes the server to disk; just checks it returns ok.
sync_test() ->
    Socket = test_setup(),
    ok = ?MODULE:sync(Socket),
    ok.

%% rnum/1 tracks the record count through deletes and a vanish.
rnum_test() ->
    Socket = test_setup_with_data(),
    ?assert(?MODULE:rnum(Socket) =:= 5),
    ok = ?MODULE:out(Socket, "rec1"),
    ?assert(?MODULE:rnum(Socket) =:= 4),
    ok = ?MODULE:vanish(Socket),
    ?assert(?MODULE:rnum(Socket) =:= 0),
    ok.

%% size/1 smoke test: only checks the call does not crash.
size_test() ->
    Socket = test_setup(),
    ?MODULE:size(Socket),
    ok.

%% stat/1 smoke test: only checks the call does not crash.
stat_test() ->
    Socket = test_setup(),
    ?MODULE:stat(Socket),
    ok.
%% mget/2 fetches several records at once; keys that were not requested
%% are absent from the result.
mget_test() ->
    Socket = test_setup_with_data(),
    MGetData = ?MODULE:mget(Socket, ["rec1", "rec3", "rec5"]),
    ?assertMatch([{<<"name">>, <<"alice">>},{<<"sport">>, <<"baseball">>}],
                 lists:sort(proplists:get_value(<<"rec1">>, MGetData))),
    ?assert(proplists:get_value(<<"rec2">>, MGetData) =:= undefined),
    ?assertMatch([<<"rec1">>, <<"rec3">>, <<"rec5">>], lists:sort(proplists:get_keys(MGetData))),
    ok.

%% iterinit/1 + iternext/1 walk every key exactly once (in server order),
%% then report an error once the iterator is exhausted.
iter_test() ->
    Socket = test_setup_with_data(),
    AllKeys = [<<"rec1">>, <<"rec2">>, <<"rec3">>, <<"rec4">>, <<"rec5">>],
    ok = ?MODULE:iterinit(Socket),
    First = ?MODULE:iternext(Socket),
    ?assert(lists:member(First, AllKeys)),
    IterAll = lists:foldl(fun(_Count, Acc) -> [?MODULE:iternext(Socket) | Acc] end,
                          [First],
                          lists:seq(1, length(AllKeys)-1)),
    ?assertMatch(AllKeys, lists:sort(IterAll)),
    ?assertMatch({error, _}, ?MODULE:iternext(Socket)),
    ok.

%% fwmkeys/3 returns up to MaxKeys keys matching a prefix.
fwmkeys_test() ->
    Socket = test_setup_with_data(),
    ok = ?MODULE:put(Socket, "fwmkeys1", [{"foo", "bar"}]),
    ?assert(length(?MODULE:fwmkeys(Socket, "rec", 4)) =:= 4),
    ?assert(length(?MODULE:fwmkeys(Socket, "rec", 8)) =:= 5),
    ?assertMatch([<<"fwmkeys1">>], ?MODULE:fwmkeys(Socket, "fwm", 3)),
    ?assertMatch([<<"rec1">>, <<"rec2">>, <<"rec3">>], ?MODULE:fwmkeys(Socket, "rec", 3)),
    ok.

%% Checks the exact wire-level encoding produced by the query builder
%% functions (query_order/query_limit/query_condition), including the
%% combined flag value for a negated condition ("16777220").
query_generation_test() ->
    ?assertMatch([{{set_order, primary, str_descending}, ["setorder", <<0:8>>, "", <<0:8>>, "1"]}],
                 ?MODULE:query_order([], primary, str_descending)),
    ?assertMatch([{{set_order, "foo", str_ascending}, ["setorder", <<0:8>>, "foo", <<0:8>>, "0"]}],
                 ?MODULE:query_order([{{set_order, blah}, ["foo"]}], "foo", str_ascending)),
    ?assertMatch([{{set_limit, 2, 0}, ["setlimit", <<0:8>>, "2", <<0:8>>, "0"]}],
                 ?MODULE:query_limit([], 2)),
    ?assertMatch([{{set_limit, 4, 1}, ["setlimit", <<0:8>>, "4", <<0:8>>, "1"]}],
                 ?MODULE:query_limit([{{set_limit, blah}, ["foo"]}], 4, 1)),
    ?assertMatch([{{add_cond, "foo", str_eq, ["bar"]}, ["addcond", <<0:8>>, "foo", <<0:8>>, "0", <<0:8>>, ["bar"]]}],
                 ?MODULE:query_condition([], "foo", str_eq, ["bar"])),
    ?assertMatch([{{add_cond, "foo", {no, str_and}, ["bar","baz"]},
                   ["addcond", <<0:8>>, "foo", <<0:8>>, "16777220", <<0:8>>, ["bar",",","baz"]]}],
                 ?MODULE:query_condition([], "foo", {no, str_and}, ["bar", "baz"])),
    ok.
%% Runs several prepared queries against the pre-populated table:
%% equality, negation with a limit, numeric comparison, and ordering.
search_test() ->
    Socket = test_setup_with_data(),
    Query1 = ?MODULE:query_condition([], "name", str_eq, ["alice"]),
    ?assertMatch([<<"rec1">>], ?MODULE:search(Socket, Query1)),
    Query2 = ?MODULE:query_condition([], "name", {no, str_eq}, ["alice"]),
    Query2A = ?MODULE:query_limit(Query2, 2),
    ?assertMatch([<<"rec2">>, <<"rec3">>], ?MODULE:search(Socket, Query2A)),
    Query3 = ?MODULE:query_condition([], "age", num_ge, [25]),
    ?assertMatch([<<"rec4">>], ?MODULE:search(Socket, Query3)),
    Query4 = ?MODULE:query_condition([], "name", {no, str_eq}, ["alice"]),
    Query4A = ?MODULE:query_order(Query4, "name", str_descending),
    ?assertMatch([<<"rec4">>, <<"rec5">>, <<"rec3">>, <<"rec2">>], ?MODULE:search(Socket, Query4A)),
    Query5 = ?MODULE:query_order([], primary, str_descending),
    ?assertMatch([<<"rec5">>, <<"rec4">>, <<"rec3">>, <<"rec2">>, <<"rec1">>], ?MODULE:search(Socket, Query5)),
    ok.

%% searchcount/2 returns only the number of matching records.
searchcount_test() ->
    Socket = test_setup_with_data(),
    Query1 = ?MODULE:query_condition([], "name", str_or, ["alice", "bob"]),
    ?assert(?MODULE:searchcount(Socket, Query1) =:= 2),
    ok.

%% searchout/2 deletes the matching records instead of returning them.
searchout_test() ->
    Socket = test_setup_with_data(),
    ?assert(?MODULE:rnum(Socket) =:= 5),
    %% Also testing regex matches, should hit "baseball" and "basketball" but
    %% skip "football"
    Query1 = ?MODULE:query_condition([], "sport", str_regex, ["^ba"]),
    ok = ?MODULE:searchout(Socket, Query1),
    ?assert(?MODULE:rnum(Socket) =:= 3),
    ?assertMatch({error, _}, ?MODULE:get(Socket, "rec1")),
    ok.
-endif.
-module(cowboy_compiled_router_parser).

-export([parse/1]).

%% graciously stolen from https://github.com/ninenines/cowboy/blob/999dc5b7c1665fb620c14f6303610793313efe58/src/cowboy_router.erl#L51

%% Compile a cowboy-style dispatch list (entries of the form
%% {Host, Paths} or {HostMatch, Fields, Paths}) into the internal
%% matching-rule representation.
parse(Routes) ->
    compile(Routes, []).
%% Walk the route list, normalizing each entry and accumulating the
%% compiled host rules (built in reverse, hence the final reverse).
compile([], Acc) ->
    lists:reverse(Acc);
compile([{Host, Paths}|Tail], Acc) ->
    %% Two-tuple form: expand with an empty fields/constraints list.
    compile([{Host, [], Paths}|Tail], Acc);
compile([{HostMatch, Fields, Paths}|Tail], Acc) ->
    %% '_' matches any host; otherwise compile the host pattern.
    HostRules = case HostMatch of
        '_' -> '_';
        _ -> compile_host(HostMatch)
    end,
    PathRules = compile_paths(Paths, []),
    %% A host pattern with optional parts compiles to several rules;
    %% pair each of them with the same fields and path rules.
    Hosts = case HostRules of
        '_' -> [{'_', Fields, PathRules}];
        _ -> [{R, Fields, PathRules} || R <- HostRules]
    end,
    compile(Tail, Hosts ++ Acc).
%% Compile a host match expression. Host segments are separated by $.
%% (paths use $/); list input is first converted to a binary.
compile_host(HostMatch) when is_list(HostMatch) ->
    compile_host(list_to_binary(HostMatch));
compile_host(HostMatch) when is_binary(HostMatch) ->
    compile_rules(HostMatch, $., [], [], <<>>).
%% Normalize and compile each path definition. Three-tuple entries get an
%% empty fields list; list path matches are converted to binaries; every
%% concrete path must start with a slash.
compile_paths([], Acc) ->
    lists:reverse(Acc);
compile_paths([{PathMatch, Handler, Opts}|Tail], Acc) ->
    compile_paths([{PathMatch, [], Handler, Opts}|Tail], Acc);
compile_paths([{PathMatch, Fields, Handler, Opts}|Tail], Acc)
        when is_list(PathMatch) ->
    compile_paths([{iolist_to_binary(PathMatch),
        Fields, Handler, Opts}|Tail], Acc);
compile_paths([{'_', Fields, Handler, Opts}|Tail], Acc) ->
    %% '_' matches any path.
    compile_paths(Tail, [{'_', Fields, Handler, Opts}] ++ Acc);
compile_paths([{<< $/, PathMatch/bits >>, Fields, Handler, Opts}|Tail],
        Acc) ->
    %% Strip the leading slash and compile the remaining segments. One
    %% pattern may yield several rules (optional [...] parts); segment
    %% lists come back reversed from compile_rules/5.
    PathRules = compile_rules(PathMatch, $/, [], [], <<>>),
    Paths = [{lists:reverse(R), Fields, Handler, Opts} || R <- PathRules],
    compile_paths(Tail, Paths ++ Acc);
compile_paths([{PathMatch, _, _, _}|_], _) ->
    error({badarg, "The following route MUST begin with a slash: "
        ++ binary_to_list(PathMatch)}).
%% Tokenize a host or path pattern into segments. S is the separator
%% ($. for hosts, $/ for paths); Segments accumulates the current rule
%% (in reverse), Rules collects alternative completed rules produced by
%% optional bracketed parts, and Acc holds the bytes of the segment
%% currently being read.
compile_rules(<<>>, _, Segments, Rules, <<>>) ->
    [Segments|Rules];
compile_rules(<<>>, _, Segments, Rules, Acc) ->
    [[Acc|Segments]|Rules];
compile_rules(<< S, Rest/bits >>, S, Segments, Rules, <<>>) ->
    %% Separator with nothing accumulated: skip empty segments.
    compile_rules(Rest, S, Segments, Rules, <<>>);
compile_rules(<< S, Rest/bits >>, S, Segments, Rules, Acc) ->
    %% Separator terminates the current segment.
    compile_rules(Rest, S, [Acc|Segments], Rules, <<>>);
compile_rules(<< $:, Rest/bits >>, S, Segments, Rules, <<>>) ->
    %% ':name' at the start of a segment introduces a binding; the
    %% binding name becomes an atom in the compiled rule.
    {NameBin, Rest2} = compile_binding(Rest, S, <<>>),
    Name = binary_to_atom(NameBin, utf8),
    compile_rules(Rest2, S, Segments, Rules, Name);
compile_rules(<< $:, _/bits >>, _, _, _, _) ->
    %% ':' in the middle of a segment is not allowed.
    error(badarg);
compile_rules(<< $[, $., $., $., $], Rest/bits >>, S, Segments, Rules, Acc)
        when Acc =:= <<>> ->
    %% '[...]' matches any remaining segments.
    compile_rules(Rest, S, ['...'|Segments], Rules, Acc);
compile_rules(<< $[, $., $., $., $], Rest/bits >>, S, Segments, Rules, Acc) ->
    compile_rules(Rest, S, ['...', Acc|Segments], Rules, Acc);
compile_rules(<< $[, S, Rest/bits >>, S, Segments, Rules, Acc) ->
    %% Optional part beginning with a separator: '[/...' or '[....'.
    compile_brackets(Rest, S, [Acc|Segments], Rules);
compile_rules(<< $[, Rest/bits >>, S, Segments, Rules, <<>>) ->
    compile_brackets(Rest, S, Segments, Rules);
%% Open bracket in the middle of a segment.
compile_rules(<< $[, _/bits >>, _, _, _, _) ->
    error(badarg);
%% Missing an open bracket.
compile_rules(<< $], _/bits >>, _, _, _, _) ->
    error(badarg);
compile_rules(<< C, Rest/bits >>, S, Segments, Rules, Acc) ->
    %% Ordinary byte: extend the current segment.
    compile_rules(Rest, S, Segments, Rules, << Acc/binary, C >>).
%% Everything past $: up to the segment separator ($. for hosts, $/ for
%% paths), $[ or $] — or the end of the binary — is the binding name.
%% An empty name (':' immediately followed by end of input) is an error.
compile_binding(<<C, Rest/bits>>, S, Acc) when C =/= S, C =/= $[, C =/= $] ->
    compile_binding(Rest, S, <<Acc/binary, C>>);
compile_binding(<<>>, _, <<>>) ->
    error(badarg);
compile_binding(Rest, _, Acc) ->
    {Acc, Rest}.
%% Compile an optional '[...]' part: the pattern is valid both with and
%% without the bracketed content, so compile the remainder twice and
%% collect both alternative rule sets.
compile_brackets(Rest, S, Segments, Rules) ->
    {Bracket, Rest2} = compile_brackets_split(Rest, <<>>, 0),
    %% Without the optional part...
    Rules1 = compile_rules(Rest2, S, Segments, [], <<>>),
    %% ...and with the optional part spliced back before the remainder.
    Rules2 = compile_rules(<< Bracket/binary, Rest2/binary >>,
        S, Segments, [], <<>>),
    Rules ++ Rules2 ++ Rules1.
%% Split the input at the closing bracket matching the already-open one,
%% tracking nesting depth so inner '[...]' pairs are not confused with
%% the bracket we are looking for. Running out of input means a close
%% bracket is missing.
compile_brackets_split(<<$[, Rest/bits>>, Acc, Depth) ->
    compile_brackets_split(Rest, <<Acc/binary, $[>>, Depth + 1);
compile_brackets_split(<<$], Rest/bits>>, Acc, 0) ->
    %% Depth zero: this is the matching close bracket.
    {Acc, Rest};
compile_brackets_split(<<$], Rest/bits>>, Acc, Depth) ->
    compile_brackets_split(Rest, <<Acc/binary, $]>>, Depth - 1);
compile_brackets_split(<<C, Rest/bits>>, Acc, Depth) ->
    compile_brackets_split(Rest, <<Acc/binary, C>>, Depth);
compile_brackets_split(<<>>, _, _) ->
    error(badarg).
%% @doc MQTT packet encoder
%% Copyright 2018 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(mqtt_packet_map_topic).
-author('<NAME> <<EMAIL>>').
-export([
validate_topic/1,
validate_topic_publish/1,
is_valid_topic/1,
normalize_topic/1,
flatten_topic/1,
is_wildcard_topic/1
]).
%% @doc Validate a topic or topic filter; on success return it in its
%% normalized (list) form, otherwise return {error, invalid_topic}.
-spec validate_topic( mqtt_packet_map:mqtt_topic() ) -> {ok, mqtt_packet_map:mqtt_topic()} | {error, invalid_topic}.
validate_topic(Topic) ->
    Normalized = normalize_topic(Topic),
    case is_valid_topic(Normalized) of
        true -> {ok, Normalized};
        false -> {error, invalid_topic}
    end.
%% @doc Validate a topic for publishing: it must be a valid topic and
%% must not contain any wildcard level. Returns the normalized topic.
-spec validate_topic_publish( mqtt_packet_map:mqtt_topic() ) -> {ok, mqtt_packet_map:mqtt_topic()} | {error, invalid_topic}.
validate_topic_publish(Topic) ->
    case validate_topic(Topic) of
        {ok, Normalized} = Ok ->
            case is_wildcard_topic(Normalized) of
                false -> Ok;
                true -> {error, invalid_topic}
            end;
        {error, _} = Error ->
            Error
    end.
%% @doc Check if a (normalized) topic is valid: it must be a non-empty
%% list of levels, '#' may only appear as the last level, and binary
%% levels must be utf-8 without '+', '#', '/' or NUL characters.
-spec is_valid_topic( list() ) -> boolean().
is_valid_topic([]) ->
    false;
is_valid_topic(Levels) when is_list(Levels) ->
    check_levels(Levels).

check_levels([]) ->
    true;
check_levels(['#']) ->
    %% Multi-level wildcard is only allowed as the final level.
    true;
check_levels(['#' | _]) ->
    false;
check_levels([Level | Rest]) ->
    check_level(Level) andalso check_levels(Rest).

check_level('#') -> true;
check_level('+') -> true;
check_level(Level) when is_integer(Level) -> true;
check_level(Level) when is_binary(Level) -> check_chars(Level).

%% Every character must be valid utf-8 and must not be a wildcard,
%% separator or NUL character.
check_chars(<<>>) ->
    true;
check_chars(<<C/utf8, Rest/binary>>) when C =/= $+, C =/= $#, C =/= $/, C =/= 0 ->
    check_chars(Rest);
check_chars(_) ->
    false.
%% @doc Normalize a topic to a list of levels. A binary is split on "/";
%% wildcard levels are replaced by the atoms '+' and '#' (as used by the
%% router), integers become binaries, other terms go through z_convert.
-spec normalize_topic( mqtt_packet_map:mqtt_topic() ) -> mqtt_packet_map:mqtt_topic().
normalize_topic(<<>>) ->
    [];
normalize_topic(Topic) when is_binary(Topic) ->
    normalize_topic(binary:split(Topic, <<"/">>, [global]));
normalize_topic(Levels) when is_list(Levels) ->
    [normalize_level(Level) || Level <- Levels].

normalize_level('+') -> '+';
normalize_level('#') -> '#';
normalize_level(<<"+">>) -> '+';
normalize_level(<<"#">>) -> '#';
normalize_level(Level) when is_integer(Level) -> integer_to_binary(Level);
normalize_level(Level) when is_binary(Level) -> Level;
normalize_level(Level) -> z_convert:to_binary(Level).
%% @doc Recombine a normalized topic into a single binary with "/"
%% between the levels. A binary input is returned unchanged.
-spec flatten_topic( mqtt_packet_map:mqtt_topic() ) -> binary().
flatten_topic(Topic) when is_binary(Topic) ->
    Topic;
flatten_topic([]) ->
    <<>>;
flatten_topic([First | Rest]) ->
    lists:foldl(
        fun(Level, Acc) ->
            Bin = to_binary(Level),
            <<Acc/binary, $/, Bin/binary>>
        end,
        to_binary(First),
        Rest).

%% @doc Check whether a normalized topic contains any wildcard level.
-spec is_wildcard_topic( list() ) -> boolean().
is_wildcard_topic(Levels) ->
    lists:any(
        fun('+') -> true;
           ('#') -> true;
           (_) -> false
        end,
        Levels).

%% Render a single normalized topic level as a binary.
to_binary('+') -> <<"+">>;
to_binary('#') -> <<"#">>;
to_binary(Bin) when is_binary(Bin) -> Bin;
to_binary(Other) -> z_convert:to_binary(Other).
%% Copyright (c) Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(assert_diagnostic_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include_lib("test/assert_diagnostic.hrl").
%% TestServer callbacks ++ test cases.
-compile(export_all).
%% @doc Common Test group definition: the four assertListEqual test
%% cases run in parallel inside a single group.
groups() ->
    Cases = [
        test_equal,
        test_expected_is_longer,
        test_actual_is_longer,
        test_one_distinc_item
    ],
    [{my_test_group, [parallel], Cases}].

%% @doc Top-level Common Test entry point: run the single group.
all() ->
    [{group, my_test_group}].
%% Comparing a list with itself must succeed and return 'ok'.
test_equal(_) ->
    L = [rifi, fifi, loulou],
    % Must return 'ok' to respect ?assert API.
    % NB: Must use a temporary result variable, doesn't work inline!
    Res = ?assertListEqual(L, L),
    ?assertEqual(ok, Res).

%% Expected list has one extra item: the diagnostic must report what the
%% actual list is missing.
test_expected_is_longer(_) ->
    L0 = [coffee, brownie],
    L1 = [coffee],
    checkListEqualMessage(L0, L1, ["Actual list lacks 1 expected items: [brownie]"]).

%% Actual list has one extra item: the diagnostic must report the
%% unexpected item.
test_actual_is_longer(_) ->
    L0 = [breath],
    L1 = [breath, explode],
    checkListEqualMessage(L0, L1, ["Actual list has 1 unexpected items: [explode]"]).

%% Same length but one differing item: the diagnostic must pinpoint the
%% position and show both values.
test_one_distinc_item(_) ->
    L0 = [sea, sax, sun],
    L1 = [sea, tex, sun],
    checkListEqualMessage(L0, L1, [
        "Item 2 differs:\n"
        "Expected: sax\n"
        "Value: tex"
    ]).
%% Helper ensuring comparison of mismatching lists gives the expected message.
%%
%% Uses try/catch instead of the legacy `case (catch Expr)' form so the
%% exception class and reason can be matched directly instead of decoding
%% the {'EXIT', {Reason, Stack}} wrapper. ?assertListEqual follows the
%% ?assert API and raises error:{assert, Info}, where Info is a proplist
%% whose 'comment' entries carry the diagnostic message.
%% NB: the camelCase name is kept for compatibility with existing call sites.
checkListEqualMessage(L0, L1, Msg) ->
    try ?assertListEqual(L0, L1) of
        ok ->
            ct:fail("Got 'ok', was expecting exception: ~p", [Msg]);
        X ->
            ct:fail("Expected exception: ~p~nGot: ~p", [Msg, X])
    catch
        error:{assert, Info} ->
            Comment = [C || {comment, C} <- Info],
            ?assertEqual([Msg], Comment);
        Class:Reason ->
            %% Any other exception shape is a test failure.
            ct:fail("Expected exception: ~p~nGot: ~p", [Msg, {Class, Reason}])
    end.
%%
%% @doc Another implementations of
%% [https://groups.google.com/g/erlang-programming/c/ZUHZpH0wsOA
%% coinductive data types].
%%
%% @see lazy
%%
-module(lazy2).
-author("<NAME> <<EMAIL>>").
-export([gen/2, filter/2, foldl/3, map/2, take/2]).
-export([natural_numbers/0]).
-dialyzer(no_improper_lists).
-type lazy_seq() :: [term() | fun(() -> lazy_seq())].
-type integers() :: [pos_integer() | fun(() -> integers())].
%%
%% @doc Generates a lazy (infinite) sequence seeded with `Seed', where
%% each subsequent element is obtained by applying `Next' to the
%% previous one. The tail is a thunk that is only evaluated when called.
%%
gen(Seed, Next) ->
    Tail = fun() -> gen(Next(Seed), Next) end,
    [Seed | Tail].
%%
%% @doc Generates the lazy sequence of integers starting at `Start'.
%%
integers_from(Start) -> gen(Start, fun(I) -> I + 1 end).

%%
%% @doc Generates the lazy sequence of natural numbers: 1, 2, 3, ...
%%
natural_numbers() -> integers_from(1).
%%
%% @doc Filters the given lazy sequence, keeping only the elements for
%% which `Pred' returns true. Evaluation stays lazy: elements are only
%% forced while searching for the next match.
%%
filter(_, []) ->
    [];
filter(Pred, [Head | Next]) ->
    case Pred(Head) of
        false -> filter(Pred, Next());
        true -> [Head | fun() -> filter(Pred, Next()) end]
    end.
%%
%% @doc Left-folds a single step of the given lazy sequence: applies
%% `Fun' to the head and `Acc0', and forces the tail. Returns the new
%% accumulator together with the remaining sequence.
%%
foldl(_, Acc0, []) ->
    {Acc0, []};
foldl(Fun, Acc0, [Head | Next]) ->
    {Fun(Head, Acc0), Next()}.
%%
%% @doc Maps `Fun' over the given lazy sequence; only the head is
%% transformed eagerly, the tail remains a thunk.
%%
map(_, []) ->
    [];
map(Fun, [Head | Next]) ->
    Mapped = Fun(Head),
    [Mapped | fun() -> map(Fun, Next()) end].
%%
%% @doc Returns the first `N' elements of the given lazy sequence.
%%
take(N, LazySeq) -> take([], N, LazySeq).

%% Accumulate with O(1) prepends and reverse once at the end, instead of
%% the previous `Acc ++ [X]' per step which made take/2 O(N^2).
take(Acc, 0, _) -> lists:reverse(Acc);
take(Acc, N, [X | Gen]) -> take([X | Acc], N - 1, Gen()).
%% =============================================================================
%% Unit tests
%% =============================================================================

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

%% Take the first N elements of LazyList that satisfy Pred.
filter_first(N, Pred, LazyList) -> take(N, filter(Pred, LazyList)).

%% Fold the first N elements of LazyList into Acc, one step at a time.
foldl_first(0, _, Acc, _) -> Acc;
foldl_first(N, Fun, Acc, LazyList) ->
    {NewAcc, LazyTail} = foldl(Fun, Acc, LazyList),
    foldl_first(N - 1, Fun, NewAcc, LazyTail).

%% Map Fun over LazyList and return the first N results.
map_first(N, Fun, LazyList) -> take(N, map(Fun, LazyList)).

first_natural_numbers(N) -> take(N, natural_numbers()).

first_even_numbers(N) -> filter_first(N, fun(X) -> X rem 2 == 0 end, natural_numbers()).

first_squares(N) -> map_first(N, fun(X) -> X * X end, natural_numbers()).

first_sum(N) -> foldl_first(N, fun(X, Sum) -> X + Sum end, 0, natural_numbers()).

%% filter/2 only forces elements until the first match is found.
filter_test() ->
    [X | _] = filter(fun(X) -> 10 < X end, natural_numbers()),
    ?assertEqual(11, X).

%% foldl/3 performs exactly one fold step: 1 * 1 = 1.
foldl_test() ->
    {P, _} = foldl(fun(X, Prod) -> X * Prod end, 1, natural_numbers()),
    ?assertEqual(1, P).

%% map/2 transforms the head eagerly: first doubled natural is 2.
map_test() ->
    [X | _] = map(fun(X) -> X * 2 end, natural_numbers()),
    ?assertEqual(2, X).

first_natural_numbers_test() ->
    ?assertEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], first_natural_numbers(10)).

first_even_numbers_test() ->
    ?assertEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20], first_even_numbers(10)).

first_squares_test() ->
    ?assertEqual([1, 4, 9, 16, 25, 36, 49, 64, 81, 100], first_squares(10)).

first_sum_test() ->
    ?assertEqual(55, first_sum(10)).
-endif.
%%%=============================================================================
%%% @copyright 2017, <NAME>
%%% @doc Stochastic pool.
%%%
%%% Opaque data structure that handle a collection of peers.
%%% Implements the design described in:
%%% [https://github.com/aeternity/protocol/blob/master/GOSSIP.md]
%%%
%%% Usage:
%%% <ul>
%%% <li>When setting up, add trusted peers:
%%% <ul>
%%% <li>Call {@link update/7} with the trust flag set; this will add
%%% the peer to the verified pool and prevent it from ever being
%%% evicted.
%%% </li>
%%% </ul>
%%% </li>
%%% <li>When receiving a peer through gossip:
%%% <ul>
%%% <li>Call {@link update/7}; if this is a new peer this will add the
%% peer to the unverified pool; if the peer is already pooled it will
%%% just refresh it.
%%% </li>
%%% </ul>
%%% </li>
%%% <li>To select a peer to connect to:
%%% <ul>
%%% <li>Call {@link random_select/4}; this will return a peer and mark it
%%% as selected so it is not returned from any further selection.
%%% </li>
%%% </ul>
%%% </li>
%%% <li>When an outbound connection failed:
%%% <ul>
%% <li>Call {@link reject/3}; this puts the connection on standby for a
%% time related to the number of times it got rejected (exponential
%% backoff). After the standby time has passed it will be selectable
%%% again.
%%% </li>
%%% </ul>
%%% </li>
%%% <li>When an outbound connection succeed:
%%% <ul>
%%% <li>Call {@link verify/3} to move the peer to the verified pool.</li>
%%% </ul>
%%% </li>
%%% <li>When a connection is closed:
%%% <ul>
%%% <li>Call {@link release/3}; this will reset the rejection counter and
%%% make the peer selectable again.
%%% </li>
%%% </ul>
%%% </li>
%%% <li>When an inbound connection is established and the first gossip
%%% message is received:
%%% <ul>
%%% <li>Call {@link update/7} to add the connecting peer to the pool.</li>
%%% <li>Call {@link verify/3} to move it to the verified pool.</li>
%%% <li>Call {@link select/4} to mark it as selected.</li>
%%% </ul>
%%% </li>
%%% <li>When building a gossip message
%%% <ul>
%%% <li>Call {@link random_subset/3} to get a random subset of the pooled
%%% peer</li>
%%% </ul>
%%% </li>
%%% </ul>
%%%
%%% This data structure does not make any assumption about the time; it
%%% requires the time to be passed as a parameter to functions needing it.
%%% Some functions takes a time in millisecond as it would be returned from
%%% `erlang:system_time(millisecond)' and uses it as if it was the current time.
%%% Every calls should be given a time greater or equal to the last call.
%%%
%%% To support selecting peers from a new address group, a filter function
%%% must be given to {@link random_select/4} that will reject peers from the
%%% current connections address groups.
%%%
%%% @end
%%%=============================================================================
-module(aec_peers_pool).
%=== INCLUDES ==================================================================
-include_lib("stdlib/include/assert.hrl").
%=== EXPORTS ===================================================================
-export([address_group/1]).
-export([new/1]).
-export([count/3]).
-export([find/2]).
-export([peer_state/2]).
-export([is_verified/2]).
-export([is_unverified/2]).
-export([is_available/2]).
-export([update/7]).
-export([verify/3]).
-export([random_subset/3]).
-export([random_select/4]).
-export([select/3]).
-export([reject/3]).
-export([release/3]).
-export([delete/2]).
%% Functions for debug/testing.
-export([available/2]).
-ifdef(TEST).
-compile([export_all, nowarn_export_all]).
-endif.
%=== MACROS ====================================================================
-define(ST, ?MODULE).
%% The size in bytes of the generated secret.
-define(SECRET_SIZE, 128).
%% The algorithm to use for weak random number generator.
-define(RAND_ALGO, exrop).
%% The initial size of the lookup tables.
-define(LOOKUP_START_SIZE, 8).
%% The maximum increment of size of the lookup tables underlying arrays;
%% must be a multiple of ?LOOKUP_START_SIZE.
-define(MAX_LOOKUP_SIZE_INC, ?LOOKUP_START_SIZE * 16).
%% The default number of buckets in the verified pool.
-define(DEFAULT_VERIF_BUCKET_COUNT, 256).
%% The default number of peers in each verified pool buckets.
-define(DEFAULT_VERIF_BUCKET_SIZE, 32).
%% The default number of possible buckets for the same peer address group.
-define(DEFAULT_VERIF_GROUP_SHARD, 8).
%% The default number of buckets in the unverified pool.
-define(DEFAULT_UNVER_BUCKET_COUNT, 1024).
%% The default number of peers in each unverified buckets.
-define(DEFAULT_UNVER_BUCKET_SIZE, 64).
%% The default number of possible buckets for the same source address group.
%% MUST BE A MULTIPLE OF ?DEFAULT_UNVER_GROUP_SHARD.
-define(DEFAULT_UNVER_SOURCE_SHARD, 64).
%% The default number of possible buckets for the same peer
%% and source address groups.
-define(DEFAULT_UNVER_GROUP_SHARD, 4).
%% The default maximum number of unverified peer references
%% in the unverified pool.
-define(DEFAULT_UNVER_MAX_REFS, 8).
%% The default eviction skew toward older peers;
%% it is used to skew the randomly evicted peer.
-define(DEFAULT_EVICTION_SKEW, 1.2).
%% The default probability of selecting a peer from the verified pool.
-define(DEFAULT_SELECT_VERIFIED_PROB, 0.5).
%% The default time without a peer being updated after which it gets removed.
%% 30 days in milliseconds (days * hours * minutes * seconds * 1000); the
%% previous factor of 50 instead of 60 did not match the documented 30 days.
-define(DEFAULT_MAX_UPDATE_LAPSE, 30 * 24 * 60 * 60 * 1000). % 30 days
%% The default backoff lookup table for standby duration in milliseconds.
-define(DEFAULT_STANDBY_TIMES,
[5000, 15000, 30000, 60000, 120000, 300000, 600000]).
%% The default maximum number of times a peer can get rejected;
%% when reached, the peer is downgraded/removed (if not trusted).
-define(DEFAULT_MAX_REJECTIONS, 7).
%=== TYPES =====================================================================
-record(peer, {
% The peer unique identifier.
id :: binary(),
% If the peer is trusted and should never be downgraded.
trusted = false :: boolean(),
% The peer's IP address.
addr :: peer_addr(),
% The IP address of the source of the peer.
source :: peer_addr(),
% Some opaque extra information.
extra :: extra(),
% The index of the verified pool's bucket the peer is located in.
vidx :: non_neg_integer() | undefined,
% A list of unverified pool bucket index the peer is located in.
uidxs = [] :: [non_neg_integer()],
% If the peer has been selected.
selected = false :: boolean(),
% The number of time peers got rejected.
rejected = 0 :: non_neg_integer(),
% The time the peer was last updated.
update_time :: pos_integer() | undefined,
% The time the peer was last selected.
select_time :: pos_integer() | undefined,
% The time the peer was last rejected.
reject_time :: pos_integer() | undefined,
% The index in the randomized lookup table for all peers.
lookup_all_idx :: non_neg_integer() | undefined,
% The index in the randomized lookup table for available verified peers.
lookup_verif_idx :: non_neg_integer() | undefined,
% The index in the randomized lookup table for available unverified peers.
lookup_unver_idx :: non_neg_integer() | undefined
}).
-record(pool, {
% The number of peers in the pool.
size :: non_neg_integer(),
% The maximum number of references in the pool.
max_refs :: pos_integer(),
% The eviction skew.
skew :: float(),
% The number of buckets in the pool.
bucket_count :: pos_integer(),
% The pool's bucket size.
bucket_size :: pos_integer(),
% The pool's buckets.
buckets :: buckets(peer_id())
}).
-record(lookup, {
% The number of element in the lookup table.
size :: non_neg_integer(),
% The array of element; its size may be larger.
array :: array:array(peer_id())
}).
-record(?MODULE, {
    % The secret to randomize the pool.
    secret :: binary(),
    % The state of the weak random generator.
    rand :: rand_state(),
    % The map of all the pooled peers.
    peers :: peer_map(),
    % The set of peers that have been rejected and are now on standby.
    standby :: #{peer_id() => true},
    % The verified pool.
    verif_pool :: pool(),
    % The unverified pool.
    unver_pool :: pool(),
    % The randomized list of all peers.
    lookup_all :: lookup(),
    % The randomized list of verified peers that are neither selected
    % nor rejected.
    lookup_verif :: lookup(),
    % The randomized list of unverified peers that are neither selected
    % nor rejected.
    lookup_unver :: lookup(),
    % The probability to select a verified peer when selecting from both pools.
    select_verif_prob :: float(),
    % The time after which a peer gets removed if never updated.
    max_update_lapse :: pos_integer(),
    % The sharding configuration.
    verif_group_shard :: pos_integer(),
    unver_source_shard :: pos_integer(),
    unver_group_shard :: pos_integer(),
    % If a strong random number should be used as an offset of the weak random
    % number for random_select/4 and random_subset/3.
    use_rand_offset :: boolean(),
    % The lookup table for standby time.
    standby_times :: [non_neg_integer()],
    % The maximum number of times a peer can be rejected.
    max_rejections :: pos_integer()
}).
% The pool state; the state record is named after the module.
-type state() :: #aec_peers_pool{}.
-type pool() :: #pool{}.
-type lookup() :: #lookup{}.
-type peer() :: #peer{}.
-type peer_map() :: #{peer_id() => peer()}.
% Opaque, caller-supplied data attached to a peer when first added.
-type extra() :: term().
% State of the weak random generator (see the `rand' module).
-type rand_state() :: term().
-type peer_id() :: binary().
-type peer_addr() :: inet:ip_address().
% Peer as exposed to callers: identifier plus its extra() term.
-type ext_peer() :: {peer_id(), term()}.
-type millitimestamp() :: pos_integer().
-type milliseconds() :: pos_integer().
-type bucket(Type) :: [Type].
-type buckets(Type) :: array:array(bucket(Type)).
% Which pool(s) a selection operates on.
-type select_target() :: verified | unverified | both.
% Caller-supplied predicate over a peer identifier and its extra data.
-type filter_fun() :: fun((peer_id(), extra()) -> boolean()).
% Internal predicate over a peer identifier only (see wrap_filter_fun/2).
-type int_filter_fun() :: fun((peer_id()) -> boolean()).
% Decides the fate of a bucket entry: keep it, remove it, or make it
% a candidate for eviction.
-type bucket_filter_fun() :: fun((peer_id()) -> keep | remove | evict).
-type bucket_sort_key_fun() :: fun((peer_id()) -> term()).
% Pool-specific selection function (see verified_select/3 etc).
-type select_fun() :: fun((state(), millitimestamp(),
                           int_filter_fun() | undefined)
                          -> {unavailable, state()} | {peer_id(), state()}).
-type options() :: [option()].
-type option() :: {verif_bcount, pos_integer()}
                | {verif_bsize, pos_integer()}
                | {verif_group_shard, pos_integer()}
                | {unver_bcount, pos_integer()}
                | {unver_bsize, pos_integer()}
                | {unver_source_shard, pos_integer()}
                | {unver_group_shard, pos_integer()}
                | {unver_max_refs, pos_integer()}
                | {eviction_skew, float()}
                | {select_verif_prob, float()}
                | {max_update_lapse, pos_integer()}
                | {secret, binary()}
                | {seed, {integer(), integer(), integer()}}
                | {disable_strong_random, boolean()}
                | {standby_times, [non_neg_integer()]}
                | {max_rejections, pos_integer()}.
-export_type([filter_fun/0, state/0]).
%=== API FUNCTIONS =============================================================
%% @doc Computes the address group of the given IP address as a binary.
%% The group is the two leading octets of the address.
%% Only supports IPv4 for now.
-spec address_group(peer_addr()) -> binary().
address_group({Hi, Lo, _, _}) -> <<Hi, Lo>>.
%% @doc Creates a new stochastic pool.
%%
%% Supported options:
%% <ul>
%% <li>`verif_bcount': Number of buckets in the verified pool.
%%  Default: `256'.</li>
%% <li>`verif_bsize': Size of the verified pool's buckets. Default: `32'.</li>
%% <li>`verif_group_shard': Number of possible buckets for the same peer
%%  address group. Default: `8'.</li>
%% <li>`unver_bcount': Number of buckets in the unverified pool.
%%  Default: `1024'.</li>
%% <li>`unver_bsize': Size of the unverified pool's buckets.
%%  Default: `64'.</li>
%% <li>`unver_source_shard': Default number of possible buckets for the same
%%  source address group; <b>must</b> be a multiple of `unver_group_shard'.
%%  Default: `64'.</li>
%% <li>`unver_group_shard': Default number of possible buckets for the same
%%  peer and source address groups. Default: `4'.</li>
%% <li>`unver_max_refs': Maximum number of peer references in the unverified
%%  pool. Default: `8'.</li>
%% <li>`eviction_skew': Skew of the eviction algorithm; `1.0' means no skew
%%  and larger values skew eviction toward the oldest peers.
%%  Default: `1.2'.</li>
%% <li>`select_verif_prob': Probability of selecting a verified peer when
%%  selecting from both pools; regardless of the probability, when one pool
%%  is empty the peer is taken from the other one. Default: `0.5'.</li>
%% <li>`max_update_lapse': Maximum time in milliseconds after which a peer
%%  is removed if never updated. Default: `30 days'.</li>
%% <li>`secret': Secret binary used to further randomize the pool;
%%  a random one is generated when not specified.</li>
%% <li>`seed': Seed for the weak random generator; when not specified it is
%%  generated from a strong source of randomness.</li>
%% <li>`disable_strong_random': When `true', no strong random number is used
%%  as offset for {@link random_subset/3} and {@link random_select/4};
%%  this ensures reproducibility when a specific seed was provided.</li>
%% <li>`standby_times': Lookup table of the time in seconds a peer is put on
%%  standby when rejected; when rejected more times than the table has
%%  entries (because `max_rejections' is larger), the last entry is reused.
%%  Default: `[5, 15, 30, 60, 120, 300, 600]'.</li>
%% <li>`max_rejections': Maximum number of times a peer can be rejected
%%  before being downgraded from the verified pool or removed from the
%%  unverified pool. Default: `7'.</li>
%% </ul>
-spec new(options()) -> state().
new(Opts) ->
    Secret = get_opt(secret, Opts, gen_secret()),
    Seed = get_opt(seed, Opts, gen_seed()),
    VerifBCount = get_opt(verif_bcount, Opts, ?DEFAULT_VERIF_BUCKET_COUNT),
    VerifBSize = get_opt(verif_bsize, Opts, ?DEFAULT_VERIF_BUCKET_SIZE),
    VerifGShard = get_opt(verif_group_shard, Opts, ?DEFAULT_VERIF_GROUP_SHARD),
    UnverBCount = get_opt(unver_bcount, Opts, ?DEFAULT_UNVER_BUCKET_COUNT),
    UnverBSize = get_opt(unver_bsize, Opts, ?DEFAULT_UNVER_BUCKET_SIZE),
    UnverSShard = get_opt(unver_source_shard, Opts, ?DEFAULT_UNVER_SOURCE_SHARD),
    UnverGShard = get_opt(unver_group_shard, Opts, ?DEFAULT_UNVER_GROUP_SHARD),
    UnverMaxRefs = get_opt(unver_max_refs, Opts, ?DEFAULT_UNVER_MAX_REFS),
    EvictionSkew = get_opt(eviction_skew, Opts, ?DEFAULT_EVICTION_SKEW),
    VerifProb = get_opt(select_verif_prob, Opts, ?DEFAULT_SELECT_VERIFIED_PROB),
    MaxLapse = get_opt(max_update_lapse, Opts, ?DEFAULT_MAX_UPDATE_LAPSE),
    NoStrongRand = get_opt(disable_strong_random, Opts, false),
    StandbyTimes = get_opt(standby_times, Opts, ?DEFAULT_STANDBY_TIMES),
    MaxRejections = get_opt(max_rejections, Opts, ?DEFAULT_MAX_REJECTIONS),
    % Validate the configuration before building any state.
    ?assert(VerifBCount > 0),
    ?assert(VerifBSize > 0),
    ?assert(VerifGShard > 0),
    ?assert(UnverBCount > 0),
    ?assert(UnverBSize > 0),
    ?assert(UnverSShard > 0),
    ?assert(UnverGShard > 0),
    ?assert((UnverSShard rem UnverGShard) =:= 0),
    ?assert(UnverMaxRefs > 0),
    ?assert(VerifProb >= 0),
    ?assert(MaxLapse > 0),
    ?assert(is_list(StandbyTimes)),
    ?assert(length(StandbyTimes) > 0),
    ?assert(MaxRejections > 0),
    RandState = rand:seed_s(?RAND_ALGO, Seed),
    #?ST{
        secret = Secret,
        rand = RandState,
        peers = #{},
        standby = #{},
        % The verified pool allows a single reference per peer.
        verif_pool = pool_new(VerifBCount, VerifBSize, 1, EvictionSkew),
        unver_pool = pool_new(UnverBCount, UnverBSize, UnverMaxRefs,
                              EvictionSkew),
        lookup_all = lookup_new(),
        lookup_verif = lookup_new(),
        lookup_unver = lookup_new(),
        select_verif_prob = VerifProb,
        max_update_lapse = MaxLapse,
        verif_group_shard = VerifGShard,
        unver_group_shard = UnverGShard,
        % Stored pre-divided by the group shard.
        unver_source_shard = UnverSShard div UnverGShard,
        use_rand_offset = not NoStrongRand,
        standby_times = StandbyTimes,
        max_rejections = MaxRejections
    }.
%% @doc Counts pooled peers matching the given discriminators.
%%
%% Discriminators:
%% <ul>
%% <li>`all' + `both': total number of pooled peers.</li>
%% <li>`all' + `verified': total number of verified peers.</li>
%% <li>`all' + `unverified': total number of unverified peers.</li>
%% <li>`available' + `both': number of selectable peers.</li>
%% <li>`available' + `verified': number of selectable verified peers.</li>
%% <li>`available' + `unverified': number of selectable unverified
%%  peers.</li>
%% <li>`standby' + `both': number of peers on standby after being
%%  rejected.</li>
%% <li>`standby' + `verified': number of verified peers on standby after
%%  being rejected.</li>
%% <li>`standby' + `unverified': number of unverified peers on standby
%%  after being rejected.</li>
%% </ul>
%%
%% Note that `available' counts do not take into account any additional
%% restrictions that could be given to {@link random_select/4}, and that
%% peers whose standby time has already elapsed are <b>not</b> counted
%% as available.
-spec count(state(), all | available | standby, select_target())
    -> non_neg_integer().
count(#?ST{verif_pool = VerifPool, unver_pool = UnverPool}, all, both) ->
    pool_size(VerifPool) + pool_size(UnverPool);
count(#?ST{verif_pool = VerifPool}, all, verified) ->
    pool_size(VerifPool);
count(#?ST{unver_pool = UnverPool}, all, unverified) ->
    pool_size(UnverPool);
count(#?ST{lookup_verif = Verif, lookup_unver = Unver}, available, both) ->
    lookup_size(Verif) + lookup_size(Unver);
count(#?ST{lookup_verif = Verif}, available, verified) ->
    lookup_size(Verif);
count(#?ST{lookup_unver = Unver}, available, unverified) ->
    lookup_size(Unver);
count(#?ST{standby = Standby}, standby, both) ->
    maps:size(Standby);
count(#?ST{standby = Standby} = St, standby, verified) ->
    length([I || I <- maps:keys(Standby), is_verified(St, I)]);
count(#?ST{standby = Standby} = St, standby, unverified) ->
    length([I || I <- maps:keys(Standby), is_unverified(St, I)]).
%% @doc Returns the extra data attached to the given peer identifier.
-spec find(state(), peer_id()) -> {ok, extra()} | error.
find(St, PeerId) ->
    case find_peer(St, PeerId) of
        undefined -> error;
        #peer{extra = Extra} -> {ok, Extra}
    end.
%% @doc Returns the pool the peer identifier belongs to and whether it is
%% currently available for selection.
-spec peer_state(state(), peer_id())
    -> {verified | unverified | undefined, boolean() | undefined}.
peer_state(#?ST{standby = Standby} = St, PeerId) ->
    case find_peer(St, PeerId) of
        undefined ->
            {undefined, undefined};
        Peer ->
            % A peer is unavailable while selected or on standby.
            Blocked = peer_is_selected(Peer) or maps:is_key(PeerId, Standby),
            {peer_state(Peer), not Blocked}
    end.
%% @doc Tells whether the given peer identifier is pooled as verified;
%% `undefined' for an unknown identifier.
-spec is_verified(state(), peer_id()) -> undefined | boolean().
is_verified(St, PeerId) ->
    case find_peer(St, PeerId) of
        #peer{} = Peer -> verified =:= peer_state(Peer);
        undefined -> undefined
    end.
%% @doc Tells whether the given peer identifier is pooled as unverified;
%% `undefined' for an unknown identifier.
-spec is_unverified(state(), peer_id()) -> undefined | boolean().
is_unverified(St, PeerId) ->
    case find_peer(St, PeerId) of
        #peer{} = Peer -> unverified =:= peer_state(Peer);
        undefined -> undefined
    end.
%% @doc Tells whether the given peer identifier is available for selection;
%% `undefined' for an unknown identifier.
-spec is_available(state(), peer_id()) -> undefined | boolean().
is_available(#?ST{standby = Standby} = St, PeerId) ->
    case find_peer(St, PeerId) of
        undefined ->
            undefined;
        Peer ->
            % A peer is unavailable while selected or on standby.
            Blocked = peer_is_selected(Peer) or maps:is_key(PeerId, Standby),
            not Blocked
    end.
%% @doc Adds a new peer or refreshes an existing one.
%%
%% A previously unknown peer is added to the verified pool when trusted,
%% and to the unverified pool otherwise.
%%
%% If the peer is already pooled but with a different address, the update is
%% ignored; this prevents the pool from being poisoned by nodes gossiping
%% wrong addresses for valid peers. A peer that rightfully changed its
%% address is re-added once the stale entry has gone through the standard
%% retry procedure and been removed — note that this could take quite
%% some time.
%%
%% For a peer already in the verified pool, only the last-update time and
%% the source address are refreshed. For a peer already in the unverified
%% pool that has not reached the maximum number of references, another
%% reference may be added as well.
%%
%% The opaque `Extra' term is returned by {@link random_select/4} together
%% with the peer identifier; callers can use it to store extra connection
%% information like protocol and port number. It is <b>not</b> replaced
%% when the peer already exists.
-spec update(state(), millitimestamp(), peer_id(),
             peer_addr(), peer_addr(), boolean(), extra())
    -> {verified | unverified | ignored, state()}.
update(St, Now, PeerId, PeerAddr, SourceAddr, IsTrusted, Extra) ->
    ?assertNotEqual(undefined, PeerId),
    ?assertNotEqual(undefined, PeerAddr),
    ?assertNotEqual(undefined, SourceAddr),
    case update_peer(St, Now, PeerId, PeerAddr, SourceAddr, IsTrusted, Extra) of
        ignored ->
            {ignored, St};
        {updated, Updated} ->
            AddResult =
                case IsTrusted of
                    true -> verified_maybe_add(Updated, Now, PeerId);
                    false -> unverified_maybe_add(Updated, Now, PeerId,
                                                  undefined)
                end,
            case AddResult of
                {ignored, Final} ->
                    % No pool accepted the peer; drop it entirely.
                    {ignored, del_peer(Final, PeerId)};
                Other ->
                    Other
            end
    end.
%% @doc Marks a peer as verified.
%%
%% A peer in the unverified pool is added to the verified pool and all its
%% references are removed from the unverified pool. If for some reason the
%% insertion in the verified pool is not possible, the peer stays in the
%% unverified pool.
-spec verify(state(), millitimestamp(), peer_id())
    -> {verified | ignored, state()}.
verify(St, Now, PeerId) ->
    verified_maybe_add(St, Now, PeerId).
%% @doc Returns a random subset of all the pooled peers.
%%
%% The result pairs each peer identifier with the extra data given the
%% <b>first</b> time it was added by {@link update/7}. An optional filter
%% function restricts the candidates.
-spec random_subset(state(), all | pos_integer(), filter_fun() | undefined)
    -> {[ext_peer()], state()}.
random_subset(#?ST{rand = RSt0, use_rand_offset = UseOffset,
                   lookup_all = AllLookup} = St, Size, ExtFilterFun) ->
    Filter = wrap_filter_fun(St, ExtFilterFun),
    {Ids, RSt1} = lookup_sample(AllLookup, RSt0, UseOffset, Size, Filter),
    {export_results(St, Ids), St#?ST{rand = RSt1}}.
%% @doc Selects a random peer among the available ones.
%%
%% The target determines which pool the peer is taken from:
%% <ul>
%% <li>`verified': only verified peers get selected.</li>
%% <li>`unverified': only unverified peers get selected.</li>
%% <li>`both': either pool may be used; which one the peer comes from
%%  depends on the configured probability option `select_verif_prob'.</li>
%% </ul>
%%
%% A peer is considered available when it is neither selected nor on
%% standby after a rejection; it becomes available again when
%% {@link release/3} is called or when its standby time has been exhausted
%% after a call to {@link reject/3}.
%%
%% An optional filter function further restricts the set of peers that can
%% be selected.
%%
%% When a peer is available in the targeted pools, the call returns its
%% identifier and the extra data given the <b>first</b> time it was added
%% by {@link update/7}.
%%
%% When no peer is readily available because all candidates are on standby,
%% the call returns the minimum delay after which one of them will get out
%% of standby.
%%
%% A returned peer is considered selected: it will <b>never</b> be
%% returned again until it is released with {@link release/3}, or marked as
%% rejected with {@link reject/3} and its standby time is exhausted.
-spec random_select(state(), millitimestamp(), select_target(),
                    filter_fun() | undefined)
    -> {selected, ext_peer(), state()}
     | {wait, milliseconds(), state()}
     | {unavailable, state()}.
random_select(St, Now, Target, FilterFun) ->
    select_peer(St, Now, Target, FilterFun).
%% @doc Explicitly marks a peer as selected.
%%
%% Intended for peers retrieved through other means than
%% {@link random_select/4}, e.g. a peer just added to the pool with
%% {@link update/7} and {@link verify/3}.
%%
%% Once marked, the peer will <b>never</b> be returned by
%% {@link random_select/4} again until it is released with
%% {@link release/3} or marked as rejected with {@link reject/3}.
%%
%% If the peer was on standby due to a rejection, it is removed from the
%% standby list, but its rejection counter is not reset.
-spec select(state(), millitimestamp(), peer_id()) -> state().
select(St, Now, PeerId) ->
    make_selected(St, Now, PeerId).
%% @doc Marks a peer as rejected.
%%
%% A rejected peer could not be contacted and should not be selected again
%% before some time has passed; how long it stays on standby depends on the
%% number of times it has been rejected.
%%
%% When the maximum number of rejections is reached, a verified peer is
%% downgraded to the unverified pool (resetting its rejection counter) and
%% an unverified peer is removed completely.
-spec reject(state(), millitimestamp(), peer_id()) -> state().
reject(St, Now, PeerId) ->
    reject_peer(St, Now, PeerId).
%% @doc Releases a peer that was previously selected/verified.
%%
%% The selected mark is cleared and the rejection counter is reset; after
%% this call the peer can be returned by {@link random_select/4} again.
-spec release(state(), millitimestamp(), peer_id()) -> state().
release(St, Now, PeerId) ->
    release_peer(St, Now, PeerId).
%% @doc Removes the given peer and all its references from the pool.
-spec delete(state(), peer_id()) -> state().
delete(St, PeerId) ->
    del_peer(St, PeerId).
%% @doc Returns the list of available peers.
%% The result could be very large; use only for debugging/testing.
-spec available(state(), both | verified | unverified) -> [ext_peer()].
available(#?ST{lookup_verif = Lookup} = St, verified) ->
    export_results(St, lookup_to_list(Lookup));
available(#?ST{lookup_unver = Lookup} = St, unverified) ->
    export_results(St, lookup_to_list(Lookup));
available(#?ST{lookup_verif = Verif, lookup_unver = Unver} = St, both) ->
    export_results(St, lookup_to_list(Verif) ++ lookup_to_list(Unver)).
%=== INTERNAL FUNCTIONS ========================================================
%% Looks up `Key' in a property list, falling back to `Default'.
%% Note: proplists semantics, so a bare atom entry means `true'.
-spec get_opt(atom(), options(), term()) -> term().
get_opt(Key, Options, Default) when is_list(Options) ->
    proplists:get_value(Key, Options, Default).
%% Generates a new pool secret (?SECRET_SIZE bytes) from a strong
%% random source.
-spec gen_secret() -> binary().
gen_secret() ->
    crypto:strong_rand_bytes(?SECRET_SIZE).
%% Derives a three-integer seed for the weak random generator from a
%% strong source of randomness.
-spec gen_seed() -> {integer(), integer(), integer()}.
gen_seed() ->
    Bytes = crypto:strong_rand_bytes(12),
    <<A:32, B:32, C:32>> = Bytes,
    {A, B, C}.
%% Generates a strongly random 16-bit unsigned integer, or `0' when strong
%% randomness is disabled.
-spec strong_randword(boolean()) -> non_neg_integer().
strong_randword(true) ->
    <<Word:16>> = crypto:strong_rand_bytes(2),
    Word;
strong_randword(false) ->
    0.
%% Draws a weak random integer `X' with `0 =< X < Max'.
-spec randint(rand_state(), non_neg_integer())
    -> {non_neg_integer(), rand_state()}.
randint(RSt0, Max) ->
    % rand:uniform_s/2 is 1-based, so shift the result down by one.
    {OneBased, RSt1} = rand:uniform_s(Max, RSt0),
    {OneBased - 1, RSt1}.
%% Generates a weak random integer `X' with `0 =< X < Max' following a
%% skewed distribution: a skew of `1.0' is uniform, and the larger the skew
%% the more the distribution leans toward small values.
-spec skewed_randint(rand_state(), non_neg_integer(), number())
    -> {non_neg_integer(), rand_state()}.
skewed_randint(RSt0, Max, Skew) ->
    {Uniform, RSt1} = rand:uniform_s(RSt0),
    Skewed = math:pow(Uniform, Skew),
    {floor(Max * Skewed), RSt1}.
%% Minimum of two values where the first may be `undefined' (in which case
%% the second value wins).
-spec safe_min(undefined | number(), number()) -> number().
safe_min(undefined, Value) -> Value;
safe_min(A, B) when A =< B -> A;
safe_min(_, B) -> B.
%% Returns the standby delay for the Nth rejection, taken from the backoff
%% table; when the count exceeds the table size the last entry is reused.
-spec rejection_delay([non_neg_integer()], pos_integer()) -> pos_integer().
rejection_delay(BackoffTable, RejectionCount) ->
    lists:nth(min(RejectionCount, length(BackoffTable)), BackoffTable).
%% Decides, with probability 1/2^RefCount, whether yet another reference
%% can be added to a pool.
-spec should_add_ref(rand_state(), pos_integer()) -> {boolean(), rand_state()}.
should_add_ref(RSt0, RefCount) ->
    Ceiling = floor(math:pow(2, RefCount)),
    {Draw, RSt1} = randint(RSt0, Ceiling),
    {Draw =:= 0, RSt1}.
%% Hashes a binary with SHA-1, interprets the 20-byte digest as a
%% little-endian 160-bit integer, and reduces it modulo `Modulo'.
-spec hash_modulo(binary(), pos_integer()) -> non_neg_integer().
hash_modulo(Bin, Modulo) ->
    Digest = crypto:hash(sha, Bin),
    <<Int:160/little-unsigned-integer>> = Digest,
    Int rem Modulo.
%% Returns a binary describing the given IP address: its four octets,
%% in order. Only supports IPv4 for now.
-spec address_descriptor(peer_addr()) -> binary().
address_descriptor({A, B, C, D}) -> <<A, B, C, D>>.
%--- STATE HANDLING FUNCTIONS --------------------------------------------------
-ifdef(TEST).
%% Test-only helper: number of unverified-pool references held for `PeerId'.
reference_count(St, PeerId) ->
    #peer{uidxs = Idxs} = get_peer(St, PeerId),
    length(Idxs).
-endif.
%% Looks up a peer record; returns `undefined' for an unknown identifier.
-spec find_peer(state(), peer_id()) -> undefined | peer().
find_peer(#?ST{peers = Peers}, PeerId) ->
    case maps:find(PeerId, Peers) of
        {ok, Peer} -> Peer;
        error -> undefined
    end.
%% Fetches a peer record by identifier; crashes if the id is not pooled.
-spec get_peer(state(), peer_id()) -> peer().
get_peer(#?ST{peers = PeerMap}, Id) ->
    #{Id := Found} = PeerMap,
    Found.
%% Replaces an existing peer record; crashes if the id is not pooled
%% (the `:=' map update requires the key to exist).
-spec set_peer(state(), peer_id(), peer()) -> state().
set_peer(#?ST{peers = Peers} = St, PeerId, Peer) ->
    St#?ST{peers = Peers#{PeerId := Peer}}.
%% Adds a new peer, or refreshes an existing one when both its address and
%% trusted flag match; only the update time and source address are changed.
-spec update_peer(state(), millitimestamp(), peer_id(), peer_addr(),
                  peer_addr(), boolean(), extra())
    -> ignored | {updated, state()}.
update_peer(St, Now, PeerId, PeerAddr, SourceAddr, IsTrusted, Extra) ->
    case find_peer(St, PeerId) of
        undefined ->
            #?ST{peers = Peers} = St,
            NewPeer0 = peer_new(PeerId, PeerAddr, SourceAddr, IsTrusted, Extra),
            NewPeer = NewPeer0#peer{update_time = Now},
            WithPeer = St#?ST{peers = Peers#{PeerId => NewPeer}},
            {updated, add_lookup_all(WithPeer, PeerId)};
        % Matching on the bound PeerAddr/IsTrusted restricts this clause to
        % peers whose address and trusted flag are unchanged.
        #peer{addr = PeerAddr, trusted = IsTrusted} = Existing ->
            Refreshed = Existing#peer{update_time = Now, source = SourceAddr},
            {updated, set_peer(St, PeerId, Refreshed)};
        _ ->
            % Known peer with a different address: ignore the update.
            ignored
    end.
%% Deletes a peer and all its references.
%%
%% The peer is removed from the standby set, both pools, and all three
%% randomized lookup tables before finally being dropped from the peer map.
-spec del_peer(state(), peer_id()) -> state().
del_peer(St, PeerId) ->
    St2 = standby_del(St, PeerId),
    St3 = verified_del(St2, PeerId),
    St4 = unverified_del(St3, PeerId),
    % Each pair is {position of a lookup field in the state record,
    % position of the matching lookup-index field in the peer record};
    % element/2 and setelement/3 operate on these record field positions.
    St5 = lists:foldl(fun({LookupRecField, PeerRecField}, S) ->
        #?ST{peers = Peers} = S,
        Lookup = element(LookupRecField, S),
        {Lookup2, Peers2} =
            peers_lookup_del(Peers, PeerId, Lookup, PeerRecField),
        S2 = setelement(LookupRecField, S, Lookup2),
        S2#?ST{peers = Peers2}
    end, St4, [
        {#?ST.lookup_all, #peer.lookup_all_idx},
        {#?ST.lookup_verif, #peer.lookup_verif_idx},
        {#?ST.lookup_unver, #peer.lookup_unver_idx}
    ]),
    #?ST{peers = Peers} = St5,
    % The peer must still exist, and must not be selected, at this point.
    ?assert(maps:is_key(PeerId, Peers)),
    ?assertEqual(false, (maps:get(PeerId, Peers))#peer.selected),
    St5#?ST{peers = maps:remove(PeerId, Peers)}.
%% Returns the order in which the pools should be tried when selecting a
%% peer; for `both', the order is drawn from `select_verif_prob'.
-spec select_order(state(), select_target()) -> {[select_fun()], state()}.
select_order(St, verified) ->
    {[fun verified_select/3], St};
select_order(St, unverified) ->
    {[fun unverified_select/3], St};
select_order(#?ST{rand = RSt0, select_verif_prob = Prob} = St, both) ->
    % Compare a draw in [0, 1000] against the probability scaled to 0..1000.
    Threshold = floor(Prob * 1000),
    {Draw, RSt1} = randint(RSt0, 1001),
    St1 = St#?ST{rand = RSt1},
    case Draw < Threshold of
        true -> {[fun verified_select/3, fun unverified_select/3], St1};
        false -> {[fun unverified_select/3, fun verified_select/3], St1}
    end.
%% Selects a peer from the pools, in the order dictated by the target,
%% applying the optional caller-supplied restrictions.
-spec select_peer(state(), millitimestamp(), select_target(),
                  filter_fun() | undefined)
    -> {unavailable, state()}
     | {selected, peer_id(), state()}
     | {wait, milliseconds(), state()}.
select_peer(St, Now, Target, ExtFilterFun) ->
    Filter = wrap_filter_fun(St, ExtFilterFun),
    {Selectors, St1} = select_order(St, Target),
    % Bring peers whose standby time has elapsed back first.
    St2 = standby_refresh(St1, Now),
    case select_available_peer(St2, Now, Selectors, Filter) of
        {selected, _, _} = Selected -> Selected;
        {unavailable, St3} ->
            select_standby_peer(St3, Now, Target, Filter)
    end.
%% Tries each selection function in turn until one yields a peer; the
%% returned peer is marked as selected.
-spec select_available_peer(state(), millitimestamp(), [select_fun()],
                            int_filter_fun() | undefined)
    -> {unavailable, state()} | {selected, peer_id(), state()}.
select_available_peer(St, _Now, [], _Filter) ->
    {unavailable, St};
select_available_peer(St, Now, [Selector | Others], Filter) ->
    case Selector(St, Now, Filter) of
        {unavailable, St1} ->
            select_available_peer(St1, Now, Others, Filter);
        {PeerId, St1} ->
            St2 = make_selected(St1, Now, PeerId),
            {selected, export_result(St2, PeerId), St2}
    end.
%% Returns the minimum time to wait until a standby peer matching the given
%% target and restrictions becomes available again.
-spec select_standby_peer(state(), millitimestamp(), select_target(),
                          int_filter_fun() | undefined)
    -> {unavailable, state()} | {wait, milliseconds(), state()}.
select_standby_peer(St, Now, Target, Filter) ->
    #?ST{peers = Peers, standby = Standby, standby_times = Times} = St,
    Earliest = maps:fold(fun(PeerId, _, Acc) ->
        #{PeerId := Peer} = Peers,
        PoolState = peer_state(Peer),
        ?assertNotEqual(undefined, PoolState),
        Accepted = (Filter =:= undefined) orelse Filter(PeerId),
        case Accepted and ((Target =:= both) or (PoolState =:= Target)) of
            false -> Acc;
            true ->
                Recovery = peer_standby_recovery_time(Peer, Times),
                safe_min(Acc, Recovery)
        end
    end, undefined, Standby),
    case Earliest of
        undefined -> {unavailable, St};
        Expiration -> {wait, Expiration - Now, St}
    end.
% Marks a peer as selected, taking it off standby and out of the
% availability lookup tables.
-spec make_selected(state(), millitimestamp(), peer_id()) -> state().
make_selected(St, Now, PeerId) ->
    Peer = get_peer(St, PeerId),
    % Selecting an already-selected peer is a programming error.
    ?assertEqual(false, Peer#peer.selected),
    Selected = peer_select(Peer, Now),
    St1 = set_peer(St, PeerId, Selected),
    St2 = standby_del(St1, PeerId),
    make_unavailable(St2, PeerId).
%% Rejects a selected peer.
%%
%% Increments the rejection counter and clears the selected mark. When the
%% peer has reached `max_rejections' it is removed (unverified) or
%% downgraded (verified); otherwise it is put on standby. Peers that are
%% unknown or not currently selected are left untouched.
-spec reject_peer(state(), millitimestamp(), peer_id()) -> state().
reject_peer(St, Now, PeerId) ->
    case find_peer(St, PeerId) of
        #peer{selected = true} = Peer ->
            #?ST{max_rejections = MaxRejections} = St,
            St2 = make_unavailable(St, PeerId),
            % `Peer' is already bound above, so this line is a match, not a
            % binding: it asserts make_unavailable/2 left the peer record
            % unchanged (a selected peer was already taken out of the
            % lookup tables by make_selected/3).
            Peer = get_peer(St2, PeerId),
            Peer2 = peer_reject(Peer, Now),
            Peer3 = peer_deselect(Peer2, Now),
            St3 = set_peer(St2, PeerId, Peer3),
            case {peer_has_expired(Peer3, MaxRejections), peer_state(Peer3)} of
                {true, unverified} ->
                    del_peer(St3, PeerId);
                {true, verified} ->
                    verified_downgrade(St3, Now, PeerId);
                {false, _} ->
                    standby_add(St3, PeerId)
            end;
        _ ->
            % Unknown or not-selected peer: nothing to do.
            St
    end.
%% Releases a selected peer: resets its rejection state, clears the
%% selected mark and makes it available for selection again. Peers that
%% are unknown or not selected are left untouched.
-spec release_peer(state(), millitimestamp(), peer_id()) -> state().
release_peer(St, Now, PeerId) ->
    case find_peer(St, PeerId) of
        #peer{selected = true} = Selected ->
            Reset = peer_reset(Selected, Now),
            Deselected = peer_deselect(Reset, Now),
            St1 = set_peer(St, PeerId, Deselected),
            make_available(St1, PeerId);
        _ ->
            St
    end.
%% Puts a peer on standby.
-spec standby_add(state(), peer_id()) -> state().
standby_add(#?ST{standby = Standby} = St, PeerId) ->
    St#?ST{standby = Standby#{PeerId => true}}.
%% Removes a peer from the standby set.
-spec standby_del(state(), peer_id()) -> state().
standby_del(#?ST{standby = Standby} = St, PeerId) ->
    St#?ST{standby = maps:remove(PeerId, Standby)}.
%% Checks peers on standby and makes them available again once their
%% standby time is exhausted.
-spec standby_refresh(state(), millitimestamp()) -> state().
standby_refresh(St0, Now) ->
    #?ST{standby = Standby, standby_times = StandbyTimes} = St0,
    lists:foldl(fun(PeerId, St) ->
        Peer = get_peer(St, PeerId),
        case peer_is_in_standby(Peer, StandbyTimes, Now) of
            true -> St;
            false ->
                % The peer record itself is not modified here, so there is
                % no need to write it back (the original code performed a
                % redundant set_peer/3 with the unchanged record); just
                % expose the peer again and drop it from the standby set.
                St2 = make_available(St, PeerId),
                standby_del(St2, PeerId)
        end
    end, St0, maps:keys(Standby)).
%% Makes a peer available for selection by inserting it into the lookup
%% table of its pool, unless it is already referenced there.
%% (The previous comment wrongly described this as making the peer
%% unavailable.)
-spec make_available(state(), peer_id()) -> state().
make_available(St, PeerId) ->
    Peer = get_peer(St, PeerId),
    case {peer_state(Peer), Peer} of
        {verified, #peer{lookup_verif_idx = undefined}} ->
            add_lookup_verif(St, PeerId);
        {unverified, #peer{lookup_unver_idx = undefined}} ->
            add_lookup_unver(St, PeerId);
        _ -> St
    end.
%% Makes a peer unavailable for selection by removing it from the lookup
%% table of its pool, if it is referenced there.
-spec make_unavailable(state(), peer_id()) -> state().
make_unavailable(St, PeerId) ->
    Peer = get_peer(St, PeerId),
    PoolState = peer_state(Peer),
    case Peer of
        #peer{lookup_verif_idx = Idx}
          when PoolState =:= verified, Idx =/= undefined ->
            del_lookup_verif(St, PeerId);
        #peer{lookup_unver_idx = Idx}
          when PoolState =:= unverified, Idx =/= undefined ->
            del_lookup_unver(St, PeerId);
        _ -> St
    end.
%% Inserts the peer into the all-peers lookup table.
-spec add_lookup_all(state(), peer_id()) -> state().
add_lookup_all(#?ST{peers = Peers0, rand = Rand0,
                    lookup_all = Lookup0} = St, PeerId) ->
    {Lookup, Rand, Peers} =
        peers_lookup_add(Peers0, Rand0, PeerId, Lookup0, #peer.lookup_all_idx),
    St#?ST{peers = Peers, rand = Rand, lookup_all = Lookup}.
%% Inserts the peer into the available-verified lookup table.
-spec add_lookup_verif(state(), peer_id()) -> state().
add_lookup_verif(#?ST{peers = Peers0, rand = Rand0,
                      lookup_verif = Lookup0} = St, PeerId) ->
    {Lookup, Rand, Peers} =
        peers_lookup_add(Peers0, Rand0, PeerId, Lookup0,
                         #peer.lookup_verif_idx),
    St#?ST{peers = Peers, rand = Rand, lookup_verif = Lookup}.
%% Inserts the peer into the available-unverified lookup table.
-spec add_lookup_unver(state(), peer_id()) -> state().
add_lookup_unver(#?ST{peers = Peers0, rand = Rand0,
                      lookup_unver = Lookup0} = St, PeerId) ->
    {Lookup, Rand, Peers} =
        peers_lookup_add(Peers0, Rand0, PeerId, Lookup0,
                         #peer.lookup_unver_idx),
    St#?ST{peers = Peers, rand = Rand, lookup_unver = Lookup}.
%% Removes the peer from the available-verified lookup table.
-spec del_lookup_verif(state(), peer_id()) -> state().
del_lookup_verif(#?ST{peers = Peers0, lookup_verif = Lookup0} = St, PeerId) ->
    {Lookup, Peers} =
        peers_lookup_del(Peers0, PeerId, Lookup0, #peer.lookup_verif_idx),
    St#?ST{peers = Peers, lookup_verif = Lookup}.
%% Removes the peer from the available-unverified lookup table.
-spec del_lookup_unver(state(), peer_id()) -> state().
del_lookup_unver(#?ST{peers = Peers0, lookup_unver = Lookup0} = St, PeerId) ->
    {Lookup, Peers} =
        peers_lookup_del(Peers0, PeerId, Lookup0, #peer.lookup_unver_idx),
    St#?ST{peers = Peers, lookup_unver = Lookup}.
%% Exports a list of peer identifiers to the external format.
%% Note: the output is in reverse order of the input list (preserving the
%% behaviour of the original fold-based implementation).
-spec export_results(state(), [peer_id()]) -> [ext_peer()].
export_results(St, PeerIds) ->
    [export_result(St, PeerId) || PeerId <- lists:reverse(PeerIds)].
%% Exports a peer identifier to the external format: the identifier paired
%% with the peer's extra data.
-spec export_result(state(), peer_id()) -> ext_peer().
export_result(St, PeerId) ->
    Peer = get_peer(St, PeerId),
    {PeerId, Peer#peer.extra}.
%% Wraps a caller filtering function into one taking only the peer
%% identifier. The closure captures the current peer map, so the result
%% must not be used after the list of peers is mutated.
-spec wrap_filter_fun(state(), filter_fun() | undefined)
    -> int_filter_fun() | undefined.
wrap_filter_fun(_St, undefined) -> undefined;
wrap_filter_fun(#?ST{peers = Peers}, FilterFun) ->
    fun(PeerId) ->
        #{PeerId := #peer{extra = Extra}} = Peers,
        FilterFun(PeerId, Extra)
    end.
%--- FUNCTIONS FOR BOTH VERIFIED AND UNVERIFIED POOLS --------------------------
%% Creates a generic bucket filtering function that always keeps the given
%% peer as well as selected and trusted peers, removes peers not updated
%% within the maximum lapse, and elects the rest for eviction.
-spec make_bucket_filtering_fun(state(), millitimestamp(),
                                peer_id() | undefined)
    -> bucket_filter_fun().
make_bucket_filtering_fun(#?ST{peers = Peers, max_update_lapse = MaxLapse},
                          Now, KeepPeerId) ->
    fun(PeerId) ->
        case PeerId of
            % KeepPeerId is bound, so this clause matches on equality.
            KeepPeerId -> keep;
            _ ->
                #{PeerId := Peer} = Peers,
                case Peer of
                    #peer{trusted = true} -> keep;
                    #peer{selected = true} -> keep;
                    #peer{update_time = T} when (Now - T) > MaxLapse -> remove;
                    _ -> evict
                end
        end
    end.
%--- VERIFIED POOL HANDLING FUNCTIONS ------------------------------------------

%% Returns the verified pool bucket index for a given address.
%% For the same peer address group, it will return at most `verif_group_shard'
%% different indexes.
-spec verified_bucket_index(state(), peer_addr()) -> non_neg_integer().
verified_bucket_index(St, PeerAddr) ->
    #?ST{
        secret = Secret,
        verif_pool = Pool,
        verif_group_shard = GroupShard
    } = St,
    #pool{bucket_count = BucketCount} = Pool,
    Group = address_group(PeerAddr),
    Desc = address_descriptor(PeerAddr),
    % Shard on the full address descriptor first, then mix the slot with the
    % address group to choose the final bucket.
    Slot = hash_modulo(<<Secret/binary, Desc/binary>>, GroupShard),
    hash_modulo(<<Secret/binary, Group/binary, Slot:8>>, BucketCount).
%% Adds a peer to the verified pool if required.
%% Returns the pool the peer ended up in, or `ignored' if no room was found.
-spec verified_maybe_add(state(), millitimestamp(), peer_id())
    -> {verified, state()} | {unverified, state()} | {ignored, state()}.
verified_maybe_add(St, Now, PeerId) ->
    Peer = get_peer(St, PeerId),
    case peer_state(Peer) of
        verified ->
            % Already verified.
            {verified, St};
        undefined ->
            % Not yet pooled.
            verified_add(St, Now, Peer);
        unverified ->
            % Currently unverified
            case verified_add(St, Now, Peer) of
                {verified, St2} ->
                    % Succeed to add to the verified pool;
                    % remove it from unverified.
                    % Note the peer is briefly in BOTH pools here;
                    % unverified_del/2 is written to tolerate that.
                    St3 = unverified_del(St2, PeerId),
                    {verified, St3};
                {ignored, St2} ->
                    % Failed to add to verified pool;
                    % keep the peer as unverified.
                    {unverified, St2}
            end
    end.
%% Adds given peer to the verified pool; doesn't check if it is already there.
%% The caller must guarantee the peer has no verified bucket index yet.
-spec verified_add(state(), millitimestamp(), peer())
    -> {verified, state()} | {ignored, state()}.
verified_add(St, Now, Peer) ->
    ?assertEqual(undefined, Peer#peer.vidx),
    #peer{id = PeerId, addr = PeerAddr} = Peer,
    BucketIdx = verified_bucket_index(St, PeerAddr),
    case verified_make_space_for(St, Now, BucketIdx, PeerId) of
        {no_space, St2} ->
            % Failed to allocate space in the pool bucket.
            {ignored, St2};
        {free_space, St2} ->
            % There is space to add the peer to the bucket.
            #?ST{verif_pool = Pool} = St2,
            Pool2 = pool_add(Pool, BucketIdx, PeerId),
            Pool3 = pool_update_size(Pool2, 1),
            St3 = St2#?ST{verif_pool = Pool3},
            % Peer may have been changed by verified_make_space_for/4.
            Peer2 = get_peer(St3, PeerId),
            Peer3 = Peer2#peer{vidx = BucketIdx},
            St4 = set_peer(St3, PeerId, Peer3),
            % Only available peers enter the selection lookup table.
            case is_available(St4, PeerId) of
                false ->
                    {verified, St4};
                true ->
                    {verified, add_lookup_verif(St4, PeerId)}
            end
    end.
%% Deletes a peer from the verified pool.
%% It ONLY deletes the peer from the verified pool and verified lookup
%% table.
-spec verified_del(state(), peer_id()) -> state().
verified_del(St, PeerId) ->
    case get_peer(St, PeerId) of
        % The peer is not in the verified pool; nothing to do.
        #peer{vidx = undefined} -> St;
        #peer{vidx = VIdx} = Peer ->
            #?ST{verif_pool = Pool} = St,
            Pool2 = pool_del(Pool, VIdx, PeerId),
            Pool3 = pool_update_size(Pool2, -1),
            St2 = St#?ST{verif_pool = Pool3},
            % Clear the bucket index BEFORE touching the lookup table so the
            % peer record stays consistent with the pool.
            Peer2 = Peer#peer{vidx = undefined},
            St3 = set_peer(St2, PeerId, Peer2),
            del_lookup_verif(St3, PeerId)
    end.
%% Downgrades a verified peer to unverified, resetting its rejection status.
%% If the peer cannot be added to the unverified pool it is deleted
%% COMPLETELY.
-spec verified_downgrade(state(), millitimestamp(), peer_id()) -> state().
verified_downgrade(St, Now, PeerId) ->
    Peer = get_peer(St, PeerId),
    Peer2 = peer_reset(Peer, Now),
    St2 = set_peer(St, PeerId, Peer2),
    St3 = verified_del(St2, PeerId),
    case unverified_maybe_add(St3, Now, PeerId, undefined) of
        {unverified, St4} -> St4;
        {ignored, St4} ->
            % Failed to add it to unverified pool; removing peer completely.
            del_peer(St4, PeerId)
    end.
%% Tries to free space in a verified pool bucket for given peer.
%% The peer is required because we don't want it evicted from the unverified
%% pool by a downgraded peer.
-spec verified_make_space_for(state(), millitimestamp(),
                              non_neg_integer(), peer_id())
    -> {no_space, state()} | {free_space, state()}.
verified_make_space_for(St, Now, BucketIdx, PeerId) ->
    % When evicting we want to skew the random selection in favor of the peers
    % selected the longest time ago, but we never evict selected peers.
    #?ST{rand = RSt, peers = Peers, verif_pool = Pool} = St,
    BucketFilterFun = make_bucket_filtering_fun(St, Now, undefined),
    % Never-selected peers get key 0 so they sort first and are the most
    % likely eviction candidates under the skewed random selection.
    SortKeyFun = fun(I) ->
        #{I := Peer} = Peers,
        case Peer of
            #peer{select_time = undefined} -> 0;
            #peer{select_time = Time} -> Time
        end
    end,
    case pool_make_space(Pool, RSt, BucketIdx,
                         BucketFilterFun, SortKeyFun) of
        no_space ->
            % Nothing could be removed or evicted.
            {no_space, St};
        {free_space, [], undefined, RSt2, Pool2} ->
            % The bucket already had room; nothing was removed or evicted.
            St2 = St#?ST{rand = RSt2, verif_pool = Pool2},
            {free_space, St2};
        {free_space, RemovedIds, undefined, RSt2, Pool2} ->
            % Outdated peers were removed; delete them completely.
            Pool3 = pool_update_size(Pool2, -length(RemovedIds)),
            St2 = St#?ST{rand = RSt2, verif_pool = Pool3},
            St3 = lists:foldl(fun(I, S) ->
                Peer = get_peer(S, I),
                Peer2 = Peer#peer{vidx = undefined},
                S2 = set_peer(S, I, Peer2),
                del_peer(S2, I)
            end, St2, RemovedIds),
            {free_space, St3};
        {free_space, [], EvictedId, RSt2, Pool2} ->
            % Try downgrading the evicted peer, if any;
            % delete it if downgrade fail.
            Pool3 = pool_update_size(Pool2, -1),
            St2 = St#?ST{rand = RSt2, verif_pool = Pool3},
            Peer = get_peer(St2, EvictedId),
            Peer2 = Peer#peer{vidx = undefined},
            St3 = set_peer(St2, EvictedId, Peer2),
            St4 = del_lookup_verif(St3, EvictedId),
            % `PeerId' is protected so the peer we are making room for
            % cannot itself be displaced from the unverified pool.
            case unverified_maybe_add(St4, Now, EvictedId, PeerId) of
                {unverified, St5} ->
                    {free_space, St5};
                {ignored, St5} ->
                    {free_space, del_peer(St5, EvictedId)}
            end
    end.
%% Selects a random available peer from the verified pool;
%% returns `unavailable' when nothing matches.
-spec verified_select(state(), millitimestamp(), int_filter_fun() | undefined)
    -> {unavailable, state()} | {peer_id(), state()}.
verified_select(St, _Now, FilterFun) ->
    #?ST{rand = RSt, use_rand_offset = UseOffset, lookup_verif = Table} = St,
    {Result, RSt2} = lookup_select(Table, RSt, UseOffset, FilterFun),
    {Result, St#?ST{rand = RSt2}}.
%--- UNVERIFIED POOL HANDLING FUNCTIONS ----------------------------------------

%% Returns the unverified pool bucket index for given source and peer address.
%% For the same source address group, it will return at most
%% `verif_source_shard' different indexes.
%% For the same source and peer address group, it will return at most
%% `verif_group_shard' different indexes.
-spec unverified_bucket_index(state(), peer_addr(), peer_addr())
    -> non_neg_integer().
unverified_bucket_index(St, SourceAddr, PeerAddr) ->
    #?ST{
        secret = Secret,
        unver_pool = Pool,
        unver_source_shard = SourceShard,
        unver_group_shard = GroupShard
    } = St,
    #pool{bucket_count = BucketCount} = Pool,
    SourceGroup = address_group(SourceAddr),
    PeerGroup = address_group(PeerAddr),
    PeerDesc = address_descriptor(PeerAddr),
    % NOTE(review): the source slot is hashed from the PEER group, not the
    % source group -- presumably intentional (shards peers of a group for a
    % fixed source); confirm against the pool design document.
    SourceSlot = hash_modulo(<<Secret/binary, PeerGroup/binary>>, SourceShard),
    GroupSlot = hash_modulo(<<Secret/binary, PeerDesc/binary>>, GroupShard),
    hash_modulo(<<Secret/binary, SourceGroup/binary,
                  SourceSlot:8, GroupSlot:8>>, BucketCount).
%% Adds a peer to the unverified pool if required.
%% A peer identifier that MUST NOT be removed can be specified;
%% this is required when a peer is upgraded while still being kept in
%% the unverified pool.
-spec unverified_maybe_add(state(), millitimestamp(), peer_id(),
                           peer_id() | undefined)
    -> {verified, state()} | {unverified, state()} | {ignored, state()}.
unverified_maybe_add(St, Now, PeerId, KeepPeerId) ->
    Peer = get_peer(St, PeerId),
    case peer_state(Peer) of
        verified ->
            % Already verified; nothing to do.
            {verified, St};
        undefined ->
            % Not pooled yet; add a first bucket reference.
            unverified_add(St, Now, Peer, KeepPeerId);
        unverified ->
            % Already unverified; randomly decide whether an extra bucket
            % reference should be added, up to the pool's maximum.
            #?ST{rand = RSt, unver_pool = Pool} = St,
            #peer{uidxs = Idxs} = Peer,
            case pool_should_add_ref(Pool, RSt, Idxs) of
                {false, RSt2} ->
                    {unverified, St#?ST{rand = RSt2}};
                {true, RSt2} ->
                    St2 = St#?ST{rand = RSt2},
                    St3 = unverified_add_reference(St2, Now, Peer, KeepPeerId),
                    {unverified, St3}
            end
    end.
%% Adds a peer to the unverified pool without any check.
%% The caller must guarantee the peer has no unverified bucket reference yet.
-spec unverified_add(state(), millitimestamp(), peer(), peer_id() | undefined)
    -> {unverified, state()} | {ignored, state()}.
unverified_add(St, Now, Peer, KeepPeerId) ->
    #peer{id = PeerId, addr = PeerAddr, source = SourceAddr} = Peer,
    ?assertEqual([], Peer#peer.uidxs),
    BucketIdx = unverified_bucket_index(St, SourceAddr, PeerAddr),
    case unverified_make_space(St, Now, BucketIdx, KeepPeerId) of
        {no_space, St2} ->
            {ignored, St2};
        {free_space, St2} ->
            #?ST{unver_pool = Pool} = St2,
            Pool2 = pool_add(Pool, BucketIdx, PeerId),
            Pool3 = pool_update_size(Pool2, 1),
            St3 = St2#?ST{unver_pool = Pool3},
            % Peer may have been changed by unverified_make_space/4.
            Peer2 = get_peer(St3, PeerId),
            Peer3 = Peer2#peer{uidxs = [BucketIdx]},
            St4 = set_peer(St3, PeerId, Peer3),
            % Only available peers enter the selection lookup table.
            case is_available(St4, PeerId) of
                true ->
                    {unverified, add_lookup_unver(St4, PeerId)};
                false ->
                    {unverified, St4}
            end
    end.
%% Adds another reference to a peer already in the unverified pool.
%% If the target bucket already references the peer, or no space can be
%% freed in it, the state is returned unchanged.
-spec unverified_add_reference(state(), millitimestamp(), peer(),
                               peer_id() | undefined)
    -> state().
unverified_add_reference(St, Now, Peer, KeepPeerId) ->
    #peer{
        id = PeerId,
        addr = PeerAddr,
        source = SourceAddr,
        uidxs = Idxs
    } = Peer,
    ?assertNotEqual([], Idxs),
    BucketIdx = unverified_bucket_index(St, SourceAddr, PeerAddr),
    case lists:member(BucketIdx, Idxs) of
        true -> St;
        false ->
            case unverified_make_space(St, Now, BucketIdx, KeepPeerId) of
                {no_space, St2} -> St2;
                {free_space, St2} ->
                    #?ST{unver_pool = Pool} = St2,
                    Pool2 = pool_add(Pool, BucketIdx, PeerId),
                    % Pool size is deliberately NOT incremented here: it
                    % counts peers, not bucket references.
                    St3 = St2#?ST{unver_pool = Pool2},
                    % Peer may have been changed by unverified_make_space/4.
                    Peer2 = get_peer(St3, PeerId),
                    Peer3 = Peer2#peer{uidxs = [BucketIdx | Idxs]},
                    set_peer(St3, PeerId, Peer3)
            end
    end.
%% Deletes all the references to a peer from the unverified pool.
%% MUST work even when peers are in both pools because peers are first added
%% to the verified pool and THEN deleted from the unverified pool.
%% It ONLY deletes the peer from the unverified pool and unverified lookup
%% table.
-spec unverified_del(state(), peer_id()) -> state().
unverified_del(St, PeerId) ->
    case get_peer(St, PeerId) of
        % No unverified bucket references; nothing to do.
        #peer{uidxs = []} -> St;
        #peer{uidxs = BucketIdxs} = Peer->
            #?ST{unver_pool = Pool} = St,
            Pool2 = lists:foldl(fun(I, P) ->
                pool_del(P, I, PeerId)
            end, Pool, BucketIdxs),
            % Size is decremented once regardless of the number of bucket
            % references: it counts peers, not references.
            Pool3 = pool_update_size(Pool2, -1),
            St2 = St#?ST{unver_pool = Pool3},
            Peer2 = Peer#peer{uidxs = []},
            St3 = set_peer(St2, PeerId, Peer2),
            del_lookup_unver(St3, PeerId)
    end.
%% Tries to free space in an unverified pool bucket.
%% If not `undefined', the specified peer identifier will never be removed
%% or evicted.
-spec unverified_make_space(state(), millitimestamp(), non_neg_integer(),
                            peer_id() | undefined)
    -> {no_space, state()} | {free_space, state()}.
unverified_make_space(St, Now, BucketIdx, KeepPeerId) ->
    % When evicting we want to skew the random selection in favor of the peers
    % updated the longest time ago, but we never evict selected peers.
    #?ST{rand = RSt, peers = Peers, unver_pool = Pool} = St,
    BucketFilterFun = make_bucket_filtering_fun(St, Now, KeepPeerId),
    SortKeyFun = fun(PeerId) ->
        #{PeerId := Peer} = Peers,
        Peer#peer.update_time
    end,
    case pool_make_space(Pool, RSt, BucketIdx,
                         BucketFilterFun, SortKeyFun) of
        no_space -> {no_space, St};
        {free_space, RemovedIds, EvictedId, RSt2, Pool2} ->
            St2 = St#?ST{rand = RSt2, unver_pool = Pool2},
            % `EvictedId' may be `undefined' (nothing was evicted);
            % the first fold clause skips it in that case.
            St3 = lists:foldl(fun
                (undefined, S) -> S;
                (PeerId, S) ->
                    unverified_ref_deleted(S, PeerId, BucketIdx)
            end, St2, [EvictedId | RemovedIds]),
            {free_space, St3}
    end.
%% Acts on a peer reference being removed from the given unverified pool bucket.
%% If it is the last reference, the pool size is decremented and the peer is
%% COMPLETELY removed.
-spec unverified_ref_deleted(state(), peer_id(), non_neg_integer()) -> state().
unverified_ref_deleted(St, PeerId, BucketIdx) ->
    case get_peer(St, PeerId) of
        #peer{uidxs = [BucketIdx]} = Peer ->
            % Last reference is removed
            #?ST{unver_pool = Pool} = St,
            Pool2 = pool_update_size(Pool, -1),
            Peer2 = Peer#peer{uidxs = []},
            St2 = St#?ST{unver_pool = Pool2},
            St3 = set_peer(St2, PeerId, Peer2),
            del_peer(St3, PeerId);
        #peer{uidxs = RefIdxs} = Peer ->
            % Peer is still referenced in other buckets
            ?assert(lists:member(BucketIdx, RefIdxs)),
            RefIdxs2 = lists:delete(BucketIdx, RefIdxs),
            ?assertNotMatch([], RefIdxs2),
            Peer2 = Peer#peer{uidxs = RefIdxs2},
            set_peer(St, PeerId, Peer2)
    end.
%% Selects a random available peer from the unverified pool;
%% returns `unavailable' when nothing matches.
-spec unverified_select(state(), millitimestamp(), int_filter_fun())
    -> {unavailable, state()} | {peer_id(), state()}.
unverified_select(St, _Now, FilterFun) ->
    #?ST{rand = RSt, use_rand_offset = UseOffset, lookup_unver = Table} = St,
    {Result, RSt2} = lookup_select(Table, RSt, UseOffset, FilterFun),
    {Result, St#?ST{rand = RSt2}}.
%--- PEER HANDLING FUNCTIONS ---------------------------------------------------

%% Creates a new peer record; all status fields keep their record defaults.
-spec peer_new(peer_id(), peer_addr(), peer_addr(), boolean(), extra())
    -> peer().
peer_new(PeerId, PeerAddr, SourceAddr, IsTrusted, Extra) ->
    #peer{id = PeerId,
          addr = PeerAddr,
          source = SourceAddr,
          trusted = IsTrusted,
          extra = Extra}.
%% Returns if the given peer is verified or unverified;
%% does extra exhaustive check for sanity, and thus should not be called
%% when the peer is in both verified and unverified pool (when upgrading):
%% a peer with both a verified index and unverified references matches no
%% clause and crashes with `function_clause' on purpose.
-spec peer_state(peer()) -> verified | unverified | undefined.
peer_state(#peer{vidx = undefined, uidxs = []}) -> undefined;
peer_state(#peer{vidx = undefined, uidxs = [_|_]}) -> unverified;
peer_state(#peer{vidx = _, uidxs = []}) -> verified.
%% Returns whether the given peer is currently selected.
%% (Spec added for consistency with the other peer_* accessors.)
-spec peer_is_selected(peer()) -> boolean().
peer_is_selected(#peer{selected = Selected}) -> Selected.
%% Increments the peer's rejection counter and records the rejection time.
-spec peer_reject(peer(), millitimestamp()) -> peer().
peer_reject(#peer{rejected = Rejected} = Peer, Now) ->
    Peer#peer{rejected = Rejected + 1, reject_time = Now}.
%% Resets the peer's rejection status (counter and timestamp).
-spec peer_reset(peer(), millitimestamp()) -> peer().
peer_reset(Peer, _Now) ->
    Peer#peer{rejected = 0, reject_time = undefined}.
%% Makes the peer selected and records the selection time.
-spec peer_select(peer(), millitimestamp()) -> peer().
peer_select(Peer, Now) ->
    Peer#peer{selected = true, select_time = Now}.
%% Makes the peer deselected; the last selection time is kept unchanged.
-spec peer_deselect(peer(), millitimestamp()) -> peer().
peer_deselect(Peer, _Now) ->
    Peer#peer{selected = false}.
%% Gives the time at which the peer should get out of standby:
%% the last rejection time plus the backoff delay for the rejection count.
-spec peer_standby_recovery_time(peer(), [non_neg_integer()])
    -> millitimestamp().
peer_standby_recovery_time(#peer{rejected = Count, reject_time = Time},
                           BackoffTable) ->
    Time + rejection_delay(BackoffTable, Count).
%% Returns if a peer is currently in standby, i.e. its recovery time
%% has not been reached yet.
-spec peer_is_in_standby(peer(), [non_neg_integer()], millitimestamp())
    -> boolean().
peer_is_in_standby(Peer, BackoffTable, Now) ->
    peer_standby_recovery_time(Peer, BackoffTable) > Now.
%% Returns if the given peer reached its rejection limit.
%% Trusted peers never expire; other peers expire only when STRICTLY more
%% than `MaxRejections' rejections have been recorded.
-spec peer_has_expired(peer(), pos_integer()) -> boolean().
peer_has_expired(#peer{trusted = true}, _MaxRejections) -> false;
peer_has_expired(#peer{rejected = Rejections}, MaxRejections) ->
    Rejections > MaxRejections.
%% Adds a peer to a lookup table, handling other peers being moved.
%% The peer's lookup index is stored in the record field given by `RecField';
%% if `lookup_add/3' relocated an existing entry to make room, that peer's
%% index field is updated as well.
-spec peers_lookup_add(peer_map(), rand_state(), peer_id(),
                       lookup(), pos_integer())
    -> {lookup(), rand_state(), peer_map()}.
peers_lookup_add(Peers, RSt, PeerId, Lookup, RecField) ->
    #{PeerId := Peer} = Peers,
    ?assertEqual(undefined, element(RecField, Peer)),
    case lookup_add(Lookup, RSt, PeerId) of
        {Idx, undefined, RSt2, Lookup2} ->
            Peer2 = setelement(RecField, Peer, Idx),
            Peers2 = Peers#{PeerId := Peer2},
            {Lookup2, RSt2, Peers2};
        {Idx, {MovedPeerIdx, MovedPeerId}, RSt2, Lookup2} ->
            % Another peer was moved to a new index; keep its record in sync.
            #{MovedPeerId := MovedPeer} = Peers,
            Peer2 = setelement(RecField, Peer, Idx),
            MovedPeer2 = setelement(RecField, MovedPeer, MovedPeerIdx),
            Peers2 = Peers#{PeerId := Peer2, MovedPeerId := MovedPeer2},
            {Lookup2, RSt2, Peers2}
    end.
%% Removes a peer from a lookup table, handling other peers being moved.
%% The peer's index field (given by `RecField') is cleared; if `lookup_del/2'
%% moved another entry into the freed slot, that peer's index field is
%% updated accordingly.
-spec peers_lookup_del(peer_map(), peer_id(), lookup(), pos_integer())
    -> {lookup(), peer_map()}.
peers_lookup_del(Peers, PeerId, Lookup, RecField) ->
    #{PeerId := Peer} = Peers,
    case element(RecField, Peer) of
        undefined ->
            % The peer is not in this lookup table; nothing to do.
            {Lookup, Peers};
        Idx ->
            ?assertEqual(PeerId, lookup_get(Lookup, Idx)),
            case lookup_del(Lookup, Idx) of
                {undefined, Lookup2} ->
                    Peer2 = setelement(RecField, Peer, undefined),
                    Peers2 = Peers#{PeerId := Peer2},
                    {Lookup2, Peers2};
                {{MovedPeerIdx, MovedPeerId}, Lookup2} ->
                    % Another peer was moved; keep its record in sync.
                    #{MovedPeerId := MovedPeer} = Peers,
                    Peer2 = setelement(RecField, Peer, undefined),
                    MovedPeer2 = setelement(RecField, MovedPeer, MovedPeerIdx),
                    Peers2 = Peers#{PeerId := Peer2, MovedPeerId := MovedPeer2},
                    {Lookup2, Peers2}
            end
    end.
%--- GENERIC POOL HANDLING FUNCTIONS -------------------------------------------

-ifdef(TEST).
%% Returns the number of entries in the given pool bucket (test helper).
pool_bucket_size(Pool, BucketIdx) ->
    ?assert(BucketIdx < Pool#pool.bucket_count),
    #pool{buckets = Buckets} = Pool,
    Bucket = array:get(BucketIdx, Buckets),
    length(Bucket).
-endif.
%% Creates a new empty pool record with the given geometry:
%% `Count' buckets of at most `Size' entries each, at most `MaxRefs'
%% references per peer, and `EvictSkew' as the eviction skew factor.
-spec pool_new(pos_integer(), pos_integer(), pos_integer(), number()) -> pool().
pool_new(Count, Size, MaxRefs, EvictSkew) ->
    EmptyBuckets = array:new(Count, [{default, []}]),
    #pool{
        size = 0,
        bucket_count = Count,
        bucket_size = Size,
        max_refs = MaxRefs,
        skew = EvictSkew,
        buckets = EmptyBuckets
    }.
%% Returns the number of peers in a pool, as maintained by
%% pool_update_size/2 (bucket references are not counted individually).
-spec pool_size(pool()) -> non_neg_integer().
pool_size(#pool{size = Size}) -> Size.
%% Adds the given (possibly negative) delta to the pool size;
%% asserts the size can never become negative.
-spec pool_update_size(pool(), integer()) -> pool().
pool_update_size(#pool{size = Size} = Pool, Diff) ->
    NewSize = Size + Diff,
    ?assert(NewSize >= 0),
    Pool#pool{size = NewSize}.
%% Adds a value to the given pool bucket; asserts (in TEST builds) that the
%% bucket exists, is not full, and does not already contain the value.
%% It doesn't increment the pool size, pool_update_size/2 should be called.
-spec pool_add(pool(), non_neg_integer(), term()) -> pool().
pool_add(#pool{buckets = Buckets} = Pool, BucketIdx, Value) ->
    ?assert(BucketIdx < Pool#pool.bucket_count),
    Bucket = array:get(BucketIdx, Buckets),
    ?assert(length(Bucket) < Pool#pool.bucket_size),
    ?assertNot(lists:member(Value, Bucket)),
    Pool#pool{buckets = array:set(BucketIdx, [Value | Bucket], Buckets)}.
%% Removes a value from the given pool bucket; asserts (in TEST builds) that
%% the bucket exists and actually contains the value.
%% It doesn't decrement the pool size, pool_update_size/2 should be called.
-spec pool_del(pool(), non_neg_integer(), term()) -> pool().
pool_del(#pool{buckets = Buckets} = Pool, BucketIdx, Value) ->
    ?assert(BucketIdx < Pool#pool.bucket_count),
    Bucket = array:get(BucketIdx, Buckets),
    ?assert(lists:member(Value, Bucket)),
    Bucket2 = lists:delete(Value, Bucket),
    Pool#pool{buckets = array:set(BucketIdx, Bucket2, Buckets)}.
%% Tells if, given the pool configuration and the current references, the
%% caller should try to add another reference to the pool; below the maximum
%% reference count the decision is delegated to the random helper.
-spec pool_should_add_ref(pool(), rand_state(), [non_neg_integer()])
    -> {boolean(), rand_state()}.
pool_should_add_ref(#pool{max_refs = MaxRefCount}, RSt, RefIdxs) ->
    case length(RefIdxs) of
        RefCount when RefCount < MaxRefCount ->
            should_add_ref(RSt, RefCount);
        _ ->
            {false, RSt}
    end.
%% Makes space in the pool to add a new value.
%% The filter function can either mark entries to keep them, remove them
%% or elect them for eviction. The sort key function is used to order the
%% buckets before selecting a random entry for eviction with the
%% pool configured skew toward the entries with the smallest key.
%% Returns `no_space' or `{free_space, RemovedValues, EvictedValue | undefined,
%% RandState, Pool}'.
-spec pool_make_space(pool(), rand_state(), non_neg_integer(),
                      bucket_filter_fun(), bucket_sort_key_fun())
    -> {free_space, [peer_id()], peer_id() | undefined, rand_state(), pool()}
     | no_space.
pool_make_space(Pool, RSt, BucketIdx, FilterFun, SortKeyFun) ->
    ?assert(BucketIdx < Pool#pool.bucket_count),
    #pool{buckets = Buckets, bucket_size = MaxBucketSize, skew = Skew} = Pool,
    Bucket = array:get(BucketIdx, Buckets),
    BucketSize = length(Bucket),
    case BucketSize < MaxBucketSize of
        true ->
            % The bucket is not full yet; nothing needs to be done.
            {free_space, [], undefined, RSt, Pool};
        false ->
            {Bucket2, Removed, KeyedEvictable, EvictableSize}
                = bucket_prepare(Bucket, FilterFun, SortKeyFun),
            case {Removed, EvictableSize} of
                {[_|_], _} ->
                    % If some entry are removed, there is no need for eviction.
                    Buckets2 = array:set(BucketIdx, Bucket2, Buckets),
                    Pool2 = Pool#pool{buckets = Buckets2},
                    {free_space, Removed, undefined, RSt, Pool2};
                {[], 0} ->
                    % Nothing removed and nothing to evict.
                    no_space;
                _ ->
                    % We need to evict an entry.
                    % Candidates are sorted by key so the skewed random index
                    % favors the entries with the smallest keys.
                    SortedEvictable = lists:keysort(1, KeyedEvictable),
                    {EvictedIdx, RSt2} =
                        skewed_randint(RSt, EvictableSize, Skew),
                    {_, EvictedValue} =
                        lists:nth(EvictedIdx + 1, SortedEvictable),
                    Bucket3 = lists:delete(EvictedValue, Bucket2),
                    Buckets2 = array:set(BucketIdx, Bucket3, Buckets),
                    Pool2 = Pool#pool{buckets = Buckets2},
                    {free_space, [], EvictedValue, RSt2, Pool2}
            end
    end.
%% Prepares a bucket for freeing space.
%% Returns the new bucket, the removed entries and the entries elected
%% for eviction keyed for sorting.
%% Note: kept entries are accumulated by prepending, so the returned bucket
%% is in reverse traversal order (bucket order appears not to be relied on
%% elsewhere -- confirm before depending on it).
-spec bucket_prepare([peer_id()], bucket_filter_fun(), bucket_sort_key_fun())
    -> {[peer_id()], [peer_id()], [{term(), peer_id()}], non_neg_integer()}.
bucket_prepare(Bucket, FilterFun, SortKeyFun) ->
    bucket_prepare(Bucket, FilterFun, SortKeyFun, [], [], [], 0).

%% Accumulator loop: BAcc = kept entries, RAcc = removed entries,
%% EAcc = {SortKey, Value} eviction candidates, ECount = length(EAcc).
bucket_prepare([], _FFun, _SortKeyFun, BAcc, RAcc, EAcc, ECount) ->
    {BAcc, RAcc, EAcc, ECount};
bucket_prepare([Val | Rest], FFun, KFun, BAcc, RAcc, EAcc, ECount) ->
    case FFun(Val) of
        keep ->
            bucket_prepare(Rest, FFun, KFun, [Val | BAcc], RAcc, EAcc, ECount);
        remove ->
            bucket_prepare(Rest, FFun, KFun, BAcc, [Val | RAcc], EAcc, ECount);
        evict ->
            % Eviction candidates stay in the bucket until one is evicted.
            BAcc2 = [Val | BAcc],
            EAcc2 = [{KFun(Val), Val} | EAcc],
            bucket_prepare(Rest, FFun, KFun, BAcc2, RAcc, EAcc2, ECount + 1)
    end.
%--- LOOKUP HANDLING FUNCTIONS -------------------------------------------------

-ifdef(TEST).
%% Returns the capacity of the lookup's underlying array (test helper).
lookup_internal_size(#lookup{array = Array}) -> array:size(Array).

%% Returns the number of unused slots in the underlying array (test helper).
lookup_internal_free(#lookup{size = Size, array = Array}) ->
    array:size(Array) - Size.
-endif.
%% Creates a new, empty lookup data structure with the initial array capacity.
-spec lookup_new() -> lookup().
lookup_new() ->
    #lookup{size = 0, array = array:new(?LOOKUP_START_SIZE)}.
%% Converts a lookup table to a list of its entries,
%% in internal array order (skipping unset slots).
-spec lookup_to_list(lookup()) -> [peer_id()].
lookup_to_list(#lookup{array = Array}) ->
    array:sparse_to_list(Array).
%% Returns the number of elements in a lookup table
%% (not the capacity of the underlying array).
-spec lookup_size(lookup()) -> non_neg_integer().
lookup_size(#lookup{size = Size}) -> Size.
%% Get a value from the lookup table.
%% The guard restricts the index to the used part of the array;
%% out-of-range indexes crash with `function_clause'.
-spec lookup_get(lookup(), non_neg_integer()) -> peer_id().
lookup_get(#lookup{size = Size, array = Array}, Idx)
  when Idx < Size ->
    array:get(Idx, Array).
%% Swaps the values stored at the two given indexes and returns both values
%% (in their pre-swap order) along with the updated lookup table.
-spec lookup_swap(lookup(), non_neg_integer(), non_neg_integer())
    -> {peer_id(), peer_id(), lookup()}.
lookup_swap(#lookup{size = Size, array = Array} = Lookup, IdxA, IdxB)
  when IdxA < Size, IdxB < Size ->
    ValA = array:get(IdxA, Array),
    ValB = array:get(IdxB, Array),
    Swapped = array:set(IdxB, ValA, array:set(IdxA, ValB, Array)),
    {ValA, ValB, Lookup#lookup{array = Swapped}}.
%% Appends a value at the end of the lookup table, growing the underlying
%% array first when it is full; returns the index of the new value.
-spec lookup_append(lookup(), peer_id()) -> {non_neg_integer(), lookup()}.
lookup_append(#lookup{size = Size, array = Array} = Lookup, Value) ->
    ArraySize = array:size(Array),
    Array2 =
        case ArraySize > Size of
            true ->
                % There is a free slot already.
                Array;
            false ->
                % Grow by at most ?MAX_LOOKUP_SIZE_INC (doubling when small).
                Grown = ArraySize + min(ArraySize, ?MAX_LOOKUP_SIZE_INC),
                array:resize(Grown, Array)
        end,
    Array3 = array:set(Size, Value, Array2),
    {Size, Lookup#lookup{size = Size + 1, array = Array3}}.
%% Shrinks the lookup table, removing the last value;
%% shrinks the underlying array only once enough free slots accumulated,
%% otherwise just clears the vacated slot.
-spec lookup_shrink(lookup()) -> lookup().
lookup_shrink(Lookup) ->
    #lookup{size = OldSize, array = Array} = Lookup,
    NewSize = OldSize - 1,
    ArraySize = array:size(Array),
    FreeSpace = (ArraySize - NewSize),
    MaxFreeSpace = min(?MAX_LOOKUP_SIZE_INC, NewSize),
    % `orelse' (short-circuit) instead of the non-lazy `or'; both operands
    % are plain boolean comparisons so the result is unchanged.
    case (NewSize < ?LOOKUP_START_SIZE) orelse (FreeSpace < MaxFreeSpace) of
        true ->
            % Keep the current capacity; just reset the vacated slot so the
            % stored value can be garbage collected.
            Array2 = array:reset(NewSize, Array),
            Lookup#lookup{size = NewSize, array = Array2};
        false ->
            % Enough slack accumulated; shrink the array itself.
            Array2 = array:resize(NewSize, Array),
            Lookup#lookup{size = NewSize, array = Array2}
    end.
%% Adds the value in the given lookup table at a random position;
%% if the lookup table had to move an existing value, the second element of
%% the result is a tuple of that value and its new index, otherwise it is
%% `undefined'. Needs the state of the random number generator.
%% (Spec fixed: the `| undefined' alternative belongs to the SECOND tuple
%% element -- every clause returns a 4-tuple, never a bare `undefined'.)
-spec lookup_add(lookup(), term(), term())
    -> {non_neg_integer(), {non_neg_integer(), term()} | undefined,
        term(), lookup()}.
lookup_add(#lookup{size = 0} = Lookup, RSt, Value) ->
    {0, Lookup2} = lookup_append(Lookup, Value),
    {0, undefined, RSt, Lookup2};
lookup_add(#lookup{size = 1} = Lookup, RSt, Value) ->
    % Even if there are only two values we want the table randomized.
    {1, Lookup2} = lookup_append(Lookup, Value),
    {SwapFlag, RSt2} = randint(RSt, 2),
    case SwapFlag =:= 0 of
        false ->
            {1, undefined, RSt2, Lookup2};
        true ->
            {OldValue, _, Lookup3} = lookup_swap(Lookup2, 0, 1),
            {0, {1, OldValue}, RSt2, Lookup3}
    end;
lookup_add(Lookup, RSt, Value) ->
    % Append at the end, then swap with a randomly chosen earlier slot.
    {Idx, Lookup2} = lookup_append(Lookup, Value),
    {RandIdx, RSt2} = randint(RSt, Idx),
    {OldValue, _, Lookup3} = lookup_swap(Lookup2, RandIdx, Idx),
    {RandIdx, {Idx, OldValue}, RSt2, Lookup3}.
%% Removes the value at the given index;
%% if the lookup table had to move an existing value, it returns a tuple
%% with the value and its new index, otherwise it returns `undefined'.
-spec lookup_del(lookup(), non_neg_integer())
    -> {undefined | {non_neg_integer(), term()}, lookup()}.
% Single-entry table: nothing needs to be moved.
lookup_del(#lookup{size = 1} = Lookup, 0) ->
    {undefined, lookup_shrink(Lookup)};
% Deleting the last entry: nothing needs to be moved.
lookup_del(#lookup{size = Size} = Lookup, Idx) when Idx =:= (Size - 1)->
    {undefined, lookup_shrink(Lookup)};
% General case: the last entry fills the hole; report its new index.
lookup_del(#lookup{size = Size} = Lookup, Idx) when Size > 1, Idx < Size ->
    OldIdx = Size - 1,
    {OldValue, _, Lookup2} = lookup_swap(Lookup, OldIdx, Idx),
    {{Idx, OldValue}, lookup_shrink(Lookup2)}.
%% Selects a random value from the lookup table.
%% If a restriction function is specified, it will keep selecting random values
%% until either there is no more values or the function returns `true'.
%% Optionally uses a strong random number as offset to weak random numbers
%% to ensure relatively strong randomness.
-spec lookup_select(lookup(), rand_state(), boolean(), int_filter_fun())
    -> {unavailable, rand_state()} | {peer_id(), rand_state()}.
lookup_select(#lookup{size = 0}, RSt, _UseRandOff, _FilterFun) ->
    {unavailable, RSt};
lookup_select(#lookup{size = 1} = Lookup, RSt, _UseRandOff, undefined) ->
    % Single entry and no filter: no randomness needed.
    {lookup_get(Lookup, 0), RSt};
lookup_select(Lookup, RSt, UseRandOff, undefined) ->
    % No filter: a single random draw suffices.
    #lookup{size = Size} = Lookup,
    RandOffset = strong_randword(UseRandOff),
    {RandInt, RSt2} = randint(RSt, Size),
    RandIdx = (RandInt + RandOffset) rem Size,
    {lookup_get(Lookup, RandIdx), RSt2};
lookup_select(Lookup, RSt, UseRandOff, FilterFun) ->
    % With a filter: delegate to the rejection-sampling loop.
    #lookup{size = Size} = Lookup,
    RandOffset = strong_randword(UseRandOff),
    lookup_select(Lookup, RSt, FilterFun, RandOffset, Size).
%% Rejection-sampling loop for lookup_select/4: draws a random index within
%% the first `SamplingSize' entries; rejected values are swapped out of the
%% sampling window so they cannot be drawn again.
lookup_select(Lookup, RSt, FilterFun, _Offset, 1) ->
    Value = lookup_get(Lookup, 0),
    case FilterFun(Value) of
        true -> {Value, RSt};
        false -> {unavailable, RSt}
    end;
lookup_select(Lookup, RSt, FilterFun, Offset, SamplingSize) ->
    LastIdx = SamplingSize - 1,
    {RandInt, RSt2} = randint(RSt, SamplingSize),
    RandIdx = (RandInt + Offset) rem SamplingSize,
    Value = lookup_get(Lookup, RandIdx),
    case FilterFun(Value) of
        true -> {Value, RSt2};
        false ->
            % Move the rejected value to the end of the window and retry
            % with a window one entry smaller.
            {_, _, Lookup2} = lookup_swap(Lookup, RandIdx, LastIdx),
            lookup_select(Lookup2, RSt2, FilterFun, Offset, SamplingSize - 1)
    end.
%% Samples a random number of peer identifiers from given lookup table.
%% If the requested sample size is `all' or larger than the size of the table
%% the result will not be shuffled (maybe reversed).
%% Optionally uses a strong random number as offset to weak random numbers to
%% ensure relatively strong randomness.
-spec lookup_sample(lookup(), rand_state(), boolean(), non_neg_integer() | all,
                    int_filter_fun() | undefined)
    -> {[peer_id()], rand_state()}.
lookup_sample(#lookup{size = 0}, RSt, _UseRandOff, _SampleSize, _FilterFun) ->
    {[], RSt};
lookup_sample(#lookup{size = Size} = Lookup, RSt, _, SampleSize, undefined)
  when SampleSize =:= all; SampleSize >= Size ->
    % Everything is requested and there is no filter: return the table as-is.
    #lookup{array = Array} = Lookup,
    {array:sparse_to_list(Array), RSt};
lookup_sample(Lookup, RSt, _UseRandOff, all, FilterFun) ->
    % Everything is requested with a filter: fold over the whole table.
    #lookup{array = Array} = Lookup,
    Result = array:sparse_foldl(fun(_, V, Acc) ->
        case FilterFun(V) of
            true -> [V | Acc];
            false -> Acc
        end
    end, [], Array),
    {Result, RSt};
lookup_sample(Lookup, RSt, UseRandOff, SampleSize, FilterFun) ->
    % Partial sample: delegate to the random sampling loop.
    #lookup{size = Size} = Lookup,
    RandOffset = strong_randword(UseRandOff),
    lookup_sample(Lookup, RSt, FilterFun, RandOffset, SampleSize, Size, []).
%% Accumulates up to `Remaining' accepted values drawn without replacement
%% from the first `SamplingSize' entries of the table.
lookup_sample(_Lookup, RSt, _FilterFun, _Offset, 0, _SamplingSize, Acc) ->
%% Requested sample size reached.
{Acc, RSt};
lookup_sample(Lookup, RSt, FilterFun, _Offset, Remaining, 1, Acc) ->
%% Single candidate left in the window; no randomness needed.
Value = lookup_get(Lookup, 0),
{_, Acc2} = lookup_filter(Value, FilterFun, Remaining, Acc),
{Acc2, RSt};
lookup_sample(Lookup, RSt, FilterFun, Offset, Remaining, SamplingSize, Acc) ->
{RandInt, RSt2} = randint(RSt, SamplingSize),
RandIdx = (Offset + RandInt) rem SamplingSize,
LastIdx = SamplingSize - 1,
%% Swap the drawn entry to the end of the window so it cannot be drawn
%% again once the window shrinks by one.
{RandValue, _, Lookup2} = lookup_swap(Lookup, RandIdx, LastIdx),
{Remaining2, Acc2} = lookup_filter(RandValue, FilterFun, Remaining, Acc),
lookup_sample(Lookup2, RSt2, FilterFun, Offset, Remaining2, LastIdx, Acc2).
%% Applies the optional filter to a sampled value, returning the updated
%% remaining-count and accumulator. An `undefined' filter accepts everything.
lookup_filter(Value, undefined, Rem, Acc) ->
{Rem - 1, [Value | Acc]};
lookup_filter(Value, FilterFun, Rem, Acc) ->
case FilterFun(Value) of
true -> {Rem - 1, [Value | Acc]};
false -> {Rem, Acc}
end. | apps/aecore/src/aec_peers_pool.erl | 0.674587 | 0.507934 | aec_peers_pool.erl | starcoder
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%
%% @doc This module contains the logic to check whether
%% an execution respects some given consistency models.
%%
-module(conver_consistency).
-include("conver.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([check_consistency/1]).
%%% API
%% @doc Check which consistency models are respected by an execution.
%%
%% Takes as input parameter an array of tuples of operations (`Ops')
%% grouped by issuing process, and outputs to stdout the outcome
%% of the tests against various consistency models.
%% It returns an array of tuples having the same format and content of the
%% input parameter `Ops', in which each operation revealing a consistency
%% anomaly has been suitably marked.
%%
-spec check_consistency([{atom(), [op()]}]) -> [{atom(), [op()]}].
check_consistency(Ops) ->
%% Sort each per-process session by the returns-before (real-time) order.
Sessions = [lists:sort(fun cmp_rb/2, Session) || {_, Session} <- Ops],
OpLst = lists:append(Sessions),
%% Build a graph with one vertex per operation; edges carry relation labels
%% (so, rb, vis, conc, ar) added by build_ordering/4.
G = digraph:new(),
[digraph:add_vertex(G, V) || V <- OpLst],
build_ordering(OpLst, G, fun cmp_so/2, so),
build_ordering(OpLst, G, fun cmp_rb/2, rb),
build_ordering(OpLst, G, fun cmp_vis/2, vis),
build_ordering(OpLst, G, fun are_concurrent/2, conc),
build_ordering(OpLst, G, fun cmp_ar/2, ar),
%% Sanity check: ar must be a total order, i.e. exactly n*(n-1)/2 edges.
true = (count_edges(G, ar) == (length(OpLst) * (length(OpLst)-1)) div 2),
ArLst = lists:sort(fun cmp_ar/2, OpLst),
%% Run the individual session guarantees, then derive the composite models.
IsMR = check_monotonic_reads(G, Sessions),
IsRYW = check_read_your_writes(G, Sessions),
IsMW = check_monotonic_writes(G),
IsWFR = check_writes_follow_reads(G),
IsPRAM = IsMR andalso IsMW andalso IsRYW andalso is_subset(G, so, ar),
IsCausal = IsPRAM andalso IsWFR,
IsRealTime = check_real_time(G),
IsRValF = check_rval(G, ArLst),
IsRegular = IsCausal andalso IsRealTime andalso
IsRValF andalso is_subset(G, vis, ar),
OpsChecked = build_checked_proplist(G, Ops),
%io:format("Ar as list: ~p~n~n", [ArLst]),
io:format("~nConsistency check: ~p~n", [OpsChecked]),
io:format("~nMonotonic Reads...................... ~s~n", [print_bool(IsMR)]),
io:format("Read-Your-Writes..................... ~s~n", [print_bool(IsRYW)]),
io:format("Monotonic Writes..................... ~s~n", [print_bool(IsMW)]),
io:format("Writes-Follow-Reads.................. ~s~n", [print_bool(IsWFR)]),
io:format("PRAM................................. ~s~n", [print_bool(IsPRAM)]),
io:format("Causal............................... ~s~n", [print_bool(IsCausal)]),
io:format("RealTime............................. ~s~n", [print_bool(IsRealTime)]),
io:format("Regular.............................. ~s~n~n", [print_bool(IsRegular)]),
OpsChecked.
%%%===================================================================
%%% Utility generic functions
%%%===================================================================
%% @doc Builds and returns an array of tuples containing
%% the operations of the execution and having the same format
%% of the input parameter `Ops'.
%% In the returned array, each operation revealing a consistency anomaly
%% as marked in the corresponding vertices of the graph `G',
%% has been suitably marked (the vertex label list ends up in `#op.notes').
-spec build_checked_proplist(digraph:graph(), [{atom(), [op()]}]) ->
[{atom(), [op()]}].
build_checked_proplist(G, Ops) ->
%% Copies the anomaly labels attached to a vertex into the op's notes field.
FunGetMarkedOp = fun(V) ->
{V, Label} = digraph:vertex(G, V),
V#op{notes = Label}
end,
%% Regroup vertices per process and re-sort each session by returns-before.
[{Proc, lists:sort(fun cmp_rb/2, [FunGetMarkedOp(Op) ||
Op <- digraph:vertices(G), Op#op.proc == Proc])}
|| Proc <- proplists:get_keys(Ops)].
%% Renders a check outcome as a colored PASS/FAIL string for the report.
-spec print_bool(boolean()) -> iolist().
print_bool(Outcome) ->
    case Outcome of
        true -> color:green("PASS");
        false -> color:red("FAIL")
    end.
%%%===================================================================
%%% Consistency check functions
%%%===================================================================
%% Monotonic reads
%% @doc Checks the Monotonic Read consistency guarantee.
%% Walks each session with a fictitious initial write(0), marking reads that
%% observe an older value than a previously-read write with the `mr' label.
-spec check_monotonic_reads(digraph:graph(), [[op()]]) -> boolean().
check_monotonic_reads(G, Sessions) ->
[mark_mr_violations(G, #op{proc=init, type=write, arg=0}, Session) || Session <- Sessions],
is_semantics_respected(G, mr).
%% Tracks the most recent write whose value was read (`LastWriteRead') while
%% scanning a session; writes in the session are skipped.
-spec mark_mr_violations(digraph:graph(), op(), [op()]) -> 'ok'.
mark_mr_violations(_, _, []) -> ok;
mark_mr_violations(G, LastWriteRead, [H|T]) when H#op.type == read ->
NewLastWriteRead = if
%% Read of a non-positive value (e.g. the initial 0): not an anomaly.
LastWriteRead#op.arg > H#op.arg, H#op.arg =< 0 ->
LastWriteRead;
%% Read went "backwards": either a real anomaly or an artifact of the
%% speculative ar order when the two writes are concurrent.
LastWriteRead#op.arg > H#op.arg, H#op.arg > 0 ->
OriginalWrite = hd(get_in_neighbours_by_rel(G, H, vis)),
ConcNeighbours = get_in_neighbours_by_rel(G, OriginalWrite, conc),
case lists:member(LastWriteRead, ConcNeighbours) of
%% If the last write in ar is concurrent with the write whose value is read by this operation
%% then it's an error in the way we constructed the ar speculative total order.
true ->
lager:debug("[MR] Anomaly in the speculative total order ar: ~p", [H]),
OriginalWrite;
false ->
add_label_to_vertex(G, H, mr), %% otherwise: mark the anomaly
LastWriteRead
end;
LastWriteRead#op.arg == H#op.arg ->
LastWriteRead;
%% Read moved forward: track the write that produced the new value.
LastWriteRead#op.arg < H#op.arg ->
hd(get_in_neighbours_by_rel(G, H, vis))
end,
mark_mr_violations(G, NewLastWriteRead, T);
mark_mr_violations(G, LastValueRead, [H|T]) when H#op.type == write ->
mark_mr_violations(G, LastValueRead, T).
%% Read-your-writes
%% @doc Checks the Read-Your-Write consistency guarantee.
%% Walks each session with a fictitious initial write(0), marking reads that
%% return a value older than the session's own last write with `ryw'.
-spec check_read_your_writes(digraph:graph(), [[op()]]) -> boolean().
check_read_your_writes(G, Sessions) ->
[mark_ryw_violations(G, #op{proc=init, type=write, arg=0}, Session) || Session <- Sessions],
is_semantics_respected(G, ryw).
%% Tracks the session's most recent write (`LastWrite') while scanning.
-spec mark_ryw_violations(digraph:graph(), op(), [op()]) -> 'ok'.
mark_ryw_violations(_, _, []) -> ok;
mark_ryw_violations(G, LastWrite, [H|T]) when H#op.type == read ->
case LastWrite#op.arg > H#op.arg of
true ->
OriginalWrite = hd(get_in_neighbours_by_rel(G, H, vis)),
ConcNeighbours = get_in_neighbours_by_rel(G, OriginalWrite, conc),
case lists:member(LastWrite, ConcNeighbours) of
%% If the last write in ar is concurrent with the write whose value is read by this operation
%% then it's an error in the way we constructed the ar speculative total order.
true -> lager:debug("[RYW] Anomaly in the speculative total order ar: ~p", [H]);
false -> add_label_to_vertex(G, H, ryw) %% otherwise: mark the anomaly
end;
false -> ok
end,
mark_ryw_violations(G, LastWrite, T);
mark_ryw_violations(G, _, [H|T]) when H#op.type == write ->
mark_ryw_violations(G, H, T).
%% Monotonic writes
%% @doc Checks the Monotonic Writes consistency guarantee: every pair of
%% writes ordered by session order (so) must be ordered the same way by
%% the arbitration order (ar).
-spec check_monotonic_writes(digraph:graph()) -> boolean().
check_monotonic_writes(G) ->
    SoWriteEdges = [Edge || {V1, V2} = Edge <- filter_edges_by_rel(G, so),
                            V1#op.type == write,
                            V2#op.type == write],
    ArEdges = sets:from_list(filter_edges_by_rel(G, ar)),
    sets:is_subset(sets:from_list(SoWriteEdges), ArEdges).
%% Writes-follow-reads
%% @doc Checks the Writes-Follow-Reads consistency guarantee:
%% (vis union session-ordered read->write pairs) must be a subset of ar.
-spec check_writes_follow_reads(digraph:graph()) -> boolean().
check_writes_follow_reads(G) ->
FunFilterRW = fun({V1, V2}) ->
(V1#op.type == read) andalso (V2#op.type == write)
end,
SetSoRW = sets:from_list(lists:filter(FunFilterRW, filter_edges_by_rel(G, so))),
SetVis = sets:from_list(filter_edges_by_rel(G, vis)),
SetAr = sets:from_list(filter_edges_by_rel(G, ar)),
SetVisSoRW = sets:union(SetSoRW, SetVis),
case sets:is_subset(SetVisSoRW, SetAr) of
true -> true;
false ->
%% Log the offending edges before reporting the failure.
lager:debug("WFR anomaly: ~p", [sets:subtract(SetVisSoRW, SetAr)]),
false
end.
%% Real-time
%% @doc Checks the Real Time consistency guarantee:
%% the returns-before order (rb) must be a subset of the arbitration order.
-spec check_real_time(digraph:graph()) -> boolean().
check_real_time(G) ->
is_subset(G, rb, ar).
% RVal
%% @doc Checks the RVAL consistency guarantee
%% (for a register, according to the speculative total ordering `ar').
-spec check_rval(digraph:graph(), [op()]) -> boolean().
check_rval(G, ArLst) ->
%% Walk the ar-sorted operations with a fictitious initial write(0).
mark_rval_violations(G, #op{proc=init, type=write, arg=0}, ArLst),
is_semantics_respected(G, rval).
%% Scans the ar-ordered operations tracking the last write (`LastWrite');
%% a read that does not return the last written value is marked `rval'
%% unless the mismatch is explained by concurrency in the speculative order.
-spec mark_rval_violations(digraph:graph(), op(), [op()]) -> 'ok'.
mark_rval_violations(_, _, []) -> ok;
mark_rval_violations(G, LastWrite, [H|T]) when H#op.type == read ->
case H#op.arg == LastWrite#op.arg of
true -> ok;
false ->
if H#op.arg == 0 -> ok; %% XXX exception for initial write(0): otherwise hd() would throw an error
true ->
OriginalWrite = hd(get_in_neighbours_by_rel(G, H, vis)),
OriginalWriteConc = get_in_neighbours_by_rel(G, OriginalWrite, conc),
LastWriteConc = get_in_neighbours_by_rel(G, LastWrite, conc),
IsWriteConcurrent = lists:member(LastWrite, OriginalWriteConc) or
lists:member(H, OriginalWriteConc) or
lists:member(H, LastWriteConc),
if IsWriteConcurrent ->
%% If the last write in ar or the current read is concurrent with the write whose value has been read
%% then it's just an error in the way we constructed the ar speculative total order
lager:debug("[RVAL] Anomaly in the speculative total order ar: ~p", [H]);
true ->
add_label_to_vertex(G, H, rval) %% otherwise: mark the anomaly
end
end
end,
mark_rval_violations(G, LastWrite, T);
mark_rval_violations(G, _, [H|T]) when H#op.type == write ->
mark_rval_violations(G, H, T).
%% @doc Returns `true' or `false' depending on whether a specific
%% consistency model `Model' has been respected throughout the
%% execution represented by the graph `G', i.e. whether no vertex
%% carries the model's anomaly label.
-spec is_semantics_respected(digraph:graph(), atom()) -> boolean().
is_semantics_respected(G, Model) ->
    IsClean = fun(Vertex) ->
        {_, Labels} = digraph:vertex(G, Vertex),
        not lists:member(Model, Labels)
    end,
    lists:all(IsClean, digraph:vertices(G)).
%%%===================================================================
%%% Functions to operate on graph entities
%%%===================================================================
%% @doc Adds edges to the execution graph `G' to represent
%% the relationships between operations (`O')
%% according to the specified comparison function `FunCmp'.
%% The added edges are labeled with `Label'.
%% If the edge is already part of the graph, the label gets
%% attached to the existing ones.
%% Note: iterates over all ordered pairs (O(n^2) comparisons).
-spec build_ordering([op()], digraph:graph(),
fun((op(), op()) -> boolean()), atom()) -> [term()].
build_ordering(O, G, FunCmp, Label) ->
[add_label_to_edge(G, V1, V2, Label) || V1<- O, V2 <- O, FunCmp(V1, V2)].
%% Attaches `NewLabel' to the edge identified by {V1, V2}: the edge is
%% created with a singleton label list when missing, otherwise the label is
%% appended to the existing list.
-spec add_label_to_edge(digraph:graph(), digraph:vertex(), digraph:vertex(), atom()) ->
digraph:edge() | {error, digraph:add_edge_err_rsn()}.
add_label_to_edge(G, V1, V2, NewLabel) ->
    EdgeId = {V1, V2},
    Labels = case digraph:edge(G, EdgeId) of
                 {EdgeId, V1, V2, Existing} -> Existing ++ [NewLabel];
                 false -> [NewLabel]
             end,
    digraph:add_edge(G, EdgeId, V1, V2, Labels).
%% @doc Adds a label to a vertex of the execution graph
%% in order to mark that the operation associated to that vertex
%% violated a given consistency model identified by the label.
%% The vertex must already exist and carry a list label.
-spec add_label_to_vertex(digraph:graph(), digraph:vertex(), atom()) -> digraph:vertex().
add_label_to_vertex(G, V, NewLabel) ->
    {V, Existing} = digraph:vertex(G, V),
    digraph:add_vertex(G, V, Existing ++ [NewLabel]).
%% @doc Gets all edges of the execution graph `G' that
%% express a certain relation (i.e., have a given label `Rel').
%% Edge identifiers are expected to be {From, To} pairs.
-spec filter_edges_by_rel(digraph:graph(), atom()) -> [digraph:edge()].
filter_edges_by_rel(G, Rel) ->
    [E || E <- digraph:edges(G),
          begin
              {{_, _}, _, _, Labels} = digraph:edge(G, E),
              lists:member(Rel, Labels)
          end].
%% @doc Counts the edges of the execution graph `G' that
%% express a certain relation (i.e., have a given label `Rel').
-spec count_edges(digraph:graph(), atom()) -> non_neg_integer().
count_edges(G, Rel) ->
length(filter_edges_by_rel(G, Rel)).
%% @doc Determines whether all the edges expressing relation `Rel1'
%% are also expressing relation `Rel2' - hence, whether
%% `Rel1' is a subset of `Rel2'.
-spec is_subset(digraph:graph(), atom(), atom()) -> boolean().
is_subset(G, Rel1, Rel2) ->
SetRel1 = sets:from_list(filter_edges_by_rel(G, Rel1)),
SetRel2 = sets:from_list(filter_edges_by_rel(G, Rel2)),
sets:is_subset(SetRel1, SetRel2).
%% @doc Gets all vertices which are in-neighbours of vertex `V'
%% (i.e. having an edge directed to `V') according to relation `Rel'.
%% Relies on edge identifiers being {From, To} pairs.
-spec get_in_neighbours_by_rel(digraph:graph(), digraph:vertex(), atom()) ->
[digraph:vertex()].
get_in_neighbours_by_rel(G, V, Rel) ->
    [VN || VN <- digraph:in_neighbours(G, V),
           begin
               {{VN, V}, VN, V, Labels} = digraph:edge(G, {VN, V}),
               lists:member(Rel, Labels)
           end].
%%%===================================================================
%%% Functions to compare operations
%%%===================================================================
%% @doc Returns `true' if `Op1' precedes `Op2'
%% according to the session ordering (so); `false' otherwise.
%% Session order requires the same issuing process and non-overlapping times.
-spec cmp_so(op(), op()) -> boolean().
cmp_so(Op1, Op2) ->
Op1#op.proc == Op2#op.proc andalso
Op1#op.end_time =< Op2#op.start_time.
%% @doc Returns `true' if `Op1' precedes `Op2'
%% according to the returns-before ordering (rb); `false' otherwise.
%% Strictly real-time: Op1 must finish before Op2 starts.
-spec cmp_rb(op(), op()) -> boolean().
cmp_rb(Op1, Op2) ->
Op1#op.end_time < Op2#op.start_time.
%% @doc Returns `true' if `Op1' precedes `Op2'
%% according to the visibility ordering (vis); `false' otherwise.
%% A write is visible to every read returning the same argument value.
-spec cmp_vis(op(), op()) -> boolean().
cmp_vis(Op1, Op2) ->
Op1#op.type == write andalso
Op2#op.type == read andalso
Op1#op.arg == Op2#op.arg.
%% @doc Returns `true' if `Op1' precedes `Op2'
%% in the arbitration speculative total ordering (ar); `false' otherwise.
%% This total ordering is built as a linear extension
%% of the returns-before partial ordering.
%% In case of concurrent operations, it arbitrarily uses parameters
%% of the operations to determine their ordering (i.e. process id, type, timing).
-spec cmp_ar(op(), op()) -> boolean().
cmp_ar(Op1, Op2) ->
case are_concurrent(Op1, Op2) of
false -> cmp_rb(Op1, Op2);
true -> %% Concurrent operations
if
Op1#op.arg == Op2#op.arg, Op1#op.type == Op2#op.type -> %% they can only be two reads, by design
Op1#op.proc < Op2#op.proc; %% use process id to break ties
Op1#op.arg == Op2#op.arg, Op1#op.type =/= Op2#op.type -> %% a read and a write with same argument, concurrent
Op1#op.type == write; %% the write goes first
Op1#op.arg < Op2#op.arg; Op1#op.arg > Op2#op.arg ->
cmp_opmedian(Op1, Op2) %% Speculative ordering based on operations' medians
end
end.
%% @doc Returns `true' if `Op1' and `Op2' are concurrent, i.e.
%% they are not ordered by the returns-before relation.
%% Note that an operation is considered concurrent with itself.
-spec are_concurrent(op(), op()) -> boolean().
are_concurrent(Op1, Op2) ->
not cmp_rb(Op1, Op2) andalso not cmp_rb(Op2, Op1).
%% @doc Returns `true' if `Op1' precedes `Op2'
%% according to their median time, i.e. (start + end) / 2.
%% The medians are compared by comparing the sums directly: halving both
%% sides cannot change the ordering, and avoiding `/' keeps the comparison
%% in exact integer arithmetic (float division would lose precision for
%% integer timestamps above 2^53).
-spec cmp_opmedian(op(), op()) -> boolean().
cmp_opmedian(Op1, Op2) ->
    (Op1#op.start_time + Op1#op.end_time) <
        (Op2#op.start_time + Op2#op.end_time).
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% Sanity checks for the session-order comparison: operations are ordered
%% only within the same process and only when one ends before the other starts.
cmp_so_test() ->
Op1 = #op{proc=proc1, start_time=0, end_time=10},
Op2 = #op{proc=proc1, start_time=11, end_time=25},
Op3 = #op{proc=proc2, start_time=3, end_time=6},
Op4 = #op{proc=proc1, start_time=1, end_time=8},
?assert(cmp_so(Op1, Op2)),
?assertNot(cmp_so(Op2, Op1)),
?assert(cmp_so(Op4, Op2)),
?assertNot(cmp_so(Op4, Op1)),
?assertNot(cmp_so(Op3, Op2)).
-endif. | src/conver_consistency.erl | 0.610453 | 0.506774 | conver_consistency.erl | starcoder
%% -----------------------------------------------------------------------------
%%
%% The MIT License (MIT)
%%
%% Copyright (c) 2015 <NAME>
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in all
%% copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
%% SOFTWARE.
%%
%% -----------------------------------------------------------------------------
%%
%% @author <NAME> <<EMAIL>>
%% @doc Example of wrapping OTP stdlib digraph
%%
%% The example module shows how to wrap existing digraph implementation for
%% using with ERLGT.
%%
%% @copyright 2015 <NAME>
%% @end
%%
%% -----------------------------------------------------------------------------
-module(otp_digraph).
-behaviour(gen_digraph).
-export([new/0]).
-export([ from_list/1
, to_list/1
, edges/1
, no_edges/1
, vertices/1
, no_vertices/1
, in_neighbours/2
, out_neighbours/2
, in_degree/2
, out_degree/2
, sources/1
, sinks/1
, delete/1
, is_edge/3
, is_path/2
, get_path/3
, get_cycle/2
, get_short_path/3
, get_short_cycle/2
, has_path/3
, has_cycle/2
, reachable/2
, reachable_neighbours/2
, reaching/2
, reaching_neighbours/2
, components/1
, strong_components/1
, preorder/1
, is_acyclic/1
, postorder/1
, topsort/1
, condensation/1
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% -----------------------------------------------------------------------------
%% API
%% -----------------------------------------------------------------------------

%% @doc Creates an empty private digraph wrapped in a `{Module, Digraph}'
%% handle, as used by the rest of this module.
new() -> {?MODULE, digraph:new([private])}.

%% -----------------------------------------------------------------------------
%% Callbacks
%% -----------------------------------------------------------------------------

%% @doc Builds a graph from a list of `{Vertex}' (isolated vertex) and
%% `{From, To}' (edge; both endpoints added implicitly) tuples.
%% Any other element raises `badarg'.
from_list(Entries) ->
    {_, D} = G = new(),
    AddEntry =
        fun({V}) ->
                digraph:add_vertex(D, V);
           ({V1, V2}) ->
                digraph:add_vertex(D, V1),
                digraph:add_vertex(D, V2),
                digraph:add_edge(D, V1, V2);
           (_) ->
                error(badarg)
        end,
    lists:foreach(AddEntry, Entries),
    G.
%% Thin delegations to the stdlib digraph/digraph_utils modules; the wrapped
%% digraph handle is the second element of the {Module, Digraph} tuple.
%% Generic operations with no direct digraph counterpart are delegated to
%% the gen_digraph default implementations instead.
to_list(G) -> gen_digraph:gen_to_list(G).
%% Converts digraph's internal edge identifiers into plain {From, To} pairs.
edges({_, D}) ->
[ case digraph:edge(D, E) of
{_, V1, V2, _} -> {V1, V2}
end
|| E <- digraph:edges(D) ].
no_edges({_, D}) -> digraph:no_edges(D).
vertices({_, D}) -> digraph:vertices(D).
no_vertices({_, D}) -> digraph:no_vertices(D).
in_neighbours({_, D}, V) -> digraph:in_neighbours(D, V).
out_neighbours({_, D}, V) -> digraph:out_neighbours(D, V).
in_degree({_, D}, V) -> digraph:in_degree(D, V).
out_degree({_, D}, V) -> digraph:out_degree(D, V).
sources(G) -> gen_digraph:gen_sources(G).
sinks(G) -> gen_digraph:gen_sinks(G).
delete({_, D}) -> digraph:delete(D).
is_edge(G, V1, V2) -> lists:member(V2, out_neighbours(G, V1)).
is_path(G, P) -> gen_digraph:gen_is_path(G, P).
get_path({_, D}, V1, V2) -> digraph:get_path(D, V1, V2).
%% Normalizes digraph's self-loop result `[V]' to the `[V, V]' form expected
%% by the gen_digraph contract; other cycles pass through unchanged.
get_cycle({_, D}, V) ->
case digraph:get_cycle(D, V) of
[V] = P -> [V|P];
P -> P
end.
get_short_path({_, D}, V1, V2) -> digraph:get_short_path(D, V1, V2).
get_short_cycle({_, D}, V) -> digraph:get_short_cycle(D, V).
has_path(G, V1, V2) -> gen_digraph:gen_has_path(G, V1, V2).
has_cycle(G, V) -> gen_digraph:gen_has_cycle(G, V).
reachable({_, D}, Vs) -> digraph_utils:reachable(Vs, D).
reachable_neighbours({_, D}, Vs) -> digraph_utils:reachable_neighbours(Vs, D).
reaching({_, D}, Vs) -> digraph_utils:reaching(Vs, D).
reaching_neighbours({_, D}, Vs) -> digraph_utils:reaching_neighbours(Vs, D).
components({_, D}) -> digraph_utils:components(D).
strong_components({_, D}) -> digraph_utils:strong_components(D).
preorder({_, D}) -> digraph_utils:preorder(D).
is_acyclic({_, D}) -> digraph_utils:is_acyclic(D).
postorder({_, D}) -> digraph_utils:postorder(D).
topsort({_, D}) -> digraph_utils:topsort(D).
%% Re-wraps the condensed graph so it can be used through the same behaviour.
condensation({Mod, D}) -> {Mod, digraph_utils:condensation(D)}.
%% -----------------------------------------------------------------------------
%% Tests
%% -----------------------------------------------------------------------------
-ifdef(TEST).
%% Runs the generic gen_digraph property and unit test suites against this
%% wrapper implementation.
gen_properties_test_() ->
gen_digraph:gen_properties_tests(?MODULE).
gen_tests_test_() ->
gen_digraph:gen_tests(?MODULE).
-endif. %% TEST | src/otp_digraph.erl | 0.605799 | 0.433382 | otp_digraph.erl | starcoder
-module(aesim_scenario_gossip_time).
%% @doc Default simulation scenario.
%%
%% - Starts a cluster of configurable size.
%% - Wait for all nodes to know a configurable percentage of the other nodes.
%% - Start a single node and measure the time for it to get to know a
%% configurable percentage of the other nodes.
%% - Repeate multiple times and report the min/average/median/max time.
%%
%% Configuration:
%% - `start_period`: The period between new nodes are started; default: 30s.
%% - `max_nodes`: The maximum number of node that get started; default: 150.
%% - `gossip_percent`: The percentage of the other nodes to reach.
-behaviour(aesim_scenario).
%=== INCLUDES ==================================================================
-include_lib("stdlib/include/assert.hrl").
-include("aesim_types.hrl").
%=== EXPORTS ===================================================================
%% Behaviour esim_scenario callback functions
-export([parse_options/2]).
-export([scenario_new/1]).
-export([scenario_start/3]).
-export([scenario_phase_start/4]).
-export([scenario_phase_stop/4]).
-export([scenario_phase_check/4]).
-export([scenario_handle_event/6]).
-export([scenario_report/4]).
%=== MACROS ====================================================================
-define(DEFAULT_NODE_START_PERIOD, "30s").
-define(DEFAULT_MAX_NODES, 150).
-define(DEFAULT_GOSSIP_PERCENT, 90).
-define(CHECK_INTERVAL, 200).
-define(MEASURE_COUNT, 20).
-define(RESULTS_SPEC, [
{string, "DESCRIPTION", left, undefined, 36},
{minimal_time, "MINIMUM", right, undefined, 10},
{minimal_time, "AVERAGE", right, undefined, 10},
{minimal_time, "MEDIAN", right, undefined, 10},
{minimal_time, "MAXIMUM", right, undefined, 10}
]).
%=== TYPES =====================================================================
-type state() :: #{
event_ref := event_ref() | undefined,
node_id := id() | undefined,
measures := [],
next_phases := [aesim_senario:phase_tag()]
}.
-export_type([state/0]).
%=== BEHAVIOUR aesim_scenario CALLBACK FUNCTIONS ===============================
%% Parses and validates the scenario configuration options, falling back to
%% the module defaults when an option is not provided.
parse_options(Opts, Sim) ->
aesim_config:parse(Sim, Opts, [
{node_start_period, time, ?DEFAULT_NODE_START_PERIOD},
{max_nodes, integer, ?DEFAULT_MAX_NODES},
{gossip_percent, integer, ?DEFAULT_GOSSIP_PERCENT}
]).
%% Initializes the scenario state and declares the phase sequence:
%% one cluster setup phase, one cluster-wide gossip phase, then
%% ?MEASURE_COUNT repetitions of the single-node gossip measurement phase.
scenario_new(Sim) ->
State = #{
event_ref => undefined,
node_id => undefined,
measures => [],
next_phases => [cluster_setup, cluster_gossip
| lists:duplicate(?MEASURE_COUNT, node_gossip)]
},
%% Each phase is {Description, Tag, CheckInterval}.
Phases = [
% First phase; starting all the cluster node up to the configured maximum.
{"Starting cluster nodes",
cluster_setup, cfg_node_start_period(Sim)},
% Second phase; waiting for all nodes to know a configurable
% percentage of the other nodes.
{"Waiting for nodes to know each others",
cluster_gossip, ?CHECK_INTERVAL * 60},
% Third phase; waiting for the last node to know a configured
% percentage of the other nodes.
{"Waiting for reference node to know enough of the cluster",
node_gossip, ?CHECK_INTERVAL}
],
{State, Phases, Sim}.
%% Performs the default scenario startup and enters the first phase.
scenario_start(State, Nodes, Sim) ->
{Nodes2, Sim2} = aesim_scenario:default_start(Nodes, Sim),
{State, cluster_setup, Nodes2, Sim2}.
%% Logs the phase transition and dispatches to the phase-specific handler.
scenario_phase_start(State, Phase, Nodes, Sim) ->
aesim_scenario:print_phase_start(Phase, Sim),
phase_start(State, Phase, Nodes, Sim).
scenario_phase_stop(State, Phase, Nodes, Sim) ->
aesim_scenario:print_phase_stop(Phase, Sim),
phase_stop(State, Phase, Nodes, Sim).
%% Periodic phase check: advance to the next phase when the current phase's
%% termination condition holds, otherwise keep going.
scenario_phase_check(State, #{tag := PhaseTag}, Nodes, Sim) ->
case phase_terminated(State, PhaseTag, Nodes, Sim) of
true -> phase_next(State);
false -> continue
end.
%% Handles the self-scheduled `start_node' event; all other events are ignored.
scenario_handle_event(State, _, start_node, Count, Nodes, Sim) ->
do_start_node(State, Count, Nodes, Sim);
scenario_handle_event(_State, _, _EventName, _Params, _Nodes, _Sim) -> ignore.
%% @doc Prints the measurement summary table on normal termination; any other
%% termination reason is passed through unchanged.
scenario_report(#{measures := Measures}, normal, _Nodes, Sim) ->
    aesim_simulator:print_title("SCENARIO RESULTS", Sim),
    Description = aesim_utils:format("Time to know ~b% of the cluster",
                                     [cfg_gossip_percent(Sim)]),
    {Min, Avg, Med, Max} = aesim_utils:reduce_metric(Measures),
    Fields = [Description, Min, Avg, Med, Max],
    aesim_simulator:print_header(?RESULTS_SPEC, Sim),
    aesim_simulator:print_fields(?RESULTS_SPEC, Fields, Sim),
    aesim_simulator:print_separator(Sim),
    normal;
scenario_report(_State, Reason, _Nodes, _Sim) ->
    Reason.
%=== INTERNAL FUNCTIONS ========================================================
%% Returns the scenario action for moving on: stop when no phase remains,
%% otherwise transition to the next queued phase.
phase_next(#{next_phases := []}) -> {stop, normal};
phase_next(#{next_phases := [PhaseTag | _]}) -> {next, PhaseTag}.
%% Pops the just-entered phase off the queue.
phase_started(#{next_phases := [_ | Rest]} = State) ->
State#{next_phases := Rest}.
%% Phase-specific startup logic, dispatched on the phase tag.
phase_start(State, #{tag := cluster_setup}, Nodes, Sim) ->
%% Start nodes up to the configured maximum
CurrCount = aesim_nodes:count(Nodes),
do_start_node(phase_started(State), CurrCount, Nodes, Sim);
phase_start(State, #{tag := cluster_gossip}, Nodes, Sim) ->
{phase_started(State), Nodes, Sim};
phase_start(State, #{tag := node_gossip}, Nodes, Sim) ->
%% Start the last node and keep its identifier
{Nodes2, NodeId, Sim2} = aesim_nodes:start_node(Nodes, Sim),
{phase_started(State#{node_id := NodeId}), Nodes2, Sim2}.
%% Phase-specific teardown logic, dispatched on the phase tag.
phase_stop(State, #{tag := cluster_setup}, Nodes, Sim) ->
%% Make sure no pending start_node event survives the phase.
{State2, Sim2} = cancel_start_node(State, Sim),
{State2, Nodes, Sim2};
phase_stop(State, #{tag := cluster_gossip}, Nodes, Sim) ->
{State, Nodes, Sim};
phase_stop(State, #{tag := node_gossip} = Phase, Nodes, Sim) ->
%% Record the phase duration as one gossip-time measurement.
#{measures := Measures} = State,
#{sim_start_time := StartTime, sim_stop_time := StopTime} = Phase,
Measure = StopTime - StartTime,
{State#{measures := [Measure | Measures]}, Nodes, Sim}.
%% Phase termination conditions, dispatched on the phase tag.
%% cluster_setup ends when enough nodes are up (max minus the nodes reserved
%% for the ?MEASURE_COUNT measurement phases).
phase_terminated(_State, cluster_setup, Nodes, Sim) ->
aesim_nodes:count(Nodes) >= (cfg_max_nodes(Sim) - ?MEASURE_COUNT);
phase_terminated(_State, cluster_gossip, Nodes, Sim) ->
nodes_gossip_target_reached(Nodes, Sim);
phase_terminated(State, node_gossip, Nodes, Sim) ->
#{node_id := NodeId} = State,
node_gossip_target_reached(NodeId, Nodes, Sim).
%% True when every node's pool knows the configured percentage of the others.
nodes_gossip_target_reached(Nodes, Sim) ->
NodeCount = aesim_nodes:count(Nodes),
NodesReport = aesim_nodes:report(Nodes, simple, Sim),
#{nodes := NodeReports} = NodesReport,
fold_while_true(fun(R) ->
report_gossip_target_reached(R, NodeCount, Sim)
end, NodeReports).
%% Same check restricted to a single node identified by `NodeId'.
node_gossip_target_reached(NodeId, Nodes, Sim) ->
NodeCount = aesim_nodes:count(Nodes),
NodeReport = aesim_nodes:node_report(Nodes, NodeId, simple, Sim),
report_gossip_target_reached(NodeReport, NodeCount, Sim).
%% Compares a node's known-peer count against the configured percentage of
%% the other nodes (NodeCount - 1 excludes the node itself).
report_gossip_target_reached(NodeReport, NodeCount, Sim) ->
#{pool := PoolReport} = NodeReport,
#{known_count := KnownCount} = PoolReport,
(KnownCount * 100 / (NodeCount - 1)) >= cfg_gossip_percent(Sim).
%% Returns `true' iff `Fun' returns `true' for every element of `List',
%% short-circuiting at the first `false'. This is exactly the semantics of
%% `lists:all/2', so the hand-rolled recursion is replaced by the stdlib
%% call (note: a non-boolean result from `Fun' now raises `badarg' instead
%% of `case_clause').
fold_while_true(Fun, List) -> lists:all(Fun, List).
%--- EVENTS FUNCTIONS ----------------------------------------------------------
%% Schedules the next `start_node' event after the configured start period.
sched_start_node(Count, Sim) ->
aesim_scenario:post(cfg_node_start_period(Sim), start_node, Count, Sim).
%% Starts one node and schedules the next start, until the cluster reaches
%% its setup size (configured maximum minus the reserved measurement nodes).
do_start_node(State, Count, Nodes, Sim) ->
case Count < (cfg_max_nodes(Sim) - ?MEASURE_COUNT) of
false ->
{State#{event_ref := undefined}, Nodes, Sim};
true ->
{Nodes2, _, Sim2} = aesim_nodes:start_node(Nodes, Sim),
{Ref, Sim3} = sched_start_node(Count +1, Sim2),
{State#{event_ref := Ref}, Nodes2, Sim3}
end.
%% Cancels a pending scheduled `start_node' event, if any.
cancel_start_node(#{event_ref := undefined} = State, Sim) -> {State, Sim};
cancel_start_node(#{event_ref := Ref} = State, Sim) ->
{State#{event_ref := undefined}, aesim_events:cancel(Ref, Sim)}.
%--- CONFIG FUNCTIONS ----------------------------------------------------------
%% Accessors for the scenario options registered in parse_options/2.
cfg_node_start_period(Sim) -> aesim_config:get(Sim, node_start_period).
cfg_max_nodes(Sim) -> aesim_config:get(Sim, max_nodes).
cfg_gossip_percent(Sim) -> aesim_config:get(Sim, gossip_percent). | src/scenarios/aesim_scenario_gossip_time.erl | 0.522689 | 0.636523 | aesim_scenario_gossip_time.erl | starcoder
% @doc GRiSP SPI API.
%
% <a href="https://en.wikipedia.org/wiki/Serial_Peripheral_Interface">Serial
% Peripheral Interface (SPI)</a> is a synchronous serial communication
% protocol, where a single controller device can control many responder
% devices. With this API, the GRiSP board acts as a controller and any connected
% device is a responder.
%
% SPI has four lines: clock, MOSI, MISO and chip select. MOSI is the data line
% from the controller device to the responder devices. MISO is the data line
% from the responder devices to the controller device. The controller device is
% regulating the communication speed and synchronization by controlling the
% chip select and clock lines.
%
% == Chip Select ==
%
% By default, if the bus aliases `spi1' or `spi2' is used with {@link open/1}
% the chip select pin is automatically configured to the default SPI slot chip
% select pin. When using {@link open/2} the chip select pin can be any GPIO pin
% (see {@link grisp_gpio}).
%
% == Clock ==
%
% The clock line in SPI is a digital I/O line that pulses at a certain
% frequency. The default SPI clock frequency in GRiSP is 0.1 MHz.
%
% The clock line polarity and phase should be configured by setting the
% mandatory `clock' mode option ({@link clock()}) for each message. A polarity
% of `low' means the clock line is idling at `0' and a polarity of `high' means
% the clock line is idling at `1'. The phase denotes at which edge of the clock
% pulse the actual protocol values should be written and read. A phase of
% `leading' means that the controller and responder devices should read or
% write values as the pulse starts. A phase of `trailing' means the devices
% should read or write values as the current pulse ends.
%
% The settings for the clock line is specific per device and needs to be
% consulted from the official specification of the device.
%
% == Request & Response ==
%
% From the controller point of view, requests are always written to the MOSI
% line and response data is always read from the MISO line.
%
% Because SPI is synchronous, for every request byte sent a response byte is
% always received. Many responder devices respond silently with zeroes until a
% request is fully received, then send their response while ignoring the rest
% of the bytes being received. Other devices can start sending data already
% before the controller has finished sending a request, and even reply with
% data while the controller is writing its own data.
%
% This module provides two message formats. One format ({@link message_raw()})
% returns the full response, all bytes received from the responder device
% including ones sent during the initial request.
%
% The other format ({@link message_simple()}) automatically pads the request so
% that response bytes can be received, and strips the response of an initial
% number of bytes. This makes it simpler to communicate with responder devices
% that wait for a fully received request before replying.
-module(grisp_spi).
-include("grisp_nif.hrl").
% API
-export([open/1]).
-export([open/2]).
-export([transfer/2]).
% Callbacks
-export([on_load/0]).
-on_load(on_load/0).
-define(CPOL_HIGH, 1).
-define(CPHA_TRAILING, 2).
-define(CS_DISABLE, 4).
%--- Types ---------------------------------------------------------------------
-type pin() :: {cs, integer()} | {gpio, grisp_gpio:bus()}.
-type bus() :: spi1 | spi2.
% SPI bus identifier.
-type clock() :: {Polarity :: low | high, Phase :: leading | trailing}.
% SPI clock configuration.
-type mode() :: #{clock := clock()}.
% SPI transfer mode.
-type message_raw() :: {Mode::mode(), Message::binary()}.
% A message where the request is unmodified and which will yield a response
% binary of the same size.
-type message_simple() :: {
Mode::mode(),
Message::binary(),
Skip::non_neg_integer(),
Pad::non_neg_integer()
}.
% A message where the request itself is padded with `Pad' number of filler
% bytes (`16#ff', see the implementation), and which will yield a response
% binary that is stripped of its first `Skip' number of bytes.
-type message() :: message_raw() | message_simple().
% SPI message.
-type response() :: binary().
% SPI response.
-opaque ref() :: {reference(), pin()}.
% Reference to an opened SPI bus.
-export_type([bus/0]).
-export_type([clock/0]).
-export_type([mode/0]).
-export_type([message/0]).
-export_type([response/0]).
-export_type([ref/0]).
%--- API -----------------------------------------------------------------------
% @doc Opens an SPI bus with the default chip select pin.
%
% The respective pin 1 is used as `DefaultPin' for each slot.
%
% @equiv open(Slot, DefaultPin)
% @see grisp_gpio
-spec open(Bus::bus()) -> ref().
%% Map each bus alias to the default chip-select pin of its slot (pin 1).
open(spi1) -> open(spi1, spi1_pin1);
open(spi2) -> open(spi2, spi2_pin1).
% @doc Opens an SPI bus.
%
% Chip select pin `CS' can be any valid GPIO output pin.
%
% @see grisp_gpio
-spec open(bus(), grisp_gpio:pin()) -> ref().
%% The bus argument is ignored by the NIF open call itself; the returned
%% ref pairs the NIF resource with the chip-select descriptor resolved
%% for the running platform (see pin/2).
open(_Bus, CS) -> {open_nif(), pin(grisp_hw:platform(), CS)}.
% @doc Transfers SPI messages on a bus.
%
% A list of responses is returned in the same order as their respective
% messages.
%
% There are two forms of messages, raw binary messages or a simple message with
% padding and skipping.
%
% <ul>
% <li>
% {@link message_raw()}<br/>
% A raw binary message `{Mode, Binary}' sends the data and generates a
% response of the same length.
% </li>
% <li>
% {@link message_simple()}<br/>
% A simple message tuple `{Mode, Binary, Skip, Pad}' additionally contains
% the amount of bytes to skip in the response and an amount of padding
% bytes to add to the request. The response length will be the length of
% the request plus the pad length minus the skip length.
% </li>
% </ul>
%
% See <a href="#Request_&_Response">Request & Response</a> for more
% information.
-spec transfer(ref(), [message()]) -> [response()].
%% Perform every message in order, collecting one response per message.
transfer(Ref, Messages) ->
    lists:map(fun(Msg) -> message(Ref, Msg) end, Messages).
%--- Callbacks -----------------------------------------------------------------
% @private
%% Loads the NIF library named after this module; the native code
%% replaces the ?NIF_STUB bodies of open_nif/0 and ioctl_nif/4.
on_load() -> erlang:load_nif(atom_to_list(?MODULE), 0).
%--- Internal ------------------------------------------------------------------
%% Resolve a symbolic slot pin to a chip-select descriptor.
%% Known platform/slot combinations map to a numbered hardware CS line;
%% any other pin is opened as a plain GPIO output, initially driven
%% high (deselected), and toggled manually around each transfer (see
%% chip_select/3).
pin(grisp_base, spi1_pin1) -> {cs, 2};
pin(grisp_base, spi2_pin1) -> {cs, 3};
pin(grisp2, spi1_pin1) -> {cs, 0};
pin(grisp2, spi2_pin1) -> {cs, 1};
pin(grisp2, spi2_pin9) -> {cs, 2};
pin(grisp2, spi2_pin10) -> {cs, 3};
pin(_Platform, Pin) -> {gpio, grisp_gpio:open(Pin, #{mode => {output, 1}})}.
%% Perform one SPI transfer for a single message.
%% Simple form {Mode, Message, Skip, Pad}: pad the request with `Pad'
%% filler bytes (16#ff) so the responder has clock cycles to answer,
%% then drop the first `Skip' bytes of the raw response.
message({Bus, Pin}, {Mode, Message, Skip, Pad}) ->
    chip_select(Pin, mode(Mode), fun(CS, M) ->
        Padding = binary:copy(<<16#ff>>, Pad),
        Request = <<Message/binary, Padding/binary>>,
        Result = ioctl_nif(Bus, CS, M, Request),
        <<_:Skip/binary, Response/binary>> = Result,
        Response
    end);
%% Raw form {Mode, Message}: send as-is; the full response has the same
%% length as the request.
message({Bus, Pin}, {Mode, Message}) ->
    chip_select(Pin, mode(Mode), fun(CS, M) ->
        ioctl_nif(Bus, CS, M, Message)
    end).
%% Run the transfer fun with the proper chip-select handling.
%% Hardware CS line: the driver toggles it, so just pass the line number.
chip_select({cs, Pin}, Mode, Fun) ->
    Fun(Pin, Mode);
%% GPIO-driven CS: assert the pin (active low) around the transfer and
%% make sure it is released again even if the transfer fails.
chip_select({gpio, Pin}, Mode, Fun) ->
    grisp_gpio:set(Pin, 0),
    try
        case grisp_hw:platform() of
            grisp_base -> Fun(0, Mode);
            %% On GRiSP 2, tell the driver not to drive its own CS line.
            grisp2 -> Fun(0, Mode bor ?CS_DISABLE)
        end
    after
        grisp_gpio:set(Pin, 1)
    end.
%% Translate the clock polarity/phase pair into the driver's mode bit
%% flags (the four standard SPI modes 0-3).
mode(#{clock := {low, leading}}) -> 0;
mode(#{clock := {low, trailing}}) -> ?CPHA_TRAILING;
mode(#{clock := {high, leading}}) -> ?CPOL_HIGH;
mode(#{clock := {high, trailing}}) -> ?CPOL_HIGH bor ?CPHA_TRAILING.
%% NIF stubs; replaced by the native implementations when on_load/0 runs.
open_nif() -> ?NIF_STUB.
ioctl_nif(_Obj, _CS, _Mode, _Msg) -> ?NIF_STUB.
%% Taken from erlang stdlib implementation.
%% Adapted by <EMAIL> to encode and decode base64url. This is
%% a url and filename friendly version of base64. See also RFC4648.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2007-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%% Description: Implements base 64 encode and decode. See RFC4648.
-module(base64url).
-export([encode/1, decode/1, mime_decode/1,
encode_to_string/1, decode_to_string/1, mime_decode_to_string/1]).
%%-------------------------------------------------------------------------
%% The following type is a subtype of string() for return values
%% of (some) functions of this module.
%%-------------------------------------------------------------------------
-type ascii_string() :: [1..255].
%%-------------------------------------------------------------------------
%% encode_to_string(ASCII) -> Base64String
%% ASCII - string() | binary()
%% Base64String - string()
%%
%% Description: Encodes a plain ASCII string (or binary) into base64.
%%-------------------------------------------------------------------------
-spec encode_to_string(string() | binary()) -> ascii_string().
%% Normalize binaries to lists, then delegate to the list encoder.
encode_to_string(Bin) when is_binary(Bin) ->
    encode_to_string(binary_to_list(Bin));
encode_to_string(List) when is_list(List) ->
    encode_l(List).
%%-------------------------------------------------------------------------
%% encode(ASCII) -> Base64
%% ASCII - string() | binary()
%% Base64 - binary()
%%
%% Description: Encodes a plain ASCII string (or binary) into base64.
%%-------------------------------------------------------------------------
-spec encode(string() | binary()) -> binary().
%% Binaries take the optimized binary path; lists are encoded as lists
%% and converted to a binary at the end.
encode(Bin) when is_binary(Bin) ->
    encode_binary(Bin);
encode(List) when is_list(List) ->
    list_to_binary(encode_l(List)).
-spec encode_l(string()) -> ascii_string().
%% Encode a list of bytes three at a time (3 octets -> 4 base64url
%% characters). The one- and two-byte tails are emitted *without* '='
%% padding, as RFC 4648 permits for the URL-safe alphabet.
encode_l([]) ->
    [];
encode_l([A]) ->
    [b64e(A bsr 2),
     b64e((A band 3) bsl 4)];
encode_l([A,B]) ->
    [b64e(A bsr 2),
     b64e(((A band 3) bsl 4) bor (B bsr 4)),
     b64e((B band 15) bsl 2)];
encode_l([A,B,C|Ls]) ->
    BB = (A bsl 16) bor (B bsl 8) bor C,
    [b64e(BB bsr 18),
     b64e((BB bsr 12) band 63),
     b64e((BB bsr 6) band 63),
     b64e(BB band 63) | encode_l(Ls)].
%% Encode a binary 6 bits at a time over the largest prefix whose size
%% is a multiple of 3 bytes, then handle the 1- or 2-byte remainder
%% without '=' padding.
encode_binary(Bin) ->
    Split = 3*(byte_size(Bin) div 3),
    <<Main0:Split/binary,Rest/binary>> = Bin,
    Main = << <<(b64e(C)):8>> || <<C:6>> <= Main0 >>,
    case Rest of
        <<A:6,B:6,C:4>> ->
            <<Main/binary,(b64e(A)):8,(b64e(B)):8,(b64e(C bsl 2)):8>>;
        <<A:6,B:2>> ->
            <<Main/binary,(b64e(A)):8,(b64e(B bsl 4)):8>>;
        <<>> ->
            Main
    end.
%%-------------------------------------------------------------------------
%% mime_decode(Base64) -> ASCII
%% decode(Base64) -> ASCII
%% Base64 - string() | binary()
%% ASCII - binary()
%%
%% Description: Decodes a base64-encoded string to plain ASCII.
%% mime_decode strips away all characters that are not base64 before
%% converting, whereas decode crashes if an illegal character is found
%%-------------------------------------------------------------------------
-spec decode(string() | binary()) -> binary().
%% Strict decode: re-append the canonical '=' padding that base64url
%% omits, then decode. A `rem 4' of 0 yields a PaddingSize of 4, which
%% padding_b/padding_l map to "no padding".
decode(Bin) when is_binary(Bin) ->
    %% byte_size/1 is the precise BIF for binaries; size/1 also accepts
    %% tuples and is discouraged in favor of the specific BIFs.
    PaddingSize = 4 - (byte_size(Bin) rem 4),
    Padding = padding_b(PaddingSize),
    decode_binary(<<>>, <<Bin/binary, Padding/binary>>);
decode(List) when is_list(List) ->
    PaddingSize = 4 - (length(List) rem 4),
    Padding = padding_l(PaddingSize),
    list_to_binary(decode_l(List ++ Padding)).
%% Padding binary to restore canonical base64: PaddingSize comes from
%% 4 - (Len rem 4), so both 0 and 4 mean "already a multiple of four".
padding_b(2) ->
    <<$=, $=>>;
padding_b(1) ->
    <<$=>>;
padding_b(N) when N == 0; N == 4 ->
    <<>>.
%% List version of padding_b/1.
padding_l(2) ->
    [$=, $=];
padding_l(1) ->
    [$=];
padding_l(N) when N == 0; N == 4 ->
    [].
-spec mime_decode(string() | binary()) -> binary().
%% Lenient decode: characters outside the base64url alphabet are
%% silently skipped instead of crashing. No padding is appended here;
%% the binary decoder handles truncated tails directly.
mime_decode(Bin) when is_binary(Bin) ->
    mime_decode_binary(<<>>, Bin);
mime_decode(List) when is_list(List) ->
    list_to_binary(mime_decode_l(List)).
-spec decode_l(string()) -> string().
%% NOTE: strip_spaces/2 returns the input *reversed*; decode/2 is
%% written to consume a reversed string (its clauses match C2,C1 and
%% leading '='), so this is intentional, not a missing lists:reverse.
decode_l(List) ->
    L = strip_spaces(List, []),
    decode(L, []).
-spec mime_decode_l(string()) -> string().
%% Same reversal contract as decode_l/1, but illegal characters are
%% dropped rather than rejected.
mime_decode_l(List) ->
    L = strip_illegal(List, []),
    decode(L, []).
%%-------------------------------------------------------------------------
%% mime_decode_to_string(Base64) -> ASCII
%% decode_to_string(Base64) -> ASCII
%% Base64 - string() | binary()
%% ASCII - binary()
%%
%% Description: Decodes a base64-encoded string to plain ASCII.
%% mime_decode strips away all characters that are not base64 before
%% converting, whereas decode crashes if an illegal character is found
%%-------------------------------------------------------------------------
-spec decode_to_string(string() | binary()) -> string().
%% Strict decode returning a character list instead of a binary.
decode_to_string(Bin) when is_binary(Bin) ->
    decode_to_string(binary_to_list(Bin));
decode_to_string(List) when is_list(List) ->
    decode_l(List).
-spec mime_decode_to_string(string() | binary()) -> string().
%% Lenient decode (illegal characters skipped) returning a list.
mime_decode_to_string(Bin) when is_binary(Bin) ->
    mime_decode_to_string(binary_to_list(Bin));
mime_decode_to_string(List) when is_list(List) ->
    mime_decode_l(List).
%% One-based decode map.
-define(DECODE_MAP,
{bad,bad,bad,bad,bad,bad,bad,bad,ws,ws,bad,bad,ws,bad,bad, %1-15
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad, %16-31
ws,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,62,bad,bad, %32-47
52,53,54,55,56,57,58,59,60,61,bad,bad,bad,eq,bad,bad, %48-63
bad,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14, %64-79
15,16,17,18,19,20,21,22,23,24,25,bad,bad,bad,bad,63, % 80-95
bad,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,
41,42,43,44,45,46,47,48,49,50,51,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,
bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad,bad}).
%% Strict binary decoder: accumulate decoded 6-bit groups into a
%% bitstring. Whitespace is skipped; any other non-alphabet byte raises
%% {badarg, C}. On the first '=' only more '='/whitespace may follow;
%% the accumulated bitstring then carries 4 ("==") or 2 ("=") trailing
%% zero pad bits, which are cut off before returning.
decode_binary(Result0, <<C:8,T0/bits>>) ->
    case element(C, ?DECODE_MAP) of
        bad ->
            erlang:error({badarg,C});
        ws ->
            decode_binary(Result0, T0);
        eq ->
            case strip_ws(T0) of
                <<$=:8,T/binary>> ->
                    %% "==": assert nothing but whitespace remains,
                    %% then drop the 4 pad bits.
                    <<>> = strip_ws(T),
                    Split = byte_size(Result0) - 1,
                    <<Result:Split/bytes,_:4>> = Result0,
                    Result;
                T ->
                    %% Single "=": drop the 2 pad bits.
                    <<>> = strip_ws(T),
                    Split = byte_size(Result0) - 1,
                    <<Result:Split/bytes,_:2>> = Result0,
                    Result
            end;
        Bits ->
            decode_binary(<<Result0/bits,Bits:6>>, T0)
    end;
decode_binary(Result, <<>>) ->
    %% Input without padding must decode to a whole number of bytes;
    %% this match crashes if we ended mid-group.
    true = is_binary(Result),
    Result.
%% Lenient binary decoder: NUL bytes, whitespace and anything outside
%% the base64url alphabet are skipped. The first '=' terminates
%% decoding; whether another '=' follows anywhere in the tail decides
%% how many trailing pad bits (4 vs 2) to discard.
mime_decode_binary(Result, <<0:8,T/bits>>) ->
    mime_decode_binary(Result, T);
mime_decode_binary(Result0, <<C:8,T/bits>>) ->
    case element(C, ?DECODE_MAP) of
        Bits when is_integer(Bits) ->
            mime_decode_binary(<<Result0/bits,Bits:6>>, T);
        eq ->
            case tail_contains_equal(T) of
                true ->
                    Split = byte_size(Result0) - 1,
                    <<Result:Split/bytes,_:4>> = Result0,
                    Result;
                false ->
                    Split = byte_size(Result0) - 1,
                    <<Result:Split/bytes,_:2>> = Result0,
                    Result
            end;
        _ ->
            %% ws / bad entries: ignore and continue.
            mime_decode_binary(Result0, T)
    end;
mime_decode_binary(Result, <<>>) ->
    true = is_binary(Result),
    Result.
%% Decode a *reversed* base64-with-padding string four characters at a
%% time (hence the C2,C1 argument order and '=' at the head of each
%% clause), prepending decoded octets to the accumulator so the final
%% result comes out in the original order.
decode([], A) -> A;
decode([$=,$=,C2,C1|Cs], A) ->
    %% Two pad chars: the quad encodes a single octet.
    Bits2x6 = (b64d(C1) bsl 18) bor (b64d(C2) bsl 12),
    Octet1 = Bits2x6 bsr 16,
    decode(Cs, [Octet1|A]);
decode([$=,C3,C2,C1|Cs], A) ->
    %% One pad char: the quad encodes two octets.
    Bits3x6 = (b64d(C1) bsl 18) bor (b64d(C2) bsl 12)
        bor (b64d(C3) bsl 6),
    Octet1 = Bits3x6 bsr 16,
    Octet2 = (Bits3x6 bsr 8) band 16#ff,
    decode(Cs, [Octet1,Octet2|A]);
decode([C4,C3,C2,C1| Cs], A) ->
    %% Full quad: three octets.
    Bits4x6 = (b64d(C1) bsl 18) bor (b64d(C2) bsl 12)
        bor (b64d(C3) bsl 6) bor b64d(C4),
    Octet1 = Bits4x6 bsr 16,
    Octet2 = (Bits4x6 bsr 8) band 16#ff,
    Octet3 = Bits4x6 band 16#ff,
    decode(Cs, [Octet1,Octet2,Octet3|A]).
%%%========================================================================
%%% Internal functions
%%%========================================================================
%% Remove ASCII whitespace from a string. NOTE: the accumulator is
%% returned without lists:reverse/1, so the output is the input
%% *reversed* -- decode/2 relies on this.
strip_spaces([], A) -> A;
strip_spaces([$\s|Cs], A) -> strip_spaces(Cs, A);
strip_spaces([$\t|Cs], A) -> strip_spaces(Cs, A);
strip_spaces([$\r|Cs], A) -> strip_spaces(Cs, A);
strip_spaces([$\n|Cs], A) -> strip_spaces(Cs, A);
strip_spaces([C|Cs], A) -> strip_spaces(Cs, [C | A]).
%% Drop leading ASCII whitespace (tab, LF, CR, space) from a binary.
strip_ws(<<C, Rest/binary>>) when C =:= $\t; C =:= $\n; C =:= $\r; C =:= $\s ->
    strip_ws(Rest);
strip_ws(Bin) ->
    Bin.
%% Drop every byte that is not part of the base64url alphabet (NUL,
%% whitespace and 'bad' map entries). As with strip_spaces/2, the
%% result is reversed. The first '=' switches to strip_illegal_end/2.
strip_illegal([0|Cs], A) ->
    strip_illegal(Cs, A);
strip_illegal([C|Cs], A) ->
    case element(C, ?DECODE_MAP) of
        bad -> strip_illegal(Cs, A);
        ws -> strip_illegal(Cs, A);
        eq -> strip_illegal_end(Cs, [$=|A]);
        _ -> strip_illegal(Cs, [C|A])
    end;
strip_illegal([], A) -> A.
%% After one '=' has been seen: a second '=' ends the scan immediately;
%% alphabet bytes and junk fall back to the normal stripping loop.
strip_illegal_end([0|Cs], A) ->
    strip_illegal_end(Cs, A);
strip_illegal_end([C|Cs], A) ->
    case element(C, ?DECODE_MAP) of
        bad -> strip_illegal(Cs, A);
        ws -> strip_illegal(Cs, A);
        eq -> [C|A];
        _ -> strip_illegal(Cs, [C|A])
    end;
strip_illegal_end([], A) -> A.
%% True if the remaining input contains at least one '=' byte.
tail_contains_equal(Bin) ->
    binary:match(Bin, <<"=">>) =/= nomatch.
%% accessors
%% Map a 6-bit value (0..63) to its base64url character. The alphabet
%% uses '-' and '_' in place of standard base64's '+' and '/'.
%% Out-of-range values raise badarg, as the tuple lookup did.
b64e(X) ->
    binary:at(<<"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                "abcdefghijklmnopqrstuvwxyz"
                "0123456789-_">>, X).
%% Map a base64url character to its 6-bit value; whitespace, '=' or
%% illegal characters crash with a function_clause in b64d_ok/1.
b64d(X) ->
    b64d_ok(element(X, ?DECODE_MAP)).
b64d_ok(I) when is_integer(I) -> I.
%%%-----------------------------------------------------------------------------
%%% Copyright (c) 2013-2018 Klarna AB
%%%
%%% This file is provided to you under the Apache License,
%%% Version 2.0 (the "License"); you may not use this file
%%% except in compliance with the License. You may obtain
%%% a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing,
%%% software distributed under the License is distributed on an
%%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%%% KIND, either express or implied. See the License for the
%%% specific language governing permissions and limitations
%%% under the License.
%%%
%%% @author <NAME> <<EMAIL>>
%%%-----------------------------------------------------------------------------
-module(avro_array).
%% API
-export([ cast/2
, encode/3
, get_items/1
, get_items_type/1
, new/1
, new/2
, prepend/2
, resolve_fullname/2
, to_term/1
, type/1
, type/2
, update_items_type/2
]).
%% API to be used only inside erlavro
-export([new_direct/2]).
-include("avro_internal.hrl").
%%%_* APIs =====================================================================
%% @doc Define an array type with the given item type.
-spec type(type_or_name()) -> array_type().
type(Type) -> type(Type, []).
%% @doc Define an array type with custom properties.
-spec type(type_or_name(), [custom_prop()]) -> array_type().
type(Type, CustomProps) ->
  %% Canonicalize both the item type (which may be given as a name) and
  %% the custom property list before storing them in the type record.
  #avro_array_type{ type = avro_util:canonicalize_type_or_name(Type)
                  , custom = avro_util:canonicalize_custom_props(CustomProps)
                  }.
%% @doc Resolve the item type's fullname against namespace `Ns'.
-spec resolve_fullname(array_type(), namespace()) -> array_type().
resolve_fullname(Array, Ns) ->
  update_items_type(Array, fun(T) -> avro:resolve_fullname(T, Ns) end).
%% @doc Update the item type by applying callback `F' to it.
-spec update_items_type(array_type(),
                        fun((type_or_name()) -> type_or_name())) ->
        array_type().
update_items_type(#avro_array_type{type = ST} = T, F) ->
  T#avro_array_type{type = F(ST)}.
%% @doc Get the array's element type.
-spec get_items_type(array_type()) -> avro_type().
get_items_type(ArrayType) when ?IS_ARRAY_TYPE(ArrayType) ->
  ArrayType#avro_array_type.type.
%% @doc Create a wrapped (boxed) empty array avro value.
-spec new(array_type()) -> avro_value().
new(Type) ->
  new(Type, []).
%% @doc Create a wrapped (boxed) avro value holding `List'. Raises the
%% cast error as an exception if any element cannot be cast to the
%% array's item type.
-spec new(array_type(), [term()]) -> avro_value() | no_return().
new(Type, List) when ?IS_ARRAY_TYPE(Type) ->
  case cast(Type, List) of
    {ok, Value} -> Value;
    {error, Err} -> erlang:error(Err)
  end.
%% @doc Special optimized version of new/2 which assumes all items in
%% List have already been cast to the array's item type, so the cast
%% checks can be skipped. Should only be used inside erlavro.
%% @end
-spec new_direct(array_type(), [avro:in()]) -> avro_value().
new_direct(Type, List) when ?IS_ARRAY_TYPE(Type) ->
  ?AVRO_VALUE(Type, List).
%% @doc Return the array contents as a list of (boxed) avro values.
-spec get_items(avro_value()) -> [avro_value()].
get_items(Value) when ?IS_ARRAY_VALUE(Value) ->
  ?AVRO_VALUE_DATA(Value).
%% @doc Prepend elements to the array. Each new item is cast to the
%% array's item type first; a failed cast crashes with badmatch.
-spec prepend([term()], avro_value()) -> avro_value() | no_return().
prepend(Items0, Value) when ?IS_ARRAY_VALUE(Value) ->
  Type = ?AVRO_VALUE_TYPE(Value),
  Data = ?AVRO_VALUE_DATA(Value),
  #avro_array_type{type = ItemType} = Type,
  {ok, Items} = cast_items(ItemType, Items0, []),
  new_direct(Type, Items ++ Data).
%% @hidden Only an erlang list can be cast to an array value; the guard
%% asserts the target type really is an array type.
-spec cast(array_type(), [avro:in()]) -> {ok, avro_value()} | {error, term()}.
cast(Type, Value) when ?IS_ARRAY_TYPE(Type) ->
  do_cast(Type, Value).
%% @hidden Recursively unbox a typed array value into a plain list.
-spec to_term(avro_value()) -> list().
to_term(Array) when ?IS_ARRAY_VALUE(Array) ->
  [ avro:to_term(Item) || Item <- ?AVRO_VALUE_DATA(Array) ].
%% @hidden Encoder helper, for internal use only: applies EncodeFun to
%% every element together with the array's item type.
-spec encode(type_or_name(), list(), fun()) -> list().
encode(Type, Value, EncodeFun) ->
  ItemsType = avro_array:get_items_type(Type),
  [EncodeFun(ItemsType, Element) || Element <- Value].
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private Cast a plain list into a boxed array value, casting each
%% element to the item type; the first failure aborts with its reason.
-spec do_cast(array_type(), [avro:in()]) ->
        {ok, avro_value()} | {error, term()}.
do_cast(Type, Items) when is_list(Items) ->
  #avro_array_type{type = ItemType} = Type,
  case cast_items(ItemType, Items, []) of
    {ok, ResArray} -> {ok, ?AVRO_VALUE(Type, ResArray)};
    {error, Reason} -> {error, Reason}
  end.
%% @private Cast every item to TargetType, accumulating in reverse and
%% restoring order at the end; stops at the first cast error.
-spec cast_items(avro_type(), [term()], [avro_value()]) ->
        {ok, [avro_value()]} | {error, any()}.
cast_items(_TargetType, [], Acc) ->
  {ok, lists:reverse(Acc)};
cast_items(TargetType, [Item|H], Acc) ->
  case avro:cast(TargetType, Item) of
    {ok, Value} -> cast_items(TargetType, H, [Value|Acc]);
    Err -> Err
  end.
%%%_* Emacs ============================================================
%%% Local Variables:
%%% allout-layout: t
%%% erlang-indent-level: 2
%%% End: | src/avro_array.erl | 0.534612 | 0.442215 | avro_array.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Riak: A lightweight, decentralized key-value store.
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_core_status).
-export([ringready/0,
all_active_transfers/0,
transfers/0,
ring_status/0]).
-spec(ringready() -> {ok, [atom()]} | {error, any()}).
%% Check that every reachable cluster member agrees on ring ownership.
%% Returns {ok, Nodes} when all rings hash identically,
%% {error, {different_owners, N1, N2}} on a mismatch, or
%% {error, {nodes_down, Down}} if any member could not be reached.
ringready() ->
    case get_rings() of
        {[], Rings} ->
            {N1,R1}=hd(Rings),
            case rings_match(hash_ring(R1), tl(Rings)) of
                true ->
                    Nodes = [N || {N,_} <- Rings],
                    {ok, Nodes};
                {false, N2} ->
                    {error, {different_owners, N1, N2}}
            end;
        {Down, _Rings} ->
            {error, {nodes_down, Down}}
    end.
-spec(transfers() -> {[atom()], [{waiting_to_handoff, atom(), integer()} |
                                 {stopped, atom(), integer()}]}).
%% Summarize pending handoffs: for each reachable node, report how many
%% secondary partitions are waiting to hand off and how many owned
%% partitions have no running vnode. Unreachable nodes are returned in
%% the first element of the result tuple.
transfers() ->
    {Down, Rings} = get_rings(),
    %% Work out which vnodes are running and which partitions they claim
    F = fun({N,R}, Acc) ->
                {_Pri, Sec, Stopped} = partitions(N, R),
                Acc1 = case Sec of
                           [] ->
                               [];
                           _ ->
                               [{waiting_to_handoff, N, length(Sec)}]
                       end,
                case Stopped of
                    [] ->
                        Acc1 ++ Acc;
                    _ ->
                        Acc1 ++ [{stopped, N, length(Stopped)} | Acc]
                end
        end,
    {Down, lists:foldl(F, [], Rings)}.
%% @doc Produce status for all active outbound transfers in the
%% cluster, together with the members that could not be reached.
-spec all_active_transfers() -> {Xfers::list(), Down::list()}.
all_active_transfers() ->
    {Transfers, DownNodes} =
        riak_core_util:rpc_every_member(riak_core_handoff_manager,
                                        status,
                                        [{direction, outbound}],
                                        5000),
    {Transfers, DownNodes}.
%% Collect an overall picture of the ring: the claimant and whether it
%% believes the ring is ready, unreachable and administratively-down
%% members, and per-transfer vnode-module progress.
ring_status() ->
    %% Determine which nodes are reachable as well as what vnode modules
    %% are running on each node.
    {ok, Ring} = riak_core_ring_manager:get_raw_ring(),
    {AllMods, Down} =
        riak_core_util:rpc_every_member_ann(riak_core, vnode_modules, [], 1000),
    %% Check if the claimant is running and if it believes the ring is
    %% ready; when it cannot be reached, count it among the down nodes.
    Claimant = riak_core_ring:claimant(Ring),
    {RingReady, Down2} =
        case riak_core_util:safe_rpc(Claimant, riak_core_ring, ring_ready,
                                     [], 5000) of
            {badrpc, _} ->
                {undefined, lists:usort([Claimant | Down])};
            Ready ->
                {Ready, Down}
        end,
    %% Get the list of pending ownership changes and group them by
    %% (Owner, NextOwner).
    Changes = riak_core_ring:pending_changes(Ring),
    Merged = lists:foldl(
               fun({Idx, Owner, NextOwner, Mods, Status}, Acc) ->
                       orddict:append({Owner, NextOwner},
                                      {Idx, Mods, Status},
                                      Acc)
               end, [], Changes),
    %% For each pending transfer, determine which vnode modules have
    %% completed handoff and which we are still waiting on. Result form:
    %% [{{Owner, NextOwner}, [{Index, WaitingMods, CompletedMods,
    %%   Status}]}]; a 'down' marker replaces WaitingMods when the
    %% owner's module list is unknown.
    TransferStatus =
        orddict:map(
          fun({Owner, _}, Transfers) ->
                  case orddict:find(Owner, AllMods) of
                      error ->
                          [{Idx, down, Mods, Status}
                           || {Idx, Mods, Status} <- Transfers];
                      {ok, OwnerMods} ->
                          NodeMods = [Mod || {_App, Mod} <- OwnerMods],
                          [{Idx, NodeMods -- Mods, Mods, Status}
                           || {Idx, Mods, Status} <- Transfers]
                  end
          end, Merged),
    MarkedDown = riak_core_ring:down_members(Ring),
    {Claimant, RingReady, Down2, MarkedDown, TransferStatus}.
%% ===================================================================
%% Internal functions
%% ===================================================================
%% Retrieve the rings of all cluster members by RPC, upgrading each to
%% the current ring format. Returns {DownNodes, [{OwnerNode, Ring}]}
%% with both lists sorted.
get_rings() ->
    {RawRings, Down} = riak_core_util:rpc_every_member(
                         riak_core_ring_manager, get_my_ring, [], 30000),
    RawRings2 = [riak_core_ring:upgrade(R) || {ok, R} <- RawRings],
    Rings = orddict:from_list([{riak_core_ring:owner_node(R), R} || R <- RawRings2]),
    {lists:sort(Down), Rings}.
%% Produce a hash of the ownership ('chash') portion of the ring, for
%% cheap equality comparison across nodes.
hash_ring(R) ->
    erlang:phash2(riak_core_ring:all_owners(R)).
%% Check whether every ring in the list hashes to R1hash; returns true,
%% or {false, Node} identifying the first disagreeing node.
rings_match(_, []) ->
    true;
rings_match(R1hash, [{N2, R2} | Rest]) ->
    case hash_ring(R2) of
        R1hash ->
            rings_match(R1hash, Rest);
        _ ->
            {false, N2}
    end.
%% Get the ordered set of partition indices with a running vnode on
%% Node, regardless of vnode type.
active_partitions(Node) ->
    VNodes = gen_server:call({riak_core_vnode_manager, Node}, all_vnodes, 30000),
    lists:foldl(fun({_, P, _}, Ps) ->
                        ordsets:add_element(P, Ps)
                end, [], VNodes).
%% Classify the partitions for Node into primaries (owned and running),
%% secondaries (running but not owned, i.e. waiting to be handed off)
%% and stopped (owned but with no running vnode).
partitions(Node, Ring) ->
    Owners = riak_core_ring:all_owners(Ring),
    Owned = ordsets:from_list(owned_partitions(Owners, Node)),
    Active = ordsets:from_list(active_partitions(Node)),
    Stopped = ordsets:subtract(Owned, Active),
    Secondary = ordsets:subtract(Active, Owned),
    Primary = ordsets:subtract(Active, Secondary),
    {Primary, Secondary, Stopped}.
%% Return the indices of all partitions owned by Node, in ring order.
owned_partitions(Owners, Node) ->
    lists:filtermap(fun({P, Owner}) when Owner =:= Node -> {true, P};
                       (_) -> false
                    end, Owners).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2011 Bash<NAME>, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Proof of concept for recursive input (fitting sending output
%% to itself). When this fitting receives an input, it passes
%% that input to its output, and also passes `Input-1' to itself
%% as input until the input is `0'. Thus, sending `3' as the
%% input to this fitting, would result in the outputs `3', `2',
%% `1', and `0'. That is:
%%
%%```
%% Spec = [#fitting_spec{name=counter,
%% module=riak_pipe_w_rec_countdown}],
%% {ok, Pipe} = riak_pipe:exec(Spec, []),
%% riak_pipe:queue_work(Pipe, 3),
%% riak_pipe:eoi(Pipe),
%% {eoi, Results, []} = riak_pipe:collect_results(Pipe).
%% [{counter,0},{counter,1},{counter,2},{counter,3}] = Results.
%%'''
%%
%% This fitting should work with any consistent-hash function.
%% It requires no archiving for handoff.
%%
%% If the argument is the atom `testeoi', then the final
%% recursive input (`0') will be sent three times, with no delay
%% before the second case and a 1-second delay before the third.
%% These two sends should test the behavior of vnode enqueueing
%% while attempting to force a worker to `done'. If all `eoi'
%% handling is done properly, then `0' should appear three times
%% in the result list. The `testeoi' case should go like this:
%%
%%```
%% Spec = [#fitting_spec{name=counter,
%% module=riak_pipe_w_rec_countdown,
%% arg=testeoi}],
%% Options = [{trace,[restart]},{log,sink}],
%% {ok, Pipe} = riak_pipe:exec(Spec, Options),
%% riak_pipe:queue_work(Pipe, 3),
%% riak_pipe:eoi(Pipe),
%% {eoi, Results, Trace} = riak_pipe:collect_results(Pipe).
%% [{counter,0},{counter,0},{counter,0},
%% {counter,1},{counter,2},{counter,3}] = Results.
%% [{counter,{trace,[restart],{vnode,{restart,_}}}}] = Trace.
%%'''
%%
%% If `Results' contains less than three instances of
%% `{counter,0}', then the test failed. If `Trace' is empty, the
%% done/eoi race was not triggered, and the test should be
%% re-run.
%%
%% NOTE: This test code has been copied to the EUnit tests in riak_pipe.erl,
%% into the basic_test_() collection.
-module(riak_pipe_w_rec_countdown).
-behaviour(riak_pipe_vnode_worker).
-export([init/2,
process/3,
done/1]).
-include("riak_pipe.hrl").
-include("riak_pipe_log.hrl").
-record(state, {p :: riak_pipe_vnode:partition(),
fd :: riak_pipe_fitting:details()}).
-opaque state() :: #state{}.
%% @doc Initialization just stows the partition and fitting details in
%% the module's state, for sending outputs in {@link process/3}.
-spec init(riak_pipe_vnode:partition(),
           riak_pipe_fitting:details()) ->
        {ok, state()}.
init(Partition, FittingDetails) ->
    {ok, #state{p=Partition, fd=FittingDetails}}.
%% @doc Process sends `Input' directly to the next fitting, and also
%% `Input-1' back to this fitting as new input, until 0 is reached.
%% When the fitting argument is `testeoi', the terminal `0' is recursed
%% three times (twice immediately, once after a 1-second sleep) to
%% exercise the vnode's done/eoi race handling.
-spec process(term(), boolean(), state()) -> {ok, state()}.
process(Input, _Last, #state{p=Partition, fd=FittingDetails}=State) ->
    ?T(FittingDetails, [], {input, Input, Partition}),
    riak_pipe_vnode_worker:send_output(Input, Partition, FittingDetails),
    if Input =< 0 ->
            %% Base case: stop recursing.
            ok;
       Input == 1, FittingDetails#fitting_details.arg == testeoi ->
            %% Test mode: recurse 0 three times with trace markers so a
            %% test can observe where the enqueue raced the 'done'.
            ?T(FittingDetails, [], {zero1, Partition}),
            riak_pipe_vnode_worker:recurse_input(
              0, Partition, FittingDetails),
            ?T(FittingDetails, [], {zero2, Partition}),
            riak_pipe_vnode_worker:recurse_input(
              0, Partition, FittingDetails),
            timer:sleep(1000),
            ?T(FittingDetails, [], {zero3, Partition}),
            riak_pipe_vnode_worker:recurse_input(
              0, Partition, FittingDetails);
       true ->
            ?T(FittingDetails, [], {recinput, Input-1, Partition}),
            riak_pipe_vnode_worker:recurse_input(
              Input-1, Partition, FittingDetails)
    end,
    {ok, State}.
%% @doc No cleanup is needed for this fitting; just emit a trace marker.
-spec done(state()) -> ok.
done(_State) ->
    ?T(_State#state.fd, [], {done, _State#state.p}),
    ok.
%%%========================================================================
%%% File: ring.erl
%%%
%%% This module implements a simple ring benchmark in Erlang.
%%%
%%%
%%% The ring benchmark is about sending a message over a ring of processes.
%%% The following are the parameters of the benchmark that can be
%%% configured:
%%%
%%% - Number of processes `P`
%%% - Size of the message (in bytes)
%%% - Type of the message (string or binary)
%%% - Number of times the message is circulated over the ring `N`
%%%
%%% Once the benchmark has finished, a total of `P` * `N`
%%% messages will have been sent.
%%%
%%% Note that ERTS does not allow you to create more than 262144 processes.
%%% This number can be changed by starting the Erlang emulator using the
%%% +P flag (i.e. erl +P 500000).
%%%
%%%
%%% Author: <NAME> <<EMAIL>>
%%%
%%%-- LICENSE -------------------------------------------------------------
%%% The MIT License (MIT)
%%%
%%% Copyright (c) 2015 <NAME>
%%%
%%% Permission is hereby granted, free of charge, to any person obtaining
%%% a copy of this software and associated documentation files (the
%%% "Software"), to deal in the Software without restriction, including
%%% without limitation the rights to use, copy, modify, merge, publish,
%%% distribute, sublicense, and/or sell copies of the Software,
%%% and to permit persons to whom the Software is furnished to do so,
%%% subject to the following conditions:
%%%
%%% The above copyright notice and this permission notice shall be included
%%% in all copies or substantial portions of the Software.
%%%
%%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
%%% IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
%%% CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
%%% TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
%%% SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
%%%========================================================================
-module(ring).
%% API
-export([run/4]).
%%=========================================================================
%% API
%%=========================================================================
%%-------------------------------------------------------------------------
%% @doc
%% Run the ring benchmark using `P` processes and circulating the message
%% over the ring `N` times. That is, a total of `N` * `P` messages will
%% be sent. The circulated message is of size `Size` and can be either a
%% string or a binary.
%% @end
%%-------------------------------------------------------------------------
run(P, Size, Type, N) when P > 0, N > 0 ->
    %% Spawn the ring: Peer1 waits for the init message; each further
    %% process is spawned pointing at the previously spawned one, so the
    %% send chain is PeerP -> ... -> Peer2 -> Peer1. Telling Peer1 to
    %% send to PeerP closes the ring (for P = 1 the ring is Peer1 alone).
    Peer1 = spawn(fun() -> loop0() end),
    PeerP = lists:foldl(fun(_, Peer) -> spawn(fun() -> loop(Peer) end) end,
                        Peer1,
                        lists:seq(2, P)),
    Msg = gen_msg(Size, Type),
    Peer1 ! {init, self(), PeerP, Msg, N},
    %% Block until the message has completed N laps; the result is
    %% {AveragePerHopLatencyMicros, TotalTimeMicros} (see loop/1).
    receive
        {result, Result} ->
            Result
    end.
%%=========================================================================
%% Local functions
%%=========================================================================
%% First ring member: waits for the init message from the benchmark
%% driver, timestamps the start, injects the message into the ring and
%% then behaves like every other ring member.
loop0() ->
    receive
        {init, Orig, Peer, Msg, N} ->
            %% os:timestamp/0 replaces the deprecated erlang:now/0; it
            %% returns the same {MegaSecs, Secs, MicroSecs} triple that
            %% timer:now_diff/2 expects.
            T0 = os:timestamp(),
            Peer ! {msg, Orig, self(), N, T0, T0, {0, 0}, Msg},
            loop(Peer)
    end.
%% Ring member main loop. Message shape:
%% {msg, Origin, Injector, LapsLeft, T0, TPrev, {LatAcc, Hops}, Payload}
%% where T0 is the injection time, TPrev the time of the previous hop,
%% LatAcc the accumulated per-hop latency (microseconds) and Hops the
%% number of hops taken so far. erlang:now/0 is deprecated, so every
%% timestamp below uses os:timestamp/0 (same format for timer:now_diff/2).
loop(Peer) ->
    receive
        %% Back at the injector with the final lap complete: report the
        %% average per-hop latency and the total circulation time to the
        %% original caller, then start tearing the ring down.
        {msg, Pid1, Pid2, Cnt, T0, T1, {Acc, N}, _Msg}
          when Pid2 == self(), Cnt == 1 ->
            T2 = os:timestamp(),
            Lat = timer:now_diff(T2, T1),
            AvgLat = (Acc + Lat) / (N + 1),
            TotTime = timer:now_diff(T2, T0),
            Peer ! stop,
            Pid1 ! {result, {AvgLat, TotTime}};
        %% Back at the injector with laps remaining: start the next lap.
        {msg, Pid1, Pid2, Cnt, T0, T1, {Acc, N}, Msg} when Pid2 == self() ->
            T2 = os:timestamp(),
            Lat = timer:now_diff(T2, T1),
            Peer ! {msg, Pid1, Pid2, Cnt - 1, T0, T2, {Acc + Lat, N + 1}, Msg},
            loop(Peer);
        %% Ordinary hop: accumulate this hop's latency and forward.
        {msg, Pid1, Pid2, Cnt, T0, T1, {Acc, N}, Msg} ->
            T2 = os:timestamp(),
            Lat = timer:now_diff(T2, T1),
            Peer ! {msg, Pid1, Pid2, Cnt, T0, T2, {Acc + Lat, N + 1}, Msg},
            loop(Peer);
        %% Teardown propagates once around the ring.
        stop ->
            Peer ! stop
    end.
%% Generate a payload of `Size' printable ASCII characters (codes
%% 33..126). `str' yields a string (list of chars); `bin' the same
%% content as a binary. The `rand' module replaces the long-deprecated
%% `random' module (OTP >= 18); a comprehension replaces the foldl,
%% which only ever built a list of independent random characters.
gen_msg(Size, bin) ->
    list_to_binary(gen_msg(Size, str));
gen_msg(Size, str) ->
    [rand:uniform(94) + 32 || _ <- lists:seq(1, Size)].
%%
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc DotSet.
%%
%% @reference <NAME>, <NAME>, and <NAME>
%% Delta State Replicated Data Types (2016)
%% [http://arxiv.org/pdf/1603.01529v1.pdf]
-module(dot_set).
-author("<NAME> <<EMAIL>>").
-behaviour(dot_store).
-export([
new/0,
from_dots/1,
add_dot/2,
is_empty/1,
is_element/2,
union/2,
intersection/2,
subtract/2,
subtract_causal_context/2,
fold/3
]).
-type dot_set() :: dot_store:dot_set().
%% @doc Create an empty DotSet.
-spec new() -> dot_set().
new() ->
ordsets:new().
%% @doc Create a DotSet from a list of dots.
%% ordsets:from_list/1 sorts and deduplicates in one pass, which is
%% equivalent to — but cheaper than — the previous remote self-call
%% loop that added the dots one at a time.
-spec from_dots(list(dot_store:dot())) -> dot_set().
from_dots(Dots) ->
    ordsets:from_list(Dots).
%% @doc Add a dot to the DotSet.
-spec add_dot(dot_store:dot(), dot_set()) -> dot_set().
add_dot(Dot, DotSet) ->
ordsets:add_element(Dot, DotSet).
%% @doc Check if a DotSet is empty.
%% ordsets are plain lists, so ordsets:size/1 is O(n); ordsets:is_empty/1
%% (OTP >= 21) answers in constant time without walking the set.
-spec is_empty(dot_set()) -> boolean().
is_empty(DotSet) ->
    ordsets:is_empty(DotSet).
%% @doc Check if a dot belongs to the DotSet.
-spec is_element(dot_store:dot(), dot_set()) -> boolean().
is_element(Dot, DotSet) ->
ordsets:is_element(Dot, DotSet).
%% @doc Union two DotSets.
-spec union(dot_set(), dot_set()) -> dot_set().
union(DotSetA, DotSetB) ->
ordsets:union(DotSetA, DotSetB).
%% @doc Intersect two DotSets.
-spec intersection(dot_set(), dot_set()) -> dot_set().
intersection(DotSetA, DotSetB) ->
ordsets:intersection(DotSetA, DotSetB).
%% @doc Subtract a DotSet from a DotSet.
-spec subtract(dot_set(), dot_set()) -> dot_set().
subtract(DotSetA, DotSetB) ->
ordsets:subtract(DotSetA, DotSetB).
%% @doc Subtract a CausalContext from a DotSet.
%% Keeps only the dots that the causal context has not yet seen
%% (i.e. dots for which causal_context:is_element/2 is false).
-spec subtract_causal_context(dot_set(),
                              causal_context:causal_context()) ->
    dot_set().
subtract_causal_context(DotSet, CausalContext) ->
    ordsets:filter(
        fun(Dot) ->
            not causal_context:is_element(Dot, CausalContext)
        end,
        DotSet
    ).
%% @doc Fold over a DotSet, applying `Fun(Dot, Acc)' to each dot in
%% order and threading the accumulator (starts at `AccIn').
%% (Also removes dataset-extraction residue that was fused onto the
%% original last line.)
-spec fold(function(), term(), dot_set()) -> term().
fold(Fun, AccIn, DotSet) ->
    ordsets:fold(Fun, AccIn, DotSet).
%%% ==========================================================================
%%% Copyright 2015 Silent Circle
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%% ==========================================================================
%%% ==========================================================================
%%% @author <NAME>
%%% @author <NAME> <<EMAIL>>
%%% @copyright 2015 Silent Circle
%%% @doc sc_util_app unit testing.
%%% @end
%%% @private
%%% ==========================================================================
-module(sc_util_app_SUITE).
%%% ==========================================================================
%%% Includes
%%% ==========================================================================
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%%% ==========================================================================
%%% Exports
%%% ==========================================================================
%%% Test cases
-export([
merge_config/1,
start_applications/1,
get_app_info/1
]).
%%% Common test callbacks
-export([suite/0, all/0,
init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2]).
%%% ==========================================================================
%%% Exports
%%% ==========================================================================
-define(assertMerge(Exp, Conf, Over),
?assertMatch(Exp, sc_util_app:merge_config(Conf, Over))).
%%% ==========================================================================
%%% Common Test Callbacks
%%% ==========================================================================
suite() ->
[{timetrap, {seconds, 30}}].
all() ->
[merge_config,
start_applications,
get_app_info].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(start_applications, Config) ->
[{app_mgr, mock_app()} |Config];
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(start_applications, Config) ->
cleanup_app(?config(app_mgr, Config)),
ok;
end_per_testcase(_TestCase, _Config) ->
ok.
%%% ==========================================================================
%%% Test Case Functions
%%% ==========================================================================
merge_config(_Config) ->
?assertMerge([], [], []),
?assertMerge([{foo, 1}], [{foo, 1}], []),
?assertMerge([{foo, 1}, {bar, 2}], [{foo, 1}, {bar, 2}], []),
?assertMerge([{foo, [{spam, 1}]}, {bar, 2}],
[{foo, [{spam, 1}]}, {bar, 2}], []),
?assertMerge([{foo, 1}], [], [{foo, 1}]),
?assertMerge([{foo, 1}, {bar, 2}], [], [{foo, 1}, {bar, 2}]),
?assertMerge([{bar, 2}, {foo, [{spam, 1}]}],
[], [{bar, 2}, {foo, [{spam, 1}]}]),
?assertMerge([{foo, 2}], [{foo, 1}], [{foo, 2}]),
?assertMerge([{foo, 1}, {bar, 2}], [{foo, 1}], [{bar, 2}]),
?assertMerge([{bar, 2}, {foo, 1}], [{bar, 2}], [{foo, 1}]),
?assertMerge([{foo, 2}, {bar, 2}], [{foo, 1}, {bar, 2}], [{foo, 2}]),
?assertMerge([{bar, 2}, {foo, [{spam, 1}, {bacon, 3}]}],
[{bar, 2}, {foo, [{spam, 1}]}], [{foo, [{bacon, 3}]}]),
?assertMerge([{bar, 2}, {foo, [{spam, 3}]}],
[{bar, 2}, {foo, [{spam, 1}]}], [{foo, [{spam, 3}]}]),
?assertMerge([{foo, [{spam, 3}]}, {bar, 2}],
[{foo, [{spam, 1}]}], [{bar, 2}, {foo, [{spam, 3}]}]),
?assertMerge([{foo, [{spam, 1}]}, {bar, 2}],
[{foo, [{spam, 1}]}, {bar, 2}], [{foo, []}]),
ok.
start_applications(_Config) ->
?assertMatch({ok, []}, sc_util_app:start_applications([])),
?assertMatch({ok, [{foo, 1}]},
sc_util_app:start_applications([{{foo, 1}, []}])),
?assertMatch({ok, [{foo, 2}, {foo, 3}, {foo, 4}]},
sc_util_app:start_applications([{{foo, 4}, []}])),
application:load({bar, 2}),
?assertMatch({ok, [{bar, 1}, {bar, 2}, {bar, 3}]},
sc_util_app:start_applications([{{bar, 3}, []}])),
?assertMatch({ok, [{bar, 4}, {bar, 5}]},
sc_util_app:start_applications([{{bar, 5}, []},
{{bar, 3}, []},
{{bar, 4}, []}],
[{logger, fun test_logger/3}])),
?assertMatch({error, _},
sc_util_app:start_applications([{{buz, 2}, []},
{dummy, []}])),
?assertMatch({ok, [{spam, 1}]},
sc_util_app:start_applications([{{spam, 1},
[{key1, val1},
{key2, val2}]}])),
?assertMatch({ok, val1}, application:get_env({spam, 1}, key1)),
?assertMatch({ok, val2}, application:get_env({spam, 1}, key2)),
?assertMatch(undefined, application:get_env({spam, 1}, key3)),
ok.
%% Every entry reported by sc_util_app:get_app_info/0 must be a
%% {State, Apps} pair where State is one of the known lifecycle atoms
%% and Apps is a list; any violation crashes the test case (badmatch).
get_app_info(Config) ->
    ValidStates = [loading, loaded, starting,
                   started, start_p_false, running],
    CheckEntry =
        fun(Entry) ->
                true = is_tuple(Entry),
                true = (tuple_size(Entry) == 2),
                true = lists:member(element(1, Entry), ValidStates),
                true = is_list(element(2, Entry))
        end,
    lists:foreach(CheckEntry, sc_util_app:get_app_info()),
    Config.
%%% ==========================================================================
%%% Internal Functions
%%% ==========================================================================
test_logger(Level, Format, Args) ->
ct:log("~s: " ++ Format, [Level |Args]).
%% @doc Replace the `application' module with a meck mock that proxies
%% calls to a private "app manager" process, so the suite can exercise
%% sc_util_app:start_applications/1,2 without touching real OTP
%% applications. Returns the manager pid; hand it to cleanup_app/1 in
%% end_per_testcase. The five stubs previously duplicated the same
%% request/reply boilerplate; they now share one Rpc closure.
mock_app() ->
    Self = self(),
    AppMgr = spawn_link(fun() -> app_manager_proc() end),
    %% Synchronous RPC to the manager: send {Tag, Self, Ref, Args...}
    %% and wait for the matching {Ref, Result} reply; fail after 1s.
    Rpc = fun(Tag, Args, Label) ->
                  Ref = make_ref(),
                  AppMgr ! list_to_tuple([Tag, Self, Ref | Args]),
                  receive {Ref, Result} -> Result
                  after 1000 -> error(Label ++ " timeout")
                  end
          end,
    meck:new(application, [unstick]),
    meck:expect(application, info,
                fun() -> Rpc(info, [], "application:info") end),
    meck:expect(application, set_env,
                fun(App, Key, Value) ->
                        Rpc(set_env, [App, Key, Value], "application:set_env")
                end),
    meck:expect(application, load,
                fun(App) -> Rpc(load, [App], "application:load") end),
    meck:expect(application, start,
                fun(App) -> Rpc(start, [App], "application:start") end),
    meck:expect(application, get_env,
                fun(App, Key) ->
                        Rpc(get_env, [App, Key], "application:get_env")
                end),
    AppMgr.
%% Undo mock_app/0: restore the real `application' module and stop the
%% app-manager process, waiting for its 'DOWN' so nothing leaks into
%% the next test case. The unlink prevents the manager's exit from
%% taking the test process down with it.
cleanup_app(AppMgr) ->
    meck:unload(application),
    unlink(AppMgr),
    MonRef = monitor(process, AppMgr),
    AppMgr ! stop,
    receive {'DOWN', MonRef, process, _, _} -> ok end.
app_manager_proc() ->
app_manager_loop([], [], []).
app_manager_loop(Loaded, Started, Env) ->
receive
stop -> ok;
{info, Pid, Ref} ->
Result = [{started, [{App, temporary} || App <- Started]}],
ct:pal("application:info() -> ~p~nLoaded: ~p~nStarted: ~p~nEnv: ~p~n",
[Result, Loaded, Started, Env]),
Pid ! {Ref, Result},
app_manager_loop(Loaded, Started, Env);
{set_env, Pid, Ref, App, Key, Value} ->
NewEnv = app_manager_set_env(App, Key, Value, Env),
ct:pal("application:set_env(~p, ~p, ~p) -> ok~n"
"Loaded: ~p~nStarted: ~p~nEnv: ~p~n",
[App, Key, Value, Loaded, Started, NewEnv]),
Pid ! {Ref, ok},
app_manager_loop(Loaded, Started, NewEnv);
{load, Pid, Ref, App} ->
{NewLoaded, Result} = app_manager_load(App, Loaded),
ct:pal("application:load(~p) -> ~p~n"
"Loaded: ~p~nStarted: ~p~nEnv: ~p~n",
[App, Result, NewLoaded, Started, Env]),
Pid ! {Ref, Result},
app_manager_loop(NewLoaded, Started, Env);
{start, Pid, Ref, App} ->
{NewStarted, Result} = app_manager_start(App, Loaded, Started),
ct:pal("application:start(~p) -> ~p~n"
"Loaded: ~p~nStarted: ~p~nEnv: ~p~n",
[App, Result, Loaded, NewStarted, Env]),
Pid ! {Ref, Result},
app_manager_loop(Loaded, NewStarted, Env);
{get_env, Pid, Ref, App, Key} ->
Result = app_manager_get_env(App, Key, Env),
Pid ! {Ref, Result},
app_manager_loop(Loaded, Started, Env)
end.
%% Set Key => Value for App in the managed environment, mimicking
%% application:set_env/3. The updated app entry (and within it the
%% updated key) is moved to the front of its list, exactly as before.
app_manager_set_env(App, Key, Value, Env) ->
    case lists:keytake(App, 1, Env) of
        false ->
            %% First setting for this app: create its property list.
            [{App, [{Key, Value}]} | Env];
        {value, {App, Props0}, RestEnv} ->
            Props = case lists:keytake(Key, 1, Props0) of
                        false -> [{Key, Value} | Props0];
                        {value, {Key, _Old}, RestProps} ->
                            [{Key, Value} | RestProps]
                    end,
            [{App, Props} | RestEnv]
    end.
%% Mimic application:load/1: only {Name, Idx} identifiers are valid,
%% and re-loading an already-loaded app is an error.
app_manager_load({_, _} = App, Loaded) ->
    AlreadyLoaded = lists:member(App, Loaded),
    if
        AlreadyLoaded -> {Loaded, {error, {already_loaded, App}}};
        true -> {[App | Loaded], ok}
    end;
app_manager_load(App, Loaded) ->
    {Loaded, {error, {"app not found", App}}}.
%% Mimic application:start/1 with an artificial dependency chain:
%% {Name, Idx} may only start once {Name, Idx - 1} is running (apps
%% with Idx == 1 have no predecessor). The two original clauses were
%% identical except for the predecessor check, so they are unified;
%% Idx =:= 1 short-circuits before the member lookup.
app_manager_start({Name, Idx} = App, Loaded, Started) ->
    case lists:member(App, Loaded) of
        false -> {Started, {error, {"not loaded", App}}};
        true ->
            case lists:member(App, Started) of
                true -> {Started, {error, {already_started, App}}};
                false ->
                    case Idx =:= 1 orelse
                         lists:member({Name, Idx - 1}, Started) of
                        true -> {[App | Started], ok};
                        false ->
                            {Started, {error, {not_started, {Name, Idx - 1}}}}
                    end
            end
    end.
%% Mimic application:get_env/2 against the managed environment:
%% {ok, Value} on a hit, undefined when either the app or the key is
%% missing. An absent app is treated as an empty property list, which
%% collapses the original nested case into one lookup.
app_manager_get_env(App, Key, Env) ->
    AppEnv = proplists:get_value(App, Env, []),
    case proplists:get_value(Key, AppEnv) of
        undefined -> undefined;
        Value -> {ok, Value}
    end.
%% @doc Debugging helper (intentionally unexported): start a dbg tracer
%% on all sc_util_app calls. Invoke manually from the shell when
%% diagnosing suite failures. (Dataset-extraction residue fused onto
%% the original final line has been removed.)
trace() ->
    dbg:tracer(),
    dbg:p(all, c),
    dbg:tpl(sc_util_app, x),
    ok.
%% @doc
%% Counter is a Metric that represents a single numerical value that only ever
%% goes up. That implies that it cannot be used to count items whose number can
%% also go down, e.g. the number of currently running processes. Those
%% "counters" are represented by {@link prometheus_gauge}.
%%
%% A Counter is typically used to count requests served, tasks completed, errors
%% occurred, etc.
%%
%% Examople use cases for Counters:
%% <ul>
%% <li>Number of requests processed</li>
%% <li>Number of items that were inserted into a queue</li>
%% <li>Total amount of data a system has processed</li>
%% </ul>
%%
%% Use the
%% <a href="https://prometheus.io/docs/querying/functions/#rate()">rate()</a>/<a
%% href="https://prometheus.io/docs/querying/functions/#irate()">irate()</a>
%% functions in Prometheus to calculate the rate of increase of a Counter.
%% By convention, the names of Counters are suffixed by `_total'.
%%
%% To create a counter use either {@link new/1} or {@link declare/1},
%% the difference is that {@link new/1} will raise
%% {:mf_already_exists, {Registry, Name}, Message} error if counter with
%% the same `Registry', `Name' and `Labels' combination already exists.
%% Both accept `Spec' [proplist](http://erlang.org/doc/man/proplists.html)
%% with the same set of keys:
%%
%% - `Registry' - optional, default is `default';
%% - `Name' - required, can be an atom or a string;
%% - `Help' - required, must be a string;
%% - `Labels' - optional, default is `[]'.
%%
%% Example:
%% <pre lang="erlang">
%% -module(my_service_instrumenter).
%%
%% -export([setup/0,
%% inc/1]).
%%
%% setup() ->
%% prometheus_counter:declare([{name, my_service_requests_total},
%% {help, "Requests count"},
%% {labels, caller}]).
%%
%% inc(Caller) ->
%% prometheus_counter:inc(my_service_requests_total, [Caller]).
%%
%% </pre>
%% @end
-module(prometheus_counter).
%%% metric
-export([new/1,
new/2,
declare/1,
declare/2,
inc/1,
inc/2,
inc/3,
inc/4,
dinc/1,
dinc/2,
dinc/3,
dinc/4,
remove/1,
remove/2,
remove/3,
reset/1,
reset/2,
reset/3,
value/1,
value/2,
value/3]).
%%% collector
-export([deregister_cleanup/1,
collect_mf/2,
collect_metrics/2]).
%%% gen_server
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
start_link/0]).
-import(prometheus_model_helpers, [create_mf/5,
gauge_metrics/1,
gauge_metric/1,
gauge_metric/2,
counter_metric/1,
counter_metric/2]).
-include("prometheus.hrl").
-behaviour(prometheus_metric).
-behaviour(prometheus_collector).
-behaviour(gen_server).
%%====================================================================
%% Macros
%%====================================================================
-define(TABLE, ?PROMETHEUS_COUNTER_TABLE).
-define(SUM_POS, 2).
-define(WIDTH, 16).
%%====================================================================
%% Metric API
%%====================================================================
%% @doc Creates a counter using `Spec'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Soec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.<br/>
%% Raises `{mf_already_exists, {Registry, Name}, Message}' error if a counter
%% with the same `Spec' already exists.
%% @end
new(Spec) ->
prometheus_metric:insert_new_mf(?TABLE, ?MODULE, Spec).
%% @deprecated Please use {@link new/1} with registry
%% key instead.
new(Spec, Registry) ->
?DEPRECATED("prometheus_counter:new/2", "prometheus_counter:new/1"
" with registry key"),
new([{registry, Registry} | Spec]).
%% @doc Creates a counter using `Spec', if a counter with the same `Spec' exists
%% returns `false'.
%%
%% Raises `{missing_metric_spec_key, Key, Spec}' error if required `Soec' key
%% is missing.<br/>
%% Raises `{invalid_metric_name, Name, Message}' error if metric `Name'
%% is invalid.<br/>
%% Raises `{invalid_metric_help, Help, Message}' error if metric `Help'
%% is invalid.<br/>
%% Raises `{invalid_metric_labels, Labels, Message}' error if `Labels'
%% isn't a list.<br/>
%% Raises `{invalid_label_name, Name, Message}' error if `Name' isn't a valid
%% label name.
%% @end
declare(Spec) ->
prometheus_metric:insert_mf(?TABLE, ?MODULE, Spec).
%% @deprecated Please use {@link declare/1} with registry
%% key instead.
declare(Spec, Registry) ->
?DEPRECATED("prometheus_counter:declare/2", "prometheus_counter:declare/1"
" with registry key"),
declare([{registry, Registry} | Spec]).
%% @equiv inc(default, Name, [], 1)
inc(Name) ->
inc(default, Name, [], 1).
%% @doc If the second argument is a list, equivalent to
%% <a href="#inc-4"><tt>inc(default, Name, LabelValues, 1)</tt></a>
%% otherwise equivalent to
%% <a href="#inc-4"><tt>inc(default, Name, [], Value)</tt></a>.
inc(Name, LabelValues) when is_list(LabelValues)->
inc(default, Name, LabelValues, 1);
inc(Name, Value) ->
inc(default, Name, [], Value).
%% @equiv inc(default, Name, LabelValues, Value)
inc(Name, LabelValues, Value) ->
inc(default, Name, LabelValues, Value).
%% @doc Increments the counter identified by `Registry', `Name'
%% and `LabelValues' by `Value'.
%%
%% Raises `{invalid_value, Value, Message}' if `Value'
%% isn't a positive integer.<br/>
%% Raises `{unknown_metric, Registry, Name}' error if counter with named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
%% Counters are monotonic: reject negative increments up front.
inc(_Registry, _Name, _LabelValues, Value) when Value < 0 ->
    erlang:error({invalid_value, Value,
                  "inc accepts only non-negative integers"});
inc(Registry, Name, LabelValues, Value) when is_integer(Value) ->
    %% Fast path: atomically bump this scheduler's shard row. The first
    %% increment of a shard has no row yet, so update_counter raises
    %% badarg and we fall back to creating it; insert_metric retries
    %% through inc/4 if a concurrent writer wins the insert race.
    try
        ets:update_counter(?TABLE,
                           key(Registry, Name, LabelValues),
                           {?SUM_POS, Value})
    catch error:badarg ->
            insert_metric(Registry, Name, LabelValues, Value, fun inc/4)
    end,
    ok;
%% Non-integer (e.g. float) increments must go through dinc/4 instead.
inc(_Registry, _Name, _LabelValues, Value) ->
    erlang:error({invalid_value, Value,
                  "inc accepts only non-negative integers"}).
%% @equiv dinc(default, Name, [], 1)
dinc(Name) ->
dinc(default, Name, [], 1).
%% @doc If the second argument is a list, equivalent to
%% <a href="#dinc-4"><tt>dinc(default, Name, LabelValues, 1)</tt></a>
%% otherwise equivalent to
%% <a href="#dinc-4"><tt>dinc(default, Name, [], Value)</tt></a>.
dinc(Name, LabelValues) when is_list(LabelValues)->
dinc(default, Name, LabelValues, 1);
dinc(Name, Value) when is_number(Value) ->
dinc(default, Name, [], Value).
%% @equiv dinc(default, Name, LabelValues, Value)
dinc(Name, LabelValues, Value) ->
dinc(default, Name, LabelValues, Value).
%% @doc Increments the counter identified by `Registry', `Name'
%% and `LabelValues' by `Value'.
%% If `Value' happened to be a float number even one time(!) you
%% shouldn't use {@link inc/4} after dinc.
%%
%% Raises `{invalid_value, Value, Message}' if `Value'
%% isn't a number.<br/>
%% Raises `{unknown_metric, Registry, Name}' error if counter with named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
dinc(_Registry, _Name, _LabelValues, Value) when Value < 0 ->
erlang:error({invalid_value, Value,
"dinc accepts only non-negative numbers"});
dinc(Registry, Name, LabelValues, Value) when is_number(Value) ->
MF = prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
CallTimeout = prometheus_metric:mf_call_timeout(MF),
case CallTimeout of
false ->
gen_server:cast(?MODULE,
{inc, {Registry, Name, LabelValues, Value}});
_ -> gen_server:call(?MODULE,
{inc, {Registry, Name, LabelValues, Value}},
CallTimeout)
end,
ok;
dinc(_Registry, _Name, _LabelValues, Value) ->
erlang:error({invalid_value, Value,
"dinc accepts only non-negative numbers"}).
%% @equiv remove(default, Name, [])
remove(Name) ->
remove(default, Name, []).
%% @equiv remove(default, Name, LabelValues)
remove(Name, LabelValues) ->
remove(default, Name, LabelValues).
%% @doc Removes counter series identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter with named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
%% Remove the counter series: delete every per-scheduler shard row and
%% report whether the series existed (at least one shard row present).
%% check_mf_exists/4 raises if the metric family was never declared.
remove(Registry, Name, LabelValues) ->
    prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
    Taken = [ets:take(?TABLE, {Registry, Name, LabelValues, Scheduler})
             || Scheduler <- schedulers_seq()],
    lists:any(fun(Rows) -> Rows =/= [] end, Taken).
%% @equiv reset(default, Name, [])
reset(Name) ->
reset(default, Name, []).
%% @equiv reset(default, Name, LabelValues)
reset(Name, LabelValues) ->
reset(default, Name, LabelValues).
%% @doc Resets the value of the counter identified by `Registry', `Name'
%% and `LabelValues'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter with named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
reset(Registry, Name, LabelValues) ->
    prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
    %% Zero every per-scheduler shard. update_element/3 returns true for
    %% shards whose row existed and false otherwise, so after usort the
    %% only possible results are [false], [true] or [false, true];
    %% report true when at least one shard was actually reset.
    case lists:usort([ets:update_element(?TABLE,
                                         {Registry, Name, LabelValues, Scheduler},
                                         {?SUM_POS, 0})
                      || Scheduler <- schedulers_seq()]) of
        [_, _] -> true;
        [true] -> true;
        _ -> false
    end.
%% @equiv value(default, Name, [])
value(Name) ->
value(default, Name, []).
%% @equiv value(default, Name, LabelValues)
value(Name, LabelValues) ->
value(default, Name, LabelValues).
%% @doc Returns the value of the counter identified by `Registry', `Name'
%% and `LabelValues'. If there is no counter for `LabelValues',
%% returns `undefined'.
%%
%% Raises `{unknown_metric, Registry, Name}' error if counter named `Name'
%% can't be found in `Registry'.<br/>
%% Raises `{invalid_metric_arity, Present, Expected}' error if labels count
%% mismatch.
%% @end
value(Registry, Name, LabelValues) ->
prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
case ets:select(?TABLE, [{{{Registry, Name, LabelValues, '_'}, '$1'},
[],
['$1']}]) of
[] -> undefined;
List -> lists:sum(List)
end.
%%====================================================================
%% Collector API
%%====================================================================
%% @private
deregister_cleanup(Registry) ->
prometheus_metric:deregister_mf(?TABLE, Registry),
true = ets:match_delete(?TABLE, {{Registry, '_', '_', '_'}, '_'}),
ok.
%% @private
collect_mf(Registry, Callback) ->
[Callback(create_counter(Name, Help, {Labels, Registry})) ||
[Name, {Labels, Help}, _, _, _] <- prometheus_metric:metrics(?TABLE,
Registry)],
ok.
%% @private
%% Produce one counter sample per distinct label combination, summing
%% that combination's per-scheduler shard values. ets:match/2 yields
%% [LabelValues, ShardValue] rows for every shard of every series of
%% this metric family.
collect_metrics(Name, {Labels, Registry}) ->
    MFValues = ets:match(?TABLE, {{Registry, Name, '$1', '_'}, '$2'}),
    [begin
       Value = reduce_label_values(LabelValues, MFValues),
       counter_metric(lists:zip(Labels, LabelValues), Value)
     end ||
        LabelValues <- collect_unique_labels(MFValues)].
%%====================================================================
%% Gen_server API
%%====================================================================
%% @private
init(_Args) ->
{ok, []}.
%% @private
handle_call({inc, {Registry, Name, LabelValues, Value}}, _From, State) ->
dinc_impl(Registry, Name, LabelValues, Value),
{reply, ok, State}.
%% @private
handle_cast({inc, {Registry, Name, LabelValues, Value}}, State) ->
dinc_impl(Registry, Name, LabelValues, Value),
{noreply, State}.
%% @private
handle_info(_Info, State) ->
{noreply, State}.
%% @private
terminate(_Reason, _State) ->
ok.
%% @private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% @private
start_link() ->
gen_server:start_link({local, prometheus_counter},
prometheus_counter, [], []).
%%====================================================================
%% Private Parts
%%====================================================================
%% Float increment: a non-atomic read-modify-write on the shard row.
%% Safe because all dinc traffic is funneled through this module's
%% gen_server (handle_call/handle_cast), serializing the updates;
%% mixing with the atomic integer inc/4 path is unsupported (see the
%% dinc/4 doc above).
dinc_impl(Registry, Name, LabelValues, Value) ->
    case ets:lookup(?TABLE, key(Registry, Name, LabelValues)) of
        [{_Key, OldValue}] ->
            ets:update_element(?TABLE, key(Registry, Name, LabelValues),
                               {?SUM_POS, Value + OldValue});
        [] ->
            %% First write for this shard: create the row.
            insert_metric(Registry, Name, LabelValues, Value, fun dinc_impl/4)
    end.
%% Create the shard row on first write. check_mf_exists/4 ensures the
%% metric family was declared (it raises unknown_metric otherwise).
%% insert_new/2 returns false when a concurrent writer created the row
%% first; in that case retry the update through the supplied callback
%% (fun inc/4 or fun dinc_impl/4).
insert_metric(Registry, Name, LabelValues, Value, ConflictCB) ->
    prometheus_metric:check_mf_exists(?TABLE, Registry, Name, LabelValues),
    case ets:insert_new(?TABLE, {key(Registry, Name, LabelValues), Value}) of
        false -> %% some sneaky process already inserted
            ConflictCB(Registry, Name, LabelValues, Value);
        true ->
            ok
    end.
schedulers_seq() ->
lists:seq(0, ?WIDTH-1).
%% Build the ets key for one per-scheduler shard of a counter series.
%% Writes are striped across ?WIDTH shards selected by the caller's
%% scheduler id (band with ?WIDTH-1 is mod ?WIDTH, since ?WIDTH is a
%% power of two) to reduce write contention; readers sum all shards.
key(Registry, Name, LabelValues) ->
    X = erlang:system_info(scheduler_id),
    Rnd = X band (?WIDTH-1),
    {Registry, Name, LabelValues, Rnd}.
%% Extract the distinct label-value lists from the raw ets:match/2
%% rows; each row has the shape [LabelValues, ShardValue].
collect_unique_labels(MFValues) ->
    AllLabels = lists:map(fun([LabelValues, _Value]) -> LabelValues end,
                          MFValues),
    lists:usort(AllLabels).
%% Sum the per-scheduler shard values that belong to one label
%% combination; rows for other label combinations are skipped.
reduce_label_values(Labels, MFValues) ->
    lists:foldl(fun([L, Value], Acc) when L == Labels -> Acc + Value;
                   (_Row, Acc) -> Acc
                end,
                0, MFValues).
%% Build the counter MetricFamily; the collector will call back into
%% collect_metrics/2 with Data = {Labels, Registry} to produce samples.
%% (Dataset-extraction residue fused onto the original final line has
%% been removed.)
create_counter(Name, Help, Data) ->
    create_mf(Name, Help, counter, ?MODULE, Data).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2007-2012 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc riak_core_stat_q is an interface to query folsom stats
%% To use, call `get_stats/1' with a query `Path'.
%% A `Path' is a list of atoms | binaries. The module creates a set
%% of `ets:select/1' guards, one for each element in `Path'
%% For each stat that has a key that matches `Path' we calculate the
%% current value and return it. This module makes use of
%% `riak_core_stat_calc_proc'
%% to cache and limit stat calculations.
-module(riak_core_stat_q).
-compile(export_all).
-export_type([path/0,
stat_name/0]).
-type path() :: [] | [atom()|binary()].
-type stats() :: [stat()].
-type stat() :: {stat_name(), stat_value()}.
-type stat_name() :: tuple().
-type stat_value() :: integer() | [tuple()].
%% @doc To allow for namespacing, and adding richer dimensions, stats
%% are named with a tuple key. The key (like `{riak_kv, node, gets}' or
%% `{riak_kv, vnode, puts, time}') can
%% be seen as an hierarchical path. With `riak_kv' at the root and
%% the other elements as branches / leaves.
%% This module allows us to get only the stats at and below a particular key.
%% `Path' is a list of atoms or the empty list.
%% an example path might be `[riak_kv]' which will return every
%% stat that has `riak_kv' in the first element of its key tuple.
%% You may use the atom '_' at any point
%% in `Path' as a wild card.
-spec get_stats(path()) -> stats().
%% Entry point: turn Path into ets match-spec guards, find the matching
%% folsom metrics, then compute each one's current value (via the
%% cached calc procs — see get_stat/1).
get_stats(Path) ->
    %% get all the stats that are at Path
    NamesNTypes = names_and_types(Path),
    calculate_stats(NamesNTypes).
%% @doc queries folsom's metrics table for stats that match our path
names_and_types(Path) ->
Guards = guards_from_path(Path),
ets:select(folsom, [{{'$1','$2'}, Guards,['$_']}]).
%% Build the ets match-spec guard list for Path: the key must be a
%% tuple at least as long as Path, and each non-wildcard path element
%% must equal the corresponding tuple element (added by add_guards/3).
guards_from_path(Path) ->
    SizeGuard = size_guard(length(Path)),
    %% Going to reverse it is why this way around
    Guards = [SizeGuard, {is_tuple, '$1'}],
    add_guards(Path, Guards, 1).
%% Append one positional equality guard per path element (wildcard '_'
%% elements consume a position but add no guard), then reverse so the
%% seed guards supplied by guards_from_path/1 come out first.
add_guards(Path, Guards0, FirstPos) ->
    {Guards, _} =
        lists:foldl(
          fun('_', {Acc, Pos}) ->
                  {Acc, Pos + 1};
             (Elem, {Acc, Pos}) ->
                  {[guard(Elem, Pos) | Acc], Pos + 1}
          end,
          {Guards0, FirstPos},
          Path),
    lists:reverse(Guards).
guard(Elem, Cnt) when Cnt > 0 ->
{'==', {element, Cnt, '$1'}, Elem}.
-spec size_guard(pos_integer()) -> tuple().
size_guard(N) ->
{'>=', {size, '$1'}, N}.
calculate_stats(NamesAndTypes) ->
[{Name, get_stat({Name, Type})} || {Name, {metric, _, Type, _}} <- NamesAndTypes].
%% Create/lookup a cache/calculation process
get_stat(Stat) ->
Pid = riak_core_stat_calc_sup:calc_proc(Stat),
riak_core_stat_calc_proc:value(Pid).
%% Re-raise folsom's {error, _, _} tuples as throws so call sites can
%% keep a single success path; anything else passes through unchanged.
throw_folsom_error(Result) ->
    case Result of
        {error, _, _} = Err -> throw(Err);
        _ -> Result
    end.
%% Encapsulate getting a stat value from folsom.
%%
%% If for any reason we can't get a stats value
%% return 'unavailable'.
%% @TODO experience shows that once a stat is
%% broken it stays that way. Should we delete
%% stats that are broken?
%% Compute the current value of one folsom stat; returns 'unavailable'
%% on any failure (see the note above about broken stats). Not called
%% directly from this module's public path — presumably invoked by
%% riak_core_stat_calc_proc through export_all; confirm before removing.
calc_stat({Name, gauge}) ->
    %% Gauges may hold a callback instead of a value; calc_gauge/1
    %% evaluates it.
    try
        GaugeVal = throw_folsom_error(folsom_metrics:get_metric_value(Name)),
        calc_gauge(GaugeVal)
    catch ErrClass:ErrReason ->
            log_error(Name, ErrClass, ErrReason),
            unavailable
    end;
calc_stat({Name, histogram}) ->
    %% Histograms need the derived statistics, not the raw sample list.
    try
        throw_folsom_error(folsom_metrics:get_histogram_statistics(Name))
    catch ErrClass:ErrReason ->
            log_error(Name, ErrClass, ErrReason),
            unavailable
    end;
calc_stat({Name, _Type}) ->
    try throw_folsom_error(folsom_metrics:get_metric_value(Name))
    catch ErrClass:ErrReason ->
            log_error(Name, ErrClass, ErrReason),
            unavailable
    end.
%% Log (at warning level) a failed stat calculation with the exception
%% class and reason, so broken stats are visible in the logs.
log_error(StatName, ErrClass, ErrReason) ->
    lager:warning("Failed to calculate stat ~p with ~p:~p", [StatName, ErrClass, ErrReason]).
%% A gauge value stored in folsom may be a plain value, or a
%% `{function, Mod, Fun}' indirection that users install to expose stats
%% from disparate sources behind one interface — invoke it to get the value.
calc_gauge({function, Mod, Fun}) ->
    Mod:Fun();
calc_gauge(Value) ->
    Value.
%@doc A BSON document is a JSON-like object with a standard binary encoding defined at bsonspec.org. This implements version 1.0 of that spec.
-module (bson).
-export_type ([maybe/1]).
-export_type ([document/0, label/0, value/0]).
-export_type ([arr/0]).
-export_type ([bin/0, bfunction/0, uuid/0, md5/0, userdefined/0]).
-export_type ([floating_point/0]).
-export_type ([mongostamp/0, minmaxkey/0]).
-export_type ([utf8/0, regex/0, unixtime/0]).
-export_type ([javascript/0]).
-export_type ([objectid/0, unixsecs/0]).
-export ([lookup/2, lookup/3, at/2, include/2, exclude/2, update/3, merge/2, append/2]).
-export ([doc_foldl/3, doc_foldr/3, fields/1, document/1]).
-export ([utf8/1, str/1]).
-export ([timenow/0, ms_precision/1, secs_to_unixtime/1, unixtime_to_secs/1]).
-export ([objectid/3, objectid_time/1]).
-type maybe(A) :: {A} | {}.
% Document %
-type document() :: tuple(). % {label(), value(), label(), value(), ...}.
% Conceptually a document is a list of label-value pairs (associative array, dictionary, record). However, for read/write-ability, it is implemented as a flat tuple, ie. the list becomes a tuple and the pair braces are elided, so you just have alternating labels and values where each value is associated with the previous label.
% To distinguish a tagged value such as {uuid, _} (see value() type below) from a document with field name 'uuid' we made sure all valid tagged value types have an odd number of elements (documents have even number of elements). So actually only {bin, uuid, _} is a valid value, {uuid, _} is a document.
-type label() :: atom() | binary().
-spec doc_foldl (fun ((label(), value(), A) -> A), A, document()) -> A.
%@doc Reduce document by applying given function to each field with result of previous field's application, starting with given initial result.
% Fields are visited left to right; the document is a flat tuple so this
% delegates to an index-based fold over tuple_size/2 field slots.
doc_foldl (Fun, Acc, Doc) -> doc_foldlN (Fun, Acc, Doc, 0, tuple_size (Doc) div 2).
-spec doc_foldlN (fun ((label(), value(), A) -> A), A, document(), integer(), integer()) -> A.
%@doc Fold over fields from first index (inclusive) to second index (exclusive), zero-based index.
% Field I occupies tuple slots 2I+1 (label) and 2I+2 (value).
doc_foldlN (_, Acc, _, High, High) -> Acc;
doc_foldlN (Fun, Acc, Doc, Low, High) ->
    Acc1 = Fun (element (Low * 2 + 1, Doc), element (Low * 2 + 2, Doc), Acc),
    doc_foldlN (Fun, Acc1, Doc, Low + 1, High).
-spec doc_foldr (fun ((label(), value(), A) -> A), A, document()) -> A.
%@doc Same as doc_foldl/3 except apply fields in reverse order
doc_foldr (Fun, Acc, Doc) -> doc_foldrN (Fun, Acc, Doc, 0, tuple_size (Doc) div 2).
-spec doc_foldrN (fun ((label(), value(), A) -> A), A, document(), integer(), integer()) -> A.
%@doc Fold over fields from second index (exclusive) to first index (inclusive), zero-based index.
% Mirror of doc_foldlN: walks High down towards Low, so the last field
% is applied first.
doc_foldrN (_, Acc, _, Low, Low) -> Acc;
doc_foldrN (Fun, Acc, Doc, Low, High) ->
    Acc1 = Fun (element (High * 2 - 1, Doc), element (High * 2, Doc), Acc),
    doc_foldrN (Fun, Acc1, Doc, Low, High - 1).
-spec fields (document()) -> [{label(), value()}].
%@doc Convert document to a list of {Label, Value} pairs, preserving
% field order (built with a right fold so no reverse is needed).
fields (Doc) ->
    Cons = fun (Label, Value, Acc) -> [{Label, Value} | Acc] end,
    doc_foldr (Cons, [], Doc).
-spec document ([{label(), value()}]) -> document().
%@doc Convert list of fields to a document
% Inverse of fields/1: interleaves labels and values into a flat tuple.
document (Fields) -> list_to_tuple (flatten (Fields)).
-spec flatten ([{label(), value()}]) -> [label() | value()].
%@doc Interleave labels and values by removing the pair tuple
% constructors: [{L1,V1},{L2,V2}] becomes [L1,V1,L2,V2].
flatten (Fields) ->
    lists:flatmap (fun ({Label, Value}) -> [Label, Value] end, Fields).
-spec lookup (label(), document()) -> maybe (value()).
%@doc Value of field in document if there. A dotted label (e.g.
% <<"a.b">>) descends into nested documents one segment at a time.
% Labels are normalized to binaries before comparison (find/2 does the
% same for atoms), so atom and binary arguments behave identically.
% Fix: segments are rebuilt with list_to_binary/1 instead of
% list_to_atom/1 — the old atom round-trip grew the atom table without
% bound for caller-supplied labels and re-encoded bytes >127, so
% non-ASCII binary labels could never match.
lookup (Label, Doc) when is_atom(Label) ->
    lookup(atom_to_binary(Label, utf8), Doc);
lookup (Label, Doc) ->
    case string:tokens (binary_to_list (Label), ".") of
        [Single] ->
            case find (list_to_binary (Single), Doc) of
                {Index} -> {element (Index * 2 + 2, Doc)};
                {} -> {} end;
        [First | Rest] ->
            %% Descend into the sub-document named by the first segment.
            case find (list_to_binary (First), Doc) of
                {Index} -> lookup (list_to_binary (string:join (Rest, ".")), element (Index * 2 + 2, Doc));
                {} -> {} end
    end.
-spec lookup (label(), document(), value()) -> value().
%@doc Value of field in document if there or default. Dotted labels
% descend into nested documents.
% Fixes: (1) the nested-label branch previously recursed into lookup/2,
% which ignored Default and returned a wrapped maybe() tuple instead of
% the bare value; it now recurses into lookup/3. (2) segments are
% rebuilt with list_to_binary/1 instead of list_to_atom/1, avoiding
% unbounded atom creation (find/2 already compares binaries).
lookup (Label, Doc, Default) when is_atom(Label) ->
    lookup(atom_to_binary(Label, utf8), Doc, Default);
lookup (Label, Doc, Default) ->
    case string:tokens (binary_to_list (Label), ".") of
        [Single] ->
            case find (list_to_binary (Single), Doc) of
                {Index} -> element (Index * 2 + 2, Doc);
                {} -> Default end;
        [First | Rest] ->
            case find (list_to_binary (First), Doc) of
                {Index} -> lookup (list_to_binary (string:join (Rest, ".")), element (Index * 2 + 2, Doc), Default);
                {} -> Default end
    end.
-spec find (label(), document()) -> maybe (integer()).
%@doc Index of field in document if there
% Atom labels are normalized to utf8 binaries before the scan, so the
% comparison in findN is always binary vs. stored label.
find (Label, Doc) when is_atom(Label) ->
    find(atom_to_binary(Label, utf8), Doc);
find (Label, Doc) -> findN (Label, Doc, 0, tuple_size (Doc) div 2).
-spec findN (label(), document(), integer(), integer()) -> maybe (integer()).
%@doc Find field index in document from first index (inclusive) to second index (exclusive).
% Linear scan; compares with =:= via pattern match, so a binary search
% label only matches documents whose stored labels are binaries.
% NOTE(review): documents with atom labels will never match here even
% though label() permits atoms — confirm callers always store binaries.
findN (Label, Doc, Low, High) when is_atom(Label) ->
    findN(atom_to_binary(Label, utf8), Doc, Low, High);
findN (_Label, _Doc, High, High) -> {};
findN (Label, Doc, Low, High) -> case element (Low * 2 + 1, Doc) of
    Label -> {Low};
    _ -> findN (Label, Doc, Low + 1, High) end.
-spec at (label(), document()) -> value().
%@doc Value of field in document, or the atom null when the field is
% missing. (Historically this raised missing_field — see the
% commented-out line — so callers should not rely on an error.)
at (Label, Document) when is_atom(Label) ->
    at(atom_to_binary(Label, utf8), Document);
at (Label, Document) -> case lookup (Label, Document) of
    % {} -> erlang:error (missing_field, [Label, Document]);
    {} -> null;
    {Value} -> Value end.
-spec include ([label()], document()) -> document().
%@doc Project given fields of document
% Result preserves the order of Labels; labels absent from Document are
% silently dropped.
include (Labels, Document) ->
    Fun = fun (Label, Doc) -> case lookup (Label, Document) of
        {Value} -> [Label, Value | Doc];
        {} -> Doc end end,
    list_to_tuple (lists:foldr (Fun, [], Labels)).
-spec exclude ([label()], document()) -> document().
%@doc Remove given fields from document
% Exclusion compares labels with lists:member/2, i.e. exact equality:
% an atom in Labels will not exclude a binary-labelled field.
exclude (Labels, Document) ->
    Fun = fun (Label, Value, Doc) -> case lists:member (Label, Labels) of
        false -> [Label, Value | Doc];
        true -> Doc end end,
    list_to_tuple (doc_foldr (Fun, [], Document)).
-spec update (label(), value(), document()) -> document().
%@doc Replace field with new value, adding to end if new. Dotted labels
% descend into (and create, when absent) nested documents.
% Fix: label() permits binaries, but this function only handled atoms
% (atom_to_list/1 raised badarg on a binary label — e.g. when merge/2
% folds over a document with binary labels). Binary labels are now
% converted up front; atom behaviour is unchanged.
update (Label, Value, Document) when is_binary (Label) ->
    %% Mirrors the existing list_to_atom usage below; labels are
    %% expected to come from a bounded schema, not untrusted input.
    update (binary_to_atom (Label, utf8), Value, Document);
update (Label, Value, Document) ->
    Parts = string:tokens (atom_to_list (Label), "."),
    case length (Parts) of
        1 ->
            case find (list_to_atom (hd (Parts)), Document) of
                {Index} -> setelement (Index * 2 + 2, Document, Value);
                {} ->
                    %% New field: append label then value at the end.
                    Doc = erlang:append_element (Document, Label),
                    erlang:append_element (Doc, Value) end;
        _ ->
            case find (list_to_atom (hd (Parts)), Document) of
                {Index} -> setelement (Index * 2 + 2, Document, update (list_to_atom (string:join (tl (Parts), ".")), Value, element (Index * 2 + 2, Document)));
                {} -> Doc = erlang:append_element (Document, list_to_atom (hd (Parts))),
                      %% Missing intermediate document: create it empty.
                      erlang:append_element (Doc, update (list_to_atom (string:join (tl (Parts), ".")), Value, {})) end
    end.
-spec merge (document(), document()) -> document().
%@doc First doc overrides second with new fields added at end of second doc
merge (UpDoc, BaseDoc) ->
    doc_foldl (fun update/3, BaseDoc, UpDoc).
-spec append (document(), document()) -> document().
%@doc Concatenate the fields of two documents into a single document.
append (DocA, DocB) ->
    FieldsA = tuple_to_list (DocA),
    FieldsB = tuple_to_list (DocB),
    list_to_tuple (lists:append (FieldsA, FieldsB)).
% Value %
-type value() ::
floating_point() |
utf8() |
document() |
arr() |
bin() |
bfunction() |
uuid() |
md5() |
userdefined() |
objectid() |
boolean() |
unixtime() |
null |
regex() |
javascript() |
atom() |
integer() |
mongostamp() |
minmaxkey().
-type floating_point() ::
float() |
{float, nan, binary()}.
% Note, No value() can be a tuple with even number of elements because then it would be ambiguous with document(). Therefore all tagged values defined below have odd number of elements.
% Array %
-type arr() :: [value()].
% Caution, a string() will be interpreted as an array of integers. You must supply strings as utf8 binary, see below.
% String %
-type utf8() :: unicode:unicode_binary().
% binary() representing a string of characters encoded with UTF-8.
% An Erlang string() is a list of unicode characters (codepoints), but this list must be converted to utf-8 binary for use in Bson. Call utf8/1 to do this, or encode pure ascii literals directly as `<<"abc">>' and non-pure ascii literals as `<<"aßc"/utf8>>'.
-spec utf8 (unicode:chardata()) -> utf8().
%@doc Convert string to utf8 binary. string() is a subtype of
% unicode:chardata(). Raises unicode_error / unicode_incomplete when the
% input cannot be fully converted.
utf8 (CharData) ->
    case unicode:characters_to_binary (CharData) of
        Bin when is_binary (Bin) -> Bin;
        {error, _Bin, _Rest} -> erlang:error (unicode_error, [CharData]);
        {incomplete, _Bin, _Rest} -> erlang:error (unicode_incomplete, [CharData])
    end.
-spec str (unicode:chardata()) -> string().
%@doc Convert utf8 binary to string (list of codepoints). utf8() is a
% subtype of unicode:chardata(). Raises unicode_error /
% unicode_incomplete when the input cannot be fully converted.
str (CharData) ->
    case unicode:characters_to_list (CharData) of
        {error, _Bin, _Rest} -> erlang:error (unicode_error, [CharData]);
        {incomplete, _Bin, _Rest} -> erlang:error (unicode_incomplete, [CharData]);
        Chars -> Chars
    end.
% Binary %
-type bin() :: {bin, bin, binary()}.
-type bfunction() :: {bin, function, binary()}.
-type uuid() :: {bin, uuid, binary()}.
-type md5() :: {bin, md5, binary()}.
-type userdefined() :: {bin, userdefined, binary()}.
% Special %
-type mongostamp() :: {mongostamp, integer(), integer()}.
% 4-byte increment, 4-byte timestamp. 0 timestamp has special semantics
-type minmaxkey() :: 'MIN_KEY' | 'MAX_KEY'.
% Special values that compare lower/higher than all other bson values
% Regex %
-type regex() :: {regex, utf8(), utf8()}. % pattern and options
% Datetime %
-type unixtime() :: {integer(), integer(), integer()}. % {MegaSecs, Secs, MicroSecs}
% Unix time in Erlang now/os:timstamp format, but only to millisecond precision when serialized.
-spec timenow () -> unixtime(). % IO
% Current unixtime to millisecond precision, ie. MicroSecs is always a multiple of 1000.
% Uses os:timestamp/0 (wall clock) truncated via ms_precision/1 so the
% value round-trips through BSON serialization unchanged.
timenow() -> ms_precision (os:timestamp()).
-spec ms_precision (unixtime()) -> unixtime().
%@doc Truncate microseconds to whole milliseconds. BSON only stores
% millisecond precision, so truncating up front makes a time compare
% equal before and after serialization.
ms_precision ({MegaSecs, Secs, MicroSecs}) ->
    WholeMillis = MicroSecs div 1000,
    {MegaSecs, Secs, WholeMillis * 1000}.
-type unixsecs() :: integer(). % Unix Time in seconds
-spec secs_to_unixtime (unixsecs()) -> unixtime().
%@doc Expand whole seconds into the {MegaSecs, Secs, MicroSecs} triple
% (microseconds are always 0).
secs_to_unixtime (UnixSecs) ->
    Mega = UnixSecs div 1000000,
    Rest = UnixSecs rem 1000000,
    {Mega, Rest, 0}.
-spec unixtime_to_secs (unixtime()) -> unixsecs().
%@doc Collapse a {MegaSecs, Secs, _MicroSecs} triple into whole seconds,
% discarding sub-second precision.
unixtime_to_secs ({Mega, Secs, _Micro}) ->
    Mega * 1000000 + Secs.
% Javascript %
-type javascript() :: {javascript, document(), utf8()}. % scope and code
% ObjectId %
-type objectid() :: {<<_:96>>}.
% `<<UnixTimeSecs:32/big, MachineId:24/big, ProcessId:16/big, Count:24/big>>'
-spec objectid (unixsecs(), <<_:40>>, integer()) -> objectid().
%@doc Assemble a 12-byte object id: 32-bit big-endian unix seconds,
% 5-byte machine/process id, 24-bit big-endian counter.
objectid (UnixSecs, MachineAndProcId, Count) ->
    Oid = <<UnixSecs:32/big, MachineAndProcId:5/binary, Count:24/big>>,
    {Oid}.
-spec objectid_time (objectid()) -> unixtime().
%@doc Time when object id was generated, decoded from the id's leading
% 32-bit big-endian seconds field.
objectid_time ({<<UnixSecs:32/big, _:64>>}) -> secs_to_unixtime (UnixSecs).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(kai_sup).
-behaviour(supervisor).
-export([start_link/1]).
-export([init/1]).
-define(SERVER, ?MODULE).
%% @doc Start the supervisor, registered locally as ?SERVER (the module
%% name). Args are forwarded to init/1 and from there to kai_config.
start_link(Args) ->
    supervisor:start_link({local, ?SERVER}, ?MODULE, Args).
%% @doc Supervisor callback. Builds one permanent worker child spec per
%% kai subsystem; kai_config starts first and receives the boot Args,
%% every other worker is started with no arguments, in the order listed.
%% Strategy: one_for_one, at most 3 restarts within 10 seconds.
init(Args) ->
    Spec = fun(Mod, StartArgs) ->
                   {Mod, {Mod, start_link, StartArgs}, permanent, 1000, worker, [Mod]}
           end,
    Workers = [kai_log, kai_hash, kai_store, kai_stat, kai_version,
               kai_connection, kai_sync, kai_membership, kai_rpc, kai_memcache],
    Children = [Spec(kai_config, [Args]) | [Spec(Mod, []) || Mod <- Workers]],
    {ok, {{one_for_one, 3, 10}, Children}}.
%%----------------------------------------------------------------------------------------------------------------
%% @author <NAME> <<EMAIL>>
%% @copyright 2010 <NAME>
%% @doc 'datediff' filter, produce the difference between two dates selecting which date part is interesting.
%%----------------------------------------------------------------------------------------------------------------
%%----------------------------------------------------------------------------------------------------------------
%% Copyright 2010 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%----------------------------------------------------------------------------------------------------------------
-module(filter_datediff).
-export([datediff/3]).
%% undefined propagates: if either input is missing the diff is too.
datediff(undefined, _X, _Context) ->
    undefined;
datediff(_X, undefined, _Context) ->
    undefined;
%% Normalization clauses: promote bare dates to {Date, {0,0,0}} and a
%% single datetime to a pair diffed against the current UTC time.
datediff( {_Y,_M,_D} = Date, DatePart, Context) ->
    datediff( [Date,{0,0,0}], DatePart, Context );
datediff( {{_Y,_M,_D},{_H,_I,_S}} = DateTime, DatePart, Context) ->
    datediff( [ DateTime, calendar:universal_time() ], DatePart, Context );
datediff( [ {{_Y,_M,_D},{_H,_I,_S}} = DateTimeA, {_YB,_MB,_DB} = DateB ], DatePart, Context) ->
    datediff( [ DateTimeA, {DateB,{0,0,0}} ], DatePart, Context );
%% Base case: both sides are full datetimes; z_datetime:diff/2 yields a
%% {{Y,M,D},{H,I,S}} difference and DatePart selects one component.
%% NOTE(review): DatePart is only matched as a character list ("Y" etc.);
%% a binary DatePart falls through to the year default — confirm how
%% template filter arguments arrive here.
datediff( [ {{_YA,_MA,_DA},{_HA,_IA,_SA}} = DateTimeA, {{_YB,_MB,_DB},{_HB,_IB,_SB}} = DateTimeB ], DatePart, _Context ) ->
    {{Y,M,D},{H,I,S}} = z_datetime:diff( DateTimeA, DateTimeB ),
    case DatePart of
        "Y" -> Y;
        "M" -> M;
        "D" -> D;
        "H" -> H;
        "I" -> I;
        "S" -> S;
        _ -> Y % Defaults to YEAR
    end;
%% String inputs are parsed with z_convert:to_datetime/1 before re-entry.
datediff( [ DateStringA, DateStringB ], DatePart, Context ) when is_list(DateStringA), is_tuple(DateStringB) ->
    datediff( [ z_convert:to_datetime(DateStringA), DateStringB ], DatePart, Context);
datediff( [ DateStringA, DateStringB ], DatePart, Context ) when is_tuple(DateStringA), is_list(DateStringB) ->
    datediff( [ DateStringA, z_convert:to_datetime(DateStringB) ], DatePart, Context);
datediff( [ DateStringA, DateStringB ], DatePart, Context ) when is_list(DateStringA), is_list(DateStringB) ->
    datediff( [ z_convert:to_datetime(DateStringA), z_convert:to_datetime(DateStringB) ], DatePart, Context);
datediff( DateString, DatePart, Context ) when is_list(DateString) ->
    datediff(z_convert:to_datetime(DateString), DatePart, Context).
-module(aoc2018_day02).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
%% @doc Puzzle metadata consumed by the aoc_puzzle framework: year/day,
%% title, the expected answers for both parts, and that input is read
%% from a file.
info() ->
    #aoc_puzzle{module = ?MODULE,
                year = 2018,
                day = 2,
                name = "Inventory Management System",
                expected = {9139, "uqcidadzwtnhsljvxyobmkfyr"},
                has_input_file = true}.
-type input_type() :: [string()].
-type result1_type() :: integer().
-type result2_type() :: string().
-spec parse(Input :: binary()) -> input_type().
%% @doc Split the raw puzzle input into a list of box-id strings,
%% treating CR and LF as separators (empty tokens are dropped).
parse(Input) ->
    Text = binary_to_list(Input),
    string:tokens(Text, "\r\n").
-spec solve1(Input :: input_type()) -> result1_type().
%% @doc Part 1 checksum: count ids containing some letter exactly twice,
%% count ids containing some letter exactly thrice, multiply the counts.
solve1(Input) ->
    %% This is a tuple containing the number of strings which has a
    %% letter occurring twice and thrice, respectively.
    {X, Y} =
        lists:foldl(fun(Line, {Twos, Threes}) ->
                       %% Each id contributes at most 1 to each counter,
                       %% regardless of how many letters repeat.
                       FT = freq_table(Line, #{}),
                       {Twos + has_n(FT, 2), Threes + has_n(FT, 3)}
                    end,
                    {0, 0},
                    Input),
    X * Y.
-spec solve2(Input :: input_type()) -> result2_type().
%% @doc Part 2: find the unique pair of ids differing in exactly one
%% position and return their common characters. The X < Y filter keeps
%% one ordering of each pair; the match on [Solution] asserts exactly
%% one such pair exists. O(n^2) pairs — fine for puzzle-sized input.
solve2(Lines) ->
    DiffList = [{X, Y, number_of_different_chars(X, Y)} || X <- Lines, Y <- Lines, X < Y],
    [Solution] =
        lists:filtermap(fun({X, Y, N}) ->
                           if N =:= 1 -> {true, remove_diff_char(X, Y)};
                              true -> false
                           end
                        end,
                        DiffList),
    Solution.
%% Build a character -> occurrence-count map over the given string,
%% folding onto the supplied initial table.
freq_table(Chars, Table) ->
    Bump = fun(Char, Acc) ->
                   maps:update_with(Char, fun(N) -> N + 1 end, 1, Acc)
           end,
    lists:foldl(Bump, Table, Chars).
%% Returns 1 if any entry of the frequency table maps to N, 0 otherwise
%% (an integer so callers can sum the results directly).
has_n(Table, N) ->
    case lists:any(fun(Count) -> Count =:= N end, maps:values(Table)) of
        true -> 1;
        false -> 0
    end.
%% Hamming distance between two equal-length strings: the number of
%% positions where the characters differ.
number_of_different_chars(Xs, Ys) ->
    Pairs = lists:zip(Xs, Ys),
    length([diff || {A, B} <- Pairs, A =/= B]).
%% Keep only the characters that agree position-wise in both
%% equal-length strings (the generator pattern {C, C} filters out
%% positions where the two differ).
remove_diff_char(Xs, Ys) ->
    [C || {C, C} <- lists:zip(Xs, Ys)].
%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*-
%% @author <NAME> <<EMAIL>>
%% @doc
%% An Erlang interface to Amazon's DynamoDB.
%%
%% [http://aws.amazon.com/archives/Amazon-DynamoDB/8498019230173117]
%%
%% erlcloUd_ddb implements the entire 20111205 API. erlcloud_ddb2
%% implements a newer version.
%%
%% Method names match DynamoDB operations converted to
%% lower_case_with_underscores. The one exception is query, which is
%% an Erlang reserved word. The `q' method implements Query.
%%
%% Required parameters are passed as function arguments. In addition
%% all methods take an options proplist argument which can be used to
%% pass optional parameters. See function documentation for examples.
%%
%% Table names, key names, attribute names and any other input strings
%% except attribute values must be binary strings.
%%
%% Attribute values may be either `{Type, Value}' or `Value'. If only
%% `Value' is provided then the type is inferred. Lists (iolists are
%% handled) and binaries are assumed to be strings. The following are
%% equivalent: `{s, <<"value">>}', `<<"value">>', `"value"'. Numbers
%% are assumed to be numbers. The following are equivalent: `{n, 42}',
%% `42'. To specify the AWS binary or set types an explicit `Type'
%% must be provided. For example: `{b, <<1,2,3>>}' or `{ns,
%% [4,5,6]}'. Note that binary values will be base64 encoded and
%% decoded automatically.
%%
%% Output is in the form of `{ok, Value}' or `{error, Reason}'. The
%% format of `Value' is controlled by the `out' option, which defaults
%% to `simple'. The possible values are:
%%
%% * `simple' - The most interesting part of the output. For example
%% `get_item' will return the item.
%%
%% * `record' - A record containing all the information from the
%% DynamoDB response. This is useful if you need more detailed
%% information than what is returned with `simple'. For example, with
%% `scan' and `query' the record will contain the last evaluated key
%% which can be used to continue the operation.
%%
%% * `json' - The output from DynamoDB as processed by `jsx:decode'
%% but with no further manipulation. This would rarely be useful,
%% unless the DynamoDB API is updated to include data that is not yet
%% parsed correctly.
%%
%% Items will be returned as a list of `{Name, Value}'. In most cases
%% the output will have type information removed. For example:
%% `[{<<"String Attribute">>, <<"value">>}, {<<"Number Attribute">>,
%% 42}, {<<"BinaryAttribute">>, <<1,2,3>>}]'. The exception is for
%% output fields that are intended to be passed to a subsequent call,
%% such as `unprocessed_keys' and `last_evaluated_key'. Those will
%% contain typed attribute values so that they may be correctly passed
%% to subsequent calls.
%%
%% DynamoDB errors are return in the form `{error, {ErrorCode,
%% Message}}' where `ErrorCode' and 'Message' are both binary
%% strings. List of error codes:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ErrorHandling.html]. So
%% to handle conditional check failures, match `{error,
%% {<<"ConditionalCheckFailedException">>, _}}'.
%%
%% `erlcloud_ddb_util' provides a higher level API that implements common
%% operations that may require multiple DynamoDB API calls.
%%
%% `erlcloud_ddb1' provides a lower level API that takes JSON terms as
%% defined by `jsx'.
%%
%% See the unit tests for additional usage examples beyond what are
%% provided for each function.
%%
%% @end
-module(erlcloud_ddb).
-include("erlcloud.hrl").
-include("erlcloud_aws.hrl").
-include("erlcloud_ddb.hrl").
%%% Library initialization.
-export([configure/2, configure/3, new/2, new/3]).
%%% DynamoDB API
-export([batch_get_item/1, batch_get_item/2, batch_get_item/3,
batch_write_item/1, batch_write_item/2, batch_write_item/3,
create_table/4, create_table/5, create_table/6,
delete_item/2, delete_item/3, delete_item/4,
delete_table/1, delete_table/2, delete_table/3,
describe_table/1, describe_table/2, describe_table/3,
get_item/2, get_item/3, get_item/4,
list_tables/0, list_tables/1, list_tables/2,
put_item/2, put_item/3, put_item/4,
%% Note that query is a Erlang reserved word, so we use q instead
q/2, q/3, q/4,
scan/1, scan/2, scan/3,
update_item/3, update_item/4, update_item/5,
update_table/3, update_table/4, update_table/5
]).
-export_type(
[attr_name/0,
attr_type/0,
batch_get_item_request_item/0,
batch_get_item_return/0,
batch_write_item_delete/0,
batch_write_item_put/0,
batch_write_item_request/0,
batch_write_item_request_item/0,
batch_write_item_return/0,
boolean_opt/1,
comparison_op/0,
create_table_return/0,
ddb_opts/0,
ddb_return/2,
delete_item_opt/0,
delete_item_opts/0,
delete_item_return/0,
delete_table_return/0,
describe_table_return/0,
get_item_opt/0,
get_item_opts/0,
hash_key/0,
hash_range_key/0,
in_attr/0,
in_attr_data/0,
in_attr_data_scalar/0,
in_attr_data_set/0,
in_attr_typed_value/0,
in_attr_value/0,
in_expected/0,
in_expected_item/0,
in_item/0,
in_update/0,
in_updates/0,
item_return/0,
key/0,
key_schema/0,
key_schema_value/0,
list_tables_opt/0,
list_tables_opts/0,
list_tables_return/0,
maybe_list/1,
ok_return/1,
out_attr/0,
out_attr_value/0,
out_item/0,
out_opt/0,
out_type/0,
put_item_opt/0,
put_item_opts/0,
put_item_return/0,
q_opt/0,
q_opts/0,
q_return/0,
range_key/0,
range_key_condition/0,
return_value/0,
scan_filter/0,
scan_filter_item/0,
scan_opt/0,
scan_opts/0,
scan_return/0,
table_name/0,
update_action/0,
update_item_opt/0,
update_item_opts/0,
update_item_return/0,
update_table_return/0
]).
%%%------------------------------------------------------------------------------
%%% Library initialization.
%%%------------------------------------------------------------------------------
-spec new(string(), string()) -> aws_config().
%% @doc Build an #aws_config{} holding the given credentials; all other
%% fields (host etc.) keep their record defaults.
new(AccessKeyID, SecretAccessKey) ->
    #aws_config{access_key_id=AccessKeyID,
                secret_access_key=SecretAccessKey}.
-spec new(string(), string(), string()) -> aws_config().
%% @doc As new/2, but also overrides the DynamoDB endpoint host.
new(AccessKeyID, SecretAccessKey, Host) ->
    #aws_config{access_key_id=AccessKeyID,
                secret_access_key=SecretAccessKey,
                ddb_host=Host}.
-spec configure(string(), string()) -> ok.
%% @doc Store a config built from the credentials in this process's
%% dictionary under the key aws_config, making it the default for
%% subsequent calls from the same process.
configure(AccessKeyID, SecretAccessKey) ->
    put(aws_config, new(AccessKeyID, SecretAccessKey)),
    ok.
-spec configure(string(), string(), string()) -> ok.
%% @doc As configure/2, but also sets the DynamoDB endpoint host.
configure(AccessKeyID, SecretAccessKey, Host) ->
    put(aws_config, new(AccessKeyID, SecretAccessKey, Host)),
    ok.
%% Resolve the effective default config (delegates to erlcloud_aws).
default_config() -> erlcloud_aws:default_config().
%%%------------------------------------------------------------------------------
%%% Shared Types
%%%------------------------------------------------------------------------------
-type table_name() :: binary().
-type attr_type() :: s | n | b | ss | ns | bs.
-type attr_name() :: binary().
-type maybe_list(T) :: T | [T].
-type in_attr_data_scalar() :: iolist() | binary() | number().
-type in_attr_data_set() :: [iolist() | binary()] | [number()].
-type in_attr_data() :: in_attr_data_scalar() | in_attr_data_set().
-type in_attr_typed_value() :: {attr_type(), in_attr_data()}.
-type in_attr_value() :: in_attr_data() | in_attr_typed_value().
-type in_attr() :: {attr_name(), in_attr_value()}.
-type in_expected_item() :: {attr_name(), false | in_attr_value()}.
-type in_expected() :: maybe_list(in_expected_item()).
-type in_item() :: [in_attr()].
-type json_pair() :: {binary(), jsx:json_term()}.
-type json_attr_type() :: binary().
-type json_attr_data() :: binary() | [binary()].
-type json_attr_value() :: {json_attr_type(), json_attr_data()}.
-type json_attr() :: {attr_name(), [json_attr_value()]}.
-type json_item() :: [json_attr()].
-type json_expected() :: [{attr_name(), [json_attr_value()] | [{binary(), boolean()}]}].
-type json_key() :: [json_attr(),...].
-type hash_key() :: in_attr_value().
-type range_key() :: in_attr_value().
-type hash_range_key() :: {hash_key(), range_key()}.
-type key() :: hash_key() | hash_range_key().
-type key_schema_value() :: {attr_name(), attr_type()}.
-type key_schema() :: key_schema_value() | {key_schema_value(), key_schema_value()}.
-type return_value() :: none | all_old | updated_old | all_new | updated_new.
-type comparison_op() :: eq | ne | le | lt | ge | gt | not_null | null | contains | not_contains |
begins_with | in | between.
-type out_attr_value() :: binary() | number() | [binary()] | [number()].
-type out_attr() :: {attr_name(), out_attr_value()}.
-type out_item() :: [out_attr()].
-type ok_return(T) :: {ok, T} | {error, term()}.
%%%------------------------------------------------------------------------------
%%% Shared Dynamizers
%%%------------------------------------------------------------------------------
%% Convert terms into the form expected by erlcloud_ddb1
-spec dynamize_type(attr_type()) -> binary().
%% Map a scalar key type atom to its DynamoDB wire name. Only the three
%% scalar key types are accepted; set types are not valid key types.
dynamize_type(s) ->
    <<"S">>;
dynamize_type(n) ->
    <<"N">>;
dynamize_type(b) ->
    <<"B">>.
-spec dynamize_number(number()) -> binary().
%% Serialize a number into DynamoDB's decimal-string form.
dynamize_number(Value) when is_integer(Value) ->
    list_to_binary(integer_to_list(Value));
dynamize_number(Value) when is_float(Value) ->
    %% ~p prints the shortest float representation that round-trips,
    %% unlike float_to_list/1 which emits excessive digits.
    Formatted = io_lib:format("~p", [Value]),
    iolist_to_binary(Formatted).
-spec dynamize_set(attr_type(), in_attr_data_set()) -> [binary()].
%% Serialize every member of a string (ss), number (ns) or binary (bs)
%% set; binary members are base64 encoded for the wire.
dynamize_set(ss, Values) ->
    lists:map(fun(V) -> iolist_to_binary(V) end, Values);
dynamize_set(ns, Values) ->
    lists:map(fun(V) -> dynamize_number(V) end, Values);
dynamize_set(bs, Values) ->
    lists:map(fun(V) -> base64:encode(V) end, Values).
-spec dynamize_value(in_attr_value()) -> json_attr_value().
%% Serialize one attribute value to its {TypeName, WireData} JSON pair.
%% Explicitly-typed tuples come first; the untyped clauses below infer:
%% binaries/lists/atoms => string, numbers => number. Binary (b/bs)
%% payloads are base64 encoded.
dynamize_value({s, Value}) when is_binary(Value) ->
    {<<"S">>, Value};
dynamize_value({s, Value}) when is_list(Value) ->
    {<<"S">>, list_to_binary(Value)};
dynamize_value({s, Value}) when is_atom(Value) ->
    {<<"S">>, atom_to_binary(Value, utf8)};
dynamize_value({n, Value}) when is_number(Value) ->
    {<<"N">>, dynamize_number(Value)};
dynamize_value({b, Value}) when is_binary(Value) orelse is_list(Value) ->
    {<<"B">>, base64:encode(Value)};
dynamize_value({ss, Value}) when is_list(Value) ->
    {<<"SS">>, dynamize_set(ss, Value)};
dynamize_value({ns, Value}) when is_list(Value) ->
    {<<"NS">>, dynamize_set(ns, Value)};
dynamize_value({bs, Value}) when is_list(Value) ->
    {<<"BS">>, dynamize_set(bs, Value)};
%% Untyped values: infer the DynamoDB type as documented above.
dynamize_value(Value) when is_binary(Value) ->
    dynamize_value({s, Value});
dynamize_value(Value) when is_list(Value) ->
    dynamize_value({s, Value});
dynamize_value(Value) when is_number(Value) ->
    dynamize_value({n, Value});
dynamize_value(Value) when is_atom(Value) ->
    dynamize_value({s, atom_to_binary(Value, utf8)});
%% Anything else is a caller error; raise a descriptive reason.
dynamize_value(Value) ->
    error({erlcloud_ddb, {invalid_attr_value, Value}}).
-spec dynamize_attr(in_attr()) -> json_attr().
%% Serialize one {Name, Value} attribute; the value pair is wrapped in a
%% list to form a one-entry JSON object for jsx.
dynamize_attr({Name, Value}) ->
    {Name, [dynamize_value(Value)]}.
-spec dynamize_key(key()) -> erlcloud_ddb1:key().
%% Serialize a hash key or {Hash, Range} key pair. The first clause
%% disambiguates an explicitly-typed value such as {s, <<"x">>} (atom
%% first element) from a {HashKey, RangeKey} pair.
dynamize_key({HashType, _} = HashKey) when is_atom(HashType) ->
    dynamize_value(HashKey);
dynamize_key({HashKey, RangeKey}) ->
    {dynamize_value(HashKey), dynamize_value(RangeKey)};
dynamize_key(HashKey) ->
    dynamize_value(HashKey).
-spec dynamize_key_schema_value(key_schema_value()) -> erlcloud_ddb1:key_schema_value().
%% Serialize one {AttrName, Type} schema element (type atom -> wire name).
dynamize_key_schema_value({Name, Type}) ->
    {Name, dynamize_type(Type)}.
-spec dynamize_key_schema(key_schema()) -> erlcloud_ddb1:key_schema().
%% Serialize a hash-only or {Hash, Range} key schema; a pair of 2-tuples
%% is a hash+range schema, anything else is a lone hash key element.
dynamize_key_schema({{_, _} = HashKey, {_, _} = RangeKey}) ->
    {dynamize_key_schema_value(HashKey), dynamize_key_schema_value(RangeKey)};
dynamize_key_schema(HashKey) ->
    dynamize_key_schema_value(HashKey).
-spec dynamize_maybe_list(fun((A) -> B), maybe_list(A)) -> [B].
%% Apply DynamizeItem to each element of a list, or to a single bare
%% item, always producing a list — lets callers accept "one or many".
dynamize_maybe_list(DynamizeItem, Items) when is_list(Items) ->
    lists:map(DynamizeItem, Items);
dynamize_maybe_list(DynamizeItem, Item) ->
    [DynamizeItem(Item)].
-spec dynamize_expected_item(in_expected_item()) -> json_pair().
%% Serialize one conditional-write expectation: `false' asserts the
%% attribute must not exist; a value asserts it must equal that value.
dynamize_expected_item({Name, false}) ->
    {Name, [{<<"Exists">>, false}]};
dynamize_expected_item({Name, Value}) ->
    {Name, [{<<"Value">>, [dynamize_value(Value)]}]}.
-spec dynamize_expected(in_expected()) -> json_expected().
%% Serialize one expectation or a list of them (see dynamize_maybe_list).
dynamize_expected(Expected) ->
    dynamize_maybe_list(fun dynamize_expected_item/1, Expected).
-spec dynamize_return_value(return_value()) -> binary().
%% Map a ReturnValues option atom to its DynamoDB wire name; any other
%% atom is a caller error and fails with case_clause.
dynamize_return_value(ReturnValue) ->
    case ReturnValue of
        none -> <<"NONE">>;
        all_old -> <<"ALL_OLD">>;
        updated_old -> <<"UPDATED_OLD">>;
        all_new -> <<"ALL_NEW">>;
        updated_new -> <<"UPDATED_NEW">>
    end.
-spec dynamize_item(in_item()) -> json_item().
%% Serialize a whole item (list of attributes); any non-list input is a
%% caller error and raises a descriptive reason.
dynamize_item(Item) when is_list(Item) ->
    [dynamize_attr(Attr) || Attr <- Item];
dynamize_item(Item) ->
    error({erlcloud_ddb, {invalid_item, Item}}).
-spec dynamize_comparison(comparison_op()) -> {binary(), binary()}.
%% Build the ComparisonOperator JSON pair used by Query/Scan conditions.
dynamize_comparison(Op) ->
    {<<"ComparisonOperator">>, comparison_op_name(Op)}.

%% Map a comparison-operator atom to its DynamoDB wire name.
comparison_op_name(eq) -> <<"EQ">>;
comparison_op_name(ne) -> <<"NE">>;
comparison_op_name(le) -> <<"LE">>;
comparison_op_name(lt) -> <<"LT">>;
comparison_op_name(ge) -> <<"GE">>;
comparison_op_name(gt) -> <<"GT">>;
comparison_op_name(not_null) -> <<"NOT_NULL">>;
comparison_op_name(null) -> <<"NULL">>;
comparison_op_name(contains) -> <<"CONTAINS">>;
comparison_op_name(not_contains) -> <<"NOT_CONTAINS">>;
comparison_op_name(begins_with) -> <<"BEGINS_WITH">>;
comparison_op_name(in) -> <<"IN">>;
comparison_op_name(between) -> <<"BETWEEN">>.
%%%------------------------------------------------------------------------------
%%% Shared Undynamizers
%%%------------------------------------------------------------------------------
%% Map a DynamoDB attribute type code to its atom form
%% (s = string, n = number, b = binary).
-spec undynamize_type(json_attr_type()) -> attr_type().
undynamize_type(<<"S">>) ->
    s;
undynamize_type(<<"N">>) ->
    n;
undynamize_type(<<"B">>) ->
    b.
%% Parse a DynamoDB number (sent as a string) into an Erlang number.
%% A decimal point selects float parsing; everything else is an integer.
-spec undynamize_number(binary()) -> number().
undynamize_number(Binary) ->
    case binary:match(Binary, <<".">>) of
        nomatch ->
            binary_to_integer(Binary);
        _Found ->
            binary_to_float(Binary)
    end.
%% Convert one JSON attribute value to its plain Erlang form. String
%% scalars pass through as binaries, numbers are parsed, binary values
%% are base64 decoded; the set types (SS/NS/BS) map each member the same
%% way. The type tag is dropped here — see undynamize_value_typed/1 for
%% the tagged variant.
-spec undynamize_value(json_attr_value()) -> out_attr_value().
undynamize_value({<<"S">>, Value}) when is_binary(Value) ->
    Value;
undynamize_value({<<"N">>, Value}) ->
    undynamize_number(Value);
undynamize_value({<<"B">>, Value}) ->
    base64:decode(Value);
undynamize_value({<<"SS">>, Values}) when is_list(Values) ->
    Values;
undynamize_value({<<"NS">>, Values}) ->
    [undynamize_number(Value) || Value <- Values];
undynamize_value({<<"BS">>, Values}) ->
    [base64:decode(Value) || Value <- Values].
%% Decode a single JSON attribute ({Name, [TypedValue]}) to {Name, Value}.
-spec undynamize_attr(json_attr()) -> out_attr().
undynamize_attr({Name, [ValueJson]}) ->
    {Name, undynamize_value(ValueJson)}.
%% Map PairFun over the pairs of a decoded JSON object.
-spec undynamize_object(fun((json_pair()) -> A), [json_pair()] | [{}]) -> [A].
undynamize_object(_PairFun, [{}]) ->
    %% jsx encodes an empty JSON object as [{}]; treat it as no pairs.
    [];
undynamize_object(PairFun, Pairs) ->
    lists:map(PairFun, Pairs).
%% Decode a whole JSON item into a list of {Name, Value} attributes.
-spec undynamize_item(json_item()) -> out_item().
undynamize_item(Json) ->
    undynamize_object(fun undynamize_attr/1, Json).
%% Like undynamize_value/1 for scalars, but keep the type tag so the
%% result can be fed back into a request (e.g. as an exclusive start key).
-spec undynamize_value_typed(json_attr_value()) -> in_attr_typed_value().
undynamize_value_typed({<<"S">>, Value}) ->
    {s, Value};
undynamize_value_typed({<<"N">>, Value}) ->
    {n, undynamize_number(Value)};
undynamize_value_typed({<<"B">>, Value}) ->
    {b, base64:decode(Value)}.
%% Decode a JSON key: hash-only keys yield a single typed value;
%% hash+range keys yield a pair of typed values.
-spec undynamize_key(json_key()) -> key().
undynamize_key([{<<"HashKeyElement">>, [HashKey]}]) ->
    undynamize_value_typed(HashKey);
undynamize_key([{<<"HashKeyElement">>, [HashKey]}, {<<"RangeKeyElement">>, [RangeKey]}]) ->
    {undynamize_value_typed(HashKey), undynamize_value_typed(RangeKey)}.
%% Decode one key schema element to {Name, Type}.
%% NOTE(review): this matches the AttributeName/AttributeType pairs in a
%% fixed order — assumes jsx preserves the order the service sends; a
%% reordered response would crash here. TODO confirm against live output.
-spec undynamize_key_schema_value(jsx:json_term()) -> key_schema().
undynamize_key_schema_value([{<<"AttributeName">>, Name}, {<<"AttributeType">>, Type}]) ->
    {Name, undynamize_type(Type)}.
%% Decode a table key schema: hash-only or {hash, range}.
-spec undynamize_key_schema(jsx:json_term()) -> key_schema().
undynamize_key_schema([{<<"HashKeyElement">>, HashKey}]) ->
    undynamize_key_schema_value(HashKey);
undynamize_key_schema([{<<"HashKeyElement">>, HashKey}, {<<"RangeKeyElement">>, RangeKey}]) ->
    {undynamize_key_schema_value(HashKey), undynamize_key_schema_value(RangeKey)}.
%% A field table row is {JsonKey, RecordFieldIndex, DecodeFun}.
-type field_table() :: [{binary(), pos_integer(), fun((jsx:json_term()) -> term())}].
%% Fold one JSON pair into the record under construction. Keys missing
%% from the table are ignored, so new server fields don't break decoding.
-spec undynamize_folder(field_table(), json_pair(), tuple()) -> tuple().
undynamize_folder(Table, {Key, Value}, Record) ->
    case lists:keyfind(Key, 1, Table) of
        false ->
            Record;
        {Key, FieldIndex, Decode} ->
            setelement(FieldIndex, Record, Decode(Value))
    end.
%% A record descriptor pairs an empty record with its field table.
-type record_desc() :: {tuple(), field_table()}.
%% Decode a JSON object into the given record, field by field.
-spec undynamize_record(record_desc(), jsx:json_term()) -> tuple().
undynamize_record({Empty, _Table}, [{}]) ->
    %% jsx returns [{}] for empty objects; nothing to fill in.
    Empty;
undynamize_record({Empty, Table}, Fields) ->
    FoldPair = fun(Pair, Acc) -> undynamize_folder(Table, Pair, Acc) end,
    lists:foldl(FoldPair, Empty, Fields).
%%%------------------------------------------------------------------------------
%%% Shared Options
%%%------------------------------------------------------------------------------
%% Identity function; used as the no-op conversion in option/field tables.
-spec id(X) -> X.
id(X) -> X.
-type out_type() :: json | record | simple.
-type out_opt() :: {out, out_type()}.
-type boolean_opt(Name) :: Name | {Name, boolean()}.
-type property() :: proplists:property().
%% Options are split into those forwarded to AWS (aws_opts) and those
%% consumed locally by this module (ddb_opts).
-type aws_opts() :: [json_pair()].
-type ddb_opts() :: [out_opt()].
-type opts() :: {aws_opts(), ddb_opts()}.
%% Validate a local (non-AWS) option; crash with a tagged reason for
%% anything unrecognized so bad options surface at the call site.
-spec verify_ddb_opt(atom(), term()) -> ok.
verify_ddb_opt(out, Value) when Value =:= json; Value =:= record; Value =:= simple ->
    ok;
verify_ddb_opt(Name, Value) ->
    error({erlcloud_ddb, {invalid_opt, {Name, Value}}}).
%% An option table row is {OptionAtom, AwsJsonKey, DynamizeFun}.
-type opt_table() :: [{atom(), binary(), fun((_) -> jsx:json_term())}].
%% Fold a single caller option into the {AwsOpts, DdbOpts} accumulator.
%% Table entries become AWS request fields; anything else must be a valid
%% local option. Bare atoms are shorthand for {Atom, true}.
-spec opt_folder(opt_table(), property(), opts()) -> opts().
opt_folder(_Table, {_Name, undefined}, Acc) ->
    %% Options explicitly set to undefined are dropped entirely.
    Acc;
opt_folder(Table, {Name, Value}, {AwsOpts, DdbOpts}) ->
    case lists:keyfind(Name, 1, Table) of
        false ->
            verify_ddb_opt(Name, Value),
            {AwsOpts, [{Name, Value} | DdbOpts]};
        {Name, AwsKey, Dynamize} ->
            {[{AwsKey, Dynamize(Value)} | AwsOpts], DdbOpts}
    end;
opt_folder(Table, BareName, Acc) ->
    opt_folder(Table, {BareName, true}, Acc).
%% Split a caller option list into {AwsOpts, DdbOpts} using the given
%% option table. Non-list input crashes with a tagged reason.
-spec opts(opt_table(), proplist()) -> opts().
opts(Table, Opts) when is_list(Opts) ->
    lists:foldl(fun(Opt, A) -> opt_folder(Table, Opt, A) end, {[], []}, Opts);
opts(_, _) ->
    error({erlcloud_ddb, opts_not_list}).
%% Options accepted by GetItem (and reused by BatchGetItem per-table entries).
-type get_item_opt() :: {attributes_to_get, [binary()]} |
                        boolean_opt(consistent_read) |
                        out_opt().
-type get_item_opts() :: [get_item_opt()].
-spec get_item_opts() -> opt_table().
get_item_opts() ->
    [{attributes_to_get, <<"AttributesToGet">>, fun id/1},
     {consistent_read, <<"ConsistentRead">>, fun id/1}].
%%%------------------------------------------------------------------------------
%%% Output
%%%------------------------------------------------------------------------------
-type ddb_return(Record, Simple) :: {ok, jsx:json_term() | Record | Simple} | {error, term()}.
-type item_return() :: ok_return(out_item()).
-type undynamize_fun() :: fun((jsx:json_term()) -> tuple()).
%% Shape an erlcloud_ddb1 response according to the `out' option:
%% `json' passes the raw jsx term through, `record' decodes it, and
%% `simple' (the default) decodes it and tags it for later unwrapping.
-spec out(erlcloud_ddb1:json_return(), undynamize_fun(), ddb_opts())
    -> {ok, jsx:json_term() | tuple()} |
       {simple, term()} |
       {error, term()}.
out({error, _} = Error, _Undynamize, _Opts) ->
    Error;
out({ok, Json}, Undynamize, Opts) ->
    format_out(proplists:get_value(out, Opts, simple), Json, Undynamize).

%% Apply the requested output format to a successful response body.
format_out(json, Json, _Undynamize) -> {ok, Json};
format_out(record, Json, Undynamize) -> {ok, Undynamize(Json)};
format_out(simple, Json, Undynamize) -> {simple, Undynamize(Json)}.

%% Returns specified field of tuple for simple return
-spec out(erlcloud_ddb1:json_return(), undynamize_fun(), ddb_opts(), pos_integer())
    -> ok_return(term()).
out(Result, Undynamize, Opts, Index) ->
    out(Result, Undynamize, Opts, Index, {error, no_return}).

%% As out/4, but with an explicit value to return when the selected
%% record field is undefined.
-spec out(erlcloud_ddb1:json_return(), undynamize_fun(), ddb_opts(), pos_integer(), ok_return(term()))
    -> ok_return(term()).
out(Result, Undynamize, Opts, Index, Default) ->
    case out(Result, Undynamize, Opts) of
        {simple, Record} ->
            wrap_field(element(Index, Record), Default);
        Passthrough ->
            Passthrough
    end.

%% Wrap a defined field value as {ok, _}; fall back to Default otherwise.
wrap_field(undefined, Default) -> Default;
wrap_field(Value, _Default) -> {ok, Value}.
%%%------------------------------------------------------------------------------
%%% Shared Records
%%%------------------------------------------------------------------------------
%% Field table for decoding a ProvisionedThroughput JSON object.
-spec provisioned_throughput_record() -> record_desc().
provisioned_throughput_record() ->
    {#ddb_provisioned_throughput{},
     [{<<"ReadCapacityUnits">>, #ddb_provisioned_throughput.read_capacity_units, fun id/1},
      {<<"WriteCapacityUnits">>, #ddb_provisioned_throughput.write_capacity_units, fun id/1},
      {<<"LastDecreaseDateTime">>, #ddb_provisioned_throughput.last_decrease_date_time, fun id/1},
      {<<"LastIncreaseDateTime">>, #ddb_provisioned_throughput.last_increase_date_time, fun id/1}
     ]}.
%% Field table for decoding a TableDescription JSON object.
-spec table_description_record() -> record_desc().
table_description_record() ->
    {#ddb_table_description{},
     [{<<"CreationDateTime">>, #ddb_table_description.creation_date_time, fun id/1},
      {<<"KeySchema">>, #ddb_table_description.key_schema, fun(V) -> undynamize_key_schema(V) end},
      {<<"ProvisionedThroughput">>, #ddb_table_description.provisioned_throughput,
       fun(V) -> undynamize_record(provisioned_throughput_record(), V) end},
      {<<"TableName">>, #ddb_table_description.table_name, fun id/1},
      {<<"TableStatus">>, #ddb_table_description.table_status, fun id/1}
     ]}.
%%%------------------------------------------------------------------------------
%%% BatchGetItem
%%%------------------------------------------------------------------------------
%% One BatchGetItem table entry: a table name, its keys and optional
%% per-table get options.
-type batch_get_item_request_item() :: {table_name(), [key(),...], get_item_opts()} | {table_name(), [key(),...]}.
-spec dynamize_batch_get_item_request_item(batch_get_item_request_item())
    -> {binary(), jsx:json_term(), jsx:json_term()}.
dynamize_batch_get_item_request_item({Table, Keys}) ->
    dynamize_batch_get_item_request_item({Table, Keys, []});
dynamize_batch_get_item_request_item({Table, Keys, Opts}) ->
    %% Per-table options must all be AWS options; the match on [] for the
    %% local-option side enforces that (out/1 etc. are not valid here).
    {AwsOpts, []} = opts(get_item_opts(), Opts),
    {Table, [dynamize_key(K) || K <- Keys], AwsOpts}.
-type batch_get_item_request_items() :: maybe_list(batch_get_item_request_item()).
%% Convert one table entry or a list of them into request form.
-spec dynamize_batch_get_item_request_items(batch_get_item_request_items()) -> [tuple()].
dynamize_batch_get_item_request_items(Request) ->
    dynamize_maybe_list(fun dynamize_batch_get_item_request_item/1, Request).
%% Field table for decoding one per-table BatchGetItem response object.
-spec batch_get_item_response_record(table_name()) -> record_desc().
batch_get_item_response_record(Table) ->
    {#ddb_batch_get_item_response{table = Table},
     [{<<"Items">>, #ddb_batch_get_item_response.items, fun(V) -> [undynamize_item(I) || I <- V] end},
      {<<"ConsumedCapacityUnits">>, #ddb_batch_get_item_response.consumed_capacity_units, fun id/1}
     ]}.
%% Fold one JSON field of an UnprocessedKeys table entry back into
%% {Table, Keys, Opts} request form so callers can resubmit it.
-spec batch_get_item_request_item_folder({binary(), term()}, batch_get_item_request_item())
    -> batch_get_item_request_item().
batch_get_item_request_item_folder({<<"Keys">>, Keys}, {Table, _, Opts}) ->
    {Table, [undynamize_key(K) || K <- Keys], Opts};
batch_get_item_request_item_folder({<<"AttributesToGet">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{attributes_to_get, Value} | Opts]};
batch_get_item_request_item_folder({<<"ConsistentRead">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{consistent_read, Value} | Opts]}.
%% Decode one UnprocessedKeys table entry back into request form.
-spec undynamize_batch_get_item_request_item(table_name(), jsx:json_term()) -> batch_get_item_request_item().
undynamize_batch_get_item_request_item(Table, Json) ->
    lists:foldl(fun batch_get_item_request_item_folder/2, {Table, [], []}, Json).
%% Field table for decoding a whole BatchGetItem response.
-spec batch_get_item_record() -> record_desc().
batch_get_item_record() ->
    {#ddb_batch_get_item{},
     [{<<"Responses">>, #ddb_batch_get_item.responses,
       fun(V) -> undynamize_object(fun({Table, Json}) ->
                                           undynamize_record(batch_get_item_response_record(Table), Json)
                                   end, V)
       end},
      {<<"UnprocessedKeys">>, #ddb_batch_get_item.unprocessed_keys,
       fun(V) -> undynamize_object(fun({Table, Json}) ->
                                           undynamize_batch_get_item_request_item(Table, Json)
                                   end, V)
       end}
     ]}.
-type batch_get_item_return() :: ddb_return(#ddb_batch_get_item{}, [erlcloud_ddb:out_item()]).
%% BatchGetItem with default options and configuration.
-spec batch_get_item(batch_get_item_request_items()) -> batch_get_item_return().
batch_get_item(RequestItems) ->
    batch_get_item(RequestItems, [], default_config()).
%% BatchGetItem with default configuration.
-spec batch_get_item(batch_get_item_request_items(), ddb_opts()) -> batch_get_item_return().
batch_get_item(RequestItems, Opts) ->
    batch_get_item(RequestItems, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Get attributes "user" and "friends" from items with keys "Julie"
%% and "Mingus" in table "comp2" and attributes "user" and "status"
%% from three items in table "comp1".
%%
%% `
%% {ok, Items} = erlcloud_ddb:batch_get_item(
%% [{<<"comp2">>, [<<"Julie">>,
%% <<"Mingus">>],
%% [{attributes_to_get, [<<"user">>, <<"friends">>]}]},
%% {<<"comp1">>, [{<<"Casey">>, 1319509152},
%% {<<"Dave">>, 1319509155},
%% {<<"Riley">>, 1319509158}],
%% [{attributes_to_get, [<<"user">>, <<"status">>]}]}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a BatchGetItem request. For the `simple' output format all
%% items from all tables are flattened into one list; if the service
%% left any keys unprocessed we return {error, unprocessed} rather than
%% a partial result.
-spec batch_get_item(batch_get_item_request_items(), ddb_opts(), aws_config()) -> batch_get_item_return().
batch_get_item(RequestItems, Opts, Config) ->
    %% This operation defines no top-level AWS options; only local opts.
    {[], DdbOpts} = opts([], Opts),
    Return = erlcloud_ddb1:batch_get_item(dynamize_batch_get_item_request_items(RequestItems), Config),
    case out(Return, fun(Json) -> undynamize_record(batch_get_item_record(), Json) end, DdbOpts) of
        {simple, #ddb_batch_get_item{unprocessed_keys = [_|_]}} ->
            %% TODO resend unprocessed keys automatically (or controlled by option).
            %% For now return an error - you can handle manually if you don't use simple.
            {error, unprocessed};
        {simple, #ddb_batch_get_item{unprocessed_keys = [], responses = Responses}} ->
            %% Simple return for batch_get_item is all items from all tables in a single list
            {ok, lists:flatmap(fun(#ddb_batch_get_item_response{items = I}) -> I end, Responses)};
        {ok, _} = Out -> Out;
        {error, _} = Out -> Out
    end.
%%%------------------------------------------------------------------------------
%%% BatchWriteItem
%%%------------------------------------------------------------------------------
%% A BatchWriteItem request is either a put of a full item or a delete by key.
-type batch_write_item_put() :: {put, in_item()}.
-type batch_write_item_delete() :: {delete, key()}.
-type batch_write_item_request() :: batch_write_item_put() | batch_write_item_delete().
-type batch_write_item_request_item() :: {table_name(), [batch_write_item_request()]}.
%% Convert one put/delete request into its wire form.
-spec dynamize_batch_write_item_request(batch_write_item_request()) -> erlcloud_ddb1:batch_write_item_request().
dynamize_batch_write_item_request({put, Item}) ->
    {put, dynamize_item(Item)};
dynamize_batch_write_item_request({delete, Key}) ->
    {delete, dynamize_key(Key)}.
%% Convert one table's worth of requests into its wire form.
-spec dynamize_batch_write_item_request_item(batch_write_item_request_item())
    -> json_pair().
dynamize_batch_write_item_request_item({Table, Requests}) ->
    {Table, [dynamize_batch_write_item_request(R) || R <- Requests]}.
-type batch_write_item_request_items() :: maybe_list(batch_write_item_request_item()).
%% Convert one table entry or a list of them into request form.
-spec dynamize_batch_write_item_request_items(batch_write_item_request_items()) -> [tuple()].
dynamize_batch_write_item_request_items(Request) ->
    dynamize_maybe_list(fun dynamize_batch_write_item_request_item/1, Request).
%% Field table for decoding one per-table BatchWriteItem response object.
-spec batch_write_item_response_record(table_name()) -> record_desc().
batch_write_item_response_record(Table) ->
    {#ddb_batch_write_item_response{table = Table},
     [{<<"ConsumedCapacityUnits">>, #ddb_batch_write_item_response.consumed_capacity_units, fun id/1}
     ]}.
%% Decode a single JSON attribute keeping the type tag on the value.
-spec undynamize_attr_typed(json_attr()) -> in_attr().
undynamize_attr_typed({Name, [ValueJson]}) ->
    {Name, undynamize_value_typed(ValueJson)}.
%% Decode a whole JSON item into typed attributes, suitable for
%% resubmitting in a request.
-spec undynamize_item_typed(json_item()) -> in_item().
undynamize_item_typed(Json) ->
    undynamize_object(fun undynamize_attr_typed/1, Json).
%% Fold one UnprocessedItems entry (a single-pair object, either
%% PutRequest or DeleteRequest) back onto the request list.
-spec batch_write_item_request_folder([{binary(), term()}], batch_write_item_request_item())
    -> batch_write_item_request_item().
batch_write_item_request_folder([{<<"PutRequest">>, [{<<"Item">>, Item}]}], {Table, Requests}) ->
    {Table, [{put, undynamize_item_typed(Item)} | Requests]};
batch_write_item_request_folder([{<<"DeleteRequest">>, [{<<"Key">>, Key}]}], {Table, Requests}) ->
    {Table, [{delete, undynamize_key(Key)} | Requests]}.
%% Decode one UnprocessedItems table entry back into request form.
-spec undynamize_batch_write_item_request_item(table_name(), jsx:json_term()) -> batch_write_item_request_item().
undynamize_batch_write_item_request_item(Table, Json) ->
    {Table, Requests} = lists:foldl(fun batch_write_item_request_folder/2, {Table, []}, Json),
    %% The fold conses, so reverse to restore the original request order.
    {Table, lists:reverse(Requests)}.
%% Field table for decoding a whole BatchWriteItem response.
-spec batch_write_item_record() -> record_desc().
batch_write_item_record() ->
    {#ddb_batch_write_item{},
     [{<<"Responses">>, #ddb_batch_write_item.responses,
       fun(V) -> undynamize_object(fun({Table, Json}) ->
                                           undynamize_record(batch_write_item_response_record(Table), Json)
                                   end, V)
       end},
      {<<"UnprocessedItems">>, #ddb_batch_write_item.unprocessed_items,
       fun(V) -> undynamize_object(fun({Table, Json}) ->
                                           undynamize_batch_write_item_request_item(Table, Json)
                                   end, V)
       end}
     ]}.
-type batch_write_item_return() :: ddb_return(#ddb_batch_write_item{}, #ddb_batch_write_item{}).
%% BatchWriteItem with default options and configuration.
-spec batch_write_item(batch_write_item_request_items()) -> batch_write_item_return().
batch_write_item(RequestItems) ->
    batch_write_item(RequestItems, [], default_config()).
%% BatchWriteItem with default configuration.
-spec batch_write_item(batch_write_item_request_items(), ddb_opts()) -> batch_write_item_return().
batch_write_item(RequestItems, Opts) ->
    batch_write_item(RequestItems, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Put and delete an item in the "Reply" table and put an item in the "Thread" table.
%%
%% `
%% {ok, _} = erlcloud_ddb:batch_write_item(
%% [{<<"Reply">>, [{put, [{<<"ReplyDateTime">>, <<"2012-04-03T11:04:47.034Z">>},
%% {<<"Id">>, <<"Amazon DynamoDB#DynamoDB Thread 5">>}]},
%% {delete, {<<"Amazon DynamoDB#DynamoDB Thread 4">>,
%% <<"oops - accidental row">>}}]},
%% {<<"Thread">>, [{put, [{<<"ForumName">>, <<"Amazon DynamoDB">>},
%% {<<"Subject">>, <<"DynamoDB Thread 5">>}]}]}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a BatchWriteItem request. If the service left any items
%% unprocessed, the `simple' output format returns {error, unprocessed}
%% rather than a partial result; otherwise it returns the whole record.
-spec batch_write_item(batch_write_item_request_items(), ddb_opts(), aws_config()) -> batch_write_item_return().
batch_write_item(RequestItems, Opts, Config) ->
    %% This operation defines no top-level AWS options; only local opts.
    {[], DdbOpts} = opts([], Opts),
    Return = erlcloud_ddb1:batch_write_item(dynamize_batch_write_item_request_items(RequestItems), Config),
    case out(Return, fun(Json) -> undynamize_record(batch_write_item_record(), Json) end, DdbOpts) of
        {simple, #ddb_batch_write_item{unprocessed_items = [_|_]}} ->
            %% TODO resend unprocessed items automatically (or controlled by option).
            %% For now return an error - you can handle manually if you don't use simple.
            {error, unprocessed};
        {simple, Record} -> {ok, Record};
        {ok, _} = Out -> Out;
        {error, _} = Out -> Out
    end.
%%%------------------------------------------------------------------------------
%%% CreateTable
%%%------------------------------------------------------------------------------
%% Field table for decoding a CreateTable response.
-spec create_table_record() -> record_desc().
create_table_record() ->
    {#ddb_create_table{},
     [{<<"TableDescription">>, #ddb_create_table.table_description,
       fun(V) -> undynamize_record(table_description_record(), V) end}
     ]}.
-type create_table_return() :: ddb_return(#ddb_create_table{}, #ddb_table_description{}).
%% CreateTable with default options and configuration.
-spec create_table(table_name(), key_schema(), non_neg_integer(), non_neg_integer()) -> create_table_return().
create_table(Table, KeySchema, ReadUnits, WriteUnits) ->
    create_table(Table, KeySchema, ReadUnits, WriteUnits, [], default_config()).
%% CreateTable with default configuration.
-spec create_table(table_name(), key_schema(), non_neg_integer(), non_neg_integer(), ddb_opts())
    -> create_table_return().
create_table(Table, KeySchema, ReadUnits, WriteUnits, Opts) ->
    create_table(Table, KeySchema, ReadUnits, WriteUnits, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Create "comp-table" with "user" as a string hash key and "time" as
%% a number range key and provisioned throughput of 5 read units and
%% 10 write units.
%%
%% `
%% {ok, _} = erlcloud_ddb:create_table(<<"comp-table">>, {{<<"user">>, s}, {<<"time">>, n}}, 5, 10)
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a CreateTable request; the simple return is the table description.
-spec create_table(table_name(), key_schema(), non_neg_integer(), non_neg_integer(), ddb_opts(), aws_config())
    -> create_table_return().
create_table(Table, KeySchema, ReadUnits, WriteUnits, Opts, Config) ->
    %% This operation defines no top-level AWS options; only local opts.
    {[], DdbOpts} = opts([], Opts),
    Return = erlcloud_ddb1:create_table(Table, dynamize_key_schema(KeySchema), ReadUnits, WriteUnits, Config),
    out(Return, fun(Json) -> undynamize_record(create_table_record(), Json) end, DdbOpts,
        #ddb_create_table.table_description).
%%%------------------------------------------------------------------------------
%%% DeleteItem
%%%------------------------------------------------------------------------------
%% Options accepted by DeleteItem.
-type delete_item_opt() :: {expected, in_expected()} |
                           {return_values, none | all_old} |
                           out_opt().
-type delete_item_opts() :: [delete_item_opt()].
-spec delete_item_opts() -> opt_table().
delete_item_opts() ->
    [{expected, <<"Expected">>, fun dynamize_expected/1},
     {return_values, <<"ReturnValues">>, fun dynamize_return_value/1}].
%% Field table for decoding a DeleteItem response.
-spec delete_item_record() -> record_desc().
delete_item_record() ->
    {#ddb_delete_item{},
     [{<<"Attributes">>, #ddb_delete_item.attributes, fun undynamize_item/1},
      {<<"ConsumedCapacityUnits">>, #ddb_delete_item.consumed_capacity_units, fun id/1}
     ]}.
-type delete_item_return() :: ddb_return(#ddb_delete_item{}, out_item()).
%% DeleteItem with default options and configuration.
-spec delete_item(table_name(), key()) -> delete_item_return().
delete_item(Table, Key) ->
    delete_item(Table, Key, [], default_config()).
%% DeleteItem with default configuration.
-spec delete_item(table_name(), key(), delete_item_opts()) -> delete_item_return().
delete_item(Table, Key, Opts) ->
    delete_item(Table, Key, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Delete item with hash key "Mingus" and range key 200 from
%% "comp-table" if the "status" field is "shopping". Return all old
%% values.
%%
%% `
%% {ok, OldValues} = erlcloud_ddb:delete_item(<<"comp-table">>, {"Mingus", 200},
%% [{return_values, all_old},
%% {expected, {<<"status">>, "shopping"}}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a DeleteItem request; the simple return is the old item's
%% attributes, defaulting to {ok, []} when none are returned.
-spec delete_item(table_name(), key(), delete_item_opts(), aws_config()) -> delete_item_return().
delete_item(Table, Key, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(delete_item_opts(), Opts),
    Return = erlcloud_ddb1:delete_item(Table, dynamize_key(Key), AwsOpts, Config),
    out(Return, fun(Json) -> undynamize_record(delete_item_record(), Json) end, DdbOpts,
        #ddb_delete_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% DeleteTable
%%%------------------------------------------------------------------------------
%% Field table for decoding a DeleteTable response.
%% Fix: the field index must come from #ddb_delete_table (the record being
%% built and the one delete_table/3 extracts from), not #ddb_create_table.
%% The original only worked if the two records happened to share layout.
-spec delete_table_record() -> record_desc().
delete_table_record() ->
    {#ddb_delete_table{},
     [{<<"TableDescription">>, #ddb_delete_table.table_description,
       fun(V) -> undynamize_record(table_description_record(), V) end}
     ]}.
-type delete_table_return() :: ddb_return(#ddb_delete_table{}, #ddb_table_description{}).
%% DeleteTable with default options and configuration.
-spec delete_table(table_name()) -> delete_table_return().
delete_table(Table) ->
    delete_table(Table, [], default_config()).
%% DeleteTable with default configuration.
-spec delete_table(table_name(), ddb_opts()) -> delete_table_return().
delete_table(Table, Opts) ->
    delete_table(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Delete "Table1".
%%
%% `
%% {ok, _} = erlcloud_ddb:delete_table(<<"Table1">>)
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a DeleteTable request; the simple return is the table description.
-spec delete_table(table_name(), ddb_opts(), aws_config()) -> delete_table_return().
delete_table(Table, Opts, Config) ->
    %% This operation defines no top-level AWS options; only local opts.
    {[], DdbOpts} = opts([], Opts),
    Return = erlcloud_ddb1:delete_table(Table, Config),
    out(Return, fun(Json) -> undynamize_record(delete_table_record(), Json) end, DdbOpts,
        #ddb_delete_table.table_description).
%%%------------------------------------------------------------------------------
%%% DescribeTable
%%%------------------------------------------------------------------------------
%% Field table for decoding a Table JSON object.
-spec table_record() -> record_desc().
table_record() ->
    {#ddb_table{},
     [{<<"CreationDateTime">>, #ddb_table.creation_date_time, fun id/1},
      {<<"ItemCount">>, #ddb_table.item_count, fun id/1},
      {<<"KeySchema">>, #ddb_table.key_schema, fun(V) -> undynamize_key_schema(V) end},
      {<<"ProvisionedThroughput">>, #ddb_table.provisioned_throughput,
       fun(V) -> undynamize_record(provisioned_throughput_record(), V) end},
      {<<"TableName">>, #ddb_table.table_name, fun id/1},
      {<<"TableSizeBytes">>, #ddb_table.table_size_bytes, fun id/1},
      {<<"TableStatus">>, #ddb_table.table_status, fun id/1}
     ]}.
%% Field table for decoding a DescribeTable response.
-spec describe_table_record() -> record_desc().
describe_table_record() ->
    {#ddb_describe_table{},
     [{<<"Table">>, #ddb_describe_table.table, fun(V) -> undynamize_record(table_record(), V) end}
     ]}.
-type describe_table_return() :: ddb_return(#ddb_describe_table{}, #ddb_table{}).
%% DescribeTable with default options and configuration.
-spec describe_table(table_name()) -> describe_table_return().
describe_table(Table) ->
    describe_table(Table, [], default_config()).
%% DescribeTable with default configuration.
-spec describe_table(table_name(), ddb_opts()) -> describe_table_return().
describe_table(Table, Opts) ->
    describe_table(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Describe "Table1".
%%
%% `
%% {ok, Table} = erlcloud_ddb:describe_table(<<"Table1">>)
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a DescribeTable request; the simple return is the table record.
-spec describe_table(table_name(), ddb_opts(), aws_config()) -> describe_table_return().
describe_table(Table, Opts, Config) ->
    %% This operation defines no top-level AWS options; only local opts.
    {[], DdbOpts} = opts([], Opts),
    Return = erlcloud_ddb1:describe_table(Table, Config),
    out(Return, fun(Json) -> undynamize_record(describe_table_record(), Json) end, DdbOpts,
        #ddb_describe_table.table).
%%%------------------------------------------------------------------------------
%%% GetItem
%%%------------------------------------------------------------------------------
%% Field table for decoding a GetItem response.
-spec get_item_record() -> record_desc().
get_item_record() ->
    {#ddb_get_item{},
     [{<<"Item">>, #ddb_get_item.item, fun undynamize_item/1},
      {<<"ConsumedCapacityUnits">>, #ddb_get_item.consumed_capacity_units, fun id/1}
     ]}.
%% GetItem with default options and configuration.
-spec get_item(table_name(), key()) -> item_return().
get_item(Table, Key) ->
    get_item(Table, Key, [], default_config()).
%% GetItem with default configuration.
-spec get_item(table_name(), key(), get_item_opts()) -> item_return().
get_item(Table, Key, Opts) ->
    get_item(Table, Key, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Get attributes "status" and "friends" from the item with hash key
%% "Julie" and range key 1307654345 in the table "comptable" using a
%% consistent read.
%%
%% `
%% {ok, Item} = erlcloud_ddb:get_item(<<"comptable">>, {"Julie", 1307654345},
%% [consistent_read,
%% {attributes_to_get, [<<"status">>, <<"friends">>]}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a GetItem request; the simple return is the item's attributes,
%% defaulting to {ok, []} when the item is absent.
-spec get_item(table_name(), key(), get_item_opts(), aws_config()) -> item_return().
get_item(Table, Key, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(get_item_opts(), Opts),
    Return = erlcloud_ddb1:get_item(Table, dynamize_key(Key), AwsOpts, Config),
    out(Return, fun(Json) -> undynamize_record(get_item_record(), Json) end, DdbOpts,
        #ddb_get_item.item, {ok, []}).
%%%------------------------------------------------------------------------------
%%% ListTables
%%%------------------------------------------------------------------------------
%% Options accepted by ListTables (paging via limit + exclusive start name).
-type list_tables_opt() :: {limit, pos_integer()} |
                           {exclusive_start_table_name, binary()} |
                           out_opt().
-type list_tables_opts() :: [list_tables_opt()].
-spec list_tables_opts() -> opt_table().
list_tables_opts() ->
    [{limit, <<"Limit">>, fun id/1},
     {exclusive_start_table_name, <<"ExclusiveStartTableName">>, fun id/1}].
%% Field table for decoding a ListTables response.
-spec list_tables_record() -> record_desc().
list_tables_record() ->
    {#ddb_list_tables{},
     [{<<"TableNames">>, #ddb_list_tables.table_names, fun id/1},
      {<<"LastEvaluatedTableName">>, #ddb_list_tables.last_evaluated_table_name, fun id/1}
     ]}.
-type list_tables_return() :: ddb_return(#ddb_list_tables{}, [table_name()]).
%% ListTables with default options and configuration.
-spec list_tables() -> list_tables_return().
list_tables() ->
    list_tables([], default_config()).
%% ListTables with default configuration.
-spec list_tables(list_tables_opts()) -> list_tables_return().
list_tables(Opts) ->
    list_tables(Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Get the next 3 table names after "comp2".
%%
%% `
%% {ok, TableNames} = erlcloud_ddb:list_tables([{limit, 3}, {exclusive_start_table_name, <<"comp2">>}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a ListTables request; the simple return is the list of table
%% names, defaulting to {ok, []} when none are returned.
-spec list_tables(list_tables_opts(), aws_config()) -> list_tables_return().
list_tables(Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(list_tables_opts(), Opts),
    Return = erlcloud_ddb1:list_tables(AwsOpts, Config),
    out(Return, fun(Json) -> undynamize_record(list_tables_record(), Json) end, DdbOpts,
        #ddb_list_tables.table_names, {ok, []}).
%%%------------------------------------------------------------------------------
%%% PutItem
%%%------------------------------------------------------------------------------
%% Options accepted by PutItem.
-type put_item_opt() :: {expected, in_expected()} |
                        {return_values, none | all_old} |
                        out_opt().
-type put_item_opts() :: [put_item_opt()].
-spec put_item_opts() -> opt_table().
put_item_opts() ->
    [{expected, <<"Expected">>, fun dynamize_expected/1},
     {return_values, <<"ReturnValues">>, fun dynamize_return_value/1}].
%% Field table for decoding a PutItem response.
-spec put_item_record() -> record_desc().
put_item_record() ->
    {#ddb_put_item{},
     [{<<"Attributes">>, #ddb_put_item.attributes, fun undynamize_item/1},
      {<<"ConsumedCapacityUnits">>, #ddb_put_item.consumed_capacity_units, fun id/1}
     ]}.
-type put_item_return() :: ddb_return(#ddb_put_item{}, out_item()).
%% PutItem with default options and configuration.
-spec put_item(table_name(), in_item()) -> put_item_return().
put_item(Table, Item) ->
    put_item(Table, Item, [], default_config()).
%% PutItem with default configuration.
-spec put_item(table_name(), in_item(), put_item_opts()) -> put_item_return().
put_item(Table, Item, Opts) ->
    put_item(Table, Item, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Put item with attributes "time" of 300, "feeling" of "not
%% surprised" and "user" of "Riley" into table "comp5", but only if an
%% item with the same key exists and has field "feeling" set to
%% "surprised". Return all the old attributes.
%%
%% `
%% {ok, OldItem} = erlcloud_ddb:put_item(<<"comp5">>,
%% [{<<"time">>, 300},
%% {<<"feeling">>, <<"not surprised">>},
%% {<<"user">>, <<"Riley">>}],
%% [{return_values, all_old},
%% {expected, {<<"feeling">>, <<"surprised">>}}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a PutItem request; the simple return is the old item's
%% attributes, defaulting to {ok, []} when none are returned.
-spec put_item(table_name(), in_item(), put_item_opts(), aws_config()) -> put_item_return().
put_item(Table, Item, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(put_item_opts(), Opts),
    Return = erlcloud_ddb1:put_item(Table, dynamize_item(Item), AwsOpts, Config),
    out(Return, fun(Json) -> undynamize_record(put_item_record(), Json) end, DdbOpts,
        #ddb_put_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% Query
%%%------------------------------------------------------------------------------
-type json_range_key_condition() :: jsx:json_term().
%% A range key condition: one value with a comparison operator, or a
%% pair of values with `between'.
-type range_key_condition() :: {in_attr_value(), comparison_op()} |
                               {{in_attr_value(), in_attr_value()}, between}.
%% Convert a range key condition to the AttributeValueList/
%% ComparisonOperator JSON object DynamoDB expects.
-spec dynamize_range_key_condition(range_key_condition()) -> json_range_key_condition().
dynamize_range_key_condition({{Value1, Value2}, between}) ->
    [{<<"AttributeValueList">>, [[dynamize_value(Value1)], [dynamize_value(Value2)]]},
     dynamize_comparison(between)];
dynamize_range_key_condition({Value, Comparison}) ->
    [{<<"AttributeValueList">>, [[dynamize_value(Value)]]}, dynamize_comparison(Comparison)].
%% Options accepted by Query.
-type q_opt() :: {attributes_to_get, [binary()]} |
                 {limit, pos_integer()} |
                 boolean_opt(consistent_read) |
                 boolean_opt(count) |
                 {range_key_condition, range_key_condition()} |
                 boolean_opt(scan_index_forward) |
                 {exclusive_start_key, key() | undefined} |
                 out_opt().
-type q_opts() :: [q_opt()].
-spec q_opts() -> opt_table().
q_opts() ->
    [{attributes_to_get, <<"AttributesToGet">>, fun id/1},
     {limit, <<"Limit">>, fun id/1},
     {consistent_read, <<"ConsistentRead">>, fun id/1},
     {count, <<"Count">>, fun id/1},
     {range_key_condition, <<"RangeKeyCondition">>, fun dynamize_range_key_condition/1},
     {scan_index_forward, <<"ScanIndexForward">>, fun id/1},
     {exclusive_start_key, <<"ExclusiveStartKey">>, fun(V) -> erlcloud_ddb1:key_value(dynamize_key(V)) end}].
%% Field table for decoding a Query response.
-spec q_record() -> record_desc().
q_record() ->
    {#ddb_q{},
     [{<<"Items">>, #ddb_q.items, fun(V) -> [undynamize_item(I) || I <- V] end},
      {<<"Count">>, #ddb_q.count, fun id/1},
      {<<"LastEvaluatedKey">>, #ddb_q.last_evaluated_key, fun undynamize_key/1},
      {<<"ConsumedCapacityUnits">>, #ddb_q.consumed_capacity_units, fun id/1}
     ]}.
-type q_return() :: ddb_return(#ddb_q{}, [out_item()]).
%% Query with default options and configuration.
-spec q(table_name(), hash_key()) -> q_return().
q(Table, HashKey) ->
    q(Table, HashKey, [], default_config()).
%% Query with default configuration.
-spec q(table_name(), hash_key(), q_opts()) -> q_return().
q(Table, HashKey, Opts) ->
    q(Table, HashKey, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Get up to 2 items with hash key "John" and with range keys coming
%% before "The Matrix" from table "1-hash-rangetable".
%%
%% `
%% {ok, Items} = erlcloud_ddb:q(<<"1-hash-rangetable">>, <<"John">>,
%% [{exclusive_start_key, {{s, <<"John">>}, {s, <<"The Matrix">>}}},
%% {scan_index_forward, false},
%% {limit, 2}])
%% '
%% @end
%%------------------------------------------------------------------------------
%% Perform a Query request; the simple return is the matching items,
%% defaulting to {ok, []} when none are returned.
-spec q(table_name(), hash_key(), q_opts(), aws_config()) -> q_return().
q(Table, HashKey, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(q_opts(), Opts),
    Return = erlcloud_ddb1:q(Table, dynamize_key(HashKey), AwsOpts, Config),
    out(Return, fun(Json) -> undynamize_record(q_record(), Json) end, DdbOpts,
        #ddb_q.items, {ok, []}).
%%%------------------------------------------------------------------------------
%%% Scan
%%%------------------------------------------------------------------------------
-type scan_filter_item() :: {attr_name(), [in_attr_value()], in} |
{attr_name(), {in_attr_value(), in_attr_value()}, between} |
{attr_name(), in_attr_value(), comparison_op()}.
-type scan_filter() :: maybe_list(scan_filter_item()).
%% Convert one scan-filter condition to the JSON pair DynamoDB expects:
%% {AttrName, [{"AttributeValueList", Values}, ComparisonOperatorPair]}.
%% Clause order matters: 'in' and 'between' are matched before the
%% catch-all single-value comparison clause.
-spec dynamize_scan_filter_item(scan_filter_item()) -> json_pair().
dynamize_scan_filter_item({Name, AttrValueList, in}) ->
%% 'in' carries a list of candidate values.
{Name, [{<<"AttributeValueList">>, [[dynamize_value(A)] || A <- AttrValueList]},
dynamize_comparison(in)]};
dynamize_scan_filter_item({Name, {AttrValue1, AttrValue2}, between}) ->
%% 'between' carries exactly two bounds.
{Name, [{<<"AttributeValueList">>, [[dynamize_value(AttrValue1)], [dynamize_value(AttrValue2)]]},
dynamize_comparison(between)]};
dynamize_scan_filter_item({Name, AttrValue, Op}) ->
%% Any other comparison operator carries a single value.
{Name, [{<<"AttributeValueList">>, [[dynamize_value(AttrValue)]]},
dynamize_comparison(Op)]}.
%% Accepts either a single filter item or a list of them.
-spec dynamize_scan_filter(scan_filter()) -> [json_pair()].
dynamize_scan_filter(Filter) ->
dynamize_maybe_list(fun dynamize_scan_filter_item/1, Filter).
-type scan_opt() :: {attributes_to_get, [binary()]} |
{limit, pos_integer()} |
boolean_opt(count) |
{scan_filter, scan_filter()} |
{exclusive_start_key, key()} |
out_opt().
-type scan_opts() :: [scan_opt()].
%% Option table for Scan: option atom -> DynamoDB field name + dynamizer.
-spec scan_opts() -> opt_table().
scan_opts() ->
[{attributes_to_get, <<"AttributesToGet">>, fun id/1},
{limit, <<"Limit">>, fun id/1},
{count, <<"Count">>, fun id/1},
{scan_filter, <<"ScanFilter">>, fun dynamize_scan_filter/1},
{exclusive_start_key, <<"ExclusiveStartKey">>, fun(V) -> erlcloud_ddb1:key_value(dynamize_key(V)) end}].
%% Decoder table for a Scan response onto #ddb_scan{}.
-spec scan_record() -> record_desc().
scan_record() ->
{#ddb_scan{},
[{<<"Items">>, #ddb_scan.items, fun(V) -> [undynamize_item(I) || I <- V] end},
{<<"Count">>, #ddb_scan.count, fun id/1},
{<<"ScannedCount">>, #ddb_scan.scanned_count, fun id/1},
{<<"LastEvaluatedKey">>, #ddb_scan.last_evaluated_key, fun undynamize_key/1},
{<<"ConsumedCapacityUnits">>, #ddb_scan.consumed_capacity_units, fun id/1}
]}.
-type scan_return() :: ddb_return(#ddb_scan{}, [out_item()]).
%% @doc Scan with no options, using the default AWS config.
-spec scan(table_name()) -> scan_return().
scan(Table) ->
scan(Table, [], default_config()).
%% @doc Scan with options, using the default AWS config.
-spec scan(table_name(), scan_opts()) -> scan_return().
scan(Table, Opts) ->
scan(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Return all items from table "comp5" with "time" greater than 400.
%%
%% `
%% {ok, Items} = erlcloud_ddb:scan(<<"comp5">>, [{scan_filter, [{<<"time">>, 400, gt}]}])
%% '
%% @end
%%------------------------------------------------------------------------------
-spec scan(table_name(), scan_opts(), aws_config()) -> scan_return().
scan(Table, Opts, Config) ->
%% AwsOpts go to DynamoDB; DdbOpts shape the local output.
{AwsOpts, DdbOpts} = opts(scan_opts(), Opts),
Return = erlcloud_ddb1:scan(Table, AwsOpts, Config),
out(Return, fun(Json) -> undynamize_record(scan_record(), Json) end, DdbOpts,
#ddb_scan.items, {ok, []}).
%%%------------------------------------------------------------------------------
%%% UpdateItem
%%%------------------------------------------------------------------------------
-type update_action() :: put | add | delete.
-type in_update() :: {attr_name(), in_attr_value(), update_action()} | in_attr() | {attr_name(), delete}.
-type in_updates() :: maybe_list(in_update()).
-type json_update_action() :: {binary(), binary()}.
-type json_update() :: {attr_name(), [{binary(), json_attr_value()} | json_update_action()]}.
-spec dynamize_action(update_action()) -> json_update_action().
%% Translate an update action atom into the "Action" JSON pair DynamoDB
%% expects: put -> PUT, add -> ADD, delete -> DELETE.
dynamize_action(Action) when Action =:= put; Action =:= add; Action =:= delete ->
    %% The guard preserves the original function_clause error for unknown atoms.
    Verb = case Action of
               put -> <<"PUT">>;
               add -> <<"ADD">>;
               delete -> <<"DELETE">>
           end,
    {<<"Action">>, Verb}.
-spec dynamize_update(in_update()) -> json_update().
%% Convert one attribute update to its JSON form. Clause order matters:
%% {Name, delete} must match before the generic {Name, Value} clause, so an
%% attribute value that is literally the atom 'delete' cannot be expressed
%% via the 2-tuple form.
dynamize_update({Name, Value, Action}) ->
{Name, [{<<"Value">>, [dynamize_value(Value)]}, dynamize_action(Action)]};
dynamize_update({Name, delete}) ->
{Name, [dynamize_action(delete)]};
dynamize_update({Name, Value}) ->
%% Uses the default action of put
{Name, [{<<"Value">>, [dynamize_value(Value)]}]}.
%% Accepts either a single update or a list of updates.
-spec dynamize_updates(in_updates()) -> [json_update()].
dynamize_updates(Updates) ->
dynamize_maybe_list(fun dynamize_update/1, Updates).
-type update_item_opt() :: {expected, in_expected()} |
{return_values, return_value()} |
out_opt().
-type update_item_opts() :: [update_item_opt()].
%% Option table for UpdateItem: option atom -> DynamoDB field name + dynamizer.
-spec update_item_opts() -> opt_table().
update_item_opts() ->
[{expected, <<"Expected">>, fun dynamize_expected/1},
{return_values, <<"ReturnValues">>, fun dynamize_return_value/1}].
%% Decoder table for an UpdateItem response onto #ddb_update_item{}.
-spec update_item_record() -> record_desc().
update_item_record() ->
{#ddb_update_item{},
[{<<"Attributes">>, #ddb_update_item.attributes, fun undynamize_item/1},
{<<"ConsumedCapacityUnits">>, #ddb_update_item.consumed_capacity_units, fun id/1}
]}.
-type update_item_return() :: ddb_return(#ddb_update_item{}, out_item()).
%% @doc UpdateItem with no options, using the default AWS config.
-spec update_item(table_name(), key(), in_updates()) -> update_item_return().
update_item(Table, Key, Updates) ->
update_item(Table, Key, Updates, [], default_config()).
%% @doc UpdateItem with options, using the default AWS config.
-spec update_item(table_name(), key(), in_updates(), update_item_opts()) -> update_item_return().
update_item(Table, Key, Updates, Opts) ->
update_item(Table, Key, Updates, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Update item with hash key "Julie" and range key 1307654350 in table
%% "comp5" by changing the status from "offline" to "online" and
%% return the new item.
%%
%% `
%% {ok, NewItem} = erlcloud_ddb:update_item(<<"comp5">>, {"Julie", 1307654350},
%% [{<<"status">>, <<"online">>, put}],
%% [{return_values, all_new},
%% {expected, {<<"status">>, "offline"}}])
%% '
%% @end
%%------------------------------------------------------------------------------
-spec update_item(table_name(), key(), in_updates(), update_item_opts(), aws_config()) -> update_item_return().
update_item(Table, Key, Updates, Opts, Config) ->
%% AwsOpts go into the request; DdbOpts shape the local output.
{AwsOpts, DdbOpts} = opts(update_item_opts(), Opts),
Return = erlcloud_ddb1:update_item(Table, dynamize_key(Key), dynamize_updates(Updates),
AwsOpts, Config),
%% {ok, []} is the simple-output default when no attributes are returned.
out(Return, fun(Json) -> undynamize_record(update_item_record(), Json) end, DdbOpts,
#ddb_update_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% UpdateTable
%%%------------------------------------------------------------------------------
%% Decoder table for an UpdateTable response onto #ddb_update_table{}.
-spec update_table_record() -> record_desc().
update_table_record() ->
    {#ddb_update_table{},
     %% Fix: index the field via #ddb_update_table, matching the record
     %% instance above (was mistakenly #ddb_create_table.table_description).
     [{<<"TableDescription">>, #ddb_update_table.table_description,
       fun(V) -> undynamize_record(table_description_record(), V) end}
     ]}.
-type update_table_return() :: ddb_return(#ddb_update_table{}, #ddb_table_description{}).
%% @doc UpdateTable with no options, using the default AWS config.
-spec update_table(table_name(), non_neg_integer(), non_neg_integer()) -> update_table_return().
update_table(Table, ReadUnits, WriteUnits) ->
update_table(Table, ReadUnits, WriteUnits, [], default_config()).
%% @doc UpdateTable with local output options, using the default AWS config.
-spec update_table(table_name(), non_neg_integer(), non_neg_integer(), ddb_opts()) -> update_table_return().
update_table(Table, ReadUnits, WriteUnits, Opts) ->
update_table(Table, ReadUnits, WriteUnits, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%%
%% ===Example===
%%
%% Update table "comp1" to have provisioned capacity of 5 read units and 15 write units.
%%
%% `
%% {ok, _} = erlcloud_ddb:update_table(<<"comp1">>, 5, 15)
%% '
%% @end
%%------------------------------------------------------------------------------
-spec update_table(table_name(), non_neg_integer(), non_neg_integer(), ddb_opts(), aws_config())
-> update_table_return().
update_table(Table, ReadUnits, WriteUnits, Opts, Config) ->
{[], DdbOpts} = opts([], Opts),
Return = erlcloud_ddb1:update_table(Table, ReadUnits, WriteUnits, Config),
out(Return, fun(Json) -> undynamize_record(update_table_record(), Json) end, DdbOpts,
#ddb_update_table.table_description). | src/erlcloud_ddb.erl | 0.599368 | 0.417806 | erlcloud_ddb.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(commit_hooks_SUITE).
-compile({parse_transform, lager_transform}).
%% common_test callbacks
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
%% tests
-export([register_hook_test/1,
execute_hook_test/1,
execute_post_hook_test/1,
execute_prehooks_static_txn_test/1,
execute_posthooks_static_txn_test/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("kernel/include/inet.hrl").
-define(ADDRESS, "localhost").
-define(PORT, 10017).
%% Suite setup: initialize the common-test environment, start the clusters,
%% and stash the nodes of the first cluster in Config for the test cases.
init_per_suite(Config) ->
test_utils:at_init_testsuite(),
Clusters = test_utils:set_up_clusters_common(Config),
Nodes = hd(Clusters),
[{nodes, Nodes}|Config].
end_per_suite(Config) ->
Config.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_, _) ->
ok.
%% All test cases in this suite.
all() -> [register_hook_test,
execute_hook_test,
execute_post_hook_test,
execute_prehooks_static_txn_test,
execute_posthooks_static_txn_test].
%% Registering a pre-commit hook pointing at a module/function that does not
%% exist must be rejected; a valid post-commit hook must round-trip through
%% get_hooks/2 and be removable via unregister_hook/2.
register_hook_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
Node = hd(Nodes),
Bucket = hook_bucket,
%% hooks_module:hooks_function is presumably not loaded on the node, so
%% registration is expected to fail.
Response=rpc:call(Node, antidote, register_pre_hook,
[Bucket, hooks_module, hooks_function]),
?assertMatch({error, _}, Response),
ok=rpc:call(Node, antidote_hooks, register_post_hook,
[Bucket, antidote_hooks, test_commit_hook]),
Result1 = rpc:call(Node, antidote_hooks, get_hooks,
[post_commit, Bucket]),
?assertMatch({antidote_hooks, test_commit_hook}, Result1),
ok=rpc:call(Node, antidote, unregister_hook,
[post_commit, Bucket]),
%% After unregistering, the lookup must come back empty.
Result2 = rpc:call(Node, antidote_hooks, get_hooks,
[post_commit, Bucket]),
?assertMatch(undefined, Result2).
%% Test pre-commit hook: test_increment_hook is expected to bump the written
%% value, so an update of +1 reads back as 2 in a follow-up transaction.
execute_hook_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
Node = hd(Nodes),
Bucket = hook_bucket1,
ok = rpc:call(Node, antidote, register_pre_hook,
[Bucket, antidote_hooks, test_increment_hook]),
Bound_object = {hook_key, antidote_crdt_counter_pn, Bucket},
{ok, TxId} = rpc:call(Node, antidote, start_transaction, [ignore, []]),
ct:print("Txid ~p", [TxId]),
ok = rpc:call(Node, antidote, update_objects, [[{Bound_object, increment, 1}], TxId]),
{ok, CT} = rpc:call(Node, antidote, commit_transaction, [TxId]),
%% Read in a new transaction that starts after the commit snapshot CT.
{ok, TxId2} = rpc:call(Node, antidote, start_transaction, [CT, []]),
Res = rpc:call(Node, antidote, read_objects, [[Bound_object], TxId2]),
rpc:call(Node, antidote, commit_transaction, [TxId2]),
?assertMatch({ok, [2]}, Res).
%% Test post-commit hook: test_post_hook is expected to record the commit by
%% incrementing a counter in the 'commitcount' bucket.
execute_post_hook_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
Node = hd(Nodes),
Bucket = hook_bucket2,
ok = rpc:call(Node, antidote, register_post_hook,
[Bucket, antidote_hooks, test_post_hook]),
Bound_object = {post_hook_key, antidote_crdt_counter_pn, Bucket},
{ok, TxId} = rpc:call(Node, antidote, start_transaction, [ignore, []]),
ok = rpc:call(Node, antidote, update_objects, [[{Bound_object, increment, 1}], TxId]),
{ok, CT} = rpc:call(Node, antidote, commit_transaction, [TxId]),
%% The hook's side effect is observed under the same key in 'commitcount'.
CommitCount = {post_hook_key, antidote_crdt_counter_pn, commitcount},
{ok, TxId2} = rpc:call(Node, antidote, start_transaction, [CT, []]),
Res = rpc:call(Node, antidote, read_objects, [[CommitCount], TxId2]),
rpc:call(Node, antidote, commit_transaction, [TxId2]),
?assertMatch({ok, [1]}, Res).
%% Same pre-commit hook behavior, but exercised through the static
%% (single-call) transaction API update_objects/3.
execute_prehooks_static_txn_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
Node = hd(Nodes),
Bucket = hook_bucket3,
ok = rpc:call(Node, antidote, register_pre_hook,
[Bucket, antidote_hooks, test_increment_hook]),
Bound_object = {prehook_key, antidote_crdt_counter_pn, Bucket},
{ok, CT} = rpc:call(Node, antidote, update_objects, [ignore, [], [{Bound_object, increment, 1}]]),
{ok, TxId2} = rpc:call(Node, antidote, start_transaction, [CT, []]),
Res = rpc:call(Node, antidote, read_objects, [[Bound_object], TxId2]),
rpc:call(Node, antidote, commit_transaction, [TxId2]),
?assertMatch({ok, [2]}, Res).
execute_posthooks_static_txn_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
Node = hd(Nodes),
Bucket = hook_bucket4,
ok = rpc:call(Node, antidote, register_post_hook,
[Bucket, antidote_hooks, test_post_hook]),
Bound_object = {posthook_static_key, antidote_crdt_counter_pn, Bucket},
{ok, CT} = rpc:call(Node, antidote, update_objects, [ignore, [], [{Bound_object, increment, 1}]]),
CommitCount = {posthook_static_key, antidote_crdt_counter_pn, commitcount},
{ok, TxId2} = rpc:call(Node, antidote, start_transaction, [CT, []]),
Res = rpc:call(Node, antidote, read_objects, [[CommitCount], TxId2]),
rpc:call(Node, antidote, commit_transaction, [TxId2]),
?assertMatch({ok, [1]}, Res). | test/commit_hooks_SUITE.erl | 0.530236 | 0.461441 | commit_hooks_SUITE.erl | starcoder |
% This file is licensed to you under the Apache License,
% Version 2.0 (the "License"); you may not use this file
% except in compliance with the License. You may obtain
% a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing,
% software distributed under the License is distributed on an
% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
% KIND, either express or implied. See the License for the
% specific language governing permissions and limitations
% under the License.
%% @doc Third excercise: .
%% == Required Knowledge ==
%% <ul>
%% <li>Modules and functions</li>
%% <li>List functions</li>
%% <li>Basic Erlang Structures</li>
%% <li>Pattern Matching</li>
%% </ul>
%%
%% == Problem Statement ==
%% Create a 4-in-a-row game manager
%% <dl>
%% <dt>start() -> board().</dt><dd>Returns an empty board</dd>
%% <dt>play(Col, Board) -> won | drawn | {next, board()}.</dt>
%% <dd>Accepts a play and returns:
%% <dl>
%% <dt>won</dt><dd>the player won</dd>
%% <dt>drawn</dt><dd>the game ended in a draw</dd>
%% <dt>next</dt><dd>the game should keep going</dd>
%% </dl>
%% </dd>
%% </dl>
-module(fiar_core_SUITE).
-author('<EMAIL>').
-type config() :: [{atom(), term()}].
-export([all/0]).
-export([empty/1, invalid/1, drawn/1]).
-export([wins_vertically/1, wins_horizontally/1,
wins_right_diagonally/1, wins_left_diagonally/1]).
-export ([fail_board/1]).
%% @private
%% @doc Every exported arity-1 function except module_info/1 is a test case.
-spec all() -> [atom()].
all() ->
    lists:filtermap(
      fun({Name, 1}) when Name =/= module_info -> {true, Name};
         (_) -> false
      end,
      module_info(exports)).
%% @doc test play with an invalid column: playing outside the board must not
%% return a result; fiar_core is expected to throw invalid_column.
-spec fail_board(config()) -> ok.
fail_board(_Config) ->
EmptyBoard = fiar_core:start(),
try fiar_core:play(8, EmptyBoard) of
%% If play/2 returns anything at all, force a badmatch to fail the test.
Result -> no_result = Result
catch
%% A bare pattern in a catch clause matches class 'throw' only.
invalid_column -> ok
end.
%% @doc start returns an empty board: every one of the 7 columns accepts a
%% first chip and the game keeps going ({next, _}) after each such play.
-spec empty(config()) -> ok.
empty(_Config) ->
    Board = fiar_core:start(),
    %% Each play is made against the same empty board; a badmatch here
    %% fails the test.
    _ = [{next, _} = fiar_core:play(Col, Board) || Col <- lists:seq(1, 7)],
    ok.
%% @doc no column accepts more than 7 chips
-spec invalid(config()) -> ok.
invalid(_Config) ->
EmptyBoard = fiar_core:start(),
%% Fill column 1 completely (7 chips)...
BoardAfter7 = drop_chips(7, 1, EmptyBoard),
%% ...then the 8th chip must be rejected with an invalid_column throw.
try fiar_core:play(1, BoardAfter7) of
Result ->
no_result = Result
catch
_:Ex ->
invalid_column = Ex
end,
ok.
%% @doc when the board is full, play should always return drawn
-spec drawn(_Config) -> ok.
drawn(_Config) ->
EmptyBoard = fiar_core:start(),
%% almost_fill_board/1 leaves exactly one free slot (top of column 3);
%% the final chip is expected to end the game in a draw.
FullBoard = almost_fill_board(EmptyBoard),
{drawn, _} = fiar_core:play(3, FullBoard),
ok.
%% @doc when the player puts 4 chips in a vertical row, wins
-spec wins_vertically(config()) -> ok.
wins_vertically(_Config) ->
EmptyBoard = fiar_core:start(),
%% Players alternate: player 1 stacks three chips in column 1, player 2
%% in column 2; player 1's fourth chip in column 1 wins vertically.
CheckMateBoard = drop_chips([1, 2, 1, 2, 1, 2], EmptyBoard),
{won, _} = fiar_core:play(1, CheckMateBoard),
ok.
%% @doc when the player puts 4 chips in a horizontal row, wins
-spec wins_horizontally(config()) -> ok.
wins_horizontally(_Config) ->
EmptyBoard = fiar_core:start(),
%% Player 1 occupies columns 2, 3, 4 on the bottom row (player 2 plays
%% 5, 6, 7); playing column 1 completes 1-2-3-4 horizontally.
CheckMateBoard = drop_chips([2, 5, 3, 6, 4, 7], EmptyBoard),
{won, _} = fiar_core:play(1, CheckMateBoard),
ok.
%% @doc when the player puts 4 chips in a diagonal row, wins
-spec wins_right_diagonally(config()) -> ok.
wins_right_diagonally(_Config) ->
EmptyBoard = fiar_core:start(),
%% The drop sequence builds a staircase so that the final play in column 1
%% is expected to complete a diagonal for the current player.
CheckMateBoard = drop_chips([4, 4, 4, 4, 6, 3, 3, 3, 2, 2, 7], EmptyBoard),
{won, _} = fiar_core:play(1, CheckMateBoard),
ok.
%% @doc when the player puts 4 chips in a diagonal row, wins
-spec wins_left_diagonally(config()) -> ok.
wins_left_diagonally(_Config) ->
EmptyBoard = fiar_core:start(),
%% Mirror image of the right-diagonal case; column 7 completes the diagonal.
CheckMateBoard = drop_chips([4, 4, 4, 4, 2, 5, 5, 5, 6, 6, 1], EmptyBoard),
{won, _} = fiar_core:play(7, CheckMateBoard),
ok.
%% @private
%% @doc fills all columns in the board except #3, which is left one chip short
%% NOTE(review): the fill order presumably avoids creating an accidental
%% four-in-a-row mid-fill -- confirm against fiar_core's win rules.
almost_fill_board(Board) ->
WithColsExcept3 = lists:foldl(fun fill_column/2, Board, [1, 4, 2, 5, 6, 7]),
drop_chips(6, 3, WithColsExcept3).
%% @private
%% @doc Fill one column to its 7-chip capacity.
fill_column(Col, Board) -> drop_chips(7, Col, Board).
%% @private
%% @doc Drop N chips into column Col, expecting each play to keep the game
%% going ({next, _}).
drop_chips(0, _Col, Board) -> Board;
drop_chips(N, Col, Board) ->
%% Matching the whole 3-tuple keeps Col and Board visible in the badmatch
%% error report if a play unexpectedly ends the game.
{Col, Board, {next, NextBoard}} = {Col, Board, fiar_core:play(Col, Board)},
drop_chips(N-1, Col, NextBoard).
%% @private
drop_chips([], Board) -> Board;
drop_chips([Col|Rest], Board) ->
{Col, Board, {next, NextBoard}} = {Col, Board, fiar_core:play(Col, Board)},
drop_chips(Rest, NextBoard). | test/fiar_core_SUITE.erl | 0.553988 | 0.488832 | fiar_core_SUITE.erl | starcoder |
%% @doc Node data structure for A* search.
%% @author <NAME> <<EMAIL>>
%% @copyright <NAME> 2010, released under the MIT license.
%% @version 0.0.1
-module(astar_node).
-export([new_node/0, new_node/1, extract_attribute/2, set_attribute/3, add_cost_estimates/2, add_cost_estimate/2]).
%% Copyright (c) 2010 <NAME>
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% @type node() = {node, NodeData}. One node in the A* search tree. NodeData generally contains <ul>
%% <li>the state the current node represents;</li>
%% <li>the estimated total cost of the optimal path from the start node, passing through this node, and ending at a goal node;</li>
%% <li>and the estimated "remaining" cost, that is, the cost of the optimal path from this node to a goal node.</li>
%% </ul>
%% You can also, using {@link set_attribute/3. 'set_attribute/3'}, set an arbitrary key-value pair on the node. One possible use for this is recording details about the path to this node.
%% NodeData is currently implemented with a dictionary from the {@link dict} module but you shouldn't count on that. Use only {@link set_attribute/3.} and {@link get_attribute/2.} to manipulate its contents, and {@link new_node/0.} or {@link new_node/1.} to create nodes.
%% @spec new_node() -> node()
%% @equiv new_node([])
new_node() -> new_node([]).
%% @spec new_node(Proplist::[{Key, Value}]) -> node()
%% @doc Builds a node whose attribute store is seeded from Proplist.
new_node(Proplist) ->
    {node, dict:from_list(Proplist)}.
%% @spec set_attribute(Attribute::atom(), Value::any(), Node::node()) -> Node2::node()
%% @doc Returns a copy of Node with Attribute (re)bound to Value.
set_attribute(Attribute, Value, {node, Dict}) ->
    Updated = dict:store(Attribute, Value, Dict),
    {node, Updated}.
%% @spec extract_attribute(Attribute::atom(), Node::node()) -> any()
%% @doc Returns the value stored in Node at Attribute. A missing key is a
%% programmer error and crashes with badmatch.
extract_attribute(Attribute, {node, Dict}) ->
    {ok, Value} = dict:find(Attribute, Dict),
    Value.
%% Annotate every node in Nodes with cost estimates derived from
%% RemainingCostEstimateFun (see add_cost_estimate/2).
add_cost_estimates(Nodes, RemainingCostEstimateFun) ->
    [add_cost_estimate(Node, RemainingCostEstimateFun) || Node <- Nodes].
%% Compute and store both cost estimates on Node:
%%   remaining_cost_estimate - heuristic cost from Node to a goal node
%%   total_cost_estimate     - path cost so far plus the remaining estimate
%% Assumes path_cost has already been set on the node -- extract_path_cost/1
%% crashes otherwise.
add_cost_estimate(Node, RemainingCostEstimateFun) ->
RemainingCostEstimate = RemainingCostEstimateFun(Node),
Node2 = set_remaining_cost_estimate(RemainingCostEstimate, Node),
TotalCostEstimate = RemainingCostEstimate + extract_path_cost(Node2),
set_total_cost_estimate(TotalCostEstimate, Node2).
%% Attribute accessors for the estimates and the accumulated path cost.
set_remaining_cost_estimate(Estimate, Node) ->
astar_node:set_attribute(remaining_cost_estimate, Estimate, Node).
extract_path_cost(Node) -> astar_node:extract_attribute(path_cost, Node).
set_total_cost_estimate(Estimate, Node) ->
astar_node:set_attribute(total_cost_estimate, Estimate, Node). | astar_node.erl | 0.719975 | 0.493592 | astar_node.erl | starcoder |
%%%-----------------------------------------------------------------------------
%%% @doc blockchain_poc_path_v3 implementation.
%%%
%%% The way paths are built depends solely on witnessing data we have accumulated
%%% in the blockchain ledger.
%%%
%%% Consider X having [A, B, C, D] as its geographic neighbors but only
%%% having [A, C, E] as it's transmission witnesses. It stands to reason
%%% that we would expect packets from X -> any[A, C, E] to work with relatively
%%% high confidence compared to its geographic neighbors. RF varies
%%% heavily depending on surroundings therefore relying only on geographic
%%% vicinity is not enough to build potentially interesting paths.
%%%
%%% In order to build a path, we first find a target gateway using
%%% blockchain_poc_target_v3:target/3 and build a path outward from it.
%%%
%%% Once we have a target we recursively find a potential next hop from the target
%%% gateway by looking into its witness list.
%%%
%%% Before we calculate the probability associated with each witness in witness
%%% list, we filter out potentially useless paths, depending on the following filters:
%%% - Next hop witness must not be in the same hex index as the target
%%% - Every hop in the path must be unique
%%% - Every hop in the path must have a minimum exclusion distance
%%%
%%% The criteria for a potential next hop witness are biased like so:
%%% - P(WitnessRSSI) = Probability that the witness has a good (valid) RSSI.
%%% - P(WitnessTime) = Probability that the witness timestamp is not stale.
%%% - P(WitnessCount) = Probability that the witness is infrequent.
%%%
%%% The overall probability of picking a next witness is additive depending on
%%% chain var configurable weights for each one of the calculated probs.
%%% P(Witness) = RSSIWeight*P(WitnessRSSI) + TimeWeight*P(WitnessTime) + CountWeight*P(WitnessCount)
%%%
%%% We scale these probabilities and run an ICDF to select the witness from
%%% the witness list. Once we have a potential next hop, we simply do the same process
%%% for the next hop and continue building till the path limit is reached or there
%%% are no more witnesses to continue with.
%%%
%%%-----------------------------------------------------------------------------
-module(blockchain_poc_path_v3).
-export([
build/5
]).
-include("blockchain_utils.hrl").
-type path() :: [libp2p_crypto:pubkey_bin()].
-type prob_map() :: #{libp2p_crypto:pubkey_bin() => float()}.
%% @doc Build a path starting at `TargetPubkeyBin`.
%% The RNG is seeded deterministically from Hash, so all nodes evaluating the
%% same block build the same path.
-spec build(TargetPubkeyBin :: libp2p_crypto:pubkey_bin(),
Ledger :: blockchain:ledger(),
HeadBlockTime :: pos_integer(),
Hash :: binary(),
Vars :: map()) -> path().
build(TargetPubkeyBin, Ledger, HeadBlockTime, Hash, Vars) ->
TargetGw = find(TargetPubkeyBin, Ledger),
TargetGwLoc = blockchain_ledger_gateway_v2:location(TargetGw),
RandState = blockchain_utils:rand_state(Hash),
%% Seed the recursion with the target's location and pubkey; both lists
%% grow as hops are added.
build_(TargetPubkeyBin,
Ledger,
HeadBlockTime,
Vars,
RandState,
[TargetGwLoc],
[TargetPubkeyBin]).
%%%-------------------------------------------------------------------
%% Helpers
%%%-------------------------------------------------------------------
%% Recursively extend the path. Path is accumulated in reverse (target first,
%% newest hop at the head) and reversed on exit; Indices collects the h3
%% location of every hop for the exclusion filtering in filter_witnesses/5.
-spec build_(TargetPubkeyBin :: libp2p_crypto:pubkey_bin(),
Ledger :: blockchain:ledger(),
HeadBlockTime :: pos_integer(),
Vars :: map(),
RandState :: rand:state(),
Indices :: [h3:h3_index()],
Path :: path()) -> path().
build_(TargetPubkeyBin,
Ledger,
HeadBlockTime,
%% Only keep extending while below the chain-var path limit.
#{poc_path_limit := Limit} = Vars,
RandState,
Indices,
Path) when length(Path) < Limit ->
%% Try to find a next hop
{NewRandVal, NewRandState} = rand:uniform_s(RandState),
case next_hop(TargetPubkeyBin, Ledger, HeadBlockTime, Vars, NewRandVal, Indices) of
{error, no_witness} ->
%% No usable witness left; the path ends here.
lists:reverse(Path);
{ok, WitnessPubkeyBin} ->
%% Try the next hop in the new path, continue building forward
NextHopGw = find(WitnessPubkeyBin, Ledger),
Index = blockchain_ledger_gateway_v2:location(NextHopGw),
NewPath = [WitnessPubkeyBin | Path],
build_(WitnessPubkeyBin,
Ledger,
HeadBlockTime,
Vars,
NewRandState,
[Index | Indices],
NewPath)
end;
build_(_TargetPubkeyBin, _Ledger, _HeadBlockTime, _Vars, _RandState, _Indices, Path) ->
%% Path limit reached.
lists:reverse(Path).
%% Pick the next hop from the gateway's witness list via weighted sampling.
%% NOTE(review): the spec types GatewayBin as a gateway() although the call
%% sites pass a pubkey_bin -- confirm the intended type.
-spec next_hop(GatewayBin :: blockchain_ledger_gateway_v2:gateway(),
Ledger :: blockchain:ledger(),
HeadBlockTime :: pos_integer(),
Vars :: map(),
RandVal :: float(),
Indices :: [h3:h3_index()]) -> {error, no_witness} | {ok, libp2p_crypto:pubkey_bin()}.
next_hop(GatewayBin, Ledger, HeadBlockTime, Vars, RandVal, Indices) ->
%% Get gateway
Gateway = find(GatewayBin, Ledger),
case blockchain_ledger_gateway_v2:witnesses(Gateway) of
W when map_size(W) == 0 ->
{error, no_witness};
Witnesses ->
%% If this gateway has witnesses, it is implied that it's location cannot be undefined
GatewayLoc = blockchain_ledger_gateway_v2:location(Gateway),
%% Filter witnesses
FilteredWitnesses = filter_witnesses(GatewayLoc, Indices, Witnesses, Ledger, Vars),
%% Assign probabilities to filtered witnesses
%% P(WitnessRSSI) = Probability that the witness has a good (valid) RSSI.
PWitnessRSSI = rssi_probs(FilteredWitnesses, Vars),
%% P(WitnessTime) = Probability that the witness timestamp is not stale.
PWitnessTime = time_probs(HeadBlockTime, FilteredWitnesses, Vars),
%% P(WitnessCount) = Probability that the witness is infrequent.
PWitnessCount = witness_count_probs(FilteredWitnesses, Vars),
%% P(Witness) = RSSIWeight*P(WitnessRSSI) + TimeWeight*P(WitnessTime) + CountWeight*P(WitnessCount)
PWitness = witness_prob(Vars, PWitnessRSSI, PWitnessTime, PWitnessCount),
%% Scale probabilities assigned to filtered witnesses so they add up to 1 to do the selection
ScaledProbs = maps:to_list(scaled_prob(PWitness, Vars)),
%% Pick one
select_witness(ScaledProbs, RandVal, Vars)
end.
%% Normalize the per-witness probabilities so they sum to 1.0, making them
%% directly usable for inverse-CDF selection in select_witness/3.
-spec scaled_prob(PWitness :: prob_map(),
                  Vars :: map()) -> prob_map().
scaled_prob(PWitness, Vars) ->
    Total = lists:sum(maps:values(PWitness)),
    Normalize = fun(_WitnessPubkeyBin, P) -> ?normalize_float(P / Total, Vars) end,
    maps:map(Normalize, PWitness).
%% Combine the three per-witness probability maps into one, weighted by
%% chain-var configurable weights.
-spec witness_prob(Vars :: map(), PWitnessRSSI :: prob_map(), PWitnessTime :: prob_map(), PWitnessCount :: prob_map()) -> prob_map().
witness_prob(Vars, PWitnessRSSI, PWitnessTime, PWitnessCount) ->
%% P(Witness) = RSSIWeight*P(WitnessRSSI) + TimeWeight*P(WitnessTime) + CountWeight*P(WitnessCount)
maps:map(fun(WitnessPubkeyBin, PTime) ->
?normalize_float((time_weight(Vars) * PTime), Vars) +
?normalize_float(rssi_weight(Vars) * maps:get(WitnessPubkeyBin, PWitnessRSSI), Vars) +
?normalize_float(count_weight(Vars) * maps:get(WitnessPubkeyBin, PWitnessCount), Vars) +
%% NOTE: The randomness weight is always multiplied with a probability of 1.0
%% So we can do something like:
%% - Set all the other weights to 0.0
%% - Set randomness_wt to 1.0
%% Doing that would basically eliminate the other associated weights and
%% make each witness have equal 1.0 probability of getting picked as next hop
?normalize_float((randomness_wt(Vars) * 1.0), Vars)
end, PWitnessTime).
%% Probability that each witness has a good (valid) RSSI, derived from its
%% RSSI histogram.
-spec rssi_probs(Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
Vars :: map()) -> prob_map().
rssi_probs(Witnesses, _Vars) when map_size(Witnesses) == 1 ->
%% There is only a single witness, probability of picking it is 1
maps:map(fun(_, _) -> 1.0 end, Witnesses);
rssi_probs(Witnesses, Vars) ->
WitnessList = maps:to_list(Witnesses),
lists:foldl(fun({WitnessPubkeyBin, Witness}, Acc) ->
try
blockchain_ledger_gateway_v2:witness_hist(Witness)
of
RSSIs ->
SumRSSI = lists:sum(maps:values(RSSIs)),
%% NOTE(review): bucket 28 appears to be the "bad RSSI"
%% histogram bin -- confirm against the histogram encoding.
BadRSSI = maps:get(28, RSSIs, 0),
case {SumRSSI, BadRSSI} of
{0, _} ->
%% No RSSI but we have it in the witness list,
%% possibly because of next hop poc receipt.
maps:put(WitnessPubkeyBin, prob_no_rssi(Vars), Acc);
{_S, 0} ->
%% No known bad rssi value
maps:put(WitnessPubkeyBin, prob_good_rssi(Vars), Acc);
{S, S} ->
%% All bad RSSI values
maps:put(WitnessPubkeyBin, prob_bad_rssi(Vars), Acc);
{S, B} ->
%% Invert the "bad" probability
maps:put(WitnessPubkeyBin, ?normalize_float((1 - ?normalize_float(B/S, Vars)), Vars), Acc)
end
catch
error:no_histogram ->
maps:put(WitnessPubkeyBin, prob_no_rssi(Vars), Acc)
end
end, #{},
WitnessList).
%% Probability that each witness's last-seen timestamp is not stale, relative
%% to the head block time.
-spec time_probs(HeadBlockTime :: pos_integer(),
Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
Vars :: map()) -> prob_map().
time_probs(_, Witnesses, _Vars) when map_size(Witnesses) == 1 ->
%% There is only a single witness, probability of picking it is 1.0
maps:map(fun(_, _) -> 1.0 end, Witnesses);
time_probs(HeadBlockTime, Witnesses, Vars) ->
%% Delta = nanoseconds since the witness was last seen (witnesses never
%% seen get the full head-block time as their delta).
Deltas = lists:foldl(fun({WitnessPubkeyBin, Witness}, Acc) ->
case blockchain_ledger_gateway_v2:witness_recent_time(Witness) of
undefined ->
maps:put(WitnessPubkeyBin, nanosecond_time(HeadBlockTime), Acc);
T ->
maps:put(WitnessPubkeyBin, (nanosecond_time(HeadBlockTime) - T), Acc)
end
end, #{},
maps:to_list(Witnesses)),
DeltaSum = lists:sum(maps:values(Deltas)),
%% NOTE: Use inverse of the probabilities to bias against staler witnesses, hence the one minus
maps:map(fun(_WitnessPubkeyBin, Delta) ->
case ?normalize_float((1 - ?normalize_float(Delta/DeltaSum, Vars)), Vars) of
0.0 ->
%% There is only one
1.0;
X ->
X
end
end, Deltas).
%% Probability that each witness is infrequent: witnesses with fewer total
%% histogram entries (relative to the group) get a higher probability.
-spec witness_count_probs(Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
Vars :: map()) -> prob_map().
witness_count_probs(Witnesses, _Vars) when map_size(Witnesses) == 1 ->
%% only a single witness, probability = 1.0
maps:map(fun(_, _) -> 1.0 end, Witnesses);
witness_count_probs(Witnesses, Vars) ->
TotalRSSIs = maps:map(fun(_WitnessPubkeyBin, Witness) ->
RSSIs = blockchain_ledger_gateway_v2:witness_hist(Witness),
lists:sum(maps:values(RSSIs))
end,
Witnesses),
maps:map(fun(WitnessPubkeyBin, _Witness) ->
case maps:get(WitnessPubkeyBin, TotalRSSIs) of
0 ->
%% No RSSIs at all, default to 1.0
1.0;
S ->
%% Scale and invert this prob
?normalize_float((1 - ?normalize_float(S/lists:sum(maps:values(TotalRSSIs)), Vars)), Vars)
end
end, Witnesses).
%% Walk the scaled probability list, consuming Rnd until it dips below the
%% current witness's probability mass (inverse-CDF sampling).
-spec select_witness(WitnessProbs :: [{libp2p_crypto:pubkey_bin(), float()}],
                     Rnd :: float(),
                     Vars :: map()) -> {error, no_witness} | {ok, libp2p_crypto:pubkey_bin()}.
select_witness([], _Rnd, _Vars) ->
    {error, no_witness};
select_witness([{WitnessPubkeyBin, Prob} | Rest], Rnd, Vars) ->
    case Rnd - Prob of
        Remaining when Remaining >= 0 ->
            %% Not this witness; keep consuming probability mass.
            select_witness(Rest, ?normalize_float(Remaining, Vars), Vars);
        _Negative ->
            {ok, WitnessPubkeyBin}
    end.
-spec filter_witnesses(GatewayLoc :: h3:h3_index(),
                       Indices :: [h3:h3_index()],
                       Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                       Ledger :: blockchain:ledger(),
                       Vars :: map()) -> blockchain_ledger_gateway_v2:witnesses().
%% Drop witnesses that are stale, co-located with the challenged
%% gateway's parent cell, located at an already-visited index, too close
%% to any visited parent cell, or carrying a bad RSSI profile.
filter_witnesses(GatewayLoc, Indices, Witnesses, Ledger, Vars) ->
    {ok, Height} = blockchain_ledger_v1:current_height(Ledger),
    ParentRes = parent_res(Vars),
    ExclusionCells = exclusion_cells(Vars),
    %% Coarser-resolution (parent) cell of the challenged gateway.
    GatewayParent = h3:parent(GatewayLoc, ParentRes),
    ParentIndices = [h3:parent(Index, ParentRes) || Index <- Indices],
    maps:filter(fun(WitnessPubkeyBin, Witness) ->
                        WitnessGw = find(WitnessPubkeyBin, Ledger),
                        case is_witness_stale(WitnessGw, Height, Vars) of
                            true ->
                                false;
                            false ->
                                WitnessLoc = blockchain_ledger_gateway_v2:location(WitnessGw),
                                WitnessParent = h3:parent(WitnessLoc, ParentRes),
                                %% Don't include any witness located at an index we've
                                %% already visited (note: raw locations, not parents).
                                not(lists:member(WitnessLoc, Indices)) andalso
                                %% Don't include any witness whose parent is the same as
                                %% the gateway we're looking at.
                                (GatewayParent /= WitnessParent) andalso
                                %% Don't include any witness whose parent is too close to
                                %% any of the parent indices we've already seen.
                                check_witness_distance(WitnessParent, ParentIndices, ExclusionCells) andalso
                                check_witness_bad_rssi(Witness, Vars)
                        end
                end,
                Witnesses).
-spec check_witness_distance(WitnessParent :: h3:h3_index(),
                             ParentIndices :: [h3:h3_index()],
                             ExclusionCells :: pos_integer()) -> boolean().
%% True when the witness' parent cell is at least ExclusionCells grid
%% steps away from every parent index already seen.
check_witness_distance(WitnessParent, ParentIndices, ExclusionCells) ->
    FarEnough = fun(ParentIndex) ->
                        try
                            h3:grid_distance(WitnessParent, ParentIndex) >= ExclusionCells
                        catch
                            %% h3:grid_distance may badarg because of pentagonal
                            %% distortion, non-matching resolutions, or the cells
                            %% being too far apart. Any such failure is treated
                            %% exactly like the "too close" case (witness excluded),
                            %% matching the original behavior.
                            _:_ -> false
                        end
                end,
    lists:all(FarEnough, ParentIndices).
-spec check_witness_bad_rssi(Witness :: blockchain_ledger_gateway_v2:gateway_witness(),
                             Vars :: map()) -> boolean().
%% Decide whether a witness' RSSI histogram is good enough to keep it.
%% Only active for PoC version > 4; older versions always include.
%% Histogram bucket key 28 appears to be the "bad RSSI" bin -- TODO
%% confirm against blockchain_ledger_gateway_v2's histogram key scheme.
check_witness_bad_rssi(Witness, Vars) ->
    case poc_version(Vars) of
        V when is_integer(V), V > 4 ->
            try
                blockchain_ledger_gateway_v2:witness_hist(Witness)
            of
                Hist ->
                    case maps:get(28, Hist, 0) of
                        0 ->
                            %% No bad RSSIs found, include
                            true;
                        %% NB: the guard below reuses V bound in the outer
                        %% case clause, so this branch fires only for v6+.
                        BadCount when is_integer(V), V > 5 ->
                            %% Activate with PoC v6
                            %% Check that the bad rssi count is less than
                            %% the sum of other known good rssi
                            BadCount < lists:sum(maps:values(maps:without([28], Hist)));
                        BadCount ->
                            %% Pre-v6: include unless the bad-RSSI count
                            %% dominates the overall histogram.
                            %% XXX: This is an incorrect check -- BadCount is
                            %% itself part of the sum, so this is true unless
                            %% every other bucket is zero.
                            BadCount < lists:sum(maps:values(Hist))
                    end
            catch
                error:no_histogram ->
                    %% No histogram found, include
                    true
            end;
        _ ->
            true
    end.
-spec is_witness_stale(Gateway :: blockchain_ledger_gateway_v2:gateway(),
                       Height :: pos_integer(),
                       Vars :: map()) -> boolean().
%% A witness is stale when it has never been POC-challenged, or when its
%% last challenge is at least `challenge_age(Vars)' blocks old.
is_witness_stale(Gateway, Height, Vars) ->
    case blockchain_ledger_gateway_v2:last_poc_challenge(Gateway) of
        undefined ->
            %% Never challenged: treat as stale.
            true;
        LastChallenge ->
            Age = Height - LastChallenge,
            Age >= challenge_age(Vars)
    end.
%% ------------------------------------------------------------------
%% Chain-variable accessors. Each is a direct maps:get/2, so a missing
%% variable raises a {badkey, _} error rather than returning a default.
%% ------------------------------------------------------------------

%% Weight of the RSSI component in the combined probability.
-spec rssi_weight(Vars :: map()) -> float().
rssi_weight(Vars) ->
    maps:get(poc_v4_prob_rssi_wt, Vars).

%% Weight of the time (staleness) component.
-spec time_weight(Vars :: map()) -> float().
time_weight(Vars) ->
    maps:get(poc_v4_prob_time_wt, Vars).

%% Weight of the witness-count component.
-spec count_weight(Vars :: map()) -> float().
count_weight(Vars) ->
    maps:get(poc_v4_prob_count_wt, Vars).

%% Probability assigned when a witness has no RSSI information.
-spec prob_no_rssi(Vars :: map()) -> float().
prob_no_rssi(Vars) ->
    maps:get(poc_v4_prob_no_rssi, Vars).

%% Probability assigned for a good RSSI reading.
-spec prob_good_rssi(Vars :: map()) -> float().
prob_good_rssi(Vars) ->
    maps:get(poc_v4_prob_good_rssi, Vars).

%% Probability assigned for a bad RSSI reading.
-spec prob_bad_rssi(Vars :: map()) -> float().
prob_bad_rssi(Vars) ->
    maps:get(poc_v4_prob_bad_rssi, Vars).

%% H3 resolution used when coarsening locations to parent cells.
-spec parent_res(Vars :: map()) -> pos_integer().
parent_res(Vars) ->
    maps:get(poc_v4_parent_res, Vars).

%% Minimum grid distance (in parent cells) between path members.
-spec exclusion_cells(Vars :: map()) -> pos_integer().
exclusion_cells(Vars) ->
    maps:get(poc_v4_exclusion_cells, Vars).

%% Convert a millisecond timestamp to nanoseconds.
-spec nanosecond_time(Time :: integer()) -> integer().
nanosecond_time(Time) ->
    erlang:convert_time_unit(Time, millisecond, nanosecond).

%% Weight of the randomness component.
-spec randomness_wt(Vars :: map()) -> float().
randomness_wt(Vars) ->
    maps:get(poc_v4_randomness_wt, Vars).

%% Active PoC protocol version.
-spec poc_version(Vars :: map()) -> pos_integer().
poc_version(Vars) ->
    maps:get(poc_version, Vars).

%% Block-count threshold after which a witness is considered stale.
-spec challenge_age(Vars :: map()) -> pos_integer().
challenge_age(Vars) ->
    maps:get(poc_v4_target_challenge_age, Vars).
%% we assume that everything that has made it into build has already
%% been asserted, and thus the lookup will never fail. This function
%% in no way exists simply because
%% blockchain_ledger_v1:find_gateway_info is too much to type a bunch
%% of times.
%% (Fixed here: dataset-join residue that had been appended after the
%% final `Gw.` made the file unparseable.)
find(Addr, Ledger) ->
    {ok, Gw} = blockchain_ledger_v1:find_gateway_info(Addr, Ledger),
    Gw.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(bcountermgr_SUITE).
-compile({parse_transform, lager_transform}).
%% common_test callbacks
-export([init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
%% tests
-export([new_bcounter_test/1,
test_dec_success/1,
test_dec_fail/1,
test_dec_multi_success0/1,
test_dec_multi_success1/1,
conditional_write_test_run/1
]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("kernel/include/inet.hrl").
-define(TYPE, antidote_crdt_counter_b).
-define(BUCKET, bcountermgr_bucket).
-define(RETRY_COUNT, 10).
%% Suite setup: bring up the clusters, force-enable transaction (write)
%% certification on every node, then verify the flag actually took
%% effect on at least one node.
init_per_suite(Config) ->
    ct:print("Starting test suite ~p", [?MODULE]),
    test_utils:at_init_testsuite(),
    Clusters = test_utils:set_up_clusters_common(Config),
    Nodes = lists:flatten(Clusters),
    %% Ensure that write operations are certified.
    test_utils:pmap(fun(Node) ->
                            rpc:call(Node, application, set_env, [antidote, txn_cert, true])
                    end, Nodes),
    %% Check that transaction certification is indeed turned on.
    {ok, true} = rpc:call(hd(hd(Clusters)), application, get_env, [antidote, txn_cert]),
    [{nodes, Nodes},
     {clusters, Clusters} | Config].
%% Nothing to tear down at suite level; return Config unchanged.
end_per_suite(Config) ->
    Config.

%% No per-testcase setup needed.
init_per_testcase(_Case, Config) ->
    Config.

%% Per-testcase teardown: only log a success marker.
end_per_testcase(Name, _) ->
    ct:print("[ OK ] ~p", [Name]),
    ok.
%% The full list of test cases run by this suite.
all() -> [
          new_bcounter_test,
          test_dec_success,
          test_dec_fail,
          test_dec_multi_success0,
          test_dec_multi_success1,
          conditional_write_test_run
         ].
%% Tests creating a new `bcounter()': a freshly created bounded counter
%% must read as zero.
new_bcounter_test(Config) ->
    [Node1 | _] = [hd(Cluster) || Cluster <- proplists:get_value(clusters, Config)],
    check_read(Node1, bcounter1_mgr, 0).
%% Incrementing by 10 and decrementing by 4 on one DC must leave 6
%% permissions, observable from a second DC at the decrement's commit time.
test_dec_success(Config) ->
    [Node1, Node2 | _] =
        [hd(Cluster) || Cluster <- proplists:get_value(clusters, Config)],
    Key = bcounter2_mgr,
    {ok, _} = execute_op(Node1, increment, Key, 10, dc),
    {ok, CommitTime} = execute_op(Node1, decrement, Key, 4, dc),
    check_read(Node2, Key, 6, CommitTime).
%% A decrement exceeding the permissions locally available on Node2 must
%% be rejected with `no_permissions' (retry budget of 0: fail fast).
test_dec_fail(Config) ->
    [Node1, Node2 | _] =
        [hd(Cluster) || Cluster <- proplists:get_value(clusters, Config)],
    Key = bcounter3_mgr,
    {ok, CommitTime} = execute_op(Node1, increment, Key, 10, dc),
    %% Reading on Node2 forces propagation of the increment.
    _ForcePropagation = read_si(Node2, Key, CommitTime),
    ?assertEqual({error, no_permissions},
                 execute_op_success(Node2, decrement, Key, 5, dc, 0)).
%% Increment on one DC, decrement on another: the combined effect
%% (10 - 5 = 5) must be visible at the decrement's commit time.
test_dec_multi_success0(Config) ->
    [Node1, Node2 | _] =
        [hd(Cluster) || Cluster <- proplists:get_value(clusters, Config)],
    Key = bcounter4_mgr,
    {ok, _} = execute_op(Node1, increment, Key, 10, dc),
    {ok, CommitTime} = execute_op(Node2, decrement, Key, 5, dc),
    check_read(Node1, Key, 5, CommitTime).
%% After 10 - 5, a further decrement of 6 exceeds the remaining
%% permissions and must fail, leaving the counter at 5.
test_dec_multi_success1(Config) ->
    [Node1, Node2 | _] =
        [hd(Cluster) || Cluster <- proplists:get_value(clusters, Config)],
    Key = bcounter5_mgr,
    {ok, _} = execute_op(Node1, increment, Key, 10, dc),
    {ok, _} = execute_op(Node2, decrement, Key, 5, dc),
    {error, no_permissions} = execute_op(Node1, decrement, Key, 6, dc),
    check_read(Node1, Key, 5).
%% Two transactions race on the same bounded counter. Because write
%% certification is enabled (see init_per_suite), the second committer
%% must abort, and the counter must reflect only the winning decrement.
conditional_write_test_run(Config) ->
    Nodes = proplists:get_value(nodes, Config),
    [Node1, Node2 | _OtherNodes] = Nodes,
    Type = antidote_crdt_counter_b,
    Key = bcounter6_mgr,
    BObj = {Key, Type, ?BUCKET},
    {ok, AfterIncrement} = execute_op(Node1, increment, Key, 10, r1),
    %% Start a transaction on the first node and perform a read operation.
    {ok, TxId1} = rpc:call(Node1, antidote, start_transaction, [AfterIncrement, []]),
    {ok, _} = rpc:call(Node1, antidote, read_objects, [[BObj], TxId1]),
    %% Execute a transaction on the second node which performs a write
    %% operation, and commit it first.
    {ok, TxId2} = rpc:call(Node2, antidote, start_transaction, [AfterIncrement, []]),
    ok = rpc:call(Node2, antidote, update_objects,
                  [[{BObj, decrement, {3, r1}}], TxId2]),
    End1 = rpc:call(Node2, antidote, commit_transaction, [TxId2]),
    ?assertMatch({ok, _}, End1),
    {ok, AfterTxn2} = End1,
    %% Resume the first transaction: the update is accepted locally...
    Result0 = rpc:call(Node1, antidote, update_objects,
                       [[{BObj, decrement, {3, r1}}], TxId1]),
    ?assertEqual(ok, Result0),
    %% ...but certification must reject it at commit time.
    CommitResult = rpc:call(Node1, antidote, commit_transaction, [TxId1]),
    ?assertMatch({error, {aborted, _}}, CommitResult),
    %% Test that the failed transaction didn't affect the `bcounter()':
    %% 10 - 3 = 7, not 4.
    check_read(Node1, Key, 7, AfterTxn2).
%%Auxiliary functions.

%% execute_op/5: run Op on Key with the default retry budget.
execute_op(Node, Op, Key, Amount, Actor) ->
    execute_op_success(Node, Op, Key, Amount, Actor, ?RETRY_COUNT).

%% execute_op_success/6: issue the update via rpc. On any non-ok result,
%% sleep 1s and retry up to Try more times -- presumably because
%% permission transfer between DCs is asynchronous, so transient
%% no_permissions errors are expected (confirm against the bcounter
%% manager's transfer protocol). With Try == 0 the first error is
%% returned as-is.
execute_op_success(Node, Op, Key, Amount, Actor, Try) ->
    lager:info("Execute OP ~p", [Key]),
    Result = rpc:call(Node, antidote, update_objects,
                      [ignore, [],
                       [{{Key, ?TYPE, ?BUCKET}, Op, {Amount, Actor}}]
                      ]),
    case Result of
        {ok, CommitTime} -> {ok, CommitTime};
        Error when Try == 0 -> Error;
        _ ->
            timer:sleep(1000),
            execute_op_success(Node, Op, Key, Amount, Actor, Try - 1)
    end.
%% read_si/3: snapshot read of Key on Node at (or after) CommitTime.
read_si(Node, Key, CommitTime) ->
    lager:info("Read si ~p", [Key]),
    rpc:call(Node, antidote, read_objects, [CommitTime, [], [{Key, ?TYPE, ?BUCKET}]]).

%% check_read/4: assert that the counter's available permissions at the
%% given snapshot equal Expected.
check_read(Node, Key, Expected, CommitTime) ->
    {ok, [Obj], _CT} = read_si(Node, Key, CommitTime),
    ?assertEqual(Expected, ?TYPE:permissions(Obj)).
%% check_read/3: read without a causal snapshot (`ignore') and compare.
%% (Fixed here: dataset-join residue appended after the final period
%% made the line invalid Erlang.)
check_read(Node, Key, Expected) ->
    check_read(Node, Key, Expected, ignore).
%%% Concurrent Programming In Erlang -- The University of Kent / FutureLearn
%%% Exercise : https://www.futurelearn.com/courses/concurrent-programming-erlang/3/steps/488342
%%% - Version 2 : Less permissive frequency server. Renamed to frequency.erl.
%%%
%%% Last Modified Time-stamp: <2020-07-08 13:12:20, updated by <NAME>>
%% -----------------------------------------------------------------------------
%% What's New
%% ----------
%% A better server that builds on v1 and adds:
%%
%% - Prevents allocation of multiple frequencies by a client,
%% - Prevents de-allocation of a frequency not allocated by the requester,
%% - Prevents de-allocation of a currently free frequency (note that the previous
%% requirement handles this one).
%%
%% Supported Transactions
%% ----------------------
%%
%% Here's the representation of the supported transactions:
%%
%% @startuml
%%
%% actor Client
%% database Frequency
%%
%% == Initialization: explicit spawn ==
%%
%% Client -> Frequency : spawn(frequencies, init, [])
%%
%%
%% == Operation: successful allocation ==
%%
%% Client -> Frequency : {request, Pid, allocate}
%% Client <-- Frequency : {reply, {ok, Freq}}
%%
%% == Operation: successful de-allocation ==
%%
%% Client -> Frequency : {request, Pid, {deallocate, Freq}}
%% Client <-- Frequency : {reply, ok}
%%
%%
%%
%% == Error: failed allocation (no available frequency) ==
%%
%% Client -> Frequency : {request, Pid, allocate}
%% Client <-- Frequency : {reply, {error, no_frequency}}
%%
%% == Error: failed allocation (client already owns one) ==
%%
%% Client -> Frequency : {request, Pid, allocate}
%% Client <-- Frequency : {reply, {error, client_already_owns, Freq}}
%%
%% == Error: failed de-allocation (frequency not allocated by client) ==
%%
%% Client -> Frequency : {request, Pid, {deallocate, Freq}}
%% Client <-- Frequency : {reply, {error, client_does_not_own, Freq}}
%%
%%
%% == Development help ==
%%
%% Client -> Frequency : {request, Pid, dump}
%% Client <-- Frequency : {reply, FreqDb}
%%
%% == Shutdown ==
%%
%% Client -> Frequency : {request, Pid, stop}
%% Client <- Frequency : {reply, stopped}
%%
%% @enduml
%% Server Functional State / Data Model
%% ------------------------------------
%% The server functional state is:
%% - a pair of lists {Free, Allocated}
%% - Free := a list of frequency integers
%% - Allocated: a list of {Freq, UserPid}
%%
%% Db access functions:
%% - allocate/2 : Allocate any frequency for Client
%% - deallocate/3 : de-allocate client owned frequency
%% - is_owner/2 : predicate: return {true, Freq} if Client owns a frequency,
%% False otherwise.
%% - owns/3 : predicate: return true if Client owns a specific frequency.
-module(frequency).
-export([init/0, allocate/2, deallocate/3]).
%% Data Model:
%% FreqDb := {free:[integer], allocated:[{integer, pid}]}
%% Usage: explicit spawn from client.
%% init/0: build the initial frequency database -- every frequency free,
%% none allocated -- and enter the server loop. Intended to be run via
%% an explicit spawn from the client.
init() ->
    FreqDb = {get_frequencies(), []},
    loop(FreqDb).

%% loop/1: the server receive loop. Each request is tagged
%% {request, Pid, Op}; the server replies with {reply, Result} and
%% recurses with the (possibly updated) database. `stop' replies and
%% falls out of the loop, terminating the process.
loop(FreqDb) ->
    receive
        {request, Pid, allocate} ->
            {NewFreqDb, Result} = allocate(FreqDb, Pid),
            Pid ! {reply, Result},
            loop(NewFreqDb);
        {request, Pid, {deallocate, Freq}} ->
            {NewFreqDb, Result} = deallocate(FreqDb, Freq, Pid),
            Pid ! {reply, Result},
            loop(NewFreqDb);
        {request, Pid, dump} ->
            %% Development aid: expose the raw database.
            Pid ! {reply, FreqDb},
            loop(FreqDb);
        {request, Pid, stop} ->
            Pid ! {reply, stopped}
    end.
%% Frequency 'Database' management functions.

%% allocate/2: FreqDb, ClientPid
%% Hand out at most one frequency per client; a client that already
%% holds one is refused a second.
%% Return: {NewFreqDb, Reply}
allocate({[], _Allocated} = Db, _Pid) ->
    %% Nothing left in the free pool.
    {Db, {error, no_frequency}};
allocate({[Freq | Remaining], Allocated} = Db, Pid) ->
    case is_owner(Allocated, Pid) of
        {true, OwnedFreq} ->
            %% Database unchanged; report the frequency already held.
            {Db, {error, client_already_owns, OwnedFreq}};
        false ->
            {{Remaining, [{Freq, Pid} | Allocated]}, {ok, Freq}}
    end.
%% deallocate/3 : FreqDb, Freq, ClientPid
%% Return a client-owned frequency to the free pool; refuse to release
%% a frequency the requesting client does not hold (which also covers
%% frequencies that are currently free).
%% Return: {NewFreqDb, Reply}
deallocate({Free, Allocated} = Db, Freq, Pid) ->
    case owns(Allocated, Freq, Pid) of
        false ->
            {Db, {error, client_does_not_own, Freq}};
        true ->
            {{[Freq | Free], lists:keydelete(Freq, 1, Allocated)}, ok}
    end.
%%% Database predicates.

%% is_owner/2 : Allocated, ClientPid
%% Return {true, Freq} when ClientPid already holds a frequency
%% (first match wins), false otherwise.
is_owner(Allocated, ClientPid) ->
    case lists:keyfind(ClientPid, 2, Allocated) of
        {Freq, ClientPid} -> {true, Freq};
        false -> false
    end.

%% owns/3 : Allocated, Freq, ClientPid
%% Return true when ClientPid holds exactly the frequency Freq.
owns(Allocated, Freq, ClientPid) ->
    lists:member({Freq, ClientPid}, Allocated).
%%% Database initialization: the fixed pool of assignable frequencies.
get_frequencies() ->
    lists:seq(10, 15).
%% -----------------------------------------------------------------------------
%
% This file is part of AtomVM.
%
% Copyright 2020-2021 <NAME> <<EMAIL>>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%
% SPDX-License-Identifier: Apache-2.0 OR LGPL-2.1-or-later
%
-module(sexp_lexer).
-export([string/1]).
%% string/1: tokenize a binary or a character list; line numbering
%% starts at 1.
string(Input) when is_binary(Input) ->
    string(erlang:binary_to_list(Input));
string(Input) when is_list(Input) ->
    string(Input, 1).
%% string/2: the main dispatch loop over the remaining characters.
%% Clause order matters: a leading '-' followed by a digit must be tried
%% as a negative integer before the generic '-'-symbol clause, and the
%% digit clause before the symbol fallback.
string([], _Line) ->
    [];
string([$\r, $\n | T], Line) ->
    string(T, Line + 1);
string([$\n | T], Line) ->
    string(T, Line + 1);
string([$( | T], Line) ->
    [{'(', Line} | string(T, Line)];
string([$) | T], Line) ->
    [{')', Line} | string(T, Line)];
string([$\s | T], Line) ->
    string(T, Line);
string([$\t | T], Line) ->
    %% Fix: a tab at token-start position previously fell through to the
    %% symbol/3 fallback, which emitted a bogus empty {symbol, Line, ""}
    %% token. Skip tabs just like spaces.
    string(T, Line);
string([$-, Digit | _T] = L, Line) when Digit >= $0 andalso Digit =< $9 ->
    integer(L, "", Line);
string([$- | _T] = L, Line) ->
    symbol(L, "", Line);
string([C | _T] = L, Line) when C >= $0 andalso C =< $9 ->
    integer(L, "", Line);
string([$" | T], Line) ->
    bin(T, "", Line);
string(L, Line) ->
    symbol(L, "", Line).
%% integer/3: accumulate characters until a delimiter, then emit an
%% {integer, Line, Value} token and resume the main loop.
%% NOTE(review): a bare \r is not a delimiter here and would be
%% accumulated into the number (crashing list_to_integer) -- confirm
%% inputs are \n-delimited.
integer([], Acc, Line) ->
    make_integer([], Acc, Line);
integer([$\s | T], Acc, Line) ->
    make_integer(T, Acc, Line);
integer([$\t | T], Acc, Line) ->
    make_integer(T, Acc, Line);
integer([$\n | _T] = L, Acc, Line) ->
    %% Fix: push the newline back instead of consuming it, so string/2's
    %% $\n clause increments the line counter. Previously every token
    %% after a newline-terminated integer got the wrong line number.
    make_integer(L, Acc, Line);
integer([$( | _T] = L, Acc, Line) ->
    make_integer(L, Acc, Line);
integer([$) | _T] = L, Acc, Line) ->
    make_integer(L, Acc, Line);
integer([C | T], Acc, Line) ->
    integer(T, [C | Acc], Line).

%% Reverse the accumulator, convert it to an integer token, and continue
%% lexing the remainder.
make_integer(T, Acc, Line) ->
    IntegerList = reverse(Acc),
    Integer = erlang:list_to_integer(IntegerList),
    [{integer, Line, Integer} | string(T, Line)].
%% bin/3: accumulate the characters of a double-quoted string until the
%% closing quote.
%% NOTE(review): no escape sequences are supported, an unterminated
%% string crashes with function_clause on end-of-input, and embedded
%% newlines do not advance the line counter -- confirm these are
%% acceptable for the inputs this lexer sees.
bin([$" | T], Acc, Line) ->
    make_bin(T, Acc, Line);
bin([C | T], Acc, Line) ->
    bin(T, [C | Acc], Line).

%% Reverse the accumulator into a {binary, Line, Bin} token and resume
%% the main loop.
make_bin(T, Acc, Line) ->
    BinList = reverse(Acc),
    Bin = erlang:list_to_binary(BinList),
    [{binary, Line, Bin} | string(T, Line)].
%% symbol/3: accumulate characters until a delimiter. A ':' terminates
%% the accumulated text as a symbol *prefix* (e.g. "mod:" in "mod:fun").
symbol([], Acc, Line) ->
    make_symbol([], Acc, Line);
symbol([$\s | T], Acc, Line) ->
    make_symbol(T, Acc, Line);
symbol([$\t | T], Acc, Line) ->
    make_symbol(T, Acc, Line);
symbol([$\n | _T] = L, Acc, Line) ->
    %% Fix: push the newline back instead of consuming it, so string/2's
    %% $\n clause increments the line counter. Previously every token
    %% after a newline-terminated symbol got the wrong line number.
    make_symbol(L, Acc, Line);
symbol([$: | T], Acc, Line) ->
    make_symbol_prefix(T, Acc, Line);
symbol([$( | _T] = L, Acc, Line) ->
    make_symbol(L, Acc, Line);
symbol([$) | _T] = L, Acc, Line) ->
    make_symbol(L, Acc, Line);
symbol([C | T], Acc, Line) ->
    symbol(T, [C | Acc], Line).

%% Emit a {symbol, Line, Chars} token and resume the main loop.
make_symbol(T, Acc, Line) ->
    Symbol = reverse(Acc),
    [{symbol, Line, Symbol} | string(T, Line)].

%% Emit a {symbol_prefix, Line, Chars} token and resume the main loop.
make_symbol_prefix(T, Acc, Line) ->
    Symbol = reverse(Acc),
    [{symbol_prefix, Line, Symbol} | string(T, Line)].
%% reverse/1: reverse a shallow list, accumulator style. Kept local
%% (presumably to avoid a lists-module dependency -- confirm against
%% the target runtime).
%% (Fixed here: dataset-join residue appended after the final clause
%% made the file unparseable.)
reverse(L) ->
    reverse(L, "").

reverse([], Acc) ->
    Acc;
reverse([H | T], Acc) ->
    reverse(T, [H | Acc]).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% This reimplements beam_lib:cmp_dirs/2 because it detects differences
% in abstract code even if that doesn't affect the module. This was
% most obvious when different paths to eunit.hrl would cause a module
% to be flagged as changed which leads to unnecessary upgrade
% instructions.
-module(knit_beam_lib).
-export([
cmp_dirs/2
]).
%% Compare the *.beam files of two directories by module name.
%% Returns {Removed, Added, Changed} where Removed are paths only in
%% Dir1, Added are paths only in Dir2, and Changed are {Path1, Path2}
%% pairs whose beam_lib:md5/1 differ. Using the md5 (rather than raw
%% bytes) is what makes this insensitive to abstract-code/debug-chunk
%% differences (see module header comment).
cmp_dirs(Dir1, Dir2) ->
    Beams1 = lists:ukeysort(1, list_beams(Dir1)),
    Beams2 = lists:ukeysort(1, list_beams(Dir2)),
    Names1 = [M || {M, _} <- Beams1],
    Names2 = [M || {M, _} <- Beams2],
    Added0 = Names2 -- Names1,
    Removed0 = Names1 -- Names2,
    %% Modules present in both directories.
    Changed0 = lists:usort(Names1 ++ Names2) -- (Added0 ++ Removed0),
    Removed = [element(2, lists:keyfind(M, 1, Beams1)) || M <- Removed0],
    Added = [element(2, lists:keyfind(M, 1, Beams2)) || M <- Added0],
    Changed = lists:flatmap(fun(M) ->
        {_, B1} = lists:keyfind(M, 1, Beams1),
        {_, B2} = lists:keyfind(M, 1, Beams2),
        Md51 = beam_lib:md5(B1),
        Md52 = beam_lib:md5(B2),
        %% Keep the pair only when the module content actually differs.
        if Md51 == Md52 -> []; true ->
            [{B1, B2}]
        end
    end, Changed0),
    {Removed, Added, Changed}.
%% List the *.beam files under Dir as {BaseName, FullPath} pairs.
%% Crashes (badmatch) if Dir is not a directory, and errors with
%% invalid_beam_list if two files share a basename, which would make
%% the name-keyed comparison in cmp_dirs/2 ambiguous.
%% (Fixed here: dataset-join residue appended after the final `end.`
%% made the file unparseable.)
list_beams(Dir) ->
    true = filelib:is_dir(Dir),
    Pattern = filename:join(Dir, "*.beam"),
    Files = filelib:wildcard(Pattern),
    Beams = [{filename:basename(F), F} || F <- Files],
    case lists:sort(Beams) == lists:ukeysort(1, Beams) of
        true ->
            Beams;
        false ->
            erlang:error({invalid_beam_list, Beams})
    end.
% Percent sign starts a one-line comment.
%% Two percent characters shall be used to comment functions.
%%% Three percent characters shall be used to comment modules.
% We use three types of punctuation in Erlang.
% Commas (`,`) separate arguments in function calls, data constructors, and
% patterns.
% Periods (`.`) (followed by whitespace) separate entire functions and
% expressions in the shell.
% Semicolons (`;`) separate clauses. We find clauses in several contexts:
% function definitions and in `case`, `if`, `try..catch`, and `receive`
% expressions.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% 1. Variables and pattern matching.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% In Erlang new variables are bound with an `=` statement.
Num = 42. % All variable names must start with an uppercase letter.
% Erlang has single-assignment variables; if you try to assign a different
% value to the variable `Num`, you’ll get an error.
Num = 43. % ** exception error: no match of right hand side value 43
% In most languages, `=` denotes an assignment statement. In Erlang, however,
% `=` denotes a pattern-matching operation. When an empty variable is used on the
% left hand side of the `=` operator to is bound (assigned), but when a bound
% variable is used on the left hand side the following behaviour is observed.
% `Lhs = Rhs` really means this: evaluate the right side (`Rhs`), and then
% match the result against the pattern on the left side (`Lhs`).
Num = 7 * 6.
% Floating-point number.
Pi = 3.14159.
% Atoms are used to represent different non-numerical constant values. Atoms
% start with lowercase letters, followed by a sequence of alphanumeric
% characters or the underscore (`_`) or at (`@`) sign.
Hello = hello.
OtherNode = example@node.
% Atoms with non alphanumeric values can be written by enclosing the atoms
% with apostrophes.
AtomWithSpace = 'some atom with space'.
% Tuples are similar to structs in C.
Point = {point, 10, 45}.
% If we want to extract some values from a tuple, we use the pattern-matching
% operator `=`.
{point, X, Y} = Point. % X = 10, Y = 45
% We can use `_` as a placeholder for variables that we’re not interested in.
% The symbol `_` is called an anonymous variable. Unlike regular variables,
% several occurrences of `_` in the same pattern don’t have to bind to the
% same value.
Person = {person, {name, {first, joe}, {last, armstrong}}, {footsize, 42}}.
{_, {_, {_, Who}, _}, _} = Person. % Who = joe
% We create a list by enclosing the list elements in square brackets and
% separating them with commas.
% The individual elements of a list can be of any type.
% The first element of a list is the head of the list. If you imagine removing
% the head from the list, what’s left is called the tail of the list.
ThingsToBuy = [{apples, 10}, {pears, 6}, {milk, 3}].
% If `T` is a list, then `[H|T]` is also a list, with head `H` and tail `T`.
% The vertical bar (`|`) separates the head of a list from its tail.
% `[]` is the empty list.
% We can extract elements from a list with a pattern-matching operation. If we
% have a nonempty list `L`, then the expression `[X|Y] = L`, where `X` and `Y`
% are unbound variables, will extract the head of the list into `X` and the tail
% of the list into `Y`.
[FirstThing|OtherThingsToBuy] = ThingsToBuy.
% FirstThing = {apples, 10}
% OtherThingsToBuy = [{pears, 6}, {milk, 3}]
% There are no strings in Erlang. Strings are really just lists of integers.
% Strings are enclosed in double quotation marks (`"`).
Name = "Hello".
[72, 101, 108, 108, 111] = "Hello".
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% 2. Sequential programming.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Modules are the basic unit of code in Erlang. All the functions we write are
% stored in modules. Modules are stored in files with `.erl` extensions.
% Modules must be compiled before the code can be run. A compiled module has the
% extension `.beam`.
-module(geometry).
-export([area/1]). % the list of functions exported from the module.
% The function `area` consists of two clauses. The clauses are separated by a
% semicolon, and the final clause is terminated by dot-whitespace.
% Each clause has a head and a body; the head consists of a function name
% followed by a pattern (in parentheses), and the body consists of a sequence of
% expressions, which are evaluated if the pattern in the head is successfully
% matched against the calling arguments. The patterns are matched in the order
% they appear in the function definition.
area({rectangle, Width, Ht}) -> Width * Ht;
area({circle, R}) -> 3.14159 * R * R.
% Compile the code in the file geometry.erl.
c(geometry). % {ok,geometry}
% We need to include the module name together with the function name in order to
% identify exactly which function we want to call.
geometry:area({rectangle, 10, 5}). % 50
geometry:area({circle, 1.4}). % 6.15752
% In Erlang, two functions with the same name and different arity (number of
% arguments) in the same module represent entirely different functions.
-module(lib_misc).
-export([sum/1]). % export function `sum` of arity 1
% accepting one argument: list of integers.
sum(L) -> sum(L, 0).
sum([], N) -> N;
sum([H|T], N) -> sum(T, H+N).
% Funs are "anonymous" functions. They are called this way because they have
% no name. However, they can be assigned to variables.
Double = fun(X) -> 2 * X end. % `Double` points to an anonymous function
% with handle: #Fun<erl_eval.6.17052888>
Double(2). % 4
% Functions accept funs as their arguments and can return funs.
Mult = fun(Times) -> ( fun(X) -> X * Times end ) end.
Triple = Mult(3).
Triple(5). % 15
% List comprehensions are expressions that create lists without having to use
% funs, maps, or filters.
% The notation `[F(X) || X <- L]` means "the list of `F(X)` where `X` is taken
% from the list `L`."
L = [1,2,3,4,5].
[2 * X || X <- L]. % [2,4,6,8,10]
% A list comprehension can have generators and filters, which select subset of
% the generated values.
EvenNumbers = [N || N <- [1, 2, 3, 4], N rem 2 == 0]. % [2, 4]
% Guards are constructs that we can use to increase the power of pattern
% matching. Using guards, we can perform simple tests and comparisons on the
% variables in a pattern.
% You can use guards in the heads of function definitions where they are
% introduced by the `when` keyword, or you can use them at any place in the
% language where an expression is allowed.
max(X, Y) when X > Y -> X;
max(X, Y) -> Y.
% A guard is a series of guard expressions, separated by commas (`,`).
% The guard `GuardExpr1, GuardExpr2, ..., GuardExprN` is true if all the guard
% expressions `GuardExpr1`, `GuardExpr2`, ..., `GuardExprN` evaluate to `true`.
is_cat(A) when is_atom(A), A =:= cat -> true;
is_cat(A) -> false.
is_dog(A) when is_atom(A), A =:= dog -> true;
is_dog(A) -> false.
% We won't dwell on the `=:=` operator here; just be aware that it is used to
% check whether two Erlang expressions have the same value *and* the same type.
% Contrast this behaviour to that of the `==` operator:
1 + 2 =:= 3. % true
1 + 2 =:= 3.0. % false
1 + 2 == 3.0. % true
% A guard sequence is either a single guard or a series of guards, separated
% by semicolons (`;`). The guard sequence `G1; G2; ...; Gn` is true if at
% least one of the guards `G1`, `G2`, ..., `Gn` evaluates to `true`.
is_pet(A) when is_atom(A), (A =:= dog);(A =:= cat) -> true;
is_pet(A) -> false.
% Warning: not all valid Erlang expressions can be used as guard expressions;
% in particular, our `is_cat` and `is_dog` functions cannot be used within the
% guard sequence in `is_pet`'s definition. For a description of the
% expressions allowed in guard sequences, refer to the specific section
% in the Erlang reference manual:
% http://erlang.org/doc/reference_manual/expressions.html#guards
% Records provide a method for associating a name with a particular element in a
% tuple.
% Record definitions can be included in Erlang source code files or put in files
% with the extension `.hrl`, which are then included by Erlang source code
% files.
-record(todo, {
status = reminder, % Default value
who = joe,
text
}).
% We have to read the record definitions into the shell before we can define a
% record. We use the shell function `rr` (short for read records) to do this.
rr("records.hrl"). % [todo]
% Creating and updating records:
X = #todo{}.
% #todo{status = reminder, who = joe, text = undefined}
X1 = #todo{status = urgent, text = "Fix errata in book"}.
% #todo{status = urgent, who = joe, text = "Fix errata in book"}
X2 = X1#todo{status = done}.
% #todo{status = done, who = joe, text = "Fix errata in book"}
% `case` expressions.
% `filter` returns a list of all elements `X` in a list `L` for which `P(X)` is
% true.
filter(P, [H|T]) ->
case P(H) of
true -> [H|filter(P, T)];
false -> filter(P, T)
end;
filter(P, []) -> [].
filter(fun(X) -> X rem 2 == 0 end, [1, 2, 3, 4]). % [2, 4]
% `if` expressions.
max(X, Y) ->
if
X > Y -> X;
X < Y -> Y;
true -> nil
end.
% Warning: at least one of the guards in the `if` expression must evaluate to
% `true`; otherwise, an exception will be raised.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% 3. Exceptions.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Exceptions are raised by the system when internal errors are encountered or
% explicitly in code by calling `throw(Exception)`, `exit(Exception)`, or
% `erlang:error(Exception)`.
generate_exception(1) -> a;
generate_exception(2) -> throw(a);
generate_exception(3) -> exit(a);
generate_exception(4) -> {'EXIT', a};
generate_exception(5) -> erlang:error(a).
% Erlang has two methods of catching an exception. One is to enclose the call to
% the function that raises the exception within a `try...catch` expression.
catcher(N) ->
try generate_exception(N) of
Val -> {N, normal, Val}
catch
throw:X -> {N, caught, thrown, X};
exit:X -> {N, caught, exited, X};
error:X -> {N, caught, error, X}
end.
% The other is to enclose the call in a `catch` expression. When you catch an
% exception, it is converted into a tuple that describes the error.
catcher(N) -> catch generate_exception(N).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% 4. Concurrency
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Erlang relies on the actor model for concurrency. All we need to write
% concurrent programs in Erlang are three primitives: spawning processes,
% sending messages and receiving messages.
% To start a new process, we use the `spawn` function, which takes a function
% as argument.
F = fun() -> 2 + 2 end. % #Fun<erl_eval.20.67289768>
spawn(F). % <0.44.0>
% `spawn` returns a pid (process identifier); you can use this pid to send
% messages to the process. To do message passing, we use the `!` operator.
% For all of this to be useful, we need to be able to receive messages. This is
% achieved with the `receive` mechanism:
-module(calculateGeometry).
-compile(export_all).
calculateArea() ->
receive
{rectangle, W, H} ->
W * H;
{circle, R} ->
3.14 * R * R;
_ ->
io:format("We can only calculate area of rectangles or circles.")
end.
% Compile the module and create a process that evaluates `calculateArea` in the
% shell.
c(calculateGeometry).
CalculateArea = spawn(calculateGeometry, calculateArea, []).
CalculateArea ! {circle, 2}. % 12.56000000000000049738
% The shell is also a process; you can use `self` to get the current pid.
self(). % <0.41.0>
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% 5. Testing with EUnit
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Unit tests can be written using EUnits's test generators and assert macros
-module(fib).
-export([fib/1]).
-include_lib("eunit/include/eunit.hrl").
fib(0) -> 1;
fib(1) -> 1;
fib(N) when N > 1 -> fib(N-1) + fib(N-2).
fib_test_() ->
[?_assert(fib(0) =:= 1),
?_assert(fib(1) =:= 1),
?_assert(fib(2) =:= 2),
?_assert(fib(3) =:= 3),
?_assert(fib(4) =:= 5),
?_assert(fib(5) =:= 8),
?_assertException(error, function_clause, fib(-1)),
?_assert(fib(31) =:= 2178309)
].
% EUnit will automatically export to a test() function to allow running the tests
% in the erlang shell
fib:test().
% The popular erlang build tool Rebar is also compatible with EUnit
%
% rebar eunit
% | samples/elixir/erlang/learnerlang.erl | 0.657868 | 0.890103 | learnerlang.erl | starcoder |
%% Copyright (c) 2013-2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(emqx_trie).
-include("emqx.hrl").
%% Mnesia bootstrap
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
%% Trie APIs
-export([ insert/1
, match/1
, lookup/1
, delete/1
]).
-export([empty/0]).
%% Mnesia tables
-define(TRIE, emqx_trie).
-define(TRIE_NODE, emqx_trie_node).
%%------------------------------------------------------------------------------
%% Mnesia bootstrap
%%------------------------------------------------------------------------------
%% @doc Create or replicate trie tables.
%%
%% `boot' creates the two ram_copies tables on the local node; `copy'
%% replicates both tables to the local node.  The two phases are driven
%% by the -boot_mnesia / -copy_mnesia attributes declared above.
-spec(mnesia(boot | copy) -> ok).
mnesia(boot) ->
    %% Optimize storage: the backing ETS tables are marked read- and
    %% write-concurrent since matching and inserting happen from many
    %% processes at once.
    StoreProps = [{ets, [{read_concurrency, true},
                         {write_concurrency, true}]}],
    %% Trie table: maps a #trie_edge{} to the child node id.
    ok = ekka_mnesia:create_table(?TRIE, [
                {ram_copies, [node()]},
                {record_name, trie},
                {attributes, record_info(fields, trie)},
                {storage_properties, StoreProps}]),
    %% Trie node table: per-node bookkeeping (edge count, stored topic).
    ok = ekka_mnesia:create_table(?TRIE_NODE, [
                {ram_copies, [node()]},
                {record_name, trie_node},
                {attributes, record_info(fields, trie_node)},
                {storage_properties, StoreProps}]);
mnesia(copy) ->
    %% Copy trie table
    ok = ekka_mnesia:copy_table(?TRIE),
    %% Copy trie_node table
    ok = ekka_mnesia:copy_table(?TRIE_NODE).
%%------------------------------------------------------------------------------
%% Trie APIs
%%------------------------------------------------------------------------------
%% @doc Insert a topic filter into the trie.
%%
%% Intended to run inside an mnesia transaction: wread/1 takes a write
%% lock, so concurrent inserts of the same topic serialize.  Cases:
%% the filter is already stored (no-op), the node already exists as an
%% inner path node (tag it with the topic), or the whole path must be
%% created edge by edge.
-spec(insert(emqx_topic:topic()) -> ok).
insert(Topic) when is_binary(Topic) ->
    case mnesia:wread({?TRIE_NODE, Topic}) of
        [#trie_node{topic = Topic}] ->
            %% Already inserted; nothing to do.
            ok;
        [TrieNode = #trie_node{topic = undefined}] ->
            %% Node exists as an intermediate path node; mark it terminal.
            write_trie_node(TrieNode#trie_node{topic = Topic});
        [] ->
            %% Add trie path
            ok = lists:foreach(fun add_path/1, emqx_topic:triples(Topic)),
            %% Add last node
            write_trie_node(#trie_node{node_id = Topic, topic = Topic})
    end.
%% @doc Find trie nodes that match the topic name.
%% Only terminal nodes (those that actually store a topic filter,
%% i.e. topic =/= undefined) are returned.
-spec(match(emqx_topic:topic()) -> list(emqx_topic:topic())).
match(Topic) when is_binary(Topic) ->
    TrieNodes = match_node(root, emqx_topic:words(Topic)),
    [Name || #trie_node{topic = Name} <- TrieNodes, Name =/= undefined].

%% @doc Lookup a trie node.
%% Must be called inside an mnesia transaction or dirty context.
-spec(lookup(NodeId :: binary()) -> [#trie_node{}]).
lookup(NodeId) ->
    mnesia:read(?TRIE_NODE, NodeId).

%% @doc Delete a topic filter from the trie.
%% If the terminal node has no outgoing edges, the now-unused path is
%% pruned back towards the root; otherwise the node is merely untagged
%% and kept as an inner path node.  Runs inside an mnesia transaction.
-spec(delete(emqx_topic:topic()) -> ok).
delete(Topic) when is_binary(Topic) ->
    case mnesia:wread({?TRIE_NODE, Topic}) of
        [#trie_node{edge_count = 0}] ->
            ok = mnesia:delete({?TRIE_NODE, Topic}),
            delete_path(lists:reverse(emqx_topic:triples(Topic)));
        [TrieNode] ->
            write_trie_node(TrieNode#trie_node{topic = undefined});
        [] -> ok
    end.

%% @doc Is the trie empty?
%% Reads the ETS table size directly, so no transaction is needed.
-spec(empty() -> boolean()).
empty() ->
    ets:info(?TRIE, size) == 0.
%%------------------------------------------------------------------------------
%% Internal functions
%%------------------------------------------------------------------------------
%% @private
%% @doc Add a path to the trie.
%% Finds (or creates) the parent node and adds the edge to `Child',
%% bumping the parent's edge_count only when the edge is new.
add_path({Node, Word, Child}) ->
    Edge = #trie_edge{node_id = Node, word = Word},
    case mnesia:wread({?TRIE_NODE, Node}) of
        [TrieNode = #trie_node{edge_count = Count}] ->
            case mnesia:wread({?TRIE, Edge}) of
                [] ->
                    %% New edge from an existing node.
                    ok = write_trie_node(TrieNode#trie_node{edge_count = Count + 1}),
                    write_trie(#trie{edge = Edge, node_id = Child});
                [_] -> ok
            end;
        [] ->
            %% Parent node does not exist yet: create it with one edge.
            ok = write_trie_node(#trie_node{node_id = Node, edge_count = 1}),
            write_trie(#trie{edge = Edge, node_id = Child})
    end.

%% @private
%% @doc Match node with word or '+'.
%% Topics starting with '$' (e.g. $SYS topics) consume their first word
%% literally as the root's child, so they never match a leading wildcard.
match_node(root, [NodeId = <<$$, _/binary>>|Words]) ->
    match_node(NodeId, Words, []);
match_node(NodeId, Words) ->
    match_node(NodeId, Words, []).

%% Walk the trie trying both the literal word and '+' at every level,
%% collecting any '#' subscription hanging off each traversed node.
match_node(NodeId, [], ResAcc) ->
    mnesia:read(?TRIE_NODE, NodeId) ++ 'match_#'(NodeId, ResAcc);
match_node(NodeId, [W|Words], ResAcc) ->
    lists:foldl(fun(WArg, Acc) ->
        case mnesia:read(?TRIE, #trie_edge{node_id = NodeId, word = WArg}) of
            [#trie{node_id = ChildId}] -> match_node(ChildId, Words, Acc);
            [] -> Acc
        end
    end, 'match_#'(NodeId, ResAcc), [W, '+']).

%% @private
%% @doc Match node with '#'.
%% A '#' child matches any remaining words, so its node is collected
%% unconditionally.
'match_#'(NodeId, ResAcc) ->
    case mnesia:read(?TRIE, #trie_edge{node_id = NodeId, word = '#'}) of
        [#trie{node_id = ChildId}] ->
            mnesia:read(?TRIE_NODE, ChildId) ++ ResAcc;
        [] -> ResAcc
    end.

%% @private
%% @doc Delete paths from the trie.
%% Walks the reversed path towards the root, removing each edge and
%% pruning nodes that become both edge-less and topic-less; stops as
%% soon as a still-referenced node is reached.
delete_path([]) ->
    ok;
delete_path([{NodeId, Word, _} | RestPath]) ->
    ok = mnesia:delete({?TRIE, #trie_edge{node_id = NodeId, word = Word}}),
    case mnesia:wread({?TRIE_NODE, NodeId}) of
        [#trie_node{edge_count = 1, topic = undefined}] ->
            %% Last edge gone and no stored topic: prune and continue up.
            ok = mnesia:delete({?TRIE_NODE, NodeId}),
            delete_path(RestPath);
        [TrieNode = #trie_node{edge_count = 1, topic = _}] ->
            %% Node still stores a topic filter: keep it, zero the count.
            write_trie_node(TrieNode#trie_node{edge_count = 0});
        [TrieNode = #trie_node{edge_count = C}] ->
            write_trie_node(TrieNode#trie_node{edge_count = C-1});
        [] ->
            %% Inconsistent trie: abort the enclosing transaction.
            mnesia:abort({node_not_found, NodeId})
    end.

%% @private Write a #trie{} edge record.
write_trie(Trie) ->
    mnesia:write(?TRIE, Trie, write).
%% @private
write_trie_node(TrieNode) ->
    mnesia:write(?TRIE_NODE, TrieNode, write).
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ldb_forward).
-author("<NAME> <<EMAIL>>").
-include("ldb.hrl").
-export([all_shards/0,
create/2,
query/2,
update/2,
update_members/1,
update_ignore_keys/1,
get_metrics/0]).
%% number of shards
-define(SHARD_NUMBER, 4096).
%% @doc Registered names of all shard processes, in shard-index order.
-spec all_shards() -> list(atom()).
all_shards() ->
    [ldb_util:integer_to_atom(Index) || Index <- lists:seq(0, ?SHARD_NUMBER - 1)].
%% @doc Create a `Key' in the store with a given `Type'.
%% Synchronous; handled by the shard that owns `Key'.
-spec create(key(), type()) -> ok.
create(Key, Type) ->
    forward(Key, call, {create, Key, Type}).

%% @doc Reads the value associated with a given `Key'.
-spec query(key(), list(term())) -> {ok, value()}.
query(Key, Args) ->
    forward(Key, call, {query, Key, Args}).

%% @doc Update the value associated with a given `Key',
%% applying a given `Operation'.
-spec update(key(), operation()) -> ok.
update(Key, Operation) ->
    forward(Key, call, {update, Key, Operation}).

%% @doc Update members.
%% Broadcast asynchronously (cast) to every shard.
-spec update_members(list(ldb_node_id())) -> ok.
update_members(Members) ->
    forward(all, cast, {update_members, Members}).

%% @doc Update ignore keys.
%% Broadcast synchronously (call) to every shard.
-spec update_ignore_keys(sets:set(string())) -> ok.
update_ignore_keys(IgnoreKeys) ->
    forward(all, call, {update_ignore_keys, IgnoreKeys}).

%% @doc Get metrics from all shards.
%% Returns one result per shard, in shard-index order.
-spec get_metrics() -> list(metrics()).
get_metrics() ->
    forward(all, call, get_metrics).
%% @private Route `Msg' either to every shard (`all') or to the single
%% shard that owns `Key'.  For `all', the per-shard results are
%% returned in shard-index order.
-spec forward(key() | all, atom(), term()) -> term().
forward(all, What, Msg) ->
    lists:map(
        fun(Name) -> do_forward(Name, What, Msg) end,
        all_shards()
    );
forward(Key, What, Msg) ->
    do_forward(shard(Key), What, Msg).

%% @private Forward by call or cast.
%% Calls block with an infinite timeout.
-spec do_forward(atom(), atom(), term()) -> term().
do_forward(Name, call, Msg) ->
    gen_server:call(Name, Msg, infinity);
do_forward(Name, cast, Msg) ->
    gen_server:cast(Name, Msg).
%% @private Get shard name.
%% Maps `Key' to one of the ?SHARD_NUMBER shards with erlang:phash2/2
%% and converts the resulting index to the registered shard name.
%% (Fix: dataset-extraction junk was fused onto the final line.)
-spec shard(term()) -> atom().
shard(Key) ->
    Index = erlang:phash2(Key, ?SHARD_NUMBER),
    ldb_util:integer_to_atom(Index).
-module(astar).
-compile(inline).
-export([search/4]).
-export([is_unvisited/2]).
-export_type([grid/0, valid_fun/0, visited_grids/0]).
-type grid() :: {integer(), integer()}.
-type result() :: {max, Path :: [grid()]} | none| max_limited.
-type max_limit() :: {max_limit, non_neg_integer()}.
-type option() :: {astar_mod, module()} |max_limit().
-type options() :: [option()].
-type valid_fun() :: fun((CurGrid :: grid()) -> boolean()).
-type visited_grids() :: #{Grid :: grid() => true}.
-callback(get_neighbours(ValidFun :: valid_fun(), CurGrid :: grid(), VisitedGrids :: visited_grids()) -> Neighbours :: [grid()]).
-callback(heuristic(Grid1 :: grid(), Grid2 :: grid()) -> H :: number()).
-callback(distance(Grid1 :: grid(), Grid2 :: grid()) -> G :: number()).
-define(MAX_LIMIT, 16#FFFF).
-spec search(StartGrid, EndGrid, ValidFun, Options) -> Result when
      StartGrid :: grid(), EndGrid :: grid(),
      ValidFun :: valid_fun(), Options :: options(),
      Result :: result().
%% A* search from StartGrid to EndGrid.  ValidFun decides which grids
%% may be entered.  Options: `astar_mod' selects the neighbour /
%% heuristic / distance callback module (default astar_diagonally);
%% `max_limit' bounds the number of node expansions (default ?MAX_LIMIT).
search(StartGrid, EndGrid, ValidFun, Options) ->
    %% Open set: pairing heap keyed on f = g + h; each entry carries
    %% {Grid, GCost, ReversedPathToGrid}.
    OpenGrids = insert(0, {StartGrid, 0, []}, new()),
    %% -1 marks the start grid as permanently visited (see is_unvisited/2).
    VisitedGrids = #{StartGrid => -1},
    AStarMod = proplists:get_value(astar_mod, Options, astar_diagonally),
    MaxLimit = proplists:get_value(max_limit, Options, ?MAX_LIMIT),
    do_search(EndGrid, ValidFun, OpenGrids, VisitedGrids, AStarMod, MaxLimit).
%%=====================================================
%% Internal Function
%%=====================================================

%% Pop the best candidate off the open heap until the goal is reached,
%% the heap runs dry (no path exists), or the expansion budget is spent.
do_search(EndGrid, ValidFun, OpenGrids, VisitedGrids, AStarMod, MaxLimit) when MaxLimit > 0 ->
    case take_min(OpenGrids) of
        {{EndGrid, _G, Path}, _NewOpenGrids} ->
            %% Path was accumulated in reverse; reverse it and drop the
            %% start grid before returning.
            {max, erlang:tl(lists:reverse([EndGrid | Path]))};
        {{Grid, G, Path}, NewOpenGrids} ->
            %% io:format("take_min: Grid:~w G:~w ~n", [Grid, G]),
            Neighbours = AStarMod:get_neighbours(ValidFun, Grid, VisitedGrids),
            {OpenGrids2, NewVisitedGrids} = add_neighbours(EndGrid, Grid, G, [Grid | Path], NewOpenGrids, VisitedGrids, AStarMod, Neighbours),
            do_search(EndGrid, ValidFun, OpenGrids2, NewVisitedGrids, AStarMod, MaxLimit - 1);
        empty ->
            none
    end;
do_search(_EndGrid, _ValidFun, _OpenGrids, _VisitedGrids, _AStarMod, _MaxLimit) ->
    max_limited.

%% Push each neighbour onto the open heap keyed on f = g + h, unless a
%% previous visit already reached it with an equal or better score.
add_neighbours(EndGrid, ParentGrid, G, Path, OpenGrids, VisitedGrids, AStarMod, [NGrid | T]) ->
    G1 = G + AStarMod:distance(ParentGrid, NGrid),
    NewScore = G1 + AStarMod:heuristic(EndGrid, NGrid),
    case VisitedGrids of
        #{NGrid := OldScore} when OldScore =< NewScore ->
            %% An equal-or-cheaper route to NGrid is already known.
            OpenGrids1 = OpenGrids,
            VisitedGrids1 = VisitedGrids;
        _ ->
            %% io:format("add_neighbours: NGrid:~w Socre:~w H:~w G:~w G1:~w~n", [NGrid, NewScore, AStarMod:heuristic(EndGrid, NGrid), G, G1]),
            OpenGrids1 = insert(NewScore, {NGrid, G1, Path}, OpenGrids),
            VisitedGrids1 = VisitedGrids#{NGrid => NewScore}
    end,
    add_neighbours(EndGrid, ParentGrid, G, Path, OpenGrids1, VisitedGrids1, AStarMod, T);
add_neighbours(_EndGrid, _CurGrid, _G, _Path, OpenGrids, VisitedGrids, _AStarMod, []) ->
    {OpenGrids, VisitedGrids}.
%%======================================
%% Pairing heap (min-heap on the key)
%%======================================

%% The empty heap is the empty tuple; a non-empty heap is
%% {Key, Value, SubHeaps} where SubHeaps is a list of heaps.
new() ->
    {}.

%% Insert by merging a singleton heap; O(1).
insert(Key, Value, Heap) ->
    do_merge({Key, Value, []}, Heap).

%% Pop the minimum element, or 'empty' for the empty heap.
take_min({}) ->
    empty;
take_min({_MinKey, Value, SubHeaps}) ->
    {Value, merge_pairs(SubHeaps)}.

%% Merge two heaps: the root with the smaller key adopts the other heap.
do_merge(HeapA, {}) ->
    HeapA;
do_merge({}, HeapB) ->
    HeapB;
do_merge({KeyA, ValA, SubA}, HeapB) when KeyA < element(1, HeapB) ->
    {KeyA, ValA, [HeapB | SubA]};
do_merge(HeapA, {KeyB, ValB, SubB}) ->
    {KeyB, ValB, [HeapA | SubB]}.

%% Classic two-pass pairwise merge, used after the root is removed.
merge_pairs([First, Second | Rest]) ->
    do_merge(do_merge(First, Second), merge_pairs(Rest));
merge_pairs([Heap]) ->
    Heap;
merge_pairs([]) ->
    {}.
%% @doc Return true unless `Grid' is permanently visited, i.e. stored
%% with the sentinel score -1.  Any other stored score still counts as
%% unvisited so a cheaper route may replace it later.
%% NOTE(review): the exported visited_grids() type declares `true'
%% values, but the search actually stores numeric scores and -1 — the
%% type spec looks out of date; confirm and fix the -type declaration.
%% (Fix: dataset-extraction junk was fused onto the final line.)
-spec is_unvisited(Grid :: grid(), VisitedGrids :: visited_grids()) -> boolean().
is_unvisited(Grid, VisitedGrids) ->
    case VisitedGrids of
        #{Grid := -1} ->
            false;
        _ ->
            true
    end.
%%
%% @doc Saddleback search.
%%
%% Design a function `invert' that takes two arguments:
%% a function `f: N × N ⟶ N', and
%% a number `z ∈ N'.
%% The value `invert(f, z)' is a list of all pairs `(x,y)'
%% satisfying `f(x,y) = z'.
%% `f' is strictly increasing in each argument.
%%
%% The solution should make as few evaluations of `f' as possible.
%%
%% See performance test results to choose the best algorithm
%% for a given function.
%%
%% @reference [B1] Chapter 3, pp. 12–20
%%
-module(saddleback).
-author("<NAME> <<EMAIL>>").
-export([invert/2, invert1/2, invert2/2, invert3/2, invert4/2]).
-import(lists, [seq/2]).
-type natural() :: non_neg_integer().
%%
%% @doc An alias to {@link invert4/2}.
%% invert4 is the divide-and-conquer variant, the asymptotically best
%% of the four implementations in this module.
%%
-spec invert(F, Z) -> [{X, Y}] when
      F :: fun((X, Y) -> Z),
      X :: natural(),
      Y :: natural(),
      Z :: natural().
invert(F, Z) -> invert4(F, Z).
%%
%% @doc An execution of the definition of the inverse function.
%% The algorithm traverses the entire search square.
%% ```
%% (0,Z) (Z,Z)
%% ┌───────────────┐
%% ↑ ↑ ↑ ↑ ↑ ↑ ↑ ↑ ↑
%% │ │ │ │ │ │ │ │ │
%% │ │ │ │ │ │ │ │ │
%% │ │ │ │ │ │ │ │ │
%% │ │ │ │ │ │ │ │ │
%% │ │ │ │ │ │ │ │ │
%% │ │ │ │ │ │ │ │ │
%% └─┴─┴─┴─┴─┴─┴─┴─┘
%% (0,0) (Z,0)
%% '''
%% Inefficient (quadratic) but easy to understand.
%%
%% See Jack's algorithm, [B1] p. 12.
%%
-spec invert1(F, Z) -> [{X, Y}] when
      F :: fun((X, Y) -> Z),
      X :: natural(),
      Y :: natural(),
      Z :: natural().
%% Brute force: probe every point of the (Z+1) x (Z+1) square, row by
%% row, keeping the points where F hits Z exactly.
invert1(F, Z) ->
    Axis = lists:seq(0, Z),
    [{X, Y} || X <- Axis, Y <- Axis, F(X, Y) =:= Z].
%%
%% @doc Basic algorithm.
%% Traverses the diagonal of the search square, from `(0,Z)' to `(Z,0)'.
%% ```
%% (0,Z) (Z,Z)
%% ┌───────────────┐
%% ├─┐ │
%% │ │ │
%% │ └──┐ │
%% │ └─┐ │
%% │ └─┐ │
%% │ └─┐ │
%% │ └─┐ │
%% └────────────┴──┘
%% (0,0) (Z,0)
%% '''
%% Runs in linear time in `Z'.
%%
%% See Anne's algorithm, [B1] p. 13.
%% @end
%%
%% Not tail optimized.
%%
-spec invert2(F, Z) -> [{X, Y}] when
      F :: fun((X, Y) -> Z),
      X :: natural(),
      Y :: natural(),
      Z :: natural().
%% Saddleback scan along the anti-diagonal of the search square,
%% starting at the top-left corner (0, Z).
invert2(F, Z) -> find2({0, Z}, F, Z).

%% Walk right when F is too small, down when too big, diagonally after
%% a hit; stop once we leave the square.
find2({X, Y}, _F, Z) when X > Z orelse Y < 0 -> [];
find2({X, Y}, F, Z) ->
    case F(X, Y) of
        Z -> [{X, Y} | find2({X + 1, Y - 1}, F, Z)];
        TooSmall when TooSmall < Z -> find2({X + 1, Y}, F, Z);
        _TooBig -> find2({X, Y - 1}, F, Z)
    end.
%%
%% @doc Improvement to the basic algorithm by
%% Gries, Dijkstra, and Backhouse.
%% Replaces the search square with the search rectangle.
%% ```
%% (0,Z) (Z,Z)
%% ┌───────────────┐
%% │ │
%% │ │
%% M ├┬───────────┐ │
%% │└─┐ │ │
%% │ └─┐ │ │
%% │ └─┐ │ │
%% │ └─┐ │ │
%% └────────┴───┴──┘
%% (0,0) N (Z,0)
%% '''
%% For some functions it can yield logarithmic performance.
%%
%% See Theo's algorithm, [B1] p. 14.
%%
-spec invert3(F, Z) -> [{X, Y}] when
      F :: fun((X, Y) -> Z),
      X :: natural(),
      Y :: natural(),
      Z :: natural().
%% First shrink the search square to the rectangle (0,0)-(N,M) found by
%% binary search along the two axes, then run the basic saddleback scan.
invert3(F, Z) ->
    {N, M} = range(ext(F), Z),
    find3({0, M}, F, Z, N).

%% Entry bound check against column N; the inner scan then reuses find2
%% (whose own Z bound still guarantees termination).
find3({U, V}, _F, _Z, N) when U > N orelse V < 0 -> [];
find3({U, V}, F, Z, _N) -> find2({U, V}, F, Z).
%%
%% @doc Finds better initial search range for function `F'.
%% Narrows `(0,0) (0,Z) (Z,Z) (Z,0)' to `(0,0) (0,M) (N,M) (N,0)':
%% N is the largest x with F(x,0) =< Z, and M the largest y with
%% F(0,y) =< Z, each found by binary search on one axis.
%% `F' must be extended to points (0,-1) and (-1,0) — see ext/1.
%%
range(F, Z) ->
    {
        bsearch(fun(X) -> F(X, 0) end, {-1, Z + 1}, Z),
        bsearch(fun(Y) -> F(0, Y) end, {-1, Z + 1}, Z)
    }.
%%
%% @doc Binary search to determine `M' such that
%%```
%% M = bsearch(G, {A, B}, Z), A ≤ M < B, G(M) ≤ Z < G(M + 1)'''
%% G is evaluated exactly once per halving step.
%%
bsearch(_G, {Lo, Hi}, _Z) when Lo + 1 =:= Hi ->
    Lo;
bsearch(G, {Lo, Hi}, Z) ->
    Mid = (Lo + Hi) div 2,
    case G(Mid) =< Z of
        true  -> bsearch(G, {Mid, Hi}, Z);
        false -> bsearch(G, {Lo, Mid}, Z)
    end.
%%
%% @doc Extends function F to points (0,-1) and (-1,0).
%% Both extension points map to 0 so range/2 can probe one step outside
%% the search square without special-casing the axes.
%%
ext(F) ->
    fun
        (0, -1) -> 0;
        (-1, 0) -> 0;
        (X, Y) -> F(X, Y)
    end.
%%
%% @doc Divide and conquer version of {@link invert3/2}.
%% ```
%% (0,Z) (Z,Z)
%% ┌───────────────┐
%% │ │
%% ├───┐ │
%% ├───┘. │
%% │ ┌─┐ │
%% │ └─┘ . │
%% │ ┌──┐│
%% └───────────┴──┴┘
%% (0,0) (Z,0)
%% '''
%% Asymptotically optimal saddleback search algorithm.
%%
%% See Mary's algorithm, [B1] p. 18.
%% @end
%%
%% Not tail optimized though and with list concatenations.
%%
-spec invert4(F, Z) -> [{X, Y}] when
      F :: fun((X, Y) -> Z),
      X :: natural(),
      Y :: natural(),
      Z :: natural().
%% Narrow to the rectangle (0,M)-(N,0), then split it recursively.
invert4(F, Z) ->
    {N, M} = range(ext(F), Z),
    find4({0, M}, {N, 0}, F, Z).

%% Search the rectangle with top-left {U,V} and bottom-right {R,S}:
%% binary-search along the middle row or middle column (whichever spans
%% the shorter side), then recurse on the two leftover sub-rectangles.
%% Note the result is correct but not sorted (see the unit tests).
find4({U, V}, {R, S}, _F, _Z) when U > R orelse V < S -> [];
find4({U, V}, {R, S}, F, Z) ->
    P = (U + R) div 2,
    Q = (V + S) div 2,
    %% Row split: X is the last column on row Q with F(X, Q) =< Z.
    RFind = fun(X) ->
        case F(X, Q) of
            Z -> [{X, Q} | find4({U, V}, {X - 1, Q + 1}, F, Z)];
            _ -> find4({U, V}, {X, Q + 1}, F, Z)
        end ++
        find4({X + 1, Q - 1}, {R, S}, F, Z)
    end,
    %% Column split: Y is the last row in column P with F(P, Y) =< Z.
    CFind = fun(Y) ->
        find4({U, V}, {P - 1, Y + 1}, F, Z) ++
        case F(P, Y) of
            Z -> [{P, Y} | find4({P + 1, Y - 1}, {R, S}, F, Z)];
            _ -> find4({P + 1, Y}, {R, S}, F, Z)
        end
    end,
    if
        V - S =< R - U -> RFind(bsearch(fun(X) -> F(X, Q) end, {U - 1, R + 1}, Z));
        true -> CFind(bsearch(fun(Y) -> F(P, Y) end, {S - 1, V + 1}, Z))
    end.
%% =============================================================================
%% Unit tests
%% =============================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
invert_test_() ->
F = fun(X, Y) -> X + Y end,
Result = [{0, 5}, {1, 4}, {2, 3}, {3, 2}, {4, 1}, {5, 0}],
[
?_assertEqual(Result, invert1(F, 5)),
?_assertEqual(Result, invert2(F, 5)),
?_assertEqual(Result, invert3(F, 5)),
?_assertEqual([{3, 2}, {1, 4}, {0, 5}, {2, 3}, {5, 0}, {4, 1}], invert4(F, 5))
].
ext_test_() ->
F = fun(X, Y) -> X + Y end,
G = ext(F),
[
?_assertEqual(0, G(0, -1)),
?_assertEqual(0, G(-1, 0)),
?_assertEqual(F(3, 4), G(3, 4))
].
-endif.
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(mria_autoheal_SUITE).
-export([ t_autoheal/1
, t_reboot_rejoin/1
]).
-compile(nowarn_underscore_match).
-include_lib("snabbkaffe/include/ct_boilerplate.hrl").
%% Partition a 3-node core cluster, verify the split view from every
%% node, then wait for autoheal to merge the cluster back and check
%% (from the trace) that the minority node was rebooted.
t_autoheal(Config) when is_list(Config) ->
    Cluster = mria_ct:cluster([core, core, core], [{mria, cluster_autoheal, 200}]),
    ?check_trace(
       #{timetrap => 25000},
       try
           [N1, N2, N3] = mria_ct:start_cluster(mria, Cluster),
           %% Simulate netsplit
           true = rpc:cast(N3, net_kernel, disconnect, [N1]),
           true = rpc:cast(N3, net_kernel, disconnect, [N2]),
           ok = timer:sleep(1000),
           %% SplitView: {[N1,N2], [N3]}
           [N1,N2] = rpc:call(N1, mria, info, [running_nodes]),
           [N3] = rpc:call(N1, mria, info, [stopped_nodes]),
           [N1,N2] = rpc:call(N2, mria, info, [running_nodes]),
           [N3] = rpc:call(N2, mria, info, [stopped_nodes]),
           [N3] = rpc:call(N3, mria, info, [running_nodes]),
           [N1,N2] = rpc:call(N3, mria, info, [stopped_nodes]),
           %% Wait for autoheal, it should happen automatically:
           ?retry(1000, 20,
                  begin
                      [N1,N2,N3] = rpc:call(N1, mria, info, [running_nodes]),
                      [N1,N2,N3] = rpc:call(N2, mria, info, [running_nodes]),
                      [N1,N2,N3] = rpc:call(N3, mria, info, [running_nodes])
                  end),
           rpc:call(N1, mria, leave, []),
           rpc:call(N2, mria, leave, []),
           rpc:call(N3, mria, leave, []),
           [N1, N2, N3]
       after
           ok = mria_ct:teardown_cluster(Cluster)
       end,
       fun([_N1, _N2, N3], Trace) ->
           %% Every `start' callback event must eventually be followed
           %% by a `stop' event on the same node.
           ?assert(
              ?causality( #{?snk_kind := mria_exec_callback, type := start, ?snk_meta := #{node := _N}}
                        , #{?snk_kind := mria_exec_callback, type := stop, ?snk_meta := #{node := _N}}
                        , Trace
                        )),
           %% Check that restart callbacks were called after partition was healed:
           %% after "Rebooting minority", N3 must emit stop then start.
           {_, Rest} = ?split_trace_at(#{?snk_kind := "Rebooting minority"}, Trace),
           ?assertMatch( [stop, start|_]
                       , ?projection(type, ?of_kind(mria_exec_callback, ?of_node(N3, Rest)))
                       )
       end).
%% Join a standalone core (C2's cluster) into an existing RLOG cluster.
%% The join performs a full "power cycle" of C2; the trace checks then
%% verify the restart -> autoheal -> joined -> shard-start sequence on
%% C2 and that both replicants re-bootstrapped their shard data.
t_reboot_rejoin(Config) when is_list(Config) ->
    CommonEnv = [ {mria, cluster_autoheal, 200}
                , {mria, db_backend, rlog}
                ],
    Cluster1 = mria_ct:cluster([{core, n0}], CommonEnv),
    Cluster2 = mria_ct:cluster([core, replicant, replicant],
                               CommonEnv,
                               [{base_gen_rpc_port, 9001}]),
    Cluster = mria_ct:merge_gen_rpc_env(Cluster1 ++ Cluster2),
    ?check_trace(
       #{timetrap => 25000},
       try
           AllNodes = [C1, C2, _R1, _R2] = mria_ct:start_cluster(mria, Cluster),
           ?tp(about_to_join, #{}),
           %% performs a full "power cycle" in C2.
           rpc:call(C2, mria, join, [C1]),
           %% we need to ensure that the rlog server for the shard is
           %% restarted, since it died during the "power cycle" from
           %% the join operation.
           rpc:call(C2, mria_rlog, wait_for_shards, [[test_shard], 5000]),
           ?tp(test_end, #{}),
           %% assert there's a single cluster at the end.
           mria_mnesia_test_util:wait_full_replication(Cluster, 5000),
           AllNodes
       after
           ok = mria_ct:teardown_cluster(Cluster)
       end,
       fun([C1, C2, R1, R2], Trace0) ->
           %% Only inspect trace events between the two markers above.
           {_, Trace1} = ?split_trace_at(#{?snk_kind := about_to_join}, Trace0),
           {Trace, _} = ?split_trace_at(#{?snk_kind := test_end}, Trace1),
           TraceC2 = ?of_node(C2, Trace),
           %% C1 joins C2
           ?assert(
              ?strict_causality( #{ ?snk_kind := "Mria is restarting to join the core cluster"
                                  , seed := C1
                                  }
                               , #{ ?snk_kind := "Starting autoheal"
                                  }
                               , TraceC2
                               )),
           ?assert(
              ?strict_causality( #{ ?snk_kind := "Starting autoheal"
                                  }
                               , #{ ?snk_kind := "Mria has joined the core cluster"
                                  , seed := C1
                                  , status := #{ running_nodes := [_, _]
                                               }
                                  }
                               , TraceC2
                               )),
           ?assert(
              ?strict_causality( #{ ?snk_kind := "Mria has joined the core cluster"
                                  , status := #{ running_nodes := [_, _]
                                               }
                                  }
                               , #{ ?snk_kind := "Starting RLOG shard"
                                  , shard := test_shard
                                  }
                               , TraceC2
                               )),
           %% Replicants reboot and bootstrap shard data
           assert_replicant_bootstrapped(R1, C2, Trace),
           assert_replicant_bootstrapped(R2, C2, Trace)
       end).
%% @private Assert, from the trace, that replicant `R' lost its agent
%% after core `C' restarted to switch clusters, and that it then went
%% through the full bootstrap stages again.
%% (Fix: dataset-extraction junk was fused onto the final line.)
assert_replicant_bootstrapped(R, C, Trace) ->
    %% The core that the replicas are connected to is changing
    %% clusters
    ?assert(
       ?strict_causality( #{ ?snk_kind := "Mria is restarting to join the core cluster"
                           , ?snk_meta := #{ node := C }
                           }
                        , #{ ?snk_kind := "Remote RLOG agent died"
                           , ?snk_meta := #{ node := R }
                           }
                        , Trace
                        )),
    mria_rlog_props:replicant_bootstrap_stages(R, Trace),
    ok.
%%%-----------------------------------------------------------------------------
%%% Copyright (C) 2013-2014, <NAME> <<EMAIL>>
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% @author <NAME> <<EMAIL>>
%%% @doc
%%% ejson encoder module
%%% @end
%%%-----------------------------------------------------------------------------
-module(ejson_util).
-export([atom_to_binary_cc/1,
binary_to_atom_cc/1,
is_name_convertable/1,
get_fields/2,
get_field_name/1,
zip/2
]).
%%%=============================================================================
%%% API functions
%%%=============================================================================
%% @doc Convert a snake_case atom to a camelCase binary,
%% e.g. 'field_name' -> <<"fieldName">>.
atom_to_binary_cc(Atom) ->
    list_to_binary(lists:reverse(camel_case(atom_to_list(Atom), []))).

%% @doc Convert a camelCase binary back to a snake_case atom,
%% e.g. <<"fieldName">> -> 'field_name'.
%% NOTE(review): list_to_atom/1 creates new atoms; calling this on
%% untrusted input (e.g. arbitrary JSON keys) can exhaust the atom
%% table.  Consider list_to_existing_atom/1 where the key set is known.
binary_to_atom_cc(Binary) ->
    list_to_atom(lists:reverse(underscore(binary_to_list(Binary), []))).

%% true if the atom can be converted by the two functions unambiguously
%% (starts with a lowercase letter, every '_' is followed by a
%% lowercase letter, and no uppercase letters appear).
is_name_convertable(Atom) when is_atom(Atom) ->
    is_name_convertable(atom_to_list(Atom));
is_name_convertable(String) ->
    start_with_char(String) andalso proper_underscore(String).
%%-----------------------------------------------------------------------------
%% @doc Get attributes of a record, or error if no such record.
%% @end
%%-----------------------------------------------------------------------------
-spec get_fields(atom(), term()) -> list() | {error, {no_such_record, atom()}}.
get_fields(RecordName, Opts) ->
    case lists:keyfind(RecordName, 1, Opts) of
        false ->
            {error, {no_such_record, RecordName}};
        Record ->
            %% Drop the record tag; keep the field descriptors.
            tl(tuple_to_list(Record))
    end.
%% @doc Extract the record field name from a field rule, or `undefined'
%% for skipped fields; unknown rules yield {error, {invalid_field_rule, _}}.
get_field_name(skip) ->
    undefined;
get_field_name({skip, _FieldOpts}) ->
    undefined;
%% 2-tuple rules {Type, Field}.  Note: `proplist' exists only in this
%% form, while `const', `map', `virtual' and `generic' exist only in
%% the 3-tuple form below.
get_field_name({Type, Field}) when Type =:= atom orelse
                                   Type =:= binary orelse
                                   Type =:= boolean orelse
                                   Type =:= list orelse
                                   Type =:= number orelse
                                   Type =:= proplist orelse
                                   Type =:= record orelse
                                   Type =:= string ->
    Field;
%% 3-tuple rules {Type, Field, FieldOpts}.
get_field_name({Type, Field, _FieldOpts}) when Type =:= atom orelse
                                               Type =:= binary orelse
                                               Type =:= boolean orelse
                                               Type =:= const orelse
                                               Type =:= list orelse
                                               Type =:= map orelse
                                               Type =:= number orelse
                                               Type =:= record orelse
                                               Type =:= string orelse
                                               Type =:= virtual ->
    Field;
get_field_name({generic, Field, _FieldOpts}) ->
    Field;
get_field_name(Field) ->
    {error, {invalid_field_rule, Field}}.
%% Zip two lists of possibly different lengths, padding the shorter
%% side with the atom 'undefined'.
zip([], []) ->
    [];
zip([A | As], [B | Bs]) ->
    [{A, B} | zip(As, Bs)];
zip([A | As], []) ->
    [{A, undefined} | zip(As, [])];
zip([], [B | Bs]) ->
    [{undefined, B} | zip([], Bs)].
%%%=============================================================================
%%% Internal functions
%%%=============================================================================

%% A convertable name must begin with a lowercase ASCII letter.
start_with_char([First | _]) when First >= $a, First =< $z ->
    true;
start_with_char(_) ->
    false.
%% If there is an underscore, it needs to be followed by a lowercase
%% letter; uppercase letters are rejected outright.  Only strict
%% snake_case strings round-trip through camel_case/underscore.
proper_underscore([]) ->
    true;
%% '_' followed by a lowercase letter: consume both and continue.
proper_underscore([$_, L | T]) when L >= $a andalso L =< $z ->
    proper_underscore(T);
%% '_' followed by anything else, or a trailing '_'.
proper_underscore([$_ | _T]) ->
    false;
%% Uppercase letters make the conversion ambiguous.
proper_underscore([L | _T]) when L >= $A andalso L =< $Z ->
    false;
proper_underscore([_ | T]) ->
    proper_underscore(T).
%% Reverse-accumulate `Str', turning each "_x" pair into an uppercase
%% letter.  The single-character clause keeps a trailing character
%% (even a lone '_') as-is.  NB: the result is reversed; the caller
%% (atom_to_binary_cc/1) reverses it back.
camel_case([], Acc) ->
    Acc;
camel_case([Last], Acc) ->
    [Last | Acc];
camel_case([$_, Next | Rest], Acc) ->
    camel_case(Rest, [string:to_upper(Next) | Acc]);
camel_case([Ch | Rest], Acc) ->
    camel_case(Rest, [Ch | Acc]).
%% Reverse-accumulate `Str', replacing each uppercase ASCII letter with
%% '_' plus its lowercase form (Cap + 32).  NB: the result is reversed;
%% the caller (binary_to_atom_cc/1) reverses it back.
%% (Fix: dataset-extraction junk was fused onto the final line.)
underscore([], Acc) ->
    Acc;
underscore([Cap | Rest], Acc) when Cap >= $A, Cap =< $Z ->
    underscore(Rest, [Cap + 32, $_ | Acc]);
underscore([Low | Rest], Acc) ->
    underscore(Rest, [Low | Acc]).
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Anonymous function extraction private API.
%%
%% This module is responsible for extracting the code of an anonymous function.
%% The goal is to be able to store the extracted function and execute it later,
%% regardless of the availability of the initial Erlang module which declared
%% it.
%%
%% This module also provides a way for the caller to indicate forbidden
%% operations or function calls.
%%
%% This module works on assembly code to perform all checks and prepare the
%% storable copy of a function. It uses {@link beam_disasm:file/1} from the
%% `compiler' application to extract the assembly code. After the assembly
%% code was extracted and modified, the compiler is used again to compile the
%% code back to an executable module.
%%
%% If the anonymous function calls other functions, either in the same module
%% or in another one, the code of the called functions is extracted and copied
%% as well. This is to make sure the result is completely standalone.
%%
%% To avoid any copies of standard Erlang APIs or Khepri itself, it is
%% possible to specify a list of modules which should not be copied. In this
%% case, calls to functions in those modules are left unmodified.
%%
%% Once the code was extracted and verified, a new module is generated as an
%% "assembly form", ready to be compiled again to an executable module. The
%% generated module has a single `run/N' function. This function contains the
%% code of the extracted anonymous function.
%%
%% Because this process works on the assembly code, it means that if the
%% initial module hosting the anonymous function was compiled with Erlang
%% version N, it will probably not compile or run on older versions of Erlang.
%% The reason is that a newer compiler may use instructions which are unknown
%% to older runtimes.
%%
%% There is a special treatment for anonymous functions evaluated by
%% `erl_eval' (e.g. in the Erlang shell). "erl_eval functions" are lambdas
%% parsed from text and are evaluated using `erl_eval'.
%%
%% This kind of lambdas becomes a local function in the `erl_eval' module.
%%
%% Their assembly code isn't available in the `erl_eval' module. However, the
%% abstract code (i.e. after parsing but before compilation) is available in
%% the `env'. We compile that abstract code and extract the assembly from that
%% compiled beam.
%%
%% This module is private. The documentation is still visible because it may
%% help understand some implementation details. However, this module should
%% never be called directly outside of Khepri.
-module(khepri_fun).
-include_lib("kernel/include/logger.hrl").
-include_lib("stdlib/include/assert.hrl").
-include("src/khepri_fun.hrl").
-export([to_standalone_fun/1,
to_standalone_fun/2,
exec/2]).
-ifdef(TEST).
-export([standalone_fun_cache_key/5,
override_object_code/2,
get_object_code/1,
decode_line_chunk/2,
compile/1]).
-endif.
%% FIXME: compile:forms/2 is incorrectly specified and doesn't accept
%% assembly. This breaks compile/1 and causes a cascade of errors.
%%
%% The following basically disables Dialyzer for this module unfortunately...
%% This can be removed once we start using Erlang 25 to run Dialyzer.
-dialyzer({nowarn_function, [compile/1,
to_standalone_fun/1,
to_standalone_fun/2,
to_standalone_fun1/2,
to_standalone_fun2/2,
to_standalone_fun3/2,
to_standalone_env/1,
to_standalone_arg/2,
standalone_fun_cache_key/1,
cache_standalone_fun/2,
handle_compilation_error/2,
handle_validation_error/3,
add_comments_and_retry/5,
add_comments_to_function/5,
add_comments_to_code/3,
add_comments_to_code/4,
find_comments_in_branch/2,
find_comments_in_branch/4,
split_comments/1,
split_comments/2,
merge_comments/2]}).
%% Map view of the result of `erlang:fun_info/1', i.e.
%% `maps:from_list(erlang:fun_info(Fun))'.
-type fun_info() :: #{arity => arity(),
                      env => any(),
                      index => any(),
                      name => atom(),
                      module => module(),
                      new_index => any(),
                      new_uniq => any(),
                      pid => any(),
                      type => local | external,
                      uniq => any()}.
%% A single BEAM assembly instruction, as decoded by `beam_disasm'.
-type beam_instr() :: atom() | tuple().
%% A jump label in BEAM assembly.
-type label() :: pos_integer().
%% The following records are used to store the decoded "Line" beam chunk. They
%% are used while processing `line' instructions to restore the correct
%% location. This is needed so that exception stacktraces point to real
%% locations in source files.
-record(lines, {item_count,
items = [],
name_count,
names = [],
location_size}).
-record(line, {name_index,
location}).
%% The following record is also linked to the decoding of the "Line" beam
%% chunk.
-record(tag, {tag,
size,
word_value,
ptr_value}).
%% -------------------------------------------------------------------
%% Taken from lib/compiler/src/beam_disasm.hrl,
%% commit <PASSWORD>
-record(function, {name :: atom(),
arity :: byte(),
entry :: beam_lib:label(), %% unnecessary ?
code = [] :: [beam_instr()]}).
-record(beam_file, {module :: module(),
labeled_exports = [] :: [beam_lib:labeled_entry()],
attributes = [] :: [beam_lib:attrib_entry()],
compile_info = [] :: [beam_lib:compinfo_entry()],
code = [] :: [#function{}]}).
%% -------------------------------------------------------------------
%% Extension of `#beam_file{}' (from `beam_disasm') carrying the extra
%% chunks this module decodes itself.
-record(beam_file_ext, {module :: module(),
                        labeled_exports = [] :: [beam_lib:labeled_entry()],
                        attributes = [] :: [beam_lib:attrib_entry()],
                        compile_info = [] :: [beam_lib:compinfo_entry()],
                        code = [] :: [#function{}],
                        %% Added in this module to store the decoded "Line"
                        %% chunk.
                        lines :: #lines{} | undefined,
                        %% Added in this module; raw string data from the
                        %% beam file, if any (presumably the strings chunk —
                        %% TODO confirm against the decoder).
                        strings :: binary() | undefined}).
-type ensure_instruction_is_permitted_fun() ::
fun((Instruction :: beam_instr()) -> ok).
%% Function which evaluates the given instruction and returns `ok' if it is
%% permitted, throws an exception otherwise.
%%
%% Example:
%%
%% ```
%% Fun = fun
%% ({jump, _}) -> ok;
%% ({move, _, _}) -> ok;
%% ({trim, _, _}) -> ok;
%% (Unknown) -> throw({unknown_instruction, Unknown})
%% end.
%% '''
-type should_process_function_fun() ::
fun((Module :: module(),
Function :: atom(),
Arity :: arity(),
FromModule :: module()) -> ShouldProcess :: boolean()).
%% Function which returns true if a called function should be extracted and
%% followed, false otherwise.
%%
%% `Module', `Function' and `Arity' qualify the function being called.
%%
%% `FromModule' indicates the module performing the call. This is useful to
%% distinguish local calls (`FromModule' == `Module') from remote calls.
%%
%% Example:
%%
%% ```
%% Fun = fun(Module, Name, Arity, FromModule) ->
%% Module =:= FromModule orelse
%% erlang:function_exported(Module, Name, Arity)
%% end.
%% '''
-type is_standalone_fun_still_needed_fun() ::
fun((#{calls := #{Call :: mfa() => true},
errors := [Error :: any()]}) -> IsNeeded :: boolean()).
%% Function which evaluates if the extracted function is still relevant in the
%% end. It returns true if it is, false otherwise.
%%
%% It takes a map with the following members:
%% <ul>
%% <li>`calls', a map of all the calls performed by the extracted code (only
%% the key is useful, the value is always true).</li>
%% <li>`errors', a list of errors collected during the extraction.</li>
%% </ul>
-type standalone_fun() :: #standalone_fun{} | fun().
%% The result of an extraction, as returned by {@link to_standalone_fun/2}.
%%
%% It can be stored, passed between processes and Erlang nodes. To execute the
%% extracted function, simply call {@link exec/2} which works like {@link
%% erlang:apply/2}.
-type options() :: #{ensure_instruction_is_permitted =>
ensure_instruction_is_permitted_fun(),
should_process_function =>
should_process_function_fun(),
is_standalone_fun_still_needed =>
is_standalone_fun_still_needed_fun()}.
%% Options to tune the extraction of an anonymous function.
%%
%% <ul>
%% <li>`ensure_instruction_is_permitted': a function which evaluates if an
%% instruction is permitted or not.</li>
%% <li>`should_process_function': a function which returns if a called module
%% and function should be extracted as well or left alone.</li>
%% <li>`is_standalone_fun_still_needed': a function which returns if, after
%% the extraction is finished, the extracted function is still needed in
%% comparison to keeping the initial anonymous function.</li>
%% </ul>
-export_type([standalone_fun/0,
options/0]).
%% State threaded through the extraction passes.
-record(state, {generated_module_name :: module() | undefined,
                entrypoint :: mfa() | undefined,
                %% Checksums of all the modules whose code was copied.
                checksums = #{} :: #{module() => binary()},
                fun_info :: fun_info(),
                %% Direct calls of the function currently being processed
                %% (reset per function in pass 1).
                calls = #{} :: #{mfa() => true},
                %% All calls seen so far, including those from inner lambdas.
                all_calls = #{} :: #{mfa() => true},
                %% Copied and processed functions, keyed by their MFA.
                functions = #{} :: #{mfa() => #function{}},
                lines_in_progress :: #lines{} | undefined,
                strings_in_progress :: binary() | undefined,
                mfa_in_progress :: mfa() | undefined,
                function_in_progress :: atom() | undefined,
                next_label = 1 :: label(),
                %% Maps labels in the source modules to the renumbered
                %% labels in the generated module.
                label_map = #{} :: #{{module(), label()} => label()},
                literal_funs = #{} :: #{fun() => standalone_fun()},
                errors = [] :: [any()],
                options = #{} :: options()}).
-type asm() :: {module(),
[{atom(), arity()}],
[],
[#function{}],
label()}.
-define(SF_ENTRYPOINT, run).
-spec to_standalone_fun(Fun) -> StandaloneFun when
      Fun :: fun(),
      StandaloneFun :: standalone_fun().
%% @doc Extracts the given anonymous function, using default options.
%%
%% Equivalent to:
%% ```
%% khepri_fun:to_standalone_fun(Fun, #{}).
%% '''
%%
%% @param Fun the anonymous function to extract
%%
%% @returns a standalone function record, or the anonymous function itself
%% when no extraction was needed.
to_standalone_fun(Fun) ->
    DefaultOptions = #{},
    to_standalone_fun(Fun, DefaultOptions).
-spec to_standalone_fun(Fun, Options) -> StandaloneFun when
      Fun :: fun(),
      Options :: options(),
      StandaloneFun :: standalone_fun().
%% @doc Extracts the given anonymous function according to `Options'.
%%
%% @param Fun the anonymous function to extract
%% @param Options a map of options
%%
%% @returns a standalone function record, or the anonymous function itself
%% when no extraction was needed.
to_standalone_fun(Fun, Options) ->
    %% The final state is only useful to nested extractions; drop it here.
    {StandaloneFun, _FinalState} = to_standalone_fun1(Fun, Options),
    StandaloneFun.
-spec to_standalone_fun1(Fun, Options) -> {StandaloneFun, State} when
      Fun :: fun(),
      Options :: options(),
      StandaloneFun :: standalone_fun(),
      State :: #state{}.
%% @private
%% @hidden
%% Builds the initial extraction state from `erlang:fun_info/1' and starts
%% the extraction. The fun itself counts as the first "call".
to_standalone_fun1(Fun, Options) ->
    FunInfo = maps:from_list(erlang:fun_info(Fun)),
    #{module := Mod,
      name := FunName,
      arity := FunArity} = FunInfo,
    InitialState = #state{fun_info = FunInfo,
                          all_calls = #{{Mod, FunName, FunArity} => true},
                          options = Options},
    to_standalone_fun2(Fun, InitialState).
-spec to_standalone_fun2(Fun, State) -> {StandaloneFun, State} when
      Fun :: fun(),
      State :: #state{},
      StandaloneFun :: standalone_fun().
%% @private
%% @hidden
%% Consults the cache before doing any extraction work. Three outcomes:
%% a cached `#standalone_fun{}' (stored without its environment) is reused
%% after being completed with this call's environment; `fun_kept' means a
%% previous extraction concluded the fun can be used as-is; a cache miss
%% falls through to the actual extraction.
to_standalone_fun2(Fun, State) ->
    case get_cached_standalone_fun(State) of
        #standalone_fun{} = StandaloneFunWithoutEnv ->
            %% We need to set the environment for this specific call of the
            %% anonymous function in the returned `#standalone_fun{}'.
            {Env, State1} = to_standalone_env(State),
            StandaloneFun = StandaloneFunWithoutEnv#standalone_fun{env = Env},
            {StandaloneFun, State1};
        fun_kept ->
            {Fun, State};
        undefined ->
            to_standalone_fun3(Fun, State)
    end.
-spec to_standalone_fun3(Fun, State) -> {StandaloneFun, State} when
      Fun :: fun(),
      State :: #state{},
      StandaloneFun :: standalone_fun().
%% @private
%% @hidden
%% Performs the actual extraction: pass 1 collects the code of the fun and
%% of everything it calls, pass 2 produces the assembly which is compiled
%% into a standalone module. The result (or the decision to keep the plain
%% fun) is recorded in the cache.
to_standalone_fun3(
  Fun,
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      type := Type}} = State) ->
    %% Don't extract functions like "fun dict:new/0" which are not meant to be
    %% copied.
    {ShouldProcess,
     State1} = case Type of
                   local ->
                       should_process_function(
                         Module, Name, Arity, Module, State);
                   external ->
                       %% Make sure the module is loaded before checking the
                       %% function is exported.
                       _ = code:ensure_loaded(Module),
                       case erlang:function_exported(Module, Name, Arity) of
                           true ->
                               should_process_function(
                                 Module, Name, Arity, undefined, State);
                           false ->
                               throw({call_to_unexported_function,
                                      {Module, Name, Arity}})
                       end
               end,
    case ShouldProcess of
        true ->
            State2 = pass1(State1),
            {Env, State3} = to_standalone_env(State2),
            %% We offer one last chance to the caller to determine if a
            %% standalone function is still useful for him.
            %%
            %% This callback is only used for the top-level lambda. In other
            %% words, if the `env' contains other lambdas (i.e. anonymous
            %% functions passed as argument to the top-level one), the
            %% callback is not used. However, calls and errors from those
            %% inner lambdas are accumulated and can be used by the callback.
            case is_standalone_fun_still_needed(State3) of
                true ->
                    process_errors(State3),
                    #state{literal_funs = LiteralFuns0} = State3,
                    LiteralFuns = maps:values(LiteralFuns0),
                    Asm = pass2(State3),
                    {GeneratedModuleName, Beam} = compile(Asm),
                    StandaloneFun = #standalone_fun{
                                       module = GeneratedModuleName,
                                       beam = Beam,
                                       arity = Arity,
                                       literal_funs = LiteralFuns,
                                       env = Env},
                    cache_standalone_fun(State3, StandaloneFun),
                    {StandaloneFun, State3};
                false ->
                    %% The caller decided the plain fun is good enough;
                    %% remember that decision in the cache.
                    cache_standalone_fun(State3, fun_kept),
                    {Fun, State3}
            end;
        false ->
            process_errors(State1),
            cache_standalone_fun(State1, fun_kept),
            {Fun, State1}
    end.
-spec to_embedded_standalone_fun(Fun, State) -> {StandaloneFun, State} when
      Fun :: fun(),
      State :: #state{},
      StandaloneFun :: standalone_fun().
%% @private
%% @hidden
%% Extracts a lambda found inside the top-level one, then folds the inner
%% extraction's recorded calls and errors back into the outer state.
to_embedded_standalone_fun(Fun, State) when is_function(Fun) ->
    #state{options = Options,
           all_calls = OuterCalls,
           errors = OuterErrors} = State,
    {StandaloneFun, InnerState} = to_standalone_fun1(Fun, Options),
    #state{all_calls = InnerCalls,
           errors = InnerErrors} = InnerState,
    %% Accumulate calls and errors so the top-level callbacks can see them.
    State1 = State#state{all_calls = maps:merge(OuterCalls, InnerCalls),
                         errors = OuterErrors ++ InnerErrors},
    {StandaloneFun, State1}.
-spec standalone_fun_cache_key(State) -> Key when
      State :: #state{},
      Key :: {?MODULE,
              standalone_fun_cache,
              {module(), atom(), arity()},
              binary(),
              options()}.
%% @doc Computes the standalone function cache key.
%%
%% To identify a standalone function in the cache, we base the key on:
%% <ul>
%% <li>the anonymous function's module, function name and arity</li>
%% <li>the checksum of the module holding that function</li>
%% <li>the extraction options (see {@link standalone_fun_cache_key/5})</li>
%% </ul>
%%
%% Note: the spec previously advertised a 4-tuple tagged
%% `standalone_fun_cache_key'; the actual key built by
%% `standalone_fun_cache_key/5' is a 5-tuple tagged `standalone_fun_cache'
%% which also carries the options.
%%
%% @private
standalone_fun_cache_key(
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      type := local,
                      new_uniq := Checksum},
         options = Options}) ->
    %% For local funs, the `new_uniq' fun info is used as the checksum.
    standalone_fun_cache_key(Module, Name, Arity, Checksum, Options);
standalone_fun_cache_key(
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      type := external},
         options = Options}) ->
    %% External funs don't carry a usable checksum; query the module.
    Checksum = Module:module_info(md5),
    standalone_fun_cache_key(Module, Name, Arity, Checksum, Options).
%% Builds the actual `persistent_term' cache key.
%%
%% The options are part of the key because different options could affect
%% the created standalone function.
standalone_fun_cache_key(Module, Name, Arity, Checksum, Options) ->
    MFA = {Module, Name, Arity},
    {?MODULE, standalone_fun_cache, MFA, Checksum, Options}.
-spec get_cached_standalone_fun(State) -> Ret when
      State :: #state{},
      Ret :: StandaloneFun | fun_kept | undefined,
      StandaloneFun :: standalone_fun().
%% @doc Returns the cached standalone function if found.
%%
%% @returns a `standalone_fun()' if a corresponding standalone function was
%% found in the cache, a `fun_kept' atom if the anonymous function didn't need
%% any processing and can be used directly, or `undefined' if there is no
%% corresponding entry in the cache.
%%
%% @private
get_cached_standalone_fun(
  #state{fun_info = #{module := Module}} = State)
  when Module =/= erl_eval ->
    Key = standalone_fun_cache_key(State),
    case persistent_term:get(Key, undefined) of
        #{standalone_fun := StandaloneFunWithoutEnv,
          checksums := Checksums,
          counters := Counters} ->
            %% We want to make sure that all the modules used by the anonymous
            %% function were not updated since it was stored in the cache.
            %% Therefore, they must have the same checksums as the ones
            %% stored in the cache. This list of modules also contains the
            %% modules holding the callbacks specified in `Options'.
            %%
            %% The checksum of the module holding the anonymous function is
            %% already in the cache key however. Likewise for the actual
            %% options.
            SameModules = maps:fold(
                            fun
                                (Mod, Checksum, true) ->
                                    Checksum =:= Mod:module_info(md5);
                                (_Module, _Checksum, false) ->
                                    false
                            end, true, Checksums),
            if
                SameModules ->
                    %% Cache hit; the counter is only observed by the
                    %% testsuite.
                    counters:add(Counters, 1, 1),
                    StandaloneFunWithoutEnv;
                true ->
                    %% At least one module changed since the entry was
                    %% stored: treat it as a miss.
                    undefined
            end;
        #{fun_kept := true,
          counters := Counters} ->
            %% `fun_kept' means the anonymous function could be used directly;
            %% i.e. there was no need to create a standalone function.
            counters:add(Counters, 1, 1),
            fun_kept;
        undefined ->
            undefined
    end;
get_cached_standalone_fun(_State) ->
    %% We don't cache `erl_eval'-based anonymous functions currently.
    %%
    %% TODO: Can we cache them?
    undefined.
-spec cache_standalone_fun(State, StandaloneFun) -> ok when
      State :: #state{},
      StandaloneFun :: standalone_fun() | fun_kept.
%% @doc Stores the result of an extraction in the `persistent_term' cache.
%%
%% `StandaloneFun' is either the extracted `#standalone_fun{}' (stored
%% without its environment) or the `fun_kept' marker meaning the anonymous
%% function can be used as-is.
%%
%% Note: the spec previously declared the arguments in the reverse order of
%% the actual function head; it now matches the implementation.
%%
%% @private
cache_standalone_fun(
  #state{checksums = Checksums, options = Options} = State,
  StandaloneFun) ->
    %% We include the options in the cached value. This is useful when the
    %% callbacks change for the same anonymous function.
    %%
    %% Record the checksum of every module holding a callback fun passed in
    %% the options, so cache lookups can detect when they change.
    Checksums1 = maps:fold(
                   fun
                       (_Key, Fun, Acc) when is_function(Fun) ->
                           Info = maps:from_list(erlang:fun_info(Fun)),
                           #{module := Module,
                             new_uniq := Checksum} = Info,
                           case Acc of
                               #{Module := KnownChecksum} ->
                                   ?assertEqual(KnownChecksum, Checksum),
                                   Acc;
                               _ ->
                                   Acc#{Module => Checksum}
                           end;
                       (_Key, _Value, Acc) ->
                           Acc
                   end, Checksums, Options),
    Key = standalone_fun_cache_key(State),
    %% Counters track the cache hits. They are only used by the testsuite
    %% currently.
    Counters = counters:new(1, [write_concurrency]),
    case StandaloneFun of
        #standalone_fun{} ->
            %% The standalone function is stored in the cache without its
            %% environment (the variable bindings in the anonymous function).
            %% They are given for a specific call of this function and may
            %% change for another call, even though the code is the same
            %% otherwise.
            %%
            %% The environment is set by the caller of
            %% `get_cached_standalone_fun()'.
            StandaloneFunWithoutEnv = StandaloneFun#standalone_fun{env = []},
            Value = #{standalone_fun => StandaloneFunWithoutEnv,
                      checksums => Checksums1,
                      options => Options,
                      counters => Counters},
            %% TODO: We need to add some memory management here to clear the
            %% cache if there are many different standalone functions.
            persistent_term:put(Key, Value);
        fun_kept ->
            Value = #{fun_kept => true,
                      counters => Counters},
            persistent_term:put(Key, Value)
    end,
    ok.
-spec compile(Asm) -> {Module, Beam} when
      Asm :: asm(), %% FIXME: compile:forms/2 is incorrectly specified.
      Module :: module(),
      Beam :: binary().
%% Compiles the generated assembly to a loadable binary. Warnings are
%% treated as errors; any failure is handed to
%% `handle_compilation_error/2', which may retry with extra annotations.
compile(Asm) ->
    Options = [from_asm,
               binary,
               warnings_as_errors,
               return_errors,
               return_warnings,
               deterministic],
    Ret = compile:forms(Asm, Options),
    case Ret of
        {ok, Module, Beam, []} ->
            {Module, Beam};
        Error ->
            handle_compilation_error(Asm, Error)
    end.
%% Classifies a compilation failure: `beam_validator' errors may be
%% recoverable by annotating the assembly and retrying; anything else is
%% rethrown as `{compilation_failure, Error, Asm}'.
handle_compilation_error(
  Asm,
  {error,
   [{_GeneratedModuleName,
     [{_, beam_validator, ValidationFailure} | _Rest]}],
   []} = Error) ->
    handle_validation_error(Asm, ValidationFailure, Error);
handle_compilation_error(
  Asm,
  %% Same as above, but returned by Erlang 23's compiler instead of Erlang 24+.
  {error,
   [{_GeneratedModuleName,
     [{beam_validator, ValidationFailure} | _Rest]}],
   []} = Error) ->
    handle_validation_error(Asm, ValidationFailure, Error);
handle_compilation_error(Asm, Error) ->
    throw({compilation_failure, Error, Asm}).
%% Attempts to work around known `beam_validator' failures by injecting
%% `var_info' comments into the assembly and recompiling; unknown failures
%% are rethrown.
handle_validation_error(
  Asm,
  {{_, Name, Arity},
   {{Call, _Arity, {f, EntryLabel}},
    CallIndex,
    no_bs_start_match2}},
  Error) when Call =:= call orelse Call =:= call_only ->
    %% The callee needs a match context: copy the var_info comments in
    %% effect at the call site to just after the callee's entry label.
    {_GeneratedModuleName,
     _Exports,
     _Attributes,
     Functions,
     _Labels} = Asm,
    #function{code = Instructions} = find_function(Functions, Name, Arity),
    Comments = find_comments_in_branch(Instructions, CallIndex),
    Location = {'after', {label, EntryLabel}},
    add_comments_and_retry(Asm, Error, EntryLabel, Location, Comments);
handle_validation_error(
  Asm,
  {FailingFun,
   {{bs_start_match4, _Fail, _, Var, Var} = FailingInstruction,
    _,
    {bad_type, {needed, NeededType}, {actual, any}}}},
  Error) ->
    %% The validator lost the type of `Var': annotate it with the needed
    %% type right before the failing instruction.
    VarInfo = {var_info, Var, [{type, NeededType}]},
    Comments = [{'%', VarInfo}],
    Location = {before, FailingInstruction},
    add_comments_and_retry(Asm, Error, FailingFun, Location, Comments);
handle_validation_error(Asm, _ValidationFailure, Error) ->
    throw({compilation_failure, Error, Asm}).
%% Looks up the comments for all variables within a branch up as they
%% appear at the given instruction index.
%%
%% For example, consider the instructions for this function:
%%
%% ...
%% {label, 4},
%% {'%', {var_info, {x, 0}, [accepts_match_context]}},
%% {bs_start_match4, {atom, no_fail}, 1, {x, 0}, {x, 0}},
%% {test, bs_match_string, {f, 5}, [{x, 0}, 8, {string, <<".">>}]},
%% {move, {x, 0}, {x, 1}},
%% {move, nil, {x, 0}},
%% {call_only, 2, {f, 7},
%% {label, 5},
%% ...
%%
%% When given these instructions and the index for the `call_only'
%% instruction, this function should return:
%%
%% [{'%', {var_info, {x, 1}, [accepts_match_context]}}]
%%
%% Notice that this comment applies to `{x, 1}' instead of `{x, 0}' as it
%% appears in the original comment, since this is the typing of the variables
%% at the time of the `call_only' instruction.
%%
%% To construct these comments, we fold through the instructions in order
%% and track the comments. There are some special-case instructions to
%% consider - `label/1' and `move/2'.
%%
%% A `label/1' instruction begins a branch within the instructions. Each
%% branch has independent typing, so any comments that appear between two
%% labels only apply to the variables within that branch. So in this function,
%% `label/1' clears any types we've gathered. In the above example
%% instructions, we only care about the typing between `{label, 4}' and
%% `{label, 5}'.
%%
%% `move/2' moves some value from a source register or literal `Src' to
%% a destination register `Dst'. When handling a `move/2', if any comments
%% exist for `Src', we move the comments for `Src' to `Dst'. If any comments
%% exist for `Dst', they are discarded. In the example instructions,
%% `{move, {x, 0}, {x, 1}}' moves the comment for `{x, 0}' to `{x, 1}'.
%% Folds over the instructions up to `Index', tracking the `var_info'
%% comments in effect for each register, and returns them as a fresh list of
%% comment instructions. See the explanation above for the details.
find_comments_in_branch(Instructions, Index) ->
    %% `beam_validator' starts counting instructions at 1.
    find_comments_in_branch(Instructions, Index, 1, #{}).

find_comments_in_branch(_Remaining, Index, Index, VarInfos) ->
    %% Reached the target instruction: emit one comment per tracked
    %% register.
    [{'%', {var_info, Reg, Info}} || {Reg, Info} <- maps:to_list(VarInfos)];
find_comments_in_branch([Instruction | Rest], Index, Counter, VarInfos) ->
    VarInfos1 = case Instruction of
                    {'%', {var_info, Reg, Info}} ->
                        %% Record (or refresh) the comment for `Reg'.
                        VarInfos#{Reg => Info};
                    {move, Src, Dst} ->
                        %% A move carries `Src''s comment over to `Dst';
                        %% whatever was known about `Dst' is invalidated.
                        Moved = case VarInfos of
                                    #{Src := Info} ->
                                        VarInfos#{Dst => Info};
                                    _ ->
                                        maps:remove(Dst, VarInfos)
                                end,
                        maps:remove(Src, Moved);
                    {label, _} ->
                        %% Each branch (separated by labels) has independent
                        %% typing.
                        #{};
                    _ ->
                        VarInfos
                end,
    find_comments_in_branch(Rest, Index, Counter + 1, VarInfos1).
%% Injects `Comments' at `Location' inside the function identified by
%% `FailingFun' (an MFA tuple or an entry label), then recompiles. If the
%% exact same annotations are already present, retrying would loop forever,
%% so the original compilation error is rethrown instead.
add_comments_and_retry(
  Asm, Error, FailingFun, Location, Comments) ->
    {GeneratedModuleName,
     Exports,
     Attributes,
     Functions,
     Labels} = Asm,
    try
        Functions1 = add_comments_to_function(
                       Functions, FailingFun, Location, Comments, []),
        Asm1 = {GeneratedModuleName,
                Exports,
                Attributes,
                Functions1,
                Labels},
        compile(Asm1)
    catch
        throw:duplicate_annotations ->
            throw({compilation_failure, Error, Asm})
    end.
%% Walks the function list and patches the one matching `FailingFun'. The
%% target may be designated by `{Module, Name, Arity}' (first clause) or by
%% its entry label (second clause); other functions are kept untouched.
add_comments_to_function(
  [#function{name = Name, arity = Arity, code = Code} = Function | Rest],
  {_GeneratedModuleName, Name, Arity},
  Location, Comments, Result) ->
    Code1 = add_comments_to_code(Code, Location, Comments),
    Function1 = Function#function{code = Code1},
    lists:reverse(Result) ++ [Function1 | Rest];
add_comments_to_function(
  [#function{entry = EntryLabel, code = Code} = Function | Rest],
  EntryLabel, Location, Comments, Result) ->
    %% Same as above, but matched on the entry label.
    Code1 = add_comments_to_code(Code, Location, Comments),
    Function1 = Function#function{code = Code1},
    lists:reverse(Result) ++ [Function1 | Rest];
add_comments_to_function(
  [Function | Rest], FailingFun, Location, Comments, Result) ->
    add_comments_to_function(
      Rest, FailingFun, Location, Comments, [Function | Result]).
%% Inserts `Comments' into an instruction list, either `{before, Instr}' or
%% `{'after', Instr}' the anchor instruction, merging them with any comments
%% already present at that spot.
add_comments_to_code(Code, Location, Comments) ->
    add_comments_to_code(Code, Location, Comments, []).

add_comments_to_code([Anchor | Tail], {before, Anchor}, Comments, Seen) ->
    %% Comments immediately preceding the anchor sit at the head of the
    %% (reversed) accumulator.
    {Existing, Seen1} = split_comments(Seen),
    Merged = merge_comments(Comments, Existing),
    lists:reverse(Seen1) ++ Merged ++ [Anchor | Tail];
add_comments_to_code([Anchor | Tail], {'after', Anchor}, Comments, Seen) ->
    %% Comments immediately following the anchor sit at the head of the
    %% remaining instructions.
    {Existing, Tail1} = split_comments(Tail),
    Merged = merge_comments(Comments, Existing),
    lists:reverse(Seen) ++ [Anchor | Merged] ++ Tail1;
add_comments_to_code([Instruction | Tail], Location, Comments, Seen) ->
    add_comments_to_code(Tail, Location, Comments, [Instruction | Seen]).

%% Splits a leading run of `{'%', _}' comment instructions off a list.
split_comments(Instructions) ->
    split_comments(Instructions, []).

split_comments([{'%', _} = Comment | Rest], Acc) ->
    split_comments(Rest, [Comment | Acc]);
split_comments(Rest, Acc) ->
    {lists:reverse(Acc), Rest}.

%% Merges new `var_info' comments with the ones already in place. A comment
%% identical to an existing one means a previous retry already added it;
%% throw so the retry loop gives up instead of spinning.
merge_comments(Comments, ExistingComments) ->
    ExistingByVar = maps:from_list(
                      [{Var, Info}
                       || {'%', {var_info, Var, Info}} <- ExistingComments]),
    MergeOne = fun({'%', {var_info, Var, Info}} = Annotation) ->
                       case ExistingByVar of
                           #{Var := Info} ->
                               throw(duplicate_annotations);
                           #{Var := ExistingInfo} ->
                               {'%', {var_info, Var, Info ++ ExistingInfo}};
                           _ ->
                               Annotation
                       end
               end,
    lists:map(MergeOne, Comments).
-spec exec(StandaloneFun, Args) -> Ret when
      StandaloneFun :: standalone_fun(),
      Args :: [any()],
      Ret :: any().
%% @doc Executes a previously extracted anonymous function.
%%
%% This is the equivalent of {@link erlang:apply/2} but it supports extracted
%% anonymous functions.
%%
%% The list of `Args' must match the arity of the anonymous function.
%%
%% @param StandaloneFun the extracted function as returned by {@link
%% to_standalone_fun/2}.
%% @param Args the list of arguments to pass to the extracted function.
%%
%% @returns the return value of the extracted function.
exec(
  #standalone_fun{module = Module,
                  arity = Arity,
                  literal_funs = LiteralFuns,
                  env = Env} = StandaloneFun,
  Args) when length(Args) =:= Arity ->
    load_standalone_fun(StandaloneFun),
    %% We also need to load any literal functions referenced by the standalone
    %% function and extracted with it. The assembly code already references
    %% them.
    lists:foreach(
      fun(LiteralFun) -> load_standalone_fun(LiteralFun) end,
      LiteralFuns),
    %% The environment values are appended as extra arguments of the
    %% generated entrypoint.
    Env1 = to_actual_arg(Env),
    erlang:apply(Module, ?SF_ENTRYPOINT, Args ++ Env1);
exec(#standalone_fun{} = StandaloneFun, Args) ->
    %% Wrong number of arguments: fail like a regular fun would.
    exit({badarity, {StandaloneFun, Args}});
exec(Fun, Args) ->
    %% Not an extracted function: plain apply.
    erlang:apply(Fun, Args).
-spec load_standalone_fun(StandaloneFun) -> ok when
      StandaloneFun :: standalone_fun().
%% @private
%% Loads the generated module into the code server, unless a module with
%% that name is already loaded. Regular funs need no loading.
load_standalone_fun(#standalone_fun{module = Module, beam = Beam}) ->
    case code:is_loaded(Module) of
        false ->
            {module, _} = code:load_binary(Module, ?MODULE_STRING, Beam),
            ok;
        _ ->
            ok
    end;
load_standalone_fun(Fun) when is_function(Fun) ->
    ok.
%% -------------------------------------------------------------------
%% Code processing [Pass 1]
%% -------------------------------------------------------------------
-spec pass1(State) -> State when
      State :: #state{}.
%% Pass 1: starting from the anonymous function itself, copies the code of
%% every function it transitively calls. The first clause handles
%% `erl_eval'-based lambdas, whose entrypoint keeps its declared arity; for
%% regular lambdas (second clause), the environment values become extra
%% arguments of the entrypoint.
pass1(
  #state{fun_info = #{module := erl_eval, type := local} = Info,
         checksums = Checksums} = State) ->
    #{module := Module,
      name := Name,
      arity := Arity} = Info,
    Checksum = maps:get(new_uniq, Info),
    ?assert(is_binary(Checksum)),
    Checksums1 = Checksums#{Module => Checksum},
    State1 = State#state{checksums = Checksums1,
                         entrypoint = {Module, Name, Arity}},
    pass1_process_function(Module, Name, Arity, State1);
pass1(
  #state{fun_info = Info,
         checksums = Checksums} = State) ->
    #{module := Module,
      name := Name,
      arity := Arity,
      env := Env} = Info,
    %% Internally, a lambda which takes arguments and values from its
    %% environment (i.e. variables declared in the function which defined that
    %% lambda).
    InternalArity = Arity + length(Env),
    State1 = case maps:get(type, Info) of
                 local ->
                     %% Only local funs carry a checksum in their fun info.
                     Checksum = maps:get(new_uniq, Info),
                     ?assert(is_binary(Checksum)),
                     Checksums1 = Checksums#{Module => Checksum},
                     State#state{checksums = Checksums1};
                 external ->
                     State
             end,
    State2 = State1#state{entrypoint = {Module, Name, InternalArity}},
    pass1_process_function(Module, Name, InternalArity, State2).
-spec pass1_process_function(Module, Name, Arity, State) -> State when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{}.
%% Copies and processes the code of `Module:Name/Arity' unless it was
%% already done, then recurses into every function it calls directly.
pass1_process_function(
  Module, Name, Arity,
  #state{functions = Functions} = State)
  when is_map_key({Module, Name, Arity}, Functions) ->
    %% Already processed; this also stops recursion on (mutually) recursive
    %% functions.
    State;
pass1_process_function(Module, Name, Arity, State) ->
    MFA = {Module, Name, Arity},
    %% `calls' is reset here: it only accumulates the direct calls of the
    %% function being processed.
    State1 = State#state{mfa_in_progress = MFA,
                         calls = #{}},
    {Function0, State2} = lookup_function(Module, Name, Arity, State1),
    {Function1, State3} = pass1_process_function_code(Function0, State2),
    #state{calls = Calls,
           functions = Functions} = State3,
    Functions1 = Functions#{MFA => Function1},
    State4 = State3#state{functions = Functions1},
    %% Recurse with called functions.
    maps:fold(
      fun({M, F, A}, true, St) ->
              pass1_process_function(M, F, A, St)
      end, State4, Calls).
-spec pass1_process_function_code(Function, State) -> {Function, State} when
      Function :: #function{},
      State :: #state{}.
%% Processes every instruction of a copied function and renumbers its
%% labels so they don't clash with other copied functions.
pass1_process_function_code(
  #function{entry = OldEntryLabel,
            code = Instructions} = Function,
  #state{mfa_in_progress = {Module, _, _} = MFA,
         next_label = NextLabel,
         functions = Functions} = State) ->
    ?assertNot(maps:is_key(MFA, Functions)),
    %% Compute label diff.
    {label, FirstLabel} = lists:keyfind(label, 1, Instructions),
    LabelDiff = NextLabel - FirstLabel,
    %% pass1_process_instructions
    {Instructions1, State1} = pass1_process_instructions(Instructions, State),
    %% Compute its new entry label.
    #state{label_map = LabelMap} = State1,
    LabelKey = {Module, OldEntryLabel},
    NewEntryLabel = maps:get(LabelKey, LabelMap),
    %% Labels are renumbered sequentially, so the entry label must have
    %% shifted by the same offset as the first label.
    ?assertEqual(LabelDiff, NewEntryLabel - OldEntryLabel),
    %% Rename function & fix its entry label.
    Function1 = Function#function{
                  entry = NewEntryLabel,
                  code = Instructions1},
    {Function1, State1}.
-spec pass1_process_instructions(Instructions, State) ->
    {Instructions, State} when
      Instructions :: [beam_instr()],
      State :: #state{}.
%% Entry point of the instruction walk; starts with an empty accumulator.
pass1_process_instructions(Instructions, State) ->
    pass1_process_instructions(Instructions, State, []).
%% The first group of clauses of this function patch incorrectly decoded
%% instructions. These clauses recurse after fixing the instruction to enter
%% the other groups of clauses.
%%
%% The second group of clauses:
%% 1. ensures the instruction is known and allowed,
%% 2. records all calls that need their code to be copied and
%% 3. records jump labels.
%%
%% The third group of clauses infers type information and match contexts
%% and adds comments to satisfy the compiler's validator pass.
%% First group.
pass1_process_instructions(
[{arithfbif, Operation, Fail, Args, Dst} | Rest],
State,
Result) ->
%% `beam_disasm' did not decode this instruction correctly. `arithfbif'
%% should be translated into a `bif'.
Instruction = {bif, Operation, Fail, Args, Dst},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{bs_append, _, _, _, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
State,
Result)
when is_integer(FF) ->
%% `beam_disasm' did not decode this instruction's field flags.
Instruction = decode_field_flags(Instruction0, 8),
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{bs_create_bin,
[{{f, _} = Fail, {u, Heap}, {u, Live}, {u, Unit}, Dst, _, _N, List}]}
| Rest],
State,
Result) when is_list(List) ->
%% `beam_disasm' decoded the instruction's arguments as a tuple inside a
%% list. They should be part of the instruction's tuple. Also, various
%% arguments are not wrapped/unwrapped correctly.
List1 = fix_create_bin_list(List, State),
Instruction = {bs_create_bin, Fail, Heap, Live, Unit, Dst, {list, List1}},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{bs_private_append, _, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
State,
Result)
when is_integer(FF) ->
%% `beam_disasm' did not decode this instruction's field flags.
Instruction = decode_field_flags(Instruction0, 6),
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{BsInit, _, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
State,
Result)
when (BsInit =:= bs_init2 orelse BsInit =:= bs_init_bits) andalso
is_integer(FF) ->
%% `beam_disasm' did not decode this instruction's field flags.
Instruction = decode_field_flags(Instruction0, 6),
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{BsPutSomething, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
State,
Result)
when (BsPutSomething =:= bs_put_binary orelse
BsPutSomething =:= bs_put_integer) andalso
is_integer(FF) ->
%% `beam_disasm' did not decode this instruction's field flags.
Instruction = decode_field_flags(Instruction0, 5),
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{bs_start_match3, Fail, Bin, {u, Live}, Dst} | Rest],
State,
Result) ->
%% `beam_disasm' did not decode this instruction correctly. We need to
%% patch it to:
%% 1. add `test' as the first element in the tuple,
%% 2. swap `Bin' and `Live',
%% 3. put `Bin' in a list and
%% 4. store `Live' as an integer.
Instruction = {test, bs_start_match3, Fail, Live, [Bin], Dst},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{bs_start_match4, Fail, {u, Live}, Src, Dst} | Rest],
State,
Result) ->
%% `beam_disasm' did not decode this instruction correctly. We need to
%% patch it to store `Live' as an integer.
Instruction = {bs_start_match4, Fail, Live, Src, Dst},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{call_fun2,
{atom, safe},
Arity,
{tr, FunReg, {{t_fun, _Arity, _Domain, _Range} = Type, _, _}}} | Rest],
State,
Result) ->
%% `beam_disasm' did not decode this instruction correctly. The
%% type in the type-tagged record is wrapped with extra information
%% we discard.
Instruction = {call_fun2, {atom, safe}, Arity, {tr, FunReg, Type}},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{test, BsGetSomething,
Fail, [Ctx, Live, Size, Unit, {field_flags, FF} = FieldFlags0, Dst]}
| Rest],
State,
Result)
when (BsGetSomething =:= bs_get_integer2 orelse
BsGetSomething =:= bs_get_binary2) andalso
is_integer(FF) ->
%% `beam_disasm' did not decode this instruction correctly. We need to
%% patch it to move `Live' before the list. We also need to decode field
%% flags.
FieldFlags = decode_field_flags(FieldFlags0),
Instruction = {test, BsGetSomething,
Fail, Live, [Ctx, Size, Unit, FieldFlags], Dst},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{BsGetSomething, Ctx, Dst, {u, Live}} | Rest],
State,
Result)
when BsGetSomething =:= bs_get_position orelse
BsGetSomething =:= bs_get_tail ->
%% `beam_disasm' did not decode this instruction correctly. We need to
%% patch it to store `Live' as an integer.
Instruction = {BsGetSomething, Ctx, Dst, Live},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{test, bs_match_string, Fail, [Ctx, Stride, String]} | Rest],
State,
Result) when is_binary(String) ->
%% `beam_disasm' did not decode this instruction correctly. We need to
%% patch it to put `String' inside a tuple.
Instruction = {test, bs_match_string,
Fail, [Ctx, Stride, {string, String}]},
pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
[{raise, Fail, Args, Dst} | Rest],
State,
Result) ->
%% `beam_disasm` did not decode this instruction correctly. `raise'
%% should be translated into a `bif'.
Instruction = {bif, raise, Fail, Args, Dst},
pass1_process_instructions([Instruction | Rest], State, Result);
%% Second group.
pass1_process_instructions(
[{Call, Arity, {Module, Name, Arity}} = Instruction | Rest],
State,
Result)
when Call =:= call orelse Call =:= call_only ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{Call, Arity, {extfunc, Module, Name, Arity}} = Instruction | Rest],
State,
Result)
when Call =:= call_ext orelse Call =:= call_ext_only ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{call_last, Arity, {Module, Name, Arity}, _} = Instruction
| Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{call_ext_last, Arity, {extfunc, Module, Name, Arity}, _} = Instruction
| Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{label, OldLabel} | Rest],
#state{mfa_in_progress = {Module, _, _},
next_label = NewLabel,
label_map = LabelMap} = State,
Result) ->
Instruction = {label, NewLabel},
LabelKey = {Module, OldLabel},
?assertNot(maps:is_key(LabelKey, LabelMap)),
LabelMap1 = LabelMap#{LabelKey => NewLabel},
State1 = State#state{next_label = NewLabel + 1,
label_map = LabelMap1},
%% `beam_disasm' seems to put `line' before `label', but the compiler is
%% not pleased with that. Let's make sure the `label' appears first in the
%% final assembly form.
Result1 = case Result of
[{line, _} = Line | R] -> [Line, Instruction | R];
_ -> [Instruction | Result]
end,
pass1_process_instructions(Rest, State1, Result1);
pass1_process_instructions(
[{line, Index} | Rest],
#state{lines_in_progress = Lines} = State,
Result) ->
case Lines of
#lines{items = Items, names = Names} ->
%% We could decode the "Line" beam chunk which contains the mapping
%% between `Index' in the instruction decoded by `beam_disasm' and
%% the actual location (filename + line number). Therefore we can
%% generate the correct `line' instruction.
#line{name_index = NameIndex,
location = Location} = lists:nth(Index + 1, Items),
Name = lists:nth(NameIndex + 1, Names),
Line = {line, [{location, Name, Location}]},
pass1_process_instructions(Rest, State, [Line | Result]);
undefined ->
%% Drop this instruction as we don't have the "Line" beam chunk to
%% decode it.
pass1_process_instructions(Rest, State, Result)
end;
pass1_process_instructions(
[{make_fun2, {Module, Name, Arity}, _, _, _} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{make_fun3, {Module, Name, Arity}, _, _, _, _} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
State2 = pass1_process_call(Module, Name, Arity, State1),
pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
[{move, {literal, Fun}, Reg} = Instruction | Rest],
State,
Result) when is_function(Fun) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
%% This `move' instruction references a lambda: we must extract it like
%% lambas present in the environment. We keep track of all extracted
%% literal funs in the `literal_funs' field of the state record. This is
%% used to create the final `#standalone_fun{}' at the end.
%%
%% Note that `literal_funs` can contain both `#standalone_fun{}' records
%% and lambdas. The latter is possible if the function doesn't need
%% extraction.
#state{literal_funs = LiteralFuns} = State1,
State3 = case LiteralFuns of
#{Fun := _} ->
State1;
_ ->
{StandaloneFun, State2} = to_embedded_standalone_fun(
Fun, State1),
LiteralFuns1 = LiteralFuns#{Fun => StandaloneFun},
State2#state{literal_funs = LiteralFuns1}
end,
%% We can now get the result of the extraction and recreate the `move'
%% instruction.
#state{literal_funs = #{Fun := StandaloneFun1}} = State3,
Fun1 = case StandaloneFun1 of
#standalone_fun{module = Module, arity = Arity} ->
%% The lambda was extracted. Here we simply construct the
%% reference to the entrypoint function in the generated
%% module. It doesn't matter that the module is not loaded.
fun Module:?SF_ENTRYPOINT/Arity;
_ when is_function(StandaloneFun1) ->
%% The function didn't require any extraction.
?assertEqual(Fun, StandaloneFun1),
Fun
end,
Instruction1 = {move, {literal, Fun1}, Reg},
pass1_process_instructions(Rest, State3, [Instruction1 | Result]);
%% Third group.
pass1_process_instructions(
[{get_tuple_element, Src, Element, _Dest} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
Src1 = fix_type_tagged_beam_register(Src),
Instruction1 = setelement(2, Instruction, Src1),
Reg = get_reg_from_type_tagged_beam_register(Src1),
Type = {t_tuple, Element + 1, false, #{}},
VarInfo = {var_info, Reg, [{type, Type}]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction1, Comment | Result]);
pass1_process_instructions(
[{select_tuple_arity, Src, _, _} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
Src1 = fix_type_tagged_beam_register(Src),
Instruction1 = setelement(2, Instruction, Src1),
Reg = get_reg_from_type_tagged_beam_register(Src1),
Type = {t_tuple, 0, false, #{}},
VarInfo = {var_info, Reg, [{type, Type}]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction1, Comment | Result]);
pass1_process_instructions(
[{get_map_elements, _Fail, Src, {list, _}} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
Src1 = fix_type_tagged_beam_register(Src),
Instruction1 = setelement(3, Instruction, Src1),
Reg = get_reg_from_type_tagged_beam_register(Src1),
Type = {t_map, any, any},
VarInfo = {var_info, Reg, [{type, Type}]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction1, Comment | Result]);
pass1_process_instructions(
[{put_map_assoc, _Fail, Src, _Dst, _Live, {list, _}} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
Type = {t_map, any, any},
VarInfo = {var_info, Src, [{type, Type}]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction, Comment | Result]);
pass1_process_instructions(
[{bs_start_match4, _Fail, _, Var, Var} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
VarInfo = {var_info, Var, [accepts_match_context]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction, Comment | Result]);
pass1_process_instructions(
[{test, bs_start_match3, _Fail, _, [Var], _Dst} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
VarInfo = {var_info, Var, [accepts_match_context]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction, Comment | Result]);
pass1_process_instructions(
[{test, test_arity, _Fail, [Var, Arity]} = Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
Type = {t_tuple, Arity, false, #{}},
VarInfo = {var_info, Var, [{type, Type}]},
Comment = {'%', VarInfo},
pass1_process_instructions(Rest, State1, [Instruction, Comment | Result]);
pass1_process_instructions(
[Instruction | Rest],
State,
Result) ->
State1 = ensure_instruction_is_permitted(Instruction, State),
pass1_process_instructions(Rest, State1, [Instruction | Result]);
pass1_process_instructions(
[],
State,
Result) ->
{lists:reverse(Result), State}.
-spec pass1_process_call(Module, Name, Arity, State) -> State when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{}.
%% @private
%% Records a call to `Module:Name/Arity' discovered while processing
%% instructions in pass 1. Every call is tracked in `all_calls'; calls to
%% functions that should be extracted (per `should_process_function/5') and
%% that were not copied yet are queued in `calls' for later processing.

%% Recursive call to the function currently being processed: nothing to do.
pass1_process_call(
  Module, Name, Arity,
  #state{mfa_in_progress = {Module, Name, Arity}} = State) ->
    State;
pass1_process_call(
  Module, Name, Arity,
  #state{mfa_in_progress = {FromModule, _, _},
         functions = Functions,
         calls = Calls,
         all_calls = AllCalls} = State) ->
    CallKey = {Module, Name, Arity},
    AllCalls1 = AllCalls#{CallKey => true},
    case should_process_function(Module, Name, Arity, FromModule, State) of
        {true, State1} ->
            case Functions of
                #{CallKey := _} ->
                    %% NOTE(review): `AllCalls1' is dropped in this branch;
                    %% presumably the key was already added to `all_calls'
                    %% when this call was first encountered — confirm.
                    State1;
                _ ->
                    Calls1 = Calls#{CallKey => true},
                    State1#state{calls = Calls1,
                                 all_calls = AllCalls1}
            end;
        {false, State1} ->
            %% The callee is not extracted, but the call is still recorded
            %% in `all_calls' (used by `is_standalone_fun_still_needed/1').
            State1#state{all_calls = AllCalls1}
    end.
-spec lookup_function(Module, Name, Arity, State) -> {Function, State} when
      Module :: module(),
      Name :: atom(),
      Arity :: non_neg_integer() | undefined,
      State :: #state{},
      Function :: #function{}.
%% Looks up the function from the given module with the given arity (if
%% defined), disassembling the module if necessary.

%% Special case: the looked-up function is the `erl_eval' lambda being
%% extracted itself. Note that the arity comes from `fun_info', not from the
%% `_Arity' argument.
lookup_function(
  erl_eval = Module, Name, _Arity,
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      env := Env}} = State) ->
    %% There is a special case for `erl_eval' local functions: they are
    %% lambdas dynamically parsed, compiled and loaded by `erl_eval' and
    %% appear as local functions inside `erl_eval' directly.
    %%
    %% However `erl_eval' module doesn't contain the assembly for those
    %% functions. Instead, the abstract form of the source code is available
    %% in the lambda's env.
    %%
    %% There here, we compile the abstract form and extract the assembly from
    %% the compiled beam. This allows to use the rest of `khepri_fun'
    %% unmodified.
    %%
    %% FIXME: Can we compile to assembly form using 'S' instead?
    #beam_file_ext{code = Functions} = erl_eval_fun_to_asm(
                                         Module, Name, Arity, Env),
    {find_function(Functions, Name, Arity), State};
%% General case: disassemble the module (possibly from cache) and search its
%% function list.
lookup_function(Module, Name, Arity, State) ->
    {#beam_file_ext{code = Functions}, State1} =
        disassemble_module(Module, State),
    {find_function(Functions, Name, Arity), State1}.
-spec find_function([Function], Name, Arity) -> Function when
      Function :: #function{},
      Name :: atom(),
      Arity :: non_neg_integer() | undefined.
%% Finds a function in a list of functions by its name and arity. The arity
%% check is skipped when `Arity' is `undefined'. Crashes (no matching
%% clause) when the list is exhausted without a match.

find_function([#function{name = N, arity = A} = Function | Rest],
              Name, Arity) ->
    SameName = N =:= Name,
    SameArity = Arity =:= undefined orelse A =:= Arity,
    case SameName andalso SameArity of
        true  -> Function;
        false -> find_function(Rest, Name, Arity)
    end.
-spec erl_eval_fun_to_asm(Module, Name, Arity, Env) -> BeamFileRecord when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      Env :: any(),
      BeamFileRecord :: #beam_file_ext{}.
%% @private
%% Recovers the assembly of an `erl_eval' lambda from the abstract clauses
%% stored in its env. The shape of the env tuple changed between OTP
%% releases, hence the two clauses. Only lambdas whose bindings are empty
%% are handled (guard below).

erl_eval_fun_to_asm(Module, Name, Arity, [{_, Bindings, _, _, _, Clauses}])
  when Bindings =:= [] orelse %% Erlang is using a list for bindings,
       Bindings =:= #{} ->    %% but Elixir is using a map.
    %% Erlang starting from 25.
    erl_eval_fun_to_asm1(Module, Name, Arity, Clauses);
erl_eval_fun_to_asm(Module, Name, Arity, [{Bindings, _, _, Clauses}])
  when Bindings =:= [] orelse %% Erlang is using a list for bindings,
       Bindings =:= #{} ->    %% but Elixir is using a map.
    %% Erlang up to 24.
    erl_eval_fun_to_asm1(Module, Name, Arity, Clauses).
%% Wraps the lambda's clauses in a minimal module, compiles it and returns
%% the disassembled result. Throws
%% `{erl_eval_fun_compilation_failure, Error}' if compilation fails.
erl_eval_fun_to_asm1(Module, Name, Arity, Clauses) ->
    %% Reconstruct an abstract form around the clauses kept in the lambda's
    %% env by `erl_eval'.
    Anno = erl_anno:from_term(1),
    AbstractForms = [{attribute, Anno, module, Module},
                     {attribute, Anno, export, [{Name, Arity}]},
                     {function, Anno, Name, Arity, Clauses}],
    %% Compile the abstract form to a beam binary, then pull the assembly
    %% back out of the compiled beam. Warnings can be ignored: `erl_eval'
    %% already parsed and compiled this code successfully before.
    CompilerOptions = [from_abstr,
                       binary,
                       return_errors,
                       return_warnings,
                       deterministic],
    CompileRet = compile:forms(AbstractForms, CompilerOptions),
    case CompileRet of
        {ok, Module, Beam, _Warnings} ->
            do_disassemble(Beam);
        Error ->
            throw({erl_eval_fun_compilation_failure, Error})
    end.
-spec disassemble_module(Module, State) -> {BeamFileRecord, State} when
      Module :: module(),
      State :: #state{},
      BeamFileRecord :: #beam_file_ext{}.

%% Cache key for disassembled modules; scoped by checksum so a module
%% reloaded with different code gets its own cache entry.
-define(ASM_CACHE_KEY(Module, Checksum),
        {?MODULE, asm_cache, Module, Checksum}).

%% @private
%% Disassembles `Module', remembering its checksum in the state and stashing
%% the decoded "Line" and "StrT" chunks in the state for instruction
%% processing.

disassemble_module(Module, #state{checksums = Checksums} = State) ->
    case Checksums of
        #{Module := Checksum} ->
            %% The module was seen before: reuse (and verify) the recorded
            %% checksum.
            {#beam_file_ext{lines = Lines,
                            strings = Strings} = BeamFileRecord,
             Checksum} = disassemble_module1(Module, Checksum),
            State1 = State#state{lines_in_progress = Lines,
                                 strings_in_progress = Strings},
            {BeamFileRecord, State1};
        _ ->
            %% First time this module is disassembled: compute and record
            %% its checksum.
            {#beam_file_ext{lines = Lines,
                            strings = Strings} = BeamFileRecord,
             Checksum} = disassemble_module1(Module, undefined),
            ?assert(is_binary(Checksum)),
            Checksums1 = Checksums#{Module => Checksum},
            State1 = State#state{checksums = Checksums1,
                                 lines_in_progress = Lines,
                                 strings_in_progress = Strings},
            {BeamFileRecord, State1}
    end.
%% Disassembles `Module' through a `persistent_term' cache keyed by the
%% module's MD5 checksum. When a checksum is supplied, a beam with a
%% different checksum is rejected with
%% `{mismatching_module_checksum, ...}'; when `undefined', the current
%% beam's checksum is computed and returned alongside the assembly.
disassemble_module1(Module, Checksum) when is_binary(Checksum) ->
    CacheKey = ?ASM_CACHE_KEY(Module, Checksum),
    case persistent_term:get(CacheKey, undefined) of
        #beam_file_ext{} = CachedAsm ->
            {CachedAsm, Checksum};
        undefined ->
            {Module, Beam, _} = get_object_code(Module),
            {ok, {Module, ActualChecksum}} = beam_lib:md5(Beam),
            if
                ActualChecksum =:= Checksum ->
                    Asm = do_disassemble_and_cache(Module, Checksum, Beam),
                    {Asm, Checksum};
                true ->
                    throw(
                      {mismatching_module_checksum,
                       Module, Checksum, ActualChecksum})
            end
    end;
disassemble_module1(Module, undefined) ->
    {Module, Beam, _} = get_object_code(Module),
    {ok, {Module, Checksum}} = beam_lib:md5(Beam),
    {do_disassemble_and_cache(Module, Checksum, Beam), Checksum}.
-ifdef(TEST).
%% Key under which a module's object code can be overridden in tests.
-define(OBJECT_CODE_KEY(Module), {?MODULE, object_code, Module}).

%% Test-only helper: stores `Beam' so that `get_object_code/1' returns it
%% for `Module' instead of asking the code server.
override_object_code(Module, Beam) ->
    Key = ?OBJECT_CODE_KEY(Module),
    persistent_term:put(Key, Beam),
    ok.

%% Returns the (possibly overridden) object code for `Module'. Overridden
%% modules get a fake `""' filename.
get_object_code(Module) ->
    Key = ?OBJECT_CODE_KEY(Module),
    case persistent_term:get(Key, undefined) of
        undefined -> do_get_object_code(Module);
        Beam -> {Module, Beam, ""}
    end.
-else.
%% Outside of tests, always go through the code server.
get_object_code(Module) ->
    do_get_object_code(Module).
-endif.
%% Fetches `Module''s object code from the code server; throws
%% `{module_not_found, Module}' when it is unavailable.
do_get_object_code(Module) ->
    case code:get_object_code(Module) of
        {Module, _Beam, _Filename} = Found -> Found;
        error -> throw({module_not_found, Module})
    end.
%% Disassembles `Beam' and stores the result in the `persistent_term' cache
%% under the module/checksum pair before returning it.
do_disassemble_and_cache(Module, Checksum, Beam) ->
    Asm = do_disassemble(Beam),
    persistent_term:put(?ASM_CACHE_KEY(Module, Checksum), Asm),
    Asm.
%% Disassembles `Beam' with `beam_disasm' and augments the result with the
%% decoded "Line" and "StrT" chunks, which `beam_disasm' does not expose.
do_disassemble(Beam) ->
    #beam_file{module = Module,
               labeled_exports = LabeledExports,
               attributes = Attributes,
               compile_info = CompileInfo,
               code = Code} = beam_disasm:file(Beam),
    #beam_file_ext{module = Module,
                   labeled_exports = LabeledExports,
                   attributes = Attributes,
                   compile_info = CompileInfo,
                   code = Code,
                   lines = get_and_decode_line_chunk(Module, Beam),
                   strings = get_and_decode_string_chunk(Module, Beam)}.
%% The "Line" beam chunk decoding is based on the equivalent C code in ERTS.
%% See: erts/emulator/beam/beam_file.c, parse_line_chunk().
%%
%% The opposite encoding function is inside the compiler.
%% See: compiler/src/beam_asm.erl, build_line_table().

%% Constants mirroring the C implementation's integer sizes.
-define(CHAR_BIT, 8).
-define(sizeof_Sint16, 2).
-define(sizeof_Sint32, 4).
-define(sizeof_SWord, 4).

%% See erts/emulator/beam/big.h
%% Here, we assume a 64bit architecture with 4-byte integers.
%% With that assumption every 32-bit value fits a "small", so the macro
%% reduces to a compile-time `true'.
-define(_IS_SSMALL32(X), true).
-define(IS_SSMALL(X), ?_IS_SSMALL32(X)).
%% Extracts and decodes the "Line" chunk of `Beam'; returns `undefined'
%% when the chunk is absent or the beam cannot be read.
get_and_decode_line_chunk(Module, Beam) ->
    Ret = beam_lib:chunks(Beam, ["Line"]),
    case Ret of
        {ok, {Module, [{"Line", LineChunk}]}} ->
            decode_line_chunk(Module, LineChunk);
        _ ->
            undefined
    end.
%% Entry point of the "Line" chunk decoder; simply dispatches on the chunk
%% format version.
decode_line_chunk(Module, Chunk) ->
    decode_line_chunk_version(Module, Chunk).

%% Decodes the leading 32-bit version field of the "Line" chunk.
decode_line_chunk_version(
  Module,
  <<Version:32/integer,
    Rest/binary>>)
  when Version =:= 0 ->
    %% The original C code makes an assertion that the version is 0, thus the
    %% guard expression above.
    decode_line_chunk_counts_and_flags(Module, Rest).
%% Decodes the "Line" chunk header (flags and table sizes) and initializes
%% the #lines{} accumulator before decoding the items themselves.
decode_line_chunk_counts_and_flags(
  Module,
  <<_Flags:32/integer,
    _InstrCount:32/integer,
    ItemCount:32/integer,
    NameCount:32/integer,
    Rest/binary>>) ->
    %% Item index 0 is reserved for the "undefined location", as in ERTS.
    UndefinedLocation = #line{name_index = 0,
                              location = 0},
    %% Name index 0 implicitly maps to the module's own source filename.
    ModuleFilename = atom_to_list(Module) ++ ".erl",
    NameCount1 = NameCount + 1,
    ItemCount1 = ItemCount + 1,
    %% Locations use 16-bit storage unless more than one filename is
    %% involved; it may still be widened while decoding items (large line
    %% numbers).
    LocationSize = if
                       NameCount1 > 1 -> ?sizeof_Sint32;
                       true -> ?sizeof_Sint16
                   end,
    Lines = #lines{item_count = ItemCount1,
                   items = [UndefinedLocation],
                   name_count = NameCount1,
                   names = [ModuleFilename],
                   location_size = LocationSize},
    NameIndex = 0,
    I = 1,
    decode_line_chunk_items(Rest, I, NameIndex, Lines).
%% Decodes the line items of the "Line" chunk. A `tag_a' entry switches the
%% current filename (name index) without producing an item; a `tag_i' entry
%% records a location (line number) against the current filename.
decode_line_chunk_items(
  Rest, I, NameIndex,
  #lines{item_count = ItemCount,
         items = Items,
         location_size = LocationSize} = Lines)
  when I < ItemCount ->
    {Tag, Rest1} = read_tagged(Rest),
    case Tag of
        #tag{tag = tag_a, word_value = WordValue} ->
            %% Filename change: `I' is not incremented as no item was
            %% produced.
            NameIndex1 = WordValue,
            decode_line_chunk_items(Rest1, I, NameIndex1, Lines);
        #tag{tag = tag_i, size = 0, word_value = WordValue}
          when WordValue >= 0 ->
            %% Line numbers above 16 bits force 32-bit location storage.
            LocationSize1 = if
                                WordValue > 16#FFFF -> ?sizeof_Sint32;
                                true -> LocationSize
                            end,
            Item = #line{name_index = NameIndex,
                         location = WordValue},
            %% NOTE(review): `++' in this loop is O(n^2) overall; tolerable
            %% because line tables are small, but a reversed accumulator
            %% would avoid it (requires touching the caller's seed list).
            Lines1 = Lines#lines{items = Items ++ [Item],
                                 location_size = LocationSize1},
            decode_line_chunk_items(Rest1, I + 1, NameIndex, Lines1)
    end;
decode_line_chunk_items(
  Rest, I, _NameIndex,
  #lines{item_count = ItemCount} = Lines) when I =:= ItemCount ->
    %% All items decoded; the remainder of the chunk holds the filenames.
    decode_line_chunk_names(Rest, 1, Lines).
%% Decodes the filenames table at the end of the "Line" chunk: a sequence
%% of 16-bit-length-prefixed names, appended after the implicit
%% "<module>.erl" entry seeded by the caller. The final clause asserts that
%% the whole chunk was consumed.
decode_line_chunk_names(
  <<NameLength:16/integer, Name:NameLength/binary, Rest/binary>>,
  I,
  #lines{name_count = NameCount,
         names = Names} = Lines)
  when I < NameCount ->
    Name1 = unicode:characters_to_list(Name),
    Names1 = Names ++ [Name1],
    Lines1 = Lines#lines{names = Names1},
    decode_line_chunk_names(Rest, I + 1, Lines1);
decode_line_chunk_names(<<>>, I, #lines{name_count = NameCount} = Lines)
  when I =:= NameCount ->
    Lines.
%% Returns the raw "StrT" chunk of `Beam', or `undefined' when absent.
%%
%% There is nothing to decode: the chunk is a concatenation of binaries and
%% instructions reference them by offset and length.
get_and_decode_string_chunk(Module, Beam) ->
    Ret = beam_lib:chunks(Beam, ["StrT"]),
    case Ret of
        {ok, {Module, [{"StrT", StringsChunk}]}} -> StringsChunk;
        _ -> undefined
    end.
%% See: erts/emulator/beam/beam_file.c, beamreader_read_tagged().
%%
%% Decodes one compact-term-encoded value from a beam chunk. The low 3 bits
%% of the first byte carry the tag; the remaining bits describe how the
%% value itself is encoded (inline, one extra byte, or a multi-byte blob).
read_tagged(
  <<LenCode:8/unsigned-integer,
    Rest/binary>>) ->
    Tag = decode_tag(LenCode band 16#07),
    if
        LenCode band 16#08 =:= 0 ->
            %% Single-byte encoding: the value lives in the top 4 bits.
            WordValue = LenCode bsr 4,
            Size = 0,
            {#tag{tag = Tag,
                  word_value = WordValue,
                  size = Size},
             Rest};
        LenCode band 16#10 =:= 0 ->
            %% Two-byte encoding: 3 high bits here plus one extra byte.
            <<ExtraByte:8/unsigned-integer, Rest1/binary>> = Rest,
            WordValue = ((LenCode bsr 5) bsl 8) bor ExtraByte,
            Size = 0,
            {#tag{tag = Tag,
                  word_value = WordValue,
                  size = Size},
             Rest1};
        true ->
            %% Multi-byte encoding: the byte count is either embedded in
            %% the top 3 bits (2..8 bytes) or, when those read 7, given by
            %% a nested tagged unsigned (9 bytes or more).
            LenCode1 = LenCode bsr 5,
            {Count, Rest1} = if
                                 LenCode1 < 7 ->
                                     SizeBase = 2,
                                     {LenCode1 + SizeBase, Rest};
                                 true ->
                                     SizeBase = 9,
                                     {#tag{tag = tag_u,
                                           word_value = UnpackedSize},
                                      R1} = read_tagged(Rest),
                                     {UnpackedSize + SizeBase, R1}
                             end,
            <<Data:Count/binary, Rest2/binary>> = Rest1,
            case unpack_varint(Count, Data) of
                WordValue when is_integer(WordValue) andalso Tag =:= tag_i ->
                    %% Signed value: sign-extend from `Count' bytes to a
                    %% machine word.
                    Shift = ?CHAR_BIT * (?sizeof_SWord - Count),
                    SignExtendedValue = (WordValue bsl Shift) bsr Shift,
                    if
                        %% This first clause is true at compile-time.
                        ?IS_SSMALL(SignExtendedValue) ->
                            {#tag{tag = Tag,
                                  word_value = SignExtendedValue,
                                  size = 0},
                             Rest2}
                    end;
                WordValue when is_integer(WordValue) andalso WordValue >= 0 ->
                    {#tag{tag = Tag,
                          word_value = WordValue,
                          size = 0},
                     Rest2};
                false ->
                    %% Value too large for a machine word: keep the raw
                    %% bytes as an "overflow" tag.
                    {#tag{tag = tag_o,
                          ptr_value = Data,
                          size = Count},
                     Rest2}
            end
    end.
%% Maps the 3-bit tag number of the compact term encoding to its atom name.
decode_tag(TagNumber) when TagNumber >= 0, TagNumber =< 7 ->
    element(TagNumber + 1,
            {tag_u, tag_i, tag_a, tag_x, tag_y, tag_f, tag_h, tag_z}).
%% Unpacks a big-endian unsigned integer stored in `Size' bytes, or returns
%% `false' when it would not fit in a machine word.
unpack_varint(Size, Data) when Size =< ?sizeof_SWord ->
    do_unpack_varint(0, Size, Data, 0);
unpack_varint(_Size, _Data) ->
    false.

%% Folds the bytes into the accumulator, most significant byte first. The
%% final clause also asserts that exactly `Size' bytes were supplied.
do_unpack_varint(I, Size, <<Byte:8/unsigned-integer, Rest/binary>>, Acc)
  when I < Size ->
    Shift = (Size - I - 1) * ?CHAR_BIT,
    do_unpack_varint(I + 1, Size, Rest, Acc bor (Byte bsl Shift));
do_unpack_varint(Size, Size, <<>> = _Rest, Acc) ->
    Acc.
%% The field flags, which correspond to `Var/signed', `Var/unsigned',
%% `Var/little', `Var/big' and `Var/native' in the bitstring syntax, need to
%% be decoded here. It's the opposite to:
%% https://github.com/erlang/otp/blob/OTP-24.2/lib/compiler/src/beam_asm.erl#L486-L493
%%
%% The field flags bit field becomes a sublist of [signed, little, native].

%% Decodes the field flags bit field found at position `Pos' of
%% `Instruction' in place.
decode_field_flags(Instruction, Pos) when is_tuple(Instruction) ->
    Decoded = decode_field_flags(element(Pos, Instruction)),
    setelement(Pos, Instruction, Decoded).

-spec decode_field_flags(FieldFlagsBitFieldsTuple | FieldFlagsBitField) ->
    FieldFlagsTuple | FieldFlags when
      FieldFlagsBitFieldsTuple :: {field_flags, FieldFlagsBitField},
      FieldFlagsBitField :: non_neg_integer(),
      FieldFlagsTuple :: {field_flags, FieldFlags},
      FieldFlags :: [FieldFlag],
      FieldFlag :: little | signed | native.

decode_field_flags(0) ->
    [];
decode_field_flags(FieldFlags) when is_integer(FieldFlags) ->
    %% The result keeps the [signed, little, native] order.
    [signed || FieldFlags band 16#04 =/= 0] ++
    [little || FieldFlags band 16#02 =/= 0] ++
    [native || FieldFlags band 16#10 =/= 0];
decode_field_flags({field_flags, FieldFlagsBitField}) ->
    FieldFlags = decode_field_flags(FieldFlagsBitField),
    {field_flags, FieldFlags}.
%% Normalizes a `{list, ...}' argument decoded by `beam_disasm' — presumably
%% from a `bs_create_bin' instruction (segments come in groups of six):
%% integers are untagged and `string' segments get their bytes pulled back
%% out of the module's "StrT" chunk.
fix_create_bin_list(
  [{atom, string} = Type, Seg, Unit, Flags, {u, Offset} = _Val, Size
   | Args],
  #state{strings_in_progress = Strings} = State) ->
    Seg1 = fix_integer(Seg),
    Unit1 = fix_integer(Unit),
    Size1 = {integer, Length} = fix_integer(Size),
    %% A string segment's value is an offset into the "StrT" chunk, which
    %% must have been loaded when the module was disassembled.
    ?assertNotEqual(undefined, Strings),
    Binary = binary:part(Strings, {Offset, Length}),
    Val = {string, Binary},
    [Type, Seg1, Unit1, Flags, Val, Size1 | fix_create_bin_list(Args, State)];
fix_create_bin_list(
  [Type, Seg, Unit, Flags, Val, Size
   | Args],
  State) ->
    Seg1 = fix_integer(Seg),
    Unit1 = fix_integer(Unit),
    Val1 = fix_integer(Val),
    Size1 = fix_integer(Size),
    [Type, Seg1, Unit1, Flags, Val1, Size1 | fix_create_bin_list(Args, State)];
fix_create_bin_list([], _State) ->
    [].
%% Normalizes a `beam_disasm' tagged integer: `{u, N}' becomes the bare
%% value, `{i, N}' becomes `{integer, N}'; anything else passes through.
fix_integer(Value) ->
    case Value of
        {u, Unsigned} -> Unsigned;
        {i, Signed}   -> {integer, Signed};
        _             -> Value
    end.
%% Reduces the type information of a type-tagged register operand
%% (`{tr, Reg, {Type, _, _}}') to the bare type name; other operands pass
%% through unchanged.
fix_type_tagged_beam_register(Operand) ->
    case Operand of
        {tr, Reg, {Type, _, _}} -> {tr, Reg, Type};
        _ -> Operand
    end.
%% Returns the bare register from a (possibly) type-tagged register operand.
get_reg_from_type_tagged_beam_register(Operand) ->
    case Operand of
        {tr, Reg, _} -> Reg;
        Reg -> Reg
    end.
-spec ensure_instruction_is_permitted(Instruction, State) ->
    State when
      Instruction :: beam_instr(),
      State :: #state{}.
%% @private
%% Runs the user-provided `ensure_instruction_is_permitted' callback, if
%% any, against `Instruction'. A veto (a throw from the callback) is
%% accumulated in the state's `errors' list instead of aborting processing;
%% `process_errors/1' raises them later.

ensure_instruction_is_permitted(
  Instruction,
  #state{options = #{ensure_instruction_is_permitted := Callback},
         errors = Errors} = State)
  when is_function(Callback) ->
    try
        Callback(Instruction),
        State
    catch
        throw:Error ->
            %% Appending keeps the errors in discovery order.
            Errors1 = Errors ++ [Error],
            State#state{errors = Errors1}
    end;
ensure_instruction_is_permitted(_Instruction, State) ->
    State.
-spec should_process_function(Module, Name, Arity, FromModule, State) ->
    {ShouldProcess, State} when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      FromModule :: module(),
      State :: #state{},
      ShouldProcess :: boolean().
%% @private
%% Tells whether `Module:Name/Arity', called from `FromModule', should be
%% extracted into the generated module. The `should_process_function'
%% option takes precedence when set; a throw from that callback is recorded
%% as an error and counts as "no".

should_process_function(
  erl_eval, Name, Arity, _FromModule,
  #state{fun_info = #{module := erl_eval,
                      name := Name,
                      arity := Arity,
                      type := local}} = State) ->
    %% We want to process lambas loaded by `erl_eval'
    %% even though we wouldn't do that with the
    %% regular `erl_eval' API.
    {true, State};
should_process_function(
  Module, Name, Arity, FromModule,
  #state{options = #{should_process_function := Callback},
         errors = Errors} = State)
  when is_function(Callback) ->
    try
        ShouldProcess = Callback(Module, Name, Arity, FromModule),
        {ShouldProcess, State}
    catch
        throw:Error ->
            %% The veto is collected; `process_errors/1' raises it later.
            Errors1 = Errors ++ [Error],
            State1 = State#state{errors = Errors1},
            {false, State1}
    end;
should_process_function(Module, Name, Arity, _FromModule, State) ->
    {default_should_process_function(Module, Name, Arity),
     State}.
%% Default policy when no `should_process_function' option is given:
%% extract everything except functions from the `erlang' module.
default_should_process_function(Module, _Name, _Arity) ->
    Module =/= erlang.
-spec is_standalone_fun_still_needed(State) -> IsNeeded when
      State :: #state{},
      IsNeeded :: boolean().
%% @private
%% Asks the optional `is_standalone_fun_still_needed' callback — given the
%% recorded calls and errors — whether the extraction should be completed.
%% Defaults to `true' when the option is unset.

is_standalone_fun_still_needed(
  #state{options = #{is_standalone_fun_still_needed := Callback},
         all_calls = Calls,
         errors = Errors})
  when is_function(Callback) ->
    Callback(#{calls => Calls,
               errors => Errors});
is_standalone_fun_still_needed(_State) ->
    true.
-spec process_errors(State) -> ok | no_return() when
      State :: #state{}.
%% Raises (throws) the first error recorded in the state, if any.
%% TODO: Return all errors?

process_errors(#state{errors = Errors}) ->
    case Errors of
        [] -> ok;
        [FirstError | _] -> throw(FirstError)
    end.
%% -------------------------------------------------------------------
%% Code processing [Pass 2]
%% -------------------------------------------------------------------

-spec pass2(State) -> Asm when
      State :: #state{},
      Asm :: asm().
%% @private
%% Assembles the final module form from the functions collected during
%% pass 1: computes the generated module name, rewrites every function,
%% sorts them by entry label and exports the entrypoint.

pass2(
  #state{functions = Functions,
         next_label = NextLabel} = State) ->
    %% The module name is based on a hash of its entire code.
    GeneratedModuleName = gen_module_name(State),
    State1 = State#state{generated_module_name = GeneratedModuleName},
    Functions1 = pass2_process_functions(Functions, State1),
    %% Sort functions by their entrypoint label.
    Functions2 = lists:sort(
                   fun(#function{entry = EntryA},
                       #function{entry = EntryB}) ->
                           EntryA < EntryB
                   end, maps:values(Functions1)),
    %% The first function (the lambda) is the only one exported.
    [#function{name = Name, arity = Arity} | _] = Functions2,
    Exports = [{Name, Arity}],
    Attributes = [],
    Labels = NextLabel,
    {GeneratedModuleName,
     Exports,
     Attributes,
     Functions2,
     Labels}.
-spec pass2_process_functions(Functions, State) -> Functions when
      Functions :: #{mfa() => #function{}},
      State :: #state{}.
%% @private
%% Runs `pass2_process_function/3' on every collected function.

pass2_process_functions(Functions, State) ->
    maps:map(
      fun(MFA, Function) ->
              pass2_process_function(MFA, Function, State)
      end, Functions).
-spec pass2_process_function(MFA, Function, State) -> Function when
      MFA :: mfa(),
      Function :: #function{},
      State :: #state{}.
%% @private
%% Renames one function for the generated module and rewrites each of its
%% instructions through `pass2_process_instruction/2'.

pass2_process_function(
  {Module, Name, Arity},
  #function{name = Name,
            code = Instructions} = Function,
  State) ->
    Name1 = gen_function_name(Module, Name, Arity, State),
    Instructions1 = lists:map(
                      fun(Instruction) ->
                              %% Each instruction is rewritten knowing which
                              %% original function it belongs to, as labels
                              %% are remapped per original module.
                              S1 = State#state{mfa_in_progress = {Module,
                                                                  Name,
                                                                  Arity},
                                               function_in_progress = Name1},
                              pass2_process_instruction(Instruction, S1)
                      end, Instructions),
    Function#function{name = Name1,
                      code = Instructions1}.
-spec pass2_process_instruction(Instruction, State) -> Instruction when
      Instruction :: beam_instr(),
      State :: #state{}.
%% @private
%% Rewrites a single instruction for the generated module:
%%   - calls (local or external) to extracted functions become direct
%%     calls to their new entry labels;
%%   - jump/failure labels are remapped through the label map;
%%   - `func_info' is rewritten to carry the generated module and function
%%     names.
%% Instructions needing no rewriting fall through to the last clause.

pass2_process_instruction(
  {Call, Arity, {_, _, _} = MFA} = Instruction,
  #state{functions = Functions})
  when Call =:= call orelse Call =:= call_only ->
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {Call, Arity, {f, EntryLabel}};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {Call, Arity, {extfunc, Module, Name, Arity}} = Instruction,
  #state{functions = Functions})
  when Call =:= call_ext orelse Call =:= call_ext_only ->
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            %% The external call becomes a local call inside the generated
            %% module.
            Call1 = case Call of
                        call_ext -> call;
                        call_ext_only -> call_only
                    end,
            {Call1, Arity, {f, EntryLabel}};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {bif, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {bs_add, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_append, _, _, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_create_bin, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_init2, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_private_append, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {BsPutSomething, _, _, _, _, _} = Instruction, State)
  when BsPutSomething =:= bs_put_binary orelse
       BsPutSomething =:= bs_put_integer ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {call_last, Arity, {Module, Name, Arity}, Opaque} = Instruction,
  #state{functions = Functions}) ->
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {call_last, Arity, {f, EntryLabel}, Opaque};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {call_ext_last, Arity, {extfunc, Module, Name, Arity}, Opaque} = Instruction,
  #state{functions = Functions}) ->
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {call_last, Arity, {f, EntryLabel}, Opaque};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {'catch', _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {func_info, _ModRepr, _NameRepr, Arity},
  #state{generated_module_name = GeneratedModuleName,
         function_in_progress = Name}) ->
    %% `func_info' must reference the generated module and the renamed
    %% function.
    ModRepr = {atom, GeneratedModuleName},
    NameRepr = {atom, Name},
    {func_info, ModRepr, NameRepr, Arity};
pass2_process_instruction(
  {get_map_elements, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {jump, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {loop_rec, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {Select, _, _, {list, Cases}} = Instruction,
  #state{mfa_in_progress = {Module, _, _},
         label_map = LabelMap} = State)
  when Select =:= select_val orelse Select =:= select_tuple_arity ->
    %% Both the failure label (element 3) and every branch target in the
    %% cases list must be remapped.
    Cases1 = [case Case of
                  {f, OldLabel} ->
                      NewLabel = maps:get({Module, OldLabel}, LabelMap),
                      {f, NewLabel};
                  _ ->
                      Case
              end || Case <- Cases],
    Instruction1 = replace_label(Instruction, 3, State),
    setelement(4, Instruction1, {list, Cases1});
pass2_process_instruction(
  {test, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {test, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {test, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {make_fun2, {_, _, _} = MFA, _, _, _} = Instruction,
  #state{functions = Functions}) ->
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            setelement(2, Instruction, {f, EntryLabel});
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {make_fun3, {_, _, _} = MFA, _, _, _, _} = Instruction,
  #state{functions = Functions}) ->
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            setelement(2, Instruction, {f, EntryLabel});
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {'try', _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {wait_timeout, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  Instruction,
  _State) ->
    Instruction.
%% Replaces the `{f, Label}' element at position `Pos' of `Instruction'
%% with the corresponding label in the generated module, using the label
%% map built during pass 1.
replace_label(
  Instruction, Pos,
  #state{mfa_in_progress = {Module, _, _},
         label_map = LabelMap}) ->
    case element(Pos, Instruction) of
        {f, 0} ->
            %% The `0' label is an exception label in the compiler, used to
            %% trigger an exception when branching. It should remain unchanged
            %% here, for more information see:
            %% https://github.com/erlang/otp/blob/d955dc663a6d5dd03ab3360f9dd3dc0f439c7ef5/lib/compiler/src/beam_validator.erl#L26-L32
            Instruction;
        {f, OldLabel} ->
            NewLabel = maps:get({Module, OldLabel}, LabelMap),
            setelement(Pos, Instruction, {f, NewLabel})
    end.
-spec gen_module_name(State) -> Module when
      State :: #state{},
      Module :: module().
%% Derives the generated module's name from the original module and
%% function names plus a hash of all the extracted code, so identical code
%% maps to the same module name.

gen_module_name(#state{fun_info = Info, functions = Functions}) ->
    #{module := Module, name := Name} = Info,
    Hash = erlang:phash2(Functions),
    list_to_atom(
      lists:flatten(
        io_lib:format("kfun__~s__~s__~b", [Module, Name, Hash]))).
-spec gen_function_name(Module, Name, Arity, State) -> Name when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{}.
%% Names a copied function inside the generated module. The entrypoint gets
%% the well-known `?SF_ENTRYPOINT' name; every other function is prefixed
%% with its module of origin.

gen_function_name(Module, Name, Arity, #state{entrypoint = Entrypoint}) ->
    case Entrypoint of
        {Module, Name, Arity} ->
            ?SF_ENTRYPOINT;
        _ ->
            list_to_atom(
              lists:flatten(
                io_lib:format("~s__~s", [Module, Name])))
    end.
%% -------------------------------------------------------------------
%% Environment handling.
%% -------------------------------------------------------------------

-spec to_standalone_env(State) -> {StandaloneEnv, State} when
      State :: #state{},
      StandaloneEnv :: list().
%% @doc Converts the fun environment to a standalone term.
%%
%% For "regular" lambdas, variables declared outside of the function body are
%% put in this `env'. We need to process them in case they reference other
%% lambdas for instance. We keep the end result to store it alongside the
%% generated module, but not inside the module to avoid an increase in the
%% number of identical modules with different environment.
%%
%% However for `erl_eval' functions created from lambdas, the env contains the
%% parsed source code of the function. We don't need to interpret it.
%%
%% TODO: `to_standalone_env()' uses `to_standalone_fun1()' to extract and
%% compile lambdas passed as arguments. It means they are fully compiled even
%% if `is_standalone_fun_still_needed()' returns false later. This is a waste
%% of resources and this function can probably be split into two parts to
%% allow the environment to be extracted before and compiled after, once we
%% are sure we need to create the final standalone fun.

to_standalone_env(#state{fun_info = #{module := Module,
                                      type := Type,
                                      env := Env},
                         options = Options} = State)
  when Module =/= erl_eval orelse Type =/= local ->
    %% The `is_standalone_fun_still_needed' option is removed while the
    %% environment is converted — presumably so it only applies to the
    %% outermost fun, not the lambdas extracted here — and restored after.
    State1 = State#state{options = maps:remove(
                                     is_standalone_fun_still_needed,
                                     Options)},
    {Env1, State2} = to_standalone_arg(Env, State1),
    State3 = State2#state{options = Options},
    {Env1, State3};
to_standalone_env(State) ->
    %% `erl_eval' local fun: the env holds source code, not values; there
    %% is nothing to convert.
    {[], State}.
%% Recursively convert a term: lists, tuples and maps are walked so any
%% nested fun is turned into its standalone form; other terms pass
%% through unchanged.  The state threads through every conversion.
to_standalone_arg(Items, State) when is_list(Items) ->
    lists:foldr(
      fun(Item, {Acc, St0}) ->
              {Converted, St1} = to_standalone_arg(Item, St0),
              {[Converted | Acc], St1}
      end, {[], State}, Items);
to_standalone_arg(Tuple, State) when is_tuple(Tuple) ->
    {Converted, State1} = to_standalone_arg(tuple_to_list(Tuple), State),
    {list_to_tuple(Converted), State1};
to_standalone_arg(Map, State) when is_map(Map) ->
    maps:fold(
      fun(Key, Value, {Acc, St0}) ->
              {Key1, St1} = to_standalone_arg(Key, St0),
              {Value1, St2} = to_standalone_arg(Value, St1),
              {Acc#{Key1 => Value1}, St2}
      end, {#{}, State}, Map);
to_standalone_arg(Fun, State) when is_function(Fun) ->
    to_embedded_standalone_fun(Fun, State);
to_standalone_arg(Term, State) ->
    {Term, State}.
%% @doc Wrap a `#standalone_fun{}' (or any term containing some) back
%% into a callable value.
%%
%% A standalone fun of arity N is wrapped in a real fun of the same
%% arity which forwards its arguments to exec/2.  Only arities 0..10
%% are supported; a larger arity crashes with a `case_clause'.  Lists,
%% tuples and maps are walked recursively so nested standalone funs are
%% converted too; any other term is returned untouched.
%%
%% (This fixes dataset residue that had been fused onto the final line,
%% which made the original source unparseable.)
to_actual_arg(#standalone_fun{arity = Arity} = StandaloneFun) ->
    case Arity of
        0 ->
            fun() -> exec(StandaloneFun, []) end;
        1 ->
            fun(Arg1) -> exec(StandaloneFun, [Arg1]) end;
        2 ->
            fun(Arg1, Arg2) -> exec(StandaloneFun, [Arg1, Arg2]) end;
        3 ->
            fun(Arg1, Arg2, Arg3) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3])
            end;
        4 ->
            fun(Arg1, Arg2, Arg3, Arg4) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4])
            end;
        5 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4, Arg5])
            end;
        6 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6])
            end;
        7 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7])
            end;
        8 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8])
            end;
        9 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9])
            end;
        10 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9, Arg10) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9,
                       Arg10])
            end
    end;
to_actual_arg(List) when is_list(List) ->
    lists:map(
      fun(Item) ->
              to_actual_arg(Item)
      end, List);
to_actual_arg(Tuple) when is_tuple(Tuple) ->
    List0 = tuple_to_list(Tuple),
    List1 = to_actual_arg(List0),
    list_to_tuple(List1);
to_actual_arg(Map) when is_map(Map) ->
    maps:fold(
      fun(Key, Value, Acc) ->
              Key1 = to_actual_arg(Key),
              Value1 = to_actual_arg(Value),
              Acc#{Key1 => Value1}
      end, #{}, Map);
to_actual_arg(Term) ->
    Term.
%%% @doc Bootstrap a sample by resampling in the data structure
%%% @end
-module(eministat_resample).
-include("eministat.hrl").
-export([resample/3, bootstrap_bca/3]).
-compile(export_all).
%% @doc resample/3 is the main resampler of eministat: draw `Resamples'
%% bootstrap datasets from the sample and run every requested estimator
%% over them.
%% @end
resample(Estimators, Resamples, #dataset { n = N, points = Ps }) ->
    Bootstrapped = boot(Resamples, N, list_to_tuple(Ps)),
    estimate(Estimators, Bootstrapped).
%% Build `Resamples' bootstrap datasets, each drawing `N' points with
%% replacement from the original sample (held as a tuple for O(1)
%% indexing).
boot(Resamples, N, Points) ->
    boot(Resamples, N, Points, []).

boot(0, _N, _Points, Acc) ->
    Acc;
boot(K, N, Points, Acc) ->
    Ds = eministat_ds:from_list(K, draw(N, N, Points)),
    boot(K - 1, N, Points, [Ds | Acc]).
%% Draw `K' elements uniformly at random, with replacement, from a
%% tuple of `N' points.
draw(0, _N, _Points) ->
    [];
draw(K, N, Points) ->
    Pick = element(rand:uniform(N), Points),
    [Pick | draw(K - 1, N, Points)].
%% Evaluate each estimator over every bootstrap dataset; the resampled
%% statistics are sorted so downstream percentile lookups can index
%% directly into the points list.
estimate([], _ResultSets) ->
    [];
estimate([Estimator | Rest], ResultSets) ->
    Stats = lists:sort([estimator(Estimator, Ds) || Ds <- ResultSets]),
    Wrapped = eministat_ds:from_list(Estimator, Stats),
    [{Estimator, Wrapped} | estimate(Rest, ResultSets)].
%% Bias-corrected and accelerated (BCa) bootstrap interval, following
%% the approach used by the Criterion benchmarking library.
bootstrap_bca(CLevel, Sample, Bootstraps)
  when CLevel > 0 andalso CLevel < 1 ->
    [{Estimator, e(CLevel, Sample, Estimator, Resampled)}
     || {Estimator, Resampled} <- Bootstraps].
%% Apply the named point estimator to a dataset.
estimator(mean, Dataset) -> eministat_ds:mean(Dataset);
estimator(variance, Dataset) -> eministat_ds:variance(Dataset);
estimator(std_dev, Dataset) -> eministat_ds:std_dev(Dataset).
%% Compute the BCa confidence interval for estimator `Est' at level
%% `CLevel', given the original `Sample' and its bootstrap resamples
%% `Rs'.  `Ps' arrives sorted (see estimate/2), so the adjusted
%% percentile indices can be looked up with lists:nth/2.
e(CLevel, Sample, Est, #dataset { n = N, points = Ps } = Rs) ->
    %% Point estimate on the original sample.
    PT = estimator(Est, Sample),
    Mean = eministat_ds:mean(Rs),
    StdDev = eministat_ds:std_dev(Rs),
    %% Normal quantile for the lower tail of the requested level.
    Z1 = quantile(standard(), (1 - CLevel) / 2),
    CumN = fun(X) -> round(N * cumulative(standard(), X)) end,
    %% Bias correction: fraction of bootstrap statistics below the
    %% point estimate, mapped through the normal quantile.
    ProbN = count(fun(X) -> X < PT end, Ps),
    Bias = quantile(standard(), ProbN / N),
    %% Acceleration constant from the jackknife skewness.
    #dataset { points = JackPs } = Jack = jackknife(Est, Sample),
    JackMean = eministat_ds:mean(Jack),
    F = fun(J, {S, C}) ->
                D = JackMean - J,
                D2 = D * D,
                {S + D2, C + D2 * D}
        end,
    {SumSquares, SumCubes} = lists:foldl(F, {0.0,0.0}, JackPs),
    %% io:format("JackMean: ~p, Jack: ~p~n", [JackMean, Jack]),
    Accel = SumCubes / (6 * (math:pow(SumSquares, 1.5))),
    %% Bias/acceleration-adjusted endpoints, clamped into [0, N-1].
    B1 = Bias + Z1,
    A1 = Bias + B1 / (1.0 - Accel * B1),
    Lo = max(0, CumN(A1)),
    B2 = Bias - Z1,
    A2 = Bias + B2 / (1.0 - Accel * B2),
    Hi = min(N - 1, CumN(A2)),
    %% io:format("Points found: ~p~n", [#{ pt => PT, lo => Lo, hi => Hi, n => N, z1 => Z1, prob_n => ProbN, bias => Bias,
    %% accel => Accel, b1 => B1, a1 => A1, b2 => B2, a2 => A2 }]),
    %% Sanity assertions: crash loudly rather than return a bogus interval.
    true = Lo =< Hi,
    true = CLevel > 0 andalso CLevel < 1,
    #{ pt => PT, mean => Mean, std_dev => StdDev, lo => lists:nth(Lo+1, Ps), hi => lists:nth(Hi+1, Ps), cl => CLevel }.
%% Run the named jackknife estimator over `Ds', wrapping the result in
%% a dataset tagged with the source dataset's name.
jackknife(Est, #dataset{ name = Name } = Ds) ->
    eministat_ds:from_list({jack, Name}, jackknife_(Est, Ds)).
%% Leave-one-out statistics.  For the mean: removing element i leaves
%% (sum-of-first-(i-1) + sum-of-last-(N-i)) / (N-1); the two prefix-sum
%% scans supply exactly those partial sums, and zip/2 silently drops
%% the left scan's extra seed entry.
jackknife_(mean, #dataset { n = N, points = Ps }) when N > 1 ->
    L = N-1,
    [(X + Y) / L || {X, Y} <- zip(prefix_sum_l(Ps), prefix_sum_r(Ps))];
jackknife_(variance, Ds) -> jackknife_variance(0, Ds);
%jackknife_(unbiased_variance, Ds) -> jackknife_variance(1, Ds);
jackknife_(std_dev, Ds) -> [math:sqrt(X) || X <- jackknife_variance(1, Ds)].
%% Leave-one-out variances computed in O(n) via prefix sums of the
%% squared (A*) and plain (B*) deviations from the full-sample mean.
%% `C' selects the divisor: 0 for the biased estimator, 1 for Bessel's
%% correction.
jackknife_variance(C, #dataset { n = N, points = Ps } = Ds) when N > 1 ->
    M = eministat_ds:mean(Ds),
    %% Squared deviation from the full-sample mean.
    GOA = fun(X) ->
                  V = X - M,
                  V*V
          end,
    ALs = prefix_sum_l([GOA(P) || P <- Ps]),
    ARs = prefix_sum_r([GOA(P) || P <- Ps]),
    BLs = prefix_sum_l([P - M || P <- Ps]),
    BRs = prefix_sum_r([P - M || P <- Ps]),
    Q = N - 1,
    %% With element i removed: subtract the correction for the shifted
    %% mean, then divide by (Q - C).
    [begin B = BL + BR, (AL + AR - (B * B) / Q) / (Q - C) end ||
        {AL, AR, BL, BR} <- zip4(ALs, ARs, BLs, BRs)].
%% Left-to-right running sums; the 0.0 seed is kept, so the result has
%% one more element than `Points'.
prefix_sum_l(Points) ->
    scanl(fun(X, Sum) -> X + Sum end, 0.0, Points).

%% Right-to-left running sums with the seed entry dropped, so the
%% result has the same length as `Points'.
prefix_sum_r(Points) ->
    tl(scanr(fun(X, Sum) -> X + Sum end, 0.0, Points)).
%% -- NORMAL DISTRIBUTION ------------------------------
%% Constants
sqrt2() -> math:sqrt(2).

sqrt2pi() -> math:sqrt(2 * math:pi()).

%% The standard normal distribution: mean 0, standard deviation 1,
%% with the PDF/CDF denominators precomputed.
standard() ->
    #{mean => 0.0,
      std_dev => 1.0,
      pdf_denom => math:log(sqrt2pi()),
      cdf_denom => sqrt2()}.
%% Cumulative distribution function of a normal distribution at `X'.
cumulative(#{ mean := Mean, cdf_denom := Denom }, X) ->
    math:erfc((Mean - X) / Denom) / 2.
%% Quantile (inverse CDF) of a normal distribution; `P' must lie in
%% the open interval (0, 1).  The median shortcut avoids calling
%% inv_erfc at its singularity-free but wasteful midpoint.
quantile(#{ mean := Mean }, 0.5) ->
    Mean;
quantile(#{ mean := Mean, cdf_denom := Denom }, P)
  when P > 0 andalso P < 1 ->
    Mean + Denom * inv_erfc(2 * (1 - P)).
%% -- STANDARD LIBRARY ROUTINES -----------------------------------------
%% Things which should have been in a standard library but isn't, one way or the other.
%% @doc count/2 counts how many times a predicate returns `true'
%% @end
count(Pred, List) -> count(Pred, List, 0).

count(_Pred, [], Acc) ->
    Acc;
count(Pred, [X | Xs], Acc) ->
    case Pred(X) of
        true -> count(Pred, Xs, Acc + 1);
        false -> count(Pred, Xs, Acc)
    end.
%% @doc scanl/3 is like foldl/3 but returns every intermediate
%% accumulator, starting with the seed (so the result has one more
%% element than the input).  Note the fun is applied as F(Elem, Acc).
%% @end
scanl(F, Acc, [X | Xs]) ->
    [Acc | scanl(F, F(X, Acc), Xs)];
scanl(_F, Acc, []) ->
    [Acc].
%% @doc scanr/3 is like foldr/3 but returns every intermediate
%% accumulator; the last element of the result is the seed.
%% @end
scanr(_F, Acc0, []) ->
    [Acc0];
scanr(F, Acc0, [X | Xs]) ->
    [Acc | _] = Rest = scanr(F, Acc0, Xs),
    [F(X, Acc) | Rest].
%% Zip variants that stop at the shorter list instead of raising
%% (unlike lists:zip/2, which demands equal lengths).
zip([A | As], [B | Bs]) ->
    [{A, B} | zip(As, Bs)];
zip(_, _) ->
    [].

zip4([A | As], [B | Bs], [C | Cs], [D | Ds]) ->
    [{A, B, C, D} | zip4(As, Bs, Cs, Ds)];
zip4(_, _, _, _) ->
    [].
%% @doc Inverse of the complementary error function, valid for P in the
%% open interval (0, 2).  Uses a rational initial approximation
%% followed by two Halley refinement steps (cf. Numerical Recipes'
%% `inverfc').  Inputs above 1 are mapped onto (0, 1] via the symmetry
%% erfc^-1(2 - P) == -erfc^-1(P).
%%
%% (Also removes dataset residue that had been fused onto the final
%% line, which made the original source unparseable.)
inv_erfc(P) when P > 0 andalso P < 2 ->
    PP = case P =< 1 of
             true -> P;
             false -> 2 - P
         end,
    T = math:sqrt(-2 * math:log(0.5 * PP)),
    %% Initial guess for the Halley iteration.
    X0 = -0.70711 * ((2.30753 + T * 0.27061) / (1 + T * (0.99229 + T * 0.04481)) - T),
    R = inv_erfc_loop(PP, 0, X0),
    case P =< 1 of
        true -> R;
        false -> -R
    end.

%% Two fixed Halley steps; given the quality of the initial guess this
%% reaches near double precision.
inv_erfc_loop(_PP, J, X) when J >= 2 -> X;
inv_erfc_loop(PP, J, X) ->
    Err = math:erfc(X) - PP,
    XP = X + Err / (1.12837916709551257 * math:exp(-X * X) - X * Err), %% Halley
    inv_erfc_loop(PP, J+1, XP).
%% @doc Format dates in erlang
%%
%% Licensed under the DWTFYW License
%%
%% This module formats erlang dates in the form
%% {{Year, Month, Day}, {Hour, Minute, Second}}
%% to printable strings, using (almost) equivalent
%% formatting rules as http://uk.php.net/date
%%
%% erlang has no concept of timezone so the following
%% formats are not implemented: B e I O P T Z
%% formats c and r will also differ slightly
%%
%% See tests at bottom for examples
%% MW: Added support for ISO8601 and "Y-M-D H:I:S"
-module(dh_date).
-author("<NAME> <<EMAIL>>").
-author("<NAME> <<EMAIL>>").
-export([format/1, format/2]).
-export([parse/1, parse/2]).
-export([nparse/1]).
-export([tokenise/2]).
-define( is_num(X), (X >= $0 andalso X =< $9) ).
-define( is_meridian(X), (X==[] orelse X==[am] orelse X==[pm]) ).
-define( is_sep(X), (X==$- orelse X==$/ orelse X==$\.) ).
-define( is_day(X), (X >= 1 andalso X =< 31)).
-define( is_month(X), (X >= 1 andalso X =< 12)).
-define(GREGORIAN_SECONDS_1970, 62167219200).
-import(calendar,[last_day_of_the_month/2, day_of_the_week/1,
datetime_to_gregorian_seconds/1, date_to_gregorian_days/1,
gregorian_days_to_date/1, is_leap_year/1]).
-type year() :: non_neg_integer().
-type month() :: 1..12.
-type day() :: 1..31.
-type hour() :: 0..23.
-type minute() :: 0..59.
-type second() :: 0..59.
-type daynum() :: 1..7.
-type date() :: {year(),month(),day()}.
-type time() :: {hour(),minute(),second()}.
-type datetime() :: {date(),time()}.
-type now() :: {integer(),integer(),integer()}.
%%
%% EXPORTS
%%
-spec format(string()) -> string().
%% @doc Format the current UTC time according to `Format'.
format(Format) ->
    format(Format, calendar:universal_time(), []).
-spec format(string(),datetime() | now()) -> string().
%% @doc Format `Date' -- either a datetime tuple or a `now()' triple --
%% according to `Format'.
format(Format, {_, _, _} = Now) ->
    format(Format, calendar:now_to_datetime(Now), []);
format(Format, DateTime) ->
    format(Format, DateTime, []).
-spec parse(string() | binary()) -> datetime() | {error, bad_date}.
%% @doc Parse a datetime from a string or binary; fields missing from
%% the input default to the current UTC time.
parse(Bin) when is_binary(Bin) ->
    parse(binary_to_list(Bin));
parse(Str) ->
    do_parse(Str, calendar:universal_time(), []).
-spec parse(string(),datetime() | now()) -> datetime() | {error, bad_date}.
%% @doc Parse a datetime from a string, defaulting missing fields from
%% the supplied datetime tuple or `now()' triple.
parse(Str, {_, _, _} = Now) ->
    do_parse(Str, calendar:now_to_datetime(Now), []);
parse(Str, DefaultDt) ->
    do_parse(Str, DefaultDt, []).
%% Tokenise the input and run it through the parse/3 grammar; the guard
%% range-checks each field and calendar:valid_date/1 rejects impossible
%% dates such as Feb 30.
do_parse(Date, Now, Opts) ->
    case parse(tokenise(string:to_upper(Date), []), Now, Opts) of
        {error, Reason} ->
            {error, Reason};
        %% Bind both the whole result and its fields so the guard can
        %% inspect them before validation.
        {D1, T1} = {{Y, M, D}, {H, M1, S}}
          when is_number(Y), ?is_month(M),
               ?is_day(D), is_number(H),
               is_number(M1), is_number(S) ->
            case calendar:valid_date(D1) of
                true -> {D1, T1};
                false -> {error, bad_date}
            end;
        _ ->
            {error, bad_date}
    end.
-spec nparse(string()) -> now().
%% @doc Parse a datetime string and return it in `now()' format
%% ({MegaSecs, Secs, MicroSecs}).  Crashes with a badarith/badmatch if
%% the string cannot be parsed.
nparse(Str) ->
    Gregorian = calendar:datetime_to_gregorian_seconds(parse(Str)),
    Epoch = Gregorian - ?GREGORIAN_SECONDS_1970,
    {Epoch div 1000000, Epoch rem 1000000, 0}.
%%
%% LOCAL FUNCTIONS
%%
%% parse/3 matches the token stream produced by tokenise/2 against the
%% known date/time shapes.  Fields missing from the input default to
%% the corresponding part of the reference datetime.  Clause order
%% matters: the more specific shapes must come first.
%% Times - 21:45, 13:45:54, 13:15PM etc
parse([Hour,$:,Min,$:,Sec | PAM], {Date, _Time}, _O) when ?is_meridian(PAM) ->
    {Date, {hour(Hour, PAM), Min, Sec}};
parse([Hour,$:,Min | PAM], {Date, _Time}, _Opts) when ?is_meridian(PAM) ->
    {Date, {hour(Hour, PAM), Min, 0}};
%% Dates 23/april/1963
parse([Day,Month,Year], {_Date, Time}, _Opts) when ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, Time};
parse([Day,X,Month,X,Year], {_Date, Time}, _Opts) when ?is_sep(X), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, Time};
parse([Year,Day,Month], {_Date, Time}, _Opts) when ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, Time};
parse([Year,X,Month,X,Day], {_Date, Time}, _Opts) when ?is_sep(X), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, Time};
%% Date/Times 22 Aug 2008 6:35 PM
parse([Day,X,Month,X,Year,Hour,$:,Min | PAM], _Date, _Opts)
  when ?is_meridian(PAM), ?is_sep(X), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, {hour(Hour, PAM), Min, 0}};
parse([Day,X,Month,X,Year,Hour,$:,Min,$:,Sec | PAM], _Now, _Opts)
  when ?is_meridian(PAM), ?is_sep(X), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, {hour(Hour, PAM), Min, Sec}};
parse([Day,Month,Year,Hour,$:,Min | PAM], _Now, _Opts)
  when ?is_meridian(PAM), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, {hour(Hour, PAM), Min, 0}};
parse([Day,Month,Year,Hour,$:,Min,$:,Sec | PAM], _Now, _Opts)
  when ?is_meridian(PAM), ?is_day(Day), ?is_month(Month) ->
    {{to_year(Year), Month, Day}, {hour(Hour, PAM), Min, Sec}};
%% Date/Times 2008-08-22 18:35:00
parse([Year,$-,Month,$-,Day,Hour,$:,Min,$:,Sec], _, _Opts) ->
    {{to_year(Year),Month,Day}, {Hour,Min,Sec}};
parse([Year,$-,Month,$-,Day,Hour,$:,Min], _, _Opts) ->
    {{to_year(Year),Month,Day}, {Hour,Min,0}};
%% ISO8601: "2012-04-23T17:04:29+02:00"
%% The offset is applied to get universal time, which is then converted
%% to local time for the caller.
parse([Year,$-,Month,$-,Day,$T,Hour,$:,Min,$:,Sec,PM,TZHour,$:,TZMin], _, _Opts) when PM =:= $-; PM =:= $+ ->
    LocalSecs = calendar:datetime_to_gregorian_seconds({{Year, Month, Day}, {Hour, Min, Sec}}),
    TZDiff = TZHour * 3600 + TZMin * 60,
    UniversalSecs = case PM of $- -> LocalSecs+TZDiff; $+ -> LocalSecs-TZDiff end,
    calendar:universal_time_to_local_time(calendar:gregorian_seconds_to_datetime(UniversalSecs));
%% Hour-only offset ("...+02") delegates to the clause above.
parse([Year,$-,Month,$-,Day,$T,Hour,$:,Min,$:,Sec,PM,TZHour], DT, Opts) when PM =:= $-; PM =:= $+ ->
    parse([Year,$-,Month,$-,Day,$T,Hour,$:,Min,$:,Sec,PM,TZHour,$:,0], DT, Opts);
parse([Year,$-,Month,$-,Day,$T,Hour,$:,Min,$:,Sec,$Z], _, _Opts) ->
    calendar:universal_time_to_local_time({{Year, Month, Day}, {Hour, Min, Sec}});
parse(_Tokens, _Now, _Opts) ->
    {error, bad_date}.
%% Tokenise an upper-cased date string.  Clause order matters
%% throughout: longest matches must come first (4-digit numbers before
%% 2-digit before single digits, "JANUARY" before "JAN", etc.).
tokenise([], Acc) ->
    lists:reverse(Acc);
%% Numbers: greedily group up to four digits, so "2008" is one token
%% while "195" becomes 19 then 5 (deliberately rejected later).
tokenise([N1, N2, N3, N4 | Rest], Acc)
  when ?is_num(N1), ?is_num(N2), ?is_num(N3), ?is_num(N4) ->
    tokenise(Rest, [ ltoi([N1, N2, N3, N4]) | Acc]);
tokenise([N1, N2 | Rest], Acc)
  when ?is_num(N1), ?is_num(N2) ->
    tokenise(Rest, [ ltoi([N1, N2]) | Acc]);
tokenise([N1 | Rest], Acc)
  when ?is_num(N1) ->
    tokenise(Rest, [ ltoi([N1]) | Acc]);
%% Month names become their month number; full names before prefixes.
tokenise("JANUARY"++Rest, Acc) -> tokenise(Rest, [1 | Acc]);
tokenise("JAN"++Rest, Acc) -> tokenise(Rest, [1 | Acc]);
tokenise("FEBUARY"++Rest, Acc) -> tokenise(Rest, [2 | Acc]); %% common misspelling, accepted
tokenise("FEBRUARY"++Rest, Acc) -> tokenise(Rest, [2 | Acc]);
tokenise("FEB"++Rest, Acc) -> tokenise(Rest, [2 | Acc]);
tokenise("MARCH"++Rest, Acc) -> tokenise(Rest, [3 | Acc]);
tokenise("MAR"++Rest, Acc) -> tokenise(Rest, [3 | Acc]);
tokenise("APRIL"++Rest, Acc) -> tokenise(Rest, [4 | Acc]);
tokenise("APR"++Rest, Acc) -> tokenise(Rest, [4 | Acc]);
tokenise("MAY"++Rest, Acc) -> tokenise(Rest, [5 | Acc]);
tokenise("JUNE"++Rest, Acc) -> tokenise(Rest, [6 | Acc]);
tokenise("JUN"++Rest, Acc) -> tokenise(Rest, [6 | Acc]);
tokenise("JULY"++Rest, Acc) -> tokenise(Rest, [7 | Acc]);
tokenise("JUL"++Rest, Acc) -> tokenise(Rest, [7 | Acc]);
tokenise("AUGUST"++Rest, Acc) -> tokenise(Rest, [8 | Acc]);
tokenise("AUG"++Rest, Acc) -> tokenise(Rest, [8 | Acc]);
tokenise("SEPTEMBER"++Rest, Acc) -> tokenise(Rest, [9 | Acc]);
tokenise("SEPT"++Rest, Acc) -> tokenise(Rest, [9 | Acc]);
tokenise("SEP"++Rest, Acc) -> tokenise(Rest, [9 | Acc]);
tokenise("OCTOBER"++Rest, Acc) -> tokenise(Rest, [10 | Acc]);
tokenise("OCT"++Rest, Acc) -> tokenise(Rest, [10 | Acc]);
tokenise("NOVEMBER"++Rest, Acc) -> tokenise(Rest, [11 | Acc]);
tokenise("NOVEM"++Rest, Acc) -> tokenise(Rest, [11 | Acc]);
tokenise("NOV"++Rest, Acc) -> tokenise(Rest, [11 | Acc]);
tokenise("DECEMBER"++Rest, Acc) -> tokenise(Rest, [12 | Acc]);
tokenise("DECEM"++Rest, Acc) -> tokenise(Rest, [12 | Acc]);
tokenise("DEC"++Rest, Acc) -> tokenise(Rest, [12 | Acc]);
%% Separators and meridians are kept as tokens for parse/3 to match on.
tokenise([$: | Rest], Acc) -> tokenise(Rest, [ $: | Acc]);
tokenise([$/ | Rest], Acc) -> tokenise(Rest, [ $/ | Acc]);
tokenise([$- | Rest], Acc) -> tokenise(Rest, [ $- | Acc]);
tokenise([$\. | Rest], Acc) -> tokenise(Rest, [ $\. | Acc]);
tokenise([$+ | Rest], Acc) -> tokenise(Rest, [ $+ | Acc]);
tokenise("AM"++Rest, Acc) -> tokenise(Rest, [am | Acc]);
tokenise("PM"++Rest, Acc) -> tokenise(Rest, [pm | Acc]);
%% Postel's Law
%%
%% be conservative in what you do,
%% be liberal in what you accept from others.
%%
%% See RFC 793 Section 2.10 http://tools.ietf.org/html/rfc793
%%
%% Mebbies folk want to include Saturday etc in a date, nae borra
tokenise("MONDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("MON"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("TUESDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("TUES"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("TUE"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("WEDNESDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("WEDS"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("WED"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("THURSDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("THURS"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("THUR"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("THU"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("FRIDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("FRI"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("SATURDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("SAT"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("SUNDAY"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("SUN"++Rest, Acc) -> tokenise(Rest, Acc);
%% Hmm Excel reports GMT in times so nuke that too
tokenise("GMT"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("UTC"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("DST"++Rest, Acc) -> tokenise(Rest, Acc); % daylight saving time
%% Punctuation, spaces and ordinal suffixes carry no information.
tokenise([$, | Rest], Acc) -> tokenise(Rest, Acc);
tokenise([32 | Rest], Acc) -> tokenise(Rest, Acc); % Spaces
tokenise("TH"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("ND"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("ST"++Rest, Acc) -> tokenise(Rest, Acc);
tokenise("OF"++Rest, Acc) -> tokenise(Rest, Acc);
%% ISO 8601 date/time separator and Zulu marker.
tokenise([$T | Rest], Acc) -> tokenise(Rest, [$T | Acc]);
tokenise([$Z | Rest], Acc) -> tokenise(Rest, [$Z | Acc]);
%% Anything unrecognised is preserved so parse/3 can reject the input.
tokenise([Else | Rest], Acc) ->
    tokenise(Rest, [{bad_token, Else} | Acc]).
%% @doc Convert a parsed hour plus optional meridian marker to a
%% 24-hour-clock hour.  On a 12-hour clock "12 AM" is midnight (hour 0)
%% and "12 PM" is noon (hour 12); the previous version returned 12 and
%% 24 respectively, producing an invalid hour for "12 PM".
hour(Hour, []) -> Hour;
hour(12, [am]) -> 0;
hour(Hour, [am]) -> Hour;
hour(12, [pm]) -> 12;
hour(Hour, [pm]) -> Hour + 12.
-spec format(string(),datetime(),list()) -> string().
%% Core formatter: consume the format string one directive at a time
%% (PHP date() semantics), accumulating output chunks in reverse.
%% Fixes over the previous version: noon is now labelled "pm"/"PM"
%% (the `H > 12' guard excluded hour 12) and `h' renders midnight as
%% "12" rather than "00".  Calendar calls are fully qualified.
%% Finished, return
format([], _Date, Acc) ->
    lists:flatten(lists:reverse(Acc));

%% Escape backslashes: emit the following character literally
format([$\\,H|T], Dt, Acc) ->
    format(T,Dt,[H|Acc]);

%% Year Formats
format([$Y|T], {{Y,_,_},_}=Dt, Acc) ->
    format(T, Dt, [itol(Y)|Acc]);
format([$y|T], {{Y,_,_},_}=Dt, Acc) ->
    %% Two-digit year; assumes a four-digit year value
    [_, _, Y3, Y4] = itol(Y),
    format(T, Dt, [[Y3,Y4]|Acc]);
format([$L|T], {{Y,_,_},_}=Dt, Acc) ->
    format(T, Dt, [itol(is_leap(Y))|Acc]);
format([$o|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [itol(iso_year(Date))|Acc]);

%% Month Formats
format([$n|T], {{_,M,_},_}=Dt, Acc) ->
    format(T, Dt, [itol(M)|Acc]);
format([$m|T], {{_,M,_},_}=Dt, Acc) ->
    format(T, Dt, [pad2(M)|Acc]);
format([$M|T], {{_,M,_},_}=Dt, Acc) ->
    format(T, Dt, [smonth(M)|Acc]);
format([$F|T], {{_,M,_},_}=Dt, Acc) ->
    format(T, Dt, [month(M)|Acc]);
format([$t|T], {{Y,M,_},_}=Dt, Acc) ->
    format(T, Dt, [itol(calendar:last_day_of_the_month(Y,M))|Acc]);

%% Week Formats
format([$W|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [pad2(iso_week(Date))|Acc]);

%% Day Formats
format([$j|T], {{_,_,D},_}=Dt, Acc) ->
    format(T, Dt, [itol(D)|Acc]);
format([$S|T], {{_,_,D},_}=Dt, Acc) ->
    format(T, Dt,[suffix(D)| Acc]);
format([$d|T], {{_,_,D},_}=Dt, Acc) ->
    format(T, Dt, [pad2(D)|Acc]);
format([$D|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [sdayd(Date)|Acc]);
format([$l|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [day(calendar:day_of_the_week(Date))|Acc]);
format([$N|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [itol(calendar:day_of_the_week(Date))|Acc]);
format([$w|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [itol(to_w(calendar:day_of_the_week(Date)))|Acc]);
format([$z|T], {Date,_}=Dt, Acc) ->
    format(T, Dt, [itol(days_in_year(Date))|Acc]);

%% Time Formats
%% Meridian: noon (hour 12) and later is "pm" -- hence `H >= 12'
format([$a|T], {_,{H,_,_}}=Dt, Acc) when H >= 12 ->
    format(T, Dt, ["pm"|Acc]);
format([$a|T], Dt, Acc) ->
    format(T, Dt, ["am"|Acc]);
format([$A|T], {_,{H,_,_}}=Dt, Acc) when H >= 12 ->
    format(T, Dt, ["PM"|Acc]);
format([$A|T], Dt, Acc) ->
    format(T, Dt, ["AM"|Acc]);
%% 12-hour clock, no leading zero: 0 and 12 both render as "12"
format([$g|T], {_,{H,_,_}}=Dt, Acc) when H == 12; H == 0 ->
    format(T, Dt, ["12"|Acc]);
format([$g|T], {_,{H,_,_}}=Dt, Acc) when H > 12 ->
    format(T, Dt, [itol(H-12)|Acc]);
format([$g|T], {_,{H,_,_}}=Dt, Acc) ->
    format(T, Dt, [itol(H)|Acc]);
format([$G|T], {_,{H,_,_}}=Dt, Acc) ->
    format(T, Dt, [itol(H)|Acc]);
%% 12-hour clock with leading zero; midnight is "12", not "00"
format([$h|T], {_,{0,_,_}}=Dt, Acc) ->
    format(T, Dt, ["12"|Acc]);
format([$h|T], {_,{H,_,_}}=Dt, Acc) when H > 12 ->
    format(T, Dt, [pad2(H-12)|Acc]);
format([$h|T], {_,{H,_,_}}=Dt, Acc) ->
    format(T, Dt, [pad2(H)|Acc]);
format([$H|T], {_,{H,_,_}}=Dt, Acc) ->
    format(T, Dt, [pad2(H)|Acc]);
format([$i|T], {_,{_,M,_}}=Dt, Acc) ->
    format(T, Dt, [pad2(M)|Acc]);
format([$s|T], {_,{_,_,S}}=Dt, Acc) ->
    format(T, Dt, [pad2(S)|Acc]);

%% Whole Dates
format([$c|T], {{Y,M,D},{H,Min,S}}=Dt, Acc) ->
    Format = "~4.10.0B-~2.10.0B-~2.10.0B"
        ++" ~2.10.0B:~2.10.0B:~2.10.0B",
    Date = io_lib:format(Format, [Y, M, D, H, Min, S]),
    format(T, Dt, [Date|Acc]);
format([$r|T], {{Y,M,D},{H,Min,S}}=Dt, Acc) ->
    Format = "~s, ~p ~s ~p ~2.10.0B:~2.10.0B:~2.10.0B",
    Args = [sdayd({Y,M,D}), D, smonth(M), Y, H, Min, S],
    format(T, Dt, [io_lib:format(Format, Args)|Acc]);
format([$U|T], Dt, Acc) ->
    %% Seconds since the Unix epoch
    Epoch = {{1970,1,1},{0,0,0}},
    Time = calendar:datetime_to_gregorian_seconds(Dt) -
        calendar:datetime_to_gregorian_seconds(Epoch),
    format(T, Dt, [itol(Time)|Acc]);

%% Unrecognised, print as is
format([H|T], Date, Acc) ->
    format(T, Date, [H|Acc]).
%% @doc Zero-based day of the year (PHP `z' directive): the number of
%% days elapsed since January 1st of the same year.
-spec days_in_year(date()) -> integer().
days_in_year({Year, _, _} = Date) ->
    calendar:date_to_gregorian_days(Date)
        - calendar:date_to_gregorian_days({Year, 1, 1}).
%% @doc 1 if `Year' is a leap year, 0 otherwise (PHP `L' directive).
-spec is_leap(year()) -> 1|0.
is_leap(Year) ->
    case calendar:is_leap_year(Year) of
        true -> 1;
        false -> 0
    end.
%% @doc Map calendar's ISO day number (1=Monday .. 7=Sunday) onto PHP
%% `w' numbering (0=Sunday .. 6=Saturday).
-spec to_w(daynum()) -> integer().
to_w(7) -> 0;
to_w(Day) -> Day.
-spec suffix(day()) -> string().
%% @doc English ordinal suffix for the day of the month, 2 characters.
%% Handles 11th-13th as well as 21st/22nd/23rd/31st; the previous
%% version only special-cased 1, 2 and 3 and so rendered e.g. "21th".
suffix(D) when D rem 100 >= 11, D rem 100 =< 13 -> "th";
suffix(D) when D rem 10 =:= 1 -> "st";
suffix(D) when D rem 10 =:= 2 -> "nd";
suffix(D) when D rem 10 =:= 3 -> "rd";
suffix(_) -> "th".
-spec sdayd(date()) -> string().
%% @doc Three-letter day name for a calendar date.
sdayd(Date) ->
    sday(calendar:day_of_the_week(Date)).

-spec sday(daynum()) -> string().
%% @doc Three-letter day name for an ISO day number (1 = Monday).
sday(N) ->
    element(N, {"Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"}).
-spec day(daynum()) -> string().
%% @doc Full English day name for an ISO day number (1 = Monday).
day(N) ->
    element(N, {"Monday", "Tuesday", "Wednesday", "Thursday",
                "Friday", "Saturday", "Sunday"}).
-spec smonth(month()) -> string().
%% @doc Three-letter English month name.
smonth(M) ->
    element(M, {"Jan", "Feb", "Mar", "Apr", "May", "Jun",
                "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}).
-spec month(month()) -> string().
%% @doc Full English month name.
month(M) ->
    element(M, {"January", "February", "March", "April",
                "May", "June", "July", "August",
                "September", "October", "November", "December"}).
-spec iso_week(date()) -> integer().
%% @doc ISO-8601 week number of the week containing `Date'.
%% http://en.wikipedia.org/wiki/ISO_week_date
iso_week(Date) ->
    WeekOne = iso_week_one(iso_year(Date)),
    Offset = calendar:date_to_gregorian_days(Date)
        - calendar:date_to_gregorian_days(WeekOne),
    trunc((Offset / 7) + 1).

-spec iso_year(date()) -> integer().
%% @doc ISO-8601 week-numbering year for `Date'.  The last few days of
%% December may belong to next year's week 1, and the first few days of
%% January to the previous year's final week.
iso_year({Y, _M, _D} = Date) ->
    case Date >= {Y, 12, 29} of
        true ->
            case Date < iso_week_one(Y + 1) of
                true -> Y;
                false -> Y + 1
            end;
        false ->
            case Date < iso_week_one(Y) of
                true -> Y - 1;
                false -> Y
            end
    end.

-spec iso_week_one(year()) -> date().
%% @doc The Monday starting ISO week 1 of year `Y' -- the week that
%% contains January 4th.
iso_week_one(Y) ->
    Dow = calendar:day_of_the_week({Y, 1, 4}),
    calendar:gregorian_days_to_date(
      calendar:date_to_gregorian_days({Y, 1, 4}) + (1 - Dow)).
-spec itol(integer()) -> list().
%% @doc Shorthand for integer_to_list/1.
itol(Int) -> integer_to_list(Int).
-spec pad2(integer()) -> list().
%% @doc Zero-pad a number to at least two decimal digits; floats are
%% truncated first.  Returns iolist-style chardata (flattened at the
%% end of format/3).
pad2(N) when is_integer(N) ->
    io_lib:format("~2.10.0B", [N]);
pad2(N) when is_float(N) ->
    pad2(trunc(N)).
%% @doc Shorthand for list_to_integer/1.
ltoi(Digits) -> list_to_integer(Digits).
%% Normalise two digit years
-spec to_year(integer()) -> integer().
%% 60-99 map to 1960-1999, anything else below 100 maps into the
%% 2000s; four-digit years pass through untouched.
to_year(Year) ->
    if
        Year >= 60, Year < 100 -> Year + 1900;
        Year < 100 -> Year + 2000;
        true -> Year
    end.
%%
%% TEST FUNCTIONS
%%
%% c(dh_date,[{d,'TEST'}]).
%-define(NOTEST, 1).
-include_lib("eunit/include/eunit.hrl").
-define(DATE, {{2001,3,10},{17,16,17}}).
-define(ISO, "o \\WW").
%% Formatting fixtures; note the arguments are (Actual, Expected),
%% reversed from the usual ?_assertEqual convention.
basic_format_test_() ->
    [
     ?_assertEqual(format("F j, Y, g:i a",?DATE), "March 10, 2001, 5:16 pm"),
     ?_assertEqual(format("m.d.y",?DATE), "03.10.01"),
     ?_assertEqual(format("j, n, Y",?DATE), "10, 3, 2001"),
     ?_assertEqual(format("Ymd",?DATE), "20010310"),
     ?_assertEqual(format("H:i:s",?DATE), "17:16:17"),
     ?_assertEqual(format("z",?DATE), "68"),
     ?_assertEqual(format("D M j G:i:s Y",?DATE), "Sat Mar 10 17:16:17 2001"),
     ?_assertEqual(format("h-i-s, j-m-y, it is w Day",?DATE),
                   "05-16-17, 10-03-01, 1631 1617 6 Satpm01"),
     ?_assertEqual(format("\\i\\t \\i\\s \\t\\h\\e\\ jS \\d\\a\\y.",?DATE),
                   "it is the 10th day."),
     ?_assertEqual(format("H:m:s \\m \\i\\s \\m\\o\\n\\t\\h",?DATE),
                   "17:03:17 m is month")
    ].
%% Parsing fixtures: fields absent from the input must default to the
%% corresponding part of ?DATE.
basic_parse_test_() ->
    [
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("22nd of August 2008", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22-Aug-2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,12}},
                   parse("22-Aug-2008 6:35:12 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22/Aug/2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22/August/2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22 August 2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22 Aug 2008 6:35AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22 Aug 2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("22 Aug 2008 6:35", ?DATE)),
     ?_assertEqual({{2008,8,22}, {18,35,0}},
                   parse("22 Aug 2008 6:35 PM", ?DATE)),
     ?_assertEqual({{2001,3,10}, {11,15,0}},
                   parse("11:15", ?DATE)),
     ?_assertEqual({{2001,3,10}, {1,15,0}},
                   parse("1:15", ?DATE)),
     ?_assertEqual({{2001,3,10}, {1,15,0}},
                   parse("1:15 am", ?DATE)),
     ?_assertEqual({{2001,3,10}, {3,45,39}},
                   parse("3:45:39", ?DATE)),
     ?_assertEqual({{1963,4,23}, {17,16,17}},
                   parse("23/4/1963", ?DATE)),
     ?_assertEqual({{1963,4,23}, {17,16,17}},
                   parse("23.4.1963", ?DATE)),
     ?_assertEqual({{1963,4,23}, {17,16,17}},
                   parse("23/april/1963", ?DATE)),
     ?_assertEqual({{1963,4,23}, {17,16,17}},
                   parse("23/apr/1963", ?DATE)),
     %% "195" tokenises as 19 then 5, which no grammar clause accepts.
     ?_assertEqual({error, bad_date},
                   parse("23/ap/195", ?DATE)),
     ?_assertEqual({{2001,3,10}, {6,45,0}},
                   parse("6:45 am", ?DATE)),
     ?_assertEqual({{2001,3,10}, {18,45,0}},
                   parse("6:45 PM", ?DATE)),
     ?_assertEqual({{2001,3,10}, {18,45,0}},
                   parse("6:45 PM ", ?DATE))
    ].
%% Day names (in any spelling the tokeniser knows) must be accepted and
%% ignored -- they are not cross-checked against the actual date.
parse_with_days_test_() ->
    [
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("Sat 22nd of August 2008", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("Sat, 22-Aug-2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,12}},
                   parse("Sunday 22-Aug-2008 6:35:12 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("Sun 22/Aug/2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("THURSDAY, 22/August/2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("THU 22 August 2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("FRi 22 Aug 2008 6:35AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("Wednesday 22 Aug 2008 6:35 AM", ?DATE)),
     ?_assertEqual({{2008,8,22}, {6,35,0}},
                   parse("Monday 22 Aug 2008 6:35", ?DATE)),
     ?_assertEqual({{2008,8,22}, {18,35,0}},
                   parse("Mon, 22 Aug 2008 6:35 PM", ?DATE))
    ].
%% Numeric Y-M-D / D-M-Y forms, including two-digit year normalisation
%% (97 -> 1997, 11 -> 2011 via to_year/1).
parse_ymd_test_() ->
    [
     ?_assertEqual({{2008,8,22}, {18,19,20}},
                   parse("2008-08-22 18:19:20", ?DATE)),
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("2008-08-22", ?DATE)),
     ?_assertEqual({{2008,8,22}, {18,19,20}},
                   parse("22-08-2008 18:19:20", ?DATE)),
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("22-08-2008", ?DATE)),
     ?_assertEqual({{1997,8,22}, {18,19,20}},
                   parse("22-08-97 18:19:20", ?DATE)),
     ?_assertEqual({{1997,8,22}, {17,16,17}},
                   parse("22-08-97", ?DATE)),
     ?_assertEqual({{2011,8,22}, {18,19,20}},
                   parse("22-08-11 18:19:20", ?DATE)),
     ?_assertEqual({{2011,8,22}, {17,16,17}},
                   parse("22-08-11", ?DATE))
    ].
%% Timezone labels are stripped by the tokeniser, not interpreted.
parse_with_TZ_test_() ->
    [
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("Sat 22nd of August 2008 GMT", ?DATE)),
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("Sat 22nd of August 2008 UTC", ?DATE)),
     ?_assertEqual({{2008,8,22}, {17,16,17}},
                   parse("Sat 22nd of August 2008 DST", ?DATE))
    ].
%% ISO 8601 inputs are the only ones where offsets are honoured; the
%% result is converted to local time.
parse_iso8601_test_() ->
    [
     ?_assertEqual(calendar:universal_time_to_local_time({{2008,8,22}, {17,16,17}}),
                   parse("2008-08-22T17:16:17Z", ?DATE)),
     ?_assertEqual(calendar:universal_time_to_local_time({{2008,8,22}, {16,16,17}}),
                   parse("2008-08-22T17:16:17+01:00", ?DATE))
    ].
%% ISO week-date fixtures covering the year-boundary edge cases.
%% (Fixes dataset residue that had been fused onto the closing line,
%% which made the original source unparseable.)
iso_test_() ->
    [
     ?_assertEqual("2004 W53",format(?ISO,{{2005,1,1},  {1,1,1}})),
     ?_assertEqual("2004 W53",format(?ISO,{{2005,1,2},  {1,1,1}})),
     ?_assertEqual("2005 W52",format(?ISO,{{2005,12,31},{1,1,1}})),
     ?_assertEqual("2007 W01",format(?ISO,{{2007,1,1},  {1,1,1}})),
     ?_assertEqual("2007 W52",format(?ISO,{{2007,12,30},{1,1,1}})),
     ?_assertEqual("2008 W01",format(?ISO,{{2007,12,31},{1,1,1}})),
     ?_assertEqual("2008 W01",format(?ISO,{{2008,1,1},  {1,1,1}})),
     ?_assertEqual("2009 W01",format(?ISO,{{2008,12,29},{1,1,1}})),
     ?_assertEqual("2009 W01",format(?ISO,{{2008,12,31},{1,1,1}})),
     ?_assertEqual("2009 W01",format(?ISO,{{2009,1,1},  {1,1,1}})),
     ?_assertEqual("2009 W53",format(?ISO,{{2009,12,31},{1,1,1}})),
     ?_assertEqual("2009 W53",format(?ISO,{{2010,1,3},  {1,1,1}}))
    ].
%% Copyright (c) 2012-2016 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(mdns_advertise).
-behaviour(gen_server).
-export([code_change/3]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([init/1]).
-export([multicast/1]).
-export([start_link/1]).
-export([stop/1]).
-export([terminate/2]).
%% @doc Start an advertiser server, registered via gproc under the
%% advertiser callback module's service name.
start_link(Advertiser) ->
    gen_server:start_link(ref(Advertiser), ?MODULE, [Advertiser], []).
%% @doc Ask the advertiser's server to send an announcement now.
multicast(Advertiser) ->
    gen_server:cast(ref(Advertiser), multicast).
%% @doc Shut the advertiser's server down.
stop(Advertiser) ->
    gen_server:cast(ref(Advertiser), stop).
%% gproc via-tuple used to register and locate the server for the
%% advertiser's service.
ref(Advertiser) ->
    Key = #{module => ?MODULE, service => Advertiser:service()},
    {via, gproc, {n, l, Key}}.
%% @doc Open the advertisement UDP socket and seed the server state;
%% the first announcement is scheduled after a short random delay.
init([Advertiser]) ->
    case mdns_udp:open(advertise) of
        {ok, State} ->
            State1 = State#{
              advertiser => Advertiser,
              domain => mdns_config:domain(),
              service => Advertiser:service(),
              environment => mdns_config:environment(),
              ttl => mdns_config:ttl()},
            {ok, State1, random_timeout(initial)};
        {error, Reason} ->
            {stop, Reason}
    end.
%% No synchronous API is supported; any call stops the server.
handle_call(_Request, _From, State) ->
    {stop, error, State}.
%% `multicast' triggers an immediate announcement (rescheduling the
%% next one); `stop' shuts the server down.
handle_cast(multicast, #{ttl := TTL} = State) ->
    case announce(State) of
        ok -> {noreply, State, random_timeout(announcements, TTL)};
        {error, _} = Error -> {stop, Error, State}
    end;
handle_cast(stop, State) ->
    {stop, normal, State}.
handle_info(timeout, #{ttl := TTL} = State) ->
case announce(State) of
ok ->
{noreply, State, random_timeout(announcements, TTL)};
{error, _} = Error ->
{stop, Error, State}
end;
handle_info({udp, _, _, _, _}, #{ttl := TTL} = State) ->
{noreply, State, random_timeout(announcements, TTL)}.
%% @private
%% @doc On shutdown, send one final announcement with ttl = 0 (an mDNS
%% "goodbye", so peers can drop our records) and close the socket. The
%% result of the goodbye is deliberately ignored.
terminate(_, #{socket := Socket} = State) ->
    _ = announce(State#{ttl => 0}),
    gen_udp:close(Socket).
%% @private
%% @doc No state migration is needed between code versions.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% @private
%% @doc Randomized delay (ms) before the first announcement, in [500, 1500).
%% crypto:rand_uniform/2 was deprecated and later removed from OTP, so the
%% equivalent computation uses rand:uniform/1.
random_timeout(initial) ->
    rand_between(500, 1500).

%% @private
%% @doc Randomized delay (ms) between periodic announcements, scaled by
%% the record TTL: result is in [TTL * 500, TTL * 1000).
random_timeout(announcements, TTL) ->
    rand_between(TTL * 500, TTL * 1000).

%% @private
%% @doc Uniform random integer in [Lo, Hi), matching the contract of the
%% removed crypto:rand_uniform/2 (rand:uniform(N) is in [1, N]).
rand_between(Lo, Hi) ->
    Lo + rand:uniform(Hi - Lo) - 1.
%% @private
%% @doc Build a DNS message from the advertiser's current instances
%% (PTR answers plus SRV/TXT additional records), encode it and multicast
%% it to the group address held in the state. Returns ok or
%% {error, Reason} straight from gen_udp:send/4.
announce(#{address := Address,
           advertiser := Advertiser,
           socket := Socket} = State) ->
    Instances = Advertiser:instances(),
    gen_udp:send(
      Socket,
      Address,
      mdns_config:port(udp),
      inet_dns:encode(
        inet_dns:make_msg(
          [{header, header()},
           {anlist, answers(Instances, State)},
           {arlist, resources(Instances, State)}]))).
%% @private
%% @doc DNS header used for every announcement: an authoritative response
%% (qr and aa set, rcode 0) with every other flag cleared and id 0.
header() ->
    Fields = [{id, 0},
              {qr, true},
              {opcode, query},
              {aa, true},
              {tc, false},
              {rd, false},
              {ra, false},
              {pr, false},
              {rcode, 0}],
    inet_dns:make_header(Fields).
%% @private
%% @doc One PTR answer record per instance, mapping the service name
%% (service ++ domain) to the instance name.
answers(Instances, #{domain := Domain, service := Service, ttl := TTL}) ->
    [inet_dns:make_rr(
       [{type, ptr},
        {domain, Service ++ Domain},
        {class, in},
        {ttl, TTL},
        {data, Instance}]) || #{instance := Instance} <- Instances].
%% @private
%% @doc Additional records accompanying the answers: SRV then TXT.
resources(Instances, State) ->
    services(Instances, State) ++ texts(Instances, State).
%% @private
%% @doc One SRV record per instance, carrying priority, weight, port and
%% the target host (hostname ++ domain).
services(Instances, #{domain := Domain, ttl := TTL}) ->
    [inet_dns:make_rr(
       [{domain, Instance},
        {type, srv},
        {class, in},
        {ttl, TTL},
        {data, {Priority, Weight, Port, Hostname ++ Domain}}]) || #{
          instance := Instance,
          priority := Priority,
          weight := Weight,
          port := Port,
          hostname := Hostname} <- Instances].
%% @private
%% @doc One TXT record per instance, rendering its properties map as
%% "key=value" strings via kvs/1.
texts(Instances, #{ttl := TTL}) ->
    [inet_dns:make_rr(
       [{domain, Instance},
        {type, txt},
        {class, in},
        {ttl, TTL},
        {data, kvs(KVS)}]) || #{instance := Instance, properties := KVS} <- Instances].
%% @private
%% @doc Render a map of TXT properties as a list of "key=value" strings.
%% Map fold order is undefined, so the order of the result list is too.
kvs(Properties) ->
    maps:fold(fun(Key, Value, Acc) -> [kv(Key, Value) | Acc] end,
              [],
              Properties).
%% @private
%% @doc Format one TXT property as "Key=Value". String keys are accepted
%% only up to 9 characters; non-list keys are first coerced with
%% any:to_list/1 and retried. A list key longer than 9 characters fails
%% with function_clause — presumably intentional validation of TXT key
%% length (TODO confirm). The stray non-Erlang text that followed the
%% final clause (extraction artifact) has been removed; it broke the build.
kv(Key, Value) when is_list(Key) andalso length(Key) =< 9 ->
    Key ++ "=" ++ any:to_list(Value);
kv(Key, Value) when not(is_list(Key)) ->
    kv(any:to_list(Key), Value).
% The Computer Language Benchmarks Game
% https://salsa.debian.org/benchmarksgame-team/benchmarksgame/
%
% contributed by <NAME>
% minor optimizations by <NAME> (2007-06-17)
%% erlc nbody.erl
%% erl -smp enable -noshell -run nbody main 50000000
-module(nbody).
-export([main/1]).
%% Physical constants used to scale the initial conditions in bodies/0.
-define(pi, 3.14159265358979323).
-define(solar_mass, (4 * ?pi * ?pi)).
-define(days_per_year, 365.24).
%% Shorthand for is_float/1 guards; these guards are sprinkled through the
%% numeric code, apparently so the compiler can specialize the float
%% arithmetic (a common trick in this benchmark).
-define(f(X), is_float(X)).
%% Entry point for `erl -run nbody main N': parses the iteration count,
%% runs the simulation, then halts the VM.
main([Arg]) ->
    N = list_to_integer(Arg),
    main(N),
    erlang:halt(0);
%% Run the benchmark: print system energy before and after advancing the
%% simulation N steps with dt = 0.01.
main(N) ->
    Bodies = offset_momentum(bodies()),
    io:format("~.9f\n", [energy(Bodies)]),
    io:format("~.9f\n", [energy(advance(N, Bodies, 0.01))]).
%% Give the first body (the sun) a velocity that cancels the total system
%% momentum, so the centre of mass stays fixed. The fold sums m*v over all
%% bodies. Note: the fun's guard float-checks Vx, Vy and M but not Vz —
%% looks like an oversight; harmless for correctness (TODO confirm intent).
offset_momentum(Bodies = [{X, Y, Z, _, _, _, Ms} | T])
  when ?f(X),?f(Y),?f(Z),?f(Ms) ->
    {Px, Py, Pz} = lists:foldl(fun({_, _, _, Vx, Vy, Vz, M}, {Px, Py, Pz})
                                     when ?f(Vx),?f(Vy),?f(M),?f(Px),?f(Py),
                                          ?f(Pz) ->
                                       {Px + Vx * M, Py + Vy * M, Pz + Vz * M}
                               end,
                               {0.0, 0.0, 0.0},
                               Bodies),
    [{X, Y, Z, -Px/?solar_mass, -Py/?solar_mass, -Pz/?solar_mass, Ms} | T].
%% Total system energy: kinetic (0.5*m*v^2 per body) plus pairwise
%% gravitational potential (-m1*m2/dist). Each recursion step adds one
%% body's kinetic term, then subtracts its potential against every body
%% remaining in the tail, so each pair is counted exactly once.
energy(Bodies) -> energy(Bodies, 0.0).
energy([], E) -> E;
energy([{X, Y, Z, Vx, Vy, Vz, M} | T], E)
  when ?f(X),?f(Y), ?f(Z), ?f(Vx), ?f(Vy), ?f(Vz), ?f(M), ?f(E) ->
    energy(T, lists:foldl(fun({X2, Y2, Z2, _, _, _, M2}, Ea)
                                when ?f(X2),?f(Y2),?f(Z2),?f(M2),?f(Ea) ->
                                  Dx = X - X2,
                                  Dy = Y - Y2,
                                  Dz = Z - Z2,
                                  Dist = math:sqrt(Dx*Dx + Dy*Dy + Dz*Dz),
                                  Ea - M * M2 / Dist
                          end,
                          E + 0.5 * M * (Vx * Vx + Vy * Vy + Vz * Vz),
                          T)).
%% Advance the simulation N steps: each step updates velocities from the
%% pairwise interactions (adv1) then positions from the velocities (adv2).
advance(0, Bodies, _Dt) -> Bodies;
advance(N, Bodies, Dt) -> advance(N - 1, adv2(adv1(Bodies, Dt), Dt), Dt).
%% Earlier non-accumulator version, kept for reference by the original
%% author:
%%% adv1([B], _) -> [B];
%%% adv1([B | T], Dt) ->
%%%     {B1, T1} = adv1(B, T, [], Dt),
%%%     [B1 | adv1(T1, Dt)].
%% Velocity update pass: for each body, fold its interaction with every
%% remaining body via adv1/4. The accumulator reverses the list each pass;
%% pairwise forces are order-independent so the result is unaffected.
adv1(Bs, Dt) ->
    adv1(Bs, Dt, []).
adv1([], _, Acc) -> Acc;
adv1([B | T], Dt, Acc) ->
    {B1, T1} = adv1(B, T, [], Dt),
    adv1(T1, Dt, [B1|Acc]).
%% Earlier version that preserved list order (author's reference):
%%%adv1(B, [], L, _Dt) -> {B, lists:reverse(L)};
%% Apply the gravitational interaction between body B and every body in
%% the tail, updating the velocities of both symmetrically. Returns the
%% fully-updated B plus the (reversed) partially-updated tail.
adv1(B, [], L, _Dt) -> {B, L};
adv1({X, Y, Z, Vx, Vy, Vz, M}, [{X2, Y2, Z2, Vx2, Vy2, Vz2, M2} | T], L, Dt)
  when ?f(X), ?f(Y), ?f(Z), ?f(Vx), ?f(Vy), ?f(Vz), ?f(M), ?f(Dt),
       ?f(X2), ?f(Y2), ?f(Z2), ?f(Vx2), ?f(Vy2), ?f(Vz2), ?f(M2) ->
    Dx = X - X2,
    Dy = Y - Y2,
    Dz = Z - Z2,
    %% Mag = dt / dist^3, so Dx*Mag*m is the velocity increment.
    D = math:sqrt(Dx*Dx + Dy*Dy + Dz*Dz),
    Mag = Dt / (D*D*D),
    Bmm = M *Mag,
    B2mm = M2 *Mag,
    Bnew = {X, Y, Z, Vx - Dx * B2mm, Vy - Dy * B2mm, Vz - Dz * B2mm, M},
    B2new = {X2, Y2, Z2, Vx2 + Dx * Bmm, Vy2 + Dy * Bmm, Vz2 + Dz * Bmm, M2},
    adv1(Bnew, T, [B2new | L], Dt).
%% Position update pass: move every body by v * dt. Reverses the list,
%% which undoes the reversal done by adv1/3 within the same step.
adv2(Bs, Dt) ->
    adv2(Bs, Dt, []).
adv2([], _, Acc) -> Acc;
adv2([{X, Y, Z, Vx, Vy, Vz, M} | T], Dt, Acc)
  when ?f(X), ?f(Y), ?f(Z), ?f(Vx), ?f(Vy), ?f(Vz), ?f(M), ?f(Dt) ->
    adv2(T, Dt, [{X + Dt * Vx, Y + Dt * Vy, Z + Dt * Vz, Vx, Vy, Vz, M}|Acc]).
%% @private
%% @doc Initial conditions for the five-body solar-system benchmark. Each
%% body is {X, Y, Z, Vx, Vy, Vz, Mass}; velocities are pre-scaled by
%% days_per_year and masses by solar_mass. The trailing non-Erlang text
%% that followed the closing "]." (extraction artifact) has been removed —
%% it made the module unparseable.
bodies() ->
    [
     { % sun
       0.0,
       0.0,
       0.0,
       0.0,
       0.0,
       0.0,
       ?solar_mass
     },
     { % jupiter
       4.84143144246472090e+00,
       -1.16032004402742839e+00,
       -1.03622044471123109e-01,
       1.66007664274403694e-03 * ?days_per_year,
       7.69901118419740425e-03 * ?days_per_year,
       -6.90460016972063023e-05 * ?days_per_year,
       9.54791938424326609e-04 * ?solar_mass
     },
     { % saturn
       8.34336671824457987e+00,
       4.12479856412430479e+00,
       -4.03523417114321381e-01,
       -2.76742510726862411e-03 * ?days_per_year,
       4.99852801234917238e-03 * ?days_per_year,
       2.30417297573763929e-05 * ?days_per_year,
       2.85885980666130812e-04 * ?solar_mass
     },
     { % uranus
       1.28943695621391310e+01,
       -1.51111514016986312e+01,
       -2.23307578892655734e-01,
       2.96460137564761618e-03 * ?days_per_year,
       2.37847173959480950e-03 * ?days_per_year,
       -2.96589568540237556e-05 * ?days_per_year,
       4.36624404335156298e-05 * ?solar_mass
     },
     { % neptune
       1.53796971148509165e+01,
       -2.59193146099879641e+01,
       1.79258772950371181e-01,
       2.68067772490389322e-03 * ?days_per_year,
       1.62824170038242295e-03 * ?days_per_year,
       -9.51592254519715870e-05 * ?days_per_year,
       5.15138902046611451e-05 * ?solar_mass
     }
    ].
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2017,
%%% @doc Erlang module to convert Romans numbers to integers
%%% @end
%%% Created : 10. dez 2017 17:55
%%%-------------------------------------------------------------------
-module(romans).
%% Silence unused-function warnings for the deprecated validators that are
%% kept for reference (validate_input/2 and is_permitted/2).
-compile([{nowarn_unused_function, [{ validate_input, 2}, {is_permitted, 2}]}]).
%% API exports
%% Note: this empty export attribute is a harmless leftover; the real API
%% export follows below.
-export([]).
%%====================================================================
%% API functions
%%====================================================================
-export([
         to_int/1
        ]).
%% @doc Convert a Roman numeral string to its integer value; returns the
%% atom 'fail' if the input contains a forbidden letter combination.
%% The empty string is considered valid and converts to 0, so the success
%% type is non_neg_integer() (the previous spec incorrectly claimed
%% pos_integer()).
-type roman() :: string().
-spec to_int(roman()) -> non_neg_integer() | fail.
to_int(Value) ->
    case validate_input(Value) of
        true -> roman_to_int(Value);
        false -> fail
    end.
%%====================================================================
%% Internal functions
%%====================================================================
%% @private
%% @doc Entry point function to convert a validated Roman numeral list to
%% an integer: digits are turned into {Char, Value} pairs (reversed),
%% signed by position, then summed. The empty list yields 0, and this
%% function never returns 'fail' — the previous spec ('fail |
%% pos_integer()') was wrong on both counts.
-spec roman_to_int(list()) -> non_neg_integer().
roman_to_int([]) ->
    0;
roman_to_int(List) ->
    RArr = roman_to_array(List),
    RArrSignaled = resolve_roman_array_signals(RArr),
    sum_roman_array(RArrSignaled).
%% @private
%% @doc Map a single Roman numeral character to its value. Any other
%% character fails with function_clause (input is pre-validated).
-spec r_to_i(char()) -> pos_integer().
r_to_i($I) ->
    1;
r_to_i($V) ->
    5;
r_to_i($X) ->
    10;
r_to_i($L) ->
    50;
r_to_i($C) ->
    100;
r_to_i($D) ->
    500;
r_to_i($M) ->
    1000.
%% @doc Turn a Roman numeral string into a list of {Char, Value} pairs.
-spec roman_to_array(roman()) -> list().
roman_to_array(RomanInput) ->
    do_roman_to_array(RomanInput, []).
%% @private
%% @doc Convert the list of roman characters to {Char, Value} pairs.
%% @returns Return a list in inverse order from the input (this is on
%% purpose: resolve_roman_array_signals/1 scans it back-to-front).
-spec do_roman_to_array(roman(), list()) -> list().
do_roman_to_array([], Acc) ->
    Acc;
do_roman_to_array([Chr|Rest], Acc) ->
    ChrInt = r_to_i(Chr),
    do_roman_to_array(Rest, [{Chr, ChrInt} | Acc]).
%% @doc Assign a '+'/'-' signal to each {Char, Value} pair of the
%% (reversed) digit array produced by roman_to_array/1.
-spec resolve_roman_array_signals(list()) -> list().
resolve_roman_array_signals(Arr) ->
    resolve_roman_array_signals(Arr, undefined, []).
%% @private
%% @doc This is the main function, it determines the signal to be used by
%% the sum_roman_array/1 function. It tracks the maximum value seen so far
%% (the input is in reverse reading order) and compares it to the current
%% value: a smaller value is subtractive notation (e.g. the I in "IX").
%% @returns A list of {Char, Value, Signal} tuples in reading order.
%% Note: Max starts as 'undefined', so the spec for the second argument is
%% pos_integer() | undefined (the previous spec omitted 'undefined').
-spec resolve_roman_array_signals(list(), pos_integer() | undefined, list()) -> list().
resolve_roman_array_signals([], _Max, Acc) ->
    lists:reverse(Acc);
%If no Max value is defined, set the current value as Max
resolve_roman_array_signals([{_Chr, Value} | _] = Arr, undefined, Acc) ->
    resolve_roman_array_signals(Arr, Value, Acc);
%If current Value and the Max value are the same, the signal must be a PLUS
resolve_roman_array_signals([{Chr, Value} | Rest], Value, Acc) ->
    resolve_roman_array_signals(Rest, Value, [{Chr, Value, '+'} | Acc]);
%If the Max value is bigger than the current value, the signal must be a MINUS (ex.: IX, Value = I, Max = X -> 10 - 1 -> 9)
resolve_roman_array_signals([{Chr, Value} | Rest], Max, Acc) when Max > Value->
    resolve_roman_array_signals(Rest, Max, [{Chr, Value, '-'} | Acc]);
%Else the signal must be a PLUS
resolve_roman_array_signals([{Chr, Value} | Rest], Max, Acc) when Max =< Value->
    resolve_roman_array_signals(Rest, Value, [{Chr, Value, '+'} | Acc]).
%% @private
%% @doc Total the {Char, Value, Signal} tuples produced by
%% resolve_roman_array_signals/1, adding on '+' and subtracting on '-'.
-spec sum_roman_array(list()) -> pos_integer().
sum_roman_array(SignedValues) ->
    lists:foldl(fun apply_signal/2, 0, SignedValues).

%% @private
%% @doc Fold step: apply one signed value to the running total.
apply_signal({_Chr, Value, '+'}, Total) ->
    Total + Value;
apply_signal({_Chr, Value, '-'}, Total) ->
    Total - Value.
%% @private
%% @doc Validate the input list. The empty string is trivially valid;
%% otherwise every positional check collected by
%% validate_input_repeated/2 must have passed.
-spec validate_input(list()) -> boolean().
validate_input([]) ->
    true;
validate_input(Input) ->
    lists:all(fun(Check) -> Check end, validate_input_repeated(Input, [])).
%% @private
%% @deprecated
%% @doc Verify some forbidden combinations by sliding a two-character
%% window over the input. Was replaced by {@link validate_input_repeated}.
%% Note: the accumulator holds one boolean per checked pair, so the return
%% type is [boolean()] — the previous spec incorrectly said boolean().
-spec validate_input(list(), list()) -> [boolean()].
validate_input([], Acc) ->
    Acc;
validate_input([_], Acc) ->
    [true | Acc];
validate_input([Chr1, Chr2 | Rest], Acc) ->
    IsPermitted = is_permitted(Chr1, Chr2),
    validate_input([Chr2 | Rest], [IsPermitted | Acc]).
%% @private
%% @deprecated
%% @doc Used to do basic validation on the input list: a fixed set of
%% ordered character pairs is forbidden, everything else is allowed.
-spec is_permitted(char(), char()) -> boolean().
is_permitted(First, Second) ->
    Forbidden = [{$I, $L}, {$V, $V}, {$V, $X}, {$X, $M},
                 {$L, $L}, {$L, $M}, {$D, $D}, {$D, $M}],
    not lists:member({First, Second}, Forbidden).
%% @private
%% @doc Scan the input for forbidden letter combinations: illegal
%% subtractive pairs (e.g. "IL"), illegal doubled prefixes (e.g. "IIV"),
%% and any character repeated four times in a row. Scanning stops at the
%% first violation, so the result is either a list ending in 'false' or
%% one 'true' per examined position; the caller reduces it with
%% lists:all/2. The previous spec incorrectly declared boolean() — the
%% function returns [boolean()]. The stray non-Erlang text fused to the
%% last clause (extraction artifact) has also been removed.
-spec validate_input_repeated(list(), [boolean()]) -> [boolean()].
validate_input_repeated([], Acc) ->
    Acc;
validate_input_repeated([$I, $L| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$V, $V| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$V, $X| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$X, $M| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$X, $D| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$L, $L| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$L, $M| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$D, $D| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$D, $M| _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$I, $I, $V | _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$I, $I, $X | _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$X, $X, $C | _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([$C, $C, $D | _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([Chr, Chr, Chr, Chr | _Resto], Acc) ->
    [false | Acc];
validate_input_repeated([_ | Resto], Acc) ->
    validate_input_repeated(Resto, [true | Acc]).
%% -*- erlang-indent-level: 4;indent-tabs-mode: nil; fill-column: 92 -*-
%% ex: ts=4 sw=4 et
%%
%% Copyright 2014 CHEF Software, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% @author <NAME> <<EMAIL>>
%% @copyright 2014 CHEF Software, Inc. All Rights Reserved.
%% @doc Record to DB mapping module and behaviour.
%%
%% This module helps you map records to and from the DB using prepared
%% queries. By creating a module, named the same as your record, and
%% implementing the `sqerl_rec' behaviour, you can take advantage of a
%% default set of generated prepared queries and helper functions
%% (defined in this module) that leverage those queries.
%%
%% Most of the callbacks can be generated for you if you use the
%% `exprecs' parse transform. If you use this parse transform, then
%% you will only need to implement the following three callbacks in
%% your record module:
%%
%% <ul>
%% <li>``'#insert_fields'/0'' A list of atoms describing the fields (which
%% should align with column names) used to insert a row into the
%% db. In many cases this is a proper subset of the record fields to
%% account for sequence ids and db generated timestamps.</li>
%% <li>``'#update_fields'/0'' A list of atoms giving the fields used for
%% updating a row.</li>
%% <li>``'#statements'/0'' A list of `[default | {atom(),
%% iolist()}]'. If the atom `'default'' is included, then a default
%% set of queries will be generated. Custom queries provided as
%% `{Name, SQL}' tuples will override any default queries of the same
%% name.</li>
%% </ul>
%%
%% If the table name associated with your record name does not follow
%% the naive pluralization rule implemented by `sqerl_rel', you can
%% export a ``'#table_name'/0'' function to provide the table name for
%% the mapping.
%%
%% It's also worth noting that `undefined' properties of a record will
%% be saved in the DB as `NULL', and then translated back to `undefined'
%% when fetched from the DB.
%% @end
-module(sqerl_rec).
%% Record access helpers (fetch/insert/update/delete/scalar) plus the
%% prepared-statement generators (gen_* and statements*).
-export([
         delete/2,
         fetch/3,
         fetch_all/1,
         fetch_page/3,
         first_page/0,
         insert/1,
         cinsert/1,
         qfetch/3,
         cquery/3,
         update/1,
         scalar_fetch/3,
         statements/1,
         statements_for/1,
         gen_fetch/2,
         gen_delete/2,
         gen_fetch_page/2,
         gen_fetch_all/2
        ]).
%% Test builds expose everything so internals can be unit tested.
-ifdef(TEST).
-compile([export_all]).
-endif.
%% These are the callbacks used for generating prepared queries and
%% providing the basic access helpers.
%% db_rec is assumed to be a record. It must at least be a tuple with
%% first element containing the `db_rec''s name as an atom.will almost
%% always be a record, but doesn't have to be as long as the behavior
%% is implemented.
-type db_rec() :: tuple().
%% These callbacks are better, thanks to the sqerl_gobot parse transform.
%% Field accessor generated for the record.
-callback getval(atom(), db_rec()) ->
    any().
%% Construct an empty record instance.
-callback '#new'() ->
    db_rec().
%% Build a record from a {field, value} proplist.
-callback fromlist([{atom(), _}]) ->
    db_rec().
%% All field names, in record order; the first field doubles as the
%% default unique key for generated queries.
-callback fields() ->
    [atom()].
%% these are not part of the exprecs parse transform. Making these /0
%% forces implementing modules to make one module per record. If we
%% don't want that, or if we want symmetry with the exprecs generated
%% items, we'd do /1 and accept rec name as arg.
-callback '#insert_fields'() ->
    [atom()].
-callback '#update_fields'() ->
    [atom()].
%% Like an iolist but only atoms
-type atom_list() :: atom() | [atom() | atom_list()].
-export_type([atom_list/0]).
-callback '#statements'() ->
    [default | {atom_list(), iolist()}].
%% @doc Fetch using prepared query `Query' returning a list of records
%% `[#RecName{}]'. The `Vals' list is the list of parameters for the
%% prepared query. If the prepared query does not take parameters, use
%% `[]'. Note that this can be used for INSERT and UPDATE queries if
%% they use an appropriate RETURNING clause.
-spec qfetch(atom(), atom_list(), [any()]) -> [db_rec()] | {error, _}.
qfetch(RecName, Query, Vals) ->
    %% Prepared query names are namespaced by record: RecName_Query.
    RealQ = join_atoms([RecName, '_', Query]),
    %% 'undefined' record values are stored as SQL NULL.
    CleanVals = [undef_to_null(V) || V <- Vals],
    case sqerl:select(RealQ, CleanVals) of
        {ok, none} ->
            [];
        %% A bare count means the query had no RETURNING clause — that is a
        %% caller error for a fetch, reported with full context.
        {ok, N} when is_integer(N) ->
            Msg = "query returned count only; expected rows",
            {error,
             {{ok, N},
              {sqerl_rec, qfetch, Msg, [RecName, Query, Vals]}}};
        {ok, Rows} ->
            rows_to_recs(Rows, RecName);
        {ok, N, Rows} when is_integer(N) ->
            rows_to_recs(Rows, RecName);
        Error ->
            ensure_error(Error)
    end.
%% @doc Execute query `Query' that returns a row count. If the query
%% returns results, e.g. an UPDATE ... RETURNING query, the result is
%% ignored and only the count is returned. See also {@link qfetch/3}.
-spec cquery(atom(), atom_list(), [any()]) -> {ok, integer()} | {error, _}.
cquery(RecName, Query, Vals) ->
    RealQ = join_atoms([RecName, '_', Query]),
    CleanVals = [undef_to_null(V) || V <- Vals],
    case sqerl:select(RealQ, CleanVals) of
        {ok, N} when is_integer(N) ->
            {ok, N};
        %% Count plus rows (UPDATE ... RETURNING): rows are discarded.
        {ok, N, _Rows} when is_integer(N) ->
            {ok, N};
        %% Rows without a count means the caller used a plain SELECT here.
        {ok, Rows} when is_list(Rows) ->
            Msg = "query returned rows and no count; expected count",
            {error,
             {{ok, Rows},
              {sqerl_rec, cquery, Msg, [RecName, Query, CleanVals]}}};
        Error ->
            ensure_error(Error)
    end.
%% @doc Execute a query that returns a list of scalar values. The
%% query must return a single column in result rows. This does
%% slightly less processing than using the rows_as_scalars transform
%% and prepends `RecName' to `Query' to match the sqerl_rec style.
-spec scalar_fetch(atom(), atom(), [any()]) -> [any()] | {error, _}.
scalar_fetch(RecName, Query, Params) ->
    RealQuery = join_atoms([RecName, '_', Query]),
    case sqerl:select(RealQuery, Params) of
        {ok, none} ->
            [];
        {ok, Results} ->
            %% scalar_results/1 throws {bad_row, Row} on any row that is
            %% not a single column; translate that into an error tuple
            %% carrying the offending row and the query context.
            try scalar_results(Results)
            catch throw:{bad_row, Bad} ->
                    Msg = "query did not return a single column",
                    {error,
                     {{bad_row, Bad},
                      {sqerl_rec, scalar_fetch, Msg,
                       [RecName, Query, Params]}}}
            end;
        {error, _} = Error ->
            Error
    end.
%% @private
%% @doc Unwrap single-column rows into their bare values; any row that is
%% not a one-element {Column, Value} list aborts via throw({bad_row, Row}).
scalar_results(Rows) ->
    [extract_scalar(Row) || Row <- Rows].

%% @private
%% @doc Pull the value out of a one-column row, or throw.
extract_scalar([{_ColName, Value}]) ->
    Value;
extract_scalar(BadRow) ->
    throw({bad_row, BadRow}).
%% @doc Return a list of `RecName' records using single parameter
%% prepared query `RecName_fetch_by_By' where `By' is a field and
%% column name and `Val' is the value of the column to match for in a
%% WHERE clause. A (possibly empty) list of record results is returned
%% even though a common use is to fetch a single row.
-spec fetch(atom(), atom(), any()) -> [db_rec()] | {error, _}.
fetch(RecName, By, Val) ->
    %% Uses the prepared query RecName_fetch_by_By with Val as the single
    %% WHERE parameter.
    Query = join_atoms([fetch_by, '_', By]),
    qfetch(RecName, Query, [Val]).
%% @doc Return all rows from the table associated with record module
%% `RecName'. Results will, by default, be ordered by the name field
%% (which is assumed to exist).
-spec fetch_all(atom()) -> [db_rec()] | {error, _}.
%% Runs the parameterless prepared query RecName_fetch_all.
fetch_all(RecName) ->
    qfetch(RecName, fetch_all, []).
%% @doc Fetch rows from the table associated with record module
%% `RecName' in a paginated fashion. The default generated query, like
%% that for `fetch_all', assumes a `name' field and column and orders
%% results by this field. The `StartName' argument determines the
%% start point and `Limit' the number of items to return. To fetch the
%% "first" page, use {@link first_page/0}. Use the last name received
%% as the value for `StartName' to fetch the "next" page.
-spec fetch_page(atom(), string(), integer()) -> [db_rec()] | {error, _}.
%% Keyset pagination: StartName is the exclusive lower bound, Limit the
%% page size; see first_page/0 for the initial bound.
fetch_page(RecName, StartName, Limit) ->
    qfetch(RecName, fetch_page, [StartName, Limit]).
%% @doc Return an ascii value, as a string, that sorts less or equal
%% to any valid name.
first_page() ->
    %% "\001" (SOH) sorts at or below any printable name, so it serves as
    %% the exclusive lower bound for the first fetch_page/3 call.
    "\001".
%% @doc Insert record `Rec' using prepared query `RecName_insert'. The
%% fields of `Rec' passed as parameters to the query are determined by
%% `RecName:'#insert_fields/0'. This function assumes the query uses
%% "INSERT ... RETURNING" and returns a record with db assigned fields
%% (such as sequence ids and timestamps filled out).
-spec insert(db_rec()) -> [db_rec()] | {error, _}.
insert(Rec) ->
    RecName = rec_name(Rec),
    %% Only the fields declared by '#insert_fields'/0 are sent; db-assigned
    %% columns (ids, timestamps) come back via the INSERT ... RETURNING.
    InsertFields = RecName:'#insert_fields'(),
    Values = rec_to_vlist(Rec, InsertFields),
    qfetch(RecName, insert, Values).
%% @doc Insert record `Rec' using prepared query `RecName_insert'. The
%% fields of `Rec' passed as parameters to the query are determined by
%% `RecName:'#insert_fields/0'. The result is ignored and only the
%% count is returned.
-spec cinsert(db_rec()) -> {ok, integer()} | {error, _}.
%% Count-only variant of insert/1: same prepared query, but any RETURNING
%% rows are discarded and only the row count is reported.
cinsert(Rec) ->
    RecName = rec_name(Rec),
    InsertFields = RecName:'#insert_fields'(),
    Values = rec_to_vlist(Rec, InsertFields),
    cquery(RecName, insert, Values).
%% @doc Update record `Rec'. Uses the prepared query with name
%% `RecName_update'. Assumes an `id' field and corresponding column
%% which is used to find the row to update. The fields from `Rec'
%% passed as parameters to the query are determined by
%% `RecName:'#update_fields/0'. This function assumes the UPDATE query
%% uses a RETURNING clause so that it can return a list of updated
%% records (similar to {@link insert/1}. This allows calling code to
%% receive db generated values such as timestamps and sequence ids
%% without making an additional round trip.
-spec update(db_rec()) -> [db_rec()] | {error, _}.
update(Rec) ->
    RecName = rec_name(Rec),
    UpdateFields = RecName:'#update_fields'(),
    Values = rec_to_vlist(Rec, UpdateFields),
    %% The row to update is located by the record's id field, passed as
    %% the final parameter after the SET values (see gen_update/2).
    Id = RecName:getval(id, Rec),
    qfetch(RecName, update, Values ++ [Id]).
%% @doc Delete the rows where the column identified by `By' matches
%% the value as found in `Rec'. Typically, one would use `id' to
%% delete a single row. The prepared query with name
%% `RecName_delete_by_By' will be used.
-spec delete(db_rec(), atom()) -> {ok, integer()} | {error, _}.
%% Deletes rows whose column By matches that field's value in Rec, via the
%% prepared query RecName_delete_by_By; returns the affected-row count.
delete(Rec, By) ->
    RecName = rec_name(Rec),
    Id = RecName:getval(By, Rec),
    cquery(RecName, ['delete_by_', By], [Id]).
%% @private
%% @doc Extract the values of Fields from Rec, in order, via the record
%% module's generated getval/2 accessor.
rec_to_vlist(Rec, Fields) ->
    RecMod = rec_name(Rec),
    lists:map(fun(Field) -> RecMod:getval(Field, Rec) end, Fields).
%% @private
%% @doc Translate `undefined' record values to SQL NULL on the way into
%% the database, so they are stored as NULL rather than "undefined".
%% All other values pass through untouched.
undef_to_null(undefined) ->
    null;
undef_to_null(Value) ->
    Value.
%% @private
%% @doc Convert DB result rows to records. Accepts either the record
%% module name (an atom) or a template record instance.
rows_to_recs(Rows, RecName) when is_atom(RecName) ->
    rows_to_recs(Rows, RecName:'#new'());
rows_to_recs(Rows, Rec) when is_tuple(Rec) ->
    [ row_to_rec(Row, Rec) || Row <- Rows ].
%% @private
%% @doc Build one record from one row of {ColumnBinary, Value} pairs.
row_to_rec(Row, Rec) ->
    RecName = rec_name(Rec),
    RecName:fromlist(atomize_keys_and_null_to_undef(Row)).
%% @private
%% @doc Prepare a DB row for fromlist/1: column-name binaries become atoms
%% and NULL values become 'undefined'.
atomize_keys_and_null_to_undef(L) ->
    [ {bin_to_atom(B), null_to_undef(V)} || {B, V} <- L ].
%% @private
%% NOTE(review): binary_to_atom creates atoms; presumably safe here since
%% the binaries are column names bounded by the DB schema — confirm no
%% untrusted input reaches this path.
bin_to_atom(B) ->
    erlang:binary_to_atom(B, utf8).
%% @private
%% @doc Inverse of undef_to_null/1: SQL NULL read from the database comes
%% back as the Erlang value `undefined'; everything else passes through.
null_to_undef(null) ->
    undefined;
null_to_undef(Value) ->
    Value.
%% @doc This function is intended to be used as the `{M, F, A}' for sqerl's
%% `prepared_statements' app config key and returns a proplist of prepared
%% queries in the form `[{QueryName, SQLBinary}]'. The `RecList' argument
%% should be a list of modules implementing the `sqerl_rec' behaviour or
%% elements of the form `{app, App}' in which case sqerl will auto-discover all
%% modules implementing the behavior. Ordering of modules and elements is
%% ignored. Any duplicate modules generated by specifying '{app, App}'
%% will also be ignored.
%%
%% Example inputs:
%%
%% [mod1, mod2, mod3, {app, app1}]
%% [mod1, mod2, mod3, mod4, mod5]
%%
%%
%% If the atom `default' is present in the list, then a default set of
%% queries will be generated using the first field returned by
%% ``RecName:'#info-'/1'' as a unique column for the WHERE clauses of
%% UPDATE, DELETE, and SELECT of single rows. The default queries are:
%% `fetch_by_FF', `delete_by_FF', `insert', and `update', where `FF'
%% is the name of the First Field. The returned query names will have
%% `RecName_' prepended. Custom queries override default queries of
%% the same name.
-spec statements([atom() | {app, term()}]) -> [{atom(), binary()}].
statements(RecList) ->
    %% {app, App} entries expand to every module in App implementing this
    %% behaviour; usort removes duplicates before generating queries.
    RecList2 = lists:usort(lists:flatten([ expand_if_app(Term) || Term <- RecList])),
    lists:flatten([ statements_for(RecName) || RecName <- RecList2 ]).
%% @private
%% @doc Load App's application specification so its module list can be
%% read. An already-loaded application counts as success; any other
%% result from application:load/1 is passed through unchanged.
ensure_application_spec_loaded(App) ->
    normalize_load_result(application:load(App), App).

%% @private
%% @doc Collapse {error, {already_loaded, App}} to ok.
normalize_load_result({error, {already_loaded, App}}, App) ->
    ok;
normalize_load_result(Result, _App) ->
    Result.
%% @private
%% @doc Expand a {app, App} element into the list of App's modules that
%% implement the sqerl_rec behaviour; plain module atoms pass through.
expand_if_app({app, App}) ->
    %% Ensure that the application spec is loaded before trying to read its module list
    ok = ensure_application_spec_loaded(App),
    {ok, Mods} = application:get_key(App, modules),
    %% We use the built-in function `Mod:module_info/1` to lookup the attributes
    %% of the module, then check if the sqerl_rec behaviour is present.
    %% Currently, a module's attributes will have a separate entry for each
    %% behavior it implements `[..., {behaviour,[sqerl_rec]},
    %% {behaviour,[db_helper]}]`, instead of `[.., {behaviour,[sqerl_rec,
    %% db_helper]}`, which is why we check specifically for the
    %% `{behaviour, [sqerl_rec]}` tuple.
    [ Mod || Mod <- Mods, lists:member({behaviour, [sqerl_rec]}, Mod:module_info(attributes))];
expand_if_app(Mod) ->
    Mod.
-spec statements_for(atom()) -> [{atom(), binary()}].
statements_for(RecName) ->
    RawStatements = RecName:'#statements'(),
    %% We need to normalize the query names (keys) with join_atoms
    %% *before* merging custom and defaults. Otherwise a duplicate
    %% could sneak in since the same query can be expressed in more
    %% than one way (e.g. `foo_bar' and `[foo_, bar]').
    Prefix = [RecName, '_'],
    %% do we have default?
    Defaults = case lists:member(default, RawStatements) of
                   true ->
                       [ {join_atoms([Prefix, Key]), as_bin(SQL)}
                         || {Key, SQL} <- default_queries(RecName) ];
                   false ->
                       []
               end,
    Customs = [ {join_atoms([Prefix, Key]), as_bin(SQL)}
                || {Key, SQL} <- RawStatements ],
    %% Customs first: proplist_merge keeps the first list's entry on a
    %% duplicate key, so custom queries override defaults by name.
    proplist_merge(Customs, Defaults).
%% @private
%% @doc Merge two proplists keyed on element 1. On duplicate keys the
%% entry from the first list wins (lists:ukeymerge/3 keeps the left side).
proplist_merge(Primary, Fallback) ->
    lists:ukeymerge(1, lists:ukeysort(1, Primary), lists:ukeysort(1, Fallback)).
%% @private
%% @doc The generated query set for a record module: insert, update (keyed
%% on the first field), and delete/fetch by the first field. The first
%% field returned by fields/0 is assumed to be a unique column.
default_queries(RecName) ->
    FirstField = first_field(RecName),
    [
     {insert, gen_insert(RecName)}
     , {update, gen_update(RecName, FirstField)}
     , {['delete_by_', FirstField], gen_delete(RecName, FirstField)}
     , {['fetch_by_', FirstField], gen_fetch(RecName, FirstField)}
    ].
%% @private
%% @doc Concatenate a (possibly nested) list of atoms into a single atom,
%% e.g. [user, '_', insert] -> user_insert. Inputs are expected to be
%% compile-time record/query names, so atom-table growth is bounded.
join_atoms(Atoms) when is_list(Atoms) ->
    Joined = << <<(erlang:atom_to_binary(A, utf8))/binary>>
                || A <- lists:flatten(Atoms) >>,
    erlang:binary_to_atom(Joined, utf8).
%% @private
%% @doc Coerce an iolist (or binary) to a binary; binaries pass through
%% unchanged so already-binary SQL is not copied.
as_bin(Bin) when is_binary(Bin) ->
    Bin;
as_bin(IoData) ->
    erlang:iolist_to_binary(IoData).
%% @private
%% @doc A record's name is the first element of its underlying tuple.
rec_name(Rec) ->
    element(1, Rec).

%% @private
%% @doc Build the SQL placeholder string "$1, $2, ..., $N" for N
%% parameters; N = 0 yields the empty string.
gen_params(Count) ->
    Placeholders = [[$$ | integer_to_list(N)] || N <- lists:seq(1, Count)],
    lists:flatten(lists:join(", ", Placeholders)).
%% @doc Return a SQL DELETE query appropriate for module `RecName'
%% implementing the `sqerl_rec' behaviour. Example:
%%
%% ```
%% SQL1 = gen_delete(user, id),
%% SQL1 = ["DELETE FROM ","cookers"," WHERE ","id = $1"]
%%
%% SQL2 = gen_delete(user, [id, name]),
%% SQL2 = ["DELETE FROM ","cookers",
%% " WHERE ","id = $1 AND name = $2"]
%% '''
-spec gen_delete(atom(), atom() | [atom()]) -> [string()].
%% A single column is normalized to a one-element list.
gen_delete(RecName, By) when is_atom(By) ->
    gen_delete(RecName, [By]);
gen_delete(RecName, ByList) when is_list(ByList) ->
    %% One "col = $n" term per column, ANDed together.
    WhereItems = zip_params(ByList, " = "),
    WhereClause = string:join(WhereItems, " AND "),
    Table = table_name(RecName),
    ["DELETE FROM ", Table, " WHERE ", WhereClause].
%% @doc Generate an UPDATE query. Uses ``RecName:'#update_fields'/0''
%% to determine the fields to include for SET.
%%
%% Example:
%% ```
%% SQL1 = sqerl_rec:gen_update(cook, id),
%% SQL1 = ["UPDATE ","cookers"," SET ",
%% "name = $1, auth_token = $2, ssh_pub_key = $3, "
%% "first_name = $4, last_name = $5, email = $6",
%% " WHERE ","id = $7"]
%%
%% SQL2 = sqerl_rec:gen_update(cook, [id, name]),
%% SQL2 = ["UPDATE ","cookers"," SET ",
%% "name = $1, auth_token = $2, ssh_pub_key = $3, "
%% "first_name = $4, last_name = $5, email = $6",
%% " WHERE ","id = $7 AND name = $8"]
%% '''
-spec gen_update(atom(), atom() | [atom()]) -> [string()].
gen_update(RecName, By) when is_atom(By) ->
    gen_update(RecName, [By]);
gen_update(RecName, ByList) when is_list(ByList) ->
    UpdateFields = RecName:'#update_fields'(),
    Table = table_name(RecName),
    UpdateCount = length(UpdateFields),
    %% WHERE parameters are numbered after the SET parameters, matching
    %% update/1, which appends the key value(s) after the field values.
    LastParam = 1 + UpdateCount,
    AllFields = map_to_str(UpdateFields),
    IdxFields = lists:zip(map_to_str(lists:seq(1, UpdateCount)), AllFields),
    KeyVals = string:join([ Key ++ " = $" ++ I || {I, Key} <- IdxFields ], ", "),
    %% RETURNING the full field list lets callers get db-generated values
    %% (timestamps etc.) without a second round trip.
    AllFieldsSQL = string:join(map_to_str(RecName:fields()), ", "),
    WhereItems = zip_params(ByList, " = ", LastParam),
    WhereClause = string:join(WhereItems, " AND "),
    ["UPDATE ", Table, " SET ", KeyVals,
     " WHERE ", WhereClause,
     " RETURNING ", AllFieldsSQL].
%% @doc Generate an INSERT query for sqerl_rec behaviour
%% `RecName'. Uses ``RecName:'#insert_fields'/0'' to determine the
%% fields to insert. Generates an INSERT ... RETURNING query that
%% returns a complete record.
%%
%% Example:
%% ```
%% SQL = sqerl_rec:gen_insert(kitchen),
%% SQL = ["INSERT INTO ", "kitchens",
%% "(", "name", ") VALUES (", "$1",
%% ") RETURNING ", "id, name"]
%% '''
-spec gen_insert(atom()) -> [string()].
gen_insert(RecName) ->
    InsertFields = map_to_str(RecName:'#insert_fields'()),
    InsertFieldsSQL = string:join(InsertFields, ", "),
    %% RETURNING all fields so db-assigned ids/timestamps come back in the
    %% record produced by insert/1.
    AllFieldsSQL = string:join(map_to_str(RecName:fields()), ", "),
    Params = gen_params(length(InsertFields)),
    Table = table_name(RecName),
    ["INSERT INTO ", Table, "(", InsertFieldsSQL,
     ") VALUES (", Params, ") RETURNING ", AllFieldsSQL].
%% @doc Generate a paginated fetch query.
%%
%% Example:
%% ```
%% SQL = sqerl_rec:gen_fetch_page(kitchen, name).
%% SQL = ["SELECT ", "id, name", " FROM ", "kitchens",
%% " WHERE ","name",
%% " > $1 ORDER BY ","name"," LIMIT $2"]
%% '''
-spec gen_fetch_page(atom(), atom()) -> [string()].
%% Keyset-pagination SELECT: $1 is the exclusive lower bound on OrderBy,
%% $2 the page size (see fetch_page/3 and first_page/0).
gen_fetch_page(RecName, OrderBy) ->
    AllFields = map_to_str(RecName:fields()),
    FieldsSQL = string:join(AllFields, ", "),
    OrderByStr = to_str(OrderBy),
    Table = table_name(RecName),
    ["SELECT ", FieldsSQL, " FROM ", Table,
     " WHERE ", OrderByStr, " > $1 ORDER BY ", OrderByStr,
     " LIMIT $2"].
%% @doc Generate a query to return all rows
%%
%% Example:
%% ```
%% SQL = sqerl_rec:gen_fetch_all(kitchen, name),
%% SQL = ["SELECT ", "id, name", " FROM ", "kitchens",
%% " ORDER BY ", "name"]
%% '''
-spec gen_fetch_all(atom(), atom()) -> [string()].
%% Parameterless SELECT of every row, ordered by OrderBy.
gen_fetch_all(RecName, OrderBy) ->
    AllFields = map_to_str(RecName:fields()),
    FieldsSQL = string:join(AllFields, ", "),
    OrderByStr = to_str(OrderBy),
    Table = table_name(RecName),
    ["SELECT ", FieldsSQL, " FROM ", Table,
     " ORDER BY ", OrderByStr].
%% @doc Generate a SELECT query for `RecName' rows.
%%
%% Example:
%% ```
%% SQL1 = sqerl_rec:gen_fetch(kitchen, name).
%% SQL1 = ["SELECT ", "id, name", " FROM ", "kitchens",
%% " WHERE ", "name = $1"]
%%
%% SQL2 = sqerl_rec:gen_fetch(cook, [kitchen_id, name]),
%% SQL2 = ["SELECT ",
%% "id, kitchen_id, name, auth_token, auth_token_bday, "
%% "ssh_pub_key, first_name, last_name, email",
%% " FROM ", "cookers", " WHERE ",
%% "kitchen_id = $1 AND name = $2"]
%% '''
-spec gen_fetch(atom(), atom() | [atom()]) -> [string()].
gen_fetch(RecName, By) when is_atom(By) ->
    gen_fetch(RecName, [By]);
gen_fetch(RecName, ByList) when is_list(ByList) ->
    AllFields = map_to_str(RecName:fields()),
    FieldsSQL = string:join(AllFields, ", "),
    %% One positional "col = $n" per column, ANDed together.
    WhereItems = zip_params(ByList, " = "),
    WhereClause = string:join(WhereItems, " AND "),
    Table = table_name(RecName),
    ["SELECT ", FieldsSQL, " FROM ", Table,
     " WHERE ", WhereClause].
%% @private
%% @doc Pair each column name with a positional SQL placeholder, e.g.
%% ["id = $1", "name = $2"], for WHERE/SET clause construction.
zip_params(Prefixes, Sep) ->
    zip_params(Prefixes, Sep, 1).

%% @private
%% @doc As zip_params/2 but numbering placeholders from StartIndex, so a
%% WHERE clause can continue after parameters already consumed by SET.
zip_params(Prefixes, Sep, StartIndex) ->
    LastIndex = length(Prefixes) + StartIndex - 1,
    Params = str_seq("$", StartIndex, LastIndex),
    lists:zipwith(fun(Prefix, Param) -> to_str(Prefix) ++ Sep ++ Param end,
                  Prefixes, Params).
%% @private
%% @doc Build ["Prefix<Start>", ..., "Prefix<End>"], e.g. ["$2", "$3"].
str_seq(Prefix, Start, End) ->
    lists:map(fun(N) -> Prefix ++ integer_to_list(N) end,
              lists:seq(Start, End)).
map_to_str(L) ->
[ to_str(Elt) || Elt <- L ].
%% Coerce a binary, atom, integer, or string to a string (list of chars).
%% Strings pass through untouched; any other type is a function_clause error.
to_str(Bin) when is_binary(Bin) ->
    erlang:binary_to_list(Bin);
to_str(Atom) when is_atom(Atom) ->
    erlang:atom_to_list(Atom);
to_str(Int) when is_integer(Int) ->
    erlang:integer_to_list(Int);
to_str(Str) when is_list(Str) ->
    Str.
%% Return the first column of the record module's fields/0 callback.
%% Crashes (badarg from hd/1) if the module declares no fields.
first_field(RecName) ->
    hd(RecName:fields()).
%% Determine the SQL table name for a record module: if the module
%% exports '#table_name'/0 use its return value as an override,
%% otherwise derive the name by pluralizing the module name
%% (e.g. kitchen -> "kitchens").
table_name(RecName) ->
    Exports = RecName:module_info(exports),
    case lists:member({'#table_name', 0}, Exports) of
        true ->
            RecName:'#table_name'();
        false ->
            pluralize(to_str(RecName))
    end.
%% Naive pluralization of lowercase strings. Rules are simplified from
%% a more robust library found here:
%% https://github.com/lukegalea/inflector
%%
%% Irregular words are special-cased; everything else goes through
%% do_pluralize/1, which receives the *reversed* word so the word's
%% ending can be pattern matched at the head of the list.
pluralize("alias") ->
    "aliases";
pluralize("status") ->
    "statuses";
pluralize(Word) ->
    do_pluralize(lists:reverse(Word)).

%% Argument is the reversed word (e.g. "box" arrives as "xob").
do_pluralize("x" ++ _ = Rev) ->
    lists:reverse(Rev) ++ "es";                     % box -> boxes
do_pluralize("hc" ++ _ = Rev) ->
    lists:reverse(Rev) ++ "es";                     % church -> churches
do_pluralize("ss" ++ _ = Rev) ->
    lists:reverse(Rev) ++ "es";                     % class -> classes
do_pluralize("hs" ++ _ = Rev) ->
    lists:reverse(Rev) ++ "es";                     % dish -> dishes
do_pluralize("y" ++ ([V | _] = Stem)) when V =:= $a; V =:= $e;
                                           V =:= $i; V =:= $o;
                                           V =:= $u ->
    %% vowel + y: simply append "s" (day -> days)
    lists:reverse(Stem) ++ "ys";
do_pluralize("y" ++ Stem) ->
    %% consonant + y: swap "y" for "ies" (city -> cities)
    lists:reverse(Stem) ++ "ies";
do_pluralize(Rev) ->
    lists:reverse(Rev) ++ "s".
%% Normalize any term into an {error, _} tuple, leaving terms that
%% already carry the error tag untouched.
ensure_error({error, _} = Err) ->
    Err;
ensure_error(Reason) ->
    {error, Reason}.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2012. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%% fork of io_lib_format that uses trunc_io to protect against large terms
%%
%% Renamed to couch_log_trunc_io_fmt to avoid a naming collision with
%% lager_format.
-module(couch_log_trunc_io_fmt).
-export([format/3, format/4]).
-record(options, {
chomp = false :: boolean()
}).
%% @doc Equivalent to format(FmtStr, Args, MaxLen, []) -- no options.
format(FmtStr, Args, MaxLen) ->
    format(FmtStr, Args, MaxLen, []).
%% @doc Format FmtStr/Args like io_lib:format/2 but keep the output near
%% MaxLen characters by truncating the "large" directives (~p, ~w, ~s, ...).
%% Opts supports {chomp, boolean()} to drop a trailing newline.
%%
%% Two passes: build/4 renders the predictable-size directives and
%% literals (consuming budget), then build2/3 splits the remaining
%% budget evenly across the deferred large terms.
format([], [], _, _) ->
    "";
format(FmtStr, Args, MaxLen, Opts) when is_atom(FmtStr) ->
    format(atom_to_list(FmtStr), Args, MaxLen, Opts);
format(FmtStr, Args, MaxLen, Opts) when is_binary(FmtStr) ->
    format(binary_to_list(FmtStr), Args, MaxLen, Opts);
format(FmtStr, Args, MaxLen, Opts) when is_list(FmtStr) ->
    %% string_p/1 presumably validates FmtStr is a printable string;
    %% anything else is a badarg (TODO confirm against couch_log_util).
    case couch_log_util:string_p(FmtStr) of
        true ->
            Options = make_options(Opts, #options{}),
            Cs = collect(FmtStr, Args),
            {Cs2, MaxLen2} = build(Cs, [], MaxLen, Options),
            %% count how many terms remain
            {Count, StrLen} = lists:foldl(
                fun
                    ({_C, _As, _F, _Adj, _P, _Pad, _Enc}, {Terms, Chars}) ->
                        {Terms + 1, Chars};
                    (_, {Terms, Chars}) ->
                        {Terms, Chars + 1}
                end,
                {0, 0},
                Cs2
            ),
            build2(Cs2, Count, MaxLen2 - StrLen);
        false ->
            erlang:error(badarg)
    end;
format(_FmtStr, _Args, _MaxLen, _Opts) ->
    erlang:error(badarg).
%% Tokenize the format string: each ~-directive becomes a
%% {Control, Args, Width, Adjust, Precision, Pad, Encoding} tuple and
%% every other character passes through unchanged. Leftover arguments
%% at the end of the format string crash (function_clause), matching
%% io_lib semantics.
collect(Fmt, Args) ->
    collect(Fmt, Args, []).

collect([$~ | Rest0], Args0, Acc) ->
    {Directive, Rest, Args} = collect_cseq(Rest0, Args0),
    collect(Rest, Args, [Directive | Acc]);
collect([Ch | Rest], Args, Acc) ->
    collect(Rest, Args, [Ch | Acc]);
collect([], [], Acc) ->
    lists:reverse(Acc).
%% Parse one control sequence (the text following a $~): optional field
%% width, precision, pad character and encoding modifier, then the
%% control character itself with the arguments it consumes.
collect_cseq(Fmt0, Args0) ->
    {F, Ad, Fmt1, Args1} = field_width(Fmt0, Args0),
    {P, Fmt2, Args2} = precision(Fmt1, Args1),
    {Pad, Fmt3, Args3} = pad_char(Fmt2, Args2),
    {Encoding, Fmt4, Args4} = encoding(Fmt3, Args3),
    {C, As, Fmt5, Args5} = collect_cc(Fmt4, Args4),
    {{C, As, F, Ad, P, Pad, Encoding}, Fmt5, Args5}.
%% A $t modifier selects unicode translation for the directive;
%% otherwise latin1 is the default.
encoding([$t | Rest], Args) ->
    {unicode, Rest, Args};
encoding(Rest, Args) ->
    {latin1, Rest, Args}.
%% Parse an optional field width. A leading $- requests left
%% adjustment; it is carried as a negated width and normalized by
%% field_width/3 into {Width, left | right, Fmt, Args}.
%%
%% NOTE(review): "~-p" with no digits leaves F =:= none, so -F raises
%% badarith. A widthless directive without $- works because the atom
%% 'none' compares greater than any integer and takes the F >= 0 branch.
field_width([$- | Fmt0], Args0) ->
    {F, Fmt, Args} = field_value(Fmt0, Args0),
    field_width(-F, Fmt, Args);
field_width(Fmt0, Args0) ->
    {F, Fmt, Args} = field_value(Fmt0, Args0),
    field_width(F, Fmt, Args).

field_width(F, Fmt, Args) when F < 0 ->
    {-F, left, Fmt, Args};
field_width(F, Fmt, Args) when F >= 0 ->
    {F, right, Fmt, Args}.
%% Parse an optional ".Precision"; an absent precision yields 'none'.
precision([$. | Fmt], Args) ->
    field_value(Fmt, Args);
precision(Fmt, Args) ->
    {none, Fmt, Args}.
%% Parse a numeric field value: "*" consumes the next argument (which
%% must be an integer), a digit run is read as a decimal number, and
%% anything else yields 'none'.
field_value([$* | Rest], [Value | Args]) when is_integer(Value) ->
    {Value, Rest, Args};
field_value([Digit | _] = Fmt, Args) when is_integer(Digit),
                                          Digit >= $0, Digit =< $9 ->
    field_value(Fmt, Args, 0);
field_value(Fmt, Args) ->
    {none, Fmt, Args}.

%% Accumulate a decimal digit run into Acc.
field_value([Digit | Rest], Args, Acc) when is_integer(Digit),
                                            Digit >= $0, Digit =< $9 ->
    field_value(Rest, Args, Acc * 10 + (Digit - $0));
field_value(Fmt, Args, Acc) ->
    {Acc, Fmt, Args}.
%% Parse the pad character: ".*" takes it from the argument list,
%% ".X" uses X literally, otherwise a space is the default.
pad_char([$., $* | Rest], [Pad | Args]) ->
    {Pad, Rest, Args};
pad_char([$., Pad | Rest], Args) ->
    {Pad, Rest, Args};
pad_char(Rest, Args) ->
    {$\s, Rest, Args}.
%% collect_cc([FormatChar], [Argument]) ->
%% {Control,[ControlArg],[FormatChar],[Arg]}.
%% Here we collect the arguments for each control character.
%% Be explicit to cause failure early.
%%
%% Each clause consumes exactly the arguments its directive needs
%% (~W/~P/~x/~X take two); an unknown directive or a too-short
%% argument list fails to match and crashes the caller -- intentional,
%% see the comment above.
collect_cc([$w | Fmt], [A | Args]) -> {$w, [A], Fmt, Args};
collect_cc([$p | Fmt], [A | Args]) -> {$p, [A], Fmt, Args};
collect_cc([$W | Fmt], [A, Depth | Args]) -> {$W, [A, Depth], Fmt, Args};
collect_cc([$P | Fmt], [A, Depth | Args]) -> {$P, [A, Depth], Fmt, Args};
collect_cc([$s | Fmt], [A | Args]) -> {$s, [A], Fmt, Args};
collect_cc([$r | Fmt], [A | Args]) -> {$r, [A], Fmt, Args};
collect_cc([$e | Fmt], [A | Args]) -> {$e, [A], Fmt, Args};
collect_cc([$f | Fmt], [A | Args]) -> {$f, [A], Fmt, Args};
collect_cc([$g | Fmt], [A | Args]) -> {$g, [A], Fmt, Args};
collect_cc([$b | Fmt], [A | Args]) -> {$b, [A], Fmt, Args};
collect_cc([$B | Fmt], [A | Args]) -> {$B, [A], Fmt, Args};
collect_cc([$x | Fmt], [A, Prefix | Args]) -> {$x, [A, Prefix], Fmt, Args};
collect_cc([$X | Fmt], [A, Prefix | Args]) -> {$X, [A, Prefix], Fmt, Args};
collect_cc([$+ | Fmt], [A | Args]) -> {$+, [A], Fmt, Args};
collect_cc([$# | Fmt], [A | Args]) -> {$#, [A], Fmt, Args};
collect_cc([$c | Fmt], [A | Args]) -> {$c, [A], Fmt, Args};
collect_cc([$~ | Fmt], Args) when is_list(Args) -> {$~, [], Fmt, Args};
collect_cc([$n | Fmt], Args) when is_list(Args) -> {$n, [], Fmt, Args};
collect_cc([$i | Fmt], [A | Args]) -> {$i, [A], Fmt, Args}.
%% build([Control], Pc, Indentation) -> [Char].
%% Interpret the control structures. Count the number of print
%% remaining and only calculate indentation when necessary. Must also
%% be smart when calculating indentation for characters in format.
%%
%% First pass over collect/2's tokens: literal characters cost one
%% budget unit each; directives go through control/8, which either
%% renders them now (reducing MaxLen) or returns them unrendered for
%% build2/3. With {chomp, true} a trailing ~n or \n token is dropped.
build([{$n, _, _, _, _, _, _}], Acc, MaxLen, #options{chomp = true}) ->
    %% trailing ~n, ignore
    {lists:reverse(Acc), MaxLen};
build([{C, As, F, Ad, P, Pad, Enc} | Cs], Acc, MaxLen, O) ->
    {S, MaxLen2} = control(C, As, F, Ad, P, Pad, Enc, MaxLen),
    build(Cs, [S | Acc], MaxLen2, O);
build([$\n], Acc, MaxLen, #options{chomp = true}) ->
    %% trailing \n, ignore
    {lists:reverse(Acc), MaxLen};
build([$\n | Cs], Acc, MaxLen, O) ->
    build(Cs, [$\n | Acc], MaxLen - 1, O);
build([$\t | Cs], Acc, MaxLen, O) ->
    build(Cs, [$\t | Acc], MaxLen - 1, O);
build([C | Cs], Acc, MaxLen, O) ->
    build(Cs, [C | Acc], MaxLen - 1, O);
build([], Acc, MaxLen, _O) ->
    {lists:reverse(Acc), MaxLen}.
%% Second pass: render the directives deferred by the first pass,
%% giving each an equal share (MaxLen div Count) of the remaining
%% budget; any unused share rolls over to the later terms.
build2([{C, As, F, Ad, P, Pad, Enc} | Cs], Count, MaxLen) ->
    {S, Len} = control2(C, As, F, Ad, P, Pad, Enc, MaxLen div Count),
    [S | build2(Cs, Count - 1, MaxLen - Len)];
build2([C | Cs], Count, MaxLen) ->
    [C | build2(Cs, Count, MaxLen)];
build2([], _, _) ->
    [].
%% control(FormatChar, [Argument], FieldWidth, Adjust, Precision, PadChar,
%% Indentation) -> [Char]
%% This is the main dispatch function for the various formatting commands.
%% Field widths and precisions have already been calculated.
%%
%% Directives with predictable output size are rendered here and the
%% remaining budget L is reduced by the rendered length. Anything that
%% may produce a large term (~w/~p/~W/~P/~s with non-atoms, ~r) falls
%% through to the final clause and is returned as a tuple for
%% control2/8 to size-limit.
control($e, [A], F, Adj, P, Pad, _Enc, L) when is_float(A) ->
    Res = fwrite_e(A, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($f, [A], F, Adj, P, Pad, _Enc, L) when is_float(A) ->
    Res = fwrite_f(A, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($g, [A], F, Adj, P, Pad, _Enc, L) when is_float(A) ->
    Res = fwrite_g(A, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($b, [A], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    Res = unprefixed_integer(A, F, Adj, base(P), Pad, true),
    {Res, L - lists:flatlength(Res)};
control($B, [A], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    Res = unprefixed_integer(A, F, Adj, base(P), Pad, false),
    {Res, L - lists:flatlength(Res)};
control($x, [A, Prefix], F, Adj, P, Pad, _Enc, L) when
    is_integer(A),
    is_atom(Prefix)
->
    Res = prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), true),
    {Res, L - lists:flatlength(Res)};
control($x, [A, Prefix], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    %Check if Prefix a character list
    true = io_lib:deep_char_list(Prefix),
    Res = prefixed_integer(A, F, Adj, base(P), Pad, Prefix, true),
    {Res, L - lists:flatlength(Res)};
control($X, [A, Prefix], F, Adj, P, Pad, _Enc, L) when
    is_integer(A),
    is_atom(Prefix)
->
    Res = prefixed_integer(A, F, Adj, base(P), Pad, atom_to_list(Prefix), false),
    {Res, L - lists:flatlength(Res)};
control($X, [A, Prefix], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    %Check if Prefix a character list
    true = io_lib:deep_char_list(Prefix),
    Res = prefixed_integer(A, F, Adj, base(P), Pad, Prefix, false),
    {Res, L - lists:flatlength(Res)};
control($+, [A], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    Base = base(P),
    Prefix = [integer_to_list(Base), $#],
    Res = prefixed_integer(A, F, Adj, Base, Pad, Prefix, true),
    {Res, L - lists:flatlength(Res)};
control($#, [A], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    Base = base(P),
    Prefix = [integer_to_list(Base), $#],
    Res = prefixed_integer(A, F, Adj, Base, Pad, Prefix, false),
    {Res, L - lists:flatlength(Res)};
control($c, [A], F, Adj, P, Pad, unicode, L) when is_integer(A) ->
    Res = char(A, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($c, [A], F, Adj, P, Pad, _Enc, L) when is_integer(A) ->
    %% latin1 ~c masks the code point to a single byte
    Res = char(A band 255, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($~, [], F, Adj, P, Pad, _Enc, L) ->
    Res = char($~, F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($n, [], F, Adj, P, Pad, _Enc, L) ->
    Res = newline(F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control($i, [_A], _F, _Adj, _P, _Pad, _Enc, L) ->
    %% ~i consumes its argument and emits nothing
    {[], L};
control($s, [A], F, Adj, P, Pad, _Enc, L) when is_atom(A) ->
    Res = string(atom_to_list(A), F, Adj, P, Pad),
    {Res, L - lists:flatlength(Res)};
control(C, A, F, Adj, P, Pad, Enc, L) ->
    %% save this for later - these are all the 'large' terms
    {{C, A, F, Adj, P, Pad, Enc}, L}.
%% Render the directives deferred by control/8, each within a budget of
%% L characters, using couch_log_trunc_io:fprint/3 to truncate the term.
%% Returns {Rendered, ActualLength}.
control2($w, [A], F, Adj, P, Pad, _Enc, L) ->
    Term = couch_log_trunc_io:fprint(A, L, [{lists_as_strings, false}]),
    Res = term(Term, F, Adj, P, Pad),
    {Res, lists:flatlength(Res)};
control2($p, [A], _F, _Adj, _P, _Pad, _Enc, L) ->
    Term = couch_log_trunc_io:fprint(A, L, [{lists_as_strings, true}]),
    {Term, lists:flatlength(Term)};
control2($W, [A, Depth], F, Adj, P, Pad, _Enc, L) when is_integer(Depth) ->
    Term = couch_log_trunc_io:fprint(A, L, [{depth, Depth}, {lists_as_strings, false}]),
    Res = term(Term, F, Adj, P, Pad),
    {Res, lists:flatlength(Res)};
control2($P, [A, Depth], _F, _Adj, _P, _Pad, _Enc, L) when is_integer(Depth) ->
    Term = couch_log_trunc_io:fprint(A, L, [{depth, Depth}, {lists_as_strings, true}]),
    {Term, lists:flatlength(Term)};
control2($s, [L0], F, Adj, P, Pad, latin1, L) ->
    List = couch_log_trunc_io:fprint(iolist_to_chars(L0), L, [{force_strings, true}]),
    Res = string(List, F, Adj, P, Pad),
    {Res, lists:flatlength(Res)};
control2($s, [L0], F, Adj, P, Pad, unicode, L) ->
    List = couch_log_trunc_io:fprint(cdata_to_chars(L0), L, [{force_strings, true}]),
    Res = uniconv(string(List, F, Adj, P, Pad)),
    {Res, lists:flatlength(Res)};
control2($r, [R], F, Adj, P, Pad, _Enc, _L) ->
    %% ~r: pretty-print an error reason; note the budget L is ignored here.
    List = couch_log_formatter:format_reason(R),
    Res = string(List, F, Adj, P, Pad),
    {Res, lists:flatlength(Res)}.
%% Expand latin1 chardata (bytes 0-255, binaries, arbitrary nesting)
%% into a possibly-deep character list; binaries become byte lists.
iolist_to_chars(Bin) when is_binary(Bin) ->
    binary_to_list(Bin);
iolist_to_chars([]) ->
    [];
iolist_to_chars([Byte | Rest]) when is_integer(Byte), Byte >= 0, Byte =< 255 ->
    [Byte | iolist_to_chars(Rest)];
iolist_to_chars([Nested | Rest]) ->
    [iolist_to_chars(Nested) | iolist_to_chars(Rest)].
%% Like iolist_to_chars/1 but for unicode chardata: any non-negative
%% code point is allowed, and binaries are decoded with
%% unicode:characters_to_list/1, falling back to raw bytes when that
%% does not yield a list.
cdata_to_chars(Bin) when is_binary(Bin) ->
    case catch unicode:characters_to_list(Bin) of
        Chars when is_list(Chars) -> Chars;
        _ -> binary_to_list(Bin)
    end;
cdata_to_chars([]) ->
    [];
cdata_to_chars([Cp | Rest]) when is_integer(Cp), Cp >= 0 ->
    [Cp | cdata_to_chars(Rest)];
cdata_to_chars([Nested | Rest]) ->
    [cdata_to_chars(Nested) | cdata_to_chars(Rest)].
%% Fold an option proplist into the #options{} record. Unknown options
%% (or a non-boolean chomp value) crash with function_clause.
make_options([], Options) ->
    Options;
make_options([{chomp, Bool} | T], Options) when is_boolean(Bool) ->
    make_options(T, Options#options{chomp = Bool}).
%% Unicode output conversion: with UNICODE_AS_BINARIES defined at
%% compile time the chardata is packed into a binary; otherwise it
%% passes through unchanged.
-ifdef(UNICODE_AS_BINARIES).
uniconv(C) ->
    unicode:characters_to_binary(C, unicode).
-else.
uniconv(C) ->
    C.
-endif.
%% Default integer base
%% The precision field doubles as the base for ~b/~B/~x/~X/~+/~#;
%% when unspecified, base 10.
base(none) ->
    10;
base(Base) when is_integer(Base) ->
    Base.
%% term(TermList, Field, Adjust, Precision, PadChar)
%% Output the characters in a term.
%% Adjust the characters within the field if length less than Max padding
%% with PadChar.
%%
%% When the flattened content exceeds the effective precision it is
%% replaced by a run of $* characters padded to the field. Note the
%% final 'if' has no catch-all: a field narrower than the precision
%% with content that fits raises if_clause (inherited from the
%% io_lib_format code this module forks).
term(T, none, _Adj, none, _Pad) ->
    T;
term(T, none, Adj, P, Pad) ->
    term(T, P, Adj, P, Pad);
term(T, F, Adj, P0, Pad) ->
    L = lists:flatlength(T),
    P =
        case P0 of
            none -> erlang:min(L, F);
            _ -> P0
        end,
    if
        L > P ->
            adjust(chars($*, P), chars(Pad, F - P), Adj);
        F >= P ->
            adjust(T, chars(Pad, F - L), Adj)
    end.
%% fwrite_e(Float, Field, Adjust, Precision, PadChar)
%% Scientific (e) notation. Default precision is 6; a precision below 2
%% is rejected (no matching clause -> function_clause).
%Default values
fwrite_e(Fl, none, Adj, none, Pad) ->
    fwrite_e(Fl, none, Adj, 6, Pad);
fwrite_e(Fl, none, _Adj, P, _Pad) when P >= 2 ->
    float_e(Fl, float_data(Fl), P);
fwrite_e(Fl, F, Adj, none, Pad) ->
    fwrite_e(Fl, F, Adj, 6, Pad);
fwrite_e(Fl, F, Adj, P, Pad) when P >= 2 ->
    term(float_e(Fl, float_data(Fl), P), F, Adj, F, Pad).
%Negative numbers
%% Format the {Digits, Exponent} pair from float_data/1 with P
%% significant digits. A rounding carry out of the leading digit
%% (the {[$0|Fs], true} case) replaces it with $1 and uses exponent E
%% instead of E - 1, i.e. the magnitude is bumped by one.
float_e(Fl, Fd, P) when Fl < 0.0 ->
    [$- | float_e(-Fl, Fd, P)];
float_e(_Fl, {Ds, E}, P) ->
    case float_man(Ds, 1, P - 1) of
        {[$0 | Fs], true} -> [[$1 | Fs] | float_exp(E)];
        {Fs, false} -> [Fs | float_exp(E - 1)]
    end.
%% float_man([Digit], Icount, Dcount) -> {[Chars],CarryFlag}.
%% Generate the characters in the mantissa from the digits with Icount
%% characters before the '.' and Dcount decimals. Handle carry and let
%% caller decide what to do at top.
%%
%% The carry flag propagates right-to-left: a first dropped digit >= $5
%% starts a round-up, $9 turns into $0 and keeps carrying, any other
%% digit absorbs the carry. A 'true' result at the top means the carry
%% fell out of the most significant digit.
float_man(Ds, 0, Dc) ->
    {Cs, C} = float_man(Ds, Dc),
    {[$. | Cs], C};
float_man([D | Ds], I, Dc) ->
    case float_man(Ds, I - 1, Dc) of
        {Cs, true} when D =:= $9 -> {[$0 | Cs], true};
        {Cs, true} -> {[D + 1 | Cs], false};
        {Cs, false} -> {[D | Cs], false}
    end;
%Pad with 0's
float_man([], I, Dc) ->
    {string:chars($0, I, [$. | string:chars($0, Dc)]), false}.

%% Decimal-part helper: Dc decimals remain; the first digit beyond them
%% decides whether to round up.
float_man([D | _], 0) when D >= $5 -> {[], true};
float_man([_ | _], 0) ->
    {[], false};
float_man([D | Ds], Dc) ->
    case float_man(Ds, Dc - 1) of
        {Cs, true} when D =:= $9 -> {[$0 | Cs], true};
        {Cs, true} -> {[D + 1 | Cs], false};
        {Cs, false} -> {[D | Cs], false}
    end;
%Pad with 0's
float_man([], Dc) ->
    {string:chars($0, Dc), false}.
%% float_exp(Exponent) -> [Char].
%% Generate the exponent of a floating point number. Always include
%% the sign: non-negative exponents get an explicit $+, negative ones
%% already carry $- from integer_to_list/1.
float_exp(Exp) when Exp >= 0 ->
    "e+" ++ integer_to_list(Exp);
float_exp(Exp) ->
    "e" ++ integer_to_list(Exp).
%% fwrite_f(FloatData, Field, Adjust, Precision, PadChar)
%% Fixed-point (f) notation. Default precision is 6; a precision below
%% 1 is rejected (function_clause).
%Default values
fwrite_f(Fl, none, Adj, none, Pad) ->
    fwrite_f(Fl, none, Adj, 6, Pad);
fwrite_f(Fl, none, _Adj, P, _Pad) when P >= 1 ->
    float_f(Fl, float_data(Fl), P);
fwrite_f(Fl, F, Adj, none, Pad) ->
    fwrite_f(Fl, F, Adj, 6, Pad);
fwrite_f(Fl, F, Adj, P, Pad) when P >= 1 ->
    term(float_f(Fl, float_data(Fl), P), F, Adj, F, Pad).
%% Render {Digits, Exponent} as a fixed-point number with P decimals.
%% A non-positive exponent is normalized by prefixing zero digits so
%% exactly one digit sits before the decimal point.
float_f(Fl, Fd, P) when Fl < 0.0 ->
    [$- | float_f(-Fl, Fd, P)];
float_f(Fl, {Ds, E}, P) when E =< 0 ->
    %Prepend enough 0's
    float_f(Fl, {string:chars($0, -E + 1, Ds), 1}, P);
float_f(_Fl, {Ds, E}, P) ->
    case float_man(Ds, E, P) of
        %Handle carry
        {Fs, true} -> "1" ++ Fs;
        {Fs, false} -> Fs
    end.
%% float_data([FloatChar]) -> {[Digit],Exponent}
%% Split the default float_to_list/1 representation into its digit
%% characters and a decimal exponent; signs and the decimal point are
%% skipped, and the listed exponent is shifted by one so it counts
%% digits before the (implicit) point.
float_data(Float) ->
    float_data(float_to_list(Float), []).

float_data([$e | ExpChars], DigitsRev) ->
    {lists:reverse(DigitsRev), list_to_integer(ExpChars) + 1};
float_data([Ch | Rest], DigitsRev) when Ch >= $0, Ch =< $9 ->
    float_data(Rest, [Ch | DigitsRev]);
float_data([_ | Rest], DigitsRev) ->
    float_data(Rest, DigitsRev).
%% fwrite_g(Float, Field, Adjust, Precision, PadChar)
%% Use the f form if Float is >= 0.1 and < 1.0e4,
%% and the prints correctly in the f form, else the e form.
%% Precision always means the # of significant digits.
fwrite_g(Fl, F, Adj, none, Pad) ->
    fwrite_g(Fl, F, Adj, 6, Pad);
fwrite_g(Fl, F, Adj, P, Pad) when P >= 1 ->
    A = abs(Fl),
    %% E is a decimal-magnitude bucket, or the atom 'fwrite_f' as an
    %% out-of-range sentinel. Because any atom compares greater than
    %% any number in Erlang term order, the sentinel can never satisfy
    %% "P - 1 > E" below, so such values format in the e form.
    E =
        if
            A < 1.0e-1 -> -2;
            A < 1.0e0 -> -1;
            A < 1.0e1 -> 0;
            A < 1.0e2 -> 1;
            A < 1.0e3 -> 2;
            A < 1.0e4 -> 3;
            true -> fwrite_f
        end,
    if
        P =< 1, E =:= -1;
        P - 1 > E, E >= -1 ->
            fwrite_f(Fl, F, Adj, P - 1 - E, Pad);
        P =< 1 ->
            fwrite_e(Fl, F, Adj, 2, Pad);
        true ->
            fwrite_e(Fl, F, Adj, P, Pad)
    end.
%% string(String, Field, Adjust, Precision, PadChar)
%% Lay a (possibly deep) string into a field: truncate to the
%% precision, pad to the field width, and apply left/right adjustment.
%% With a precision but no field, the string is left-adjusted in a
%% field of exactly the precision.
string(S, none, _Adj, none, _Pad) ->
    S;
string(S, F, Adj, none, Pad) ->
    string_field(S, F, Adj, lists:flatlength(S), Pad);
string(S, none, _Adj, P, Pad) ->
    string_field(S, P, left, lists:flatlength(S), Pad);
string(S, F, Adj, P, Pad) when F >= P ->
    N = lists:flatlength(S),
    if
        F > P ->
            if
                N > P ->
                    adjust(flat_trunc(S, P), chars(Pad, F - P), Adj);
                N < P ->
                    adjust([S | chars(Pad, P - N)], chars(Pad, F - P), Adj);
                % N == P
                true ->
                    adjust(S, chars(Pad, F - P), Adj)
            end;
        % F == P
        true ->
            string_field(S, F, Adj, N, Pad)
    end.
%% Fit a string of (flat) length N into exactly field width F:
%% truncate when too long, pad on the adjustment side when too short.
string_field(S, F, _Adj, N, _Pad) when N > F ->
    flat_trunc(S, F);
string_field(S, F, Adj, N, Pad) when N < F ->
    adjust(S, chars(Pad, F - N), Adj);
% N == F
string_field(S, _, _, _, _) ->
    S.
%% unprefixed_integer(Int, Field, Adjust, Base, PadChar, Lowercase)
%% -> [Char].
%% Render Int in Base (2..36); Lowercase selects the digit case, and a
%% negative value keeps its leading minus outside the conversion.
unprefixed_integer(Int, F, Adj, Base, Pad, Lowercase) when
    Base >= 2, Base =< 1 + $Z - $A + 10
->
    if
        Int < 0 ->
            S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase),
            term([$- | S], F, Adj, none, Pad);
        true ->
            S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase),
            term(S, F, Adj, none, Pad)
    end.
%% prefixed_integer(Int, Field, Adjust, Base, PadChar, Prefix, Lowercase)
%% -> [Char].
%% Like unprefixed_integer/6 but with Prefix inserted between the sign
%% (if any) and the digits, e.g. "16#" for ~#.
prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase) when
    Base >= 2, Base =< 1 + $Z - $A + 10
->
    if
        Int < 0 ->
            S = cond_lowercase(erlang:integer_to_list(-Int, Base), Lowercase),
            term([$-, Prefix | S], F, Adj, none, Pad);
        true ->
            S = cond_lowercase(erlang:integer_to_list(Int, Base), Lowercase),
            term([Prefix | S], F, Adj, none, Pad)
    end.
%% char(Char, Field, Adjust, Precision, PadChar) -> [Char].
%% ~c: one copy of C by default, F or P copies when only one of them
%% is given, and P copies adjusted inside a field of F when both are.
%% F < P has no matching clause (function_clause).
char(C, none, _Adj, none, _Pad) ->
    [C];
char(C, F, _Adj, none, _Pad) ->
    chars(C, F);
char(C, none, _Adj, P, _Pad) ->
    chars(C, P);
char(C, F, Adj, P, Pad) when F >= P ->
    adjust(chars(C, P), chars(Pad, F - P), Adj).
%% newline(Field, Adjust, Precision, PadChar) -> [Char].
%% ~n: a single newline, or F newlines with an explicit field width.
%% Only right adjustment is accepted; a left-adjusted ~n with a width
%% is a function_clause error.
newline(none, _Adj, _P, _Pad) -> "\n";
newline(F, right, _P, _Pad) -> chars($\n, F).
%%
%% Utilities
%%
%% Attach padding on the side opposite the adjustment: left-adjusted
%% data gets padding appended, right-adjusted data gets it prepended.
%% No padding leaves the data untouched.
adjust(Data, [], _Side) ->
    Data;
adjust(Data, Padding, left) ->
    [Data | Padding];
adjust(Data, Padding, right) ->
    [Padding | Data].
%% Truncate a list to at most N elements.
%% NOTE(review): despite the name (and the original comment) this does
%% not flatten -- a nested sublist counts as a single element and is
%% kept whole.
flat_trunc(List, N) when is_integer(N), N >= 0 ->
    flat_trunc(List, N, []).

flat_trunc([], _Left, Acc) ->
    lists:reverse(Acc);
flat_trunc(Rest, 0, Acc) when is_list(Rest) ->
    lists:reverse(Acc);
flat_trunc([Elem | Rest], Left, Acc) ->
    flat_trunc(Rest, Left - 1, [Elem | Acc]).
%% A deep version of string:chars/2,3
%% Builds N copies of C as a deep list; each doubling step reuses the
%% same sublist twice ([S | S]), so only a handful of cons cells are
%% allocated per doubling. Consumers must treat the result as a deep
%% list (flatlength/flatten), not a flat string.
chars(_C, 0) ->
    [];
chars(C, 1) ->
    [C];
chars(C, 2) ->
    [C, C];
chars(C, 3) ->
    [C, C, C];
chars(C, N) when is_integer(N), (N band 1) =:= 0 ->
    %% even N: double the half-size list
    S = chars(C, N bsr 1),
    [S | S];
chars(C, N) when is_integer(N) ->
    %% odd N: one extra C in front of the doubled half
    S = chars(C, N bsr 1),
    [C, S | S].
%chars(C, N, Tail) ->
% [chars(C, N)|Tail].
%% Lowercase conversion
%% Map ASCII $A..$Z to $a..$z when the flag is true; every other
%% element (including non-integers) passes through unchanged.
cond_lowercase(String, true) ->
    lowercase(String);
cond_lowercase(String, false) ->
    String.

lowercase(Str) ->
    [case is_integer(Ch) andalso Ch >= $A andalso Ch =< $Z of
         true -> Ch - $A + $a;
         false -> Ch
     end || Ch <- Str].
%%% Pure erlang port of https://github.com/Cobenian/inet_cidr
%%% Erlang Port by <NAME>, 2022
%%%
%%% Original code Copyright (c) 2015 <NAME>
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
-module(inet_cidr).
-export([address_count/2,
contains/2,
parse/1,
parse/2,
parse_address/1,
to_string/1,
v4/1,
v6/1]).
-include_lib("kernel/src/inet_int.hrl").
-type cidr() :: {inet:ip_address(), inet:ip_address(), non_neg_integer()}.
%% @doc
%% Parses a string containing either an IPv4 or IPv6 CIDR block using
%% notation such as `192.168.0.0/16' or `2001:abcd::/32'. It returns a tuple
%% with the start address, end address and cidr length.
%%
%% You can optionally pass true as the second argument to adjust the start IP
%% address if it is not consistent with the cidr length.
%% For example, `192.168.0.0/0' would be adjusted to have a start IP of
%% `0.0.0.0' instead of `192.168.0.0'. The default behavior is to be more
%% strict and raise an exception when this occurs: this arity uses the
%% strict mode and throws `invalid_cidr' when host bits are set below
%% the prefix length.
%% @end
-spec parse(list() | binary()) -> cidr().
parse(CidrString) ->
    parse(CidrString, false).
%% Accepts the CIDR as a string or binary. Adjust =:= true masks stray
%% host bits off the start address instead of throwing invalid_cidr.
-spec parse(list() | binary(), boolean()) -> cidr().
parse(CidrString, Adjust) when is_binary(CidrString) ->
    parse(binary_to_list(CidrString), Adjust);
parse(CidrString, Adjust) when is_list(CidrString) ->
    {StartAddress, PrefixLength} = parse_cidr(CidrString, Adjust),
    EndAddress = calc_end_address(StartAddress, PrefixLength),
    {StartAddress, EndAddress, PrefixLength}.
%% @doc
%% Prints the CIDR block to a string such that it can be parsed back to a CIDR
%% block by this module.
%% @end
-spec to_string(cidr()) -> list().
to_string({StartAddress, _EndAddress, PrefixLength}) ->
    %% inet:ntoa/1 renders the address; the prefix length follows a slash.
    lists:flatten([inet:ntoa(StartAddress), "/", integer_to_list(PrefixLength)]).
%% @doc
%% Convenience function that takes an IPv4 or IPv6 address as a string and
%% returns the address. It raises an exception (badmatch, since
%% inet:parse_address/1 returns {error, einval}) if the string does not
%% contain a valid IP address.
%% @end
-spec parse_address(list()) -> inet:ip_address().
parse_address(Prefix) ->
    {ok, StartAddress} = inet:parse_address(Prefix),
    StartAddress.
%% @doc The number of IP addresses included in the CIDR block.
-spec address_count(inet:ip_address(), non_neg_integer()) -> non_neg_integer().
address_count(Ip, PrefixLength) ->
    %% 2 ^ (host bits)
    1 bsl (bit_count(Ip) - PrefixLength).

%% @doc The number of bits in the address family (32 for IPv4 and 128 for IPv6)
-spec bit_count(inet:ip_address()) -> 32 | 128.
bit_count(Addr) when tuple_size(Addr) =:= 4 -> 32;
bit_count(Addr) when tuple_size(Addr) =:= 8 -> 128.
%% @doc Returns true if the CIDR block contains the IP address, false otherwise.
%% Start, end and candidate addresses must all be tuples of the same
%% family (arity 4 or 8); any other combination yields false.
-spec contains(cidr(), inet:ip_address()) -> boolean().
contains({Start, End, _PrefixLength}, Addr)
  when tuple_size(Start) =:= tuple_size(Addr),
       tuple_size(End) =:= tuple_size(Addr),
       tuple_size(Addr) =:= 4 orelse tuple_size(Addr) =:= 8 ->
    in_range(tuple_to_list(Start), tuple_to_list(End), tuple_to_list(Addr));
contains(_, _) ->
    false.

%% Componentwise range check: every address component must lie within
%% [Lo, Hi] of the corresponding start/end components.
in_range(Lows, Highs, Values) ->
    lists:all(fun({Lo, Hi, V}) -> V >= Lo andalso V =< Hi end,
              lists:zip3(Lows, Highs, Values)).
%% @doc Returns true if the value passed in is an IPv4 address, false otherwise.
%% NOTE(review): ?ip/?ip6 come from kernel's inet_int.hrl and are used
%% here as body expressions, not guards; for 4-/8-tuples holding
%% non-integers they likely raise instead of returning false --
%% confirm against the header if that matters to callers.
-spec v4(inet:ip_address()) -> boolean().
v4({A, B, C, D}) ->
    ?ip(A, B, C, D);
v4(_) ->
    false.

%% @doc Returns true if the value passed in is an IPv6 address, false otherwise.
-spec v6(inet:ip_address()) -> boolean().
v6({A, B, C, D, E, F, G, H}) ->
    ?ip6(A, B, C, D, E, F, G, H);
v6(_) ->
    false.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% internal functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Split "Prefix/Len", parse both halves, and normalize the start
%% address against the network mask.
parse_cidr(CidrString, Adjust) when is_list(CidrString) ->
    [Prefix, PrefixLengthStr] = string:split(CidrString, "/"),
    StartAddress = parse_address(Prefix),
    PrefixLength = list_to_integer(PrefixLengthStr),
    %% if something 'nonsensical' is passed in like 192.168.0.0/0
    %% we have three choices:
    %% a) leave it alone (we do NOT allow this)
    %% b) adjust the start ip (to 0.0.0.0 in this case) - when adjust == true
    %% c) raise an exception - when adjust != true
    Masked = band_with_mask(StartAddress, start_mask(StartAddress, PrefixLength)),
    if not Adjust andalso Masked =/= StartAddress ->
            throw(invalid_cidr);
       true ->
            ok
    end,
    {Masked, PrefixLength}.
%% The last address of the block: the start address with every host
%% bit set (start OR end-mask).
calc_end_address(StartAddress, PrefixLength) ->
    bor_with_mask(StartAddress, end_mask(StartAddress, PrefixLength)).
%% The network mask is the componentwise complement of the host (end)
%% mask. bnot of a component yields a negative integer, but the
%% subsequent band in band_with_mask/2 restricts it back to the
%% component's bits.
start_mask({_, _, _, _} = Addr, PrefixLength)
  when PrefixLength >= 0, PrefixLength =< 32 ->
    complement(end_mask(Addr, PrefixLength));
start_mask({_, _, _, _, _, _, _, _} = Addr, PrefixLength)
  when PrefixLength >= 0, PrefixLength =< 128 ->
    complement(end_mask(Addr, PrefixLength)).

%% Apply bnot to every element of a tuple.
complement(Mask) ->
    list_to_tuple([bnot Bits || Bits <- tuple_to_list(Mask)]).
%% Host-part mask for the address family: zeros for fully-covered
%% leading components, all-ones for fully-open trailing components,
%% and one partially-open component computed by bmask/2.
end_mask({_, _, _, _}, Len) when Len >= 0, Len =< 32 ->
    if
        Len =:= 32 -> {0, 0, 0, 0};
        Len >= 24  -> {0, 0, 0, bmask(Len, 8)};
        Len >= 16  -> {0, 0, bmask(Len, 8), 16#FF};
        Len >= 8   -> {0, bmask(Len, 8), 16#FF, 16#FF};
        Len >= 0   -> {bmask(Len, 8), 16#FF, 16#FF, 16#FF}
    end;
end_mask({_, _, _, _, _, _, _, _}, Len) when Len >= 0, Len =< 128 ->
    if
        Len =:= 128 -> {0, 0, 0, 0, 0, 0, 0, 0};
        Len >= 112  -> {0, 0, 0, 0, 0, 0, 0, bmask(Len, 16)};
        Len >= 96   -> {0, 0, 0, 0, 0, 0, bmask(Len, 16), 16#FFFF};
        Len >= 80   -> {0, 0, 0, 0, 0, bmask(Len, 16), 16#FFFF, 16#FFFF};
        Len >= 64   -> {0, 0, 0, 0, bmask(Len, 16), 16#FFFF, 16#FFFF, 16#FFFF};
        Len >= 48   -> {0, 0, 0, bmask(Len, 16), 16#FFFF, 16#FFFF, 16#FFFF,
                        16#FFFF};
        Len >= 32   -> {0, 0, bmask(Len, 16), 16#FFFF, 16#FFFF, 16#FFFF,
                        16#FFFF, 16#FFFF};
        Len >= 16   -> {0, bmask(Len, 16), 16#FFFF, 16#FFFF, 16#FFFF, 16#FFFF,
                        16#FFFF, 16#FFFF};
        Len >= 0    -> {bmask(Len, 16), 16#FFFF, 16#FFFF, 16#FFFF, 16#FFFF,
                        16#FFFF, 16#FFFF, 16#FFFF}
    end.

%% All-ones bits below the prefix boundary within a single 8- or
%% 16-bit component.
bmask(Len, 8) when Len >= 0, Len =< 32 ->
    16#FF bsr (Len rem 8);
bmask(Len, 16) when Len >= 0, Len =< 128 ->
    16#FFFF bsr (Len rem 16).
%% Componentwise bitwise OR of two same-family address tuples.
bor_with_mask({_, _, _, _} = Addr, {_, _, _, _} = Mask) ->
    tuple_bor(Addr, Mask);
bor_with_mask({_, _, _, _, _, _, _, _} = Addr, {_, _, _, _, _, _, _, _} = Mask) ->
    tuple_bor(Addr, Mask).

tuple_bor(A, B) ->
    list_to_tuple(lists:zipwith(fun(X, Y) -> X bor Y end,
                                tuple_to_list(A), tuple_to_list(B))).
%% Componentwise bitwise AND of two same-family address tuples.
band_with_mask({_, _, _, _} = Addr, {_, _, _, _} = Mask) ->
    tuple_band(Addr, Mask);
band_with_mask({_, _, _, _, _, _, _, _} = Addr, {_, _, _, _, _, _, _, _} = Mask) ->
    tuple_band(Addr, Mask).

tuple_band(A, B) ->
    list_to_tuple(lists:zipwith(fun(X, Y) -> X band Y end,
                                tuple_to_list(A), tuple_to_list(B))).
%%==============================================================================
%% Copyright 2012 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2012, <NAME>
%%% @doc
%%% The leader_cron_task module provides different methods for scheduling
%%% a task to be executed periodically in the future. The supported methods
%%% are one shot, sleeper, and cron mode.
%%%
%%% A oneshot schedule executes a task once after sleeping a specified
%%% number of milliseconds or at a given datetime.
%%%
%%% <code>
%%% {oneshot, 60000} % execute task once after waiting a minute<br />
%%% {oneshot, {{2012, 2, 23}, {1, 0, 0}}} % execute task on Feb 23, 2012 at 1 am
%%% </code>
%%%
%%% A sleeper mode schedule repeatedly executes a task then sleeps for a
%%% specified number of milliseconds before repeating the task.
%%%
%%% <code>{sleeper, 5000} % execute task then wait 5 seconds before the
%%% next execution</code>
%%%
%%% A cron mode schedule acts similarly to Unix cron. The schedule is
%%% defined by the cron tuple
%%%
%%% <code>{cron, {Minute, Hour, DayOfMonth, Month, DayOfWeek}}</code>
%%%
%%% The valid range of values for these fields are
%%%
%%% <pre>
%%% Field Valid Range
%%% ------------ -------------------
%%% minute 0 - 59
%%% hour 0 - 23
%%% day of month 1 - 31
%%% month 1 - 12
%%% day of week 0 - 6 (Sunday is 0) </pre>
%%%
%%% The semantics of these fields align with Unix cron. Each field
%%% specifies which values in the range are valid for task execution. The
%%% values can be given as a range, a list or the atom 'all'.
%%%
%%% <pre>
%%% Field Spec Example Unix Cron
%%% ----------------------------- ----------------- ---------
%%% all all *
%%% {integer(), integer{}} {1, 5} 1-5
%%% [integer()] [1, 3, 7] 1,3,7
%%%
%%% # old range and list format is also supported
%%% {range, integer(), integer()} {range, 1, 5} 1-5
%%% {list, [integer()]} {list, [1, 3, 7]} 1,3,7</pre>
%%%
%%% If the day of month is set to a day which does not exist in the current
%%% month (such as 31 for February) the day is skipped. Setting day of month
%%% to 31 does _not_ mean the last day of the month. This aligns with Unix
%%% cron.
%%%
%%% Specified dates and times are all handled in UTC.
%%%
%%% When a task takes longer than the time to the next valid period (or
%%% periods) the overlapped periods are skipped.
%%%
%%% @end
%%% Created : 1 Feb 2012 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(leader_cron_task).
-behaviour(gen_server).
%% API
-export([start_link/2, status/1, stop/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-export_type([sleeper/0, cron/0, execargs/0, datetime/0, status/0, schedule/0]).
-define(SERVER, ?MODULE).
-record(state, {
schedule :: schedule(),
exec :: execargs(),
task_pid :: pid(),
status :: status(),
next}).
-define(DAY_IN_SECONDS, 86400).
-define(HOUR_IN_SECONDS, 3600).
-define(MINUTE_IN_SECONDS, 60).
-type schedule() :: oneshot() | sleeper() | cron().
%% A cron schedule.
-type oneshot() :: {oneshot, Millis::pos_integer() | datetime()}.
%% Schedule a task once after a delay or on a particular date.
-type sleeper() :: {sleeper, Millis::pos_integer()}.
%% Repeating schedule sleeping between executions.
-type cron() :: {cron, {Minute :: cronspec(),
Hour :: cronspec(),
DayOfMonth :: cronspec(),
Month :: cronspec(),
DayOfWeek :: cronspec()}}.
%% Unix like cron schedule representing the five cron fields:
%% minute, hour, day of month, month, day of week.
-type cronspec() :: all | [rangespec() | listspec()].
%% Cron field value. Atom all for all values (e.g. *) or one of rangespec()
%% or listspec().
-type rangespec() :: {range, Min :: integer(), Max :: integer()}
| {Min :: integer(), Max :: integer()}.
%% Represents a cron range (e.g. 1-5).
-type listspec() :: {list, Values :: [integer()]} | [integer()] | integer().
%% Represents a cron list (e.g. 1,3,7)
-type status() :: waiting | running | done | error | undefined.
%% Task execution status.
-type execargs() :: mfargs() | funcargs().
%% Task execution type.
-type mfargs() :: {Module :: atom(), Function :: atom(), Args :: [term()]}.
%% Function execution definition.
-type funcargs() :: {Function :: fun(), Args :: [term()]}.
%% Anonymous function execution definition.
-type datetime() :: calendar:datetime().
%% Date and time.
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Creates a linked process which schedules the function in the
%% specified module with the given arguments to be run according
%% to the given schedule.
%%
%% @end
%%--------------------------------------------------------------------
-spec start_link(Schedule, Exec) -> {ok, pid()} | {error, Reason} when
      Schedule :: schedule(),
      Exec :: execargs(),
      Reason :: term().
%% Starts an unregistered task server linked to the caller; the
%% schedule/exec pair is handed to init/1 as its argument list.
start_link(Schedule, Exec) ->
    InitArgs = [{Schedule, Exec}],
    gen_server:start_link(?MODULE, InitArgs, []).
%%--------------------------------------------------------------------
%% @doc
%% Gets the current status of the task and the trigger time. If running
%% the trigger time denotes the time the task started. If waiting the
%% time denotes the next time the task will run. If done the time the
%% task ran. If error the cause of the error.
%%
%% @end
%%--------------------------------------------------------------------
-spec status(pid()) -> {Status, ScheduleTime, TaskPid} when
Status :: status(),
ScheduleTime :: datetime() | pos_integer() | {error, Reason},
Reason :: term(),
TaskPid :: pid().
%% Synchronous query of the server's current state; the reply is built
%% in handle_call(status, ...) from the stored status/next/task_pid.
status(Pid) ->
gen_server:call(Pid, status).
%%--------------------------------------------------------------------
%% @doc
%% Stops the task.
%%
%% @end
%%--------------------------------------------------------------------
-spec stop(pid()) -> ok.
%% Asynchronous; returns ok immediately. The server stops with reason
%% normal (see handle_cast(stop, ...)) and kills its worker in terminate/2.
stop(Pid) ->
gen_server:cast(Pid, stop).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @end
%%--------------------------------------------------------------------
-spec init([{schedule(), execargs()}]) -> {ok, #state{}}.
%% Spawns the linked worker process that does the actual waiting and
%% task execution; the worker reports phase changes back to this server
%% via casts. Oneshot schedules run once; everything else loops.
init([{Schedule, Exec}]) ->
    Server = self(),
    Worker =
        case Schedule of
            {oneshot, _} ->
                spawn_link(fun() -> oneshot(Schedule, Exec, Server) end);
            _Repeating ->
                spawn_link(fun() -> run_task(Schedule, Exec, Server) end)
        end,
    {ok, #state{schedule = Schedule,
                exec = Exec,
                task_pid = Worker}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @end
%%--------------------------------------------------------------------
%% Reports the current {Status, Next, TaskPid} triple to the caller.
handle_call(status, _From, #state{status = Status,
                                  next = Next,
                                  task_pid = TaskPid} = State) ->
    {reply, {Status, Next, TaskPid}, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @end
%%--------------------------------------------------------------------
%% stop: requested via stop/1; terminate/2 will kill the worker.
handle_cast(stop, State) ->
    {stop, normal, State};
%% Status updates cast by the worker process. Each message carries the
%% new status atom plus the associated info (next run time, schedule
%% value, or error message), which is stored verbatim in the state.
handle_cast({Status, Info}, State)
  when Status =:= waiting; Status =:= running;
       Status =:= done; Status =:= error ->
    {noreply, State#state{status = Status, next = Info}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @end
%%--------------------------------------------------------------------
%% Ignore any stray messages delivered directly to the server process.
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%%
%% @end
%%--------------------------------------------------------------------
%% Kill the worker unconditionally: it may be deep in a long sleep, and
%% exit/2 with reason kill cannot be trapped, so it dies immediately.
%% Killing an already-dead pid is harmless.
terminate(_Reason, State) ->
exit(State#state.task_pid, kill),
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @end
%%--------------------------------------------------------------------
%% No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Executes a one-off task: waits for the configured delay in
%% milliseconds, or until the configured UTC datetime, then runs the
%% task exactly once, reporting each phase (waiting/running/done) to the
%% parent gen_server. A datetime in the past is reported as an error.
oneshot({oneshot, Millis}, Exec, Parent) when is_integer(Millis) ->
    oneshot_run(Millis, Millis, Exec, Parent);
oneshot({oneshot, DateTime}, Exec, Parent) ->
    NowSecs = calendar:datetime_to_gregorian_seconds(calendar:universal_time()),
    TargetSecs = calendar:datetime_to_gregorian_seconds(DateTime),
    case TargetSecs - NowSecs of
        WaitSecs when WaitSecs > 0 ->
            oneshot_run(WaitSecs * 1000, DateTime, Exec, Parent);
        _Past ->
            Message = lists:flatten(
                        io_lib:format("Schedule datetime ~p is in the past",
                                      [DateTime])),
            error_logger:error_report(Message),
            gen_server:cast(Parent, {error, Message})
    end.

%% Wait WaitMillis, then run the task once; Tag is the trigger info
%% (delay or datetime) echoed back in every status cast.
oneshot_run(WaitMillis, Tag, Exec, Parent) ->
    gen_server:cast(Parent, {waiting, Tag}),
    sleep_accounting_for_max(WaitMillis),
    gen_server:cast(Parent, {running, Tag}),
    apply_task(Exec),
    gen_server:cast(Parent, {done, Tag}).
%% Repeating schedules, looping forever. A sleeper runs the task first
%% and then sleeps the configured interval; a cron schedule sleeps until
%% the next matching datetime and then runs the task.
run_task({sleeper, Millis} = Schedule, Exec, Parent) ->
    gen_server:cast(Parent, {running, Millis}),
    apply_task(Exec),
    gen_server:cast(Parent, {waiting, Millis}),
    sleep_accounting_for_max(Millis),
    run_task(Schedule, Exec, Parent);
run_task(Schedule, Exec, Parent) ->
    Now = calendar:universal_time(),
    Next = next_valid_datetime(Schedule, Now),
    gen_server:cast(Parent, {waiting, Next}),
    sleep_accounting_for_max(time_to_wait_millis(Now, Next)),
    gen_server:cast(Parent, {running, Next}),
    apply_task(Exec),
    run_task(Schedule, Exec, Parent).
-spec apply_task(execargs()) -> any().
%% Runs the task, catching and logging any exception so that a failing
%% task cannot take down the scheduler loop. Returns the task's result
%% on success, or the logger's result when the task raised.
apply_task(Exec) ->
    try
        case Exec of
            {M, F, A} ->
                apply(M, F, A);
            {F, A} ->
                apply(F, A)
        end
    catch
        %% Capture the stacktrace in the catch head: erlang:get_stacktrace/0
        %% was deprecated in OTP 21 and removed in OTP 24.
        Class:Reason:Stacktrace ->
            Format = "Task ~p in process ~p with value:~n~p",
            Message = lists:flatten(
                        io_lib:format(Format,
                                      [Class, self(), {Reason, Stacktrace}])),
            error_logger:error_report(Message)
    end.
-spec time_to_wait_millis(datetime(), datetime()) -> integer().
%% Milliseconds between two UTC datetimes (Next minus Current); negative
%% when Next is in the past.
time_to_wait_millis(CurrentDateTime, NextDateTime) ->
    ToSecs = fun calendar:datetime_to_gregorian_seconds/1,
    1000 * (ToSecs(NextDateTime) - ToSecs(CurrentDateTime)).
-spec next_valid_datetime(cron(), datetime()) -> datetime().
%% Earliest datetime strictly after DateTime (at minute resolution) that
%% matches the cron schedule: advance one minute, truncate seconds to
%% zero, then search forward via next_valid_datetime/3.
next_valid_datetime({cron, Schedule}, DateTime) ->
    {Date, {Hour, Minute, _Secs}} = advance_seconds(DateTime, ?MINUTE_IN_SECONDS),
    next_valid_datetime(not_done, {cron, Schedule}, {Date, {Hour, Minute, 0}}).
-spec next_valid_datetime(done|not_done, cron(), datetime()) -> datetime().
%% Searches forward, field by field, for the next datetime matching the
%% cron spec: on a mismatch it jumps to the start of the next candidate
%% period (month/day/hour/minute) and recurses with not_done; once every
%% field matches it recurses with done and returns the datetime.
next_valid_datetime(done, _, DateTime) ->
DateTime;
next_valid_datetime(not_done, {cron, Schedule}, DateTime) ->
{MinuteSpec, HourSpec, DayOfMonthSpec, MonthSpec, DayOfWeekSpec} =
Schedule,
{{Year, Month, Day}, {Hour, Minute, _}} = DateTime,
{Done, Time} =
%% Month mismatch: jump to the first instant of the next month
%% (rolling the year over after December).
case value_valid(MonthSpec, 1, 12, Month) of
false ->
case Month of
12 ->
{not_done, {{Year + 1, 1, 1}, {0, 0, 0}}};
Month ->
{not_done, {{Year, Month + 1, 1}, {0, 0, 0}}}
end;
true ->
DayOfWeek = case calendar:day_of_the_week(Year, Month, Day) of
7 ->
0; % we want 0 to be Sunday not 7
DOW ->
DOW
end,
DOMValid = value_valid(DayOfMonthSpec, 1, 31, Day),
DOWValid = value_valid(DayOfWeekSpec, 0, 6, DayOfWeek),
%% Standard cron day semantics: when both day-of-month and
%% day-of-week are restricted, a day matching either one is
%% accepted; otherwise both must hold (an 'all' spec is
%% always valid, so effectively only the restricted one counts).
case (((DayOfMonthSpec /= all) and
(DayOfWeekSpec /= all) and
(DOMValid or DOWValid)) or (DOMValid and DOWValid)) of
false ->
%% Day mismatch: jump to midnight of the next day.
Temp1 = advance_seconds(DateTime, ?DAY_IN_SECONDS),
{{Y, M, D}, {_, _, _}} = Temp1,
{not_done, {{Y, M, D}, {0, 0, 0}}};
true ->
case value_valid(HourSpec, 0, 23, Hour) of
false ->
%% Hour mismatch: jump to the start of the next hour.
Temp3 = advance_seconds(DateTime,
?HOUR_IN_SECONDS),
{{Y, M, D}, {H, _, _}} = Temp3,
{not_done, {{Y, M, D}, {H, 0, 0}}};
true ->
case value_valid(
MinuteSpec, 0, 59, Minute) of
false ->
%% Minute mismatch: try the next minute.
{not_done, advance_seconds(
DateTime,
?MINUTE_IN_SECONDS)};
true ->
{done, DateTime}
end
end
end
end,
next_valid_datetime(Done, {cron, Schedule}, Time).
-spec value_valid(cronspec(), integer(), integer(), integer()) -> true | false.
%% True when Value satisfies the cron field Spec. The guards restrict
%% Value to [Min, Max]; out-of-range values raise function_clause.
value_valid(all, Min, Max, Value) when Value >= Min, Value =< Max ->
    true;
value_valid(Spec, Min, Max, Value) when Value >= Min, Value =< Max ->
    Allowed = extract_integers(Spec, Min, Max),
    lists:any(fun(Item) -> Item == Value end, Allowed).
-spec advance_seconds(datetime(), integer()) -> datetime().
%% Shifts a datetime by the given number of seconds (may be negative).
advance_seconds(DateTime, Seconds) ->
    calendar:gregorian_seconds_to_datetime(
      calendar:datetime_to_gregorian_seconds(DateTime) + Seconds).
-spec extract_integers([rangespec()|listspec()], integer(), integer()) ->
          [integer()].
%% Expands a cron field spec into the sorted, de-duplicated list of
%% integers it denotes. Requires Min < Max (function_clause otherwise).
%% Throws {error, {out_of_range, ...}} when any expanded value falls
%% outside [Min, Max].
extract_integers(Spec, Min, Max) when Min < Max ->
    extract_integers(Spec, Min, Max, []).

-spec extract_integers(Spec, Min, Max, Acc) -> Integers when
      Spec :: [rangespec()|listspec()],
      Min :: integer(),
      Max :: integer(),
      Acc :: list(),
      Integers :: [integer()].
extract_integers([], Min, Max, Acc) ->
    %% lists:usort/1 sorts and de-duplicates in one pass, replacing the
    %% previous sort(sets:to_list(sets:from_list(...))) pipeline.
    Integers = lists:usort(lists:flatten(Acc)),
    lists:foreach(
      fun(Int) when Int < Min ->
              throw({error, {out_of_range, {min, Min}, {value, Int}}});
         (Int) when Int > Max ->
              throw({error, {out_of_range, {max, Max}, {value, Int}}});
         (_) ->
              ok
      end, Integers),
    Integers;
extract_integers([H | T], Min, Max, Acc) ->
    %% The case (rather than head patterns on H) is kept deliberately so
    %% that an unrecognised element raises {case_clause, Element}, which
    %% the unit tests assert on.
    Values = case H of
                 {range, Lower, Upper} when Lower < Upper ->
                     lists:seq(Lower, Upper);
                 {list, List} ->
                     List;
                 {Lower, Upper} when Lower < Upper ->
                     lists:seq(Lower, Upper);
                 List when is_list(List) ->
                     List;
                 Integer when is_integer(Integer) ->
                     [Integer]
             end,
    extract_integers(T, Min, Max, [Values | Acc]).
-define(LONG_SLEEP_TIME, 100000000).
%% Sleep for TimeInMillis, splitting very long waits into chunks of
%% ?LONG_SLEEP_TIME ms (presumably to stay well below the upper limit on
%% receive-after timeouts -- TODO confirm the motivation). The remainder
%% is slept first, then the whole chunks, so the total wait is exactly
%% TimeInMillis.
sleep_accounting_for_max(TimeInMillis) ->
case (TimeInMillis > ?LONG_SLEEP_TIME) of
true -> timer:sleep(TimeInMillis rem ?LONG_SLEEP_TIME), long_sleep(TimeInMillis div ?LONG_SLEEP_TIME);
false -> timer:sleep(TimeInMillis)
end.
%% Sleeps Chunks * ?LONG_SLEEP_TIME milliseconds.
long_sleep(0) -> ok;
long_sleep(Chunks) ->
timer:sleep(?LONG_SLEEP_TIME),
long_sleep(Chunks - 1).
%%%===================================================================
%%% Unit Tests
%%%===================================================================
-ifdef(TEST).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
%% Oneshot with an anonymous fun: waiting -> running -> done, and the
%% worker process dies once the task has completed.
oneshot_anon_test() ->
Schedule = {oneshot, 500},
Fun = fun(T) -> timer:sleep(T) end,
{ok, Pid} = leader_cron_task:start_link(Schedule, {Fun, [500]}),
{_, _, TaskPid} = leader_cron_task:status(Pid),
?assertMatch({waiting, 500, _}, leader_cron_task:status(Pid)),
timer:sleep(550),
?assertMatch({running, 500, _}, leader_cron_task:status(Pid)),
?assertEqual(true, is_process_alive(TaskPid)),
timer:sleep(550),
?assertMatch({done, 500, _}, leader_cron_task:status(Pid)),
?assertEqual(false, is_process_alive(TaskPid)).
%% Same workflow with an {M, F, A} task spec and a millisecond delay.
oneshot_millis_test() ->
Schedule = {oneshot, 500},
{ok, Pid} = leader_cron_task:start_link(Schedule, {timer, sleep, [500]}),
{_, _, TaskPid} = leader_cron_task:status(Pid),
?assertMatch({waiting, 500, _}, leader_cron_task:status(Pid)),
timer:sleep(550),
?assertMatch({running, 500, _}, leader_cron_task:status(Pid)),
?assertEqual(true, is_process_alive(TaskPid)),
timer:sleep(550),
?assertMatch({done, 500, _}, leader_cron_task:status(Pid)),
?assertEqual(false, is_process_alive(TaskPid)).
%% Oneshot scheduled at an absolute UTC datetime two seconds from now.
oneshot_datetime_test() ->
DateTime = advance_seconds(calendar:universal_time(), 2),
Schedule = {oneshot, DateTime},
{ok, Pid} = leader_cron_task:start_link(Schedule, {timer, sleep, [500]}),
{_, _, TaskPid} = leader_cron_task:status(Pid),
?assertMatch({waiting, DateTime, _}, leader_cron_task:status(Pid)),
timer:sleep(2100),
?assertMatch({running, DateTime, _}, leader_cron_task:status(Pid)),
?assertEqual(true, is_process_alive(TaskPid)),
timer:sleep(550),
?assertMatch({done, DateTime, _}, leader_cron_task:status(Pid)),
?assertEqual(false, is_process_alive(TaskPid)).
%% A datetime in the past is reported as an error and the worker exits.
oneshot_in_the_past_test() ->
DateTime = {{1970, 1, 1}, {1, 1, 1}},
Schedule = {oneshot, DateTime},
{ok, Pid} = leader_cron_task:start_link(Schedule, {timer, sleep, [500]}),
{_, _, TaskPid} = leader_cron_task:status(Pid),
timer:sleep(500),
?assertMatch({error, _, _}, leader_cron_task:status(Pid)),
?assertEqual(false, is_process_alive(TaskPid)).
%% Sleeper schedule: runs first, then alternates waiting/running; stop/1
%% terminates the server and its worker.
nominal_sleeper_workflow_test() ->
Schedule = {sleeper, 1000},
{ok, Pid} = leader_cron_task:start_link(
Schedule,
{timer, sleep, [1000]}),
{_, _, TaskPid} = leader_cron_task:status(Pid),
?assertMatch({running, 1000, _}, leader_cron_task:status(Pid)),
timer:sleep(1500),
?assertMatch({waiting, 1000, _}, leader_cron_task:status(Pid)),
timer:sleep(1000),
?assertMatch({running, 1000, _}, leader_cron_task:status(Pid)),
?assertEqual(true, is_process_alive(TaskPid)),
?assertEqual(ok, leader_cron_task:stop(Pid)),
timer:sleep(100),
?assertEqual(false, is_process_alive(TaskPid)),
?assertException(exit,
{noproc,{gen_server,call,[Pid, status]}},
leader_cron_task:status(Pid)).
%% Every-minute cron schedule: waits until the next minute boundary,
%% runs, then waits for the following one. Wrapped in a timeout since a
%% full minute may elapse before the first trigger.
nominal_cron_workflow_test_() ->
{timeout, 90,
fun() ->
Schedule = {cron, {all, all, all, all, all}},
{ok, Pid} = leader_cron_task:start_link(
Schedule,
{timer, sleep, [5000]}),
Current = calendar:universal_time(),
Next = next_valid_datetime(Schedule, Current),
WaitFor = time_to_wait_millis(Current, Next),
?assertMatch({waiting, Next, _}, leader_cron_task:status(Pid)),
timer:sleep(WaitFor + 2000),
?assertMatch({running, Next, _}, leader_cron_task:status(Pid)),
timer:sleep(4000),
Next1 = next_valid_datetime(Schedule, Next),
?assertMatch({waiting, Next1, _}, leader_cron_task:status(Pid)),
?assertEqual(ok, leader_cron_task:stop(Pid)),
?assertException(exit,
{normal,{gen_server,call,[Pid, status]}},
leader_cron_task:status(Pid))
end}.
%% Out-of-range expanded values must be rejected with a throw.
invalid_range_test() ->
?assertException(throw, {error, {out_of_range, {min, 2}, {value, 1}}},
extract_integers([], 2, 10, [1])),
?assertException(throw, {error, {out_of_range, {max, 2}, {value, 3}}},
extract_integers([], 1, 2, [3])).
%% Each spec shape (range tuple, pair, list wrapper, bare list, bare
%% integer) expands to the same sorted integer list; bad shapes crash.
extract_integers_test() ->
?assertException(error, function_clause, extract_integers([], 5, 4)),
?assertException(error, {case_clause, bad}, extract_integers([bad], 0, 5)),
?assertEqual([1,2,3,4,5], extract_integers([{range, 1, 5}], 0, 10)),
?assertEqual([1,2,3,4,5], extract_integers([{1, 5}], 0, 10)),
?assertEqual([1,2,3,4,5], extract_integers([{list, [1,2,3,4,5]}], 0, 10)),
?assertEqual([1,2,3,4,5], extract_integers([[1,2,3,4,5]], 0, 10)),
?assertEqual([5], extract_integers([{list, [5]}], 0, 10)),
?assertEqual([5], extract_integers([5], 0, 10)).
%% Field-by-field advancement of the cron search, including period
%% roll-overs and the day-of-month vs day-of-week OR semantics.
next_valid_datetime_cron_test() ->
% roll year
?assertEqual({{2013, 1, 1}, {0, 0, 0}},
next_valid_datetime({cron, {all, all, all, all, all}},
{{2012, 12, 31}, {23, 59, 48}})),
% last second of minute (we skip a second)
?assertEqual({{2012, 1, 1}, {0, 1, 0}},
next_valid_datetime({cron, {all, all, all, all, all}},
{{2012, 1, 1}, {0, 0, 59}})),
% 12th month rolls year
?assertEqual({{2013, 2, 1}, {0, 0, 0}},
next_valid_datetime({cron, {all, all, all,
[{list, [2]}], all}},
{{2012, 12, 1}, {0, 0, 0}})),
% normal month advance
?assertEqual({{2012, 12, 1}, {0, 0, 0}},
next_valid_datetime(
{cron, {all, all, all, [{list, [12]}], all}},
{{2012, 4, 1}, {0, 0, 0}})),
% day of month (no day of week)
?assertEqual({{2012, 1, 13}, {0, 0, 0}},
next_valid_datetime(
{cron, {all, all, [{list, [13]}], all, all}},
{{2012, 1, 5}, {0, 0, 0}})),
% day of week (no day of month)
?assertEqual({{2012, 2, 10}, {0, 0, 0}},
next_valid_datetime(
{cron, {all, all, all, all, [{list, [5]}]}}, % 5 is Friday
{{2012, 2, 7}, {0, 0, 0}})),
% day of week and day of month (day of month comes first and wins)
?assertEqual({{2012, 2, 8}, {0, 0, 0}},
next_valid_datetime(
{cron, {all, all, [{list, [8]}], all, [{list, [5]}]}},
{{2012, 2, 7}, {0, 0, 0}})),
% day of week and day of month (day of week comes first and wins)
?assertEqual({{2012, 2, 10}, {0, 0, 0}},
next_valid_datetime(
{cron, {all, all, [{list, [12]}], all, [{list, [5]}]}},
{{2012, 2, 7}, {0, 0, 0}})),
% hour advance
?assertEqual({{2012, 1, 1}, {22, 0, 0}},
next_valid_datetime(
{cron, {all, [{list, [22]}], all, all, all}},
{{2012, 1, 1}, {0, 0, 0}})),
% minute advance
?assertEqual({{2012, 1, 1}, {0, 59, 0}},
next_valid_datetime(
{cron, {[{list, [59]}], all, all, all, all}},
{{2012, 1, 1}, {0, 0, 0}})).
%% One minute difference is 60000 ms.
time_to_wait_millis_test() ->
?assertEqual(60000, time_to_wait_millis(
{{2012, 1, 1}, {0, 0, 0}},
{{2012, 1, 1}, {0, 1, 0}})).
-endif.
%% @doc Interface with hex.pm
-module(dep_hex).
-behaviour(hex_http).
-export([request/5]).
-export([get_latest_vsn/1]).
%% @doc Returns the latest version of a package in hex.pm, or undefined
%% (with a warning) when the repository lookup fails or returns no
%% releases.
-spec get_latest_vsn(atom()) -> binary() | undefined.
get_latest_vsn(Name) ->
    PkgName = atom_to_binary(Name, utf8),
    case hex_repo:get_package(config(), PkgName) of
        {ok, {200, _Headers, [_ | _] = Releases}} ->
            %% Releases arrive in ascending version order; the last one
            %% is the newest.
            lists:last([Vsn || #{version := Vsn} <- Releases]);
        Other ->
            rebar_api:warn("Couldn't fetch latest version of ~p from hex.pm:\n~p",
                           [Name, Other]),
            undefined
    end.
%% Builds the hex_core configuration: the default config plus our HTTP
%% adapter/user-agent settings and, when present, the API key from the
%% environment.
config() ->
    maybe_put_api_key(put_http_config(hex_core:default_config())).
%% Installs this module as the hex_http adapter and sets the user-agent
%% fragment reported to hex.pm.
put_http_config(Config) ->
    maps:merge(Config,
               #{http_user_agent_fragment =>
                     <<"(rebar3_depup/0.0.1) (httpc)">>,
                 http_adapter => {?MODULE, #{}}}).
%% Adds api_key when the HEX_API_KEY environment variable is set;
%% otherwise returns the configuration unchanged.
maybe_put_api_key(Config) ->
    case os:getenv("HEX_API_KEY") of
        false ->
            Config;
        Key ->
            Config#{api_key => Key}
    end.
%% @private
%% hex_http callback: performs the request via httpc, returning the
%% status code and body. Response headers are discarded (an empty map
%% is returned in their place).
request(Method, URI, ReqHeaders, Body, _AdapterConfig) ->
    Request = build_request(URI, ReqHeaders, Body),
    HttpOptions = [{ssl, rebar_utils:ssl_opts(URI)}],
    case httpc:request(Method, Request, HttpOptions, [{body_format, binary}], default) of
        {ok, {{_Vsn, StatusCode, _Phrase}, _RespHeaders, RespBody}} ->
            {ok, {StatusCode, #{}, RespBody}};
        {error, _} = Error ->
            Error
    end.
%%====================================================================
%% Internal functions
%%====================================================================
%% Builds an httpc request tuple from a binary URI and a binary header
%% map: {URI, Headers} for body-less requests, or
%% {URI, Headers, ContentType, Body} when a body is supplied.
build_request(URI, ReqHeaders, Body) ->
    build_request2(binary_to_list(URI), dump_headers(ReqHeaders), Body).

build_request2(URI, Headers, undefined) ->
    {URI, Headers};
build_request2(URI, Headers, {ContentType, Body}) ->
    {URI, Headers, ContentType, Body}.

%% Converts a #{binary() => binary()} header map into the
%% [{string(), string()}] list form that httpc expects.
dump_headers(Headers) ->
    [{binary_to_list(K), binary_to_list(V)} || {K, V} <- maps:to_list(Headers)].
%% @author <NAME> <<EMAIL>>
%% @copyright 2018 <NAME>
%% @doc Javascript minifier. Based on jsmin.c
%% Copyright 2018 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% jsmin is Copyright (c) 2002 <NAME> (www.crockford.com)
%% see https://github.com/douglascrockford/JSMin/blob/master/jsmin.c
-module(z_jsmin).
-export([
minify/1
]).
%% True for identifier characters: ASCII alphanumerics, '_', '\', '$',
%% and any byte above 126 (bytes of multi-byte UTF-8 sequences).
-define(is_alnum(C), (
(C >= $a andalso C =< $z) orelse
(C >= $A andalso C =< $Z) orelse
(C >= $0 andalso C =< $9) orelse
C =:= $_ orelse C =:= $\\ orelse C =:= $$ orelse C > 126)).
%% True for characters after which a '/' starts a regexp literal rather
%% than a division operator.
-define(is_pre_regexp(C), (
C =:= $( orelse C =:= $, orelse C =:= $= orelse C =:= $: orelse
C =:= $[ orelse C =:= $! orelse C =:= $& orelse C =:= $| orelse
C =:= $? orelse C =:= $+ orelse C =:= $- orelse C =:= $~ orelse
C =:= $* orelse C =:= $/ orelse C =:= ${ orelse C =:= $\n)).
%% Whitespace as normalised by next/1: a space or a linefeed.
-define(isspace(C), (C =:= 32 orelse C =:= $\n)).
-spec minify( binary() ) -> binary().
%% Minifies a JavaScript source binary: removes comments (except "/*!"
%% copyright blocks, which are preserved) and collapses insignificant
%% whitespace, while keeping string, template and regexp literals intact.
minify( JS ) ->
minify(next(JS), []).
%% The accumulator Acc holds the minified output in reverse. Clause
%% order is significant: earlier clauses implement the more specific
%% whitespace rules, later ones the generic copy behaviour.
%% End of input: drop one trailing whitespace char, then reverse Acc.
minify(<<>>, [ C | Acc ]) when ?isspace(C) ->
minify(<<>>, Acc);
minify(<<>>, Acc) ->
iolist_to_binary(lists:reverse(Acc));
%% Leading whitespace is dropped entirely.
minify(<<$\n, JS/binary>>, []) ->
minify(next(JS), []);
minify(<<32, JS/binary>>, []) ->
minify(next(JS), []);
%% Collapse newlines after a newline or a semicolon, and spaces after
%% whitespace.
minify(<<$\n, JS/binary>>, [ $\n | _ ] = Acc) ->
minify(next(JS), Acc);
minify(<<$\n, JS/binary>>, [ $; | _ ] = Acc) ->
minify(next(JS), Acc);
minify(<<32, JS/binary>>, [ C | _ ] = Acc) when ?isspace(C) ->
minify(next(JS), Acc);
%% A space is kept only when it separates two identifier characters,
%% or '- -' / '+ +' pairs (which must not fuse into '--' / '++').
minify(<<32, B, JS/binary>>, [ A | _ ] = Acc)
when ?is_alnum(B) andalso ?is_alnum(A) ->
minify(next(<<B, JS/binary>>), [ 32 | Acc ]);
minify(<<32, $-, JS/binary>>, [ $- | _ ] = Acc) ->
minify(next(<<$-, JS/binary>>), [ 32 | Acc ]);
minify(<<32, $+, JS/binary>>, [ $+ | _ ] = Acc) ->
minify(next(<<$+, JS/binary>>), [ 32 | Acc ]);
%% Any other space is insignificant.
minify(<<32, JS/binary>>, Acc) ->
minify(next(JS), Acc);
%% A newline is kept when it might terminate a statement: the next char
%% can start an expression and the previous char can end one.
%% NOTE(review): the second guard line tests B (the next char) for
%% '+', '-', '"', '\'', '`' where jsmin tests the *previous* char (A);
%% this looks like a typo -- confirm against jsmin.c before changing.
minify(<<$\n, B, JS/binary>>, [ A | Acc ])
when (?is_alnum(B) orelse B =:= ${ orelse B =:= $[ orelse B =:= $( orelse B =:= $+ orelse B =:= $-) andalso
(?is_alnum(A) orelse A =:= $} orelse A =:= $] orelse A =:= $) orelse B =:= $+ orelse B =:= $- orelse B =:= $" orelse B =:= $' orelse B =:= $`) ->
minify(next(<<B, JS/binary>>), [ $\n, A | Acc ]);
minify(<<$\n, JS/binary>>, Acc) ->
minify(next(JS), [ $\n | Acc ]);
%% String and template literals are copied verbatim.
minify(<<Q, JS/binary>>, Acc) when Q =:= $'; Q =:= $"; Q =:= $` ->
{JS1, Acc1} = string(Q, JS, [ Q | Acc]),
minify(next(JS1), Acc1);
%% "/*!" copyright comments are preserved in the output.
minify(<<"/*!", JS/binary>>, Acc) ->
{JS1, Acc1} = copy_comment(JS, [ $!, $*, $/ | Acc ]),
minify(next(JS1), Acc1);
%% A '/' after a pre-regexp character starts a regexp literal, which is
%% copied verbatim; a space is re-inserted after '/' or '*' so the
%% output cannot accidentally form a comment opener.
minify(<<$/, JS/binary>>, [ C | _] = Acc) when ?is_pre_regexp(C) ->
Acc1 = case C of
$/ -> [ $/, 32 | Acc ];
$* -> [ $/, 32 | Acc ];
_ -> [ $/ | Acc ]
end,
{JS1, Acc2} = regexp(JS, Acc1),
minify(next(JS1), Acc2);
%% Everything else is copied through.
minify(<<C, JS/binary>>, Acc) ->
minify(next(JS), [ C | Acc ]).
%% Copies a quoted string/template literal (quote character Quote)
%% verbatim onto the accumulator, honouring backslash escapes. Returns
%% the remaining input and the accumulator including the closing quote;
%% throws on unterminated input.
string(Quote, <<Quote, Rest/binary>>, Acc) ->
    {Rest, [Quote | Acc]};
string(Quote, <<$\\, Escaped, Rest/binary>>, Acc) ->
    string(Quote, Rest, [Escaped, $\\ | Acc]);
string(Quote, <<Char, Rest/binary>>, Acc) ->
    string(Quote, Rest, [Char | Acc]);
string(_Quote, <<>>, _Acc) ->
    throw('Unterminated string').
%% Copies a regexp literal verbatim onto the accumulator up to and
%% including the terminating '/'. Character classes ('[...]') are
%% delegated to regexp_set/2, since '/' needs no escaping inside them.
regexp(<<$[, Rest/binary>>, Acc) ->
    {Rest1, Acc1} = regexp_set(Rest, [$[ | Acc]),
    regexp(Rest1, Acc1);
regexp(<<$/, C, _/binary>>, _Acc) when C =:= $/; C =:= $* ->
    throw('Unterminated regexp');
regexp(<<$/, Rest/binary>>, Acc) ->
    {Rest, [$/ | Acc]};
regexp(<<$\\, Escaped, Rest/binary>>, Acc) ->
    regexp(Rest, [Escaped, $\\ | Acc]);
regexp(<<>>, _Acc) ->
    throw('Unterminated regexp');
regexp(<<Char, Rest/binary>>, Acc) ->
    regexp(Rest, [Char | Acc]).

%% Copies a regexp character class up to and including the closing ']'.
regexp_set(<<$], Rest/binary>>, Acc) ->
    {Rest, [$] | Acc]};
regexp_set(<<$\\, Escaped, Rest/binary>>, Acc) ->
    regexp_set(Rest, [Escaped, $\\ | Acc]);
regexp_set(<<>>, _Acc) ->
    throw('Unterminated set in regexp');
regexp_set(<<Char, Rest/binary>>, Acc) ->
    regexp_set(Rest, [Char | Acc]).
-spec next(binary()) -> binary().
%% Advances to the next significant byte: drops '//' and '/*...*/'
%% comments (but leaves '/*!' copyright comments for the caller),
%% rewrites CR to LF, and rewrites other control bytes to spaces.
next(<<>>) -> <<>>;
next(<<"//", Rest/binary>>) -> next(skip_to_eol(Rest));
next(<<"/*!", _/binary>> = Input) -> Input;
next(<<"/*", Rest/binary>>) -> next(skip_comment(Rest));
next(<<$\r, Rest/binary>>) -> <<$\n, Rest/binary>>;
next(<<$\n, _/binary>> = Input) -> Input;
next(<<C, _/binary>> = Input) when C >= 32 -> Input;
next(<<_, Rest/binary>>) -> <<" ", Rest/binary>>.

-spec skip_to_eol(binary()) -> binary().
%% Drops bytes up to (not including) the next control byte (=< LF).
skip_to_eol(<<>>) -> <<>>;
skip_to_eol(<<C, _/binary>> = Input) when C =< $\n -> Input;
skip_to_eol(<<_, Rest/binary>>) -> skip_to_eol(Rest).

-spec skip_comment(binary()) -> binary().
%% Drops a '/* ... */' comment body, replacing it with a single space.
skip_comment(<<>>) -> throw('Unterminated comment');
skip_comment(<<"*/", Rest/binary>>) -> <<" ", Rest/binary>>;
skip_comment(<<_, Rest/binary>>) -> skip_comment(Rest).

-spec copy_comment(binary(), list()) -> {binary(), list()}.
%% Copies a '/*! ... */' comment verbatim onto the accumulator,
%% including the closing '*/'.
copy_comment(<<>>, _Acc) -> throw('Unterminated comment');
copy_comment(<<"*/", Rest/binary>>, Acc) -> {Rest, [$/, $* | Acc]};
copy_comment(<<C, Rest/binary>>, Acc) -> copy_comment(Rest, [C | Acc]).
%%%------------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2015, <NAME>, <NAME>
%%% @doc
%%% Implements a cluster of nodes. The clusters save information about
%%% which parent-child relationships between its nodes. Clusters are
%%% assumed to have tree hierarchy (no loops), and, as such, a single
%%% root.
%%% @end
%%% Created : 2 Jun 2015 by <NAME>
%%%------------------------------------------------------------------------
-module(cluster).
-export([new_cluster/1, get_root/1, new_parent_child_to_cluster/2,
get_nodes/1, remove_split/2, has_node/2, merge_clusters/2,
make_indirection_cluster/0, is_indirection_cluster/1,
size/1, show_cluster/1, show_cluster/2]).
-export_type([cluster/1]).
%% A cluster is a rooted tree: the root node, the set of all member
%% nodes, and a parent -> children-set index. The alternative
%% {indirection_cluster, ...} form wraps a tree placeholder handle.
-record(cluster, {root_node, node_set, nodes_by_parent}).
-opaque cluster(NodeType) :: #cluster{root_node :: NodeType,
node_set :: sets:set(NodeType),
nodes_by_parent :: dict:dict(NodeType, sets:set(NodeType))}
| {indirection_cluster, tree:tree_ph()}.
%%--------------------------------------------------------------------
%% @doc
%% Creates a cluster containing Node as its only member and root.
%% @end
%%--------------------------------------------------------------------
-spec new_cluster(Node :: NodeType) -> cluster(NodeType).
new_cluster(Node) ->
    #cluster{root_node = Node,
             node_set = sets:add_element(Node, sets:new()),
             nodes_by_parent = dict:new()}.
%%--------------------------------------------------------------------
%% @doc
%% Creates a two-node cluster: ParentNode is the root and Node its
%% single child.
%% @end
%%--------------------------------------------------------------------
-spec new_parent_child_to_cluster(Node :: N, Node :: N) -> cluster(N) when N :: term().
new_parent_child_to_cluster(ParentNode, Node) ->
    Children = sets:add_element(Node, sets:new()),
    #cluster{root_node = ParentNode,
             node_set = sets:from_list([Node, ParentNode]),
             nodes_by_parent = dict:store(ParentNode, Children, dict:new())}.
%%--------------------------------------------------------------------
%% @doc
%% Returns the root node of the Cluster.
%% O(1): the root is stored directly in the record.
%% @end
%%--------------------------------------------------------------------
-spec get_root(Cluster :: cluster(NodeType)) -> OutNode :: NodeType.
get_root(#cluster{root_node = RootNode}) -> RootNode.
%%--------------------------------------------------------------------
%% @doc
%% Returns a list with all the nodes in the Cluster.
%% Order is unspecified (sets:to_list/1).
%% @end
%%--------------------------------------------------------------------
-spec get_nodes(Cluster :: cluster(NodeType)) -> [NodeType].
get_nodes(#cluster{node_set = Nodes}) -> sets:to_list(Nodes).
%%--------------------------------------------------------------------
%% @doc
%% Returns a boolean that indicates whether the Cluster contains the Node.
%% Membership test on the cluster's node set.
%% @end
%%--------------------------------------------------------------------
-spec has_node(Node :: NodeType, Cluster :: cluster(NodeType)) -> boolean() when
NodeType :: term().
has_node(Node, #cluster{node_set = Nodes}) -> sets:is_element(Node, Nodes).
%%--------------------------------------------------------------------
%% @doc
%% Takes two clusters and returns {ok, MergedCluster} with all the nodes,
%% or the atom `disjoint' if they have no common nodes. It assumes that
%% the clusters contain subtrees that belong to a global tree, and, as
%% such, neither the individual clusters nor the result cluster should
%% contain any loops, and no node should have several parents.
%% @end
%%--------------------------------------------------------------------
-spec merge_clusters(cluster(Node), cluster(Node)) -> {ok, cluster(Node)} | disjoint.
merge_clusters(#cluster{root_node = RootNode1,
node_set = Nodes1,
nodes_by_parent = NodesByParent1},
#cluster{root_node = RootNode2,
node_set = Nodes2,
nodes_by_parent = NodesByParent2}) ->
case compute_new_root(RootNode1, Nodes1, RootNode2, Nodes2) of
disjoint -> disjoint;
%% Union the member sets and the per-parent children sets; a node
%% present in both indices gets the union of its children.
{ok, NewRoot} -> {ok, #cluster{root_node = NewRoot,
node_set = sets:union(Nodes1, Nodes2),
nodes_by_parent = dict:merge(fun (_, V1, V2) ->
sets:union(V1, V2)
end,
NodesByParent1,
NodesByParent2)}}
end.
%%--------------------------------------------------------------------
%% @doc
%% Takes two root nodes (RootNode1 and RootNode2), and two sets of nodes,
%% (Nodes1 and Nodes2), and returns which root node is the absolute root
%% node. If there are no common nodes it returns the atom `disjoint'.
%% Throws {error, ...} when the overlap implies a loop or a node with
%% two parents.
%% @end
%%--------------------------------------------------------------------
-spec compute_new_root(RootNode1 :: Node, Nodes1 :: sets:set(Node),
RootNode2 :: Node, Nodes2 :: sets:set(Node)) ->
'disjoint' | {'ok', Node}.
compute_new_root(RootNode1, Nodes1, RootNode2, Nodes2) ->
%% Tuple is {non-root node sets disjoint?, Root1 in Nodes2?, Root2 in Nodes1?}.
%% NOTE(review): the combinations {false, true, false} and
%% {false, false, true} are not handled and would raise case_clause;
%% presumably they cannot occur given the callers' invariants -- confirm.
case {sets:is_disjoint(sets:del_element(RootNode1, Nodes1),
sets:del_element(RootNode2, Nodes2)),
sets:is_element(RootNode1, Nodes2),
sets:is_element(RootNode2, Nodes1)} of
{false, true, true} -> throw({error, "Found loop when merging trees"});
{false, false, false} -> throw({error, "Found node with two parents when merging trees"});
%% Each root is inside the other cluster: only legal when they are
%% the very same node.
{true, true, true} -> case RootNode1 =:= RootNode2 of
true -> {ok, RootNode1};
false -> throw({error, "Found loop when merging trees"})
end;
{true, false, true} -> {ok, RootNode1};
{true, true, false} -> {ok, RootNode2};
{true, false, false} -> disjoint
end.
%%--------------------------------------------------------------------
%% @doc
%% Removes the Node from the Cluster and returns the resulting subclusters
%% created (those that were held together by the removed node).
%% @end
%%--------------------------------------------------------------------
-spec remove_split(Node :: NodeType, Cluster :: cluster(NodeType)) -> [cluster(NodeType)].
%% Removing the root leaves only the children's subtrees.
remove_split(Node, #cluster{root_node = Node} = Cluster) ->
{_, SubClusters} = get_removed_nodes_and_subclusters(Node, Cluster),
SubClusters;
%% Removing an inner node also keeps the remainder of the original
%% cluster (with Node's whole subtree removed) as the first result.
remove_split(Node, Cluster) ->
{RemovedNodes, SubClusters} = get_removed_nodes_and_subclusters(Node, Cluster),
ParentCluster = remove_nodes_from_cluster(RemovedNodes,Cluster),
[ParentCluster|SubClusters].
%%--------------------------------------------------------------------
%% @doc
%% Expects a Node from the Cluster, and returns a set with the Node and
%% all its descendants, together with a list of the subclusters
%% (subtrees) that hang from the Node in the Cluster (one per child).
%% @end
%%--------------------------------------------------------------------
-spec get_removed_nodes_and_subclusters(Node :: NodeType, Cluster :: cluster(NodeType)) ->
{sets:set(NodeType), [cluster(NodeType)]}.
get_removed_nodes_and_subclusters(Node, Cluster) ->
Children = sets:to_list(get_children_set_for_node(Node, Cluster)),
%% For each child, gather the child plus its whole subtree and build
%% a subcluster rooted at that child.
NodesAndSubClusters = [begin
SubClusterNodes = sets:add_element(
Child, expand_node_downwards(Child, Cluster,
sets:new())),
{SubClusterNodes, make_sub_cluster(Child, SubClusterNodes, Cluster)}
end || Child <- Children],
{ListOfSets, SubClusters} = lists:unzip(NodesAndSubClusters),
%% The removed set is Node itself plus every node of every subtree.
RemovedNodes = sets:union([sets:from_list([Node])|ListOfSets]),
{RemovedNodes, SubClusters}.
%%--------------------------------------------------------------------
%% @doc
%% Returns a subcluster of Cluster with root RootNode and nodes Nodes.
%% RootNode is assumed to be the root of Nodes in the Cluster. And Nodes are
%% supposed to form a contiguous tree in the Cluster.
%% @end
%%--------------------------------------------------------------------
-spec make_sub_cluster(RootNode :: NodeType, Nodes :: sets:set(NodeType),
Cluster :: cluster(NodeType)) -> cluster(NodeType).
make_sub_cluster(Node, SubClusterNodes, #cluster{nodes_by_parent = NodesByParent}) ->
#cluster{root_node = Node,
%% Callers already include Node in SubClusterNodes; the extra
%% add_element is redundant but harmless (sets are idempotent).
node_set = sets:add_element(Node, SubClusterNodes),
nodes_by_parent = keep_nodes_in_dict(SubClusterNodes, NodesByParent)}.
%%--------------------------------------------------------------------
%% @doc
%% Returns the Cluster with every node in the Set removed, both from the
%% member set and from the parent index. The root node is kept as-is.
%% @end
%%--------------------------------------------------------------------
-spec remove_nodes_from_cluster(Set :: sets:set(NodeType),
                                Cluster :: cluster(NodeType)) ->
          cluster(NodeType) when
      NodeType :: term().
remove_nodes_from_cluster(RemovedNodes, Cluster) ->
    #cluster{node_set = Members, nodes_by_parent = ByParent} = Cluster,
    Cluster#cluster{node_set = sets:subtract(Members, RemovedNodes),
                    nodes_by_parent =
                        remove_nodes_from_dict(RemovedNodes, ByParent)}.
%%--------------------------------------------------------------------
%% @doc
%% Remove from the Dict all the nodes in the Set.
%% @end
%%--------------------------------------------------------------------
-spec remove_nodes_from_dict(Set :: sets:set(NodeType),
Dict :: dict:dict(NodeType, sets:set(NodeType))) ->
dict:dict(NodeType, sets:set(NodeType)) when
NodeType :: term().
remove_nodes_from_dict(RemovedNodes, NodesByParent) ->
dict:fold(fun (Key, Value, Dict) ->
case sets:is_element(Key, RemovedNodes) of
true -> Dict;
false -> dict:store(Key, sets:subtract(Value, RemovedNodes), Dict)
end
end, dict:new(), NodesByParent).
%%--------------------------------------------------------------------
%% @doc
%% Removes from the Dict all the nodes which are not in the Set.
%% @end
%%--------------------------------------------------------------------
-spec keep_nodes_in_dict(Set :: sets:set(NodeType),
Dict :: dict:dict(NodeType, sets:set(NodeType))) ->
dict:dict(NodeType, sets:set(NodeType)) when
NodeType :: term().
keep_nodes_in_dict(KeptNodes, NodesByParent) ->
dict:fold(fun (Key, Value, Dict) ->
case sets:is_element(Key, KeptNodes) of
false -> Dict;
true -> dict:store(Key, sets:intersection(Value, KeptNodes), Dict)
end
end, dict:new(), NodesByParent).
%%--------------------------------------------------------------------
%% @doc
%% Adds all the descendants of the Node provided in the Cluster to the Set.
%% @end
%%--------------------------------------------------------------------
-spec expand_node_downwards(Node :: NodeType, Cluster :: cluster(NodeType), Set :: sets:set(NodeType)) ->
sets:set(NodeType).
expand_node_downwards(Node, Cluster, Acc) ->
sets:fold(fun (Child, InAcc) ->
expand_node_downwards(Child, Cluster, sets:add_element(Child, InAcc))
end, Acc, get_children_set_for_node(Node, Cluster)).
%%--------------------------------------------------------------------
%% @doc
%% Returns the immediate children nodes inside the cluster for the Node provided.
%% @end
%%--------------------------------------------------------------------
-spec get_children_set_for_node(Node :: NodeType, Cluster :: cluster(NodeType)) -> sets:set(NodeType).
get_children_set_for_node(Node, #cluster{nodes_by_parent = NodesByParent}) ->
case dict:find(Node, NodesByParent) of
{ok, Set} -> Set;
error -> sets:new()
end.
%%--------------------------------------------------------------------
%% @doc
%% Creates an indirection "fake" cluster.
%% @end
%%--------------------------------------------------------------------
-spec make_indirection_cluster() -> {reference(), cluster(_)}.
make_indirection_cluster() -> Ref = make_ref(),
{Ref, {indirection_cluster, Ref}}.
%%--------------------------------------------------------------------
%% @doc
%% Returns whether the cluster is an indirection "fake" cluster or not.
%% @end
%%--------------------------------------------------------------------
-spec is_indirection_cluster(cluster(_)) -> 'false' | {'true', reference()}.
is_indirection_cluster({indirection_cluster, Ref}) -> {true, Ref};
is_indirection_cluster(_) -> false.
% Functions for debugging
%%--------------------------------------------------------------------
%% @doc
%% Returns the number of nodes that the cluster has.
%% @end
%%--------------------------------------------------------------------
-spec size(cluster(_)) -> non_neg_integer().
size(#cluster{node_set = Nodes}) -> sets:size(Nodes).
%%--------------------------------------------------------------------
%% @doc
%% Returns a more readable version of custers. This is done by
%% transforming dicts and sets into lists.
%% @end
%%--------------------------------------------------------------------
-spec show_cluster(cluster(_)) -> #{}.
show_cluster(Clus) -> show_cluster(fun (X) -> X end, Clus).
%%--------------------------------------------------------------------
%% @doc
%% Returns a more readable version of custers and takes a function
%% that is suppoused to make the nodes in the clusters more readable.
%% This is done by transforming dicts and sets into lists and by
%% applying the supplied function to the nodes.
%% @end
%%--------------------------------------------------------------------
-spec show_cluster(fun((NodeType) -> any()), cluster(NodeType)) -> #{}.
show_cluster(Fun, #cluster{root_node = Node,
node_set = Set,
nodes_by_parent = Dict}) ->
#{root_node => Fun(Node),
node_set => lists:map(Fun, sets:to_list(Set)),
nodes_by_parent => [{Fun(Key), lists:map(Fun, sets:to_list(Value))}
|| {Key, Value} <- dict:to_list(Dict)]}. | src/behaviour_extraction/cluster.erl | 0.505859 | 0.560162 | cluster.erl | starcoder |
-module(rope_tree).
-include("rope_tree.hrl").
-export([ to_iolist/1
, to_string/1
, nth/2
, length/1
, concatenate/2
, split/2
, insert_string/3
, insert_character/3
, delete_seq/3
, delete_character/2
, rotate_left/1
, rotate_right/1
, is_balanced/1
, is_balanced/2
, shallow_balance/1
, depth/1
, rebalance/1
, rebalance/2
, print/1
]).
to_iolist(?NODE(_W, Left, Right)) ->
[to_iolist(Left) | to_iolist(Right)];
to_iolist(String) ->
String.
to_string(Tree) ->
lists:flatten(to_iolist(Tree)).
nth(?NODE(Weight, _L, Right), N) when Weight < N ->
nth(Right, N - Weight);
nth(?NODE(_W, Left, _R), N) ->
nth(Left, N);
nth(String, N) ->
lists:nth(N, String).
length(?NODE(Weight, _L, Right)) ->
Weight + rope_tree:length(Right);
length(String) ->
erlang:length(String).
concatenate(Tree1, Tree2) ->
?NODE( rope_tree:length(Tree1)
, Tree1
, Tree2
).
split(T, 0) ->
{"", T};
split(?NODE(W, Left, Right), N) when N < W ->
%% Weight is larger than N: split left side
{T1, T2} = split(Left, N),
{?NODE(N, T1, ""), concatenate(T2, Right)};
split(?NODE(W, Left, Right), N) ->
%% Weight is smaller than or equal to N: split right side
{T1, T2} = split(Right, N - W),
{?NODE(W, Left, T1), T2};
split(String, N) ->
lists:split(N, String).
insert_string(Tree, N, String) ->
{T1, T2} = split(Tree, N),
concatenate(T1, concatenate(String, T2)).
insert_character(Tree, N, Character) ->
insert_string(Tree, N, [Character]).
delete_seq(Tree, Start, End) ->
{T1, T2} = split(Tree, End),
{T0, _Deleted} = split(T1, Start),
concatenate(T0, T2).
delete_character(Tree, N) ->
delete_seq(Tree, N - 1, N).
rotate_left(?NODE(AW, A, ?NODE(BW, B, C))) ->
?NODE(AW + BW, ?NODE(AW, A, B), C).
rotate_right(?NODE(ABW, ?NODE(AW, A, B), C)) ->
?NODE(AW, A, ?NODE(ABW - AW, B, C)).
is_balanced(Tree) ->
is_balanced(Tree, 1).
is_balanced(Node, Strictness) ->
{Balance, _Depth} = balance(Node, Strictness),
Balance.
shallow_balance(?NODE(_W, Left, Right)) ->
depth(Left) - depth(Right);
shallow_balance(_String) ->
0.
depth(?NODE(_W, Left, Right)) ->
1 + max(depth(Left), depth(Right));
depth(_String) ->
0.
rebalance(Tree) ->
rebalance(Tree, 1).
rebalance(?NODE(_W, Left, Right), Strictness) ->
{LT, LD, LB} = rebalance(Left, Strictness),
{RT, RD, RB} = rebalance(Right, Strictness),
Balance = LD - RD,
if Balance > Strictness ->
%% Node is left-heavy
%% IF left child is right-heavy THEN
%% - Left-rotate it
%% FINALLY
%% - Right-rotate the node
RotatedLT =
if LB < -Strictness -> rotate_left(LT);
true -> LT
end,
Tree = rotate_right(concatenate(RotatedLT, RT)),
Depth = depth(Tree),
NewBalance = shallow_balance(Tree),
{Tree, Depth, NewBalance};
Balance < -Strictness ->
%% Node is right-heavy
%% IF right child is left-heavy THEN
%% - Right-rotate it
%% FINALLY
%% - Left-rotate the node
RotatedRT =
if RB > Strictness -> rotate_right(RT);
true -> RT
end,
Tree = rotate_left(concatenate(LT, RotatedRT)),
Depth = depth(Tree),
NewBalance = shallow_balance(Tree),
{Tree, Depth, NewBalance};
true ->
%% Node is balanced
Tree = concatenate(LT, RT),
Depth = 1 + max(LD, RD),
{Tree, Depth, Balance}
end;
rebalance(String, _Strictness) ->
{String, 0, 0}.
print(Tree) ->
print(Tree, 0).
print(?NODE(W, Left, Right), Level) ->
io:format("~s~w~n", [lists:duplicate(Level, $ ), W]),
print(Left, Level + 2),
print(Right, Level + 2);
print(String, Level) ->
io:format("~s~w~n", [lists:duplicate(Level, $ ), String]).
%%%
%%% Internal functions
%%%
balance(?NODE(_W, Left, Right), Strictness) ->
{LB, LD} = balance(Left, Strictness),
{RB, RD} = balance(Right, Strictness),
Balanced = LB and RB andalso Strictness >= abs(LD - RD),
Depth = 1 + max(LD, RD),
{Balanced, Depth};
balance(_String, _Strictness) ->
{true, 0}. | src/rope_tree.erl | 0.514644 | 0.433742 | rope_tree.erl | starcoder |
%%% Functions for working with continuous genomes
-module(continuous).
-export([gaussian_mutate/2, gaussian_mutate/3, gaussian_mutate/4]).
-export([random_genome/1]).
-type continuous_genome() :: list(float()).
-type range() :: {number(), number()}.
-type bounds() :: list(range()).
-spec gaussian_mutate(float()) -> float().
gaussian_mutate(G) ->
Mutation = rand:normal(),
G + Mutation.
-spec clipped_gaussian_mutate(float(), range()) -> float().
clipped_gaussian_mutate(G, {Min, Max}) ->
Mutation = gaussian_mutate(G),
if Mutation < Min -> Min;
Mutation > Max -> Max;
(Mutation >= Min) and (Mutation =< Max) -> Mutation
end.
%% Mutate using a normal distribution with mean 0 and standard deviation 1
-spec gaussian_mutate(float(), continuous_genome()) -> continuous_genome().
gaussian_mutate(_, []) -> [];
gaussian_mutate(Probability, [G|Genome]) ->
R = rand:uniform(),
if R < Probability ->
[gaussian_mutate(G)|gaussian_mutate(Probability, Genome)];
R >= Probability ->
[G|gaussian_mutate(Probability, Genome)]
end.
%%% mutate using a normal distribution. if the third argument is a
%%% list of bounds() then the mutated genes are clipped to stay within
%%% the corresponding bounds. The list should specify one pair of
%%% bounds for each 'gene.' bounds should be in the sameorder as the
%%% genes.
%%%
%%% If the third argument is a float() then it is used as the standard
%%% deviation for the mutation.
-spec gaussian_mutate(float(), continuous_genome(), bounds() | float())
-> continuous_genome().
gaussian_mutate(_, [], _) -> [];
gaussian_mutate(Probability, [G|Genome], [Range|Bounds]) ->
R = rand:uniform(),
if R < Probability ->
[clipped_gaussian_mutate(G,Range)
|gaussian_mutate(Probability, Genome, Bounds)];
R >= Probability ->
[G|gaussian_mutate(Probability, Genome, Bounds)]
end;
gaussian_mutate(Probability, [G|Genome], StdDeviation) ->
R = rand:uniform(),
if R < Probability ->
M = rand:normal() * StdDeviation,
[G + M|gaussian_mutate(Probability, Genome, StdDeviation)];
R >= Probability ->
[G|gaussian_mutate(Probability, Genome, StdDeviation)]
end.
%%% Mutate a genome by adding a random value X ~ N(0, StdDev). All
%%% mutations are clipped to keep the resulting genes in the ranges
%%% specified.
-spec gaussian_mutate(float(), continuous_genome(), bounds(), float())
-> continuous_genome().
gaussian_mutate(_, [], _, _) -> [];
gaussian_mutate(Probability, [G|Genome],
[{Min, Max}|Bounds], StdDeviation) ->
R = rand:uniform(),
if R < Probability ->
M = rand:normal() * StdDeviation,
[if G + M < Min -> Min;
G + M > Max -> Max;
(G + M >= Min) and (G + M =< Max) -> G+M
end | gaussian_mutate(Probability, Genome, Bounds, StdDeviation)];
R >= Probability ->
[G|gaussian_mutate(Probability, Genome, Bounds, StdDeviation)]
end.
%%% Returns a random genome
%%%
%%% If the argument is an integer N then a genome of length N with
%%% unbounded genes (ie. could be arbitrarily large or small).
%%% If the argument is a list of bounds then values are uniformly
%%% distributed between the bounds for each gene.
%%% XXX: this produces only positive valued genes without bounds.
%%% if the lower bound is negative then it is possible to have
%%% negative bounds.
-spec random_genome( integer() | list(bounds()) )
-> continuous_genome().
random_genome(0) -> [];
random_genome([]) -> [];
random_genome([{Min, Max}|Bounds]) ->
[Min + (Max - Min) * rand:uniform() | random_genome(Bounds)];
random_genome(GenomeLength) ->
[rand:uniform()/rand:uniform() | random_genome(GenomeLength-1)]. | src/continuous.erl | 0.603348 | 0.712739 | continuous.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%% Technische Universität Kaiserslautern, Germany
%% Université Pierre et Marie Curie / Sorbonne-Université, France
%% Universidade NOVA de Lisboa, Portugal
%% Université catholique de Louvain (UCL), Belgique
%% INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
-module(meta_data_sender).
-behaviour(gen_statem).
-include("antidote.hrl").
-ifdef(EUNIT).
-include_lib("eunit/include/eunit.hrl").
-define(GET_NODE_LIST(), get_node_list_t()).
-define(GET_NODE_AND_PARTITION_LIST(), get_node_and_partition_list_t()).
-else.
-define(GET_NODE_LIST(), get_node_list()).
-define(GET_NODE_AND_PARTITION_LIST(), get_node_and_partition_list()).
-endif.
-export([
start_link/1,
start/1,
put_meta/3,
get_node_list/0,
get_node_and_partition_list/0,
get_merged_data/2,
remove_partition/2,
send_meta_data/3,
callback_mode/0
]).
%% Callbacks
-export([
init/1,
code_change/4,
terminate/3
]).
-record(state, {
last_result :: term(),
name :: atom(),
should_check_nodes :: boolean()
}).
-type state() :: #state{}.
%% ===================================================================
%% Public API
%% ===================================================================
%% This state machine is responsible for sending meta-data that has been collected
%% on a physical node to all other physical nodes in the riak ring.
%% There will be one instance of this state machine running on each physical machine.
%% During execution of the system, vnodes may be continually writing to the meta data item.
%%
%% Periodically, as defined by META_DATA_SLEEP in antidote.hrl, it will trigger
%% itself to send the meta-data.
%% This will cause the meta-data to be broadcast to all other physical nodes in
%% the cluster. Before sending the meta-data, it calls the merge function on the
%% meta-data stored by each vnode located at this partition.
%%
%% Each partition can store meta-data by calling the update function.
%% To synchronize meta-data, first the vnodes data for each physical node is merged,
%% then is broadcast, then the physical nodes meta-data is merged. This way
%% network traffic is reduced.
%%
%% Once the data is fully merged, it does not immediately replace the old merged
%% data. Instead, the UpdateFunction is called on each entry of the new and old
%% versions. It should return true if the new value should be kept, false otherwise.
%% The completely merged data can then be read using the get_merged_data function.
%%
%% InitialLocal and InitialMerged are used to prepopulate the meta-data for the vnode
%% and for the merged data.
%%
%% Note that there is one of these state machines per physical node. Meta-data is only
%% stored in memory. Do not use this if you want to persist data safely, it
%% was designed with light heart-beat type meta-data in mind.
%% Start meta data sender
-spec start(atom()) -> ok.
start(Name) ->
gen_statem:call(get_name(Name), start).
-spec start_link(atom()) -> {ok, pid()} | ignore | {error, term()}.
start_link(Name) ->
gen_statem:start_link({local, get_name(Name)}, ?MODULE, [Name], []).
-spec get_name(atom()) -> atom().
get_name(Name) ->
list_to_atom(atom_to_list(Name) ++ atom_to_list(?MODULE)).
%% Insert meta data for some partition
-spec put_meta(atom(), partition_id(), term()) -> ok.
put_meta(Name, Partition, NewData) ->
true = antidote_ets_meta_data:insert_meta_data(Name, Partition, NewData),
ok.
%% Remove meta data for partition
-spec remove_partition(atom(), partition_id()) -> ok.
remove_partition(Name, Partition) ->
true = antidote_ets_meta_data:delete_meta_data_partition(Name, Partition),
ok.
%% Get merged meta data
-spec get_merged_data(atom(), X) -> X.
get_merged_data(Name, Default) ->
antidote_ets_meta_data:get_meta_data_sender_merged_data(Name, Default).
%% ===================================================================
%% gen_statem callbacks
%% ===================================================================
-spec init([term()]) -> {ok, any(), state()}.
init([Name]) ->
_MetaTable = antidote_ets_meta_data:create_meta_data_table(Name),
_StableTable = antidote_ets_meta_data:create_meta_data_sender_table(Name),
true = antidote_ets_meta_data:insert_meta_data_sender_merged_data(Name, Name:initial_merged()),
{ok, send_meta_data, #state{
last_result = Name:initial_local(),
name = Name,
should_check_nodes = true
}}.
send_meta_data({call, Sender}, start, State) ->
{next_state, send_meta_data, State#state{should_check_nodes = true}, [
{reply, Sender, ok}, {state_timeout, ?META_DATA_SLEEP, timeout}
]};
%% internal timeout transition
send_meta_data(state_timeout, timeout, State) ->
send_meta_data(cast, timeout, State);
send_meta_data(
cast,
timeout,
State = #state{
last_result = LastResult,
name = Name,
should_check_nodes = CheckNodes
}
) ->
{WillChange, Data} = get_merged_meta_data(Name, CheckNodes),
NodeList = ?GET_NODE_LIST(),
LocalMerged = maps:get(local_merged, Data),
MyNode = node(),
ok = lists:foreach(
fun(Node) ->
ok = meta_data_manager:send_meta_data(Name, Node, MyNode, LocalMerged)
end,
NodeList
),
MergedDict = Name:merge(Data),
{HasChanged, NewResult} = update_stable(LastResult, MergedDict, Name),
Store =
case HasChanged of
true ->
%% update changed counter for this metadata type
%?STATS({metadata_updated, Name}),
true = antidote_ets_meta_data:insert_meta_data_sender_merged_data(Name, NewResult),
NewResult;
false ->
LastResult
end,
{next_state, send_meta_data, State#state{last_result = Store, should_check_nodes = WillChange},
[{state_timeout, ?META_DATA_SLEEP, timeout}]}.
callback_mode() -> state_functions.
code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}.
terminate(_Reason, _SN, _SD) -> ok.
%% ===================================================================
%% Private functions
%% ===================================================================
%% @private
-spec remote_table_ready(atom()) -> boolean().
remote_table_ready(Name) ->
antidote_ets_meta_data:remote_table_ready(Name).
%% @private
-spec update(
atom(), map(), [atom()], fun((atom(), atom(), T) -> any()), fun((atom(), atom()) -> any()), T
) -> map().
update(Name, MetaData, Entries, AddFun, RemoveFun, Initial) ->
{NewMetaData, NodeErase} = lists:foldl(
fun(NodeId, {Acc, Acc2}) ->
AccNew =
case maps:find(NodeId, MetaData) of
{ok, Val} ->
maps:put(NodeId, Val, Acc);
error ->
%% Put a record in the ets table because there is none for this node, yet
AddFun(Name, NodeId, Initial),
maps:put(NodeId, undefined, Acc)
end,
Acc2New = maps:remove(NodeId, Acc2),
{AccNew, Acc2New}
end,
{maps:new(), MetaData},
Entries
),
%% Remove entries that no longer exist
_ = maps:map(fun(NodeId, _Val) -> ok = RemoveFun(Name, NodeId) end, NodeErase),
NewMetaData.
%% @private
-spec get_merged_meta_data(atom(), boolean()) -> {boolean(), term()} | not_ready.
get_merged_meta_data(Name, CheckNodes) ->
case remote_table_ready(Name) of
false ->
not_ready;
true ->
{NodeList, PartitionList, WillChange} = ?GET_NODE_AND_PARTITION_LIST(),
Remote = antidote_ets_meta_data:get_remote_meta_data_as_map(Name),
Local = antidote_ets_meta_data:get_meta_data_as_map(Name),
%% Be sure that you are only checking active nodes
%% This isn't the most efficient way to do this because are checking the list
%% of nodes and partitions every time to see if any have been removed/added
%% This is only done if the ring is expected to change, but should be done
%% differently (check comment in get_node_and_partition_list())
{NewRemote, NewLocal} =
case CheckNodes of
true ->
{
update(
Name,
Remote,
NodeList,
fun meta_data_manager:add_node/3,
fun meta_data_manager:remove_node/2,
undefined
),
update(
Name,
Local,
PartitionList,
fun put_meta/3,
fun remove_partition/2,
Name:default()
)
};
false ->
{Remote, Local}
end,
LocalMerged = Name:merge(NewLocal),
{WillChange, maps:put(local_merged, LocalMerged, NewRemote)}
end.
%% @private
-spec update_stable(term(), term(), atom()) -> {boolean(), term()}.
update_stable(LastResult, NewData, Name) ->
Name:fold(
fun(DcId, Time, {Bool, Acc}) ->
Last = Name:lookup(DcId, LastResult),
case Name:update(Last, Time) of
true ->
{true, Name:store(DcId, Time, Acc)};
false ->
{Bool, Acc}
end
end,
{false, LastResult},
NewData
).
%% @private
-spec get_node_list() -> [node()].
get_node_list() ->
{ok, Ring} = riak_core_ring_manager:get_my_ring(),
get_node_list(Ring).
%% @private
get_node_list(Ring) ->
MyNode = node(),
lists:delete(MyNode, riak_core_ring:ready_members(Ring)).
%% @private
-spec get_node_and_partition_list() -> {[node()], [partition_id()], true}.
get_node_and_partition_list() ->
{ok, Ring} = riak_core_ring_manager:get_my_ring(),
NodeList = get_node_list(Ring),
PartitionList = riak_core_ring:my_indices(Ring),
%% TODO Deciding if the nodes might change by checking the is_resizing function is not
%% safe can cause inconsistencies under concurrency, so this should
%% be done differently
%% Resize = riak_core_ring:is_resizing(Ring) or riak_core_ring:is_post_resize(Ring) or riak_core_ring:is_resize_complete(Ring),
Resize = true,
{NodeList, PartitionList, Resize}.
-ifdef(EUNIT).
meta_data_sender_test_() ->
{foreach, fun start/0, fun stop/1, [
fun empty_test_/1,
fun merge_test_/1,
fun merge_additional_test_/1,
fun missing_test_/1,
fun merge_node_change_test_/1,
fun merge_node_change_additional_test_/1,
fun merge_node_delete_test_/1,
fun merge_node_delete_another_test_/1
]}.
start() ->
MetaType = stable_time_functions,
_Table = antidote_ets_meta_data:create_meta_data_sender_table(MetaType),
_Table2 = antidote_ets_meta_data:create_meta_data_table(MetaType),
_Table3 = ets:new(node_table, [set, named_table, public]),
_Table4 = antidote_ets_meta_data:create_remote_meta_data_table(MetaType),
true = antidote_ets_meta_data:insert_meta_data_sender_merged_data(
MetaType, MetaType:initial_merged()
),
MetaType.
stop(MetaType) ->
true = antidote_ets_meta_data:delete_meta_data_table(MetaType),
true = antidote_ets_meta_data:delete_meta_data_sender_table(MetaType),
true = ets:delete(node_table),
true = antidote_ets_meta_data:delete_remote_meta_data_table(MetaType),
ok.
-spec set_nodes_and_partitions_and_willchange([node()], [partition_id()], boolean()) -> ok.
set_nodes_and_partitions_and_willchange(Nodes, Partitions, WillChange) ->
true = ets:insert(node_table, {nodes, Nodes}),
true = ets:insert(node_table, {partitions, Partitions}),
true = ets:insert(node_table, {willchange, WillChange}),
ok.
%% Basic empty test
empty_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2, p3], false),
put_meta(MetaType, p1, vectorclock:new()),
put_meta(MetaType, p2, vectorclock:new()),
put_meta(MetaType, p3, vectorclock:new()),
{false, Meta} = get_merged_meta_data(MetaType, false),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual([], vectorclock:to_list(LocalMerged)).
%% This test checks to make sure that merging is done correctly for multiple partitions
merge_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2], false),
%ets:delete_all_objects(get_table_name(MetaType, ?META_TABLE_NAME)),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 5}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 10}])),
{false, Meta} = get_merged_meta_data(MetaType, false),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([{dc1, 5}, {dc2, 5}]), LocalMerged).
merge_additional_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1, n2], [p1, p2, p3], false),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 5}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 10}])),
put_meta(MetaType, p3, vectorclock:from_list([{dc1, 20}, {dc2, 20}])),
{false, Meta} = get_merged_meta_data(MetaType, false),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([{dc1, 5}, {dc2, 5}]), LocalMerged).
%% Be sure that when you are missing a partition in your meta_data that you get a 0 value for the vectorclock.
missing_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2, p3], false),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}])),
put_meta(MetaType, p3, vectorclock:from_list([{dc1, 10}])),
remove_partition(MetaType, p2),
{false, Meta} = get_merged_meta_data(MetaType, true),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([]), LocalMerged).
%% This test checks to make sure that merging is done correctly for multiple partitions
%% when you have a node that is removed from the cluster.
merge_node_change_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2], true),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 5}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 10}])),
{true, Meta} = get_merged_meta_data(MetaType, false),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([{dc1, 5}, {dc2, 5}]), LocalMerged).
merge_node_change_additional_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1, n2], [p1, p3], true),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 10}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 5}])),
put_meta(MetaType, p3, vectorclock:from_list([{dc1, 20}, {dc2, 20}])),
{true, Meta} = get_merged_meta_data(MetaType, true),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([{dc1, 10}, {dc2, 10}]), LocalMerged).
merge_node_delete_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2], true),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 5}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 10}])),
put_meta(MetaType, p3, vectorclock:from_list([{dc1, 0}, {dc2, 0}])),
{true, Meta} = get_merged_meta_data(MetaType, false),
LocalMerged = maps:get(local_merged, Meta),
?_assertEqual(vectorclock:from_list([]), LocalMerged).
merge_node_delete_another_test_(MetaType) ->
set_nodes_and_partitions_and_willchange([n1], [p1, p2], true),
put_meta(MetaType, p1, vectorclock:from_list([{dc1, 10}, {dc2, 5}])),
put_meta(MetaType, p2, vectorclock:from_list([{dc1, 5}, {dc2, 10}])),
put_meta(MetaType, p3, vectorclock:from_list([{dc1, 0}, {dc2, 0}])),
{true, Meta2} = get_merged_meta_data(MetaType, true),
LocalMerged2 = maps:get(local_merged, Meta2),
?_assertEqual(vectorclock:from_list([{dc1, 5}, {dc2, 5}]), LocalMerged2).
get_node_list_t() ->
[{nodes, Nodes}] = ets:lookup(node_table, nodes),
Nodes.
get_node_and_partition_list_t() ->
[{willchange, WillChange}] = ets:lookup(node_table, willchange),
[{nodes, Nodes}] = ets:lookup(node_table, nodes),
[{partitions, Partitions}] = ets:lookup(node_table, partitions),
{Nodes, Partitions, WillChange}.
-endif. | apps/antidote/src/meta_data_sender.erl | 0.519521 | 0.459561 | meta_data_sender.erl | starcoder |
% @doc GRiSP General Purpose Input/Output (GPIO) API.
%
% General Purpose Input / Output (GPIO) is used to control digital signals on a
% pin. The digital values `0' and `1' correspond to a low or high voltage
% respectively. On GRiSP the voltage for GPIO pins is 3.3V.
%
% A pin can be controlled either as an output pin or an input pin. For an output
% pin, it is possible to set or get the current value. For an input pin, it is
% possible to get the current value.
%
% === Pin Mappings ===
%
% For PMOD connectors, the number column (<b>#</b>) maps to the respective PMOD
% pin (see <a href="#figure_1">Figure 1</a>).
%
% <table border="1" cellpadding="8">
% <caption>GRiSP 2 Pin Mappings</caption>
% <tr>
% <th rowspan="2">ID</th>
% <th colspan="4">Mapping</th>
% <th rowspan="2">Direction</th>
% <th rowspan="2">Description</th>
% </tr>
% <tr>
% <th>Slot</th>
% <th>Type</th>
% <th>#</th>
% <th>Schematic</th>
% </tr>
% <tr><td>`gpio1_1'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>1</td> <td>X1404.1</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_2'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>2</td> <td>X1404.2</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_3'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>3</td> <td>X1404.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_4'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>4</td> <td>X1404.4</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_7'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>7</td> <td>X1404.7</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_8'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>8</td> <td>X1404.8</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_9'</td> <td>GPIO1</td> <td>PMOD 1A</td> <td>9</td> <td>X1404.9</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_1_3'</td> <td>GPIO_1</td> <td>Generic</td> <td></td> <td>X1300.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_1_4'</td> <td>GPIO_1</td> <td>Generic</td> <td></td> <td>X1300.4</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_1_5'</td> <td>GPIO_1</td> <td>Generic</td> <td></td> <td>X1300.5</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_1_6'</td> <td>GPIO_1</td> <td>Generic</td> <td></td> <td>X1300.6</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_3'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_4'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.4</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_5'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.5</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_6'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.6</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_7'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.7</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_8'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.8</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_9'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.9</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_10'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.10</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_11'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.11</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_12'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.12</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_13'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.13</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio_2_14'</td> <td>GPIO_2/4</td> <td>Generic</td> <td></td> <td>X1301.14</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`led1_r'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1red</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led1_g'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1green</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led1_b'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1blue</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_r'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2red</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_g'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2green</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_b'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2blue</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`jumper_1'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER1</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_2'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER2</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_3'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER3</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_4'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER4</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_5'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER5</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tfoot><tr><td colspan="7"><b>Warning!</b> Reserved pins should only be carefully used if their drivers are not in use</td></tr></tfoot>
% </table><br/>
% <table border="1" cellpadding="8">
% <caption>GRiSP 1 Pin Mappings</caption>
% <tr>
% <th rowspan="2">ID</th>
% <th colspan="4">Mapping</th>
% <th rowspan="2">Direction</th>
% <th rowspan="2">Description</th>
% </tr>
% <tr>
% <th>Slot</th>
% <th>Type</th>
% <th>#</th>
% <th>Schematic</th>
% </tr>
% <tr><td>`gpio1_1'</td> <td>GPIO1</td> <td>PMOD 1</td> <td>1</td> <td>X502.1</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_2'</td> <td>GPIO1</td> <td>PMOD 1</td> <td>2</td> <td>X502.2</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_3'</td> <td>GPIO1</td> <td>PMOD 1</td> <td>3</td> <td>X502.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio1_4'</td> <td>GPIO1</td> <td>PMOD 1</td> <td>4</td> <td>X502.4</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio2_1'</td> <td>GPIO2</td> <td>PMOD 1</td> <td>1</td> <td>X503.1</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio2_2'</td> <td>GPIO2</td> <td>PMOD 1</td> <td>2</td> <td>X503.2</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio2_3'</td> <td>GPIO2</td> <td>PMOD 1</td> <td>3</td> <td>X503.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`gpio2_4'</td> <td>GPIO2</td> <td>PMOD 1</td> <td>4</td> <td>X503.4</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`led1_r'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1red</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led1_g'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1green</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led1_b'</td> <td>LED 1</td> <td>LED</td> <td></td> <td>RGB1blue</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_r'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2red</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_g'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2green</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`led2_b'</td> <td>LED 2</td> <td>LED</td> <td></td> <td>RGB2blue</td> <td>Out</td> <td><em>Reserved by LED driver</em></td></tr>
% <tr><td>`jumper_1'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER1</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_2'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER2</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_3'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER3</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_4'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER4</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`jumper_5'</td> <td>Mode</td> <td>Jumper</td> <td></td> <td>JUMPER5</td> <td>In</td> <td>Mode Switch Jumper State</td></tr>
% <tr><td>`spi1_pin7'</td> <td>SPI1</td> <td>PMOD 2A</td> <td>7</td> <td>X501.7</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`spi1_pin8'</td> <td>SPI1</td> <td>PMOD 2A</td> <td>8</td> <td>X501.8</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`spi1_pin9'</td> <td>SPI1</td> <td>PMOD 2A</td> <td>9</td> <td>X501.9</td> <td>In/Out</td> <td><em>Reserved by SPI driver</em></td></tr>
% <tr><td>`spi1_pin10'</td> <td>SPI1</td> <td>PMOD 2A</td> <td>10</td> <td>X501.10</td> <td>In/Out</td> <td><em>Reserved by SPI driver</em></td></tr>
% <tr><td>`spi1_pin1'</td> <td>SPI1</td> <td>PMOD 2A</td> <td>1</td> <td>X501.1</td> <td>In/Out</td> <td><em>Reserved by SPI driver</em></td></tr>
% <tr><td>`spi2_pin1'</td> <td>SPI2</td> <td>PMOD 2</td> <td>1</td> <td>X509.1</td> <td>In/Out</td> <td><em>Reserved by SPI driver</em></td></tr>
% <tr><td>`uart_1_cts'</td> <td>UART</td> <td>PMOD 3</td> <td>1</td> <td>X508.1</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`uart_2_txd'</td> <td>UART</td> <td>PMOD 3</td> <td>2</td> <td>X508.2</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`uart_3_rxd'</td> <td>UART</td> <td>PMOD 3</td> <td>3</td> <td>X508.3</td> <td>In/Out</td> <td></td></tr>
% <tr><td>`uart_4_rts'</td> <td>UART</td> <td>PMOD 3</td> <td>4</td> <td>X508.4</td> <td>In/Out</td> <td></td></tr>
% <tfoot><tr><td colspan="7"><b>Warning!</b> Reserved pins should only be carefully used if their drivers are not in use</td></tr></tfoot>
% </table><br/>
%
% === PMOD Pin Numbers ===
%
% <figure id="figure_1">
% <img src="images/pin-mapping.svg" width="700px" alt="PMOD connectors as seen
% from the side of a GRiSP board with number mappings"/>
% <figcaption><em>Figure 1. PMOD connectors as seen from the side of a GRiSP
% board with number mappings</em></figcaption>
% </figure>
%
% PMOD Type A consists of:
% <ul>
% <li>4 × data, pins #1-4</li>
% <li>1 × ground, pin #5</li>
% <li>1 × 3.3V power, pin #6</li>
% </ul>
% PMOD Type B consists of:
% <ul>
% <li>8 × data, pins #1-4 and #7-10</li>
% <li>1 × ground, pins #5 and #11</li>
% <li>1 × 3.3V power, pins #6 and #12</li>
% </ul>
-module(grisp_gpio).
-include("grisp_nif.hrl").
% API
-export([open/1]).
-export([open/2]).
-export([set/2]).
-export([get/1]).
% Callbacks
-export([on_load/0]).
% Load the NIF as soon as this module's code is loaded.
-on_load(on_load/0).
% Macros
% Default open options: output mode with an initial value of 0.
-define(DEFAULT_OPTS, #{mode => {output, 0}}).
% Attributes
% get/1 is exported by this module, so suppress auto-import of erlang:get/1.
-compile({no_auto_import, [get/1]}).
%--- Types ---------------------------------------------------------------------
-type pin() :: atom().  % symbolic pin name; see pin/2 for the valid names
-type opts() :: map().  % open options; only the `mode' key is read (see open/2)
-opaque ref() :: reference().  % reference to an opened pin, produced by open/1,2
-type value() :: 0 | 1.  % logical pin level
-export_type([pin/0]).
-export_type([opts/0]).
-export_type([ref/0]).
-export_type([value/0]).
%--- API -----------------------------------------------------------------------
% @equiv open(Pin, #{})
% Opens the pin with the default options (output mode, initial value 0 —
% see ?DEFAULT_OPTS applied in open/2).
-spec open(pin()) -> ref().
open(Pin) -> open(Pin, #{}).
% @doc Creates a reference to a GPIO pin.
%
% The user options are merged over `?DEFAULT_OPTS'; only the `mode' key of
% the merged map is used. The symbolic pin name is translated into
% platform-specific attributes by pin/1 before being handed to the NIF.
%
% === Example ===
% Open the GPIO pin of the red component of LED 1 as an output pin with initial
% value of `0':
% ```
% 1> grisp_gpio:open(led1_r, #{mode => {output, 0}}).
% #Ref<0.2691682867.116916226.176944>
% '''
-spec open(pin(), opts()) -> ref().
open(Pin, UserOpts) ->
    #{mode := Mode} = maps:merge(?DEFAULT_OPTS, UserOpts),
    gpio_open_nif(pin(Pin), Mode).
% @doc Sets the current value of an output pin.
%
% The guard only checks that `Value' is an integer; integers other than the
% documented `0' and `1' are passed through to the NIF unchecked.
%
% === Example ===
% Turn off the red component of LED 1:
% ```
% 1> LED1R = grisp_gpio:open(led1_r, #{mode => {output, 0}}).
% #Ref<0.2691682867.116916226.176944>
% 2> grisp_gpio:set(LED1R, 0).
% ok
% '''
% Turn on the red component of LED 1:
% ```
% 3> grisp_gpio:set(LED1R, 1).
% ok
% '''
-spec set(ref(), value()) -> ok.
set(Pin, Value) when is_integer(Value) -> gpio_set_nif(Pin, Value).
% @doc Returns the current value of a pin.
%
% Returns the actual value for input pins or the currently set value for output
% pins.
%
% === Examples ===
% To see whether the red component of LED 1 is enabled:
% ```
% 1> LED1R = grisp_gpio:open(led1_r, #{mode => {output, 0}}).
% #Ref<0.2691682867.116916226.176944>
% 2> grisp_gpio:get(LED1R).
% 0
% 3> grisp_gpio:set(LED1R, 1).
% ok
% 4> grisp_gpio:get(LED1R).
% 1
% '''
-spec get(ref()) -> value().
get(Pin) -> gpio_get_nif(Pin).
%--- Callbacks -----------------------------------------------------------------
% @private
% Loads the NIF shared library named after this module; the `ok' match makes
% module loading fail loudly if the native library cannot be loaded.
on_load() -> ok = erlang:load_nif(atom_to_list(?MODULE), 0).
%--- Internal ------------------------------------------------------------------
% NIF stubs; replaced by native implementations once the library is loaded.
gpio_open_nif(_Attributes, _Mode) -> ?NIF_STUB.
gpio_set_nif(_Pin, _Value) -> ?NIF_STUB.
gpio_get_nif(_Pin) -> ?NIF_STUB.
% Resolve a symbolic pin name for the platform reported by grisp_hw.
pin(Pin) -> pin(grisp_hw:platform(), Pin).
% erlfmt-ignore
% Map a symbolic pin name to the platform-specific attribute map consumed by
% gpio_open_nif/2:
%   - grisp_base (GRiSP 1): a flat pin index only.
%   - grisp2 (GRiSP 2): a node path, a property name within that node, and an
%     index into that property. Paths and property names are NUL-terminated
%     binaries (presumably for consumption by C code addressing device-tree
%     nodes — confirm against the NIF implementation).
pin(grisp_base, gpio1_1) -> #{index => 0};
pin(grisp_base, gpio1_2) -> #{index => 1};
pin(grisp_base, gpio1_3) -> #{index => 2};
pin(grisp_base, gpio1_4) -> #{index => 3};
pin(grisp_base, gpio2_1) -> #{index => 4};
pin(grisp_base, gpio2_2) -> #{index => 5};
pin(grisp_base, gpio2_3) -> #{index => 6};
pin(grisp_base, gpio2_4) -> #{index => 7};
pin(grisp_base, led1_r) -> #{index => 8};
pin(grisp_base, led1_g) -> #{index => 9};
pin(grisp_base, led1_b) -> #{index => 10};
pin(grisp_base, led2_r) -> #{index => 11};
pin(grisp_base, led2_g) -> #{index => 12};
pin(grisp_base, led2_b) -> #{index => 13};
pin(grisp_base, jumper_1) -> #{index => 14};
pin(grisp_base, jumper_2) -> #{index => 15};
pin(grisp_base, jumper_3) -> #{index => 16};
pin(grisp_base, jumper_4) -> #{index => 17};
pin(grisp_base, jumper_5) -> #{index => 18};
pin(grisp_base, spi1_pin7) -> #{index => 19};
pin(grisp_base, spi1_pin8) -> #{index => 20};
pin(grisp_base, spi1_pin9) -> #{index => 21};
pin(grisp_base, spi1_pin10) -> #{index => 22};
pin(grisp_base, spi1_pin1) -> #{index => 23};
pin(grisp_base, spi2_pin1) -> #{index => 24};
pin(grisp_base, uart_1_cts) -> #{index => 25};
pin(grisp_base, uart_2_txd) -> #{index => 26};
pin(grisp_base, uart_3_rxd) -> #{index => 27};
pin(grisp_base, uart_4_rts) -> #{index => 28};
% GRiSP 2: PMOD GPIO pins.
pin(grisp2, gpio1_1) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 0};
pin(grisp2, gpio1_2) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 1};
pin(grisp2, gpio1_3) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 2};
pin(grisp2, gpio1_4) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 3};
pin(grisp2, gpio1_7) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 4};
pin(grisp2, gpio1_8) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 5};
pin(grisp2, gpio1_9) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 6};
pin(grisp2, gpio1_10) -> #{path => <<"/pmod-gpio\0">>, property => <<"grisp,gpios\0">>, index => 7};
% GRiSP 2: generic GPIO pin headers.
pin(grisp2, gpio_1_3) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 0};
pin(grisp2, gpio_1_4) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 1};
pin(grisp2, gpio_1_5) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 2};
pin(grisp2, gpio_1_6) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 3};
pin(grisp2, gpio_2_3) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 4};
pin(grisp2, gpio_2_4) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 5};
pin(grisp2, gpio_2_5) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 6};
pin(grisp2, gpio_2_6) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 7};
pin(grisp2, gpio_2_7) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 8};
pin(grisp2, gpio_2_8) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 9};
pin(grisp2, gpio_2_9) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 10};
pin(grisp2, gpio_2_10) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 11};
pin(grisp2, gpio_2_11) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 12};
pin(grisp2, gpio_2_12) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 13};
pin(grisp2, gpio_2_13) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 14};
pin(grisp2, gpio_2_14) -> #{path => <<"/pin-gpio\0">>, property => <<"grisp,gpios\0">>, index => 15};
% GRiSP 2: LED components — each LED node carries a single `gpios' entry.
pin(grisp2, led1_r) -> #{path => <<"/leds/grisp-rgb1-red\0">>, property => <<"gpios\0">>, index => 0};
pin(grisp2, led1_g) -> #{path => <<"/leds/grisp-rgb1-green\0">>, property => <<"gpios\0">>, index => 0};
pin(grisp2, led1_b) -> #{path => <<"/leds/grisp-rgb1-blue\0">>, property => <<"gpios\0">>, index => 0};
pin(grisp2, led2_r) -> #{path => <<"/leds/grisp-rgb2-red\0">>, property => <<"gpios\0">>, index => 0};
pin(grisp2, led2_g) -> #{path => <<"/leds/grisp-rgb2-green\0">>, property => <<"gpios\0">>, index => 0};
pin(grisp2, led2_b) -> #{path => <<"/leds/grisp-rgb2-blue\0">>, property => <<"gpios\0">>, index => 0};
% GRiSP 2: mode-switch jumpers.
pin(grisp2, jumper_1) -> #{path => <<"/jumper-keys\0">>, property => <<"grisp,gpios\0">>, index => 0};
pin(grisp2, jumper_2) -> #{path => <<"/jumper-keys\0">>, property => <<"grisp,gpios\0">>, index => 1};
pin(grisp2, jumper_3) -> #{path => <<"/jumper-keys\0">>, property => <<"grisp,gpios\0">>, index => 2};
pin(grisp2, jumper_4) -> #{path => <<"/jumper-keys\0">>, property => <<"grisp,gpios\0">>, index => 3};
pin(grisp2, jumper_5) -> #{path => <<"/jumper-keys\0">>, property => <<"grisp,gpios\0">>, index => 4};
% No mapping exists for this platform/pin combination.
pin(UnknownPlatform, UnknownPin) ->
    erlang:error({unknown_pin, UnknownPlatform, UnknownPin}).
%% -------------------------------------------------------------------
%%
%% riak_buckets_fsm: listing of buckets
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc The buckets fsm manages the listing of buckets.
-module(riak_kv_buckets_fsm).
-behaviour(riak_core_coverage_fsm).
-include_lib("riak_kv_vnode.hrl").
-export([init/2,
process_results/2,
finish/2]).
-type from() :: {atom(), req_id(), pid()}.
-type req_id() :: non_neg_integer().

%% Coverage FSM state. The builtin type `set()' was removed in OTP 18; the
%% exported type `sets:set()' is the supported spelling.
-record(state, {buckets = sets:new() :: sets:set(), % unique bucket names seen so far
                from :: from(),                     % requesting client {Tag, ReqId, Pid}
                stream = false :: boolean(),        % stream results as they arrive?
                type :: binary()                    % bucket type used for filtering
               }).
-include("riak_kv_dtrace.hrl").
%% @doc Return a tuple containing the ModFun to call per vnode,
%% the number of primary preflist vnodes the operation
%% should cover, the service to use to check for available nodes,
%% and the registered name to use to access the vnode master process.
%% Two-element argument list: default to non-streaming and the default type.
init(From, [_, _]=Args) ->
    init(From, Args ++ [false, <<"default">>]);
%% Three-element argument list: default the bucket type only.
init(From, [ItemFilter, Timeout, Stream]) ->
    init(From, [ItemFilter, Timeout, Stream, <<"default">>]);
init(From={_, _, ClientPid}, [ItemFilter, Timeout, Stream, BucketType]) ->
    %% Client node/pid strings are only used for the dtrace probe below.
    ClientNode = atom_to_list(node(ClientPid)),
    PidStr = pid_to_list(ClientPid),
    %% 0/1 flag: is an item filter in effect?
    FilterX = if ItemFilter == none -> 0;
                 true -> 1
              end,
    %% "other" is a legacy term from when MapReduce used this FSM (in
    %% which case, the string "mapred" would appear
    ?DTRACE(?C_BUCKETS_INIT, [2, FilterX],
            [<<"other">>, ClientNode, PidStr]),
    %% Construct the bucket listing request
    Req = ?KV_LISTBUCKETS_REQ{item_filter=ItemFilter},
    {Req, allup, 1, 1, riak_kv, riak_kv_vnode_master, Timeout,
     #state{from=From, stream=Stream, type=BucketType}}.
%% All coverage results received: hand the accumulated state to finish/2.
process_results(done, StateData) ->
    {done, StateData};
%% A vnode reported an error; abort the whole run.
process_results({error, Reason}, _State) ->
    ?DTRACE(?C_BUCKETS_PROCESS_RESULTS, [-1], []),
    {error, Reason};
%% Streaming mode: forward only buckets not already sent, then remember them.
process_results(Buckets0,
                StateData=#state{buckets=BucketAcc, from=From, stream=true}) ->
    Buckets = filter_buckets(Buckets0, StateData#state.type),
    ?DTRACE(?C_BUCKETS_PROCESS_RESULTS, [length(Buckets)], []),
    BucketsToSend = [ B || B <- Buckets,
                           not sets:is_element(B, BucketAcc) ],
    case BucketsToSend =/= [] of
        true ->
            reply({buckets_stream, BucketsToSend}, From);
        false ->
            ok
    end,
    {ok, StateData#state{buckets=accumulate(Buckets, BucketAcc)}};
%% Non-streaming mode: just accumulate; finish/2 sends everything at once.
process_results(Buckets0,
                StateData=#state{buckets=BucketAcc, stream=false}) ->
    Buckets = filter_buckets(Buckets0, StateData#state.type),
    ?DTRACE(?C_BUCKETS_PROCESS_RESULTS, [length(Buckets)], []),
    {ok, StateData#state{buckets=accumulate(Buckets, BucketAcc)}}.
%% The coverage run failed or timed out: forward the error and stop.
finish({error, _}=Error,
       StateData=#state{from=From}) ->
    ?DTRACE(?C_BUCKETS_FINISH, [-1], []),
    %% Notify the requesting client that an error
    %% occurred or the timeout has elapsed.
    reply(Error, From),
    {stop, normal, StateData};
%% Streaming mode: results were already sent incrementally; just signal done.
finish(clean, StateData=#state{from=From, stream=true}) ->
    ?DTRACE(?C_BUCKETS_FINISH, [0], []),
    reply(done, From),
    {stop, normal, StateData};
%% Non-streaming mode: deliver the full accumulated bucket list at once.
finish(clean,
       StateData=#state{buckets=Buckets,
                        from=From,
                        stream=false}) ->
    reply({buckets, sets:to_list(Buckets)}, From),
    ?DTRACE(?C_BUCKETS_FINISH, [0], []),
    {stop, normal, StateData}.
%% @private Deliver a response to the requesting client process, tagged
%% with the original request id.
reply(Reply, {raw, ReqId, ClientPid}) ->
    _ = erlang:send(ClientPid, {ReqId, Reply}),
    ok.
%% @private Fold the newly seen bucket names into the accumulated set.
accumulate(NewBuckets, Seen) ->
    sets:union(sets:from_list(NewBuckets), Seen).
%% @private Keep only the buckets that belong to the given bucket type.
%%
%% Entries arrive either as {Type, Bucket} pairs (typed buckets) or as bare
%% binaries (kept only for the default/undefined type). The result is in
%% reverse order of the input, matching the accumulator-based original.
filter_buckets(Buckets, Type) ->
    Keep =
        fun({T, Bucket}, Acc) when T =:= Type ->
                [Bucket | Acc];
           (Bucket, Acc) when is_binary(Bucket),
                              Type =:= undefined orelse Type =:= <<"default">> ->
                [Bucket | Acc];
           (_Other, Acc) ->
                %% neither a matching typed bucket nor a default-type binary
                Acc
        end,
    lists:foldl(Keep, [], Buckets).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
%% tests should go here at some point.
-endif.
%
% reia_eval: Evaluate a given set of Reia expressions
% Copyright (C)2009 <NAME>
%
% Redistribution is permitted under the MIT license. See LICENSE for details.
%
-module(reia_eval).
-export([new_binding/0, string/1, string/2, exprs/1, exprs/2]).
-include("../compiler/reia_nodes.hrl").
-include("../compiler/reia_bindings.hrl").
-define(return_value_var(Line), #identifier{line=Line, name='__reia_eval_return_value'}).
% Create a new set of local variable bindings (a plain {Name, Value} list)
new_binding() -> [].
% Parse and evaluate the given string with an empty binding set
string(Str) -> string(Str, new_binding()).
% Parse and evaluate the given string with the given bindings.
% A parser failure is returned as the parser's {error, _} term unchanged.
string(Str, Bindings) ->
  case reia_parse:string(Str) of
    {error, _} = Error ->
      Error;
    {ok, Exprs} ->
      exprs(Exprs, Bindings)
  end.
% Evaluate the given set of expressions with an empty binding set
exprs(Exprs) -> exprs(Exprs, new_binding()).
% Evaluate the given set of expressions with the given bindings.
%
% The expressions are annotated so the compiled module returns
% {Value, Bindings}, compiled into a uniquely named temporary module,
% loaded with the current binding values as arguments, and purged again.
% Returns {value, Value, NewBindings}.
%
% Fix: removed the stray io:format("Input Code: ...") debug print that
% wrote to stdout on every evaluation.
exprs(Exprs, Bindings) ->
  Exprs2 = annotate_return_value(Exprs, Bindings),
  {ok, Module} = reia_compiler:compile(
    temporary_module(),
    Exprs2,
    [{toplevel_args, [Var || {Var, _} <- Bindings]}]
  ),
  Args = [Val || {_, Val} <- Bindings],
  {ok, Name, {Value, NewBindings}} = reia_bytecode:load(Module, Args),
  % FIXME: In the future it's possible eval will create things which persist
  % beyond initial evaluation (e.g. lambdas, processes). Once these features
  % are added a different solution will be needed than a simple code:purge.
  code:purge(Name),
  {value, Value, NewBindings}.
% Generate a unique throwaway module name of the form "reia_eval#<hex>",
% where the hex digest comes from hashing a fresh reference.
temporary_module() ->
  Digest = erlang:md5(term_to_binary(make_ref())),
  HexParts = [io_lib:format("~.16b", [Byte]) || <<Byte>> <= Digest],
  "reia_eval#" ++ lists:flatten(HexParts).
% Annotate the return value of the expression to include the bindings:
% the last expression is captured into the reserved return-value variable
% and a final expression producing {Value, Bindings} is appended.
annotate_return_value(Exprs, Bindings) ->
  [LastExpr|Rest] = lists:reverse(Exprs),
  % element 2 of any node record here is its line number
  Line = element(2, LastExpr),
  % Bind the original last expression to the reserved return-value variable
  LastExpr2 = #match{line=Line, left=?return_value_var(Line), right=LastExpr},
  ReturnValue = return_value(output_bindings(Exprs, Bindings), Line),
  lists:reverse([ReturnValue, LastExpr2 | Rest]).
% Obtain a list of all variables which will be bound when eval is complete:
% the incoming binding names plus any names bound by the expressions
% themselves (taken from the final bindings annotation), deduplicated.
output_bindings(Exprs, Bindings) ->
  {ok, BAExprs} = reia_bindings:transform(Exprs),
  [#bindings{entries=NewBindings}|_] = lists:reverse(BAExprs),
  lists:usort([Var || {Var, _} <- Bindings] ++ dict:fetch_keys(NewBindings)).
% Generate the return value for eval: an AST tuple pairing the reserved
% return-value variable with the list of binding nodes.
return_value(Bindings, Line) ->
  #tuple{line=Line, elements = [?return_value_var(Line), bindings_list(Bindings, Line)]}.
% Construct the AST list of binding nodes: cons cells terminated by an
% `{empty, Line}' node, one element per bound variable name.
bindings_list(Names, Line) ->
  Nil = {empty, Line},
  lists:foldr(
    fun(Name, Tail) -> {cons, Line, binding_node(Name, Line), Tail} end,
    Nil, Names).
% Generate the AST for a single binding: a 2-tuple whose first element is
% the variable name as an atom literal and whose second element refers to
% the variable itself.
binding_node(Name, Line) ->
  Key = #atom{line=Line, name=Name},
  Value = #identifier{line=Line, name=Name},
  #tuple{line=Line, elements=[Key, Value]}.
%%--------------------------------------------------------------------
%% Copyright (c) 2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_mod_rewrite_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-import(emqx_mod_rewrite,
[ rewrite_subscribe/4
, rewrite_unsubscribe/4
, rewrite_publish/2
]).
-include_lib("emqx.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(TEST_RULES, [<<"x/# ^x/y/(.+)$ z/y/$1">>,
<<"y/+/z/# ^y/(.+)/z/(.+)$ y/z/$2">>
]).
all() -> emqx_ct:all(?MODULE).
%%--------------------------------------------------------------------
%% Test cases
%%--------------------------------------------------------------------
%% Topic filters matching a rewrite rule are rewritten on subscribe;
%% non-matching filters pass through unchanged.
t_rewrite_subscribe(_) ->
    ?assertEqual({ok, [{<<"test">>, #{}}]},
                 rewrite(subscribe, [{<<"test">>, #{}}])),
    ?assertEqual({ok, [{<<"z/y/test">>, #{}}]},
                 rewrite(subscribe, [{<<"x/y/test">>, #{}}])),
    ?assertEqual({ok, [{<<"y/z/test_topic">>, #{}}]},
                 rewrite(subscribe, [{<<"y/test/z/test_topic">>, #{}}])).
%% Unsubscribe rewriting must mirror subscribe rewriting.
t_rewrite_unsubscribe(_) ->
    ?assertEqual({ok, [{<<"test">>, #{}}]},
                 rewrite(unsubscribe, [{<<"test">>, #{}}])),
    ?assertEqual({ok, [{<<"z/y/test">>, #{}}]},
                 rewrite(unsubscribe, [{<<"x/y/test">>, #{}}])),
    ?assertEqual({ok, [{<<"y/z/test_topic">>, #{}}]},
                 rewrite(unsubscribe, [{<<"y/test/z/test_topic">>, #{}}])).
%% Published message topics are rewritten inside the #message{} record.
t_rewrite_publish(_) ->
    ?assertMatch({ok, #message{topic = <<"test">>}},
                 rewrite(publish, #message{topic = <<"test">>})),
    ?assertMatch({ok, #message{topic = <<"z/y/test">>}},
                 rewrite(publish, #message{topic = <<"x/y/test">>})),
    ?assertMatch({ok, #message{topic = <<"y/z/test_topic">>}},
                 rewrite(publish, #message{topic = <<"y/test/z/test_topic">>})).
%%--------------------------------------------------------------------
%% Helper functions
%%--------------------------------------------------------------------
%% Drive the module under test with the suite's compiled rewrite rules.
rewrite(Op, Input) ->
    Rules = rules(),
    case Op of
        subscribe   -> rewrite_subscribe(#{}, #{}, Input, Rules);
        unsubscribe -> rewrite_unsubscribe(#{}, #{}, Input, Rules);
        publish     -> rewrite_publish(Input, Rules)
    end.
%% Compile the raw rule specifications in ?TEST_RULES into the internal
%% {rewrite, TopicFilter, CompiledRegex, Destination} form.
rules() ->
    lists:map(fun compile_rule/1, ?TEST_RULES).

%% Split a "Filter Regex Dest" spec on spaces and precompile the regex.
compile_rule(Spec) ->
    [TopicFilter, Regex, Dest] = string:split(Spec, " ", all),
    {ok, Compiled} = re:compile(Regex),
    {rewrite, TopicFilter, Compiled, Dest}.
-module(phi_failure_detector).
-export([new/2]).
-export([new/3]).
-export([phi/1]).
-export([phi/2]).
-export([phi/3]).
-export([heartbeat/2]).
-export([heartbeat/3]).
-export([monitor/3]).
%% @doc Creates a new Phi Failure Detector
%%
%% {@link new/3}
%% @end
-spec new(Label, ID) -> {ok, Pid}
    when Label :: atom(),
         ID :: term(),
         Pid :: pid().
new(Label, ID) ->
    new(Label, ID, []).
%% @doc Creates a new Phi Failure Detector
%%
%% Creating a new Phi Failure Detector creates a process to manage the sampling window of
%% inter-arrival times of heartbeats and allows a client to query the Phi value at a point in time.
%% The unique identifier for a Phi Failure Detector is the combination of `Label' and `ID'. The
%% `Label' and `ID' combination are used with {@link heartbeat/2}.
%% @end
-spec new(Label, ID, Opts) -> {ok, Pid}
    when Label :: atom(),
         ID :: term(),
         Opts :: list(),
         Pid :: pid().
%% The detector process is started via the pfd supervisor.
new(Label, ID, Opts)->
    pfd_sup:start_service(Label, ID, Opts).
%% @doc Calculate Phi for all endpoints that share the same `Label'.
%%
%% See {@link phi/2} for more details on Phi.
%% @end
-spec phi(Label) -> Status
    when Label :: atom(),
         Status :: [{{atom(), term()}, float()}].
phi(Label) ->
    %% pfd_service:select/1 yields the {Label, ID} pairs registered under
    %% this label; compute Phi for each at the current time.
    [{S, phi(L, I)} || {L, I} = S <- pfd_service:select(Label)].
%% @doc Calculate the Phi value for a service endpoint
%%
%% Phi represents a dynamically scalable suspicion level of a service endpoint based on the
%% inter-arrival times recorded with each {@link heartbeat/2}. A distribution of
%% inter-arrival times is used to compute the value of Phi at some point in time. The estimation of
%% inter-arrival times assumes a normal distribution. The following function is used to determine phi:
%% -log10(1 - cdf(tnow - tlast))
%% @end
-spec phi(Label, ID) -> Phi
    when Label :: atom(),
         ID :: term(),
         Phi :: float().
phi(Label, ID) ->
    phi(Label, ID, t_now()).
%% Same as phi/2, but evaluated at an explicit timestamp T (in
%% microseconds; compare t_now/0).
-spec phi(Label, ID, T) -> Phi
    when Label :: atom(),
         ID :: term(),
         T :: pos_integer(),
         Phi :: float().
phi(Label, ID, T) ->
    pfd_service:phi(Label, ID, T).
%% @doc Add a successful heartbeat to the sample set for calculating Phi
%%
%% When a heartbeat arrives we store the inter-arrival time from the previously received heartbeat
%% and store it in a fixed-size sliding window. The data regarding the oldest heartbeat is dropped
%% from the window.
%% @end
-spec heartbeat(Label, ID) -> ok
    when Label :: atom(),
         ID :: term().
heartbeat(Label, ID) ->
    heartbeat(Label, ID, t_now()).
%% Record a heartbeat observed at an explicit timestamp T (microseconds).
heartbeat(Label, ID, T) ->
    pfd_service:heartbeat(Label, ID, T).
%% @doc Register a monitor on the detector identified by `Label'/`ID' with
%% the given `Threshold'. Delegates to pfd_monitor; the exact trigger
%% semantics live there.
-spec monitor(Label, ID, Threshold) -> ok
    when Label :: atom(),
         ID :: term(),
         Threshold :: float().
monitor(Label, ID, Threshold) ->
    pfd_monitor:monitor(Label, ID, Threshold).
%% Current time in microseconds, the default timestamp for heartbeats and
%% Phi queries. Uses the canonical `microsecond' unit atom; `micro_seconds'
%% is a deprecated alias.
t_now() ->
    erlang:system_time(microsecond).
%% @doc: If you wish to implement your own backend for storing
%% registers, your module needs to implement these interfaces. The
%% backend modules have quite a lot of responsibility (detailed below)
%% to allow for backend-specific optimizations.
-module(hyper_register).
-type t() :: module().
-export_type([t/0]).
-export([new/2, set/4, compact/2, max_merge/2, max_merge/3, reduce_precision/3,
         register_sum/2, zero_count/2, encode_registers/2, decode_registers/3, bytes/2]).
%% Behaviour for register storage backends. Every callback below has a
%% same-named wrapper function later in this module that dispatches to the
%% chosen backend module.
%% @doc: Creates a new instance of the backend. The return value of
%% this function will be passed to all functions in this module.
-callback new(P :: hyper:precision()) ->
    hyper:registers().
%% @doc: Set the register to the given value, *only* if the value
%% already stored is lower than the new value. The backend needs to
%% ensure the register value is only allowed to increase.
-callback set(Index :: integer(),
              Value :: integer(),
              hyper:registers()) ->
    hyper:registers().
%% @doc: Compact is always called before any attempt at reading (sum,
%% zero count, etc) or merging. It is intended to give backends that
%% buffer the writes a chance to flush the buffer before the registers
%% are needed.
-callback compact(hyper:registers()) ->
    hyper:registers().
%% @doc: Merge any number of registers, used to calculate the
%% union. For two register values at the same index, the max value
%% must be in the resulting register.
-callback max_merge([hyper:registers()]) ->
    hyper:registers().
%% @doc: Same as max_merge/1 but used when we know only two filters
%% are merged.
-callback max_merge(hyper:registers(),
                    hyper:registers()) ->
    hyper:registers().
%% @doc: Reduce the precision of the registers. Used for mixed-precision
%% union by first reducing the precision to the lowest of all filters.
-callback reduce_precision(hyper:precision(),
                           hyper:registers()) ->
    hyper:registers().
%% @doc: Sum of 2^-R where R is the value in each register.
-callback register_sum(hyper:registers()) ->
    float().
%% @doc: Count of registers set to 0.
-callback zero_count(hyper:registers()) ->
    integer().
%% @doc: Encode and decode are called to convert the in-memory
%% representation of the backend to the serialized format. Must return
%% one binary where each register is encoded as an 8-bit integer.
-callback encode_registers(hyper:registers()) ->
    binary().
-callback decode_registers(binary(), hyper:precision()) ->
    hyper:registers().
%% @doc: Size in bytes used to represent the registers in memory.
-callback bytes(hyper:registers()) ->
    integer().
%% @doc Create a fresh register instance with precision `P' using the
%% backend module `Mod'. The returned term is threaded through every
%% other wrapper in this module.
-spec new(t(), P :: hyper:precision()) -> hyper:registers().
new(Mod, P) ->
    Mod:new(P).

%% @doc Write `Value' into the register at `Index'. Backends only ever
%% let a register value grow: the stored value is replaced only if it is
%% lower than `Value'.
-spec set(t(), Index :: integer(), Value :: integer(), hyper:registers()) ->
    hyper:registers().
set(Mod, Index, Value, Registers) ->
    Mod:set(Index, Value, Registers).

%% @doc Give buffering backends a chance to flush pending writes. Called
%% before any read (sum, zero count, ...) or merge.
-spec compact(t(), hyper:registers()) -> hyper:registers().
compact(Mod, Registers) ->
    Mod:compact(Registers).

%% @doc Merge a list of register sets, keeping the per-index maximum
%% (used to compute unions).
-spec max_merge(t(), [hyper:registers()]) -> hyper:registers().
max_merge(Mod, RegisterSets) ->
    Mod:max_merge(RegisterSets).

%% @doc Two-argument merge variant, for when exactly two filters are
%% combined.
-spec max_merge(t(), hyper:registers(), hyper:registers()) -> hyper:registers().
max_merge(Mod, RegistersA, RegistersB) ->
    Mod:max_merge(RegistersA, RegistersB).

%% @doc Lower the precision of the registers; used before a
%% mixed-precision union, which first reduces all filters to the lowest
%% precision involved.
-spec reduce_precision(t(), hyper:precision(), hyper:registers()) -> hyper:registers().
reduce_precision(Mod, Precision, Registers) ->
    Mod:reduce_precision(Precision, Registers).

%% @doc Sum of 2^-R over all register values R.
-spec register_sum(t(), hyper:registers()) -> float().
register_sum(Mod, Registers) ->
    Mod:register_sum(Registers).

%% @doc Number of registers still set to 0.
-spec zero_count(t(), hyper:registers()) -> integer().
zero_count(Mod, Registers) ->
    Mod:zero_count(Registers).

%% @doc Serialize the registers to a single binary, one 8-bit integer per
%% register.
-spec encode_registers(t(), hyper:registers()) -> binary().
encode_registers(Mod, Registers) ->
    Mod:encode_registers(Registers).

%% @doc Rebuild a register set from its serialized form at the given
%% precision.
-spec decode_registers(t(), binary(), hyper:precision()) -> hyper:registers().
decode_registers(Mod, Encoded, Precision) ->
    Mod:decode_registers(Encoded, Precision).

%% @doc In-memory size of the registers, in bytes.
-spec bytes(t(), hyper:registers()) -> integer().
bytes(Mod, Registers) ->
    Mod:bytes(Registers).
-module(color).
% API
-export([p/2]).
-export([black/1]).
-export([black/2]).
-export([red/1]).
-export([red/2]).
-export([green/1]).
-export([green/2]).
-export([yellow/1]).
-export([yellow/2]).
-export([blue/1]).
-export([blue/2]).
-export([purple/1]).
-export([purple/2]).
-export([cyan/1]).
-export([cyan/2]).
-export([white/1]).
-export([white/2]).
-define(reset, <<"\e[0m">>).
-define(foreground(Color),
Color(Text) -> [<<"\e[0;">>, foreground(Color), <<"m">>, Text, ?reset]
).
-type opt() :: black | red | green | yellow | blue | purple | cyan | white |
light_black | light_red | light_green | light_yellow | light_blue |
light_purple | light_cyan | light_white | normal | bold | underline |
blink | inherit.
%--- API ----------------------------------------------------------------------

% @doc Wrap `Text' in the ANSI escape sequence described by `Opts',
% terminating it with a reset so later output is unaffected.
-spec p(iodata(), [opt()]) -> iodata().
p(Text, Opts) ->
    {Modes, Colors} = parse(Opts),
    [<<"\e[">>, Modes, Colors, <<"m">>, Text, ?reset].
% @doc Color the text black.
-spec black(iodata()) -> iodata().
black(Text) -> p(Text, [black]).

% @doc Format the arguments and color the result black.
-spec black(string(), [term()]) -> iodata().
black(Fmt, Args) -> black(io_lib:format(Fmt, Args)).

% @doc Color the text red.
-spec red(iodata()) -> iodata().
red(Text) -> p(Text, [red]).

% @doc Format the arguments and color the result red.
-spec red(string(), [term()]) -> iodata().
red(Fmt, Args) -> red(io_lib:format(Fmt, Args)).

% @doc Color the text green.
-spec green(iodata()) -> iodata().
green(Text) -> p(Text, [green]).

% @doc Format the arguments and color the result green.
-spec green(string(), [term()]) -> iodata().
green(Fmt, Args) -> green(io_lib:format(Fmt, Args)).

% @doc Color the text yellow.
-spec yellow(iodata()) -> iodata().
yellow(Text) -> p(Text, [yellow]).

% @doc Format the arguments and color the result yellow.
-spec yellow(string(), [term()]) -> iodata().
yellow(Fmt, Args) -> yellow(io_lib:format(Fmt, Args)).

% @doc Color the text blue.
-spec blue(iodata()) -> iodata().
blue(Text) -> p(Text, [blue]).

% @doc Format the arguments and color the result blue.
-spec blue(string(), [term()]) -> iodata().
blue(Fmt, Args) -> blue(io_lib:format(Fmt, Args)).

% @doc Color the text purple.
-spec purple(iodata()) -> iodata().
purple(Text) -> p(Text, [purple]).

% @doc Format the arguments and color the result purple.
-spec purple(string(), [term()]) -> iodata().
purple(Fmt, Args) -> purple(io_lib:format(Fmt, Args)).

% @doc Color the text cyan.
-spec cyan(iodata()) -> iodata().
cyan(Text) -> p(Text, [cyan]).

% @doc Format the arguments and color the result cyan.
-spec cyan(string(), [term()]) -> iodata().
cyan(Fmt, Args) -> cyan(io_lib:format(Fmt, Args)).

% @doc Color the text white.
-spec white(iodata()) -> iodata().
white(Text) -> p(Text, [white]).

% @doc Format the arguments and color the result white.
-spec white(string(), [term()]) -> iodata().
white(Fmt, Args) -> white(io_lib:format(Fmt, Args)).
%--- Internal Functions -------------------------------------------------------

% Splits the option list into an SGR mode prefix and a color suffix.
% The first color atom in `Opts' is taken as the foreground, every later
% one as a background. Returns {ModesBin, ColorsBin}, ready to be joined
% into an "\e[...m" escape sequence by p/2.
parse(Opts) -> parse(Opts, {mode(normal), <<>>}).

parse([], Result) ->
    Result;
% NOTE(review): this clause is unreachable — the accumulator always starts
% as {<<"0">>, _}, never {<<>>, _}. Kept to preserve the original shape.
parse([Mode|Opts], {<<>>, Colors})
        when Mode == normal; Mode == bold; Mode == underline; Mode == blink ->
    parse(Opts, {mode(Mode), Colors});
% Fix: `blink' is part of opt() and supported by mode/1 but was missing from
% these guards, so p(Text, [blink]) fell through to the color clauses and
% crashed with invalid_foreground_color.
parse([Mode|Opts], {Modes, Colors})
        when Mode == normal; Mode == bold; Mode == underline; Mode == blink ->
    Bin = mode(Mode),
    parse(Opts, {<<Modes/binary, ";", Bin/binary>>, Colors});
parse([Color|Opts], {Mode, <<>>}) ->
    % First color in the list selects the foreground.
    Bin = foreground(Color),
    parse(Opts, {Mode, <<";", Bin/binary>>});
parse([Color|Opts], {Mode, Colors}) ->
    % Any subsequent color selects a background.
    Bin = background(Color),
    parse(Opts, {Mode, <<Colors/binary, ";", Bin/binary>>}).

% SGR display modes. Color_Off='\e[0m' # Text Reset
mode(normal) -> <<"0">>;
mode(bold) -> <<"1">>;
mode(underline) -> <<"4">>;
mode(blink) -> <<"5">>;
mode(_) -> error(invalid_mode).

% Standard (30-37) and bright (90-97) ANSI foreground codes.
foreground(inherit) -> <<"0">>;
foreground(black) -> <<"30">>;
foreground(red) -> <<"31">>;
foreground(green) -> <<"32">>;
foreground(yellow) -> <<"33">>;
foreground(blue) -> <<"34">>;
foreground(purple) -> <<"35">>;
foreground(cyan) -> <<"36">>;
foreground(white) -> <<"37">>;
foreground(light_black) -> <<"90">>;
foreground(light_red) -> <<"91">>;
foreground(light_green) -> <<"92">>;
foreground(light_yellow) -> <<"93">>;
foreground(light_blue) -> <<"94">>;
foreground(light_purple) -> <<"95">>;
foreground(light_cyan) -> <<"96">>;
foreground(light_white) -> <<"97">>;
foreground(_) -> error(invalid_foreground_color).

% Standard (40-47) and bright (100-107) ANSI background codes.
background(black) -> <<"40">>;
background(red) -> <<"41">>;
background(green) -> <<"42">>;
background(yellow) -> <<"43">>;
background(blue) -> <<"44">>;
background(purple) -> <<"45">>;
background(cyan) -> <<"46">>;
background(white) -> <<"47">>;
background(light_black) -> <<"100">>;
background(light_red) -> <<"101">>;
background(light_green) -> <<"102">>;
background(light_yellow) -> <<"103">>;
background(light_blue) -> <<"104">>;
background(light_purple) -> <<"105">>;
background(light_cyan) -> <<"106">>;
background(light_white) -> <<"107">>;
background(_) -> error(invalid_background_color).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc General Header processing functions.
-module(nklib_headers).
-author('<NAME> <<EMAIL>>').
-export([header/2, new/1, update/2, update_uri/3]).
-export_type([op/0]).
-include("nklib.hrl").
-type op() ::
single | multi | after_single | before_single | after_multi |
before_multi | default_single | default_multi.
%% ===================================================================
%% Public
%% ===================================================================
%% @doc Generates a single header. If `Value' is a list, all values will be joined
%% using comma as separator.
%% Clauses dispatch on the shape of `Value':
%% - #uri{} records and token tuples/lists are serialized via nklib_unparse
%% - binaries pass through untouched, integers are rendered as text
%% - a list starting with an integer is treated as one charlist ("string")
%% - any other list is rendered element by element and comma-joined
%% - anything else falls back to nklib_util:to_binary/1
-spec header(Name::binary(), Value::nklib:header_value()|[nklib:header_value()]) ->
nklib:header().
header(Name, #uri{}=Uri) ->
{Name, nklib_unparse:uri(Uri)};
header(Name, {Token, Opts}) when is_list(Opts)->
{Name, nklib_unparse:token({Token, Opts})};
header(Name, [{_Token, Opts}|_]=List) when is_list(Opts) ->
{Name, nklib_unparse:token(List)};
header(Name, Binary) when is_binary(Binary) ->
{Name, Binary};
header(Name, Integer) when is_integer(Integer) ->
{Name, nklib_util:to_binary(Integer)};
%% NOTE(review): a list whose head is an integer is assumed to be a charlist,
%% not a list of numeric values — confirm that callers never pass [1,2,3]
%% intending three separate values.
header(Name, [F|_]=List) when is_integer(F) ->
{Name, nklib_util:to_binary(List)};
header(Name, List) when is_list(List) ->
Values = [
case Term of
#uri{} -> nklib_unparse:uri(Term);
_ -> nklib_util:to_binary(Term)
end
|| Term <- List
],
{Name, nklib_util:bjoin(Values)};
header(Name, Raw) ->
{Name, nklib_util:to_binary(Raw)}.
%% @doc Generates a header list from a list of specifications.
%% See {@link update/2} for options description.
%% Equivalent to applying every operation to an initially empty header list.
-spec new([{Operation, Name, Value} | none]) -> [nklib:header()]
when Operation :: single|multi|after_single|before_single|after_multi|
before_multi|default_single|default_multi,
Name :: binary(), Value :: nklib:header_value() | [nklib:header_value()].
new(Operations) ->
update([], Operations).
%% @doc Performs a series of modifications to a headers list (or the header list of
%% a request or response).
%% Each modification is applied serially, and has the form `{Op, Name, Value}'.
%% `Value' can be a single value or a list of values, affecting the behaviour of
%% `single' and `multi' version of the operations (see {@link header/2}).
%% Valid values for `Op' are:
%% <ul>
%% <li>`single': Deletes old `Name' headers and adds a single new one.
%% If `Value' is `[]' headers will be deleted.</li>
%% <li>`multi': Deletes old `Name' headers and adds one or more new ones.
%% If `Value' is `[]' headers will be deleted.</li>
%% <li>`after_single': Combine previous headers into a single new one appending
%% value or values in `Value' after old values.</li>
%% <li>`before_single': Combine previous headers into a single new one appending
%% value or values in `Value' before old values.</li>
%% <li>`after_multi': Generates multiple headers, one for each value or values in
%% `Value', after any existing header/s.</li>
%% <li>`before_multi': Generates multiple headers, one for each value or values in
%% `Value', before any existing header/s.</li>
%% <li>`default_single': adds a single header only if it was not already present
%% and `Value' is not `[]'</li>
%% <li>`default_multi': adds multiple headers only if it was not already present
%% and `Value' is not `[]'</li>
%% </ul>
%%
-spec update(Input, [{Operation, Name, Value} | list() | none]) ->
[nklib:header()]
when Input :: [nklib:header()],
Operation :: op(),
Name :: binary(), Value :: nklib:header_value() | [nklib:header_value()].
update(Headers, []) ->
Headers;
%% default_single: add only when the name is absent and the value non-empty.
update(Headers, [{default_single, Name, ValueOrValues}|R]) ->
case lists:keymember(Name, 1, Headers) of
false when ValueOrValues==[]-> update(Headers, R);
false -> update([header(Name, ValueOrValues)|Headers], R);
true -> update(Headers, R)
end;
%% default_multi: like default_single, but a list value becomes one header
%% per element (a charlist is still treated as a single value).
update(Headers, [{default_multi, Name, ValueOrValues}|R]) ->
case lists:keymember(Name, 1, Headers) of
false ->
case ValueOrValues of
[] ->
update(Headers, R);
[I|_] when is_integer(I) ->
update([header(Name, list_to_binary(ValueOrValues))|Headers], R);
[_|_] ->
update([header(Name, V) || V <- ValueOrValues] ++ Headers, R);
_ ->
update([header(Name, ValueOrValues)|Headers], R)
end;
true ->
update(Headers, R)
end;
%% single: replace any existing `Name' headers with one combined header.
update(Headers, [{single, Name, ValueOrValues}|R]) ->
Headers1 = nklib_util:delete(Headers, Name),
case ValueOrValues of
[] -> update(Headers1, R);
_ -> update([header(Name, ValueOrValues)|Headers1], R)
end;
%% multi: replace any existing `Name' headers with one header per value.
update(Headers, [{multi, Name, ValueOrValues}|R]) ->
Headers1 = nklib_util:delete(Headers, Name),
case ValueOrValues of
[] ->
update(Headers1, R);
[I|_] when is_integer(I) ->
update([header(Name, list_to_binary(ValueOrValues))|Headers1], R);
[_|_] ->
update([header(Name, V) || V <- ValueOrValues] ++ Headers1, R);
_ ->
update([header(Name, ValueOrValues)|Headers1], R)
end;
%% Remaining (after_*/before_*) operations with an empty value are no-ops.
update(Headers, [{_, _, []}|R]) ->
update(Headers, R);
update(Headers, [{after_single, Name, ValueOrValues}|R]) ->
{OldValues, Headers1} = extract(Name, Headers),
Values1 = case is_list(ValueOrValues) of
true -> OldValues ++ ValueOrValues;
false -> OldValues ++ [ValueOrValues]
end,
update([header(Name, Values1) | Headers1], R);
update(Headers, [{after_multi, Name, ValueOrValues}|R]) ->
{OldValues, Headers1} = extract(Name, Headers),
% If OldValues is like "a,b,c" this will not be splitted in multiple headers
Values1 = case is_list(ValueOrValues) of
true -> OldValues ++ ValueOrValues;
false -> OldValues ++ [ValueOrValues]
end,
update([header(Name, V) || V <- Values1] ++ Headers1, R);
update(Headers, [{before_single, Name, ValueOrValues}|R]) ->
{OldValues, Headers1} = extract(Name, Headers),
Values1 = case is_list(ValueOrValues) of
true -> ValueOrValues ++ OldValues;
false -> [ValueOrValues | OldValues]
end,
update([header(Name, Values1) | Headers1], R);
update(Headers, [{before_multi, Name, ValueOrValues}|R]) ->
{OldValues, Headers1} = extract(Name, Headers),
% If OldValues is like "a,b,c" this will not be splitted in multiple headers
Values1 = case is_list(ValueOrValues) of
true -> ValueOrValues ++ OldValues;
false -> [ValueOrValues | OldValues]
end,
update([header(Name, V) || V <- Values1] ++ Headers1, R);
%% A nested list is treated as a sub-script of operations.
update(Headers, [List|R]) when is_list(List) ->
Headers1 = update(Headers, List),
update(Headers1, R);
%% Unknown entries (e.g. `none') are silently ignored.
update(Headers, [_|R]) ->
update(Headers, R).
%% @private Builds one `{Op, Name, Value}' action per header embedded in the
%% URI and applies them all with update/2. Header values arrive
%% percent-encoded inside the URI and must be decoded first.
-spec update_uri([nklib:header()], nklib:uri(), op()) ->
    [nklib:header()].

update_uri(Headers, #uri{headers=[]}, _Op) ->
    Headers;

update_uri(Headers, #uri{headers=UriHeaders}, Op) ->
    %% Fix: `http_uri' was deprecated in OTP 23 and removed in OTP 25, so
    %% http_uri:decode/1 no longer exists on modern Erlang. Its direct
    %% replacement is uri_string:percent_decode/1 (available since OTP 23),
    %% which likewise returns a list for list input.
    Actions = [
        {
            Op,
            nklib_parse:name(Name),
            list_to_binary(uri_string:percent_decode(nklib_util:to_list(Val)))
        }
        || {Name, Val} <- UriHeaders
    ],
    update(Headers, Actions).
%% ===================================================================
%% Private
%% ===================================================================
%% @private Splits `List' into the values of all `{Name, _}' tuples (each
%% normalised to a binary, original order preserved) and the remaining
%% elements of the list.
-spec extract(term(), list()) ->
    {[binary()], [term()]}.

extract(Name, List) ->
    extract(Name, List, [], []).

extract(Name, [{Name, Value} | Tail], Found, Kept) when is_binary(Value) ->
    extract(Name, Tail, [Value | Found], Kept);
extract(Name, [{Name, Value} | Tail], Found, Kept) ->
    extract(Name, Tail, [nklib_util:to_binary(Value) | Found], Kept);
extract(Name, [Other | Tail], Found, Kept) ->
    extract(Name, Tail, Found, [Other | Kept]);
extract(_Name, [], Found, Kept) ->
    {lists:reverse(Found), lists:reverse(Kept)}.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Exercises every update/2 operation in sequence; each stage (H1..H8)
%% builds on the header list produced by the previous one.
headers_test() ->
%% H1: default_single skips [] values and joins list values with commas.
H1 = [{<<"a">>,<<"a1,a2">>}],
?assertMatch(H1,
new([
{default_single, <<"a">>, []},
{default_single, <<"a">>, [a1, "a2"]}
])),
?assertMatch(H1,
update(H1, [{default_single, <<"a">>, otro}])),
%% H2: default_multi adds only absent names, one header per list element.
H2 = [
{<<"d">>,<<"d1">>},
{<<"c">>,<<"c1">>},
{<<"c">>,<<"c2">>},
{<<"b">>,<<"b1">>},
{<<"a">>,<<"a1,a2">>}],
?assertMatch(H2,
update(H1, [
{default_multi, <<"b">>, []},
{default_multi, <<"b">>, "b1"},
{default_multi, <<"c">>, ["c1", c2]},
{default_multi, <<"d">>, <<"d1">>}
])),
?assertMatch(H2,
update(H2, [{default_multi, <<"b">>, otro}])),
%% H3: single replaces or deletes ([] value) existing headers.
H3 = [
{<<"a">>,<<"a3,a4">>},
{<<"b">>,<<"b2">>},
{<<"e">>,<<"e1,e2">>},
{<<"d">>,<<"d1">>}],
?assertMatch(H3,
update(H2, [
{single, <<"e">>, [e1, e2]},
{single, <<"c">>, []},
{single, <<"b">>, <<"b2">>},
{single, <<"a">>, [a3, "a4"]}
])),
%% H4: multi replaces existing headers with one header per value.
H4 = [
{<<"d">>,<<"d2">>},
{<<"c">>,<<"c3">>},
{<<"c">>,<<"c4">>},
{<<"b">>,<<"b3">>},
{<<"e">>,<<"e1,e2">>}],
?assertMatch(H4,
update(H3, [
{multi, <<"a">>, []},
{multi, <<"b">>, "b3"},
{multi, <<"c">>, ["c3", <<"c4">>]},
{multi, <<"d">>, d2}
])),
%% H5: after_single merges old and new values into one header.
H5 = [
{<<"f">>,<<"f1">>},
{<<"e">>,<<"e1,e2,e3,e4">>},
{<<"c">>,<<"c3,c4,c5">>},
{<<"d">>,<<"d2">>},
{<<"b">>,<<"b3">>}],
?assertMatch(H5,
update(H4, [
{after_single, <<"c">>, c5},
{after_single, <<"e">>, [e3, e4]},
{after_single, <<"f">>, f1}
])),
%% H6: after_multi keeps one header per value, new values last.
H6 = [
{<<"d">>,<<"d2">>},
{<<"d">>,<<"d3">>},
{<<"c">>,<<"c3,c4,c5">>},
{<<"c">>,<<"c6">>}],
?assertMatch(H6,
update(H5, [
{single, <<"f">>, []},
{multi, <<"e">>, []},
{single, <<"b">>, []},
{after_multi, <<"c">>, c6},
{after_multi, <<"d">>, [d3]}
])),
%% H7: before_multi prepends one header per value.
H7 = [
{<<"c">>,<<"c7">>},
{<<"c">>,<<"c3,c4,c5">>},
{<<"c">>,<<"c6">>},
{<<"d">>,<<"d4">>},
{<<"d">>,<<"d2">>},
{<<"d">>,<<"d3">>},
{<<"b">>,<<"b4">>},
{<<"b">>,<<"b5">>},
{<<"a">>,<<"a5">>}],
?assertMatch(H7,
update(H6, [
{before_multi, <<"a">>, a5},
{before_multi, <<"b">>, [b4, b5]},
{before_multi, <<"d">>, d4},
{before_multi, <<"c">>, c7}
])),
%% H8: before_single collapses everything into one comma-joined header.
H8= [
{<<"a">>,<<"a6,a7,a5">>},
{<<"b">>,<<"b6,b4,b5">>},
{<<"c">>,<<"c8,c7,c3,c4,c5,c6">>}],
?assertMatch(H8,
update(H7, [
{before_single, <<"c">>, c8},
{single, <<"d">>, []},
{before_single, <<"b">>, b6},
{before_single, <<"a">>, [a6, a7]}
])).
-endif.
-module(bingo_game).
-include("bingo_game.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/1, new/2, mark_phrase/3]).
-type buzzwords() :: [bingo_buzzwords:buzzword()].
%%-------------------------------------------------------------------------
%% Function: new(Size) -> BingoGame
%%
%% Size = pos_integer()
%% The size of the game's grid. For example, if the Size is 3, the
%% game's grid would contain 3x3 bingo squares (9 squares in total).
%% BingoGame = bingo_game()
%% A bingo game record.
%%
%% Description: Returns a new bingo game with a `Size` x `Size` collection
%% of squares taken randomly from the list of buzzwords.
%% Sizes below 3 are rejected by the guard (function_clause crash),
%% in keeping with let-it-crash.
%%-------------------------------------------------------------------------
-spec new(pos_integer()) -> bingo_game().
new(Size) when is_integer(Size), Size >= 3 ->
%% NOTE(review): read_buzzwords/0 presumably loads the buzzword file from
%% disk — confirm in bingo_buzzwords before relying on this being pure.
Buzzwords = bingo_buzzwords:read_buzzwords(),
new(Size, Buzzwords).
%%-------------------------------------------------------------------------
%% Function: new(Size, Buzzwords) -> BingoGame
%%
%% Size = pos_integer()
%%     The size of the game's grid. For example, if the Size is 3, the
%%     game's grid would contain 3x3 bingo squares (9 in total).
%% Buzzwords = [#{phrase => string(), points => integer()}]
%%     The list of buzzwords from which to create a bingo game. Note that,
%%     since a game of size `Size` requires exactly `Size * Size`
%%     buzzwords, the length of the given list must be at least
%%     `Size * Size`.
%%
%% Description: Returns a new bingo game with a `Size` x `Size` collection
%%              of bingo squares taken randomly from the list of buzzwords.
%%-------------------------------------------------------------------------
-spec new(pos_integer(), buzzwords()) -> bingo_game().
%% Fix: the guard used `and`, which is non-idiomatic in guards (it does not
%% short-circuit); the guard comma is the conventional conjunction.
new(Size, Buzzwords) when is_integer(Size), is_list(Buzzwords) ->
    %% Shuffle so each game samples a different subset/order of buzzwords.
    Shuffled = bingo_random:shuffle(Buzzwords),
    %% make_squares/2 validates Size and the list length; the assertive
    %% match crashes (badmatch) on invalid arguments.
    {ok, Squares} = make_squares(Size, Shuffled),
    #bingo_game{squares = Squares}.
%%-------------------------------------------------------------------------
%% Function mark_phrase(Phrase, Player, Game) -> NewGame.
%%
%% Game = NewGame = bingo_game()
%% The before and after (mark) bingo game records.
%% Phrase = string()
%% The phrase to mark.
%% Player = bingo_player()
%% The bingo player record who request to mark.
%%
%% Description: Finds the square in the game's squares grid that has
%% the given `Phrase`, and marks it for the given `Player`,
%% updates the game's scores, and checks for a bingo!.
%%-------------------------------------------------------------------------
-spec mark_phrase(bingo_phrase(), bingo_player(), bingo_game())
-> bingo_game().
mark_phrase(Phrase, Player, Game) when
is_record(Game, bingo_game),
is_record(Player, bingo_player) ->
%% Three-step pipeline: mark the matching square, recompute the score
%% map from scratch, then check whether this mark completed a bingo.
UpdateSquares = update_squares_with_mark(Game, Phrase, Player),
UpdateScores = update_scores(UpdateSquares),
AssignWinner = assign_winner_if_bingo(Player, UpdateScores),
AssignWinner.
%%
%% HELPERS.
%%
%% Returns `Game' with its grid replaced by one in which the square holding
%% `Phrase' (if present and still unmarked) has been marked by `Player'.
-spec update_squares_with_mark(bingo_game(), bingo_phrase(), bingo_player()) ->
    bingo_game().
update_squares_with_mark(#bingo_game{squares = Grid} = Game, Phrase, Player)
  when is_record(Player, bingo_player) ->
    %% Matching #bingo_game{} in the head subsumes the is_record guard.
    Game#bingo_game{squares = mark_grid(Grid, Phrase, Player)}.
%% Recomputes the per-player score map of `Game' from scratch by folding
%% over every (flattened) square in the grid.
-spec update_scores(bingo_game()) -> bingo_game().
update_scores(#bingo_game{squares = Squares} = Game) ->
    Scores = lists:foldl(fun score_reducer/2, #{}, lists:flatten(Squares)),
    Game#bingo_game{scores = Scores}.

%% Adds a marked square's points to its marker's running total; unmarked
%% squares contribute nothing.
-spec score_reducer(bingo_square(), bingo_scores()) -> bingo_scores().
score_reducer(#bingo_square{marked_by = undefined}, Acc) ->
    Acc;
score_reducer(#bingo_square{marked_by = Player, points = Points}, Acc) ->
    maps:update_with(Player#bingo_player.name,
                     fun(Old) -> Old + Points end,
                     Points,
                     Acc).
%% If no winner has been assigned yet and the grid now contains a bingo,
%% records `Player' as the winner; otherwise returns the game unchanged.
%% An already-assigned winner is never overwritten.
-spec assign_winner_if_bingo(bingo_player(), bingo_game()) -> bingo_game().
assign_winner_if_bingo(Player,
                       #bingo_game{winner = undefined,
                                   squares = Squares} = Game) ->
    case bingo_checker:is_bingo(Squares) of
        true -> Game#bingo_game{winner = Player};
        false -> Game
    end;
assign_winner_if_bingo(_Player, Game) ->
    Game.
%% Finds the square in the given 2D `Grid` that has the given `Phrase`,
%% and marks it for the given `Player`.
-spec mark_grid(bingo_squares(), bingo_phrase(), bingo_player()) ->
bingo_squares().
mark_grid(Grid, Phrase, Player) ->
List = lists:flatten(Grid),
ListWithMark = [mark_square(S, Phrase, Player) || S <- List],
%% NOTE(review): re-chunking with length(Grid) (the row count) assumes a
%% square grid — confirm if non-square grids are ever constructed.
%% Assertive match: chunk_every cannot fail here since the list length is
%% unchanged by the comprehension above.
{ok, GridWithMark} = chunk_every(length(Grid), ListWithMark),
GridWithMark.
%% Marks the given (unmarked) `Square` for the given `Player`.
%% A square that is already marked (by anyone) or whose phrase differs is
%% returned unchanged — first mark wins.
-spec mark_square(bingo_square(), bingo_phrase(), bingo_player()) ->
bingo_square().
mark_square(
#bingo_square{phrase = Phrase, marked_by = undefined} = Square,
Phrase,
Player
) when is_record(Player, bingo_player) ->
Square#bingo_square{marked_by = Player};
mark_square(#bingo_square{} = Square, _, _) ->
Square.
%% Returns a grid of `Size` x `Size` bingo squares from the given list
%% of buzzwords.
%% Requires Size >= 1 and at least Size*Size buzzwords; otherwise returns
%% {error, invalid_arguments}.
-spec make_squares(pos_integer(), buzzwords()) ->
{ok, bingo_squares()} | {error, invalid_arguments}.
make_squares(Size, Buzzwords) when is_integer(Size), is_list(Buzzwords) ->
case Size >= 1 andalso Size * Size =< length(Buzzwords) of
true ->
%% Take exactly Size*Size buzzwords, convert each to a square, then
%% reshape the flat list into Size rows.
Sublist = lists:sublist(Buzzwords, Size * Size),
List = [bingo_square:from_buzzword(B) || B <- Sublist],
chunk_every(Size, List);
false ->
{error, invalid_arguments}
end;
%% Catch-all for non-integer / non-list arguments.
make_squares(_,_) ->
{error, invalid_arguments}.
%% Splits `List' into consecutive chunks of exactly `Size' elements each.
%% The list must be non-empty and its length a positive multiple of `Size';
%% otherwise {error, invalid_arguments} is returned.
-spec chunk_every(pos_integer(), [T]) -> {ok, [[T]]} | {error, atom()}.
chunk_every(_Size, []) ->
    {error, invalid_arguments};
chunk_every(Size, List) when is_integer(Size), is_list(List) ->
    if
        Size >= 1, length(List) rem Size =:= 0 ->
            {ok, chunk_every(Size, List, [])};
        true ->
            {error, invalid_arguments}
    end.

%% Tail-recursive worker: prepend each chunk, reverse once at the end.
-spec chunk_every(pos_integer(), [T], [[T]]) -> [[T]].
chunk_every(_Size, [], Acc) ->
    lists:reverse(Acc);
chunk_every(Size, List, Acc) ->
    {Chunk, Tail} = lists:split(Size, List),
    chunk_every(Size, Tail, [Chunk | Acc]).
%%
%% TESTS.
%%
-ifdef(TEST).
%% Fixed fixture of nine buzzwords (enough for a 3x3 grid) shared by the
%% tests below; phrases/points are deliberately predictable.
buzzwords_list() ->
[#{phrase => "one", points => 1},
#{phrase => "two", points => 2},
#{phrase => "three", points => 3},
#{phrase => "four", points => 4},
#{phrase => "five", points => 5},
#{phrase => "six", points => 6},
#{phrase => "seven", points => 7},
#{phrase => "eight", points => 8},
#{phrase => "nine", points => 9}
].
%% mark_square tests: first mark wins; wrong phrases and re-marks are no-ops.
mark_unmarked_square_test() ->
Square = bingo_square:new("Some Phrase", 1),
Player = bingo_player:new("User", green),
SquareWithMark = mark_square(Square, "Some Phrase", Player),
Expected = #bingo_square{phrase = "Some Phrase",
points = 1,
marked_by = Player},
?assertEqual(Expected, SquareWithMark).
mark_already_marked_square_test() ->
Phrase = "Some Phrase",
Square = bingo_square:new(Phrase, 1),
P = bingo_player:new("Some Player", red),
FirstMark = mark_square(Square, Phrase, P),
SecondMark = mark_square(FirstMark, Phrase, P),
Expected = #bingo_square{phrase = Phrase,
points = 1,
marked_by = P},
?assertEqual(FirstMark, SecondMark),
?assertEqual(Expected, SecondMark).
mark_someone_square_test() ->
Phrase = "Some Phrase",
Square = bingo_square:new(Phrase, 1),
P1 = bingo_player:new("Player1", red),
P2 = bingo_player:new("Player2", green),
MarkedByP1 = mark_square(Square, Phrase, P1),
MarkAgainByP2 = mark_square(MarkedByP1, Phrase, P2),
Expected = #bingo_square{phrase = Phrase,
points = 1,
marked_by = P1},
?assertEqual(Expected, MarkAgainByP2),
?assertEqual(MarkedByP1, MarkAgainByP2).
%% Non-matching phrases must leave the square untouched.
mark_square_test_() ->
Square = bingo_square:new("Some Phrase", 1),
Player = bingo_player:new("User", green),
[
?_assertEqual(Square, mark_square(Square, "Foo", Player)),
?_assertEqual(Square, mark_square(Square, "", Player)),
?_assertEqual(Square, mark_square(Square, "Some Other Phrase", Player))
].
%% make_squares tests: grids of 1x1..3x3 from the 9-element fixture, plus
%% all the argument combinations that must be rejected.
make_1x1_squares_test() ->
Buzzwords = buzzwords_list(),
Expected = [[#bingo_square{phrase = "one",
points = 1,
marked_by = undefined
}
]],
{ok, Result} = make_squares(1, Buzzwords),
?assertEqual(Expected, Result).
make_2x2_squares_test() ->
Buzzwords = buzzwords_list(),
Expected = [
[#bingo_square{phrase = "one",
points = 1,
marked_by = undefined},
#bingo_square{phrase = "two",
points = 2,
marked_by = undefined}
],
[#bingo_square{phrase = "three",
points = 3,
marked_by = undefined},
#bingo_square{phrase = "four",
points = 4,
marked_by = undefined}
]
],
{ok, Result} = make_squares(2, Buzzwords),
?assertEqual(Expected, Result).
make_3x3_squares_test() ->
Buzzwords = buzzwords_list(),
Expected = [
[#bingo_square{phrase = "one",
points = 1,
marked_by = undefined
},
#bingo_square{phrase = "two",
points = 2,
marked_by = undefined
},
#bingo_square{phrase = "three",
points = 3,
marked_by = undefined
}
],
[#bingo_square{phrase = "four",
points = 4,
marked_by = undefined
},
#bingo_square{phrase = "five",
points = 5,
marked_by = undefined
},
#bingo_square{phrase = "six",
points = 6,
marked_by = undefined
}
],
[#bingo_square{phrase = "seven",
points = 7,
marked_by = undefined
},
#bingo_square{phrase = "eight",
points = 8,
marked_by = undefined
},
#bingo_square{phrase = "nine",
points = 9,
marked_by = undefined
}
]
],
{ok, Result} = make_squares(3, Buzzwords),
?assertEqual(Expected, Result).
%% Sizes whose square exceeds the 9-element fixture, plus 0 and negatives.
make_squares_error_test_() ->
Buzzwords = buzzwords_list(),
[?_assertEqual({error, invalid_arguments}, make_squares(9, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(8, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(7, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(6, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(5, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(4, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(0, Buzzwords)),
?_assertEqual({error, invalid_arguments}, make_squares(-1, Buzzwords))
].
%% chunk_every tests: happy paths for chunk sizes 1..5, then every rejected
%% combination (empty list, non-positive size, length not divisible).
chunk_every_1_test() ->
Expected = [[1], [2], [3]],
{ok, Result} = chunk_every(1, lists:seq(1, 3)),
?assertEqual(Expected, Result).
chunk_every_2_test() ->
Expected = [[1, 2], [3, 4], [5, 6]],
{ok, Result} = chunk_every(2, lists:seq(1, 6)),
?assertEqual(Expected, Result).
chunk_every_3_test() ->
Expected = [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
{ok, Result} = chunk_every(3, lists:seq(1,9)),
?assertEqual(Expected, Result).
chunk_every_4_test() ->
Expected = [[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]],
{ok, Result} = chunk_every(4, lists:seq(1,16)),
?assertEqual(Expected, Result).
chunk_every_5_test() ->
Expected = [[1, 2, 3, 4, 5],
[6, 7, 8, 9, 10],
[11, 12, 13, 14, 15],
[16, 17, 18, 19, 20],
[21, 22, 23, 24, 25]],
{ok, Result} = chunk_every(5, lists:seq(1,25)),
?assertEqual(Expected, Result).
chunk_every_error_test_() ->
[?_assertEqual({error, invalid_arguments}, chunk_every(1, [])),
?_assertEqual({error, invalid_arguments}, chunk_every(0, [])),
?_assertEqual({error, invalid_arguments}, chunk_every(-1, [])),
?_assertEqual({error, invalid_arguments}, chunk_every(2, [1])),
?_assertEqual({error, invalid_arguments}, chunk_every(0, [1])),
?_assertEqual({error, invalid_arguments}, chunk_every(-1, [1])),
?_assertEqual({error, invalid_arguments}, chunk_every(4, [1, 2])),
?_assertEqual({error, invalid_arguments}, chunk_every(3, [1, 2])),
?_assertEqual({error, invalid_arguments}, chunk_every(0, [1, 2])),
?_assertEqual({error, invalid_arguments}, chunk_every(-1, [1, 2])),
?_assertEqual({error,invalid_arguments}, chunk_every(8,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(7,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(6,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(5,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(4,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(2,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(0,lists:seq(1,9))),
?_assertEqual({error,invalid_arguments}, chunk_every(-1,lists:seq(1,9)))
].
-endif.
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.