%% (removed: dataset-extraction table header, not part of this source file)
%%-------------------------------------------------------------------
%%
%% Copyright (c) 2016, <NAME> <<EMAIL>>
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%-------------------------------------------------------------------
%% @doc This modules provides utility functions for overload protection and/or
%% short circuiting calls to a process. It is designed for use with `sbroker'
%% and `sregulator' processes using the `sprotector_pie_meter' meter. However
%% any OTP process can use this module to do load balancing using the `via'
%% naming format if the process is registered with and updates the
%% `sprotector_server'.
%%
%% To use `sprotector' with `via' use names of the form
%% `{via, sprotector, {Broker, ask | ask_r}}'. Where `{Broker, ...}' is
%% a tuple containing
%% `pid() | atom() | {global, any()} | {via, module(), any()} | {atom(), node()}'.
%% The lookup will succeed if the approximate queue length is less than or equal
%% to the minimum queue length or below the maximum and the drop probability
%% allows. Otherwise the call exits with reason `drop'.
%%
%% Therefore when combining with `sbetter' use format:
%% `{via, sprotector, {{via, sbetter, {..., ask | ask_r}, ask | ask_r}}'. So the
%% load balancing lookup is resolved first and wrapped by `sprotector'.
%%
%% It is not possible to locally look up the pid of a process with name
%% `{atom(), node()}' if the node is not the local node. Therefore a registered
%% name on another node is not supported for use with this module.
%%
%% If a chosen process is not local the call may exit with `{badnode, node()}'.
%%
%% If a chosen process is not registered with the `sprotector_server' the call
%% may exit with `{noprotector, pid()}'. The `sprotector_pie_meter' will register
%% with the server and update it using the PIE active queue management
%% algorithm. However other methods can be used to register and update the
%% `sprotector_server'. Registering with the `sprotector_server' must be done with
%% `sprotector_server:register/1' and not using
%% `start_link({via, sprotector, ...}, ...)'.
%%
%% @see sprotector_pie_meter
%% @see sprotector_server
-module(sprotector).
%% public API
-export([whereis_name/1]).
-export([send/2]).
%% types
-type process() :: pid() | atom() | {atom(), node()} | {global, term()} |
{via, module(), term()}.
%% @doc Look up the pid behind `Process' and possibly drop the request,
%% depending on the min, max and drop probability of the chosen queue.
%% Returns `undefined' if no process is associated with the name; exits
%% with `drop' (or the lookup error) otherwise.
-spec whereis_name({Process, ask | ask_r}) -> Pid | undefined when
      Process :: process(),
      Pid :: pid().
whereis_name({Process, Key}) ->
    whereis_result(info(Process, Key)).

%% Translate the result of info/2 into the whereis_name/1 contract.
whereis_result({go, Pid})  -> Pid;
whereis_result({drop, _})  -> exit(drop);
whereis_result(undefined)  -> undefined;
whereis_result(Error)      -> exit(Error).
%% @doc Send a message to a pid unless the request is dropped by the min,
%% max and drop probability of the chosen queue. Returns `ok' when the
%% message was sent; exits with a tagged reason otherwise.
-spec send({Process, ask | ask_r}, Msg) -> ok when
      Process :: process(),
      Msg :: any().
send(Target, Msg) ->
    try whereis_name(Target) of
        undefined ->
            %% No process registered under the name.
            exit({noproc, {?MODULE, send, [Target, Msg]}});
        Pid when is_pid(Pid) ->
            Pid ! Msg,
            ok
    catch
        exit:Reason ->
            %% Wrap drop/badnode/noprotector exits with call context.
            exit({Reason, {?MODULE, send, [Target, Msg]}})
    end.
%% Internal

%% Resolve the process name and ask the protector whether to proceed.
%% Only local pids are supported; remote pids or remote registered names
%% yield a {badnode, Node} error term.
info(Process, Key) ->
    classify(sbroker_gen:whereis(Process), Key).

classify(undefined, _Key) ->
    undefined;
classify(Pid, Key) when is_pid(Pid) ->
    case node(Pid) =:= node() of
        true  -> {ask(Pid, Key), Pid};
        false -> {badnode, node(Pid)}
    end;
classify({_Name, Node}, _Key) ->
    {badnode, Node}.
%% Ask the protector server whether this request may proceed. If the
%% target process never registered with sprotector_server, the lookup
%% inside ask/2 fails with badarg, which is reported as `noprotector'.
%% (Fix: removed dataset-extraction metadata that was fused onto the
%% final line and broke compilation.)
ask(Pid, Key) ->
    try
        sprotector_server:ask(Pid, Key)
    catch
        error:badarg ->
            noprotector
    end.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2018-2020. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% Share code for semantically equivalent blocks referred
%% to by `br` and `switch` instructions.
%%
%% A similar optimization is done in beam_jump, but doing it here as
%% well is beneficial as it may enable other optimizations. If there
%% are many semantically equivalent clauses, this optimization can
%% substantially decrease compilation times.
%%
%% block/2 is called from the liveness optimization pass in
%% beam_ssa_opt, as code sharing helps the liveness pass and vice
%% versa.
%%
-module(beam_ssa_share).
-export([module/2,block/2]).
-include("beam_ssa.hrl").
-import(lists, [keyfind/3,reverse/1,sort/1]).
-spec module(beam_ssa:b_module(), [compile:option()]) ->
                    {'ok',beam_ssa:b_module()}.

%% Run the sharing optimization over every function in the module.
module(#b_module{body=Fs0}=Module0, _Opts) ->
    Body = lists:map(fun function/1, Fs0),
    {ok,Module0#b_module{body=Body}}.
-spec block(Blk0, Blocks0) -> Blk when
      Blk0 :: beam_ssa:b_blk(),
      Blocks0 :: beam_ssa:block_map(),
      Blk :: beam_ssa:b_blk().

%% Try to share the terminator of a single block. Returns the block
%% unchanged when no sharing opportunity was found.
block(#b_blk{last=Last0}=Blk0, Blocks) ->
    Shared = share_terminator(Last0, Blocks),
    if
        Shared =:= none -> Blk0;
        true -> Blk0#b_blk{last=beam_ssa:normalize(Shared)}
    end.
%%%
%%% Local functions.
%%%

%% Optimize one function. Any crash is re-raised after printing the
%% function name/arity, which makes compiler bug reports actionable.
function(#b_function{anno=Anno,bs=Blocks0}=F) ->
    try
        RPO = reverse(beam_ssa:rpo(Blocks0)),
        case blocks(RPO, Blocks0, false) of
            {Blocks1,true} ->
                %% Sharing may leave blocks unreachable; trim them.
                F#b_function{bs=beam_ssa:trim_unreachable(Blocks1)};
            {_,false} ->
                F#b_function{bs=Blocks0}
        end
    catch
        Class:Error:Stack ->
            #{func_info:={_,Name,Arity}} = Anno,
            io:fwrite("Function: ~w/~w\n", [Name,Arity]),
            erlang:raise(Class, Error, Stack)
    end.
%% Visit each label, replacing blocks whose terminator changed.
%% The Changed flag records whether any block was rewritten.
blocks([L|Ls], Blocks0, Changed) ->
    #b_blk{last=Last0} = Blk0 = map_get(L, Blocks0),
    Blk = block(Blk0, Blocks0),
    case Blk of
        #b_blk{last=Last0} ->
            %% Terminator unchanged; keep the map as-is.
            blocks(Ls, Blocks0, Changed);
        #b_blk{} ->
            blocks(Ls, Blocks0#{L:=Blk}, true)
    end;
blocks([], Blocks, Changed) ->
    {Blocks,Changed}.
%% Attempt to share the targets of a terminator. For a two-way `br` we
%% shortcut empty successor/failure blocks and, if the two targets turn
%% out to be semantically equivalent, redirect both edges to the same
%% label. Returns `none' when nothing changed.
share_terminator(#b_br{bool=#b_var{},succ=Succ0,fail=Fail0}=Br, Blocks) ->
    {Succ,SuccBlk} = shortcut_nonempty_block(Succ0, Blocks),
    {Fail,FailBlk} = shortcut_nonempty_block(Fail0, Blocks),
    case are_equivalent(Succ, SuccBlk, Fail, FailBlk, Blocks) of
        true ->
            %% The blocks are semantically equivalent.
            Br#b_br{succ=Succ,fail=Succ};
        false ->
            if
                Succ =:= Succ0, Fail =:= Fail0 ->
                    %% None of blocks were cut short.
                    none;
                true ->
                    %% One or both labels were cut short
                    %% to avoid jumping to an empty block.
                    Br#b_br{succ=Succ,fail=Fail}
            end
    end;
share_terminator(#b_switch{}=Sw, Blocks) ->
    share_switch(Sw, Blocks);
share_terminator(_Last, _Blocks) -> none.
%% Test whether the two blocks are semantically equivalent. This
%% function is specially optimized to return `false` as fast as
%% possible if the blocks are not equivalent, as that is the common
%% case.
%%
%% Equivalence is decided for three terminator shapes: two `ret`
%% blocks, two unconditional branches to a common target, and two
%% conditional branches with the same failure label. Everything else
%% is conservatively reported as not equivalent.
are_equivalent(_Succ, _, ?BADARG_BLOCK, _, _Blocks) ->
    %% ?BADARG_BLOCK is special. Sharing could be incorrect.
    false;
are_equivalent(_Succ, #b_blk{is=Is1,last=#b_ret{arg=RetVal1}=Ret1},
               _Fail, #b_blk{is=Is2,last=#b_ret{arg=RetVal2}=Ret2}, _Blocks) ->
    case {RetVal1,RetVal2} of
        {#b_literal{},#b_literal{}} ->
            case RetVal1 =:= RetVal2 of
                true ->
                    %% The return values are identical literals. We
                    %% only need to compare the canonicalized bodies.
                    Can1 = canonical_is(Is1),
                    Can2 = canonical_is(Is2),
                    Can1 =:= Can2;
                false ->
                    %% Non-equal literals.
                    false
            end;
        {#b_var{},#b_var{}} ->
            %% The return values are variables. We must canonicalize
            %% the blocks (including returns) and compare them.
            Can1 = canonical_is(Is1 ++ [Ret1]),
            Can2 = canonical_is(Is2 ++ [Ret2]),
            Can1 =:= Can2;
        {_,_} ->
            %% One literal and one variable.
            false
    end;
are_equivalent(Succ,
               #b_blk{is=Is1,
                      last=#b_br{bool=#b_literal{val=true},
                                 succ=Target}},
               Fail,
               #b_blk{is=Is2,
                      last=#b_br{bool=#b_literal{val=true},
                                 succ=Target}},
               Blocks) ->
    %% Both blocks end with an unconditional branch to the
    %% same target block. If the target block has phi nodes,
    %% we must pick up the values from the phi nodes and
    %% compare them.
    #b_blk{is=Is} = map_get(Target, Blocks),
    Phis1 = canonical_terminator_phis(Is, Succ),
    Phis2 = canonical_terminator_phis(Is, Fail),
    case {Phis1,Phis2} of
        {[#b_set{args=[#b_literal{}]}|_],_} when Phis1 =/= Phis2 ->
            %% Different values are used in the phi nodes.
            false;
        {_,[#b_set{args=[#b_literal{}]}|_]} when Phis1 =/= Phis2 ->
            %% Different values are used in the phi nodes.
            false;
        {_,_} ->
            %% The values in the phi nodes are variables or identical
            %% literals. We must canonicalize the blocks and compare
            %% them.
            Can1 = canonical_is(Is1 ++ Phis1),
            Can2 = canonical_is(Is2 ++ Phis2),
            Can1 =:= Can2
    end;
are_equivalent(Succ0, #b_blk{is=Is1,last=#b_br{bool=#b_var{},fail=Same}},
               Fail0, #b_blk{is=Is2,last=#b_br{bool=#b_var{},fail=Same}},
               Blocks) ->
    %% Two-way branches with identical failure labels. First compare the
    %% canonicalized bodies of the blocks.
    case canonical_is(Is1) =:= canonical_is(Is2) of
        false ->
            %% Different bodies.
            false;
        true ->
            %% Bodies were equal. That is fairly uncommon, so to keep
            %% the code simple we will rewrite the `br` to a `switch`
            %% and let share_switch/2 do the work of following the
            %% branches.
            Sw = #b_switch{arg=#b_var{name=not_used},fail=Fail0,
                           list=[{#b_literal{},Succ0}]},
            #b_switch{fail=Fail,list=[{_,Succ}]} = share_switch(Sw, Blocks),
            Fail =:= Succ
    end;
are_equivalent(_, _, _, _, _) -> false.
%% Share equivalent branches of a `switch`. The failure label is carried
%% through the pipeline as a pseudo-entry so it can be shared too; the
%% numeric tags assigned by share_prepare_sw/4 restore the original order.
share_switch(#b_switch{fail=Fail0,list=List0}=Sw, Blocks) ->
    Prep = share_prepare_sw([{value,Fail0}|List0], Blocks, 0, []),
    Shared = do_share_switch(Prep, Blocks, []),
    Ordered = [Entry || {_N,Entry} <- sort(Shared)],
    [{_,Fail}|List] = Ordered,
    Sw#b_switch{fail=Fail,list=List}.
%% Tag each switch arm with its position N (so order can be restored)
%% and shortcut branches through empty blocks.
share_prepare_sw([{V,L0}|T], Blocks, N, Acc) ->
    {L,_Blk} = shortcut_nonempty_block(L0, Blocks),
    Entry = {{L,#{}},{N,{V,L}}},
    share_prepare_sw(T, Blocks, N+1, [Entry|Acc]);
share_prepare_sw([], _Blocks, _N, Acc) ->
    Acc.
%% Group the arms by the canonical form of their target blocks, then
%% decide sharing per group.
do_share_switch(Prep, Blocks, Acc) ->
    Grouped = share_switch_1(Prep, Blocks, #{}),
    share_switch_2(maps:values(Grouped), Blocks, Acc).

share_switch_1([{Next0,Res}|T], Blocks, Map0) ->
    {Can,Next} = canonical_block(Next0, Blocks),
    Entry = {Next,Res},
    Map = maps:update_with(Can, fun(Es) -> [Entry|Es] end, [Entry], Map0),
    share_switch_1(T, Blocks, Map);
share_switch_1([], _Blocks, Map) -> Map.
%% Process one group of switch arms at a time. Each group contains arms
%% whose target blocks had the same canonical form so far.
share_switch_2([[{_,{N,Res}}]|T], Blocks, Acc) ->
    %% This block is not equivalent to any other block.
    share_switch_2(T, Blocks, [{N,Res}|Acc]);
share_switch_2([[{done,{_,{_,Common}}}|_]=Eqs|T], Blocks, Acc0) ->
    %% Two or more blocks are semantically equivalent, and all blocks
    %% are either terminated with a `ret` or a `br` to the same target
    %% block. Replace the labels in the `switch` for all of those
    %% blocks with the label for the first of the blocks.
    Acc = [{N,{V,Common}} || {done,{N,{V,_}}} <- Eqs] ++ Acc0,
    share_switch_2(T, Blocks, Acc);
share_switch_2([[{_,_}|_]=Prep|T], Blocks, Acc0) ->
    %% Two or more blocks are semantically equivalent, but they have
    %% different successful successor blocks. Now we must check
    %% recursively whether the successor blocks are equivalent too.
    Acc = do_share_switch(Prep, Blocks, Acc0),
    share_switch_2(T, Blocks, Acc);
share_switch_2([], _, Acc) -> Acc.
%% Compute the canonical form of the block at label L, threading the
%% variable renaming map VarMap0 from previous blocks. Returns
%% {CanonicalForm, done} when the comparison chain stops here, or
%% {CanonicalForm, {NextLabel, VarMap}} when a conditional branch means
%% the successor must be compared too.
canonical_block({L,VarMap0}, Blocks) ->
    #b_blk{is=Is,last=Last0} = map_get(L, Blocks),
    case canonical_terminator(L, Last0, Blocks) of
        none ->
            %% The block has a terminator that we don't handle.
            {{none,L},done};
        {Last,done} ->
            %% The block ends with a `ret` or an unconditional `br` to
            %% another block.
            {Can,_VarMap} = canonical_is(Is ++ Last, VarMap0, []),
            {Can,done};
        {Last,Next} ->
            %% The block ends with a conditional branch.
            {Can,VarMap} = canonical_is(Is ++ Last, VarMap0, []),
            {Can,{Next,VarMap}}
    end.
%% Translate a sequence of instructions to a canonical representation. If the
%% canonical representation of two blocks compare equal, the blocks are
%% semantically equivalent. The following translations are done:
%%
%% * Variables defined in the instruction sequence are replaced with
%% {var,0}, {var,1}, and so on. Free variables are not changed.
%%
%% * `location` annotations that would produce a `line` instruction are
%% kept. All other annotations are cleared.
%%
%% * Instructions are repackaged into tuples instead of into the
%% usual records. The main reason is to avoid violating the types for
%% the SSA records. We can simplify things a little by linking the
%% instructions directly instead of putting them into a list.
%% Canonicalize an instruction sequence with a fresh variable map,
%% discarding the final map.
canonical_is(Is) ->
    element(1, canonical_is(Is, #{}, [])).
%% Canonicalize one instruction sequence. Defined variables are renamed
%% to {var,0}, {var,1}, ... in definition order; instructions are linked
%% together as nested tuples (Acc is the previous canonical form).
canonical_is([#b_set{op=Op,dst=Dst,args=Args0}=I|Is], VarMap0, Acc) ->
    Args = [canonical_arg(Arg, VarMap0) || Arg <-Args0],
    %% Next free canonical variable number equals the map size.
    Var = {var,map_size(VarMap0)},
    VarMap = VarMap0#{Dst=>Var},
    LineAnno = case Op of
                   bs_match ->
                       %% The location annotation for a bs_match instruction
                       %% is only used in warnings, never to emit a `line`
                       %% instruction. Therefore, it should not be included.
                       [];
                   _ ->
                       %% The location annotation will be used in a `line`
                       %% instruction. It must be included.
                       beam_ssa:get_anno(location, I, none)
               end,
    canonical_is(Is, VarMap, {Op,LineAnno,Var,Args,Acc});
canonical_is([#b_ret{arg=Arg}], VarMap, Acc0) ->
    Acc1 = case Acc0 of
               {call,_Anno,Var,[#b_local{}|_]=Args,PrevAcc} ->
                   %% This is a tail-recursive call to a local function.
                   %% There will be no line instruction generated;
                   %% thus, the annotation is not significant.
                   {call,[],Var,Args,PrevAcc};
               _ ->
                   Acc0
           end,
    {{ret,canonical_arg(Arg, VarMap),Acc1},VarMap};
canonical_is([#b_br{bool=#b_var{}=Arg,fail=Fail}], VarMap, Acc) ->
    %% A previous buggy version of this code omitted the canonicalized
    %% argument in the return value. Unfortunately, that worked most
    %% of the time, except when `br` terminator referenced a variable
    %% defined in a previous block instead of in the same block.
    {{br,canonical_arg(Arg, VarMap),succ,Fail,Acc},VarMap};
canonical_is([#b_br{succ=Succ}], VarMap, Acc) ->
    {{br,Succ,Acc},VarMap};
canonical_is([], VarMap, Acc) ->
    {Acc,VarMap}.
%% Normalize a terminator for canonicalization. Returns instructions to
%% append to the block body plus either `done' (comparison chain ends)
%% or the label of the successor block that must also be compared.
canonical_terminator(_L, #b_ret{}=Ret, _Blocks) ->
    {[Ret],done};
canonical_terminator(L, #b_br{bool=#b_literal{val=true},succ=Succ}=Br, Blocks) ->
    %% Unconditional branch: fold the target's phi values (for the edge
    %% from L) into the canonical form.
    #b_blk{is=Is} = map_get(Succ, Blocks),
    case canonical_terminator_phis(Is, L) of
        [] ->
            {[],Succ};
        [_|_]=Phis ->
            {Phis ++ [Br],done}
    end;
canonical_terminator(_L, #b_br{bool=#b_var{},succ=Succ}=Br, _Blocks) ->
    {[Br],Succ};
canonical_terminator(_, _, _) -> none.
%% Extract the phi values flowing in on the edge from predecessor L,
%% rewritten as `copy` instructions so canonical_is/3 can process them.
canonical_terminator_phis([#b_set{op=phi,args=PhiArgs}=Phi|Is], L) ->
    %% Asserts that an argument for predecessor L exists in every phi.
    {Value,L} = keyfind(L, 2, PhiArgs),
    [Phi#b_set{op=copy,args=[Value]}|canonical_terminator_phis(Is, L)];
canonical_terminator_phis([#b_set{op=peek_message}=I|_], L) ->
    %% We could get stuck into an infinite loop if we allowed the
    %% comparisons to continue into this block. Force an unequal
    %% compare with all other predecessors of this block.
    [I#b_set{op=copy,args=[#b_literal{val=L}]}];
canonical_terminator_phis(_, _) -> [].
%% Canonicalize a single argument: defined variables map to their
%% canonical {var,N} form, free variables and literals pass through,
%% and remote call targets are canonicalized recursively.
canonical_arg(#b_var{}=Var, VarMap) ->
    maps:get(Var, VarMap, Var);
canonical_arg(#b_remote{mod=Mod0,name=Name0}, VarMap) ->
    {remote, canonical_arg(Mod0, VarMap), canonical_arg(Name0, VarMap)};
canonical_arg(Other, _VarMap) ->
    Other.
%% Shortcut branches to empty blocks if safe. Follows chains of empty
%% blocks ending in unconditional branches until a non-empty (or
%% forbidden) block is reached.
shortcut_nonempty_block(L, Blocks) ->
    Blk = map_get(L, Blocks),
    case Blk of
        #b_blk{is=[],last=#b_br{bool=#b_literal{val=true},succ=Succ}} ->
            %% Empty block; skip it unless the target is forbidden.
            case is_forbidden(Succ, Blocks) of
                true  -> {L,Blk};
                false -> shortcut_nonempty_block(Succ, Blocks)
            end;
        #b_blk{} ->
            {L,Blk}
    end.
%% A block may not be branched to directly if it starts with a phi
%% instruction (phi values depend on the predecessor label) or a
%% peek_message instruction.
%% (Fix: removed dataset-extraction metadata that was fused onto the
%% final line and broke compilation.)
is_forbidden(L, Blocks) ->
    case map_get(L, Blocks) of
        #b_blk{is=[#b_set{op=phi}|_]} -> true;
        #b_blk{is=[#b_set{op=peek_message}|_]} -> true;
        #b_blk{} -> false
    end.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%
%% -------------------------------------------------------------------
-module(exometer_folsom).
-behaviour(exometer_entry).
-export([behaviour/0,
new/3,
delete/3,
get_datapoints/3,
get_value/4,
update/4,
reset/3,
sample/3,
setopts/3]).
-spec behaviour() -> exometer:behaviour().
%% @doc Identify this callback module to exometer as an entry behaviour.
behaviour() -> entry.
%% @doc Create the underlying folsom metric for a new exometer entry.
%% Returns either the bare folsom result (counter, spiral) or a tuple
%% {Result, Ref} where Ref carries the `truncate' option for later use
%% by get_value/4.
new(Name, counter, _Opts) ->
    folsom_metrics:new_counter(Name);
new(Name, spiral, _Opts) ->
    folsom_metrics:new_spiral(Name);
new(Name, histogram, Opts) ->
    %% BUG FIX: lists:keysearch/3 returns {value, {arg, ...}}, so the old
    %% pattern {_, {histogram, SampleType, SampleArgs}} could never match;
    %% any supplied {arg, ...} option crashed with case_clause and the
    %% custom sample type was unreachable. keyfind/3 returns the option
    %% tuple itself, which we match directly. (Assumes the option value is
    %% {histogram, SampleType, SampleArgs} as the old pattern intended —
    %% confirm against callers.)
    case lists:keyfind(arg, 1, Opts) of
        {arg, {histogram, SampleType, SampleArgs}} ->
            {folsom_metrics:new_histogram(Name, SampleType, SampleArgs),
             opt_ref(Opts)};
        false ->
            %% Default: sliding uniform window of 60 s, 1028 samples.
            {folsom_metrics:new_histogram(Name, slide_uniform, {60, 1028}),
             opt_ref(Opts)}
    end;
new(Name, meter, Opts) ->
    {folsom_metrics:new_meter(Name), opt_ref(Opts)};
new(Name, meter_reader, Opts) ->
    {folsom_metrics:new_meter_reader(Name), opt_ref(Opts)};
new(Name, gauge, Opts) ->
    {folsom_metrics:new_gauge(Name), opt_ref(Opts)};
new(Name, duration, Opts) ->
    {folsom_metrics:new_duration(Name), opt_ref(Opts)};
new(Name, history, Opts) ->
    case lists:keyfind(size, 1, Opts) of
        {_, Sz} -> {folsom_metrics:new_history(Name, Sz), opt_ref(Opts)};
        false -> {folsom_metrics:new_history(Name), opt_ref(Opts)}
    end.
%% Build the entry reference carrying the `truncate' option. Defaults
%% to true; a non-boolean truncate value is a caller error (crashes).
opt_ref(Opts) ->
    case lists:keyfind(truncate, 1, Opts) of
        {truncate, Flag} when is_boolean(Flag) ->
            [{truncate, Flag}];
        false ->
            [{truncate, true}]
    end.
%% @doc Remove the folsom metric backing this entry.
delete(Name, _Type, _Ref) ->
    folsom_metrics:delete_metric(Name).

%% @doc Forward an update to folsom. Counters increment by Value;
%% meter_reader supports a `tick' command as well as a numeric mark;
%% every other type notifies folsom with the raw value.
update(Name, Value, counter, _Ref) ->
    folsom_metrics:notify_existing_metric(Name, {inc, Value}, counter);
update(Name, tick, meter_reader, _Ref) ->
    folsom_metrics_meter_reader:tick(Name);
update(Name, Value, meter_reader, _Ref) ->
    folsom_metrics_meter_reader:mark(Name, Value);
update(Name, Value, Type, _Ref) ->
    folsom_metrics:notify_existing_metric(Name, Value, Type).

%% @doc Reset a metric to its initial state. Only counters and gauges
%% can be cleared in folsom; other types report unsupported.
reset(Name, counter, _Ref) ->
    folsom_metrics_counter:clear(Name);
reset(Name, gauge, _Ref) ->
    folsom_metrics_gauge:clear(Name);
reset(_, _, _) ->
    {error, unsupported}.
%% @doc Fetch datapoints for an entry. History entries support the
%% special datapoints `events', `values', `timed_events', `info' and a
%% positive integer N (last N events); all other types resolve each
%% requested datapoint against the folsom value list via filter_dp/3.
%% Returns `unavailable' if the folsom metric cannot be read.
get_value(Name, history, _Ref, DataPoints0) ->
    try DataPoints = datapoints(history, DataPoints0),
         lists:foldr(
           fun(events, Acc) ->
                   [{events, just_events(
                               folsom_metrics_history:get_events(Name))}
                    | Acc];
              (values, Acc) ->
                   [{values, folsom_metrics_history:get_events(Name)}
                    | Acc];
              (timed_events, Acc) ->
                   [{timed_events,
                     timed_events(
                       folsom_metrics_history:get_events(Name))}
                    | Acc];
              (Sz, Acc) when is_integer(Sz), Sz > 0 ->
                   [{Sz, just_events(
                           folsom_metrics_history:get_events(Name, Sz))}
                    | Acc];
              (info, Acc) ->
                   [{info, folsom_metrics_history:get_value(Name)}
                    | Acc];
              %% Unknown datapoints are silently skipped.
              (_, Acc) -> Acc
           end, [], DataPoints)
    catch
        error:_ -> unavailable
    end;
get_value(Name, Type, Ref, DataPoints) ->
    %% Ref holds the truncate option recorded by opt_ref/1 at creation.
    Trunc = get_trunc_opt(Ref),
    Vals = get_value_(Name, Type, Ref),
    try [filter_dp(D, Vals, Trunc) || D <- datapoints(Type, DataPoints)]
    catch
        error:_Error ->
            unavailable
    end.
%% Read the truncate flag from the entry reference (counters/spirals
%% store no reference, hence the `undefined' clause). Defaults to true.
get_trunc_opt(undefined) -> true;
get_trunc_opt(Opts) ->
    proplists:get_value(truncate, Opts, true).

%% @doc Datapoints supported by this entry type.
get_datapoints(_Name, Type, _Ref) ->
    datapoints(Type, default).

%% Resolve `default' to the per-type datapoint list; otherwise the
%% caller-supplied list is used as-is.
datapoints(Type, default) -> datapoints(Type);
datapoints(_Type, DPs) when is_list(DPs) -> DPs.
%% Default datapoint set for each supported metric type.
datapoints(counter)   -> [value];
datapoints(gauge)     -> [value];
datapoints(histogram) -> stats_datapoints();
datapoints(duration)  -> [count, last | stats_datapoints()];
datapoints(spiral)    -> [one, count];
datapoints(meter)     -> [count, one, five, fifteen, day, mean, acceleration];
datapoints(history)   -> [events, info].
%% Pick a single datapoint out of the folsom value proplist, applying
%% truncation. `mean' and `arithmetic_mean' are treated as aliases;
%% integer datapoints (percentiles) may be nested under `percentile'.
%% Missing datapoints yield zero rather than failing.
filter_dp(Mean, DPs, Trunc) when Mean==mean; Mean==arithmetic_mean ->
    case lists:keyfind(mean, 1, DPs) of
        false ->
            %% Fall back to folsom's arithmetic_mean key.
            case lists:keyfind(arithmetic_mean, 1, DPs) of
                false -> {mean, zero(Trunc)};
                {_,V} -> {mean, opt_trunc(Trunc, V)}
            end;
        {_,V} -> {mean, opt_trunc(Trunc, V)}
    end;
filter_dp(H, DPs, Trunc) when is_integer(H) ->
    case lists:keyfind(H, 1, DPs) of
        false ->
            %% Percentiles may be nested in a `percentile' sub-list.
            case lists:keyfind(percentile, 1, DPs) of
                false -> {H, zero(Trunc)};
                {_, Ps} ->
                    get_dp(H, Ps, Trunc)
            end;
        {_,V} -> {H, opt_trunc(Trunc, V)}
    end;
filter_dp(H, DPs, Trunc) ->
    get_dp(H, DPs, Trunc).
%% Apply the truncate mode to a value: `true' truncates floats toward
%% zero, `round' rounds them, anything else leaves the value untouched.
opt_trunc(true, V) when is_float(V) ->
    trunc(V);
opt_trunc(round, V) when is_float(V) ->
    round(V);
opt_trunc(_Mode, V) ->
    V.

%% Look up datapoint Key in the proplist, truncating found values and
%% substituting a mode-appropriate zero for missing ones.
get_dp(Key, DPs, Trunc) ->
    case lists:keyfind(Key, 1, DPs) of
        {Key, V} -> {Key, opt_trunc(Trunc, V)};
        false    -> {Key, zero(Trunc)}
    end.

%% Zero in the representation matching the truncate mode.
zero(true) -> 0;
zero(round) -> 0;
zero(false) -> 0.0.
%% Datapoints shared by the histogram-backed metric types.
stats_datapoints() ->
    [n, mean, min, max, median, 50, 75, 90, 95, 99, 999].

%% @doc No runtime options are supported; accept and ignore them.
setopts(_Entry, _Options, _Status) ->
    ok.

%% @doc Folsom metrics cannot be sampled through exometer.
sample(_Name, _Type, _Ref) ->
    {error, unsupported}.
%% Read the raw value proplist for a metric type from folsom. The
%% result is later filtered per-datapoint by filter_dp/3.
get_value_(Name, counter, _Ref) ->
    [{value, folsom_metrics_counter:get_value(Name)}];
get_value_(Name, gauge, _Ref) ->
    [{value, folsom_metrics_gauge:get_value(Name)}];
get_value_(Name, histogram, _Ref) ->
    calc_stats(folsom_metrics_histogram:get_values(Name));
get_value_(Name, duration, _Ref) ->
    %% Durations combine the folsom duration record (count, last) with
    %% histogram statistics over the recorded values.
    {Name, Cnt, _Start, Last} = folsom_metrics_duration:get_value(Name),
    Stats = calc_stats(folsom_metrics_histogram:get_values(Name)),
    [{count, Cnt}, {last, Last} | Stats];
get_value_(Name, meter, _Ref) ->
    folsom_metrics:get_metric_value(Name);
get_value_(Name, spiral, _Ref) ->
    folsom_metrics_spiral:get_values(Name).
%% Compute summary statistics (n, mean, percentiles, ...) over a list
%% of raw samples using the shared exometer helper.
calc_stats(Values) ->
    Sorted = lists:sort(Values),
    exometer_util:get_statistics(length(Values), lists:sum(Values), Sorted).
%% Strip timestamps and {event, _} wrappers from a folsom history list,
%% returning the flat list of event terms in order.
just_events(History) ->
    lists:flatmap(
      fun({Moment, Events}) when is_integer(Moment) ->
              lists:map(fun({event, E}) -> E end, Events)
      end,
      History).
%% Pair each event with the timestamp of its history bucket, returning
%% a flat list of {Timestamp, Event} tuples in order.
%% (Fix: removed dataset-extraction metadata that was fused onto the
%% final line and broke compilation.)
timed_events([{Moment, Events} | Rest]) when is_integer(Moment) ->
    timed_events(Events, Moment, Rest);
timed_events([]) ->
    [].

timed_events([{event, E} | Es], Moment, Rest) ->
    [{Moment, E} | timed_events(Es, Moment, Rest)];
timed_events([], _Moment, Rest) ->
    timed_events(Rest).
%% @doc: Space-efficient dictionary implemented using a binary
%%
%% This module implements a space-efficient dictionary with no
%% overhead per entry. Read and write access is O(log n).
%%
%% Keys and values are fixed size binaries stored ordered in a larger
%% binary which acts as a sparse array. All operations are implemented
%% using a binary search.
%%
%% As large binaries can be shared among processes, there can be
%% multiple concurrent readers of an instance of this structure.
%%
%% serialize/1 and deserialize/1 convert the dictionary to and from a
%% binary, for storage or transfer.
-module(bisect).
-author('<NAME> <<EMAIL>>').
-export([new/2, new/3, insert/3, bulk_insert/2, append/3, find/2, foldl/3]).
-export([next/2, next_nth/3, first/1, last/1, delete/2, compact/1, cas/4, update/4]).
-export([serialize/1, deserialize/1, from_orddict/2, to_orddict/1, find_many/2]).
-export([merge/2, intersection/1, intersection/2]).
-export([expected_size/2, expected_size_mb/2, num_keys/1, size/1]).
-compile({no_auto_import, [size/1]}).
-compile(native).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%%
%% TYPES
%%
-type key_size() :: pos_integer().
-type value_size() :: pos_integer().
-type block_size() :: pos_integer().
-type key() :: binary().
-type value() :: binary().
-type index() :: pos_integer().
-record(bindict, {
key_size :: key_size(),
value_size :: value_size(),
block_size :: block_size(),
b :: binary()
}).
-type bindict() :: #bindict{}.
%%
%% API
%%
-spec new(key_size(), value_size()) -> bindict().
%% @doc Returns a new empty dictionary where the keys and values will
%% always be of the given size.
new(KeySize, ValueSize) when is_integer(KeySize), is_integer(ValueSize) ->
    new(KeySize, ValueSize, <<>>).

-spec new(key_size(), value_size(), binary()) -> bindict().
%% @doc Returns a new dictionary wrapping the given data binary, which
%% must already be sorted key/value pairs of the given sizes.
new(KeySize, ValueSize, Data) when is_integer(KeySize),
                                   is_integer(ValueSize),
                                   is_binary(Data) ->
    #bindict{key_size = KeySize,
             value_size = ValueSize,
             block_size = KeySize + ValueSize,
             b = Data}.
-spec insert(bindict(), key(), value()) -> bindict().
%% @doc Inserts the key and value into the dictionary. If the size of
%% key and value is wrong, throws badarg. If the key is already in the
%% array, the value is updated.
insert(B, K, V) when byte_size(K) =/= B#bindict.key_size orelse
                     byte_size(V) =/= B#bindict.value_size ->
    erlang:error(badarg);
insert(#bindict{b = <<>>} = B, K, V) ->
    %% Empty dictionary: the new pair is the entire binary.
    B#bindict{b = <<K/binary, V/binary>>};
insert(B, K, V) ->
    %% index/2 returns the sorted position for K; split the binary
    %% there and either overwrite the existing pair or splice in a new
    %% one. Binary-search lookup, O(n) copy on write.
    Index = index(B, K),
    LeftOffset = Index * B#bindict.block_size,
    RightOffset = byte_size(B#bindict.b) - LeftOffset,
    KeySize = B#bindict.key_size,
    ValueSize = B#bindict.value_size,
    case B#bindict.b of
        %% Key already present at the insertion point: replace value.
        <<Left:LeftOffset/binary, K:KeySize/binary, _:ValueSize/binary, Right/binary>> ->
            B#bindict{b = iolist_to_binary([Left, K, V, Right])};
        %% Key absent: insert the pair between Left and Right.
        <<Left:LeftOffset/binary, Right:RightOffset/binary>> ->
            B#bindict{b = iolist_to_binary([Left, K, V, Right])}
    end.
%% @doc: Update the value stored under the key by calling F on the old
%% value to get a new value. If the key is not present, initial will
%% be stored as the first value. Same as dict:update/4. Note: find and
%% insert requires two binary searches in the binary, while update
%% only needs one. It's as close to in-place update we can get in pure
%% Erlang.
update(B, K, Initial, F) when byte_size(K) =/= B#bindict.key_size orelse
                              byte_size(Initial) =/= B#bindict.value_size orelse
                              not is_function(F) ->
    erlang:error(badarg);
update(B, K, Initial, F) ->
    Index = index(B, K),
    LeftOffset = Index * B#bindict.block_size,
    RightOffset = byte_size(B#bindict.b) - LeftOffset,
    KeySize = B#bindict.key_size,
    ValueSize = B#bindict.value_size,
    case B#bindict.b of
        %% Key present: apply F to the stored value.
        <<Left:LeftOffset/binary, K:KeySize/binary, OldV:ValueSize/binary, Right/binary>> ->
            case F(OldV) of
                OldV ->
                    %% Unchanged value: avoid rebuilding the binary.
                    B;
                NewV ->
                    %% The callback must preserve the value size.
                    byte_size(NewV) =:= ValueSize orelse erlang:error(badarg),
                    B#bindict{b = iolist_to_binary([Left, K, NewV, Right])}
            end;
        %% Key absent: store Initial without calling F.
        <<Left:LeftOffset/binary, Right:RightOffset/binary>> ->
            B#bindict{b = iolist_to_binary([Left, K, Initial, Right])}
    end.
-spec append(bindict(), key(), value()) -> bindict().
%% @doc: Append a key and value. This is only useful if the key is known
%% to be larger than any other key. Otherwise it will corrupt the bindict.
append(B, K, V) when byte_size(K) =/= B#bindict.key_size orelse
                     byte_size(V) =/= B#bindict.value_size ->
    erlang:error(badarg);
append(B, K, V) ->
    %% Guard against corrupting the sort order: the new key must be
    %% strictly larger than the current last key.
    case last(B) of
        {KLast, _} when K =< KLast ->
            erlang:error(badarg);
        _ ->
            %% Empty dict or strictly larger key: append in O(1).
            Bin = B#bindict.b,
            B#bindict{b = <<Bin/binary, K/binary, V/binary>>}
    end.
-spec cas(bindict(), key(), value() | 'not_found', value()) -> bindict().
%% @doc Check-and-set: replace the value under K only if the currently
%% stored value (or its absence, expressed as 'not_found') matches OldV.
%% Errors with badarg otherwise. Provided for use by bisect_server.
cas(B, K, OldV, V) ->
    case find(B, K) of
        OldV ->
            insert(B, K, V);
        _Mismatch ->
            erlang:error(badarg)
    end.

-spec find(bindict(), key()) -> value() | not_found.
%% @doc Returns the value associated with the key, or 'not_found' if
%% there is no such key.
find(B, K) ->
    case at(B, index(B, K)) of
        {K, Value}     -> Value;
        {_WrongKey, _} -> not_found;
        not_found      -> not_found
    end.

-spec find_many(bindict(), [key()]) -> [value() | not_found].
%% @doc Look up each key in turn; one binary search per key.
find_many(B, Keys) ->
    [find(B, K) || K <- Keys].
-spec delete(bindict(), key()) -> bindict().
%% @doc Remove the pair stored under K. Errors with badarg if the key
%% is not present (the binary match below requires K at its sorted
%% position).
delete(B, K) ->
    LeftOffset = index2offset(B, index(B, K)),
    KeySize = B#bindict.key_size,
    ValueSize = B#bindict.value_size,
    case B#bindict.b of
        %% Key found at its position: splice it out.
        <<Left:LeftOffset/binary, K:KeySize/binary, _:ValueSize/binary, Right/binary>> ->
            B#bindict{b = <<Left/binary, Right/binary>>};
        _ ->
            erlang:error(badarg)
    end.
-spec next(bindict(), key()) -> {key(), value()} | not_found.
%% @doc: Returns the next larger key and value associated with it or
%% 'not_found' if no larger key exists.
next(B, K) ->
    next_nth(B, K, 1).

%% @doc: Returns the nth next larger key and value associated with it
%% or 'not_found' if it does not exist.
%% Spec fix: at/2 yields a {Key, Value} pair (or 'not_found'), as the
%% specs of next/2, first/1 and last/1 show — not a bare value().
-spec next_nth(bindict(), key(), non_neg_integer()) ->
          {key(), value()} | not_found.
next_nth(B, K, Steps) ->
    at(B, index(B, inc(K)) + Steps - 1).
-spec first(bindict()) -> {key(), value()} | not_found.
%% @doc: Returns the first key-value pair or 'not_found' if the dict is empty
first(B) ->
    at(B, 0).

-spec last(bindict()) -> {key(), value()} | not_found.
%% @doc: Returns the last key-value pair or 'not_found' if the dict is empty
last(B) ->
    at(B, num_keys(B) - 1).
-spec foldl(bindict(), fun(), any()) -> any().
%% @doc Fold F(Key, Value, AccIn) over all pairs in ascending key order.
%% Bug fix: an empty dictionary previously returned [] regardless of
%% the caller-supplied initial accumulator; it now returns Acc0.
foldl(B, F, Acc0) ->
    case first(B) of
        {Key, Value} ->
            do_foldl(B, F, Key, F(Key, Value, Acc0));
        not_found ->
            Acc0
    end.

%% Walk the remaining keys via next/2, threading the accumulator.
do_foldl(B, F, PrevKey, Acc) ->
    case next(B, PrevKey) of
        {Key, Value} ->
            do_foldl(B, F, Key, F(Key, Value, Acc));
        not_found ->
            Acc
    end.
%% @doc: Compacts the internal binary used for storage, by creating a
%% new copy where all the data is aligned in memory. Writes will cause
%% fragmentation.
compact(#bindict{b = Bin} = B) ->
    B#bindict{b = binary:copy(Bin)}.

%% @doc: Returns how many bytes would be used by the structure if it
%% was storing NumKeys.
expected_size(#bindict{block_size = BlockSize}, NumKeys) ->
    BlockSize * NumKeys.

%% Same, expressed in mebibytes (float).
expected_size_mb(B, NumKeys) ->
    expected_size(B, NumKeys) / (1024 * 1024).

-spec num_keys(bindict()) -> integer().
%% @doc: Returns the number of keys in the dictionary
num_keys(#bindict{b = Bin, block_size = BlockSize}) ->
    byte_size(Bin) div BlockSize.

%% @doc Total size in bytes of the backing binary.
size(#bindict{b = Bin}) ->
    erlang:byte_size(Bin).
-spec serialize(bindict()) -> binary().
%% @doc: Returns a binary representation of the dictionary which can
%% be deserialized later to recreate the same structure.
serialize(#bindict{} = B) ->
    term_to_binary(B).

-spec deserialize(binary()) -> bindict().
%% @doc Recreate a dictionary from serialize/1 output. Errors with
%% badarg if the term is not a bindict.
%% NOTE(review): binary_to_term/1 on untrusted input can create atoms
%% and funs; only deserialize trusted data.
deserialize(Bin) ->
    case binary_to_term(Bin) of
        #bindict{} = B ->
            B;
        _Other ->
            erlang:error(badarg)
    end.
%% @doc: Insert a batch of key-value pairs into the dictionary. A new
%% binary is only created once, making it much cheaper than individual
%% calls to insert/2. The input list must be sorted.
%% The accumulator collects iolist fragments in reverse order; a single
%% iolist_to_binary/1 call builds the final storage binary.
bulk_insert(#bindict{} = B, Orddict) ->
    L = do_bulk_insert(B, B#bindict.b, [], Orddict),
    B#bindict{b = iolist_to_binary(lists:reverse(L))}.

%% All pairs consumed: the remaining right-hand binary is the last fragment.
do_bulk_insert(_B, Bin, Acc, []) ->
    [Bin | Acc];
do_bulk_insert(B, Bin, Acc, [{Key, Value} | Rest]) ->
    %% split_at/5 removes any existing entry for Key (overwrite semantics)
    %% and returns the storage split at Key's insertion point.
    {Left, Right} = split_at(Bin, B#bindict.key_size, B#bindict.value_size, Key, 0),
    do_bulk_insert(B, Right, [Value, Key, Left | Acc], Rest).
%% @doc: Splits Bin at the position where Key belongs, scanning block by
%% block from block index I. If Key is already present, its existing
%% entry is excluded from both halves (so the caller can overwrite it).
%% Returns {Left, Right} such that Left ++ new entry ++ Right is sorted.
split_at(Bin, KeySize, ValueSize, Key, I) ->
    Offset = I * (KeySize + ValueSize),
    case Bin of
        Bin when byte_size(Bin) < Offset ->
            %% Scanned past the end: Key goes after everything.
            {Bin, <<>>};
        <<Before:Offset/binary,
          Key:KeySize/binary, _Dropped:ValueSize/binary,
          After/binary>> ->
            %% Exact key match: drop the old value for overwriting.
            {Before, After};
        <<Before:Offset/binary,
          Bigger:KeySize/binary, Val:ValueSize/binary,
          After/binary>> when Bigger > Key ->
            %% First larger key found: Key is inserted right before it.
            {Before, <<Bigger/binary, Val/binary, After/binary>>};
        _ ->
            split_at(Bin, KeySize, ValueSize, Key, I + 1)
    end.
%% @doc: Merges Small into Big; on key collisions the entry from Small
%% wins (split_at/5 drops Big's colliding entry). Both dictionaries must
%% use the same block size, otherwise badarg is raised.
merge(Small, Big) ->
    Small#bindict.block_size =:= Big#bindict.block_size
        orelse erlang:error(badarg),
    L = do_merge(Small#bindict.b, Big#bindict.b, [],
                 Big#bindict.key_size, Big#bindict.value_size),
    Big#bindict{b = iolist_to_binary(L)}.

%% @private Walks Small entry by entry, splitting Big at each key so the
%% fragments can be reassembled in sorted order in a single pass.
do_merge(Small, Big, Acc, KeySize, ValueSize) ->
    case Small of
        <<Key:KeySize/binary, Value:ValueSize/binary, RestSmall/binary>> ->
            {LeftBig, RightBig} = split_at(Big, KeySize, ValueSize, Key, 0),
            do_merge(RestSmall, RightBig, [Value, Key, LeftBig | Acc],
                     KeySize, ValueSize);
        <<>> ->
            %% Small exhausted: whatever is left of Big goes at the end.
            lists:reverse([Big | Acc])
    end.
%% @doc: Intersect two or more bindicts by key. The resulting bindict
%% contains keys found in all input bindicts.
%% Matching [_, _ | _] replaces the previous `length(Bs) >= 2' guard,
%% which traversed the whole list just to check for two elements.
intersection([_, _ | _] = Bs) ->
    intersection(Bs, svs);
intersection(_TooFewSets) ->
    erlang:error(badarg).
%% @doc: SvS set intersection algorithm, as described in
%% http://www.cs.toronto.edu/~tl/papers/fiats.pdf
%% The smallest input is chosen as the initial candidate set, since the
%% intersection can never be larger than the smallest input.
intersection(Bs, svs) ->
    %% Note: size/1 here is this module's size/1 (backing-binary bytes),
    %% which shadows the erlang:size/1 auto-import.
    [CandidateSet | Sets] = lists:sort(fun (A, B) -> size(A) =< size(B) end, Bs),
    from_orddict(new(CandidateSet#bindict.key_size,
                     CandidateSet#bindict.value_size),
                 do_svs(Sets, CandidateSet)).
%% @private One SvS pass per set: keep only the candidates that are also
%% present in Set. Since both the candidates and Set are sorted, each
%% lookup can restart the binary search from the previous match's rank.
do_svs([], Candidates) ->
    Candidates;
do_svs([Set | Sets], #bindict{} = Candidates) ->
    %% Optimization: we let the candidate set remain a bindict for the
    %% first iteration to avoid creating a large orddict just to throw
    %% most of it away. For the remaining sets, we keep the candidate
    %% set as a list.
    %% Set's block count is loop invariant, so compute it once here
    %% instead of once per candidate as before.
    Size = byte_size(Set#bindict.b) div Set#bindict.block_size,
    {_, NewCandidatesList} =
        foldl(Candidates,
              fun (K, V, {L, Acc}) ->
                      Rank = index(Set, L, Size, K),
                      %% TODO: Skip candidates until OtherK?
                      case at(Set, Rank) of
                          {K, _} -> {Rank, [{K, V} | Acc]};
                          {_OtherK, _} -> {Rank, Acc};
                          not_found -> {Rank, Acc}
                      end
              end, {0, []}),
    do_svs(Sets, lists:reverse(NewCandidatesList));
do_svs([Set | Sets], Candidates) when is_list(Candidates) ->
    Size = byte_size(Set#bindict.b) div Set#bindict.block_size,
    {_, NewCandidates} =
        lists:foldl(fun ({K, V}, {L, Acc}) ->
                            Rank = index(Set, L, Size, K),
                            case at(Set, Rank) of
                                {K, _} -> {Rank, [{K, V} | Acc]};
                                {_OtherK, _} -> {Rank, Acc};
                                not_found -> {Rank, Acc}
                            end
                    end, {0, []}, Candidates),
    do_svs(Sets, lists:reverse(NewCandidates)).
%% @doc: Returns the {Key, Value} pair stored at block index I, or
%% 'not_found' when I is out of range (the binary pattern simply fails
%% to match; last_test/0 also covers a negative index this way).
at(B, I) ->
    Offset = index2offset(B, I),
    KeySize = B#bindict.key_size,
    ValueSize = B#bindict.value_size,
    case B#bindict.b of
        <<_:Offset/binary, Key:KeySize/binary, Value:ValueSize/binary, _/binary>> ->
            {Key, Value};
        _ ->
            not_found
    end.
%% @doc: Populates the dictionary with data from the orddict, taking
%% advantage of the fact that it is already ordered. The given bindict
%% must be empty but must carry the key/value size parameters. Raises
%% badarg if any key or value has the wrong width.
from_orddict(#bindict{b = <<>>, key_size = KeySize, value_size = ValueSize} = B,
             Orddict) ->
    Build = fun (K, V, Acc) when byte_size(K) =:= KeySize andalso
                                 byte_size(V) =:= ValueSize ->
                    [<<K:KeySize/binary, V:ValueSize/binary>> | Acc];
                (_, _, _) ->
                    erlang:error(badarg)
            end,
    Blocks = orddict:fold(Build, [], Orddict),
    B#bindict{b = iolist_to_binary(lists:reverse(Blocks))}.
%% @doc: Returns all entries as an ordered {Key, Value} list.
to_orddict(#bindict{} = B) ->
    Collect = fun (Key, Value, Acc) -> [{Key, Value} | Acc] end,
    lists:reverse(foldl(B, Collect, [])).
%%
%% INTERNAL HELPERS
%%
%% Translate a block index into a byte offset into the backing binary.
index2offset(_, 0) -> 0;
index2offset(Dict, Index) -> Index * Dict#bindict.block_size.
%% @doc: Uses binary search to find the index of the given key. If the
%% key does not exist, the index where it should be inserted is
%% returned.
-spec index(bindict(), key()) -> index().
%% NOTE(review): this clause matches a bare empty binary rather than a
%% bindict record; every call site in this chunk passes a record, so it
%% looks unreachable from here -- confirm before removing.
index(<<>>, _) ->
    0;
index(B, K) ->
    %% N = number of blocks; search over the half-open range [0, N).
    N = byte_size(B#bindict.b) div B#bindict.block_size,
    index(B, 0, N, K).
%% @private Binary search for K within block indices [Low, High).
%% Returns K's index when present, otherwise the insertion point.
index(_B, Low, High, _K) when High =:= Low ->
    Low;
index(_B, Low, High, _K) when High < Low ->
    -1;
index(B, Low, High, K) ->
    Mid = (Low + High) div 2,
    MidOffset = index2offset(B, Mid),
    KeySize = B#bindict.key_size,
    %% Guard against probing past the end of the backing binary.
    case byte_size(B#bindict.b) > MidOffset of
        true ->
            <<_:MidOffset/binary, MidKey:KeySize/binary, _/binary>> = B#bindict.b,
            if
                MidKey > K ->
                    index(B, Low, Mid, K);
                MidKey < K ->
                    index(B, Mid + 1, High, K);
                MidKey =:= K ->
                    Mid
            end;
        false ->
            Mid
    end.
%% @doc: Treats the binary as a big-endian unsigned integer and returns
%% a binary of the same width holding that integer plus one. Note that
%% incrementing the maximum value for the width wraps to zero
%% (e.g. inc(<<255>>) =:= <<0>>).
inc(B) ->
    IncInt = binary:decode_unsigned(B) + 1,
    %% byte_size/1 is the specific BIF for binaries (was erlang:size/1).
    SizeBits = byte_size(B) * 8,
    <<IncInt:SizeBits>>.
%%
%% TEST
%%
-ifdef(TEST).
-define(i2k(I), <<I:64/integer>>).
-define(i2v(I), <<I:8/integer>>).
-define(b2i(B), list_to_integer(binary_to_list(B))).
%% Constructing a bindict from an existing backing binary yields the same
%% structure as inserting the pairs one by one.
new_with_data_test() ->
    Dict = insert_many(new(8, 1), [{2, 2}, {4, 4}, {1, 1}, {3, 3}]),
    ?assertEqual(Dict, new(8, 1, Dict#bindict.b)).

%% Smoke test: inserts in arbitrary key order must not crash.
insert_test() ->
    insert_many(new(8, 1), [{2, 2}, {4, 4}, {1, 1}, {3, 3}]).

%% Entries are laid out contiguously in key order in the backing binary.
sorted_insert_test() ->
    B = insert_many(new(8, 1), [{1, 1}, {2, 2}, {3, 3}, {4, 4}]),
    ?assertEqual(<<1:64/integer, 1, 2:64/integer, 2,
                   3:64/integer, 3, 4:64/integer, 4>>, B#bindict.b).

%% index/2 returns the block index for present keys and the insertion
%% point for absent keys.
index_test() ->
    B = #bindict{key_size = 8, value_size = 1, block_size = 9,
                 b = <<0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,2,2>>},
    ?assertEqual(0, index(B, <<1:64/integer>>)),
    ?assertEqual(1, index(B, <<2:64/integer>>)),
    ?assertEqual(2, index(B, <<3:64/integer>>)),
    ?assertEqual(2, index(B, <<100:64/integer>>)).

%% find/2 returns the stored value for an existing key.
find_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    ?assertEqual(<<3:8/integer>>, find(B, <<3:64/integer>>)).

%% find/2 returns not_found for a key that was never inserted.
find_non_existing_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    ?assertEqual(not_found, find(B, ?i2k(4))).

%% Smoke test for batched lookups.
find_many_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    find_many(B, [<<1:64/integer>>, <<2:64/integer>>, <<3:64/integer>>]).

%% Inserting an existing key replaces its value.
insert_overwrite_test() ->
    B = insert_many(new(8, 1), [{2, 2}]),
    ?assertEqual(<<2>>, find(B, <<2:64/integer>>)),
    B2 = insert(B, <<2:64/integer>>, <<4>>),
    ?assertEqual(<<4>>, find(B2, <<2:64/integer>>)).

%% update/4 applies the fun to an existing value; for an absent key the
%% supplied default is stored and the fun is not called.
update_test() ->
    B = insert_many(new(8, 1), [{2, 2}]),
    B2 = update(B, <<2:64/integer>>, <<4>>, fun (Old) ->
                                                    ?assertEqual(Old, <<2>>),
                                                    <<5>>
                                            end),
    ?assertEqual(<<5>>, find(B2, <<2:64/integer>>)),
    B3 = update(B2, <<3:64/integer>>, <<3>>, fun (_) ->
                                                     throw(unexpected_call)
                                             end),
    ?assertEqual(<<3>>, find(B3, <<3:64/integer>>)).

%% append/3 only accepts keys strictly greater than the current maximum.
append_test() ->
    KV1 = {<<2:64>>, <<2:8>>},
    {K2, V2} = {<<3:64>>, <<3:8>>},
    B = insert_many(new(8, 1), [KV1]),
    ?assertError(badarg, append(B, <<1:64>>, V2)),
    ?assertError(badarg, append(B, <<2:64>>, V2)),
    B2 = append(B, K2, V2),
    ?assertEqual(V2, find(B2, K2)).
%% next/2 returns the entry with the smallest key strictly greater than
%% the argument (the argument itself does not have to be present).
next_test() ->
    KV1 = {<<2:64>>, <<2:8>>},
    KV2 = {<<3:64>>, <<3:8>>},
    B = insert_many(new(8, 1), [KV1, KV2]),
    ?assertEqual(KV1, next(B, <<0:64>>)),
    ?assertEqual(KV1, next(B, <<1:64>>)),
    ?assertEqual(KV2, next(B, <<2:64>>)),
    ?assertEqual(not_found, next(B, <<3:64>>)).

%% next_nth/3 steps N entries past the given key.
next_nth_test() ->
    KV1 = {<<2:64>>, <<2:8>>},
    KV2 = {<<3:64>>, <<3:8>>},
    B = insert_many(new(8, 1), [KV1, KV2]),
    ?assertEqual(KV1, next_nth(B, <<0:64>>, 1)),
    ?assertEqual(KV2, next_nth(B, <<0:64>>, 2)),
    ?assertEqual(KV2, next_nth(B, <<2:64>>, 1)),
    ?assertEqual(not_found, next_nth(B, <<2:64>>, 2)),
    ?assertEqual(not_found, next_nth(B, <<3:64>>, 1)).

%% first/1 returns the smallest entry and not_found for an empty dict.
first_test() ->
    KV1 = {K1, V1} = {<<2:64>>, <<2:8>>},
    _KV2 = {K2, V2} = {<<3:64>>, <<3:8>>},
    B1 = new(8, 1),
    ?assertEqual(not_found, first(B1)),
    B2 = insert(B1, K1, V1),
    ?assertEqual(KV1, first(B2)),
    B3 = insert(B2, K2, V2),
    ?assertEqual(KV1, first(B3)).

%% last/1 returns the largest entry; also covers at/2 bounds behaviour on
%% an empty dict, including a negative index.
last_test() ->
    KV1 = {K1, V1} = {<<2:64>>, <<2:8>>},
    KV2 = {K2, V2} = {<<3:64>>, <<3:8>>},
    B1 = new(8, 1),
    ?assertEqual(not_found, last(B1)),
    ?assertEqual(0, num_keys(B1)),
    ?assertEqual(not_found, at(B1, 0)),
    ?assertEqual(not_found, at(B1, -1)),
    ?assertEqual(not_found, at(B1, 1)),
    B2 = insert(B1, K1, V1),
    ?assertEqual(KV1, last(B2)),
    B3 = insert(B2, K2, V2),
    ?assertEqual(KV2, last(B3)).
%% delete/2 removes an existing key.
delete_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    ?assertEqual(<<2:8/integer>>, find(B, ?i2k(2))),
    NewB = delete(B, ?i2k(2)),
    ?assertEqual(not_found, find(NewB, ?i2k(2))).

%% delete/2 raises badarg for a missing key.
delete_non_existing_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    ?assertError(badarg, delete(B, ?i2k(4))).

%% foldl/3 visits every entry in key order; the empty-dict case here uses
%% an empty-list accumulator.
foldl_test() ->
    B = insert_many(new(8, 1), [{2, 2}, {3, 3}, {1, 1}]),
    ?assertEqual(2+3+1, foldl(B, fun (_, <<V:8/integer>>, Acc) -> V + Acc end, 0)),
    ?assertEqual([], foldl(new(8, 1), fun (I, V, Acc) -> [{I, V} | Acc] end, [])).

%% lists:seq/3 has inclusive bounds, so the input holds N+1 keys, which
%% equals N+Spread for Spread =:= 1.
size_test() ->
    Start = 100000000000000,
    N = 1000,
    Spread = 1,
    KeyPairs = lists:map(fun (I) -> {I, 255} end,
                         lists:seq(Start, Start+(N*Spread), Spread)),
    B = insert_many(new(8, 1), KeyPairs),
    ?assertEqual(N+Spread, num_keys(B)).

%% serialize/1 and deserialize/1 round-trip the structure unchanged.
serialize_test() ->
    KeyPairs = lists:map(fun (I) -> {I, 255} end, lists:seq(1, 100)),
    B = insert_many(new(8, 1), KeyPairs),
    ?assertEqual(B, deserialize(serialize(B))).

%% from_orddict/2 bulk-loads an already sorted orddict.
from_orddict_test() ->
    Orddict = orddict:from_list([{<<1:64/integer>>, <<255:8/integer>>}]),
    ?assertEqual(<<255>>, find(from_orddict(new(8, 1), Orddict), <<1:64/integer>>)).
%% Only keys present in every input survive; the expected values ({2, 2})
%% come from the first set, which the SvS pass uses as candidate set.
intersection_test() ->
    Sets = [insert_many(new(8, 1), [{1, 1}, {2, 2}, {3, 3}]),
            insert_many(new(8, 1), [{1, 1}, {2, 3}, {4, 4}]),
            insert_many(new(8, 1), [{1, 1}, {2, 3}, {5, 5}]),
            insert_many(new(8, 1), [{1, 1}, {2, 3}, {6, 6}])],
    Intersection = intersection(Sets),
    ?assertEqual(to_orddict(insert_many(new(8, 1), [{1, 1}, {2, 2}])),
                 to_orddict(Intersection)).

%% Wrapped with a long timeout since the large cases below are slow.
intersection_perf_test_() ->
    {timeout, 600, ?_test(intersection_perf())}.

%% Cross-checks intersection/1 against sets:intersection/1 on generated
%% data of various sizes and logs the measured runtime.
intersection_perf() ->
    TestCases = [{[1000, 1000], 10},
                 {[100000, 100000, 100000], 1000},
                 {[10000, 100000, 1000000], 1000},
                 {[1000000, 1000000, 1000000], 10000}
                ],
    lists:foreach(
      fun ({SetSizes, IntersectionSize}) ->
              %% Every set gets its own unique keys plus the shared
              %% intersection keys.
              UnionSize = lists:sum([SetSize - IntersectionSize
                                     || SetSize <- SetSizes]) + IntersectionSize,
              KVs = lists:map(fun (K) -> {<<K:36/binary>>, <<97:32/integer>>} end,
                              generate_unique(UnionSize)),
              ?assertEqual(UnionSize, sets:size(sets:from_list(KVs))),
              {IntersectionKeys, Rest} = lists:split(IntersectionSize, KVs),
              {SetKeys, []} = lists:mapfoldl(fun (Size, AccRest) ->
                                                     lists:split(Size - IntersectionSize,
                                                                 AccRest)
                                             end, Rest, SetSizes),
              ?assertEqual(IntersectionSize, length(IntersectionKeys)),
              %% Reference result computed with the stdlib sets module.
              SetIntersection = sets:intersection(
                                  [sets:from_list(Ks ++ IntersectionKeys)
                                   || Ks <- SetKeys]),
              ?assertEqual(IntersectionSize, sets:size(SetIntersection)),
              Bisects = lists:map(fun (Ks) ->
                                          AllKeys = orddict:from_list(
                                                      Ks ++ IntersectionKeys),
                                          from_orddict(new(36, 4), AllKeys)
                                  end, SetKeys),
              {IntersectUs, BisectIntersection} = timer:tc(
                                                    fun () -> intersection(Bisects) end),
              IntersectingKeys = to_orddict(BisectIntersection),
              ?assertEqual(length(lists:sort(sets:to_list(SetIntersection))),
                           length(lists:sort(IntersectingKeys))),
              ?assertEqual(lists:sort(sets:to_list(SetIntersection)),
                           lists:sort(IntersectingKeys)),
              error_logger:info_msg("Set sizes: ~p, Intersection size: ~p~n"
                                    "Intersection runtime: ~.2f ms~n",
                                    [SetSizes, IntersectionSize,
                                     IntersectUs / 1000]),
              ok
      end, TestCases).
%% @doc: Returns a list of N unique 36-byte random binaries.
generate_unique(N) ->
    %% crypto:strong_rand_bytes/1 replaces crypto:rand_bytes/1, which
    %% was removed from OTP's crypto application.
    RandomGenerator = fun () -> crypto:strong_rand_bytes(36) end,
    generate_unique(RandomGenerator, [], N).

%% @private Draws batches until N unique values have been collected.
%% The accumulator is deduplicated together with each new batch;
%% previously only the batch itself was usort-ed, so a value colliding
%% with an earlier batch could make the final result non-unique.
generate_unique(RandomGenerator, Acc, N) ->
    case length(Acc) =:= N of
        true ->
            Acc;
        false ->
            Batch = [RandomGenerator() || _ <- lists:seq(1, N - length(Acc))],
            generate_unique(RandomGenerator, lists:usort(Acc ++ Batch), N)
    end.
%% Measures read throughput on a 100k-entry dict built via from_orddict/2.
%% NOTE(review): the random module and now/0 used throughout these
%% benchmarks are deprecated/removed on modern OTP; rand and
%% erlang:monotonic_time/1 are the replacements -- confirm target OTP.
speed_test_() ->
    {timeout, 600,
     fun() ->
             Start = 100000000000000,
             N = 100000,
             Keys = lists:seq(Start, Start+N),
             KeyValuePairs = lists:map(fun (I) -> {<<I:64/integer>>, <<255:8/integer>>} end,
                                       Keys),
             %% Will mostly be unique, if N is bigger than 10000
             ReadKeys = [lists:nth(random:uniform(N), Keys) || _ <- lists:seq(1, 1000)],
             B = from_orddict(new(8, 1), KeyValuePairs),
             time_reads(B, N, ReadKeys)
     end}.

%% Same as speed_test_/0 but builds the dict with repeated insert/3 calls
%% and logs the insert cost per key first.
insert_speed_test_() ->
    {timeout, 600,
     fun() ->
             Start = 100000000000000,
             N = 10000,
             Keys = lists:seq(Start, Start+N),
             KeyValuePairs = lists:map(fun (I) -> {<<I:64/integer>>, <<255:8/integer>>} end,
                                       Keys),
             ReadKeys = [lists:nth(random:uniform(N), Keys) || _ <- lists:seq(1, 1000)],
             StartTime = now(),
             B = lists:foldl(fun ({K, V}, B) ->
                                     insert(B, K, V)
                             end, new(8, 1), KeyValuePairs),
             ElapsedUs = timer:now_diff(now(), StartTime),
             error_logger:info_msg("insert in ~p ms, ~p us per key~n",
                                   [ElapsedUs / 1000,
                                    ElapsedUs / N
                                   ]),
             time_reads(B, N, ReadKeys)
     end}.

%% Benchmarks find_many/2 in a spawned process and logs the averages.
%% NOTE(review): the `after 1000' below lets the test return before the
%% measurement loop finishes -- confirm this is intentional.
time_reads(B, Size, ReadKeys) ->
    Parent = self(),
    spawn(
      fun() ->
              Runs = 100,
              Timings =
                  lists:map(
                    fun (_) ->
                            StartTime = now(),
                            find_many(B, ReadKeys),
                            timer:now_diff(now(), StartTime)
                    end, lists:seq(1, Runs)),
              Rps = 1000000 / ((lists:sum(Timings) / length(Timings)) / length(ReadKeys)),
              error_logger:info_msg("Average over ~p runs, ~p keys in dict~n"
                                    "Average fetch ~p keys: ~p us, max: ~p us~n"
                                    "Average fetch 1 key: ~p us~n"
                                    "Theoretical sequential RPS: ~w~n",
                                    [Runs, Size, length(ReadKeys),
                                     lists:sum(Timings) / length(Timings),
                                     lists:max(Timings),
                                     (lists:sum(Timings) / length(Timings)) / length(ReadKeys),
                                     trunc(Rps)]),
              Parent ! done
      end),
    receive done -> ok after 1000 -> ok end.

%% Tracks insert/3 latency as the dict grows.
time_write_test_() ->
    {timeout, 600,
     fun() ->
             Fun = fun(N , B) ->
                           insert(B, <<N:64/integer>>, <<255:8/integer>>)
                   end,
             start_time_interval("Insert", Fun, new(8, 1), 1000, 20000)
     end
    }.

%% Interleaves a random find/2 with every insert/3.
time_write_and_read_test_() ->
    {timeout, 600,
     fun() ->
             Fun = fun(Count, B) ->
                           KInt = random:uniform(Count),
                           find(B, <<KInt:64/integer>>),
                           insert(B, <<Count:64/integer>>, <<255:8/integer>>)
                   end,
             start_time_interval("Insert and find", Fun, new(8, 1), 1000, 10000)
     end
    }.

%% Tracks append/3 latency as the dict grows.
time_appends_test_() ->
    {timeout, 600,
     fun() ->
             Fun = fun(Count, B) ->
                           append(B, <<Count:64/integer>>, <<255:8/integer>>)
                   end,
             start_time_interval("Append", Fun, new(8, 1), 1000, 50000)
     end
    }.

%% Interleaves a random find/2 with every append/3.
time_appends_and_find_test_() ->
    {timeout, 600,
     fun() ->
             Fun = fun(Count, B) ->
                           KInt = random:uniform(Count),
                           find(B, <<KInt:64/integer>>),
                           append(B, <<Count:64/integer>>, <<255:8/integer>>)
                   end,
             start_time_interval("Append and find", Fun, new(8, 1), 1000, 50000)
     end
    }.

%% Interleaves a random next/2 with every append/3.
time_appends_and_next_test_() ->
    {timeout, 600,
     fun() ->
             Fun = fun(Count , B) ->
                           KInt = random:uniform(Count),
                           next(B, <<KInt:64/integer>>),
                           append(B, <<Count:64/integer>>, <<255:8/integer>>)
                   end,
             start_time_interval("Append and next", Fun, new(8, 1), 1000, 50000)
     end
    }.

%% Runs Fun N times on a growing dict, sampling elapsed time every
%% MeasureEvery operations, and logs the per-interval durations.
%% NOTE(review): the label says "(ms)" but timer:now_diff/2 returns
%% microseconds, so the logged values are actually us.
start_time_interval(Operation, Fun, B, MeasureEvery, N) ->
    Times = time_interval(Fun, B, MeasureEvery, N, 1, now()),
    error_logger:info_msg("Time (ms) taken for ~p executions each of ~p:\n~p\n",
                          [MeasureEvery, Operation, Times]).

%% Recursion stops when Count reaches N; the interval clock restarts
%% after every sample.
time_interval(_, _, _, N, N, _) ->
    [];
time_interval(Fun, B, MeasureEvery, N, Count, T) ->
    B2 = Fun(Count, B),
    case Count rem MeasureEvery =:= 0 of
        true ->
            [timer:now_diff(now(), T)| time_interval(Fun, B2, MeasureEvery, N, Count + 1, now())];
        false ->
            time_interval(Fun, B2, MeasureEvery, N, Count + 1, T)
    end.
%% Test helper: folds the given pairs into Bin, first converting integer
%% pairs to fixed-width binaries via the ?i2k/?i2v macros.
insert_many(Bin, Pairs) ->
    Insert = fun ({K, V}, B) when is_integer(K) andalso is_integer(V) ->
                     insert(B, ?i2k(K), ?i2v(V));
                 ({K, V}, B) ->
                     insert(B, K, V)
             end,
    lists:foldl(Insert, Bin, Pairs).
%% inc/1 increments a fixed-width big-endian binary counter.
inc_test() ->
    ?assertEqual(<<7:64>>, inc(<<6:64>>)).

%% bulk_insert/2 mixes new keys and overwrites of existing keys in one
%% pass over the storage.
bulk_insert_test() ->
    B = insert_many(new(8, 1), [{1, 1}, {10, 10}, {12, 12}]),
    New = bulk_insert(B, [{?i2k(0), ?i2v(0)},
                          {?i2k(5), ?i2v(5)},
                          {?i2k(10), ?i2v(11)},
                          {?i2k(11), ?i2v(11)}]),
    ?assertEqual([{?i2k(0) , ?i2v(0)},
                  {?i2k(1) , ?i2v(1)},
                  {?i2k(5) , ?i2v(5)},
                  {?i2k(10), ?i2v(11)},
                  {?i2k(11), ?i2v(11)},
                  {?i2k(12), ?i2v(12)}],
                 to_orddict(New)).

%% merge/2 keeps values from the small dict on key collisions (key 10).
smart_merge_test() ->
    Big = insert_many(new(8, 1), [{1, 1}, {10, 10}, {25, 25}]),
    Small = insert_many(new(8, 1), [{0, 0}, {10, 11}, {12, 12}]),
    Merged = merge(Small, Big),
    ?assertEqual([{?i2k(0) , ?i2v(0)},
                  {?i2k(1) , ?i2v(1)},
                  {?i2k(10) , ?i2v(11)},
                  {?i2k(12), ?i2v(12)},
                  {?i2k(25), ?i2v(25)}],
                 to_orddict(Merged)).
-endif.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_crypto).
-export([hash/2, hash_init/1, hash_update/3, hash_final/2]).
-export([hmac/3]).
-compile([nowarn_deprecated_function]).
%% Hashes Data with Alg, preferring the modern crypto:hash/2 API and
%% falling back to the legacy per-algorithm functions on old OTP
%% releases. Throws {unsupported, Alg} when the legacy path has no
%% function for Alg.
hash(Alg, Data) ->
    case erlang:function_exported(crypto, hash, 2) of
        true ->
            crypto:hash(Alg, Data);
        false ->
            case Alg of
                sha -> crypto:sha(Data);
                md5 -> crypto:md5(Data);
                _ -> throw({unsupported, Alg})
            end
    end.
%% Starts an incremental hash context for Alg, using crypto:hash_init/1
%% when available and the legacy per-algorithm init functions otherwise.
%% Throws {unsupported, Alg} when the legacy path has no init for Alg.
hash_init(Alg) ->
    case erlang:function_exported(crypto, hash_init, 1) of
        true ->
            crypto:hash_init(Alg);
        false ->
            case Alg of
                sha -> crypto:sha_init();
                md5 -> crypto:md5_init();
                _ -> throw({unsupported, Alg})
            end
    end.
%% Feeds Data into an incremental hash context, using crypto:hash_update/2
%% when available and the legacy per-algorithm functions otherwise.
%% Throws {unsupported, Alg} when the legacy path has no update for Alg.
hash_update(Alg, Context, Data) ->
    case erlang:function_exported(crypto, hash_update, 2) of
        true ->
            crypto:hash_update(Context, Data);
        false ->
            case Alg of
                sha -> crypto:sha_update(Context, Data);
                md5 -> crypto:md5_update(Context, Data);
                _ -> throw({unsupported, Alg})
            end
    end.
%% Finalizes an incremental hash context into a digest, using
%% crypto:hash_final/1 when available and the legacy per-algorithm
%% functions otherwise. Throws {unsupported, Alg} when the legacy path
%% has no final for Alg.
hash_final(Alg, Context) ->
    case erlang:function_exported(crypto, hash_final, 1) of
        true ->
            crypto:hash_final(Context);
        false ->
            case Alg of
                sha -> crypto:sha_final(Context);
                md5 -> crypto:md5_final(Context);
                _ -> throw({unsupported, Alg})
            end
    end.
%% Computes the HMAC of Data keyed with Key. Tries, in order:
%%   * crypto:hmac/3    (present up to OTP 23),
%%   * crypto:mac/4     (the only HMAC API from OTP 24 on),
%%   * crypto:sha_mac/2 (ancient releases; sha only).
%% Throws {unsupported, Alg} when no available API supports Alg.
%% crypto:hmac/3 and crypto:sha_mac/2 were removed in OTP 24, so the
%% original code raised undef there; the crypto:mac/4 branch restores
%% HMAC support on modern OTP while keeping the legacy fallbacks.
hmac(Alg, Key, Data) ->
    case erlang:function_exported(crypto, hmac, 3) of
        true ->
            crypto:hmac(Alg, Key, Data);
        false ->
            case erlang:function_exported(crypto, mac, 4) of
                true ->
                    crypto:mac(hmac, Alg, Key, Data);
                false when Alg =:= sha ->
                    crypto:sha_mac(Key, Data);
                false ->
                    throw({unsupported, Alg})
            end
    end.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc VClock "Matrix".
-module(m_vclock).
-author("<NAME> <<EMAIL>>").
-include("ldb.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/2,
matrix/1,
get/1,
put/2,
next_dot/1,
add_dot/2,
update/3,
union_matrix/2,
stable/1,
size/1]).
-export_type([m/0]).
-type matrix_st() :: maps:map(ldb_node_id(), vclock()).
-record(state, {id :: ldb_node_id(),
node_number :: non_neg_integer(),
stable :: vclock(),
matrix :: matrix_st()}).
-type m() :: #state{}.
%% @doc Create an empty matrix for the local node: the stability
%% frontier starts at the bottom vclock and the matrix holds a single
%% bottom row for the local node id.
-spec new(ldb_node_id(), non_neg_integer()) -> m().
new(Id, NodeNumber) ->
    Bottom = vclock:new(),
    #state{id=Id,
           node_number=NodeNumber,
           stable=Bottom,
           matrix=#{Id => Bottom}}.
%% @doc Extract the raw matrix (map from node id to vclock).
-spec matrix(m()) -> matrix_st().
matrix(State) ->
    State#state.matrix.

%% @doc Get the local node's vector from the matrix.
-spec get(m()) -> vclock().
get(#state{id=Id, matrix=Matrix}) ->
    maps:get(Id, Matrix).

%% @doc Store the given vector as the local node's row in the matrix.
-spec put(vclock(), m()) -> m().
put(VV, #state{id=Id, matrix=Matrix}=State) ->
    State#state{matrix=Matrix#{Id => VV}}.
%% @doc Generate the next dot for the local node, the vclock that
%% includes it, and the state whose matrix reflects the new vclock.
-spec next_dot(m()) -> {dot(), vclock(), m()}.
next_dot(#state{id=Id, matrix=Matrix0}=State) ->
    %% find past, generate new dot and vv
    Past = maps:get(Id, Matrix0),
    Dot = vclock:get_next_dot(Id, Past),
    VV = vclock:add_dot(Dot, Past),
    %% update matrix
    Matrix = maps:put(Id, VV, Matrix0),
    %% return dot, vv and update state
    {Dot, VV, State#state{matrix=Matrix}}.
%% @doc Record the given dot in the local node's row of the matrix.
-spec add_dot(dot(), m()) -> m().
add_dot(Dot, #state{id=Id, matrix=Matrix0}=State) ->
    AddDot = fun(VV) -> vclock:add_dot(Dot, VV) end,
    State#state{matrix=maps:update_with(Id, AddDot, Matrix0)}.
%% @doc Update clock for a given sender: the incoming clock is unioned
%% with the sender's stored row; when the sender has no row yet, the
%% incoming clock becomes its initial row.
-spec update(ldb_node_id(), vclock(), m()) -> m().
update(Id, Clock, #state{matrix=Matrix0}=State) ->
    Matrix = maps:update_with(
        Id,
        %% take the highest clock
        fun(Current) -> vclock:union(Clock, Current) end,
        Clock,
        Matrix0
    ),
    State#state{matrix=Matrix}.
%% @doc Union two matrices. Rows present in both are merged with
%% vclock:union/2; rows present in only one side are presumably kept
%% as-is (merge semantics of maps_ext:merge_all/3 -- confirm against
%% that module).
-spec union_matrix(m(), matrix_st()) -> m().
union_matrix(#state{matrix=MatrixA}=State, MatrixB) ->
    Matrix = maps_ext:merge_all(
        fun(_, VVA, VVB) -> vclock:union(VVA, VVB) end,
        MatrixA,
        MatrixB
    ),
    %% update state
    State#state{matrix=Matrix}.
%% @doc Get list of stable dots. A dot is considered stable once it is
%% contained in every node's row, i.e. in the intersection of all row
%% vclocks; this is only computed once a row exists for each of the
%% NodeNumber nodes. Returns the dots that became stable since the last
%% call, plus the state carrying the new stability frontier.
-spec stable(m()) -> {list(dot()), m()}.
stable(#state{node_number=NodeNumber, stable=CurrentStable, matrix=Matrix}=State)->
    NewStable = case maps:size(Matrix) of
                    NodeNumber ->
                        intersect_all(Matrix);
                    _ ->
                        %% if not enough info, bottom
                        vclock:new()
                end,
    %% Dots in the new frontier that were not already in the old one.
    StableDots = vclock:subtract(NewStable, CurrentStable),
    {StableDots, State#state{stable=NewStable}}.
%% @doc Number of rows (nodes) currently present in the matrix.
-spec size(m()) -> non_neg_integer().
size(State) ->
    maps:size(State#state.matrix).
%% @private Intersects all row vclocks pairwise. Assumes the map holds
%% at least one row (guaranteed by new/2 seeding the local row).
-spec intersect_all(maps:map(ldb_node_id(), vclock())) -> vclock().
intersect_all(Matrix) ->
    [{_, First} | Rest] = maps:to_list(Matrix),
    intersect_all(First, Rest).

%% @private Accumulates the running intersection over the remaining rows.
-spec intersect_all(vclock(), term()) -> vclock().
intersect_all(Acc, [{_, VV} | Rest]) ->
    intersect_all(vclock:intersection(Acc, VV), Rest);
intersect_all(Acc, []) ->
    Acc.
-ifdef(TEST).
%% Exercises the stability frontier over a 2-node matrix: dots become
%% stable only once both rows (A's and B's) contain them, and each call
%% to stable/1 reports only the newly stabilised dots.
stable_test() ->
    %% nodes
    A = 0,
    B = 1,
    NodeNumber = 2,
    M0 = new(A, NodeNumber),
    %% dots
    A1 = {A, 1},
    A2 = {A, 2},
    B1 = {B, 1},
    B2 = {B, 2},
    B3 = {B, 3},
    B4 = {B, 4},
    %% clocks
    ClockA1 = #{A => 1, B => 1},
    ClockA2 = #{A => 1, B => 2},
    ClockB1 = #{B => 3},
    ClockA3 = #{A => 1, B => 3},
    ClockB2 = #{A => 1, B => 3},
    ClockA4 = #{A => 2, B => 4},
    ClockB3 = #{A => 2, B => 4},
    %% nothing is stable in the beg.
    ?assertEqual({[], M0}, stable(M0)),
    %% update A (matrix still misses B's row, so the frontier stays bottom
    %% and the state comes back unchanged)
    M1 = update(A, ClockA1, M0),
    ?assertEqual({[], M1}, stable(M1)),
    %% update A
    M2 = update(A, ClockA2, M1),
    ?assertEqual({[], M2}, stable(M2)),
    %% update B (both rows now present: B's dots up to B2 are shared)
    M3 = update(B, ClockB1, M2),
    {StableDots0, M4} = stable(M3),
    ?assertEqual([B1, B2], lists:sort(StableDots0)),
    %% update A
    M5 = update(A, ClockA3, M4),
    {StableDots1, M6} = stable(M5),
    ?assertEqual([B3], StableDots1),
    %% update B
    M7 = update(B, ClockB2, M6),
    {StableDots2, M8} = stable(M7),
    ?assertEqual([A1], StableDots2),
    %% update A
    M9 = update(A, ClockA4, M8),
    ?assertEqual({[], M9}, stable(M9)),
    %% update B
    M10 = update(B, ClockB3, M9),
    {StableDots3, _} = stable(M10),
    ?assertEqual([A2, B4], lists:sort(StableDots3)),
    ok.
-endif.
-module(oc_telemetry).
% ------------------------------------------------------------------------------
% Public Exports
-export([attach/4, track/1]).
% ------------------------------------------------------------------------------
-record(options, {name, filter}).
% ------------------------------------------------------------------------------
%% @doc Creates a measurement and attaches it to the telemetry dispatcher.
%%
%% On success it returns `{ok, Measurement}' where `Measurement' is the
%% newly created OpenCensus measurement that can be used for creating
%% views.
%%
%% When there is already an attached listener for the given name it
%% returns `{error, already_registered}'.
-spec attach(oc_stat_measure:name(),
             telemetry:event_name(),
             oc_stat_measure:description(),
             oc_stat_measure:unit()) -> {ok, oc_stat_measure:measure()}
                                      | {error, already_registered}.
attach(Name, EventName, Description, Unit) ->
    %% No metadata filtering requested: pass event metadata through as-is.
    Identity = fun (Metadata) -> Metadata end,
    attach(Name, EventName, Description, Unit, Identity).
%% @doc Creates measurement, view, and attaches measurement to the telemetry
%%
%% On success it returns `{ok, View}' where `View' is newly created OpenCensus
%% view.
%%
%% When there is already attached listener for given name then it will return
%% `{error, already_registered}'.
-spec track(Metric::map()) -> {ok, oc_stat_view:view()}
                            | {error, term()}.
%% The argument is an Elixir Telemetry.Metrics struct (a map with a
%% '__struct__' tag), hence the 'Elixir.*' atoms matched in aggregation/2.
track(#{'__struct__' := Type,
        name := NormalizedName,
        event_name := EventName,
        metadata := Metadata,
        tags := Tags,
        description := Desc,
        unit := Unit} = Data) ->
    %% e.g. [foo, bar] becomes the measure name <<"foo/bar">>.
    Name = build_name(NormalizedName),
    Description = Desc,
    case attach(Name, EventName, Description, Unit, Metadata) of
        {ok, Measure} ->
            Aggregation = aggregation(Type, Data),
            oc_stat_view:subscribe(Name, Measure, Description, Tags, Aggregation);
        Error -> Error
    end.
% ------------------------------------------------------------------------------
%% @private Creates the measure and registers handle_event/4 with telemetry,
%% carrying the measure name and the metadata filter in the handler config.
attach(Name, EventName, Description, Unit, Metadata) ->
    Measure = oc_stat_measure:new(Name, Description, Unit),
    Options = #options{name = Name, filter = Metadata},
    case telemetry:attach(Name, EventName, fun handle_event/4, Options) of
        ok -> {ok, Measure};
        Error -> Error
    end.

% Handle events sent by the `telemetry' application: the configured filter
% maps the event metadata before the value is recorded under Name.
handle_event(_EventName, Value, Metadata,
             #options{name = Name, filter = Filter}) ->
    Filtered = Filter(Metadata),
    ok = oc_stat:record(Filtered, Name, Value).
build_name(NormalizedName) ->
Stringified = [atom_to_list(Atom) || Atom <- NormalizedName],
Joined = lists:join($/, Stringified),
list_to_binary(Joined).
aggregation('Elixir.Telemetry.Metrics.Counter', _) ->
oc_stat_aggregation_count;
aggregation('Elixir.Telemetry.Metrics.Sum', _) ->
oc_stat_aggregation_sum;
aggregation('Elixir.Telemetry.Metrics.LastValue', _) ->
oc_stat_aggregation_latest;
aggregation('Elixir.Telemetry.Metrics.Distribution', #{buckets := Buckets}) ->
{oc_stat_aggregation_distribution, [{buckets, Buckets}]}.
-ifdef(EUNIT).
-include_lib("eunit/include/eunit.hrl").
build_name_test() ->
?assertEqual(<<"foo/bar">>, build_name([foo, bar])).
-endif. | src/oc_telemetry.erl | 0.602062 | 0.410225 | oc_telemetry.erl | starcoder |
%%%-------------------------------------------------------------------
%%% Licensed to the Apache Software Foundation (ASF) under one
%%% or more contributor license agreements. See the NOTICE file
%%% distributed with this work for additional information
%%% regarding copyright ownership. The ASF licenses this file
%%% to you under the Apache License, Version 2.0 (the
%%% "License"); you may not use this file except in compliance
%%% with the License. You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing,
%%% software distributed under the License is distributed on an
%%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%%% KIND, either express or implied. See the License for the
%%% specific language governing permissions and limitations
%%% under the License.
%%%
%%% @doc otter API module. Use the API in this module to interact with
%%% the `otter' application.
%%%
%%% The module covers 3 APIs. The `Span API', `Snapshot/Count API' and
%%% `Config API'.
%%%
%%% == Span API ==
%%%
%%% The module exposes the functional span API.
%%% These API calls allow you to create and manage spans. These are:<br/>
%%% `start/1, start/2, start/3'<br/>
%%% `start_with_tags/2, start_with_tags/3, start_with_tags/4'<br/>
%%% `finish/1' <br/>
%%% `ids/1' <br/>
%%% `log/2, log/3' <br/>
%%% `tag/3, tag/4' <br/>
%%%
%%% The API supports pre-filtering of spans with the start_with_tags
%%% functions. Pre-filtering is invoked when the span is started with
%%% initial tags. The result of the pre filter rules can be an inactive
%%% span. Inactive spans do not trigger the rules when finishing the span
%%% (i.e. always discarded) and consume somewhat less resources (depending
%%% on the API) than gathering active spans.
%%%
%%% There are additional span APIs developed for specific use cases to
%%% simplify instrumentations for different code bases. These
%%% are accessible in separate modules.
%%% - simple process dictionary API : `otter_span_pdict_api' <br/>
%%% - multi span process dictionary API : `otter_span_mpdict_api' <br/>
%%% - span id/process API : `otter_span_id_api' <br/>
%%% For further information on these APIs, check the documentation of the
%%% modules
%%%
%%% == Snapshot/Count API ==
%%% When `finish/1' is called then the completed span is
%%% passed to a configurable filter. The filter can check the Span tags
%%% as well as the name and duration of the span and use the information
%%% to decide to send the Span to the trace collector (Zipkin supported)
%%% and/or increase counters based on values of the tags and store the
%%% last Span for the counters. This latter is particularly useful for
%%% troubleshooting e.g. error events when increase of the corresponding
%%% counter is noticed. These snapshots (referred as Snap) and counters
%%% can be retrieved, managed with this API. These are:<br/>
%%% `counter_list/0'<br/>
%%% `counter_snapshot/1'<br/>
%%% `counter_delete/1'<br/>
%%% `counter_delete_all/0'<br/>
%%%
%%% == Config API ==
%%% The default implementation uses the application environment to
%%% store configuration. There is a simple wrapper module to interface
%%% with configuration store (otter_config). To implement other config
%%% persistence, the module should be replaced with another one providing
%%% the same simple read/write API functions.<br/>
%%% <em>WARNING</em> : In the default implementation using the application
%%% environment, so the write function is NOT persistent. In case of node
%%% restart and/or application reload the configuration will be reset to
%%% whatever environment is defined in the release (sys) config or app
%%% file. There is an example configuration provided in the `otter.app`
%%% file as a reference. These are:<br/>
%%% `config_list/0'<br/>
%%% `config_read/1'<br/>
%%% `config_read/2'<br/>
%%% `config_write/2'<br/>
%%% @end
%%%-------------------------------------------------------------------
-module(otter).
-include_lib("otter_lib/include/otter.hrl").
-export([
%% Span API
start/1, start/2, start/3,
start_with_tags/2, start_with_tags/3, start_with_tags/4,
finish/1,
ids/1,
log/2, log/3,
tag/3, tag/4,
%% Snapshot / Count API
counter_list/0,
counter_snapshot/1,
counter_delete/1,
counter_delete_all/0,
%% Config API
config_list/0,
config_read/1,
config_read/2,
config_write/2
]).
%%--------------------------------------------------------------------
%% @doc Starts a span with the specified name. Automatically generates
%% a trace id.
%% @end
%%--------------------------------------------------------------------
-spec start(Name :: info()) -> span().
start(Name) ->
otter_lib_span:start(Name).
%%--------------------------------------------------------------------
%% @doc Starts a span with the specified name and parent span or a trace id
%% @end
%% --------------------------------------------------------------------
-spec start(Name :: info(), ParentSpan :: span()) -> span().
start(Name, #span{} = ParentSpan) ->
{TraceId, ParentId} = ids(ParentSpan),
otter_lib_span:start(Name, TraceId, ParentId);
start(Name, TraceId) when is_integer(TraceId) ->
otter_lib_span:start(Name, TraceId).
%%--------------------------------------------------------------------
%% @doc Starts a child span with the specified name, trace id and
%% parent id
%% @end
%% --------------------------------------------------------------------
-spec start(Name :: info(), TraceId :: integer(), ParentId :: integer()) -> span().
start(Name, TraceId, ParentId) when is_integer(TraceId),
is_integer(ParentId) ->
otter_lib_span:start(Name, TraceId, ParentId).
%%--------------------------------------------------------------------
%% @doc Starts a span with the specified name and initial tags.
%% Automatically generates a trace id and invokes pre filtering.
%% @end
%%--------------------------------------------------------------------
%% TODO Figure out why dialyzer doesn't like this spec : -spec start_with_tags(Name :: info(), Tags :: [tag()]) -> span().
start_with_tags(Name, Tags) ->
Span = otter_lib_span:start_with_tags(Name, Tags),
otter_filter:pre_span(Span).
%%--------------------------------------------------------------------
%% @doc Starts a span with the specified name, initial tags and
%% trace id or a parent span
%% @end
%% --------------------------------------------------------------------
-spec start_with_tags(Name :: info(), Tags :: [tag()], Span :: span()) -> span();
(Name :: info(), Tags :: [tag()], TraceId :: trace_id()) -> span().
start_with_tags(Name, Tags, #span{trace_id = TraceId, id = ParentId}) ->
Span = otter_lib_span:start_with_tags(Name, Tags, TraceId, ParentId),
otter_filter:pre_span(Span);
start_with_tags(Name, Tags, TraceId) when is_integer(TraceId) ->
Span = otter_lib_span:start_with_tags(Name, Tags, TraceId),
otter_filter:pre_span(Span).
%%--------------------------------------------------------------------
%% @doc Starts a child span with the specified name, trace id and
%% parent id
%% @end
%% --------------------------------------------------------------------
-spec start_with_tags(Name :: info(), Tags :: [tag()], TraceId :: trace_id(), ParentId :: span_id()) -> span().
start_with_tags(Name, Tags, TraceId, ParentId) when is_integer(TraceId),
is_integer(ParentId) ->
Span = otter_lib_span:start_with_tags(Name, Tags, TraceId, ParentId),
otter_filter:pre_span(Span).
%%--------------------------------------------------------------------
%% @doc Adds a tag to a span. If the tag already exists, its value
%% will be overwritten.
%% @end
%% --------------------------------------------------------------------
-spec tag(Span :: span(), Key :: info(), Value :: info()) -> span().
tag(#span{timestamp = 0} = Span, _Key, _Value) ->
Span;
tag(#span{} = Span, Key, Value) ->
otter_lib_span:tag(Span, Key, Value).
%%--------------------------------------------------------------------
%% @doc Adds a tag to a span with a given service. If the tag already
%% exists, its value will be overwritten.
%% @end
%% --------------------------------------------------------------------
-spec tag(Span :: span(), Key :: info(), Value :: info(), Service :: service()) -> span().
tag(#span{timestamp = 0} = Span, _Key, _Value, _Service) ->
Span;
tag(#span{} = Span, Key, Value, Service) ->
otter_lib_span:tag(Span, Key, Value, Service).
%%--------------------------------------------------------------------
%% @doc Adds a log message to a span. If the span is not active,
%% the tag is not added.
%% @end
%% --------------------------------------------------------------------
-spec log(Span :: span(), Text :: info()) -> span().
log(#span{timestamp = 0} = Span, _Text) ->
Span;
log(#span{} = Span, Text) ->
otter_lib_span:log(Span, Text).
%%--------------------------------------------------------------------
%% @doc Adds a log message to a span with the specified service information.
%% If the span is not active, the tag is not added.
%% @end
%% --------------------------------------------------------------------
-spec log(Span :: span(), Text :: info(), Service :: service()) -> span().
log(#span{timestamp = 0} = Span, _Text, _Service) ->
Span;
log(#span{} = Span, Text, Service) ->
otter_lib_span:log(Span, Text, Service).
%%--------------------------------------------------------------------
%% @doc Ends an active span and prepares it for potential delivery to the
%% backend based on filtering rules.
%% @end
%% --------------------------------------------------------------------
-spec finish(Span :: span()) -> ok.
finish(#span{timestamp = 0}) ->
ok;
finish(#span{} = Span) ->
otter_filter:span(otter_lib_span:finish(Span)).
%%--------------------------------------------------------------------
%% @doc Returns the trace id and span id for a given span.
%% @end
%% --------------------------------------------------------------------
-spec ids(Span :: span()) -> {trace_id(), span_id()}.
ids(#span{timestamp = 0}) ->
%% Not sure what sensible things can we return here. I think the main
%% consideration would be not to crash code that expect integers ...
{0,0};
ids(#span{} = Span) ->
otter_lib_span:get_ids(Span).
%%--------------------------------------------------------------------
%% Snapshot/Counter API
%% --------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @doc List all the snapshot counters
%% @end
%% --------------------------------------------------------------------
-spec counter_list() -> [{term(), integer()}].
counter_list() ->
otter_lib_snapshot_count:list_counts().
%%--------------------------------------------------------------------
%% @doc Show the last event snapshot of a counter
%% @end
%% --------------------------------------------------------------------
-spec counter_snapshot(Key :: term()) -> term().
counter_snapshot(Key) ->
otter_lib_snapshot_count:get_snap(Key).
%%--------------------------------------------------------------------
%% @doc Delete (reset) a counter
%% @end
%% --------------------------------------------------------------------
-spec counter_delete(Key :: term()) -> ok.
counter_delete(Key) ->
otter_lib_snapshot_count:delete_counter(Key).
%%--------------------------------------------------------------------
%% @doc Delete (reset) all counters
%% @end
%% --------------------------------------------------------------------
-spec counter_delete_all() -> ok.
counter_delete_all() ->
otter_lib_snapshot_count:delete_all_counters().
%%--------------------------------------------------------------------
%% Config API
%% --------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @doc List all defined configuration
%% @end
%% --------------------------------------------------------------------
-spec config_list() -> term().
config_list() ->
otter_config:list().
%%--------------------------------------------------------------------
%% @doc Read a configuration value
%% @end
%% --------------------------------------------------------------------
-spec config_read(Key :: atom()) -> term().
config_read(Key) ->
otter_config:read(Key).
%%--------------------------------------------------------------------
%% @doc Read a configuration value with default
%% @end
%% --------------------------------------------------------------------
-spec config_read(Key :: atom(), Default :: term()) -> term().
config_read(Key, Default) ->
otter_config:read(Key, Default).
%%--------------------------------------------------------------------
%% @doc Write a configuration value.
%% Note : With the default config implementation this write is not
%% persistent. Update the application environment/config accordingly to
%% persist.
%% @end
%% --------------------------------------------------------------------
-spec config_write(Key :: atom(), Value :: term()) -> ok.
config_write(Key, Value) ->
otter_config:write(Key, Value). | src/otter.erl | 0.58439 | 0.418043 | otter.erl | starcoder |
%% @doc {@module} allows writing composable decoders, combining decoding,
%% transformation and validation.
-module(dj).
-export([ %% Primitives
binary/0
, integer/0
, pos_integer/0
, neg_integer/0
, non_neg_integer/0
, float/0
, null/0
, null/1
, boolean/0
, value/0
%% Convenience decoders
, atom/0
, atom/1
, existing_atom/0
, existing_atom/1
, email/0
, full_date_tuple/1
, uuid/1
, integer/2
, nullable/1
, nullable/2
%% Objects and maps
, field/2
, optional_field/3
, at/2
, prop/2
, prop_list/1
, to_map/1
%% Lists
, list/1
, nonempty_list/1
, index/2
, sequence/1
, set/1
%% Manipulating decoders
, map/2
, chain/2
, fail/1
, succeed/1
%% Fancy decoders
, mapn/2
, exactly/2
, one_of/1
%% Running a decoder
, decode/2
, decode/3
]).
%%%-----------------------------------------------------------------------------
%%% Types
%%%-----------------------------------------------------------------------------
-type decoder(T) :: fun ((jsx:json_term()) -> result(T, errors())).
%% A `decoder(T)' is an opaque datastructure that represents a composable
%% decoder. After composing a decoder for your JSON and final datastructure, you
%% can run it with {@link decode/2} or {@link decode/3}.
%%
%% Consider `decoder(T)' an opaque datastructure. It is only exposed due to
%% dialyzer limitations.
-type result(V, E) :: {ok, V} | {error, E}.
%% Running a decoder results in a `result', indicating either success (and
%% providing the decoded data) or failure, with a nonempty list of errors.
-type error() :: {unexpected_type, ExpectedType :: type(), jsx:json_term()}
| {missing_field, field(), jsx:json_term()}
| {missing_index, non_neg_integer(), jsx:json_term()}
| {in_field, field(), [error(), ...]}
| {at_index, non_neg_integer(), [error(), ...]}
| {custom, any()}
| {invalid_json, any()}.
%% `error()' is a (recursive) structure that gives detailed information about
%% what went wrong, and where.
%%
%% Most primitive decoders may fail with an `unexpected_type' error when the
%% JSON value is not the expected type. Those errors contain both the expected
%% <em>type</em> and the actual <em>value</em>.
%%
%% When decoders like {@link field/2} or {@link index/2} are used,
%% `missing_field' and `missing_index' may be used when the provided field or
%% index was not found in the JSON data.
%%
%% Errors that occur in the context of a field or index, for example when using
%% {@link field/2} or {@link list/1}, are nested in `in_field' or `at_index'
%% which allows tracing the failure path.
%%
%% Convenience and extended decoders using {@link fail/1} will wrap their errors
%% in `custom' to indicate a higher level failure.
%%
%% If the actual JSON cannot be parsed by jsx, an `invalid_json' error is
%% produced.
-type errors() :: [error(), ...].
%% A nonempty list of {@link error/0}s
-type type() :: binary
| integer
| pos_integer
| neg_integer
| non_neg_integer
| null
| boolean
| float
| map
| list
| nonempty_list.
%% These may appear in `unexpected_type' errors.
-type field() :: atom() | binary().
%% Depending on the options passed to {@link decoder/3}, a field-name may be
%% either an `atom()', a `binary()' or a mix of both. When using {@link
%% decoder/2}, field-names are always atoms. Note that those atoms must already
%% exist, or decoding will fail.
-export_type([ decoder/1
, result/2
, error/0
, errors/0
, type/0
, field/0
]).
%%%-----------------------------------------------------------------------------
%%% API
%%%-----------------------------------------------------------------------------
%% @doc Decodes a JSON string as a `binary()'.
%%
%% ```
%% {ok, <<"Hi there">>} = dj:decode(<<"\"Hi there\"">>, dj:binary()).
%% '''
%%
%% If the JSON value is not a string (it might, for example, be an
%% integer), this returns an `unexpected_type' error that carries the
%% actual `jsx:json_term()' value that was found instead.
%%
%% ```
%% {error, {dj_errors, [{unexpected_type, binary, 123}]}} =
%%   dj:decode(<<"123">>, dj:binary()).
%% '''
-spec binary() -> decoder(binary()).
binary() ->
    fun (Json) ->
            case is_binary(Json) of
                true  -> {ok, Json};
                false -> unexpected_type_error(binary, Json)
            end
    end.
%% @doc Decodes a JSON integer as an `integer()'
%%
%% ```
%% {ok, 123} = dj:decode(<<"123">>, dj:integer()).
%% {error, {dj_errors, [{unexpected_type, integer, true}]}} =
%%   dj:decode(<<"true">>, dj:integer()).
%% '''
%%
%% @see float/0
%% @see pos_integer/0
%% @see neg_integer/0
%% @see non_neg_integer/0
%% @see integer/2
-spec integer() -> decoder(integer()).
integer() ->
    fun (Json) ->
            case is_integer(Json) of
                true  -> {ok, Json};
                false -> unexpected_type_error(integer, Json)
            end
    end.
%% @doc Decodes a strictly positive JSON integer as a `pos_integer()'
%%
%% Any non-integer value, zero, or a negative integer yields an
%% `unexpected_type' error with the expected type `pos_integer'.
%%
%% @see float/0
%% @see integer/0
%% @see neg_integer/0
%% @see non_neg_integer/0
-spec pos_integer() -> decoder(pos_integer()).
pos_integer() ->
    fun (Json) when is_integer(Json) andalso Json > 0 ->
            {ok, Json};
        (Other) ->
            unexpected_type_error(pos_integer, Other)
    end.
%% @doc Decodes a negative JSON integer as a `neg_integer()'
%%
%% Any non-integer value, zero, or a positive integer yields an
%% `unexpected_type' error with the expected type `neg_integer'.
%%
%% @see float/0
%% @see pos_integer/0
%% @see integer/0
%% @see non_neg_integer/0
-spec neg_integer() -> decoder(neg_integer()).
neg_integer() ->
    fun (Json) when is_integer(Json) andalso Json < 0 ->
            {ok, Json};
        (Other) ->
            unexpected_type_error(neg_integer, Other)
    end.
%% @doc Decodes a non-negative JSON integer (zero included) as a
%% `non_neg_integer()'
%%
%% Any non-integer value or a negative integer yields an
%% `unexpected_type' error with the expected type `non_neg_integer'.
%%
%% @see float/0
%% @see pos_integer/0
%% @see neg_integer/0
%% @see integer/0
-spec non_neg_integer() -> decoder(non_neg_integer()).
non_neg_integer() ->
    fun (Json) when is_integer(Json) andalso Json >= 0 ->
            {ok, Json};
        (Other) ->
            unexpected_type_error(non_neg_integer, Other)
    end.
%% @doc Decodes a JSON number as a `float()'
%%
%% JSON has no separate floating point type, so integers found in the
%% JSON are cast to floats by this decoder.
%%
%% ```
%% {ok, 123.0} = dj:decode(<<"123">>, dj:float()).
%% '''
%%
%% @see integer/0
%% @see pos_integer/0
%% @see neg_integer/0
%% @see non_neg_integer/0
-spec float() -> decoder(float()).
float() ->
    fun (Json) when is_float(Json) ->
            {ok, Json};
        (Json) when is_integer(Json) ->
            %% Cast JSON integers to floats; erlang:float/1 is the
            %% auto-imported BIF (local float/0 has a different arity).
            {ok, erlang:float(Json)};
        (Other) ->
            unexpected_type_error(float, Other)
    end.
%% @equiv null(null)
-spec null() -> decoder(null).
null() -> null(null).
%% @doc Decodes `null' into an arbitrary value.
%%
%% Useful to map `null' onto a specific term, such as `undefined' or an
%% application-level default.
%%
%% ```
%% {ok, foo} = dj:decode(<<"null">>, dj:null(foo)).
%% '''
%%
%% @see null/0
-spec null(V) -> decoder(V).
null(Value) ->
    fun (null)  -> {ok, Value};
        (Other) -> unexpected_type_error(null, Other)
    end.
%% @doc Decodes a JSON `true' or `false' to a `boolean()'
%%
%% ```
%% {ok, true} = dj:decode(<<"true">>, dj:boolean()).
%% {ok, false} = dj:decode(<<"false">>, dj:boolean()).
%% {error, _} = dj:decode(<<"null">>, dj:boolean()).
%% '''
-spec boolean() -> decoder(boolean()).
boolean() ->
    fun (Json) ->
            case is_boolean(Json) of
                true  -> {ok, Json};
                false -> unexpected_type_error(boolean, Json)
            end
    end.
%% @doc Extracts the raw `jsx:json_term()', always succeeding.
-spec value() -> decoder(jsx:json_term()).
value() ->
    fun (Json) -> {ok, Json} end.
%% @doc Decodes a JSON value to an `atom()'
%%
%% `true', `false' and `null' map onto the corresponding Erlang atoms; a
%% JSON string is converted via `binary_to_atom(Json, utf8)'. Any other
%% value fails with a custom `{not_an_atom, Value}' error.
%%
%% <strong>NOTE</strong>: atoms are never garbage collected, so feeding
%% this decoder untrusted input may exhaust the atom table. Prefer
%% {@link existing_atom/0} or {@link atom/1} for untrusted data.
%%
%% @see existing_atom/0
%% @see atom/1
-spec atom() -> decoder(atom()).
atom() ->
    ToAtom =
        fun (A) when is_atom(A)   -> succeed(A);
            (B) when is_binary(B) -> succeed(binary_to_atom(B, utf8));
            (Other)               -> fail({not_an_atom, Other})
        end,
    chain(value(), ToAtom).
%% @doc Decodes a JSON value to one of a set of predefined atoms
%%
%% A safer alternative to {@link atom/0}: it whitelists the allowed
%% values and never creates new atoms.
-spec atom([atom(), ...]) -> decoder(atom()).
atom(Allowed) ->
    one_of([exactly(A, existing_atom()) || A <- Allowed]).
%% @doc Decodes a JSON value to an existing atom
%%
%% A JSON string is converted with `binary_to_existing_atom/2'; when the
%% atom does not exist, decoding fails with a custom
%% `{not_an_atom, Value}' error instead of creating it.
%%
%% Note that Erlang may optimize atoms away in some cases. For example,
%% an atom only ever used in an `atom_to_binary(some_atom)' call may not
%% "exist" at runtime.
%%
%% @see atom/1
-spec existing_atom() -> decoder(atom()).
existing_atom() ->
    chain( value()
         , fun (A) when is_atom(A) ->
                   succeed(A);
               (B) when is_binary(B) ->
                   %% binary_to_existing_atom/2 raises badarg for unknown
                   %% atoms; translate that into a decoder failure.
                   try succeed(binary_to_existing_atom(B, utf8))
                   catch error:badarg -> fail({not_an_atom, B})
                   end;
               (Other) ->
                   fail({not_an_atom, Other})
           end
         ).
%% @equiv atom(Allowed)
-spec existing_atom([atom(), ...]) -> decoder(atom()).
%% Thin alias for atom/1, kept for naming symmetry with existing_atom/0:
%% atom/1 already only produces whitelisted (hence existing) atoms, so no
%% new atoms can be created through either spelling.
existing_atom(Allowed) ->
  atom(Allowed).
%% @doc Decodes a JSON string as a `binary()' if and only if it looks like an
%% email address.
%%
%% The input is lowercased before matching, so the decoded value is
%% always lowercase.
%%
%% ```
%% {ok, <<"<EMAIL>">>} =
%%   dj:decode(<<"\"<EMAIL>\"">>, dj:email()).
%% '''
%%
%% If the specified JSON is not a string, this will fail with an
%% `unexpected_type' error (expecting a `binary'). If the specified JSON is a
%% string but does not look like an email address, this will fail with a custom
%% `not_an_email' error.
%%
%% ```
%% E = {dj_errors, [{custom, {not_an_email, <<"foo@bar">>}}]},
%% {error, E} = dj:decode(<<"\"foo@bar\"">>, dj:email()).
%% '''
-spec email() -> decoder(binary()).
email() ->
    %% NB: backslashes must be doubled inside Erlang string literals. The
    %% previous pattern used single backslashes, so `\s' collapsed to a
    %% literal space and `\.' to a bare `.' (which the regex engine treats
    %% as "match any character") before `re' ever saw the pattern. With
    %% `\\s' and `\\.' the regex engine receives the intended `\s'
    %% (whitespace class) and literal dot.
    Pattern = <<"^[^@\\s]+@([^.@\\s]{2,}\\.){1,}[a-z]{2,}$">>,
    chain( map(fun string:lowercase/1, binary())
         , fun (V) ->
               case re:run(V, Pattern, [{capture, none}]) of
                 match   -> succeed(V);
                 nomatch -> fail({not_an_email, V})
               end
           end
         ).
%% @doc Decode an RFC 3339 `full-date' (`YYYY-MM-DD') JSON string into a
%% `calendar:date()' tuple.
%%
%% Decoding fails with a custom `{malformed_date, Binary}' error when the
%% string does not have the `YYYY-MM-DD' shape, and with a custom
%% `{invalid_date, Date}' error when the shape is right but the date does
%% not exist in the calendar (e.g. `2021-02-30').
-spec full_date_tuple(rfc3339) -> decoder(calendar:date()).
full_date_tuple(rfc3339) ->
  %% The pattern itself restricts month to 01-12 and day to 01-31;
  %% semantic validity (leap years, month lengths) is checked separately
  %% below via calendar:valid_date/3.
  RE = <<"^([0-9]+)-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])$">>,
  ToDateTuple =
    fun (B) ->
        case re:run(B, RE, [{capture, all_but_first, binary}]) of
          {match, [Y, M, D]} ->
            succeed({ erlang:binary_to_integer(Y)
                    , erlang:binary_to_integer(M)
                    , erlang:binary_to_integer(D)
                    });
          _ ->
            fail({malformed_date, B})
        end
    end,
  %% Reject tuples that parse but denote no real date (e.g. April 31st).
  ValidateDateTuple =
    fun (Date = {Y, M, D}) ->
        case calendar:valid_date(Y, M, D) of
          true -> succeed(Date);
          false -> fail({invalid_date, Date})
        end
    end,
  chain( binary()
       , [ ToDateTuple
         , ValidateDateTuple
         ]
       ).
%% @doc Decode a UUIDv4 from a JSON string
%%
%% Matches the canonical lowercase hex form with a `4' in the version
%% position and `8', `9', `a' or `b' in the variant position. Any other
%% string fails with a custom `{invalid_uuid, v4, Value}' error.
-spec uuid(v4) -> decoder(binary()).
uuid(v4) ->
    Pattern = <<"^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$">>,
    chain( binary()
         , fun (B) ->
               case re:run(B, Pattern) of
                 {match, _} -> succeed(B);
                 nomatch    -> fail({invalid_uuid, v4, B})
               end
           end
         ).
%% @doc Decode a bounded integer from JSON
%%
%% Decodes a JSON integer only when it lies between the given bounds.
%% Both bounds are inclusive; if they are passed in the wrong order they
%% are swapped.
%%
%% ```
%% -spec score() -> dj:decoder(1..10).
%% score() ->
%%   dj:integer(1, 10).
%%
%% {ok, 5} = dj:decode(<<"5">>, score()).
%%
%% E = {dj_errors, [{custom, {integer_out_of_bounds, 1, 10, 0}}]},
%% {error, E} = dj:decode(<<"0">>, score()).
%% '''
%%
%% @see integer/0
-spec integer(Min :: integer(), Max :: integer()) -> decoder(integer()).
integer(Min, Max) when Min > Max ->
    %% Normalize reversed bounds.
    integer(Max, Min);
integer(Min, Max) ->
    chain( integer()
         , fun (I) when Min =< I, I =< Max -> succeed(I);
               (I) -> fail({integer_out_of_bounds, Min, Max, I})
           end
         ).
%% @doc Decode a nullable value
%%
%% Sometimes, we explicitly want to allow `null'. In such a case, making that
%% clear by wrapping a decoder with `nullable/1' can help readability.
%%
%% ```
%% -spec score() -> dj:decoder(1..10).
%% score() ->
%%   dj:integer(1, 10).
%%
%% {ok, 5} = dj:decode(<<"5">>, nullable(score())).
%% {ok, null} = dj:decode(<<"null">>, nullable(score())).
%%
%% E = {dj_errors, [ {unexpected_type, integer, true}
%%                 , {unexpected_type, null, true}
%%                 ]},
%% {error, E} = dj:decode(<<"true">>, nullable(score())).
%% '''
%%
%% @equiv nullable(Decoder, null)
-spec nullable(decoder(T)) -> decoder(T | null).
nullable(Decoder) ->
  nullable(Decoder, null).
%% @doc Decode a nullable value, substituting a default for `null'
%%
%% Sometimes, we explicitly want to allow `null' but use a default value. In
%% such a case, making that clear by wrapping a decoder with `nullable/2' can
%% help readability.
%%
%% ```
%% -spec score() -> dj:decoder(1..10).
%% score() ->
%%   dj:integer(1, 10).
%%
%% {ok, 4} = dj:decode(<<"4">>, nullable(score(), 5)).
%% {ok, 5} = dj:decode(<<"null">>, nullable(score(), 5)).
%%
%% E = {dj_errors, [ {unexpected_type, integer, true}
%%                 , {unexpected_type, null, true}
%%                 ]},
%% {error, E} = dj:decode(<<"true">>, nullable(score(), 5)).
%% '''
%%
%% @see nullable/1
-spec nullable(decoder(T), V) -> decoder(T | V).
nullable(Decoder, Default) ->
  one_of([Decoder, null(Default)]).
%% @doc Decodes the value found under `Field' in a JSON object
%%
%% ```
%% Dec = dj:field(foo, dj:binary()),
%% {ok, <<"bar">>} = dj:decode(<<"{\"foo\": \"bar\"}">>, Dec),
%%
%% Error = {unexpected_type, binary, null},
%% InField = {in_field, foo, [Error]},
%% {error, {dj_errors, [InField]}} = dj:decode(<<"{\"foo\": null}">>, Dec),
%%
%% Missing = {missing_field, foo, #{}},
%% {error, {dj_errors, [Missing]}} = dj:decode(<<"{}">>, Dec).
%% '''
%%
%% @see at/2
%% @see prop/2
%% @see to_map/1
%% @see prop_list/1
-spec field(field(), decoder(T)) -> decoder(T).
field(Field, Decoder) ->
    fun (Json) when is_map(Json) ->
            case Json of
                #{Field := Value} -> in_field(Field, Decoder(Value));
                _ -> missing_field_error(Field, Json)
            end;
        (Json) ->
            unexpected_type_error(map, Json)
    end.
%% @doc Decodes a field or uses a default value when the field is missing
%%
%% If the field is present but malformed according to the decoder,
%% decoding fails. If the JSON value is not an object at all, decoding
%% fails as well.
%%
%% ```
%% Dec = dj:optional_field(foo, dj:binary(), <<"default">>),
%%
%% {ok, <<"bar">>} = dj:decode(<<"{\"foo\": \"bar\"}">>, Dec),
%% {ok, <<"default">>} = dj:decode(<<"{}">>, Dec),
%%
%% Error = {unexpected_type, binary, null},
%% InField = {in_field, foo, [Error]},
%% {error, {dj_errors, [InField]}} = dj:decode(<<"{\"foo\": null}">>, Dec),
%%
%% NotMap = {unexpected_type, map, <<"foo">>},
%% {error, {dj_errors, [NotMap]}} = dj:decode(<<"\"foo\"">>, Dec).
%% '''
-spec optional_field(field(), decoder(T), V) -> decoder(T | V).
optional_field(Field, Decoder, Default) ->
    fun (Json) when is_map(Json) ->
            case maps:find(Field, Json) of
                {ok, Value} -> in_field(Field, Decoder(Value));
                error -> {ok, Default}
            end;
        (Json) ->
            unexpected_type_error(map, Json)
    end.
%% @doc Instruct a decoder to match a value in a nested path
%%
%% An empty path decodes the value itself.
%%
%% ```
%% {ok, null} = dj:decode(<<"null">>, dj:at([], dj:null())).
%%
%% Dec = dj:at([foo, bar], dj:integer()),
%% Json = <<"{\"foo\": {\"bar\": 123}}">>,
%% {ok, 123} = dj:decode(Json, Dec).
%% '''
%%
%% @see field/2
%% @see prop/2
%% @see to_map/1
%% @see prop_list/1
-spec at([field()], decoder(T)) -> decoder(T).
at([], Decoder) ->
    Decoder;
at([Field | Rest], Decoder) ->
    %% Wrap from the outside in: the head field is decoded first.
    field(Field, at(Rest, Decoder)).
%% @doc Decode a field as a `{Field, Value}' property
%%
%% Like {@link field/2}, but pairs the decoded value with the field name,
%% which is convenient when decoding into a proplist.
%%
%% @see field/2
%% @see prop_list/1
-spec prop(field(), decoder(T)) -> decoder({field(), T}).
prop(Field, Decoder) ->
    Tag = fun (Value) -> {Field, Value} end,
    map(Tag, field(Field, Decoder)).
%% @doc Decode a proplist by matching fields on a JSON object
%%
%% Each `{Field, Decoder}' entry contributes one `{Field, Value}' pair.
-spec prop_list([{field(), decoder(T)}]) -> decoder([{field(), T}]).
prop_list(Spec) ->
    sequence([prop(Field, Decoder) || {Field, Decoder} <- Spec]).
%% @doc Decode arbitrary JSON into a `map()'
%%
%% The spec is either a map or a list of pairs, where each key is paired
%% with the decoder producing its value.
%%
%% ```
%% Dec = dj:to_map(#{ x => dj:index(0, dj:integer())
%%                  , y => dj:index(1, dj:integer())
%%                  , z => dj:index(2, dj:integer())
%%                  }),
%% Json = <<"[1, 6, 2]">>,
%% {ok, #{x := 1, y := 6, z := 2}} = dj:decode(Json, Dec).
%% '''
%%
%% @see field/2
-spec to_map(MapSpec) -> decoder(MapResult) when
    MapSpec :: #{Key := decoder(T)} | [{Key, decoder(T)}],
    MapResult :: #{Key := T}.
to_map(Spec) when is_map(Spec) ->
    to_map(maps:to_list(Spec));
to_map(Spec) when is_list(Spec) ->
    %% Pair each decoded value with its key, run all decoders on the same
    %% JSON value, and fold the resulting pairs into a map.
    Entries = [map(fun (V) -> {Key, V} end, Decoder) || {Key, Decoder} <- Spec],
    map(fun maps:from_list/1, sequence(Entries)).
%% @doc Decode a JSON list, applying the decoder to every element
%%
%% Fails with an `unexpected_type' error when the value is not a list.
-spec list(decoder(T)) -> decoder([T]).
list(Decoder) ->
    fun (Json) ->
            case is_list(Json) of
                true  -> decode_all(Decoder, Json, 0, {ok, []});
                false -> unexpected_type_error(list, Json)
            end
    end.
%% @doc Decode a nonempty JSON list into a nonempty list `[T, ...]'
%%
%% Behaves like {@link list/1}, except that `[]' (and any non-list value)
%% fails with an `unexpected_type' error expecting `nonempty_list'.
%%
%% Note: the spec previously declared the argument as `T'; it is a
%% `decoder(T)', consistent with {@link list/1}.
-spec nonempty_list(decoder(T)) -> decoder([T, ...]).
nonempty_list(Decoder) ->
    fun ([_ | _] = Json) -> decode_all(Decoder, Json, 0, {ok, []});
        (Json)           -> unexpected_type_error(nonempty_list, Json)
    end.
%% @doc Decode a single index in a JSON list using the specified decoder
%%
%% Fails with a `missing_index' error when the list is too short, wraps
%% decoder failures in `at_index', and fails with `unexpected_type' when
%% the value is not a list at all.
-spec index(non_neg_integer(), decoder(T)) -> decoder(T).
index(Index, Decoder) ->
    fun (Json) when is_list(Json) ->
            case decode_nth(Index, Decoder, Json) of
                {ok, Value}     -> {ok, Value};
                {error, Errors} -> {error, [{at_index, Index, Errors}]};
                missing         -> {error, [{missing_index, Index, Json}]}
            end;
        (Json) ->
            unexpected_type_error(list, Json)
    end.
%% @doc Run several decoders against the same JSON value, collecting all
%% results into a list (in decoder order).
-spec sequence([decoder(T)]) -> decoder([T]).
sequence(Decoders) ->
    fun (Json) ->
            Step = sequence_helper(Json),
            lists:foldr(Step, {ok, []}, Decoders)
    end.
%% @doc Decode a JSON list into an Erlang `sets:set(T)'
%%
%% Duplicate elements in the JSON list collapse into one set member.
-spec set(decoder(T)) -> decoder(sets:set(T)).
set(Decoder) ->
    map(fun sets:from_list/1, list(Decoder)).
%% @doc Manipulate the values produced by a decoder with a function
%%
%% On success, the decoded value is transformed with `F'; failures are
%% passed through untouched.
%%
%% ```
%% Dec = dj:map(fun string:uppercase/1, dj:binary()),
%% Json = <<"\"hello world\"">>,
%% {ok, <<"HELLO WORLD">>} = dj:decode(Json, Dec).
%% '''
%%
%% @see mapn/2
%% @see chain/2
-spec map(fun((A) -> B), decoder(A)) -> decoder(B).
map(F, Decoder) ->
    fun (Json) ->
            case Decoder(Json) of
                {ok, Value} -> {ok, F(Value)};
                Other       -> Other
            end
    end.
%% @doc Chain one or more decoder-producing functions onto a decoder
%%
%% The value produced by the first decoder is passed to `ToDecoderB',
%% which returns the decoder that is then run on the same JSON value.
%% This allows decoding data whose shape depends on earlier results:
%%
%% ```
%% -type shape() :: {square, pos_integer()}
%%                | {oblong, pos_integer(), pos_integer()}.
%%
%% -spec square() -> dj:decoder(shape()).
%% square() ->
%%   dj:map(fun (S) -> {square, S} end, dj:field(side, dj:pos_integer())).
%%
%% -spec oblong() -> dj:decoder(shape()).
%% oblong() ->
%%   dj:mapn( fun (L, W) -> {oblong, L, W} end
%%          , [ dj:field(length, dj:pos_integer())
%%            , dj:field(width, dj:pos_integer())
%%            ]
%%          ).
%%
%% -spec shape(square | oblong) -> dj:decoder(shape()).
%% shape(square) -> square();
%% shape(oblong) -> oblong().
%%
%% -spec shape() -> dj:decoder(shape()).
%% shape() ->
%%   dj:chain(dj:field(type, dj:atom([square, oblong])), fun shape/1).
%%
%% {ok, {square, 12}} =
%%   dj:decode(<<"{\"type\": \"square\", \"side\": 12}">>, shape()).
%% '''
%%
%% To chain an operation that validates rather than decodes, return
%% {@link succeed/1} or {@link fail/1} from the chained function.
%%
%% When the second argument is a list of functions, they are chained in
%% order, left to right.
%%
%% @see map/2
%% @see succeed/1
%% @see fail/1
-spec chain(decoder(A), ToDecoderB) -> decoder(B) when
    ToDecoderB :: ToDecB | [ToDecB],
    ToDecB :: fun((A) -> decoder(B)).
chain(DecoderA, Funs) when is_list(Funs) ->
    lists:foldl(fun (ToDec, Acc) -> chain(Acc, ToDec) end, DecoderA, Funs);
chain(DecoderA, ToDecoderB) ->
    fun (Json) ->
            case DecoderA(Json) of
                {ok, Value}        -> (ToDecoderB(Value))(Json);
                {error, _} = Error -> Error
            end
    end.
%% @doc Create a decoder that always fails with the provided term
%%
%% ```
%% Dec = dj:fail(no_more_bananas),
%% {error, {dj_errors, [{custom, no_more_bananas}]}}
%%   = dj:decode(<<"true">>, Dec).
%% '''
%%
%% Mostly useful when combined with `chain'.
%%
%% @see chain/2
%% @see succeed/1
-spec fail(E :: term()) -> decoder(V :: term()).
fail(E) ->
  %% The error term is fixed up-front; the returned decoder ignores its
  %% input entirely.
  Error = {error, [{custom, E}]},
  fun (_Json) -> Error end.
%% @doc Create a decoder that always succeeds with the provided term
%%
%% Useful for hardcoding values in {@link to_map/1}, handling success in
%% {@link chain/2}, and - combined with {@link one_of/1} - defaulting
%% values that may be absent from the input.
%%
%% @see chain/2
%% @see to_map/1
%% @see fail/1
%% @see one_of/1
-spec succeed(T) -> decoder(T).
succeed(V) ->
  %% The result is fixed up-front; the returned decoder ignores its input.
  Result = {ok, V},
  fun (_Json) -> Result end.
%% @doc Apply an n-ary function against a list of n decoders
%%
%% ```
%% Dec = dj:mapn( fun (X, Y, Z) -> {X, Y, Z} end
%%              , [ dj:field(major, dj:pos_integer())
%%                , dj:field(minor, dj:non_neg_integer())
%%                , dj:field(patch, dj:non_neg_integer())
%%                ]
%%              ),
%% Json = <<"{\"major\": 123, \"minor\": 66, \"patch\": 0}">>,
%% {ok, {123, 66, 0}} = dj:decode(Json, Dec).
%% '''
%%
%% When the arity of `Fun' does not match the number of decoders, the
%% resulting decoder always fails with a custom
%% `{arity_mismatch, Expected, Actual}' error, where `Expected' is the
%% number of decoders passed.
%%
%% @see map/2
%% @see chain/2
-spec mapn(Fun, [decoder(T)]) -> decoder(V) when
    Fun :: function(),
    T :: term(),
    V :: term().
mapn(Fun, Decoders) when is_function(Fun, length(Decoders)) ->
  Apply = fun (Vs) -> erlang:apply(Fun, Vs) end,
  map(Apply, sequence(Decoders));
mapn(Fun, Decoders) ->
  {arity, ActualArity} = erlang:fun_info(Fun, arity),
  fail({arity_mismatch, length(Decoders), ActualArity}).
%% @doc Decoding succeeds if the decoder produces exactly the supplied value.
%%
%% This can, for example, be used when a certain field is used to switch
%% between different decoders.
-spec exactly(V, decoder(V)) -> decoder(V).
exactly(V, Decoder) ->
  Expected = {ok, V},
  fun (Json) ->
      case Decoder(Json) of
        Expected -> Expected;
        _        -> {error, [{not_exactly, V, Json}]}
      end
  end.
%% @doc Try a bunch of decoders against the same JSON; the first one to
%% succeed wins. If every decoder fails, all their errors are accumulated.
%%
%% ```
%% Dec = dj:one_of([dj:binary(), dj:integer()]),
%% {ok, <<"foo">>} = dj:decode(<<"\"foo\"">>, Dec),
%% {ok, 123} = dj:decode(<<"123">>, Dec),
%% {error, _} = dj:decode(<<"null">>, Dec).
%% '''
-spec one_of([decoder(V), ...]) -> decoder(V).
one_of(Decoders) ->
  fun (Json) -> try_decoders(Decoders, Json, []) end.
%% @equiv dj:decode(Json, Decoder, [{labels, attempt_atom}])
-spec decode(Json, decoder(T)) -> result(T, {dj_error, errors()}) when
    Json :: jsx:json_text().
decode(Json, Decoder) ->
  DefaultOpts = [{labels, attempt_atom}],
  decode(Json, Decoder, DefaultOpts).
%% @doc Run a {@type decoder(T)} against arbitrary JSON.
%%
%% The resulting {@type result(T, error())} is either a tuple `{ok, T}' or a
%% tuple `{error, error()}' where {@type error()} represents whatever went wrong
%% during the decoding/validation/transformation process.
%%
%% `Opts' are passed on to `jsx:decode/2'. The option `return_maps' is always
%% added by {@module} and does not need to be specified manually.
%%
%% Use of the functions that create {@type decoder(T)}s and functions that help
%% with composition are discussed individually.
-spec decode(Json, decoder(T), Opts) -> result(T, {dj_error, errors()}) when
    Json :: jsx:json_text(),
    Opts :: [Opt],
    Opt :: jsx:option()
         | labels
         | {labels, LabelOpt},
    LabelOpt :: atom
              | attempt_atom
              | binary
              | existing_atom.
decode(Json, Decoder, Opts) ->
  case attempt_jsx_decode(Json, Opts) of
    {ok, Data} ->
      %% The JSON parsed fine; any remaining failure comes from the decoder
      %% and is tagged as a dj error.
      case Decoder(Data) of
        {ok, _} = R ->
          R;
        {error, E} ->
          {error, {dj_error, E}}
      end;
    %% The input was not valid JSON at all.
    {error, E} -> {error, {dj_error, E}}
  end.
%%%-----------------------------------------------------------------------------
%%% Helpers
%%%-----------------------------------------------------------------------------
%% Call jsx defensively: any crash while parsing is turned into an
%% `{invalid_json, Json}' error instead of propagating to the caller.
-spec attempt_jsx_decode(Json, Opts) -> result(Data, Invalid) when
    Json :: binary(),
    Invalid :: [{invalid_json, Json}],
    Data :: jsx:json_term(),
    Opts :: [term()].
attempt_jsx_decode(Json, Opts) ->
  try jsx:decode(Json, [return_maps | Opts]) of
    Data -> {ok, Data}
  catch
    error:_ -> {error, [{invalid_json, Json}]}
  end.
%% Run the decoders in order until one succeeds; when all of them fail,
%% return the concatenation of every decoder's errors.
-spec try_decoders([decoder(T)], Json, [error()])
        -> result(T, errors()) when Json :: jsx:json_term().
try_decoders([], _Json, Errors) ->
  {error, Errors};
try_decoders([Decoder | Rest], Json, Errors) ->
  case Decoder(Json) of
    {ok, _} = Ok -> Ok;
    {error, E}   -> try_decoders(Rest, Json, Errors ++ E)
  end.
%% Decode the Nth (0-based) element of a list, or return `missing' when
%% the list is too short.
-spec decode_nth(non_neg_integer(), decoder(T), [V])
        -> missing | result(T, error()) when V :: term().
decode_nth(_N, _Decoder, []) ->
  missing;
decode_nth(0, Decoder, [X | _]) ->
  Decoder(X);
decode_nth(N, Decoder, [_ | Rest]) ->
  decode_nth(N - 1, Decoder, Rest).
%% Decode every element of a list, tracking the element index for error
%% reporting. Values and errors accumulate reversed and are put back in
%% order once the list is exhausted. After the first error, later
%% successes are ignored but later errors are still collected.
-spec decode_all(decoder(T), [V], non_neg_integer(), ResM) -> Res when
    ResM :: result([T], [error()]),
    Res :: result([T], error()),
    V :: jsx:json_term().
decode_all(_Decoder, [], _Idx, {ok, Vs}) ->
  {ok, lists:reverse(Vs)};
decode_all(_Decoder, [], _Idx, {error, Es}) ->
  {error, lists:reverse(Es)};
decode_all(Decoder, [X | Xs], Idx, Acc) ->
  NewAcc = decode_all_step(Decoder(X), Idx, Acc),
  decode_all(Decoder, Xs, Idx + 1, NewAcc).

%% Merge one element's result into the running accumulator.
decode_all_step({ok, V}, _Idx, {ok, Vs})         -> {ok, [V | Vs]};
decode_all_step({error, E}, Idx, {ok, _})        -> {error, [{at_index, Idx, E}]};
decode_all_step({ok, _}, _Idx, {error, _} = Acc) -> Acc;
decode_all_step({error, E}, Idx, {error, Es})    -> {error, [{at_index, Idx, E} | Es]}.
%% Tag the errors of a result with the field they occurred in; successful
%% results pass through untouched.
-spec in_field(field(), result(T, E)) -> result(T, E) when
    T :: term(),
    E :: errors().
in_field(_Field, {ok, _} = Res) ->
  Res;
in_field(Field, {error, Errors}) ->
  {error, [{in_field, Field, Errors}]}.
%% Error for a JSON value of the wrong type.
-spec unexpected_type_error(type(), jsx:json_term()) -> {error, errors()}.
unexpected_type_error(Type, Json) ->
  make_error({unexpected_type, Type, Json}).

%% Error for an object lacking a required field.
-spec missing_field_error(field(), jsx:json_term()) -> {error, errors()}.
missing_field_error(Field, Json) ->
  make_error({missing_field, Field, Json}).

%% Wrap a single error into the `{error, [..]}' result shape.
-spec make_error(error()) -> {error, errors()}.
make_error(Error) ->
  {error, [Error]}.
%% Fold step for sequence/1: run one decoder against the (fixed) JSON
%% value and merge its result into the accumulated results.
-spec sequence_helper(Json) -> fun((decoder(T), ResXs) -> ResXs) when
    Json :: jsx:json_text(),
    T :: term(),
    ResXs :: {ok, [V :: term()]} | {error, errors()}.
sequence_helper(Json) ->
  fun (Decoder, Acc) -> combine_results(Decoder(Json), Acc) end.
%% Prepend a single decoder result onto an accumulated result: values
%% collect in order, errors win over values, and errors concatenate.
-spec combine_results(ResX, ResXs) -> ResXs when
    ResX :: {ok, V} | {error, errors()},
    ResXs :: {ok, [V]} | {error, errors()},
    V :: term().
combine_results({ok, V}, {ok, Vs})         -> {ok, [V | Vs]};
combine_results({ok, _}, {error, _} = Err) -> Err;
combine_results({error, E}, {ok, _})       -> {error, E};
combine_results({error, E}, {error, Es})   -> {error, E ++ Es}.
%% Local variables:
%% mode: erlang
%% erlang-indent-level: 2
%% indent-tabs-mode: nil
%% fill-column: 80
%% coding: latin-1
%% End:
%% Copyright (c) 2016-2020 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : prop_lfe_docs.erl
%% Author : <NAME>, <NAME>
%% Purpose : PropEr tests for the lfe_docs module.
%% This module is a modified version of the older test module for
%% lfe_doc written by <NAME>.
-module(prop_lfe_docs).
-export([prop_define_lambda/0,prop_define_match/0]).
-include_lib("proper/include/proper.hrl").
-include("lfe_docs.hrl").
%%%===================================================================
%%% Properties
%%%===================================================================
%% These properties only check the *format* of the saved docs data; they do
%% not check the rendered documentation contents.
prop_define_lambda() -> ?FORALL(Def, define_lambda(), validate(Def)).

prop_define_match() -> ?FORALL(Def, define_match(), validate(Def)).

%% Dispatch a generated definition form to the matching validator.
validate({['define-function',Name,_Doc,Def],_}=Func) ->
    validate_function(Name, function_arity(Def), Func);
validate({['define-macro',Name,_Doc,_Def],_}=Mac) ->
    validate_macro(Name, Mac).

%% Arity of a generated function definition: length of the argument list
%% for a plain `lambda', length of the first clause's argument patterns
%% for a `match-lambda'.
function_arity([lambda,Args|_]) -> length(Args);
function_arity(['match-lambda',[Pat|_]|_]) -> length(Pat).
%% Build a minimal module form list around the generated function, run it
%% through lfe_docs, and check that the resulting docs_v1 entry carries the
%% same name, arity and line as the generated definition.
validate_function(Name, Arity, {[_Define,_Name,_Meta,_Def],Line}=Func) ->
    Info = [export_all_funcs(),Func],           %Add function export
    case lfe_docs:make_docs_info(Info, []) of
        {ok,#docs_v1{docs=[Fdoc]}} ->
            {{function,N,A},Anno,_,_,_} = Fdoc,
            (Line =:= Anno) and (Name =:= N) and (Arity =:= A);
        _ -> false
    end.

%% Same as validate_function/3 but for macros; only name and line are
%% compared (the docs entry's arity slot is ignored for macros).
validate_macro(Name, {[_Define,_Name,_Meta,_Lambda],Line}=Mac) ->
    Info = [export_macro(Name),Mac],            %Add macro export
    case lfe_docs:make_docs_info(Info, []) of
        {ok,#docs_v1{docs=[Mdoc]}} ->
            {{macro,N,_},Anno,_,_,_} = Mdoc,
            (Line =:= Anno) and (Name =:= N);
        _ -> false
    end.
%% Module forms that export all functions / the given macro, so the
%% generated definition is visible to the docs builder.
export_all_funcs() -> {['extend-module',[],[[export,all]]],1}.
export_macro(Mac) -> {['extend-module',[],[['export-macro',Mac]]],1}.
%%%===================================================================
%%% Definition shapes
%%%===================================================================
%% Generator for a `define-function' form whose body is a plain lambda.
define_lambda() ->
    {['define-function',atom(),meta_with_doc(),lambda()],line()}.

%% Generator for a function or macro definition whose body is a
%% `match-lambda'; the chosen define kind decides the clause shape.
define_match() ->
    ?LET(D, define(), {[D,atom(),meta_with_doc(),'match-lambda'(D)],line()}).
%%%===================================================================
%%% Custom types
%%%===================================================================
%%% Definitions

%% Kind of definition to generate.
define() -> oneof(['define-function','define-macro']).

%% A `(lambda (args ...) body ...)' form with simple (atom-only) arguments.
lambda() -> [lambda,arglist_simple()|body()].

%% A `match-lambda' with at least one clause; macro clauses additionally
%% carry the implicit `$ENV' argument (see arglist_patterns/2).
'match-lambda'('define-function') ->
    ['match-lambda'|non_empty(list(function_pattern_clause()))];
'match-lambda'('define-macro') ->
    ['match-lambda'|non_empty(list(macro_pattern_clause()))].

arglist_simple() -> list(atom()).

body() -> non_empty(list(form())).

form() -> union([form_elem(),[atom()|list(form_elem())]]).

form_elem() -> union([non_string_term(),printable_string(),atom()]).

%% Metadata list holding only a docstring entry.
meta_with_doc() -> [[doc,docstring()]].

docstring() -> printable_string().

line() -> pos_integer().
%%% Patterns

pattern() -> union([non_string_term(),printable_string(),pattern_form()]).

%% A compound pattern: a form headed by one of the LFE pattern-building
%% symbols.
pattern_form() ->
    [oneof(['=','++*',[],
            backquote,quote,
            binary,cons,list,map,tuple,
            match_fun()])
     | body()].

match_fun() -> 'match-record'.

macro_pattern_clause() -> pattern_clause(rand_arity(), true).

function_pattern_clause() -> pattern_clause(rand_arity(), false).

%% A match-lambda clause: argument patterns, then an optional guard (or
%% just another form), then the clause body.
pattern_clause(Arity, Macro) ->
    [arglist_patterns(Arity, Macro)|[oneof([guard(),form()])|body()]].

%% Macro clauses take the pattern list plus the implicit `$ENV' argument.
arglist_patterns(Arity, false) -> vector(Arity, pattern());
arglist_patterns(Arity, true) -> [vector(Arity, pattern()),'$ENV'].
%% A `when' guard made of logical clauses and/or comparisons.
guard() -> ['when'|non_empty(list(union([logical_clause(),comparison()])))].

%%% Logical clauses
logical_clause() ->
    X = union([atom(),comparison()]),
    [logical_operator(),X|non_empty(list(X))].

logical_operator() -> oneof(['and','andalso','or','orelse']).

%%% Comparisons
comparison() -> [comparison_operator(),atom()|list(atom())].

comparison_operator() -> oneof(['==','=:=','=/=','<','>','=<','>=']).

%%% Strings and non-strings
%% Any term shape that is not a (printable) string.
non_string_term() ->
    union([atom(),number(),[],bitstring(),binary(),boolean(),tuple()]).

%% Printable latin-1 characters (ASCII 32-126 plus 160-255).
printable_char() -> union([integer(32, 126),integer(160, 255)]).

printable_string() -> list(printable_char()).
%%% Rand compat
%% Use the `rand' module when available (flagged by NEW_RAND at build
%% time); fall back to the legacy `random' module on older releases.
-ifdef(NEW_RAND).
rand_arity() -> rand:uniform(10).
-else.
rand_arity() -> random:uniform(10).
-endif.
%%% @doc Contains util functions.
%%%
%%% Copyright 2017 Erlang Solutions Ltd.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%% @end
%%% @copyright Inaka <<EMAIL>>
%%%
-module(apns_utils).
-author("<NAME> <<EMAIL>>").
% API
-export([ sign/2
, epoch/0
, bin_to_hexstr/1
, seconds_to_timestamp/1
]).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Signs the given binary with the private key at `KeyPath' by shelling
%% out to openssl, returning the base64 signature with newlines and `='
%% padding stripped (JWT-style encoding).
%%
%% NOTE(review): `Data' is spliced into a shell command line, so a single
%% quote or other shell metacharacters in `Data' would break or inject into
%% the command. Presumably callers only pass base64/JSON token content here
%% -- confirm `Data' is never attacker-controlled.
-spec sign(binary(), string()) -> binary().
sign(Data, KeyPath) ->
  Command = "printf '" ++
            binary_to_list(Data) ++
            "' | openssl dgst -binary -sha256 -sign " ++ KeyPath ++ " | base64",
  %% apns_os:cmd/1 returns {ExitStatus, Output}; a non-zero exit status
  %% crashes here by design (badmatch).
  {0, Result} = apns_os:cmd(Command),
  strip_b64(list_to_binary(Result)).
%% Current POSIX time in whole seconds.
-spec epoch() -> integer().
epoch() ->
  {MegaSecs, Secs, _MicroSecs} = os:timestamp(),
  MegaSecs * 1000000 + Secs.
%% Render a binary as an uppercase hexadecimal string,
%% e.g. <<16#0F, 16#FF>> becomes "0FFF".
-spec bin_to_hexstr(binary()) -> string().
bin_to_hexstr(Binary) ->
  ByteCount = byte_size(Binary),
  BitCount = ByteCount * 8,
  <<Int:BitCount/big-unsigned-integer>> = Binary,
  %% Zero-pad to two hex digits per input byte.
  Format = lists:flatten(io_lib:format("~~~B.16.0B", [ByteCount * 2])),
  lists:flatten(io_lib:format(Format, [Int])).
%% Converts POSIX seconds into a calendar datetime (UTC).
-spec seconds_to_timestamp(pos_integer()) -> calendar:datetime().
seconds_to_timestamp(Secs) ->
  %% 62167219200 is the number of Gregorian seconds at the Unix epoch
  %% (1970-01-01T00:00:00Z).
  UnixEpochGregorian = 62167219200,
  calendar:gregorian_seconds_to_datetime(UnixEpochGregorian + Secs).
%% Remove newline and base64 padding (`=') characters from the binary.
-spec strip_b64(binary()) -> binary().
strip_b64(BS) ->
  iolist_to_binary(binary:split(BS, [<<"\n">>, <<"=">>], [global])).
%%%-------------------------------------------------------------------
%%% @author yangcancai
%%% Copyright (c) 2021 by yangcancai(<EMAIL>), All Rights Reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% https://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% @doc
%%%
%%% @end
%%% Created : 2021-12-30T09:53:34+00:00
%%%-------------------------------------------------------------------
-module(cool_tools_ip).
-author("yangcancai").
-export([ip_to_int/1, int_to_ip/1]).
-export([ip_between/3]).
%% @doc Converts a binary string with a human readable ip
%% address representation into an uint32.
%%
%% Spec fix: `<<"0.0.0.0">>' legitimately yields 0, so the result is a
%% non_neg_integer(), not a pos_integer().
-spec ip_to_int(binary()) -> non_neg_integer().
ip_to_int(Ip) ->
  %% Dotted-quad -> four octet values, most significant octet first.
  [O1, O2, O3, O4] =
    [binary_to_integer(O) || O <- binary:split(Ip, <<".">>, [global])],
  (O1 bsl 24) + (O2 bsl 16) + (O3 bsl 8) + O4.
%% @doc Converts the given uint32 into a binary string with the
%% human-readable ip address representation, i.e: <<"x.x.x.x">>.
%%
%% Spec fix: 0 is a valid address (<<"0.0.0.0">>), so the argument is a
%% non_neg_integer(), not a pos_integer().
-spec int_to_ip(non_neg_integer()) -> binary().
int_to_ip(Num) ->
  %% Extract each octet by shifting and masking, then join with dots.
  Octets = [(Num bsr Shift) band 16#FF || Shift <- [24, 16, 8, 0]],
  Parts = [integer_to_binary(O) || O <- Octets],
  iolist_to_binary(lists:join(<<".">>, Parts)).
%% @doc Checks if the given IP address falls into the given network
%% range. E.g: ip_between(<<"192.168.0.1">>, <<"192.168.0.0">>, 16).
-spec ip_between(binary(), binary(), pos_integer()) -> boolean().
ip_between(Ip, Network, NetworkBits) ->
  IpNum = ip_to_int(Ip),
  NetLow = ip_to_int(Network),
  HostBits = 32 - NetworkBits,
  %% Highest address in the network = base address with all host bits set.
  %% Integer bit arithmetic replaces the float round-trip through
  %% math:pow/2 + trunc/1 of the original.
  NetHigh = NetLow + (1 bsl HostBits) - 1,
  IpNum >= NetLow andalso IpNum =< NetHigh.
-module(week1).
-export([perimeter/1, area/1, enclose/1, bitsDirect/1, bitsTail/1, bits/1]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Shapes
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Define a function perimeter/1 which takes a shape and returns the perimeter of the shape.
% Choose a suitable representation of triangles, and augment area/1 and perimeter/1 to handle this case too.
%% Perimeter of a shape term: circle 2*pi*r, rectangle 2*(w+h),
%% triangle the sum of its three side lengths.
perimeter({circle, Radius}) ->
    2.0 * math:pi() * Radius;
perimeter({rectangle, Width, Height}) ->
    2.0 * (Width + Height);
perimeter({triangle, SideA, SideB, SideC}) ->
    SideA + SideB + SideC.
%% Area of a shape term; triangles are handled with Heron's formula.
area({circle, Radius}) ->
    math:pi() * Radius * Radius;
area({rectangle, Width, Height}) ->
    Width * Height;
area({triangle, SideA, SideB, SideC}) ->
    heron(SideA, SideB, SideC).
% Heron's formula: area = sqrt(s(s-a)(s-b)(s-c)), where s is the
% semiperimeter (a+b+c)/2.
heron(A, B, C) ->
    S = (A + B + C) / 2.0,
    math:sqrt(S * (S - A) * (S - B) * (S - C)).
% Smallest axis-aligned enclosing rectangle of a shape.
%
% For a triangle: a minimum-area enclosing box has a side collinear with a
% side of the triangle. Taking any side as the base together with the
% matching triangle height gives an enclosing rectangle whose area is
% base * height = 2 * (triangle area) -- the same for every choice of
% base -- so no brute-force over the three sides is needed and side A is
% used directly.
enclose({circle, Radius}) ->
    Diameter = 2.0 * Radius,
    {rectangle, Diameter, Diameter};
enclose({rectangle, Width, Height}) ->
    {rectangle, Width, Height};
enclose({triangle, A, B, C}) ->
    {rectangle, A, triangleHeight(A, B, C)}.

% Height of a triangle measured from the given base side:
% area = Base * Height / 2 => Height = 2 * area / Base.
triangleHeight(Base, S1, S2) ->
    2.0 * heron(Base, S1, S2) / Base.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Summing the bits
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Define a function bits/1 that takes a positive integer N and returns the sum of the bits in the binary representation.
% For example bits(7) is 3 and bits(8) is 1.
% See whether you can make both a direct recursive and a tail recursive definition.
% Which do you think is better? Why?
% Direct (body-recursive) popcount of a non-negative integer: add the low
% bit to the count of the remaining (shifted) bits.
bitsDirect(0) -> 0;
bitsDirect(1) -> 1;
bitsDirect(N) -> bitsDirect(N bsr 1) + (N band 1).
% Tail-recursive popcount: the running bit count travels in an
% accumulator, so the recursive call is the last expression and the
% loop runs in constant stack space.
bitsTail(N) -> bitsTail(N, 0).

bitsTail(0, Acc) -> Acc;
bitsTail(1, Acc) -> Acc + 1;
bitsTail(N, Acc) ->
    bitsTail(N bsr 1, Acc + (N band 1)).
% The direct approach is much more readable from an algorithmic approach (states clearly what it does),
% the tail-recursive approach is more burdened with implementation details (the Sum accumulator).
% From a performance point of view the tail-recursive approach wins because it does not build up a O(log(N))
% backlog of postponed additions.
% An approach using mutual recursion mixes the best of both worlds.
% Popcount combining both styles: thin top-level clauses delegate to a
% tail-recursive accumulator loop, which reuses bits/1 for the low bit.
bits(0) -> 0;
bits(1) -> 1;
bits(N) -> bits(N, 0).

bits(0, Acc) -> Acc;
bits(N, Acc) -> bits(N bsr 1, Acc + bits(N band 1)).
%%%=============================================================================
%% Copyright 2013 Klarna AB
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @copyright 2013 Klarna AB
%% @author <NAME> <<EMAIL>>
%%
%% @doc JESSE (JSon Schema Erlang)
%%
%% This is an interface module which provides an access to the main
%% functionality of jesse, such as 1) updating of the schema definitions cache;
%% 2) validation json data against a schema.
%% @end
%%%=============================================================================
-module(jesse).
%% API
-export([ add_schema/2
, add_schema/3
, del_schema/1
, load_schemas/2
, load_schemas/4
, validate/2
, validate/3
, validate_with_schema/2
, validate_with_schema/3
, validate_with_accumulator/2
, validate_with_accumulator/3
, validate_with_accumulator/4
, validate_with_accumulator/5
, explain_errors/1
, explain_errors/2
]).
-export_type([ json_term/0
]).
-type accumulator() :: fun(( jesse_json_path:path()
, jesse_schema_validator:error()
, term() ) -> term()).
-type parser() :: fun((binary()) -> json_term()).
-type encoder() :: fun((json_term()) -> iolist()).
-type json_term() :: term().
-type error() :: {error, jesse_schema_validator:error()}.
%%% API
%% @doc Adds a schema definition `Schema' to in-memory storage associated with
%% a key `Key'. It will overwrite an existing schema with the same key if
%% there is any.
-spec add_schema(Key :: any(), Schema :: json_term()) -> ok | error().
add_schema(Key, Schema) ->
  %% The schema itself must look like a json object to be accepted.
  ValidationFun = fun jesse_schema_validator:is_json_object/1,
  %% File the entry under the caller-supplied key, ignoring the schema's
  %% own `id' attribute.
  MakeKeyFun = fun(_) -> Key end,
  jesse_database:add(Schema, ValidationFun, MakeKeyFun).

%% @doc Equivalent to `add_schema/2', but `Schema' is a binary string, and
%% the third argument is a parse function to convert the binary string to
%% a supported internal representation of json.
-spec add_schema( Key :: any()
                , Schema :: binary()
                , ParseFun :: parser()
                ) -> ok | error().
add_schema(Key, Schema, ParseFun) ->
  case try_parse(ParseFun, Schema) of
    %% A schema that does not even parse is reported as a schema error.
    {parse_error, _} = SError -> {error, {schema_error, SError}};
    ParsedSchema -> add_schema(Key, ParsedSchema)
  end.

%% @doc Deletes a schema definition from in-memory storage associated with
%% the key `Key'.
-spec del_schema(Key :: any()) -> ok.
del_schema(Key) ->
  jesse_database:delete(Key).
%% @doc Loads schema definitions from filesystem to in-memory storage.
%%
%% Equivalent to `load_schemas(Path, ParseFun, ValidationFun, MakeKeyFun)'
%% where `ValidationFun' is `fun jesse_json:is_json_object/1' and
%% `MakeKeyFun' is `fun jesse_schema_validator:get_schema_id/1'. In this case
%% the key will be the value of `id' attribute from the given schemas.
-spec load_schemas( Path :: string()
                  , ParseFun :: parser()
                  ) -> jesse_database:update_result().
load_schemas(Path, ParseFun) ->
  load_schemas( Path
              , ParseFun
              , fun jesse_schema_validator:is_json_object/1
              , fun jesse_schema_validator:get_schema_id/1
              ).

%% @doc Loads schema definitions from filesystem to in-memory storage.
%% The function loads all the files from directory `Path', then each schema
%% entry will be checked for validity by function `ValidationFun', and
%% will be stored in in-memory storage with a key returned by `MakeKeyFun'
%% function.
%%
%% In addition to a schema definition, a timestamp of the schema file will be
%% stored, so, during the next update timestamps will be compared to avoid
%% unnecessary updates.
%%
%% Schema definitions are stored in the format which json parsing function
%% `ParseFun' returns.
%%
%% NOTE: it's impossible to automatically update schema definitions added by
%%       add_schema/2, the only way to update them is to use add_schema/2
%%       again with the new definition.
-spec load_schemas( Path :: string()
                  , ParseFun :: parser()
                  , ValidationFun :: fun((any()) -> boolean())
                  , MakeKeyFun :: fun((json_term()) -> any())
                  ) -> jesse_database:update_result().
load_schemas(Path, ParseFun, ValidationFun, MakeKeyFun) ->
  jesse_database:update(Path, ParseFun, ValidationFun, MakeKeyFun).
%% @doc Validates json `Data' against a schema with the same key as `Schema'
%% in the internal storage. If the given json is valid, then it is returned
%% to the caller, otherwise an error with an appropriate error reason
%% is returned.
-spec validate(Schema :: any(), Data :: json_term()) -> {ok, json_term()}
                                                      | error().
validate(Schema, Data) ->
  try
    JsonSchema = jesse_database:read(Schema),
    %% The failfast accumulator throws on the first validation error;
    %% that throw (and a failed database read) is converted into an
    %% {error, _} return below.
    jesse_schema_validator:validate(JsonSchema, Data,
                                    {fun jesse_utils:failfast/3, undefined})
  catch
    throw:Error ->
      {error, Error}
  end.

%% @doc Equivalent to `validate/2', but `Data' is a binary string, and
%% the third argument is a parse function to convert the binary string to
%% a supported internal representation of json.
-spec validate( Schema :: any()
              , Data :: binary()
              , ParseFun :: parser()
              ) -> {ok, json_term()}
                 | error().
validate(Schema, Data, ParseFun) ->
  case try_parse(ParseFun, Data) of
    {parse_error, _} = DError -> {error, {data_error, DError}};
    ParsedJson -> validate(Schema, ParsedJson)
  end.
%% @doc Validates json `Data' against the given schema `Schema'. If the given
%% json is valid, then it is returned to the caller, otherwise an error with
%% an appropriate error reason is returned.
-spec validate_with_schema( Schema :: json_term()
                          , Data :: json_term()
                          ) -> {ok, json_term()}
                             | error().
validate_with_schema(Schema, Data) ->
  try
    %% Failfast accumulator: throws on the first error, converted below.
    jesse_schema_validator:validate(Schema, Data,
                                    {fun jesse_utils:failfast/3, undefined})
  catch
    throw:Error ->
      {error, Error}
  end.

%% @doc Equivalent to `validate_with_schema/2', but both `Schema' and
%% `Data' are binary strings, and the third argument is a parse function
%% to convert the binary string to a supported internal representation of json.
-spec validate_with_schema( Schema :: binary()
                          , Data :: binary()
                          , ParseFun :: parser()
                          ) -> {ok, json_term()}
                             | error().
validate_with_schema(Schema, Data, ParseFun) ->
  %% Parse the schema first, then the data; a parse failure is tagged with
  %% its origin so the caller can tell which input was malformed.
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_schema(ParsedSchema, ParsedData)
      end
  end.
%% @doc Equivalent to {@link validate_with_accumulator/4} with the default
%% error collector (`jesse_utils:collect/3') and an empty initial list, so
%% all errors are accumulated instead of stopping at the first one.
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term()
                               ) ->
    {ok, json_term()} | {error, [error()]}.
validate_with_accumulator(Schema, Data) ->
  validate_with_accumulator(Schema, Data, fun jesse_utils:collect/3, []).

%% @doc Equivalent to {@link validate_with_schema/3} but collecting all
%% non-critical errors with the default accumulator. Returns the original
%% JSON when it fully corresponds to the schema, or the list of all
%% collected errors otherwise.
-spec validate_with_accumulator( Schema :: binary(),
                                 Data :: binary(),
                                 ParseFun :: parser()
                               ) ->
    {ok, json_term()} | {error, [error()]}.
validate_with_accumulator(Schema, Data, ParseFun) ->
  %% Parse failures are tagged with their origin (schema vs data).
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_accumulator(ParsedSchema, ParsedData)
      end
  end.
%% @doc Validates parsed json `Data' against the parsed schema `Schema',
%% collecting errors with the caller-supplied `Accumulator' fun, starting
%% from the `Initial' accumulator value. (The original @doc here was a
%% copy-paste of the /2 text.)
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term(),
                                 Accumulator :: accumulator(),
                                 Initial :: term()
                               ) ->
    {ok, json_term()} | {error, term()}.
validate_with_accumulator(Schema, Data, Accumulator, Initial) ->
  try
    %% The validator may still throw for critical errors; converted below.
    jesse_schema_validator:validate(Schema, Data,
                                    {Accumulator, Initial})
  catch
    throw:Error ->
      {error, Error}
  end.

%% @doc Same as {@link validate_with_accumulator/4}, but `Schema' and
%% `Data' are binary strings parsed with `ParseFun' first; parse failures
%% are tagged as schema or data errors respectively.
-spec validate_with_accumulator( Schema :: json_term(),
                                 Data :: json_term(),
                                 ParseFun :: parser(),
                                 Accumulator :: accumulator(),
                                 Initial :: term()
                               ) ->
    {ok, json_term()} | {error, term()}.
validate_with_accumulator(Schema, Data, ParseFun, Accumulator, Initial) ->
  case try_parse(ParseFun, Schema) of
    {parse_error, _} = SError ->
      {error, {schema_error, SError}};
    ParsedSchema ->
      case try_parse(ParseFun, Data) of
        {parse_error, _} = DError ->
          {error, {data_error, DError}};
        ParsedData ->
          validate_with_accumulator(ParsedSchema, ParsedData,
                                    Accumulator, Initial)
      end
  end.
%% @doc Explain a list of errors in the internal format and return an
%% <code>iolist()</code> in human-readable format.
%% This function accepts the list of errors, collected with the default
%% accumulator.
%%
%% Spec fix: the referenced modules are `jesse_json_path' and
%% `jesse_schema_validator' (as used by the accumulator() type above), not
%% the non-existent `json_schema_path'/`json_schema_validator'.
%% @see validate_with_accumulator/2.
-spec explain_errors( Errors :: [ { jesse_json_path:path()
                                  , jesse_schema_validator:error()} ]
                    ) ->
    iolist().
explain_errors(Errors) ->
  jesse_utils:explain(Errors).
%% @doc A variant of {@link explain_errors/1} that takes an additional
%% argument: a <code>fun</code> used to encode json terms in the output.
%% This function accepts the list of errors, collected with the default
%% accumulator.
%%
%% Spec fix: corrected the module names in the error type (same fix as in
%% explain_errors/1).
%% @see validate_with_accumulator/2.
-spec explain_errors( Errors :: [ { jesse_json_path:path()
                                  , jesse_schema_validator:error()} ],
                      Encoder :: encoder()
                    ) ->
    iolist().
explain_errors(Errors, Encoder) ->
  jesse_utils:explain(Errors, Encoder).
%%% ----------------------------------------------------------------------------
%%% Internal functions
%%% ----------------------------------------------------------------------------
%% @doc Invokes a third-party JSON parser fun, converting any exception
%% class (throw, error, or exit) into a {parse_error, Reason} tuple so
%% callers can pattern-match instead of trapping.
%% @private
try_parse(ParseFun, Input) ->
  try
    ParseFun(Input)
  catch
    _Class:Reason -> {parse_error, Reason}
  end.
%%% Local Variables:
%%% erlang-indent-level: 2
%%% End:
%% -------------------------------------------------------------------
%%
%% xqerl - XQuery processor
%%
%% Copyright (c) 2017-2020 <NAME> All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Wrapper functions for the "http://www.w3.org/2001/XMLSchema" namespace.
-module(xqerl_mod_xs).
-define(NS, <<"http://www.w3.org/2001/XMLSchema">>).
-define(PX, <<"xs">>).
-'module-namespace'({?NS, ?PX}).
-namespaces([{?NS, ?PX}]).
-functions([
{
{qname, ?NS, ?PX, <<"unsignedInt">>},
{seqType, 'xs:unsignedInt', zero_or_one},
[],
{'xs_unsignedInt', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"string">>},
{seqType, 'xs:string', zero_or_one},
[],
{'xs_string', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"boolean">>},
{seqType, 'xs:boolean', zero_or_one},
[],
{'xs_boolean', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"decimal">>},
{seqType, 'xs:decimal', zero_or_one},
[],
{'xs_decimal', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"float">>}, {seqType, 'xs:float', zero_or_one}, [], {'xs_float', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"double">>},
{seqType, 'xs:double', zero_or_one},
[],
{'xs_double', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"duration">>},
{seqType, 'xs:duration', zero_or_one},
[],
{'xs_duration', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"dateTime">>},
{seqType, 'xs:dateTime', zero_or_one},
[],
{'xs_dateTime', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"time">>}, {seqType, 'xs:time', zero_or_one}, [], {'xs_time', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"date">>}, {seqType, 'xs:date', zero_or_one}, [], {'xs_date', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"gYearMonth">>},
{seqType, 'xs:gYearMonth', zero_or_one},
[],
{'xs_gYearMonth', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"gYear">>}, {seqType, 'xs:gYear', zero_or_one}, [], {'xs_gYear', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"gMonthDay">>},
{seqType, 'xs:gMonthDay', zero_or_one},
[],
{'xs_gMonthDay', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"gDay">>}, {seqType, 'xs:gDay', zero_or_one}, [], {'xs_gDay', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"gMonth">>},
{seqType, 'xs:gMonth', zero_or_one},
[],
{'xs_gMonth', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"hexBinary">>},
{seqType, 'xs:hexBinary', zero_or_one},
[],
{'xs_hexBinary', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"base64Binary">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{'xs_base64Binary', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"anyURI">>},
{seqType, 'xs:anyURI', zero_or_one},
[],
{'xs_anyURI', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"QName">>}, {seqType, 'xs:QName', zero_or_one}, [], {'xs_QName', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"normalizedString">>},
{seqType, 'xs:normalizedString', zero_or_one},
[],
{'xs_normalizedString', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"token">>}, {seqType, 'xs:token', zero_or_one}, [], {'xs_token', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"language">>},
{seqType, 'xs:language', zero_or_one},
[],
{'xs_language', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"NMTOKEN">>},
{seqType, 'xs:NMTOKEN', zero_or_one},
[],
{'xs_NMTOKEN', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"Name">>}, {seqType, 'xs:Name', zero_or_one}, [], {'xs_Name', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"NCName">>},
{seqType, 'xs:NCName', zero_or_one},
[],
{'xs_NCName', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"ID">>}, {seqType, 'xs:ID', zero_or_one}, [], {'xs_ID', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"IDREF">>}, {seqType, 'xs:IDREF', zero_or_one}, [], {'xs_IDREF', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"ENTITY">>},
{seqType, 'xs:ENTITY', zero_or_one},
[],
{'xs_ENTITY', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"integer">>},
{seqType, 'xs:integer', zero_or_one},
[],
{'xs_integer', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"nonPositiveInteger">>},
{seqType, 'xs:nonPositiveInteger', zero_or_one},
[],
{'xs_nonPositiveInteger', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"negativeInteger">>},
{seqType, 'xs:negativeInteger', zero_or_one},
[],
{'xs_negativeInteger', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"long">>}, {seqType, 'xs:long', zero_or_one}, [], {'xs_long', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"int">>}, {seqType, 'xs:int', zero_or_one}, [], {'xs_int', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"short">>}, {seqType, 'xs:short', zero_or_one}, [], {'xs_short', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"byte">>}, {seqType, 'xs:byte', zero_or_one}, [], {'xs_byte', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"nonNegativeInteger">>},
{seqType, 'xs:nonNegativeInteger', zero_or_one},
[],
{'xs_nonNegativeInteger', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"unsignedLong">>},
{seqType, 'xs:unsignedLong', zero_or_one},
[],
{'xs_unsignedLong', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"unsignedShort">>},
{seqType, 'xs:unsignedShort', zero_or_one},
[],
{'xs_unsignedShort', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"unsignedByte">>},
{seqType, 'xs:unsignedByte', zero_or_one},
[],
{'xs_unsignedByte', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"positiveInteger">>},
{seqType, 'xs:positiveInteger', zero_or_one},
[],
{'xs_positiveInteger', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"yearMonthDuration">>},
{seqType, 'xs:yearMonthDuration', zero_or_one},
[],
{'xs_yearMonthDuration', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"dayTimeDuration">>},
{seqType, 'xs:dayTimeDuration', zero_or_one},
[],
{'xs_dayTimeDuration', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"untypedAtomic">>},
{seqType, 'xs:untypedAtomic', zero_or_one},
[],
{'xs_untypedAtomic', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
%% {{qname, ?NS, ?PX, <<"dateTimeStamp">>}, {seqType, 'xs:dateTimeStamp', zero_or_one}, [],
%% {'xs_dateTimeStamp', 2}, 1, [{seqType, 'xs:anyAtomicType', zero_or_one}]},
{
{qname, ?NS, ?PX, <<"NMTOKENS">>},
{seqType, 'xs:NMTOKENS', zero_or_one},
[],
{'xs_NMTOKENS', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"ENTITIES">>},
{seqType, 'xs:ENTITIES', zero_or_one},
[],
{'xs_ENTITIES', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"IDREFS">>},
{seqType, 'xs:IDREFS', zero_or_one},
[],
{'xs_IDREFS', 2},
1,
[{seqType, 'xs:anyAtomicType', zero_or_one}]
},
{{qname, ?NS, ?PX, <<"error">>}, {seqType, 'xs:error', zero_or_one}, [], {'xs_error', 2}, 1, [
{seqType, 'xs:anyAtomicType', zero_or_one}
]}
]).
-export([xs_unsignedInt/2]).
-export([xs_string/2]).
-export([xs_boolean/2]).
-export([xs_decimal/2]).
-export([xs_float/2]).
-export([xs_double/2]).
-export([xs_duration/2]).
-export([xs_dateTime/2]).
-export([xs_time/2]).
-export([xs_date/2]).
-export([xs_gYearMonth/2]).
-export([xs_gYear/2]).
-export([xs_gMonthDay/2]).
-export([xs_gDay/2]).
-export([xs_gMonth/2]).
-export([xs_hexBinary/2]).
-export([xs_base64Binary/2]).
-export([xs_anyURI/2]).
-export([xs_QName/2]).
-export([xs_normalizedString/2]).
-export([xs_token/2]).
-export([xs_language/2]).
-export([xs_NMTOKEN/2]).
-export([xs_Name/2]).
-export([xs_NCName/2]).
-export([xs_ID/2]).
-export([xs_IDREF/2]).
-export([xs_ENTITY/2]).
-export([xs_integer/2]).
-export([xs_nonPositiveInteger/2]).
-export([xs_negativeInteger/2]).
-export([xs_long/2]).
-export([xs_int/2]).
-export([xs_short/2]).
-export([xs_byte/2]).
-export([xs_nonNegativeInteger/2]).
-export([xs_unsignedLong/2]).
-export([xs_unsignedShort/2]).
-export([xs_unsignedByte/2]).
-export([xs_positiveInteger/2]).
-export([xs_yearMonthDuration/2]).
-export([xs_dayTimeDuration/2]).
-export([xs_untypedAtomic/2]).
-export([xs_dateTimeStamp/2]).
-export([xs_NMTOKENS/2]).
-export([xs_ENTITIES/2]).
-export([xs_IDREFS/2]).
-export([xs_error/2]).
-include("xqerl.hrl").
%% Constructor wrappers for the xs:* atomic types. Every wrapper has the
%% same shape: the empty sequence is returned unchanged, and any other
%% argument is cast via xqerl_types:cast_as/2. The first argument is the
%% context map; it is unused except by xs_QName/2 below.
xs_ENTITIES(_S, []) -> [];
xs_ENTITIES(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:ENTITIES').
xs_ENTITY(_S, []) -> [];
xs_ENTITY(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:ENTITY').
xs_ID(_S, []) -> [];
xs_ID(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:ID').
xs_IDREF(_S, []) -> [];
xs_IDREF(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:IDREF').
xs_IDREFS(_S, []) -> [];
xs_IDREFS(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:IDREFS').
xs_NCName(_S, []) -> [];
xs_NCName(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:NCName').
xs_NMTOKEN(_S, []) -> [];
xs_NMTOKEN(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:NMTOKEN').
xs_NMTOKENS(_S, []) -> [];
xs_NMTOKENS(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:NMTOKENS').
xs_Name(_S, []) -> [];
xs_Name(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:Name').
%% xs:QName is the one cast that needs the context: prefix resolution
%% uses the in-scope namespaces map (cast_as/3).
xs_QName(_S, []) -> [];
xs_QName(S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:QName', maps:get(namespaces, S)).
xs_anyURI(_S, []) -> [];
xs_anyURI(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:anyURI').
xs_base64Binary(_S, []) -> [];
xs_base64Binary(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:base64Binary').
xs_boolean(_S, []) -> [];
xs_boolean(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:boolean').
xs_byte(_S, []) -> [];
xs_byte(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:byte').
xs_date(_S, []) -> [];
xs_date(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:date').
xs_dateTime(_S, []) -> [];
xs_dateTime(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:dateTime').
%% Note: exported but not registered in the -functions list above (the
%% dateTimeStamp entry there is commented out).
xs_dateTimeStamp(_S, []) -> [];
xs_dateTimeStamp(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:dateTimeStamp').
xs_dayTimeDuration(_S, []) -> [];
xs_dayTimeDuration(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:dayTimeDuration').
xs_decimal(_S, []) -> [];
xs_decimal(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:decimal').
xs_double(_S, []) -> [];
xs_double(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:double').
xs_duration(_S, []) -> [];
xs_duration(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:duration').
%xs_error(_S, {1,_} = E) -> E; % can also be 'XPTY0004' or 'XPST0005'
-dialyzer({[no_return], [xs_error/2]}).
%% xs:error cannot be constructed directly, so every call raises
%% XPTY0004 (hence the no_return dialyzer annotation above).
xs_error(_S, _Arg1) ->
    % no direct error constructor
    ?err('XPTY0004').
xs_float(_S, []) -> [];
xs_float(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:float').
xs_gDay(_S, []) -> [];
xs_gDay(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:gDay').
xs_gMonth(_S, []) -> [];
xs_gMonth(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:gMonth').
xs_gMonthDay(_S, []) -> [];
xs_gMonthDay(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:gMonthDay').
xs_gYear(_S, []) -> [];
xs_gYear(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:gYear').
xs_gYearMonth(_S, []) -> [];
xs_gYearMonth(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:gYearMonth').
xs_hexBinary(_S, []) -> [];
xs_hexBinary(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:hexBinary').
xs_int(_S, []) -> [];
xs_int(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:int').
xs_integer(_S, []) -> [];
xs_integer(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:integer').
xs_language(_S, []) -> [];
xs_language(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:language').
xs_long(_S, []) -> [];
xs_long(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:long').
xs_negativeInteger(_S, []) -> [];
xs_negativeInteger(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:negativeInteger').
xs_nonNegativeInteger(_S, []) -> [];
xs_nonNegativeInteger(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:nonNegativeInteger').
xs_nonPositiveInteger(_S, []) -> [];
xs_nonPositiveInteger(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:nonPositiveInteger').
xs_normalizedString(_S, []) -> [];
xs_normalizedString(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:normalizedString').
xs_positiveInteger(_S, []) -> [];
xs_positiveInteger(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:positiveInteger').
xs_short(_S, []) -> [];
xs_short(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:short').
xs_string(_S, []) -> [];
xs_string(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:string').
xs_time(_S, []) -> [];
xs_time(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:time').
xs_token(_S, []) -> [];
xs_token(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:token').
xs_unsignedByte(_S, []) -> [];
xs_unsignedByte(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:unsignedByte').
xs_unsignedInt(_S, []) -> [];
xs_unsignedInt(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:unsignedInt').
xs_unsignedLong(_S, []) -> [];
xs_unsignedLong(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:unsignedLong').
xs_unsignedShort(_S, []) -> [];
xs_unsignedShort(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:unsignedShort').
xs_untypedAtomic(_S, []) -> [];
xs_untypedAtomic(_S, Arg1) -> xqerl_types:cast_as(Arg1, 'xs:untypedAtomic').
%% Casts a single atomic value to xs:yearMonthDuration; the empty
%% sequence is passed through unchanged (same shape as the wrappers
%% above).
xs_yearMonthDuration(_Ctx, []) -> [];
xs_yearMonthDuration(_Ctx, Arg) -> xqerl_types:cast_as(Arg, 'xs:yearMonthDuration').
%% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%% Technische Universität Kaiserslautern, Germany
%% Université Pierre et Marie Curie / Sorbonne-Université, France
%% Universidade NOVA de Lisboa, Portugal
%% Université catholique de Louvain (UCL), Belgique
%% INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
%% These functions are used to instantiate a meta_data_sender for vectorclocks.
-module(stable_time_functions).
-include("antidote.hrl").
-include_lib("kernel/include/logger.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
update/2,
merge/1,
lookup/2,
fold/3,
store/3,
default/0,
initial_local/0,
initial_merged/0
]).
%% Value used for an entry with no data yet: an empty vectorclock.
default() ->
    vectorclock:new().
%% Initial value of the merged (global) meta data: an empty vectorclock.
initial_merged() ->
    vectorclock:new().
%% Initial value of this node's local meta data: an empty vectorclock.
initial_local() ->
    vectorclock:new().
%% Thin delegation to vectorclock:fold/3 (Fun, Acc, Clock).
fold(X, Y, Z) ->
    vectorclock:fold(X, Y, Z).
%% Thin delegation to vectorclock:get/2 (Key, Clock).
lookup(X, Y) ->
    vectorclock:get(X, Y).
%% Thin delegation to vectorclock:set/3 (Key, Value, Clock).
store(X, Y, Z) ->
    vectorclock:set(X, Y, Z).
%% Checks whether entry should be updated.
%% Checks whether an entry should be updated: an unset entry
%% ('undefined') is always updated, otherwise only non-decreasing
%% times are accepted.
%% Spec fix: the code explicitly handles 'undefined' for Last, so the
%% spec must admit it (the previous integer()-only spec contradicted
%% the implementation and would mislead Dialyzer).
-spec update(integer() | undefined, integer()) -> boolean().
update(undefined, _Time) ->
    true;
update(Last, Time) ->
    Time >= Last.
%% Merges a map of vectorclocks by taking, per entry, the minimum over
%% all clocks. Assumes the meta data being sent covers all DCs; if any
%% clock in the map is still 'undefined', an empty vectorclock is
%% returned instead (and the map is logged at debug level).
-spec merge(map()) -> vectorclock:vectorclock().
merge(VcMap) ->
    case all_defined(VcMap) of
        false ->
            ?LOG_DEBUG("missing entries: ~p", [VcMap]),
            vectorclock:new();
        true ->
            vectorclock:min(maps:values(VcMap))
    end.
%% True when no clock in the map is still 'undefined' (vacuously true
%% for the empty map).
all_defined(VcMap) ->
    lists:all(fun(V) -> V =/= undefined end, maps:values(VcMap)).
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(exp_simulations_support).
-author("<NAME> <<EMAIL>").
-include("exp.hrl").
-export([push_exp_metrics/1,
push_ldb_metrics/0]).
-define(SEP, ",").
%% Snapshots the relevant ldb and exp configuration together with the
%% experiment start time, encodes it as JSON and stores it under the
%% coordinator (rsg) metrics path.
-spec push_exp_metrics(timestamp()) -> ok.
push_exp_metrics(StartTime) ->
    %% Configuration keys snapshotted from the ldb application.
    LDBConfigs = get_configs(ldb, [ldb_mode,
                                   ldb_state_sync_interval,
                                   ldb_redundant_dgroups,
                                   ldb_dgroup_back_propagation,
                                   ldb_scuttlebutt_gc,
                                   ldb_op_ii]),
    %% Configuration keys snapshotted from the exp application.
    LSimConfigs = get_configs(exp, [exp_overlay,
                                    exp_node_number,
                                    exp_simulation,
                                    exp_node_event_number,
                                    exp_gmap_simulation_key_percentage,
                                    exp_retwis_zipf]),
    Payload = [{start_time, StartTime} | LDBConfigs ++ LSimConfigs],
    store(file_path(rsg), ldb_json:encode(Payload)),
    ok.
%% Collects runner and ldb metrics, merges them, converts the
%% per-timestamp maps into JSON-friendly lists and stores the result
%% under this node's metrics path.
-spec push_ldb_metrics() -> ok.
push_ldb_metrics() ->
    RunnerMetrics = exp_simulation_runner:get_metrics(),
    LDBMetrics = ldb_forward:get_metrics(),
    {Transmission0, Memory0, Latency0, Processing} =
        ldb_metrics:merge_all([RunnerMetrics | LDBMetrics]),
    %% Transmission and memory share the same per-timestamp shape
    %% ({{A, B}, C}), so a single converter handles both.
    ToEntries =
        fun(Metrics) ->
                maps:fold(
                  fun(Timestamp, {{A, B}, C}, Acc) ->
                          Entry = [{ts, Timestamp},
                                   {size, [A, B]},
                                   {term_size, C}],
                          [Entry | Acc]
                  end,
                  [],
                  Metrics)
        end,
    Payload = [{transmission, ToEntries(Transmission0)},
               {memory, ToEntries(Memory0)},
               {latency, maps:to_list(Latency0)},
               {processing, Processing}],
    store(file_path(ldb_config:id()), ldb_json:encode(Payload)),
    ok.
%% @private Builds the metrics file path "<timestamp>/<name>.json".
file_path(Name) ->
    Timestamp = exp_config:get(exp_timestamp),
    str(Timestamp) ++ "/" ++ str(Name) ++ ".json".
%% @private Reads each variable in Vars from the configuration module
%% of App (ldb -> ldb_config, exp -> exp_config), keeping list order.
get_configs(App, Vars) ->
    Mod = case App of
              ldb -> ldb_config;
              exp -> exp_config
          end,
    [{Var, Mod:get(Var)} || Var <- Vars].
%% @private Renders an atom or an integer as a string; any other type
%% fails with function_clause.
str(Value) when is_integer(Value) ->
    integer_to_list(Value);
str(Value) when is_atom(Value) ->
    atom_to_list(Value).
%% @private Persists the encoded metrics under Path in the Redis-backed
%% metrics store; asserts the write succeeded.
store(Path, Content) ->
    ok = exp_redis_metrics_store:put(Path, Content).
%% @author <NAME>, <NAME>, <NAME>
%%
%% @copyright 2017 Coowry Ltd.
%%
%% @doc Data validation in Erlang.
%%
%% Validators of type {@type validator(A,B)} are functions that accept
%% terms of type {@type A} and returns a validation result of type
%% {@type result(B)}.
%%
-module(baleen).
%% API exports
%% Types
-export_type([validator/2, result/1, predicate/1, str/0, val_map_validator/3, val_map_result/2]).
%% Main validation function
-export([validate/2]).
%% Functions for user validator injections
-export([validator/1, predicate/1]).
%% Validator composition
-export([compose/2, compose/1, any/1]).
%% Validator constructors
%% Basic validators
-export([invalid/0, valid/0]).
-export([member/1]).
-export([literal/1]).
-export([regex/1]).
-export([max_length/1]).
-export([transform/1]).
%% Type casting validators
-export([to_integer/0]).
-export([to_atom/0]).
-export([to_float/0]).
-export([to_binary/0]).
-export([to_string/0]).
%% Compare validators
-export([between/2]).
-export([between_open_start/2]).
-export([between_open_end/2]).
-export([between_open/2]).
%% Validator "lifters"
-export([list_of/1]).
-export([map_of/2]).
-export([tuple_of/1]).
%% Reqopt
-export([val_map/2]).
%%====================================================================
%% Types
%%====================================================================
-type result(R) :: {ok, R} | {error, binary()}.
-type predicate(A) :: fun((A) -> boolean()).
-type str() :: string() | binary().
-opaque validator(A,B) :: fun((A) -> result(B)).
-type val_map_validator(K, A, B) :: #{K => {optional | required, validator(A, B)}}.
-type val_map_result(K, B) :: #{valid => #{K => B},
nonvalid => #{K => binary()},
unexpected => [K],
missing => [K]}.
%%====================================================================
%% Error messages
%%====================================================================
-define(INVALID_FLOAT(V), format("\"~p\" is not a float", [V])).
%%====================================================================
%% API functions
%%====================================================================
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec validate(validator(A,B), A) -> result(B).
%% @doc Applies validator `V' to the term `X', yielding its result().
validate(Validator, Term) ->
    Validator(Term).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec predicate(predicate(A)) -> validator(A,A).
%% @doc Lifts a boolean predicate into a validator: returns `{ok, X}'
%% when the predicate holds for `X', otherwise an
%% `{error, <<"Improper term ...">>}' tuple.
predicate(Pred) ->
    fun(Term) ->
            case Pred(Term) of
                true -> {ok, Term};
                false -> {error, format("Improper term ~p", [Term])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec validator(fun((A) -> result(B))) -> validator(A,B).
%% @doc Wraps a user-supplied validation fun as an (opaque) validator.
validator(Fun) -> Fun.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec invalid() -> validator(_,_).
%% @doc A validator that rejects every input.
invalid() ->
    fun(Term) -> {error, format("Invalid term ~p", [Term])} end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec valid() -> validator(_,_).
%% @doc A validator that accepts every input unchanged.
valid() ->
    fun(Term) -> {ok, Term} end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_integer() -> validator(str(), integer()).
%% @doc Casts a binary or a string to an integer. Returns
%% `{ok, Integer}' on success, `{error, Message}' otherwise. For
%% strings the whole input must be consumed (no trailing characters).
to_integer() ->
    fun(Value) when is_binary(Value) ->
            try
                {ok, erlang:binary_to_integer(Value)}
            catch
                _:_ -> {error, format("~p is not an integer", [Value])}
            end;
       (Value) ->
            case io_lib:fread("~d", Value) of
                {ok, [Integer], []} -> {ok, Integer};
                _ -> {error, format("~p is not an integer", [Value])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec compose(validator(A, B), validator(B, C)) -> validator(A, C).
%% @doc Sequences two validators: the output of `V1' feeds `V2', and
%% the first failure short-circuits.
compose(V1, V2) ->
    fun(Input) ->
            %% A validator is a fun/1, so it can be applied directly.
            case V1(Input) of
                {ok, Intermediate} -> V2(Intermediate);
                Failure -> Failure
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec compose(nonempty_list(validator(A,A))) -> validator(A,A).
%% @doc Composes a list of validators into a single pipeline, applied
%% left to right (equivalent to a right fold with valid() as base).
compose([]) -> valid();
compose([V | Vs]) -> compose(V, compose(Vs)).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec any(list(validator(A, B))) -> validator(A, B).
%% @doc Returns a validator that succeeds with the first validator in
%% the list accepting the input. An empty list behaves like invalid();
%% when every validator fails, a single generic error is returned.
any([]) ->
    invalid();
any([First | Rest]) ->
    fun(Term) ->
            case First(Term) of
                {ok, _} = Ok ->
                    Ok;
                {error, _} ->
                    %% Fall through to the remaining validators,
                    %% collapsing their failure into one message.
                    case (any(Rest))(Term) of
                        {ok, _} = Ok -> Ok;
                        {error, _} -> {error, format("There isn't any valid", [])}
                    end
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec member(list(A)) -> validator(A, A).
%% @doc Returns a validator accepting only terms that are members of
%% the given list.
member(Allowed) ->
    fun(Term) ->
            case lists:member(Term, Allowed) of
                false -> {error, format("~p is not member of ~p", [Term, Allowed])};
                true -> {ok, Term}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec literal(A) -> validator(A, A).
%% @doc Returns a validator accepting only terms exactly (=:=) equal to
%% `Term'.
literal(Expected) ->
    fun(Actual) when Actual =:= Expected ->
            {ok, Expected};
       (Actual) ->
            {error, format("~p and ~p do not match", [Expected, Actual])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec regex(String) -> validator(String, String) when String :: str().
%% @doc Returns a validator accepting strings/binaries that match the
%% given regular expression. The expression is compiled once, up
%% front; an uncompilable expression throws `badarg'. An empty match
%% at position 0 is treated as no match.
regex(RegularExpression) ->
    MP = case re:compile(RegularExpression) of
             {ok, Compiled} -> Compiled;
             {error, _ErrSpec} -> throw(badarg)
         end,
    fun(Subject) ->
            case re:run(Subject, MP) of
                {match, [{0, 0}]} ->
                    {error, format("~p is not matching the regular expression ~p",
                                   [Subject, RegularExpression])};
                {match, _Captured} ->
                    {ok, Subject};
                nomatch ->
                    {error, format("~p is not matching the regular expression ~p",
                                   [Subject, RegularExpression])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec max_length(non_neg_integer()) -> validator(S, S) when S :: iodata().
%% @doc Returns a validator accepting iodata whose size is at most `I'.
%% Note: iolist_size/1 counts bytes, not characters.
max_length(Limit) ->
    fun(Data) ->
            case iolist_size(Data) =< Limit of
                true -> {ok, Data};
                false -> {error, format("The size of ~p is longer than ~p", [Data, Limit])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_atom() -> validator(str(), atom()).
%% @doc Casts a binary or a string to an *existing* atom. Returns
%% `{ok, Atom}' on success, `{error, Message}' otherwise.
%%
%% Fix: both branches now use the *_to_existing_atom BIFs. The string
%% branch always did, but the binary branch used binary_to_atom/2,
%% which creates fresh atoms from (typically untrusted) input — atoms
%% are never garbage collected, so that allows atom-table exhaustion.
to_atom() ->
    fun(Value) when is_binary(Value) ->
            try binary_to_existing_atom(Value, utf8) of
                Atom -> {ok, Atom}
            catch
                _:_ -> {error, format("\"~p\" is not a valid binary", [Value])}
            end;
       (Value) ->
            try list_to_existing_atom(Value) of
                Atom -> {ok, Atom}
            catch
                _:_ -> {error, format("~p is not a valid string", [Value])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec list_of(validator(A,B)) -> validator(list(A), list(B)).
%% @doc Lifts a validator over lists: every element must validate; the
%% first failing element aborts with its error message.
list_of(V) ->
    fun(List) ->
            Results = [{Elem, validate(V, Elem)} || Elem <- List],
            case [{Elem, Msg} || {Elem, {error, Msg}} <- Results] of
                [] ->
                    {ok, [Value || {_, {ok, Value}} <- Results]};
                [{Elem, Msg} | _] ->
                    {error, format("Error in element ~p: ~s", [Elem, Msg])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec map_of(validator(K1, K2), validator(V1, V2))
            -> validator(#{K1 => V1}, #{K2 => V2}).
%% @doc Lifts a pair of validators over maps: `VK' validates every key
%% and `VV' every value. On success the rebuilt map is returned;
%% otherwise the first key error (or, failing that, the first value
%% error) is reported.
%%
%% Fix: each key/value pair is now validated exactly once by iterating
%% maps:to_list/1. The previous version enumerated the cartesian
%% product `K <- Keys, V <- Values' filtered by maps:get/2 — O(n^2),
%% and it re-validated entries whenever two keys shared the same value.
map_of(VK, VV) ->
    fun(Map) ->
            Results = [{{K, V}, {validate(VK, K), validate(VV, V)}}
                       || {K, V} <- maps:to_list(Map)],
            KeysErrors = [{K, Msg} || {{K, _}, {{error, Msg}, _}} <- Results],
            ValuesErrors = [{V, Msg} || {{_, V}, {_, {error, Msg}}} <- Results],
            case {KeysErrors, ValuesErrors} of
                {[], []} ->
                    {ok, maps:from_list([{K2, V2}
                                         || {{_, _}, {{ok, K2}, {ok, V2}}} <- Results])};
                {[], [{V, Msg} | _]} ->
                    {error, format("Error in value ~p: ~s", [V, Msg])};
                {[{K, Msg} | _], _} ->
                    {error, format("Error in key ~p: ~s", [K, Msg])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec tuple_of(validator(A, B)) -> validator({A}, {B}).
%% @doc Lifts a validator over tuples: every element must validate;
%% the first failing element aborts with its error message.
tuple_of(V) ->
    fun(Tuple) ->
            Results = [{Elem, validate(V, Elem)} || Elem <- tuple_to_list(Tuple)],
            case [{Elem, Msg} || {Elem, {error, Msg}} <- Results] of
                [] ->
                    {ok, list_to_tuple([Value || {_, {ok, Value}} <- Results])};
                [{Elem, Msg} | _] ->
                    {error, format("Error in ~p: ~s", [Elem, Msg])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec transform(fun((A) -> B)) -> validator(A,B).
%% @doc Returns a validator that always succeeds, applying `F' to the
%% input. Note: if `F' raises, the exception propagates to the caller.
transform(Fun) ->
    fun(Term) -> {ok, Fun(Term)} end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_float() -> validator(str(), float()).
%% @doc Casts a binary or a string to a float. Returns `{ok, Float}'
%% on success; integer-form input (e.g. <<"1">>) is rejected, as are
%% strings with trailing characters.
to_float() ->
    fun(Value) when is_binary(Value) ->
            try
                {ok, erlang:binary_to_float(Value)}
            catch
                _:_ -> {error, ?INVALID_FLOAT(Value)}
            end;
       (Value) ->
            case io_lib:fread("~f", Value) of
                {ok, [Float], []} -> {ok, Float};
                _ -> {error, ?INVALID_FLOAT(Value)}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec val_map(val_map_validator(K, A, B),
              #{K => A}) ->
                 val_map_result(K, B).
%% @doc Validates the entries of `Map' against the per-key validators
%% in `Validator', classifying every key into one of four buckets:
%% `valid' (validated values), `nonvalid' (error messages), `missing'
%% (required keys absent from the input) and `unexpected' (input keys
%% with no validator).
val_map(Validator, Map) ->
    InputKeys = maps:keys(Map),
    Unexpected = InputKeys -- maps:keys(Validator),
    Missing = [K || {K, {required, _}} <- maps:to_list(Validator)] -- InputKeys,
    %% Only input keys that have a validator are actually validated.
    ToValidate = maps:with(InputKeys -- Unexpected, Map),
    Classify =
        fun(K, V, {Valids, Invalids}) ->
                {_OptReq, Val} = maps:get(K, Validator),
                case validate(Val, V) of
                    {ok, R} -> {Valids#{K => R}, Invalids};
                    {error, Msg} -> {Valids, Invalids#{K => Msg}}
                end
        end,
    {Valid, Nonvalid} = maps:fold(Classify, {#{}, #{}}, ToValidate),
    #{valid => Valid,
      nonvalid => Nonvalid,
      missing => Missing,
      unexpected => Unexpected}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_binary() -> validator(str(), binary()).
%% @doc Casts a string to a binary; a binary is returned as-is.
%% Improper/non-character lists yield an `{error, Message}' tuple.
to_binary() ->
    fun(Value) when is_binary(Value) ->
            {ok, Value};
       (Value) ->
            try
                {ok, list_to_binary(Value)}
            catch
                _:_ -> {error, format("~p is not a string nor a binary", [Value])}
            end
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_string() -> validator(str(), string()).
%% @doc Casts a binary or a string to a string. Returns
%% `{ok, String}' on success, `{error, Message}' otherwise.
%%
%% Fix: the previous implementation pushed list input through
%% io_lib:fread("~s", ...), which (a) only consumed the first
%% whitespace-delimited token and (b) raised an uncaught try_clause
%% error (the `of' section of a try is not protected) for inputs with
%% trailing text or for the empty string. A list is now accepted
%% directly when it is a proper character list (io_lib:char_list/1).
to_string() ->
    fun(Value) when is_binary(Value) ->
            %% binary_to_list/1 cannot fail on a binary.
            {ok, binary_to_list(Value)};
       (Value) when is_list(Value) ->
            case io_lib:char_list(Value) of
                true -> {ok, Value};
                false -> {error, format("~p is not a string nor a binary", [Value])}
            end;
       (Value) ->
            {error, format("~p is not a string nor a binary", [Value])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec between(integer(), integer()) -> validator(integer(), integer()).
%% @doc Returns a validator accepting integers in the open interval
%% `(Min, Max)' — both bounds excluded. Yields `{ok, N}' on success and
%% a descriptive `{error, Message}' otherwise.
between(Min, Max) ->
    fun(N) when is_integer(N), N > Min, N < Max ->
            {ok, N};
       (N) when is_integer(N) ->
            {error, format("~p is not in range between (~p, ~p)", [N, Min, Max])};
       (N) ->
            {error, format("~p is not an integer", [N])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec between_open_start(integer(), integer()) -> validator(integer(), integer()).
%% @doc Returns a validator accepting integers in the interval
%% `[Min, Max)' — lower bound included, upper bound excluded. Yields
%% `{ok, N}' on success and a descriptive `{error, Message}' otherwise.
between_open_start(Min, Max) ->
    fun(N) when is_integer(N), N >= Min, N < Max ->
            {ok, N};
       (N) when is_integer(N) ->
            {error, format("~p is not in range between [~p, ~p)", [N, Min, Max])};
       (N) ->
            {error, format("~p is not an integer", [N])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec between_open_end(integer(), integer()) -> validator(integer(), integer()).
%% @doc Returns a validator accepting integers in the interval
%% `(Min, Max]' — lower bound excluded, upper bound included. Yields
%% `{ok, N}' on success and a descriptive `{error, Message}' otherwise.
between_open_end(Min, Max) ->
    fun(N) when is_integer(N), N > Min, N =< Max ->
            {ok, N};
       (N) when is_integer(N) ->
            {error, format("~p is not in range between (~p, ~p]", [N, Min, Max])};
       (N) ->
            {error, format("~p is not an integer", [N])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec between_open(integer(), integer()) -> validator(integer(), integer()).
%% @doc Returns a validator accepting integers in the closed interval
%% `[Min, Max]' — both bounds included. Yields `{ok, N}' on success and
%% a descriptive `{error, Message}' otherwise.
between_open(Min, Max) ->
    fun(N) when is_integer(N), N >= Min, N =< Max ->
            {ok, N};
       (N) when is_integer(N) ->
            {error, format("~p is not in range between [~p, ~p]", [N, Min, Max])};
       (N) ->
            {error, format("~p is not an integer", [N])}
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%====================================================================
%% Internal functions
%%====================================================================
-spec format(io:format(), [term()]) -> binary().
%% Render `Format'/`Terms' with io_lib:format/2 and return the result
%% as a UTF-8 binary (error messages in this module are binaries).
format(Format, Terms) ->
    unicode:characters_to_binary(io_lib:format(Format, Terms)).
%% Copyright 2014 Erlio GmbH Basel Switzerland (http://erl.io)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_server_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% Helper macro for declaring children of supervisor
-define(CHILD(I, Type, Args), {I, {I, start_link, Args},
permanent, 5000, Type, [I]}).
%% ===================================================================
%% API functions
%% ===================================================================
-spec start_link() -> 'ignore' | {'error',_} | {'ok',pid()}.
%% @doc Starts the top-level vmq_server supervisor and registers it
%% locally under the module name.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
%% NOTE(review): the child list mixes workers and supervisors, so the
%% child-spec type is `worker | supervisor' — the previous spec claimed
%% every child is a `worker', which contradicted the list below.
-spec init([]) -> {'ok', {{'one_for_one', 5, 10},
                          [{atom(), {atom(), atom(), list()},
                            permanent, pos_integer(), worker | supervisor, [atom()]}]}}.
init([]) ->
    %% Child specs for the configured message store come from the
    %% application environment and are started right after vmq_config.
    {ok, MsgStoreChildSpecs} = application:get_env(vmq_server, msg_store_childspecs),
    %% One-off migration/cleanup steps that must run before any child
    %% process is started.
    maybe_change_nodename(),
    purge_stale_subscriber_data(),
    {ok, { {one_for_one, 5, 10},
           [?CHILD(vmq_config, worker, []) | MsgStoreChildSpecs]
           ++ [
               ?CHILD(vmq_crl_srv, worker, []),
               ?CHILD(vmq_queue_sup_sup, supervisor, [infinity, 5, 10]),
               ?CHILD(vmq_reg_sup, supervisor, []),
               ?CHILD(vmq_cluster_node_sup, supervisor, []),
               ?CHILD(vmq_sysmon, worker, []),
               %% NOTE(review): ?CHILD always sets a 5000 ms shutdown;
               %% OTP recommends `infinity' for supervisor children —
               %% confirm the short timeout is intentional here.
               ?CHILD(vmq_metrics_sup, supervisor, [])
           ]} }.
%% @private If the persisted cluster membership contains exactly one
%% node whose name differs from the current node name, assume the node
%% was renamed: rewrite the membership state (remove the old name, add
%% the new one), gossip the merged state, and rewrite every stored
%% subscription to point at the new node name.
maybe_change_nodename() ->
    {ok, LocalState} = plumtree_peer_service_manager:get_local_state(),
    case riak_dt_orswot:value(LocalState) of
        [Node] when Node =/= node() ->
            lager:info("rename VerneMQ node from ~p to ~p", [Node, node()]),
            {ok, Actor} = plumtree_peer_service_manager:get_actor(),
            %% swap the old node name for the new one in the ORSWOT
            {ok, Merged} = riak_dt_orswot:update({update, [{remove, Node},
                                                           {add, node()}]}, Actor, LocalState),
            %% push the merged membership to the gossip process
            _ = gen_server:cast(plumtree_peer_service_gossip, {receive_state, Merged}),
            %% rewrite every subscriber record to reference node()
            %% (assumes change_node_all/3 remaps all entries — the exact
            %% semantics live in vmq_subscriber)
            vmq_reg:fold_subscribers(
              fun(SubscriberId, Subs, _) ->
                      {NewSubs, _} = vmq_subscriber:change_node_all(Subs, node(), false),
                      vmq_subscriber_db:store(SubscriberId, NewSubs)
              end, ignored, false);
        _ ->
            %% we ignore if the node has the same name
            %% or if more than one node is returned (clustered)
            ignore
    end.
%% @private Clean up subscriber data left over from a previous
%% incarnation of this node. For each subscriber, this node's
%% subscriptions are partitioned by their clean-session flag into
%% `{CleanSessionFalse, CleanSessionTrue}' topic lists. If the node
%% holds no clean_session=false (persistent) subscriptions, the whole
%% subscriber record is deleted; otherwise only the clean_session=true
%% (transient) topics are removed.
%%
%% NOTE(review): the `{[], _}' branch deletes the subscriber's entire
%% subscription record whenever this node has no persistent
%% subscriptions — confirm this is safe for subscribers that also hold
%% subscriptions on other cluster nodes.
purge_stale_subscriber_data() ->
    Node = node(),
    FoldFun =
        fun(SubscriberId, Subs, _) ->
                SortedSubs =
                    %% accumulator: {cs=false topic lists, cs=true topic lists}
                    lists:foldl(fun({N, _, _}, AccAcc) when Node =/= N ->
                                        %% subscriptions on other nodes are skipped
                                        AccAcc;
                                   ({_N, CS, InnerSubs}, {CSFAcc, CSTAcc}) ->
                                        Topics = [T || {T, _} <- InnerSubs],
                                        case CS of
                                            true ->
                                                {CSFAcc, [Topics | CSTAcc]};
                                            false ->
                                                {[Topics | CSFAcc], CSTAcc}
                                        end
                                end,
                                {[], []},
                                Subs),
                case SortedSubs of
                    {[], _} ->
                        %% no persistent subscriptions on this node
                        vmq_reg:delete_subscriptions(SubscriberId);
                    {_, CST} ->
                        %% remove only the transient (cs=true) topics
                        lists:foreach(
                          fun(Topics) ->
                                  %% re-read before each removal so that
                                  %% successive removals see prior stores
                                  OldSubs = vmq_subscriber_db:read(SubscriberId, []),
                                  case vmq_subscriber:remove(OldSubs, Topics) of
                                      {NewSubs, true} ->
                                          vmq_subscriber_db:store(SubscriberId, NewSubs);
                                      _ ->
                                          ok
                                  end
                          end,
                          CST)
                end,
                ignore
        end,
    vmq_reg:fold_subscribers(FoldFun, [], false).
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc AWMap CRDT.
%% Modeled as a dictionary where keys can be anything and the
%% values are causal-CRDTs.
%%
%% @reference <NAME>, <NAME>, and <NAME>
%% Delta State Replicated Data Types (2016)
%% [http://arxiv.org/pdf/1603.01529v1.pdf]
-module(state_awmap).
-author("<NAME> <<EMAIL>>").
-include("state_type.hrl").
-behaviour(type).
-behaviour(state_type).
-define(TYPE, ?MODULE).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0, new/1]).
-export([mutate/3, delta_mutate/3, merge/2]).
-export([query/1, equal/2, is_bottom/1,
is_inflation/2, is_strict_inflation/2,
irreducible_is_strict_inflation/2]).
-export([join_decomposition/1, delta/2, digest/1]).
-export([encode/2, decode/2]).
-export_type([state_awmap/0, state_awmap_op/0]).
-opaque state_awmap() :: {?TYPE, payload()}.
-type payload() :: {state_type:state_type(),
state_causal_type:causal_crdt()}.
-type key() :: term().
-type key_op() :: term().
-type state_awmap_op() :: {apply, key(), key_op()} |
{rmv, key()}.
%% @doc Create a new, empty `state_awmap()'.
%% By default the values are a AWSet Causal CRDT.
-spec new() -> state_awmap().
new() ->
    new([?AWSET_TYPE]).

%% @doc Create a new, empty `state_awmap()'.
%% `CType' is the causal-CRDT type used for the map values.
-spec new([term()]) -> state_awmap().
new([CType]) ->
    %% payload: {value type, empty dot-map causal store}
    {?TYPE, {CType, state_causal_type:new(dot_map)}}.
%% @doc Mutate a `state_awmap()'.
%% Delegates to the generic `state_type:mutate/3'.
-spec mutate(state_awmap_op(), type:id(), state_awmap()) ->
    {ok, state_awmap()}.
mutate(Op, Actor, {?TYPE, _}=CRDT) ->
    state_type:mutate(Op, Actor, CRDT).
%% @doc Delta-mutate a `state_awmap()'.
%% The first argument can be:
%% - `{apply, Key, Op}'
%% - `{rmv, Key}'
%% `apply' also receives an operation that will be applied to the
%% key.
%% This operation has to be a valid operation in the CausalCRDT
%% chosen to be in the values (by default an AWSet).
%% The second argument is the replica id.
%% The third argument is the `state_awmap()' to be inflated.
-spec delta_mutate(state_awmap_op(), type:id(), state_awmap()) ->
    {ok, state_awmap()}.
delta_mutate({apply, Key, Op}, Actor,
             {?TYPE, {CType, {DotMap, CC}}}) ->
    {Type, Args} = state_type:extract_args(CType),
    Default = state_causal_type:ds_bottom(CType),
    %% current dot store for Key (bottom if the key is absent)
    SubDS = dot_map:fetch(Key, DotMap, Default),
    %% wrap the sub dot-store plus the map's causal context as a value
    %% CRDT so the value type can delta-mutate it
    CRDT = ccrdt(Type, Args, SubDS, CC),
    {ok, {Type, SubDelta}} = Type:delta_mutate(Op, Actor, CRDT),
    {DeltaSubDS, DeltaCC} = dcrdt(Type, SubDelta),
    %% NOTE: `==' (not `=:=') compares the delta sub-store with bottom
    DeltaDS = case Default == DeltaSubDS of
                  true ->
                      %% if the resulting sub ds is empty
                      dot_map:new();
                  false ->
                      dot_map:store(Key, DeltaSubDS, dot_map:new())
              end,
    Delta = {CType, {DeltaDS, DeltaCC}},
    {ok, {?TYPE, Delta}};
delta_mutate({rmv, Key}, _Actor, {?TYPE, {CType, {DotMap, _CC}}}) ->
    Default = state_causal_type:ds_bottom(CType),
    SubDS = dot_map:fetch(Key, DotMap, Default),
    %% the removal delta carries no store entries, only the causal
    %% context built from the dots currently supporting the key
    DotSet = state_causal_type:dots(CType, SubDS),
    DeltaCC = causal_context:from_dot_set(DotSet),
    DeltaDS = dot_map:new(),
    Delta = {CType, {DeltaDS, DeltaCC}},
    {ok, {?TYPE, Delta}}.
%% @doc Returns the value of the `state_awmap()'.
%% This value is an orddict where each key maps to the
%% result of `query/1' over the current value CRDT.
-spec query(state_awmap()) -> term().
query({?TYPE, {CType, {DotMap, CC}}}) ->
    {Type, Args} = state_type:extract_args(CType),
    lists:foldl(
        fun(Key, Result) ->
            %% every key listed by the dot map has an entry, so the
            %% `undefined' default is never expected to surface
            Value = dot_map:fetch(Key, DotMap, undefined),
            CRDT = ccrdt(Type, Args, Value, CC),
            Query = Type:query(CRDT),
            orddict:store(Key, Query, Result)
        end,
        orddict:new(),
        dot_map:fetch_keys(DotMap)
    ).
%% @doc Merge two `state_awmap()'.
%% Merging is handled by the `merge' function in
%% `state_causal_type' common library. Both arguments must carry the
%% same value type `CType' (enforced by the repeated pattern variable).
-spec merge(state_awmap(), state_awmap()) -> state_awmap().
merge({?TYPE, {CType, AWMap1}}, {?TYPE, {CType, AWMap2}}) ->
    Map = state_causal_type:merge({dot_map, CType},
                                  AWMap1, AWMap2),
    {?TYPE, {CType, Map}}.

%% @doc Equality for `state_awmap()'.
%% Since everything is ordered, == should work.
-spec equal(state_awmap(), state_awmap()) -> boolean().
equal({?TYPE, AWMap1}, {?TYPE, AWMap2}) ->
    AWMap1 == AWMap2.

%% @doc Check if a `state_awmap()' is bottom, i.e. equal to a freshly
%% created default map (`new/0').
-spec is_bottom(state_awmap()) -> boolean().
is_bottom({?TYPE, _}=CRDT) ->
    CRDT == new().
%% @doc Given two `state_awmap()', check if the second is an inflation
%% of the first. Delegates to the generic merge-based check in
%% `state_type'.
%% @todo
-spec is_inflation(state_awmap(), state_awmap()) -> boolean().
is_inflation({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    state_type:is_inflation(CRDT1, CRDT2).

%% @doc Check for strict inflation.
-spec is_strict_inflation(state_awmap(), state_awmap()) -> boolean().
is_strict_inflation({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    state_type:is_strict_inflation(CRDT1, CRDT2).

%% @doc Check for irreducible strict inflation.
-spec irreducible_is_strict_inflation(state_awmap(),
                                      state_type:digest()) ->
    boolean().
irreducible_is_strict_inflation({?TYPE, _}=A, B) ->
    state_type:irreducible_is_strict_inflation(A, B).

%% The digest of an AWMap is its full state.
-spec digest(state_awmap()) -> state_type:digest().
digest({?TYPE, _}=CRDT) ->
    {state, CRDT}.

%% @doc Join decomposition for `state_awmap()'.
%% Currently a stub: the CRDT is returned as its own single component.
%% @todo
-spec join_decomposition(state_awmap()) -> [state_awmap()].
join_decomposition({?TYPE, _}=CRDT) ->
    [CRDT].

%% @doc Delta calculation for `state_awmap()'.
-spec delta(state_awmap(), state_type:digest()) -> state_awmap().
delta({?TYPE, _}=A, B) ->
    state_type:delta(A, B).

-spec encode(state_type:format(), state_awmap()) -> binary().
encode(erlang, {?TYPE, _}=CRDT) ->
    erlang:term_to_binary(CRDT).

%% NOTE(review): binary_to_term/1 is used without the `safe' option;
%% only decode data from trusted peers.
-spec decode(state_type:format(), binary()) -> state_awmap().
decode(erlang, Binary) ->
    %% assert the decoded term is an AWMap before returning it
    {?TYPE, _} = CRDT = erlang:binary_to_term(Binary),
    CRDT.
%% @private Wrap a dot store and causal context as a value CRDT.
%% When the value type is this map type itself (nested AWMap), the
%% single element of `Args' is the nested value type and becomes part
%% of the payload; a badmatch is raised if `Args' has any other shape.
ccrdt(?TYPE, Args, DS, CC) ->
    [SubType] = Args,
    {?TYPE, {SubType, {DS, CC}}};
ccrdt(Type, _Args, DS, CC) ->
    {Type, {DS, CC}}.

%% @private Unwrap a value-CRDT delta back into {DotStore, CausalContext}.
%% For a nested AWMap the inner type tag is discarded; a badmatch is
%% raised if the delta does not have the expected shape.
dcrdt(?TYPE, Delta) ->
    {_SubType, {DS, CC}} = Delta,
    {DS, CC};
dcrdt(_Type, Delta) ->
    Delta.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
new_test() ->
?assertEqual({?TYPE, {?AWSET_TYPE, {[], causal_context:new()}}},
new()).
query_test() ->
Actor = 1,
Map0 = new([?AWSET_TYPE]),
Map1 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{10, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
?assertEqual([], query(Map0)),
?assertEqual([{"a", sets:from_list([17])},
{"b", sets:from_list([10, 13])}], query(Map1)).
delta_apply_test() ->
Actor = 1,
Map0 = new([?AWSET_TYPE]),
{ok, {?TYPE, Delta1}} = delta_mutate({apply, "b", {add, 3}},
Actor, Map0),
Map1 = merge({?TYPE, Delta1}, Map0),
{ok, {?TYPE, Delta2}} = delta_mutate({apply, "a", {add, 17}},
Actor, Map1),
Map2 = merge({?TYPE, Delta2}, Map1),
{ok, {?TYPE, Delta3}} = delta_mutate({apply, "b", {add, 13}},
Actor, Map2),
Map3 = merge({?TYPE, Delta3}, Map2),
{ok, {?TYPE, Delta4}} = delta_mutate({apply, "b", {rmv, 3}},
Actor, Map3),
Map4 = merge({?TYPE, Delta4}, Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
{?TYPE, Delta1}),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Map1),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]}],
{[], [{Actor, 2}]}}}},
{?TYPE, Delta2}),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 2}], []}}}},
Map2),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"b", [{13, [{Actor, 3}]}]}],
{[], [{Actor, 3}]}}}},
{?TYPE, Delta3}),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[],
{[{Actor, 1}], []}}}},
{?TYPE, Delta4}),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
Map4).
apply_test() ->
Actor = 1,
Map0 = new([?AWSET_TYPE]),
{ok, Map1} = mutate({apply, "b", {add, 3}}, Actor, Map0),
{ok, Map2} = mutate({apply, "a", {add, 17}}, Actor, Map1),
{ok, Map3} = mutate({apply, "b", {add, 13}}, Actor, Map2),
{ok, Map4} = mutate({apply, "b", {rmv, 3}}, Actor, Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Map1),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 2}], []}}}},
Map2),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
Map4).
rmv_test() ->
Actor = 1,
Map0 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
{ok, Map1} = mutate({rmv, "a"}, Actor, Map0),
{ok, Map2} = mutate({apply, "a", {add, 17}}, Actor, Map1),
{ok, Map3} = mutate({rmv, "b"}, Actor, Map2),
{ok, Map4} = mutate({rmv, "a"}, Actor, Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 3}], []}}}},
Map1),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 4}]}]},
{"b", [{3, [{Actor, 1}]},
{13, [{Actor, 3}]}]}],
{[{Actor, 4}], []}}}},
Map2),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 4}]}]}],
{[{Actor, 4}], []}}}},
Map3),
?assertEqual({?TYPE, {?AWSET_TYPE,
{[],
{[{Actor, 4}], []}}}},
Map4).
equal_test() ->
Actor = "one",
Map1 = {?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Map2 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]}],
{[], [{Actor, 2}]}}}},
Map3 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 2}], []}}}},
?assert(equal(Map1, Map1)),
?assertNot(equal(Map1, Map2)),
?assertNot(equal(Map1, Map3)).
is_bottom_test() ->
Actor = "one",
Map0 = new(),
Map1 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]}],
{[], [{Actor, 2}]}}}},
?assert(is_bottom(Map0)),
?assertNot(is_bottom(Map1)).
is_inflation_test() ->
Actor = "1",
Map1 = {?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Map2 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]}],
{[], [{Actor, 2}]}}}},
Map3 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 2}], []}}}},
?assert(is_inflation(Map1, Map1)),
?assertNot(is_inflation(Map1, Map2)),
?assertNot(is_inflation(Map2, Map1)),
?assert(is_inflation(Map1, Map3)),
?assert(is_inflation(Map2, Map3)),
%% check inflation with merge
?assert(state_type:is_inflation(Map1, Map1)),
?assertNot(state_type:is_inflation(Map1, Map2)),
?assertNot(state_type:is_inflation(Map2, Map1)),
?assert(state_type:is_inflation(Map1, Map3)),
?assert(state_type:is_inflation(Map2, Map3)).
is_strict_inflation_test() ->
Actor = "1",
Map1 = {?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Map2 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]}],
{[], [{Actor, 2}]}}}},
Map3 = {?TYPE, {?AWSET_TYPE,
{[{"a", [{17, [{Actor, 2}]}]},
{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 2}], []}}}},
?assertNot(is_strict_inflation(Map1, Map1)),
?assertNot(is_strict_inflation(Map1, Map2)),
?assertNot(is_strict_inflation(Map2, Map1)),
?assert(is_strict_inflation(Map1, Map3)),
?assert(is_strict_inflation(Map2, Map3)).
join_decomposition_test() ->
%% @todo
ok.
encode_decode_test() ->
Actor = "hey",
Map = {?TYPE, {?AWSET_TYPE,
{[{"b", [{3, [{Actor, 1}]}]}],
{[{Actor, 1}], []}}}},
Binary = encode(erlang, Map),
EMap = decode(erlang, Binary),
?assertEqual(Map, EMap).
-endif. | src/state_awmap.erl | 0.695958 | 0.438304 | state_awmap.erl | starcoder |
-module(intersection).
-export([intersection/2]).
%% Compute the intersection of two domains. Clause order is
%% significant: identical terms, recursive domains, the universal
%% domains, `none', functions, sums, lists, tagged tuples and maps are
%% handled in that order, with a final catch-all returning `none'.
intersection(D, D) -> D;
%% Recursive domains are kept lazy: the intersection is wrapped in a
%% new thunk that forces one (or both) sides only when evaluated.
intersection({recur, S}, {recur, T}) -> {recur, fun() -> intersection(S(), T()) end};
intersection({recur, S}, {sum, _} = D) -> {recur, fun() -> intersection(S(), D) end};
intersection({recur, S}, {tagged, _, _} = D) -> {recur, fun() -> intersection(S(), D) end};
intersection({recur, S}, D) when is_map(D) -> {recur, fun() -> intersection(S(), D) end};
%% NOTE(review): this catch-all fires before the `any'/'Domain/Any'
%% clauses below, so `intersection({recur, S}, any)' returns `none'
%% rather than the recursive domain — confirm that `any' can never
%% appear on the other side of a recur, or that this is intended.
intersection({recur, _}, _) -> none;
%% Intersection is symmetric: normalise so the recur is on the left.
intersection(D, {recur, S}) -> intersection({recur, S}, D);
intersection(any, D) -> D;
intersection(D, any) -> D;
intersection('Domain/Any', D) -> D;
intersection(D, 'Domain/Any') -> D;
intersection(none, _) -> none;
intersection(_, none) -> none;
% TODO: A function can be considered a value like `5` or 'atom'. While a
% function can also be seen as a constructor which given some inputs returns a
% domain, it isn't meaningful to consider the intersection of two such
% constructors, unless they are identical. While we could compute a new
% function which returns the intersection of domains of F1 and F2, this
% function would not correspond to any real function value.
intersection(F1, F2) when is_function(F1), is_function(F2) ->
    case utils:gen_tag(F1) =:= utils:gen_tag(F2) of
        true -> F1;
        false -> none
    end;
%% Sums intersect pairwise; empty (none) intersections are dropped and
%% the result is deduplicated as an ordset.
intersection({sum, D1}, {sum, D2}) ->
    {sum, ordsets:from_list([Elem || Di <- D1,
                                     Dj <- D2,
                                     Elem <- [intersection(Di, Dj)],
                                     not(Elem =:= none)])};
intersection({sum, D1}, D) ->
    {sum, ordsets:from_list([Elem || Di <- D1,
                                     Elem <- [intersection(D, Di)],
                                     not(Elem =:= none)])};
intersection(D, {sum, D1}) -> intersection({sum, D1}, D);
% For two lists where one is a prefix of the other, the intersection is the
% shorter list. For example, the intersection of `[1, 2]` and `[1, 2, 3]` would
% be `[1, 2]`
intersection(L1, L2) when is_list(L1) andalso is_list(L2) ->
    Length = min(length(L1), length(L2)),
    LL1 = lists:sublist(L1, Length),
    LL2 = lists:sublist(L2, Length),
    propagate_none([intersection(E1, E2) || {E1, E2} <- lists:zip(LL1, LL2)]);
%% Tagged domains intersect only when the tags coincide.
intersection({tagged, Tag, D1}, {tagged, Tag, D2}) ->
    propagate_none({tagged, Tag, intersection(D1, D2)});
intersection(D1, D2) when is_map(D1), is_map(D2) -> propagate_none(intersect_map(D1, D2));
intersection(_, _) -> none.
%% Collapse a composite domain to `none' when any of its components is
%% `none'; otherwise return the domain unchanged.
propagate_none(Map) when is_map(Map) ->
    case lists:any(fun(V) -> V =:= none end, maps:values(Map)) of
        true  -> none;
        false -> Map
    end;
propagate_none(List) when is_list(List) ->
    case lists:any(fun(E) -> E =:= none end, List) of
        true  -> none;
        false -> List
    end;
propagate_none({tagged, Tag, Inner}) ->
    case propagate_none(Inner) of
        none -> none;
        _    -> {tagged, Tag, Inner}
    end;
%% Scalars (atoms, numbers, ...) propagate nothing.
propagate_none(Other) ->
    Other.
%% Intersect two map domains key-wise. A key absent from one of the
%% maps is assumed to have domain `any' there, so the other map's
%% (narrower) domain is kept; keys present in both are intersected.
%% The {error, error} case cannot occur because keys are taken from the
%% merge of both maps.
intersect_map(D1, D2) when is_map(D1), is_map(D2) ->
    Combine = fun(K, _) ->
                      case {maps:find(K, D1), maps:find(K, D2)} of
                          {{ok, V1}, {ok, V2}} -> intersection(V1, V2);
                          {error, {ok, V2}}    -> V2;
                          {{ok, V1}, error}    -> V1
                      end
              end,
    maps:map(Combine, maps:merge(D1, D2)).
-module(mapz).
% API
-export([deep_find/2]).
-export([deep_get/2]).
-export([deep_get/3]).
-export([deep_put/3]).
-export([deep_remove/2]).
-export([deep_merge/1]).
-export([deep_merge/2]).
-export([deep_merge/3]).
-export([inverse/1]).
-type path() :: [term()].
% A list of keys that are used to iterate deeper into a map of maps.
%--- API ----------------------------------------------------------------------
% @doc Returns a tuple `{ok,Value}', where Value is the value associated with
% `Path', or `error' if no value is associated with `Path' in `Map'.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map, or with
% a `{badpath,Path}' exception if `Path' is not a path.
-spec deep_find(path(), map()) -> {ok, term()} | error.
deep_find(Path, Map) when is_list(Path), is_map(Map) ->
    Found = fun(Value) -> {ok, Value} end,
    Miss = fun(_Key) -> error end,
    search(Map, Path, Found, Miss);
deep_find(Path, Map) when is_map(Map) ->
    error({badpath, Path});
deep_find(Path, Map) when is_list(Path) ->
    error({badmap, Map}).

% @doc Returns value `Value' associated with `Path' if `Map' contains `Path'.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map, with
% a `{badpath,Path}' exception if `Path' is not a path, or with a
% `{badkey,Key}' exception if the path is missing from the map.
-spec deep_get(path(), map()) -> term().
deep_get(Path, Map) when is_list(Path), is_map(Map) ->
    Found = fun(Value) -> Value end,
    Miss = fun(Key) -> error({badkey, Key}) end,
    search(Map, Path, Found, Miss);
deep_get(Path, Map) when is_map(Map) ->
    error({badpath, Path});
deep_get(Path, Map) when is_list(Path) ->
    error({badmap, Map}).

% @doc Returns value `Value' associated with `Path' if `Map' contains `Path'.
% If no value is associated with `Path', `Default' is returned.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map, or with
% a `{badpath,Path}' exception if `Path' is not a path.
-spec deep_get(path(), map(), term()) -> term().
deep_get(Path, Map, Default) when is_list(Path), is_map(Map) ->
    Found = fun(Value) -> Value end,
    Miss = fun(_Key) -> Default end,
    search(Map, Path, Found, Miss);
deep_get(Path, Map, _Default) when is_map(Map) ->
    error({badpath, Path});
deep_get(Path, Map, _Default) when is_list(Path) ->
    error({badmap, Map}).
% @doc Associates `Path' with value `Value' and inserts the association into
% the map. If path `Path' already exists, the old associated value is replaced
% by `Value'. Intermediate maps are created for path segments that do not
% exist yet. Returns the updated map.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map, or with
% a `{badpath,Path}' exception if `Path' is not a path.
-spec deep_put(path(), term(), map()) -> map().
deep_put(Path, Value, Map) when is_list(Path), is_map(Map) ->
    Action = {set, Value},
    update(Map, Path, Action);
deep_put(Path, _Value, Map) when is_map(Map) ->
    error({badpath, Path});
deep_put(Path, _Value, Map) when is_list(Path) ->
    error({badmap, Map}).

% @doc Removes the `Path', if it exists, and its associated value from the map
% and returns a new map without path `Path'.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map, or with
% a `{badpath,Path}' exception if `Path' is not a path.
-spec deep_remove(path(), map()) -> map().
deep_remove(Path, Map) when is_list(Path), is_map(Map) ->
    update(Map, Path, delete);
deep_remove(Path, Map) when is_map(Map) ->
    error({badpath, Path});
deep_remove(Path, Map) when is_list(Path) ->
    error({badmap, Map}).
% @doc Merges a list of maps recursively into a single map. If a path exists
% in several maps, the value in an earlier map is superseded by the value in
% a later map.
%
% The call fails with a `{badmap,Map}' exception if any element is not a map.
%
% @equiv deep_merge(fun (_, V) -> V end, #{}, Maps)
-spec deep_merge([map()]) -> map().
deep_merge([First | Rest]) ->
    TakeNew = fun(_, V) -> V end,
    deep_merge(TakeNew, First, Rest).

% @equiv deep_merge([Map1, Map2])
-spec deep_merge(map(), map()) -> map().
deep_merge(Map1, Map2) ->
    deep_merge([Map1, Map2]).

% @doc Merges a list of maps `Maps' recursively into a single map `Target'.
% When a non-map value exists under the same path in both maps, `Combine' is
% called with the previous and the conflicting value; its return value is put
% into the result. Nested maps are merged recursively.
%
% The call fails with a `{badmap,Map}' exception if any of the maps is not a
% map.
-spec deep_merge(fun((Old::term(), New::term()) -> term()), map(), map() | [map()]) -> map().
deep_merge(_Combine, Target, []) when is_map(Target) ->
    Target;
deep_merge(Combine, Target, [Next | Rest]) ->
    deep_merge(Combine, deep_merge(Combine, Target, Next), Rest);
deep_merge(Combine, Target, From) when is_map(Target), is_map(From) ->
    MergeKey =
        fun(Key, New, Acc) ->
                case maps:find(Key, Acc) of
                    {ok, Old} when is_map(Old), is_map(New) ->
                        %% both sides are maps: recurse
                        Acc#{Key := deep_merge(Combine, Old, [New])};
                    {ok, Old} ->
                        %% conflict on a leaf: let the caller decide
                        Acc#{Key := Combine(Old, New)};
                    error ->
                        Acc#{Key => New}
                end
        end,
    maps:fold(MergeKey, Target, From);
deep_merge(_Combine, Target, From) when is_map(From) ->
    error({badmap, Target});
deep_merge(_Combine, Target, From) when is_map(Target) ->
    error({badmap, From}).
% @doc Inverts `Map' by inserting each value as a key with its corresponding
% key as the value. If two keys share the same value, one of them overwrites
% the other in an undefined order.
%
% The call fails with a `{badmap,Map}' exception if `Map' is not a map.
-spec inverse(map()) -> map().
inverse(Map) ->
    Flip = fun(Key, Value, Acc) -> Acc#{Value => Key} end,
    maps:fold(Flip, #{}, Map).
%--- Internal Functions -------------------------------------------------------
% @private Walk `Path' into a nested map. When the whole path is consumed,
% the found element is passed through `Found'; whenever a key is absent (or a
% non-map is reached before the path ends), `Miss' is called with the
% offending key and its result is returned.
search(Element, [], Found, _Miss) ->
    Found(Element);
search(Map, [Key | Rest], Found, Miss) when is_map(Map) ->
    case maps:find(Key, Map) of
        {ok, Child} -> search(Child, Rest, Found, Miss);
        error       -> Miss(Key)
    end;
search(_NotAMap, [Key | _Rest], _Found, Miss) ->
    Miss(Key).
% @private Apply `delete' or `{set, Value}' at `Path' inside a nested map.
% Deleting a missing key raises `{badkey, Key}'; descending into a non-map
% raises `{badvalue, Value}'; setting creates intermediate maps as needed.
% Setting with an empty path replaces the whole map with `Value'.
update(Map, [Key], Action) when is_map(Map) ->
    Present = maps:is_key(Key, Map),
    case {Present, Action} of
        {true, delete}      -> maps:remove(Key, Map);
        {true, {set, New}}  -> maps:update(Key, New, Map);
        {false, delete}     -> error({badkey, Key});
        {false, {set, New}} -> maps:put(Key, New, Map)
    end;
update(Map, [Key | Rest], Action) when is_map(Map) ->
    case maps:find(Key, Map) of
        {ok, Sub} when is_map(Sub) ->
            %% descend and replace the subtree
            maps:update(Key, update(Sub, Rest, Action), Map);
        {ok, Other} ->
            %% a non-map blocks the path
            error({badvalue, Other});
        error ->
            case Action of
                delete   -> error({badkey, Key});
                {set, _} -> maps:put(Key, update(#{}, Rest, Action), Map)
            end
    end;
update(Map, [], {set, Value}) when is_map(Map) ->
    Value.
%%%---------------------------------------------------------------------------
%%% @doc
%%% Handler for {@link error_logger} to write events to a text file.
%%%
%%% When added with {@link gen_event:add_handler/3} function, this module
%%% expects a one-element list containing output file name:
%```
%gen_event:add_handler(error_logger, indira_disk_h, [Filename]).
%'''
%%%
%%% This module can be used before Indira application starts.
%%% @end
%%%---------------------------------------------------------------------------
-module(indira_disk_h).
-behaviour(gen_event).
%% gen_event callbacks
-export([init/1, terminate/2]).
-export([handle_event/2, handle_call/2, handle_info/2]).
-export([code_change/3]).
-export([install/2, reopen/2, remove/1]).
-export([install/3, reopen/3, remove/2]).
-export([format_error/1]).
-export_type([event/0]).
-export_type([type_message/0, event_message/0]).
-export_type([type_report/0, event_report/0, report_type/0, report/0]).
%%%---------------------------------------------------------------------------
-define(MAX_LINE_LENGTH, 16#ffffffff). % 4GB should be enough for a log line
-record(state, {
filename :: file:filename(),
handle :: file:io_device()
}).
-type type_message() :: error | warning_msg | info_msg.
%% Marker of a formatted log message ({@type event_message()}).
-type type_report() :: error_report | warning_report | info_report.
%% Marker of a structured log message ({@type event_report()}).
-type report_type() :: std_error | std_warning | std_info | term().
%% Log level or custom category of a {@type report()}.
-type report() :: [{Tag :: term(), Data :: term()} | term()]
| string() | term().
%% Structured log payload.
-type event_message() :: {pid() | atom(), Format :: string(), Args :: [term()]}.
%% Formatted log event generated using {@link error_logger:error_msg/2},
%% {@link error_logger:warning_msg/2}, or {@link error_logger:info_msg/2}.
%%
%% `Format' and `Args' are arguments suitable for {@link io:format/2}.
-type event_report() :: {pid() | atom(), report_type(), report()}.
%% Structured log event generated using {@link error_logger:error_report/1},
%% {@link error_logger:warning_report/1}, {@link error_logger:info_report/1},
%% or their two-argument counterparts.
-type event() ::
{type_message(), GroupLeader :: pid(), event_message()}
| {type_report(), GroupLeader :: pid(), event_report()}.
%% {@link error_logger} event, either formatted or structured.
%%%---------------------------------------------------------------------------
%% @doc Install log handler to an event manager.
%%
%% The handler will be added with `indira_disk_h' name.
%%
%% `EventManager' will usually be `error_logger'.
%%
%% The error tuple mirrors init/1, i.e. it is returned when `File'
%% cannot be opened.
-spec install(pid() | atom(), file:filename()) ->
    ok | {error, file:posix() | badarg | system_limit}.

install(EventManager, File) ->
    gen_event:add_handler(EventManager, ?MODULE, [File]).

%% @doc Install log handler to an event manager.
%%
%% The handler will be added with `{indira_disk_h, Id}' name. This is for
%% adding more than one log handler to a single `EventManager'.
%%
%% `EventManager' will usually be `error_logger'.
-spec install(pid() | atom(), term(), file:filename()) ->
    ok | {error, file:posix() | badarg | system_limit}.

install(EventManager, Id, File) ->
    gen_event:add_handler(EventManager, {?MODULE, Id}, [File]).
%% @doc Remove from an event manager log handler installed with
%% {@link install/2}.
%%
%% `EventManager' will usually be `error_logger'.
%%
%% Returns `{error, module_not_found}' if no such handler is installed.
-spec remove(pid() | atom()) ->
    ok | {error, module_not_found}.

remove(EventManager) ->
    gen_event:delete_handler(EventManager, ?MODULE, []).

%% @doc Remove from an event manager log handler installed with
%% {@link install/3}.
%%
%% `EventManager' will usually be `error_logger'.
-spec remove(pid() | atom(), term()) ->
    ok | {error, module_not_found}.

remove(EventManager, Id) ->
    gen_event:delete_handler(EventManager, {?MODULE, Id}, []).
%% @doc Close old file and open new one in the log handler.
%% If the handler was not present, it will be added.
%%
%% `EventManager' will usually be `error_logger'.
%%
%% This function is intended for handlers added by {@link install/2} (or
%% with `indira_disk_h' as a handler name).
-spec reopen(pid() | atom(), file:filename()) ->
    ok | {error, file:posix() | badarg | system_limit}.

reopen(EventManager, File) ->
    Result = gen_event:call(EventManager, ?MODULE, {reopen, File}),
    case Result of
        ok ->
            ok;
        {error, bad_module} ->
            %% handler is not installed at all; install it instead
            gen_event:add_handler(EventManager, ?MODULE, [File]);
        {error, _Reason} = Error ->
            Error;
        {'EXIT', Reason} ->
            {error, {'EXIT', Reason}}
    end.
%% @doc Close old file and open new one in the log handler.
%% If the handler was not present, it will be added.
%%
%% `EventManager' will usually be `error_logger'.
%%
%% This function is intended for handlers added by {@link install/3} (or
%% with `{indira_disk_h, Id}' as a handler name).
-spec reopen(pid() | atom(), term(), file:filename()) ->
    ok | {error, file:posix() | badarg | system_limit}.

reopen(EventManager, Id, File) ->
    Result = gen_event:call(EventManager, {?MODULE, Id}, {reopen, File}),
    case Result of
        ok ->
            ok;
        {error, bad_module} ->
            %% handler is not installed at all; install it instead
            gen_event:add_handler(EventManager, {?MODULE, Id}, [File]);
        {error, _Reason} = Error ->
            Error;
        {'EXIT', Reason} ->
            {error, {'EXIT', Reason}}
    end.
%%%---------------------------------------------------------------------------
%%% gen_event callbacks
%%%---------------------------------------------------------------------------
%%----------------------------------------------------------
%% initialization/termination {{{
%% @private
%% @doc Initialize {@link gen_event} handler state by opening the log
%% file in raw append mode with delayed writes.
init([File] = _Args) ->
    case file:open(File, [append, raw, delayed_write]) of
        {ok, Handle} ->
            {ok, #state{filename = File, handle = Handle}};
        {error, _Reason} = Error ->
            Error
    end.
%% @private
%% @doc Clean up handler state by closing the log file opened in init/1.
%% (Note: this is a {@link gen_event} handler callback, not gen_server.)
terminate(_Arg, _State = #state{handle = Handle}) ->
    file:close(Handle),
    ok.
%% }}}
%%----------------------------------------------------------
%% communication {{{
%% @private
%% @doc Handle {@link gen_event:notify/2}.
%%
%% Formats the event and appends it to the log file. Transient
%% out-of-space conditions (`edquot', `enospc', `enomem') are ignored so
%% logging can resume later; any other write error removes this handler
%% from the event manager.
handle_event({LogType, _GroupLeader, LogData} = _Event,
             State = #state{handle = Handle}) ->
    case format_event(LogType, LogData) of
        {ok, Line} ->
            case file:write(Handle, [Line, $\n]) of
                ok -> {ok, State};
                {error, edquot} -> {ok, State};
                {error, enospc} -> {ok, State};
                {error, enomem} -> {ok, State};
                {error, _Reason} ->
                    %% previously this `remove_handler' was computed and
                    %% then discarded by an unconditional `{ok, State}',
                    %% so the handler kept running; return it so
                    %% gen_event actually removes the handler
                    remove_handler % TODO: log this?
            end;
        skip ->
            {ok, State}
    end;
%% unknown events
handle_event(_Event, State) ->
    {ok, State}.
%% @private
%% @doc Handle {@link gen_event:call/2}: `{reopen, File}' swaps the log
%% file; any other request yields `{error, unknown_call}'.
handle_call({reopen, NewFile} = _Request, State = #state{handle = OldHandle}) ->
    file:close(OldHandle),
    case file:open(NewFile, [append, raw, delayed_write]) of
        {error, Reason} ->
            % TODO: log this?
            {remove_handler, {error, Reason}};
        {ok, NewHandle} ->
            {ok, ok, State#state{filename = NewFile, handle = NewHandle}}
    end;
%% unknown calls
handle_call(_Request, State) ->
    {ok, {error, unknown_call}, State}.
%% @private
%% @doc Handle incoming messages.
%% This handler expects no raw messages; anything received is ignored.
%% unknown messages
handle_info(_Message, State) ->
    {ok, State}.
%% }}}
%%----------------------------------------------------------
%% code change {{{
%% @private
%% @doc Handle code change. No state conversion is performed.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% helpers
%%%---------------------------------------------------------------------------
%%----------------------------------------------------------
%% event formatting {{{
%% @doc Format {@link error_logger} event for writing in a log file.
%% Unknown event tags and unformattable payloads yield `skip'.
-spec format_event(type_message() | type_report(),
                   event_message() | event_report()) ->
    {ok, iolist()} | skip.

format_event(LogType, LogData) ->
    Timestamp = timestamp(),
    case level_type(LogType) of
        {error, badarg} ->
            skip;
        {Level, Type} ->
            Formatted = case Type of
                format -> format(LogData);
                report -> report(LogData)
            end,
            case Formatted of
                {ok, Process, Line} ->
                    {ok, [log_prefix(Timestamp, Level, Process), " ", Line]};
                {error, _Reason} ->
                    skip
            end
    end.
%% @doc Build a prefix for a log line:
%% `Timestamp Level [OSPid] ProcessName'.
-spec log_prefix(integer(), error | warning | info, pid() | atom()) ->
    iolist().

log_prefix(Time, Level, Process) when is_atom(Process) ->
    prefix(Time, Level, atom_to_list(Process));
log_prefix(Time, Level, Process) when is_pid(Process) ->
    prefix(Time, Level, pid_to_list(Process)).

%% @doc Assemble the common prefix parts around a stringified process name.
prefix(Time, Level, ProcessStr) ->
    [integer_to_list(Time), " ", atom_to_list(Level), " ",
     "[", os:getpid(), "] ", ProcessStr].
%% @doc Convert a tag to a log level and its type.
-spec level_type(type_message() | type_report()) ->
{Level, Type} | {error, badarg}
when Level :: error | warning | info,
Type :: format | report.
level_type(Tag) ->
    %% table of error_logger event tags -> {log level, payload kind};
    %% anything else is rejected with {error, badarg}
    case Tag of
        error          -> {error, format};
        error_report   -> {error, report};
        warning_msg    -> {warning, format};
        warning_report -> {warning, report};
        info_msg       -> {info, format};
        info_report    -> {info, report};
        _Unknown       -> {error, badarg}
    end.
%% @doc Fill a format string with data, making it a log line.
-spec format(event_message()) ->
{ok, pid() | atom(), iolist()} | {error, badarg | term()}.
format({Process, Format, Args} = _LogData) ->
    %% io_lib:format/2 raises error:badarg for a malformed format/args
    %% pair; convert that into an {error, _} result (try..of protects
    %% only the format call itself)
    try io_lib:format(Format, Args) of
        Line -> {ok, Process, Line}
    catch
        error:Reason -> {error, Reason}
    end;
format(_LogData) ->
    {error, badarg}.
%% @doc Format a report, making it a log line: the pretty-printed report
%% type followed by the pretty-printed report payload.
-spec report(event_report()) ->
    {ok, pid() | atom(), iolist()} | {error, badarg}.

report({Process, Type, Report} = _LogData) ->
    Print = fun(Term) -> io_lib:print(Term, 1, ?MAX_LINE_LENGTH, -1) end,
    {ok, Process, [Print(Type), " ", Print(Report)]};
report(_LogData) ->
    {error, badarg}.
%% @doc Get a log timestamp (seconds since the Unix epoch, microseconds
%% discarded).
-spec timestamp() ->
    integer().

timestamp() ->
    {MegaSecs, Secs, _MicroSecs} = os:timestamp(), % good enough for logging
    MegaSecs * 1000000 + Secs.
%% }}}
%%----------------------------------------------------------
%% format errors {{{
%% @doc Format an error reported by this module.
%% All errors produced here are file errors, so formatting is delegated
%% to {@link file:format_error/1}.
-spec format_error(term()) ->
    string().
format_error(Reason) ->
    file:format_error(Reason).
%% }}}
%%----------------------------------------------------------
%%%---------------------------------------------------------------------------
%%% vim:ft=erlang:foldmethod=marker
%% @author <NAME> <<EMAIL>>
%% @copyright 2013, <NAME>; 2018, elli-lib team
%%
%% @doc Binary String Helper Functions
%% @end
%%
%% Copyright 2013 <NAME>
%% Copyright 2018 elli-lib team
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(elli_bstr).
-export([
to_lower/1,
is_equal_ci/2,
lchr/1,
trim_left/1,
trim_right/1,
trim/1
]).
-define(IS_WS(C), (C =:= $\s orelse C=:=$\t orelse C=:= $\r orelse C =:= $\n)).
%%
%% Types
%%
-type ascii_char() :: 0..127.
%%
%% Functions
%%
%% @doc Convert ascii Bin to lowercase, byte by byte.
-spec to_lower(Bin :: binary()) -> binary().
to_lower(Bin) ->
    list_to_binary([lchr(Byte) || <<Byte>> <= Bin]).
%% @doc Compare two binary values.
%% Return true iff they are equal by a caseless compare.
-spec is_equal_ci(binary(), binary()) -> boolean().
is_equal_ci(Bin, Bin) ->
    %% Quick match with an Erlang pattern match
    true;
is_equal_ci(Bin1, Bin2) when is_binary(Bin1), is_binary(Bin2),
                             byte_size(Bin1) =:= byte_size(Bin2) ->
    %% Both binaries are the same length, do a byte-wise caseless check.
    %% byte_size/1 replaces the generic size/1 (the specific BIF is the
    %% recommended form for binaries).
    equal_ci(Bin1, Bin2);
is_equal_ci(_, _) ->
    false.
%% @doc convert character to lowercase.
-spec lchr(ascii_char()) -> ascii_char().
lchr(Chr) when Chr >= $A, Chr =< $Z ->
    %% ASCII upper- and lowercase letters differ by a fixed offset
    %% ($a - $A = 32)
    Chr + ($a - $A);
lchr(Chr) ->
    Chr.
%% @doc Remove leading whitespace from Bin
%% (whitespace = space, tab, carriage return, line feed).
-spec trim_left(binary()) -> binary().
trim_left(<<C, Rest/binary>>) when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
    trim_left(Rest);
trim_left(Bin) ->
    Bin.
%% @doc Remove trailing whitespace from Bin
%% (whitespace = space, tab, carriage return, line feed).
-spec trim_right(binary()) -> binary().
trim_right(<<>>) ->
    <<>>;
trim_right(Bin) ->
    %% match off the last byte; recurse while it is whitespace
    Front = byte_size(Bin) - 1,
    case Bin of
        <<Rest:Front/binary, C>> when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
            trim_right(Rest);
        _ ->
            Bin
    end.
%% @doc Remove leading and trailing whitespace.
%% Trailing whitespace is stripped first, then leading.
-spec trim(binary()) -> binary().
trim(Bin) ->
    trim_left(trim_right(Bin)).
%%
%% Helpers
%%
-spec equal_ci(binary(), binary()) -> boolean().
%% Caseless byte-by-byte comparison; both arguments are known to be the
%% same length (checked by is_equal_ci/2).
equal_ci(<<>>, <<>>) ->
    true;
equal_ci(<<C, Rest1/binary>>, <<C, Rest2/binary>>) ->
    %% fast path: bytes already identical
    equal_ci(Rest1, Rest2);
equal_ci(<<C1, Rest1/binary>>, <<C2, Rest2/binary>>) ->
    lchr(C1) =:= lchr(C2) andalso equal_ci(Rest1, Rest2).
%% -------------------------------------------------------------------
%%
%% cuttlefish_bytesize: complexity for parsing bytesizes
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish_bytesize).
-define(KILOBYTE, 1024).
-define(MEGABYTE, 1048576).
-define(GIGABYTE, 1073741824).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([parse/1, to_string/1]).
%% @doc turns an integer of bytes into a string.
%% Uses the largest unit that divides the value evenly, so no precision
%% is lost: 1024 -> "1KB", 1048576 -> "1MB", but 1025 -> "1025".
-spec to_string(integer()) -> string().
to_string(Bytez) ->
    KiloByte = 1024,
    MegaByte = KiloByte * 1024,
    GigaByte = MegaByte * 1024,
    if
        Bytez rem GigaByte =:= 0 -> integer_to_list(Bytez div GigaByte) ++ "GB";
        Bytez rem MegaByte =:= 0 -> integer_to_list(Bytez div MegaByte) ++ "MB";
        Bytez rem KiloByte =:= 0 -> integer_to_list(Bytez div KiloByte) ++ "KB";
        true                     -> integer_to_list(Bytez)
    end.
%% @doc the reverse of to_string/1. turns "1kb" or "1KB" into 1024.
%% Unit suffixes are matched case-insensitively, so mixed-case spellings
%% such as "1Kb" or "1kB" are accepted as well (previously only all-upper
%% or all-lower suffixes parsed; mixed case fell through to numerify and
%% produced a parse error).
-spec parse(string()) -> integer()|cuttlefish_error:error().
parse(String) ->
    case lists:reverse(String) of
        [B,K|BSize] when (B =:= $B orelse B =:= $b),
                         (K =:= $K orelse K =:= $k) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?KILOBYTE);
        [B,M|BSize] when (B =:= $B orelse B =:= $b),
                         (M =:= $M orelse M =:= $m) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?MEGABYTE);
        [B,G|BSize] when (B =:= $B orelse B =:= $b),
                         (G =:= $G orelse G =:= $g) ->
            bmult(cuttlefish_util:numerify(lists:reverse(BSize)), ?GIGABYTE);
        _NoUnitSuffix ->
            cuttlefish_util:numerify(String)
    end.
-spec bmult(number()|cuttlefish_error:error(), integer()) ->
    number()|cuttlefish_error:error().
%% Multiply a parsed quantity by a unit multiplier, passing parse errors
%% through untouched.
bmult({error, _Reason} = Error, _Multiplier) ->
    Error;
bmult(Quantity, Multiplier) ->
    Quantity * Multiplier.
-ifdef(TEST).
%% Round-trips the unit boundaries and the plain-integer fallback.
to_string_test() ->
    ?assertEqual("1KB", to_string(1024)),
    ?assertEqual("2KB", to_string(2048)),
    ?assertEqual("10MB", to_string(10485760)),
    ?assertEqual("1GB", to_string(1073741824)),
    ?assertEqual("20", to_string(20)),
    ok.

%% Upper- and lowercase unit suffixes, bare integers, and a trailing
%% garbage case that must surface numerify's parse error.
parse_test() ->
    ?assertEqual(1024, parse("1kb")),
    ?assertEqual(2048, parse("2KB")),
    ?assertEqual(10485760, parse("10mb")),
    ?assertEqual(10485760, parse("10MB")),
    ?assertEqual(1073741824, parse("1GB")),
    ?assertEqual(1073741824, parse("1gb")),
    ?assertEqual(20, parse("20")),
    ?assertEqual({error, {number_parse, "10MB10"}}, parse("10MB10kb")),
    ok.
-endif.
% Copyright 2018 <NAME>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
-module(ogonek_buildings).
-include("include/ogonek.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([definitions/0,
definitions_map/0,
get_definition/1,
get_building/2,
get_building_level/2,
get_building_max_level/2,
to_building_type/1,
try_building_type/1,
unlocked_buildings/2,
finish/3,
calculate_power/1,
calculate_workers/1,
calculate_power_workers/2,
apply_building_consumption/3,
has_requirements/2,
calculate_building_consumption/4,
calculate_building_production/1,
calculate_construction_duration/1,
calculate_construction_duration/2,
calculate_building_costs/1,
calculate_building_costs/2]).
-spec definitions() -> [bdef()].
definitions() ->
    %% building definitions live in the application environment; an
    %% absent key simply means no definitions are configured
    case application:get_env(buildings) of
        {ok, Buildings} -> Buildings;
        undefined -> []
    end.
-spec definitions_map() -> #{atom() => bdef()}.
definitions_map() ->
    %% index the configured definitions by building name
    maps:from_list([{Name, Def} || #bdef{name = Name} = Def <- definitions()]).
-spec get_definition(atom()) -> bdef() | error.
get_definition(Name) ->
    get_definition(Name, definitions()).

-spec get_definition(atom(), [bdef()]) -> bdef() | error.
%% Linear scan for the first definition whose name matches.
get_definition(_Name, []) ->
    error;
get_definition(Name, [Def | Rest]) ->
    case Def of
        #bdef{name = Name} -> Def;
        _Other -> get_definition(Name, Rest)
    end.
-spec get_building([building()], Type :: atom()) -> {ok, building()} | undefined.
%% @doc Find the first building of the given type, or `undefined'.
get_building(Buildings, Type) ->
    %% use the record field index expression instead of a hard-coded
    %% tuple position so the lookup cannot silently break if the
    %% #building{} record gains or loses fields
    case lists:keyfind(Type, #building.type, Buildings) of
        false -> undefined;
        Building -> {ok, Building}
    end.
-spec get_buildings_of_type([building()], Type :: atom()) -> [building()].
get_buildings_of_type(Buildings, Type) ->
    [Building || #building{type = T} = Building <- Buildings, T =:= Type].
-spec get_building_level([building()], Type :: atom()) -> integer().
%% A missing building counts as level 0.
get_building_level(Buildings, Type) ->
    case get_building(Buildings, Type) of
        {ok, #building{level = Level}} -> Level;
        undefined -> 0
    end.
-spec get_building_max_level([building()], Type :: atom()) -> integer().
%% Highest level among buildings of the given type; 0 when none exist.
get_building_max_level(Buildings, Type) ->
    case [Level || #building{type = T, level = Level} <- Buildings, T =:= Type] of
        [] -> 0;
        Levels -> lists:max(Levels)
    end.
-spec has_requirement([building()], requirement()) -> boolean().
%% Research requirements are validated elsewhere (see ogonek_research),
%% so they always pass from the buildings' point of view.
has_requirement(_Buildings, {research, _, _}) ->
    true;
has_requirement([], _Requirement) ->
    false;
has_requirement([#building{type = Name, level = Level} | _Bs], {building, Name, MinLevel})
  when Level >= MinLevel ->
    true;
has_requirement([#building{} | Bs], {building, _, _} = Requirement) ->
    has_requirement(Bs, Requirement).
-spec has_requirements([building()], [requirement()]) -> boolean().
%% True iff every requirement in the list is satisfied; see
%% has_requirement/2 (research requirements always pass here).
has_requirements(Buildings, Requirements) ->
    lists:all(fun(Req) -> has_requirement(Buildings, Req) end, Requirements).
-spec finish(bdef(), PlanetId :: binary(), Level :: integer()) -> ok.
%% Persist a freshly completed construction as a building row for the
%% given planet at the given level.
finish(#bdef{name=Def}, PlanetId, Level) ->
    Now = erlang:timestamp(),
    Building = #building{planet=PlanetId,
                         type=Def,
                         level=Level,
                         created=Now},
    % maybe this one should be managed via the planet manager
    ogonek_mongo:building_finish(Building).
-spec unlocked_buildings([building()], [research()]) -> [bdef()].
%% @doc Definitions that are not built yet but whose building and
%% research requirements are all satisfied.
unlocked_buildings(Buildings, Research) ->
    %% keyed on the #building.type record index instead of a hard-coded
    %% tuple position, which breaks silently if the record changes
    StillLocked = lists:filter(fun(#bdef{name=Name}) ->
                                       not(lists:keymember(Name, #building.type, Buildings))
                               end, definitions()),
    lists:filter(fun(#bdef{requirements=Rs}) ->
                         % meet research requirements
                         ogonek_research:has_requirements(Research, Rs) andalso
                         % and meet building requirements as well
                         has_requirements(Buildings, Rs)
                 end, StillLocked).
-spec to_building_type(binary()) -> atom().
to_building_type(TypeName) when is_binary(TypeName) ->
    % this looks scary but the valid list of building types
    % should be already existing via configuration initialization
    erlang:binary_to_existing_atom(TypeName, utf8).

-spec try_building_type(binary()) -> atom() | {error, invalid}.
try_building_type(Type) when is_binary(Type) ->
    try
        to_building_type(Type)
    catch
        %% binary_to_existing_atom/2 raises error:badarg for unknown
        %% atoms; the previous bare `_Error ->' clause only caught the
        %% throw class, so the badarg escaped to the caller
        error:badarg -> {error, invalid}
    end;
try_building_type(_Type) ->
    {error, invalid}.
-spec calculate_power([building()]) -> integer().
%% Net power balance: each building consumes power * level.
calculate_power(Buildings) ->
    Definitions = definitions_map(),
    Consume =
        fun(#building{type = Type, level = Level}, Acc) ->
                #bdef{power = Power} = maps:get(Type, Definitions),
                Acc - Power * Level
        end,
    lists:foldl(Consume, 0, Buildings).
-spec calculate_workers([building()]) -> integer().
%% Net worker balance: each building occupies workers * level.
calculate_workers(Buildings) ->
    Definitions = definitions_map(),
    Occupy =
        fun(#building{type = Type, level = Level}, Acc) ->
                #bdef{workers = Workers} = maps:get(Type, Definitions),
                Acc - Workers * Level
        end,
    lists:foldl(Occupy, 0, Buildings).
-spec calculate_power_workers([building()], [construction()]) -> {integer(), integer()}.
%% Combined power/worker balance of finished buildings plus ongoing
%% constructions (the latter count only with their positive costs).
calculate_power_workers(Buildings, Constructions) ->
    Defs = definitions_map(),
    %% finished buildings consume power/workers scaled by their level
    BuildingCost =
        fun(#building{type = Type, level = Level}, {Power, Workers}) ->
                #bdef{power = P, workers = W} = maps:get(Type, Defs),
                {Power - P * Level, Workers - W * Level}
        end,
    FromBuildings = lists:foldl(BuildingCost, {0, 0}, Buildings),
    %% ongoing constructions are charged with 'positive' costs only
    ConstructionCost =
        fun(#construction{building = Type}, {Power, Workers}) ->
                #bdef{power = P, workers = W} = maps:get(Type, Defs),
                {Power - max(P, 0), Workers - max(W, 0)}
        end,
    lists:foldl(ConstructionCost, FromBuildings, Constructions).
-define(OGONEK_CHEMICAL_FACTORY_PROD, 40).
-define(OGONEK_SMELTING_PLANT_PROD, 52).
-define(OGONEK_PLASTIC_FACTORY_PROD, 60).
-define(OGONEK_CONSUMPTION_FACTOR, 2).
-spec apply_building_consumption(Resources :: resources(), Utilization :: resources(), [building()]) -> resources().
%% @doc Apply refinery conversion to the given resources.
%%
%% Each refinery converts its base resource into a refined one:
%% smelting plant iron_ore -> titan, plastic factory oil -> pvc,
%% chemical factory h2o -> h2. Production is
%% `Level * BaseRate * Utilization%' and each produced unit consumes
%% ?OGONEK_CONSUMPTION_FACTOR (2) units of the base resource.
%%
%% NOTE(review): unlike calculate_building_consumption/4, this variant
%% applies no time factor and does not cap consumption at the available
%% base resource, so base resources can go negative here — confirm that
%% is intended.
apply_building_consumption(Resources, Utilization, Buildings) ->
    lists:foldl(
      fun(#building{type=chemical_factory, level=L}, R) ->
              Prod = L * ?OGONEK_CHEMICAL_FACTORY_PROD * Utilization#resources.h2 div 100,
              R#resources{h2=R#resources.h2 + Prod,
                          h2o=R#resources.h2o - Prod * ?OGONEK_CONSUMPTION_FACTOR};
         (#building{type=plastic_factory, level=L}, R) ->
              Prod = L * ?OGONEK_PLASTIC_FACTORY_PROD * Utilization#resources.pvc div 100,
              R#resources{pvc=R#resources.pvc + Prod,
                          oil=R#resources.oil - Prod * ?OGONEK_CONSUMPTION_FACTOR};
         (#building{type=smelting_plant, level=L}, R) ->
              Prod = L * ?OGONEK_SMELTING_PLANT_PROD * Utilization#resources.titan div 100,
              R#resources{titan=R#resources.titan + Prod,
                          iron_ore=R#resources.iron_ore - Prod * ?OGONEK_CONSUMPTION_FACTOR};
         (_OtherBuilding, R) -> R
      end, Resources, Buildings).
-spec calculate_building_consumption(Resources :: resources(), Utilization :: resources(), [building()], TimeFactor :: float()) -> resources().
%% @doc Time-scaled refinery conversion, capped by available resources.
%%
%% For each refinery, production is
%% `round(Level * BaseRate * TimeFactor * Utilization%)' and every
%% produced unit consumes ?OGONEK_CONSUMPTION_FACTOR (2) units of the
%% base resource. When the base resource would run out, production is
%% limited to what the remaining base resource covers and the base
%% resource is drained to zero (see the eunit tests below).
calculate_building_consumption(Resources, Utilization, Buildings, TimeFactor) ->
    % TODO: we need a proper distribution from level to production
    lists:foldl(
      % hydrogen
      fun(#building{type=chemical_factory, level=L}, R) ->
              Util = Utilization#resources.h2 / 100,
              Prod = round(L * ?OGONEK_CHEMICAL_FACTORY_PROD * TimeFactor * Util),
              ToConsume = Prod * ?OGONEK_CONSUMPTION_FACTOR,
              Available = R#resources.h2o,
              if ToConsume > Available ->
                      ToProd = Available div ?OGONEK_CONSUMPTION_FACTOR,
                      R#resources{h2=R#resources.h2 + ToProd,
                                  h2o=0};
                 true ->
                      R#resources{h2=R#resources.h2 + Prod,
                                  h2o=R#resources.h2o - ToConsume}
              end;
         % pvc
         (#building{type=plastic_factory, level=L}, R) ->
              Util = Utilization#resources.pvc / 100,
              Prod = round(L * ?OGONEK_PLASTIC_FACTORY_PROD * TimeFactor * Util),
              ToConsume = Prod * ?OGONEK_CONSUMPTION_FACTOR,
              Available = R#resources.oil,
              if ToConsume > Available ->
                      ToProd = Available div ?OGONEK_CONSUMPTION_FACTOR,
                      R#resources{pvc=R#resources.pvc + ToProd,
                                  oil=0};
                 true ->
                      R#resources{pvc=R#resources.pvc + Prod,
                                  oil=R#resources.oil - ToConsume}
              end;
         % titan
         (#building{type=smelting_plant, level=L}, R) ->
              Util = Utilization#resources.titan / 100,
              Prod = round(L * ?OGONEK_SMELTING_PLANT_PROD * TimeFactor * Util),
              ToConsume = Prod * ?OGONEK_CONSUMPTION_FACTOR,
              Available = R#resources.iron_ore,
              if ToConsume > Available ->
                      ToProd = Available div ?OGONEK_CONSUMPTION_FACTOR,
                      R#resources{titan=R#resources.titan + ToProd,
                                  iron_ore=0};
                 true ->
                      R#resources{titan=R#resources.titan + Prod,
                                  iron_ore=R#resources.iron_ore - ToConsume}
              end;
         (_OtherBuilding, R) -> R
      end, Resources, Buildings).
-spec calculate_building_production([building()]) -> resources().
%% Sum up raw resource production of all buildings; extended ("ext_")
%% extractors yield three times the base rate.
calculate_building_production(Buildings) ->
    % TODO: we need a proper distribution from level to production
    lists:foldl(fun add_production/2, ogonek_resources:empty(), Buildings).

%% @doc Add a single building's resource production to the accumulator.
add_production(#building{type=ore_mine, level=L}, R) ->
    R#resources{iron_ore=R#resources.iron_ore + L};
add_production(#building{type=ext_ore_mine, level=L}, R) ->
    R#resources{iron_ore=R#resources.iron_ore + L * 3};
add_production(#building{type=gold_mine, level=L}, R) ->
    R#resources{gold=R#resources.gold + L};
add_production(#building{type=ext_gold_mine, level=L}, R) ->
    R#resources{gold=R#resources.gold + L * 3};
add_production(#building{type=water_rig, level=L}, R) ->
    R#resources{h2o=R#resources.h2o + L};
add_production(#building{type=ext_water_rig, level=L}, R) ->
    R#resources{h2o=R#resources.h2o + L * 3};
add_production(#building{type=oil_rig, level=L}, R) ->
    R#resources{oil=R#resources.oil + L};
add_production(#building{type=ext_oil_rig, level=L}, R) ->
    R#resources{oil=R#resources.oil + L * 3};
add_production(#building{type=uranium_mine, level=L}, R) ->
    R#resources{uranium=R#resources.uranium + L};
add_production(#building{type=kyanite_mine, level=L}, R) ->
    R#resources{kyanite=R#resources.kyanite + L};
add_production(_OtherBuilding, R) ->
    R.
-spec calculate_construction_duration(building()) -> integer().
calculate_construction_duration(#building{type = Type, level = Level}) ->
    calculate_construction_duration(Type, Level).

-spec calculate_construction_duration(atom(), integer()) -> integer().
%% Base duration per type plus a level-dependent term, divided by the
%% global acceleration factor.
calculate_construction_duration(Type, Level) ->
    % TODO: we need a proper distribution from level to duration
    Base = base_construction_duration(Type),
    PerLevel = 50 * math:pow(Level, 1.5),
    round((Base + PerLevel) / ?OGONEK_DEFAULT_ACCELERATION).
% TODO: rather move into buildings configuration
% so we can't forget this for some building
%% Base construction duration per building type (before the level term
%% and acceleration applied in calculate_construction_duration/2).
%% Deliberately has no catch-all: an unknown type crashes.
-spec base_construction_duration(atom()) -> integer().
base_construction_duration(construction_center) -> 15000;
base_construction_duration(research_lab) -> 8000;
base_construction_duration(oil_rig) -> 1000;
base_construction_duration(oil_tank) -> 800;
base_construction_duration(water_rig) -> 1000;
base_construction_duration(water_tank) -> 800;
base_construction_duration(ore_mine) -> 1000;
base_construction_duration(ore_depot) -> 800;
base_construction_duration(gold_mine) -> 1000;
base_construction_duration(gold_depot) -> 800;
base_construction_duration(uranium_mine) -> 1500;
base_construction_duration(uranium_depot) -> 1000;
base_construction_duration(kyanite_mine) -> 1750;
base_construction_duration(kyanite_depot) -> 1100;
base_construction_duration(plastic_factory) -> 1450;
base_construction_duration(pvc_depot) -> 1000;
base_construction_duration(smelting_plant) -> 1450;
base_construction_duration(titan_depot) -> 1000;
base_construction_duration(chemical_factory) -> 1450;
base_construction_duration(h2_depot) -> 1000;
base_construction_duration(power_plant) -> 1000;
base_construction_duration(wind_turbine) -> 1200;
base_construction_duration(hydro_plant) -> 3500;
base_construction_duration(apartment) -> 600;
base_construction_duration(apartment_block) -> 1100;
base_construction_duration(apartment_complex) -> 4000;
base_construction_duration(ext_oil_rig) -> 3000;
base_construction_duration(ext_water_rig) -> 3000;
base_construction_duration(ext_ore_mine) -> 3000;
base_construction_duration(ext_gold_mine) -> 3000;
base_construction_duration(space_shipyard) -> 9000;
base_construction_duration(weapon_manufacture) -> 8000.
-spec calculate_building_costs(Building :: building()) -> bdef() | error.
%% Level-scaled costs for the building's type; `error' for unknown types.
calculate_building_costs(#building{type=Type, level=Level}) ->
    case ogonek_buildings:get_definition(Type) of
        error -> error;
        Definition -> calculate_building_costs(Definition, Level)
    end.
-spec calculate_building_costs(Definition :: bdef(), Level :: integer()) -> bdef().
%% Scale every resource cost of the definition by a level factor of
%% `max(Level^1.2 * 0.5, 1.0)', rounding to whole units.
calculate_building_costs(Definition, Level) ->
    % TODO: we need a proper distribution from level to costs
    Factor = max(math:pow(Level, 1.2) * 0.5, 1.0),
    Scale = fun(Amount) -> round(Amount * Factor) end,
    Definition#bdef{
        iron_ore = Scale(Definition#bdef.iron_ore),
        gold     = Scale(Definition#bdef.gold),
        h2o      = Scale(Definition#bdef.h2o),
        oil      = Scale(Definition#bdef.oil),
        h2       = Scale(Definition#bdef.h2),
        uranium  = Scale(Definition#bdef.uranium),
        pvc      = Scale(Definition#bdef.pvc),
        titan    = Scale(Definition#bdef.titan),
        kyanite  = Scale(Definition#bdef.kyanite)
    }.
%%
%% TESTS
%%
-ifdef(TEST).
%% Exercises calculate_building_consumption/4 for each refinery type:
%% no base resources, ample base resources, capped (insufficient) base
%% resources, no refineries at all, and 50% utilization.
calculate_building_consumption_test_() ->
    PId = <<"planet">>,
    Now = erlang:timestamp(),
    Empty = ogonek_resources:empty(),
    Hour = 1.0,
    ThreeHours = 3.0,
    Utilization = Empty#resources{titan=100, pvc=100, h2=100},
    HalfUtil = Empty#resources{titan=50, pvc=50, h2=50},
    Smelting1 = #building{planet=PId, type=smelting_plant, level=1, created=Now},
    Plastic1 = #building{planet=PId, type=plastic_factory, level=1, created=Now},
    Chemic1 = #building{planet=PId, type=chemical_factory, level=1, created=Now},
    [% no consumption/production whatsoever
     ?_assertEqual(Empty, calculate_building_consumption(Empty, Utilization, [Smelting1], Hour)),
     ?_assertEqual(Empty, calculate_building_consumption(Empty, Utilization, [Plastic1], Hour)),
     ?_assertEqual(Empty, calculate_building_consumption(Empty, Utilization, [Chemic1], Hour)),
     % enough base resources
     ?_assertEqual(Empty#resources{iron_ore=1000 - 2 * ?OGONEK_SMELTING_PLANT_PROD, titan=?OGONEK_SMELTING_PLANT_PROD},
                   calculate_building_consumption(Empty#resources{iron_ore=1000}, Utilization, [Smelting1], Hour)),
     ?_assertEqual(Empty#resources{oil=1000 - 2 * ?OGONEK_PLASTIC_FACTORY_PROD, pvc=?OGONEK_PLASTIC_FACTORY_PROD},
                   calculate_building_consumption(Empty#resources{oil=1000}, Utilization, [Plastic1], Hour)),
     ?_assertEqual(Empty#resources{h2o=1000 - 2 * ?OGONEK_CHEMICAL_FACTORY_PROD, h2=?OGONEK_CHEMICAL_FACTORY_PROD},
                   calculate_building_consumption(Empty#resources{h2o=1000}, Utilization, [Chemic1], Hour)),
     % *not* enough base resources
     ?_assertEqual(Empty#resources{iron_ore=0, titan=?OGONEK_SMELTING_PLANT_PROD},
                   calculate_building_consumption(Empty#resources{iron_ore=2 * ?OGONEK_SMELTING_PLANT_PROD}, Utilization, [Smelting1], ThreeHours)),
     ?_assertEqual(Empty#resources{oil=0, pvc=?OGONEK_PLASTIC_FACTORY_PROD},
                   calculate_building_consumption(Empty#resources{oil=2 * ?OGONEK_PLASTIC_FACTORY_PROD}, Utilization, [Plastic1], ThreeHours)),
     ?_assertEqual(Empty#resources{h2o=0, h2=?OGONEK_CHEMICAL_FACTORY_PROD},
                   calculate_building_consumption(Empty#resources{h2o=2 * ?OGONEK_CHEMICAL_FACTORY_PROD}, Utilization, [Chemic1], ThreeHours)),
     % no consumption at all
     ?_assertEqual(Empty#resources{h2o=1000},
                   calculate_building_consumption(Empty#resources{h2o=1000}, Utilization, [], Hour)),
     % half utilization
     ?_assertEqual(Empty#resources{h2o=1000 - ?OGONEK_CHEMICAL_FACTORY_PROD, h2=?OGONEK_CHEMICAL_FACTORY_PROD div 2},
                   calculate_building_consumption(Empty#resources{h2o=1000}, HalfUtil, [Chemic1], Hour)),
     ?_assertEqual(Empty#resources{iron_ore=1000 - ?OGONEK_SMELTING_PLANT_PROD, titan=?OGONEK_SMELTING_PLANT_PROD div 2},
                   calculate_building_consumption(Empty#resources{iron_ore=1000}, HalfUtil, [Smelting1], Hour)),
     ?_assertEqual(Empty#resources{oil=1000 - ?OGONEK_PLASTIC_FACTORY_PROD, pvc=?OGONEK_PLASTIC_FACTORY_PROD div 2},
                   calculate_building_consumption(Empty#resources{oil=1000}, HalfUtil, [Plastic1], Hour))
    ].

%% Checks has_requirements/2: an empty requirement list passes, a
%% missing building fails, and a present building at sufficient level
%% passes.
has_requirements_test_() ->
    PId = <<"planet">>,
    Now = erlang:timestamp(),
    Smelting1 = #building{planet=PId, type=smelting_plant, level=1, created=Now},
    Plastic1 = #building{planet=PId, type=plastic_factory, level=1, created=Now},
    Chemic1 = #building{planet=PId, type=chemical_factory, level=1, created=Now},
    [?_assertEqual(true, has_requirements([Smelting1], [])),
     ?_assertEqual(false, has_requirements([Smelting1, Plastic1, Chemic1], [{building, oil_rig, 1}])),
     ?_assertEqual(true, has_requirements([Smelting1, Plastic1, Chemic1], [{building, smelting_plant, 1}]))
    ].
-endif.
-module(tc_bicommitment).
-export([
%% bivariate commitment API
degree/1,
eval/3,
row/2,
cmp/2,
reveal/1,
verify_poly/3,
verify_point/4,
validate_point/4,
serialize/1,
deserialize/1
]).
-type bicommitment() :: reference().
-export_type([bicommitment/0]).
-spec degree(C :: bicommitment()) -> non_neg_integer().
%% @doc Degree of the committed bivariate polynomial.
degree(C) ->
    erlang_tc:degree_bivar_commitment(C).

-spec eval(C :: bicommitment(), X :: integer(), Y :: integer()) -> tc_g1:g1().
%% @doc Evaluate the commitment at point (X, Y).
eval(C, X, Y) ->
    erlang_tc:eval_bivar_commitment(C, X, Y).

-spec row(C :: bicommitment(), X :: integer()) -> tc_commitment:commitment().
%% @doc The univariate commitment for row X of the bivariate commitment.
row(C, X) ->
    erlang_tc:row_bivar_commitment(C, X).

-spec cmp(C1 :: bicommitment(), C2 :: bicommitment()) -> boolean().
%% @doc Equality check for two bivariate commitments.
cmp(C1, C2) ->
    erlang_tc:cmp_bivar_commitment(C1, C2).

-spec reveal(C :: bicommitment()) -> string().
%% @doc Human-readable representation of the commitment.
reveal(C) ->
    erlang_tc:reveal_bivar_commitment(C).
-spec verify_poly(
    BiCommitment :: bicommitment(),
    RowPoly :: tc_poly:poly(),
    VerifierID :: non_neg_integer()
) -> boolean().
%% Check that RowPoly's commitment equals row VerifierID of the
%% bivariate commitment.
verify_poly(BiCommitment, RowPoly, VerifierID) ->
    ExpectedRow = erlang_tc:row_bivar_commitment(BiCommitment, VerifierID),
    ActualCommit = erlang_tc:commitment_poly(RowPoly),
    erlang_tc:cmp_commitment(ActualCommit, ExpectedRow).
-spec verify_point(
    BiCommitment :: bicommitment(),
    RowPoly :: tc_poly:poly(),
    SenderID :: non_neg_integer(),
    VerifierID :: non_neg_integer()
) -> boolean().
%% Check that RowPoly evaluated at SenderID matches the bivariate
%% commitment evaluated at (VerifierID, SenderID).
verify_point(BiCommitment, RowPoly, SenderID, VerifierID) ->
    Val = tc_poly:eval(RowPoly, SenderID),
    ValG1 = tc_g1_affine:mul(tc_g1_affine:one(), Val),
    Committed = erlang_tc:eval_bivar_commitment(BiCommitment, VerifierID, SenderID),
    erlang_tc:cmp_g1(Committed, ValG1).
-spec validate_point(BiCommitment :: bicommitment(),
                     SenderID :: non_neg_integer(),
                     VerifierID :: non_neg_integer(),
                     Point :: non_neg_integer() | tc_fr:fr()) -> boolean().
%% Accepts the point either as a field element reference or as a plain
%% integer (which is first lifted into the field via tc_fr:into/1).
validate_point(BiCommitment, SenderID, VerifierID, Point) when is_reference(Point) ->
    do_validate_point(BiCommitment, SenderID, VerifierID, Point);
validate_point(BiCommitment, SenderID, VerifierID, Point) when is_integer(Point) ->
    do_validate_point(BiCommitment, SenderID, VerifierID, tc_fr:into(Point)).

%% Shared core: compare the commitment evaluated at
%% (VerifierID, SenderID) against G1 * Fr.
do_validate_point(BiCommitment, SenderID, VerifierID, Fr) ->
    ValG1 = tc_g1_affine:mul(tc_g1_affine:one(), Fr),
    erlang_tc:cmp_g1(erlang_tc:eval_bivar_commitment(BiCommitment, VerifierID, SenderID), ValG1).
%% @doc Serialize a bivariate commitment to a binary.
-spec serialize(C :: bicommitment()) -> binary().
serialize(C) ->
    erlang_tc:serialize_bivar_commitment(C).

%% @doc Reconstruct a bivariate commitment from serialize/1 output.
%% (Dataset metadata fused onto the final line has been removed; it
%% made the last clause syntactically invalid.)
-spec deserialize(B :: binary()) -> bicommitment().
deserialize(B) ->
    erlang_tc:deserialize_bivar_commitment(B).
%%%-------------------------------------------------------------------
%%% Copyright (c) 2007-2011 Gemini Mobile Technologies, Inc. All rights reserved.
%%% Copyright (c) 2013-2015 Basho Technologies, Inc. All rights reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
%% @doc Consistent hashing library. Also known as "random slicing".
%%
%% This code was originally from the Hibari DB source code at
%% [https://github.com/hibari]
-module(hums_chash).
%% TODO items:
%%
%% 1. Refactor to use bigints instead of floating point numbers. The
%% ?SMALLEST_SIGNIFICANT_FLOAT_SIZE macro below doesn't allow as
%% much wiggle-room for making really small hashing range
%% definitions.
%% Ranges no larger than this are dropped when building lookup structures.
-define(SMALLEST_SIGNIFICANT_FLOAT_SIZE, 0.1e-12).
%% Exclusive upper bound of a 20-byte SHA-1 digest read as an integer.
-define(SHA_MAX, (1 bsl (20*8))).
%% -compile(export_all).
-export([make_float_map/1, make_float_map/2,
sum_map_weights/1,
make_tree/1,
query_tree/2,
hash_binary_via_float_map/2,
hash_binary_via_float_tree/2,
pretty_with_integers/2,
pretty_with_integers/3]).
-export([make_demo_map1/0, make_demo_map2/0]).
-export([zzz_usage_details/0]). % merely to give EDoc a hint of our intent
-type owner_name() :: term().
%% Owner for a range on the unit interval. We are agnostic about its
%% type.
-type weight() :: non_neg_integer().
%% For this library, a weight is an integer which specifies the
%% capacity of a "owner" relative to other owners. For example, if
%% owner A with a weight of 10, and if owner B has a weight of 20,
%% then B will be assigned twice as much of the unit interval as A.
-type float_map() :: [{owner_name(), float()}].
%% A float map subdivides the unit interval, starting at 0.0, to
%% partitions that are assigned to various owners. The sum of all
%% floats must be exactly 1.0 (or close enough for floating point
%% purposes).
-opaque float_tree() :: gb_trees:tree(float(), owner_name()).
%% We can't use gb_trees:tree() because 'nil' (the empty tree) is
%% never valid in our case. But teaching Dialyzer that is difficult.
-type owner_int_range() :: {owner_name(), non_neg_integer(), non_neg_integer()}.
%% Used when "prettying" a float map.
-type owner_weight() :: {owner_name(), weight()}.
-type owner_weight_list() :: [owner_weight()].
%% A owner_weight_list is a definition of brick assignments over the
%% unit interval [0.0, 1.0]. The sum of all floats must be 1.0. For
%% example, [{{br1,nd1}, 0.25}, {{br2,nd1}, 0.5}, {{br3,nd1}, 0.25}].
-export_type([float_map/0, float_tree/0]).
%% @doc Create a float map, based on a basic owner weight list.
-spec make_float_map(owner_weight_list()) -> float_map().
make_float_map(NewOwnerWeights) ->
%% Equivalent to rebalancing from an empty (all-unused) old map.
make_float_map([], NewOwnerWeights).
%% @doc Create a float map, based on an older float map and a new weight
%% list.
%%
%% The weights in the new weight list may be different than (or the
%% same as) whatever weights were used to make the older float map.
-spec make_float_map(float_map(), owner_weight_list()) -> float_map().
make_float_map([], NewOwnerWeights) ->
%% Fresh map: normalize the weights and carve them out of a single
%% fully-unused unit interval.
Sum = add_all_weights(NewOwnerWeights),
DiffMap = [{Ch, Wt/Sum} || {Ch, Wt} <- NewOwnerWeights],
make_float_map2([{unused, 1.0}], DiffMap, NewOwnerWeights);
make_float_map(OldFloatMap, NewOwnerWeights) ->
NewSum = add_all_weights(NewOwnerWeights),
%% Normalize to unit interval
%% NewOwnerWeights2 = [{Ch, Wt / NewSum} || {Ch, Wt} <- NewOwnerWeights],
%% Reconstruct old owner weights (will be normalized to unit interval)
SumOldFloatsDict =
lists:foldl(fun({Ch, Wt}, OrdDict) ->
orddict:update_counter(Ch, Wt, OrdDict)
end, orddict:new(), OldFloatMap),
OldOwnerWeights = orddict:to_list(SumOldFloatsDict),
OldSum = add_all_weights(OldOwnerWeights),
OldChs = [Ch || {Ch, _} <- OldOwnerWeights],
NewChs = [Ch || {Ch, _} <- NewOwnerWeights],
OldChsOnly = OldChs -- NewChs,
%% Mark any space in by a deleted owner as unused.
OldFloatMap2 = lists:map(
fun({Ch, Wt} = ChWt) ->
case lists:member(Ch, OldChsOnly) of
true ->
{unused, Wt};
false ->
ChWt
end
end, OldFloatMap),
%% Create a diff map of changing owners and added owners
%% (positive diff = owner grows, negative diff = owner shrinks).
DiffMap = lists:map(fun({Ch, NewWt}) ->
case orddict:find(Ch, SumOldFloatsDict) of
{ok, OldWt} ->
{Ch, (NewWt / NewSum) -
(OldWt / OldSum)};
error ->
{Ch, NewWt / NewSum}
end
end, NewOwnerWeights),
make_float_map2(OldFloatMap2, DiffMap, NewOwnerWeights).
%% Apply the diff map, then tidy the result: fold unused slivers into
%% their predecessors and merge adjacent ranges of the same owner.
make_float_map2(OldFloatMap, DiffMap, _NewOwnerWeights) ->
    combine_neighbors(
      collapse_unused_in_float_map(
        apply_diffmap(DiffMap, OldFloatMap))).
%% Apply a diff map to a float map: first free the space given up by
%% shrinking owners, then hand the freed space to growing owners.
apply_diffmap(DiffMap, FloatMap) ->
    Shrinking = [{Ch, abs(Diff)} || {Ch, Diff} <- DiffMap, Diff < 0],
    Growing = [D || {_Ch, Diff} = D <- DiffMap, Diff > 0],
    iter_diffmap_add(Growing, iter_diffmap_subtract(Shrinking, FloatMap)).
%% Sum the weights of an owner weight list; always returns a float.
add_all_weights(OwnerWeights) ->
    lists:sum([0.0 | [Wt || {_Ch, Wt} <- OwnerWeights]]).
%% Subtract each shrink-diff from the float map, left to right.
iter_diffmap_subtract(Diffs, FloatMap) ->
    lists:foldl(fun({Ch, Diff}, Acc) ->
                        apply_diffmap_subtract(Ch, Diff, Acc)
                end, FloatMap, Diffs).
%% Add each grow-diff to the float map, left to right.
iter_diffmap_add(Diffs, FloatMap) ->
    lists:foldl(fun({Ch, Diff}, Acc) ->
                        apply_diffmap_add(Ch, Diff, Acc)
                end, FloatMap, Diffs).
%% Walk the float map and carve `Diff' worth of space away from owner
%% `Ch', marking every reclaimed range as unused. Numeric == is used
%% deliberately: weights may mix ints and floats.
apply_diffmap_subtract(Ch, Diff, [{Ch, Wt} | Rest]) when Wt == Diff ->
    [{unused, Wt} | Rest];
apply_diffmap_subtract(Ch, Diff, [{Ch, Wt} | Rest]) when Wt > Diff ->
    [{Ch, Wt - Diff}, {unused, Diff} | Rest];
apply_diffmap_subtract(Ch, Diff, [{Ch, Wt} | Rest]) ->
    %% Wt < Diff: consume this whole range and keep subtracting.
    [{unused, Wt} | apply_diffmap_subtract(Ch, Diff - Wt, Rest)];
apply_diffmap_subtract(Ch, Diff, [Other | Rest]) ->
    [Other | apply_diffmap_subtract(Ch, Diff, Rest)];
apply_diffmap_subtract(_Ch, _Diff, []) ->
    [].
%% Walk the float map and grant `Diff' worth of unused space to owner
%% `Ch'. Numeric == is used deliberately (weights may mix ints/floats).
apply_diffmap_add(Ch, Diff, [{unused, Wt} | Rest]) when Wt == Diff ->
    [{Ch, Wt} | Rest];
apply_diffmap_add(Ch, Diff, [{unused, Wt} | Rest]) when Wt > Diff ->
    [{Ch, Diff}, {unused, Wt - Diff} | Rest];
apply_diffmap_add(Ch, Diff, [{unused, Wt} | Rest]) ->
    %% Wt < Diff: claim this whole unused range and keep adding.
    [{Ch, Wt} | apply_diffmap_add(Ch, Diff - Wt, Rest)];
apply_diffmap_add(Ch, Diff, [Other | Rest]) ->
    [Other | apply_diffmap_add(Ch, Diff, Rest)];
apply_diffmap_add(_Ch, _Diff, []) ->
    [].
%% Merge adjacent ranges owned by the same owner into a single range.
combine_neighbors(FloatMap) ->
    lists:reverse(combine_neighbors(FloatMap, [])).

%% Accumulator version: fold a run of equal owners into the head of Acc.
combine_neighbors([{Ch, WtB} | Rest], [{Ch, WtA} | Acc]) ->
    combine_neighbors(Rest, [{Ch, WtA + WtB} | Acc]);
combine_neighbors([Entry | Rest], Acc) ->
    combine_neighbors(Rest, [Entry | Acc]);
combine_neighbors([], Acc) ->
    Acc.
%% Merge each unused range into the range immediately before it. A
%% map that is entirely unused collapses to the degenerate
%% [{unused, _}] form.
collapse_unused_in_float_map([{Owner, Wt}, {unused, UnusedWt} | Rest]) ->
    collapse_unused_in_float_map([{Owner, Wt + UnusedWt} | Rest]);
collapse_unused_in_float_map([{unused, _}] = Degenerate) ->
    Degenerate; % Degenerate case only
collapse_unused_in_float_map([Entry | Rest]) ->
    [Entry | collapse_unused_in_float_map(Rest)];
collapse_unused_in_float_map([]) ->
    [].
%% Convert a float map into a list of {CumulativeBoundary, Owner}
%% pairs. Slivers at or below ?SMALLEST_SIGNIFICANT_FLOAT_SIZE are
%% dropped (QuickCheck found a bug without this weeding).
chash_float_map_to_nextfloat_list(FloatMap) when length(FloatMap) > 0 ->
    Significant = [Pair || {_Owner, Size} = Pair <- FloatMap,
                           Size > ?SMALLEST_SIGNIFICANT_FLOAT_SIZE],
    {_Total, Boundaries} =
        lists:foldl(fun({Owner, Size}, {Sum, Acc}) ->
                            Next = Sum + Size,
                            {Next, [{Next, Owner} | Acc]}
                    end, {0, []}, Significant),
    lists:reverse(Boundaries).
%% Build a balanced gb_tree keyed by "next boundary" float. A sentinel
%% at 42.0 (far beyond 1.0) repeats the last owner so every query has
%% a successor (QuickCheck found a bug without this catch-all).
chash_nextfloat_list_to_gb_tree([]) ->
    gb_trees:balance(gb_trees:from_orddict([]));
chash_nextfloat_list_to_gb_tree(NextFloatList) ->
    {_LastBoundary, LastOwner} = lists:last(NextFloatList),
    WithSentinel = NextFloatList ++ [{42.0, LastOwner}],
    gb_trees:balance(gb_trees:from_orddict(orddict:from_list(WithSentinel))).
%% @doc Find the {Boundary, Owner} entry with the smallest boundary
%% strictly greater than X, by walking the raw gb_trees node
%% structure {Key, Val, Left, Right}.
-spec chash_gb_next(float(), float_tree()) -> {float(), owner_name()}.
chash_gb_next(X, {_Size, Root}) ->
    chash_gb_next1(X, Root).

chash_gb_next1(X, {Key, Val, Smaller, _Bigger}) when X < Key ->
    %% The answer is this node unless a smaller qualifying key exists
    %% in the left subtree.
    case chash_gb_next1(X, Smaller) of
        nil ->
            {Key, Val};
        Found ->
            Found
    end;
chash_gb_next1(X, {Key, _Val, _Smaller, Bigger}) when X >= Key ->
    chash_gb_next1(X, Bigger);
chash_gb_next1(_X, nil) ->
    nil.
%% @doc Not used directly, but can give a developer an idea of how well
%% chash_float_map_to_nextfloat_list will do for a given value of Max.
%%
%% For example:
%% <verbatim>
%% NewFloatMap = make_float_map([{unused, 1.0}],
%% [{a,100}, {b, 100}, {c, 10}]),
%% ChashMap = chash_scale_to_int_interval(NewFloatMap, 100),
%% io:format("QQQ: int int = ~p\n", [ChashMap]),
%% -> [{a,1,47},{b,48,94},{c,94,100}]
%% </verbatim>
%%
%% Interpretation: out of the 100 slots:
%% <ul>
%% <li> 'a' uses the slots 1-47 </li>
%% <li> 'b' uses the slots 48-94 </li>
%% <li> 'c' uses the slots 95-100 </li>
%% </ul>
chash_scale_to_int_interval(NewFloatMap, Max) ->
    chash_scale_to_int_interval(NewFloatMap, 0, Max).

%% @type nextfloat_list() = list({float(), brick()}). A nextfloat_list
%% differs from a float_map in two respects: 1) nextfloat_list contains
%% tuples with the brick name in 2nd position, 2) the float() at each
%% position I_n > I_m, for all n, m such that n > m.
%% For example, a nextfloat_list of the float_map example above,
%% [{0.25, {br1, nd1}}, {0.75, {br2, nd1}}, {1.0, {br3, nd1}].

%% Scale each float range onto integer slots; the final owner absorbs
%% everything up to Max.
chash_scale_to_int_interval([{Owner, _Wt}], Cur, Max) ->
    [{Owner, Cur, Max}];
chash_scale_to_int_interval([{Owner, Wt} | Rest], Cur, Max) ->
    Slots = trunc(Wt * Max),
    [{Owner, Cur + 1, Cur + Slots}
     | chash_scale_to_int_interval(Rest, Cur + Slots, Max)].
%%%%%%%%%%%%%
%% @doc Make a pretty/human-friendly version of a float map that describes
%% integer ranges between 1 and `Scale'.
-spec pretty_with_integers(float_map(), integer()) -> [owner_int_range()].
pretty_with_integers(Map, Scale) ->
%% Delegates to chash_scale_to_int_interval/2, which starts at slot 0.
chash_scale_to_int_interval(Map, Scale).
%% @doc Make a pretty/human-friendly version of a float map (based
%% upon a float map created from `OldWeights' and `NewWeights') that
%% describes integer ranges between 1 and `Scale'.
-spec pretty_with_integers(owner_weight_list(), owner_weight_list(),integer())->
[owner_int_range()].
pretty_with_integers(OldWeights, NewWeights, Scale) ->
%% Rebalance OldWeights -> NewWeights first, then scale the result.
chash_scale_to_int_interval(
make_float_map(make_float_map(OldWeights),
NewWeights),
Scale).
%% @doc Create a float tree, which is the rapid lookup data structure
%% for consistent hash queries.
%%
%% The tree maps each cumulative boundary to its owner; see
%% query_tree/2 for lookups.
-spec make_tree(float_map()) -> float_tree().
make_tree(Map) ->
chash_nextfloat_list_to_gb_tree(
chash_float_map_to_nextfloat_list(Map)).
%% @doc Low-level query of a float tree: look up the owner of the
%% (floating point) point within the unit interval.
-spec query_tree(float(), float_tree()) -> {float(), owner_name()}.
query_tree(Point, Tree) when is_float(Point), Point >= 0.0, Point =< 1.0 ->
    chash_gb_next(Point, Tree).
%% @doc Create a sample float map.
-spec make_demo_map1() -> float_map().
make_demo_map1() ->
{_, Res} = make_demo_map1_i(),
Res.
%% Build the demo map in four steps: start with a/b/c, add d/e,
%% remove b, then add the triple-weight 'giant'. Returns both the
%% final weight list and the resulting float map.
make_demo_map1_i() ->
Fail1 = {b, 100},
L1 = [{a, 100}, Fail1, {c, 100}],
L2 = L1 ++ [{d, 100}, {e, 100}],
L3 = L2 -- [Fail1],
L4 = L3 ++ [{giant, 300}],
{L4, lists:foldl(fun(New, Old) -> make_float_map(Old, New) end,
make_float_map(L1), [L2, L3, L4])}.
%% @doc Create a sample float map.
-spec make_demo_map2() -> float_map().
make_demo_map2() ->
%% Extend demo map 1 by adding h, i and j, one step at a time.
{L0, _} = make_demo_map1_i(),
L1 = L0 ++ [{h, 100}],
L2 = L1 ++ [{i, 100}],
L3 = L2 ++ [{j, 100}],
lists:foldl(fun(New, Old) -> make_float_map(Old, New) end,
make_demo_map1(), [L1, L2, L3]).
%% @doc Create a human-friendly summary of a float map.
%%
%% The two parts of the summary are: a per-owner total of the unit
%% interval range(s) owned by each owner, and a total sum of all
%% per-owner ranges (which should be 1.0 but is not enforced).
-spec sum_map_weights(float_map()) ->
    {{per_owner, float_map()}, {weight_sum, float()}}.
sum_map_weights(Map) ->
    PerOwner = sum_map_weights(lists:sort(Map), undefined, 0.0)
               -- [{undefined, 0.0}],
    Total = lists:sum([Wt || {_Owner, Wt} <- PerOwner]),
    {{per_owner, PerOwner}, {weight_sum, Total}}.

%% Fold over a sorted map, keeping a running total per owner. The
%% {undefined, 0.0} seed entry is stripped by the caller above.
sum_map_weights([{Owner, Wt} | Rest], Owner, RunningTotal) ->
    sum_map_weights(Rest, Owner, RunningTotal + Wt);
sum_map_weights([{NewOwner, Wt} | Rest], PrevOwner, PrevTotal) ->
    [{PrevOwner, PrevTotal} | sum_map_weights(Rest, NewOwner, Wt)];
sum_map_weights([], PrevOwner, PrevTotal) ->
    [{PrevOwner, PrevTotal}].
%% @doc Query a float map with a binary (inefficient).
%%
%% Builds a fresh tree on every call; prefer make_tree/1 plus
%% hash_binary_via_float_tree/2 for repeated queries.
-spec hash_binary_via_float_map(binary(), float_map()) ->
{float(), owner_name()}.
hash_binary_via_float_map(Key, Map) ->
Tree = make_tree(Map),
%% Map the 20-byte SHA-1 digest of Key onto the unit interval.
<<Int:(20*8)/unsigned>> = crypto:hash(sha, Key),
Float = Int / ?SHA_MAX,
query_tree(Float, Tree).
%% @doc Query a float tree with a binary.
%%
%% Maps the 20-byte SHA-1 digest of `Key' onto the unit interval,
%% then finds the owning range in `Tree'.
-spec hash_binary_via_float_tree(binary(), float_tree()) ->
{float(), owner_name()}.
hash_binary_via_float_tree(Key, Tree) ->
<<Int:(20*8)/unsigned>> = crypto:hash(sha, Key),
Float = Int / ?SHA_MAX,
query_tree(Float, Tree).
%%%%% @doc Various usage examples, see source code below this function
%%%%% for full details.
%%
%% Fixes: dataset metadata fused onto the final `ok.' line removed;
%% example shell sessions referenced `machi_chash:' although this
%% module is `hums_chash'; "0-10,1000" corrected to "0-10,000".
zzz_usage_details() ->
%% %% Make a map. See the code for make_demo_map1() for the order of
%% %% additions & deletions. Here's a brief summary of the 4 steps.
%% %%
%% %% * 'a' through 'e' are weighted @ 100.
%% %% * 'giant' is weighted @ 300.
%% %% * 'b' is removed at step #3.
%% 40> M1 = hums_chash:make_demo_map1().
%% [{a,0.09285714285714286},
%% {giant,0.10714285714285715},
%% {d,0.026190476190476153},
%% {giant,0.10714285714285715},
%% {a,0.04999999999999999},
%% {giant,0.04999999999999999},
%% {d,0.04999999999999999},
%% {giant,0.050000000000000044},
%% {d,0.06666666666666671},
%% {e,0.009523809523809434},
%% {giant,0.05714285714285716},
%% {c,0.14285714285714285},
%% {giant,0.05714285714285716},
%% {e,0.13333333333333341}]
%% %% Map M1 onto the interval of integers 0-10,000
%% %%
%% %% output = list({SZ_name::term(), Start::integer(), End::integer()})
%% 41> hums_chash:pretty_with_integers(M1, 10*1000).
%% [{a,1,928},
%% {giant,929,1999},
%% {d,2000,2260},
%% {giant,2261,3331},
%% {a,3332,3830},
%% {giant,3831,4329},
%% {d,4330,4828},
%% {giant,4829,5328},
%% {d,5329,5994},
%% {e,5995,6089},
%% {giant,6090,6660},
%% {c,6661,8088},
%% {giant,8089,8659},
%% {e,8659,10000}]
%% %% Sum up all of the weights, make sure it's what we expect:
%% 55> hums_chash:sum_map_weights(M1).
%% {{per_owner,[{a,0.14285714285714285},
%% {c,0.14285714285714285},
%% {d,0.14285714285714285},
%% {e,0.14285714285714285},
%% {giant,0.42857142857142866}]},
%% {weight_sum,1.0}}
%% %% Make a tree, then query it
%% %% (Hash::float(), tree()) -> {NextLargestBoundary::float(), szone()}
%% 58> T1 = hums_chash:make_tree(M1).
%% 59> hums_chash:query_tree(0.2555, T1).
%% {0.3333333333333333,giant}
%% 60> hums_chash:query_tree(0.3555, T1).
%% {0.3833333333333333,a}
%% 61> hums_chash:query_tree(0.4555, T1).
%% {0.4833333333333333,d}
%% %% How about hashing a bunch of strings and see what happens?
%% 74> Key1 = "Hello, world!".
%% "Hello, world!"
%% 75> [{K, element(2, hums_chash:hash_binary_via_float_map(K, M1))} || K <- [lists:sublist(Key1, X) || X <- lists:seq(1, length(Key1))]].
%% [{"H",giant},
%% {"He",giant},
%% {"Hel",giant},
%% {"Hell",e},
%% {"Hello",e},
%% {"Hello,",giant},
%% {"Hello, ",e},
%% {"Hello, w",e},
%% {"Hello, wo",giant},
%% {"Hello, wor",d},
%% {"Hello, worl",giant},
%% {"Hello, world",e},
%% {"Hello, world!",d}]
    ok.
-module(akvs_kv).
%% ETS-backed key/value store with an explicit state handle.
-export([new/1, dispose/1, set/3, get/2, get/3, del/2]).
-export_type([error/0, key/0, value/0, state/0]).
% the error type for all our functions is the same:
% a tuple with the `error' atom as first item and a three item tuple as
% second item,
% where the first is an atom identifying the type of error for other code,
% the second is a human readable string-like value describing the error and the
% third is more context about the error
-type error() :: {error, {atom(), iolist(), map()}}.
-type key() :: binary().
-type value() :: any().
% we don't want other modules to know/care about the internal structure of
% the state type
-opaque state() :: map().
%% @doc Create a new instance of a key value store. `_Opts' is
%% currently unused; the store is backed by a private ETS set table.
-spec new(map()) -> {ok, state()} | error().
new(_Opts) ->
    Table = ets:new(akvs_kv, [set, {write_concurrency, false},
                              {read_concurrency, false}]),
    {ok, #{table => Table}}.
%% @doc Release the resources associated with a previously created kv
%% store by deleting its backing ETS table.
-spec dispose(state()) -> ok | error().
dispose(#{table := Table}) ->
    case ets:delete(Table) of
        true -> ok
    end.
%% @doc Store `Value' under `Key', overwriting any previous value.
-spec set(state(), key(), value()) -> {ok, state()} | error().
set(#{table := Table} = State, Key, Value) ->
    true = ets:insert(Table, {Key, Value}),
    {ok, State}.
%% @doc Fetch the value stored under `Key'; returns a not_found error
%% tuple when the key is absent.
-spec get(state(), key()) -> {ok, value()} | error().
get(#{table := Table}, Key) ->
    case ets:lookup(Table, Key) of
        [{Key, Value}] ->
            {ok, Value};
        [] ->
            {error, {not_found, "Key not found", #{key => Key}}}
    end.
%% @doc Fetch the value stored under `Key', or `DefaultValue' when
%% the key is absent.
-spec get(state(), key(), value()) -> {ok, value()} | error().
get(#{table := Table}, Key, DefaultValue) ->
    case ets:lookup(Table, Key) of
        [{Key, Value}] ->
            {ok, Value};
        [] ->
            {ok, DefaultValue}
    end.
%% @doc Remove the value stored under `Key'; succeeds even when the
%% key is absent (ets:delete/2 is a no-op then).
-spec del(state(), key()) -> {ok, state()} | error().
del(#{table := Table} = State, Key) ->
    true = ets:delete(Table, Key),
    {ok, State}.
%%-------------------------------------------------------------------
%%
%% Copyright (c) 2016, <NAME> <<EMAIL>>
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%-------------------------------------------------------------------
%% @doc This modules provides utility functions for load balancing using the
%% best of two random choices. It is designed for use with `sbroker' and
%% `sregulator' processes using the `sbetter_meter' meter. However any OTP
%% process can use this module to do load balancing using the `via' naming
%% format if the process is registered with and updates the `sbetter_server'.
%%
%% To use `sbetter' with `via' use names of the form
%% `{via, sbetter, {{Broker, ...}, ask | ask_r}}'. Where `{Broker, ...}' is
%% a tuple containing
%% `pid() | atom() | {global, any()} | {via, module(), any()} | {atom(), node()}'.
%% The process with the small value/shortest sojourn time of two random
%% processes for the `ask' (or `ask_r') queue will be called. The sojourn time
%% includes the message queue delay and the time spent waiting in the internal
%% queue.
%%
%% Comparing values/sojourn times requires `ets' lookups. However it is not
%% required to carry out the lookups for every request to get well balanced
%% queues. To only compare two random choices 20% of the time and use
%% `sscheduler' the remaining 80% use `scheduler_ask' and `scheduler_ask_r', or
%% to only compare two random choices 20% of the time and choose a random
%% process the reamining 80% use `rand_ask' and `rand_ask_r'. This ratio is
%% chosen as the majority of the gain in choosing two random choices can be
%% captured by giving 20% of requests a choice. See section 4.5 of the reference
%% for more information.
%%
%% It is not possible to locally look up the pid of a process with name
%% `{atom(), node()}' if the node is not the local node. Therefore a registered
%% name on another node is not supported for use with this module.
%%
%% If a chosen process is not local the call may exit with `{badnode, node()}'.
%%
%% If a chosen process is not registered with the `sbetter_server' the call
%% may exit with `{nobetter, pid()}'. The `sbetter_meter' will register with the
%% server. However other methods can be used to register and update the
%% `sbetter_server'. Registering with the `sbetter_server' must be done with
%% `sbetter_server:register/3' and not using
%% `start_link({via, sbetter, ...}, ...)'.
%%
%% @reference <NAME>, The Power of Two Choices in Randomized
%% Load Balancing, 1996.
%% @see sbetter_meter
%% @see sbetter_server
-module(sbetter).
%% public API
-export([whereis_name/1]).
-export([send/2]).
%% types
%% Selection strategies: plain two-random-choice (ask/ask_r), plus
%% variants that compare two choices only 20% of the time and
%% otherwise fall back to scheduler-id or random selection.
-type method() ::
ask | ask_r | scheduler_ask | scheduler_ask_r | rand_ask | rand_ask_r.
-export_type([method/0]).
%% @doc Lookup a pid from a tuple of pids using the best of two random
%% choices for the queue (or possibly using the current scheduler id
%% or a plain random choice, depending on `Method'). If no process is
%% associated with the chosen name, returns `undefined'.
%%
%% Exits with `{badnode, Node}' or `{nobetter, Pid}' when a chosen
%% process cannot be used (see the module documentation).
-spec whereis_name({Processes, Method}) ->
    Process | undefined when
      Processes :: tuple(),
      Method :: method(),
      Process :: pid().
whereis_name({{}, _}) ->
    undefined;
whereis_name({{Name}, _}) ->
    %% Only one candidate: no comparison needed, just resolve it.
    case sbroker_gen:whereis(Name) of
        Pid when is_pid(Pid) ->
            Pid;
        undefined ->
            undefined;
        {_, Node} ->
            exit({badnode, Node})
    end;
whereis_name({Processes, scheduler_ask}) when is_tuple(Processes) ->
    resolve_result(scheduler_whereis(Processes, ask));
whereis_name({Processes, scheduler_ask_r}) when is_tuple(Processes) ->
    resolve_result(scheduler_whereis(Processes, ask_r));
whereis_name({Processes, rand_ask}) when is_tuple(Processes) ->
    resolve_result(rand_whereis(Processes, ask));
whereis_name({Processes, rand_ask_r}) when is_tuple(Processes) ->
    resolve_result(rand_whereis(Processes, ask_r));
whereis_name({Processes, Key}) when is_tuple(Processes) ->
    resolve_result(better_whereis(Processes, Key)).

%% All lookup helpers share the same result convention: a pid or
%% `undefined' is returned as-is, anything else is an exit reason.
%% (Previously this case expression was copy-pasted five times.)
resolve_result(Pid) when is_pid(Pid) ->
    Pid;
resolve_result(undefined) ->
    undefined;
resolve_result(Error) ->
    exit(Error).
%% @doc Send `Msg' to a process chosen from `Processes' with `Method'
%% (see whereis_name/1). Returns `ok' when a process could be chosen,
%% otherwise exits with `noproc'.
-spec send({Processes, Method}, Msg) ->
    ok when
      Processes :: tuple(),
      Method :: method(),
      Msg :: any().
send(Name, Msg) ->
    case whereis_name(Name) of
        Pid when is_pid(Pid) ->
            Pid ! Msg,
            ok;
        undefined ->
            exit({noproc, {?MODULE, send, [Name, Msg]}})
    end.
%% Internal
scheduler_whereis(Processes, Key) ->
Size = tuple_size(Processes),
case scheduler_pick(Size) of
scheduler ->
sscheduler:whereis_name(Processes);
{A, B} ->
ProcA = element(A, Processes),
ProcB = element(B, Processes),
compare(info(ProcA, Key), info(ProcB, Key))
end.
%% Compare two random choices for `Key' some of the time; otherwise
%% resolve one random element directly.
rand_whereis(Processes, Key) ->
    case rand_pick(tuple_size(Processes)) of
        {IndexA, IndexB} ->
            compare(info(element(IndexA, Processes), Key),
                    info(element(IndexB, Processes), Key));
        Index ->
            rand_whereis(element(Index, Processes))
    end.
%% Resolve a single name; remote registered names ({Name, Node}) are
%% not supported and become a badnode error value.
rand_whereis(Name) ->
    case sbroker_gen:whereis(Name) of
        {_, Node} ->
            {badnode, Node};
        PidOrUndefined ->
            PidOrUndefined
    end.
%% Always compare two random choices for `Key'.
better_whereis(Processes, Key) ->
    {IndexA, IndexB} = pick(tuple_size(Processes)),
    compare(info(element(IndexA, Processes), Key),
            info(element(IndexB, Processes), Key)).
%% 1-in-5 chance of returning an index pair for comparison; otherwise
%% defer to scheduler-based selection.
scheduler_pick(Size) ->
    Pairs = Size * (Size - 1),
    Hash = erlang:phash2({self(), make_ref()}, 5 * Pairs),
    if
        Hash < Pairs -> pick(Hash, Size);
        true -> scheduler
    end.
%% 1-in-5 chance of returning an index pair for comparison; otherwise
%% a single random index in 1..Size.
rand_pick(Size) ->
    Pairs = Size * (Size - 1),
    Hash = erlang:phash2({self(), make_ref()}, 5 * Pairs),
    if
        Hash < Pairs -> pick(Hash, Size);
        true -> (Hash rem Size) + 1
    end.
%% Map a hash in [0, Size*(Size-1)) onto an ordered pair of distinct
%% indexes in 1..Size.
pick(Hash, Size) ->
    A = (Hash div Size) + 1,
    B = (Hash div (Size - 1)) + 1,
    if
        A =:= B ->
            %% A == B can only happen when A < Size, and the first
            %% element ranges over 1..Size-1, so adding 1 keeps the
            %% distribution even.
            {A + 1, B};
        true ->
            {A, B}
    end.

%% Pick a random pair of distinct indexes in 1..Size.
pick(Size) ->
    Pairs = Size * (Size - 1),
    pick(erlang:phash2({self(), make_ref()}, Pairs), Size).
%% Resolve a process and pair it with its stored value for `Key'.
%% Only local pids can be looked up in the sbetter_server table; any
%% remote pid or remote registered name yields a badnode error value.
info(Process, Key) ->
    case sbroker_gen:whereis(Process) of
        undefined ->
            undefined;
        Pid when is_pid(Pid), node(Pid) =:= node() ->
            {lookup(Pid, Key), Pid};
        Pid when is_pid(Pid) ->
            {badnode, node(Pid)};
        {_, Node} ->
            {badnode, Node}
    end.
%% Fetch the stored value for Pid/Key; `nobetter' when the process is
%% not registered with the sbetter_server (the ets lookup raises
%% badarg in that case).
lookup(Pid, Key) ->
    try
        sbetter_server:lookup(Pid, Key)
    catch
        error:badarg ->
            nobetter
    end.
%% Choose the process with the smaller sojourn value. Integer values
%% beat error values (note: `Int < Atom' is always true in Erlang term
%% order, which this relies on); when neither side has an integer,
%% `undefined' wins, then the left-hand error is propagated.
%% (Dataset metadata fused onto the final line has been removed; it
%% made the last clause syntactically invalid.)
compare({ValueA, Pid}, {ValueOrError, _})
  when is_integer(ValueA), ValueA < ValueOrError ->
    Pid;
compare(_, {ValueB, Pid}) when is_integer(ValueB) ->
    Pid;
compare({ValueA, Pid}, _) when is_integer(ValueA) ->
    Pid;
compare(undefined, _) ->
    undefined;
compare(_, undefined) ->
    undefined;
compare(Error, _) ->
    Error.
%% @doc The newton_iteration module implements the computation of
%% roots of real functions using Newton's method.
-module(newton_iteration).
%% iterate/5 is the only public entry point.
-export([iterate/5]).
%% @doc Runs Newton's iteration
%%
%% `F' is an anonymous function of one real variable and `Fp' is the
%% derivative of `F'.
%%
%% `StartValue' is the first value used in the iteration process.
%% Usually, this is a guess of where the wanted root of `F' may be
%% located.
%%
%% `MaxIterationDifference' and `MaxIterations' are used to terminate
%% the iteration process.
%%
%% If the absolute value of the difference between two consecutive
%% values produced by the iteration process is at most
%% `MaxIterationDifference', then `iterate/5' returns `{ok, Xk}'
%% where `Xk' is the latest value produced by the iteration.
%%
%% If the absolute value of the difference between two consecutive
%% values after `MaxIterations' is still larger than
%% `MaxIterationDifference', then `iterate/5' returns `error'.
%%
%% == Examples ==
%%
%% The root of the identity function `fun(X) -> X end' is `0'.
%%
%% ```
%% 1> newton_iteration:iterate(
%% 1> fun(X) -> X end,
%% 1> fun(_X) -> 1 end,
%% 1> 1.0,
%% 1> 1.0e-9,
%% 1> 4
%% 1> ).
%% {ok,0.0}
%% '''
%%
%% The roots of the quadratic function `fun(X) -> X * X - 4 end' are
%% `2' and `-2' but `4' iterations are not sufficient to compute a
%% root with the required accuracy.
%%
%% ```
%% 1> F = fun(X) -> X * X - 4 end.
%% #Fun<erl_eval.44.97283095>
%% 2> Fp = fun(X) -> 2 * X end.
%% #Fun<erl_eval.44.97283095>
%% 3> newton_iteration:iterate(F, Fp, 4.0, 1.0e-9, 4).
%% error
%% 4> newton_iteration:iterate(F, Fp, 4.0, 1.0e-9, 8).
%% {ok,2.0}
%% '''
%% Public entry point: start the loop with an iteration count of 0.
%% (Dataset metadata fused onto the final line has been removed; it
%% made the last clause syntactically invalid.)
iterate(F, Fp, StartValue, MaxIterationDifference, MaxIterations) ->
    iterate(F, Fp, StartValue, MaxIterationDifference, MaxIterations, 0).

%% Internal loop carrying the iteration count. Gives up with `error'
%% once more than MaxIterations steps have been taken.
iterate(_F, _Fp, _Prev, _MaxDiff, MaxIterations, Count) when Count > MaxIterations ->
    error;
iterate(F, Fp, Prev, MaxDiff, MaxIterations, Count) ->
    %% Newton step: x_{k+1} = x_k - f(x_k) / f'(x_k).
    Next = Prev - F(Prev) / Fp(Prev),
    case abs(Next - Prev) =< MaxDiff of
        true ->
            {ok, Next};
        false ->
            iterate(F, Fp, Next, MaxDiff, MaxIterations, Count + 1)
    end.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2006-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% This is from chapter 3, Syntax Components, of RFC 3986:
%%
%% The generic URI syntax consists of a hierarchical sequence of
%% components referred to as the scheme, authority, path, query, and
%% fragment.
%%
%% URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
%%
%% hier-part = "//" authority path-abempty
%% / path-absolute
%% / path-rootless
%% / path-empty
%%
%% The scheme and path components are required, though the path may be
%% empty (no characters). When authority is present, the path must
%% either be empty or begin with a slash ("/") character. When
%% authority is not present, the path cannot begin with two slash
%% characters ("//"). These restrictions result in five different ABNF
%% rules for a path (Section 3.3), only one of which will match any
%% given URI reference.
%%
%% The following are two example URIs and their component parts:
%%
%% foo://example.com:8042/over/there?name=ferret#nose
%% \_/ \______________/\_________/ \_________/ \__/
%% | | | | |
%% scheme authority path query fragment
%% | _____________________|__
%% / \ / \
%% urn:example:animal:ferret:nose
%%
%% scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
%% authority = [ userinfo "@" ] host [ ":" port ]
%% userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
%%
%%
-module(http_uri).
%% Parsing/encoding helpers for RFC 3986 URIs (see header comment).
-export([parse/1, parse/2,
scheme_defaults/0,
encode/1, decode/1]).
-export_type([scheme/0, default_scheme_port_number/0]).
%%%=========================================================================
%%% API
%%%=========================================================================
-type scheme() :: atom().
-type default_scheme_port_number() :: pos_integer().
%% @doc Default port numbers for the URI schemes this module knows
%% about out of the box (overridable via the scheme_defaults option).
-spec scheme_defaults() ->
	[{scheme(), default_scheme_port_number()}].
scheme_defaults() ->
    [{http,  80},
     {https, 443},
     {ftp,   21},
     {ssh,   22},
     {sftp,  22},
     {tftp,  69}].
%% Parse an absolute URI using the default options.
parse(AbsURI) ->
parse(AbsURI, []).
%% Parse an absolute URI into its component tuple. `Opts' may carry
%% `scheme_defaults', `scheme_validation_fun' and `fragment' options.
parse(AbsURI, Opts) ->
    case parse_scheme(AbsURI, Opts) of
        {error, Reason} ->
            {error, Reason};
        {Scheme, DefaultPort, Rest} ->
            %% try/catch instead of old-style `catch': we only want to
            %% turn a crash in the rest-parser into a malformed_url
            %% error, without losing the error class.
            try parse_uri_rest(Scheme, DefaultPort, Rest, Opts) of
                {ok, Result} ->
                    {ok, Result};
                {error, Reason} ->
                    {error, {Reason, Scheme, AbsURI}};
                _ ->
                    {error, {malformed_url, Scheme, AbsURI}}
            catch
                _:_ ->
                    {error, {malformed_url, Scheme, AbsURI}}
            end
    end.
%% The set of characters that must be percent-encoded in a URI.
reserved() ->
    sets:from_list([$;, $:, $@, $&, $=, $+, $,, $/, $?,
                    $#, $[, $], $<, $>, $\", ${, $}, $|, %"
                    $\\, $', $^, $%, $\s]).
%% Percent-encode every reserved character in `URI'. Accepts and
%% returns either a list or a binary.
encode(URI) when is_list(URI) ->
    Reserved = reserved(),
    lists:flatmap(fun(Char) -> uri_encode(Char, Reserved) end, URI);
encode(URI) when is_binary(URI) ->
    Reserved = reserved(),
    << <<(uri_encode_binary(Char, Reserved))/binary>> || <<Char>> <= URI >>.
%% Percent-decode `String' (list or binary); `+' decodes to a space.
decode(String) when is_binary(String) ->
    do_decode_binary(String);
decode(String) when is_list(String) ->
    do_decode(String).
%% Decode a percent-encoded list, one character at a time.
do_decode([$+ | Rest]) ->
    [$\s | do_decode(Rest)];
do_decode([$%, HexHi, HexLo | Rest]) ->
    [hex2dec(HexHi) * 16 + hex2dec(HexLo) | do_decode(Rest)];
do_decode([Char | Rest]) ->
    [Char | do_decode(Rest)];
do_decode([]) ->
    [].
%% Decode a percent-encoded binary, one byte at a time.
do_decode_binary(<<$+, Rest/bits>>) ->
    <<$\s, (do_decode_binary(Rest))/binary>>;
do_decode_binary(<<$%, Hex:2/binary, Rest/bits>>) ->
    Byte = binary_to_integer(Hex, 16),
    <<Byte, (do_decode_binary(Rest))/binary>>;
do_decode_binary(<<Byte, Rest/bits>>) ->
    <<Byte, (do_decode_binary(Rest))/binary>>;
do_decode_binary(<<>>) ->
    <<>>.
%%%========================================================================
%%% Internal functions
%%%========================================================================
%% Pick the scheme-defaults list from `Opts', falling back to the
%% built-in defaults.
which_scheme_defaults(Opts) ->
    case lists:keysearch(scheme_defaults, 1, Opts) of
        {value, {scheme_defaults, SchemeDefaults}} ->
            SchemeDefaults;
        false ->
            scheme_defaults()
    end.
%% Split off the scheme (everything up to the first ":"), validate it,
%% and attach the default port for that scheme (or no_default_port).
parse_scheme(AbsURI, Opts) ->
case split_uri(AbsURI, ":", {error, no_scheme}, 1, 1) of
{error, no_scheme} ->
{error, no_scheme};
{SchemeStr, Rest} ->
case extract_scheme(SchemeStr, Opts) of
{error, Error} ->
{error, Error};
{ok, Scheme} ->
SchemeDefaults = which_scheme_defaults(Opts),
case lists:keysearch(Scheme, 1, SchemeDefaults) of
{value, {Scheme, DefaultPort}} ->
{Scheme, DefaultPort, Rest};
false ->
{Scheme, no_default_port, Rest}
end
end
end.
%% Validate (optionally) and atomise the scheme string.
%% Fix: the validated branch previously called list_to_atom/1 directly,
%% which crashes for binary schemes even though the module accepts binary
%% URIs elsewhere (parse_uri_rest/4 has binary clauses). Both branches now
%% go through to_atom/1, which handles lists and binaries.
%% NOTE(review): list_to_atom on unvalidated input can exhaust the atom
%% table if schemes come from untrusted sources — consider
%% list_to_existing_atom; confirm against callers before changing.
extract_scheme(Str, Opts) ->
    case lists:keysearch(scheme_validation_fun, 1, Opts) of
	{value, {scheme_validation_fun, Fun}} when is_function(Fun) ->
	    case Fun(Str) of
		valid ->
		    {ok, to_atom(http_util:to_lower(Str))};
		{error, Error} ->
		    {error, Error}
	    end;
	_ ->
	    {ok, to_atom(http_util:to_lower(Str))}
    end.
%% Convert a scheme in list or binary form to an atom.
to_atom(Scheme) when is_binary(Scheme) ->
    binary_to_atom(Scheme, unicode);
to_atom(Scheme) when is_list(Scheme) ->
    list_to_atom(Scheme).
%% Parse everything after "scheme:". The two clauses are mirror images:
%% one for binary URIs, one for list URIs. The part after "//" is split
%% progressively: authority | path | query | fragment, then the authority
%% into userinfo@host:port. Returns a 6-tuple, or a 7-tuple including the
%% fragment when the `fragment' option is set to true.
parse_uri_rest(Scheme, DefaultPort, <<"//", URIPart/binary>>, Opts) ->
    %% Authority ends at the first of "/", "?" or "#".
    {Authority, PathQueryFragment} =
        split_uri(URIPart, "[/?#]", {URIPart, <<"">>}, 1, 0),
    %% Path ends at the first of "?" or "#".
    {RawPath, QueryFragment} =
        split_uri(PathQueryFragment, "[?#]", {PathQueryFragment, <<"">>}, 1, 0),
    {Query, Fragment} =
        split_uri(QueryFragment, "#", {QueryFragment, <<"">>}, 1, 0),
    %% Optional "userinfo@" prefix of the authority.
    {UserInfo, HostPort} = split_uri(Authority, "@", {<<"">>, Authority}, 1, 1),
    {Host, Port} = parse_host_port(Scheme, DefaultPort, HostPort, Opts),
    %% An empty path is normalised to "/".
    Path = path(RawPath),
    case lists:keyfind(fragment, 1, Opts) of
        {fragment, true} ->
            {ok, {Scheme, UserInfo, Host, Port, Path, Query, Fragment}};
        _ ->
            {ok, {Scheme, UserInfo, Host, Port, Path, Query}}
    end;
parse_uri_rest(Scheme, DefaultPort, "//" ++ URIPart, Opts) ->
    %% List-flavoured copy of the binary clause above; keep the two in sync.
    {Authority, PathQueryFragment} =
        split_uri(URIPart, "[/?#]", {URIPart, ""}, 1, 0),
    {RawPath, QueryFragment} =
        split_uri(PathQueryFragment, "[?#]", {PathQueryFragment, ""}, 1, 0),
    {Query, Fragment} =
        split_uri(QueryFragment, "#", {QueryFragment, ""}, 1, 0),
    {UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}, 1, 1),
    {Host, Port} = parse_host_port(Scheme, DefaultPort, HostPort, Opts),
    Path = path(RawPath),
    case lists:keyfind(fragment, 1, Opts) of
        {fragment, true} ->
            {ok, {Scheme, UserInfo, Host, Port, Path, Query, Fragment}};
        _ ->
            {ok, {Scheme, UserInfo, Host, Port, Path, Query}}
    end.
%% In this version of the function, we no longer need
%% the Scheme argument, but just in case...
%% Split "host[:port]" into {Host, Port}. A leading "[" marks an IPv6
%% literal, whose closing "]" separates host from the optional ":port".
%% When no port is present, DefaultPort is used (which may be the atom
%% no_default_port — int_port/1 then throws).
parse_host_port(_Scheme, DefaultPort, <<"[", HostPort/binary>>, Opts) -> %ipv6
    {Host, ColonPort} = split_uri(HostPort, "\\]", {HostPort, <<"">>}, 1, 1),
    %% The ipv6_host_with_brackets option controls whether "[...]" is kept.
    Host2 = maybe_ipv6_host_with_brackets(Host, Opts),
    {_, Port} = split_uri(ColonPort, ":", {<<"">>, DefaultPort}, 0, 1),
    {Host2, int_port(Port)};
parse_host_port(_Scheme, DefaultPort, "[" ++ HostPort, Opts) -> %ipv6
    %% List-flavoured copy of the binary IPv6 clause above.
    {Host, ColonPort} = split_uri(HostPort, "\\]", {HostPort, ""}, 1, 1),
    Host2 = maybe_ipv6_host_with_brackets(Host, Opts),
    {_, Port} = split_uri(ColonPort, ":", {"", DefaultPort}, 0, 1),
    {Host2, int_port(Port)};
parse_host_port(_Scheme, DefaultPort, HostPort, _Opts) ->
    %% Plain host name or IPv4 literal: split at the first ":".
    {Host, Port} = split_uri(HostPort, ":", {HostPort, DefaultPort}, 1, 1),
    {Host, int_port(Port)}.
%% Split UriPart at the first match of the SplitChar regex.
%% SkipLeft/SkipRight control whether the separator is kept on either
%% side (1 drops it, 0 keeps it). NoMatchResult is returned verbatim
%% when the separator does not occur.
split_uri(UriPart, SplitChar, NoMatchResult, SkipLeft, SkipRight) ->
    case re:run(UriPart, SplitChar, [{capture, first}]) of
        nomatch ->
            NoMatchResult;
        {match, [{Start, _Len}]} ->
            Before = string:slice(UriPart, 0, Start + 1 - SkipLeft),
            After = string:slice(UriPart, Start + SkipRight, string:length(UriPart)),
            {Before, After}
    end.
%% Re-wrap an IPv6 host in "[...]" when the ipv6_host_with_brackets
%% option is exactly {ipv6_host_with_brackets, true}; otherwise return
%% the host unchanged. The input representation (binary/list) is kept.
maybe_ipv6_host_with_brackets(Host, Opts) ->
    case lists:keyfind(ipv6_host_with_brackets, 1, Opts) of
        {ipv6_host_with_brackets, true} when is_binary(Host) ->
            <<"[", Host/binary, "]">>;
        {ipv6_host_with_brackets, true} ->
            "[" ++ Host ++ "]";
        _ ->
            Host
    end.
%% Coerce the port produced by split_uri/5 into an integer.
%% Throws {error, no_default_port} when the URI had no explicit port and
%% the scheme has no default (caught in parse/2).
int_port(no_default_port) ->
    throw({error, no_default_port});
int_port(Port) when is_integer(Port) ->
    Port;
int_port(Port) when is_binary(Port) ->
    binary_to_integer(Port);
int_port(Port) when is_list(Port) ->
    list_to_integer(Port).
%% Normalise an empty path to "/", preserving the input representation.
path(<<>>) ->
    <<"/">>;
path("") ->
    "/";
path(Path) ->
    Path.
%% Percent-encode one character (list flavour): reserved characters
%% become "%" ++ hex digits, others pass through as a one-char list.
%% NOTE(review): hex rendering delegates to http_util:integer_to_hexlist/1
%% (not visible here) — presumably uppercase hex; confirm there.
uri_encode(Char, Reserved) ->
    case sets:is_element(Char, Reserved) of
	true ->
	    [ $% | http_util:integer_to_hexlist(Char)];
	false ->
	    [Char]
    end.
%% Percent-encode one character (binary flavour): reserved characters
%% become <<"%XY">> with uppercase hex, others a single-byte binary.
uri_encode_binary(Char, Reserved) ->
    case sets:is_element(Char, Reserved) of
        false ->
            <<Char>>;
        true ->
            <<$%, (integer_to_binary(Char, 16))/binary>>
    end.
%% Convert one hexadecimal digit (either case) to its integer value.
%% Any other character fails with function_clause, which is intentional.
%% Fix: extraction junk fused onto the final clause has been removed so
%% the module compiles again.
hex2dec(X) when (X>=$0) andalso (X=<$9) -> X-$0;
hex2dec(X) when (X>=$A) andalso (X=<$F) -> X-$A+10;
hex2dec(X) when (X>=$a) andalso (X=<$f) -> X-$a+10.
-module(week2dot9).
-export([double/1, evens/1, evensT/1, median/1, myLength/1, occur/2, modes/1]).
% Transforming list elements
% double/1 doubles every element of a list of numbers.
double([]) ->
    [];
double([N | Rest]) ->
    [2 * N | double(Rest)].
% Filtering lists
% evens/1 keeps only the even integers of a list, in order.
evens([]) ->
    [];
evens([N | Rest]) when N rem 2 == 0 ->
    [N | evens(Rest)];
evens([_Odd | Rest]) ->
    evens(Rest).
% Tail-recursive variant of evens/1: evens are accumulated in reverse
% order and the accumulator is flipped exactly once at the end.
evensT(Numbers) ->
    evensT(Numbers, []).

evensT([], Acc) ->
    lists:reverse(Acc);
evensT([N | Rest], Acc) when N rem 2 == 0 ->
    evensT(Rest, [N | Acc]);
evensT([_Odd | Rest], Acc) ->
    evensT(Rest, Acc).
% median/1: the middle element of the sorted list, or the mean of the
% two middle elements when the list length is even.
median(Numbers) ->
    median(lists:sort(Numbers), 0, length(Numbers)).

% Walk the sorted list, counting positions until the middle is reached.
median([Middle | _Rest], Index, Len) when Len rem 2 == 1, Len div 2 == Index ->
    Middle;
median([Lo, Hi | _Rest], Index, Len) when Len rem 2 == 0, Len div 2 == (Index + 1) ->
    (Lo + Hi) / 2;
median([_Skip | Rest], Index, Len) ->
    median(Rest, Index + 1, Len).
% the modes of a list of numbers: this is a list consisting of the numbers that occur most frequently
% in the list; if there is just one, this will be a list with one element only
%
% Fix: the previous implementation returned the occurrence COUNT of each
% distinct element ([occur(...) | ...]) instead of the most frequent
% elements, contradicting the contract above. Now the counts are computed
% per distinct value (first-occurrence order preserved) and only the
% values with the maximal count are returned.
modes([]) ->
    [];
modes(List) ->
    Counts = [{Value, length([X || X <- List, X =:= Value])}
              || Value <- distinct(List)],
    Highest = lists:max([Count || {_Value, Count} <- Counts]),
    [Value || {Value, Count} <- Counts, Count =:= Highest].

% Distinct values of a list, keeping the order of first appearance.
distinct([]) ->
    [];
distinct([H | T]) ->
    [H | distinct([X || X <- T, X =/= H])].
% Tool methods
% occur/2: number of times Val appears in the list.
% Fix: the unused variable H in the final occur/3 clause (compiler
% warning) is now _Other, and the redundant occur([], _Val) -> 0 base
% clause of occur/2 is dropped — occur/3 already handles the empty list.
occur(List, Val) ->
    occur(List, Val, 0).

% Tail-recursive counter.
occur([], _Val, Acc) ->
    Acc;
occur([Val | T], Val, Acc) ->
    occur(T, Val, Acc + 1);
occur([_Other | T], Val, Acc) ->
    occur(T, Val, Acc).
% remove/2: drop every occurrence of Val from the list.
remove(List, Val) ->
    [X || X <- List, X =/= Val].
% myLength/1: length of a list, written recursively (exercise variant of
% the built-in erlang:length/1).
% Fix: extraction junk fused onto the final clause has been removed so
% the module compiles again; the misplaced comment about median/1 that
% preceded this function has been dropped.
myLength([]) ->
    0;
myLength([_H | T]) ->
    1 + myLength(T).
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(trcb_exp_overlay).
-author("<NAME> <<EMAIL>").
-include("trcb_exp.hrl").
-export([get/2,
to_connect/3]).
%% @doc Peer table for an overlay: an orddict mapping each node id in
%% 0..N-1 to the ids it should connect to. A single-node system has no
%% links at all, whatever the overlay.
-spec get(atom(), pos_integer()) -> orddict:orddict().
get(_, 1) ->
    [];
get(fullmesh, N) ->
    %% In a full mesh every node peers with every other node.
    orddict:from_list(
      [{I, lists:seq(0, I - 1) ++ lists:seq(I + 1, N - 1)}
       || I <- lists:seq(0, N - 1)]).
%% @doc The first argument is my node spec,
%% the second argument is a list of node specs,
%% and the third argument is the overlay.
%% Returns the node specs this node must connect to: node names are given
%% dense numeric ids (in orddict order of their names), the overlay's
%% topology is computed over those ids, and this node's peer ids are
%% mapped back to full node specs.
-spec to_connect(trcb_exp_node_id(), list(node_spec()), atom()) ->
    list(node_spec()).
to_connect(MyName, Nodes, Overlay) ->
    NodeNumber = length(Nodes),

    %% name -> node
    NameToNode = name_to_node_map(Nodes),

    %% {id -> name, id} — MyId is this node's position in the name order.
    {IdToName, MyId} = id_to_name_map(MyName, NameToNode),

    %% id -> [id]
    Topology = get(Overlay, NodeNumber),

    find_peers(NameToNode, IdToName, MyId, Topology).
%% @private Build an orddict from node name to the full node spec.
name_to_node_map(Nodes) ->
    orddict:from_list(
      lists:map(fun({Name, _, _} = Node) -> {Name, Node} end, Nodes)).
%% @private Assign a dense numeric id (0, 1, ...) to every node name, in
%% the (sorted) iteration order of NameToNode, and remember which id was
%% given to MyName. Returns {IdToName :: orddict(id -> name), MyId}.
id_to_name_map(MyName, NameToNode) ->
    {IdToName, MyId, _} = lists:foldl(
        fun({Name, _}, {IdToName0, MyId0, Counter0}) ->
            IdToName1 = orddict:store(Counter0, Name, IdToName0),
            %% Capture the counter when we pass our own name.
            MyId1 = case MyName == Name of
                true ->
                    Counter0;
                false ->
                    MyId0
            end,
            Counter1 = Counter0 + 1,
            {IdToName1, MyId1, Counter1}
        end,
        %% MyId stays 'undefined' if MyName is absent from NameToNode.
        {orddict:new(), undefined, 0},
        NameToNode
    ),
    {IdToName, MyId}.
%% @private Resolve this node's peer ids (from the topology) back to node
%% specs, via id -> name -> node lookups.
%% Fix: extraction junk fused onto the final line has been removed so the
%% module compiles again.
find_peers(NameToNode, IdToName, MyId, Topology) ->
    %% [id] -> [node]
    [orddict:fetch(orddict:fetch(PeerId, IdToName), NameToNode)
     || PeerId <- orddict:fetch(MyId, Topology)].
%% -------------------------------------------------------------------
%% @doc Handles interaction with the validator component.<br/>
%% The validator is responsible for verifying and scoring propositions
%% from the workers. This is the main validator module, the rest of
%% the application should use this to interface with the validator
%% component.<br/>
%% The function {@link start_link} starts a single
%% {@link validator_sup} which then starts a proposition queuing
%% process from {@link validator_queue} and a port process
%% ({@link validator_port}) via another supervisor
%% {@link validator_port_sup}).
%% @end
%% -------------------------------------------------------------------
-module(validator).
%% application programming interface
-export([
start_link/1,
validate/3,
round_started/0,
all_workers_stopped/0
]).
%% ===================================================================
%% application programming interface
%% ===================================================================
%% @doc Starts the whole validator infrastructure, including queue and
%% external executable.<br/>
%% `ExtProg' is handed to {@link validator_sup}, which starts the queue
%% and the port process around the external validator program.
start_link(ExtProg) ->
    validator_sup:start_link(ExtProg).

%% @doc Submits a single proposition from a worker to the validator.<br/>
%% The proposition is queued in {@link validator_queue} and scored
%% asynchronously; this call only enqueues and returns `ok'.
-spec validate(atom(), [string()], string()) -> ok.
validate(WorkerID, WorkerInput, WorkerOutput) ->
    validator_queue:insert_proposition(WorkerID, WorkerInput, WorkerOutput).

%% @doc Informs the validator that the workers are working on a new
%% round.<br/>
%% The validator has to be ware of this state because when the workers
%% are stopped and the queue runs dry, the validator will tell the dj
%% that it's OK to end the round. This shouldn't happen while the
%% workers are running.
-spec round_started() -> ok.
round_started() ->
    validator_queue:round_started().

%% @doc Informs the validator that all workers are stopped.<br/>
%% The next time the queue runs dry, the validator assumes that all
%% propositions have been processed and tells the dj to end the round.
-spec all_workers_stopped() -> ok.
all_workers_stopped() ->
    validator_queue:all_workers_stopped().
%% ===================================================================
%% Copyright 2020, The Tremor Team
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(gen_script).
-include_lib("pbt.hrl").
-export([gen/1]).
%% @doc Render one expression of the generated AST as an iolist of
%% tremor-script source. Every binary-operator form is parenthesised.
%% Fix 1: a second, byte-identical pattern for {'#', _, _, _} appeared
%% later in the clause list; it could never match (the first clause wins)
%% and only produced a compiler warning — it has been removed.
%% Fix 2: extraction junk fused onto the final line has been removed.
gen_({'+', A, B}) -> ["(", gen_(A), " + ", gen_(B), ")"];
gen_({'-', A, B}) -> ["(", gen_(A), " - ", gen_(B), ")"];
gen_({'/', A, B}) -> ["(", gen_(A), " / ", gen_(B), ")"];
gen_({'*', A, B}) -> ["(", gen_(A), " * ", gen_(B), ")"];
gen_({'%', A, B}) -> ["(", gen_(A), " % ", gen_(B), ")"];
gen_({'band', A, B}) -> ["(", gen_(A), " & ", gen_(B), ")"];
gen_({'bxor', A, B}) -> ["(", gen_(A), " ^ ", gen_(B), ")"];
gen_({'==', A, B}) -> ["(", gen_(A), " == ", gen_(B), ")"];
gen_({'!=', A, B}) -> ["(", gen_(A), " != ", gen_(B), ")"];
gen_({'>=', A, B}) -> ["(", gen_(A), " >= ", gen_(B), ")"];
gen_({'>', A, B}) -> ["(", gen_(A), " > ", gen_(B), ")"];
gen_({'<', A, B}) -> ["(", gen_(A), " < ", gen_(B), ")"];
gen_({'<=', A, B}) -> ["(", gen_(A), " <= ", gen_(B), ")"];
gen_({'and', A, B}) -> ["(", gen_(A), " and ", gen_(B), ")"];
gen_({'or', A, B}) -> ["(", gen_(A), " or ", gen_(B), ")"];
gen_({'not', A}) -> ["not (", gen_(A), ")"];
gen_({'+', A}) -> ["(+ ", gen_(A), ")"];
gen_({'-', A}) -> ["(- ", gen_(A), ")"];
%% String interpolation: "prefix#{sub}suffix" — the enclosing quotes of
%% the two string halves are trimmed away so the pieces join seamlessly.
gen_({'#', String1, String2, Sub}) ->
    ["(", string:trim(gen_(String1), trailing, "\""), "#{", gen(Sub), "}",
     string:trim(gen_(String2), leading, "\""), ")"];
gen_({'let', Path, Expr}) -> ["let ", gen_(Path), " = ", gen_(Expr)];
gen_({local, Path}) -> Path;
gen_({emit, A}) -> ["emit (", gen_(A), ")"];
gen_(drop) -> "drop";
gen_(true) -> "true";
gen_(false) -> "false";
gen_(null) -> "null";
gen_(X) when is_number(X) -> io_lib:format("~p", [X]);
%% Binaries are string literals; jsx adds the quoting/escaping.
gen_(X) when is_binary(X) -> jsx:encode(X).

%% @doc Render a full expression to a binary script snippet.
gen(Expr) ->
    iolist_to_binary(gen_(Expr)).
%% -------------------------------------------------------------------
%%
%% riak_dt_emcntr: A convergent, replicated, state based PN counter,
%% for embedding in riak_dt_map.
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc A PN-Counter CRDT. A PN-Counter is essentially two G-Counters:
%% one for increments and one for decrements. The value of the counter
%% is the difference between the value of the Positive G-Counter and
%% the value of the Negative G-Counter. However, this PN-Counter is
%% for using embedded in a riak_dt_map. The problem with an embedded
%% pn-counter is when the field is removed and added again. PN-Counter
%% merge takes the max of P and N as the merged value. In the case
%% that a field was removed and re-added P and N maybe be _lower_ than
%% their removed values, and when merged with a replica that has not
%% seen the remove, the remove is lost. This counter adds some
%% causality by storing a `dot` with P and N. Merge takes the max
%% event for each actor, so newer values win over old ones. The rest
%% of the mechanics are the same.
%%
%% @see riak_kv_gcounter.erl
%%
%% @reference <NAME>, <NAME>, <NAME>, <NAME> (2011) A comprehensive study of
%% Convergent and Commutative Replicated Data Types. http://hal.upmc.fr/inria-00555588/
%%
%% @end
-module(riak_dt_emcntr).
-behaviour(riak_dt).
-export([new/0, value/1, value/2]).
-export([update/3, merge/2, equal/2]).
-export([to_binary/1, from_binary/1]).
-export([to_binary/2]).
-export([stats/1, stat/2]).
-export([parent_clock/2, update/4]).
-export([to_version/2]).
%% EQC API
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-export([gen_op/0, gen_op/1, update_expected/3, eqc_state_value/1, init_state/0, generate/0]).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([emcntr/0, emcntr_op/0]).
-type emcntr() :: {riak_dt_vclock:vclock(), [entry()]}.
-type entry() :: {Actor :: riak_dt:actor(), {Event :: pos_integer(),
Inc :: pos_integer(),
Dec :: pos_integer()}
}.
-type emcntr_op() :: increment_op() | decrement_op().
-type increment_op() :: increment | {increment, integer()}.
-type decrement_op() :: decrement | {decrement, integer()}.
%% @doc Create an empty counter: a fresh version vector and no per-actor
%% entries. The value of a new counter is 0.
-spec new() -> emcntr().
new() ->
    {riak_dt_vclock:fresh(), orddict:new()}.
%% @doc Embedded CRDTs must share a causal context with their parent
%% Map; replacing the internal clock with the parent's ensures this.
-spec parent_clock(riak_dt_vclock:vclock(), emcntr()) -> emcntr().
parent_clock(ParentClock, {_OwnClock, Entries}) ->
    {ParentClock, Entries}.
%% @doc The current integer value of the counter: the sum over all actor
%% entries of increments minus decrements.
-spec value(emcntr()) -> integer().
value({_Clock, Entries}) ->
    lists:foldl(fun({_Actor, {_Event, Inc, Dec}}, Acc) -> Acc + Inc - Dec end,
                0, Entries).
%% @doc query value, not implemented. Just returns result of `value/1'
%% whatever the query term is.
-spec value(term(), emcntr()) -> integer().
value(_, Cntr) ->
    value(Cntr).
%% @doc increment/decrement the counter. Op is either a two tuple of
%% `{increment, By}', `{decrement, By}' where `By' is a positive
%% integer. Or simply the atoms `increment' or `decrement', which are
%% equivalent to `{increment | decrement, 1}' Returns the updated
%% counter.
%%
%% Note: the second argument must be a `riak_dt:dot()', that is a
%% 2-tuple of `{Actor :: term(), Event :: pos_integer()}' as this is
%% for embedding in a `riak_dt_map'
-spec update(emcntr_op(), riak_dt:dot(), emcntr()) -> {ok, emcntr()}.
update(Op, {Actor, Evt}=Dot, {Clock, PNCnt}) when is_tuple(Dot) ->
    %% Record the dot in the counter's clock so merge/2 can distinguish
    %% this update from stale or removed entries.
    Clock2 = riak_dt_vclock:merge([[Dot], Clock]),
    %% `error' when the actor has no entry yet; op/2 treats it as {0, 0}.
    Entry = orddict:find(Actor, PNCnt),
    {Inc, Dec} = op(Op, Entry),
    %% The stored entry carries the event so newer values win on merge.
    {ok, {Clock2, orddict:store(Actor, {Evt, Inc, Dec}, PNCnt)}}.
%% @doc update with a context. Contexts have no effect. Same as
%% `update/3' — the context argument is accepted only for interface
%% compatibility with other embedded CRDTs.
-spec update(emcntr_op(), riak_dt:dot(), emcntr(), riak_dt_vclock:vclock()) ->
                    {ok, emcntr()}.
update(Op, Dot, Cntr, _Ctx) ->
    update(Op, Dot, Cntr).
%% @private Apply `Op' to the {P, N} pair for one actor. The second
%% argument may be the raw result of orddict:find/2: `error' means the
%% actor is new and starts at {0, 0}; a negative `By' flips the
%% direction of the operation.
-spec op(emcntr_op(), error | {ok, entry()} | {P::non_neg_integer(), N::non_neg_integer()}) ->
                {P::non_neg_integer(), N::non_neg_integer()}.
op(Op, error) ->
    op(Op, {0, 0});
op(Op, {ok, {_Evt, P, N}}) ->
    op(Op, {P, N});
op(increment, PN) ->
    op({increment, 1}, PN);
op(decrement, PN) ->
    op({decrement, 1}, PN);
op({_Direction, 0}, PN) ->
    PN;
op({increment, By}, {P, N}) when is_integer(By), By > 0 ->
    {P + By, N};
op({decrement, By}, {P, N}) when is_integer(By), By > 0 ->
    {P, N + By};
op({increment, By}, PN) when is_integer(By), By < 0 ->
    op({decrement, -By}, PN);
op({decrement, By}, PN) when is_integer(By), By < 0 ->
    op({increment, -By}, PN).
%% @doc takes two `emcntr()'s and merges them into a single
%% `emcntr()'. This is the Least Upper Bound of the Semi-Lattice/CRDT
%% literature. The semantics of the `emnctr()' merge are explained in
%% the module docs. In a nutshell, merges version vectors, and keeps
%% only dots that are present on both sides, or concurrent.
-spec merge(emcntr(), emcntr()) -> emcntr().
merge(Cnt, Cnt) ->
    %% Identical replicas: nothing to do.
    Cnt;
merge({ClockA, CntA}, {ClockB, CntB}) ->
    Clock = riak_dt_vclock:merge([ClockA, ClockB]),
    %% First pass keeps A's surviving entries and returns B's entries
    %% not seen on A; second pass filters those against A's clock.
    {Cnt0, BUnique} = merge_left(ClockB, CntA, CntB),
    Cnt = merge_right(ClockA, BUnique, Cnt0),
    {Clock, Cnt}.
%% @private merge the left handside counter (A) by filtering out the
%% dots that are unique to it, and dominated. Returns `[entry()]' as
%% an accumulator, and the dots that are unique to the right hand side
%% (B).
-spec merge_left(riak_dt_vclock:vclock(), [entry()], [entry()]) -> {[entry()], [entry()]}.
merge_left(RHSClock, LHS, RHS) ->
    orddict:fold(fun(Actor, {Evt, _Inc, _Dec}=Cnt, {Keep, RHSUnique}) ->
                         case orddict:find(Actor, RHS) of
                             %% Actor only on the LHS:
                             error ->
                                 case riak_dt_vclock:descends(RHSClock, [{Actor, Evt}]) of
                                     true ->
                                         %% RHS saw this dot and removed the
                                         %% entry — drop it.
                                         {Keep, RHSUnique};
                                     false ->
                                         %% Concurrent with RHS — keep it.
                                         {orddict:store(Actor, Cnt, Keep), RHSUnique}
                                 end;
                             %% RHS has this actor, with a greater dot
                             {ok, {E2, I, D}} when E2 > Evt ->
                                 {orddict:store(Actor, {E2, I, D}, Keep), orddict:erase(Actor, RHSUnique)};
                             %% RHS has this actor, but a lesser or equal dot
                             {ok, _} ->
                                 {orddict:store(Actor, Cnt, Keep), orddict:erase(Actor, RHSUnique)}
                         end
                 end,
                 %% Entries erased from RHS above are shared; what remains
                 %% after the fold is unique to the RHS.
                 {orddict:new(), RHS},
                 LHS).
%% @private merge the unique actor entries from the right hand side,
%% keeping the concurrent ones, and dropping the dominated.
-spec merge_right(riak_dt_vclock:vclock(), [entry()], [entry()]) -> [entry()].
merge_right(LHSClock, RHSUnique, Acc) ->
    orddict:fold(fun(Actor, {Evt, _Inc, _Dec}=Cnt, Keep) ->
                         case riak_dt_vclock:descends(LHSClock, [{Actor, Evt}]) of
                             true ->
                                 %% LHS saw this dot and removed the entry.
                                 Keep;
                             false ->
                                 %% Concurrent with the LHS — keep it.
                                 orddict:store(Actor, Cnt, Keep)
                         end
                 end,
                 Acc,
                 RHSUnique).
%% @doc equality of two counters internal structure, not the `value/1'
%% they produce. Two counters with equal values but different clocks or
%% per-actor entries are NOT equal.
-spec equal(emcntr(), emcntr()) -> boolean().
equal({ClockA, PNCntA}, {ClockB, PNCntB}) ->
    riak_dt_vclock:equal(ClockA, ClockB) andalso
        PNCntA =:= PNCntB.
%% @doc generate stats for this counter. Only `actor_count' is
%% produced at present; see stat/2 for individual stats.
-spec stats(emcntr()) -> [{actor_count, pos_integer()}].
stats(Emcntr) ->
    [{actor_count, stat(actor_count, Emcntr)}].
%% @doc Generate the requested statistic. Only `actor_count' — the
%% number of entries in the counter's clock — is supported; any other
%% stat name yields `undefined'.
-spec stat(atom(), emcntr()) -> pos_integer() | undefined.
stat(actor_count, {Clock, _Entries}) ->
    length(Clock);
stat(_Other, _Cntr) ->
    undefined.
-include("riak_dt_tags.hrl").
-define(TAG, ?DT_EMCNTR_TAG).
-define(V1_VERS, 1).
%% @doc produce a compact binary representation of the counter:
%% a tag byte, a version byte, then the external term format of the
%% counter itself.
%%
%% @see from_binary/1
-spec to_binary(emcntr()) -> binary().
to_binary(Cntr) ->
    Bin = term_to_binary(Cntr),
    <<?TAG:8/integer, ?V1_VERS:8/integer, Bin/binary>>.
%% Versioned encode: only binary format version 1 exists at present.
-spec to_binary(Vers :: pos_integer(), emcntr()) -> {ok, binary()} | ?UNSUPPORTED_VERSION.
to_binary(1, Cntr) ->
    B = to_binary(Cntr),
    {ok, B};
to_binary(Vers, _Cntr) ->
    ?UNSUPPORTED_VERSION(Vers).
%% @doc Decode a binary encoded riak_dt_emcntr. The tag byte must match
%% and only version 1 is understood; anything else is rejected with the
%% corresponding error macro.
%%
%% @see to_binary/1
-spec from_binary(binary()) -> {ok, emcntr()} | ?INVALID_BINARY | ?UNSUPPORTED_VERSION.
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, Bin/binary>>) ->
    {ok, binary_to_term(Bin)};
from_binary(<<?TAG:8/integer, Vers:8/integer, _Bin/binary>>) ->
    ?UNSUPPORTED_VERSION(Vers);
from_binary(_Bin) ->
    ?INVALID_BINARY.
%% @doc Convert the in-memory representation to the given version.
%% Only one representation exists, so this is the identity.
-spec to_version(pos_integer(), emcntr()) -> emcntr().
to_version(_Version, Cntr) ->
    Cntr.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
-ifdef(EQC).
%% EQC generator
%% Build a random counter by folding random ops over new/0, with a
%% strictly increasing event number and randomly chosen actors.
generate() ->
    ?LET({Ops, Actors}, {non_empty(list(gen_op())), non_empty(list(bitstring(16*8)))},
         begin
             {Generated, _Evts} = lists:foldl(fun(Op, {Cntr, Evt}) ->
                                                      Actor = case length(Actors) of
                                                                  1 -> hd(Actors);
                                                                  _ -> lists:nth(crypto:rand_uniform(1, length(Actors)+1), Actors)
                                                              end,
                                                      {ok, Cntr2} = riak_dt_emcntr:update(Op, {Actor, Evt}, Cntr),
                                                      {Cntr2, Evt+1}
                                              end,
                                              {riak_dt_emcntr:new(), 1},
                                              Ops),
             Generated
         end).

%% Model state: the expected integer value, starting at 0.
init_state() ->
    0.

gen_op(_Size) ->
    gen_op().

%% Random counter operation, mirroring the emcntr_op() type.
gen_op() ->
    oneof([increment,
           {increment, nat()},
           decrement,
           {decrement, nat()}
          ]).

%% Model transition: apply the op to the expected integer value.
update_expected(_ID, increment, Prev) ->
    Prev+1;
update_expected(_ID, decrement, Prev) ->
    Prev-1;
update_expected(_ID, {increment, By}, Prev) ->
    Prev+By;
update_expected(_ID, {decrement, By}, Prev) ->
    Prev-By;
update_expected(_ID, _Op, Prev) ->
    Prev.

eqc_state_value(S) ->
    S.
-endif.
%% A new counter must read as 0.
new_test() ->
    ?assertEqual(0, value(new())).

%% Test helper: fold {Actor, Op} pairs into a counter, threading a
%% strictly increasing event counter so every dot is unique.
make_counter(Ops, Evt) ->
    lists:foldl(fun({Actor, Op}, {Counter, Event}) ->
                        E2 = Event+1,
                        {ok, C2} = update(Op, {Actor, E2}, Counter),
                        {C2, E2} end,
                {new(), Evt},
                Ops).

%% As make_counter/2 starting from event 0, discarding the final event.
make_counter(Ops) ->
    {Cnt, _Evt} = make_counter(Ops, 0),
    Cnt.
%% value/1 sums increments minus decrements across actors.
value_test() ->
    PNCnt1 = make_counter([{a, increment},
                           {b, {increment, 13}}, {b, {decrement, 10}},
                           {c, increment},
                           {d, decrement}]),
    PNCnt2 = make_counter([]),
    PNCnt3 = make_counter([{a, {increment,3}}, {a, {decrement, 3}},
                           {b, decrement}, {b, increment},
                           {c, increment}, {c, decrement}]),
    ?assertEqual(4, value(PNCnt1)),
    ?assertEqual(0, value(PNCnt2)),
    ?assertEqual(0, value(PNCnt3)).

%% Bare `increment' adds 1 per update, across actors.
update_increment_test() ->
    PNCnt0 = new(),
    {ok, PNCnt1} = update(increment, {a, 1}, PNCnt0),
    {ok, PNCnt2} = update(increment, {b, 1}, PNCnt1),
    {ok, PNCnt3} = update(increment, {a, 2}, PNCnt2),
    ?assertEqual(3, value(PNCnt3)).

%% {increment, By} adds By.
update_increment_by_test() ->
    PNCnt0 = new(),
    {ok, PNCnt1} = update({increment, 7}, {a, 1}, PNCnt0),
    ?assertEqual(7, value(PNCnt1)).

%% Bare `decrement' subtracts 1.
update_decrement_test() ->
    PNCnt0 = new(),
    {ok, PNCnt1} = update(increment, {a, 1}, PNCnt0),
    {ok, PNCnt2} = update(increment, {b, 1}, PNCnt1),
    {ok, PNCnt3} = update(increment, {a, 2}, PNCnt2),
    {ok, PNCnt4} = update(decrement, {a, 3}, PNCnt3),
    ?assertEqual(2, value(PNCnt4)).

%% {decrement, By} subtracts By.
update_decrement_by_test() ->
    PNCnt0 = new(),
    {ok, PNCnt1} = update({increment, 7}, {a, 1}, PNCnt0),
    {ok, PNCnt2} = update({decrement, 5}, {a, 2}, PNCnt1),
    ?assertEqual(2, value(PNCnt2)).

%% Negative amounts flip the operation's direction (see op/2).
update_neg_increment_by_test() ->
    PNCnt0 = new(),
    {ok, PNCnt1} = update({increment, -8}, {a, 1}, PNCnt0),
    {ok, PNCnt2} = update({decrement, -7}, {a, 2}, PNCnt1),
    ?assertEqual(-1, value(PNCnt2)).
%% Merging two counters built over disjoint-but-overlapping actor sets
%% sums each actor's contribution once (actor "4" is not double-counted).
merge_test() ->
    {PNCnt1, Evt} = make_counter([{<<"1">>, increment},
                                  {<<"2">>, {increment, 2}},
                                  {<<"4">>, {increment, 1}}], 0),
    {PNCnt2, _Evt2} = make_counter([{<<"3">>, {increment, 3}},
                                    {<<"4">>, {increment, 3}}], Evt),
    ?assertEqual(new(), merge(new(), new())),
    ?assertEqual(9, value(merge(PNCnt1, PNCnt2))).

%% equal/2 compares internal structure, not value/1 (see its doc).
equal_test() ->
    PNCnt1 = make_counter([{1, {increment, 2}}, {1, decrement},
                           {2, increment},
                           {3, decrement},
                           {4, increment}]),
    PNCnt2 = make_counter([{1, increment},
                           {2, {increment, 4}},
                           {3, increment}]),
    PNCnt3 = make_counter([{1, {increment, 2}}, {1, decrement},
                           {2, increment},
                           {3, decrement},
                           {4, increment}]),
    ?assertNot(equal(PNCnt1, PNCnt2)),
    ?assert(equal(PNCnt1, PNCnt3)).

%% End-to-end scenario: concurrent updates on diverged replicas merge to
%% the expected clock and per-actor entries.
usage_test() ->
    PNCnt1 = new(),
    PNCnt2 = new(),
    ?assert(equal(PNCnt1, PNCnt2)),
    {ok, PNCnt1_1} = update({increment, 2}, {a1, 1}, PNCnt1),
    {ok, PNCnt2_1} = update(increment, {a2, 1}, PNCnt2),
    PNCnt3 = merge(PNCnt1_1, PNCnt2_1),
    {ok, PNCnt2_2} = update({increment, 3}, {a3, 1}, PNCnt2_1),
    {ok, PNCnt3_1} = update(increment, {a4, 1}, PNCnt3),
    {ok, PNCnt3_2} = update(increment, {a1, 2}, PNCnt3_1),
    {ok, PNCnt3_3} = update({decrement, 2}, {a5, 1}, PNCnt3_2),
    {ok, PNCnt2_3} = update(decrement, {a2, 2}, PNCnt2_2),
    ?assertEqual({[{a1, 2}, {a2, 2}, {a3, 1}, {a4, 1}, {a5, 1}],
                  [{a1, {2, 3,0}},
                   {a2, {2, 1, 1}},
                   {a3, {1, 3,0}},
                   {a4, {1, 1, 0}},
                   {a5, {1, 0,2}}]}, merge(PNCnt3_3, PNCnt2_3)).

%% to_binary/1 and from_binary/1 round-trip arbitrary actor terms.
roundtrip_bin_test() ->
    PN = new(),
    {ok, PN1} = update({increment, 2}, {<<"a1">>, 1}, PN),
    {ok, PN2} = update({decrement, 1000000000000000000000000}, {douglas_Actor, 1}, PN1),
    {ok, PN3} = update(increment, {[{very, ["Complex"], <<"actor">>}, honest], 900987}, PN2),
    {ok, PN4} = update(decrement, {"another_acotr", 28}, PN3),
    Bin = to_binary(PN4),
    {ok, Decoded} = from_binary(Bin),
    ?assert(equal(PN4, Decoded)).

%% actor_count tracks the clock length; unknown stats are undefined.
stat_test() ->
    PN = new(),
    {ok, PN1} = update({increment, 50}, {a1, 1}, PN),
    {ok, PN2} = update({increment, 50}, {a2, 1}, PN1),
    {ok, PN3} = update({decrement, 15}, {a3, 1}, PN2),
    {ok, PN4} = update({decrement, 10}, {a4, 1}, PN3),
    ?assertEqual([{actor_count, 0}], stats(PN)),
    ?assertEqual(4, stat(actor_count, PN4)),
    ?assertEqual(undefined, stat(max_dot_length, PN4)).
-endif.
%%
%% Copyright 2016 <NAME>, All Rights Reserved
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(relog).
-compile({parse_transform, category}).
-include_lib("datum/include/datum.hrl").
-export([start/0]).
-export([
socket/2
, close/1
, uid/2
, iri/2
, append/2
, append/3
, match/2
, stream/3
, c/1
, q/2
, jsonify/1
]).
%%
-type sock() :: _.
%%
%%
%% @doc Start the relog application and all of its dependencies.
start() ->
   application:ensure_all_started(?MODULE).

%% @doc Open a Redis connection (via eredis) to the given host and port.
-spec socket(string(), integer()) -> {ok, sock()} | {error, _}.

socket(Host, Port) ->
   eredis:start_link(Host, Port).

%% @doc Close a connection previously opened with socket/2.
close(Sock) ->
   eredis:stop(Sock).
%%
%% @doc Associate an IRI with a unique identity, or return the existing
%% one; delegates to relog_reader.
-spec uid(sock(), semantic:iri()) -> datum:either( binary() ).

uid(Sock, IRI) ->
   relog_reader:uid(Sock, IRI).

%%
%% @doc Look up the urn/uid associated with a unique identity.
-spec iri(sock(), binary()) -> datum:either( semantic:iri() ).

iri(Sock, Uid) ->
   relog_reader:iri(Sock, Uid).

%%
%% @doc Append a knowledge fact; append/2 uses a 30 second timeout.
-spec append(sock(), semantic:spo()) -> datum:either( semantic:iri() ).
-spec append(sock(), semantic:spo(), timeout()) -> datum:either( semantic:iri() ).

append(Sock, Fact) ->
   append(Sock, Fact, 30000).

append(Sock, Fact, Timeout) ->
   relog_writer:append(Sock, Fact, Timeout).

%%
%% @doc Match statements against a pattern, returning a stream.
-spec match(sock(), semantic:spock()) -> datum:stream().

match(Sock, Pattern) ->
   relog_reader:match(Sock, Pattern).

%%
%% @doc Datalog stream generator; the first argument is ignored.
-spec stream(_, _, _) -> _.

stream(_, Keys, Head) ->
   relog_reader:stream(Keys, Head).

%%
%% @doc Compile a datalog query for this module; results come back as maps.
-spec c(_) -> _.

c(Datalog) ->
   datalog:c(?MODULE, datalog:p(Datalog), [{return, maps}]).

%%
%% @doc Execute a compiled query against a socket.
-spec q(_, _) -> _.

q(Lp, Sock) ->
   Lp(Sock).
%%
%% @doc Encode deduced fact(s) into a JSON-friendly stream: every value
%% in each fact map is rendered with semantic:to_json/1.
%% Fix: extraction junk fused onto the final line has been removed so the
%% module compiles again.
jsonify(Stream) ->
   stream:map(
      fun(Fact) ->
         maps:map(
            fun(_, Val) -> semantic:to_json(Val) end,
            Fact
         )
      end,
      Stream
   ).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(lasp_dependence_dag).
-author("<NAME> <<EMAIL>>").
-include("lasp.hrl").
-behaviour(gen_server).
%% API
-export([start_link/0,
will_form_cycle/2,
is_root/1,
vertices/0,
add_edges/6,
add_vertex/1,
add_vertices/1]).
%% Utility
-export([to_dot/0,
export_dot/1]).
%% Test
%% @todo Only export on test.
-export([n_vertices/0,
process_map/0,
n_edges/0,
out_degree/1,
in_degree/1,
out_edges/1,
in_edges/1]).
-ifdef(TEST).
-export([contract/0,
cleave/1,
cleave_all/0]).
-endif.
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% Defines how often an optimization pass happens.
-define(CONTRACTION_INTERVAL, 1000).
%% Initial depth of all nodes.
-define(BASE_DEPTH, 0).
%%%===================================================================
%%% Type definitions
%%%===================================================================
%% To make contractions reversible, we have to hold all the
%% information about the removed vertices and the edges that were
%% connecting them. When a contraction is reversed, we use this infomation
%% to restart the intermediate processes, linking together the correct
%% vertices. One a contraction is reversed, all this metadata is removed.
%%
%% Stored metadata for a lasp process.
%% We don't store the input(s) and output vertices as that information
%% is implicitly stored by edges in the graph.
%%
%% Used to represent the metadata of processes inside a contracted path,
%% that only can have one parent and one child, hence why there is only
%% a single read function.
-record(process_metadata, {read :: function(),
transform :: function(),
write :: function()}).
%% A process hash is defined as the hash of their metadata (process_args).
%% This hash keeps constant through process restarts, and is used to uniquely
%% identify a process that connects the endpoints of a contracted path.
%% Removed vertices keep a reference to a process hash to identify in what path
%% they were before being removed (vertex_label).
-type process_hash() :: non_neg_integer().
-record(vertex_label, {process_pointer :: process_hash()}).
-type process_args() :: {[{lasp_vertex(), function()}],
function(),
{lasp_vertex(), function()}}.
%% We monitor all lasp processes, but the only information we get once that
%% happens is the process pid. This table keeps a mapping between hashes and
%% pids, and is used to during a cleaving pass to get to the information in
%% the optimized map.
-type pid_table() :: dict:dict(pid(), process_hash()).
%% This structure maps process hashes, identifying processes that connect
%% endpoints in a contracted path, to a list of vertices representing
%% the old path before being contracted, and a list of the metadata of
%% the lasp processes that linked those vertices together.
%% In this metadata list, each element at a position i represents the metadata
%% connecting the i-th and (i+1)-th vertices in the vertex list.
%% Maps a process hash to: the Pid of the process connecting the two
%% endpoints of a contracted path, the sequence of vertices that formed
%% the path before contraction, and the metadata of the lasp processes
%% that linked those vertices (element i links vertex i to vertex i+1).
-type optimized_map() :: dict:dict(process_hash(),
                                   {pid(),
                                    contract_path(),
                                    list(#process_metadata{})}).

%% Only used to represent paths suitable to be contracted. These paths
%% consist of two necessary endpoints and a list of unnecessary vertices
%% in between.
-type contract_path() :: list(lasp_vertex()).

%% We store a mapping Pid -> [{parent_node, child_node}] to
%% find the edge labeled with it without traversing the graph.
%%
%% This is useful when the Pid of a lasp process changes
%% (because it gets restarted or it just terminates), as it
%% lets us quickly delete those edges.
-type process_map() :: dict:dict(pid(), {id(), id()}).

%% State. The graph is constructed using the digraph module.
%% See definitions above for further information.
%% contraction_timer is 'undefined' when automatic contraction is
%% disabled (see init/1).
-record(state, {dag :: digraph:graph(),
                process_map :: process_map(),
                optimized_map :: optimized_map(),
                pid_table :: pid_table(),
                contraction_timer :: timer:tref()}).

%% We store the function metadata as the edge label.
%% An edge in the graph represents a lasp process, and contains
%% the various functions used by it.
%%
%% Processes that have n inputs are modeled by n edges, all with the same
%% pid, connecting each input to the output. This means that we only have
%% to collect a single read function.
%% @todo merge with process_metadata record
-record(edge_label, {pid :: pid(),
                     read :: function(),
                     transform :: function(),
                     write :: function()}).

%% A vertex in the dag represents either a crdt value or a lasp process pid.
%% Pids are used to model reads and updates to a value.
-type lasp_vertex() :: id() | pid().

%% Return type of digraph:edge/2.
-type lasp_edge() :: {digraph:edge(),
                      digraph:vertex(),
                      digraph:vertex(),
                      #edge_label{}}.
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Start the dependence dag server, registered locally under the
%% module name.
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

%% @doc Add a single vertex to the dag.
-spec add_vertex(lasp_vertex()) -> ok.
add_vertex(V) ->
    add_vertices([V]).

%% @doc Add a list of vertices to the dag (no server call for the
%% empty list).
-spec add_vertices(list(lasp_vertex())) -> ok.
add_vertices([]) ->
    ok;
add_vertices(Vs) ->
    gen_server:call(?MODULE, {add_vertices, Vs}, infinity).

%% @doc Check whether no other data vertex writes to V (see the
%% is_root clause of handle_call/3).
is_root(V) ->
    gen_server:call(?MODULE, {is_root, V}, infinity).

%% @doc Return all vertices, annotated with their labels.
vertices() ->
    gen_server:call(?MODULE, vertices, infinity).

%% @doc Check if linking the given vertices will form a loop.
%%
%% The user may accidentally form a loop while writing a dataflow
%% computation.
%%
%% Imagine this example:
%%
%%     A = declare(),
%%     B = declare(),
%%     map(A, \x.x+1, B),
%%     bind_to(A, B)
%%
%% As soon as A is given a value, it will start to grow in size,
%% as its own internal value is incremented forever.
-spec will_form_cycle(list(lasp_vertex()), lasp_vertex()) -> boolean().
will_form_cycle(Src, Dst) ->
    gen_server:call(?MODULE, {will_form_cycle, Src, Dst}, infinity).

%% @doc For all V in Src, create an edge from V to Dst labelled with Pid.
%%
%% Returns error if it couldn't create some of the edges,
%% either because it formed a loop, or because some of the
%% vertices weren't in the graph.
%%
-spec add_edges(list(lasp_vertex()),
                lasp_vertex(),
                pid(),
                list({lasp_vertex(), function()}),
                function(),
                {lasp_vertex(), function()}) -> ok | error.
add_edges(Src, Dst, Pid, ReadFuns, TransFun, WriteFun) ->
    gen_server:call(?MODULE, {add_edges, Src, Dst, Pid, ReadFuns, TransFun, WriteFun}, infinity).

%% @doc Return the dot representation as a string.
-spec to_dot() -> {ok, string()} | {error, no_data}.
to_dot() ->
    gen_server:call(?MODULE, to_dot, infinity).

%% @doc Write the dot representation of the dag to the given file path.
-spec export_dot(string()) -> ok | {error, no_data}.
export_dot(Path) ->
    gen_server:call(?MODULE, {export_dot, Path}, infinity).

%% @doc Number of vertices in the dag.
n_vertices() ->
    gen_server:call(?MODULE, n_vertices, infinity).

%% @doc Number of edges in the dag.
n_edges() ->
    gen_server:call(?MODULE, n_edges, infinity).

%% @doc In-degree of the given vertex.
in_degree(V) ->
    gen_server:call(?MODULE, {in_degree, V}, infinity).

%% @doc Out-degree of the given vertex.
out_degree(V) ->
    gen_server:call(?MODULE, {out_degree, V}, infinity).

%% @doc Annotated outgoing edges of the given vertex.
out_edges(V) ->
    gen_server:call(?MODULE, {out_edges, V}, infinity).

%% @doc Annotated incoming edges of the given vertex.
in_edges(V) ->
    gen_server:call(?MODULE, {in_edges, V}, infinity).

%% @doc Return the process map as a proplist.
process_map() ->
    gen_server:call(?MODULE, get_process_map, infinity).
-ifdef(TEST).

%% @doc Contract all suitable paths in the graph.
%%
%% A path can be contracted if it contains one or more
%% unnecessary vertices. An unnecessary vertex is one
%% that only has one child and one parent.
%%
%% This removes intermediate vertices from the graph.
%%
contract() ->
    gen_server:call(?MODULE, contract, infinity).

%% @doc Perform vertex cleaving on the given vertex.
%%
%% Given a vertex that was removed as part of a path contraction,
%% undo the contraction on all vertices of the path.
%%
%% Does nothing if the vertex was not removed.
%%
cleave(Vertex) ->
    gen_server:call(?MODULE, {cleave, Vertex}, infinity).

%% @doc Perform vertex cleaving on all removed vertices.
%%
%% Same as cleave/1, but on all removed vertices of the graph.
%%
cleave_all() ->
    gen_server:call(?MODULE, cleave_all, infinity).

-endif.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%% @doc Initialize state.
%%
%% The periodic contraction timer is only armed when automatic
%% contraction is enabled in the configuration.
init([]) ->
    {ok, #state{dag=digraph:new([acyclic]),
                process_map=dict:new(),
                optimized_map=dict:new(),
                pid_table=dict:new(),
                contraction_timer=maybe_start_contraction_timer()}}.

%% @private Arm the one-shot contraction timer when configured,
%% otherwise leave the timer reference undefined.
maybe_start_contraction_timer() ->
    case lasp_config:get(automatic_contraction, false) of
        true ->
            {ok, TRef} = timer:send_after(?CONTRACTION_INTERVAL, contract),
            TRef;
        _ ->
            undefined
    end.
%% @private
-spec handle_call(term(), {pid(), term()}, #state{}) ->
    {reply, term(), #state{}}.

%% A vertex is a root iff no data vertex ({Id, Type}) writes to it.
handle_call({is_root, V}, _From, #state{dag=Dag}=State) ->
    InNeighbours = digraph:in_neighbours(Dag, V),
    %% Filter out single-fire processes and only look at nodes that
    %% directly modify it from other data nodes in the system.
    FilterVs = lists:filter(fun({_, _}) ->
                                    true;
                               (_) ->
                                    false
                            end, InNeighbours),
    %% Idiomatic emptiness check; length/1 is O(n) and unnecessary here.
    IsRoot = FilterVs =:= [],
    {reply, {ok, IsRoot}, State};

handle_call(vertices, _From, #state{dag=Dag}=State) ->
    %% Retrieve vertices from the graph.
    Vertices = digraph:vertices(Dag),
    %% Annotate vertices with their label.
    Annotated = lists:map(fun(V) -> digraph:vertex(Dag, V) end, Vertices),
    {reply, {ok, Annotated}, State};

handle_call(n_vertices, _From, #state{dag=Dag}=State) ->
    {reply, {ok, digraph:no_vertices(Dag)}, State};

handle_call(n_edges, _From, #state{dag=Dag}=State) ->
    {reply, {ok, digraph:no_edges(Dag)}, State};

handle_call({in_degree, V}, _From, #state{dag=Dag}=State) ->
    {reply, {ok, digraph:in_degree(Dag, V)}, State};

handle_call({out_degree, V}, _From, #state{dag=Dag}=State) ->
    {reply, {ok, digraph:out_degree(Dag, V)}, State};

handle_call({out_edges, V}, _From, #state{dag=Dag}=State) ->
    Edges = [digraph:edge(Dag, E) || E <- digraph:out_edges(Dag, V)],
    {reply, {ok, Edges}, State};

handle_call({in_edges, V}, _From, #state{dag=Dag}=State) ->
    Edges = [digraph:edge(Dag, E) || E <- digraph:in_edges(Dag, V)],
    {reply, {ok, Edges}, State};

%% New vertices start out at the base depth.
handle_call({add_vertices, Vs}, _From, #state{dag=Dag}=State) ->
    [digraph:add_vertex(Dag, V, ?BASE_DEPTH) || V <- Vs],
    {reply, ok, State};

handle_call(to_dot, _From, #state{dag=Dag}=State) ->
    {reply, to_dot(Dag), State};

handle_call({export_dot, Path}, _From, #state{dag=Dag}=State) ->
    R = case to_dot(Dag) of
            {ok, Content} -> file:write_file(Path, Content);
            Error -> Error
        end,
    {reply, R, State};

handle_call(get_process_map, _From, #state{process_map=PM}=State) ->
    {reply, {ok, dict:to_list(PM)}, State};

handle_call(contract, _From, State) ->
    NewState = contract_all(State),
    {reply, ok, NewState};

handle_call({cleave, Vertex}, _From, #state{dag=Dag, optimized_map=OptMap}=State) ->
    cleave_if_contracted(Dag, Vertex, OptMap),
    {reply, ok, State};

%% Terminating each contraction process triggers the 'DOWN' handler,
%% which performs the actual cleaving of the associated path.
handle_call(cleave_all, _From, #state{optimized_map=OptMap}=State) ->
    lists:foreach(fun(Id) ->
                      {Pid, _, _} = dict:fetch(Id, OptMap),
                      spawn_link(fun() ->
                                     lasp_process_sup:terminate_child(lasp_process_sup, Pid)
                                 end)
                  end, dict:fetch_keys(OptMap)),
    {reply, ok, State};

%% @doc Check if linking the given vertices will introduce a cycle in the graph.
%%
%% Naive approach first: check if To is a member of From
%%
%% Second approach: check if there is a path from `Dst` to
%% any of the vertices in `Src`.
%%
%% We want to check this before spawning a lasp process, otherwise
%% an infinite loop can be created if the vertices form a loop.
%%
handle_call({will_form_cycle, Src, Dst}, _From, #state{dag=Dag, optimized_map=OptMap}=State) ->
    DirectCycle = lists:member(Dst, Src)
        orelse lists:any(fun(V) ->
                             digraph:get_path(Dag, Dst, V) =/= false
                         end, Src),
    Response = DirectCycle orelse optimized_cycle(Dag, Src, Dst, OptMap),
    {reply, Response, State};

%% @doc For all V in Src, create an edge from V to Dst labelled with Pid.
%%
%% We monitor all edge Pids to know when they die or get restarted.
%%
handle_call({add_edges, Src, Dst, Pid, ReadFuns, TransFun, WriteFun}, _From, State) ->
    {Reply, NewState} = add_edges(Src, Dst, Pid,
                                  ReadFuns, TransFun, WriteFun, State),
    {reply, Reply, NewState}.
%% @private No casts are part of this server's protocol; ignore them.
-spec handle_cast(term(), #state{}) -> {noreply, #state{}}.
handle_cast(_Request, State) ->
    {noreply, State}.
%% @private
-spec handle_info(term(), #state{}) -> {noreply, #state{}}.

%% @doc Remove the edges associated with a lasp process when it terminates.
%%
%% Given that lasp processes might get restarted or terminated,
%% we have to know when it happens so we can delete the appropriate
%% edges in the graph.
%%
handle_info({'DOWN', _, process, Pid, Reason}, #state{dag=Dag,
                                                      process_map=PM,
                                                      optimized_map=OptMap,
                                                      pid_table=PidTable}=State) ->
    %% NOTE(review): this assumes every monitored Pid is present in the
    %% process map; a 'DOWN' for an unknown Pid would crash with a
    %% badmatch here — confirm all monitors are set via add_edges/7.
    {ok, Edges} = dict:find(Pid, PM),
    %% delete_with_pid/4 mutates the digraph in place and returns it,
    %% so the fold simply threads the same graph through.
    NewDag = lists:foldl(fun({F, T}, G) ->
                                 delete_with_pid(G, F, T, Pid)
                         end, Dag, Edges),
    %% If terminated by supervisor, cleave any associated paths.
    NewState = case dict:find(Pid, PidTable) of
                   {ok, Hash} -> case Reason of
                                     shutdown ->
                                         %% A contraction process was shut down: restore the
                                         %% original path and purge all bookkeeping for it.
                                         CleavedState = cleave_associated_path(NewDag, Hash, State),
                                         CleavedState#state{optimized_map=dict:erase(Hash, OptMap),
                                                            pid_table=dict:erase(Pid, PidTable),
                                                            process_map=dict:erase(Pid, CleavedState#state.process_map)};
                                     _ ->
                                         State#state{dag=NewDag,
                                                     process_map=dict:erase(Pid, PM),
                                                     pid_table=dict:erase(Pid, PidTable)}
                                 end;
                   _ -> State#state{dag=NewDag,
                                    process_map=dict:erase(Pid, PM)}
               end,
    {noreply, NewState};

%% Periodic contraction tick (armed in init/1 when automatic
%% contraction is enabled). Cancelling the already-fired one-shot
%% timer is harmless; a fresh timer is re-armed afterwards.
handle_info(contract, #state{contraction_timer=OldTRef}=State) ->
    timer:cancel(OldTRef),
    NewState = contract_all(State),
    {ok, TRef} = timer:send_after(?CONTRACTION_INTERVAL, contract),
    {noreply, NewState#state{contraction_timer=TRef}};

%% Drain and log anything unexpected.
handle_info(Msg, State) ->
    _ = lager:warning("Unhandled messages ~p", [Msg]),
    {noreply, State}.
%% @private No cleanup required; the digraph dies with the process.
-spec terminate(term(), #state{}) -> term().
terminate(_Reason, _State) ->
    ok.

%% @private No state migration needed between code versions.
-spec code_change(term() | {down, term()}, #state{}, term()) -> {ok, #state{}}.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @doc Server-side implementation of add_edges/6.
%%
%% The write function arrives as `{Dst, WriteFun}'; repeating `Dst' in
%% the head asserts that the write destination is the edge target.
add_edges(Src, Dst, Pid, ReadFuns, TransFun, {Dst, WriteFun}, State) ->
    Dag = State#state.dag,
    Pm = State#state.process_map,
    OptMap = State#state.optimized_map,
    PidTable = State#state.pid_table,
    %% Add vertices only if they are either sources or sinks. (See add_if)
    %% All user-defined variables are tracked through the `declare` function.
    lists:foreach(fun(V) -> add_if_pid(Dag, V) end, Src),
    add_if_pid(Dag, Dst),
    %% Check if this edge is the replacement for an old edge in the graph.
    {NewOptMap, NewPidTable} = replace_if_restarted(OptMap, PidTable, Pid,
                                                    {ReadFuns,
                                                     TransFun,
                                                     {Dst, WriteFun}}),
    %% @todo This should happen before creating the process
    %%
    %% Otherwise the process acts on old data. This is only a problem
    %% with queries and reads, since they return the value of the vertex.
    %% Binds and updates are ok, since values will eventually propagate
    %% through the graph.
    %%
    %% Undo any optimizations involving these vertices.
    lists:foreach(fun(V) ->
                      cleave_if_contracted(Dag, V, NewOptMap)
                  end, [Dst | Src]),
    %% For all V in Src, make edge (V, Dst) with label {Pid, Read, Trans, Write}
    %% (where {Id, Read} = ReadFuns s.t. Id = V)
    %%
    %% NOTE(review): lists:nth(1, ...) assumes every V in Src has a
    %% matching entry in ReadFuns; it crashes otherwise — confirm callers
    %% always pass one read function per source.
    Status = lists:map(fun(V) ->
        Read = lists:nth(1, [ReadF || {Id, ReadF} <- ReadFuns, Id =:= V]),
        EdgeResult = digraph:add_edge(Dag, V, Dst, #edge_label{pid=Pid,
                                                               read=Read,
                                                               transform=TransFun,
                                                               write=WriteFun}),
        %% Determine depth; but ignore nodes that are transient,
        %% single-fire processes.
        case Dst of
            {_Id, _Type} ->
                Depth = depth(Dag, Dst, ?BASE_DEPTH),
                %% Re-create destination vertex with new depth
                %% (digraph:add_vertex/3 on an existing vertex only
                %% replaces the label).
                case digraph:vertex(Dag, Dst) of
                    {_, Label} ->
                        case Label of
                            Depth ->
                                %% Already correct depth; ignore.
                                ok;
                            Depth0 ->
                                VertexResult = digraph:add_vertex(Dag, Dst, Depth),
                                lager:info("Vertex: ~p re-created with depth ~p => ~p; result: ~p",
                                           [Dst, Depth0, Depth, VertexResult]),
                                lager:info("Vertex: ~p edges: ~p",
                                           [Dst, digraph:edges(Dag, Dst)])
                        end;
                    _ ->
                        ok
                end;
            _ ->
                ok
        end,
        EdgeResult
    end, Src),
    {R, St} = case lists:any(fun is_graph_error/1, Status) of
                  true ->
                      %% Sometimes if someone tries to read an object
                      %% that's not there, we'll try to make an edge to a
                      %% non-existent vertex and will trigger this
                      %% error. Ignore.
                      {ok, State};
                  false ->
                      %% Monitor the process so the 'DOWN' handler can
                      %% clean the edges up when it dies.
                      erlang:monitor(process, Pid),
                      %% For all V in Src, append Pid -> {V, Dst}
                      %% in the process map.
                      ProcessMap = lists:foldl(fun(El, D) ->
                                                   dict:append(Pid, {El, Dst}, D)
                                               end, Pm, Src),
                      {ok, State#state{process_map=ProcessMap}}
              end,
    {R, St#state{optimized_map = NewOptMap, pid_table = NewPidTable}}.
%% @doc Predicate: is the given digraph operation result an error tuple?
is_graph_error({error, _Reason}) -> true;
is_graph_error(_Other) -> false.
%% @doc Delete all edges between Src and Dst that carry the given pid.
%% The digraph is mutated in place and returned for convenience.
-spec delete_with_pid(digraph:graph(), lasp_vertex(), lasp_vertex(), term()) -> digraph:graph().
delete_with_pid(Graph, Src, Dst, Pid) ->
    Matching = [E || {E, _, _, #edge_label{pid=P}} <- get_direct_edges(Graph, Src, Dst),
                     P =:= Pid],
    lists:foreach(fun(E) -> digraph:del_edge(Graph, E) end, Matching),
    Graph.
%% @doc Return all direct edges linking V1 and V2.
%%
%% If V1 and V2 are not linked, return the empty list.
%%
%% Scans the edges emanating from V1 and keeps only the ones whose
%% target is V2. An edge id for which digraph:edge/2 returns false
%% (edge vanished concurrently) simply does not match and is skipped.
%%
-spec get_direct_edges(digraph:graph(),
                       lasp_vertex(), lasp_vertex()) -> list(lasp_edge()).
get_direct_edges(G, V1, V2) ->
    [Edge || EdgeId <- digraph:out_edges(G, V1),
             {_, _, To, _} = Edge <- [digraph:edge(G, EdgeId)],
             To =:= V2].
%% @doc Add a vertex to the dag, but only when it is a pid.
%%
%% We only add it if it isn't already present on the dag,
%% as adding the same vertex multiple times removes any
%% metadata (labels).
%%
-spec add_if_pid(digraph:graph(), lasp_vertex()) -> ok.
add_if_pid(Dag, Vertex) when is_pid(Vertex) ->
    case digraph:vertex(Dag, Vertex) of
        false -> digraph:add_vertex(Dag, Vertex);
        _Existing -> ok
    end;
add_if_pid(_Dag, _NotAPid) ->
    ok.
%%%===================================================================
%%% Contraction Functions
%%%===================================================================
%% @doc Return a list of contraction candidate paths in the graph.
%%
%% A contraction path is formed by two necessary endpoints, and
%% a list of unnecessary vertices connecting them.
%%
%% If no paths are found, the empty list is returned.
%%
-spec contraction_paths(digraph:graph()) -> list(contract_path()).
contraction_paths(G) ->
    Candidates = contraction_paths(G, digraph_utils:topsort(G), sets:new(), [[]]),
    %% The accumulator is seeded with one empty path; keep only
    %% non-empty results (avoids the O(n) length/1 emptiness check).
    [Path || [_ | _] = Path <- Candidates].
-spec contraction_paths(digraph:graph(),
                        list(lasp_vertex()),
                        sets:set(lasp_vertex()),
                        list(digraph:vertex())) -> list(contract_path()).
%% Walk the vertices (topological order from the caller), skipping
%% anything already visited, and collect each maximal run of
%% unnecessary vertices together with its single parent.
contraction_paths(G, [V | Vs], Visited, Acc) ->
    case sets:is_element(V, Visited) of
        true -> contraction_paths(G, Vs, Visited, Acc);
        _ -> case is_unnecessary(G, V) of
                 true ->
                     %% Collect the run of unnecessary vertices starting
                     %% at V; the last element of Path is the first
                     %% vertex that is necessary again.
                     Path = get_children_while(fun(El) ->
                                                   is_unnecessary(G, El)
                                               end, G, V),
                     AllVisited = lists:foldl(fun sets:add_element/2, Visited, Path),
                     %% We already know it only has one parent.
                     [Parent | _] = digraph:in_neighbours(G, V),
                     contraction_paths(G, Vs, AllVisited, [[Parent | Path] | Acc]);
                 false ->
                     contraction_paths(G, Vs, sets:add_element(V, Visited), Acc)
             end
    end;
contraction_paths(_, [], _, Acc) -> Acc.
%% @doc Recursively get all the children of a given vertex that satisfy
%% the given predicate.
%%
%% Returns a list of the children, in depth-first order, with the
%% first element that doesn't satisfy the predicate in the last
%% position of the list.
%%
%% If the given vertex has no children, or if it doesn't satisfy
%% the predicate, a list with it as the only element is returned.
%%
-spec get_children_while(fun((lasp_vertex()) -> boolean()),
                         digraph:graph(),
                         lasp_vertex()) -> list(lasp_vertex()).
get_children_while(Pred, G, V) ->
    lists:reverse(get_children_while(Pred, G, V, [])).

-spec get_children_while(fun((lasp_vertex()) -> boolean()),
                         digraph:graph(),
                         lasp_vertex(),
                         list(lasp_vertex())) -> list(lasp_vertex()).
get_children_while(Pred, G, V, Acc) ->
    case Pred(V) of
        true ->
            %% NOTE(review): flatmap hands the same Acc to every child,
            %% so accumulator elements could be duplicated when a vertex
            %% has more than one child. The only caller applies this to
            %% chains of single-child vertices (see is_unnecessary/2)
            %% where that cannot happen — confirm before reusing elsewhere.
            Res = lists:flatmap(fun(Child) ->
                                    get_children_while(Pred, G, Child, Acc)
                                end, digraph:out_neighbours(G, V)),
            Res ++ Acc ++ [V];
        false -> [V | Acc]
    end.
%% @doc Unnecessary vertex.
%%
%% A vertex is unnecessary iff its in and out degree are both one,
%% its single parent is a regular vertex (not a pid), and its single
%% child is a regular vertex with exactly one parent.
%%
%% Unnecessary vertices can be contracted in the graph.
%%
-spec is_unnecessary(digraph:graph(), lasp_vertex()) -> boolean().
is_unnecessary(G, V) ->
    digraph:in_degree(G, V) =:= 1
        andalso digraph:out_degree(G, V) =:= 1
        andalso begin
                    %% Both neighbour lists have exactly one element here.
                    [Parent] = digraph:in_neighbours(G, V),
                    [Child] = digraph:out_neighbours(G, V),
                    not is_pid(Parent) andalso maybe_unnecessary(G, Child)
                end.
%% @doc Unnecessary vertex candidate: a non-pid vertex with exactly
%% one parent. Pids are rejected before touching the graph.
-spec maybe_unnecessary(digraph:graph(), lasp_vertex()) -> boolean().
maybe_unnecessary(G, V) ->
    not is_pid(V) andalso digraph:in_degree(G, V) =:= 1.
%% @doc Replace the Pid in the optimized map if the old one was restarted.
%%
%% Removed vertices hold a reference to the Id (hash) of the process
%% that connects the endpoints of the path that was contracted.
%%
%% If this process is restarted, we have to update the pid in the
%% optimized map, otherwise vertices will hold an old reference.
%%
-spec replace_if_restarted(optimized_map(), pid_table(), pid(),
                           process_args()) -> {optimized_map(), pid_table()}.
replace_if_restarted(OptMap, PidTable, Pid, ProcessArgs) ->
    Hash = process_hash(ProcessArgs),
    case dict:find(Hash, OptMap) of
        {ok, {_OldPid, VSeq, Metadata}} ->
            %% Same process (by hash) was contracted before: point the
            %% stored entry at the fresh Pid and remember the reverse
            %% mapping so the hash can be found from the Pid later.
            {dict:store(Hash, {Pid, VSeq, Metadata}, OptMap),
             dict:store(Pid, Hash, PidTable)};
        error ->
            {OptMap, PidTable}
    end.
%% @doc Find and contract every suitable path in the graph, threading
%% the server state through each contraction.
-spec contract_all(#state{}) -> #state{}.
contract_all(#state{dag=Dag}=State) ->
    Paths = contraction_paths(Dag),
    lists:foldl(fun(Path, StateAcc) ->
                        contract(Dag, Path, StateAcc)
                end, State, Paths).
%% @doc Perform path contraction in the given sequence of vertices.
%%
%% The resulting edge represents a lasp process with the read
%% function of the first vertex, the write function of the last
%% and the composition of all inner transform functions.
%%
%% Given two consecutive edges, (v1, v2) = f and (v2, v3) = g, with
%% metadata:
%%
%% f = <r_f, t_f, w_f>
%%
%% g = <r_g, t_g, w_g>
%%
%% where `r`, `t` and `w` represent the read, transform and write
%% functions, we define the composition of `f` and `g` as
%%
%% g . f = <r_f, (t_g . t_f), w_g >
%%
%% where ( . ) is defined as the usual composition operator.
%% The result of this operation is a new edge h = (v1, v3).
%%
-spec contract(digraph:graph(), contract_path(), #state{}) -> #state{}.
contract(G, VSeq, State) ->
    %% A contract path always has at least two endpoints plus one
    %% inner vertex, so these matches are safe.
    [First, Second | _] = VSeq,
    Last = lists:last(VSeq),
    SndLast = lists:nth(length(VSeq) - 1, VSeq),
    %% Read function from the first vertex.
    ReadFun = lists:nth(1, get_read_functions(G, First, Second)),
    Read = {First, ReadFun},
    %% List of all transforming functions.
    TransFuns = collect_trans_funs(G, VSeq),
    %% Write function from the last vertex.
    WriteFun = lists:nth(1, get_write_functions(G, SndLast, Last)),
    Write = {Last, WriteFun},
    %% Since all transforming functions (with arity one) are
    %% of type (CRDT -> value), we need an intermediate
    %% function (value -> CRDT) to be able to compose them.
    %%
    %% The last function gets back the result from the last output.
    %%
    %% We define path contraction on those containing unnecessary
    %% vertices only, so we don't care for multi-arity functions.
    TransFun = fun({Id, T, Metadata, _OldValue}=X) ->
                   apply_sequentially(X, TransFuns, fun(NewValue) ->
                                                        {Id, T, Metadata, NewValue}
                                                    end, fun({_, _, _, V}) -> V end)
               end,
    ProcessHash = process_hash({[Read], TransFun, Write}),
    %% Manually start a new lasp process and add the edges to the graph.
    {ok, Pid} = lasp_process:start_manual_process([[Read], TransFun, Write]),
    {ok, NewState} = add_edges([First], Last, Pid,
                               [Read], TransFun, Write, State),
    NewDag = NewState#state.dag,
    OptMap = NewState#state.optimized_map,
    PidTable = NewState#state.pid_table,
    %% Remove the intermediate edges by terminating the associated processes.
    NewOptMap = remove_edges(NewDag, VSeq, ProcessHash, Pid, OptMap),
    NewState#state{optimized_map = NewOptMap, pid_table = dict:store(Pid, ProcessHash, PidTable)}.
%% @doc Remove intermediate edges in a contracted path.
%%
%% Deletes all intermediate edges in the path, and tags
%% all unnecessary vertices with the given Pid, that should
%% represent the resulting lasp process of the path contraction.
%%
-spec remove_edges(digraph:graph(), contract_path(), process_hash(), pid(), optimized_map()) -> optimized_map().
remove_edges(Dag, VSeq, Id, Pid, OptMap) ->
    %% Store process metadata in the optimized map
    Metadata = get_metadata(Dag, VSeq),
    %% Tag all unnecessary vertices in the path (everything except the
    %% two endpoints) with the hash of the new contraction process.
    UnnecesaryVertices = lists:sublist(VSeq, 2, length(VSeq) - 2),
    tag_vertices(Dag, UnnecesaryVertices, #vertex_label{process_pointer=Id}),
    %% Delete the intermediate edges and kill the associated processes.
    %% Termination is asynchronous; the 'DOWN' handler removes the
    %% edges of each terminated process.
    OldPids = collect_pids(Dag, VSeq),
    spawn_link(fun() ->
                   lists:foreach(fun(P) ->
                                     lasp_process_sup:terminate_child(lasp_process_sup, P)
                                 end, OldPids)
               end),
    dict:store(Id, {Pid, VSeq, Metadata}, OptMap).
%% @doc Check if a list of future edges involving contracted vertices introduce a loop.
%%
%% Checks optimized nodes so that we don't accidentally introduce
%% loops while a vertex is not connected.
%%
%% For example, A -> B -> C, B -> A is a loop, but if (A, B) is
%% optimized, we could make that edge. If we cleave after that,
%% trying to make (A, B) will fail.
%%
-spec optimized_cycle(digraph:graph(), list(lasp_vertex()), lasp_vertex(), optimized_map()) -> boolean().
optimized_cycle(G, From, To, OptMap) ->
    %% Source vertices that were removed as part of a contraction.
    OptimizedTails = lists:filter(fun(F) ->
                                      not (contracted(G, F) =:= false)
                                  end, From),
    case {OptimizedTails, contracted(G, To)} of
        %% Contracted -> Contracted forms a loop if the tails of both are
        %% the same, or if it exists a path from the tail of the child
        %% to the tail of the parent.
        {[_|_]=ContractedVertices, {true, ChildId}} ->
            ParentTails = lists:map(fun(V) ->
                                        {_, #vertex_label{process_pointer=ParentId}} = digraph:vertex(G, V),
                                        get_process_tail(ParentId, OptMap)
                                    end, ContractedVertices),
            ChildTail = get_process_tail(ChildId, OptMap),
            lists:any(fun(PTail) ->
                          not (digraph:get_path(G, ChildTail, PTail) =:= false)
                              orelse (ChildTail =:= PTail)
                      end, ParentTails);
        %% Contracted -> Uncontracted forms a loop if there exists a path
        %% from the child to the tail of the parent, or if the child is the
        %% source.
        {[_|_]=ContractedVertices, false} ->
            Tails = lists:map(fun(V) ->
                                  {_, #vertex_label{process_pointer = Id}} = digraph:vertex(G, V),
                                  get_process_tail(Id, OptMap)
                              end, ContractedVertices),
            lists:any(fun(Tail) ->
                          not (digraph:get_path(G, To, Tail) =:= false)
                              orelse (Tail =:= To)
                      end, Tails);
        %% Uncontracted -> Contracted forms a loop if there exists a path
        %% from the source of the child to the parent, or the source is
        %% the parent.
        {[], {true, Id}} ->
            Tail = get_process_tail(Id, OptMap),
            lists:any(fun(V) ->
                          not (digraph:get_path(G, Tail, V) =:= false)
                              orelse Tail =:= V
                      end, From);
        %% Both are uncontracted, we can let callee deal with this.
        {[], false} -> false
    end.
%% @doc Get the tail (first vertex of the stored path) of the process
%% that contracted a path. Crashes if the hash is not in the map.
-spec get_process_tail(process_hash(), optimized_map()) -> lasp_vertex().
get_process_tail(Id, OptMap) ->
    {_Pid, [Tail | _Rest], _Metadata} = dict:fetch(Id, OptMap),
    Tail.
%%%===================================================================
%%% Cleave Functions
%%%===================================================================

%% @doc If the given vertex was part of a contracted path, cleave it
%%
%% Contracted vertices contain a pointer to the Pid of the process
%% that forms the contraction of the path.
%%
%% The actual cleaving happens asynchronously: terminating the
%% contraction process fires the 'DOWN' handler, which restores the
%% path (see cleave_associated_path/3).
-spec cleave_if_contracted(digraph:graph(), lasp_vertex(), optimized_map()) -> ok.
cleave_if_contracted(G, Vertex, OptMap) ->
    case contracted(G, Vertex) of
        {true, Id} ->
            {Pid, _, _} = dict:fetch(Id, OptMap),
            spawn_link(fun() ->
                           lasp_process_sup:terminate_child(lasp_process_sup, Pid)
                       end),
            ok;
        false -> ok
    end.
%% @doc Cleave the path associated with the given process hash.
%%
%% If the hash identifies a process that contracted a path, untag the
%% removed vertices, restart all intermediate processes of the path
%% and re-add their edges. Returns the state unchanged if the hash is
%% unknown.
%%
cleave_associated_path(G, Hash, #state{optimized_map=OptMap}=State) ->
    case dict:find(Hash, OptMap) of
        error -> State;
        {ok, {_, VSeq, MetadataList}} ->
            %% Rebuild the per-edge process arguments from the stored
            %% metadata.
            ProcessArgs = unpack_optimized_map(VSeq, MetadataList),
            %% Clear the contraction tag so contracted/2 stops
            %% reporting these vertices as optimized.
            UnnecesaryVertices = lists:sublist(VSeq, 2, length(VSeq) - 2),
            tag_vertices(G, UnnecesaryVertices, []),
            lists:foldl(fun({Reads, Transform, {Dst, Write}}=Args, St) ->
                            Src = [To || {To, _} <- Reads],
                            {ok, Pid} = lasp_process:start_manual_process(tuple_to_list(Args)),
                            {ok, NewState} = add_edges(Src, Dst, Pid,
                                                       Reads, Transform, {Dst, Write}, St),
                            NewState
                        end, State, ProcessArgs)
    end.
%% @doc Get the process arguments of the given optimized map inner dict.
%%
%% Metadata element i holds the functions that connected vertex i to
%% vertex i+1 in the contracted path, so each element is rebuilt into
%% {[{Src, Read}], Transform, {Dst, Write}} process arguments.
-spec unpack_optimized_map(contract_path(), list(#process_metadata{})) -> list(process_args()).
unpack_optimized_map(VSeq, MetadataList) ->
    mapi(fun(Pos, El) ->
             ReadFun = El#process_metadata.read,
             Read = {lists:nth(Pos, VSeq), ReadFun},
             Transform = El#process_metadata.transform,
             WriteFun = El#process_metadata.write,
             Write = {lists:nth(Pos + 1, VSeq), WriteFun},
             {[Read], Transform, Write}
         end, MetadataList).
%% @doc Check if a vertex was optimized in the past.
%%
%% If it was optimized, also return the hash of the process that
%% contracted the path (stored in the vertex label by remove_edges/5),
%% usable as a key into the optimized map.
%%
%% Spec fixed: the stored pointer is a process_hash(), not a pid()
%% (see remove_edges/5 and get_process_tail/2).
-spec contracted(digraph:graph(), lasp_vertex()) -> {true, process_hash()} | false.
contracted(G, V) ->
    case digraph:vertex(G, V) of
        {_, #vertex_label{process_pointer=Id}} -> {true, Id};
        _ -> false
    end.
%% @doc Given a path contraction candidate in the graph, return the process
%% metadata from all intermediate edges.
%%
%% Used to build the optimized map.
%%
-spec get_metadata(digraph:graph(), contract_path()) -> list(#process_metadata{}).
get_metadata(G, [_ | Tail]=VSeq) ->
    %% Pair consecutive vertices; keep the first edge's metadata for
    %% each pair.
    zipwith(fun(Src, Dst) ->
                lists:nth(1, get_metadata(G, Src, Dst))
            end, VSeq, Tail).

%% @doc Get the process metadata for all edges between the given vertices.
-spec get_metadata(digraph:graph(),
                   lasp_vertex(),
                   lasp_vertex()) -> list(#process_metadata{}).
get_metadata(G, V1, V2) ->
    Edges = get_direct_edges(G, V1, V2),
    lists:map(fun({_, _, _, Metadata}) ->
                  #process_metadata{read=Metadata#edge_label.read,
                                    transform=Metadata#edge_label.transform,
                                    write=Metadata#edge_label.write}
              end, Edges).
%% @doc Tag every vertex in the given path with the given label.
%% Re-adding an existing vertex via digraph:add_vertex/3 simply
%% replaces its label.
-spec tag_vertices(digraph:graph(), contract_path(), #vertex_label{} | []) -> ok.
tag_vertices(Dag, VSeq, Label) ->
    [digraph:add_vertex(Dag, V, Label) || V <- VSeq],
    ok.
%%%===================================================================
%%% Utility Functions
%%%===================================================================

%% @doc Get the list of pids from the edges between V1 and V2.
-spec get_connecting_pids(digraph:graph(),
                          lasp_vertex(),
                          lasp_vertex()) -> list(pid()).
get_connecting_pids(G, V1, V2) ->
    get_edge_properties(fun({_, _, _, E}) ->
                            E#edge_label.pid
                        end, G, V1, V2).

%% @doc Get all pids labelling the edges of the given path.
-spec collect_pids(digraph:graph(), contract_path()) -> list(pid()).
collect_pids(G, [_ | T]=Seq) ->
    lists:flatten(zipwith(fun(Src, Dst) ->
                              get_connecting_pids(G, Src, Dst)
                          end, Seq, T)).

%% @doc Get the list of read functions from the edges between V1 and V2.
-spec get_read_functions(digraph:graph(),
                         lasp_vertex(),
                         lasp_vertex()) -> list(function()).
get_read_functions(G, V1, V2) ->
    get_edge_properties(fun({_, _, _, E}) ->
                            E#edge_label.read
                        end, G, V1, V2).

%% @doc Get the list of transform functions from the edges between V1 and V2.
-spec get_transform_functions(digraph:graph(),
                              lasp_vertex(),
                              lasp_vertex()) -> list(function()).
get_transform_functions(G, V1, V2) ->
    get_edge_properties(fun({_, _, _, E}) ->
                            E#edge_label.transform
                        end, G, V1, V2).

%% @doc Get all transform functions labelling the edges of the given path.
-spec collect_trans_funs(digraph:graph(), contract_path()) -> list(function()).
collect_trans_funs(G, [_ | T]=Seq) ->
    lists:flatten(zipwith(fun(Src, Dst) ->
                              get_transform_functions(G, Src, Dst)
                          end, Seq, T)).

%% @doc Get the list of write functions from the edges between V1 and V2.
-spec get_write_functions(digraph:graph(),
                          lasp_vertex(),
                          lasp_vertex()) -> list(function()).
get_write_functions(G, V1, V2) ->
    get_edge_properties(fun({_, _, _, E}) ->
                            E#edge_label.write
                        end, G, V1, V2).

%% @doc Map an extractor function over the annotated edges between V1 and V2.
-spec get_edge_properties(function(),
                          digraph:graph(),
                          lasp_vertex(),
                          lasp_vertex()) -> list(pid() | function()).
get_edge_properties(Fn, G, V1, V2) ->
    lists:map(Fn, get_direct_edges(G, V1, V2)).
%% @doc Zip two lists with a combining function, trimming to the
%% shorter list (unlike lists:zipwith/3, which requires equal lengths).
%%
%% zipwith(fun(X, Y) -> {X, Y} end, [1,2,3], [1,2]).
%% => [{1,1}, {2,2}]
%%
-spec zipwith(function(), list(any()), list(any())) -> list(any()).
zipwith(Combine, [X | RestX], [Y | RestY]) ->
    [Combine(X, Y) | zipwith(Combine, RestX, RestY)];
zipwith(Combine, _, _) when is_function(Combine, 2) ->
    [].
%% @doc Same as lists:map/2, but the callback additionally receives the
%% 1-based index of the element as its first argument.
mapi(F, List) ->
    mapi(F, List, 1).

mapi(F, [Head | Rest], Index) ->
    [F(Index, Head) | mapi(F, Rest, Index + 1)];
mapi(F, [], _Index) when is_function(F, 2) ->
    [].
%% @doc Thread a value through a list of functions.
%%
%% Takes an initial value, a list of functions, and two transforming
%% functions. The first one transforms the output of a function into
%% the input of the next one in the list. The second transforms the
%% output of the final function in the list.
%%
%% When Int and Final are the identity function, apply_sequentially
%% is equivalent to applying X to the composition of all functions
%% in the list.
%%
-spec apply_sequentially(any(), list(function()), function(), function()) -> any().
apply_sequentially(X, Funs, Int, Final) ->
    Final(lists:foldl(fun(F, Acc) -> Int(F(Acc)) end, X, Funs)).
%% @doc Get an unique identifier for a process in the graph.
%%
%% The hash is derived from the process arguments, so a restarted
%% process with identical arguments maps to the same identifier
%% (see replace_if_restarted/4).
-spec process_hash(process_args()) -> process_hash().
process_hash(Args) ->
    erlang:phash2(Args).
%%%===================================================================
%%% .DOT export functions
%%%===================================================================

%% @doc Render the graph as a dot-format string.
%%
%% Returns {error, no_data} when there is no connected vertex.
to_dot(Graph) ->
    %% Only render connected vertices, in topological order.
    Vertices = lists:filter(fun(V) ->
                    not (digraph:in_degree(Graph, V) =:= 0
                         andalso digraph:out_degree(Graph, V) =:= 0)
                end, digraph_utils:topsort(Graph)),
    case Vertices of
        [] -> {error, no_data};
        VertexList ->
            %% Build an iolist instead of folding with ++ (the old
            %% accumulation was quadratic); the final
            %% unicode:characters_to_list/1 flattens everything.
            VertexLines = [[v_str(V),
                            " [fontcolor=black, style=filled, fillcolor=\"#613B93\"];\n"]
                           || V <- VertexList],
            DrawedVertices = ["digraph dag {\n" | VertexLines],
            {ok, unicode:characters_to_list(write_edges(Graph, VertexList, [], DrawedVertices) ++ "}\n")}
    end.
%% @private Append a dot line for each outgoing edge of the given
%% vertices to Result.
%%
%% NOTE(review): edges pointing at a vertex visited earlier are dropped
%% entirely, so some connections may be missing from the rendering —
%% confirm this is intended.
write_edges(G, [V | Vs], Visited, Result) ->
    Edges = lists:map(fun(E) -> digraph:edge(G, E) end, digraph:out_edges(G, V)),
    R = lists:foldl(fun({_, _, To, #edge_label{pid=Pid}}, Acc) ->
                        case lists:member(To, Visited) of
                            true -> Acc;
                            false ->
                                Acc ++ v_str(V) ++ " -> " ++ v_str(To) ++
                                    " [label=" ++ erlang:pid_to_list(Pid) ++ "];\n"
                        end
                    end, Result, Edges),
    write_edges(G, Vs, [V | Visited], R);
write_edges(_G, [], _Visited, Result) ->
    Result.
%% @doc Generate an unique dot identifier for a vertex: process
%% vertices render as their pid, data vertices as a hash of their id.
v_str(V) when is_pid(V) ->
    erlang:pid_to_list(V);
v_str({Id, _Type}) ->
    erlang:integer_to_list(erlang:phash2(Id)).
%% @private
%% Compute the depth of V by recursing over its data-vertex ancestors,
%% returning Max when none remain.
depth(G, V, Max) ->
    InNeighbors = digraph:in_neighbours(G, V),
    %% Filter out single-fire nodes that are identified by process
    %% identifier only.
    FilterFun = fun({_Id, _Type}) ->
                    true;
                   (_) ->
                    false
                end,
    Neighbors = lists:filter(FilterFun, InNeighbors),
    case Neighbors of
        [] ->
            Max;
        _ ->
            %% NOTE(review): the fold's accumulator restarts at
            %% ?BASE_DEPTH rather than Max; confirm this is intended
            %% for recursive calls where Max exceeds ?BASE_DEPTH.
            lists:foldl(fun(V1, MaxAcc) ->
                            max(MaxAcc, depth(G, V1, Max + 1)) end,
                        ?BASE_DEPTH, Neighbors)
    end.
%%
%% Copyright (c) 2018 <NAME>
%% All rights reserved.
%% Distributed under the terms of the MIT License. See the LICENSE file.
%%
%% SIP Date header
%%
-module(ersip_hdr_date).

%% NOTE(review): now/0 shares its name with the auto-imported
%% erlang:now/0 BIF; confirm the target OTP versions accept this
%% without a -compile({no_auto_import, [now/0]}) directive.
-export([make/1,
         make/2,
         make_key/1,
         date/1,
         time/1,
         now/0,
         is_valid/1,
         parse/1,
         build/2,
         assemble/1
        ]).
-export_type([datetime/0, date/0, time/0]).

%%%===================================================================
%%% Types
%%%===================================================================

%% A parsed Date header value: a tagged calendar datetime.
-type datetime() :: {date, calendar:datetime()}.
-type date() :: calendar:date().
-type time() :: calendar:time().

%%%===================================================================
%%% API
%%%===================================================================
-spec make(ersip_hdr:header()) -> datetime().
%% @doc Build a datetime() from a raw SIP header.
%% Crashes with the parse error tuple as the reason when the header does
%% not contain a valid date.
make(Header) ->
case parse(Header) of
{ok, DateTime} ->
DateTime;
Error ->
error(Error)
end.
%% @doc Current UTC time, tagged as a datetime() value.
-spec now() -> datetime().
now() -> {date, calendar:universal_time()}.
%% @doc Pair a calendar date and time into the tagged datetime() form.
%% No validation is performed here; see is_valid/1.
-spec make(date(), time()) -> datetime().
make(Date, Time) ->
{date, {Date, Time}}.
%% @doc Datetimes are their own comparison key.
-spec make_key(datetime()) -> datetime().
make_key(DT) ->
DT.
%% @doc Extract the date component.
-spec date(datetime()) -> date().
date({date, {Date, _}}) -> Date.
%% @doc Extract the time component.
-spec time(datetime()) -> time().
time({date, {_, Time}}) -> Time.
%% @doc True when the wrapped value is a well-formed calendar date/time.
-spec is_valid(datetime()) -> boolean().
is_valid({date, _} = DT) ->
is_correct_time(time(DT)) andalso calendar:valid_date(date(DT));
is_valid(_) ->
false.
-spec parse(ersip_hdr:header()) -> Result when
Result :: {ok, datetime()}
| {error, Error},
Error :: bad_timezone
| bad_datetime
| wrong_weekday
| incorrect_date
| incorrect_time
| no_datetime.
%% @doc Parse the header's raw value into a datetime().
%% NOTE(review): only the empty and single-value cases are matched; a
%% header carrying several raw values raises case_clause — confirm that
%% is the intended contract.
parse(Header) ->
case ersip_hdr:raw_values(Header) of
[] ->
{error, no_datetime};
[DateIOList] ->
parse_datetime(iolist_to_binary(DateIOList))
end.
-spec build(HeaderName :: binary(), datetime()) -> ersip_hdr:header().
%% @doc Build a complete SIP header carrying the assembled date value.
build(HdrName, DateTime) ->
Hdr = ersip_hdr:new(HdrName),
ersip_hdr:add_value(assemble(DateTime), Hdr).
-spec assemble(datetime()) -> [binary(), ... ].
%% @doc Serialize the datetime as a single-element value list.
assemble(DateTime) ->
[str_impl(DateTime)].
%%%===================================================================
%%% Internal implementation
%%%===================================================================
%% ?RANGE: inclusive numeric range check, usable inside guards.
-define(RANGE(X, From, To), (X >= From andalso X=< To)).
%% ?PREFIX: expands to a lex/2 clause that consumes the literal prefix S
%% from the binary and pushes token Class onto the accumulator.
-define(PREFIX(S, Class), lex(<<S, Rest/binary>>, Acc) -> lex(Rest, [Class | Acc])).
-define(SPACE, 32).
%% Lower-case, tokenize, then structurally match the token list.
parse_datetime(Str) ->
S = ersip_bin:to_lower(Str),
L = lex(S,[]),
match(L).
%% RFC1123
%% Fields in the reverse order
%% lex/2 prepends tokens, so the list head corresponds to the *end* of the
%% input string; the successful pattern below therefore reads the date
%% back-to-front (timezone first, weekday last).
match([{timezone, gmt},
{int, S}, colon, {int, Min}, colon, {int, H},
{int, Y}, {month, M}, {int, D},
comma, {wkday, W}]) ->
correct_datetime({{Y, M, D}, {H, Min, S}}, W);
%% Any non-GMT timezone token, or a numeric +/- offset, is rejected.
match([{timezone, _TZ} | _]) -> {error, bad_timezone};
match([{int, _TZ}, plus | _]) -> {error, bad_timezone};
match([{int, _TZ}, minus | _]) -> {error, bad_timezone};
match(_A) -> {error, bad_datetime}.
%% Tokenizer. Produces the token list in reverse input order (tokens are
%% prepended to Acc); whitespace is dropped, digit runs become {int, N},
%% known words become month/weekday/timezone tokens, and any other
%% character falls through to the final clause as a raw codepoint.
lex(<<>>, Acc) -> Acc;
lex(<<?SPACE , Rest/binary>>, Acc) -> lex(Rest, Acc);
lex(<< D/utf8, Rest/binary>>, Acc) when ?RANGE(D, $0, $9) ->
{Num, R1} = fetch_digits(Rest, D - $0),
lex(R1, [{int, Num}| Acc]);
?PREFIX($:, colon);
?PREFIX($,, comma);
?PREFIX($+, plus);
?PREFIX($-, minus);
?PREFIX("jan", {month, 1});
?PREFIX("feb", {month, 2});
?PREFIX("mar", {month, 3});
?PREFIX("apr", {month, 4});
?PREFIX("may", {month, 5});
?PREFIX("jun", {month, 6});
?PREFIX("jul", {month, 7});
?PREFIX("aug", {month, 8});
?PREFIX("sep", {month, 9});
?PREFIX("oct", {month, 10});
?PREFIX("nov", {month, 11});
?PREFIX("dec", {month, 12});
?PREFIX("mon", {wkday, 1});
?PREFIX("tue", {wkday, 2});
?PREFIX("wed", {wkday, 3});
?PREFIX("thu", {wkday, 4});
?PREFIX("fri", {wkday, 5});
?PREFIX("sat", {wkday, 6});
?PREFIX("sun", {wkday, 7});
?PREFIX("gmt", {timezone, gmt}); % according to rfc can be GMT only
?PREFIX("utc", {timezone, utc});
?PREFIX("ut", {timezone, ut });
?PREFIX("est", {timezone, est});
?PREFIX("edt", {timezone, edt});
?PREFIX("mst", {timezone, mst});
?PREFIX("mdt", {timezone, mdt});
?PREFIX("pst", {timezone, pst});
?PREFIX("pdt", {timezone, pdt});
%% Unknown character: kept as a bare integer so match/1 falls through to
%% the bad_datetime clause.
lex(<<D/utf8, Rest/binary>>, Acc) -> lex(Rest, [D | Acc]).
%% Consume a run of ASCII digits from the front of the binary, folding
%% them into the running integer accumulator. Returns {Value, Remainder}.
fetch_digits(<<Digit/utf8, Rest/binary>>, Acc) when ?RANGE(Digit, $0, $9) ->
    fetch_digits(Rest, Acc * 10 + (Digit - $0));
fetch_digits(Rest, Acc) ->
    {Acc, Rest}.
%% Each checker returns a tagged boolean ({t|d|w, Bool}) so that the
%% badmatch raised in correct_datetime/2 identifies which check failed.
correct_time(HMS) -> {t, is_correct_time(HMS)}.
correct_date(Date) -> {d, calendar:valid_date(Date)}.
correct_weekday(Date, Weekday) -> {w, Weekday == calendar:day_of_the_week(Date)}.
%% Validate the parsed fields; the catch clauses translate the tag of the
%% failed match into the corresponding error reason.
correct_datetime({Date, Time} = DateTime, Weekday) ->
try
{t, true} = correct_time(Time),
{d, true} = correct_date(Date),
{w, true} = correct_weekday(Date, Weekday),
{ok, {date, DateTime}}
catch
_: {badmatch, {w, _}} -> {error, wrong_weekday};
_: {badmatch, {d, _}} -> {error, incorrect_date};
_: {badmatch, {t, _}} -> {error, incorrect_time}
end.
%% Render as an RFC1123 date string. The stored value is UTC; it is first
%% converted to local time — presumably because httpd_util:rfc1123_date/1
%% expects local time and converts back to GMT internally, making the
%% round trip preserve the instant. TODO confirm against httpd_util docs.
str_impl({date, DateTime}) ->
list_to_binary(httpd_util:rfc1123_date(calendar:universal_time_to_local_time(DateTime))).
is_correct_time({H, M, S}) -> ?RANGE(S, 0, 59) andalso ?RANGE(H, 0, 23) andalso ?RANGE(M, 0, 59). | src/ersip_hdr_date.erl | 0.533884 | 0.450299 | ersip_hdr_date.erl | starcoder |
% MIT License
% Copyright (c) 2020 <NAME>
% Permission is hereby granted, free of charge, to any person obtaining a copy
% of this software and associated documentation files (the "Software"), to deal
% in the Software without restriction, including without limitation the rights
% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
% copies of the Software, and to permit persons to whom the Software is
% furnished to do so, subject to the following conditions:
% The above copyright notice and this permission notice shall be included in all
% copies or substantial portions of the Software.
% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
% SOFTWARE.
-module(lesson3).
-export([sum/1, sum2/1, len/1, dna_calc/1, cag_count/1, seq/1, concat/2,
strip_abc/1, strip_abc2/1, reverse/1, verbose_insert/2, ins_sort/1]).
%% sum/1: list sum written with hd/tl accessors (teaching contrast with
%% the pattern-matching version below).
sum([]) -> 0;
sum(L) -> hd(L) + sum(tl(L)).
%% sum2/1: same sum via head|tail pattern matching.
sum2([]) -> 0;
sum2([H|T]) -> H + sum2(T).
%% len/1: list length by structural recursion.
len([]) -> 0;
len([_|T]) -> 1 + len(T).
%% dna_calc/5: count a/t/g/c nucleotides, returning the four counts plus
%% the GC-content percentage.
%% NOTE(review): dna_calc([]) divides by zero (badarith) and any atom
%% other than a/t/g/c raises function_clause — confirm acceptable for
%% this teaching module.
dna_calc([], A, T, G, C) ->
{A, T, G, C, (G + C) / (A + T + G + C) * 100};
dna_calc([a|Tail], A, T, G, C) -> dna_calc(Tail, A + 1, T, G, C);
dna_calc([t|Tail], A, T, G, C) -> dna_calc(Tail, A, T + 1, G, C);
dna_calc([g|Tail], A, T, G, C) -> dna_calc(Tail, A, T, G + 1, C);
dna_calc([c|Tail], A, T, G, C) -> dna_calc(Tail, A, T, G, C + 1).
%% dna_calc/1: public entry point with zeroed counters.
dna_calc(L) -> dna_calc(L, 0, 0, 0, 0).
%% cag_count/1: count [c,a,g] codons, consuming the list three elements
%% at a time (non-sliding).
%% BUG FIX: the original had no clause for a leftover tail of one or two
%% elements (e.g. cag_count([a]) raised function_clause); such a short
%% remainder cannot contain a codon, so it counts as 0.
cag_count([]) -> 0;
cag_count([c, a, g | T]) -> 1 + cag_count(T);
cag_count([_, _, _ | T]) -> cag_count(T);
cag_count([_ | _]) -> 0.
%% seq/1: the descending list [N, N-1, ..., 1]; seq(0) is [].
seq(N) -> seq(N, []).

seq(0, Acc) -> lists:reverse(Acc);
seq(K, Acc) -> seq(K - 1, [K | Acc]).

%% concat/2: put the elements of A in front of B (structural append).
concat(A, B) -> lists:foldr(fun(H, Acc) -> [H | Acc] end, B, A).
%% Drop a leading "abc" if present (character-pattern formulation).
strip_abc([$a, $b, $c | Rest]) -> Rest;
strip_abc(Other) -> Other.

%% Same behaviour written with the string-prefix match syntax.
strip_abc2("abc" ++ Rest) -> Rest;
strip_abc2(Other) -> Other.
%% reverse/1: tail-recursive list reversal with an explicit accumulator.
reverse(L) -> reverse(L, []).

reverse([], Acc) -> Acc;
reverse([Head | Tail], Acc) -> reverse(Tail, [Head | Acc]).
%% Insert X into an (assumed sorted) list, keeping it sorted.
%% Written with an explicit case on the comparison (teaching contrast
%% with the guard-based insert/2 below).
verbose_insert(X, []) -> [X];
verbose_insert(X, [H | T] = L) ->
    case X < H of
        true -> [X | L];
        false -> [H | verbose_insert(X, T)]
    end.
%% insert/2: insert X before the first element that is not smaller.
insert(X, []) -> [X];
insert(X, [H | _] = L) when X < H -> [X | L];
insert(X, [H | T]) -> [H | insert(X, T)].

%% ins_sort/1: insertion sort as a right fold of insert/2 — identical to
%% the original explicit recursion ins_sort([H|T]) -> insert(H, ins_sort(T)).
%% (Also removes the extraction garbage that trailed the original line.)
ins_sort(L) -> lists:foldr(fun insert/2, [], L).
%%
%% @doc Various functions to work with prime numbers.
%%
-module(primes).
-author("<NAME>").
-export([is_prime/1, primes_upto/1, random_prime/2]).
-export([pollard_p1/2]).
%%
%% @doc Returns `true' if the given `N' is a prime number.
%%
-spec is_prime(N :: pos_integer()) -> boolean().
%% NOTE(review): N = 1 (or 0) falls through every miller_rabin/1 clause
%% and raises function_clause — the spec says pos_integer(), so confirm
%% whether 1 should be handled explicitly.
is_prime(N) -> miller_rabin(N).
%% Miller-Rabin probabilistic primality test.
%% maths:factor2/1 presumably returns {S, T} with N - 1 = S * 2^T — TODO
%% confirm against the maths module.
miller_rabin(2) -> true;
miller_rabin(N) when N rem 2 =:= 0 -> false;
miller_rabin(N) when 3 =< N, N rem 2 =:= 1 ->
{S, T} = maths:factor2(N - 1),
miller_rabin(N, S, T, 0).
%%
%% Keep track of the probability of a false result in `K'.
%% The probability is at most 2^-K.
%% Loop until the probability of a false result is small enough.
%%
miller_rabin(N, S, T, K) when K < 128 ->
A = rnd:random(2, N - 1),
case maths:mod_exp(A, S, N) of
1 -> miller_rabin(N, S, T, K + 2);
V -> case mr_squaring(0, V, N, T) of
composite -> false;
candidate -> miller_rabin(N, S, T, K + 2)
end
end;
miller_rabin(_, _, _, _) -> true.
%%
%% The sequence v, v^2,..., v^2^t must finish on the value 1,
%% and the last value not equal to 1 must be n-1 if n is a prime.
%%
mr_squaring(_I, V, N, _T) when V =:= N - 1 -> candidate;
mr_squaring(I, _V, _N, T) when I =:= T - 1 -> composite;
mr_squaring(I, V, N, T) -> mr_squaring(I + 1, maths:mod_exp(V, 2, N), N, T).
%%
%% @doc Find all prime numbers up to specified value.
%% Works relatively fast for `N < 5,000,000'.
%%
-spec primes_upto(N :: 2..5000000) -> [integer()].
primes_upto(N) when 2 =< N, N =< 5000000 ->
    trial_sieve(math:sqrt(N), lists:seq(2, N)).

%%
%% Sieve of Eratosthenes, recursive formulation.
%% Originally by: <NAME>
%% See also: https://github.com/ndpar/algorithms/blob/master/mymath.erl
%%
%% While the head prime P is at most sqrt(N), keep P and drop its
%% multiples from the candidates; once P exceeds the limit, every
%% remaining candidate is already prime.
trial_sieve(Limit, [P | Candidates]) when P =< Limit ->
    [P | trial_sieve(Limit, [X || X <- Candidates, X rem P =/= 0])];
trial_sieve(_Limit, Primes) ->
    Primes.
%%
%% @doc Returns a random prime in the interval `[L, U]'.
%%
-spec random_prime(L :: pos_integer(), U :: pos_integer()) -> pos_integer().
%% The attempt budget scales with the bit length of U.
random_prime(L, U) when 2 < L, L =< U ->
random_prime(L, U, 100 * (maths:ilog2(U) + 1) - 1).
%% Draw candidates until one is prime or the budget R is exhausted.
%% NOTE(review): when R reaches 0 there is no matching clause, so the
%% failure mode is a function_clause crash rather than a tagged error —
%% confirm this is intended.
random_prime(L, U, R) when 0 < R ->
N = rnd:random(L, U),
case is_prime(N) of
true -> N;
false -> random_prime(L, U, R - 1)
end.
%%
%% @doc Pollard’s `p − 1' algorithm for factoring integers.
%%
%% See [MvOV1] Chapter 3.2.3. Algorithm 3.14
%%
-spec pollard_p1(pos_integer(), pos_integer()) -> pos_integer() | error.
%% B is the smoothness bound; a larger B finds more factors but costs more.
pollard_p1(N, B) ->
pollard_p1(N, B, rnd:random(2, N - 1)).
%% If the random base A already shares a factor with N, return it directly.
pollard_p1(N, B, A) ->
case maths:gcd(A, N) of
1 -> pollard_p1_(N, A, primes_upto(B));
D -> D
end.
%% Raise A to Q^L for each prime Q up to B; a nontrivial gcd of A-1 and N
%% exposes a factor. Returns `error' when the bound is exhausted.
pollard_p1_(_, _, []) -> error;
pollard_p1_(N, A, [Q | T]) ->
L = erlang:trunc(maths:log(Q, N)),
A1 = maths:mod_exp(A, maths:pow(Q, L), N),
case maths:gcd(A1 - 1, N) of
1 -> pollard_p1_(N, A1, T);
N -> pollard_p1_(N, A1, T);
D -> D
end.
%% =============================================================================
%% Unit tests
%% =============================================================================
%% NOTE(review): these EUnit tests are compiled unconditionally (no
%% -ifdef(TEST) guard); random_prime_test asserts a fixed value 103 —
%% deterministic only because 103 is the sole prime in [102, 105].
-include_lib("eunit/include/eunit.hrl").
is_prime_test() ->
?assert(is_prime(17)),
?assert(is_prime(283)).
composite_test() ->
?assertNot(is_prime(100)),
?assertNot(is_prime(105)).
primes_upto_test() ->
?assertEqual([2, 3, 5, 7, 11, 13], primes_upto(15)).
random_prime_test() ->
?assertEqual(103, random_prime(102, 105)).
-ifdef(STOCHASTIC_TEST).
pollard_p1_test_() -> [
?_assertEqual(7001, pollard_p1(7451 * 7001, 7)), % 7450 = 2 5 5 149, 7000 = 2 2 2 5 5 5 7
?_assertEqual(5281, pollard_p1(3607 * 5281, 11)), % 3606 = 2 3 601, 5280 = 2 2 2 2 2 3 5 11
?_assertEqual(5741, pollard_p1(5939 * 5741, 41)), % 5938 = 2 2969, 5740 = 2 2 5 7 41
?_assertEqual(error, pollard_p1(5939 * 5741, 39)), % 39 < 41 and 2969
?_assertError(function_clause, pollard_p1(7001, 11))]. % 7001 is prime
-endif. | lib/ndpar/src/primes.erl | 0.757436 | 0.695894 | primes.erl | starcoder |
%% @doc
%%
%% Formats metric output according to the [Prometheus exposition format]
%% (https://prometheus.io/docs/instrumenting/exposition_formats/).
%%
%% Example output:
%% <pre>
%% # A counter, which has the following representation
%% # HELP http_requests_total The total number of HTTP requests.
%% # TYPE http_requests_total counter
%% http_requests_total{method="post",code="200"} 1027 1395066363000
%% http_requests_total{method="post",code="400"} 3 1395066363000
%%
%% # A histogram, which has a complex representation in the text format:
%% # HELP http_request_duration_seconds A histogram of the request duration.
%% # TYPE http_request_duration_seconds histogram
%% http_request_duration_seconds_bucket{le="0.05"} 24054
%% http_request_duration_seconds_bucket{le="0.1"} 33444
%% http_request_duration_seconds_bucket{le="0.2"} 100392
%% http_request_duration_seconds_bucket{le="0.5"} 129389
%% http_request_duration_seconds_bucket{le="1"} 133988
%% http_request_duration_seconds_bucket{le="+Inf"} 144320
%% http_request_duration_seconds_sum 53423
%% http_request_duration_seconds_count 144320
%%
%% # Finally a summary, which has a complex representation, too:
%% # HELP rpc_duration_seconds A summary of the RPC duration in seconds.
%% # TYPE rpc_duration_seconds summary
%% rpc_duration_seconds{quantile="0.01"} 3102
%% rpc_duration_seconds{quantile="0.05"} 3272
%% rpc_duration_seconds{quantile="0.5"} 4773
%% rpc_duration_seconds{quantile="0.9"} 9001
%% rpc_duration_seconds{quantile="0.99"} 76656
%% rpc_duration_seconds_sum 1.7560473e+07
%% rpc_duration_seconds_count 2693
%% </pre>
%%
%% @end
-module(prometheus_format).
-behaviour(metrics_reader_format).
-include("metrics_reader.hrl").
-export([histogram/3, counter/3, combine_lines/2]).
-xref_ignore([histogram/3]).
-spec counter(Name :: [binary()], tags(), counter()) -> binary().
%% @doc Render a counter in Prometheus text format: a "# HELP"/"# TYPE"
%% prologue followed by one sample line with the value and millisecond
%% timestamp. Name parts are joined with "_" to form the metric name.
counter(Name, Tags, {Value, Ts})
when is_list(Name),
is_integer(Ts), Ts > 0,
is_integer(Value) ->
MetricName = combine(Name, <<"_">>),
Prologue = emit_prologue(<<"counter">>, MetricName),
Series = emit_series(MetricName, Tags, Value),
TsBin = list_to_binary(integer_to_list(Ts)),
Summary = <<Series/binary, " ", TsBin/binary>>,
combine_lines(Prologue, Summary).
-spec histogram(Name :: [binary()], tags(), histogram()) -> binary().
%% @doc Render a histogram's statistics as Prometheus text.
%% NOTE(review): despite the "histogram" TYPE tag, emit_summary/3 emits
%% quantile/statistic series rather than cumulative _bucket lines —
%% confirm the intended exposition type (summary vs histogram).
histogram(Name, Tags, Histogram)
when is_list(Name),
is_list(Tags) ->
MetricName = combine(Name, <<"_">>),
Prologue = emit_prologue(<<"histogram">>, MetricName),
Summary = emit_summary(Histogram, Tags, MetricName),
combine_lines(Prologue, Summary).
-spec combine_lines(L1 :: binary(), L2 :: binary()) -> binary().
%% @doc Join two rendered fragments with a newline; an empty side
%% collapses (no stray separator is emitted).
combine_lines(L1, L2) when is_binary(L1), is_binary(L2) ->
combine_two(L1, L2, <<"\n">>).
%%====================================================================
%% Internal functions
%%====================================================================
%% Build the "# HELP"/"# TYPE" preamble for a metric.
%% NOTE(review): the HELP line carries only the metric name, no help
%% text — confirm whether a description should follow the name.
emit_prologue(Type, Name) when is_binary(Type) ->
Help = <<"# HELP ", Name/binary>>,
Type1 = <<"# TYPE ", Name/binary, " ", Type/binary>>,
combine_lines(Help, Type1).
%% One sample line: `name{label="value",...} value'.
emit_series(Name, Labels, Value) when is_binary(Name) ->
LabelPairs = emit_labels(Labels),
ValueBin = v2b(Value),
<<Name/binary, LabelPairs/binary, " ", ValueBin/binary>>.
%% Render the label set; no braces at all when there are no labels.
emit_labels([]) -> <<"">>;
emit_labels(Labels) when is_list(Labels) ->
LabelPairs = [label_pair(Label) || Label <- Labels],
LabelPairs1 = combine(LabelPairs, <<",">>),
<<${, LabelPairs1/binary, $}>>.
%% `key="value"'. The key must be a charlist; the value goes through v2b.
%% NOTE(review): values are not escaped — embedded `"' or `\' in a label
%% value would corrupt the output. Confirm inputs are controlled.
label_pair({Label, Value}) ->
LabelBin = list_to_binary(Label),
ValueBin = v2b(Value),
<<LabelBin/binary, $=, $", ValueBin/binary, $">>.
%% @doc Render every known statistic of a histogram proplist as a series
%% line, accumulating them newline-separated.
%% REFACTOR: the original repeated ten structurally identical clauses
%% (min/max/means/median/variance/stddev/skewness/kurtosis); they are
%% collapsed into one data-driven clause. Behaviour is unchanged:
%% unknown entries — including percentile lists that do not match the
%% exact 50/75/95/99/999 shape — are skipped.
emit_summary(Histogram, Tags, Name) ->
    emit_summary(Histogram, Name, Tags, <<>>).

%% The exact five-quantile shape becomes one series per quantile label.
emit_summary([{percentile,
               [{50, P50}, {75, P75}, {95, P95}, {99, P99}, {999, P999}]
              } | T], Name, Tags, Acc) ->
    Quantiles =
        [emit_series(Name, [{"quantile", Q} | Tags], round(V))
         || {Q, V} <- [{"0.5", P50}, {"0.75", P75}, {"0.95", P95},
                       {"0.99", P99}, {"0.999", P999}]],
    Combined = combine(Quantiles, <<"\n">>),
    emit_summary(T, Name, Tags, combine_lines(Acc, Combined));
%% Scalar statistics share one rendering path: `<name>_<stat>' series
%% with the rounded value; anything not in the known list is skipped.
emit_summary([{Stat, V} | T], Name, Tags, Acc) ->
    Known = [min, max, arithmetic_mean, geometric_mean, harmonic_mean,
             median, variance, standard_deviation, skewness, kurtosis],
    case lists:member(Stat, Known) of
        true ->
            Suffix = atom_to_binary(Stat, utf8),
            Series = emit_series(<<Name/binary, "_", Suffix/binary>>,
                                 Tags, round(V)),
            emit_summary(T, Name, Tags, combine_lines(Acc, Series));
        false ->
            emit_summary(T, Name, Tags, Acc)
    end;
%% Entries that are not 2-tuples are ignored.
emit_summary([_ | T], Name, Tags, Acc) ->
    emit_summary(T, Name, Tags, Acc);
emit_summary([], _Name, _Tags, Acc) ->
    Acc.
%% Join a list of binaries with Sep, skipping empty parts.
combine(Parts, Sep) ->
    lists:foldl(fun(Part, Acc) -> combine_two(Acc, Part, Sep) end,
                <<>>, Parts).

%% Join two binaries with Sep; an empty side yields the other side
%% unchanged so no dangling separator is produced.
combine_two(<<>>, Second, _Sep) ->
    Second;
combine_two(First, <<>>, _Sep) ->
    First;
combine_two(First, Second, Sep)
  when is_binary(First), is_binary(Second), is_binary(Sep) ->
    <<First/binary, Sep/binary, Second/binary>>.
%% @doc Coerce a label or sample value to a binary.
%% Binaries pass through; lists are assumed flat byte lists (strings);
%% floats use float_to_binary/1's default (20-digit scientific) form.
%% (Also removes the extraction garbage that trailed the final line.)
v2b(V) when is_binary(V) ->
    V;
v2b(V) when is_list(V) ->
    list_to_binary(V);
v2b(V) when is_integer(V) ->
    integer_to_binary(V);
v2b(V) when is_float(V) ->
    float_to_binary(V);
v2b(V) when is_atom(V) ->
    erlang:atom_to_binary(V, utf8).
-module(hw1).
% functions that are included to make the assignment easier.
-export([allLess/2, distance/2, sloppyZip/2]).
% functions that you need to write to complete the assignment
-export([allTails/1, closest/2, longestOverlap/2]).
% distance(P1, P1) -> Euclidean distance from P1 to P2
% P1 and P2 must be list of numbers, and the lengths of P1 and P2 must match.
% Examples,
% distance([1], [2]) -> 1.0
% distance([0,0], [3,4]) -> 5.0
% distance([1, 2, 3], [5, -10, 0]) -> 13.0
% distance([1, 3.14159, 2.5], [1.732, 8, -4/3]) -> 6.231726580374371
% The function 'distance' is included in this template because it may be
% useful when implementing the function 'closest'.
%% Euclidean distance between two equal-length coordinate lists
%% (lists:zipwith/3 crashes if the lengths differ, as lists:zip/2 did).
distance(P1, P2) when is_list(P1), is_list(P2) ->
    SquaredDiffs = lists:zipwith(fun(A, B) -> (A - B) * (A - B) end, P1, P2),
    math:sqrt(lists:sum(SquaredDiffs)).
% closest(P, PointList) -> {Index, Distance}
% Find the point in PointList that is closest to P.
% Parameters:
% P must be a list of numbers.
% PointList must be a non-empty list of lists of numbers.
% The length of P and the length of each list in PointList must be the same.
% Return value:
% Index: the index in PointList of the closest point to P.
% Distance: the distance from P to the closest point in PointList.
% If there are ties, then the lowest such Index is returned.
% Examples:
% closest([1,2,3], [[4,5,6], [0,1,4], [1,1,2], [2,3,4]]) ->
% {3,1.4142135623730951}
% closest([1,2,3], [[4,5,6], [0,1,4], [1,8,2], [2,3,4]]) ->
% {2,1.7320508075688772}
% closest([1,2,3], [[4,5,6], [0,8,4], [1,8,2], [2,3,4]]) ->
% {4,1.7320508075688772}
%% Base case: the {0, 0.0} sentinel marks "no point seen yet"; the
%% `I == 0' test below makes the first real point win over it.
closest(_P, []) -> {0, 0.0};
%% Compare the head against the best of the tail; ties (D >= HD) prefer
%% the head, i.e. the lowest index, as documented above.
closest(P, [H | PointList]) ->
{I, D} = closest(P, PointList),
HD = distance(P, H),
if
(D >= HD) or (I == 0) -> {1, HD};
true -> {I+1, D}
end.
% allTails(L) -> list of all suffixes of L
% L must be a list.
% Examples:
% allTails([1, 2, 3]) -> [[], [3], [2,3], [1,2,3]]
% allTails([]) -> [[]]
%% allTails(L) -> all suffixes of L, shortest first (see examples above).
%% PERF FIX: the original appended with `++ [L]' at every recursion
%% level, which is O(n^2); the tail-recursive accumulator below is O(n)
%% and produces the identical ordering.
allTails(L) -> all_tails(L, []).

%% Each (longer) suffix is pushed under the shorter ones already stacked.
all_tails([], Acc) -> [[] | Acc];
all_tails([_ | T] = L, Acc) -> all_tails(T, [L | Acc]).
% sloppyZip(L1, L2) -> Z
% L1 and L2 must be lists.
% Z is the list of pairs of corresponding elements of L1 and L2.
% length(Z) = min(length(L1), length(L2)) -- in other words, we stop
% zipping when we reach the end of either list. Compare with lists:zip/2
% that requires the two lists to be of the same length. That's why this
% version is 'sloppy'. I wrote a head recursive version so that it keeps
% the elements in the original order, just like lists:zip/2.
% Examples:
% sloppyZip([1,2,3], [cat, dogs, mice]) -> [{1,cat}, {2,dogs}, {3,mice}]
% sloppyZip([1,2,3], [cat, dogs]) -> [{1,cat}, {2,dogs}]
%% Zip two lists pairwise, stopping at the end of the shorter list.
sloppyZip([X | Xs], [Y | Ys]) -> [{X, Y} | sloppyZip(Xs, Ys)];
sloppyZip([], _) -> [];
sloppyZip(_, []) -> [].
% longestOverlap(L1, L2) -> {StartIndex, Length}
% Find the longest overlapping segments of L1 and L2.
% Two segments are overlapping if they start at the same index
% and if they are element-by-element identical.
% Parameters:
% L1, L2: lists. It is acceptable for length(L1) /= length(L2).
% The length(L1) == length(L2) case is acceptable as well.
% Return value: {StartIndex, Length}
% StartIndex and Length are both integers: StartIndex is positive and
% Length is non-negative. For StartIndex =< I < StartIndex+Length,
% lists:nth(I, L1) =:= lists:nth(I, L2)
% We return StartIndex and Length for the longest such segment. In the
% case of a tie, we return the first one (i.e. the one with the smallest
% value of StartIndex.
% If L1 and L2 have no overlapping segments, we return {1, 0}.
longestOverlap([], _L2) -> {1, 0};
longestOverlap(_L1, []) -> {1, 0};
% Measure the overlap starting at position 1; if it consumes a whole
% list we are done, otherwise skip past it (plus the first mismatching
% element) and recurse, re-basing the returned index accordingly.
longestOverlap(L1, L2) ->
Len = lengthOfOverlap(L1, L2),
if
(length(L1) == Len) or (length(L2) == Len) -> {1, Len};
true ->
{I, L} = longestOverlap(lists:nthtail(Len+1, L1),
lists:nthtail(Len+1, L2)),
if
% `>=' keeps the earlier segment on ties, as documented above.
Len >= L -> {1, Len};
true -> {I+Len+1, L}
end
end.
% Length of the common prefix of two lists (0 when heads differ or
% either list is empty).
lengthOfOverlap([H1 | L1], [H2 | L2]) when H1 == H2 -> lengthOfOverlap(L1, L2) + 1;
lengthOfOverlap(_L1, _L2) -> 0.
% The function allLess was described in the homework as an example of
% a function where adding comprehensive guards can cause an unacceptable
% loss of performance. In this case, if the guards for T1 and T2 are
% added, the time for allLess grows from O(N) to O(N^2).
% allLess(L1, L2) returns true if L1 and L2 are of the same length, both
% are lists of numbers, and if each element of L1 is less than the
% corresponding element of L2.
% If L1 and L2 have different length, or if either has a non-numeric
% element, then the outcome may depend on which guard expression is used.
% The details are left as an exercise for those who are really into such
% things.
% Element-wise strict comparison; see the discussion above. The
% commented-out guard line is kept deliberately — the preceding comments
% explain the O(N^2) cost of enabling it.
% FIX: removed the extraction garbage that trailed the final line.
allLess([], []) -> true;
allLess([H1 | T1], [H2 | T2])
when is_number(H1), is_number(H2) ->
% is_list(T1), is_list(T2), length(T1) == length(T2) ->
(H1 < H2) andalso allLess(T1, T2).
%% Copyright (c) 2008-2009 <NAME> <<EMAIL>>
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without
%% restriction, including without limitation the rights to use,
%% copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following
%% conditions:
%%
%% The above copyright notice and this permission notice shall be
%% included in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
%% OTHER DEALINGS IN THE SOFTWARE.
%%
%% @reference http://testanything.org/wiki/index.php/Main_Page
%% @reference http://en.wikipedia.org/wiki/Test_Anything_Protocol
%% @doc Provide test functionality modules
-module(etap_can).
-export([
loaded_ok/2, can_ok/2, can_ok/3,
has_attrib/2, is_attrib/3, is_behaviour/2
]).
%% @spec loaded_ok(atom(), string()) -> true | false
%% @doc Assert that a module has been loaded successfully.
%% code:load_file/1 returns {module, M} on success; any other shape
%% (e.g. {error, Reason}) fails the assertion.
loaded_ok(M, Desc) when is_atom(M) ->
etap:fun_is(fun({module, _}) -> true; (_) -> false end, code:load_file(M), Desc).
%% @spec can_ok(atom(), atom()) -> true | false
%% @doc Assert that a module exports a given function (any arity).
can_ok(M, F) when is_atom(M), is_atom(F) ->
    %% BUG FIX: the original tested `Matches > 0' where Matches is a
    %% *list*; in Erlang's term order any list is greater than any
    %% number, so the assertion always passed. Test for a non-empty
    %% list instead.
    Matches = [X || {X, _} <- M:module_info(exports), X =:= F],
    etap:ok(Matches =/= [], lists:concat([M, " can ", F])).

%% @spec can_ok(atom(), atom(), integer()) -> true | false
%% @doc Assert that a module exports a given function with a given arity.
can_ok(M, F, A) when is_atom(M), is_atom(F), is_number(A) ->
    %% Same list-vs-integer bug fixed here. The original guard also used
    %% `;' (OR) after is_atom(M), accepting non-atom function names;
    %% tightened to an AND guard sequence.
    Matches = [X || X <- M:module_info(exports), X =:= {F, A}],
    etap:ok(Matches =/= [], lists:concat([M, " can ", F, "/", A])).
%% @spec has_attrib(M, A) -> true | false
%% M = atom()
%% A = atom()
%% @doc Asserts that a module has a given attribute.
%% The improbable atom below is a sentinel default: if get_value returns
%% it, the attribute was absent, so etap:isnt/3 fails the assertion.
has_attrib(M, A) when is_atom(M), is_atom(A) ->
etap:isnt(
proplists:get_value(A, M:module_info(attributes), 'asdlkjasdlkads'),
'asdlkjasdlkads',
lists:concat([M, " has attribute ", A])
).
%% @spec is_attrib(M, A, V) -> true | false
%% M = atom()
%% A = atom()
%% V = any()
%% @doc Asserts that a module has a given attribute with a given value.
%% Attribute values arrive wrapped in a list, hence the [V] comparison.
is_attrib(M, A, V) when is_atom(M) andalso is_atom(A) ->
etap:is(
proplists:get_value(A, M:module_info(attributes)),
[V],
lists:concat([M, "'s ", A, " is ", V])
).
%% @spec is_behaviour(M, B) -> true | false
%% M = atom()
%% B = atom()
%% @doc Asserts that a given module declares a specific behaviour
%% (delegates to is_attrib/3 on the `behaviour' attribute).
is_behaviour(M, B) when is_atom(M) andalso is_atom(B) ->
is_attrib(M, behaviour, B). | src/etap/etap_can.erl | 0.599133 | 0.420124 | etap_can.erl | starcoder |
%%%----------------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright 2012 University of St Andrews (See LICENCE)
%%% @headerfile "skel.hrl"
%%%
%%% @doc This module contains the Map skeleton initialisation logic.
%%%
%%% The Map skeleton is a parallel map. The skeleton applies a given function
%%% to the elements within one or more lists.
%%%
%%% This implementation assumes a list of lists as input, where the
%%% decomposition of said input may be expressed as the identity function.
%%% Whilst this implementation of Map usually determines the number of worker
%%% processes it needs automatically, the developer may explicitly set this,
%%% as in {@link sk_farm}.
%%%
%%%
%%% === Example ===
%%%
%%% ```skel:do([{map, [{seq, fun ?MODULE:f/1}]}], Input).'''
%%%
%%% Here we use a Map skeleton to perform a function `f/1' over all
%%% elements for all lists represented by `Input'. Returned, we receive a
%%% list of lists the same as `Input' itself, bar that the elements of
%%% each are the result of their application to `f/1'.
%%%
%%% In this example we note that the number of worker processes the Map
%%% skeleton uses is determined by the length of the longest list in
%%% `Input'. To constrain, or otherwise set this value, we might add an
%%% extra term to the Map tuple.
%%%
%%% ```skel:do([{map, [{seq, fun ?MODULE:f/1}], 10}], Input).'''
%%%
%%% Using the same example, we now note that the number of worker
%%% processes used is set to ten. Performance comparisons between these
%%% two depends heavily on the chosen `Input', and the machine on which it
%%% runs.
%%%
%%% @end
%%%----------------------------------------------------------------------------
-module(sk_map).
-export([
start/2
]).
-include("skel.hrl").
%% @doc Initialises an instance of the Map skeleton ready to receive inputs.
%%
%% Map creates number of internal workflows, all of theme based on
%% given `WorkFlow'. Each recived data is splitted by
%% {@link sk_map_partitioner} and send to workers, which then send
%% it back to combiner, or recomposition processes, which acts as sink
%% for workers.
%%
%% The number of workers itself can by determined automaticly, when no
%% additional parameters are given. Or could be set staticly by the
%% `NumberOfWorkers' parameter.
%% The skeleton is assembled from a partitioner process (splits each
%% input and feeds the workers) and a combiner process (recomposes the
%% workers' outputs and forwards them to `NextPid').
%%
%% SPEC FIX: the original spec bound GPUNumberOfWorkers twice, had a
%% garbled first field name (CPUWorkflowCPUWorkflow) and omitted the
%% {WorkFlow, NumberOfWorkers, pull} variant accepted below.
-spec start(Parameters, NextPid) -> WorkflowPid when
      Parameters :: {WorkFlow}
                  | {WorkFlow, NumberOfWorkers}
                  | {WorkFlow, NumberOfWorkers, pull}
                  | {CPUWorkFlow :: workflow(),
                     NumberOfCPUWorkers :: pos_integer(),
                     GPUWorkFlow :: workflow(),
                     NumberOfGPUWorkers :: pos_integer()},
      WorkFlow :: workflow(),
      NumberOfWorkers :: pos_integer(),
      NextPid :: pid(),
      WorkflowPid :: pid().
%% Automatic sizing: the partitioner determines the worker count itself.
start({WorkFlow}, NextPid) ->
    CombinerPid = proc_lib:spawn(sk_map_combiner, start, [NextPid]),
    sk_map_auto_partitioner:start(WorkFlow, CombinerPid);
%% Explicit, fixed number of workers.
start({WorkFlow, NWorkers}, NextPid) ->
    CombinerPid = proc_lib:spawn(sk_map_combiner, start, [NextPid, NWorkers]),
    sk_map_man_partitioner:start(WorkFlow, NWorkers, CombinerPid);
%% Fixed number of workers driven by a pulling partitioner.
start({WorkFlow, NWorkers, pull}, NextPid) ->
    CombinerPid = proc_lib:spawn(sk_map_combiner, start, [NextPid, NWorkers]),
    sk_map_pull_partitioner:start(WorkFlow, NWorkers, CombinerPid);
%% Hybrid CPU/GPU map: separate workflow and worker count per device;
%% the combiner waits for the total number of workers.
%% CONSISTENCY FIX: this clause used bare spawn/3 while every other
%% clause used proc_lib:spawn/3; unified on proc_lib so the combiner
%% gets OTP-style ancestry and crash reporting in all cases.
start({WorkFlowCPU, NCPUWorkers, WorkFlowGPU, NGPUWorkers}, NextPid) ->
    CombinerPid = proc_lib:spawn(sk_map_combiner, start,
                                 [NextPid, NCPUWorkers + NGPUWorkers]),
    sk_map_man_partitioner:start(NCPUWorkers, NGPUWorkers,
                                 WorkFlowCPU, WorkFlowGPU, CombinerPid).
%% Copyright 2016-2017 TensorHub, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(guild_list_runs_cmd).
-export([parser/0, main/2]).
-define(true_filter, fun(_) -> true end).
%% ===================================================================
%% Parser
%% ===================================================================
%% Build the CLI parser spec for `guild list-runs': command-specific
%% options plus the shared project options; no positional args allowed.
parser() ->
cli:parser(
"guild list-runs",
"[OPTION]...",
"List project runs.",
list_runs_opts() ++ guild_cmd_support:project_options(),
[{pos_args, 0}]).
%% Flag options that select which run statuses (or export presence) to
%% show. NOTE(review): the --with-export help string reads "show only
%% runs an exported model" — looks like a missing "with"; it is runtime
%% text, so left unchanged here.
list_runs_opts() ->
[{completed, "--completed",
"show only completed runs", [flag]},
{terminated, "--terminated",
"show only runs that were stopped by the user", [flag]},
{error, "--error",
"show only runs that stopped due to an error", [flag]},
{error_or_terminated, "--error-or-terminated",
"show only runs that were stopped by the user or due to an error",
[flag]},
{with_export, "--with-export",
"show only runs an exported model", [flag]}].
%% ===================================================================
%% Main
%% ===================================================================
%% Entry point: resolve the project, then print its (filtered) runs.
main(Opts, []) ->
Project = guild_cmd_support:project_from_opts(Opts),
guild_app:init_support([exec]),
print_runs(runs_for_project(Project), Opts).
%% Pair each run with its derived status atom.
runs_for_project(Project) ->
[{Run, run_status(Run)} || Run <- guild_run:runs_for_project(Project)].
%% Map the raw run state to this command's vocabulary: a crashed run is
%% reported as `terminated'; a stopped run is `completed' only when its
%% recorded exit status is "0", otherwise (including a missing attr)
%% it is `error'.
run_status(R) ->
case guild_run_util:run_status(R) of
running -> running;
crashed -> terminated;
stopped ->
case guild_run:attr(R, "exit_status") of
{ok, <<"0">>} -> completed;
{ok, _} -> error;
error -> error
end
end.
%% @doc Apply the option-derived filter to the run list and print each
%% surviving run, one per line.
print_runs(Runs, Opts) ->
    lists:foreach(fun print_run/1, lists:filter(run_filter(Opts), Runs)).
%% @doc Build a predicate that is the conjunction of every filter option.
%%
%% Each status flag maps to a list of acceptable statuses; flags that were
%% not given become pass-through filters, so the conjunction effectively
%% reduces to the enabled ones (plus the export filter).
run_filter(Opts) ->
    StatusSpecs =
        [{completed,           [completed]},
         {terminated,          [terminated]},
         {error,               [error]},
         {error_or_terminated, [error, terminated]}],
    Filters =
        [status_filter(Statuses, proplists:get_bool(Flag, Opts))
         || {Flag, Statuses} <- StatusSpecs] ++ [exports_filter(Opts)],
    fun(Run) -> apply_filters(Run, Filters) end.
%% @doc A filter accepting runs whose status is one of `Statuses'; when the
%% corresponding flag was not given, a pass-through filter is returned.
status_filter(_Statuses, false) ->
    ?true_filter;
status_filter(Statuses, true) ->
    fun({_Run, Status}) -> lists:member(Status, Statuses) end.
%% @doc A filter accepting runs with an exported model; pass-through when
%% --with-export was not given.
exports_filter(Opts) ->
    case proplists:get_bool(with_export, Opts) of
        true -> fun has_export/1;
        false -> ?true_filter
    end.
%% @doc True when the run directory contains a saved model export
%% (model/export.meta).
has_export({Run, _Status}) ->
    ExportMeta = filename:join([guild_run:dir(Run), "model", "export.meta"]),
    filelib:is_file(ExportMeta).
%% @doc True iff every filter accepts the run. Short-circuits on the first
%% rejecting filter, exactly like the original recursive conjunction.
apply_filters(Run, Filters) ->
    lists:all(fun(Filter) -> Filter(Run) end, Filters).
%% @doc Print a single run as "Dir<TAB>Status".
print_run({Run, Status}) ->
    RunDir = guild_run:dir(Run),
    guild_cli:closeable_out("~s\t~s~n", [RunDir, Status]).
%% @doc
%% Relcast's job is ensure a consistent state for consensus protocols. It
%% provides atomic updates to the consensus state, the inbound message queue and
%% the outbound message queue. It does this by speculatively processing inbound messages,
%% serializing them to disk if they can't be handled now, by serializing the new module
%% state and any outbound messages to disk and deleting the inbound message
%% after processing the message. Assuming no disk failures, the Erlang process,
%% the Erlang VM or the host operating system should be able to fail at any time
%% and recover where it left off. All messages have a clear ownership and are not
%% removed until they've been handled or someone else has taken ownership.
%%
%% Relcast does this using 3 kinds of keys
%%
%% * `<<"stored_module_state">>' - this key stores the latest serialized state of the
%% callback module's state. It is only read back from disk on
%% recovery. This key is overwritten every time the module
%% handles a message or an event.
%%
%% * `<<"oXXXXXXXXXX">>' - an outbound key, representing a message this instance
%% wishes to send to another peer.
%%
%% * `<<"iXXXXXXXXXX">>' - an inbound key, this represents a message arriving
%% that has not been handled yet.
%%
%% The 10 Xs in the inbound and outbound keys represent a strictly monotonic
%% counter that can hold 2^32 messages. They are prefixed with their direction
%% so we can efficiently iterate over them independently. The 32 bit integer is
%% printed in left zero padded decimal so that the keys sort lexiographically.
%%
%% Inbound values are stored in the form `<<ActorID:16/integer, Value/binary>>'.
%% Inbound messages are only stored if the handler indicates they cannot be
%% handled right now, up to a per-actor maximum. Other inbound events are
%% handled immediately and any new state or new outbound messages are stored to
%% disk.
%%
%% Outbound values come in 3 types; unicast, multicast and 'callback'.
%%
%% Unicast values look like this: `<<1:2/bits, ActorID:14/integer, Value/binary>>'
%% and are only intended for delivery to a single peer, identified by ActorID.
%% Once the designated Actor has ACKed the message, the key can be deleted.
%%
%% Multicast values look like this:
%% `<<0:2/bits, ActorBitMask:BitmaskSize/integer, Value/binary>>' and are
%% intended to be delivered to every other actor in the consensus group. Each
%% time a send to one of the peers is ACKed, the bit for that actor is set to
%% 0. Once all the bits have been set to 0, the key can be deleted. The bitmask
%% is stored least significant bit first and is padded to be a multiple of 8
%% (along with the leading 0 bit) so the message is always byte aligned.
%%
%% Callback values look like this:
%% `<<2:2/bits, ActorBitMask:BitmaskSize/integer, Value/binary>>' and are very
%% similar to multicast values with one crucial difference. When relcast finds
%% that the next message for an actor is a callback message, it invokes
%% Module:callback_message(ActorID, Message, ModuleState). This call should
%% produce either the actual binary of the message to send or `none` to indicate
%% no message should be sent for that actor (in which case the bitfield is cleared
%% for that actor immediately and the search for the next message for that actor
%% continues. This message type is useful, for example, if you have a message with
%% a large common term and some smaller per-user terms that you need to send to
%% all actors but don't want to store separately N times. It could also be useful
%% if you don't know if the message will ever be sent and it involves some expensive
%% computation or a signature. The module should not be modified as it is not returned.
%%
%% Epochs
%% Relcast has the notion of epochs for protocols that have the property of "if
%% one honest node can complete the round, all nodes can". When appropriate,
%% the module can return the 'new_epoch' which deletes all queued outbound messages.
%% Messages older than that are, by definition, not necessary for the protocol to
%% continue advancing and can be discarded.
%%
%% To do this, relcast uses rocksdb's column families feature. On initial
%% startup it creates the column family "epoch0000000000". On each 'new_epoch'
%% action the epoch counter is incremented by one. 2^32 epochs are allowed
%% before the counter wraps. Don't have 4 billion epochs, please.
-module(relcast).
%%====================================================================
%% Callback functions
%%====================================================================
-callback init(Arguments :: any()) -> {ok, State :: term()}.
-callback restore(OldState :: term(), NewState :: term()) -> {ok, State :: term()}.
-callback serialize(State :: term()) -> Binary :: binary() | #{atom() => #{} | binary()}.
-callback deserialize(Binary :: binary()) -> State :: term().
-callback handle_message(Message :: binary(), ActorId :: pos_integer(), State :: term()) ->
{NewState :: term(), Actions :: actions()} | defer | ignore.
-callback handle_command(Request :: term(), State :: term()) ->
{reply, Reply :: term(), Actions :: actions(), NewState :: term() | ignore} |
{reply, Reply :: term(), ignore}. %% when there's no changes, likely just returning information
-callback callback_message(ActorID :: pos_integer(), Message :: binary(), State :: term()) ->
binary() | none.
-callback terminate(Reason :: term(), NewState :: term()) -> any().
-optional_callbacks([terminate/2]).
-type actions() :: [ Message :: message() |
{stop, Timeout :: timeout()} | new_epoch ].
-type message() ::
{unicast, Index::pos_integer(), Msg::message_value()} |
{multicast, Msg::message_value()}.
-type message_key_prefix() :: <<_:128>>.
-type message_value() ::
{message_key_prefix(), binary()} |
binary().
%%====================================================================
%% State record
%%====================================================================
-record(state,
{
db :: rocksdb:db_handle(),
module :: atom(),
module_state :: any(),
old_module_state :: any(),
old_serialized :: undefined | map() | binary(),
id :: pos_integer(),
ids :: [pos_integer()],
last_sent = #{} :: #{pos_integer() => {rocksdb:cf_handle(), binary()} | none},
pending_acks = #{} ::
#{pos_integer() => [{PendRef :: reference(),
Epoch :: rocksdb:cf_handle(),
Key :: binary(),
Multicast :: boolean()}]},
in_key_count = 0 :: non_neg_integer(),
out_key_count = 0 :: non_neg_integer(),
epoch = 0 :: non_neg_integer(),
bitfieldsize :: pos_integer(),
inbound_cf :: rocksdb:cf_handle(),
active_cf :: rocksdb:cf_handle(),
defers = #{} :: #{pos_integer() => [binary()]},
seq_map = #{} :: #{pos_integer() => pos_integer()},
transaction :: undefined | rocksdb:transaction_handle(),
transaction_dirty = false :: boolean(),
new_defers :: undefined | integer(), % right now this is just a counter for delivers
last_defer_check :: undefined | integer(),
key_tree :: [any()],
key_tree_checked = false :: boolean(),
floated_acks = #{} :: #{pos_integer() => [non_neg_integer()]},
outbound_keys = [] :: [binary()],
db_opts = [] :: [any()],
write_opts = [] :: [any()],
new_messages = #{} :: #{pos_integer() => boolean()}
}).
-type relcast_state() :: #state{}.
-type status() :: {ModuleState :: any(),
InboundQueue :: [{pos_integer(), binary()}],
OutboundQueue :: #{pos_integer() => [binary()]}}.
-export_type([relcast_state/0, status/0]).
-export([
start/5,
command/2,
deliver/4,
take/2, take/3,
reset_actor/2,
in_flight/2,
peek/2,
ack/3,
process_inbound/1,
stop/2,
status/1
]).
-define(stored_module_state, <<"stored_module_state">>).
-define(stored_key_prefix, <<"stored_key_">>).
-define(stored_key_tree, <<"stored_key_tree">>).
%% @private Open a rocksdb optimistic transaction; the match asserts success,
%% so callers can always rely on receiving {ok, Txn} (failure crashes here).
-spec transaction(_, _) -> {ok, rocksdb:transaction_handle()}.
transaction(DBHandle, Opts) ->
    {ok, _Txn} = rocksdb:transaction(DBHandle, Opts).
%% @doc Start a relcast instance. Starts a relcast instance for the actor
%% `ActorID' in the group of `ActorIDs' using the callback module `Module'
%% initialized with `Arguments'. `RelcastOptions' contains configuration options
%% around the relcast itself, for example the data directory.
-spec start(pos_integer(), [pos_integer(),...], atom(), list(), list()) ->
          {error, any()} | {ok, relcast_state()} | {stop, pos_integer(), relcast_state()}.
start(ActorID, ActorIDs, Module, Arguments, RelcastOptions) ->
    Create = proplists:get_value(create, RelcastOptions, false),
    DataDir = proplists:get_value(data_dir, RelcastOptions),
    DBOptions0 = db_options(length(ActorIDs)),
    OpenOpts1 = application:get_env(relcast, db_open_opts, []),
    OpenOpts2 = proplists:get_value(db_opts, RelcastOptions, []),
    %% synchronous writes by default: durability matters more than latency here
    WriteOpts = proplists:get_value(write_opts, RelcastOptions, [{sync, true}]),
    OpenOpts = OpenOpts1 ++ OpenOpts2,
    GlobalOpts = application:get_env(rocksdb, global_opts, []),
    DBOptions = DBOptions0 ++ OpenOpts ++ GlobalOpts,
    %% Inspect the existing column families (if any) so we know whether this is
    %% a fresh store and whether the "Inbound" CF still needs to be created.
    {ColumnFamilies, HasInbound} =
        case rocksdb:list_column_families(DataDir, DBOptions) of
            {ok, CFs0} ->
                CFs = lists:sort(CFs0) -- ["default"],
                HI = lists:member("Inbound", CFs0),
                case length(CFs) of
                    0 ->
                        %% we need to create epoch 0
                        {[], HI};
                    _ ->
                        %% we should prune all but the last two
                        {CFs, HI}
                end;
            {error, _} ->
                %% Assume the database doesn't exist yet, if we can't open it we will fail later
                {[], false}
        end,
    case rocksdb:open_optimistic_transaction_db(DataDir,
                                                [{create_if_missing, Create}, {atomic_flush, true}] ++ OpenOpts,
                                                [ {CF, DBOptions}
                                                  || CF <- ["default"|ColumnFamilies] ]) of
        {ok, DB, [_DefaultCF|CFHs0]} ->
            %% handles come back in the order the names were passed, so when the
            %% Inbound CF exists it is the first non-default handle
            {InboundCF, CFHs} =
                case HasInbound of
                    false ->
                        {ok, ICF} = rocksdb:create_column_family(DB, "Inbound", DBOptions),
                        {ICF, CFHs0};
                    true ->
                        {hd(CFHs0), tl(CFHs0)}
                end,
            %% check if we have some to prune
            %% delete all but the two newest *contiguous* column families
            %% NOTE(review): the code below keeps only the single newest epoch
            %% CF — confirm whether "two newest" above is stale text
            {Epoch, ActiveCF} =
                case lists:reverse(ColumnFamilies -- ["Inbound"]) of
                    [] ->
                        %% no column families, create epoch 0
                        {ok, FirstCF} = rocksdb:create_column_family(DB, make_column_family_name(0), DBOptions),
                        {0, FirstCF};
                    [JustOne] ->
                        %% only a single column family, no need to prune
                        {cf_to_epoch(JustOne), hd(CFHs)};
                    [Last | _Tail] ->
                        %% Prune all but the latest epoch
                        CFsToDelete = lists:sublist(CFHs, 1, length(CFHs) - 1),
                        [ ok = rocksdb:drop_column_family(CFH) || CFH <- CFsToDelete ],
                        [ ok = rocksdb:destroy_column_family(CFH) || CFH <- CFsToDelete ],
                        %% the surviving handle is the last element of CFHs
                        {cf_to_epoch(Last), hd(lists:sublist(CFHs, length(CFHs) + 1 - 1, 1))}
                end,
            case Module:init(Arguments) of
                {ok, ModuleState0} ->
                    %% restore any previously serialized module state from disk
                    {OldSer, ModuleState, KeyTree} = get_mod_state(DB, Module, ModuleState0, WriteOpts),
                    LastKeyIn = get_last_key_in(DB, InboundCF),
                    LastKeyOut = get_last_key_out(DB, ActiveCF),
                    BitFieldSize = round_to_nearest_byte(length(ActorIDs) + 2) - 2, %% two bits for unicast/multicast
                    State = #state{module = Module,
                                   id = ActorID,
                                   inbound_cf = InboundCF,
                                   active_cf = ActiveCF,
                                   ids = ActorIDs,
                                   module_state = ModuleState,
                                   old_serialized = OldSer,
                                   db = DB,
                                   out_key_count = LastKeyOut + 1,
                                   in_key_count = LastKeyIn + 1,
                                   epoch = Epoch,
                                   bitfieldsize = BitFieldSize,
                                   db_opts = DBOptions,
                                   write_opts = WriteOpts,
                                   key_tree = KeyTree},
                    %% rebuild per-actor defer bookkeeping from the messages
                    %% still sitting in the inbound CF
                    {ok, Iter} = rocksdb:iterator(State#state.db, InboundCF, [{iterate_upper_bound, max_inbound_key()}]),
                    Defers = build_defer_list(rocksdb:iterator_move(Iter, {seek, min_inbound_key()}), Iter, InboundCF, #{}),
                    %% try to deliver any old queued inbound messages
                    {ok, Transaction} = transaction(DB, WriteOpts),
                    {ok, NewState} = handle_pending_inbound(Transaction,
                                                            State#state{transaction = Transaction,
                                                                        defers=Defers}),
                    ok = rocksdb:transaction_commit(Transaction),
                    %% leave a fresh open transaction in the returned state
                    {ok, Transaction1} = transaction(DB, WriteOpts),
                    {ok, NewState#state{transaction = Transaction1}};
                _ ->
                    {error, module_init_failed}
            end;
        {error, {db_open, Msg}} ->
            {error, {invalid_or_no_existing_store, Msg}};
        {error, _} = E->
            E
    end.
%% @doc Send a command to the relcast callback module. Commands are distinct
%% from messages as they do not originate from another actor in the relcast
%% group. Commands are dispatched to `Module':handle_command and can simply
%% return information via `{reply, Reply, ignore}' or update the callback
%% module's state or send messages via `{reply, Reply, Actions, NewModuleState}'.
-spec command(any(), relcast_state()) -> {any(), relcast_state()} | {stop, any(), pos_integer(), relcast_state()}.
command(Message, State = #state{module = Module,
                                module_state = ModuleState,
                                transaction = Transaction}) ->
    case Module:handle_command(Message, ModuleState) of
        {reply, Reply, ignore} ->
            %% just returning information
            {Reply, State};
        {reply, Reply, Actions, NewModuleState} ->
            State1 = maybe_update_state(State, NewModuleState),
            %% write new output messages & update the state atomically
            case handle_actions(Actions, Transaction, State1) of
                {ok, NewState} ->
                    %% handle_actions may have swapped in a fresh transaction
                    %% (on new_epoch), so use NewState's transaction here, not
                    %% the one bound in the function head
                    case handle_pending_inbound(NewState#state.transaction, NewState) of
                        {ok, NewerState} ->
                            {Reply, maybe_serialize(NewerState)};
                        {stop, Timeout, NewerState} ->
                            {stop, Reply, Timeout, maybe_serialize(NewerState)}
                    end;
                {stop, Timeout, NewState} ->
                    {stop, Reply, Timeout, maybe_serialize(NewState)}
            end
    end.
%% @doc Deliver a message from another actor to the relcast instance. `Message'
%% from `FromActorID' is submitted via `Module':handle_message. Depending on the
%% result of this, the message is either consumed immediately, deferred for
%% later, or this function returns `full' to indicate it cannot absorb any more
%% deferred messages from this Actor.
-spec deliver(non_neg_integer(), binary(), pos_integer(), relcast_state()) ->
          {ok, relcast_state()} | {stop, pos_integer(), relcast_state()} | full.
deliver(Seq, Message, FromActorID, State = #state{in_key_count = KeyCount,
                                                  defers = Defers}) ->
    %% Key/CF are undefined: the message has not been persisted (yet)
    case handle_message(undefined, undefined, FromActorID, Message, State#state.transaction, State) of
        {ok, NewState0} ->
            NewState = store_ack(Seq, FromActorID, NewState0),
            %% something happened, evaluate if we can handle any other blocked
            %% messages (map_size/1 is O(1) — no need to build the key list)
            case map_size(Defers) of
                0 ->
                    %% no active defers, no queued inbound messages to evaluate
                    {ok, NewState};
                _ ->
                    %% NOTE(review): new_defers starts out `undefined', which
                    %% would make this addition badarith — presumably it is
                    %% always an integer once defers exist; confirm
                    case handle_pending_inbound(NewState#state.transaction,
                                                NewState#state{new_defers = State#state.new_defers + 1}) of
                        {ok, NewerState} ->
                            {ok, NewerState};
                        {stop, Timeout, NewerState} ->
                            {stop, Timeout, NewerState}
                    end
            end;
        {stop, Timeout, NewState0} ->
            NewState = store_ack(Seq, FromActorID, NewState0),
            {stop, Timeout, NewState};
        ignore ->
            %% module ignored the message: nothing to persist, still ack receipt
            NewState = store_ack(Seq, FromActorID, State),
            {ok, NewState};
        defer ->
            %% module can't handle it yet: persist it (bounded per actor) so it
            %% can be retried later
            NewState = store_ack(Seq, FromActorID, State),
            DefersForThisActor = maps:get(FromActorID, Defers, []),
            MaxDefers = application:get_env(relcast, max_defers, 100),
            case DefersForThisActor of
                N when length(N) < MaxDefers ->
                    Key = make_inbound_key(KeyCount), %% some kind of predictable, monotonic key
                    ok = rocksdb:transaction_put(NewState#state.transaction, NewState#state.inbound_cf,
                                                 Key,
                                                 <<FromActorID:16/integer, Message/binary>>),
                    {ok, NewState#state{in_key_count = KeyCount + 1,
                                        transaction_dirty = true,
                                        defers = maps:put(FromActorID, [Key|N], Defers)}};
                _ ->
                    %% sorry buddy, no room on the couch
                    full
            end
    end.
%% TODO: remove (or change to count default to 1) this when tests and EQC are updated.
%% @doc Backwards-compatible single-message take: unwraps the list form
%% returned by take/3 into the old 5-tuple shape.
take(ID, State) ->
    Result = take(ID, State, 1),
    case Result of
        {ok, [{Seq, Msg}], Acks, NewState} ->
            %% use the old API for backwards compatibility
            {ok, Seq, Acks, Msg, NewState};
        _Other ->
            Result
    end.
%% @doc Get the next message this relcast has queued outbound for `ForActorID'.
%% Once this message has been delivered to its destination, and acknowledged,
%% `ack()' should be called with reference associated with the message.
%% Subsequent calls to `take()' without any intervening acks will return more
%% messages up to the pipeline depth, thereafter it will return the
%% `pipeline_full' tuple. In the case where the client code has lost its
%% connection, it should call `reset_actor/2', which will reset the pending
%% acks state and reissue the oldest unacked message in case all of the unacked
%% messages were lost in flight.
-spec take(pos_integer(), relcast_state(), pos_integer()) ->
          {not_found, relcast_state()} |
          {pipeline_full, relcast_state()} |
          {ok,
           [{Seq :: non_neg_integer(),
             Msg :: binary()}],
           Acks :: none | #{non_neg_integer() => [non_neg_integer()]},
           NewState :: relcast_state()}.
take(ForActorID, State = #state{pending_acks = Pending, new_messages = NewMsgs}, Count) ->
    %% we need to find the first "unacked" message for this actor
    %% we should remember the last acked message for this actor ID and start there
    %% check if there's a pending ACK and use that to find the "last" key, if present
    ActorNewMsgs = maps:get(ForActorID, NewMsgs, true),
    PipelineDepth = application:get_env(relcast, pipeline_depth, 75),
    case maps:get(ForActorID, Pending, []) of
        Pends when length(Pends) >= PipelineDepth ->
            %% too many unacked messages already in flight for this actor
            {pipeline_full, State};
        _Pends when ActorNewMsgs == false ->
            %% cached knowledge: nothing new has been queued since the last miss
            {not_found, State};
        Pends when Pends /= [] ->
            case hd(Pends) of
                {_Seq, CF, Key, _Multicast} when CF == State#state.active_cf ->
                    %% resume the scan after the newest in-flight key, but never
                    %% hand out more messages than the remaining pipeline room
                    Count1 = min(Count, PipelineDepth - length(Pends)),
                    case find_next_outbound(ForActorID, CF, Key, State, Count1, false) of
                        {not_found, LastKey, CF2} ->
                            %% remember where the scan stopped so the next take
                            %% does not rescan from the beginning
                            {not_found, State#state{last_sent = maps:put(ForActorID,
                                                                         {CF2, LastKey},
                                                                         State#state.last_sent),
                                                    new_messages = NewMsgs#{ForActorID => false}}};
                        not_found ->
                            {not_found, State#state{last_sent=maps:put(ForActorID, none,
                                                                       State#state.last_sent),
                                                    new_messages = NewMsgs#{ForActorID => false}}};
                        Messages ->
                            process_messages(Messages, Pends, Pending, ForActorID, State)
                    end;
                %% all our pends are for a stale epoch, clean them out
                _ ->
                    {ok, State1} = reset_actor(ForActorID, State),
                    take(ForActorID, State1, Count)
            end;
        _ ->
            %% default to the "first" key"
            case maps:get(ForActorID, State#state.last_sent, {State#state.active_cf, min_outbound_key()}) of
                none ->
                    %% we *know* there's nothing pending for this actor
                    {not_found, State};
                {CF0, StartKey0} ->
                    %% check if the column family is still valid
                    {CF, StartKey} = case CF0 == State#state.active_cf of
                                         true ->
                                             {CF0, StartKey0};
                                         false ->
                                             %% reset the start key as well
                                             {State#state.active_cf, min_outbound_key()}
                                     end,
                    %% iterate until we find a key for this actor
                    case find_next_outbound(ForActorID, CF, StartKey, State, Count) of
                        {not_found, LastKey, CF2} ->
                            {not_found, State#state{last_sent = maps:put(ForActorID, {CF2, LastKey},
                                                                         State#state.last_sent),
                                                    new_messages = NewMsgs#{ForActorID => false}}};
                        not_found ->
                            {not_found, State#state{last_sent = maps:put(ForActorID, none, State#state.last_sent),
                                                    new_messages = NewMsgs#{ForActorID => false}}};
                        Messages ->
                            process_messages(Messages, [], Pending, ForActorID, State)
                    end
            end
    end.
%% @private Assign a fresh per-actor sequence number to each freshly taken
%% message and record it as pending an ack for `ForActorID'.
%% Returns {ok, [{Seq, Msg}], Acks, NewState} where Acks are gathered via
%% get_acks/2 (presumably acks to piggyback on this take — confirm).
process_messages(Messages, Pends, Pending, ForActorID, State) ->
    {Pend, Keys, Msgs, State1} =
        lists:foldl(
          fun({Key2, CF2, Msg, Multicast}, {P, K, M, S}) ->
                  %% each message gets its own monotonically assigned sequence
                  {Seq2, S1} = make_seq(ForActorID, S),
                  P1 = [{Seq2, CF2, Key2, Multicast} | P],
                  K1 = [Key2 | K],
                  M1 = [{Seq2, Msg} | M],
                  {P1, K1, M1, S1}
          end,
          {[], [], [], State},
          Messages),
    %% the fold accumulates in reverse take-order, so the newest message ends
    %% up at the head of the pend list — take/3 and peek/2 rely on hd/1 being
    %% the most recent in-flight entry
    Pends1 = lists:append(Pend, Pends),
    {Acks, State2} = get_acks(Keys, State1),
    State3 = maybe_commit(Acks, State2),
    {ok, Msgs, Acks,
     State3#state{pending_acks = maps:put(ForActorID, Pends1, Pending)}}.
%% @doc Forget all in-flight (unacked) bookkeeping for `ForActorID' so the
%% next take/3 rescans from the oldest queued outbound message.
-spec reset_actor(pos_integer(), relcast_state()) -> {ok, relcast_state()}.
reset_actor(ForActorID, State) ->
    #state{pending_acks = Pending, last_sent = LastSent} = State,
    %% note: new_messages is cleared for every actor here, not just ForActorID
    Cleared = State#state{pending_acks = maps:put(ForActorID, [], Pending),
                          new_messages = #{},
                          last_sent = maps:remove(ForActorID, LastSent)},
    {ok, reset_seq(ForActorID, Cleared)}.
%% @doc Count the unacked in-flight messages for `ForActorID' that belong to
%% the currently active epoch (stale-epoch entries are not counted).
-spec in_flight(pos_integer(), relcast_state()) -> non_neg_integer().
in_flight(ForActorID, #state{pending_acks = Pending, active_cf = ActiveCF}) ->
    Pends = maps:get(ForActorID, Pending, []),
    length([Key || {_Ref, CF, Key, _Multicast} <- Pends, CF == ActiveCF]).
%%% @doc Get the next message this relcast has queued outbound for
%%% `ForActorID', without affecting the pipeline state or having any other side effects.
-spec peek(pos_integer(), relcast_state()) ->
          not_found |
          {ok, binary()}.
peek(ForActorID, State = #state{pending_acks = Pending}) ->
    %% we need to find the first "unacked" message for this actor
    %% we should remember the last acked message for this actor ID and start there
    %% check if there's a pending ACK and use that to find the "last" key, if present
    case maps:get(ForActorID, Pending, []) of
        Pends when Pends /= [] ->
            case hd(Pends) of
                {_Ref, CF, Key, _Multicast} when CF == State#state.active_cf ->
                    %% iterate until we find a key for this actor
                    %% (Count = 1, no bookkeeping updates — read-only peek)
                    case find_next_outbound(ForActorID, CF, Key, State, 1, false) of
                        {not_found, _LastKey, _CF2} ->
                            not_found;
                        [{_Key2, _CF2, Msg, _Multicast2}] ->
                            {ok, Msg};
                        not_found ->
                            not_found
                    end;
                _ ->
                    %% here when the CF is old, we need to re-search in a newer
                    %% epoch. our state alteration will be undone, since we
                    %% never pass the changed state back to the user
                    peek(ForActorID, State#state{pending_acks = Pending#{ForActorID => []}})
            end;
        _ ->
            %% default to the "first" key"
            case maps:get(ForActorID, State#state.last_sent, {State#state.active_cf, min_outbound_key()}) of
                none ->
                    %% we *know* there's nothing pending for this actor
                    not_found;
                {CF0, StartKey0} ->
                    %% check if the column family is still valid
                    {CF, StartKey} = case CF0 == State#state.active_cf of
                                         true ->
                                             {CF0, StartKey0};
                                         false ->
                                             %% reset the start key as well
                                             {State#state.active_cf, min_outbound_key()}
                                     end,
                    %% iterate until we find a key for this actor
                    case find_next_outbound(ForActorID, CF, StartKey, State, 1) of
                        {not_found, _LastKey, _CF2} ->
                            not_found;
                        [{_Key, _CF2, Msg, _Multicast}] ->
                            {ok, Msg};
                        not_found ->
                            not_found
                    end
            end
    end.
%% @doc Indicate to relcast that `FromActorID' has acknowledged receipt of the
%% message(s) associated with `Seq' (a single sequence number or a list).
%% Acked unicast messages are deleted; acked multicast messages have the
%% actor's bit flipped so the key can be reaped on a later iteration.
-spec ack(pos_integer(), non_neg_integer() | [non_neg_integer], relcast_state()) ->
          {ok, relcast_state()}.
ack(FromActorID, Seq, State) when not is_list(Seq) ->
    ack(FromActorID, [Seq], State);
ack(_FromActorID, [], State) ->
    {ok, State};
ack(FromActorID, Seqs, State = #state{transaction = Transaction,
                                      bitfieldsize = BFS}) ->
    case maps:get(FromActorID, State#state.pending_acks, []) of
        [] ->
            {ok, State};
        Pends ->
            %% Thread the dirty flag through the fold instead of stashing it in
            %% the process dictionary: only deletes/bit-flips in the active CF
            %% dirty the transaction — dropping stale-epoch entries does not.
            %% foldl + reverse preserves both the rocksdb side-effect order and
            %% the pend list order of the original flatmap.
            {RevPends, Dirty} =
                lists:foldl(
                  fun({AckSeq, CF, AKey, Multicast} = Pend, {Acc, D}) when CF == State#state.active_cf ->
                          case lists:member(AckSeq, Seqs) of
                              true ->
                                  case Multicast of
                                      false ->
                                          %% unicast message, fine to delete now
                                          ok = rocksdb:transaction_delete(Transaction, CF, AKey);
                                      true ->
                                          %% flip the bit, we can delete it next time we iterate
                                          flip_actor_bit(FromActorID, Transaction, CF, AKey, BFS)
                                  end,
                                  {Acc, true};
                              false ->
                                  %% not acked yet; keep it pending
                                  {[Pend | Acc], D}
                          end;
                     (_StalePend, {Acc, D}) ->
                          %% entry from a stale epoch's column family; drop it
                          {Acc, D}
                  end,
                  {[], false},
                  Pends),
            NewPending = (State#state.pending_acks)#{FromActorID => lists:reverse(RevPends)},
            {ok, maybe_dirty(Dirty, State#state{pending_acks = NewPending})}
    end.
%% @doc Allow inbound processing to be externally triggered so that we
%% don't get "stuck" with delayed defers blocking the forward progress
%% of the state machine defined by the behavior.
-spec process_inbound(relcast_state()) ->
          {ok,
           Acks :: none | #{non_neg_integer() => [non_neg_integer()]},
           relcast_state()} |
          {stop, pos_integer(), relcast_state()}.
process_inbound(State) ->
    case handle_pending_inbound(State#state.transaction, State) of
        {ok, NewState} ->
            %% collect any accumulated acks (get_acks/1) and force a commit so
            %% the results of the inbound processing become durable
            {Acks, NewState1} = get_acks(NewState),
            NewState2 = maybe_commit(force, NewState1),
            {ok, Acks, NewState2};
        {stop, Timeout, NewState} ->
            {stop, Timeout, NewState}
    end.
%% @doc Stop the relcast instance. A `lite' stop reports `normal' to the
%% callback and closes the database without serializing state or committing
%% the open transaction; any other reason serializes and (best-effort)
%% commits before closing.
-spec stop(any(), relcast_state()) -> ok.
stop(lite, State = #state{module = Module, module_state = ModuleState}) ->
    run_terminate(Module, normal, ModuleState),
    rocksdb:close(State#state.db);
stop(Reason, State = #state{module = Module, module_state = ModuleState}) ->
    run_terminate(Module, Reason, ModuleState),
    State1 = maybe_serialize(State),
    %% best-effort: the transaction may already be committed or aborted
    try rocksdb:transaction_commit(State1#state.transaction)
    catch _:_ -> ok
    end,
    rocksdb:close(State#state.db).

%% @private Invoke Module:terminate/2 only when the optional callback is
%% exported (terminate/2 is declared in -optional_callbacks).
run_terminate(Module, Reason, ModuleState) ->
    case erlang:function_exported(Module, terminate, 2) of
        true -> Module:terminate(Reason, ModuleState);
        false -> ok
    end.
%% @doc Get a representation of the relcast's module state, inbound queue and
%% outbound queue. The iterators run inside the current transaction so
%% uncommitted writes are visible too.
-spec status(relcast_state()) -> status().
status(State = #state{module_state = ModuleState, transaction = Transaction}) ->
    %% walk the active (outbound) CF; the map is pre-seeded with every other
    %% actor so actors with no queued messages still appear with []
    {ok, Iter} = rocksdb:transaction_iterator(State#state.db, Transaction, State#state.active_cf,
                                              [{iterate_upper_bound, max_outbound_key()}]),
    OutboundQueue = build_outbound_status(rocksdb:iterator_move(Iter, {seek, min_outbound_key()}),
                                          Iter, State#state.bitfieldsize, maps:from_list([{ID, []} || ID <- State#state.ids, ID /= State#state.id])),
    %% then walk the inbound CF for messages not yet handled
    {ok, InIter} = rocksdb:transaction_iterator(State#state.db, Transaction, State#state.inbound_cf,
                                                [{iterate_upper_bound, max_inbound_key()}]),
    InboundQueue = build_inbound_status(rocksdb:iterator_move(InIter, {seek, min_inbound_key()}), InIter, []),
    {ModuleState, InboundQueue, OutboundQueue}.
%%====================================================================
%% Internal functions
%%====================================================================
%% @private Decide whether a pass over the stored inbound messages is worth
%% doing: run one when the defer counter is unknown or above the count
%% threshold, or when we have not checked recently enough. Otherwise skip the
%% (potentially expensive) scan and return the state unchanged.
-spec handle_pending_inbound(rocksdb:transaction_handle(), relcast_state()) ->
          {stop, pos_integer(), relcast_state()} | {ok, relcast_state()}.
handle_pending_inbound(Transaction, #state{new_defers = Defers,
                                           last_defer_check = Last0} = State) ->
    CountThreshold = application:get_env(relcast, defer_count_threshold, 20),
    TimeThreshold = application:get_env(relcast, defer_time_threshold, 5000),
    Last = case Last0 of
               undefined -> 0;
               _ -> Last0
           end,
    %% `millisecond' is the non-deprecated spelling of the `milli_seconds'
    %% unit and yields the identical value
    Time = erlang:monotonic_time(millisecond) - Last,
    case (Defers == undefined orelse Defers > CountThreshold) orelse
         (Last0 == undefined orelse Time > TimeThreshold) of
        false ->
            {ok, State};
        true ->
            handle_pending_inbound_(Transaction, State)
    end.
handle_pending_inbound_(Transaction, State) ->
    %% so we need to start at the oldest messages in the inbound queue and
    %% attempt Module:handle_message on each one. If the module returns `defer'
    %% we need to not attempt to deliver any newer messages from that actor.
    %% This function returns when either all actors have hit a defer, or we run
    %% out of messages to examine. If we are successful in handling any inbound
    %% messages during the run, we should loop back to the oldest messages and
    %% try to handle them again, as the module may now be ready to handle them.
    {ok, Iter} = rocksdb:transaction_iterator(State#state.db, Transaction, State#state.inbound_cf,
                                              [{iterate_upper_bound, max_inbound_key()}]),
    Res = rocksdb:iterator_move(Iter, first),
    %% Bind fresh variables for the returned state: the previous code matched
    %% against the already-bound `State', which turned these patterns into
    %% equality checks and crashed with case_clause whenever the callback had
    %% actually updated the state on the stop path.
    case find_next_inbound(Res, Iter, Transaction, false, [], State) of
        {stop, Timeout, NewState} ->
            {stop, Timeout, mark_defers(NewState)};
        {ok, false, _, NewState} ->
            %% nothing changed, we're done here
            {ok, mark_defers(NewState)};
        {ok, true, Acc, NewState} ->
            %% we changed something, try handling other deferreds again
            %% we have them in an accumulator, so we can just try to handle/delete them
            handle_defers(Transaction, Acc, [], false, mark_defers(NewState))
    end.
%% @private Walk the inbound CF in key order, attempting delivery of each
%% stored message. `Changed' records whether any message was handled this
%% pass; `Acc' collects the messages that stayed deferred (iteration order).
find_next_inbound({error, _}, Iter, _Transaction, Changed, Acc, State) ->
    %% iterator exhausted (or errored): end of this pass
    ok = rocksdb:iterator_close(Iter),
    {ok, Changed, lists:reverse(Acc), State};
find_next_inbound({ok, <<"i", _/binary>> = Key, <<FromActorID:16/integer, Msg/binary>>}, Iter, Transaction, Changed, Acc, State) ->
    CF = State#state.inbound_cf,
    case handle_message(Key, CF, FromActorID, Msg, Transaction, State) of
        defer ->
            %% keep on going
            find_next_inbound(rocksdb:iterator_move(Iter, next), Iter, Transaction, Changed,
                              [{CF, Key, FromActorID, Msg}|Acc], State);
        ignore ->
            %% keep on going
            %% NOTE(review): with a non-undefined Key, handle_message returns
            %% {ok, _} for a module-level `ignore', so this clause looks
            %% unreachable — confirm
            find_next_inbound(rocksdb:iterator_move(Iter, next), Iter, Transaction, Changed, Acc, State);
        {ok, NewState} ->
            %% we managed to handle a deferred message, yay
            OldDefers = maps:get(FromActorID, NewState#state.defers),
            find_next_inbound(rocksdb:iterator_move(Iter, next), Iter, Transaction, true, Acc,
                              NewState#state{defers=maps:put(FromActorID, OldDefers -- [Key], NewState#state.defers)});
        {stop, Timeout, NewState} ->
            ok = rocksdb:iterator_close(Iter),
            {stop, Timeout, NewState}
    end.
%% @private Retry the accumulated deferred messages until a full pass makes no
%% progress. `Out' collects messages still deferred this pass (note: consing
%% reverses the order relative to the input, so retry order alternates
%% between passes); when a pass changed something, go around again.
handle_defers(Transaction, [], Out, true, State) ->
    %% we changed something, go around again
    handle_defers(Transaction, Out, [], false, State);
handle_defers(_Transaction, [], _Out, false, State) ->
    %% no changes this iteration, bail
    {ok, State};
handle_defers(Transaction, [{CF, Key, FromActorID, Msg}|Acc], Out, Changed, State) ->
    case handle_message(Key, CF, FromActorID, Msg, Transaction, State) of
        defer ->
            handle_defers(Transaction, Acc, [{CF, Key, FromActorID, Msg}|Out], Changed, State);
        ignore ->
            handle_defers(Transaction, Acc, [{CF, Key, FromActorID, Msg}|Out], Changed, State);
        {ok, NewState} ->
            %% handled: remove it from the per-actor defer bookkeeping
            OldDefers = maps:get(FromActorID, NewState#state.defers),
            handle_defers(Transaction, Acc, Out, true,
                          NewState#state{defers=maps:put(FromActorID, OldDefers -- [Key], NewState#state.defers)});
        {stop, Timeout, NewState} ->
            {stop, Timeout, NewState}
    end.
%% @private Run Module:handle_message/3 for one inbound message.
%% `Key'/`CF' identify the stored copy of the message, or are `undefined'
%% when the message has not been persisted; a handled (or ignored) stored
%% message is deleted inside the supplied transaction.
handle_message(Key, CF, FromActorID, Message, Transaction,
               State = #state{module = Module, module_state = ModuleState}) ->
    case {Module:handle_message(Message, FromActorID, ModuleState), Key} of
        {defer, _} ->
            defer;
        {ignore, undefined} ->
            %% nothing stored, nothing to clean up
            ignore;
        {ignore, _StoredKey} ->
            %% the module is done with this stored message; remove it
            ok = rocksdb:transaction_delete(Transaction, CF, Key),
            {ok, State#state{transaction_dirty = true}};
        {{NewModuleState, Actions}, _} ->
            %% write new outbound messages, update the state and (if present)
            %% delete the message atomically
            Dirty =
                case Key of
                    undefined ->
                        false;
                    _ ->
                        ok = rocksdb:transaction_delete(Transaction, CF, Key),
                        true
                end,
            case handle_actions(Actions, Transaction,
                                State#state{module_state = NewModuleState}) of
                {ok, NewState} ->
                    {ok, maybe_dirty(Dirty, NewState)};
                {stop, Timeout, NewState} ->
                    {stop, Timeout, maybe_dirty(Dirty, NewState)}
            end
    end.
%% write all resulting messages and keys in an atomic transaction
%% Processes the action list returned by the callback module, writing each
%% outbound message into the current transaction. Returns {ok, State} or
%% {stop, Timeout, State}. Message value layout on disk:
%%   <<Type:2, ...>> where Type 0 = multicast, 1 = unicast, 2 = callback.
handle_actions([], _Transaction, State) ->
{ok, State};
handle_actions([new_epoch|Tail], Transaction, State) ->
%% Epoch rollover: commit what we have, open a column family for the new
%% epoch, and drop/destroy the previous epoch's CF.
ok = rocksdb:transaction_commit(Transaction),
{ok, NewCF} = rocksdb:create_column_family(State#state.db, make_column_family_name(State#state.epoch + 1),
State#state.db_opts),
ok = rocksdb:drop_column_family(State#state.active_cf),
ok = rocksdb:destroy_column_family(State#state.active_cf),
%% filter old floating acks
Floats = maps:map(fun(_K, Acks) ->
lists:filter(fun({_Seq, Epoch}) ->
Epoch /= State#state.epoch
end, Acks)
end, State#state.floated_acks),
%% when we're done handling actions, we will write the module state (and all subsequent outbound
%% messages from this point on) into the active CF, which is this new one now
{ok, Transaction1} = transaction(State#state.db, State#state.write_opts),
handle_actions(Tail, Transaction1, State#state{out_key_count=0, active_cf=NewCF,
transaction = Transaction1,
transaction_dirty = false,
new_messages = #{},
floated_acks = Floats,
epoch=State#state.epoch + 1, pending_acks=#{}});
handle_actions([{multicast, Message}|Tail], Transaction, State =
#state{out_key_count=KeyCount, bitfieldsize=BitfieldSize, id=ID, ids=IDs, active_cf=CF, module=Module}) ->
%% Type 0 = multicast; the bitfield tracks which actors still need it
%% (our own bit starts cleared -- we consume our copy inline below).
Bitfield = make_bitfield(BitfieldSize, IDs, ID),
Key = make_outbound_key(KeyCount),
ok = rocksdb:transaction_put(Transaction, CF, Key, <<0:2/integer, Bitfield:BitfieldSize/bits, Message/binary>>),
%% handle our own copy of the message
%% deferring your own message is an error
State1 = State#state{outbound_keys = [Key|State#state.outbound_keys], new_messages = #{}},
case Module:handle_message(Message, ID, State#state.module_state) of
ignore ->
handle_actions(Tail, Transaction, update_next(IDs -- [ID], CF, Key, State1#state{out_key_count=KeyCount+1}));
{ModuleState, Actions} ->
%% actions produced by our own copy run before the remaining tail
handle_actions(Actions++Tail, Transaction, update_next(IDs -- [ID], CF, Key, State1#state{module_state=ModuleState, out_key_count=KeyCount+1}))
end;
handle_actions([{callback, Message}|Tail], Transaction, State =
#state{out_key_count=KeyCount, bitfieldsize=BitfieldSize, id=ID, ids=IDs, active_cf=CF, module=Module}) ->
%% Type 2 = callback message: each receiver asks the module to render its
%% own copy via callback_message/3 at delivery time.
Bitfield = make_bitfield(BitfieldSize, IDs, ID),
Key = make_outbound_key(KeyCount),
ok = rocksdb:transaction_put(Transaction, CF, Key, <<2:2/integer, Bitfield:BitfieldSize/bits, Message/binary>>),
State1 = State#state{outbound_keys = [Key|State#state.outbound_keys], new_messages = #{}},
case Module:callback_message(ID, Message, State#state.module_state) of
none ->
%% module produced nothing addressed to us
handle_actions(Tail, Transaction, update_next(IDs -- [ID], CF, Key, State1#state{out_key_count=KeyCount+1}));
OurMessage when is_binary(OurMessage) ->
%% handle our own copy of the message
%% deferring your own message is an error
case Module:handle_message(OurMessage, ID, State#state.module_state) of
ignore ->
handle_actions(Tail, Transaction, update_next(IDs -- [ID], CF, Key, State1#state{out_key_count=KeyCount+1}));
{ModuleState, Actions} ->
handle_actions(Actions++Tail, Transaction, update_next(IDs -- [ID], CF, Key, State1#state{module_state=ModuleState, out_key_count=KeyCount+1}))
end
end;
handle_actions([{unicast, ID, Message}|Tail], Transaction, State = #state{module=Module, id=ID}) ->
%% Unicast addressed to ourselves: handled inline, never written to disk.
%% handle our own message
%% deferring your own message is an error
case Module:handle_message(Message, ID, State#state.module_state) of
ignore ->
handle_actions(Tail, Transaction, State);
{ModuleState, Actions} ->
handle_actions(Actions++Tail, Transaction, State#state{module_state=ModuleState})
end;
handle_actions([{unicast, ToActorID, Message}|Tail], Transaction, State = #state{out_key_count=KeyCount,
new_messages = NewMsgs,
active_cf=CF}) ->
%% Type 1 = unicast; the 14-bit field carries the destination actor ID.
Key = make_outbound_key(KeyCount),
State1 = State#state{outbound_keys = [Key|State#state.outbound_keys], new_messages = NewMsgs#{ToActorID => true}},
ok = rocksdb:transaction_put(Transaction, CF, Key, <<1:2/integer, ToActorID:14/integer, Message/binary>>),
handle_actions(Tail, Transaction, update_next([ToActorID], CF, Key, State1#state{out_key_count=KeyCount+1}));
handle_actions([{stop, Timeout}|_Tail], _Transaction, State) ->
%% Stop request: any remaining actions in the list are discarded.
{stop, Timeout, State}.
%% For every actor in Actors whose last_sent entry is still 'none', record
%% {CF, Key} as the next outbound position to scan from; entries that already
%% point at a key (and actors not listed) are left untouched.
update_next(Actors, CF, Key, State = #state{last_sent = LastSent0}) ->
    Update =
        fun(Actor, none) ->
                case lists:member(Actor, Actors) of
                    true -> {CF, Key};
                    false -> none
                end;
           (_Actor, Existing) ->
                Existing
        end,
    State#state{last_sent = maps:map(Update, LastSent0)}.
%% Build a BitfieldSize-bit field with one bit per actor in Actors (in list
%% order): 0 for Actor itself, 1 for everyone else, zero-padded on the right
%% out to BitfieldSize bits.
make_bitfield(BitfieldSize, Actors, Actor) ->
    NumActors = length(Actors),
    Bits = << <<(case A =:= Actor of true -> 0; false -> 1 end):1/integer>>
              || A <- Actors >>,
    <<Bits:NumActors/bits, 0:(BitfieldSize - NumActors)/integer>>.
%% rocksdb open options for a cluster of NumActors actors. The bitset merge
%% operator's width covers every actor plus two extra bits (presumably the
%% 2-bit message type tag -- confirm), rounded up to a whole byte.
db_options(NumActors) ->
    BitsetWidth = round_to_nearest_byte(NumActors + 2),
    [
     {create_if_missing, true},
     {max_open_files, 1024},
     {max_log_file_size, 100*1024*1024},
     {merge_operator, {bitset_merge_operator, BitsetWidth}}
    ].
%% Round a bit count up to the next whole-byte multiple of 8 (exact multiples
%% are returned unchanged).
round_to_nearest_byte(Bits) ->
    Excess = Bits rem 8,
    if
        Excess =:= 0 -> Bits;
        true -> Bits + 8 - Excess
    end.
%% Load (or migrate) the persisted module state from DB.
%% Two on-disk layouts exist: a monolithic binary under ?stored_module_state,
%% or a per-key layout described by a key tree under ?stored_key_tree.
%% Returns {SerializedState, ModuleState, KeyTree | bin}.
get_mod_state(DB, Module, ModuleState0, WriteOpts) ->
case rocksdb:get(DB, ?stored_module_state, []) of
{ok, SerializedModuleState} ->
%% Monolithic state found: rehydrate it, then re-serialize; if the
%% module now serializes to a map, migrate to the per-key layout and
%% delete the monolithic record.
{SerState, ModState, _} = rehydrate(Module, SerializedModuleState, ModuleState0),
{ok, Txn} = transaction(DB, WriteOpts),
New = Module:serialize(ModState),
KT =
case do_serialize(Module, undefined, New, ?stored_key_prefix, Txn) of
bin ->
bin;
KeyTree ->
ok = rocksdb:transaction_put(Txn, ?stored_key_tree,
term_to_binary(KeyTree, [compressed])),
ok = rocksdb:transaction_delete(Txn, ?stored_module_state),
KeyTree
end,
rocksdb:transaction_commit(Txn),
{SerState, ModState, KT};
not_found ->
%% No monolithic state: try the per-key layout, else start fresh.
{SerState, ModState, KeyTree} =
case rocksdb:get(DB, ?stored_key_tree, []) of
{ok, KeyTreeBin} ->
KT = binary_to_term(KeyTreeBin),
do_deserialize(Module, ModuleState0, ?stored_key_prefix, KT, DB);
not_found ->
{undefined, ModuleState0, bin}
end,
NewSer = Module:serialize(ModState),
case get_key_tree(Module, NewSer) of
%% matches the existing tree on disk
KeyTree ->
{SerState, ModState, KeyTree};
%% monolithic state
bin ->
ok = rocksdb:put(DB, ?stored_module_state, NewSer,
[{sync, true}]),
_ = rocksdb:delete(DB, ?stored_key_tree, [{sync, true}]),
{SerState, ModState, bin};
%% new tree, write the structure to disk
KeyTreeNew ->
%% lager:info("writing initial struct to disk"),
ok = rocksdb:put(DB, ?stored_key_tree,
term_to_binary(KeyTreeNew, [compressed]), [{sync, true}]),
%% force disk sync on first startup, don't wait for messages
{ok, Txn} = transaction(DB, WriteOpts),
_KeyTree = do_serialize(Module, undefined, NewSer, ?stored_key_prefix, Txn),
rocksdb:transaction_commit(Txn),
{NewSer, ModState, KeyTreeNew}
end
end.
%% Turn a serialized state back into a live module state via the module's
%% deserialize/1 and restore/2 callbacks. The trailing 'bin' tags the result
%% as having come from a monolithic (binary-layout) record.
rehydrate(Module, SerState, ModuleState0) ->
    Deserialized = Module:deserialize(SerState),
    {ok, Restored} = Module:restore(Deserialized, ModuleState0),
    {SerState, Restored, bin}.
%% get the maximum key ID used
%% Returns the highest inbound sequence number present in CF, or 0 when no
%% inbound keys exist.
get_last_key_in(DB, CF) ->
    get_last_key(DB, CF, $i, min_inbound_key()).

%% Returns the highest outbound sequence number present in CF, or 0 when no
%% outbound keys exist.
get_last_key_out(DB, CF) ->
    get_last_key(DB, CF, $o, min_outbound_key()).

%% Shared implementation for the two lookups above (previously duplicated).
%% XXX iterate_upper_bound doesn't work, so we can't use it. Instead we seek
%% to the last key in the CF; if that one doesn't carry the expected prefix,
%% step back once and try the previous key before giving up.
get_last_key(DB, CF, Prefix, LowerBound) ->
    {ok, Iter} = rocksdb:iterator(DB, CF, [{iterate_lower_bound, LowerBound}]),
    Max =
        case key_number(rocksdb:iterator_move(Iter, last), Prefix) of
            {ok, N} ->
                N;
            error ->
                case key_number(rocksdb:iterator_move(Iter, prev), Prefix) of
                    {ok, N} -> N;
                    error -> 0
                end
        end,
    rocksdb:iterator_close(Iter),
    Max.

%% Extract the numeric part of an 11-byte "<Prefix><10 digits>" key; returns
%% 'error' for iterator errors or keys of any other shape.
key_number({ok, <<P:8/integer, Num:10/binary>>, _Value}, P) ->
    {ok, binary_to_integer(Num)};
key_number(_Other, _Prefix) ->
    error.
%% iterate the outbound messages until we find one for this ActorID
%% Returns up to Count pending {Key, CF, Value, IsMulticast} tuples for
%% ActorID starting at StartKey, or 'not_found' / {not_found, LastKey, CF}
%% when nothing is pending (see find_next_outbound_/6).
find_next_outbound(ActorID, CF, StartKey, State, Count) ->
find_next_outbound(ActorID, CF, StartKey, State, Count, true).
%% AcceptStart controls whether StartKey itself may be returned (true) or is
%% known to have been consumed already and must be skipped (false).
find_next_outbound(ActorID, CF, StartKey, State, Count, AcceptStart) ->
{ok, Iter} = rocksdb:transaction_iterator(State#state.db, State#state.transaction,
CF, [{iterate_upper_bound, max_outbound_key()}]),
Res =
case AcceptStart of
true ->
rocksdb:iterator_move(Iter, StartKey);
false ->
%% on the paths where this is called, we're calling this with a
%% known to be existing start key, so we need to move the
%% iterator past it initially so we don't get it back
rocksdb:iterator_move(Iter, StartKey),
rocksdb:iterator_move(Iter, next)
end,
find_next_outbound_(ActorID, Res, Iter, State, Count, []).
%% Worker for find_next_outbound/6: walk the outbound keyspace collecting up
%% to Count messages addressed to ActorId; hits accumulate (reversed) in Acc
%% as {Key, CF, Value, IsMulticast} and are returned in key order.
%% NOTE(review): the Count == 0 clause only fires with a non-empty Acc;
%% callers presumably always pass Count >= 1 -- confirm at call sites.
find_next_outbound_(_ActorId, _, Iter, _State, 0, Acc) when Acc /= [] ->
rocksdb:iterator_close(Iter),
lists:reverse(Acc);
find_next_outbound_(_ActorId, {error, _}, Iter, State, _, Acc) ->
%% try to return the *highest* key we saw, so we can try starting here next time
case Acc of
[] ->
Res = case rocksdb:iterator_move(Iter, prev) of
{ok, Key, _} ->
{not_found, Key, State#state.active_cf};
_ ->
not_found
end;
_ ->
Res = lists:reverse(Acc)
end,
rocksdb:iterator_close(Iter),
Res;
find_next_outbound_(ActorID, {ok, <<"o", _/binary>> = Key, <<1:2/integer, ActorID:14/integer, Value/binary>>}, Iter, State,
Count, Acc) ->
%% unicast message for this actor
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next),
Iter, State, Count - 1,
[{Key, State#state.active_cf, Value, false}|Acc]);
find_next_outbound_(ActorID, {ok, <<"o", _/binary>>, <<1:2/integer, _/bits>>}, Iter, State,
Count, Acc) ->
%% unicast message for someone else
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next), Iter, State, Count, Acc);
find_next_outbound_(ActorID, {ok, <<"o", _/binary>> = Key, <<Type:2/integer, Tail/bits>>}, Iter,
State = #state{bitfieldsize=BitfieldSize},
Count, Acc) when Type == 0; Type == 2 ->
%% multicast (0) or callback (2): the mask records which actors still
%% need this message, one bit each from the high end.
<<ActorMask:BitfieldSize/integer-unsigned-big, Value/binary>> = Tail,
case ActorMask band (1 bsl (BitfieldSize - ActorID)) of
0 ->
%% not for us, keep looking
case ActorMask == 0 of
true ->
%% everyone has gotten this message, we can delete it now
ok = rocksdb:transaction_delete(State#state.transaction, State#state.active_cf, Key);
false ->
ok
end,
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next), Iter, maybe_dirty(ActorMask == 0, State), Count, Acc);
_ when Type == 0 ->
%% multicast message with the high bit set for this actor
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next), Iter, State,
Count - 1,
[{Key, State#state.active_cf, Value, true}|Acc]);
_ when Type == 2 ->
%% callback message with the high bit set for this actor
Module = State#state.module,
case Module:callback_message(ActorID, Value, State#state.module_state) of
none ->
%% nothing for this actor
flip_actor_bit(ActorID, State#state.transaction, State#state.active_cf, Key, BitfieldSize),
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next), Iter, State,
Count, Acc);
Message ->
find_next_outbound_(ActorID, rocksdb:iterator_move(Iter, next), Iter, State,
Count - 1,
[{Key, State#state.active_cf, Message, true}|Acc])
end
end.
%% Key-space bounds. Keys are a one-char prefix ("i" inbound / "o" outbound)
%% followed by 10 zero-padded decimal digits; 4294967296 = 2^32, one past the
%% largest 32-bit sequence value, so the max keys act as exclusive sentinels
%% (see the iterate_upper_bound use in find_next_outbound/6).
min_inbound_key() ->
<<"i0000000000">>.
max_inbound_key() ->
<<"i4294967296">>.
min_outbound_key() ->
<<"o0000000000">>.
max_outbound_key() ->
<<"o4294967296">>.
%% Format KeyCount as an inbound storage key: "i" ++ 10 zero-padded digits.
make_inbound_key(KeyCount) ->
    make_key($i, KeyCount).

%% Format KeyCount as an outbound storage key: "o" ++ 10 zero-padded digits.
make_outbound_key(KeyCount) ->
    make_key($o, KeyCount).

%% Shared key formatter for the two constructors above.
make_key(Prefix, KeyCount) ->
    iolist_to_binary(io_lib:format("~c~10..0b", [Prefix, KeyCount])).
%% Column family name for an epoch: "epoch" ++ 10 zero-padded digits, as a
%% flat string.
make_column_family_name(EpochCount) ->
    Name = io_lib:format("epoch~10..0b", [EpochCount]),
    lists:flatten(Name).

%% Inverse of make_column_family_name/1: parse the epoch number back out of
%% a column family name.
cf_to_epoch("epoch" ++ EpochString) ->
    list_to_integer(EpochString).
%% Fold a rocksdb iterator into an ActorID => [Message] map describing all
%% pending outbound traffic (status/debug view). Per-actor lists are built
%% reversed and flipped at the end, so messages come back in key order.
build_outbound_status({error, _}, Iter, _BFS, OutboundQueue) ->
%% iterator exhausted (or failed): close it and restore message order
rocksdb:iterator_close(Iter),
maps:map(fun(_K, V) -> lists:reverse(V) end, OutboundQueue);
build_outbound_status({ok, <<"o", _/binary>>, <<1:2/integer, ActorID:14/integer, Value/binary>>},
Iter, BFS, OutboundQueue) ->
%% unicast message
build_outbound_status(rocksdb:iterator_move(Iter, next), Iter, BFS, prepend_message([ActorID], Value, OutboundQueue));
build_outbound_status({ok, <<"o", _/binary>>, <<0:2/integer, Tail/bits>>}, Iter, BFS, OutboundQueue) ->
%% multicast message: decode the pending-actor bitfield into an ID list
<<ActorMask:BFS/bits, Value/binary>> = Tail,
ActorIDs = actor_list(ActorMask, 1, []),
build_outbound_status(rocksdb:iterator_move(Iter, next), Iter, BFS, prepend_message(ActorIDs, Value, OutboundQueue));
build_outbound_status({ok, _Key, _Value}, Iter, BFS, OutboundQueue) ->
%% any other key/value shape (e.g. callback-type messages) is skipped
build_outbound_status(rocksdb:iterator_move(Iter, next), Iter, BFS, OutboundQueue).
%% Collect all pending inbound messages as a key-ordered
%% [{FromActorID, Msg}] list (status/debug view).
build_inbound_status({error, _}, Iter, InboundQueue) ->
rocksdb:iterator_close(Iter),
lists:reverse(InboundQueue);
build_inbound_status({ok, <<"i", _/binary>>, <<FromActorID:16/integer, Msg/binary>>}, Iter, InboundQueue) ->
build_inbound_status(rocksdb:iterator_move(Iter, next), Iter, [{FromActorID, Msg}|InboundQueue]);
build_inbound_status({ok, _Key, _Value}, Iter, InboundQueue) ->
%% skip keys that are not inbound messages
build_inbound_status(rocksdb:iterator_move(Iter, next), Iter, InboundQueue).
%% Build a FromActorID => [InboundKey] map of deferred messages.
%% NOTE(review): unlike build_inbound_status/3 there is no catch-all clause
%% for non-"i" keys -- presumably the iterator is bounded to inbound keys;
%% a stray key would crash with function_clause. Confirm at the call site.
build_defer_list({error, _}, Iter, _CF, Acc) ->
rocksdb:iterator_close(Iter),
Acc;
build_defer_list({ok, <<"i", _/binary>>=Key, <<FromActorID:16/integer, _Msg/binary>>},
Iter, CF, Acc) ->
DefersForThisActor = maps:get(FromActorID, Acc, []),
build_defer_list(rocksdb:iterator_move(Iter, next), Iter, CF, maps:put(FromActorID, [Key|DefersForThisActor], Acc)).
%% Prepend Message to each listed actor's queue in Map; actors with no queue
%% yet are seeded with an empty one first, and keys outside Actors are left
%% untouched.
prepend_message(Actors, Message, Map) ->
    Missing = Actors -- maps:keys(Map),
    Seeded = maps:merge(maps:from_list([{A, []} || A <- Missing]), Map),
    maps:map(fun(Actor, Queue) ->
                     case lists:member(Actor, Actors) of
                         true -> [Message | Queue];
                         false -> Queue
                     end
             end, Seeded).
%% Decode a bitfield into the (1-based) positions of its set bits, walking
%% from the high end; the result is accumulated head-first, so positions come
%% back highest-first.
actor_list(<<>>, _Index, Acc) ->
    Acc;
actor_list(<<Bit:1/integer, Rest/bits>>, Index, Acc) ->
    case Bit of
        1 -> actor_list(Rest, Index + 1, [Index | Acc]);
        0 -> actor_list(Rest, Index + 1, Acc)
    end.
%% Clear ActorID's bit in the pending-actor mask of a stored message (bits
%% are numbered from the high end of the BFS-bit mask).
flip_actor_bit(ActorID, Transaction, CF, Key, BFS) ->
%% with transactions, we can't actually do a merge at this point,
%% so we need to read, edit, and write the bitfield inside the transaction
{ok, Bits} = rocksdb:transaction_get(Transaction, CF, Key),
<<Type:2/bits, ActorMask:BFS/integer-unsigned-big, Post/bits>> = Bits,
Mask2 = ActorMask band (bnot (1 bsl (BFS - ActorID))),
ok = rocksdb:transaction_put(Transaction, CF, Key, <<Type/bits, Mask2:BFS/integer-unsigned-big, Post/bits>>).
%% generates a partitioned sequence number with the actor ID in the high bits
%% rollover happens naturally because once the sequence number uses more than 32-PrefixLen
%% bits the high bits get dropped
make_seq(ID, #state{seq_map = SeqMap, ids = Actors} = State) ->
    Counter = maps:get(ID, SeqMap, 0),
    %% number of bits needed to distinguish length(Actors) actor IDs
    PrefixLen = ceil(math:log2(length(Actors))),
    CounterBits = 32 - PrefixLen,
    <<TaggedSeq:32/integer-unsigned-big>> = <<ID:PrefixLen, Counter:CounterBits>>,
    {TaggedSeq, State#state{seq_map = SeqMap#{ID => Counter + 1}}}.
%% Forget the sequence counter for ID so its next make_seq/2 restarts at 0.
reset_seq(ID, #state{seq_map=SeqMap}=State) ->
State#state{seq_map=maps:remove(ID, SeqMap)}.
%% Serialize the module state into the current transaction, but only when it
%% has actually changed since the last serialization (first clause: no-op).
maybe_serialize(#state{module_state = New, old_module_state = Old} = S) when Old == New ->
S;
maybe_serialize(#state{module_state = New0,
module = Mod,
old_serialized = Old,
transaction = Transaction} = S) ->
New = Mod:serialize(New0),
%% do_serialize/5 diffs New against Old and rewrites only changed keys
_KeyTree = do_serialize(Mod, Old, New, ?stored_key_prefix, Transaction),
S#state{old_serialized = New, old_module_state = New0, transaction_dirty = true}.
%% Size of a previously-serialized state; -1 marks "not a map" (binary or
%% 'undefined'), a value no real map size can equal.
old_size(Previous) when is_map(Previous) ->
    map_size(Previous);
old_size(_NonMap) ->
    -1.
%% TODO: remove all keytree accumulation from here?
%% Write serialized module state New into the transaction.
%% A binary state is stored whole under ?stored_module_state and 'bin' is
%% returned. A map state is written one key at a time under Prefix-derived
%% names, recursing into nested maps, and only keys whose value differs from
%% Old are rewritten. Returns the key tree that describes the layout.
do_serialize(Mod, Old, New, Prefix, Transaction) ->
case New of
State when is_binary(State) ->
ok = rocksdb:transaction_put(Transaction, ?stored_module_state, State),
bin;
StateMap ->
S = lists:sort(maps:to_list(StateMap)),
SSize = maps:size(StateMap),
OSize = old_size(Old),
%% Normalize Old into a list that zips pairwise with sorted S:
O = case Old of
%% since we're serializing for the first time, we need to make sure that
%% everything gets written out, otherwise we have partial state that
%% won't restore correctly.
BorU when BorU == undefined orelse
is_binary(BorU) ->
%% sentinel value guarantees inequality below for every key
lists:map(fun({K, _V}) -> {K, never_ever_match_with_anything} end, S);
_Size when OSize =/= SSize ->
%% key sets differ: pad Old with 'undefined' for new keys
%% and drop vanished keys so the zip below lines up.
%% NOTE(review): this branch does not sort the result like
%% the _ branch does; alignment with sorted S relies on
%% maps:to_list/1 ordering -- confirm states stay small.
SKeys = maps:keys(StateMap),
OKeys = maps:keys(Old),
Old1 = lists:foldl(fun(L, OM) ->
OM#{L => undefined}
end,
Old,
SKeys -- OKeys),
maps:to_list(maps:without(OKeys -- SKeys, Old1));
_ ->
lists:sort(maps:to_list(Old))
end,
L = lists:zip(S, O),
KeyTree =
lists:map(
fun({{K, V}, {_, V}}) ->
%% same V on both sides: unchanged, nothing to write
%% should be a binary
K;
({{K, V}, {_, OV}}) ->
KeyName = <<Prefix/binary, (atom_to_binary(K, utf8))/binary>>,
case is_map(V) of
true ->
%% nested map: recurse with an extended prefix
do_serialize(K, fixup_old_map(OV), V, <<KeyName/binary, "_">>, Transaction);
false ->
%% lager:info("writing ~p to disk", [K]),
ok = rocksdb:transaction_put(Transaction, KeyName, V),
K
end
end,
L),
[Mod | KeyTree]
end.
%% Compute the key tree that do_serialize/5 would produce for this state:
%% 'bin' for a monolithic binary state, otherwise [Mod | Children] where a
%% nested map becomes a sublist and a leaf value contributes just its key.
get_key_tree(_Mod, State) when is_binary(State) ->
    bin;
get_key_tree(Mod, StateMap) ->
    Children = [case is_map(Value) of
                    true -> get_key_tree(Key, Value);
                    false -> Key
                end || {Key, Value} <- maps:to_list(StateMap)],
    [Mod | Children].
%% Undo the sentinel that do_serialize/5 plants to force every key to be
%% written on first serialization; any other value passes through untouched.
fixup_old_map(never_ever_match_with_anything) ->
undefined;
fixup_old_map(M) ->
M.
%% Rebuild the serialized state map from disk by walking KeyTree (as produced
%% by do_serialize/5 / get_key_tree/2), then rehydrate it via the module's
%% deserialize/restore callbacks. Returns {Serialized, ModuleState, KeyTree}.
do_deserialize(Mod, NewState, Prefix, KeyTree, RocksDB) ->
R = fun Rec(Pfix, [_Top | KT], DB) ->
lists:foldl(
fun(K, Acc) when is_atom(K) ->
%% leaf key: read the stored binary, 'undefined' if absent
KeyName = <<Pfix/binary, (atom_to_binary(K, utf8))/binary>>,
Term = case rocksdb:get(DB, KeyName, []) of
{ok, Bin} ->
Bin;
not_found ->
undefined
end,
Acc#{K => Term};
(L, Acc) when is_list(L) ->
%% subtree: head is the key, rest are its children
K = hd(L),
KeyName = <<Pfix/binary, (atom_to_binary(K, utf8))/binary, "_">>,
Acc#{K => Rec(KeyName, L, DB)}
end,
#{},
KT);
Rec(_, bin, DB) ->
%% monolithic binary layout
case rocksdb:get(DB, ?stored_module_state, []) of
{ok, Bin} ->
Bin;
not_found ->
not_found
end
end,
Map = R(Prefix, KeyTree, RocksDB),
{A, B, _} = rehydrate(Mod, Map, NewState),
{A, B, KeyTree}.
%% Replace the module state unless the callback returned 'ignore'.
maybe_update_state(State, ignore) ->
State;
maybe_update_state(State, NewModuleState) ->
State#state{module_state = NewModuleState}.
%% Commit the current transaction when warranted: serializes any changed
%% module state first, commits, and opens a fresh transaction. A first
%% argument of 'none' (or a clean transaction) skips the commit entirely.
maybe_commit(none, S) ->
S;
maybe_commit(_, #state{transaction_dirty = false} = S) ->
%% nothing has been written into the transaction since the last commit
S;
maybe_commit(_, #state{transaction = Txn, db = DB, write_opts = Opts} = S0) ->
S = maybe_serialize(S0),
ok = rocksdb:transaction_commit(Txn),
{ok, Txn1} = transaction(DB, Opts),
S#state{transaction = Txn1, transaction_dirty = false}.
%% Mark the transaction dirty unless the condition is exactly 'false'.
maybe_dirty(false, S) ->
S;
maybe_dirty(_, S) ->
S#state{transaction_dirty = true}.
%% Reset the deferred-message counter and stamp the time of this check.
%% Uses erlang:monotonic_time/1 (not wall clock) so the interval math is
%% immune to clock adjustments; 'millisecond' replaces the deprecated
%% 'milli_seconds' alias for the same unit.
mark_defers(S) ->
    S#state{new_defers = 0,
            last_defer_check = erlang:monotonic_time(millisecond)}.
%% Queue a floated (not yet flushed) ack {Seq, Epoch} for actor From.
store_ack(Seq, From, #state{epoch = Epoch, floated_acks = Acks} = S) ->
%% should we be able to infer the seq without external tracking?
ActorAcks = maps:get(From, Acks, []),
%% at some point we might not need to keep them in order?
%% NOTE(review): '++' appends one element per call (O(n) each); kept
%% because get_acks extracts sequences in this insertion order.
S#state{floated_acks = Acks#{From => ActorAcks ++ [{Seq, Epoch}]}}.
%% unconditional version
%% Drain all floated acks: returns {AcksByActor, State'} with epochs stripped
%% (sequence numbers only) and both floated_acks and outbound_keys cleared,
%% or {none, State} when nothing is pending.
get_acks(#state{floated_acks = Acks} = S) ->
case maps:size(Acks) /= 0 of
true ->
{maps:map(fun(_, V) ->
[Sq || {Sq, _Epoch} <- V]
end, Acks),
S#state{floated_acks = #{}, outbound_keys = []}};
%% we've already synced to disc for this message
false ->
{none, S}
end.
%% Conditional version: drain floated acks only when at least one of Keys is
%% among the outbound keys written since the last flush; otherwise {none, S}.
get_acks(Keys, #state{floated_acks = Acks, outbound_keys = OutKeys} = S) ->
case maps:size(Acks) /= 0 andalso
lists:any(fun(Key) -> lists:member(Key, OutKeys) end,
Keys) of
%% we've been floated but not acked
true ->
{maps:map(fun(_, V) ->
[Sq || {Sq, _Epoch} <- V]
end, Acks),
S#state{floated_acks = #{}, outbound_keys = []}};
%% we've already synced to disc for this message
false ->
{none, S}
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% make_seq/2 must hand out strictly increasing values per actor.
seq_increment_test() ->
State = #state{ids=[1, 2, 3, 4, 5]},
{Seq1, State1} = make_seq(1, State),
{Seq2, _State2} = make_seq(1, State1),
%% sequence numbers for the same actor should increment
?assert(Seq1 < Seq2),
?assertEqual(1, Seq2 - Seq1).
%% Different actors occupy different (ID-tagged) sequence ranges.
seq_partition_test() ->
State = #state{ids=[1, 2, 3, 4, 5]},
{Seq1, State1} = make_seq(1, State),
{Seq2, _State2} = make_seq(2, State1),
%% Sequence 0 for 2 actors should not be the same
?assertNotEqual(Seq1, Seq2).
%% With 5 actors the counter occupies 29 bits; starting at 2^29 - 1 the next
%% value must wrap, so the second sequence is smaller than the first.
seq_rollover_test() ->
State = #state{ids=[1, 2, 3, 4, 5], seq_map=#{1 => trunc(math:pow(2, 29)) - 1}},
{Seq1, State1} = make_seq(1, State),
{Seq2, _State2} = make_seq(1, State1),
?assert(Seq1 > Seq2).
-endif. | src/relcast.erl | 0.530236 | 0.468183 | relcast.erl | starcoder |
%%%----------------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright 2012 University of St Andrews (See LICENCE)
%%% @headerfile "skel.hrl"
%%%
%%% @doc This module contains the initialization logic of a Farm skeleton.
%%%
%%% A task farm has the most basic kind of stream parallelism - inputs are
%%% sent to one of `n' replicas of the inner skeleton for processing.
%%%
%%% === Example ===
%%%
%%% ```skel:run([{farm, [{seq, fun ?MODULE:p1/1}], 10}], Input)'''
%%%
%%% In this simple example, we produce a farm with ten workers to run the
%%% sequential, developer-defined function `p1/1' using the list of inputs
%%% `Input'.
%%%
%%% @end
%%%----------------------------------------------------------------------------
-module(sk_farm).
-export([
start/2
]).
-include("skel.hrl").
%% @doc Initialises a Farm skeleton given the inner workflow and number of
%% workers, respectively.
%% The two-tuple form builds a plain farm; the four-tuple form builds a
%% hybrid farm with separate CPU and GPU worker pools sharing one collector.
%% (Spec fixed: the old text read `CPUWorkflowCPUWorkflow' and used
%% `GPUNumberOfWorkers' for the CPU worker count as well.)
-spec start( Parameters, NextPid ) -> WorkflowPid when
    Parameters :: {Workflow :: workflow(),
                   NumberOfWorkers :: pos_integer() }
                | {CPUWorkflow :: workflow(),
                   CPUNumberOfWorkers :: pos_integer(),
                   GPUWorkflow :: workflow(),
                   GPUNumberOfWorkers :: pos_integer()},
    NextPid :: pid(),
    WorkflowPid :: pid().
%% Plain farm: one collector feeding NextPid, NWorkers replicas of WorkFlow,
%% and an emitter distributing inputs over the workers.
start({WorkFlow , NWorkers}, NextPid) ->
    CollectorPid = proc_lib:spawn(sk_farm_collector, start, [NWorkers, NextPid]),
    WorkerPids = sk_utils:start_workers(NWorkers, WorkFlow, CollectorPid),
    proc_lib:spawn(sk_farm_emitter, start, [WorkerPids]);
%% Hybrid farm: the collector expects results from both pools combined.
start({WorkFlowCPU, NCPUWorkers, WorkFlowGPU, NGPUWorkers}, NextPid) ->
    CollectorPid = proc_lib:spawn(sk_farm_collector,
                                  start, [NCPUWorkers + NGPUWorkers,
                                          NextPid]),
    WorkerPids = sk_utils:start_workers_hyb(NCPUWorkers,
                                            NGPUWorkers,
                                            WorkFlowCPU,
                                            WorkFlowGPU,
                                            CollectorPid),
    proc_lib:spawn(sk_farm_emitter, start, [WorkerPids]). | src/sk_farm.erl | 0.502197 | 0.618348 | sk_farm.erl | starcoder |
%%%-------------------------------------------------------------------
%%% Copyright 2014 The RySim Authors. All rights reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% @doc
%%% Allows for registration of distribution specifications and generation of
%%% random numbers based on those specifications.
%%%
%%% == Records ==
%%% === record(dist_call) ===
%%% <em>label</em> is a user provided string that is associated with a
%%% distribution. This string must be unique amongst other labels.
%%%
%%% <em>scale</em> is a scaling factor to be applied to the results
%%% generated from the distribution. This value should not be negative,
%%% though the system does not enforce this.
%%%
%%% <em>params</em> is a list of floats provided as the control
%%% parameters for the distribution. The number of entries is dependent
%%% on the specific value of type. The expected number of entries for each
%%% value of type are as follows:
%%% <ul>
%%% <li>gaussian_tail = 2</li>
%%% <li>exponential = 1</li>
%%% <li>flat = 2</li>
%%% <li>lognormal = 2</li>
%%% <li>poisson = 1</li>
%%% <li>bernoulli = 1</li>
%%% <li>binomial = 2</li>
%%% <li>negative_binomial = 2</li>
%%% <li>geometric = 1</li>
%%% </ul>
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(num_gen).
-include("rysim.hrl").
-export([initialize_generator/1, register_distribution/5,
call_distribution/2]).
%% ===================================================================
%% Records
%% ===================================================================
%% A registered distribution, as consumed by generate_number/3.
%% NOTE(review): 'type' is declared string() but register_distribution/5 and
%% generate_number/3 treat it as an atom (e.g. 'poisson'); the field spec
%% looks stale -- confirm against distribution_type() in rysim.hrl.
-record(dist_call, {type = "" :: string(),
scale = 1.0 :: float(),    %% multiplier applied to every sample
params = [] :: [float()]}).  %% control parameters; arity depends on type
%% ===================================================================
%% API
%% ===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Initialize the state of the random number generator to be used by the
%% probability distributions. Calling this affects the global random
%% seed, so should probably only ever be called once for consistency.
%% @spec initialize_generator(Seed :: integer()) -> {ok, NewData}
%% NewData = record(ng_data)
%% @end
%% --------------------------------------------------------------------
initialize_generator(Seed) when is_integer(Seed) ->
%% Seeds the process-wide PRNG used by all distribution calls.
%% NOTE(review): the 'random' module is deprecated (removed in OTP 26);
%% porting to 'rand' would change the generated sequences.
random:seed(Seed, Seed, Seed),
{ok, #ng_data{dists=gb_trees:empty()}};
initialize_generator(Seed) ->
%% non-integer seed: log and fail fast
error_logger:error_msg("~p:~p: Invalid call to initialize_generator, ~p!",
[?MODULE, ?LINE, Seed]),
throw(badarg).
%%--------------------------------------------------------------------
%% @doc
%% Registers a distribution call for future usage.
%% @spec register_distribution(Label :: string(), Type :: distribution_type(), Scale :: float(), Params :: [float()],
%% OldData) -> {ok, NewData}
%% OldData = record(ng_data)
%% NewData = record(ng_data)
%% @end
%% --------------------------------------------------------------------
%% Register a distribution under Label; throws 'duplabel' if the label is
%% taken and 'badarg' for malformed arguments. The match on the
%% validate_params/2 result asserts that validation returns the parameters
%% unchanged (validate_params may also throw for invalid input).
%% Fix: removed the no-op self-match `Type = Type,' that the original
%% carried -- Type is already bound, so the match always succeeded and did
%% nothing.
register_distribution(Label, Type, Scale, Params, OldData) when is_list(Label),
                                                                is_float(Scale),
                                                                is_list(Params),
                                                                is_record(OldData, ng_data) ->
    Dists = OldData#ng_data.dists,
    Params = validators:validate_params(Type, Params),
    case gb_trees:lookup(Label, Dists) of
        none ->
            {ok, OldData#ng_data{dists=gb_trees:insert(Label,
                                                       #dist_call{type=Type,
                                                                  scale=Scale,
                                                                  params=Params},
                                                       Dists)}};
        _ ->
            %% label already registered
            throw(duplabel)
    end;
register_distribution(Label, Type, Scale, Params, OldData) ->
    error_logger:error_msg("~p:~p: Invalid call to register_distribution, ~p!",
                           [?MODULE, ?LINE, [Label, Type, Scale, Params, OldData]]),
    throw(badarg).
%%--------------------------------------------------------------------
%% @doc
%% Generates a random number based on the provided registered
%% distribution label. Calls into private implementations of the
%% probability distributions to achieve this.
%% @spec call_distribution(Label :: string(), Data) -> {ok, integer()}
%% Data = record(ng_data)
%% @end
%% --------------------------------------------------------------------
call_distribution(Label, Data) when is_list(Label),
is_record(Data, ng_data)->
Dists = Data#ng_data.dists,
case gb_trees:lookup(Label, Dists) of
{value, #dist_call{type=Type,
scale=Scale,
params=Params}} ->
%% draw one sample and round it to the nearest integer
{ok, Result} = generate_number(Type, Scale, Params),
{ok, erlang:round(Result)};
none ->
%% unknown label: log and fail fast
error_logger:error_msg("~p:~p: ~p not registered!",
[?MODULE, ?LINE, Label]),
throw(badarg)
end;
call_distribution(Label, Data) ->
%% malformed arguments
error_logger:error_msg("~p:~p: Invalid call to call_distribution, ~p!",
[?MODULE, ?LINE, [Label, Data]]),
throw(badarg).
%% ===================================================================
%% Private functions
%% ===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Generates a random number from the given distribution with the given
%% parameters. The results is scaled also, but not rounded to an
%% integer. Implementations are based off of descriptions from
%% http://ftp.arl.mil/random/random.pdf.
%% @spec generate_number(Type :: string(), Scale :: float(), Params :: [float()]) -> {ok, float()}
%% @end
%% --------------------------------------------------------------------
%% gaussian_tail: draw a Box-Muller pair scaled by Sigma and accept the first
%% value whose magnitude exceeds the cutoff A, retrying until one does.
generate_number(Type = gaussian_tail, Scale, Params = [A,Sigma|_]) when is_float(Scale),
is_float(A),
is_float(Sigma) ->
U1 = random:uniform(),
U2 = random:uniform(),
X1 = abs(Sigma * math:cos(2 * math:pi() * U1) * math:sqrt(-2 * math:log(U2))),
case X1 > A of
true ->
{ok, Scale * X1};
_ ->
%% second variate of the Box-Muller pair (sine branch, not abs'd)
X2 = Sigma * math:sin(2 * math:pi() * U1) * math:sqrt(-2 * math:log(U2)),
case X2 > A of
true ->
{ok, Scale * X2};
_ ->
%% neither variate cleared the cutoff: resample
generate_number(Type, Scale, Params)
end
end;
%% exponential via inverse CDF: -Lambda * ln(U)
generate_number(exponential, Scale, [Lambda|_]) when is_float(Scale),
is_float(Lambda) ->
U = random:uniform(),
{ok, Scale * -Lambda * math:log(U)};
%% flat (uniform) on [A, B)
generate_number(flat, Scale, [A,B|_]) when is_float(Scale),
is_float(A),
is_float(B) ->
U = random:uniform(),
{ok, Scale * (A + (B - A) * U)};
%% lognormal. NOTE(review): exp(Mu + Sigma*U) with U uniform, whereas the
%% textbook lognormal uses a standard normal variate here -- confirm intended.
generate_number(lognormal, Scale, [Mu,Sigma|_]) when is_float(Scale),
is_float(Mu),
is_float(Sigma) ->
U = random:uniform(),
{ok, Scale * math:exp(Mu + Sigma * U)};
%% poisson: multiply uniforms until the product drops below e^-Lambda
generate_number(poisson, Scale, [Lambda|_]) when is_float(Scale),
is_float(Lambda) ->
U = random:uniform(),
{ok, Scale * poisson_acc(math:exp(-Lambda), U, 1)};
%% bernoulli: Scale with probability P, else 0.
%% NOTE(review): success yields the float Scale, failure the integer 0; the
%% caller (call_distribution/2) rounds, so the mix is harmless there.
generate_number(bernoulli, Scale, [P|_]) when is_float(Scale),
is_float(P) ->
U = random:uniform(),
case P > U of
true ->
{ok, Scale};
_ ->
{ok, 0}
end;
%% binomial: count successes over round(N) trials of probability P
generate_number(binomial, Scale, [P,N|_]) when is_float(Scale),
is_float(P),
is_float(N) ->
{ok, Scale * binomial_acc(P, 0, erlang:round(N))};
%% negative binomial: see negative_binomial_acc/3
generate_number(negative_binomial, Scale, [P,N|_]) when is_float(Scale),
is_float(P),
is_float(N) ->
{ok, Scale * negative_binomial_acc(P, 0, erlang:round(N))};
%% geometric via inverse CDF: ceil(ln(U) / ln(1-P))
generate_number(geometric, Scale, [P|_]) when is_float(Scale),
is_float(P) ->
U = random:uniform(),
{ok, Scale * ceiling(math:log(U)/math:log(1-P))};
generate_number(Label, Scale, Params) ->
%% unknown type or malformed parameters: log and fail fast
error_logger:error_msg("~p:~p: Invalid call to generate_number, ~p!",
[?MODULE, ?LINE, [Label, Scale, Params]]),
throw(badarg).
%%--------------------------------------------------------------------
%% @doc
%% Implementation of the looping needed to generate a random number from
%% a poisson distribution.
%% @spec poisson_acc(Target :: float(), Products :: float(), N :: pos_integer()) -> pos_integer()
%% @end
%% --------------------------------------------------------------------
%% Multiply uniform draws into Products until it falls below Target
%% (= e^-Lambda), counting iterations in N; returns N once Target > Products.
%% NOTE(review): compared with Knuth's textbook algorithm (which returns
%% k - 1), the counter here starts at 1 and N itself is returned, so the
%% result appears shifted by one -- confirm intended.
poisson_acc(Target, Products, N) when is_float(Target),
is_float(Products),
is_integer(N),
N > 0,
Target > Products ->
N;
poisson_acc(Target, Products, N) when is_float(Target),
is_float(Products),
is_integer(N),
N > 0->
U = random:uniform(),
poisson_acc(Target, U * Products, N + 1).
%%--------------------------------------------------------------------
%% @doc
%% Implementation of the looping needed to generate a random number from
%% a binomial distribution.
%% @spec binomial_acc(P :: float(), Count :: non_neg_integer(), N :: non_neg_integer()) -> non_neg_integer()
%% @end
%% --------------------------------------------------------------------
%% Run N Bernoulli(P) trials, counting successes (U < P); returns the count
%% when all trials are consumed.
binomial_acc(_P, Count, 0) when is_integer(Count),
Count >= 0 ->
Count;
binomial_acc(P, Count, N) when is_float(P),
is_integer(Count),
is_integer(N),
Count >= 0,
N >= 0 ->
U = random:uniform(),
case U < P of
true ->
%% success: one more hit, one fewer trial
binomial_acc(P, Count + 1, N - 1);
_ ->
binomial_acc(P, Count, N - 1)
end.
%%--------------------------------------------------------------------
%% @doc
%% Implementation of the looping needed to generate a random number from
%% a negative binomial distribution.
%% @spec negative_binomial_acc(P :: float(), Count :: non_neg_integer(), N :: non_neg_integer()) -> non_neg_integer()
%% @end
%% --------------------------------------------------------------------
%% Draw until N events have occurred, returning how many draws fell at or
%% below P along the way: each draw above P consumes one of the N required
%% events (N - 1); each draw at or below P increments the returned Count.
negative_binomial_acc(_P, Count, 0) when is_integer(Count),
Count >= 0 ->
Count;
negative_binomial_acc(P, Count, N) when is_float(P),
is_integer(Count),
is_integer(N),
Count >= 0,
N >= 0 ->
U = random:uniform(),
case U > P of
true ->
negative_binomial_acc(P, Count, N - 1);
_ ->
negative_binomial_acc(P, Count + 1, N)
end.
%%--------------------------------------------------------------------
%% @doc
%% Implemenation of ceiling function, because Erlang doesn't provide
%% one.
%% @spec ceiling(X :: float()) -> integer()
%% @end
%% --------------------------------------------------------------------
ceiling(X) when is_float(X) ->
%% trunc rounds toward zero, so adjust based on the sign of the fraction
T = erlang:trunc(X),
case (X - T) of
Neg when Neg < 0 ->
%% negative input: trunc already rounded toward zero (i.e. up)
T;
Pos when Pos > 0 ->
%% positive fractional part remains: round up
T + 1;
_ ->
%% X was exactly integral
T
end. | erlang/rysim_des_actor/src/num_gen.erl | 0.70912 | 0.480783 | num_gen.erl | starcoder |
%%% @doc
%%% BLOCKTYPE
%%% Get Date and Time, Output formatted string and component values.
%%% DESCRIPTION
%%% Get Date Time value, Output component values and as formatted string
%%% Source may be local time, utc time, timestamp input, or component inputs
%%% Default format configuration string is "F"
%%% Example Output: "Monday, June 15, 2009 1:45:30 PM"
%%% Format configuration follows .NET Date/Time formatting rules. See links below.
%%% The calendar_locale() structure in the language module may be modified for other locales.
%%% LINKS
%%% https://docs.microsoft.com/en-us/dotnet/standard/base-types/standard-date-and-time-format-strings
%%% https://docs.microsoft.com/en-us/dotnet/standard/base-types/custom-date-and-time-format-strings
%%% https://msdn.microsoft.com/en-us/library/hc4ky857(v=vs.71).aspx
%%% https://msdn.microsoft.com/en-us/library/8kb3ddd4(v=vs.71).aspx
%%% @end
-module(lblx_datetime).

-author("<NAME>").

-include("../block_state.hrl").

%% ====================================================================
%% API functions
%% ====================================================================
-export([groups/0, version/0]).
-export([create/2, create/4, create/5, upgrade/1, initialize/1, execute/2, delete/1]).

% Block-type group(s) this block belongs to, used for type discovery.
groups() -> [time].

% Version of this block-type implementation; upgrade/1 compares it
% against the version stored in a block's config attributes.
version() -> "0.2.0".
%% Build the complete Config attribute list for this block type by
%% merging the type-specific config attributes into the common config
%% attributes that every block type shares.
-spec default_configs(BlockName :: block_name(),
                      Description :: string()) -> config_attribs().

default_configs(BlockName, Description) ->
  CommonConfigs =
    block_common:configs(BlockName, ?MODULE, version(), Description),
  TypeSpecificConfigs =
    [
      {source, {local}}, %| enum | local | local, utc, timestamp, component |
      {format, {"F"}}    %| string | "" | Standard and Custom Date Time format string |
    ],
  attrib_utils:merge_attribute_lists(CommonConfigs, TypeSpecificConfigs).
%% Build the complete Input attribute list: type-specific inputs merged
%% into the common block inputs. The component inputs are only read when
%% the 'source' config is set to 'component'.
-spec default_inputs() -> input_attribs().

default_inputs() ->
  TypeSpecificInputs =
    [
      {timestamp_in, {empty, {empty}}}, %| composite | empty | N/A |
      {year_in, {empty, {empty}}},      %| integer | empty | 0..9999 |
      {month_in, {empty, {empty}}},     %| integer | empty | 1..12 |
      {day_in, {empty, {empty}}},       %| integer | empty | 1..31 |
      {hour_in, {empty, {empty}}},      %| integer | empty | 0..23 |
      {minute_in, {empty, {empty}}},    %| integer | empty | 0..59 |
      {second_in, {empty, {empty}}},    %| integer | empty | 0..59 |
      {micro_sec_in, {empty, {empty}}}  %| integer | empty | 0..999999 |
    ],
  attrib_utils:merge_attribute_lists(block_common:inputs(), TypeSpecificInputs).
%% Build the complete Output attribute list: type-specific outputs merged
%% into the common block outputs. Outputs carry the individual date/time
%% components plus derived 12-hour clock values.
-spec default_outputs() -> output_attribs().

default_outputs() ->
  TypeSpecificOutputs =
    [
      {timestamp_out, {null, []}}, %| composite | null | N/A |
      {year_out, {null, []}},      %| integer | null | 0..9999 |
      {month_out, {null, []}},     %| integer | null | 1..12 |
      {day_out, {null, []}},       %| integer | null | 1..31 |
      {dow_out, {null, []}},       %| integer | null | 1..7 |
      {hour_out, {null, []}},      %| integer | null | 0..23 |
      {hour12_out, {null, []}},    %| integer | null | 1..12 |
      {pm_out, {null, []}},        %| boolean | null | null, true, false |
      {minute_out, {null, []}},    %| integer | null | 0..59 |
      {second_out, {null, []}},    %| integer | null | 0..59 |
      {micro_sec_out, {null, []}}  %| integer | null | 0..999999 |
    ],
  attrib_utils:merge_attribute_lists(block_common:outputs(), TypeSpecificOutputs).
%%
%% Create a set of block attributes for this block type.
%% Init attributes are used to override the default attribute values
%% and to add attributes to the lists of default attributes
%%
-spec create(BlockName :: block_name(),
             Description :: string()) -> block_defn().

% Create with all-default config, input and output attributes.
create(BlockName, Description) ->
  create(BlockName, Description, [], [], []).

-spec create(BlockName :: block_name(),
             Description :: string(),
             InitConfig :: config_attribs(),
             InitInputs :: input_attribs()) -> block_defn().

% Create with initial Config and Input values, default Outputs.
create(BlockName, Description, InitConfig, InitInputs) ->
  create(BlockName, Description, InitConfig, InitInputs, []).
-spec create(BlockName :: block_name(),
             Description :: string(),
             InitConfig :: config_attribs(),
             InitInputs :: input_attribs(),
             InitOutputs :: output_attribs()) -> block_defn().

%% Build the block definition by overlaying the caller-supplied initial
%% attribute values on top of the defaults. Any initial attribute that is
%% not already present in the default lists is created by
%% attrib_utils:merge_attribute_lists/2.
create(BlockName, Description, InitConfig, InitInputs, InitOutputs) ->
  DefaultConfigs = default_configs(BlockName, Description),
  % The block definition is the {Config, Inputs, Outputs} triple.
  {attrib_utils:merge_attribute_lists(DefaultConfigs, InitConfig),
   attrib_utils:merge_attribute_lists(default_inputs(), InitInputs),
   attrib_utils:merge_attribute_lists(default_outputs(), InitOutputs)}.
%%
%% Upgrade block attribute values, when block code and block data versions differ.
%% Rewrites the stored config version to the module version and logs the
%% transition; on failure the reason is logged and returned.
%%
-spec upgrade(BlockDefn :: block_defn()) -> {ok, block_defn()} | {error, atom()}.

upgrade({Config, Inputs, Outputs}) ->
  NewVer = version(),
  {BlockName, BlockModule, OldVer} = config_utils:name_module_version(Config),
  BlockType = type_utils:type_name(BlockModule),

  case attrib_utils:set_value(Config, version, version()) of
    {ok, UpdConfig} ->
      m_logger:info(block_type_upgraded_from_ver_to,
                    [BlockName, BlockType, OldVer, NewVer]),
      {ok, {UpdConfig, Inputs, Outputs}};

    {error, Reason} = Error ->
      m_logger:error(err_upgrading_block_type_from_ver_to,
                     [Reason, BlockName, BlockType, OldVer, NewVer]),
      Error
  end.
%%
%% Initialize block values
%% Parses the configured date/time format string once and caches the
%% result in the private 'format_def' attribute, and validates the
%% configured 'source'. On any config error, all outputs are nulled and
%% the error status is set.
%%
-spec initialize(BlockState :: block_state()) -> block_state().

initialize({Config, Inputs, Outputs, Private}) ->

  % Cache the parsed format definition so execute/2 does not re-parse
  % the format string on every execution.
  Private1 = attrib_utils:merge_attribute_lists(Private, [{format_def, {empty}}]),

  case attrib_utils:get_value(Config, format) of
    {ok, Format} ->
      case time_utils:get_format_defn(Format) of
        {FormatStr, ParamDefs} when is_list(ParamDefs) ->
          {ok, Private2} = attrib_utils:set_value(Private1, format_def, {FormatStr, ParamDefs}),
          case attrib_utils:get_value(Config, source) of
            {ok, Source} ->
              case lists:member(Source, [local, utc, timestamp, component]) of
                true ->
                  Value = null, Status = initialed;
                false ->
                  {Value, Status} = config_utils:log_error(Config, source, invalid_value)
              end;
            {error, Reason} ->
              % Bug fix: this branch failed reading the 'source' config
              % attribute, so log the error against 'source', not 'format'.
              {Value, Status} = config_utils:log_error(Config, source, Reason)
          end;
        {error, Reason} ->
          Private2 = Private1,
          {Value, Status} = config_utils:log_error(Config, format, Reason)
      end;
    {error, Reason} ->
      Private2 = Private1,
      {Value, Status} = config_utils:log_error(Config, format, Reason)
  end,

  % Null all data outputs; Status is 'initialed' or the config error status.
  {ok, Outputs1} = attrib_utils:set_values(Outputs,
    [
      {value, Value}, {status, Status},
      {timestamp_out, Value}, {year_out, Value}, {month_out, Value}, {day_out, Value}, {dow_out, Value},
      {hour_out, Value}, {minute_out, Value}, {second_out, Value}, {micro_sec_out, Value}
    ]),

  % This is the block state
  {Config, Inputs, Outputs1, Private2}.
%%
%% Execute the block specific functionality
%%
-spec execute(BlockState :: block_state(),
              ExecMethod :: exec_method()) -> block_state().

% disable: null every output and mark the block disabled.
execute({Config, Inputs, Outputs, Private}, disable) ->
  Outputs1 = output_utils:update_all_outputs(Outputs, null, disabled),
  {Config, Inputs, Outputs1, Private};

execute({Config, Inputs, Outputs, Private}, _ExecMethod) ->

  % Pre-parsed format definition, cached by initialize/1.
  {ok, FormatDef} = attrib_utils:get_value(Private, format_def),

  % Bind TimeStamp, Year/Month/Day, Hour/Minute/Second, MicroSec and
  % Status from the configured source. The bindings are exported from
  % the case branches; every error branch binds the components to null
  % and Status to the input error status.
  case attrib_utils:get_value(Config, source) of
    {ok, local} -> % Get local time
      Status = normal,
      TimeStamp = {_, _, MicroSec} = os:timestamp(),
      {{Year, Month, Day},{Hour, Minute, Second}} = calendar:now_to_local_time(TimeStamp);

    {ok, utc} -> % Get UTC time
      Status = normal,
      TimeStamp = {_, _, MicroSec} = os:timestamp(),
      {{Year, Month, Day},{Hour, Minute, Second}} = calendar:now_to_universal_time(TimeStamp);

    {ok, timestamp} ->
      % Use a {MegaSecs, Secs, MicroSecs} timestamp from the
      % timestamp_in input, converted to local time.
      case input_utils:get_any_type(Inputs, timestamp_in) of
        {ok, TimeStamp} ->
          Status = normal,
          {_, _, MicroSec} = TimeStamp,
          {{Year, Month, Day},{Hour, Minute, Second}} = calendar:now_to_local_time(TimeStamp);
        {error, Reason} ->
          TimeStamp = Year = Month = Day = Hour = Minute = Second = MicroSec = null,
          {null, Status} = input_utils:log_error(Config, timestamp_in, Reason)
      end;

    {ok, component} ->
      % Assemble the date/time from the individual component inputs,
      % range-checking each one; the first invalid input aborts with an
      % input error status and nulls the remaining components.
      TimeStamp = null,
      case input_utils:get_integer_range(Inputs, year_in, 1, 9999) of
        {ok, Year} ->
          case input_utils:get_integer_range(Inputs, month_in, 1, 12) of
            {ok, Month} ->
              case input_utils:get_integer_range(Inputs, day_in, 1, 31) of
                {ok, Day} ->
                  case input_utils:get_integer_range(Inputs, hour_in, 0, 23) of
                    {ok, Hour} ->
                      case input_utils:get_integer_range(Inputs, minute_in, 0, 59) of
                        {ok, Minute} ->
                          case input_utils:get_integer_range(Inputs, second_in, 0, 59) of
                            {ok, Second} ->
                              case input_utils:get_integer_range(Inputs, micro_sec_in, 0, 999999) of
                                {ok, MicroSec} ->
                                  % All components in range; finally check the
                                  % calendar validity of the full date.
                                  case calendar:valid_date(Year, Month, Day) of
                                    true ->
                                      Status = normal;
                                    false ->
                                      {null, Status} = input_utils:log_error(Config, day_in, invalid_date)
                                  end;
                                {error, Reason} ->
                                  MicroSec = null,
                                  {null, Status} = input_utils:log_error(Config, micro_sec_in, Reason)
                              end;
                            {error, Reason} ->
                              Second = MicroSec = null,
                              {null, Status} = input_utils:log_error(Config, second_in, Reason)
                          end;
                        {error, Reason} ->
                          Minute = Second = MicroSec = null,
                          {null, Status} = input_utils:log_error(Config, minute_in, Reason)
                      end;
                    {error, Reason} ->
                      Hour = Minute = Second = MicroSec = null,
                      {null, Status} = input_utils:log_error(Config, hour_in, Reason)
                  end;
                {error, Reason} ->
                  Day = Hour = Minute = Second = MicroSec = null,
                  {null, Status} = input_utils:log_error(Config, day_in, Reason)
              end;
            {error, Reason} ->
              Month = Day = Hour = Minute = Second = MicroSec = null,
              {null, Status} = input_utils:log_error(Config, month_in, Reason)
          end;
        {error, Reason} ->
          Year = Month = Day = Hour = Minute = Second = MicroSec = null,
          {null, Status} = input_utils:log_error(Config, year_in, Reason)
      end
  end,

  case Status of
    normal ->
      % Derive 12-hour clock values: hour 0 -> 12 AM, hour 12 -> 12 PM.
      % The null clause is defensive; all error paths set a non-normal Status.
      case Hour of
        null ->
          Hour12 = PM = null;
        0 ->
          Hour12 = 12, PM = false;
        12 ->
          Hour12 = 12, PM = true;
        _ when (Hour > 12) ->
          Hour12 = Hour - 12, PM = true;
        _ ->
          Hour12 = Hour, PM = false
      end,
      DayOfWeek = calendar:day_of_the_week(Year, Month, Day),
      % Render the formatted date/time string using the cached format.
      Formatted = time_utils:format_time(FormatDef, {{Year, Month, Day},{Hour, Minute, Second}}, MicroSec),
      {ok, Outputs1} = attrib_utils:set_values(Outputs,
        [
          {value, Formatted}, {status, Status},
          {timestamp_out, TimeStamp}, {year_out, Year}, {month_out, Month}, {day_out, Day}, {dow_out, DayOfWeek},
          {hour_out, Hour}, {hour12_out, Hour12}, {pm_out, PM}, {minute_out, Minute}, {second_out, Second}, {micro_sec_out, MicroSec}
        ]);

    _NotNormal ->
      % Any input error: null all data outputs, keep the error status.
      {ok, Outputs1} = attrib_utils:set_values(Outputs,
        [
          {value, null}, {status, Status},
          {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
          {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}
        ])
  end,

  % Return updated block state
  {Config, Inputs, Outputs1, Private}.
%%
%% Delete the block: strip the runtime Private attributes, turning the
%% block state back into a persistable block definition.
%%
-spec delete(BlockState :: block_state()) -> block_defn().

delete({Config, Inputs, Outputs, _Private}) ->
  BlockDefn = {Config, Inputs, Outputs},
  BlockDefn.
%% ====================================================================
%% Internal functions
%% ====================================================================
%% ====================================================================
%% Tests
%% ====================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-include("block_io_test_gen.hrl").

% Test sets consumed by the generator in block_io_test_gen.hrl.
% Entries are {Config, Inputs, ExpectedOutputs} or {Inputs, ExpectedOutputs};
% NOTE(review): the 2-tuple form appears to reuse config/input values set
% by earlier entries -- confirm against block_io_test_gen.hrl.
test_sets() ->
  [
    % test invalid config
    {[{source, bad}], [], [{status, config_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{source, local}, {format, "K"}], [], [{status, config_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},

    % test invalid input
    {[{source, component}, {format, "F"}], [{year_in, -1}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{year_in, 2018}, {month_in, 20}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{month_in, 3}, {day_in, 32}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{day_in, 16}, {hour_in, 24}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{hour_in, 23}, {minute_in, -2}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{minute_in, 56}, {second_in, 66}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},
    {[{second_in, 10}, {micro_sec_in, 1234567}], [{status, input_err}, {value, null}, {timestamp_out, null}, {year_out, null}, {month_out, null}, {day_out, null}, {dow_out, null},
                           {hour_out, null}, {hour12_out, null}, {pm_out, null}, {minute_out, null}, {second_out, null}, {micro_sec_out, null}]},

    % test valid input
    {[{micro_sec_in, 123456}], [{status, normal}, {value, "Friday, March 16, 2018 11:56:10 PM"}, {timestamp_out, null}, {year_out, 2018}, {month_out, 3}, {day_out, 16},
                           {dow_out, 5}, {hour_out, 23}, {hour12_out, 11}, {pm_out, true}, {minute_out, 56}, {second_out, 10}, {micro_sec_out, 123456}]},
    {[{hour_in, 0}], [{status, normal}, {value, "Friday, March 16, 2018 12:56:10 AM"}, {timestamp_out, null}, {year_out, 2018}, {month_out, 3}, {day_out, 16},
                           {dow_out, 5}, {hour_out, 0}, {hour12_out, 12}, {pm_out, false}, {minute_out, 56}, {second_out, 10}, {micro_sec_out, 123456}]},
    {[{hour_in, 12}], [{status, normal}, {value, "Friday, March 16, 2018 12:56:10 PM"}, {timestamp_out, null}, {year_out, 2018}, {month_out, 3}, {day_out, 16},
                           {dow_out, 5}, {hour_out, 12}, {hour12_out, 12}, {pm_out, true}, {minute_out, 56}, {second_out, 10}, {micro_sec_out, 123456}]},
    % don't know date or time the test will be run, just verify status is normal
    {[{source, local}], [], [{status, normal}]},
    % don't know date or time the test will be run, just verify status is normal
    {[{source, utc}], [], [{status, normal}]},
    {[{source, timestamp}], [{timestamp_in, {1522,1695,72085}}], [{status, normal}, {value, "Sunday, March 25, 2018 1:14:55 PM"}, {timestamp_out, {1522,1695,72085}}, {year_out, 2018},
                           {month_out, 3}, {day_out, 25}, {dow_out, 7}, {hour_out, 13}, {minute_out, 14}, {second_out, 55}, {micro_sec_out, 72085}]}
  ].

-endif. | src/block_types/lblx_datetime.erl | 0.592902 | 0.450601 | lblx_datetime.erl | starcoder
%% common_test suite for cqerl hash mode
-module(hash_SUITE).

-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include("cqerl.hrl").

% export_all is conventional in Common Test suites: every test case and
% callback must be exported for the framework to call it.
-compile(export_all).

-import(test_helper, [
                      % maybe_get_client/1,
                      get_client/1
                     ]).

%%--------------------------------------------------------------------
%% Function: suite() -> Info
%%
%% Info = [tuple()]
%% List of key/value pairs.
%%
%% Description: Returns list of tuples to set default properties
%% for the suite.
%%
%% Note: The suite/0 function is only meant to be used to return
%% default data values, not perform any other operations.
%%--------------------------------------------------------------------
suite() ->
    % 20s per-testcase timeout plus environment requirements supplied
    % by the shared test helper.
    [{timetrap, {seconds, 20}} | test_helper:requirements()].
%%--------------------------------------------------------------------
%% Function: groups() -> [Group]
%%
%% Group = {GroupName,Properties,GroupsAndTestCases}
%% GroupName = atom()
%% The name of the group.
%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}]
%% Group properties that may be combined.
%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase]
%% TestCase = atom()
%% The name of a test case.
%% Shuffle = shuffle | {shuffle,Seed}
%% To get cases executed in random order.
%% Seed = {integer(),integer(),integer()}
%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
%% repeat_until_any_ok | repeat_until_any_fail
%% To get execution of cases repeated.
%% N = integer() | forever
%%
%% Description: Returns a list of test case group definitions.
%%--------------------------------------------------------------------
%% Test case groups. The client cases run in sequence because each one
%% builds on the state left behind by the previous case.
groups() ->
    ClientCases = [create_keyspace,
                   create_clients,
                   crash_recovery,
                   outage_recovery],
    [{clients, [sequence], ClientCases}].
%%--------------------------------------------------------------------
%% Function: all() -> GroupsAndTestCases
%%
%% GroupsAndTestCases = [{group,GroupName} | TestCase]
%% GroupName = atom()
%% Name of a test case group.
%% TestCase = atom()
%% Name of a test case.
%%
%% Description: Returns the list of groups and test cases that
%% are to be executed.
%%
%% NB: By default, we export all 1-arity user defined functions
%%--------------------------------------------------------------------
%% Run every group defined in groups/0; no stand-alone test cases.
all() -> [{group, clients}].
%%--------------------------------------------------------------------
%% Function: init_per_suite(Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for skipping the suite.
%%
%% Description: Initialization before the suite.
%%
%% Note: This function is free to add any key/value pairs to the Config
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
% Keyspace used by every test in this suite.
-define(KEYSPACE, "test_keyspace_3").

init_per_suite(Config) ->
    % Force hash client-selection mode, then run the helper's standard
    % node/connection setup against the suite keyspace.
    Config2 = test_helper:set_mode(hash, Config),
    test_helper:standard_setup(?KEYSPACE, Config2).
%%--------------------------------------------------------------------
%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
%%
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%%
%% Description: Cleanup after the suite.
%%--------------------------------------------------------------------
% Nothing to tear down at the suite level.
end_per_suite(_Config) ->
    ok.
%%--------------------------------------------------------------------
%% Function: init_per_group(GroupName, Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% GroupName = atom()
%% Name of the test case group that is about to run.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding configuration data for the group.
%% Reason = term()
%% The reason for skipping all test cases and subgroups in the group.
%%
%% Description: Initialization before each test case group.
%%--------------------------------------------------------------------
% Make the suite keyspace name available to every case in the group.
init_per_group(_Group, Config) ->
    % NOTE(review): duplicates the ?KEYSPACE literal defined above;
    % consider [{keyspace, ?KEYSPACE} | Config] to keep them in sync.
    [{keyspace, "test_keyspace_3"} | Config].
%%--------------------------------------------------------------------
%% Function: end_per_group(GroupName, Config0) ->
%% void() | {save_config,Config1}
%%
%% GroupName = atom()
%% Name of the test case group that is finished.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding configuration data for the group.
%%
%% Description: Cleanup after each test case group.
%%--------------------------------------------------------------------
% Nothing to tear down at the group level; pass Config through.
end_per_group(_group, Config) ->
    Config.
%%--------------------------------------------------------------------
%% Function: init_per_testcase(TestCase, Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% TestCase = atom()
%% Name of the test case that is about to run.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for skipping the test case.
%%
%% Description: Initialization before each test case.
%%
%% Note: This function is free to add any key/value pairs to the Config
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
% No per-testcase setup is needed; pass Config through unchanged.
init_per_testcase(_TestCase, Config) ->
    Config.

%%--------------------------------------------------------------------
%% Function: end_per_testcase(TestCase, Config0) ->
%% void() | {save_config,Config1} | {fail,Reason}
%%
%% TestCase = atom()
%% Name of the test case that is finished.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for failing the test case.
%%
%% Description: Cleanup after each test case.
%%--------------------------------------------------------------------
% The comment above documented end_per_testcase/2 but the callback was
% missing; added as a pass-through so the suite has the matching hook.
end_per_testcase(_TestCase, Config) ->
    Config.
% First case in the sequence: create the keyspace the other cases use.
create_keyspace(Config) ->
    % NOTE(review): hardcodes the keyspace name; <<?KEYSPACE>> would keep
    % it in sync with the macro used by init_per_suite/1.
    test_helper:create_keyspace(<<"test_keyspace_3">>, Config).
% Verify hash-mode client selection is deterministic and that the
% expected client tables and pools are created.
create_clients(Config) ->
    {ClientPid, _} = get_client(Config),
    % Should get the same client pid each time:
    {ClientPid, _} = get_client(Config),
    % Should be two table entries (one for undefined keyspace, used to
    % create test_keyspace_3)
    ClientTables = ets:tab2list(cqerl_client_tables),
    ?assertEqual(2, length(ClientTables)),
    % Each with the default number of elements:
    lists:foreach(fun({client_table, _, _, T}) ->
                      ?assertEqual(20, ets:info(T, size)) end,
                  ClientTables).
% Kill a single client process and verify it is replaced by a live
% process under a new pid.
crash_recovery(Config) ->
    {ClientPid, _} = get_client(Config),
    % Let's crash one:
    exit(ClientPid, kill),
    % Give it a moment to get itself together:
    % NOTE(review): fixed sleep is race-prone; a monitor-based wait
    % would be more reliable.
    timer:sleep(500),
    {ClientPid2, _} = get_client(Config),
    ?assertNotEqual(ClientPid, ClientPid2),
    ?assert(is_process_alive(ClientPid2)).
% Simulate a total outage by killing every client under every client
% supervisor, then verify tables are cleaned up and clients come back.
outage_recovery(Config) ->
    {ClientPid, _} = get_client(Config),
    ClientTables = ets:tab2list(cqerl_client_tables),
    lists:foreach(fun({client_table, _, Sup, _}) ->
                      kill_children(Sup)
                  end, ClientTables),
    timer:sleep(500),
    % Everything should have died and been cleaned up:
    ?assertEqual([], ets:tab2list(cqerl_client_tables)),
    % Fire up some new clients:
    {ClientPid2, _} = get_client(Config),
    ?assertNotEqual(ClientPid, ClientPid2),
    % Presumably only the keyspace-specific table is recreated on
    % demand here (it was 2 in create_clients) -- confirm if changed.
    ?assertEqual(1, length(ets:tab2list(cqerl_client_tables))),
    ok.
% Brutally kill every child of the given supervisor; each element of
% supervisor:which_children/1 is {Id, Child, Type, Modules}.
kill_children(Sup) ->
    lists:foreach(fun({_, Child, _, _}) -> exit(Child, kill) end,
                  supervisor:which_children(Sup)). | test/hash_SUITE.erl | 0.503174 | 0.422505 | hash_SUITE.erl | starcoder
%%
%% Copyright (c) 2016-2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(ldb_metrics).
-author("<NAME> <<EMAIL>").
-include("ldb.hrl").
%% ldb_metrics callbacks
-export([new/0,
merge_all/1,
record_transmission/3,
record_memory/3,
record_latency/3,
record_processing/2]).
-type term_size() :: non_neg_integer().
-type transmission() :: maps:map(timestamp(), {size_metric(), term_size()}).
-type memory() :: maps:map(timestamp(), {size_metric(), size_metric()}).
-type latency() :: maps:map(atom(), list(non_neg_integer())).
-type processing() :: non_neg_integer().
-record(state, {transmission :: transmission(),
memory :: memory(),
latency :: latency(),
processing :: processing()}).
-type st() :: #state{}.
-spec new() -> st().
new() ->
#state{transmission=maps:new(),
memory=maps:new(),
latency=maps:new(),
processing=0}.
-spec merge_all(list(st())) -> {transmission(), memory(), latency(), processing()}.
merge_all([A, B | T]) ->
#state{transmission=TransmissionA,
memory=MemoryA,
latency=LatencyA,
processing=ProcessingA} = A,
#state{transmission=TransmissionB,
memory=MemoryB,
latency=LatencyB,
processing=ProcessingB} = B,
Transmission = maps_ext:merge_all(
fun(_, {VA, TA}, {VB, TB}) -> {ldb_util:plus(VA, VB), TA + TB} end,
TransmissionA,
TransmissionB
),
Memory = maps_ext:merge_all(
fun(_, {VA, TA}, {VB, TB}) -> {ldb_util:plus(VA, VB), TA + TB} end,
MemoryA,
MemoryB
),
Latency = maps_ext:merge_all(
fun(_, VA, VB) -> VA ++ VB end,
LatencyA,
LatencyB
),
Processing = ProcessingA + ProcessingB,
H = #state{transmission=Transmission,
memory=Memory,
latency=Latency,
processing=Processing},
merge_all([H | T]);
merge_all([#state{transmission=Transmission,
memory=Memory,
latency=Latency,
processing=Processing}]) ->
{Transmission, Memory, Latency, Processing}.
-spec record_transmission(size_metric(), term_size(), st()) -> st().
record_transmission({0, 0}, _, State) ->
State;
record_transmission(Size, TermSize, #state{transmission=Transmission0}=State) ->
Timestamp = ldb_util:unix_timestamp(),
Transmission = update_transmission(Timestamp, Size, TermSize, Transmission0),
State#state{transmission=Transmission}.
-spec record_memory(size_metric(), term_size(), st()) -> st().
record_memory({0, 0}, _, State) ->
State;
record_memory(Size, TermSize, #state{memory=Memory0}=State) ->
Timestamp = ldb_util:unix_timestamp(),
Memory = update_memory(Timestamp, Size, TermSize, Memory0),
State#state{memory=Memory}.
-spec record_latency(atom(), non_neg_integer(), st()) -> st().
record_latency(Type, MicroSeconds, #state{latency=Latency0}=State) ->
Latency = update_latency(Type, MicroSeconds, Latency0),
State#state{latency=Latency}.
-spec record_processing(processing(), st()) -> st().
record_processing(MicroSeconds, #state{processing=Processing0}=State) ->
State#state{processing=Processing0 + MicroSeconds}.
update_transmission(Timestamp, Size, TermSize, Transmission0) ->
maps:update_with(
Timestamp,
fun({V, T}) -> {ldb_util:plus(V, Size), T + TermSize} end,
{Size, TermSize},
Transmission0
).
update_memory(Timestamp, Size, TermSize, Memory0) ->
maps:update_with(
Timestamp,
fun({V, T}) -> {ldb_util:plus(V, Size), T + TermSize} end,
{Size, TermSize},
Memory0
).
update_latency(Type, MicroSeconds, Latency0) ->
maps:update_with(
Type,
fun(V) -> [MicroSeconds | V] end,
[MicroSeconds],
Latency0
). | src/ldb_metrics.erl | 0.586878 | 0.409693 | ldb_metrics.erl | starcoder |
%% -*- mode: erlang;erlang-indent-level: 4;indent-tabs-mode: nil -*-
%% @author <NAME> <<EMAIL>>
%% @doc
%% An Erlang interface to Amazon's DynamoDB.
%%
%% [http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/operationlist.html]
%%
%% erlcloud_ddb2 implements the entire 20120810 API.
%%
%% Method names match DynamoDB operations converted to
%% lower_case_with_underscores. The one exception is query, which is
%% an Erlang reserved word. The `q' method implements Query.
%%
%% Required parameters are passed as function arguments. In addition
%% all methods take an options proplist argument which can be used to
%% pass optional parameters. See function documentation for examples.
%%
%% Table names, key names, attribute names and any other input strings
%% except attribute values must be binary strings.
%%
%% Attribute values may be either `{Type, Value}' or `Value'. If only
%% `Value' is provided then the type is inferred. Lists (iolists are
%% handled), binaries and atoms are assumed to be strings. The following are
%% equivalent: `{s, <<"value">>}', `<<"value">>', `"value"', `value'. Numbers
%% are assumed to be numbers. The following are equivalent: `{n, 42}',
%% `42'. To specify the AWS binary or set types an explicit `Type'
%% must be provided. For example: `{b, <<1,2,3>>}' or `{ns,
%% [4,5,6]}'. Note that binary values will be base64 encoded and
%% decoded automatically. Since some atoms (such as `true', `false', `not_null',
%% `null', `undefined', `delete', etc) have special meanings in some cases,
%% use them carefully.
%%
%% Output is in the form of `{ok, Value}' or `{error, Reason}'. The
%% format of `Value' is controlled by the `out' option, which defaults
%% to `simple'. The possible values are:
%%
%% * `simple' - The most interesting part of the output. For example
%% `get_item' will return the item.
%%
%% * `record' - A record containing all the information from the
%% DynamoDB response except field types. This is useful if you need more detailed
%% information than what is returned with `simple'. For example, with
%% `scan' and `query' the record will contain the last evaluated key
%% which can be used to continue the operation.
%%
%% * `typed_record' - A record containing all the information from the
%% DynamoDB response. All field values are returned with type information.
%%
%% * `json' - The output from DynamoDB as processed by `jsx:decode'
%% but with no further manipulation. This would rarely be useful,
%% unless the DynamoDB API is updated to include data that is not yet
%% parsed correctly.
%%
%% Items will be returned as a list of `{Name, Value}'. In most cases
%% the output will have type information removed. For example:
%% `[{<<"String Attribute">>, <<"value">>}, {<<"Number Attribute">>,
%% 42}, {<<"BinaryAttribute">>, <<1,2,3>>}]'. The exception is for
%% output fields that are intended to be passed to a subsequent call,
%% such as `unprocessed_keys' and `last_evaluated_key'. Those will
%% contain typed attribute values so that they may be correctly passed
%% to subsequent calls.
%%
%% DynamoDB errors are return in the form `{error, {ErrorCode,
%% Message}}' where `ErrorCode' and 'Message' are both binary
%% strings. List of error codes:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ErrorHandling.html]. So
%% to handle conditional check failures, match `{error,
%% {<<"ConditionalCheckFailedException">>, _}}'.
%%
%% `erlcloud_ddb_util' provides a higher level API that implements common
%% operations that may require multiple DynamoDB API calls.
%%
%% See the unit tests for additional usage examples beyond what are
%% provided for each function.
%%
%% @end
-module(erlcloud_ddb2).
-include("erlcloud.hrl").
-include("erlcloud_aws.hrl").
-include("erlcloud_ddb2.hrl").
%%% Library initialization.
-export([configure/2, configure/3, configure/4, configure/5,
new/2, new/3, new/4, new/5]).
%%% DynamoDB API
-export([batch_get_item/1, batch_get_item/2, batch_get_item/3,
batch_write_item/1, batch_write_item/2, batch_write_item/3,
create_table/5, create_table/6, create_table/7,
delete_item/2, delete_item/3, delete_item/4,
delete_table/1, delete_table/2, delete_table/3,
describe_limits/0, describe_limits/1, describe_limits/2,
describe_table/1, describe_table/2, describe_table/3,
describe_time_to_live/1, describe_time_to_live/2, describe_time_to_live/3,
get_item/2, get_item/3, get_item/4,
list_tables/0, list_tables/1, list_tables/2,
put_item/2, put_item/3, put_item/4,
%% Note that query is a Erlang reserved word, so we use q instead
q/2, q/3, q/4,
scan/1, scan/2, scan/3,
update_item/3, update_item/4, update_item/5,
update_table/2, update_table/3, update_table/4, update_table/5,
update_time_to_live/2, update_time_to_live/3, update_time_to_live/4
]).
-export_type(
[attr_defs/0,
attr_name/0,
attr_type/0,
attributes_to_get_opt/0,
batch_get_item_opt/0,
batch_get_item_opts/0,
batch_get_item_request_item/0,
batch_get_item_request_item_opt/0,
batch_get_item_request_item_opts/0,
batch_get_item_return/0,
batch_write_item_delete/0,
batch_write_item_opt/0,
batch_write_item_opts/0,
batch_write_item_put/0,
batch_write_item_request/0,
batch_write_item_request_item/0,
batch_write_item_return/0,
boolean_opt/1,
comparison_op/0,
condition/0,
conditional_op/0,
conditional_op_opt/0,
conditions/0,
consistent_read_opt/0,
create_table_opt/0,
create_table_opts/0,
create_table_return/0,
ddb_opts/0,
ddb_return/2,
delete_item_opt/0,
delete_item_opts/0,
delete_item_return/0,
delete_table_return/0,
describe_table_return/0,
describe_time_to_live_return/0,
expected_opt/0,
expression/0,
expression_attribute_names/0,
expression_attribute_values/0,
get_item_opt/0,
get_item_opts/0,
get_item_return/0,
global_secondary_index_def/0,
global_secondary_index_update/0,
global_secondary_index_updates/0,
in_attr/0,
in_attr_value/0,
in_expected/0,
in_expected_item/0,
in_item/0,
in_update/0,
in_updates/0,
index_name/0,
key/0,
key_schema/0,
list_tables_opt/0,
list_tables_opts/0,
list_tables_return/0,
local_secondary_index_def/0,
maybe_list/1,
ok_return/1,
out_attr/0,
out_attr_value/0,
out_item/0,
out_opt/0,
out_type/0,
projection/0,
put_item_opt/0,
put_item_opts/0,
put_item_return/0,
q_opt/0,
q_opts/0,
q_return/0,
range_key_name/0,
read_units/0,
return_consumed_capacity/0,
return_consumed_capacity_opt/0,
return_item_collection_metrics/0,
return_item_collection_metrics_opt/0,
return_value/0,
scan_opt/0,
scan_opts/0,
scan_return/0,
stream_specification/0,
select/0,
table_name/0,
time_to_live_status/0,
update_action/0,
update_item_opt/0,
update_item_opts/0,
update_item_return/0,
update_table_return/0,
update_time_to_live_return/0,
write_units/0
]).
%%%------------------------------------------------------------------------------
%%% Library initialization.
%%%------------------------------------------------------------------------------
%% Construct an #aws_config{} for DynamoDB access. Each higher arity
%% delegates to the one below it and fills in one more connection
%% parameter; fields that are never set keep the record defaults.
-spec new(string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey) ->
    #aws_config{access_key_id = AccessKeyID,
                secret_access_key = SecretAccessKey}.

-spec new(string(), string(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey, Host) ->
    Config = new(AccessKeyID, SecretAccessKey),
    Config#aws_config{ddb_host = Host}.

-spec new(string(), string(), string(), non_neg_integer()) -> aws_config().
new(AccessKeyID, SecretAccessKey, Host, Port) ->
    Config = new(AccessKeyID, SecretAccessKey, Host),
    Config#aws_config{ddb_port = Port}.

-spec new(string(), string(), string(), non_neg_integer(), string()) -> aws_config().
new(AccessKeyID, SecretAccessKey, Host, Port, Scheme) ->
    Config = new(AccessKeyID, SecretAccessKey, Host, Port),
    Config#aws_config{ddb_scheme = Scheme}.
%% @doc Store a config built from the credentials in the process dictionary,
%% making it the default for subsequent calls in this process.
-spec configure(string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey) ->
    Config = new(AccessKeyID, SecretAccessKey),
    put(aws_config, Config),
    ok.

%% @doc As configure/2, additionally setting the DynamoDB host.
-spec configure(string(), string(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey, Host) ->
    Config = new(AccessKeyID, SecretAccessKey, Host),
    put(aws_config, Config),
    ok.

%% @doc As configure/3, additionally setting the DynamoDB port.
-spec configure(string(), string(), string(), non_neg_integer()) -> ok.
configure(AccessKeyID, SecretAccessKey, Host, Port) ->
    Config = new(AccessKeyID, SecretAccessKey, Host, Port),
    put(aws_config, Config),
    ok.

%% @doc As configure/4, additionally setting the URL scheme.
-spec configure(string(), string(), string(), non_neg_integer(), string()) -> ok.
configure(AccessKeyID, SecretAccessKey, Host, Port, Scheme) ->
    Config = new(AccessKeyID, SecretAccessKey, Host, Port, Scheme),
    put(aws_config, Config),
    ok.
%% @doc Return the default AWS configuration as resolved by erlcloud_aws
%% (process-dictionary value set via configure/2..5, or environment defaults).
default_config() -> erlcloud_aws:default_config().
%%%------------------------------------------------------------------------------
%%% Shared Types
%%%------------------------------------------------------------------------------
%% Name of a DynamoDB table.
-type table_name() :: binary().
%% Attribute types: scalars (s/n/b/bool/null), sets (ss/ns/bs), list (l), map (m).
-type attr_type() :: s | n | b | bool | null | ss | ns | bs | l | m.
-type attr_name() :: binary().
%% Many inputs accept either a single item or a list of items.
-type maybe_list(T) :: T | [T].

%% Input (request-side) value representations.
-type in_string_value() :: binary() | iolist() | atom(). %% non-empty
-type in_number_value() :: number().
-type in_binary_value() :: binary() | [byte()]. %% non-empty
%% An input attribute value, either untyped (string-like or number, type
%% inferred by dynamize_value/1) or explicitly tagged with its type.
-type in_attr_value() :: in_string_value() |
                         in_number_value() |
                         {s, in_string_value()} |
                         {n, in_number_value()} |
                         {b, in_binary_value()} |
                         {bool, boolean()} |
                         {null, true} |
                         {ss, [in_string_value(),...]} |
                         {ns, [in_number_value(),...]} |
                         {bs, [in_binary_value(),...]} |
                         {l, [in_attr_value()]} |
                         {m, [in_attr()]}.
-type in_attr() :: {attr_name(), in_attr_value()}.
%% Legacy "Expected" conditional-write entries: attribute absent, attribute
%% present with a value, or a full condition().
-type in_expected_item() :: {attr_name(), false} |
                            {attr_name(), true, in_attr_value()} |
                            condition().
-type in_expected() :: maybe_list(in_expected_item()).
-type in_item() :: [in_attr()].

%% JSON wire-format shapes as produced/consumed by jsx.
-type json_pair() :: {binary(), jsx:json_term()}.
-type json_attr_type() :: binary().
-type json_attr_data() :: binary() | boolean() | [binary()] | [[json_attr_value()]] | [json_attr()].
-type json_attr_value() :: {json_attr_type(), json_attr_data()}.
-type json_attr() :: {attr_name(), [json_attr_value()]}.
-type json_item() :: [json_attr()].
-type json_expected() :: [json_pair()].
-type json_key() :: [json_attr(),...].

%% Table schema descriptions.
-type key() :: maybe_list(in_attr()).
-type attr_defs() :: maybe_list({attr_name(), attr_type()}).
%% A key schema is a hash key alone or a {hash, range} pair.
-type key_schema() :: hash_key_name() | {hash_key_name(), range_key_name()}.
-type hash_key_name() :: attr_name().
-type range_key_name() :: attr_name().
-type read_units() :: pos_integer().
-type write_units() :: pos_integer().
-type index_name() :: binary().
-type projection() :: keys_only |
                      {include, [attr_name()]} |
                      all.
-type global_secondary_index_def() :: {index_name(), key_schema(), projection(), read_units(), write_units()}.
-type stream_view_type() :: keys_only | new_image | old_image | new_and_old_images.
-type stream_specification() :: false | {true, stream_view_type()}.

%% Request modifiers shared across operations.
-type return_value() :: none | all_old | updated_old | all_new | updated_new.
-type expression() :: binary().
-type expression_attribute_names() :: [{binary(), attr_name()}].
-type expression_attribute_values() :: [{binary(), in_attr_value()}].
-type conditional_op() :: 'and' | 'or'.
-type comparison_op() :: eq | ne | le | lt | ge | gt | not_null | null | contains | not_contains |
                         begins_with | in | between.
-type condition() :: {attr_name(), not_null | null} |
                     {attr_name(), in_attr_value()} |
                     {attr_name(), in_attr_value(), comparison_op()} |
                     {attr_name(), {in_attr_value(), in_attr_value()}, between} |
                     {attr_name(), [in_attr_value(),...], in}.
-type conditions() :: maybe_list(condition()).
-type select() :: all_attributes | all_projected_attributes | count | specific_attributes.
-type return_consumed_capacity() :: none | total | indexes.
-type return_item_collection_metrics() :: none | size.

%% Output (response-side) value representations.
-type out_attr_value() :: binary() | number() | boolean() | undefined |
                          [binary()] | [number()] | [out_attr_value()] | [out_attr()].
-type out_attr() :: {attr_name(), out_attr_value()}.
-type out_item() :: [out_attr() | in_attr()]. % in_attr in the case of typed_record
-type ok_return(T) :: {ok, T} | {error, term()}.
%%%------------------------------------------------------------------------------
%%% Shared Dynamizers
%%%------------------------------------------------------------------------------
%% Convert terms into the form expected by DynamoDB
%% @doc Map a scalar attribute type atom to its DynamoDB wire name.
%% Only the scalar types appear here; these are the only types permitted in
%% attribute definitions. Any other atom raises function_clause.
-spec dynamize_type(attr_type()) -> binary().
dynamize_type(s) -> <<"S">>;
dynamize_type(n) -> <<"N">>;
dynamize_type(b) -> <<"B">>.
%% @doc Normalize a string-like input (binary, iolist or atom) to a binary.
-spec dynamize_string(in_string_value()) -> binary().
dynamize_string(S) when is_binary(S) -> S;
dynamize_string(S) when is_list(S) -> list_to_binary(S);
dynamize_string(S) when is_atom(S) -> atom_to_binary(S, utf8).
%% @doc Render a number as the decimal string DynamoDB expects.
-spec dynamize_number(number()) -> binary().
dynamize_number(N) when is_integer(N) ->
    integer_to_binary(N);
dynamize_number(N) when is_float(N) ->
    %% ~p yields the shortest representation that round-trips, avoiding the
    %% overly long output of float_to_list/1.
    [Formatted] = io_lib:format("~p", [N]),
    list_to_binary(Formatted).
%% @doc Convert an input attribute value (tagged or untyped) into the
%% {TypeName, Data} pair used in DynamoDB JSON. Untagged binaries/iolists/atoms
%% default to the S (string) type and untagged numbers to N; every other kind
%% must be explicitly tagged. Raises {erlcloud_ddb, {invalid_attr_value, V}}
%% for anything else.
-spec dynamize_value(in_attr_value()) -> json_attr_value().
dynamize_value({s, Value}) when is_binary(Value); is_list(Value); is_atom(Value) ->
    {<<"S">>, dynamize_string(Value)};
dynamize_value({n, Value}) when is_number(Value) ->
    {<<"N">>, dynamize_number(Value)};
dynamize_value({b, Value}) when is_binary(Value); is_list(Value) ->
    %% Binary values travel base64-encoded on the wire.
    {<<"B">>, base64:encode(Value)};
dynamize_value({bool, Value}) when is_boolean(Value) ->
    {<<"BOOL">>, Value};
dynamize_value({null, true}) ->
    {<<"NULL">>, true};
dynamize_value({ss, Value}) when is_list(Value) ->
    {<<"SS">>, [dynamize_string(V) || V <- Value]};
dynamize_value({ns, Value}) when is_list(Value) ->
    {<<"NS">>, [dynamize_number(V) || V <- Value]};
dynamize_value({bs, Value}) when is_list(Value) ->
    {<<"BS">>, [base64:encode(V) || V <- Value]};
dynamize_value({l, Value}) when is_list(Value) ->
    %% Each list element is wrapped in a singleton list so jsx emits an object.
    {<<"L">>, [[dynamize_value(V)] || V <- Value]};
dynamize_value({m, []}) ->
    %% jsx represents empty objects as [{}]
    {<<"M">>, [{}]};
dynamize_value({m, Value}) when is_list(Value) ->
    {<<"M">>, [dynamize_attr(Attr) || Attr <- Value]};
dynamize_value(Value) when is_binary(Value); is_list(Value); is_atom(Value) ->
    %% Untagged string-like value: default to S.
    {<<"S">>, dynamize_string(Value)};
dynamize_value(Value) when is_number(Value) ->
    %% Untagged number: default to N.
    {<<"N">>, dynamize_number(Value)};
dynamize_value(Value) ->
    error({erlcloud_ddb, {invalid_attr_value, Value}}).
%% @doc Convert one {Name, Value} input attribute to its JSON pair.
%% The attribute name must be a binary; otherwise an error is raised so the
%% caller sees which attribute was malformed.
-spec dynamize_attr(in_attr()) -> json_attr().
dynamize_attr({AttrName, AttrValue}) when is_binary(AttrName) ->
    {AttrName, [dynamize_value(AttrValue)]};
dynamize_attr({AttrName, _Value}) ->
    error({erlcloud_ddb, {invalid_attr_name, AttrName}});
dynamize_attr(Other) ->
    error({erlcloud_ddb, {invalid_attr, Other}}).
%% @doc Convert a key (one attribute or a list of attributes) to JSON form.
-spec dynamize_key(key()) -> jsx:json_term().
dynamize_key(Attrs) when is_list(Attrs) ->
    lists:map(fun dynamize_attr/1, Attrs);
dynamize_key(SingleAttr) ->
    [dynamize_attr(SingleAttr)].
%% @doc Convert attribute definitions ({Name, Type} or a list thereof) into
%% the AttributeDefinitions JSON structure.
-spec dynamize_attr_defs(attr_defs()) -> jsx:json_term().
dynamize_attr_defs({Name, Type}) ->
    %% A single definition is just the one-element list case.
    dynamize_attr_defs([{Name, Type}]);
dynamize_attr_defs(AttrDefs) ->
    [[{<<"AttributeName">>, Name},
      {<<"AttributeType">>, dynamize_type(Type)}]
     || {Name, Type} <- AttrDefs].
%% @doc Convert a key schema (hash key, or {hash, range} pair) into the
%% KeySchema JSON structure.
-spec dynamize_key_schema(key_schema()) -> jsx:json_term().
dynamize_key_schema({HashName, RangeName}) ->
    [[{<<"AttributeName">>, HashName}, {<<"KeyType">>, <<"HASH">>}],
     [{<<"AttributeName">>, RangeName}, {<<"KeyType">>, <<"RANGE">>}]];
dynamize_key_schema(HashName) ->
    [[{<<"AttributeName">>, HashName}, {<<"KeyType">>, <<"HASH">>}]].
%% @doc Apply a dynamizer to one item or to each item of a list, always
%% returning a list (a bare item becomes a one-element list).
-spec dynamize_maybe_list(fun((A) -> B), maybe_list(A)) -> [B].
dynamize_maybe_list(Dynamize, Items) when is_list(Items) ->
    lists:map(Dynamize, Items);
dynamize_maybe_list(Dynamize, Item) ->
    [Dynamize(Item)].
%% @doc Convert a projection spec into the Projection JSON structure.
%% Only the include form carries the non-key attribute list.
-spec dynamize_projection(projection()) -> jsx:json_term().
dynamize_projection({include, AttrNames}) ->
    [{<<"ProjectionType">>, <<"INCLUDE">>},
     {<<"NonKeyAttributes">>, AttrNames}];
dynamize_projection(keys_only) ->
    [{<<"ProjectionType">>, <<"KEYS_ONLY">>}];
dynamize_projection(all) ->
    [{<<"ProjectionType">>, <<"ALL">>}].
%% @doc Build the ProvisionedThroughput JSON object from a {Read, Write} pair.
-spec dynamize_provisioned_throughput({read_units(), write_units()}) -> jsx:json_term().
dynamize_provisioned_throughput({Read, Write}) ->
    [{<<"ReadCapacityUnits">>, Read},
     {<<"WriteCapacityUnits">>, Write}].
%% @doc Build a GlobalSecondaryIndex JSON object from its definition tuple.
-spec dynamize_global_secondary_index(global_secondary_index_def()) -> jsx:json_term().
dynamize_global_secondary_index({Name, KeySchema, Projection, Read, Write}) ->
    [{<<"IndexName">>, Name},
     {<<"KeySchema">>, dynamize_key_schema(KeySchema)},
     {<<"Projection">>, dynamize_projection(Projection)},
     {<<"ProvisionedThroughput">>, dynamize_provisioned_throughput({Read, Write})}].
%% @doc Map a stream view type atom to its wire name.
-spec dynamize_stream_view_type(stream_view_type()) -> binary().
dynamize_stream_view_type(keys_only) ->
    <<"KEYS_ONLY">>;
dynamize_stream_view_type(new_image) ->
    <<"NEW_IMAGE">>;
dynamize_stream_view_type(old_image) ->
    <<"OLD_IMAGE">>;
dynamize_stream_view_type(new_and_old_images) ->
    <<"NEW_AND_OLD_IMAGES">>.
%% @doc Build the StreamSpecification JSON object; StreamViewType is only
%% present when streaming is enabled.
-spec dynamize_stream_specification(stream_specification()) -> jsx:json_term().
dynamize_stream_specification({true, ViewType}) ->
    [{<<"StreamEnabled">>, true},
     {<<"StreamViewType">>, dynamize_stream_view_type(ViewType)}];
dynamize_stream_specification(false) ->
    [{<<"StreamEnabled">>, false}].
%% @doc Map a conditional operator atom to its wire name.
-spec dynamize_conditional_op(conditional_op()) -> binary().
dynamize_conditional_op('and') -> <<"AND">>;
dynamize_conditional_op('or') -> <<"OR">>.
%% @doc Convert one legacy "Expected" entry to its JSON pair: attribute must
%% not exist, must exist with a value, or an arbitrary condition().
-spec dynamize_expected_item(in_expected_item()) -> json_pair().
dynamize_expected_item({AttrName, false}) ->
    {AttrName, [{<<"Exists">>, false}]};
dynamize_expected_item({AttrName, true, AttrValue}) ->
    {AttrName, [{<<"Exists">>, true},
                {<<"Value">>, [dynamize_value(AttrValue)]}]};
dynamize_expected_item(Condition) ->
    dynamize_condition(Condition).
%% @doc Convert an "Expected" spec (single entry or list) to JSON pairs.
-spec dynamize_expected(in_expected()) -> json_expected().
dynamize_expected(Expected) ->
    dynamize_maybe_list(fun dynamize_expected_item/1, Expected).
%% @doc Map a ReturnValues atom to its wire name.
-spec dynamize_return_value(return_value()) -> binary().
dynamize_return_value(none) -> <<"NONE">>;
dynamize_return_value(all_old) -> <<"ALL_OLD">>;
dynamize_return_value(updated_old) -> <<"UPDATED_OLD">>;
dynamize_return_value(all_new) -> <<"ALL_NEW">>;
dynamize_return_value(updated_new) -> <<"UPDATED_NEW">>.
%% @doc Convert a full item (list of attributes) to JSON; raises
%% {erlcloud_ddb, {invalid_item, I}} when the input is not a list.
-spec dynamize_item(in_item()) -> json_item().
dynamize_item(Attrs) when is_list(Attrs) ->
    lists:map(fun dynamize_attr/1, Attrs);
dynamize_item(BadItem) ->
    error({erlcloud_ddb, {invalid_item, BadItem}}).
%% @doc ExpressionAttributeNames are already {Placeholder, Name} binary pairs,
%% which is exactly the JSON shape required, so pass them through unchanged.
-spec dynamize_expression_attribute_names(expression_attribute_names()) -> [json_pair()].
dynamize_expression_attribute_names(Names) ->
    Names.
%% @doc Convert ExpressionAttributeValues placeholders, dynamizing each value.
-spec dynamize_expression_attribute_values(expression_attribute_values()) -> [json_pair()].
dynamize_expression_attribute_values(Values) ->
    lists:map(fun({Placeholder, Value}) ->
                      {Placeholder, [dynamize_value(Value)]}
              end, Values).
%% @doc Map a comparison operator atom to its ComparisonOperator JSON pair.
-spec dynamize_comparison(comparison_op()) -> {binary(), binary()}.
dynamize_comparison(eq) -> {<<"ComparisonOperator">>, <<"EQ">>};
dynamize_comparison(ne) -> {<<"ComparisonOperator">>, <<"NE">>};
dynamize_comparison(le) -> {<<"ComparisonOperator">>, <<"LE">>};
dynamize_comparison(lt) -> {<<"ComparisonOperator">>, <<"LT">>};
dynamize_comparison(ge) -> {<<"ComparisonOperator">>, <<"GE">>};
dynamize_comparison(gt) -> {<<"ComparisonOperator">>, <<"GT">>};
dynamize_comparison(not_null) -> {<<"ComparisonOperator">>, <<"NOT_NULL">>};
dynamize_comparison(null) -> {<<"ComparisonOperator">>, <<"NULL">>};
dynamize_comparison(contains) -> {<<"ComparisonOperator">>, <<"CONTAINS">>};
dynamize_comparison(not_contains) -> {<<"ComparisonOperator">>, <<"NOT_CONTAINS">>};
dynamize_comparison(begins_with) -> {<<"ComparisonOperator">>, <<"BEGINS_WITH">>};
dynamize_comparison(in) -> {<<"ComparisonOperator">>, <<"IN">>};
dynamize_comparison(between) -> {<<"ComparisonOperator">>, <<"BETWEEN">>}.
%% @doc Convert one condition() into its legacy comparison JSON pair.
%% Clause order matters: the not_null/null forms and the 3-tuple in/between
%% forms must be matched before the generic 2-/3-tuple fallbacks.
-spec dynamize_condition(condition()) -> json_pair().
dynamize_condition({Name, not_null}) ->
    {Name, [dynamize_comparison(not_null)]};
dynamize_condition({Name, null}) ->
    {Name, [dynamize_comparison(null)]};
dynamize_condition({Name, AttrValue}) ->
    %% Default to eq
    {Name, [{<<"AttributeValueList">>, [[dynamize_value(AttrValue)]]},
            dynamize_comparison(eq)]};
dynamize_condition({Name, AttrValueList, in}) ->
    %% IN takes a list of candidate values.
    {Name, [{<<"AttributeValueList">>, [[dynamize_value(A)] || A <- AttrValueList]},
            dynamize_comparison(in)]};
dynamize_condition({Name, {AttrValue1, AttrValue2}, between}) ->
    %% BETWEEN takes exactly two bounds.
    {Name, [{<<"AttributeValueList">>, [[dynamize_value(AttrValue1)], [dynamize_value(AttrValue2)]]},
            dynamize_comparison(between)]};
dynamize_condition({Name, AttrValue, Op}) ->
    {Name, [{<<"AttributeValueList">>, [[dynamize_value(AttrValue)]]},
            dynamize_comparison(Op)]}.
%% @doc Convert a condition or list of conditions to JSON pairs.
-spec dynamize_conditions(conditions()) -> [json_pair()].
dynamize_conditions(Conditions) ->
    dynamize_maybe_list(fun dynamize_condition/1, Conditions).
%% @doc Map a Select atom to its wire name.
-spec dynamize_select(select()) -> binary().
dynamize_select(all_attributes) ->
    <<"ALL_ATTRIBUTES">>;
dynamize_select(all_projected_attributes) ->
    <<"ALL_PROJECTED_ATTRIBUTES">>;
dynamize_select(count) ->
    <<"COUNT">>;
dynamize_select(specific_attributes) ->
    <<"SPECIFIC_ATTRIBUTES">>.
%% @doc Map a ReturnConsumedCapacity atom to its wire name.
-spec dynamize_return_consumed_capacity(return_consumed_capacity()) -> binary().
dynamize_return_consumed_capacity(none) -> <<"NONE">>;
dynamize_return_consumed_capacity(total) -> <<"TOTAL">>;
dynamize_return_consumed_capacity(indexes) -> <<"INDEXES">>.
%% @doc Map a ReturnItemCollectionMetrics atom to its wire name.
-spec dynamize_return_item_collection_metrics(return_item_collection_metrics()) -> binary().
dynamize_return_item_collection_metrics(none) -> <<"NONE">>;
dynamize_return_item_collection_metrics(size) -> <<"SIZE">>.
%%%------------------------------------------------------------------------------
%%% Shared Undynamizers
%%%------------------------------------------------------------------------------
%% Options controlling undynamization of responses: {typed, true} keeps
%% explicit type tags on attribute values.
-type undynamize_opt() :: {typed, boolean()}.
-type undynamize_opts() :: [undynamize_opt()].

%% @doc Identity; used as the field-value function for fields that need no
%% conversion in the record descriptor tables below.
-spec id(X, undynamize_opts()) -> X.
id(X, _) -> X.
%% @doc Map a DynamoDB scalar type name back to its attr_type() atom.
-spec undynamize_type(json_attr_type(), undynamize_opts()) -> attr_type().
undynamize_type(<<"S">>, _) -> s;
undynamize_type(<<"N">>, _) -> n;
undynamize_type(<<"B">>, _) -> b.
%% @doc Convert a DynamoDB number string to an Erlang number. Strings with a
%% decimal point become floats; otherwise integers. Also accepts exponent
%% notation without a decimal point (e.g. <<"2E3">>), which DynamoDB may use
%% for large magnitudes and which previously crashed in list_to_integer/1.
-spec undynamize_number(binary(), undynamize_opts()) -> number().
undynamize_number(Value, _) ->
    String = binary_to_list(Value),
    case lists:member($., String) of
        true ->
            list_to_float(String);
        false ->
            case string:tokens(String, "eE") of
                [String] ->
                    %% No exponent marker: a plain integer.
                    list_to_integer(String);
                [Mantissa, Exponent] ->
                    %% list_to_float/1 requires a fractional part, so
                    %% normalize "2E3" to "2.0e3" before converting.
                    list_to_float(Mantissa ++ ".0e" ++ Exponent)
            end
    end.
%% @doc Convert a wire-format {TypeName, Data} pair into a plain (untyped)
%% Erlang value: strings stay binaries, numbers become number(), binaries are
%% base64-decoded, NULL maps to undefined, and L/M recurse.
-spec undynamize_value(json_attr_value(), undynamize_opts()) -> out_attr_value().
undynamize_value({<<"S">>, Value}, _) when is_binary(Value) ->
    Value;
undynamize_value({<<"N">>, Value}, Opts) ->
    undynamize_number(Value, Opts);
undynamize_value({<<"B">>, Value}, _) ->
    %% Binary values arrive base64-encoded.
    base64:decode(Value);
undynamize_value({<<"BOOL">>, Value}, _) when is_boolean(Value) ->
    Value;
undynamize_value({<<"NULL">>, true}, _) ->
    undefined;
undynamize_value({<<"SS">>, Values}, _) when is_list(Values) ->
    Values;
undynamize_value({<<"NS">>, Values}, Opts) ->
    [undynamize_number(Value, Opts) || Value <- Values];
undynamize_value({<<"BS">>, Values}, _) ->
    [base64:decode(Value) || Value <- Values];
undynamize_value({<<"L">>, List}, Opts) ->
    %% Each list element is a singleton object: unwrap and recurse.
    [undynamize_value(Value, Opts) || [Value] <- List];
undynamize_value({<<"M">>, [{}]}, _Opts) ->
    %% jsx returns [{}] for empty objects
    [];
undynamize_value({<<"M">>, Map}, Opts) ->
    [undynamize_attr(Attr, Opts) || Attr <- Map].
%% @doc Convert one wire-format attribute ({Name, [TypedValue]}) to a plain
%% {Name, Value} pair.
-spec undynamize_attr(json_attr(), undynamize_opts()) -> out_attr().
undynamize_attr({Name, [ValueJson]}, Opts) ->
    {Name, undynamize_value(ValueJson, Opts)}.
%% @doc Apply a pair-conversion fun to each member of a JSON object,
%% treating jsx's empty-object marker [{}] as the empty list.
-spec undynamize_object(fun((json_pair(), undynamize_opts()) -> A),
                        [json_pair()] | [{}], undynamize_opts()) -> [A].
undynamize_object(_, [{}], _) ->
    %% jsx returns [{}] for empty objects
    [];
undynamize_object(PairFun, Pairs, Opts) ->
    lists:map(fun(Pair) -> PairFun(Pair, Opts) end, Pairs).
%% @doc Convert a wire-format item to an out_item(). With {typed, true} in
%% Opts the attribute values keep their type tags; otherwise they are plain.
-spec undynamize_item(json_item(), undynamize_opts()) -> out_item().
undynamize_item(Json, Opts) ->
    AttrFun =
        case proplists:get_value(typed, Opts, false) of
            true -> fun undynamize_attr_typed/2;
            _ -> fun undynamize_attr/2
        end,
    undynamize_object(AttrFun, Json, Opts).
%% @doc Convert a list of wire-format items.
-spec undynamize_items([json_item()], undynamize_opts()) -> [out_item()].
undynamize_items(Items, Opts) ->
    lists:map(fun(Item) -> undynamize_item(Item, Opts) end, Items).
%% @doc Convert a wire-format {TypeName, Data} pair into a *typed* value,
%% i.e. a {TypeAtom, Value} tuple suitable for round-tripping back through
%% dynamize_value/1. Mirrors undynamize_value/2 but preserves type tags.
-spec undynamize_value_typed(json_attr_value(), undynamize_opts()) -> in_attr_value().
undynamize_value_typed({<<"S">>, Value}, _) when is_binary(Value) ->
    {s, Value};
undynamize_value_typed({<<"N">>, Value}, Opts) ->
    {n, undynamize_number(Value, Opts)};
undynamize_value_typed({<<"B">>, Value}, _) ->
    %% Binary values arrive base64-encoded.
    {b, base64:decode(Value)};
undynamize_value_typed({<<"BOOL">>, Value}, _) when is_boolean(Value) ->
    {bool, Value};
undynamize_value_typed({<<"NULL">>, true}, _) ->
    {null, true};
undynamize_value_typed({<<"SS">>, Values}, _) when is_list(Values) ->
    {ss, Values};
undynamize_value_typed({<<"NS">>, Values}, Opts) ->
    {ns, [undynamize_number(Value, Opts) || Value <- Values]};
undynamize_value_typed({<<"BS">>, Values}, _) ->
    {bs, [base64:decode(Value) || Value <- Values]};
undynamize_value_typed({<<"L">>, List}, Opts) ->
    %% Each list element is a singleton object: unwrap and recurse.
    {l, [undynamize_value_typed(Value, Opts) || [Value] <- List]};
undynamize_value_typed({<<"M">>, [{}]}, _Opts) ->
    %% jsx returns [{}] for empty objects
    {m, []};
undynamize_value_typed({<<"M">>, Map}, Opts) ->
    {m, [undynamize_attr_typed(Attr, Opts) || Attr <- Map]}.
%% @doc Convert one wire-format attribute to a typed {Name, {Type, Value}} pair.
-spec undynamize_attr_typed(json_attr(), undynamize_opts()) -> in_attr().
undynamize_attr_typed({Name, [ValueJson]}, Opts) ->
    {Name, undynamize_value_typed(ValueJson, Opts)}.

%% @doc Convert a wire-format item, keeping type tags on every value.
-spec undynamize_item_typed(json_item(), undynamize_opts()) -> in_item().
undynamize_item_typed(Json, Opts) ->
    undynamize_object(fun undynamize_attr_typed/2, Json, Opts).

%% @doc Convert a wire-format key (e.g. LastEvaluatedKey) to a typed key().
-spec undynamize_typed_key(json_key(), undynamize_opts()) -> key().
undynamize_typed_key(Key, Opts) ->
    [undynamize_attr_typed(I, Opts) || I <- Key].
%% @doc Convert AttributeDefinitions JSON back to {Name, Type} pairs.
-spec undynamize_attr_defs([json_item()], undynamize_opts()) -> attr_defs().
undynamize_attr_defs(Defs, Opts) ->
    lists:map(
      fun(Def) ->
              Name = proplists:get_value(<<"AttributeName">>, Def),
              TypeBin = proplists:get_value(<<"AttributeType">>, Def),
              {Name, undynamize_type(TypeBin, Opts)}
      end, Defs).
%% @doc Extract the attribute name from one KeySchema element
%% (undefined when the AttributeName pair is absent).
key_name(KeyElement) ->
    case lists:keyfind(<<"AttributeName">>, 1, KeyElement) of
        {_, Name} -> Name;
        false -> undefined
    end.
%% @doc Convert a KeySchema JSON list back to key_schema(). The two-element
%% form may arrive in either order, so the KeyType of the first element
%% decides which is the hash key.
-spec undynamize_key_schema([json_item()], undynamize_opts()) -> key_schema().
undynamize_key_schema([HashKey], _) ->
    key_name(HashKey);
undynamize_key_schema([First, Second], _) ->
    case proplists:get_value(<<"KeyType">>, First) of
        <<"HASH">> -> {key_name(First), key_name(Second)};
        <<"RANGE">> -> {key_name(Second), key_name(First)}
    end.
%% @doc Map a StreamViewType wire name back to its atom.
-spec undynamize_stream_view_type(binary(), undynamize_opts()) -> stream_view_type().
undynamize_stream_view_type(<<"KEYS_ONLY">>, _) ->
    keys_only;
undynamize_stream_view_type(<<"NEW_IMAGE">>, _) ->
    new_image;
undynamize_stream_view_type(<<"OLD_IMAGE">>, _) ->
    old_image;
undynamize_stream_view_type(<<"NEW_AND_OLD_IMAGES">>, _) ->
    new_and_old_images.
%% @doc Convert a StreamSpecification JSON object back to
%% stream_specification(); a missing StreamEnabled key means disabled.
-spec undynamize_stream_specification(jsx:json_term(), undynamize_opts()) -> stream_specification().
undynamize_stream_specification(Json, Opts) ->
    case proplists:get_value(<<"StreamEnabled">>, Json, false) of
        true ->
            ViewType = proplists:get_value(<<"StreamViewType">>, Json),
            {true, undynamize_stream_view_type(ViewType, Opts)};
        false ->
            false
    end.
%% @doc Expressions are returned as binaries already; pass through unchanged.
-spec undynamize_expression(binary(), undynamize_opts()) -> expression().
undynamize_expression(Expression, _) ->
    Expression.

%% @doc ExpressionAttributeNames are already {Placeholder, Name} pairs;
%% pass through unchanged.
-spec undynamize_expression_attribute_names([json_pair()], undynamize_opts()) -> expression_attribute_names().
undynamize_expression_attribute_names(Names, _) ->
    Names.
%% @doc Map a TableStatus wire name to its atom.
-spec undynamize_table_status(binary(), undynamize_opts()) -> table_status().
undynamize_table_status(<<"CREATING">>, _) ->
    creating;
undynamize_table_status(<<"UPDATING">>, _) ->
    updating;
undynamize_table_status(<<"DELETING">>, _) ->
    deleting;
undynamize_table_status(<<"ACTIVE">>, _) ->
    active.
%% A field table maps a JSON key to the record element index it fills and the
%% conversion function for its value.
-type field_table() :: [{binary(), pos_integer(),
                         fun((jsx:json_term(), undynamize_opts()) -> term())}].

%% @doc Fold one JSON pair into a record under construction: look the key up
%% in the field table and set the corresponding element; unknown keys are
%% silently ignored so new API fields cannot break decoding.
-spec undynamize_folder(field_table(), json_pair(), undynamize_opts(), tuple()) -> tuple().
undynamize_folder(FieldTable, {Key, Value}, Opts, Acc) ->
    case lists:keyfind(Key, 1, FieldTable) of
        {_, Index, ValueFun} ->
            setelement(Index, Acc, ValueFun(Value, Opts));
        false ->
            Acc
    end.
%% A record descriptor: the empty record template plus its field table.
-type record_desc() :: {tuple(), field_table()}.

%% @doc Fill a record template from a JSON object using its field table.
-spec undynamize_record(record_desc(), jsx:json_term(), undynamize_opts()) -> tuple().
undynamize_record({Template, _}, [{}], _) ->
    %% jsx returns [{}] for empty objects
    Template;
undynamize_record({Template, FieldTable}, Json, Opts) ->
    Fill = fun(Pair, Acc) -> undynamize_folder(FieldTable, Pair, Opts, Acc) end,
    lists:foldl(Fill, Template, Json).
%%%------------------------------------------------------------------------------
%%% Shared Options
%%%------------------------------------------------------------------------------
%% @doc Identity; used as the value function for options passed through as-is.
-spec id(X) -> X.
id(X) -> X.

%% How results are delivered: raw jsx JSON, a record, a record with typed
%% attribute values, or an unwrapped "simple" term.
-type out_type() :: json | record | typed_record | simple.
-type out_opt() :: {out, out_type()}.
%% A boolean option may be given as the bare atom (meaning true) or {Name, boolean()}.
-type boolean_opt(Name) :: Name | {Name, boolean()}.
-type property() :: proplists:property().
%% Caller options split into request (AWS) options and local behavior options.
-type aws_opts() :: [json_pair()].
-type ddb_opts() :: [out_opt()].
-type opts() :: {aws_opts(), ddb_opts()}.
%% @doc Validate a local (non-AWS) option; anything other than a recognized
%% out-type raises {erlcloud_ddb, {invalid_opt, {Name, Value}}}.
-spec verify_ddb_opt(atom(), term()) -> ok.
verify_ddb_opt(out, Value)
  when Value =:= json; Value =:= record; Value =:= typed_record; Value =:= simple ->
    ok;
verify_ddb_opt(Name, Value) ->
    error({erlcloud_ddb, {invalid_opt, {Name, Value}}}).
%% An option table entry maps an option atom to its AWS JSON key and the
%% function that dynamizes its value.
-type opt_table_entry() :: {atom(), binary(), fun((_) -> jsx:json_term())}.
-type opt_table() :: [opt_table_entry()].

%% @doc Fold one caller option into {AwsOpts, DdbOpts}: options in the table
%% become AWS request pairs; anything else must be a valid local option.
-spec opt_folder(opt_table(), property(), opts()) -> opts().
opt_folder(_, {_, undefined}, Acc) ->
    %% Options explicitly set to undefined are dropped entirely.
    Acc;
opt_folder(Table, {Name, Value}, {AwsOpts, DdbOpts}) ->
    case lists:keyfind(Name, 1, Table) of
        {_, AwsKey, Dynamize} ->
            {[{AwsKey, Dynamize(Value)} | AwsOpts], DdbOpts};
        false ->
            ok = verify_ddb_opt(Name, Value),
            {AwsOpts, [{Name, Value} | DdbOpts]}
    end.
%% @doc Split a caller-supplied option list into AWS request options and
%% local ddb options, dropping duplicates (first occurrence of each key wins
%% after unfolding bare atoms to {Atom, true}).
%% NOTE(review): the spec's proplist() type is presumably defined elsewhere in
%% this file; confirm it exists (proplists:proplist() is the stdlib name).
-spec opts(opt_table(), proplist()) -> opts().
opts(Table, Opts) when is_list(Opts) ->
    %% remove duplicate options
    Opts1 = lists:ukeysort(1, proplists:unfold(Opts)),
    lists:foldl(fun(Opt, A) -> opt_folder(Table, Opt, A) end, {[], []}, Opts1);
opts(_, _) ->
    error({erlcloud_ddb, opts_not_list}).
%% Option-table entries shared by several operations. Each *_opt/0 helper
%% returns the {OptionAtom, AwsJsonKey, DynamizeFun} entry used by opts/2.
-type expression_attribute_names_opt() :: {expression_attribute_names, expression_attribute_names()}.
-spec expression_attribute_names_opt() -> opt_table_entry().
expression_attribute_names_opt() ->
    {expression_attribute_names, <<"ExpressionAttributeNames">>, fun dynamize_expression_attribute_names/1}.

-type expression_attribute_values_opt() :: {expression_attribute_values, expression_attribute_values()}.
-spec expression_attribute_values_opt() -> opt_table_entry().
expression_attribute_values_opt() ->
    {expression_attribute_values, <<"ExpressionAttributeValues">>, fun dynamize_expression_attribute_values/1}.

-type projection_expression_opt() :: {projection_expression, expression()}.
-spec projection_expression_opt() -> opt_table_entry().
projection_expression_opt() ->
    {projection_expression, <<"ProjectionExpression">>, fun dynamize_expression/1}.

%% Legacy alternative to projection expressions.
-type attributes_to_get_opt() :: {attributes_to_get, [attr_name()]}.
-spec attributes_to_get_opt() -> opt_table_entry().
attributes_to_get_opt() ->
    {attributes_to_get, <<"AttributesToGet">>, fun id/1}.

-type consistent_read_opt() :: boolean_opt(consistent_read).
-spec consistent_read_opt() -> opt_table_entry().
consistent_read_opt() ->
    {consistent_read, <<"ConsistentRead">>, fun id/1}.

-type condition_expression_opt() :: {condition_expression, expression()}.
-spec condition_expression_opt() -> opt_table_entry().
condition_expression_opt() ->
    {condition_expression, <<"ConditionExpression">>, fun dynamize_expression/1}.

%% Legacy conditional operator combining "Expected" entries.
-type conditional_op_opt() :: {conditional_op, conditional_op()}.
-spec conditional_op_opt() -> opt_table_entry().
conditional_op_opt() ->
    {conditional_op, <<"ConditionalOperator">>, fun dynamize_conditional_op/1}.

%% Legacy conditional-write "Expected" option.
-type expected_opt() :: {expected, in_expected()}.
-spec expected_opt() -> opt_table_entry().
expected_opt() ->
    {expected, <<"Expected">>, fun dynamize_expected/1}.

-spec filter_expression_opt() -> opt_table_entry().
filter_expression_opt() ->
    {filter_expression, <<"FilterExpression">>, fun dynamize_expression/1}.
% This matches the Java API, which asks the user to write their own expressions.
%% @doc Render an expression to a binary. Binaries and iolists pass through;
%% in addition, a convenience tuple syntax can be used to assemble simple
%% comparison/function expressions.
%% NOTE(review): the spec only admits expression() (a binary) although the
%% clauses also accept iolists and the tuple forms below — presumably
%% intentional for Dialyzer looseness; confirm before tightening.
-spec dynamize_expression(expression()) -> binary().
dynamize_expression(Expression) when is_binary(Expression) ->
    Expression;
dynamize_expression(Expression) when is_list(Expression) ->
    list_to_binary(Expression);
% Or, some convenience functions for assembling expressions using lists of tuples.
dynamize_expression({A, also, B}) ->
    %% Conjunction: both sub-expressions are parenthesized.
    AA = dynamize_expression(A),
    BB = dynamize_expression(B),
    <<"(", AA/binary, ") AND (", BB/binary, ")">>;
dynamize_expression({{A, B}, eq}) ->
    <<A/binary, " = ", B/binary>>;
dynamize_expression({{A, B}, ne}) ->
    <<A/binary, " <> ", B/binary>>;
dynamize_expression({{A, B}, lt}) ->
    <<A/binary, " < ", B/binary>>;
dynamize_expression({{A, B}, le}) ->
    <<A/binary, " <= ", B/binary>>;
dynamize_expression({{A, B}, gt}) ->
    <<A/binary, " > ", B/binary>>;
dynamize_expression({{A, B}, ge}) ->
    <<A/binary, " >= ", B/binary>>;
dynamize_expression({{A, {Low, High}}, between}) ->
    <<A/binary, " BETWEEN ", Low/binary, " AND ", High/binary>>;
dynamize_expression({{A, B}, in}) when is_binary(B) ->
    %% Caller supplies the parenthesized operand list verbatim.
    <<A/binary, " IN ", B/binary>>;
dynamize_expression({{A, B}, in}) when is_list(B) ->
    % Convert everything to binaries.
    InList = [to_binary(X) || X <- B],
    % Join the list of binaries with commas.
    Join = fun(Elem, Acc) when Acc =:= <<"">> ->
                   Elem;
              (Elem, Acc) ->
                   <<Acc/binary, ",", Elem/binary>> end,
    In = lists:foldl(Join, <<>>, InList),
    <<A/binary, " IN (", In/binary, ")">>;
dynamize_expression({attribute_exists, Path}) ->
    <<"attribute_exists(", Path/binary, ")">>;
dynamize_expression({attribute_not_exists, Path}) ->
    <<"attribute_not_exists(", Path/binary, ")">>;
dynamize_expression({begins_with, Path, Operand}) ->
    <<"begins_with(", Path/binary, ",", Operand/binary, ")">>;
dynamize_expression({contains, Path, Operand}) ->
    <<"contains(", Path/binary, ",", Operand/binary, ")">>.
%% Option-table entries for capacity/metrics reporting.
-type return_consumed_capacity_opt() :: {return_consumed_capacity, return_consumed_capacity()}.
-spec return_consumed_capacity_opt() -> opt_table_entry().
return_consumed_capacity_opt() ->
    {return_consumed_capacity, <<"ReturnConsumedCapacity">>, fun dynamize_return_consumed_capacity/1}.

-type return_item_collection_metrics_opt() :: {return_item_collection_metrics, return_item_collection_metrics()}.
-spec return_item_collection_metrics_opt() -> opt_table_entry().
return_item_collection_metrics_opt() ->
    {return_item_collection_metrics, <<"ReturnItemCollectionMetrics">>,
     fun dynamize_return_item_collection_metrics/1}.
%%%------------------------------------------------------------------------------
%%% Output
%%%------------------------------------------------------------------------------
%% The public return shape of each operation, depending on the out option.
-type ddb_return(Record, Simple) :: {ok, jsx:json_term() | Record | Simple} | {error, term()}.
-type undynamize_fun() :: fun((jsx:json_term(), undynamize_opts()) -> tuple()).

%% @doc Post-process a raw JSON result according to the out option:
%% json passes the JSON through, record/typed_record undynamize into a
%% record, and simple (the default) tags the record for field extraction
%% by out/4,5.
-spec out(erlcloud_ddb_impl:json_return(), undynamize_fun(), ddb_opts())
    -> {ok, jsx:json_term() | tuple()} |
       {simple, term()} |
       {error, term()}.
out({error, _} = Error, _, _) ->
    Error;
out({ok, Json}, Undynamize, Opts) ->
    case proplists:get_value(out, Opts, simple) of
        json -> {ok, Json};
        record -> {ok, Undynamize(Json, [])};
        typed_record -> {ok, Undynamize(Json, [{typed, true}])};
        simple -> {simple, Undynamize(Json, [])}
    end.
%% Returns specified field of tuple for simple return
%% @doc Like out/5 with a default of {error, no_return} when the requested
%% record field is undefined.
-spec out(erlcloud_ddb_impl:json_return(), undynamize_fun(), ddb_opts(), pos_integer())
    -> ok_return(term()).
out(Result, Undynamize, Opts, Index) ->
    out(Result, Undynamize, Opts, Index, {error, no_return}).
%% @doc For the simple out mode, extract the record field at Index, returning
%% Default when it is undefined; other out modes pass through unchanged.
-spec out(erlcloud_ddb_impl:json_return(), undynamize_fun(), ddb_opts(), pos_integer(), ok_return(term()))
    -> ok_return(term()).
out(Result, Undynamize, Opts, Index, Default) ->
    case out(Result, Undynamize, Opts) of
        {simple, Record} ->
            case element(Index, Record) of
                undefined -> Default;
                Value -> {ok, Value}
            end;
        Other ->
            Other
    end.
%%%------------------------------------------------------------------------------
%%% Shared Records
%%%------------------------------------------------------------------------------
%% @doc Pull the CapacityUnits value out of a capacity JSON object; a badmatch
%% is raised if the key is absent.
undynamize_consumed_capacity_units(Json, _Opts) ->
    {<<"CapacityUnits">>, Units} = lists:keyfind(<<"CapacityUnits">>, 1, Json),
    Units.
%% @doc Field table for decoding a ConsumedCapacity JSON object into a
%% #ddb2_consumed_capacity{} record. Per-index capacity maps are converted to
%% {IndexName, Units} pairs.
-spec consumed_capacity_record() -> record_desc().
consumed_capacity_record() ->
    {#ddb2_consumed_capacity{},
     [{<<"CapacityUnits">>, #ddb2_consumed_capacity.capacity_units, fun id/2},
      {<<"GlobalSecondaryIndexes">>, #ddb2_consumed_capacity.global_secondary_indexes,
       fun(V, Opts) -> undynamize_object(
                         fun({IndexName, Json}, Opts2) ->
                                 {IndexName, undynamize_consumed_capacity_units(Json, Opts2)}
                         end, V, Opts)
       end},
      {<<"LocalSecondaryIndexes">>, #ddb2_consumed_capacity.local_secondary_indexes,
       fun(V, Opts) -> undynamize_object(
                         fun({IndexName, Json}, Opts2) ->
                                 {IndexName, undynamize_consumed_capacity_units(Json, Opts2)}
                         end, V, Opts)
       end},
      {<<"Table">>, #ddb2_consumed_capacity.table, fun undynamize_consumed_capacity_units/2},
      {<<"TableName">>, #ddb2_consumed_capacity.table_name, fun id/2}]}.

%% @doc Decode one ConsumedCapacity object.
undynamize_consumed_capacity(V, Opts) ->
    undynamize_record(consumed_capacity_record(), V, Opts).

%% @doc Decode a list of ConsumedCapacity objects (batch operations).
undynamize_consumed_capacity_list(V, Opts) ->
    [undynamize_record(consumed_capacity_record(), I, Opts) || I <- V].
%% @doc Field table for decoding an ItemCollectionMetrics JSON object into a
%% #ddb2_item_collection_metrics{} record. The collection key is reduced to
%% its bare value; the size range pair becomes a {Low, High} tuple.
-spec item_collection_metrics_record() -> record_desc().
item_collection_metrics_record() ->
    {#ddb2_item_collection_metrics{},
     [{<<"ItemCollectionKey">>, #ddb2_item_collection_metrics.item_collection_key,
       fun([V], Opts) ->
               {_Name, Value} = undynamize_attr(V, Opts),
               Value
       end},
      {<<"SizeEstimateRangeGB">>, #ddb2_item_collection_metrics.size_estimate_range_gb,
       fun([L, H], _) -> {L, H} end}]}.

%% @doc Decode one ItemCollectionMetrics object.
undynamize_item_collection_metrics(V, Opts) ->
    undynamize_record(item_collection_metrics_record(), V, Opts).

%% @doc Decode the per-table ItemCollectionMetrics list, tagged with its table.
undynamize_item_collection_metric_list(Table, V, Opts) ->
    {Table, [undynamize_item_collection_metrics(I, Opts) || I <- V]}.
%% @doc Convert a Projection JSON object back to projection(); only the
%% INCLUDE form carries a NonKeyAttributes list.
undynamize_projection(Json, _) ->
    ProjectionType = proplists:get_value(<<"ProjectionType">>, Json),
    case ProjectionType of
        <<"KEYS_ONLY">> ->
            keys_only;
        <<"ALL">> ->
            all;
        <<"INCLUDE">> ->
            {include, proplists:get_value(<<"NonKeyAttributes">>, Json)}
    end.
%% @doc Map an IndexStatus wire name to its atom.
-spec undynamize_index_status(binary(), undynamize_opts()) -> index_status().
undynamize_index_status(<<"CREATING">>, _) ->
    creating;
undynamize_index_status(<<"UPDATING">>, _) ->
    updating;
undynamize_index_status(<<"DELETING">>, _) ->
    deleting;
undynamize_index_status(<<"ACTIVE">>, _) ->
    active.
%% Decoder table for a GlobalSecondaryIndexDescription JSON object:
%% {RecordTemplate, [{JsonKey, RecordFieldIndex, DecodeFun}]}.
-spec global_secondary_index_description_record() -> record_desc().
global_secondary_index_description_record() ->
    {#ddb2_global_secondary_index_description{},
     [{<<"Backfilling">>, #ddb2_global_secondary_index_description.backfilling, fun id/2},
      {<<"IndexArn">>, #ddb2_global_secondary_index_description.index_arn, fun id/2},
      {<<"IndexName">>, #ddb2_global_secondary_index_description.index_name, fun id/2},
      {<<"IndexSizeBytes">>, #ddb2_global_secondary_index_description.index_size_bytes, fun id/2},
      {<<"IndexStatus">>, #ddb2_global_secondary_index_description.index_status, fun undynamize_index_status/2},
      {<<"ItemCount">>, #ddb2_global_secondary_index_description.item_count, fun id/2},
      {<<"KeySchema">>, #ddb2_global_secondary_index_description.key_schema, fun undynamize_key_schema/2},
      {<<"Projection">>, #ddb2_global_secondary_index_description.projection, fun undynamize_projection/2},
      {<<"ProvisionedThroughput">>, #ddb2_global_secondary_index_description.provisioned_throughput,
       fun(V, Opts) -> undynamize_record(provisioned_throughput_description_record(), V, Opts) end}
     ]}.
%% Decoder table for a LocalSecondaryIndexDescription JSON object.
-spec local_secondary_index_description_record() -> record_desc().
local_secondary_index_description_record() ->
    {#ddb2_local_secondary_index_description{},
     [{<<"IndexArn">>, #ddb2_local_secondary_index_description.index_arn, fun id/2},
      {<<"IndexName">>, #ddb2_local_secondary_index_description.index_name, fun id/2},
      {<<"IndexSizeBytes">>, #ddb2_local_secondary_index_description.index_size_bytes, fun id/2},
      {<<"ItemCount">>, #ddb2_local_secondary_index_description.item_count, fun id/2},
      {<<"KeySchema">>, #ddb2_local_secondary_index_description.key_schema, fun undynamize_key_schema/2},
      {<<"Projection">>, #ddb2_local_secondary_index_description.projection, fun undynamize_projection/2}
     ]}.
%% Decoder table for a ProvisionedThroughputDescription JSON object.
-spec provisioned_throughput_description_record() -> record_desc().
provisioned_throughput_description_record() ->
    {#ddb2_provisioned_throughput_description{},
     [{<<"LastDecreaseDateTime">>, #ddb2_provisioned_throughput_description.last_decrease_date_time, fun id/2},
      {<<"LastIncreaseDateTime">>, #ddb2_provisioned_throughput_description.last_increase_date_time, fun id/2},
      {<<"NumberOfDecreasesToday">>, #ddb2_provisioned_throughput_description.number_of_decreases_today, fun id/2},
      {<<"ReadCapacityUnits">>, #ddb2_provisioned_throughput_description.read_capacity_units, fun id/2},
      {<<"WriteCapacityUnits">>, #ddb2_provisioned_throughput_description.write_capacity_units, fun id/2}
     ]}.
%% Decoder table for a TableDescription JSON object, used by CreateTable,
%% DeleteTable and DescribeTable responses. Index lists are decoded
%% element-by-element with the corresponding description record tables.
-spec table_description_record() -> record_desc().
table_description_record() ->
    {#ddb2_table_description{},
     [{<<"AttributeDefinitions">>, #ddb2_table_description.attribute_definitions, fun undynamize_attr_defs/2},
      {<<"CreationDateTime">>, #ddb2_table_description.creation_date_time, fun id/2},
      {<<"GlobalSecondaryIndexes">>, #ddb2_table_description.global_secondary_indexes,
       fun(V, Opts) -> [undynamize_record(global_secondary_index_description_record(), I, Opts) || I <- V] end},
      {<<"ItemCount">>, #ddb2_table_description.item_count, fun id/2},
      {<<"KeySchema">>, #ddb2_table_description.key_schema, fun undynamize_key_schema/2},
      {<<"LatestStreamArn">>, #ddb2_table_description.latest_stream_arn, fun id/2},
      {<<"LatestStreamLabel">>, #ddb2_table_description.latest_stream_label, fun id/2},
      {<<"LocalSecondaryIndexes">>, #ddb2_table_description.local_secondary_indexes,
       fun(V, Opts) -> [undynamize_record(local_secondary_index_description_record(), I, Opts) || I <- V] end},
      {<<"ProvisionedThroughput">>, #ddb2_table_description.provisioned_throughput,
       fun(V, Opts) -> undynamize_record(provisioned_throughput_description_record(), V, Opts) end},
      {<<"StreamSpecification">>, #ddb2_table_description.stream_specification, fun undynamize_stream_specification/2},
      {<<"TableArn">>, #ddb2_table_description.table_arn, fun id/2},
      {<<"TableName">>, #ddb2_table_description.table_name, fun id/2},
      {<<"TableSizeBytes">>, #ddb2_table_description.table_size_bytes, fun id/2},
      {<<"TableStatus">>, #ddb2_table_description.table_status, fun undynamize_table_status/2}
     ]}.
%%%------------------------------------------------------------------------------
%%% BatchGetItem
%%%------------------------------------------------------------------------------
%% Top-level options accepted by batch_get_item/1,2,3. out_opt() is
%% handled locally (e.g. {out, record}) rather than sent to AWS.
-type batch_get_item_opt() :: return_consumed_capacity_opt() |
                              out_opt().
-type batch_get_item_opts() :: [batch_get_item_opt()].

%% AWS-level option table for the top level of a BatchGetItem request.
-spec batch_get_item_opts() -> opt_table().
batch_get_item_opts() ->
    [return_consumed_capacity_opt()].
%% Per-table options accepted inside each BatchGetItem request item.
-type batch_get_item_request_item_opt() :: expression_attribute_names_opt() |
                                           projection_expression_opt() |
                                           attributes_to_get_opt() |
                                           consistent_read_opt().
-type batch_get_item_request_item_opts() :: [batch_get_item_request_item_opt()].

%% AWS-level option table for one BatchGetItem request item.
-spec batch_get_item_request_item_opts() -> opt_table().
batch_get_item_request_item_opts() ->
    [expression_attribute_names_opt(),
     projection_expression_opt(),
     attributes_to_get_opt(),
     consistent_read_opt()].
-type batch_get_item_request_item() :: {table_name(), [key(),...], batch_get_item_request_item_opts()} |
                                       {table_name(), [key(),...]}.

%% Dynamize one per-table request item into a {TableName, Json} pair.
%% The two-tuple form is normalized to the three-tuple form with no options.
-spec dynamize_batch_get_item_request_item(batch_get_item_request_item())
    -> json_pair().
dynamize_batch_get_item_request_item({Table, Keys}) ->
    dynamize_batch_get_item_request_item({Table, Keys, []});
dynamize_batch_get_item_request_item({Table, Keys, Opts}) ->
    %% opts/2 must consume every supplied option; leftovers are a caller error.
    {AwsOpts, []} = opts(batch_get_item_request_item_opts(), Opts),
    DynKeys = lists:map(fun dynamize_key/1, Keys),
    {Table, [{<<"Keys">>, DynKeys} | AwsOpts]}.
%% A single request item or a list of them; normalized to a list of
%% {TableName, Json} pairs for the RequestItems parameter.
-type batch_get_item_request_items() :: maybe_list(batch_get_item_request_item()).
-spec dynamize_batch_get_item_request_items(batch_get_item_request_items()) -> [json_pair()].
dynamize_batch_get_item_request_items(Request) ->
    dynamize_maybe_list(fun dynamize_batch_get_item_request_item/1, Request).
%% Fold step that rebuilds a batch_get_item_request_item() from the
%% UnprocessedKeys JSON returned by DynamoDB. The <<"Keys">> clause
%% replaces the (empty) accumulated key list; each option clause prepends
%% its decoded option.
-spec batch_get_item_request_item_folder({binary(), term()}, batch_get_item_request_item())
    -> batch_get_item_request_item().
batch_get_item_request_item_folder({<<"Keys">>, Keys}, {Table, _, Opts}) ->
    {Table, [undynamize_typed_key(K, []) || K <- Keys], Opts};
batch_get_item_request_item_folder({<<"ExpressionAttributeNames">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{expression_attribute_names, undynamize_expression_attribute_names(Value, [])} | Opts]};
batch_get_item_request_item_folder({<<"ProjectionExpression">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{projection_expression, undynamize_expression(Value, [])} | Opts]};
batch_get_item_request_item_folder({<<"AttributesToGet">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{attributes_to_get, Value} | Opts]};
batch_get_item_request_item_folder({<<"ConsistentRead">>, Value}, {Table, Keys, Opts}) ->
    {Table, Keys, [{consistent_read, Value} | Opts]}.
%% Rebuild a batch_get_item_request_item() for Table from its
%% UnprocessedKeys JSON, so callers can resubmit it directly.
-spec undynamize_batch_get_item_request_item(table_name(), jsx:json_term(), undynamize_opts())
    -> batch_get_item_request_item().
undynamize_batch_get_item_request_item(Table, Json, _) ->
    lists:foldl(fun batch_get_item_request_item_folder/2, {Table, [], []}, Json).
%% Decode one per-table entry of the Responses object into a
%% #ddb2_batch_get_item_response{} record.
undynamize_batch_get_item_response({Table, Json}, Opts) ->
    #ddb2_batch_get_item_response{
       table = Table,
       items = undynamize_items(Json, Opts)}.
%% Decode the whole Responses object (one entry per table).
undynamize_batch_get_item_responses(Response, Opts) ->
    undynamize_object(fun undynamize_batch_get_item_response/2, Response, Opts).
%% Decoder table for a BatchGetItem response body.
-spec batch_get_item_record() -> record_desc().
batch_get_item_record() ->
    {#ddb2_batch_get_item{},
     [{<<"ConsumedCapacity">>, #ddb2_batch_get_item.consumed_capacity, fun undynamize_consumed_capacity_list/2},
      {<<"Responses">>, #ddb2_batch_get_item.responses, fun undynamize_batch_get_item_responses/2},
      {<<"UnprocessedKeys">>, #ddb2_batch_get_item.unprocessed_keys,
       %% UnprocessedKeys is decoded back into request items so it can be
       %% resubmitted as-is by the caller.
       fun(V, Opts) -> undynamize_object(fun({Table, Json}, Opts2) ->
                                                 undynamize_batch_get_item_request_item(Table, Json, Opts2)
                                         end, V, Opts)
       end}
     ]}.
%% Simple return is the flattened list of items across all tables.
-type batch_get_item_return() :: ddb_return(#ddb2_batch_get_item{}, [out_item()]).

%% @equiv batch_get_item(RequestItems, [], default_config())
-spec batch_get_item(batch_get_item_request_items()) -> batch_get_item_return().
batch_get_item(RequestItems) ->
    batch_get_item(RequestItems, [], default_config()).

%% @equiv batch_get_item(RequestItems, Opts, default_config())
-spec batch_get_item(batch_get_item_request_items(), batch_get_item_opts()) -> batch_get_item_return().
batch_get_item(RequestItems, Opts) ->
    batch_get_item(RequestItems, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchGetItem.html]
%%
%% ===Example===
%%
%% Get 4 items total from 2 tables.
%%
%% `
%% {ok, Record} =
%% erlcloud_ddb2:batch_get_item(
%% [{<<"Forum">>,
%% [{<<"Name">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Name">>, {s, <<"Amazon RDS">>}},
%% {<<"Name">>, {s, <<"Amazon Redshift">>}}],
%% [{projection_expression, <<"Name, Threads, Messages, Views">>}]},
%% {<<"Thread">>,
%% [[{<<"ForumName">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Subject">>, {s, <<"Concurrent reads">>}}]],
%% [{projection_expression, <<"Tags, Message">>}]}],
%% [{return_consumed_capacity, total},
%% {out, record}]),
%% '
%%
%% See also erlcloud_ddb_util:get_all which provides retry and parallel batching.
%%
%% @end
%%------------------------------------------------------------------------------
-spec batch_get_item(batch_get_item_request_items(), batch_get_item_opts(), aws_config()) ->
                            batch_get_item_return().
batch_get_item(RequestItems, Opts, Config) ->
    %% Split caller options into AWS request parameters and local
    %% (ddb-level) options such as {out, record}.
    {AwsOpts, DdbOpts} = opts(batch_get_item_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.BatchGetItem",
               [{<<"RequestItems">>, dynamize_batch_get_item_request_items(RequestItems)}]
               ++ AwsOpts),
    case out(Return,
             fun(Json, UOpts) -> undynamize_record(batch_get_item_record(), Json, UOpts) end,
             DdbOpts) of
        {simple, #ddb2_batch_get_item{unprocessed_keys = [_|_]}} ->
            %% Return an error on unprocessed results.
            {error, unprocessed};
        {simple, #ddb2_batch_get_item{unprocessed_keys = [], responses = Responses}} ->
            %% Simple return for batch_get_item is all items from all tables in a single list
            {ok, lists:flatmap(fun(#ddb2_batch_get_item_response{items = I}) -> I end, Responses)};
        {ok, _} = Out -> Out;
        {error, _} = Out -> Out
    end.
%%%------------------------------------------------------------------------------
%%% BatchWriteItem
%%%------------------------------------------------------------------------------
%% Top-level options accepted by batch_write_item/1,2,3.
-type batch_write_item_opt() :: return_consumed_capacity_opt() |
                                return_item_collection_metrics_opt() |
                                out_opt().
-type batch_write_item_opts() :: [batch_write_item_opt()].

%% AWS-level option table for the top level of a BatchWriteItem request.
-spec batch_write_item_opts() -> opt_table().
batch_write_item_opts() ->
    [return_consumed_capacity_opt(),
     return_item_collection_metrics_opt()].
%% A batch write request is either a put of a full item or a delete by key.
-type batch_write_item_put() :: {put, in_item()}.
-type batch_write_item_delete() :: {delete, key()}.
-type batch_write_item_request() :: batch_write_item_put() | batch_write_item_delete().
-type batch_write_item_request_item() :: {table_name(), [batch_write_item_request()]}.

%% Dynamize one write request into its PutRequest/DeleteRequest JSON form.
-spec dynamize_batch_write_item_request(batch_write_item_request()) -> jsx:json_term().
dynamize_batch_write_item_request({put, Item}) ->
    [{<<"PutRequest">>, [{<<"Item">>, dynamize_item(Item)}]}];
dynamize_batch_write_item_request({delete, Key}) ->
    [{<<"DeleteRequest">>, [{<<"Key">>, dynamize_key(Key)}]}].
%% Dynamize one per-table write-request item into a {TableName, Json} pair.
-spec dynamize_batch_write_item_request_item(batch_write_item_request_item())
    -> json_pair().
dynamize_batch_write_item_request_item({Table, Requests}) ->
    {Table, lists:map(fun dynamize_batch_write_item_request/1, Requests)}.
%% A single request item or a list of them; normalized to a list of
%% {TableName, Json} pairs for the RequestItems parameter.
-type batch_write_item_request_items() :: maybe_list(batch_write_item_request_item()).
-spec dynamize_batch_write_item_request_items(batch_write_item_request_items()) -> [json_pair()].
dynamize_batch_write_item_request_items(Request) ->
    dynamize_maybe_list(fun dynamize_batch_write_item_request_item/1, Request).
%% Fold step that rebuilds one write request (put or delete) from the
%% UnprocessedItems JSON, prepending onto the accumulated request list.
-spec batch_write_item_request_folder([{binary(), term()}], batch_write_item_request_item())
    -> batch_write_item_request_item().
batch_write_item_request_folder([{<<"PutRequest">>, [{<<"Item">>, Item}]}], {Table, Requests}) ->
    {Table, [{put, undynamize_item_typed(Item, [])} | Requests]};
batch_write_item_request_folder([{<<"DeleteRequest">>, [{<<"Key">>, Key}]}], {Table, Requests}) ->
    {Table, [{delete, undynamize_typed_key(Key, [])} | Requests]}.
%% Rebuild a batch_write_item_request_item() from UnprocessedItems JSON.
%% The fold prepends each request, so reverse to restore original order.
-spec undynamize_batch_write_item_request_item(table_name(), jsx:json_term(), undynamize_opts())
    -> batch_write_item_request_item().
undynamize_batch_write_item_request_item(Table, Json, _) ->
    {Table, Requests} = lists:foldl(fun batch_write_item_request_folder/2, {Table, []}, Json),
    {Table, lists:reverse(Requests)}.
%% Decoder table for a BatchWriteItem response body. UnprocessedItems is
%% decoded back into request items so callers can resubmit them directly.
-spec batch_write_item_record() -> record_desc().
batch_write_item_record() ->
    {#ddb2_batch_write_item{},
     [{<<"ConsumedCapacity">>, #ddb2_batch_write_item.consumed_capacity, fun undynamize_consumed_capacity_list/2},
      {<<"ItemCollectionMetrics">>, #ddb2_batch_write_item.item_collection_metrics,
       fun(V, Opts) -> undynamize_object(
                         fun({Table, Json}, Opts2) ->
                                 undynamize_item_collection_metric_list(Table, Json, Opts2)
                         end, V, Opts)
       end},
      {<<"UnprocessedItems">>, #ddb2_batch_write_item.unprocessed_items,
       fun(V, Opts) -> undynamize_object(
                         fun({Table, Json}, Opts2) ->
                                 undynamize_batch_write_item_request_item(Table, Json, Opts2)
                         end, V, Opts)
       end}
     ]}.
%% Simple return is the full record (there is no simpler useful shape).
-type batch_write_item_return() :: ddb_return(#ddb2_batch_write_item{}, #ddb2_batch_write_item{}).

%% @equiv batch_write_item(RequestItems, [], default_config())
-spec batch_write_item(batch_write_item_request_items()) -> batch_write_item_return().
batch_write_item(RequestItems) ->
    batch_write_item(RequestItems, [], default_config()).

%% @equiv batch_write_item(RequestItems, Opts, default_config())
-spec batch_write_item(batch_write_item_request_items(), batch_write_item_opts()) -> batch_write_item_return().
batch_write_item(RequestItems, Opts) ->
    batch_write_item(RequestItems, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_BatchWriteItem.html]
%%
%% ===Example===
%%
%% Put 4 items in the "Forum" table.
%%
%% `
%% {ok, Record} =
%% erlcloud_ddb2:batch_write_item(
%% [{<<"Forum">>,
%% [{put, [{<<"Name">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Category">>, {s, <<"Amazon Web Services">>}}]},
%% {put, [{<<"Name">>, {s, <<"Amazon RDS">>}},
%% {<<"Category">>, {s, <<"Amazon Web Services">>}}]},
%% {put, [{<<"Name">>, {s, <<"Amazon Redshift">>}},
%% {<<"Category">>, {s, <<"Amazon Web Services">>}}]},
%% {put, [{<<"Name">>, {s, <<"Amazon ElastiCache">>}},
%% {<<"Category">>, {s, <<"Amazon Web Services">>}}]}
%% ]}],
%% [{return_consumed_capacity, total},
%% {out, record}]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec batch_write_item(batch_write_item_request_items(), batch_write_item_opts(), aws_config()) ->
                              batch_write_item_return().
batch_write_item(RequestItems, Opts, Config) ->
    %% Split caller options into AWS request parameters and local
    %% (ddb-level) options such as {out, record}.
    {AwsOpts, DdbOpts} = opts(batch_write_item_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.BatchWriteItem",
               [{<<"RequestItems">>, dynamize_batch_write_item_request_items(RequestItems)}]
               ++ AwsOpts),
    case out(Return,
             fun(Json, UOpts) -> undynamize_record(batch_write_item_record(), Json, UOpts) end,
             DdbOpts) of
        {simple, #ddb2_batch_write_item{unprocessed_items = [_|_]}} ->
            %% TODO resend unprocessed items automatically (or controlled by option).
            %% For now return an error - you can handle manually if you don't use simple.
            {error, unprocessed};
        {simple, Record} -> {ok, Record};
        {ok, _} = Out -> Out;
        {error, _} = Out -> Out
    end.
%%%------------------------------------------------------------------------------
%%% CreateTable
%%%------------------------------------------------------------------------------
-type local_secondary_index_def() :: {index_name(), range_key_name(), projection()}.
-type local_secondary_indexes() :: maybe_list(local_secondary_index_def()).
-type global_secondary_indexes() :: maybe_list(global_secondary_index_def()).

%% Dynamize one local secondary index definition. A local secondary index
%% shares the table's hash key; only its range key and projection are given.
-spec dynamize_local_secondary_index(hash_key_name(), local_secondary_index_def()) -> jsx:json_term().
dynamize_local_secondary_index(HashKey, {IndexName, RangeKey, Projection}) ->
    [{<<"IndexName">>, IndexName},
     {<<"KeySchema">>, dynamize_key_schema({HashKey, RangeKey})},
     {<<"Projection">>, dynamize_projection(Projection)}].
%% Dynamize the local secondary index definitions, reusing the table's
%% hash key from its key schema for every index.
-spec dynamize_local_secondary_indexes(key_schema(), local_secondary_indexes()) -> jsx:json_term().
dynamize_local_secondary_indexes({HashKey, _RangeKey}, Value) ->
    dynamize_maybe_list(fun(I) -> dynamize_local_secondary_index(HashKey, I) end, Value).
%% Dynamize the global secondary index definitions.
-spec dynamize_global_secondary_indexes(global_secondary_indexes()) -> jsx:json_term().
dynamize_global_secondary_indexes(Value) ->
    dynamize_maybe_list(fun dynamize_global_secondary_index/1, Value).
%% Options accepted by create_table/6,7.
-type create_table_opt() :: {local_secondary_indexes, local_secondary_indexes()} |
                            {global_secondary_indexes, global_secondary_indexes()} |
                            {stream_specification, stream_specification()}.
-type create_table_opts() :: [create_table_opt()].

%% AWS-level option table; parameterized on the table key schema because
%% local secondary indexes reuse the table's hash key.
-spec create_table_opts(key_schema()) -> opt_table().
create_table_opts(KeySchema) ->
    [{local_secondary_indexes, <<"LocalSecondaryIndexes">>,
      fun(V) -> dynamize_local_secondary_indexes(KeySchema, V) end},
     {global_secondary_indexes, <<"GlobalSecondaryIndexes">>,
      fun dynamize_global_secondary_indexes/1},
     {stream_specification, <<"StreamSpecification">>, fun dynamize_stream_specification/1}].
%% Decoder table for a CreateTable response body.
-spec create_table_record() -> record_desc().
create_table_record() ->
    {#ddb2_create_table{},
     [{<<"TableDescription">>, #ddb2_create_table.table_description,
       fun(V, Opts) -> undynamize_record(table_description_record(), V, Opts) end}
     ]}.
%% Simple return is the table description from the response.
-type create_table_return() :: ddb_return(#ddb2_create_table{}, #ddb2_table_description{}).

%% @equiv create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, [], default_config())
-spec create_table(table_name(), attr_defs(), key_schema(), read_units(), write_units())
    -> create_table_return().
create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits) ->
    create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, [], default_config()).

%% @equiv create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, Opts, default_config())
-spec create_table(table_name(), attr_defs(), key_schema(), read_units(), write_units(),
                   create_table_opts())
    -> create_table_return().
create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, Opts) ->
    create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_CreateTable.html]
%%
%% ===Example===
%%
%% Create a table with hash key "ForumName" and range key "Subject"
%% with a local secondary index on "LastPostDateTime"
%% and a global secondary index on "Subject" as hash key and "LastPostDateTime"
%% as range key, read and write capacity 10, projecting all fields
%%
%% `
%% {ok, Description} =
%% erlcloud_ddb2:create_table(
%% <<"Thread">>,
%% [{<<"ForumName">>, s},
%% {<<"Subject">>, s},
%% {<<"LastPostDateTime">>, s}],
%% {<<"ForumName">>, <<"Subject">>},
%% 5,
%% 5,
%% [{local_secondary_indexes,
%% [{<<"LastPostIndex">>, <<"LastPostDateTime">>, keys_only}]},
%% {global_secondary_indexes, [
%% {<<"SubjectTimeIndex">>, {<<"Subject">>, <<"LastPostDateTime">>}, all, 10, 10}
%% ]}
%% ]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec create_table(table_name(), attr_defs(), key_schema(), read_units(), write_units(),
                   create_table_opts(), aws_config())
    -> create_table_return().
create_table(Table, AttrDefs, KeySchema, ReadUnits, WriteUnits, Opts, Config) ->
    %% The option table needs KeySchema so local secondary indexes can
    %% reuse the table's hash key.
    {AwsOpts, DdbOpts} = opts(create_table_opts(KeySchema), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.CreateTable",
               [{<<"TableName">>, Table},
                {<<"AttributeDefinitions">>, dynamize_attr_defs(AttrDefs)},
                {<<"KeySchema">>, dynamize_key_schema(KeySchema)},
                {<<"ProvisionedThroughput">>, dynamize_provisioned_throughput({ReadUnits, WriteUnits})}]
               ++ AwsOpts),
    out(Return, fun(Json, UOpts) -> undynamize_record(create_table_record(), Json, UOpts) end,
        DdbOpts, #ddb2_create_table.table_description).
%%%------------------------------------------------------------------------------
%%% DeleteItem
%%%------------------------------------------------------------------------------
%% Options accepted by delete_item/2,3,4.
-type delete_item_opt() :: expression_attribute_names_opt() |
                           expression_attribute_values_opt() |
                           condition_expression_opt() |
                           conditional_op_opt() |
                           expected_opt() |
                           {return_values, none | all_old} |
                           return_consumed_capacity_opt() |
                           return_item_collection_metrics_opt() |
                           out_opt().
-type delete_item_opts() :: [delete_item_opt()].

%% AWS-level option table for DeleteItem.
-spec delete_item_opts() -> opt_table().
delete_item_opts() ->
    [expression_attribute_names_opt(),
     expression_attribute_values_opt(),
     condition_expression_opt(),
     conditional_op_opt(),
     expected_opt(),
     {return_values, <<"ReturnValues">>, fun dynamize_return_value/1},
     return_consumed_capacity_opt(),
     return_item_collection_metrics_opt()].
%% Decoder table for a DeleteItem response body.
-spec delete_item_record() -> record_desc().
delete_item_record() ->
    {#ddb2_delete_item{},
     [{<<"Attributes">>, #ddb2_delete_item.attributes, fun undynamize_item/2},
      {<<"ConsumedCapacity">>, #ddb2_delete_item.consumed_capacity, fun undynamize_consumed_capacity/2},
      {<<"ItemCollectionMetrics">>, #ddb2_delete_item.item_collection_metrics,
       fun undynamize_item_collection_metrics/2}
     ]}.
%% Simple return is the deleted item's attributes (per return_values).
-type delete_item_return() :: ddb_return(#ddb2_delete_item{}, out_item()).

%% @equiv delete_item(Table, Key, [], default_config())
-spec delete_item(table_name(), key()) -> delete_item_return().
delete_item(Table, Key) ->
    delete_item(Table, Key, [], default_config()).

%% @equiv delete_item(Table, Key, Opts, default_config())
-spec delete_item(table_name(), key(), delete_item_opts()) -> delete_item_return().
delete_item(Table, Key, Opts) ->
    delete_item(Table, Key, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteItem.html]
%%
%% ===Example===
%%
%% Delete an item from the "Thread" table if it doesn't have a
%% "Replies" attribute.
%%
%% `
%% {ok, Item} =
%% erlcloud_ddb2:delete_item(
%% <<"Thread">>,
%% [{<<"ForumName">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Subject">>, {s, <<"How do I update multiple items?">>}}],
%% [{return_values, all_old},
%% {condition_expression, <<"attribute_not_exists(Replies)">>}]),
%% '
%%
%% The ConditionExpression option can also be used in place of the legacy
%% ConditionalOperator or Expected parameters.
%%
%% `
%% {ok, Item} =
%% erlcloud_ddb2:delete_item(
%% <<"Thread">>,
%% [{<<"ForumName">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Subject">>, {s, <<"How do I update multiple items?">>}}],
%% [{return_values, all_old},
%% {condition_expression, <<"attribute_not_exists(#replies)">>},
%% {expression_attribute_names, [{<<"#replies">>, <<"Replies">>}]}]),
%% '
%%
%% @end
%%------------------------------------------------------------------------------
-spec delete_item(table_name(), key(), delete_item_opts(), aws_config()) -> delete_item_return().
%% Issue the DeleteItem request; the simple return defaults to {ok, []}
%% when the response carries no Attributes.
delete_item(Table, Key, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(delete_item_opts(), Opts),
    Request = [{<<"TableName">>, Table},
               {<<"Key">>, dynamize_key(Key)}
               | AwsOpts],
    Response = erlcloud_ddb_impl:request(Config, "DynamoDB_20120810.DeleteItem", Request),
    Undynamize = fun(Json, UOpts) -> undynamize_record(delete_item_record(), Json, UOpts) end,
    out(Response, Undynamize, DdbOpts, #ddb2_delete_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% DeleteTable
%%%------------------------------------------------------------------------------
%% Decoder table for a DeleteTable response body.
-spec delete_table_record() -> record_desc().
delete_table_record() ->
    {#ddb2_delete_table{},
     [{<<"TableDescription">>, #ddb2_delete_table.table_description,
       fun(V, Opts) -> undynamize_record(table_description_record(), V, Opts) end}
     ]}.
%% Simple return is the table description from the response.
-type delete_table_return() :: ddb_return(#ddb2_delete_table{}, #ddb2_table_description{}).

%% @equiv delete_table(Table, [], default_config())
-spec delete_table(table_name()) -> delete_table_return().
delete_table(Table) ->
    delete_table(Table, [], default_config()).

%% @equiv delete_table(Table, Opts, default_config())
-spec delete_table(table_name(), ddb_opts()) -> delete_table_return().
delete_table(Table, Opts) ->
    delete_table(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DeleteTable.html]
%%
%% ===Example===
%%
%% Delete "Reply" table.
%%
%% `
%% {ok, Description} =
%% erlcloud_ddb2:delete_table(<<"Reply">>),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec delete_table(table_name(), ddb_opts(), aws_config()) -> delete_table_return().
%% Issue the DeleteTable request and decode the result.
delete_table(Table, Opts, Config) ->
    %% DeleteTable has no AWS-level options, so the split must leave an
    %% empty AWS list; anything else is a caller error.
    {[], DdbOpts} = opts([], Opts),
    Request = [{<<"TableName">>, Table}],
    Response = erlcloud_ddb_impl:request(Config, "DynamoDB_20120810.DeleteTable", Request),
    Undynamize = fun(Json, UOpts) -> undynamize_record(delete_table_record(), Json, UOpts) end,
    out(Response, Undynamize, DdbOpts, #ddb2_delete_table.table_description).
%%%------------------------------------------------------------------------------
%%% DescribeLimits
%%%------------------------------------------------------------------------------
%% Decoder table for a DescribeLimits response body.
-spec describe_limits_record() -> record_desc().
describe_limits_record() ->
    {#ddb2_describe_limits{},
     [{<<"AccountMaxReadCapacityUnits">>, #ddb2_describe_limits.account_max_read_capacity_units, fun id/2},
      {<<"AccountMaxWriteCapacityUnits">>, #ddb2_describe_limits.account_max_write_capacity_units, fun id/2},
      {<<"TableMaxReadCapacityUnits">>, #ddb2_describe_limits.table_max_read_capacity_units, fun id/2},
      {<<"TableMaxWriteCapacityUnits">>, #ddb2_describe_limits.table_max_write_capacity_units, fun id/2}
     ]}.
%% Simple return is the full limits record.
-type describe_limits_return() :: ddb_return(#ddb2_describe_limits{}, #ddb2_describe_limits{}).

%% @equiv describe_limits([], default_config())
-spec describe_limits() -> describe_limits_return().
describe_limits() ->
    describe_limits([], default_config()).

%% @equiv describe_limits(Opts, default_config())
-spec describe_limits(ddb_opts()) -> describe_limits_return().
describe_limits(Opts) ->
    describe_limits(Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeLimits.html]
%%
%% ===Example===
%%
%% Describe the current provisioned-capacity limits for your AWS account.
%%
%% `
%% {ok, Limits} =
%% erlcloud_ddb2:describe_limits(),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec describe_limits(ddb_opts(), aws_config()) -> describe_limits_return().
%% Issue the DescribeLimits request; the simple return is the full record.
describe_limits(Opts, Config) ->
    %% DescribeLimits has no AWS-level options; only ddb-level options
    %% (such as {out, record}) may be supplied.
    {[], DdbOpts} = opts([], Opts),
    Response = erlcloud_ddb_impl:request(Config, "DynamoDB_20120810.DescribeLimits", []),
    Undynamize = fun(Json, UOpts) -> undynamize_record(describe_limits_record(), Json, UOpts) end,
    case out(Response, Undynamize, DdbOpts) of
        {simple, Record} -> {ok, Record};
        {ok, _} = Result -> Result;
        {error, _} = Result -> Result
    end.
%%%------------------------------------------------------------------------------
%%% DescribeTable
%%%------------------------------------------------------------------------------
%% Decoder table for a DescribeTable response body.
-spec describe_table_record() -> record_desc().
describe_table_record() ->
    {#ddb2_describe_table{},
     [{<<"Table">>, #ddb2_describe_table.table,
       fun(V, Opts) -> undynamize_record(table_description_record(), V, Opts) end}
     ]}.
%% Simple return is the table description from the response.
-type describe_table_return() :: ddb_return(#ddb2_describe_table{}, #ddb2_table_description{}).

%% @equiv describe_table(Table, [], default_config())
-spec describe_table(table_name()) -> describe_table_return().
describe_table(Table) ->
    describe_table(Table, [], default_config()).

%% @equiv describe_table(Table, Opts, default_config())
-spec describe_table(table_name(), ddb_opts()) -> describe_table_return().
describe_table(Table, Opts) ->
    describe_table(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeTable.html]
%%
%% ===Example===
%%
%% Describe "Thread" table.
%%
%% `
%% {ok, Description} =
%% erlcloud_ddb2:describe_table(<<"Thread">>),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec describe_table(table_name(), ddb_opts(), aws_config()) -> describe_table_return().
%% Issue the DescribeTable request and decode the result.
describe_table(Table, Opts, Config) ->
    %% DescribeTable has no AWS-level options, so the split must leave an
    %% empty AWS list; anything else is a caller error.
    {[], DdbOpts} = opts([], Opts),
    Request = [{<<"TableName">>, Table}],
    Response = erlcloud_ddb_impl:request(Config, "DynamoDB_20120810.DescribeTable", Request),
    Undynamize = fun(Json, UOpts) -> undynamize_record(describe_table_record(), Json, UOpts) end,
    out(Response, Undynamize, DdbOpts, #ddb2_describe_table.table).
%%%------------------------------------------------------------------------------
%%% DescribeTimeToLive
%%%------------------------------------------------------------------------------
-type time_to_live_status() :: enabled | disabled | enabling | disabling.

%% Map a DynamoDB TimeToLiveStatus string onto its atom form; written as
%% function clauses to match undynamize_index_status/2. Any other value
%% crashes with function_clause, surfacing the bad input.
-spec undynamize_time_to_live_status(binary(), undynamize_opts()) -> time_to_live_status().
undynamize_time_to_live_status(<<"ENABLED">>, _) -> enabled;
undynamize_time_to_live_status(<<"ENABLING">>, _) -> enabling;
undynamize_time_to_live_status(<<"DISABLED">>, _) -> disabled;
undynamize_time_to_live_status(<<"DISABLING">>, _) -> disabling.
%% Decoder table for a TimeToLiveDescription JSON object.
-spec time_to_live_description_record() -> record_desc().
time_to_live_description_record() ->
    {#ddb2_time_to_live_description{},
     [{<<"AttributeName">>, #ddb2_time_to_live_description.attribute_name, fun id/2},
      {<<"TimeToLiveStatus">>, #ddb2_time_to_live_description.time_to_live_status, fun undynamize_time_to_live_status/2}
     ]}.
%% Decoder table for a DescribeTimeToLive response body.
-spec describe_time_to_live_record() -> record_desc().
describe_time_to_live_record() ->
    {#ddb2_describe_time_to_live{},
     [{<<"TimeToLiveDescription">>, #ddb2_describe_time_to_live.time_to_live_description,
       fun(V, Opts) -> undynamize_record(time_to_live_description_record(), V, Opts) end}
     ]}.
%% Simple return is the time-to-live description from the response.
-type describe_time_to_live_return() :: ddb_return(#ddb2_describe_time_to_live{}, #ddb2_time_to_live_description{}).

%% @equiv describe_time_to_live(Table, [])
-spec describe_time_to_live(table_name()) -> describe_time_to_live_return().
describe_time_to_live(Table) ->
    describe_time_to_live(Table, []).

%% @equiv describe_time_to_live(Table, DbOpts, default_config())
-spec describe_time_to_live(table_name(), ddb_opts()) -> describe_time_to_live_return().
describe_time_to_live(Table, DbOpts) ->
    describe_time_to_live(Table, DbOpts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_DescribeTimeToLive.html]
%%
%% ===Example===
%%
%% Describe TimeToLive for table "SessionData".
%% `
%% {ok, Description} = erlcloud_ddb2:describe_time_to_live(<<"SessionData">>),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec describe_time_to_live(table_name(), ddb_opts(), aws_config()) -> describe_time_to_live_return().
%% Issue the DescribeTimeToLive request and decode the result.
describe_time_to_live(Table, DbOpts, Config) ->
    %% Consistency fix: every other no-AWS-option operation in this module
    %% (delete_table/3, describe_table/3, describe_limits/2) validates the
    %% caller's options with opts([], Opts) so that unknown/AWS-level
    %% options fail loudly instead of being silently passed to out/4.
    {[], DdbOpts} = opts([], DbOpts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.DescribeTimeToLive",
               [{<<"TableName">>, Table}]),
    out(Return, fun(Json, UOpts) -> undynamize_record(describe_time_to_live_record(), Json, UOpts) end,
        DdbOpts, #ddb2_describe_time_to_live.time_to_live_description).
%%%------------------------------------------------------------------------------
%%% GetItem
%%%------------------------------------------------------------------------------
%% Options accepted by get_item/2,3,4.
-type get_item_opt() :: expression_attribute_names_opt() |
                        projection_expression_opt() |
                        attributes_to_get_opt() |
                        consistent_read_opt() |
                        return_consumed_capacity_opt() |
                        out_opt().
-type get_item_opts() :: [get_item_opt()].

%% AWS-level option table for GetItem.
-spec get_item_opts() -> opt_table().
get_item_opts() ->
    [expression_attribute_names_opt(),
     projection_expression_opt(),
     attributes_to_get_opt(),
     consistent_read_opt(),
     return_consumed_capacity_opt()].
%% Decoder table for a GetItem response body.
-spec get_item_record() -> record_desc().
get_item_record() ->
    {#ddb2_get_item{},
     [{<<"Item">>, #ddb2_get_item.item, fun undynamize_item/2},
      {<<"ConsumedCapacity">>, #ddb2_get_item.consumed_capacity, fun undynamize_consumed_capacity/2}
     ]}.
%% Simple return is the fetched item.
-type get_item_return() :: ddb_return(#ddb2_get_item{}, out_item()).

%% @equiv get_item(Table, Key, [], default_config())
-spec get_item(table_name(), key()) -> get_item_return().
get_item(Table, Key) ->
    get_item(Table, Key, [], default_config()).

%% @equiv get_item(Table, Key, Opts, default_config())
-spec get_item(table_name(), key(), get_item_opts()) -> get_item_return().
get_item(Table, Key, Opts) ->
    get_item(Table, Key, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_GetItem.html]
%%
%% ===Example===
%%
%% Get selected attributes from an item in the "Thread" table.
%%
%% `
%% {ok, Item} =
%% erlcloud_ddb2:get_item(
%% <<"Thread">>,
%% [{<<"ForumName">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Subject">>, {s, <<"How do I update multiple items?">>}}],
%% [{projection_expression, <<"LastPostDateTime, Message, Tags">>},
%% consistent_read,
%% {return_consumed_capacity, total}]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec get_item(table_name(), key(), get_item_opts(), aws_config()) -> get_item_return().
%% Issue the GetItem request; the simple return defaults to {ok, []}
%% when no item matches the key.
get_item(Table, Key, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(get_item_opts(), Opts),
    Request = [{<<"TableName">>, Table},
               {<<"Key">>, dynamize_key(Key)}
               | AwsOpts],
    Response = erlcloud_ddb_impl:request(Config, "DynamoDB_20120810.GetItem", Request),
    Undynamize = fun(Json, UOpts) -> undynamize_record(get_item_record(), Json, UOpts) end,
    out(Response, Undynamize, DdbOpts, #ddb2_get_item.item, {ok, []}).
%%%------------------------------------------------------------------------------
%%% ListTables
%%%------------------------------------------------------------------------------
-type list_tables_opt() :: {limit, pos_integer()} |
                           {exclusive_start_table_name, table_name() | undefined} |
                           out_opt().
-type list_tables_opts() :: [list_tables_opt()].
%% Option table for list_tables; see get_item_opts/0 for the format.
-spec list_tables_opts() -> opt_table().
list_tables_opts() ->
    [{limit, <<"Limit">>, fun id/1},
     {exclusive_start_table_name, <<"ExclusiveStartTableName">>, fun id/1}].
%% Descriptor for decoding a ListTables response into #ddb2_list_tables{}.
-spec list_tables_record() -> record_desc().
list_tables_record() ->
    {#ddb2_list_tables{},
     [{<<"TableNames">>, #ddb2_list_tables.table_names, fun id/2},
      {<<"LastEvaluatedTableName">>, #ddb2_list_tables.last_evaluated_table_name, fun id/2}
     ]}.
-type list_tables_return() :: ddb_return(#ddb2_list_tables{}, [table_name()]).
%% Convenience wrappers filling in default options and AWS config.
-spec list_tables() -> list_tables_return().
list_tables() ->
    list_tables([], default_config()).
-spec list_tables(list_tables_opts()) -> list_tables_return().
list_tables(Opts) ->
    list_tables(Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_ListTables.html]
%%
%% ===Example===
%%
%% Get the next 3 table names after "Forum".
%%
%% `
%% {ok, Tables} =
%% erlcloud_ddb2:list_tables(
%% [{limit, 3},
%% {exclusive_start_table_name, <<"Forum">>}]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec list_tables(list_tables_opts(), aws_config()) -> list_tables_return().
list_tables(Opts, Config) ->
    %% ListTables has no required request fields; the whole body comes
    %% from the translated options.
    {AwsOpts, DdbOpts} = opts(list_tables_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.ListTables",
               AwsOpts),
    out(Return, fun(Json, UOpts) -> undynamize_record(list_tables_record(), Json, UOpts) end,
        DdbOpts, #ddb2_list_tables.table_names, {ok, []}).
%%%------------------------------------------------------------------------------
%%% PutItem
%%%------------------------------------------------------------------------------
-type put_item_opt() :: expression_attribute_names_opt() |
                        expression_attribute_values_opt() |
                        condition_expression_opt() |
                        conditional_op_opt() |
                        expected_opt() |
                        {return_values, none | all_old} |
                        return_consumed_capacity_opt() |
                        return_item_collection_metrics_opt() |
                        out_opt().
-type put_item_opts() :: [put_item_opt()].
%% Option table for put_item; see get_item_opts/0 for the format.
-spec put_item_opts() -> opt_table().
put_item_opts() ->
    [expression_attribute_names_opt(),
     expression_attribute_values_opt(),
     condition_expression_opt(),
     conditional_op_opt(),
     expected_opt(),
     {return_values, <<"ReturnValues">>, fun dynamize_return_value/1},
     return_consumed_capacity_opt(),
     return_item_collection_metrics_opt()].
%% Descriptor for decoding a PutItem response into #ddb2_put_item{}.
-spec put_item_record() -> record_desc().
put_item_record() ->
    {#ddb2_put_item{},
     [{<<"Attributes">>, #ddb2_put_item.attributes, fun undynamize_item/2},
      {<<"ConsumedCapacity">>, #ddb2_put_item.consumed_capacity, fun undynamize_consumed_capacity/2},
      {<<"ItemCollectionMetrics">>, #ddb2_put_item.item_collection_metrics,
       fun undynamize_item_collection_metrics/2}
     ]}.
-type put_item_return() :: ddb_return(#ddb2_put_item{}, out_item()).
%% Convenience wrappers filling in default options and AWS config.
-spec put_item(table_name(), in_item()) -> put_item_return().
put_item(Table, Item) ->
    put_item(Table, Item, [], default_config()).
-spec put_item(table_name(), in_item(), put_item_opts()) -> put_item_return().
put_item(Table, Item, Opts) ->
    put_item(Table, Item, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_PutItem.html]
%%
%% ===Example===
%%
%% Put an item in the "Thread" table if it does not already exist.
%%
%% `
%% {ok, []} =
%% erlcloud_ddb2:put_item(
%% <<"Thread">>,
%% [{<<"LastPostedBy">>, <<"<EMAIL>">>},
%% {<<"ForumName">>, <<"Amazon DynamoDB">>},
%% {<<"LastPostDateTime">>, <<"201303190422">>},
%% {<<"Tags">>, {ss, [<<"Update">>, <<"Multiple Items">>, <<"HelpMe">>]}},
%% {<<"Subject">>, <<"How do I update multiple items?">>},
%% {<<"Message">>,
%% <<"I want to update multiple items in a single API call. What is the best way to do that?">>}],
%% [{condition_expression, <<"ForumName <> :f and Subject <> :s">>},
%% {expression_attribute_values,
%% [{<<":f">>, <<"Amazon DynamoDB">>},
%% {<<":s">>, <<"How do I update multiple items?">>}]}]),
%% '
%%
%% The ConditionExpression option can be used in place of the legacy Expected parameter.
%%
%% `
%% {ok, []} =
%% erlcloud_ddb2:put_item(
%% <<"Thread">>,
%% [{<<"LastPostedBy">>, <<"<EMAIL>">>},
%% {<<"ForumName">>, <<"Amazon DynamoDB">>},
%% {<<"LastPostDateTime">>, <<"201303190422">>},
%% {<<"Tags">>, {ss, [<<"Update">>, <<"Multiple Items">>, <<"HelpMe">>]}},
%% {<<"Subject">>, <<"How do I update multiple items?">>},
%% {<<"Message">>,
%% <<"I want to update multiple items in a single API call. What is the best way to do that?">>}],
%% [{condition_expression, <<"#forum <> :forum AND attribute_not_exists(#subject)">>},
%% {expression_attribute_names, [{<<"#forum">>, <<"ForumName">>}, {<<"#subject">>, <<"Subject">>}]},
%% {expression_attribute_values, [{<<":forum">>, <<"Amazon DynamoDB">>}]}]),
%% '
%%
%% @end
%%------------------------------------------------------------------------------
-spec put_item(table_name(), in_item(), put_item_opts(), aws_config()) -> put_item_return().
put_item(Table, Item, Opts, Config) ->
    %% Split caller options into AWS request fields and local output options.
    {AwsOpts, DdbOpts} = opts(put_item_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.PutItem",
               [{<<"TableName">>, Table},
                {<<"Item">>, dynamize_item(Item)}]
               ++ AwsOpts),
    %% {ok, []} is the default result when no "Attributes" are returned.
    out(Return, fun(Json, UOpts) -> undynamize_record(put_item_record(), Json, UOpts) end, DdbOpts,
        #ddb2_put_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% Query
%%%------------------------------------------------------------------------------
-type q_opt() :: expression_attribute_names_opt() |
                 expression_attribute_values_opt() |
                 projection_expression_opt() |
                 attributes_to_get_opt() |
                 consistent_read_opt() |
                 {filter_expression, expression()} |
                 conditional_op_opt() |
                 {query_filter, conditions()} |
                 {limit, pos_integer()} |
                 {exclusive_start_key, key() | undefined} |
                 boolean_opt(scan_index_forward) |
                 {index_name, index_name()} |
                 {select, select()} |
                 return_consumed_capacity_opt() |
                 out_opt().
-type q_opts() :: [q_opt()].
%% Option table for q (Query); see get_item_opts/0 for the format.
-spec q_opts() -> opt_table().
q_opts() ->
    [expression_attribute_names_opt(),
     expression_attribute_values_opt(),
     projection_expression_opt(),
     attributes_to_get_opt(),
     consistent_read_opt(),
     filter_expression_opt(),
     conditional_op_opt(),
     {query_filter, <<"QueryFilter">>, fun dynamize_conditions/1},
     {limit, <<"Limit">>, fun id/1},
     {exclusive_start_key, <<"ExclusiveStartKey">>, fun dynamize_key/1},
     {scan_index_forward, <<"ScanIndexForward">>, fun id/1},
     {index_name, <<"IndexName">>, fun id/1},
     {select, <<"Select">>, fun dynamize_select/1},
     return_consumed_capacity_opt()
    ].
%% A binary argument is a KeyConditionExpression string; anything else
%% is treated as a legacy KeyConditions condition list.
-spec dynamize_q_key_conditions_or_expression(conditions() | expression()) -> json_pair().
dynamize_q_key_conditions_or_expression(Arg) ->
    case is_binary(Arg) of
        true ->
            {<<"KeyConditionExpression">>, dynamize_expression(Arg)};
        false ->
            {<<"KeyConditions">>, dynamize_conditions(Arg)}
    end.
%% Descriptor for decoding a Query response into #ddb2_q{}.
-spec q_record() -> record_desc().
q_record() ->
    {#ddb2_q{},
     [{<<"ConsumedCapacity">>, #ddb2_q.consumed_capacity, fun undynamize_consumed_capacity/2},
      {<<"Count">>, #ddb2_q.count, fun id/2},
      {<<"Items">>, #ddb2_q.items, fun(V, Opts) -> [undynamize_item(I, Opts) || I <- V] end},
      {<<"LastEvaluatedKey">>, #ddb2_q.last_evaluated_key, fun undynamize_typed_key/2},
      {<<"ScannedCount">>, #ddb2_q.scanned_count, fun id/2}
     ]}.
-type q_return() :: ddb_return(#ddb2_q{}, [out_item()]).
%% Convenience wrappers filling in default options and AWS config.
-spec q(table_name(), conditions() | expression()) -> q_return().
q(Table, KeyConditionsOrExpression) ->
    q(Table, KeyConditionsOrExpression, [], default_config()).
-spec q(table_name(), conditions() | expression(), q_opts()) -> q_return().
q(Table, KeyConditionsOrExpression, Opts) ->
    q(Table, KeyConditionsOrExpression, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Query.html]
%%
%% KeyConditions are treated as a required parameter, which appears to
%% be the case despite what the documentation says.
%%
%% ===Example===
%%
%% Get up to 3 items from the "Thread" table with "ForumName" of
%% "Amazon DynamoDB" and "LastPostDateTime" between specified
%% value. Use the "LastPostIndex".
%%
%% `
%% {ok, Items} =
%% erlcloud_ddb2:q(
%% <<"Thread">>,
%% <<"ForumName = :n AND LastPostDateTime BETWEEN :t1 AND :t2">>,
%% [{expression_attribute_values,
%% [{<<":n">>, <<"Amazon DynamoDB">>},
%% {<<":t1">>, <<"20130101">>},
%% {<<":t2">>, <<"20130115">>}]},
%% {index_name, <<"LastPostIndex">>},
%% {select, all_attributes},
%% {limit, 3},
%% {consistent_read, true},
%% {filter_expression, <<"#user = :user">>},
%% {expression_attribute_names, [{<<"#user">>, <<"User">>}]},
%% {expression_attribute_values, [{<<":user">>, <<"User A">>}]}]),
%% '
%%
%% @end
%%------------------------------------------------------------------------------
-spec q(table_name(), conditions() | expression(), q_opts(), aws_config()) -> q_return().
q(Table, KeyConditionsOrExpression, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(q_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.Query",
               %% KeyConditions/KeyConditionExpression is a required field
               %% here, chosen by the shape of the argument.
               [{<<"TableName">>, Table},
                dynamize_q_key_conditions_or_expression(KeyConditionsOrExpression)]
               ++ AwsOpts),
    out(Return, fun(Json, UOpts) -> undynamize_record(q_record(), Json, UOpts) end, DdbOpts,
        #ddb2_q.items, {ok, []}).
%%%------------------------------------------------------------------------------
%%% Scan
%%%------------------------------------------------------------------------------
-type scan_opt() :: expression_attribute_names_opt() |
                    expression_attribute_values_opt() |
                    projection_expression_opt() |
                    attributes_to_get_opt() |
                    consistent_read_opt() |
                    {filter_expression, expression()} |
                    conditional_op_opt() |
                    {scan_filter, conditions()} |
                    {limit, pos_integer()} |
                    {exclusive_start_key, key() | undefined} |
                    {segment, non_neg_integer()} |
                    {total_segments, pos_integer()} |
                    {index_name, index_name()} |
                    {select, select()} |
                    return_consumed_capacity_opt() |
                    out_opt().
-type scan_opts() :: [scan_opt()].
%% Option table for scan; see get_item_opts/0 for the format.
-spec scan_opts() -> opt_table().
scan_opts() ->
    [expression_attribute_names_opt(),
     expression_attribute_values_opt(),
     projection_expression_opt(),
     attributes_to_get_opt(),
     consistent_read_opt(),
     filter_expression_opt(),
     conditional_op_opt(),
     {scan_filter, <<"ScanFilter">>, fun dynamize_conditions/1},
     {limit, <<"Limit">>, fun id/1},
     {exclusive_start_key, <<"ExclusiveStartKey">>, fun dynamize_key/1},
     {segment, <<"Segment">>, fun id/1},
     {total_segments, <<"TotalSegments">>, fun id/1},
     {index_name, <<"IndexName">>, fun id/1},
     {select, <<"Select">>, fun dynamize_select/1},
     return_consumed_capacity_opt()
    ].
%% Descriptor for decoding a Scan response into #ddb2_scan{}.
-spec scan_record() -> record_desc().
scan_record() ->
    {#ddb2_scan{},
     [{<<"ConsumedCapacity">>, #ddb2_scan.consumed_capacity, fun undynamize_consumed_capacity/2},
      {<<"Count">>, #ddb2_scan.count, fun id/2},
      {<<"Items">>, #ddb2_scan.items, fun(V, Opts) -> [undynamize_item(I, Opts) || I <- V] end},
      {<<"LastEvaluatedKey">>, #ddb2_scan.last_evaluated_key, fun undynamize_typed_key/2},
      {<<"ScannedCount">>, #ddb2_scan.scanned_count, fun id/2}
     ]}.
-type scan_return() :: ddb_return(#ddb2_scan{}, [out_item()]).
%% Convenience wrappers filling in default options and AWS config.
-spec scan(table_name()) -> scan_return().
scan(Table) ->
    scan(Table, [], default_config()).
-spec scan(table_name(), scan_opts()) -> scan_return().
scan(Table, Opts) ->
    scan(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_Scan.html]
%%
%% ===Example===
%%
%% Return all items in the "Reply" table.
%%
%% `
%% {ok, Record} =
%% erlcloud_ddb2:scan(
%% <<"Reply">>,
%% [{return_consumed_capacity, total},
%% {out, record}]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec scan(table_name(), scan_opts(), aws_config()) -> scan_return().
scan(Table, Opts, Config) ->
    %% TableName is the only required Scan field; the rest are options.
    {AwsOpts, DdbOpts} = opts(scan_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.Scan",
               [{<<"TableName">>, Table}]
               ++ AwsOpts),
    out(Return, fun(Json, UOpts) -> undynamize_record(scan_record(), Json, UOpts) end, DdbOpts,
        #ddb2_scan.items, {ok, []}).
%%%------------------------------------------------------------------------------
%%% UpdateItem
%%%------------------------------------------------------------------------------
%% Types for legacy AttributeUpdates support in update_item.
-type update_action() :: put | add | delete.
-type in_update() :: {attr_name(), in_attr_value(), update_action()} | in_attr() | {attr_name(), delete}.
-type in_updates() :: maybe_list(in_update()).
-type json_update_action() :: {binary(), binary()}.
-type json_update() :: {attr_name(), [{binary(), [json_attr_value()]} | json_update_action()]}.
%% Map an update action atom to its DynamoDB JSON "Action" pair.
-spec dynamize_action(update_action()) -> json_update_action().
dynamize_action(Action) ->
    Verb = case Action of
               put -> <<"PUT">>;
               add -> <<"ADD">>;
               delete -> <<"DELETE">>
           end,
    {<<"Action">>, Verb}.
%% Convert one attribute update spec to its AttributeUpdates JSON form.
-spec dynamize_update(in_update()) -> json_update().
dynamize_update({Attr, Val, Act}) ->
    %% Explicit action triple.
    {Attr, [{<<"Value">>, [dynamize_value(Val)]}, dynamize_action(Act)]};
dynamize_update({Attr, delete}) ->
    %% Delete the whole attribute; no value is sent.
    {Attr, [dynamize_action(delete)]};
dynamize_update({Attr, Val}) ->
    %% A bare name/value pair defaults to the put action.
    {Attr, [{<<"Value">>, [dynamize_value(Val)]}, dynamize_action(put)]}.
%% Convert a single update or a list of updates.
-spec dynamize_updates(in_updates()) -> [json_update()].
dynamize_updates(UpdateSpecs) ->
    dynamize_maybe_list(fun dynamize_update/1, UpdateSpecs).
%% Produce the update part of an UpdateItem request: a binary becomes an
%% UpdateExpression, an empty update list is omitted entirely, and any
%% other term is a legacy AttributeUpdates spec.
-spec dynamize_update_item_updates_or_expression(in_updates() | expression()) -> [json_pair()].
dynamize_update_item_updates_or_expression(Expr) when is_binary(Expr) ->
    [{<<"UpdateExpression">>, dynamize_expression(Expr)}];
dynamize_update_item_updates_or_expression([]) ->
    [];
dynamize_update_item_updates_or_expression(Updates) ->
    [{<<"AttributeUpdates">>, dynamize_updates(Updates)}].
-type update_item_opt() :: expression_attribute_names_opt() |
                           expression_attribute_values_opt() |
                           condition_expression_opt() |
                           conditional_op_opt() |
                           expected_opt() |
                           {return_values, return_value()} |
                           return_consumed_capacity_opt() |
                           return_item_collection_metrics_opt() |
                           out_opt().
-type update_item_opts() :: [update_item_opt()].
%% Option table for update_item; see get_item_opts/0 for the format.
-spec update_item_opts() -> opt_table().
update_item_opts() ->
    [expression_attribute_names_opt(),
     expression_attribute_values_opt(),
     condition_expression_opt(),
     conditional_op_opt(),
     expected_opt(),
     {return_values, <<"ReturnValues">>, fun dynamize_return_value/1},
     return_consumed_capacity_opt(),
     return_item_collection_metrics_opt()].
%% Descriptor for decoding an UpdateItem response into #ddb2_update_item{}.
-spec update_item_record() -> record_desc().
update_item_record() ->
    {#ddb2_update_item{},
     [{<<"Attributes">>, #ddb2_update_item.attributes, fun undynamize_item/2},
      {<<"ConsumedCapacity">>, #ddb2_update_item.consumed_capacity, fun undynamize_consumed_capacity/2},
      {<<"ItemCollectionMetrics">>, #ddb2_update_item.item_collection_metrics,
       fun undynamize_item_collection_metrics/2}
     ]}.
-type update_item_return() :: ddb_return(#ddb2_update_item{}, out_item()).
%% Convenience wrappers filling in default options and AWS config.
-spec update_item(table_name(), key(), in_updates() | expression()) -> update_item_return().
update_item(Table, Key, UpdatesOrExpression) ->
    update_item(Table, Key, UpdatesOrExpression, [], default_config()).
-spec update_item(table_name(), key(), in_updates() | expression(), update_item_opts()) -> update_item_return().
update_item(Table, Key, UpdatesOrExpression, Opts) ->
    update_item(Table, Key, UpdatesOrExpression, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateItem.html]
%%
%% AttributeUpdates is treated as a required parameter because callers
%% will almost always provide it. If no updates are desired, you can
%% pass [] for that argument.
%%
%% ===Example===
%%
%% Update specific item in the "Thread" table by setting "LastPostBy"
%% if it has the expected previous value.
%%
%% `
%% {ok, Item} =
%% erlcloud_ddb2:update_item(
%% <<"Thread">>,
%% [{<<"ForumName">>, {s, <<"Amazon DynamoDB">>}},
%% {<<"Subject">>, {s, <<"How do I update multiple items?">>}}],
%% <<"set LastPostedBy = :val1">>,
%% [{condition_expression, <<"LastPostedBy = :val2">>},
%% {expression_attribute_values,
%% [{<<":val1">>, <<"<EMAIL>">>},
%% {<<":val2">>, <<"<EMAIL>">>}]},
%% {return_values, all_new}]),
%% '
%% @end
%%------------------------------------------------------------------------------
-spec update_item(table_name(), key(), in_updates() | expression(), update_item_opts(), aws_config())
                 -> update_item_return().
update_item(Table, Key, UpdatesOrExpression, Opts, Config) ->
    {AwsOpts, DdbOpts} = opts(update_item_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.UpdateItem",
               %% The update argument may contribute either an
               %% UpdateExpression or legacy AttributeUpdates (or nothing
               %% at all for an empty update list).
               [{<<"TableName">>, Table},
                {<<"Key">>, dynamize_key(Key)}]
               ++ dynamize_update_item_updates_or_expression(UpdatesOrExpression)
               ++ AwsOpts),
    out(Return, fun(Json, UOpts) -> undynamize_record(update_item_record(), Json, UOpts) end, DdbOpts,
        #ddb2_update_item.attributes, {ok, []}).
%%%------------------------------------------------------------------------------
%%% UpdateTable
%%%------------------------------------------------------------------------------
-type update_table_return() :: ddb_return(#ddb2_update_table{}, #ddb2_table_description{}).
-type global_secondary_index_update() :: {index_name(), read_units(), write_units()} |
                                         {index_name(), delete} |
                                         global_secondary_index_def().
-type global_secondary_index_updates() :: maybe_list(global_secondary_index_update()).
%% Convert one GSI update into its UpdateTable JSON form: a throughput
%% triple becomes an Update action, {Name, delete} a Delete action, and
%% any other term is taken as an index definition to Create.
-spec dynamize_global_secondary_index_update(global_secondary_index_update()) -> jsx:json_term().
dynamize_global_secondary_index_update({IndexName, ReadUnits, WriteUnits}) ->
    [{<<"Update">>, [
        {<<"IndexName">>, IndexName},
        {<<"ProvisionedThroughput">>, dynamize_provisioned_throughput({ReadUnits, WriteUnits})}
    ]}];
dynamize_global_secondary_index_update({IndexName, delete}) ->
    [{<<"Delete">>, [
        {<<"IndexName">>, IndexName}
    ]}];
dynamize_global_secondary_index_update(Index) ->
    [{<<"Create">>, dynamize_global_secondary_index(Index)}].
%% Convert a single GSI update or a list of them.
-spec dynamize_global_secondary_index_updates(global_secondary_index_updates()) -> jsx:json_term().
dynamize_global_secondary_index_updates(Updates) ->
    dynamize_maybe_list(fun dynamize_global_secondary_index_update/1, Updates).
-type update_table_opt() :: {provisioned_throughput, {read_units(), write_units()}} |
                            {attribute_definitions, attr_defs()} |
                            {global_secondary_index_updates, global_secondary_index_updates()} |
                            {stream_specification, stream_specification()} |
                            out_opt().
-type update_table_opts() :: [update_table_opt()].
%% Option table for update_table; see get_item_opts/0 for the format.
-spec update_table_opts() -> opt_table().
update_table_opts() ->
    [{provisioned_throughput, <<"ProvisionedThroughput">>, fun dynamize_provisioned_throughput/1},
     {attribute_definitions, <<"AttributeDefinitions">>, fun dynamize_attr_defs/1},
     {global_secondary_index_updates, <<"GlobalSecondaryIndexUpdates">>,
      fun dynamize_global_secondary_index_updates/1},
     {stream_specification, <<"StreamSpecification">>, fun dynamize_stream_specification/1}].
%% Descriptor for decoding an UpdateTable response.
-spec update_table_record() -> record_desc().
update_table_record() ->
    {#ddb2_update_table{},
     [{<<"TableDescription">>, #ddb2_update_table.table_description,
       fun(V, Opts) -> undynamize_record(table_description_record(), V, Opts) end}
     ]}.
%% Convenience wrapper: default AWS config.
-spec update_table(table_name(), update_table_opts()) -> update_table_return().
update_table(Table, Opts) ->
    update_table(Table, Opts, default_config()).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTable.html]
%%
%% ===Example===
%%
%% Update table "Thread" to have 10 units of read and write capacity.
%% Update secondary index `<<"SubjectIdx">>' to have 10 units of read and write capacity.
%% ```
%% erlcloud_ddb2:update_table(
%% <<"Thread">>,
%% [{provisioned_throughput, {10, 10}},
%% {global_secondary_index_updates, [{<<"SubjectIdx">>, 10, 10}]}])
%% '''
%% @end
%%------------------------------------------------------------------------------
%% update_table/3 is overloaded: the clauses are distinguished by the
%% second argument being an option list vs. a read-units integer.
-spec update_table(table_name(), update_table_opts(), aws_config()) -> update_table_return();
                  (table_name(), read_units(), write_units()) -> update_table_return().
update_table(Table, Opts, Config) when is_list(Opts) ->
    {AwsOpts, DdbOpts} = opts(update_table_opts(), Opts),
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.UpdateTable",
               [{<<"TableName">>, Table} | AwsOpts]),
    out(Return, fun(Json, UOpts) -> undynamize_record(update_table_record(), Json, UOpts) end,
        DdbOpts, #ddb2_update_table.table_description);
update_table(Table, ReadUnits, WriteUnits) ->
    update_table(Table, ReadUnits, WriteUnits, [], default_config()).
-spec update_table(table_name(), read_units(), write_units(), update_table_opts())
                  -> update_table_return().
update_table(Table, ReadUnits, WriteUnits, Opts) ->
    update_table(Table, ReadUnits, WriteUnits, Opts, default_config()).
-spec update_table(table_name(), non_neg_integer(), non_neg_integer(), update_table_opts(),
                   aws_config())
                  -> update_table_return().
update_table(Table, ReadUnits, WriteUnits, Opts, Config) ->
    %% Provisioned throughput is just another option on the generic call.
    update_table(Table, [{provisioned_throughput, {ReadUnits, WriteUnits}} | Opts], Config).
%%%------------------------------------------------------------------------------
%%% UpdateTimeToLive
%%%------------------------------------------------------------------------------
-type update_time_to_live_opt() :: {attribute_name, attr_name()} |
                                   {enabled, boolean()}.
-type update_time_to_live_opts() :: [update_time_to_live_opt()].
%% Guarded identity: asserts the TTL attribute name is a binary.
-spec dynamize_attribute_name(binary()) -> jsx:json_term().
dynamize_attribute_name(Name) when is_binary(Name) ->
    Name.
%% Guarded identity: asserts the TTL enabled flag is a boolean.
-spec dynamize_enable(boolean()) -> jsx:json_term().
dynamize_enable(Value) when is_boolean(Value) ->
    Value.
%% Option table for update_time_to_live.
-spec update_time_to_live_opts() -> opt_table().
update_time_to_live_opts() ->
    [{attribute_name, <<"AttributeName">>, fun dynamize_attribute_name/1},
     {enabled, <<"Enabled">>, fun dynamize_enable/1}].
%% Descriptor for the nested TimeToLiveSpecification response object.
-spec time_to_live_specification_record() -> record_desc().
time_to_live_specification_record() ->
    {#ddb2_time_to_live_specification{},
     [{<<"AttributeName">>, #ddb2_time_to_live_specification.attribute_name, fun id/2},
      {<<"Enabled">>, #ddb2_time_to_live_specification.enabled, fun id/2}
     ]}.
%% Descriptor for decoding an UpdateTimeToLive response.
-spec update_time_to_live_record() -> record_desc().
update_time_to_live_record() ->
    {#ddb2_update_time_to_live{},
     [{<<"TimeToLiveSpecification">>, #ddb2_update_time_to_live.time_to_live_specification,
       fun(V, Opts) -> undynamize_record(time_to_live_specification_record(), V, Opts) end}
     ]}.
-type update_time_to_live_return() :: ddb_return(#ddb2_update_time_to_live{}, #ddb2_time_to_live_specification{}).
%% Convenience wrapper: default AWS config.
-spec update_time_to_live(table_name(), update_time_to_live_opts()) -> update_time_to_live_return().
update_time_to_live(Table, Opts) ->
    update_time_to_live(Table, Opts, default_config()).
%% Convenience wrapper: attribute name and flag with an explicit config.
-spec update_time_to_live(table_name(), attr_name(), boolean(), aws_config()) -> update_time_to_live_return().
update_time_to_live(Table, AttributeName, Enabled, Config) ->
    update_time_to_live(Table, [{attribute_name, AttributeName}, {enabled, Enabled}], Config).
%%------------------------------------------------------------------------------
%% @doc
%% DynamoDB API:
%% [http://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_UpdateTimeToLive.html]
%%
%% ===Example===
%%
%% Enable TTL for table "SessionData" by setting attribute "ExpirationTime" as expiration date.
%% ```
%% erlcloud_ddb2:update_time_to_live(
%% <<"SessionData">>,
%% [{attribute_name, <<"ExpirationTime">>},
%% {enabled, true}])
%% '''
%% @end
%%------------------------------------------------------------------------------
%% update_time_to_live/3 is overloaded: the clauses are distinguished by
%% the second argument being an option list vs. an attribute name.
-spec update_time_to_live(table_name(), update_time_to_live_opts(), aws_config()) -> update_time_to_live_return();
                         (table_name(), attr_name(), boolean()) -> update_time_to_live_return().
update_time_to_live(Table, Opts, Config) when is_list(Opts) ->
    {AwsOpts, DdbOpts} = opts(update_time_to_live_opts(), Opts),
    %% The translated options become the nested TimeToLiveSpecification.
    Body = [{<<"TableName">>, Table}, {<<"TimeToLiveSpecification">>, AwsOpts}],
    Return = erlcloud_ddb_impl:request(
               Config,
               "DynamoDB_20120810.UpdateTimeToLive",
               Body),
    out(Return, fun(Json, UOpts) -> undynamize_record(update_time_to_live_record(), Json, UOpts) end,
        DdbOpts, #ddb2_update_time_to_live.time_to_live_specification);
update_time_to_live(Table, AttributeName, Enabled) ->
    update_time_to_live(Table, [{attribute_name, AttributeName}, {enabled, Enabled}]).
%% Coerce a binary, string (character list), or integer to a binary.
%% Any other term causes a function_clause error.
to_binary(Bin) when is_binary(Bin) ->
    Bin;
to_binary(Int) when is_integer(Int) ->
    integer_to_binary(Int);
to_binary(Str) when is_list(Str) ->
    list_to_binary(Str).
%% =====================================================================
%% Licensed under the Apache License, Version 2.0 (the "License"); you may
%% not use this file except in compliance with the License. You may obtain
%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% Alternatively, you may use this file under the terms of the GNU Lesser
%% General Public License (the "LGPL") as published by the Free Software
%% Foundation; either version 2.1, or (at your option) any later version.
%% If you wish to allow use of your version of this file only under the
%% terms of the LGPL, you should delete the provisions above and replace
%% them with the notice and other provisions required by the LGPL; see
%% <http://www.gnu.org/licenses/>. If you do not delete the provisions
%% above, a recipient may use your version of this file under the terms of
%% either the Apache License or the LGPL.
%%
%% @private
%% @copyright 2001-2003 <NAME>
%% @author <NAME> <<EMAIL>>
%% @see edoc
%% @end
%% =====================================================================
%% @doc EDoc wiki expansion, parsing and postprocessing of XML text.
%% Uses {@link //xmerl. XMerL}.
%% @end
%% Notes:
%%
%% * Whatever happens in this module, it must interact nicely with the
%% actual XML-parsing. It is not acceptable to break any existing and
%% legal XML markup so that it does not parse or is rendered wrong.
%%
%% * The focus should always be on making *documentation* easier to
%% write. No wiki notation should be introduced unless it is clear that
%% it is better than using plain XHTML, making typing less cumbersome
%% and the resulting text easier to read. The wiki notation should be a
%% small bag of easy-to-remember tricks for making XHTML documentation
%% easier to write, not a complete markup language in itself. As a
%% typical example, it is hardly worthwhile to introduce a special
%% notation like say, ""..."" for emphasized text, since <em>...</em> is
%% not much harder to write, not any less readable, and no more
%% difficult to remember, especially since emphasis is not very often
%% occurring in normal documentation.
%%
%% * The central reasoning for the code-quoting goes like this: I don't
%% want to have special escape characters within the quotes (like
%% backslash in C), to allow quoting of the quote characters themselves.
%% I also don't want to use the "`" character both for opening and
%% closing quotes. Therefore, you can either use `...' - and then you
%% cannot use the "'" character without ending the quote - or you can
%% use ``...'' - which allows single but not double "'" characters
%% within the quote. Whitespace is automatically removed from the
%% beginning and the end of the quoted strings; this allows you to write
%% things like "`` 'foo@bar' ''". Text that contains "''" has to be
%% written within <code>...</code>.
%%
%% To produce a single "`" character without starting a quote, write
%% "`'" (no space between "`" and "'").
%%
%% For verbatim/preformatted text, the ```...'''-quotes expand to
%% "<pre><![CDATA[...]]></pre>". The indentation at the start of the
%% quoted string is preserved; whitespace is stripped only at the end.
%% Whole leading lines of whitespace are however skipped.
-module(edoc_wiki).
-export([parse_xml/2, expand_text/2]).
-include("edoc.hrl").
-include_lib("xmerl/include/xmerl.hrl").
-define(BASE_HEADING, 3).
%% Parsing Wiki-XML with pre-and post-expansion.
%% Expand wiki markup in Data, parse the result as XML, and post-process
%% the content with par/1 (defined elsewhere in this module).
parse_xml(Data, Line) ->
    par(parse_xml_1(expand_text(Data, Line), Line)).
parse_xml_1(Text, Line) ->
    %% Wrap in a synthetic root element so a fragment parses as one document.
    Text1 = "<doc>" ++ Text ++ "</doc>",
    %% Any coding except "utf-8".
    Opts = [{line, Line}, {encoding, 'iso-8859-1'}],
    %% Old-style catch on purpose: a fatal parse failure surfaces as
    %% {'EXIT', {fatal, ...}}, other crashes as {'EXIT', Reason}, and a
    %% throw falls through to the final clause. Each becomes an EDoc error.
    case catch {ok, xmerl_scan:string(Text1, Opts)} of
        {ok, {E, _}} ->
            E#xmlElement.content;
        {'EXIT', {fatal, {Reason, L, _C}}} ->
            throw_error(L, {"XML parse error: ~p.", [Reason]});
        {'EXIT', Reason} ->
            throw_error(Line, {"error in XML parser: ~P.", [Reason, 10]});
        Other ->
            throw_error(Line, {"nocatch in XML parser: ~P.", [Other, 10]})
    end.
%% Expand wiki stuff in arbitrary text.
%% Expand wiki markup in the given text, which starts at source line
%% StartLine. The worker builds its output reversed, so flip it here.
expand_text(Text, StartLine) ->
    lists:reverse(expand_new_line(Text, StartLine, [])).
%% Interestingly, the reverse of "code" is "edoc". :-)
%% Expansion at the start of a line: leading whitespace passes through,
%% and two or more leading "="s begin a heading. The two base "="s are
%% consumed here; the integer passed on is the extra heading depth
%% ("==" -> 0, "===" -> 1, "====" -> 2).
expand_new_line([$\s = C | Cs], L, As) ->
    expand_new_line(Cs, L, [C | As]);
expand_new_line([$\t = C | Cs], L, As) ->
    expand_new_line(Cs, L, [C | As]);
expand_new_line([$\n = C | Cs], L, As) ->
    expand_new_line(Cs, L + 1, [C | As]);
expand_new_line([$=, $=, $=, $= | Cs], L, As) ->
    expand_heading(Cs, 2, L, As);
expand_new_line([$=, $=, $= | Cs], L, As) ->
    expand_heading(Cs, 1, L, As);
expand_new_line([$=, $= | Cs], L, As) ->
    expand_heading(Cs, 0, L, As);
expand_new_line(Cs, L, As) ->
    expand(Cs, L, As).
%% General wiki expansion. The accumulator As holds the output in
%% reverse, so literal markup is pushed as reversed strings — e.g.
%% ">edoc<" is "<code>" reversed and "[ATADC[!<>erp<" is "<pre><![CDATA[".
expand([$`, $' | Cs], L, As) ->
    expand(Cs, L, [$` | As]);    % produce "`" - don't start a new quote
expand([$`, $`, $` | Cs], L, As) ->
    %% If this is the first thing on the line, compensate for the
    %% indentation, unless we had to skip one or more empty lines.
    {Cs1, Skipped} = strip_empty_lines(Cs),    % avoid vertical space
    N = if Skipped > 0 ->
                0;
           true ->
                {As1, _} = edoc_lib:split_at(As, $\n),
                case edoc_lib:is_space(As1) of
                    true -> 3 + length(As1);
                    false -> 2    % nice default - usually right.
                end
        end,
    Ss = lists:duplicate(N, $\s),
    expand_triple(Cs1, L + Skipped, Ss ++ "[ATADC[!<>erp<" ++ As);
expand([$`, $` | Cs], L, As) ->
    expand_double(edoc_lib:strip_space(Cs), L, ">edoc<" ++ As);
expand([$` | Cs], L, As) ->
    expand_single(edoc_lib:strip_space(Cs), L, ">edoc<" ++ As);
expand([$[ | Cs], L, As) ->
    expand_uri(Cs, L, As);
expand([$\n = C | Cs], L, As) ->
    expand_new_line(Cs, L + 1, [C | As]);
expand([C | Cs], L, As) ->
    expand(Cs, L, [C | As]);
expand([], _, As) ->
    As.
%% == Heading ==
%% === SubHeading ===
%% ==== SubSubHeading ====
%% Headings: "== Title ==" etc. N is the extra depth beyond "==".
%% More than the expected number of "="s at the start means this is not
%% a heading after all.
expand_heading([$= | _] = Cs, N, L, As) ->
    expand_heading_1(Cs, N, L, As);
expand_heading(Cs, N, L, As) ->
    %% Examine the rest of the line for a matching "...==" end marker.
    {Cs1, Cs2} = edoc_lib:split_at(Cs, $\n),
    case edoc_lib:strip_space(lists:reverse(Cs1)) of
        [$=, $= | Cs3] ->
            {Es, Ts} = lists:splitwith(fun (X) -> X =:= $= end, Cs3),
            if length(Es) =:= N ->
                    Ts1 = edoc_lib:strip_space(
                            lists:reverse(edoc_lib:strip_space(Ts))),
                    expand_heading_2(Ts1, Cs2, N, L, As);
               true ->
                    H1 = lists:duplicate(N+2, $=),
                    H2 = "==" ++ Es,
                    throw_error(L, {"heading end marker mismatch: "
                                    "~s...~s", [H1, H2]})
            end;
        _ ->
            expand_heading_1(Cs, N, L, As)
    end.
%% Not a heading: emit the leading "="s literally and keep expanding.
expand_heading_1(Cs, N, L, As) ->
    expand(Cs, L, lists:duplicate(N + 2, $=) ++ As).
%% Emit an <hK> element wrapping an anchor whose name is derived from
%% the heading text; K = ?BASE_HEADING + N.
expand_heading_2(Ts, Cs, N, L, As) ->
    H = ?BASE_HEADING + N,
    Ts1 = io_lib:format("<h~w><a name=\"~ts\">~ts</a></h~w>\n",
                        [H, make_label(Ts), Ts, H]),
    expand_new_line(Cs, L + 1, lists:reverse(lists:flatten(Ts1), As)).
%% Build an anchor label from a heading title: every run of whitespace
%% (space, tab or newline) collapses into a single underscore; all other
%% characters pass through unchanged.
make_label([C | Rest]) when C =:= $\s; C =:= $\t; C =:= $\n ->
    [$_ | make_label(edoc_lib:strip_space(Rest))];
make_label([C | Rest]) ->
    [C | make_label(Rest)];
make_label([]) ->
    [].
%% `...'
%% Expand a single-quoted inline code span. `L0' remembers the line on
%% which the span started so an unterminated quote can be reported there.
expand_single(Cs, L, As) ->
    expand_single(Cs, L, As, L).

%% The accumulator is reversed: ">edoc/<" is "</code>" reversed, and the
%% entity fragments ";tl&", ";tg&", ";pma&" are "&lt;", "&gt;", "&amp;"
%% reversed — i.e. markup characters inside the span are escaped.
expand_single([$' | Cs], L, As, _L0) ->
    expand(Cs, L, ">edoc/<" ++ edoc_lib:strip_space(As));
expand_single([$< | Cs], L, As, L0) ->
    expand_single(Cs, L, ";tl&" ++ As, L0);
expand_single([$> | Cs], L, As, L0) ->
    expand_single(Cs, L, ";tg&" ++ As, L0);
expand_single([$& | Cs], L, As, L0) ->
    expand_single(Cs, L, ";pma&" ++ As, L0);
expand_single([$\n = C | Cs], L, As, L0) ->
    expand_single(Cs, L + 1, [C | As], L0);
expand_single([C | Cs], L, As, L0) ->
    expand_single(Cs, L, [C | As], L0);
expand_single([], L, _, L0) ->
    throw_error(L0, {"`-quote ended unexpectedly at line ~w", [L]}).

%% ``...''
%% Same as expand_single/4 but the span is closed by two quotes.
expand_double(Cs, L, As) ->
    expand_double(Cs, L, As, L).

expand_double([$', $' | Cs], L, As, _L0) ->
    expand(Cs, L, ">edoc/<" ++ edoc_lib:strip_space(As));
expand_double([$< | Cs], L, As, L0) ->
    expand_double(Cs, L, ";tl&" ++ As, L0);
expand_double([$> | Cs], L, As, L0) ->
    expand_double(Cs, L, ";tg&" ++ As, L0);
expand_double([$& | Cs], L, As, L0) ->
    expand_double(Cs, L, ";pma&" ++ As, L0);
expand_double([$\n = C | Cs], L, As, L0) ->
    expand_double(Cs, L + 1, [C | As], L0);
expand_double([C | Cs], L, As, L0) ->
    expand_double(Cs, L, [C | As], L0);
expand_double([], L, _, L0) ->
    throw_error(L0, {"``-quote ended unexpectedly at line ~w", [L]}).

%% ```...'''
%% Preformatted block: content is inside CDATA, so no entity escaping is
%% needed except for "]]>", which would terminate the CDATA section.
expand_triple(Cs, L, As) ->
    expand_triple(Cs, L, As, L).

expand_triple([$', $', $' | Cs], L, As, _L0) -> % ' stupid emacs
    %% Reversed "]]></pre" closes the CDATA section and the <pre> element.
    expand(Cs, L, ">erp/<>]]" ++ edoc_lib:strip_space(As));
expand_triple([$], $], $> | Cs], L, As, L0) ->
    %% "]]>" inside the block: emit "]]&gt;" (reversed) instead.
    expand_triple(Cs, L, ";tg&]]" ++ As, L0);
expand_triple([$\n = C | Cs], L, As, L0) ->
    expand_triple(Cs, L + 1, [C | As], L0);
expand_triple([C | Cs], L, As, L0) ->
    expand_triple(Cs, L, [C | As], L0);
expand_triple([], L, _, L0) ->
    throw_error(L0, {"```-quote ended unexpectedly at line ~w", [L]}).
%% e.g. [file:/...] or [http://... LinkText]
%% Only the URI schemes listed below start a hyperlink; any other "[" is
%% treated as a literal character (last clause). The recognized scheme
%% prefix is pushed — reversed — onto the URI accumulator `Us'.
expand_uri("http:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:ptth", As);
expand_uri("https:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:sptth", As);
expand_uri("ftp:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:ptf", As);
expand_uri("file:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:elif", As);
expand_uri("mailto:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:otliam", As);
expand_uri("nfs:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:sfn", As);
expand_uri("shttp:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:ptths", As);
expand_uri("xmpp:/" ++ Cs, L, As) ->
    expand_uri(Cs, L, "/:ppmx", As);
expand_uri(Cs, L, As) ->
    expand(Cs, L, [$[ | As]).

%% Collect the URI itself (still reversed in `Us'). A "]" closes the
%% reference with the URI doubling as the link text (wrapped in <tt>,
%% reversed); whitespace switches to label collection (arity 6).
expand_uri([$] | Cs], L, Us, As) ->
    expand(Cs, L, push_uri(Us, ">tt/<" ++ Us ++ ">tt<", As));
expand_uri([$\s = C | Cs], L, Us, As) ->
    expand_uri(Cs, 0, L, [C], Us, As);
expand_uri([$\t = C | Cs], L, Us, As) ->
    expand_uri(Cs, 0, L, [C], Us, As);
expand_uri([$\n = C | Cs], L, Us, As) ->
    expand_uri(Cs, 1, L, [C], Us, As);
expand_uri([C | Cs], L, Us, As) ->
    expand_uri(Cs, L, [C | Us], As);
expand_uri([], L, Us, _As) ->
    expand_uri_error(Us, L).

%% Collect the explicit link label `Ss' (reversed); `N' counts newlines
%% consumed so the line counter can be advanced when the "]" is found.
expand_uri([$] | Cs], N, L, Ss, Us, As) ->
    %% Trim whitespace from both ends of the label, keeping it reversed.
    Ss1 = lists:reverse(edoc_lib:strip_space(
                          lists:reverse(edoc_lib:strip_space(Ss)))),
    expand(Cs, L + N, push_uri(Us, Ss1, As));
expand_uri([$\n = C | Cs], N, L, Ss, Us, As) ->
    expand_uri(Cs, N + 1, L, [C | Ss], Us, As);
expand_uri([C | Cs], N, L, Ss, Us, As) ->
    expand_uri(Cs, N, L, [C | Ss], Us, As);
expand_uri([], _, L, _Ss, Us, _As) ->
    expand_uri_error(Us, L).

%% Report an unterminated reference, showing the scheme part of the URI.
-spec expand_uri_error(list(), pos_integer()) -> no_return().
expand_uri_error(Us, L) ->
    {Ps, _} = edoc_lib:split_at(lists:reverse(Us), $:),
    throw_error(L, {"reference '[~ts:...' ended unexpectedly", [Ps]}).
%% Emit the hyperlink HTML onto the reversed output accumulator `Acc'.
%% `Uri' and `Label' arrive already reversed, as are the literal fragments,
%% so once the final output is reversed the result reads:
%%   <a href="Uri" target="_top">Label</a>
push_uri(Uri, Label, Acc) ->
    lists:append([">a/<", Label, ">\"pot_\"=tegrat \"", Uri, "\"=ferh a<", Acc]).
%% Drop leading lines that consist only of whitespace. Returns the
%% remaining text together with the number of lines skipped, so callers
%% can keep their line counter accurate.
strip_empty_lines(Cs) ->
    strip_empty_lines(Cs, 0).

strip_empty_lines([], N) ->
    {[], N}; % reached the end of input
strip_empty_lines(Cs, N) ->
    {Cs1, Cs2} = edoc_lib:split_at(Cs, $\n),
    case edoc_lib:is_space(Cs1) of
        true ->
            strip_empty_lines(Cs2, N + 1);
        false ->
            {Cs, N}
    end.
%% Scanning element content for paragraph breaks (empty lines).
%% Paragraphs are flushed by block level elements.
par(Es) ->
    par(Es, [], []).

%% `As' accumulates the current (reversed) paragraph candidate; `Bs' holds
%% already-flushed (reversed) output.
par([E = #xmlText{value = Value} | Es], As, Bs) ->
    par_text(Value, As, Bs, E, Es);
par([E = #xmlElement{name = Name} | Es], As, Bs) ->
    %% (Note that paragraphs may not contain any further block-level
    %% elements, including other paragraphs. Tables get complicated.)
    case par_class(Name) of
        flush   -> par_flush(Es, [E | As], Bs);
        deep    -> par_flush(Es, [par_elem(E) | As], Bs);
        shallow -> par_flush(Es, [par_subelem(E) | As], Bs);
        inline  -> par(Es, [E | As], Bs)
    end;
par([E | Es], As, Bs) ->
    par(Es, [E | As], Bs);
par([], As, Bs) ->
    lists:reverse(As ++ Bs).

%% Classify an element name for paragraph handling:
%%   flush   - block element, flush the paragraph, keep content verbatim
%%   deep    - block element whose content is paragraph-processed too
%%   shallow - block element whose subelements only are processed
%%   inline  - anything else, stays inside the current paragraph
par_class(Name) when Name =:= p; Name =:= hr; Name =:= h1; Name =:= h2;
                     Name =:= h3; Name =:= h4; Name =:= h5; Name =:= h6;
                     Name =:= pre; Name =:= address ->
    flush;
par_class(Name) when Name =:= 'div'; Name =:= blockquote; Name =:= form;
                     Name =:= fieldset; Name =:= noscript ->
    deep;
par_class(Name) when Name =:= ul; Name =:= ol; Name =:= dl; Name =:= table ->
    shallow;
par_class(_) ->
    inline.
%% Handle a text node: if it contains a blank line, close the current
%% paragraph there, emit the whitespace run between paragraphs verbatim,
%% and continue scanning the remainder of the text.
par_text(Cs, As, Bs, E, Es) ->
    case ptxt(Cs) of
        none ->
            %% no blank lines: keep this element as it is
            par(Es, [E | As], Bs);
        {Cs1, Ss, Cs2} ->
            %% Cs1 = text before the break, Ss = the whitespace run,
            %% Cs2 = text after the break.
            Es1 = case Cs1 of
                      [] -> lists:reverse(As);
                      _ -> lists:reverse(As, [E#xmlText{value = Cs1}])
                  end,
            %% Wrap the accumulated content in a <p> unless it is empty.
            Bs0 = case Es1 of
                      [] -> Bs;
                      _ -> [#xmlElement{name = p, content = Es1} | Bs]
                  end,
            Bs1 = [#xmlText{value = Ss} | Bs0],
            case Cs2 of
                [] ->
                    par(Es, [], Bs1);
                _ ->
                    %% More text after the break: scan it for further breaks.
                    par_text(Cs2, [], Bs1, #xmlText{value = Cs2}, Es)
            end
    end.
%% Flush the current paragraph accumulator into the output and continue
%% with an empty paragraph.
par_flush(Es, As, Bs) ->
    par(Es, [], As ++ Bs).

%% Paragraph-process the content of an element in place.
par_elem(E) ->
    E#xmlElement{content = par(E#xmlElement.content)}.
%% Only process content of subelements; ignore immediate content.
par_subelem(E) ->
    E#xmlElement{content = par_subelem_1(E#xmlElement.content)}.

%% Rewrite each child node: skipped element names are kept verbatim,
%% structural subelements recurse via par_subelem/1, other elements get
%% normal paragraph processing, and non-element nodes pass through.
par_subelem_1(Es) ->
    lists:map(fun par_subelem_2/1, Es).

par_subelem_2(E = #xmlElement{name = Name}) ->
    case {par_skip(Name), par_sub(Name)} of
        {true, _} -> E;
        {false, true} -> par_subelem(E);
        {false, false} -> par_elem(E)
    end;
par_subelem_2(E) ->
    E.
%% Table child elements whose content is left completely untouched.
par_skip(Name) ->
    lists:member(Name, [caption, col, colgroup]).

%% Table child elements whose subelements (but not immediate content)
%% should be paragraph-processed.
par_sub(Name) ->
    lists:member(Name, [tr, thead, tfoot, tbody]).
%% scanning text content for a blank line
%% Returns `none' when there is no paragraph break, otherwise
%% {TextBefore, WhitespaceRun, TextAfter} (see ptxt_2/3).
ptxt(Cs) ->
    ptxt(Cs, []).

%% `As' accumulates scanned text in reverse.
ptxt([$\n | Cs], As) ->
    ptxt_1(Cs, As, [$\n]);
ptxt([C | Cs], As) ->
    ptxt(Cs, [C | As]);
ptxt([], _As) ->
    none.

%% scanning text following an initial newline
%% `Ss' collects the (reversed) whitespace seen since that newline; a
%% second newline means we found a blank line.
ptxt_1([C=$\s | Cs], As, Ss) ->
    ptxt_1(Cs, As, [C | Ss]);
ptxt_1([C=$\t | Cs], As, Ss) ->
    ptxt_1(Cs, As, [C | Ss]);
ptxt_1([C=$\n | Cs], As, Ss) ->
    %% blank line detected
    ptxt_2(Cs, As, [C | Ss]);
ptxt_1(Cs, As, Ss) ->
    %% not a blank line
    ptxt(Cs, lists:reverse(Ss, As)).
%% collecting whitespace following a blank line
%% Extends the whitespace run as far as possible, then returns
%% {TextBeforeBreak, WhitespaceRun, RemainingText}. If everything before
%% the break was whitespace, it is folded into the whitespace run so no
%% empty paragraph is produced.
ptxt_2([C=$\s | Cs], As, Ss) ->
    ptxt_2(Cs, As, [C | Ss]);
ptxt_2([C=$\t | Cs], As, Ss) ->
    ptxt_2(Cs, As, [C | Ss]);
ptxt_2([C=$\n | Cs], As, Ss) ->
    ptxt_2(Cs, As, [C | Ss]);
ptxt_2(Cs, As, Ss) ->
    %% ended by non-whitespace or end of element
    case edoc_lib:is_space(As) of
        true ->
            {[], lists:reverse(Ss ++ As), Cs};
        false ->
            {lists:reverse(As), lists:reverse(Ss), Cs}
    end.
%% Abort wiki expansion with a positioned error. The thrown tuple is
%% expected to be caught further up by the edoc error machinery.
-spec throw_error(non_neg_integer(), {string(), [_]}) -> no_return().
throw_error(Line, Desc) ->
    throw({error, Line, Desc}).
%%%=============================================================================
%%% @doc Adapter module for hackney metrics
%%%
%%% To use it, configure hackney's `mod_metrics` to use `hackney_telemetry` and
%%% make sure that hackney_telemetry starts before your application.
%%%
%%% This module implements all callbacks required by the hackney's
%%% `hackney_metrics` module and is called by hackney to report its
%%% instrumentation metrics.
%%%
%%% Each metric is identified by a list of atoms/charlist. Some examples:
%%% - [hackney, free_count]
%%% - [hackney_pool, api_graphql, free_count]
%%%
%%% Each metric has a type - counter, histogram, gauge or meter - and hackney
%%% leverages the actual interpretation of data to external libraries - like
%%% folsom or exometer. Unfortunately, these libraries do not export data in a
%%% way that is useful for telemetry, so we need to transform the data before
%%% reporting it. Fortunately, telemetry is able to create gauges and
%%% histograms, so we just need to keep track of metric values and report to
%%% telemetry.
%%%
%%% This module will receive the data from hackney and delegate the reports to
%%% `hackney_telemetry_worker`, providing all the required transformations so
%%% the data is correctly exported to telemetry.
%%%
%%% For more information please refer to the following document:
%%% - https://github.com/benoitc/hackney/blob/master/README.md#metrics
%%%
%%% @end
%%%=============================================================================
-module(hackney_telemetry).
-export(
[
new/2,
delete/1,
increment_counter/1,
increment_counter/2,
decrement_counter/1,
decrement_counter/2,
update_histogram/2,
update_meter/2,
update_gauge/2
]
).
-include("hackney_telemetry.hrl").
%%------------------------------------------------------------------------------
%% @doc Handles metric worker creation.
%%
%% Called when hackney creates new pools to spawn a worker process to
%% handle the new metrics.
%%
%% Hackney general metrics are ignored here because they are already included
%% in hackney_telemetry_sup supervisor.
%%
%% @end
%%------------------------------------------------------------------------------
-spec new(metric_type(), hackney_metric()) -> ok.
%% General [hackney, _] metrics: workers already started by the supervisor.
new(_Type, [hackney, _key]) -> ok;
%% Per-pool metrics: spawn a dedicated worker for this metric.
new(_Type, [hackney_pool, PoolName, _] = Metric) when is_atom(PoolName) ->
    hackney_telemetry_sup:start_worker(Metric);
%% Anything else is ignored.
new(_Type, _Metric) -> ok.
%%------------------------------------------------------------------------------
%% @doc Handles metric worker deletion, stopping the worker spawned by new/2.
%% @end
%%------------------------------------------------------------------------------
-spec delete(hackney_metric()) -> ok.
delete(Metric) -> hackney_telemetry_sup:stop_worker(Metric).
%%------------------------------------------------------------------------------
%% @doc Increments a counter metric by 1.
%% @end
%%------------------------------------------------------------------------------
-spec increment_counter(hackney_metric()) -> ok.
increment_counter(Metric) -> increment_counter(Metric, 1).

%%------------------------------------------------------------------------------
%% @doc Increments a counter metric by the given value.
%% @end
%%------------------------------------------------------------------------------
-spec increment_counter(hackney_metric(), non_neg_integer()) -> ok.
increment_counter(Metric, Value) ->
    hackney_telemetry_worker:update(Metric, Value, fun sum/2),
    ok.

%%------------------------------------------------------------------------------
%% @doc Decrements a counter metric by 1.
%% @end
%%------------------------------------------------------------------------------
-spec decrement_counter(hackney_metric()) -> ok.
decrement_counter(Metric) -> decrement_counter(Metric, 1).

%%------------------------------------------------------------------------------
%% @doc Decrements a counter metric by the given value.
%% @end
%%------------------------------------------------------------------------------
-spec decrement_counter(hackney_metric(), non_neg_integer()) -> ok.
decrement_counter(Metric, Value) ->
    %% Negate and reuse the additive transform so increments and decrements
    %% share a single accumulation path in the worker. Return `ok'
    %% explicitly to honour the spec, matching increment_counter/2
    %% (previously the worker's return value leaked through).
    hackney_telemetry_worker:update(Metric, -Value, fun sum/2),
    ok.
%%------------------------------------------------------------------------------
%% @doc Updates a histogram metric.
%% @end
%%------------------------------------------------------------------------------
-spec update_histogram(hackney_metric(), any()) -> ok.
%% Hackney may pass a thunk instead of a value; evaluate it in the worker.
update_histogram(Metric, Fun) when is_function(Fun) ->
    hackney_telemetry_worker:update(Metric, Fun, fun eval_and_replace/2);
%% Hackney reports the following metrics with a shift of -1 on their value:
%% - [hackney_pool, <pool_name>, free_count]
%% - [hackney_pool, <pool_name>, in_use_count]
%%
%% For these metrics, we compensate by adding +1.
%%
%% Reference: https://github.com/benoitc/hackney/blob/592a00720cd1c8eb1edb6a6c9c8b8a4709c8b155/src/hackney_pool.erl#L597-L604
update_histogram([hackney_pool, _, Name] = Metric, Value)
    when Name =:= in_use_count; Name =:= free_count ->
    hackney_telemetry_worker:update(Metric, Value + 1, fun replace/2);
%% Any other histogram sample is stored as-is.
update_histogram(Metric, Value) ->
    hackney_telemetry_worker:update(Metric, Value, fun replace/2).
%%------------------------------------------------------------------------------
%% @doc Updates a meter metric
%%
%% A meter is a type of counter that only goes forward, so updates are
%% accumulated additively.
%%
%% @end
%%------------------------------------------------------------------------------
-spec update_meter(hackney_metric(), any()) -> ok.
update_meter(Metric, Value) -> hackney_telemetry_worker:update(Metric, Value, fun sum/2).

%%------------------------------------------------------------------------------
%% @doc Updates a gauge metric.
%%
%% Gauges only keep the latest value, so we just need to replace the old state.
%%
%% @end
%%------------------------------------------------------------------------------
-spec update_gauge(hackney_metric(), any()) -> ok.
update_gauge(Metric, Value) -> hackney_telemetry_worker:update(Metric, Value, fun replace/2).
%% Transform functions: fold a new event value into the worker's current state.

%% Additive accumulation (counters and meters).
sum(Current, Delta) -> Current + Delta.

%% Keep only the most recent value (gauges and histogram samples).
replace(_Current, New) -> New.

%% The event carries a thunk; evaluate it and keep the result.
eval_and_replace(_Current, Thunk) -> Thunk().
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
%%% @author <NAME>
%%% @copyright 2012 <NAME>
%%% @doc MongoDB GridFS Cursor API. This module provides functions for retrieving GridFS
%%% files from a cursor.
%%% @end
-module(gridfs_cursor).
-behaviour(gen_server).
%% Includes
-include("gridfs.hrl").
%% API
-export([close/1,
new/4,
next/1,
rest/1,
set_timeout/2,
take/2]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% Records
-record(state, {connection_parameters, bucket, mongo_cursor, parent_process, die_with_parent=true, timeout=infinity}).
%% External functions
%% All calls use an infinity timeout; the cursor server either replies or
%% stops itself (see handle_call/3).

%% @doc Closes a cursor.
-spec(close(cursor()) -> ok).
close(Pid) ->
    gen_server:call(Pid, close, infinity).

%% @doc Creates a cursor using a specified connection to a database collection of files.
%% The cursor process is linked to the caller and monitors ParentProcess.
-spec(new(#gridfs_connection{}, bucket(), mongo:cursor(), pid()) -> cursor()).
new(Conn, Bucket, MongoCursor, ParentProcess) ->
    {ok, Cursor} = gen_server:start_link(?MODULE, [Conn, Bucket, MongoCursor, ParentProcess], []),
    Cursor.

%% @doc Returns the next GridFS file from a cursor or an empty tuple if there are no further
%% files. The cursor stops itself when exhausted.
-spec(next(cursor()) -> file()|{}).
next(Cursor) ->
    gen_server:call(Cursor, next, infinity).

%% @doc Returns all GridFS files from a cursor. The cursor stops afterwards.
-spec(rest(cursor()) -> [file()]).
rest(Cursor) ->
    gen_server:call(Cursor, rest, infinity).

%% @doc Sets a timeout for a cursor. If the cursor is not read within the specified time,
%% the cursor is closed. This also detaches the cursor from its parent process.
-spec(set_timeout(cursor(), integer()) -> ok).
set_timeout(Cursor, Timeout) ->
    gen_server:call(Cursor, {set_timeout, Timeout}, infinity).

%% @doc Retrieves GridFS files from a cursor up to the specified maximum number.
%% The cursor stops afterwards.
-spec(take(integer(), cursor()) -> [file()]).
take(Limit, Cursor) when Limit >= 0 ->
    gen_server:call(Cursor, {take, Limit}, infinity).
%% Server functions

%% @doc Initializes the server with connection parameters, a bucket and a mongo cursor.
%% The parent process is monitored so the cursor can terminate with it
%% (see handle_info/2); the initial gen_server timeout is infinity.
init([ConnectionParameters, Bucket, MongoCursor, ParentProcess]) ->
    monitor(process, ParentProcess),
    {ok,
     #state{connection_parameters=ConnectionParameters,
            bucket=Bucket,
            mongo_cursor=MongoCursor,
            parent_process=ParentProcess},
     infinity}.
%% @doc Responds to synchronous messages. Synchronous messages are sent to get the next file,
%% to get remaining files, to get the mongo:cursor(), to close the cursor and to set
%% the timeout of the cursor. Note that `rest' and `take' stop the server
%% after replying (the cursor is exhausted), and `next' stops it once no
%% documents remain.
handle_call(close, _From, State) ->
    {stop, normal, ok, State};
handle_call(next, _From, State) ->
    MongoCursor = State#state.mongo_cursor,
    case mc_cursor:next(MongoCursor) of
        error ->
            %% No more documents: reply with {} and shut down.
            {stop, normal, {}, State};
        {#{<<"_id">> := Id}} ->
            {reply, create_file(State, Id), State, State#state.timeout}
    end;
handle_call(rest, _From, State) ->
    MongoCursor = State#state.mongo_cursor,
    Ids = mc_cursor:rest(MongoCursor),
    Reply = [create_file(State, Id) || #{<<"_id">> := Id} <- Ids],
    {stop, normal, Reply, State};
handle_call({set_timeout, Timeout}, _From, State) ->
    %% Setting a timeout detaches the cursor's lifetime from its parent.
    {reply, ok, State#state{die_with_parent=false, timeout=Timeout}, Timeout};
handle_call({take, Limit}, _From, State) ->
    MongoCursor = State#state.mongo_cursor,
    Docs = mc_cursor:take(MongoCursor, Limit),
    Files = [create_file(State, Id) || #{<<"_id">> := Id} <- Docs],
    {stop, normal, Files, State}.
%% @doc Responds asynchronously to messages. The module does not expect to receive asynchronous
%% messages.
handle_cast(_Msg, State) ->
    {noreply, State, State#state.timeout}.

%% @doc Responds to non-OTP messages. The messages that are handled are a timeout and the
%% the termination of the parent process (via the monitor set up in init/1).
%% The 'DOWN' message only stops the cursor while it is still tied to its
%% parent (die_with_parent); set_timeout/2 breaks that tie.
handle_info({'DOWN', _Ref, process, Pid, _Reason}, State) when Pid =:= State#state.parent_process andalso State#state.die_with_parent ->
    {stop, normal, State};
handle_info(timeout, State) ->
    {stop, normal, State};
handle_info(_Info, State) ->
    {noreply, State, State#state.timeout}.

%% @doc Shuts down the server. No cleanup is required.
terminate(_Reason, _State) ->
    ok.

%% @doc Handles code changes (changes are ignored).
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions

%% Wrap a document id in a gridfs_file handle. A cursor that has been
%% detached from its parent (via set_timeout/2) propagates its timeout to
%% the files it creates; otherwise the file's lifetime follows the parent.
create_file(State, Id) ->
    #state{connection_parameters = Params,
           bucket = Bucket,
           parent_process = Parent,
           die_with_parent = DieWithParent,
           timeout = Timeout} = State,
    File = gridfs_file:new(Params, Bucket, Id, Parent),
    case DieWithParent of
        true -> ok;
        false -> gridfs_file:set_timeout(File, Timeout)
    end,
    File.
%% -------------------------------------------------------------------
%%
%% mi_bloom: bloom filter adapted for use by merge index.
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Implementation of the Bloom filter data structure.
%% @reference [http://en.wikipedia.org/wiki/Bloom_filter]
%% Adapted from http://code.google.com/p/bloomerl for use in
%% merge_index, where we are worried about speed of creating the bloom
%% filter and testing membership as well as size of the bloom
%% filter. By hard coding some parameters, we reduce the size. Also,
%% by calculating the bloom filter in batches, we improve the
%% performance.
-module(mi_bloom).
-export([new/1, is_element/2]).
-include("merge_index.hrl").
-ifdef(TEST).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
%% These settings give us a max 256 keys with 0.05 error rate.
-define(M, 1600).
-define(K, 4).
%% @doc Generate a new bloom filter containing the specified keys.
%% The filter is an ?M-bit bitstring with one bit set for every hash
%% index produced by any of the keys (indices sorted and deduplicated
%% before the bit layout is generated).
new(Keys) ->
    SetBits = lists:usort([Idx || Key <- Keys, Idx <- calc_idxs(Key)]),
    list_to_bitstring(generate_bits(0, SetBits)).
%% Lay out a sorted list of set bit positions as a list of bitstrings
%% covering ?M bits in total: for each set position, a run of `Gap' zero
%% bits is emitted followed by a single one bit. Note the addressing is
%% effectively 1-based — positions 0 and 1 both land on the very first
%% bit (Gap is -1 resp. 0 there). is_element/3 reads bits with the same
%% convention, so no false negatives result; the collision only raises
%% the false-positive rate marginally.
generate_bits(Pos, [NextOnPos|T]) ->
    Gap = NextOnPos - Pos - 1,
    case Gap > 0 of
        true ->
            Bits = <<0:Gap/integer, 1:1/integer>>,
            [Bits|generate_bits(Pos + Gap + 1, T)];
        false ->
            Bits = <<1:1/integer>>,
            [Bits|generate_bits(Pos + 1, T)]
    end;
generate_bits(Pos, []) ->
    %% Pad the remainder with zero bits up to ?M.
    Gap = ?M - Pos,
    [<<0:Gap/integer>>].
%% @spec is_element(string(), bloom()) -> bool()
%% @doc Determines if the key is (probably) an element of the filter.
%% False positives are possible; false negatives are not.
is_element(Key, Bitmap) ->
    is_element(Key, Bitmap, calc_idxs(Key)).

%% Check each hash index in turn; the key is reported present only if the
%% bit for every index is set. Index 0 maps to the first bit, as does
%% index 1 — mirroring the 1-based layout used by generate_bits/2.
is_element(Key, Bitmap, [Idx | T]) ->
    %% If we are looking for the first bit, do slightly different math
    %% than if we are looking for later bits.
    case Idx > 0 of
        true ->
            PreSize = Idx - 1,
            <<_:PreSize/bits, Bit:1/bits, _/bits>> = Bitmap;
        false ->
            <<Bit:1/bits, _/bits>> = Bitmap
    end,
    %% Check if the bit is on; any unset bit means "definitely absent".
    case Bit of
        <<1:1>> -> is_element(Key, Bitmap, T);
        <<0:1>> -> false
    end;
is_element(_, _, []) ->
    true.
% This uses the "enhanced double hashing" algorithm.
% Todo: handle case of m > 2^32.
%% Derive the ?K bit positions (each in [0, ?M)) for a key from two
%% independent erlang:phash2/2 values.
calc_idxs(Key) ->
    X = erlang:phash2(Key, ?M),
    Y = erlang:phash2({"salt", Key}, ?M),
    calc_idxs(?K - 1, X, Y, [X]).

%% Accumulate the remaining K-1 indices:
%%   X_{i+1} = (X_i + Y_i) rem M,  Y_{i+1} = (Y_i + I) rem M.
calc_idxs(0, _, _, Acc) ->
    Acc;
calc_idxs(I, X, Y, Acc) ->
    Xi = (X+Y) rem ?M,
    Yi = (Y+I) rem ?M,
    calc_idxs(I-1, Xi, Yi, [Xi | Acc]).
%% UNIT TESTS
-ifdef(TEST).
-ifdef(EQC).

%% EUnit wrapper around the QuickCheck property below (60s budget).
prop_bloom_test_() ->
    {timeout, 60, fun() -> ?assert(eqc:quickcheck(prop_bloom())) end}.

%% Generator: non-empty list of non-empty binaries used as filter keys.
g_keys() ->
    non_empty(list(non_empty(binary()))).

%% Property: every key the filter was built from tests as a member
%% (Bloom filters admit false positives but never false negatives).
prop_bloom() ->
    ?FORALL(Keys, g_keys(),
            begin
                Bloom = ?MODULE:new(Keys),
                F = fun(X) -> is_element(X, Bloom) end,
                lists:all(F, Keys)
            end).

-endif.
-endif.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind_containers).
-include("porkrind_internal.hrl").
-export([
matches/1,
matching/1,
matches_inanyorder/1,
matching_inanyorder/1,
contains/1,
contains_inanyorder/1,
only_contains/1
]).
-export([
tail/1
]).
%% Recursively turn a term into a matcher: existing matchers pass through,
%% lists and tuples are converted element-by-element into an ordered
%% `contains' matcher of the same shape, and any other term is wrapped
%% into a plain matcher by porkrind_util:maybe_wrap/1.
matches(Matcher) when ?IS_MATCHER(Matcher) ->
    Matcher;
matches(Container) when is_list(Container) ->
    Matchers = lists:map(fun matches/1, Container),
    contains(Matchers);
matches(Container) when is_tuple(Container) ->
    Matchers = lists:map(fun matches/1, tuple_to_list(Container)),
    contains(list_to_tuple(Matchers));
matches(Term) ->
    porkrind_util:maybe_wrap(Term).

%% Alias for matches/1; reads more naturally in some assertions.
matching(Term) ->
    matches(Term).

%% Like matches/1, but containers match their elements in any order
%% (via contains_inanyorder/1).
matches_inanyorder(Matcher) when ?IS_MATCHER(Matcher) ->
    Matcher;
matches_inanyorder(Container) when is_list(Container) ->
    Matchers = lists:map(fun matches_inanyorder/1, Container),
    contains_inanyorder(Matchers);
matches_inanyorder(Container) when is_tuple(Container) ->
    Matchers = lists:map(fun matches_inanyorder/1, tuple_to_list(Container)),
    contains_inanyorder(list_to_tuple(Matchers));
matches_inanyorder(Term) ->
    porkrind_util:maybe_wrap(Term).

%% Alias for matches_inanyorder/1.
matching_inanyorder(Term) ->
    matches_inanyorder(Term).
%% Build a matcher asserting that a list/tuple contains exactly the given
%% elements in the given order. The argument must itself be a container
%% of matchers/terms, not a bare matcher.
contains(Matcher) when ?IS_MATCHER(Matcher) ->
    erlang:error({badarg, Matcher});
contains(Matchers0) when is_list(Matchers0) ->
    M = contains_int(Matchers0),
    porkrind_logic:all_of([
        porkrind_types:is_list(),
        M
    ]);
contains(Matchers0) when is_tuple(Matchers0) ->
    %% Tuples are matched by converting to a list internally (tuple_wrap).
    M1 = contains_int(tuple_to_list(Matchers0)),
    M2 = tuple_wrap(M1),
    porkrind_logic:all_of([
        porkrind_types:is_tuple(),
        M2
    ]).

%% Same as contains/1, but element order is irrelevant: each element must
%% be consumed by exactly one matcher.
contains_inanyorder(Matcher) when ?IS_MATCHER(Matcher) ->
    erlang:error({badarg, Matcher});
contains_inanyorder(Matchers0) when is_list(Matchers0) ->
    M = contains_inanyorder_int(Matchers0),
    porkrind_logic:all_of([
        porkrind_types:is_list(),
        M
    ]);
contains_inanyorder(Matchers0) when is_tuple(Matchers0) ->
    M1 = contains_inanyorder_int(tuple_to_list(Matchers0)),
    M2 = tuple_wrap(M1),
    porkrind_logic:all_of([
        porkrind_types:is_tuple(),
        M2
    ]).

%% Matcher asserting that every element of a list/tuple satisfies at least
%% one of the given matchers (lengths need not match). A single matcher or
%% bare term is accepted and applied to every element.
only_contains(Matcher) when ?IS_MATCHER(Matcher) ->
    only_contains_single(Matcher);
only_contains(Matchers0) when is_list(Matchers0) ->
    M = only_contains_int(Matchers0),
    porkrind_logic:all_of([
        porkrind_types:is_list(),
        M
    ]);
only_contains(Matchers0) when is_tuple(Matchers0) ->
    M1 = only_contains_int(tuple_to_list(Matchers0)),
    M2 = tuple_wrap(M1),
    porkrind_logic:all_of([
        porkrind_types:is_tuple(),
        M2
    ]);
only_contains(Term) ->
    only_contains_single(Term).
%% Wrap a matcher to be applied to every remaining element of a list when
%% used as the last matcher given to contains/1
%% (see apply_contains_tail_matcher/2).
tail(Matcher0) ->
    Matcher = porkrind_util:maybe_wrap(Matcher0),
    #'porkrind.matcher'{
        name = tail,
        args = [Matcher0],
        match = fun(Value) ->
            porkrind:match(Value, Matcher)
        end
    }.
%% Internal: ordered element-by-element matcher over a list of values.
contains_int(Matchers0) ->
    Matchers = lists:map(fun porkrind_util:maybe_wrap/1, Matchers0),
    #'porkrind.matcher'{
        name = contains,
        args = [Matchers0],
        match = fun(Values) ->
            case apply_contains_matchers(Values, Matchers) of
                ok -> ok;
                Else -> ?PR_FAIL({Else, Values})
            end
        end,
        reason = fun({bad_length, Values}) ->
            Args = [Values, length(Values), length(Matchers)],
            io_lib:format("~p has length ~b, not ~b", Args)
        end
    }.

%% Pair values with matchers positionally. A trailing `tail' matcher (and
%% only a trailing one) absorbs all remaining values; otherwise the two
%% lists must have equal length.
apply_contains_matchers(Values, [#'porkrind.matcher'{name = tail} = M]) ->
    apply_contains_tail_matcher(Values, M);
apply_contains_matchers([], []) ->
    ok;
apply_contains_matchers(_, []) ->
    bad_length;
apply_contains_matchers([], _) ->
    bad_length;
apply_contains_matchers([Value | RestValues], [Matcher | RestMatchers]) ->
    porkrind:match(Value, Matcher),
    apply_contains_matchers(RestValues, RestMatchers).

%% Apply a `tail' matcher to every remaining value (zero or more).
apply_contains_tail_matcher([], _Matcher) ->
    ok;
apply_contains_tail_matcher([Value | RestValues], Matcher) ->
    porkrind:match(Value, Matcher),
    apply_contains_tail_matcher(RestValues, Matcher).
%% Internal: unordered matcher — lengths must agree, and each value must
%% consume exactly one matcher (greedy first-match assignment).
contains_inanyorder_int(Matchers0) ->
    Matchers = lists:map(fun porkrind_util:maybe_wrap/1, Matchers0),
    #'porkrind.matcher'{
        name = contains_inanyorder,
        args = [Matchers0],
        match = fun(Values) ->
            if length(Values) == length(Matchers) -> ok; true ->
                ?PR_FAIL({bad_length, Values})
            end,
            %% Fold the values through the matcher pool, removing each
            %% matcher as it is used; the pool must end up empty.
            [] = lists:foldl(fun(Value, Acc) ->
                case porkrind_util:find_first_match(Value, Acc) of
                    {match, M} ->
                        Acc -- [M];
                    nomatch ->
                        ?PR_FAIL({no_match_for, Value})
                end
            end, Matchers, Values)
        end,
        reason = fun
            ({bad_length, Value}) ->
                Args = [Value, length(Value), length(Matchers)],
                io_lib:format("~p has length ~b, not ~b", Args);
            ({no_match_for, Value}) ->
                Descs = lists:map(fun(M) -> porkrind:describe(M) end, Matchers),
                Prefix = io_lib:format("no match for ~w in ", [Value]),
                [Prefix, porkrind_util:str_join(Descs, " or ", "()")]
        end
    }.
%% Internal: every value must satisfy at least one matcher from the list;
%% matchers may match any number of values.
only_contains_int(Matchers0) ->
    Matchers = lists:map(fun porkrind_util:maybe_wrap/1, Matchers0),
    AnyOfMatcher = porkrind_logic:any_of(Matchers),
    #'porkrind.matcher'{
        name = only_contains,
        args = [Matchers0],
        match = fun(Values) ->
            lists:foreach(fun(Value) ->
                porkrind:match(Value, AnyOfMatcher)
            end, Values)
        end
    }.

%% Internal: apply a single matcher to every element of a list or tuple.
only_contains_single(Matcher0) ->
    Matcher = porkrind_util:maybe_wrap(Matcher0),
    M1 = #'porkrind.matcher'{
        name = only_contains,
        args = [Matcher0],
        match = fun(Values0) ->
            %% Normalize tuples to lists before iterating.
            Values = case Values0 of
                _ when is_list(Values0) -> Values0;
                _ when is_tuple(Values0) -> tuple_to_list(Values0)
            end,
            lists:foreach(fun(Value) ->
                porkrind:match(Value, Matcher)
            end, Values)
        end
    },
    porkrind_logic:all_of([
        porkrind_logic:any_of([
            porkrind_types:is_list(),
            porkrind_types:is_tuple()
        ]),
        M1
    ]).

%% Internal: adapt a list-oriented matcher so it accepts a tuple by
%% converting the tuple to a list before matching; the recorded args are
%% converted back to a tuple for accurate descriptions.
tuple_wrap(#'porkrind.matcher'{args = [Arg], match = Match} = M) ->
    M#'porkrind.matcher'{
        args = [list_to_tuple(Arg)],
        match = fun(Tuple) ->
            Match(tuple_to_list(Tuple))
        end
    }.
%%%
%%% Copyright 2017 RBKmoney
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
-module(mg_core_dirange_SUITE).
-include_lib("common_test/include/ct.hrl").
-include_lib("stdlib/include/assert.hrl").
-include_lib("proper/include/proper.hrl").
%% tests descriptions
-export([all/0]).
-export([init_per_suite/1]).
-export([end_per_suite/1]).
-export([direction_test/1]).
-export([size_test/1]).
-export([limit_test/1]).
-export([conjoin_test/1]).
-export([dissect_test/1]).
-export([intersect_test/1]).
-export([unify_test/1]).
-export([enumerate_test/1]).
-export([fold_test/1]).
-export([storage_test/1]).
%% tests
%%
%% tests descriptions
%%
-type test_name() :: atom().
-type config() :: [{atom(), _}].
%% List of every test case in this suite.
-spec all() -> [test_name()].
all() ->
    [
        direction_test,
        size_test,
        limit_test,
        dissect_test,
        conjoin_test,
        intersect_test,
        unify_test,
        enumerate_test,
        fold_test,
        storage_test
    ].

%% Suite setup: use the default formatter for the default logger handler.
-spec init_per_suite(config()) -> config().
init_per_suite(C) ->
    _ = logger:set_handler_config(default, formatter, {logger_formatter, #{}}),
    C.

%% Suite teardown: nothing to clean up.
-spec end_per_suite(config()) -> _.
end_per_suite(_) ->
    ok.
%% Direction is +1 for forward ranges, -1 for backward ranges (including
%% single-point backward ranges) and 0 for the undefined/empty range.
-spec direction_test(config()) -> _.
direction_test(_) ->
    ?assertEqual(+1, mg_core_dirange:direction(fw(1, 10))),
    ?assertEqual(-1, mg_core_dirange:direction(bw(10, 1))),
    ?assertEqual(-1, mg_core_dirange:direction(bw(42, 42))),
    ?assertEqual(0, mg_core_dirange:direction(undefined)),
    ?assert(
        check_property(
            % Reversal changes direction
            ?FORALL(
                R,
                range(),
                equals(
                    -mg_core_dirange:direction(R),
                    mg_core_dirange:direction(mg_core_dirange:reverse(R))
                )
            )
        )
    ).

%% Size counts the number of elements, independent of direction or sign.
-spec size_test(config()) -> _.
size_test(_) ->
    ?assertEqual(10, mg_core_dirange:size(fw(1, 10))),
    ?assertEqual(10, mg_core_dirange:size(bw(10, 1))),
    ?assertEqual(10, mg_core_dirange:size(fw(-10, -1))),
    ?assert(
        check_property(
            % Size is non-negative for every range
            ?FORALL(R, range(), mg_core_dirange:size(R) >= 0)
        )
    ),
    ?assert(
        check_property(
            % Reversal preserves size
            ?FORALL(
                R,
                range(),
                equals(mg_core_dirange:size(R), mg_core_dirange:size(mg_core_dirange:reverse(R)))
            )
        )
    ).

%% Limiting a range caps its size at the given limit.
-spec limit_test(config()) -> _.
limit_test(_) ->
    ?assert(
        check_property(
            % Size of limited range always under limit
            ?FORALL(
                {R, Limit},
                {range(), non_neg_integer()},
                mg_core_dirange:size(mg_core_dirange:limit(R, Limit)) =< Limit
            )
        )
    ).
%% Dissecting splits a range at a boundary into two sub-ranges that keep
%% the original direction, preserve total size, and can be conjoined back
%% into the original.
-spec dissect_test(config()) -> _.
dissect_test(_) ->
    % TODO
    % Technically this matching is opaqueness violation. To be a good guy with
    % dialyzer we probably should match on some exported representation. Still,
    % fine for now I guess.
    ?assertEqual({empty(), empty()}, mg_core_dirange:dissect(empty(), 42)),
    ?assertEqual({fw(1, 10), empty()}, mg_core_dirange:dissect(fw(1, 10), 42)),
    ?assertEqual({empty(), bw(10, 1)}, mg_core_dirange:dissect(bw(10, 1), 42)),
    ?assertEqual({bw(10, 1), empty()}, mg_core_dirange:dissect(bw(10, 1), 1)),
    ?assertEqual({bw(10, 2), bw(1, 1)}, mg_core_dirange:dissect(bw(10, 1), 2)),
    ?assert(
        check_property(
            % Dissection does not change direction
            ?FORALL({R, At}, {range(), integer()}, begin
                {R1, R2} = mg_core_dirange:dissect(R, At),
                mg_core_dirange:direction(R1) * mg_core_dirange:direction(R2) =/= -1
            end)
        )
    ),
    ?assert(
        check_property(
            % Dissection preserves range size
            ?FORALL({R, At}, {range(), integer()}, begin
                {R1, R2} = mg_core_dirange:dissect(R, At),
                equals(mg_core_dirange:size(R), mg_core_dirange:size(R1) + mg_core_dirange:size(R2))
            end)
        )
    ),
    ?assert(
        check_property(
            % Dissection is complemented by conjoining
            ?FORALL({R, At}, {range(), integer()}, begin
                {R1, R2} = mg_core_dirange:dissect(R, At),
                equals(R, mg_core_dirange:conjoin(R1, R2))
            end)
        )
    ).

%% Conjoining glues two adjacent same-direction ranges together; the empty
%% range is a neutral element, and non-adjacent or mixed-direction
%% arguments raise badarg.
-spec conjoin_test(config()) -> _.
conjoin_test(_) ->
    ?assertEqual(fw(1, 10), mg_core_dirange:conjoin(fw(1, 10), empty())),
    ?assertEqual(fw(1, 10), mg_core_dirange:conjoin(empty(), fw(1, 10))),
    ?assertEqual(bw(10, 1), mg_core_dirange:conjoin(bw(10, 10), bw(9, 1))),
    ?assertError(badarg, mg_core_dirange:conjoin(bw(10, 10), fw(1, 9))),
    ?assertError(badarg, mg_core_dirange:conjoin(bw(10, 9), bw(9, 1))).
%% Used to suppress warning for mg_core_dirange:intersect/2 error assertion
-dialyzer({nowarn_function, intersect_test/1}).
-spec intersect_test(config()) -> _.
intersect_test(_) ->
?assertEqual({empty(), empty(), empty()}, mg_core_dirange:intersect(empty(), fw(1, 10))),
?assertEqual({bw(10, 7), bw(6, 5), bw(4, 1)}, mg_core_dirange:intersect(bw(10, 1), fw(5, 6))),
?assertError(badarg, mg_core_dirange:intersect(fw(1, 10), empty())),
?assert(
check_property(
% Range intersects with itself with no left/right differences
?FORALL(
R,
nonempty_range(),
equals({empty(), R, empty()}, mg_core_dirange:intersect(R, R))
)
)
),
?assert(
check_property(
% Range intersects with reversal of itself with no left/right differences
?FORALL(
R,
nonempty_range(),
equals(
{empty(), R, empty()},
mg_core_dirange:intersect(R, mg_core_dirange:reverse(R))
)
)
)
),
?assert(
check_property(
% Left/right differences end up being only nonempty ranges when intersected with
% original range
?FORALL({R, With}, {range(), nonempty_range()}, begin
{LD, _, RD} = mg_core_dirange:intersect(R, With),
conjunction([
{strictly_left_diff,
equals({LD, empty(), empty()}, mg_core_dirange:intersect(LD, With))},
{strictly_right_diff,
equals({empty(), empty(), RD}, mg_core_dirange:intersect(RD, With))}
])
end)
)
),
?assert(
check_property(
% Intersection preserve range size
?FORALL({R0, RWith}, {range(), nonempty_range()}, begin
{RL, RI, RR} = mg_core_dirange:intersect(R0, RWith),
equals(
mg_core_dirange:size(R0),
lists:sum([mg_core_dirange:size(R) || R <- [RL, RI, RR]])
)
end)
)
),
?assert(
check_property(
% Intersection complemented by conjoing
?FORALL({R0, RWith}, {range(), nonempty_range()}, begin
{RL, RI, RR} = mg_core_dirange:intersect(R0, RWith),
equals(R0, mg_core_dirange:conjoin(mg_core_dirange:conjoin(RL, RI), RR))
end)
)
).
-spec unify_test(config()) -> _.
unify_test(_) ->
?assert(
check_property(
% Range is the same when unified with itself
?FORALL(
R,
range(),
equals(R, mg_core_dirange:unify(R, R))
)
)
),
?assert(
check_property(
% Unified range is no smaller than either of ranges
?FORALL(
{R0, R1},
?SUCHTHAT(
{R0, R1},
{range(), range()},
mg_core_dirange:direction(R0) == mg_core_dirange:direction(R1)
),
begin
RU = mg_core_dirange:unify(R0, R1),
conjunction([
{no_smaller_than_r0,
equals(mg_core_dirange:size(RU) >= mg_core_dirange:size(R0), true)},
{no_smaller_than_r1,
equals(mg_core_dirange:size(RU) >= mg_core_dirange:size(R1), true)}
])
end
)
)
).
-spec enumerate_test(config()) -> _.
enumerate_test(_) ->
?assertEqual([1, 2, 3, 4, 5], mg_core_dirange:enumerate(fw(1, 5))),
?assertEqual([5, 4, 3, 2, 1], mg_core_dirange:enumerate(bw(5, 1))),
?assert(
check_property(
% Enumeration preserves range size
?FORALL(
R,
range(),
equals(length(mg_core_dirange:enumerate(R)), mg_core_dirange:size(R))
)
)
),
?assert(
check_property(
% Enumeration preserves reversal
?FORALL(
R,
range(),
equals(
lists:reverse(mg_core_dirange:enumerate(R)),
mg_core_dirange:enumerate(mg_core_dirange:reverse(R))
)
)
)
),
?assert(
check_property(
% Enumeration preserves range bounds
?FORALL(R, nonempty_range(), begin
{A, B} = mg_core_dirange:bounds(R),
L = [H | _] = mg_core_dirange:enumerate(R),
conjunction([
{nonempty, length(L) > 0},
{starts_with_bound, equals(H, A)},
{ends_with_bound, equals(lists:last(L), B)}
])
end)
)
).
-spec fold_test(config()) -> _.
fold_test(_) ->
?assert(
check_property(
% Folding with right accumulation is indistinguishable from enumeration
?FORALL(
R,
range(),
equals(
mg_core_dirange:enumerate(R),
mg_core_dirange:fold(fun(E, L) -> L ++ [E] end, [], R)
)
)
)
).
-spec storage_test(config()) -> _.
storage_test(_) ->
?assert(
check_property(
% Restoring stored representation preserves original range
?FORALL(
R,
range(),
equals(R, mg_core_dirange:from_opaque(mg_core_dirange:to_opaque(R)))
)
)
).
-spec range() -> proper_types:raw_type().
range() ->
frequency([
{9, nonempty_range()},
{1, empty()}
]).
-spec nonempty_range() -> proper_types:raw_type().
nonempty_range() ->
?LET({A, B}, {integer(), integer()}, oneof([fw(A, B), bw(A, B)])).
-spec check_property(proper:test()) -> boolean().
check_property(Property) ->
OuterTest = proper:test_to_outer_test(Property),
proper:quickcheck(OuterTest, [{numtests, 1000}, nocolors]).
%%
-spec fw(T, T) -> mg_core_dirange:dirange(T).
fw(A, B) ->
mg_core_dirange:forward(A, B).
-spec bw(T, T) -> mg_core_dirange:dirange(T).
bw(A, B) ->
mg_core_dirange:backward(A, B).
-spec empty() -> mg_core_dirange:dirange(_).
empty() ->
mg_core_dirange:empty(). | test/mg_core_dirange_SUITE.erl | 0.586996 | 0.482368 | mg_core_dirange_SUITE.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(mango_cursor).
-export([
create/3,
explain/1,
execute/3,
maybe_filter_indexes/2
]).
-include_lib("couch/include/couch_db.hrl").
-include("mango.hrl").
-include("mango_cursor.hrl").
-define(SUPERVISOR, mango_cursor_sup).
create(Db, Selector0, Opts) ->
Selector = mango_selector:normalize(Selector0),
UsableIndexes = mango_idx:get_usable_indexes(Db, Selector0, Opts),
{use_index, IndexSpecified} = proplists:lookup(use_index, Opts),
case {length(UsableIndexes), length(IndexSpecified)} of
{0, 1} ->
?MANGO_ERROR({no_usable_index, selector_unsupported});
{0, 0} ->
AllDocs = mango_idx:special(Db),
create_cursor(Db, AllDocs, Selector, Opts);
_ ->
create_cursor(Db, UsableIndexes, Selector, Opts)
end.
explain(#cursor{}=Cursor) ->
#cursor{
index = Idx,
selector = Selector,
opts = Opts0,
limit = Limit,
skip = Skip,
fields = Fields
} = Cursor,
Mod = mango_idx:cursor_mod(Idx),
Opts = lists:keydelete(user_ctx, 1, Opts0),
{[
{dbname, mango_idx:dbname(Idx)},
{index, mango_idx:to_json(Idx)},
{selector, Selector},
{opts, {Opts}},
{limit, Limit},
{skip, Skip},
{fields, Fields}
] ++ Mod:explain(Cursor)}.
execute(#cursor{index=Idx}=Cursor, UserFun, UserAcc) ->
Mod = mango_idx:cursor_mod(Idx),
Mod:execute(Cursor, UserFun, UserAcc).
maybe_filter_indexes(Indexes, Opts) ->
case lists:keyfind(use_index, 1, Opts) of
{use_index, []} ->
Indexes;
{use_index, [DesignId]} ->
filter_indexes(Indexes, DesignId);
{use_index, [DesignId, ViewName]} ->
filter_indexes(Indexes, DesignId, ViewName)
end.
filter_indexes(Indexes, DesignId0) ->
DesignId = case DesignId0 of
<<"_design/", _/binary>> ->
DesignId0;
Else ->
<<"_design/", Else/binary>>
end,
FiltFun = fun(I) -> mango_idx:ddoc(I) == DesignId end,
lists:filter(FiltFun, Indexes).
filter_indexes(Indexes0, DesignId, ViewName) ->
Indexes = filter_indexes(Indexes0, DesignId),
FiltFun = fun(I) -> mango_idx:name(I) == ViewName end,
lists:filter(FiltFun, Indexes).
create_cursor(Db, Indexes, Selector, Opts) ->
[{CursorMod, CursorModIndexes} | _] = group_indexes_by_type(Indexes),
CursorMod:create(Db, CursorModIndexes, Selector, Opts).
group_indexes_by_type(Indexes) ->
IdxDict = lists:foldl(fun(I, D) ->
dict:append(mango_idx:cursor_mod(I), I, D)
end, dict:new(), Indexes),
% The first cursor module that has indexes will be
% used to service this query. This is so that we
% don't suddenly switch indexes for existing client
% queries.
CursorModules = case module_loaded(dreyfus_index) of
true ->
[mango_cursor_view, mango_cursor_text, mango_cursor_special];
false ->
[mango_cursor_view, mango_cursor_special]
end,
lists:flatmap(fun(CMod) ->
case dict:find(CMod, IdxDict) of
{ok, CModIndexes} ->
[{CMod, CModIndexes}];
error ->
[]
end
end, CursorModules). | src/mango_cursor.erl | 0.632389 | 0.516839 | mango_cursor.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% Maintain cluster membership and stability notifications for replications.
% On changes to cluster membership, broadcast events to `replication` gen_event.
% Listeners will get `{cluster, stable}` or `{cluster, unstable}` events.
%
% Cluster stability is defined as "there have been no nodes added or removed in
% last `QuietPeriod` seconds". QuietPeriod value is configurable. To ensure a
% speedier startup, during initialization there is a shorter StartupQuietPeriod
% in effect (also configurable).
%
% This module is also in charge of calculating ownership of replications based
% on where their _repicator db documents shards live.
-module(couch_replicator_rate_limiter_tables).
-export([
create/1,
tids/0,
term_to_table/1
]).
-define(SHARDS_N, 16).
-spec create(non_neg_integer()) -> ok.
create(KeyPos) ->
Opts = [named_table, public, {keypos, KeyPos}, {read_concurrency, true}],
[ets:new(list_to_atom(TableName), Opts) || TableName <- table_names()],
ok.
-spec tids() -> [atom()].
tids() ->
[list_to_existing_atom(TableName) || TableName <- table_names()].
-spec term_to_table(any()) -> atom().
term_to_table(Term) ->
PHash = erlang:phash2(Term),
list_to_existing_atom(table_name(PHash rem ?SHARDS_N)).
-spec table_names() -> [string()].
table_names() ->
[table_name(N) || N <- lists:seq(0, ?SHARDS_N - 1)].
-spec table_name(non_neg_integer()) -> string().
table_name(Id) when is_integer(Id), Id >= 0 andalso Id < ?SHARDS_N ->
atom_to_list(?MODULE) ++ "_" ++ integer_to_list(Id). | src/couch_replicator/src/couch_replicator_rate_limiter_tables.erl | 0.774839 | 0.53358 | couch_replicator_rate_limiter_tables.erl | starcoder |
-module(aoc2019_day22).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2019,
day = 22,
name = "<NAME>",
expected = {4775, 37889219674304},
has_input_file = true}.
-type instr() :: {cut, integer()} | {incr, integer()} | new.
-type input_type() :: [instr()].
-type result_type() :: integer().
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
Lines = string:tokens(binary_to_list(Binary), "\n\r"),
ToI = fun list_to_integer/1,
lists:map(fun(Line) ->
case string:tokens(Line, " ") of
["cut", Cut] -> {cut, ToI(Cut)};
["deal", "with", "increment", Incr] -> {incr, ToI(Incr)};
["deal", "into", "new", "stack"] -> new
end
end,
Lines).
-spec solve1(Input :: input_type()) -> result_type().
solve1(Input) ->
part1(Input).
-spec solve2(Input :: input_type()) -> result_type().
solve2(Input) ->
part2(Input).
part1(Instrs) ->
Deck = shuffle(Instrs, lists:seq(0, 10006)),
find_card(2019, Deck, 0).
find_card(_, [], _) ->
false;
find_card(Card, [Card | _], N) ->
N;
find_card(Card, [_ | Deck], N) ->
find_card(Card, Deck, N + 1).
shuffle([], Deck) ->
Deck;
shuffle([Instr | Instrs], Deck) ->
case Instr of
{cut, Cut} when Cut >= 0 ->
{L1, L2} = lists:split(Cut, Deck),
shuffle(Instrs, L2 ++ L1);
{cut, Cut} when Cut < 0 ->
{L1, L2} = lists:split(length(Deck) + Cut, Deck),
shuffle(Instrs, L2 ++ L1);
{incr, Incr} ->
Len = length(Deck),
{Positions, _} = lists:split(Len, lists:seq(0, Len * Incr, Incr)),
Tree =
lists:foldl(fun({Pos, Card}, Acc) ->
Index = Pos rem Len,
gb_trees:insert(Index, Card, Acc)
end,
gb_trees:empty(),
lists:zip(Positions, Deck)),
Deck0 = gb_trees:values(Tree),
shuffle(Instrs, Deck0);
new ->
shuffle(Instrs, lists:reverse(Deck))
end.
%% For part 2 we need some number theory which I was not able to
%% figure out myself. The code below is assembled from Python snippets
%% mostly from the day 22 reddit solution megathread.
part2(Instrs) ->
N = 101741582076661, %% Reps
D = 119315717514047, %% Deck size
X = 2020, %% The position we are interested in
%% Run the shuffling backwards twice to get Y and Z.
RInstrs = lists:reverse(Instrs),
Y = shuffle2(X, RInstrs, D),
Z = shuffle2(Y, RInstrs, D),
%% Apply number theory to compute what card eventually ends up in
%% position X.
A = mod((Y - Z) * modinv(X - Y + D, D), D),
B = mod(Y - A * X, D),
(powmod(A, N, D) * X + (powmod(A, N, D) - 1) * modinv(A - 1, D) * B) rem D.
%% Apply the shuffling rules to the card at position X.
shuffle2(X, [], _) ->
X;
shuffle2(X, [R | RInstrs], D) ->
case R of
{cut, Cut} ->
shuffle2((X + Cut + D) rem D, RInstrs, D);
{incr, Incr} ->
shuffle2(modinv(Incr, D) * X, RInstrs, D);
new ->
shuffle2(D - 1 - X, RInstrs, D)
end.
%% Remainder which handles negative numbers "correctly".
mod(X, Y) ->
mod0(X rem Y, Y).
mod0(M, _) when M >= 0 ->
M;
mod0(M, Y) ->
mod0(M + Y, Y).
%% Modular multiplicative inverse from
%% https://stackoverflow.com/a/9758173/13051
egcd(0, B) ->
{B, 0, 1};
egcd(A, B) ->
{G, Y, X} = egcd(mod(B, A), A),
{G, X - B div A * Y, Y}.
modinv(A, M) ->
{G, X, _} = egcd(A, M),
case G of
-1 ->
throw(mod_inv_does_not_exist);
_ ->
mod(X, M)
end.
%% Fast modular exponentiation from
%% https://gist.github.com/Radcliffe/e41b41a441deda19e7ac5731197f49be
powmod(A, B, M) ->
powmod(A, B, M, 1).
powmod(_, 0, _, R) ->
R;
powmod(A, B, M, R) when B rem 2 == 1 ->
powmod(A, B - 1, M, A * R rem M);
powmod(A, B, M, R) ->
powmod(A * A rem M, B div 2, M, R). | src/2019/aoc2019_day22.erl | 0.515132 | 0.649044 | aoc2019_day22.erl | starcoder |
%% @author <NAME> <<EMAIL>> [http://ferd.ca/]
%% @doc Zipper binary trees are binary trees which can be browsed
%% up and down, and left and right, much like binary trees where
%% child nodes link up to their parents.
%% An example use case could be to represent a decision tree that can be
%% rolled back.
%% Inserting a child either on the left or the right is of amortized
%% constant time, and so is deleting them.
%% Note that zippers are not search data structures.
%% @reference See <a href="http://ferd.ca/yet-another-article-on-zippers.html">
%% the related blog post</a> for more basic details on the concept of zippers
-module(zipper_bintrees).
-export([root/1, current/1, replace/2, right/1, left/1, top/1,
set_left_branch/2, set_right_branch/2, is_leaf/1]).
-export_type([zipper_bintree/0]).
-type node(A) :: undefined
| {fork, A, Left::node(A), Right::node(A)}.
-type choice(A) :: {left, A, node(A)}
| {right, A, node(A)}.
-type thread(A) :: [choice(A)].
%% @type zipper_bintree(). A zipper binary tree.
-type zipper_bintree() :: {thread(any()), node(any())}.
%% @doc Creates a basic binary zipper tree. Should be called first when
%% declaring the data structure
-spec root(term()) -> zipper_bintree().
root(A) -> {[], {fork, A, undefined, undefined}}.
%% @doc if the node has no child (both undefined) it is
%% considered to be a leaf node.
-spec is_leaf(zipper_bintree()) -> boolean().
is_leaf({_Thread, {fork, _, undefined, undefined}}) ->
true;
is_leaf({_Thread, {fork, _, _, _}}) ->
false.
%% @doc Fetches the value of the current position
-spec current(zipper_bintree()) -> {ok, Val::term()} | undefined.
current({_Thread, {fork, Val, _Left, _Right}}) -> {ok,Val};
current({_Thread, undefined}) -> undefined.
%% @doc Either replaces or create a new node (if it was <code>undefined</code>)
%% at the current position in the zipper binary tree.
-spec replace(Val::term(), zipper_bintree()) -> zipper_bintree().
replace(Val, {Thread, undefined}) ->
{Thread, {fork, Val, undefined, undefined}};
replace(Val, {Thread, {fork, _OldVal, L, R}}) ->
{Thread, {fork, Val, L, R}}.
%% @doc Moves down the tree one level, picking the right child.
%% If there is no right child, the function returns <code>undefined</code>.
-spec right(zipper_bintree()) -> zipper_bintree() | undefined.
right({Thread, {fork, Val, L, R}}) -> {[{right, Val, L}|Thread], R};
right({_Thread, undefined}) -> undefined.
%% @doc Moves down the tree one level, picking the left child.
%% If there is no left child, the function returns <code>undefined</code>.
-spec left(zipper_bintree()) -> zipper_bintree() | undefined.
left({Thread, {fork, Val, L, R}}) -> {[{left, Val, R}|Thread], L};
left({_Thread, undefined}) -> undefined.
%% @doc Moves back up one level. When doing so, it reassembles the
%% Current and Past parts of the trees as a complete node.
%% If there is no parent, the function returns <code>undefined</code>.
-spec top(zipper_bintree()) -> zipper_bintree() | undefined.
top({[{left, Val, R}|Thread], L}) -> {Thread, {fork, Val, L, R}};
top({[{right, Val, L}|Thread], R}) -> {Thread, {fork, Val, L, R}};
top({[], _Tree}) -> undefined.
%% @doc Shortcut function to add a left child
-spec set_left_branch(Val::term(), zipper_bintree()) -> zipper_bintree().
set_left_branch(A, Zipper) ->
top(replace(A, left(Zipper))).
%% @doc Shortcut function to add a right child
-spec set_right_branch(Val::term(), zipper_bintree()) -> zipper_bintree().
set_right_branch(A, Zipper) ->
top(replace(A, right(Zipper))). | src/zipper_bintrees.erl | 0.587352 | 0.586996 | zipper_bintrees.erl | starcoder |
%% @doc Read configuration from OS environment variables.
%%
%% This module privides means of mapping OS environment variables to
%% Lee configuration values. Values of environment variables are
%% parsed according to the following rules: Lee values of type
%% `string()' are used verbatim. Values of type `atom()' are
%% transformed using `list_to_atom/1' function and the rest of types
%% are parsed as Erlang terms.
-module(lee_os_env).
-export([ metamodel/0
, read/1
, read_to/2
, document_values/2
, meta_validate/4
]).
-include_lib("lee/src/framework/lee_internal.hrl").
-define(metatype, os_env).
%% @doc Metamodel module containing metatypes for reading
%% configuration from `eterm' files
%%
%% It defines the following metatype:
%% == os_env ==
%%
%% === Metaparameters ===
%% <ul><li>`os_env' of type `string()':
%% Environment variable mapping
%% </li>
%% </ul>
%%
%% === Depends on ===
%% {@link lee:base_metamodel/0 . value}
%%
%% === Example ===
%% ```
%% #{ home => {[value, os_env],
%% #{ os_env => "HOME"
%% , type => string()
%% }}
%% , path => {[value, os_env],
%% #{ os_env => "PATH"
%% , type => string()
%% }}
%% }'''
-spec metamodel() -> lee:lee_module().
metamodel() ->
#{ metatype =>
#{ ?metatype =>
{[metatype, documented]
, #{ doc_chapter_title => "OS Environment Variables"
, doc_gen => fun ?MODULE:document_values/2
, meta_validate => fun ?MODULE:meta_validate/4
}
}
}
}.
%% @private
-spec meta_validate(lee:model(), _, lee:key(), #mnode{}) ->
lee_lib:check_result().
meta_validate(_, _, Key, MNode) ->
lee_lib:inject_error_location(
Key,
lee_lib:validate_meta_attr( os_env
, typerefl:printable_latin1_list()
, MNode
)).
%% @doc Make a patch from OS environment variables
%% @throws {error, string()}
-spec read(lee:model()) -> lee:patch().
read(Model) ->
EnvVars = lee_model:get_metatype_index(?metatype, Model),
lists:foldl( fun(Key, Acc) ->
read_val(Model, Key, Acc)
end
, []
, EnvVars).
%% @doc Make a patch from OS environment variables and apply it to
%% data
%% @throws {error, string()}
-spec read_to(lee:model(), lee_storage:data()) -> lee_storage:data().
read_to(Model, Data) ->
Patch = read(Model),
lee_storage:patch(Data, Patch).
%% @private
read_val(Model, Key, Acc) ->
#mnode{metaparams = Attrs} = lee_model:get(Key, Model),
EnvVar = ?m_attr(?metatype, os_env, Attrs),
case os:getenv(EnvVar) of
false ->
Acc;
Value0 ->
case lee:from_string(Model, Key, Value0) of
{ok, Value} ->
[{set, Key, Value} | Acc];
{error, _} = Error ->
throw(Error)
end
end.
%% @private
-spec document_values(lee:model(), term()) -> lee_doc:doc().
document_values(Model, _Config) ->
#model{meta_class_idx = Idx} = Model,
Keys = maps:get(?metatype, Idx, []),
Fun = fun(Key) ->
MNode = lee_model:get(Key, Model),
#mnode{metaparams = Attrs} = MNode,
EnvVar = ?m_attr(?metatype, os_env, Attrs),
lee_doc:refer_value(Key, ?metatype, EnvVar, MNode)
end,
Intro = "<para>The following OS environment variables are used to
set configuration values. Values of type string() are
taken from OS environment variables verbatim, other types
are parsed as Erlang terms.</para>",
lee_doc:docbook(Intro) ++ lists:map(Fun, Keys). | src/application/lee_os_env.erl | 0.612194 | 0.550064 | lee_os_env.erl | starcoder |
%% Copyright (c) 2013-2014 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(rational_SUITE).
-include_lib("common_test/include/ct.hrl").
-compile(export_all).
all() ->
common:all().
groups() ->
common:groups(?MODULE).
adding_quarters_to_thirds_test(_Config) ->
Quarter = rational:new(1, 4),
Third = rational:new(1, 3),
Sum = rational:add(Quarter, Third),
7 = rational:numerator(Sum),
12 = rational:denominator(Sum).
adding_three_quarters_to_five_twelves_test(_Config) ->
A = rational:new(3, 4),
B = rational:new(5, 12),
Sum = rational:add(A, B),
56 = rational:numerator(Sum),
48 = rational:denominator(Sum).
multiply_two_thirds_by_three_quarters_test(_Config) ->
A = rational:new(2, 3),
B = rational:new(3, 4),
Product = rational:multiply(A, B),
6 = rational:numerator(Product),
12 = rational:denominator(Product).
simplify_test(_Config) ->
A = rational:new(63, 462),
Simplified = rational:simplify(A),
3 = rational:numerator(Simplified),
22 = rational:denominator(Simplified).
is_greater_than_test(_Config) ->
A = rational:new(3, 4),
B = rational:new(2, 4),
true = rational:is_greater_than(A, B),
true = rational:is_greater_or_equal(A, B),
false = rational:is_greater_than(B, A),
false = rational:is_greater_than(A, A),
true = rational:is_greater_or_equal(A, A).
is_equal_to_test(_Config) ->
A = rational:new(3, 4),
B = rational:new(2, 4),
C = rational:new(1, 2),
false = rational:is_equal_to(A, B),
false = rational:is_equal_to(B, A),
true = rational:is_equal_to(A, A),
true = rational:is_equal_to(B, B),
true = rational:is_equal_to(B, C),
true = rational:is_equal_to(C, B).
is_less_than_test(_Config) ->
A = rational:new(3, 4),
B = rational:new(2, 4),
false = rational:is_less_than(A, B),
true = rational:is_less_or_equal(B, A),
true = rational:is_less_than(B, A).
subtraction_test(_Config) ->
A = rational:new(2, 3),
B = rational:new(1, 2),
Difference = rational:subtract(A, B),
1 = rational:numerator(Difference),
6 = rational:denominator(Difference).
mixed_numbers_test(_Config) ->
A = rational:new(6),
B = rational:new(3, 4),
Product = rational:multiply(A, B),
18 = rational:numerator(Product),
4 = rational:denominator(Product).
reciprocal_test(_Config) ->
A = rational:new(3, 4),
Reciprocal = rational:reciprocal(A),
4 = rational:numerator(Reciprocal),
3 = rational:denominator(Reciprocal).
divide_test(_Config) ->
A = rational:new(1, 2),
B = rational:new(3, 4),
R = rational:divide(A, B),
4 = rational:numerator(R),
6 = rational:denominator(R).
six_from_float_test(_Config) ->
A = rational:from_float(6.0),
6 = rational:numerator(A),
1 = rational:denominator(A).
point_seven_five_from_float_test(_Config) ->
A = rational:from_float(0.75),
3 = rational:numerator(A),
4 = rational:denominator(A).
point_five_from_float_test(_Config) ->
A = rational:from_float(0.5),
1 = rational:numerator(A),
2 = rational:denominator(A).
greatest_common_divisor_test(_Config) ->
6 = rational:gcd(48, 18). | test/rational_SUITE.erl | 0.682362 | 0.631523 | rational_SUITE.erl | starcoder |
%% =============================================================================
%% bondy_cidr.erl -
%%
%% Copyright (c) 2016-2021 Leapsight. All rights reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% =============================================================================
-module(bondy_cidr).
-define(LOCAL_CIDRS, [
%% single class A network 10.0.0.0 – 10.255.255.255
{{10, 0, 0, 0}, 8},
%% 16 contiguous class B networks 172.16.0.0 – 172.31.255.255
{{172, 16, 0, 0}, 12},
%% 256 contiguous class C networks 192.168.0.0 – 192.168.255.255
{{192, 168, 0, 0}, 16}
]).
-type t() :: {inet:ip_address(), non_neg_integer()}.
-export([parse/1]).
-export([is_type/1]).
-export([mask/1]).
-export([anchor_mask/1]).
-export([match/2]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc Parses a binary string representation of a CIDR notation and returns its
%% erlang representation as a tuple `t()'.
%% Fails with a badarg exception if the binary `Bin' is not a valid input.
%% @end
%% -----------------------------------------------------------------------------
-spec parse(binary()) -> t() | no_return().
parse(Bin) when is_binary(Bin) ->
case re:split(Bin, "/", [{return, list}, {parts, 2}]) of
[Prefix, LenStr] ->
{ok, Addr} = inet:parse_address(Prefix),
{Maskbits, _} = string:to_integer(LenStr),
{Addr, Maskbits};
_ ->
error(badarg)
end;
parse(_) ->
error(badarg).
%% -----------------------------------------------------------------------------
%% @doc Returns `true' if term `Term' is a valid CIDR notation representation in
%% erlang. Otherwise returns `false'.
%% @end
%% -----------------------------------------------------------------------------
-spec is_type(Term :: binary()) -> t() | no_return().
is_type({IP, Maskbits})
when tuple_size(IP) == 4 andalso Maskbits >= 0 andalso Maskbits =< 32 ->
case inet:ntoa(IP) of
{error, einval} -> false;
_ -> true
end;
is_type({IP, Maskbits})
when tuple_size(IP) == 8 andalso Maskbits >= 0 andalso Maskbits =< 128 ->
case inet:ntoa(IP) of
{error, einval} -> false;
_ -> true
end;
is_type(_) ->
false.
%% -----------------------------------------------------------------------------
%% @doc Returns `true' if `Left' and `Right' are CIDR notation representations
%% in erlang and they match. Otherwise returns false.
%% @end
%% -----------------------------------------------------------------------------
-spec match(Left :: t(), Right :: t()) -> t() | no_return().
match({_, Maskbits} = Left, {_, Maskbits} = Right) ->
mask(Left) == mask(Right);
match(_, _) ->
false.
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec mask(t()) -> Subnet :: binary().
mask({{_, _, _, _} = Addr, Maskbits})
when Maskbits >= 0 andalso Maskbits =< 32 ->
B = list_to_binary(tuple_to_list(Addr)),
<<Subnet:Maskbits, _Host/bitstring>> = B,
Subnet;
mask({{A, B, C, D, E, F, G, H}, Maskbits})
when Maskbits >= 0 andalso Maskbits =< 128 ->
<<Subnet:Maskbits, _Host/bitstring>> = <<
A:16, B:16, C:16, D:16, E:16,F:16, G:16, H:16
>>,
Subnet.
%% -----------------------------------------------------------------------------
%% @doc returns the real bottom of a netmask. Eg if 192.168.1.1/16 is
%% provided, return 192.168.0.0/16
%% @end
%% -----------------------------------------------------------------------------
-spec anchor_mask(t()) -> t().
anchor_mask({Addr, Maskbits} = CIDR) when tuple_size(Addr) == 4 ->
M = mask(CIDR),
Rem = 32 - Maskbits,
<<A:8, B:8, C:8, D:8>> = <<M:Maskbits, 0:Rem>>,
{{A, B, C, D}, Maskbits};
anchor_mask({Addr, Maskbits} = CIDR) when tuple_size(Addr) == 8 ->
M = mask(CIDR),
Rem = 128 - Maskbits,
<<A:16, B:16, C:16, D:16, E:16, F:16, G:16, H:16>> = <<M:Maskbits, 0:Rem>>,
{{A, B, C, D, E, F, G, H}, Maskbits};
anchor_mask(_) ->
error(badarg). | apps/bondy/src/bondy_cidr.erl | 0.601125 | 0.51013 | bondy_cidr.erl | starcoder |
% BinaryVice - Improved Erlang Serialization
% Copyright (c) 2009 <NAME>
% See MIT-LICENSE for licensing information.
-module (vice_decode).
-export ([from_binary/2]).
from_binary(Schema, Binary) ->
% If it's a placeholder, then encode it.
% Otherwise, continue walking the structure.
case vice_utils:is_placeholder(Schema) of
true -> decode(Schema, Binary);
false ->
walk(Schema, Binary)
end.
walk(Schema, Binary) when is_list(Schema) ->
Length = length(Schema),
F = fun(_X, {Values, AccBinary, [S|AccSchema]}) ->
{Value, AccBinary1} = from_binary(S, AccBinary),
{[Value|Values], AccBinary1, AccSchema}
end,
{Values1, Binary1, _} = lists:foldl(F, {[], Binary, Schema}, lists:seq(1, Length)),
{lists:reverse(Values1), Binary1};
walk(Schema, Binary) when is_tuple(Schema) ->
{Values, Rest} = walk(tuple_to_list(Schema), Binary),
{list_to_tuple(Values), Rest};
%%% Didn't match anything, so ignore.
walk(Schema, Binary) ->
{Schema, Binary}.
%%% - DECODING - %%%
decode(atom@, B) ->
<<Size:16/integer, AtomName:Size/binary, Rest/binary>> = B,
{list_to_atom(binary_to_list(AtomName)), Rest};
decode(boolean@, <<B, Rest/binary>>) ->
{B == 1, Rest};
decode(binary@, B) -> decode({binary@, 4}, B);
decode({binary@, Size}, B) ->
BitSize = Size * 8,
<<Length:BitSize/integer, Binary:Length/binary, Rest/binary>> = B,
{Binary, Rest};
decode(bitstring@, B) -> decode({bitstring@, 4}, B);
decode({bitstring@, Size}, B) ->
BitSize = Size * 8,
<<Length:BitSize/integer, Rest/binary>> = B,
Padding = 8 - (Length rem 8),
<<BitString:Length/bits, 0:Padding, Rest1/binary>> = Rest,
{BitString, Rest1};
decode(integer@, B) -> decode({integer@, 4}, B);
decode({integer@, Size}, B) ->
BitSize = Size * 8 - 1,
<<IsNeg:1/integer, Value:BitSize/integer, Rest/binary>> = B,
Value1 = case IsNeg == 1 of
true -> bnot Value;
false -> Value
end,
{Value1, Rest};
decode(string@, B) -> decode({string@, 4}, B);
decode({string@, Size}, B) ->
{Binary, Rest} = decode({binary@, Size}, B),
{binary_to_list(Binary), Rest};
decode({list@, Schema}, B) ->
<<Length:32/integer, B1/binary>> = B,
F = fun(_, {Values, AccB}) ->
{Value, AccB1} = from_binary(Schema, AccB),
{[Value|Values], AccB1}
end,
{Values, Rest} = lists:foldl(F, {[], B1}, lists:seq(1, Length)),
{lists:reverse(Values), Rest};
decode({tuple@, Schema}, B) ->
{Values, Rest} = decode({list@, Schema}, B),
{list_to_tuple(Values), Rest};
decode(dict@, B) ->
{List, Rest} = decode(list@, B),
{dict:from_list(List), Rest};
decode({dict@, KeySchema, ValueSchema}, B) ->
<<Length:32/integer, B1/binary>> = B,
F = fun(_, {AccDict, AccB}) ->
{Key, AccB1} = from_binary(KeySchema, AccB),
{Value, AccB2} = from_binary(ValueSchema, AccB1),
{dict:store(Key, Value, AccDict), AccB2}
end,
{_Dict, _Rest} = lists:foldl(F, {dict:new(), B1}, lists:seq(1, Length));
decode(_Schema, B) ->
<<Size:16/integer, B1:Size/binary, Rest/binary>> = B,
{
binary_to_term(<<131, B1/binary>>),
Rest
}. | src/vice_decode.erl | 0.502441 | 0.411466 | vice_decode.erl | starcoder |
-module(ep_http).
-include("conversions.hrl").
-export([ url_decode/1
, url_encode/1
, etag/1
]).
%%
%% Taken from https://github.com/tim/erlang-percent-encoding/blob/master/src/percent.erl
%% Percent encoding/decoding as defined by the application/x-www-form-urlencoded
%% content type (http://www.w3.org/TR/html4/interact/forms.html#h-17.13.4.1).
%%
url_decode(Str) when is_binary(Str) ->
?l2b(url_decode(?b2l(Str), []));
url_decode(A) when is_atom(A) ->
url_decode(?a2l(A));
url_decode(Str) when is_list(Str) ->
url_decode(Str, []).
url_decode([$+ | T], Acc) ->
url_decode(T, [32 | Acc]);
url_decode([$%, A, B | T], Acc) ->
Char = (hexchr_decode(A) bsl 4) + hexchr_decode(B),
url_decode(T, [Char | Acc]);
url_decode([X | T], Acc) ->
url_decode(T, [X | Acc]);
url_decode([], Acc) ->
lists:reverse(Acc, []).
-define(is_alphanum(C), C >= $A, C =< $Z; C >= $a, C =< $z; C >= $0, C =< $9).
-type encodable() :: binary() | atom() | [byte()] | integer().
-spec url_encode(encodable()) -> string().
url_encode(Str) when is_binary(Str) ->
url_encode(binary_to_list(Str));
url_encode(Str) when is_list(Str) ->
url_encode(lists:reverse(Str, []), []);
url_encode(Str) when is_integer(Str) ->
url_encode(integer_to_list(Str));
url_encode(Str) when is_atom(Str) ->
url_encode(atom_to_list(Str)).
-spec url_encode(list(), list()) -> list().
url_encode([X | T], Acc) when ?is_alphanum(X); X =:= $-; X =:= $_; X =:= $. ->
url_encode(T, [X | Acc]);
url_encode([32 | T], Acc) ->
url_encode(T, [$+ | Acc]);
url_encode([X | T], Acc) ->
NewAcc = [$%, hexchr_encode(X bsr 4), hexchr_encode(X band 16#0f) | Acc],
url_encode(T, NewAcc);
url_encode([], Acc) ->
Acc.
etag(Term) ->
bin_to_hex(crypto:hash(sha, term_to_binary(Term))).
%% ==================================================================
%% Internal functions
%% ==================================================================
-compile({inline, [{hexchr_decode, 1}]}).
hexchr_decode(C) when C >= $a andalso C =< $f ->
C - $a + 10;
hexchr_decode(C) when C >= $A andalso C =< $F ->
C - $A + 10;
hexchr_decode(C) when C >= $0 andalso C =< $9 ->
C - $0;
hexchr_decode(_) ->
throw({ep_http, bad_input}).
-compile({inline, [{hexchr_encode, 1}]}).
hexchr_encode(N) when N >= 10 andalso N < 16 ->
N + $A - 10;
hexchr_encode(N) when N >= 0 andalso N < 10 ->
N + $0.
digit_to_hex(D) when (D >= 0) and (D < 10) ->
D + 48;
digit_to_hex(D) ->
D + 87.
bin_to_hex(Bin) ->
bin_to_hex(?b2l(Bin), []).
bin_to_hex([], Acc) ->
lists:reverse(Acc);
bin_to_hex([D | Ds], Acc) ->
bin_to_hex(Ds, [ digit_to_hex(D rem 16)
, digit_to_hex(D div 16) | Acc]). | src/ep_http.erl | 0.63443 | 0.435121 | ep_http.erl | starcoder |
%%%===================================================================
%%% @copyright 2019 Klarna Bank AB (publ)
%%%
%%% @doc This module defines a stateless stream processing node that
%%% combines {@link kflow_gen_map}, {@link kflow_gen_filter} and
%%% {@link kflow_gen_demux}.
%%%
%%% This behavior can be used in two modes: full and simplified. In
%%% simplified mode stream processing node is defined like following:
%%%
%%% ```{mfd, fun(Offset, Message) -> {true, Message} | {true, Route, Message} | false end}'''
%%%
%%% In full mode one has to create a callback module with
%%% `kflow_gen_mfd' behavior.
%%%
%%% `mfd' callback takes 3 arguments: first is offset of a message,
%%% second is the message itself and the third one is state of the
%%% callback module. This state is created in `init' callback and
%%% remains the same through the lifetime of the pipe. Return value of
%%% `mfd' callback should be of type {@link return_type/0}.
%%%
%%% `init' and `terminate' callbacks can be used e.g. when some
%%% resource should be obtained to process messages. Both callbacks
%%% are optional; configuration will be passed as is to `filter'
%%% callback when `init' is omitted.
%%%
%%% == Example ==
%%% ```
%%% -module(my_mfd).
%%%
%%% -behavior(kflow_gen_mfd).
%%%
%%% -export([init/1, filtermap/3, terminate/1]).
%%%
%%% init(Config) ->
%%% State = do_init(Config),
%%% State.
%%%
%%% mfd(Offset, Message, State) ->
%%% %% Apply `transform' to the message and pass it downstream:
%%% {true, transform(Message)};
%%% mfd(Offset, Message, State) ->
%%% %% Apply `transform' to the message and pass it to a substream `Route':
%%% {true, Route, transform(Message)};
%%% mfd(Offset, Message, State) ->
%%% %% Drop the message:
%%% false.
%%%
%%% terminate(State) ->
%%% do_cleanup(State).
%%% '''
%%%
%%% NOTE: Since state is immutable, it's actually shared between the
%%% routes.
%%%
%%% @end
%%%===================================================================
-module(kflow_gen_mfd).
-behavior(kflow_gen).
-include("kflow.hrl").
-include_lib("hut/include/hut.hrl").
-export([init/2, handle_message/3, handle_flush/2, terminate/2]).
-export_type([callback_fun/0, return_type/0]).
-type return_type() :: {true, Ret :: term()}
| {true, Route :: term(), Ret :: term()}
| false.
-callback init(_Config) -> _State.
-callback mfd(kflow:offset(), _DataIn, _State) -> return_type().
-callback terminate(_State) -> _.
-optional_callbacks([init/1, terminate/1]).
-type callback_fun() :: fun((kflow:offset(), _Message) -> return_type()).
-record(s1,
{ cb_module :: module()
, cb_state :: term()
}).
-record(s2,
{ function :: callback_fun()
}).
-type state() :: #s1{} | #s2{}.
%% @private
init(_NodeId, {?MODULE, Fun}) when is_function(Fun) ->
is_function(Fun, 2) orelse error({badarity, Fun}),
{ok, #s2{ function = Fun
}};
init(_NodeId, {CbModule, CbConfig}) ->
CbState = kflow_lib:optional_callback(CbModule, init, [CbConfig], CbConfig),
{ok, #s1{ cb_module = CbModule
, cb_state = CbState
}}.
%% @private
handle_message(Msg = #kflow_msg{hidden = true}, State, _) ->
%% Don't execute callback for a hidden message, simply pass it downstream:
{ok, [Msg], State};
handle_message(Msg0, State, _) ->
#kflow_msg{payload = Payload, offset = Offset, route = Route0} = Msg0,
Ret = case State of
#s1{cb_module = CbModule, cb_state = CbState} ->
CbModule:mfd(Offset, Payload, CbState);
#s2{function = Fun} ->
Fun(Offset, Payload)
end,
Msg = case Ret of
{true, NewPayload} ->
Msg0#kflow_msg{payload = NewPayload};
{true, Route, NewPayload} ->
Msg0#kflow_msg{payload = NewPayload, route = [Route|Route0]};
false ->
Msg0#kflow_msg{hidden = true}
end,
{ok, [Msg], State}.
%% @private
handle_flush(State, _) ->
{ok, [], State}.
%% @private
terminate(#s1{cb_state = CbState, cb_module = CbModule}, _) ->
kflow_lib:optional_callback(CbModule, terminate, [CbState]);
terminate(#s2{}, _) ->
ok. | src/framework/kflow_gen_mfd.erl | 0.588416 | 0.491517 | kflow_gen_mfd.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_jobs_server).
-behaviour(gen_server).
-export([
start_link/0,
get_notifier_server/1,
force_check_types/0
]).
-export([
init/1,
terminate/2,
handle_call/3,
handle_cast/2,
handle_info/2,
code_change/3
]).
-define(TYPE_CHECK_PERIOD_DEFAULT, 15000).
-define(MAX_JITTER_DEFAULT, 5000).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, nil, []).
get_notifier_server(Type) ->
case get_type_pid_refs(Type) of
{{_, _}, {NotifierPid, _}} ->
{ok, NotifierPid};
not_found ->
force_check_types(),
case get_type_pid_refs(Type) of
{{_, _}, {NotifierPid, _}} ->
{ok, NotifierPid};
not_found ->
{error, not_found}
end
end.
force_check_types() ->
gen_server:call(?MODULE, check_types, infinity).
init(_) ->
% If couch_jobs_server is after the notifiers and activity supervisor. If
% it restart, there could be some stale notifier or activity monitors. Kill
% those as later on we'd start new ones anyway.
reset_monitors(),
reset_notifiers(),
ets:new(?MODULE, [protected, named_table]),
check_types(),
schedule_check(),
{ok, nil}.
terminate(_, _St) ->
ok.
handle_call(check_types, _From, St) ->
check_types(),
{reply, ok, St};
handle_call(Msg, _From, St) ->
{stop, {bad_call, Msg}, {bad_call, Msg}, St}.
handle_cast(Msg, St) ->
{stop, {bad_cast, Msg}, St}.
handle_info(check_types, St) ->
check_types(),
schedule_check(),
{noreply, St};
handle_info({'DOWN', _Ref, process, Pid, Reason}, St) ->
LogMsg = "~p : process ~p exited with ~p",
couch_log:error(LogMsg, [?MODULE, Pid, Reason]),
{stop, {unexpected_process_exit, Pid, Reason}, St};
handle_info({Ref, ready}, St) when is_reference(Ref) ->
% Don't crash out couch_jobs_server and the whole application would need to
% eventually do proper cleanup in erlfdb:wait timeout code.
LogMsg = "~p : spurious erlfdb future ready message ~p",
couch_log:error(LogMsg, [?MODULE, Ref]),
{noreply, St};
handle_info(Msg, St) ->
{stop, {bad_info, Msg}, St}.
code_change(_OldVsn, St, _Extra) ->
{ok, St}.
check_types() ->
FdbTypes = fdb_types(),
EtsTypes = ets_types(),
ToStart = FdbTypes -- EtsTypes,
ToStop = EtsTypes -- FdbTypes,
lists:foreach(fun(Type) -> start_monitors(Type) end, ToStart),
lists:foreach(fun(Type) -> stop_monitors(Type) end, ToStop).
start_monitors(Type) ->
MonPidRef = case couch_jobs_activity_monitor_sup:start_monitor(Type) of
{ok, Pid1} -> {Pid1, monitor(process, Pid1)};
{error, Error1} -> error({failed_to_start_monitor, Type, Error1})
end,
NotifierPidRef = case couch_jobs_notifier_sup:start_notifier(Type) of
{ok, Pid2} -> {Pid2, monitor(process, Pid2)};
{error, Error2} -> error({failed_to_start_notifier, Type, Error2})
end,
ets:insert_new(?MODULE, {Type, MonPidRef, NotifierPidRef}).
stop_monitors(Type) ->
{{MonPid, MonRef}, {NotifierPid, NotifierRef}} = get_type_pid_refs(Type),
ok = couch_jobs_activity_monitor_sup:stop_monitor(MonPid),
demonitor(MonRef, [flush]),
ok = couch_jobs_notifier_sup:stop_notifier(NotifierPid),
demonitor(NotifierRef, [flush]),
ets:delete(?MODULE, Type).
reset_monitors() ->
lists:foreach(fun(Pid) ->
couch_jobs_activity_monitor_sup:stop_monitor(Pid)
end, couch_jobs_activity_monitor_sup:get_child_pids()).
reset_notifiers() ->
lists:foreach(fun(Pid) ->
couch_jobs_notifier_sup:stop_notifier(Pid)
end, couch_jobs_notifier_sup:get_child_pids()).
get_type_pid_refs(Type) ->
case ets:lookup(?MODULE, Type) of
[{_, MonPidRef, NotifierPidRef}] -> {MonPidRef, NotifierPidRef};
[] -> not_found
end.
ets_types() ->
lists:flatten(ets:match(?MODULE, {'$1', '_', '_'})).
fdb_types() ->
try
couch_jobs_fdb:tx(couch_jobs_fdb:get_jtx(), fun(JTx) ->
couch_jobs_fdb:get_types(JTx)
end)
catch
error:{timeout, _} ->
couch_log:warning("~p : Timed out connecting to FDB", [?MODULE]),
[]
end.
schedule_check() ->
Timeout = get_period_msec(),
MaxJitter = max(Timeout div 2, get_max_jitter_msec()),
Wait = Timeout + rand:uniform(max(1, MaxJitter)),
erlang:send_after(Wait, self(), check_types).
get_period_msec() ->
config:get_integer("couch_jobs", "type_check_period_msec",
?TYPE_CHECK_PERIOD_DEFAULT).
get_max_jitter_msec() ->
config:get_integer("couch_jobs", "type_check_max_jitter_msec",
?MAX_JITTER_DEFAULT). | src/couch_jobs/src/couch_jobs_server.erl | 0.578805 | 0.412353 | couch_jobs_server.erl | starcoder |
-module(aoc2021_day19).
-behavior(aoc_puzzle).
-export([parse/1, solve/1, info/0]).
-include("aoc_puzzle.hrl").
-include_lib("eunit/include/eunit.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2021,
day = 19,
name = "Beacon Scanner",
expected = {454, 10813},
has_input_file = true,
use_one_solver_fun = true}.
-type coord() :: {integer(), integer(), integer()}.
-type distance_vector() :: {integer(), integer(), integer()}.
-type scanner() :: {Id :: integer(), Coords :: [coord()]}.
-type input_type() :: [scanner()].
-type result_type() :: {integer(), integer()}.
scanner_id({Id, _}) ->
Id.
scanner_coords({_, Coords}) ->
Coords.
scanner(Id, Coords) ->
{Id, Coords}.
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
F = fun binary_to_integer/1,
Split = fun(B, C) -> binary:split(B, C, [trim_all, global]) end,
lists:map(fun(B) ->
[Header | Coords] = Split(B, <<"\n">>),
[_, _, ScannerNum, _] = Split(Header, <<" ">>),
{F(ScannerNum),
lists:map(fun(CoordBin) ->
[X, Y, Z] = Split(CoordBin, <<",">>),
{F(X), F(Y), F(Z)}
end,
Coords)}
end,
Split(Binary, <<"\n\n">>)).
-spec solve(Input :: input_type()) -> result_type().
solve(Scanners) ->
{MergedScanner, ScannerPositions} = merge(Scanners, [], rotation_funs()),
MaxManhattanDist =
lists:max([manhattan_dist(DV1, DV2) || DV1 <- ScannerPositions, DV2 <- ScannerPositions]),
{length(scanner_coords(MergedScanner)), MaxManhattanDist}.
-spec merge([scanner()], list(), [fun()]) -> {scanner(), list()}.
merge([A], ScannerPositions, _RotationFuns) ->
{A, ScannerPositions};
merge([A, B | Rest], ScannerPositions, RotationFuns) ->
case find_overlap(A, B, RotationFuns) of
false ->
% If the first two elements do not overlap, put the second one last,
% and retry with the next.
merge([A] ++ Rest ++ [B], ScannerPositions, RotationFuns);
{{Dx, Dy, Dz} = DV, RotatedB} ->
RemappedCoords =
lists:map(fun({X1, Y1, Z1}) -> {X1 + Dx, Y1 + Dy, Z1 + Dz} end,
scanner_coords(RotatedB)),
MergedCoords = lists:usort(RemappedCoords ++ scanner_coords(A)),
merge([scanner(scanner_id(A), MergedCoords) | Rest],
[DV | ScannerPositions],
RotationFuns)
end.
-spec distance_vector(coord(), coord()) -> distance_vector().
distance_vector({X0, Y0, Z0}, {X1, Y1, Z1}) ->
{X0 - X1, Y0 - Y1, Z0 - Z1}.
has_12_identical_elems([]) ->
false;
has_12_identical_elems([X, X, X, X, X, X, X, X, X, X, X, X | _]) ->
{true, X};
has_12_identical_elems([_ | Rest]) ->
has_12_identical_elems(Rest).
% Check if the set of scanner coordinates A and B overlap by at least 12 points.
% If so, return {DistanceVector, RotatedCoordinates}. Uses the process
% dictionary to cache rotations.
-spec find_overlap(A :: scanner(), B :: scanner(), RotationFuns :: [fun()]) ->
{DV :: distance_vector(), RotatedB :: scanner()} | false.
find_overlap(_, _, []) ->
false;
find_overlap(A, B, [RotationFun | Fs]) ->
Key = {scanner_id(B), RotationFun},
case get(Key) of
undefined ->
case do_find_overlap(A, B, RotationFun) of
false ->
find_overlap(A, B, Fs);
Result ->
put(Key, Result),
Result
end;
Result ->
Result
end.
-spec do_find_overlap(A :: scanner(), B :: scanner(), RotationFun :: fun()) ->
false | {DV :: distance_vector(), RotatedB :: scanner()}.
do_find_overlap(A, B, RotationFun) ->
RotatedBCoords = lists:map(fun(Coord) -> RotationFun(Coord) end, scanner_coords(B)),
DVs = [distance_vector(C1, C2) || C1 <- scanner_coords(A), C2 <- RotatedBCoords],
case has_12_identical_elems(lists:sort(DVs)) of
{true, DV} ->
{DV, scanner(scanner_id(B), RotatedBCoords)};
false ->
false
end.
manhattan_dist({X0, Y0, Z0}, {X1, Y1, Z1}) ->
abs(X0 - X1) + abs(Y0 - Y1) + abs(Z0 - Z1).
%% 24 possible rotations
%% Checked against https://github.com/mytbk/advent_of_code/blob/main/2021/19/positions-transforms.ads
rotation_funs() ->
[%% 1
fun({X, Y, Z}) -> {X, Y, Z} end,
fun({X, Y, Z}) -> {X, -Y, -Z} end,
fun({X, Y, Z}) -> {X, Z, -Y} end,
fun({X, Y, Z}) -> {X, -Z, Y} end,
fun({X, Y, Z}) -> {-X, Y, -Z} end,
%% 6
fun({X, Y, Z}) -> {-X, -Y, Z} end,
fun({X, Y, Z}) -> {-X, Z, Y} end,
fun({X, Y, Z}) -> {-X, -Z, -Y} end,
fun({X, Y, Z}) -> {Y, X, -Z} end,
fun({X, Y, Z}) -> {Y, -X, Z} end,
%% 11
fun({X, Y, Z}) -> {Y, Z, X} end,
fun({X, Y, Z}) -> {Y, -Z, -X} end,
fun({X, Y, Z}) -> {-Y, X, Z} end,
fun({X, Y, Z}) -> {-Y, -X, -Z} end,
fun({X, Y, Z}) -> {-Y, Z, -X} end,
%% 16
fun({X, Y, Z}) -> {-Y, -Z, X} end,
fun({X, Y, Z}) -> {Z, X, Y} end,
fun({X, Y, Z}) -> {Z, -X, -Y} end,
fun({X, Y, Z}) -> {Z, Y, -X} end,
fun({X, Y, Z}) -> {Z, -Y, X} end,
%% 21
fun({X, Y, Z}) -> {-Z, X, -Y} end,
fun({X, Y, Z}) -> {-Z, -X, Y} end,
fun({X, Y, Z}) -> {-Z, Y, X} end,
fun({X, Y, Z}) -> {-Z, -Y, -X} end].
%% Tests
-ifdef(TEST).
ex1_test() ->
%% Oh lord, erlfmt really fucks this up. Anyway, this is the big example for part 1.
Scanners =
parse(<<"--- scanner 0 ---\n404,-588,-901\n528,-643,409\n-838,591,734\n390,-6"
"75,-793\n-537,-823,-458\n-485,-357,347\n-345,-311,381\n-661,-816,-57"
"5\n-876,649,763\n-618,-824,-621\n553,345,-567\n474,580,667\n-447,-32"
"9,318\n-584,868,-557\n544,-627,-890\n564,392,-477\n455,729,728\n-892"
",524,684\n-689,845,-530\n423,-701,434\n7,-33,-71\n630,319,-379\n443,"
"580,662\n-789,900,-551\n459,-707,401\n\n--- scanner 1 ---\n686,422,5"
"78\n605,423,415\n515,917,-361\n-336,658,858\n95,138,22\n-476,619,847\n"
"-340,-569,-846\n567,-361,727\n-460,603,-452\n669,-402,600\n729,430,5"
"32\n-500,-761,534\n-322,571,750\n-466,-666,-811\n-429,-592,574\n-355"
",545,-477\n703,-491,-529\n-328,-685,520\n413,935,-424\n-391,539,-444\n"
"586,-435,557\n-364,-763,-893\n807,-499,-711\n755,-354,-619\n553,889,"
"-390\n\n--- scanner 2 ---\n649,640,665\n682,-795,504\n-784,533,-524\n"
"-644,584,-595\n-588,-843,648\n-30,6,44\n-674,560,763\n500,723,-460\n"
"609,671,-379\n-555,-800,653\n-675,-892,-343\n697,-426,-610\n578,704,"
"681\n493,664,-388\n-671,-858,530\n-667,343,800\n571,-461,-707\n-138,"
"-166,112\n-889,563,-600\n646,-828,498\n640,759,510\n-630,509,768\n-6"
"81,-892,-333\n673,-379,-804\n-742,-814,-386\n577,-820,562\n\n--- "
"scanner 3 ---\n-589,542,597\n605,-692,669\n-500,565,-823\n-660,373,5"
"57\n-458,-679,-417\n-488,449,543\n-626,468,-788\n338,-750,-386\n528,"
"-832,-391\n562,-778,733\n-938,-730,414\n543,643,-506\n-524,371,-870\n"
"407,773,750\n-104,29,83\n378,-903,-323\n-778,-728,485\n426,699,580\n"
"-438,-605,-362\n-469,-447,-387\n509,732,623\n647,635,-688\n-868,-804"
",481\n614,-800,639\n595,780,-596\n\n--- scanner 4 ---\n727,592,562\n"
"-293,-554,779\n441,611,-461\n-714,465,-776\n-743,427,-804\n-660,-479"
",-426\n832,-632,460\n927,-485,-438\n408,393,-506\n466,436,-512\n110,"
"16,151\n-258,-428,682\n-393,719,612\n-211,-452,876\n808,-476,-593\n-"
"575,615,604\n-485,667,467\n-680,325,-822\n-627,-443,-432\n872,-547,-"
"609\n833,512,582\n807,604,487\n839,-516,451\n891,-625,532\n-652,-548"
",-490\n30,-46,-14">>),
?assertEqual({79, 3621}, solve(Scanners)).
-endif. | src/2021/aoc2021_day19.erl | 0.57678 | 0.558387 | aoc2021_day19.erl | starcoder |
%%==============================================================================
%% Copyright 2020 Erlang Solutions Ltd.
%% Licensed under the Apache License, Version 2.0 (see LICENSE file)
%%==============================================================================
%% This module can be used directly only for the readonly env init parameters.
%% do not use it for the scenarios/helpers configuration, amoc_config module
%% must be used instead! This allows to provide configuration via REST API in
%% a JSON format
%%==============================================================================
-module(amoc_config_env).
-export([get/1, get/2, parse_value/1, format/2]).
-include_lib("kernel/include/logger.hrl").
%% ------------------------------------------------------------------
%% API
%% ------------------------------------------------------------------
-spec get(amoc_config:name()) -> amoc_config:value().
get(Name) ->
get(Name, undefined).
-spec get(amoc_config:name(), amoc_config:value()) -> amoc_config:value().
get(Name, Default) when is_atom(Name) ->
get_os_env(Name, Default).
-spec parse_value(string() | binary()) -> {ok, amoc_config:value()} | {error, any()}.
parse_value(Binary) when is_binary(Binary) ->
parse_value(binary_to_list(Binary));
parse_value(String) when is_list(String) ->
try
{ok, Tokens, _} = erl_scan:string(String ++ "."),
{ok, _} = erl_parse:parse_term(Tokens)
catch
_:E -> {error, E}
end.
-spec format(any(), binary) -> binary();
(any(), string) -> string().
format(Value, binary) ->
list_to_binary(format(Value, string));
format(Value, string) ->
lists:flatten(io_lib:format("~tp", [Value])).
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
-spec get_os_env(amoc_config:name(), amoc_config:value()) -> amoc_config:value().
get_os_env(Name, Default) ->
EnvName = os_env_name(Name),
Value = os:getenv(EnvName),
case parse_value(Value, Default) of
{ok, Term} -> Term;
{error, _} ->
?LOG_ERROR("cannot parse $~p value \"~p\", using default one", [EnvName, Value]),
Default
end.
-spec os_env_name(amoc_config:name()) -> string().
os_env_name(Name) when is_atom(Name) ->
"AMOC_" ++ string:uppercase(erlang:atom_to_list(Name)).
-spec parse_value(string() | false, any()) -> {ok, amoc_config:value()} | {error, any()}.
parse_value(false, Default) -> {ok, Default};
parse_value("", Default) -> {ok, Default};
parse_value(String, _) ->
parse_value(String). | src/amoc_config/amoc_config_env.erl | 0.52756 | 0.412944 | amoc_config_env.erl | starcoder |
-module(datetime_shift).
-export([
shift_days/1,
shift_months/1,
shift_hours/1,
shift_mins/1
]).
-define(SECONDS_IN_A_DAY, 86400).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Moves the received datetime number of days to the future or to the past
-spec shift_days(integer()) -> calendar:datetime().
shift_days(NumberOfDays) ->
Datetime = calendar:universal_time(),
Shift = ?SECONDS_IN_A_DAY * NumberOfDays,
Secs = calendar:datetime_to_gregorian_seconds(Datetime),
calendar:gregorian_seconds_to_datetime(Secs + Shift).
%% @doc Moves the received date number of months to the future or to the past
-spec shift_months(integer()) -> calendar:datetime().
shift_months(NumberOfMonths) ->
{{Y, M, D}, Time} = calendar:universal_time(),
%% in order for the modular arithmetic to work, months in this function range
%% from 0 to 11 (January to December)
TotalMonths = 12*Y + M-1 + NumberOfMonths,
case TotalMonths >= 0 of
true ->
Month = TotalMonths rem 12,
Year = (TotalMonths - Month) div 12,
%% add one back to the month to fix our tricky mod 12
{find_valid_date({Year, Month+1, D}), Time};
false ->
error(out_of_bounds)
end.
%% @doc Moves the received date number of hours to the future or to the past.
-spec shift_hours(integer()) -> calender:datetime().
shift_hours(NumberOfHours) ->
calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + NumberOfHours * 60 * 60.
%% @doc Moves the received date number of mins to the future or to the past.
-spec shift_mins(integer()) -> calender:datetime().
shift_mins(NumberOfMins) ->
calendar:datetime_to_gregorian_seconds(calendar:universal_time()) + NumberOfMins * 60.
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% @doc Returns `Date' if valid. Otherwise, returns `Date' replacing `Day` with the last day of the month.
find_valid_date(Date) ->
case calendar:valid_date(Date) of
true ->
Date;
false ->
{Y, M, _} = Date,
{Y, M, calendar:last_day_of_the_month(Y, M)}
end. | src/datetime_shift.erl | 0.554712 | 0.489992 | datetime_shift.erl | starcoder |
%% Copyright 2018 Erlio GmbH Basel Switzerland (http://erl.io)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_diversity_utils).
-compile([nowarn_export_all, export_all]).
%% @doc change modifiers into a canonical form so it can be run
%% through the modifier checker.
normalize_modifiers(auth_on_subscribe_m5, Mods) ->
Mods1 =
lists:map(
fun({topics, Topics}) ->
{topics, normalize_subscribe_topics(Topics)}
end, Mods),
maps:from_list(Mods1);
normalize_modifiers(auth_on_register_m5, Mods) ->
maps:from_list(Mods);
normalize_modifiers(auth_on_publish_m5, Mods) ->
maps:from_list(Mods);
normalize_modifiers(auth_on_subscribe, Mods) ->
normalize_subscribe_topics(Mods);
normalize_modifiers(_Hook, Mods) ->
Mods.
normalize_subscribe_topics(Topics0) ->
lists:map(
fun([T, [Q, SubOpts]]) ->
{T, {convert(Q), maps:from_list(SubOpts)}};
([T, Q]) ->
{T, convert(Q)}
end, Topics0).
convert(Val) when is_list(Val) ->
convert_list(Val, []);
convert(Val) when is_number(Val) ->
case round(Val) of
RVal when RVal == Val -> RVal;
_ -> Val
end;
convert(Val) when is_binary(Val) -> Val;
convert(Val) when is_boolean(Val) -> Val;
convert(nil) -> undefined.
convert_list([ListItem|Rest], Acc) ->
convert_list(Rest, [convert_list_item(ListItem)|Acc]);
convert_list([], Acc) -> lists:reverse(Acc).
convert_list_item({Idx, Val}) when is_integer(Idx) ->
%% lua array
convert(Val);
convert_list_item({BinKey, Val}) when is_binary(BinKey) ->
try list_to_existing_atom(binary_to_list(BinKey)) of
Key -> {Key, convert(Val)}
catch
_:_ ->
{BinKey, convert(Val)}
end.
%% map / unmap is currently only used by the mongodb
map(TableOrTables) ->
case map(TableOrTables, []) of
[Map] -> Map;
Maps -> Maps
end.
map([], []) -> map_([]);
map([{I, [{K, _}|_] = Doc}|Rest], Acc) when is_integer(I) and is_binary(K) ->
%% list of docs
map(Rest, [map_(Doc)|Acc]);
map([{K, _}|_] = Doc, Acc) when is_binary(K) ->
%% one doc
[map_(Doc)|Acc];
map([], Acc) -> lists:reverse(Acc).
map_([]) -> #{};
map_(Proplist) ->
lists:foldl(fun
({K, [{_, _}|_] = V}, AccIn) ->
maps:put(K, map_(V), AccIn);
({K, V}, AccIn) ->
maps:put(K, V, AccIn)
end, #{}, Proplist).
unmap(Map) when is_map(Map) ->
unmap(maps:to_list(Map), []);
unmap([Map|_] = Maps) when is_map(Map) ->
{_, Ret} =
lists:foldl(fun(M, {I, Acc}) ->
NextI = I + 1,
{NextI, [{NextI, unmap(M)}|Acc]}
end, {0, []}, Maps),
Ret.
unmap([{K, Map}|Rest], Acc) when is_map(Map) ->
unmap(Rest, [{K, unmap(Map)}|Acc]);
unmap([{K, [Map|_] = Maps}|Rest], Acc) when is_map(Map) ->
unmap(Rest, [{K, unmap(Maps)}|Acc]);
unmap([{K, V}|Rest], Acc) ->
unmap(Rest, [{K, V}|Acc]);
unmap([], Acc) -> lists:reverse(Acc).
int(I) when is_integer(I) -> I;
int(I) when is_number(I) -> round(I).
str(S) when is_list(S) -> S;
str(S) when is_binary(S) -> binary_to_list(S).
ustr(undefined) -> undefined;
ustr(S) -> str(S).
atom(A) when is_atom(A) -> A;
atom(A) -> list_to_atom(str(A)). | apps/vmq_diversity/src/vmq_diversity_utils.erl | 0.566858 | 0.482673 | vmq_diversity_utils.erl | starcoder |
%%
%% @doc Galois Fields and
%% Galois Counter Mode of Operation (GCM).
%%
%% ❗Attention: This module is for demonstration purposes only.
%% In production one should use `crypto' module from the standard library.
%%
%% @reference [1] <NAME>., <NAME>. <em>The Galois/Counter Mode of Operation (GCM)</em>
%% @reference [[https://dl.acm.org/citation.cfm?id=2206251 2]] <NAME>. <em>Galois/Counter Mode (GCM) and GMAC</em>
%%
-module(galois).
-export([gcm/5, ghash/3, gmac/3, mult/2]).
-import(bin, [lxor/2]).
-import(crypto, [exor/2]).
-import(maths, [mod/2]).
% http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.694.695&rep=rep1&type=pdf
-define(R, <<2#11100001:8, 0:120>>).
%%
%% @doc Multiplication in `GF(2^128)'.
%%
%% See [1] Algorithm 1.
%%
%% Classic shift-and-add (here shift-and-XOR): the bits of `Y' are
%% consumed most-significant-bit first; whenever the current bit is 1
%% the running product `Z' is XORed with `V', and after every bit `V'
%% is "multiplied by x", i.e. shifted right one bit with a conditional
%% reduction by the field constant ?R when a 1 bit falls off the end.
%%
-spec mult(binary(), binary()) -> binary().
%% Entry point: start the accumulation from a zero product.
mult(X, Y) -> mult(<<0:128>>, X, Y).

%% All 128 bits of Y consumed: Z holds the final product.
mult(Z, _, <<>>) -> Z;
%% Head bit of Y is 0: only shift V.
mult(Z, V, <<0:1, _/bitstring>> = Y) -> mult_shift(Z, V, Y);
%% Head bit of Y is 1: fold V into the product, then shift.
mult(Z, V, <<1:1, _/bitstring>> = Y) -> mult_shift(exor(Z, V), V, Y).

%% Shift V one bit to the right and consume the head bit of Y. If the
%% dropped low bit of V was 1, reduce modulo the field polynomial by
%% XORing with ?R (11100001 || 0^120, see [1]).
mult_shift(Z, <<V:127, 0:1>>, <<_:1, Y/bitstring>>) -> mult(Z, <<0:1, V:127>>, Y);
mult_shift(Z, <<V:127, 1:1>>, <<_:1, Y/bitstring>>) -> mult(Z, exor(<<0:1, V:127>>, ?R), Y).
%%
%% @doc GHASH over additional data `A' and ciphertext `C' as defined by
%% [1](2): both inputs are zero-padded and concatenated together with
%% their bit lengths, then hashed under the hash subkey `H'.
%%
ghash(H, A, C) ->
    Padded = join(A, C),
    ghash(H, Padded).
%% Build the GHASH input block sequence per [1](2):
%% A || 0^v || C || 0^u || len(A)_64 || len(C)_64,
%% where the zero runs pad each part up to a 128-bit boundary.
join(A, C) ->
    ALen = bit_size(A),
    CLen = bit_size(C),
    APad = (128 - ALen rem 128) rem 128,
    CPad = (128 - CLen rem 128) rem 128,
    <<A/binary, 0:APad, C/binary, 0:CPad, ALen:64, CLen:64>>.
%%
%% @doc GHASH function as defined by [2](Algorithm 2).
%%
%% Folds the 16-byte blocks of `Data' into a 128-bit accumulator: each
%% step XORs the next block into the accumulator and multiplies the
%% result by `H' in GF(2^128). A trailing partial block (< 128 bits)
%% is silently ignored, exactly as the bitstring generator does.
%%
ghash(H, Data) ->
    Chunks = [Chunk || <<Chunk:16/binary>> <= Data],
    Step = fun(Chunk, Acc) -> mult(exor(Acc, Chunk), H) end,
    lists:foldl(Step, <<0:128>>, Chunks).
%%
%% @doc AES-256-GCM implementation as specified by [1](1).
%%
%% `K' is the 256-bit AES key, `IV' the initialization vector, `A' the
%% additional authenticated data, `P' the plaintext and `TLength' the
%% desired authentication-tag length in bytes. Returns
%% `{CipherText, CipherTag}'.
%%
gcm(K, IV, A, P, TLength) ->
    %% Single-block AES-256 encryption under K.
    EK = fun(X) -> enc(K, X) end,
    %% Hash subkey H = E_K(0^128), used by GHASH.
    H = EK(<<0:128>>),
    %% Pre-counter block Y0: reserved for the tag computation; the
    %% keystream itself starts from incr(Y0).
    Y0 = init_counter(IV, H),
    %% CTR-mode encryption: XOR each 16-byte plaintext chunk with the
    %% encrypted, incremented counter block.
    %% NOTE(review): lxor is bin:lxor/2 (project module); it appears to
    %% truncate to the shorter operand, which is what makes the final
    %% partial block and the empty-plaintext case work -- confirm
    %% against bin.erl.
    {_, CipherText} = lists:foldl(
        fun(B, {Y, <<C/binary>>}) ->
            Yi = incr(Y),
            {Yi, <<C/binary, (lxor(B, EK(Yi)))/binary>>}
        end,
        {Y0, <<>>},
        blocks(P, 16)),
    %% Tag = GHASH(H, A, C) XOR E_K(Y0), truncated to TLength bytes.
    CipherTag = lxor(ghash(H, A, CipherText), EK(Y0)),
    {CipherText, <<CipherTag:TLength/binary>>}.
%% Split `P' into BlockSize-byte chunks; the last element is the
%% remaining partial chunk, if any.
%%
%% Fix: the previous version always appended the tail slice, which was
%% `<<>>' whenever byte_size(P) was a multiple of BlockSize (including
%% empty P). That spurious empty chunk cost the GCM loop one extra
%% counter increment per call and only worked because lxor truncates.
%% Also uses byte_size/1 instead of the discouraged size/1.
blocks(P, BlockSize) ->
    Full = [X || <<X:BlockSize/binary>> <= P],
    case byte_size(P) rem BlockSize of
        0 -> Full;
        Rem -> Full ++ [binary_part(P, byte_size(P), -Rem)]
    end.
%%
%% @doc GMAC: the authentication-only mode of GCM. Runs GCM with an
%% empty plaintext, so no ciphertext is produced, and returns the
%% 16-byte authentication tag over the additional data `A'.
%%
gmac(K, Nonce, A) ->
    {CipherText, Tag} = gcm(K, Nonce, A, <<>>, 16),
    %% Encrypting the empty plaintext can only yield an empty result.
    <<>> = CipherText,
    Tag.
%
% Utility functions
%
%% Derive the pre-counter block Y0 from the IV: a 96-bit IV is simply
%% extended with a 32-bit block counter set to 1; any other IV length
%% is compressed down to 128 bits with GHASH under the hash subkey H.
init_counter(<<IV:12/binary>>, _H) -> <<IV/binary, 1:32>>;
init_counter(IV, H) -> ghash(H, <<>>, IV).
incr(<<Nonce:12/binary, Counter:32>>) -> <<Nonce/binary, (Counter + 1):32>>.
%% Raw single-block AES-256 encryption (ECB, one 16-byte block) under
%% the 32-byte key `Key'.
enc(Key, Block) -> crypto:crypto_one_time(aes_256_ecb, Key, Block, true).
-include_lib("eunit/include/eunit.hrl").
%% Sanity check: gcm/5 must agree with the OTP crypto implementation
%% of AES-256-GCM (crypto_one_time_aead/6 also returns {CipherText, Tag}).
my_aes_gcm_is_equivalent_to_erlang_aes_gcm_test() ->
    K = <<16#92e11dcdaa866f5ce790fd24501f92509aacf4cb8b1339d50c9c1240935dd08b:256>>,
    A = <<16#1e0889016f67601c8ebea4943bc23ad6:128>>,  %% additional authenticated data
    P = <<16#2d71bcfa914e4ac045b2aa60955fad24:128>>,  %% plaintext
    IV = <<16#ac93a1a6145299bde902f21a:96>>,          %% 96-bit IV (fast counter path)
    ?assertEqual(crypto:crypto_one_time_aead(aes_256_gcm, K, IV, P, A, true), gcm(K, IV, A, P, 16)).
%% GMAC vector: authentication-only, full 16-byte tag over the AAD.
gmac_test() ->
    K = <<16#8000000000000000000000000000000000000000000000000000000000000001:256>>,
    A = <<16#4d4143732061726520766572792075736566756c20696e2063727970746f67726170687921:296>>,
    Nonce = <<16#000000000000000000000001:96>>,
    ?assertEqual(<<16#34B025A57D99315120912DEFBFE329C3:128>>, gmac(K, Nonce, A)).
%
% Test vectors from
% http://www.ieee802.org/1/files/public/docs2011/bn-randall-test-vectors-0511-v1.pdf
%
%% GHASH with both AAD and ciphertext present.
ghash_1_test() ->
    ?assertEqual(<<16#A4C350FB66B8C960E83363381BA90F50:128>>,
                 ghash(<<16#73A23D80121DE2D5A850253FCF43120E:128>>,
                       <<16#D609B1F056637A0D46DF998D88E52E00:128,
                         16#B2C2846512153524C0895E81:96>>,
                       <<16#701AFA1CC039C0D765128A665DAB6924:128,
                         16#3899BF7318CCDC81C9931DA17FBE8EDD:128,
                         16#7D17CB8B4C26FC81E3284F2B7FBA713D:128>>)).
%% GHASH with AAD only (empty ciphertext); AAD is not 128-bit aligned,
%% exercising the zero-padding in join/2.
ghash_2_test() ->
    ?assertEqual(<<16#F02428563BB7E67C378044C874498FF8:128>>,
                 ghash(<<16#E4E01725D724C1215C7309AD34539257:128>>,
                       <<16#E20106D7CD0DF0761E8DCD3D88E54000:128,
                         16#76D457ED08000F101112131415161718:128,
                         16#191A1B1C1D1E1F202122232425262728:128,
                         16#292A2B2C2D2E2F303132333435363738:128,
                         16#393A0003:32>>,
                       <<>>)).
%
% Test vectors from
% http://csrc.nist.gov/groups/STM/cavp/documents/mac/gcmtestvectors.zip
%
% Naming: nist_<keybits>_<ivbits>_<ptbits>_<aadbits>_<tagbits>_test.
%% Empty plaintext, empty AAD: tag-only output.
nist_256_96_0_0_128_test() ->
    K = <<16#b52c505a37d78eda5dd34f20c22540ea1b58963cf8e5bf8ffa85f9f2492505b4:256>>,
    A = <<>>,
    P = <<>>,
    IV = <<16#516c33929df5a3284ff463d7:96>>,
    {CipherText, CipherTag} = gcm(K, IV, A, P, 16),
    ?assertEqual(<<>>, CipherText),
    ?assertEqual(<<16#BDC1AC884D332457A1D2664F168C76F0:128>>, CipherTag).
%% Empty plaintext with 128-bit AAD (GMAC-like case).
nist_256_96_0_128_128_test() ->
    K = <<16#78dc4e0aaf52d935c3c01eea57428f00ca1fd475f5da86a49c8dd73d68c8e223:256>>,
    A = <<16#b96baa8c1c75a671bfb2d08d06be5f36:128>>,
    P = <<>>,
    IV = <<16#d79cf22d504cc793c3fb6c8a:96>>,
    {CipherText, CipherTag} = gcm(K, IV, A, P, 16),
    ?assertEqual(<<>>, CipherText),
    ?assertEqual(<<16#3E5D486AA2E30B22E040B85723A06E76:128>>, CipherTag).
%% 104-bit plaintext: exercises the final partial (non 16-byte) block.
nist_256_96_104_0_128_test() ->
    K = <<16#82c4f12eeec3b2d3d157b0f992d292b237478d2cecc1d5f161389b97f999057a:256>>,
    A = <<>>,
    P = <<16#982a296ee1cd7086afad976945:104>>,
    IV = <<16#7b40b20f5f397177990ef2d1:96>>,
    {CipherText, CipherTag} = gcm(K, IV, A, P, 16),
    ?assertEqual(<<16#ec8e05a0471d6b43a59ca5335f:104>>, CipherText),
    ?assertEqual(<<16#113ddeafc62373cac2f5951bb9165249:128>>, CipherTag).
%% Exactly one 128-bit plaintext block, no AAD.
nist_256_96_128_0_128_test() ->
    K = <<16#31bdadd96698c204aa9ce1448ea94ae1fb4a9a0b3c9d773b51bb1822666b8f22:256>>,
    A = <<>>,
    P = <<16#2db5168e932556f8089a0622981d017d:128>>,
    IV = <<16#0d18e06c7c725ac9e362e1ce:96>>,
    {CipherText, CipherTag} = gcm(K, IV, A, P, 16),
    ?assertEqual(<<16#FA4362189661D163FCD6A56D8BF0405A:128>>, CipherText),
    ?assertEqual(<<16#D636AC1BBEDD5CC3EE727DC2AB4A9489:128>>, CipherTag).
%% One plaintext block plus one AAD block.
nist_256_96_128_128_128_test() ->
    K = <<16#92e11dcdaa866f5ce790fd24501f92509aacf4cb8b1339d50c9c1240935dd08b:256>>,
    A = <<16#1e0889016f67601c8ebea4943bc23ad6:128>>,
    P = <<16#2d71bcfa914e4ac045b2aa60955fad24:128>>,
    IV = <<16#ac93a1a6145299bde902f21a:96>>,
    {CipherText, CipherTag} = gcm(K, IV, A, P, 16),
    ?assertEqual(<<16#8995AE2E6DF3DBF96FAC7B7137BAE67F:128>>, CipherText),
    ?assertEqual(<<16#ECA5AA77D51D4A0A14D9C51E1DA474AB:128>>, CipherTag).
nist_256_96_408_160_120_test() ->
K = <<16#f16202e6f3a04244cea18292f570217e3152571017801bcb6460d8f0a9a61a8b:256>>,
A = <<16#dd288bd757da22c1f05b639e84dc554fc8c7c620:160>>,
P = <<16#f7c12daf7faec4e66e15079c1dd4ed6123ba2ca63e3b4f342fccc33f57218860b6abf3cfe6440bc2f67d89e3ddd06452ef76ee:408>>,
IV = <<16#4fd8084392ac2e241d13477c:96>>,
{CipherText, CipherTag} = gcm(K, IV, A, P, 15),
?assertEqual(<<16#71060f9a2f04568c32db3e52744df78c1bbc38d90616ecc8626049fe8f80988d9ca47bc116f031117d6d269b05df8a876234df:408>>, CipherText),
?assertEqual(<<16#7f1f0e4c113549c462e65709403ab8:120>>, CipherTag).
%
% Auxiliary tests
%
% Manually compose a one-block GCM encryption out of the primitives
% (enc/2, exor/2, mult/2) and check it against the crypto library:
% C1 = P xor E(K, Y1); tag = E(K, Y0) xor GHASH over A, C1 and the
% bit-length block (len(A)=128, len(C)=128, each as a 64-bit integer).
manual_gcm_one_block_test() ->
K = <<16#92e11dcdaa866f5ce790fd24501f92509aacf4cb8b1339d50c9c1240935dd08b:256>>,
A = <<16#1e0889016f67601c8ebea4943bc23ad6:128>>,
P = <<16#2d71bcfa914e4ac045b2aa60955fad24:128>>,
IV = <<16#ac93a1a6145299bde902f21a:96>>,
H = enc(K, <<0:128>>),
Y0 = <<IV/binary, 1:32>>,
Y1 = <<IV/binary, 2:32>>,
C1 = exor(P, enc(K, Y1)),
H1 = exor(C1, mult(A, H)),
H2 = exor(mult(H1, H), <<128:64, 128:64>>),
T = exor(enc(K, Y0), mult(H2, H)),
?assertEqual({C1, T}, crypto:crypto_one_time_aead(aes_256_gcm, K, IV, P, A, true)).
% Sanity check of the crypto library itself against the NIST expected values.
aes_gcm_test() ->
K = <<16#92e11dcdaa866f5ce790fd24501f92509aacf4cb8b1339d50c9c1240935dd08b:256>>,
A = <<16#1e0889016f67601c8ebea4943bc23ad6:128>>,
P = <<16#2d71bcfa914e4ac045b2aa60955fad24:128>>,
IV = <<16#ac93a1a6145299bde902f21a:96>>,
{CipherText, CipherTag} = crypto:crypto_one_time_aead(aes_256_gcm, K, IV, P, A, true),
?assertEqual(<<16#8995AE2E6DF3DBF96FAC7B7137BAE67F:128>>, CipherText),
?assertEqual(<<16#ECA5AA77D51D4A0A14D9C51E1DA474AB:128>>, CipherTag).
% GF(2^128) multiplication known-answer test.
mult_test() ->
?assertEqual(<<16#A3B928F1CECEBA0F612BFEEBE5AEA0E1:128>>,
mult(<<16#b96baa8c1c75a671bfb2d08d06be5f36:128>>, <<16#1e0889016f67601c8ebea4943bc23ad6:128>>)).
% Bitwise XOR of two equal-length binaries.
exor_test() ->
?assertEqual(<<2#0100000000011000:16>>, exor(<<2#1110101010110010:16>>, <<2#1010101010101010:16>>)).
%% Copyright 2015 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(fn_to_erl).
-export([ast_to_ast/2, to_erl/2, add_error/4]).
-include("efene.hrl").
% Fresh compiler state for Module: error/warning accumulators, collected
% module attributes, and the current nesting level (0 = module top level).
new_state(Module) -> #{module => Module, errors => [], warnings => [], attrs => [], level => 0}.
% Entry point: translate an efene AST into Erlang abstract forms for Module.
to_erl(Ast, Module) -> ast_to_ast(Ast, new_state(Module)).
%% @doc Translate one efene AST node (or a list of nodes) into the
%% equivalent Erlang abstract-format term. Returns `{ErlAst, State}' where
%% `State' threads the accumulated errors, warnings and attributes through
%% the traversal. Clauses that match `#{level := 0}' handle module-level
%% forms; the remaining clauses handle expressions.
ast_to_ast(Nodes, State) when is_list(Nodes) -> ast_to_ast(Nodes, [], State);
%-export([...]).
ast_to_ast({attr, Line, [?Atom(export=Name)], Params, noresult}, #{level := 0}=State) ->
    export_like_to_ast(Name, Line, Params, State);
%-export_type([...]).
ast_to_ast({attr, Line, [?Atom(export_type=Name)], Params, noresult}, #{level := 0}=State) ->
    export_like_to_ast(Name, Line, Params, State);
ast_to_ast({attr, Line, [?Atom(AttrName)], [?Atom(BName)], noresult}, #{level := 0}=State)
        when AttrName == behavior orelse AttrName == behaviour ->
    R = {attribute, Line, AttrName, BName},
    {R, State};
% top level function
ast_to_ast(?E(Line, fn, {Name, Attrs, ?E(_CLine, 'case', Cases)}), #{level := 0}=State) ->
    % the first case determines the function arity; every other case must
    % agree with it (checked below by check_case_arities_equal/3)
    [FirstCase|_TCases] = Cases,
    {cmatch, _FCLine, {FCCond, _FCWhen, _FCBody}} = FirstCase,
    Arity = length(FCCond),
    {ok, FixedCases} = expand_case_else_match(Cases),
    StateLevel1 = State#{level => 1},
    {EFixedCases, State1} = ast_to_ast(FixedCases, StateLevel1),
    BareName = unwrap(Name),
    EFn = {function, Line, BareName, Arity, EFixedCases},
    FnRef = {Name, Arity},
    % a @spec attribute, if present, is compiled alongside the function
    {R, RestAttrs, State2} = case extract_spec_attr(FnRef, Attrs, [], nil, State1) of
                                 {found, ESpecAttr, IRestAttrs, State21} ->
                                     {[EFn, ESpecAttr], IRestAttrs, State21};
                                 {notfound, IRestAttrs, State21} ->
                                     {EFn, IRestAttrs, State21}
                             end,
    State3 = add_attributes(State2, fn, Line, {BareName, Arity}, Attrs),
    State4 = add_attributes(State3, fn_attrs, Line, {BareName, Arity}, RestAttrs),
    State5 = check_case_arities_equal(Cases, State4, Arity),
    {R, State5#{level => 0}};
% record declaration
ast_to_ast({attr, Line, [?Atom(record)], [?Atom(RecordName)], ?S(_TLine, tuple, Fields)},
           #{level := 0}=State) ->
    {FieldsAndTypes, State1} = lists:mapfoldl(fun to_record_field_decl/2,
                                              State#{level => 1}, Fields),
    % split plain fields from fields that also carry a type annotation
    {RFields, RTypes} = lists:foldl(fun ({type, Field, Type}, {Fs, Ts}) ->
                                            {[Field|Fs], [Type|Ts]};
                                        (Field, {Fs, Ts}) ->
                                            {[Field|Fs], Ts}
                                    end, {[], []}, FieldsAndTypes),
    R = {attribute, Line, record, {RecordName, lists:reverse(RFields)}},
    maybe_type_record(R, Line, RecordName, RTypes, State1#{level => 0});
% type and opaque attributes: both params and a result type are mandatory
ast_to_ast({attr, Line, [?Atom(Type)], _Params, noresult}=Ast, #{level := 0}=State)
        when Type == type orelse Type == opaque ->
    invalid_type_declaration(State, Line, Ast);
ast_to_ast({attr, Line, [?Atom(Type)], noparams, _Result}=Ast, #{level := 0}=State)
        when Type == type orelse Type == opaque ->
    invalid_type_declaration(State, Line, Ast);
ast_to_ast({attr, _Line, [?Atom(Type)], _Params, _Result}=Ast, #{level := 0}=State)
        when Type == type orelse Type == opaque ->
    fn_spec:type_to_spec(Ast, State);
% anything else at module level is an error
ast_to_ast(Ast, #{level := 0}=State) ->
    Line = element(2, Ast),
    State1 = add_error(State, invalid_top_level_expression, Line, {ast, Ast}),
    R = {atom, Line, error},
    {R, State1};
% inject a fun as the first or last argument of an existing call
ast_to_ast(?E(_Line, call_do, {Place, Call, Fun}), State) ->
    with_childs(State, Call, Fun,
                fun ({call, CallLine, FCall, Args}, EFun) ->
                        AllArgs = case Place of
                                      first -> [EFun|Args];
                                      last -> Args ++ [EFun]
                                  end,
                        {call, CallLine, FCall, AllArgs}
                end);
% thread a value through a chain of calls, injecting the accumulated
% expression first or last in each call's argument list
ast_to_ast(?E(_Line, call_thread, {InitialVal, Calls}), State) ->
    Threaded = lists:foldl(fun (Current, Accum) ->
                                   {Pos, Call} = Current,
                                   ?E(CallLine, call, {Fun, Args}) = Call,
                                   NewArgs = case Pos of
                                                 first -> [Accum|Args];
                                                 last -> Args ++ [Accum]
                                             end,
                                   ?E(CallLine, call, {Fun, NewArgs})
                           end, InitialVal, Calls),
    ast_to_ast(Threaded, State);
% ^_ <expr> -- the node is dropped entirely (see ast_to_ast/3)
ast_to_ast(?T(_Line, [?Var('_')], _), State) ->
    R = 'fn compiler ignore',
    {R, State};
% binary list
ast_to_ast(?LTag(Line, [?Atom(b)], ?S(_LLine, list, TSList)), State) ->
    type_specifiers_to_ast(Line, TSList, State);
% list
ast_to_ast(?S(Line, list, Val), State) ->
    list_to_cons_list(Line, Val, State);
% <var>#<map> (map update)
ast_to_ast(?S(Line, map=Type, {Var, KVs}), State) ->
    {EVar, State1} = ast_to_ast(Var, State),
    {Items, State2} = state_map(fun to_map_field/2, KVs, State1),
    R = {Type, Line, EVar, lists:reverse(Items)},
    {R, State2};
% <map>
ast_to_ast(?S(Line, map=Type, KVs), State) ->
    {Items, State1} = state_map(fun to_map_field/2, KVs, State),
    R = {Type, Line, lists:reverse(Items)},
    {R, State1};
% #i <atom> (compiler info)
ast_to_ast(?LTag(Line, [?Atom(i)], ?Atom(Name)), State) ->
    info_to_ast(Line, Name, State);
% #r.<atom> <atom> (record field index)
ast_to_ast(?LTag(Line, [?Atom(r), ?Atom(RecordName)], ?Atom(Field)), State) ->
    R = {record_index, Line, RecordName, {atom, Line, Field}},
    {R, State};
% #r.<atom>.<atom> <var> (record field access)
ast_to_ast(?LTag(Line, [?Atom(r), ?Atom(RecordName), ?Atom(Field)], ?Var(RecordVar)), State) ->
    R = {record_field, Line, {var, Line, RecordVar}, RecordName, {atom, Line, Field}},
    {R, State};
% #r.<atom> <var>#<map> (record update)
ast_to_ast(?LTag(Line, [?Atom(r), ?Atom(RecordName)],
                ?S(_MapLine, map, {Var, KVs})), State) ->
    {EVar, State1} = ast_to_ast(Var, State),
    {Items, State2} = state_map(fun to_record_field/2, KVs, State1),
    R = {record, Line, EVar, RecordName, Items},
    {R, State2};
% #r.<atom> <map> (record creation)
ast_to_ast(?LTag(Line, [?Atom(r), ?Atom(RecordName)], ?S(_MapLine, map, KVs)), State) ->
    {Items, State1} = state_map(fun to_record_field/2, KVs, State),
    R = {record, Line, RecordName, Items},
    {R, State1};
% #c <string> (character literal)
ast_to_ast(?LTag(Line, [?Atom(c)], ?V(_StrLine, string, [Char])), State) ->
    {{char, Line, Char}, State};
% #atom <string>
ast_to_ast(?LTag(Line, [?Atom(atom)], ?V(_StrLine, string, AtomStr)), State) ->
    {{atom, Line, list_to_atom(AtomStr)}, State};
% tuple
ast_to_ast(?S(Line, tuple=Type, Val), State) ->
    {EVal, State1} = ast_to_ast(Val, State),
    {{Type, Line, EVal}, State1};
% [<val> :: <val]
ast_to_ast(?S(Line, cons=Type, {[H], T}), State) ->
    with_childs(State, H, T, fun (EH, ET) -> {Type, Line, EH, ET} end);
% [<seq> :: <val]
ast_to_ast(?S(Line, cons, {H, T}), State) ->
    with_childs(State, H, T, fun (EH, ET) ->
                                     ast_list_to_cons(lists:reverse(EH), Line, ET) end);
% function reference fun M:F/A
ast_to_ast(?V(Line, fn_ref, {[Mod, Fun], Arity}), State) ->
    with_childs(State, Mod, Fun, Arity,
                fun (EMod, EFun, EArity) ->
                        {'fun', Line, {function, EMod, EFun, EArity}}
                end);
% a variable is not a valid local fun name; report but emit something usable
ast_to_ast(?V(Line, fn_ref, {[?Var(Fun)=FunAst], Arity}), State) ->
    State1 = add_error(State, invalid_fn_ref, Line,
                       expected_got("atom", {ast, FunAst})),
    R = {'fun', Line, {function, Fun, unwrap(Arity)}},
    {R, State1};
ast_to_ast(?V(Line, fn_ref, {[Fun], Arity}), State) ->
    R = {'fun', Line, {function, unwrap(Fun), unwrap(Arity)}},
    {R, State};
% when -- compiles to an Erlang 'if'
ast_to_ast(?E(Line, 'when', Clauses), State) ->
    {EClauses, State1} = ast_to_ast(Clauses, State),
    R = {'if', Line, EClauses},
    {R, State1};
ast_to_ast({wcond, Line, Cond, Body}, State) ->
    {ECond, State1} = when_to_ast(Cond, State),
    {EBody, State2} = ast_to_ast(Body, State1),
    R = {clause, Line, [], ECond, EBody},
    {R, State2};
ast_to_ast({welse, Line, Body}, State) ->
    {EBody, State1} = ast_to_ast(Body, State),
    R = {clause, Line, [], [[{atom, Line, true}]], EBody},
    {R, State1};
% binary comprehension
ast_to_ast(?T(_TLine, [?Atom(b)], ?E(Line, 'for', {Qualifiers, Body})), State) ->
    {Items, EBody, State1} = lc_to_ast(Line, Qualifiers, Body, State),
    R = {bc, Line, EBody, Items},
    {R, State1};
% list comprehension
ast_to_ast(?E(Line, 'for', {Qualifiers, Body}), State) ->
    {Items, EBody, State1} = lc_to_ast(Line, Qualifiers, Body, State),
    R = {lc, Line, EBody, Items},
    {R, State1};
% try expression
ast_to_ast(?E(Line, 'try', {Body, Catch, After}), State) ->
    {EBody, State1} = ast_to_ast(Body, State),
    % BUGFIX: thread State1 (not the pre-body State) into the catch clauses
    % so errors reported while translating the body are not lost
    {ECatch, State2} = case Catch of
                           ?E(_CLine, 'case', Clauses) ->
                               state_map(fun ast_to_catch/2, Clauses, State1);
                           nocatch -> {[], State1}
                       end,
    {EAfter, State3} = case After of
                           noafter -> {[], State2};
                           AfterBody -> ast_to_ast(AfterBody, State2)
                       end,
    R = {'try', Line, EBody, [], lists:reverse(ECatch), EAfter},
    {R, State3};
% receive
ast_to_ast(?E(Line, 'receive', {?E(_CLine, 'case', Clauses), noafter}), State) ->
    {EClauses, State1} = ast_to_ast(Clauses, State),
    TupleClauses = lists:map(fun to_tuple_clause/1, EClauses),
    R = {'receive', Line, TupleClauses},
    {R, State1};
ast_to_ast(?E(Line, 'receive', {?E(_CLine, 'case', Clauses), {After, AfterBody}}), State) ->
    with_childs(State, Clauses, After, AfterBody,
                fun(EClauses, EAfter, EAfterBody) ->
                        TupleClauses = lists:map(fun to_tuple_clause/1, EClauses),
                        {'receive', Line, TupleClauses, EAfter, EAfterBody}
                end);
% match (case expression)
ast_to_ast(?E(Line, switch, {Value, ?E(_CaseLine, 'case', Clauses)}), State) ->
    with_childs(State, Clauses, Value,
                fun(EClauses, EValue) ->
                        TupleClauses = lists:map(fun to_tuple_clause/1, EClauses),
                        {'case', Line, EValue, TupleClauses}
                end);
ast_to_ast({cmatch, Line, {Conds, When, Body}}, State) ->
    {EConds, State1} = ast_to_ast(Conds, State),
    {EWhen, State2} = when_to_ast(When, State1),
    {EBody, State3} = ast_to_ast(Body, State2),
    R = {clause, Line, EConds, EWhen, EBody},
    {R, State3};
ast_to_ast({celse, Line, Body}, State) ->
    {EBody, State1} = ast_to_ast(Body, State),
    R = {clause, Line, [{var, Line, '_'}], [], EBody},
    {R, State1};
% begin
ast_to_ast(?E(Line, 'begin', Body), State) ->
    {EBody, State1} = ast_to_ast(Body, State),
    R = {block, Line, EBody},
    {R, State1};
% anonymous fun
ast_to_ast(?E(Line, fn, ?E(_CLine, 'case', Cases)), State) ->
    {ok, FixedCases} = expand_case_else_match(Cases),
    {EFixedCases, State1} = ast_to_ast(FixedCases, State),
    R = {'fun', Line, {clauses, EFixedCases}},
    {R, State1};
% named fun
ast_to_ast(?E(Line, fn, {?V(_VLine, var, FName), ?E(_CLine, 'case', Cases)}), State) ->
    {ok, FixedCases} = expand_case_else_match(Cases),
    {EFixedCases, State1} = ast_to_ast(FixedCases, State),
    R = {named_fun, Line, FName, EFixedCases},
    {R, State1};
% call M:F(...)
ast_to_ast(?E(Line, call, {[Mod, Fun], Args}), State) ->
    with_childs(State, Mod, Fun, Args,
                fun (EMod, EFun, EArgs) ->
                        {call, Line, {remote, Line, EMod, EFun}, EArgs}
                end);
ast_to_ast(?E(Line, call, {[Fun], Args}), State) ->
    with_childs(State, Fun, Args,
                fun (EFun, EArgs) -> {call, Line, EFun, EArgs} end);
% =
ast_to_ast(?O(Line, '=', Left, Right), State) ->
    with_childs(State, Left, Right,
                fun (ELeft, ERight) -> {match, Line, ELeft, ERight} end);
% binary ops
ast_to_ast(?O(Line, Op, Left, Right), State) ->
    with_childs(State, Left, Right,
                fun (ELeft, ERight) -> {op, Line, map_op(Op), ELeft, ERight} end);
% values
ast_to_ast(?V(Line, atom=Type, Val), State) -> {{Type, Line, Val}, State};
ast_to_ast(?V(Line, integer=Type, Val), State) -> {{Type, Line, Val}, State};
ast_to_ast(?V(Line, float=Type, Val), State) -> {{Type, Line, Val}, State};
ast_to_ast(?V(Line, boolean, Val), State) -> {{atom, Line, Val}, State};
ast_to_ast(?V(Line, var=Type, Val), State) -> {{Type, Line, Val}, State};
ast_to_ast(?V(Line, string=Type, Val), State) -> {{Type, Line, Val}, State};
ast_to_ast(?V(Line, bstring, Val), State) ->
    % BUGFIX: the bin_element previously carried a hardcoded line number (5);
    % use the node's real line so errors and stack traces point at it
    R = {bin, Line, [{bin_element, Line, {string, Line, Val}, default, default}]},
    {R, State};
% unary ops
ast_to_ast(?UO(Line, Op, Val), State) ->
    {EVal, State1} = ast_to_ast(Val, State),
    R = {op, Line, map_op(Op), EVal},
    {R, State1};
% anything else is an invalid expression
ast_to_ast(Ast, State) ->
    Line = element(2, Ast),
    State1 = add_error(State, invalid_expression, Line, {ast, Ast}),
    R = {atom, Line, error},
    {R, State1}.
%% Worker for ast_to_ast/2 on lists: translates each node, splicing in
%% node translations that expand to several forms and dropping nodes that
%% compile to the 'fn compiler ignore' marker. Preserves input order.
ast_to_ast([], Accum, State) ->
    {lists:reverse(Accum), State};
ast_to_ast([Node|Rest], Accum, State) ->
    {ENode, State1} = ast_to_ast(Node, State),
    NewAccum = case ENode of
                   'fn compiler ignore' -> Accum;
                   Forms when is_list(Forms) -> Forms ++ Accum;
                   _ -> [ENode|Accum]
               end,
    ast_to_ast(Rest, NewAccum, State1).
% Map an efene operator name to its Erlang counterpart.
% arithmetic
map_op('+') -> '+';
map_op('-') -> '-';
map_op('*') -> '*';
map_op('/') -> '/';
map_op('//') -> 'div';
map_op('%') -> 'rem';
% bitwise
map_op('|') -> 'bor';
map_op('&') -> 'band';
map_op('^') -> 'bxor';
map_op('>>') -> 'bsr';
map_op('<<') -> 'bsl';
map_op('~') -> 'bnot';
% boolean: and/or are short-circuiting, andd/orr are the strict variants
map_op('and') -> 'andalso';
map_op('andd') -> 'and';
map_op('or') -> 'orelse';
map_op('orr') -> 'or';
map_op('xor') -> 'xor';
% message send
map_op('!') -> '!';
map_op('not') -> 'not';
% list append/subtract
map_op('++') -> '++';
map_op('--') -> '--';
% comparison: is/isnt are the exact (=:=/=/=) comparisons
map_op('<') -> '<';
map_op('<=') -> '=<';
map_op('>') -> '>';
map_op('>=') -> '>=';
map_op('==') -> '==';
map_op('is') -> '=:=';
map_op('!=') -> '/=';
map_op('isnt') -> '=/='.
% Build an Erlang cons-list AST from a list of efene nodes, translating each
% element with ast_to_ast/2; Line is used for every cons cell and the nil.
list_to_cons_list(Line, Val, State) ->
list_to_cons_list_r(Line, lists:reverse(Val), {nil, Line}, State).
% Worker: consumes the reversed input so the produced cons list keeps the
% original element order.
list_to_cons_list_r(_Line, [], Cons, State) ->
{Cons, State};
list_to_cons_list_r(Line, [H|T], Cons, State) ->
{EH, State1} = ast_to_ast(H, State),
list_to_cons_list_r(Line, T, {cons, Line, EH, Cons}, State1).
% Translate one `name/Arity' entry of an export(_type) list to {Name, Arity};
% anything else is reported as an invalid export.
ast_to_export_fun(?O(_Line, '/', ?Atom(FunName), ?V(_ArLine, integer, Arity)), State) ->
R = {FunName, Arity},
{R, State};
ast_to_export_fun(Ast, State) ->
Line = element(2, Ast),
State1 = add_error(State, invalid_export, Line,
expected_got("funname/Arity", {ast, Ast})),
{{atom, Line, error}, State1}.
%% @doc Translate one clause of a `try ... catch' expression into an Erlang
%% catch clause. The pattern is normalised to the `{Class, Reason, _Stack}'
%% tuple form that Erlang catch clauses match on.
% a single pattern defaults the class to throw
ast_to_catch({cmatch, Line, {[Match], When, Body}}, State) ->
    cmatch_to_catch(Line, ?V(Line, atom, throw), Match, When, Body, State);
% throw:T, error:E and exit:X clauses -- previously three byte-identical
% clauses, merged here into one with a guard on the class atom
ast_to_catch({cmatch, Line, {[?V(_ALine, atom, ClassName)=CN, Match], When, Body}}, State)
        when ClassName == throw orelse ClassName == error orelse ClassName == exit ->
    cmatch_to_catch(Line, CN, Match, When, Body, State);
% Var:Reason clause: the class is captured in a variable
ast_to_catch({cmatch, Line, {[?V(_ALine, var, _VarName)=Var, Match], When, Body}}, State) ->
    cmatch_to_catch(Line, Var, Match, When, Body, State);
% `else' catches everything
ast_to_catch({celse, Line, Body}, State) ->
    EMatch = {tuple, Line, [{var, Line, '_'}, {var, Line, '_'}, {var, Line, '_'}]},
    {EBody, State1} = ast_to_ast(Body, State),
    R = {clause, Line, [EMatch], [], EBody},
    {R, State1};
% anything else (e.g. an unknown class atom) is an invalid catch clause
ast_to_catch({cmatch, Line, {Match, _When, _Body}}, State) ->
    State1 = add_error(State, invalid_catch, Line,
                       expected_got("throw:T, error:E, exit:X or T",
                                    {ast, ?S(Line, tuple, Match)})),
    {{atom, Line, error}, State1}.
% Build the Erlang catch clause {Class, Reason, _} -> Body for a catch
% pattern with explicit class Class (an atom or variable node).
cmatch_to_catch(Line, Class, Match, When, Body, State) ->
{EClass, State1} = ast_to_ast(Class, State),
{EMatch, State2} = ast_to_ast(Match, State1),
ETupleMatch = {tuple, Line, [EClass, EMatch, {var, Line, '_'}]},
{EBody, State3} = ast_to_ast(Body, State2),
{EWhen, State4} = when_to_ast(When, State3),
R = {clause, Line, [ETupleMatch], EWhen, EBody},
{R, State4}.
% Translate a guard sequence: nowhen means "no guard" ([]); a list of
% guards is translated element-wise (result order preserved via reverse).
when_to_ast(nowhen, State) -> {[], State};
when_to_ast(When, State) when is_list(When) ->
{R, State1} = state_map(fun when_to_ast/2, When, State),
{lists:reverse(R), State1};
when_to_ast(When, State) ->
ast_to_ast(When, State).
% Translate a key and a value node together, returning {{EKey, EVal}, State}.
kv_to_ast(Key, Val, State) ->
with_childs(State, Key, Val, fun (EKey, EVal) -> {EKey, EVal} end).
% Map fields: `=>` (assoc) for construction/update, `:=` (exact) for matching.
to_map_field({kv, Line, Key, Val}, State) ->
{{EKey, EVal}, State1} = kv_to_ast(Key, Val, State),
R = {map_field_assoc, Line, EKey, EVal},
{R, State1};
to_map_field({kvmatch, Line, Key, Val}, State) ->
{{EKey, EVal}, State1} = kv_to_ast(Key, Val, State),
R = {map_field_exact, Line, EKey, EVal},
{R, State1}.
% Record fields must be `Key = Val' pairs; anything else is reported.
to_record_field({kv, Line, Key, Val}, State) ->
{{EKey, EVal}, State1} = kv_to_ast(Key, Val, State),
R = {record_field, Line, EKey, EVal},
{R, State1};
to_record_field(Other, State) ->
Line = element(2, Other),
State1 = add_error(State, bad_record_field_init, Line,
expected_got("initialization", {ast, Other})),
{{atom, Line, error}, State1}.
% Translate one field of a record declaration. Four accepted shapes:
% `name = Val is Type', `name is Type', `name = Val' and a bare `name'.
% Fields carrying a type are wrapped as {type, Field, TypeInfo} so the
% caller can emit the corresponding -type attribute (see maybe_type_record/5).
to_record_field_decl(?O(Line, '=', ?V(FLine, atom, FieldName), ?O(_OLine, is, Val, Type)),
State) ->
{R, State1} = to_record_field_decl(?O(Line, '=', ?V(FLine, atom, FieldName), Val), State),
{{type, R, {Line, has_default, R, Type}}, State1};
to_record_field_decl(?O(_OLine, is, ?V(Line, 'atom', FieldName), Type),
State) ->
{R, State1} = to_record_field_decl(?V(Line, 'atom', FieldName), State),
{{type, R, {Line, no_default, R, Type}}, State1};
to_record_field_decl(?O(Line, '=', ?V(FLine, atom, FieldName), Val), State) ->
{EVal, State1} = ast_to_ast(Val, State),
R = {record_field, Line, {atom, FLine, FieldName}, EVal},
{R, State1};
to_record_field_decl(?V(Line, 'atom', FieldName), State) ->
R = {record_field, Line, {atom, Line, FieldName}},
{R, State};
to_record_field_decl(Other, State) ->
Line = element(2, Other),
State1 = add_error(State, bad_record_field_decl, Line,
expected_got("atom or assignment", {ast, Other})),
{{atom, Line, error}, State1}.
% erlang ast
% NOTE for now empty case in switch matches the empty tuple
% Wrap multi-pattern clauses into a single tuple pattern so they fit
% receive/case clauses, which take exactly one pattern.
to_tuple_clause({clause, Line, [], Guard, Body}) ->
{clause, Line, [{tuple, Line, []}], Guard, Body};
to_tuple_clause({clause, _Line, [_Match], _Guard, _Body}=Ast) ->
Ast;
to_tuple_clause({clause, Line, Matches, Guard, Body}) ->
{clause, Line, [{tuple, Line, Matches}], Guard, Body}.
% Translate one comprehension qualifier: a filter expression, a binary
% generator (`<=') or a list generator (`<-').
for_qualifier_to_ast({filter, Ast}, State) -> ast_to_ast(Ast, State);
for_qualifier_to_ast({bgenerate, Line, Left, Right}, State) ->
{{ELeft, ERight}, State1} = kv_to_ast(Left, Right, State),
R = {b_generate, Line, ELeft, ERight},
{R, State1};
for_qualifier_to_ast({generate, Line, Left, Right}, State) ->
{{ELeft, ERight}, State1} = kv_to_ast(Left, Right, State),
R = {generate, Line, ELeft, ERight},
{R, State1}.
% Rewrite `else' clauses of a function's case list into catch-all cmatch
% clauses with one wildcard per argument, using the first clause's arity.
% NOTE: assumes the first clause is a cmatch (an else-first case crashes).
expand_case_else_match([{cmatch, _Line, {Matches, _When, _Body}}=H|T]) ->
Arity = length(Matches),
expand_case_else_match(T, Arity, [H]).
expand_case_else_match([], _Arity, Accum) ->
{ok, lists:reverse(Accum)};
expand_case_else_match([{celse, Line, Body}|T], Arity, Accum) ->
Matches = [?V(Line, var, '_') || _ <- lists:seq(1, Arity)],
NewElse = {cmatch, Line, {Matches, nowhen, Body}},
expand_case_else_match(T, Arity, [NewElse|Accum]);
expand_case_else_match([H|T], Arity, Accum) ->
expand_case_else_match(T, Arity, [H|Accum]).
% mapfoldl-style helper that threads State through Fun over Seq.
% NOTE: the result list comes back in REVERSE order of Seq; callers that
% care about order must lists:reverse/1 the result themselves.
state_map(Fun, Seq, State) ->
lists:foldl(fun (Item, {Accum, StateIn}) ->
{R, State1} = Fun(Item, StateIn),
{[R|Accum], State1}
end, {[], State}, Seq).
% Prepend a collected attribute entry {Type, Line, Name, Attrs} to the state.
add_attributes(#{attrs := AttrList}=State, Type, Line, Name, Attrs) ->
NewAttrList = [{Type, Line, Name, Attrs}|AttrList],
State#{attrs => NewAttrList}.
% Uniform shape for "expected X, got Y" error details.
expected_got(Expected, Got) -> {expected, Expected, got, Got}.
% Report a case_mismatch error for every function clause whose pattern count
% differs from the arity fixed by the first clause; `else' clauses are exempt.
check_case_arities_equal([{cmatch, Line, {Cond, _When, _Body}}|T], State, Arity) ->
CaseArity = length(Cond),
if CaseArity == Arity -> check_case_arities_equal(T, State, Arity);
true ->
State1 = add_error(State, case_mismatch, Line,
expected_got(Arity, CaseArity)),
check_case_arities_equal(T, State1, Arity)
end;
check_case_arities_equal([{celse, _Line, _Body}|T], State, Arity) ->
check_case_arities_equal(T, State, Arity);
check_case_arities_equal([], State, _Arity) -> State.
% Common part of list/binary comprehensions: translate the qualifiers and
% the body, wrapping a multi-expression body in a block. Returns
% {Qualifiers, Body, State} with qualifiers in source order.
lc_to_ast(Line, Qualifiers, Body, State) ->
{EBody, State1} = case Body of
[Node] -> ast_to_ast(Node, State);
Nodes ->
{EBlockBody, S1} = ast_to_ast(Nodes, State),
Ri = {block, Line, EBlockBody},
{Ri, S1}
end,
{Items, State2} = state_map(fun for_qualifier_to_ast/2, Qualifiers, State1),
{lists:reverse(Items), EBody, State2}.
% `#i line' / `#i module' compile-time info: expand to the current line
% number or the module name; anything else is an error.
info_to_ast(Line, line, State) ->
{{integer, Line, Line}, State};
info_to_ast(Line, module, #{module := Module}=State) ->
{{atom, Line, Module}, State};
info_to_ast(Line, Name, State) ->
State1 = add_error(State, unknown_compiler_info, Line,
expected_got("\"line\" or \"module\"", Name)),
{{atom, Line, error}, State1}.
% Prepend Param to a bin_element parameter list, normalising the `default'
% marker (meaning "no parameters yet") to an empty list first.
add_bin_element_param(default, Param, State) ->
add_bin_element_param([], Param, State);
add_bin_element_param(L, Param, State) ->
{[Param|L], State}.
% Validating variant: only add Param if it is one of ValidValues, otherwise
% report an invalid_bin_type_specifier_value error and leave Params as-is.
add_bin_element_param(Line, Params, Param, ValidValues, State) ->
IsInValues = lists:member(Param, ValidValues),
if IsInValues -> add_bin_element_param(Params, Param, State);
true ->
Msg = io_lib:format("one of ~p", [ValidValues]),
State1 = add_error(State, invalid_bin_type_specifier_value, Line,
expected_got(Msg, Param)),
{Params, State1}
end.
% Fold the key/value fields of a binary type specifier map into a
% bin_element tuple {bin_element, Line, Value, Size, Params}, one field at a
% time. Unknown or badly-typed fields are reported and skipped.
parse_bin_element_fields(_Line, [], State, BinElement) ->
{BinElement, State};
% val: the variable bound/matched by this segment
parse_bin_element_fields(Line, [{kv, _Line, ?Atom(val), ?V(_, var, _VarName)=NewName}|T],
State, {BeType, BeLine, _OldName, Size, Params}) ->
{ENewName, State1} = ast_to_ast(NewName, State),
NewBinElement = {BeType, BeLine, ENewName, Size, Params},
parse_bin_element_fields(Line, T, State1, NewBinElement);
% size: segment size in units (integer literal)
parse_bin_element_fields(Line, [{kv, _Line, ?Atom(size), ?V(_, integer, _Size)=NewSize}|T],
State, {BeType, BeLine, BeName, _OldSize, Params}) ->
{ENewSize, State1} = ast_to_ast(NewSize, State),
NewBinElement = {BeType, BeLine, BeName, ENewSize, Params},
parse_bin_element_fields(Line, T, State1, NewBinElement);
% unit: 1..256, guarded here rather than via the ValidValues list
parse_bin_element_fields(Line, [{kv, _Line, ?Atom(unit), ?V(_, integer, Unit)}|T],
State, {BeType, BeLine, BeName, BeSize, Params})
when Unit >= 1 andalso Unit =< 256 ->
{NewParams, State1} = add_bin_element_param(Params, {unit, Unit}, State),
NewBinElement = {BeType, BeLine, BeName, BeSize, NewParams},
parse_bin_element_fields(Line, T, State1, NewBinElement);
% type, endianness and sign: validated against their allowed atom sets
parse_bin_element_fields(Line, [{kv, KvLine, ?Atom(type), ?Atom(Type)}|T],
State, {BeType, BeLine, BeName, BeSize, Params}) ->
ValidValues = [integer, float, binary, bytes, bitstring, bits, utf8, utf16, utf32],
{NewParams, State1} = add_bin_element_param(KvLine, Params, Type, ValidValues, State),
NewBinElement = {BeType, BeLine, BeName, BeSize, NewParams},
parse_bin_element_fields(Line, T, State1, NewBinElement);
parse_bin_element_fields(Line, [{kv, KvLine, ?Atom(endianness), ?Atom(Endianness)}|T],
State, {BeType, BeLine, BeName, BeSize, Params}) ->
ValidValues = [big, little, native],
{NewParams, State1} = add_bin_element_param(KvLine, Params, Endianness, ValidValues, State),
NewBinElement = {BeType, BeLine, BeName, BeSize, NewParams},
parse_bin_element_fields(Line, T, State1, NewBinElement);
parse_bin_element_fields(Line, [{kv, KvLine, ?Atom(sign), ?Atom(Sign)}|T],
State, {BeType, BeLine, BeName, BeSize, Params}) ->
ValidValues = [signed, unsigned],
{NewParams, State1} = add_bin_element_param(KvLine, Params, Sign, ValidValues, State),
NewBinElement = {BeType, BeLine, BeName, BeSize, NewParams},
parse_bin_element_fields(Line, T, State1, NewBinElement);
% unknown field: report and continue with the element unchanged
parse_bin_element_fields(Line, [Other|T], State, BinElement) ->
Msg = "one of val (var), size (integer), type (atom), sign (atom), endianness (atom), unit (1..256)",
OtherLine = element(2, Other),
State1 = add_error(State, invalid_bin_type_specifier_field, OtherLine,
expected_got(Msg, {ast, Other})),
parse_bin_element_fields(Line, T, State1, BinElement).
%% @doc Translate one entry of a binary type specifier list (`#b [...]')
%% into an Erlang `bin_element'. Each entry must be a map of fields such as
%% `val', `size', `type', `sign', `endianness' and `unit'.
to_bin_element(?S(Line, map, Fields), State) ->
    % start from a wildcard element and let each map field refine it
    InitialState = {bin_element, Line, {var, Line, '_'}, default, default},
    parse_bin_element_fields(Line, Fields, State, InitialState);
to_bin_element(Other, State) ->
    Line = element(2, Other),
    % BUGFIX: the error detail was copy-pasted from info_to_ast/3 (it talked
    % about "line"/"module"); describe what is actually expected here and
    % wrap the offending node in {ast, _} like every other error site does
    State1 = add_error(State, invalid_bin_type_specifier, Line,
                       expected_got("map with binary type specifier fields", {ast, Other})),
    {{atom, Line, error}, State1}.
% Translate a `#b [...]' binary type specifier list into a bin AST node.
type_specifiers_to_ast(Line, TSList, State) ->
{RFields, State1} = lists:mapfoldl(fun to_bin_element/2, State, TSList),
R = {bin, Line, RFields},
{R, State1}.
% Record a compile error {ErrType, Line, Detail} in the state; errors end up
% in reverse order of discovery.
add_error(#{errors:=Errors}=State, ErrType, Line, Detail) ->
Error = {ErrType, Line, Detail},
NewErrors = [Error|Errors],
State#{errors => NewErrors}.
% Extract the raw value from a value node (crashes on any other node shape).
unwrap(?V(_Line, _Type, Val)) -> Val.
% Translate two (or three) child nodes, threading the state left to right,
% and combine the results with Fun.
with_childs(State, Ast1, Ast2, Fun) ->
{EAst1, State1} = ast_to_ast(Ast1, State),
{EAst2, State2} = ast_to_ast(Ast2, State1),
{Fun(EAst1, EAst2), State2}.
with_childs(State, Ast1, Ast2, Ast3, Fun) ->
{EAst1, State1} = ast_to_ast(Ast1, State),
{EAst2, State2} = ast_to_ast(Ast2, State1),
{EAst3, State3} = ast_to_ast(Ast3, State2),
{Fun(EAst1, EAst2, EAst3), State3}.
% Report an invalid type/opaque declaration and emit an error placeholder.
invalid_type_declaration(State, Line, Ast) ->
State1 = add_error(State, invalid_type_declaration, Line, {ast, Ast}),
R = {atom, Line, error},
{R, State1}.
% Walk a function's attribute list, pulling out its @spec attribute (if any)
% and compiling the remaining attributes into a fn_attrs form. Returns
% {found, ESpec, FnAttrsForm, State} or {notfound, FnAttrsForm, State}.
extract_spec_attr({Name, Arity}, [], Accum, nil, State) ->
{notfound, make_fun_attrs(Name, Arity, Accum), State};
extract_spec_attr({Name, Arity}=FnRef, [], Accum, SpecAttr, State) ->
{ESpecAttr, State1} = parse_spec_attr(FnRef, SpecAttr, State),
{found, ESpecAttr, make_fun_attrs(Name, Arity, Accum), State1};
% first @spec found: remember it
extract_spec_attr(FnRef, [{attr, _Line, [?Atom(spec)], _Params, _Result}=SpecAttr|T],
Accum, nil, State) ->
extract_spec_attr(FnRef, T, Accum, SpecAttr, State);
% a second @spec is an error; the first one wins
extract_spec_attr(FnRef, [{attr, Line, [?Atom(spec)], _Params, _Result}=SpecAttr|T],
Accum, ExistingSpecAttr, State) ->
State1 = add_error(State, duplicated_function_spec, Line, {ast, SpecAttr}),
extract_spec_attr(FnRef, T, Accum, ExistingSpecAttr, State1);
% any other attribute is compiled to {Name, {Params, Result}} AST data
extract_spec_attr(FnRef, [{attr, Line, Name, Params, Result}|T],
Accum, SpecAttr, State) ->
{ENameList, State1} = ast_to_ast(Name, State),
EName = ast_list_to_cons(lists:reverse(ENameList), Line),
{NParams, State2} = if Params == noparams -> {[], State1};
true -> {Params, State1}
end,
{EResult, State3} = if Result == noresult -> {{nil, Line}, State2};
true -> ast_to_ast(Result, State2)
end,
{EParams, State4} = list_to_cons_list(Line, NParams, State3),
EAttr = {tuple, Line, [EName, {tuple, Line, [EParams, EResult]}]},
extract_spec_attr(FnRef, T, [EAttr|Accum], SpecAttr, State4).
% Delegate @spec parsing to fn_spec with the bare function name and arity.
parse_spec_attr({?Atom(Name), Arity}, {attr, Line, [?Atom(spec)], Args, Return},
State) ->
fn_spec:parse_spec_attr(Name, Arity, Line, Args, Return, State).
% Build an -export/-export_type attribute from a list of `name/Arity' nodes.
% NOTE: state_map/3 returns the entries reversed; order is irrelevant to the
% semantics of export attributes so it is not re-reversed here.
export_like_to_ast(Name, Line, Params, State) ->
{EFuns, State1} = state_map(fun ast_to_export_fun/2, Params, State),
R = {attribute, Line, Name, EFuns},
{R, State1}.
% assumes Items is reversed
% Build a cons-list AST from an already-translated (and reversed) list of
% AST nodes; nested plain lists are consified recursively.
ast_list_to_cons([], Line) ->
{nil, Line};
ast_list_to_cons(Items, Line) ->
ast_list_to_cons(Items, Line, {nil, Line}).
ast_list_to_cons([], _Line, Cons) ->
Cons;
ast_list_to_cons([H|T], Line, Cons) when is_list(T) ->
ast_list_to_cons(T, Line, {cons, Line, maybe_consify_head(H, Line), Cons});
% improper list: the non-list tail becomes the last explicit cons cell
ast_list_to_cons([H|T], Line, Cons) ->
{cons, Line, maybe_consify_head(H, Line), {cons, Line, T, Cons}}.
% Consify a head element that is itself a plain Erlang list of AST nodes.
maybe_consify_head(H, Line) when is_list(H) -> ast_list_to_cons(H, Line);
maybe_consify_head(H, _Line) -> H.
% Emit a fn_attrs module attribute carrying {Name, Arity, Attrs}; the AST
% tuple is turned back into a concrete term via erl_syntax:concrete/1.
make_fun_attrs(?V(Line, atom, Name), Arity, Accum) ->
ConsAttrs = {tuple, Line, [{atom, Line, Name}, {integer, Line, Arity},
ast_list_to_cons(Accum, Line)]},
{attribute, Line, fn_attrs, erl_syntax:concrete(ConsAttrs)}.
% If the record declaration carried field types, emit the corresponding
% record type form alongside the record attribute; otherwise just the record.
maybe_type_record(R, _Line, _RecordName, [], State) -> {R, State};
maybe_type_record(R, Line, RecordName, Types, State) ->
{RType, State1} = fn_spec:parse_record_types(RecordName, Line, Types, State),
{[RType, R], State1}.
%% @copyright 2014-2016 <NAME> <<EMAIL>>
%%
%% @doc Miscellaneous utility functions
%% @private
%% @end
-module(logi_utils).
%%----------------------------------------------------------------------------------------------------------------------
%% Exported API
%%----------------------------------------------------------------------------------------------------------------------
-export([is_timestamp/1]).
-export([is_non_neg_integer/1]).
-export([is_pos_integer/1]).
-export([function_exported/3]).
%%----------------------------------------------------------------------------------------------------------------------
%% Exported Functions
%%----------------------------------------------------------------------------------------------------------------------
%% @doc Returns `true' if `X' is a timestamp, `false' otherwise
-spec is_timestamp(X :: (erlang:timestamp() | term())) -> boolean().
is_timestamp({MegaSecs, Secs, MicroSecs}) ->
    %% A timestamp is a 3-tuple of non-negative integers
    is_integer(MegaSecs) andalso MegaSecs >= 0 andalso
        is_integer(Secs) andalso Secs >= 0 andalso
        is_integer(MicroSecs) andalso MicroSecs >= 0;
is_timestamp(_) ->
    false.
%% @doc Returns `true' if `X' is a non negative integer, `false' otherwise
-spec is_non_neg_integer(X :: (non_neg_integer() | term())) -> boolean().
is_non_neg_integer(X) when is_integer(X), X >= 0 -> true;
is_non_neg_integer(_) -> false.
%% @doc Returns `true' if `X' is a positive integer, `false' otherwise
-spec is_pos_integer(X :: (non_neg_integer() | term())) -> boolean().
is_pos_integer(X) when is_integer(X), X >= 1 -> true;
is_pos_integer(_) -> false.
%% @doc Equivalent to {@link erlang:function_exported/3} except `Module' will be loaded if it has not been loaded
-spec function_exported(module(), atom(), arity()) -> boolean().
function_exported(Module, Function, Arity) ->
    %% reject malformed arguments up front (same badarg as the BIF would raise)
    ArgsOk = is_atom(Module) andalso is_atom(Function) andalso
        is_integer(Arity) andalso Arity >= 0,
    ArgsOk orelse error(badarg, [Module, Function, Arity]),
    %% erlang:function_exported/3 only sees loaded modules, so load on demand
    case code:is_loaded(Module) of
        false -> _ = code:load_file(Module);
        _ -> ok
    end,
    erlang:function_exported(Module, Function, Arity).
%% @doc Main interface for Erlang captcha library
-module(ecaptcha).
-export([pixels/2, gif/2, png/2, fonts/0]).
-export_type([opts/0, effects/0, color_name/0, color_rgb/0, font_name/0, alphabet/0, err_reason/0]).
-type opts() :: #{
color => color_name() | color_rgb(),
effects => effects(),
font => font_name(),
alphabet => alphabet()
}.
%% `color' - what color to use for the gif/png image. predefined color name or RGB tuple;
%% default: `black'
%% `effects' - list of additional effects to apply to the text rendering; default: `[]'
%% `font' - name of the font to use; default: `<<"hplhs-oldstyle">>'
%% `alphabet' - set of characters to use to generate random string. One of predefined sets or
%% binary string with all the allowed characters (as long as they are defined in selected font);
%% default: latin_lowercase
-type effects() :: [line | blur | filter | dots | reverse_dots].
%% `line' - draws a curved horizontal line on top of the text
%% `blur' - blurs the image (averages each pixel's color with it's neighbours)
%% `filter' - makes letters hollow
%% `dots' - draws 100 random 2x2 white dots on top of the image, effectively removing small paths
%% from it
%% `reverse_dots' - draws 20 random dots of a randomized size from 1 to 3px using a color opposite
%% to the current color (so, reversing the color - black becomes white, white becomes black)
-type color_name() :: ecaptcha_color:color_name().
-type color_rgb() :: ecaptcha_color:rgb().
-type font_name() :: binary().
-type alphabet() :: numbers | latin_lowercase | latin_uppercase | alphabet_bin().
-type alphabet_bin() :: binary().
-type err_reason() ::
font_name_not_binary
| font_not_found
| chars_not_binary
| wrong_chars_length
| character_out_of_alphabet_range
| bad_random
| small_rand_binary
| opts_not_list
| non_atom_option
| unknown_option.
%% @doc Generate greyscale array of pixels 200x70
%%
%% Returns a pair: the captcha text (ASCII binary of `NumChars' bytes) and
%% a plain binary of 200x70 = 14000 one-byte pixels, where 0 is black,
%% 255 is white and values in between are shades of grey.
%%
%% @param NumChars how many characters should be on a image, `1..6'
%% @param Opts map of additional options such as `font', `alphabet' and `effects'
-spec pixels(NumChars :: pos_integer(), opts()) ->
    {Str :: binary(), Pixels :: binary()}
    | {error, err_reason()}.
pixels(NumChars, Opts) ->
    %% One random byte per text character plus the fixed amount of
    %% randomness the NIF consumes for effect rendering.
    AllRand = crypto:strong_rand_bytes(ecaptcha_nif:rand_size() + NumChars),
    <<CharsRand:NumChars/binary, EffectsRand/binary>> = AllRand,
    Chars = chars(CharsRand, maps:get(alphabet, Opts, latin_lowercase)),
    Font = maps:get(font, Opts, <<"hplhs-oldstyle">>),
    Effects = maps:get(effects, Opts, []),
    case ecaptcha_nif:pixels(Font, Chars, EffectsRand, Effects) of
        {error, _} = Err -> Err;
        Pixels -> {Chars, Pixels}
    end.
%% @doc Generate GIF image 200x70 with NumChars letters on it
%%
%% Behaves like {@link png/2}, but the image is encoded as GIF.
-spec gif(NumChars :: pos_integer(), opts()) ->
    {Str :: binary(), GifImg :: binary()}
    | {error, err_reason()}.
gif(NumChars, Opts) ->
    img(NumChars, Opts, fun ecaptcha_gif:encode/4).
%% @doc Generate PNG image 200x70 with NumChars letters on it
%%
%% Returns a 2-tuple: a binary of `NumChars' ASCII characters (the text
%% printed on the image) and the PNG-encoded image, which can be sent
%% directly to a browser with `Content-Type: image/png'.
%% @param NumChars - same as in {@link pixels/2}
%% @param Opts - same as in {@link pixels/2}, but also includes `color'. See {@link opts()}.
-spec png(NumChars :: pos_integer(), opts()) ->
    {Str :: binary(), PngImg :: binary()}
    | {error, err_reason()}.
png(NumChars, Opts) ->
    img(NumChars, Opts, fun ecaptcha_png:encode/4).
%% @doc List available fonts
%%
%% Returns a list of `{FontName, Chars}' tuples where `Chars' is a binary
%% containing every character the font can render.
%% Format may change in the future.
-spec fonts() -> [{font_name(), alphabet_bin()}, ...].
fonts() ->
    ecaptcha_nif:fonts().
%% Internal

%% Shared implementation of gif/2 and png/2: render the pixel buffer,
%% then encode it with the supplied Encoder fun in the requested color.
img(NumChars, Opts, Encoder) ->
    Rendered = pixels(NumChars, Opts),
    case Rendered of
        {error, _Reason} = Err ->
            Err;
        {Str, Pixels} ->
            Color = maps:get(color, Opts, black),
            Img = Encoder(Pixels, 200, 70, Color),
            {Str, Img}
    end.
%% Maps each random byte to one character of the selected alphabet.
%% NOTE(review): `rem' introduces a slight modulo bias whenever 256 is not
%% a multiple of the alphabet size; acceptable for captcha text.
chars(Rand, AlphabetSelector) ->
    Alphabet = alphabet(AlphabetSelector),
    AlphabetSize = byte_size(Alphabet),
    <<<<(binary:at(Alphabet, C rem AlphabetSize))>> || <<C>> <= Rand>>.

%% Resolves a predefined alphabet name to its character set; a custom
%% binary alphabet (at least 4 characters) is passed through unchanged.
%% (Trailing dataset-extraction junk after the final clause was removed.)
alphabet(numbers) ->
    <<"0123456789">>;
alphabet(latin_lowercase) ->
    <<"abcdefghijklmnopqrstuvwxyz">>;
alphabet(latin_uppercase) ->
    <<"ABCDEFGHIJKLMNOPQRSTUVWXYZ">>;
alphabet(Alphabet) when is_binary(Alphabet), byte_size(Alphabet) > 3 ->
    Alphabet.
%%%------------------------------------------------------------------------
%% Copyright 2020, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc All measurements are associated with an instrument. To record
%% measurements for an instrument it must first be created with `new' and
%% then can be referenced by name.
%% @end
%%%-------------------------------------------------------------------------
-module(ot_instrument).

%% @doc Calls the SDK to create a new instrument which can then be referenced
%% by name.
%% @end
-callback new(opentelemetry:meter(), ot_meter:name()) -> boolean().
-callback new(opentelemetry:meter(), ot_meter:name(), ot_meter:instrument_opts()) -> boolean().

%% @doc Returns an instrument definition which can be used to create a new instrument
%% by passing to `ot_meter:new_instruments/1'
%% @end
-callback definition(ot_meter:name()) -> ot_meter:instrument_definition().
-callback definition(ot_meter:name(), ot_meter:instrument_opts()) -> ot_meter:instrument_definition().

%% @doc Used by additive instruments to record measurements.
-callback add(ot_meter:bound_instrument(), number()) -> ok.
-callback add(opentelemetry:meter(), ot_meter:name(), number(), ot_meter:labels()) -> ok.

%% @doc Used by non-additive instruments to record measurements.
-callback record(ot_meter:bound_instrument(), number()) -> ok.
-callback record(opentelemetry:meter(), ot_meter:name(), number(), ot_meter:labels()) -> ok.

%% @doc Returns a measurement tuple that can be passed to a batch recording through `ot_meter:batch_record/3'
-callback measurement(ot_meter:bound_instrument() | ot_meter:name(), number()) ->
    {ot_meter:bound_instrument() | ot_meter:name(), number()}.

%% Implementations may omit the convenience recording callbacks.
-optional_callbacks([add/2,
                     add/4,
                     record/2,
                     record/4,
                     measurement/2]).
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(prop_emqx_rpc).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(ALL(Vars, Types, Exprs),
?SETUP(fun() ->
State = do_setup(),
fun() -> do_teardown(State) end
end, ?FORALL(Vars, Types, Exprs))).
%%--------------------------------------------------------------------
%% Properties
%%--------------------------------------------------------------------
%% Property: a cast to an arbitrary (possibly unreachable) node returns
%% true, and a call either fails with {badrpc, _} or yields an integer
%% system time.
prop_node() ->
?ALL(Node, nodename(),
begin
?assert(emqx_rpc:cast(Node, erlang, system_time, [])),
case emqx_rpc:call(Node, erlang, system_time, []) of
{badrpc, _Reason} -> true;
Delivery when is_integer(Delivery) -> true;
_Other -> false
end
end).
%% Same as prop_node/0 but exercises the keyed cast/call API variants.
prop_node_with_key() ->
?ALL({Node, Key}, nodename_with_key(),
begin
?assert(emqx_rpc:cast(Key, Node, erlang, system_time, [])),
case emqx_rpc:call(Key, Node, erlang, system_time, []) of
{badrpc, _Reason} -> true;
Delivery when is_integer(Delivery) -> true;
_Other -> false
end
end).
%% Property: multicall over a node list returns {Results, BadNodes}
%% (both lists) or {badrpc, _} when everything fails.
prop_nodes() ->
?ALL(Nodes, nodesname(),
begin
case emqx_rpc:multicall(Nodes, erlang, system_time, []) of
{badrpc, _Reason} -> true;
{RealResults, RealBadNodes}
when is_list(RealResults);
is_list(RealBadNodes) ->
true;
_Other -> false
end
end).
%% Keyed variant of prop_nodes/0.
prop_nodes_with_key() ->
?ALL({Nodes, Key}, nodesname_with_key(),
begin
case emqx_rpc:multicall(Key, Nodes, erlang, system_time, []) of
{badrpc, _Reason} -> true;
{RealResults, RealBadNodes}
when is_list(RealResults);
is_list(RealBadNodes) ->
true;
_Other -> false
end
end).
%%--------------------------------------------------------------------
%% Helper
%%--------------------------------------------------------------------
%% Per-property setup, run via the ?SETUP wrapper in ?ALL: starts gen_rpc
%% with a 1ms call timeout (so unreachable random nodes fail fast),
%% silences logging, and mocks gen_rpc:multicall/4 to use the same short
%% timeout. Returns the result of the last meck:expect (ok).
do_setup() ->
{ok, _Apps} = application:ensure_all_started(gen_rpc),
ok = application:set_env(gen_rpc, call_receive_timeout, 1),
ok = emqx_logger:set_log_level(emergency),
ok = meck:new(gen_rpc, [passthrough, no_history]),
ok = meck:expect(gen_rpc, multicall,
fun(Nodes, Mod, Fun, Args) ->
gen_rpc:multicall(Nodes, Mod, Fun, Args, 1)
end).
%% Undoes do_setup/0: restores the log level, stops gen_rpc and removes
%% the meck mock. The setup state argument is ignored.
do_teardown(_) ->
ok = emqx_logger:set_log_level(debug),
ok = application:stop(gen_rpc),
ok = meck:unload(gen_rpc).
%%--------------------------------------------------------------------
%% Generator
%%--------------------------------------------------------------------

%% Generates a full node name atom such as 'emqxct@localhost'.
%% NOTE(review): list_to_atom/1 is acceptable here only because this is
%% test-only generated data; never use it on untrusted input (atoms are
%% never garbage collected).
nodename() ->
    ?LET({NodePrefix, HostName},
         {node_prefix(), hostname()},
         begin
             Node = NodePrefix ++ "@" ++ HostName,
             list_to_atom(Node)
         end).

%% Generates a {NodeName, Key} pair for the keyed RPC API (Key in 0..10).
nodename_with_key() ->
    ?LET({NodePrefix, HostName, Key},
         {node_prefix(), hostname(), choose(0, 10)},
         begin
             Node = NodePrefix ++ "@" ++ HostName,
             {list_to_atom(Node), Key}
         end).

%% Either a list of random node names or just the local node.
nodesname() ->
    oneof([list(nodename()), [node()]]).

nodesname_with_key() ->
    oneof([{list(nodename()), choose(0, 10)}, {[node()], 1}]).

node_prefix() ->
    oneof(["emqxct", text_like()]).

%% A short (1..5 character) lowercase-ASCII string.
%% (Trailing dataset-extraction junk after the last generator was removed.)
text_like() ->
    ?SUCHTHAT(Text, list(range($a, $z)), (length(Text) =< 5 andalso length(Text) > 0)).

hostname() ->
    oneof(["127.0.0.1", "localhost"]).
%%==============================================================================
%% Copyright 2010 Erlang Solutions Ltd.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
-module(escalus).
% Public API
-export([suite/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
create_users/1,
delete_users/1,
override/3,
make_everyone_friends/1,
story/3,
assert/2,
assert/3,
assert_many/2,
send/2,
wait_for_stanza/1,
wait_for_stanza/2,
wait_for_stanzas/2,
wait_for_stanzas/3,
peek_stanzas/1]).
%%--------------------------------------------------------------------
%% Public API
%%--------------------------------------------------------------------
%% Common Test suite info: declares the escalus_users config entry as
%% required, so CT refuses to run without it.
suite() ->
[{require, escalus_users}].
%% Starts the exmpp application once for the whole suite; the return
%% value of application:start/1 is deliberately ignored (it may already
%% be running).
init_per_suite(Config) ->
application:start(exmpp),
Config.
end_per_suite(_Config) ->
ok.
%% Starts the per-testcase cleaner process used to tear down XMPP clients.
init_per_testcase(_CaseName, Config) ->
escalus_cleaner:start(Config).
%% Stops the cleaner, closing any clients registered during the testcase.
end_per_testcase(_CaseName, Config) ->
escalus_cleaner:stop(Config).
%%--------------------------------------------------------------------
%% Public API - forward functions from other modules
%%--------------------------------------------------------------------

%% Helper macros generating thin 1/2/3-arity wrappers that delegate to
%% another module, keeping `escalus' a single convenient entry point.
%% (Trailing dataset-extraction junk after the last forward was removed.)
-define(FORWARD1(M, F), F(X) -> M:F(X)).
-define(FORWARD2(M, F), F(X, Y) -> M:F(X, Y)).
-define(FORWARD3(M, F), F(X, Y, Z) -> M:F(X, Y, Z)).

?FORWARD1(escalus_users, create_users).
?FORWARD1(escalus_users, delete_users).
?FORWARD1(escalus_story, make_everyone_friends).
?FORWARD3(escalus_story, story).
?FORWARD2(escalus_new_assert, assert).
?FORWARD3(escalus_new_assert, assert).
?FORWARD2(escalus_new_assert, assert_many).
?FORWARD2(escalus_client, send).
?FORWARD1(escalus_client, wait_for_stanza).
?FORWARD2(escalus_client, wait_for_stanza).
?FORWARD2(escalus_client, wait_for_stanzas).
?FORWARD3(escalus_client, wait_for_stanzas).
?FORWARD1(escalus_client, peek_stanzas).
?FORWARD3(escalus_overridables, override).
%% @author Couchbase <<EMAIL>>
%% @copyright 2016 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% 1. Permission is defined as a pair {object, operation}
%% 2. Objects are organized in the tree structure with common root []
%% 3. One vertex of this tree can be parametrized: {bucket, bucket_name},
%% wildcard all can be used in place of bucket_name
%% 4. Permission pattern is a pair {Object pattern, Allowed operations}
%% 5. Allowed operations can be list of operations, all or none
%% 6. Object pattern is a list of vertices that define a certain subtree of the objects tree
%% 7. Object pattern vertex {bucket, bucket_name} always matches object vertex {bucket, any},
%% object pattern vertex {bucket, any} matches {bucket, bucket_name} with any bucket_name
%% otherwise vertices match if they are equal
%% 8. Object matches the object pattern if all the vertices of object pattern match
%% corresponding vertices of the object.
%% 9. Each role is defined as a list of permission patterns.
%% 10.To find which operations are allowed for certain object in certain role we look for the
%% first permission pattern with matching object pattern in the permission pattern list of
%% the role.
%% 11.The permission is allowed by the role if its operation is among the allowed operations
%% for its object.
%% 12.Each user can have multiple roles assigned
%% 13.Certain permission is allowed to the user if it is allowed at least by one of the roles
%% assigned to user.
%% @doc roles and permissions implementation
-module(menelaus_roles).
-include("ns_common.hrl").
-include("ns_config.hrl").
-include("rbac.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([get_definitions/1,
preconfigured_roles/0,
preconfigured_roles_45/0,
is_allowed/2,
get_roles/1,
get_compiled_roles/1,
compile_roles/2,
get_all_assignable_roles/1,
validate_roles/2]).
-spec preconfigured_roles() -> [rbac_role_def(), ...].
%% All built-in role definitions: the 4.5 set (upgraded in-memory to the
%% spock permission shape) followed by the spock-only roles.
preconfigured_roles() ->
    Legacy = upgrade_roles_spock(preconfigured_roles_45()),
    Legacy ++ preconfigured_roles_spock().
-spec preconfigured_roles_45() -> [rbac_role_def(), ...].
%% Role definitions as shipped in 4.5. Each entry is
%% {Name, ParamDefs, UiProps, PermissionPatterns}; the first pattern whose
%% object matches wins (see module header, items 9-11). bucket_sasl has
%% empty UiProps because it is internal and never shown in the UI.
preconfigured_roles_45() ->
[{admin, [],
[{name, <<"Admin">>},
{desc, <<"Can manage ALL cluster features including security.">>}],
[{[], all}]},
{ro_admin, [],
[{name, <<"Read Only Admin">>},
{desc, <<"Can view ALL cluster features.">>}],
[{[{bucket, any}, password], none},
{[{bucket, any}, data], none},
{[admin, security], [read]},
{[admin], none},
{[], [read]}]},
{cluster_admin, [],
[{name, <<"Cluster Admin">>},
{desc, <<"Can manage all cluster features EXCEPT security.">>}],
[{[admin], none},
{[], all}]},
{bucket_admin, [bucket_name],
[{name, <<"Bucket Admin">>},
{desc, <<"Can manage ALL bucket features for specified buckets (incl. start/stop XDCR)">>}],
[{[{bucket, bucket_name}, xdcr], [read, execute]},
{[{bucket, bucket_name}], all},
{[{bucket, any}, settings], [read]},
{[{bucket, any}], none},
{[xdcr], none},
{[admin], none},
{[], [read]}]},
{bucket_sasl, [bucket_name],
[],
[{[{bucket, bucket_name}, data], all},
{[{bucket, bucket_name}, views], all},
{[{bucket, bucket_name}], [read, flush]},
{[pools], [read]}]},
{views_admin, [bucket_name],
[{name, <<"Views Admin">>},
{desc, <<"Can manage views for specified buckets">>}],
[{[{bucket, bucket_name}, views], all},
{[{bucket, bucket_name}, data], [read]},
{[{bucket, any}, settings], [read]},
{[{bucket, any}], none},
{[xdcr], none},
{[admin], none},
{[], [read]}]},
{replication_admin, [],
[{name, <<"Replication Admin">>},
{desc, <<"Can manage ONLY XDCR features (cluster AND bucket level)">>}],
[{[{bucket, any}, xdcr], all},
{[{bucket, any}, data], [read]},
{[{bucket, any}, settings], [read]},
{[{bucket, any}], none},
{[xdcr], all},
{[admin], none},
{[], [read]}]}].
-spec preconfigured_roles_spock() -> [rbac_role_def(), ...].
%% Fine-grained roles introduced in spock. All are parametrized by
%% bucket_name and grant [pools]->read so holders can reach the REST API.
preconfigured_roles_spock() ->
[{data_reader, [bucket_name],
[{name, <<"Data Reader">>},
{desc, <<"Can read information from specified bucket">>}],
[{[{bucket, bucket_name}, stats], [read]},
{[{bucket, bucket_name}, data, docs], [read]},
{[{bucket, bucket_name}, data, meta], [read]},
{[{bucket, bucket_name}, data, xattr], [read]},
{[{bucket, bucket_name}, n1ql], [execute]},
{[pools], [read]}]},
{data_reader_writer, [bucket_name],
[{name, <<"Data Reader Writer">>},
{desc, <<"Can read and write information from/to specified bucket">>}],
[{[{bucket, bucket_name}, stats], [read]},
{[{bucket, bucket_name}, data, docs], [read, write]},
{[{bucket, bucket_name}, data, meta], [read, write]},
{[{bucket, bucket_name}, data, xattr], [read, write]},
{[{bucket, bucket_name}, n1ql], [execute]},
{[pools], [read]}]},
{data_dcp_reader, [bucket_name],
[{name, <<"Data DCP Reader">>},
{desc, <<"Can read DCP data streams and stats">>}],
[{[{bucket, bucket_name}, stats], [read]},
{[{bucket, bucket_name}, data, dcp], [read]},
{[pools], [read]}]},
{data_backup, [bucket_name],
[{name, <<"Data Backup">>},
{desc, <<"Can backup and restore bucket data">>}],
[{[{bucket, bucket_name}, stats], [read]},
{[{bucket, bucket_name}, data, meta], [read, write]},
{[pools], [read]}]},
{data_monitoring, [bucket_name],
[{name, <<"Data Monitoring">>},
{desc, <<"Can read full bucket stats">>}],
[{[{bucket, bucket_name}, stats], [read]},
{[pools], [read]}]},
{fts_admin, [bucket_name],
[{name, <<"FTS Admin">>},
{desc, <<"Can administer all FTS features">>}],
[{[{bucket, bucket_name}, fts], [read, write, manage]},
{[pools], [read]}]},
{fts_searcher, [bucket_name],
[{name, <<"FTS Searcher">>},
{desc, <<"Can query FTS indexes if they have bucket permissions">>}],
[{[{bucket, bucket_name}, fts], [read]},
{[pools], [read]}]},
{query_select, [bucket_name],
[{name, <<"Query Select">>},
{desc, <<"Can execute SELECT statement on bucket to retrieve data">>}],
[{[{bucket, bucket_name}, n1ql, select], [execute]},
{[pools], [read]}]},
{query_update, [bucket_name],
[{name, <<"Query Update">>},
{desc, <<"Can execute UPDATE statement on bucket to update data">>}],
[{[{bucket, bucket_name}, n1ql, update], [execute]},
{[pools], [read]}]},
{query_insert, [bucket_name],
[{name, <<"Query Insert">>},
{desc, <<"Can execute INSERT statement on bucket to add data">>}],
[{[{bucket, bucket_name}, n1ql, insert], [execute]},
{[pools], [read]}]},
{query_delete, [bucket_name],
[{name, <<"Query Delete">>},
{desc, <<"Can execute DELETE statement on bucket to delete data">>}],
[{[{bucket, bucket_name}, n1ql, delete], [execute]},
{[pools], [read]}]},
{manage_index, [bucket_name],
[{name, <<"Manage Index">>},
{desc, <<"Can manage indexes for the bucket">>}],
[{[{bucket, bucket_name}, n1ql, create_index], [execute]},
{[{bucket, bucket_name}, n1ql, alter_index], [execute]},
{[pools], [read]}]},
{system_catalog, [bucket_name],
[{name, <<"System Catalog">>},
{desc, <<"Can lookup system catalog information">>}],
[{[{bucket, bucket_name}, n1ql, list_indexes], [execute]},
{[pools], [read]}]}].
%% Upgrades a 4.5 definitions list to the spock shape by prepending the
%% n1ql execute permission to the views_admin role. Crashes (badmatch)
%% if views_admin is absent — it is always present in the built-in set.
upgrade_roles_spock(Definitions) ->
    {views_admin, Params, Info, Permissions} =
        lists:keyfind(views_admin, 1, Definitions),
    NewPermissions = [{[{bucket, bucket_name}, n1ql], [execute]} | Permissions],
    lists:keyreplace(views_admin, 1, Definitions,
                     {views_admin, Params, Info, NewPermissions}).
-spec get_definitions(ns_config()) -> [rbac_role_def(), ...].
%% @doc Fetches the role definitions stored in ns_config. On a cluster
%% that is not yet fully spock, the stored tables are the 4.5 ones, so
%% the spock upgrade is applied on the fly to keep the shape uniform.
get_definitions(Config) ->
{value, RolesDefinitions} = ns_config:search(Config, roles_definitions),
case cluster_compat_mode:is_cluster_spock(Config) of
true ->
RolesDefinitions;
false ->
upgrade_roles_spock(RolesDefinitions)
end.
-spec object_match(rbac_permission_object(), rbac_permission_pattern_object()) ->
boolean().
%% An object matches a pattern when every pattern vertex matches the
%% corresponding object vertex; a pattern shorter than the object matches
%% (it denotes a subtree), a pattern longer than the object does not.
object_match(_Object, []) ->
    true;
object_match([], [_ | _]) ->
    false;
object_match([ObjVertex | ObjRest], [PatVertex | PatRest]) ->
    case vertex_match(ObjVertex, PatVertex) of
        true -> object_match(ObjRest, PatRest);
        false -> false
    end.

%% Two vertices match when equal, or when either side of a same-named
%% parametrized vertex is the wildcard 'any'.
vertex_match({Name, any}, {Name, _}) -> true;
vertex_match({Name, _}, {Name, any}) -> true;
vertex_match(Same, Same) -> true;
vertex_match(_, _) -> false.
-spec get_allowed_operations(rbac_permission_object(), [rbac_permission_pattern()]) ->
rbac_permission_pattern_operations().
%% Walks the permission patterns in order and returns the operations of
%% the first pattern matching Object; 'none' when nothing matches.
get_allowed_operations(_Object, []) ->
    none;
get_allowed_operations(Object, [{Pattern, Operations} | RestPatterns]) ->
    case object_match(Object, Pattern) of
        true -> Operations;
        false -> get_allowed_operations(Object, RestPatterns)
    end.
-spec operation_allowed(rbac_operation(), rbac_permission_pattern_operations()) ->
boolean().
%% 'all' permits every operation, 'none' permits nothing; otherwise the
%% operation must be a member of the allowed-operations list.
operation_allowed(_Op, all) ->
    true;
operation_allowed(_Op, none) ->
    false;
operation_allowed(Op, AllowedOps) ->
    lists:member(Op, AllowedOps).
-spec is_allowed(rbac_permission(), rbac_identity() | [rbac_compiled_role()]) -> boolean().
%% The identity form compiles the identity's roles first; the compiled
%% form grants the permission if ANY role allows the operation on the
%% object (module header, item 13).
is_allowed(Permission, {_User, _Domain} = Identity) ->
    is_allowed(Permission, get_compiled_roles(Identity));
is_allowed({Object, Operation}, CompiledRoles) ->
    RoleAllows =
        fun (RolePatterns) ->
                Allowed = get_allowed_operations(Object, RolePatterns),
                operation_allowed(Operation, Allowed)
        end,
    lists:any(RoleAllows, CompiledRoles).
-spec substitute_params([string()], [atom()], [rbac_permission_pattern_raw()]) ->
[rbac_permission_pattern()].
%% Instantiates raw permission patterns by replacing each parametrized
%% vertex {Name, ParamAtom} with the concrete value bound to that
%% parameter; the wildcard {Name, any} and plain vertices pass through.
substitute_params(Params, ParamDefinitions, Permissions) ->
    Bindings = lists:zip(ParamDefinitions, Params),
    [{[substitute_vertex(Vertex, Bindings) || Vertex <- ObjectPattern],
      AllowedOperations}
     || {ObjectPattern, AllowedOperations} <- Permissions].

%% Crashes (badmatch) on a parametrized vertex whose parameter was not
%% declared, same as the original keyfind-based code.
substitute_vertex({Name, any}, _Bindings) ->
    {Name, any};
substitute_vertex({Name, Param}, Bindings) ->
    {Param, Value} = lists:keyfind(Param, 1, Bindings),
    {Name, Value};
substitute_vertex(Vertex, _Bindings) ->
    Vertex.
-spec compile_roles([rbac_role()], [rbac_role_def()]) -> [rbac_compiled_role()].
%% Resolves each assigned role (plain atom or {Name, Params}) to its
%% concrete permission-pattern list using the given definitions.
compile_roles(Roles, Definitions) ->
    [compile_one_role(Role, Definitions) || Role <- Roles].

%% Non-parametrized roles return their patterns as-is; parametrized
%% roles have their parameters substituted into the raw patterns.
compile_one_role(Name, Definitions) when is_atom(Name) ->
    {Name, [], _Props, Permissions} = lists:keyfind(Name, 1, Definitions),
    Permissions;
compile_one_role({Name, Params}, Definitions) ->
    {Name, ParamDefs, _Props, Permissions} = lists:keyfind(Name, 1, Definitions),
    substitute_params(Params, ParamDefs, Permissions).
-spec get_user_roles(rbac_identity()) -> [rbac_role()].
%% @doc Looks up the roles stored for an external (saslauthd) or builtin
%% user. Pre-4.5 clusters only supported admin/ro_admin for saslauthd
%% users, so the legacy lookup maps those and returns [] otherwise.
get_user_roles({User, saslauthd} = Identity) ->
case cluster_compat_mode:is_cluster_45() of
true ->
menelaus_users:get_roles(ns_config:latest(), Identity);
false ->
case saslauthd_auth:get_role_pre_45(User) of
admin ->
[admin];
ro_admin ->
[ro_admin];
false ->
[]
end
end;
get_user_roles({_User, builtin} = Identity) ->
menelaus_users:get_roles(ns_config:latest(), Identity).
-spec get_roles(rbac_identity()) -> [rbac_role()].
%% @doc Maps an authenticated identity to its assigned roles.
%% Before the system is provisioned everyone is effectively admin;
%% afterwards a wrong token grants nothing and anonymous access is
%% limited to buckets configured without authentication.
get_roles({"", wrong_token}) ->
case ns_config_auth:is_system_provisioned() of
false ->
[admin];
true ->
[]
end;
get_roles({"", anonymous}) ->
case ns_config_auth:is_system_provisioned() of
false ->
[admin];
true ->
[{bucket_sasl, [BucketName]} ||
BucketName <- ns_config_auth:get_no_auth_buckets(ns_config:latest())]
end;
get_roles({_, admin}) ->
[admin];
get_roles({_, ro_admin}) ->
[ro_admin];
%% Bucket credentials grant the internal bucket_sasl role for that bucket.
get_roles({BucketName, bucket}) ->
[{bucket_sasl, [BucketName]}];
%% Builtin and saslauthd users have their roles stored per-user.
get_roles({_, builtin} = Identity) ->
get_user_roles(Identity);
get_roles({_, saslauthd} = Identity) ->
get_user_roles(Identity).
-spec get_compiled_roles(rbac_identity()) -> [rbac_compiled_role()].
%% @doc Resolves an identity's roles into concrete permission patterns.
%% Pre-4.5 clusters have no definitions in ns_config yet, so the
%% hard-coded tables are used instead.
get_compiled_roles(Identity) ->
Definitions =
case cluster_compat_mode:is_cluster_45() of
true ->
get_definitions(ns_config:latest());
false ->
preconfigured_roles()
end,
compile_roles(get_roles(Identity), Definitions).
-spec get_possible_param_values(ns_config(), atom()) -> [rbac_role_param()].
%% @doc Valid values for a role parameter: for bucket_name that is the
%% wildcard 'any' plus every configured bucket name.
get_possible_param_values(Config, bucket_name) ->
[any | [Name || {Name, _} <- ns_bucket:get_buckets(Config)]].
-spec get_all_assignable_roles(ns_config()) -> [rbac_role()].
%% @doc Enumerates every role a user may be assigned, expanding
%% bucket-parametrized roles once per possible bucket value. The internal
%% bucket_sasl role is excluded — it is granted implicitly, never assigned.
get_all_assignable_roles(Config) ->
BucketNames = get_possible_param_values(Config, bucket_name),
lists:foldr(
fun ({bucket_sasl, _, _, _}, Acc) ->
Acc;
({Role, [], Props, _}, Acc) ->
[{Role, Props} | Acc];
({Role, [bucket_name], Props, _}, Acc) ->
lists:foldr(
fun (BucketName, Acc1) ->
[{{Role, [BucketName]}, Props} | Acc1]
end, Acc, BucketNames)
end, [], get_definitions(Config)).
-spec validate_role(rbac_role(), [rbac_role_def()], ns_config()) -> boolean().
%% @doc Checks that a role (atom or {Name, Params}) exists in the
%% definitions and that every parameter value is currently valid.
validate_role(Role, Definitions, Config) when is_atom(Role) ->
validate_role(Role, [], Definitions, Config);
validate_role({Role, Params}, Definitions, Config) ->
validate_role(Role, Params, Definitions, Config).
%% A role validates when it is defined, its arity matches the definition,
%% and each parameter is among the allowed values for its kind.
validate_role(Role, Params, Definitions, Config) ->
case lists:keyfind(Role, 1, Definitions) of
{Role, ParamsDef, _, _} when length(Params) =:= length(ParamsDef) ->
lists:all(fun ({Param, ParamDef}) ->
lists:member(Param, get_possible_param_values(Config, ParamDef))
end, lists:zip(Params, ParamsDef));
_ ->
false
end.
%% @doc Validates a list of roles, returning ok or the list of roles
%% that failed validation.
%% NOTE(review): definitions are read with ns_config:search/1 (latest
%% global config) while parameter values use the Config argument —
%% looks inconsistent with get_definitions/1; confirm intentional.
validate_roles(Roles, Config) ->
{value, Definitions} = ns_config:search(roles_definitions),
UnknownRoles = [Role || Role <- Roles,
not validate_role(Role, Definitions, Config)],
case UnknownRoles of
[] ->
ok;
_ ->
{error, roles_validation, UnknownRoles}
end.
%% assertEqual is used instead of assert and assertNot to avoid
%% dialyzer warnings
object_match_test() ->
?assertEqual(true, object_match([o1, o2], [o1, o2])),
?assertEqual(false, object_match([o1], [o1, o2])),
?assertEqual(true, object_match([o1, o2], [o1])),
?assertEqual(true, object_match([{b, "a"}], [{b, "a"}])),
?assertEqual(false, object_match([{b, "a"}], [{b, "b"}])),
?assertEqual(true, object_match([{b, any}], [{b, "b"}])),
?assertEqual(true, object_match([{b, "a"}], [{b, any}])),
?assertEqual(true, object_match([{b, any}], [{b, any}])).
compile_roles_test() ->
?assertEqual([[{[{bucket, "test"}], none}]],
compile_roles([{test_role, ["test"]}],
[{test_role, [param], [], [{[{bucket, param}], none}]}])).
admin_test() ->
Roles = compile_roles([admin], preconfigured_roles()),
?assertEqual(true, is_allowed({[buckets], create}, Roles)),
?assertEqual(true, is_allowed({[something, something], anything}, Roles)).
ro_admin_test() ->
Roles = compile_roles([ro_admin], preconfigured_roles()),
?assertEqual(false, is_allowed({[{bucket, "test"}, password], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "test"}, data], read}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "test"}, something], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "test"}, something], write}, Roles)),
?assertEqual(false, is_allowed({[admin, security], write}, Roles)),
?assertEqual(true, is_allowed({[admin, security], read}, Roles)),
?assertEqual(false, is_allowed({[admin, other], write}, Roles)),
?assertEqual(true, is_allowed({[anything], read}, Roles)),
?assertEqual(false, is_allowed({[anything], write}, Roles)).
bucket_views_admin_check_global(Roles) ->
?assertEqual(false, is_allowed({[xdcr], read}, Roles)),
?assertEqual(false, is_allowed({[admin], read}, Roles)),
?assertEqual(true, is_allowed({[something], read}, Roles)),
?assertEqual(false, is_allowed({[something], write}, Roles)),
?assertEqual(false, is_allowed({[buckets], create}, Roles)).
bucket_views_admin_check_another(Roles) ->
?assertEqual(false, is_allowed({[{bucket, "another"}, xdcr], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "another"}, views], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "another"}, data], read}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "another"}, settings], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "another"}, settings], write}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "another"}], read}, Roles)),
?assertEqual(false, is_allowed({[buckets], create}, Roles)).
bucket_admin_check_default(Roles) ->
?assertEqual(true, is_allowed({[{bucket, "default"}, xdcr], read}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "default"}, xdcr], execute}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "default"}, anything], anything}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "default"}, anything], anything}, Roles)).
bucket_admin_test() ->
Roles = compile_roles([{bucket_admin, ["default"]}], preconfigured_roles()),
bucket_admin_check_default(Roles),
bucket_views_admin_check_another(Roles),
bucket_views_admin_check_global(Roles).
bucket_admin_wildcard_test() ->
Roles = compile_roles([{bucket_admin, [any]}], preconfigured_roles()),
bucket_admin_check_default(Roles),
bucket_views_admin_check_global(Roles).
views_admin_check_default(Roles) ->
?assertEqual(true, is_allowed({[{bucket, "default"}, views], anything}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "default"}, data], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "default"}, data], write}, Roles)),
?assertEqual(true, is_allowed({[{bucket, "default"}, settings], read}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "default"}, settings], write}, Roles)),
?assertEqual(false, is_allowed({[{bucket, "default"}], read}, Roles)).
views_admin_test() ->
Roles = compile_roles([{views_admin, ["default"]}], preconfigured_roles()),
views_admin_check_default(Roles),
bucket_views_admin_check_another(Roles),
bucket_views_admin_check_global(Roles).
views_admin_wildcard_test() ->
Roles = compile_roles([{views_admin, [any]}], preconfigured_roles()),
views_admin_check_default(Roles),
bucket_views_admin_check_global(Roles).
bucket_sasl_check(Roles, Bucket, Allowed) ->
?assertEqual(Allowed, is_allowed({[{bucket, Bucket}, data], anything}, Roles)),
?assertEqual(Allowed, is_allowed({[{bucket, Bucket}], flush}, Roles)),
?assertEqual(Allowed, is_allowed({[{bucket, Bucket}], flush}, Roles)),
?assertEqual(false, is_allowed({[{bucket, Bucket}], write}, Roles)).
bucket_sasl_test() ->
Roles = compile_roles([{bucket_sasl, ["default"]}], preconfigured_roles()),
bucket_sasl_check(Roles, "default", true),
bucket_sasl_check(Roles, "another", false),
?assertEqual(true, is_allowed({[pools], read}, Roles)),
?assertEqual(false, is_allowed({[another], read}, Roles)).
%% EUnit: replication_admin gets xdcr everywhere plus read-only access to
%% bucket settings/data, but no passwords, views, writes, or admin.
replication_admin_test() ->
    Roles = compile_roles([replication_admin], preconfigured_roles()),
    Checks =
        [{true,  {[{bucket, "default"}, xdcr], anything}},
         {false, {[{bucket, "default"}, password], read}},
         {false, {[{bucket, "default"}, views], read}},
         {true,  {[{bucket, "default"}, settings], read}},
         {false, {[{bucket, "default"}, settings], write}},
         {true,  {[{bucket, "default"}, data], read}},
         {false, {[{bucket, "default"}, data], write}},
         {true,  {[xdcr], anything}},
         {false, {[admin], read}},
         {true,  {[other], read}}],
    lists:foreach(
      fun ({Allowed, Permission}) ->
              ?assertEqual(Allowed, is_allowed(Permission, Roles))
      end, Checks).
%% EUnit: validate_role/3 accepts well-formed roles (including the `any'
%% wildcard) against a config with one bucket "test", and rejects unknown
%% roles, unknown buckets, wrong arity, and duplicate parameters.
validate_role_test() ->
    Config = [[{buckets, [{configs, [{"test", []}]}]}]],
    Definitions = preconfigured_roles(),
    Check = fun(Expected, Role) ->
                    ?assertEqual(Expected,
                                 validate_role(Role, Definitions, Config))
            end,
    lists:foreach(fun(Role) -> Check(true, Role) end,
                  [admin,
                   {bucket_admin, ["test"]},
                   {views_admin, [any]}]),
    lists:foreach(fun(Role) -> Check(false, Role) end,
                  [something,
                   {bucket_admin, ["something"]},
                   {something, ["test"]},
                   {admin, ["test"]},
                   bucket_admin,
                   {bucket_admin, ["test", "test"]}]).
%% --- Day 2: I Was Told There Would Be No Math ---
%%
%% The elves are running low on wrapping paper, and so they need to
%% submit an order for more. They have a list of the dimensions
%% (length l, width w, and height h) of each present, and only want to
%% order exactly as much as they need.
%%
%% Fortunately, every present is a box (a perfect right rectangular
%% prism), which makes calculating the required wrapping paper for
%% each gift a little easier: find the surface area of the box, which
%% is 2*l*w + 2*w*h + 2*h*l. The elves also need a little extra paper
%% for each present: the area of the smallest side.
%%
%% For example:
%%
%% A present with dimensions 2x3x4 requires 2*6 + 2*12 + 2*8 = 52
%% square feet of wrapping paper plus 6 square feet of slack, for a
%% total of 58 square feet. A present with dimensions 1x1x10 requires
%% 2*1 + 2*10 + 2*10 = 42 square feet of wrapping paper plus 1 square
%% foot of slack, for a total of 43 square feet. All numbers in the
%% elves' list are in feet. How many total square feet of wrapping
%% paper should they order?
%%
%% --- Part Two ---
%%
%% The elves are also running low on ribbon. Ribbon is all the same
%% width, so they only have to worry about the length they need to
%% order, which they would again like to be exact.
%%
%% The ribbon required to wrap a present is the shortest distance
%% around its sides, or the smallest perimeter of any one face. Each
%% present also requires a bow made out of ribbon as well; the feet of
%% ribbon required for the perfect bow is equal to the cubic feet of
%% volume of the present. Don't ask how they tie the bow, though;
%% they'll never tell.
%%
%% For example:
%%
%% A present with dimensions 2x3x4 requires 2+2+3+3 = 10 feet of
%% ribbon to wrap the present plus 2*3*4 = 24 feet of ribbon for the
%% bow, for a total of 34 feet. A present with dimensions 1x1x10
%% requires 1+1+1+1 = 4 feet of ribbon to wrap the present plus 1*1*10
%% = 10 feet of ribbon for the bow, for a total of 14 feet. How many
%% total feet of ribbon should they order?
-module(day2).
-compile([export_all]).
%% Part 1: total square feet of wrapping paper for all presents in the
%% puzzle input.
solve_part1() ->
solve(input(), fun surface_area/3).
%% Part 2: total feet of ribbon (wrap + bow) for all presents in the
%% puzzle input.
solve_part2() ->
solve(input(), fun ribbon_length/3).
%% Read the puzzle input from "input/day2" (relative to CWD) and return
%% it as a list of line strings; crashes with a badmatch if the file is
%% missing.
input() ->
{ok, Input} = file:read_file("input/day2"),
string:tokens(binary_to_list(Input), "\n").
%% Sum F(L, W, H) over every parsed {L, W, H} dimension triple in Input.
solve(Input, F) ->
    Dims = parse(Input),
    lists:foldl(fun({L, W, H}, Acc) -> Acc + F(L, W, H) end, 0, Dims).
%% Parse each "LxWxH" line of Input into an {L, W, H} tuple.
parse(Input) ->
    lists:map(fun parse_line/1, Input).
%% Parse one "LxWxH" line into {L, W, H}; the [] match asserts there is
%% no trailing garbage after the three integers.
parse_line(Line) ->
    {ok, Dims, []} = io_lib:fread("~dx~dx~d", Line),
    list_to_tuple(Dims).
%% Wrapping paper for one box: full surface area (2 * each face) plus
%% slack equal to the area of the smallest face.
surface_area(L, W, H) ->
    Faces = [L * W, W * H, H * L],
    lists:sum([2 * Face || Face <- Faces]) + lists:min(Faces).
%% Ribbon for one box: smallest face perimeter (twice the two shortest
%% sides) plus a bow equal to the volume.
ribbon_length(L, W, H) ->
    [Short, Mid | _] = lists:sort([L, W, H]),
    2 * (Short + Mid) + L * W * H.
-include_lib("eunit/include/eunit.hrl").
%% EUnit generator: the two worked examples from the puzzle statement.
surface_area_test_() ->
[ ?_assertEqual(58, surface_area(2,3,4))
, ?_assertEqual(43, surface_area(1,1,10))
].
%% EUnit generator: the two ribbon examples from the puzzle statement.
ribbon_length_test_() ->
[ ?_assertEqual(34, ribbon_length(2,3,4))
, ?_assertEqual(14, ribbon_length(1,1,10))
].
%% EUnit: parsing two well-formed lines yields the dimension tuples.
parse_test() ->
?assertEqual([{1,2,3}, {3,2,1}], parse(["1x2x3","3x2x1"])).
%% EUnit regression against the author's own puzzle input file; requires
%% "input/day2" to be present and will fail on any other input.
solve_part1_test() ->
?assertEqual(1586300, solve_part1()).
%% EUnit regression for part 2 against the same personal input file.
solve_part2_test() ->
?assertEqual(3737498, solve_part2()).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2011 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc A "reduce"-like fitting (in the MapReduce sense). Really more
%% like a keyed list-fold. This fitting expects inputs of the
%% form `{Key, Value}'. For each input, the fitting evaluates a
%% function (its argument) on the `Value' and any previous result
%% for that `Key', or `[]' (the empty list) if that `Key' has
%% never been seen by this worker. When `done' is finally
%% received, the fitting sends each key-value pair it has
%%      evaluated as an input to the next fitting.
%%
%% The intent is that a fitting might receive a stream of inputs
%% like `{a, 1}, {b, 2}, {a 3}, {b, 4}' and send on results like
%% `{a, 4}, {b, 6}' by using a simple "sum" function.
%%
%% This function expects a function as its argument. The function
%% should be arity-4 and expect the arguments:
%%<dl><dt>
%% `Key' :: term()
%%</dt><dd>
%% Whatever aggregation key is necessary for the algorithm.
%%</dd><dt>
%% `InAccumulator' :: [term()]
%%</dt><dd>
%% A list composed of the new input, cons'd on the front of the
%% result of the last evaluation for this `Key' (or the empty list
%% if this is the first evaluation for the key).
%%</dd><dt>
%% `Partition' :: riak_pipe_vnode:partition()
%%</dt><dd>
%% The partition of the vnode on which this worker is running.
%% (Useful for logging, or sending other output.)
%%</dd><dt>
%% `FittingDetails' :: #fitting_details{}
%%</dt><dd>
%% The details of this fitting.
%% (Useful for logging, or sending other output.)
%%</dd></dl>
%%
%% The function should return a tuple of the form `{ok,
%% NewAccumulator}', where `NewAccumulator' is a list, onto which
%% the next input will be cons'd. For example, the function to
%% sum values for a key, as described above might look like:
%% ```
%% fun(_Key, Inputs, _Partition, _FittingDetails) ->
%% {ok, [lists:sum(Inputs)]}
%% end
%% '''
%%
%% The preferred consistent-hash function for this fitting is
%% {@link chashfun/1}. It hashes the input `Key'. Any other
%% partition function should work, but beware that a function
%% that sends values for the same `Key' to different partitions
%% will result in fittings down the pipe receiving multiple
%% results for the `Key'.
%%
%% This fitting produces as its archive, the store of evaluation
%% results for the keys it has seen. To merge handoff values,
%% the lists stored with each key are concatenated, and the
%% reduce function is re-evaluated.
-module(riak_pipe_w_reduce).
-behaviour(riak_pipe_vnode_worker).
-export([init/2,
process/3,
done/1,
archive/1,
handoff/2,
validate_arg/1]).
-export([chashfun/1]).
-include("riak_pipe.hrl").
-record(state, {accs :: dict(),
p :: riak_pipe_vnode:partition(),
fd :: riak_pipe_fitting:details()}).
-opaque state() :: #state{}.
-export_type([state/0]).
%% @doc Setup creates the store for evaluation results (a dict()) and
%% stashes away the `Partition' and `FittingDetails' for later.
-spec init(riak_pipe_vnode:partition(),
riak_pipe_fitting:details()) ->
{ok, state()}.
init(Partition, FittingDetails) ->
%% accs starts empty; process/3 fills it keyed by input Key.
{ok, #state{accs=dict:new(), p=Partition, fd=FittingDetails}}.
%% @doc Process looks up the previous result for the `Key', and then
%% evaluates the function on that with the new `Input'.
%%
%% On a reduce error the input is logged and dropped, leaving the
%% stored accumulator for the key unchanged.
-spec process({term(), term()}, boolean(), state()) -> {ok, state()}.
process({Key, Input}, _Last, #state{accs=Accs}=State) ->
    %% Previous accumulator for this Key, or [] on first sight.
    PrevAcc = case dict:find(Key, Accs) of
                  {ok, Found} -> Found;
                  error       -> []
              end,
    InAcc = [Input|PrevAcc],
    case reduce(Key, InAcc, State) of
        {ok, OutAcc} ->
            {ok, State#state{accs=dict:store(Key, OutAcc, Accs)}};
        {error, {Type, Error, Trace}} ->
            %%TODO: forward
            lager:error(
              "~p:~p reducing:~n ~P~n ~P",
              [Type, Error, InAcc, 2, Trace, 5]),
            {ok, State}
    end.
%% @doc Unless the aggregation function sends its own outputs, done/1
%% is where all outputs are sent.
-spec done(state()) -> ok.
done(#state{accs=Accs, p=Partition, fd=FittingDetails}) ->
%% Each {Key, Accumulator} pair is emitted as one output; the `ok ='
%% match crashes the worker if any send fails.
_ = [ ok = riak_pipe_vnode_worker:send_output(A, Partition, FittingDetails)
|| A <- dict:to_list(Accs)],
ok.
%% @doc The archive is just the store (dict()) of evaluation results.
-spec archive(state()) -> {ok, dict()}.
archive(#state{accs=Accs}) ->
%% just send state of reduce so far
{ok, Accs}.
%% @doc The handoff merge is simply a dict:merge, where entries for
%% the same key are concatenated. The reduce function is also
%% re-evaluated for the key, such that {@link done/1} still has
%% the correct value to send, even if no more inputs arrive.
-spec handoff(dict(), state()) -> {ok, state()}.
handoff(HandoffAccs, #state{accs=Accs}=State) ->
%% for each Acc, add to local accs;
%% keys present in only one dict are kept as-is by dict:merge/3.
NewAccs = dict:merge(fun(K, HA, A) ->
handoff_acc(K, HA, A, State)
end,
HandoffAccs, Accs),
{ok, State#state{accs=NewAccs}}.
%% @doc The dict:merge function for handoff. Handles the reducing.
-spec handoff_acc(term(), [term()], [term()], state()) -> [term()].
handoff_acc(Key, HandoffAccs, LocalAccs, State) ->
%% Handed-off entries are placed ahead of the local ones before
%% re-reducing the combined list.
InAcc = HandoffAccs++LocalAccs,
case reduce(Key, InAcc, State) of
{ok, OutAcc} ->
OutAcc;
{error, {Type, Error, Trace}} ->
lager:error(
"~p:~p reducing handoff:~n ~P~n ~P",
[Type, Error, InAcc, 2, Trace, 5]),
%% On failure, keep only the local accumulator so the worker
%% retains a usable (if incomplete) value for the key.
LocalAccs %% don't completely barf
end.
%% @doc Actually evaluate the aggregation function.
%%
%% Returns {ok, OutAcc} when the user fun returns {ok, List};
%% any exception (or a non-{ok, list()} return, via the badmatch)
%% is converted to {error, {Class, Reason, Stacktrace}}.
-spec reduce(term(), [term()], state()) ->
{ok, [term()]} | {error, {term(), term(), term()}}.
reduce(Key, InAcc, #state{p=Partition, fd=FittingDetails}) ->
Fun = FittingDetails#fitting_details.arg,
try
{ok, OutAcc} = Fun(Key, InAcc, Partition, FittingDetails),
true = is_list(OutAcc), %%TODO: nicer error
{ok, OutAcc}
catch Type:Error ->
%% NOTE(review): erlang:get_stacktrace/0 was removed in OTP 24;
%% on modern OTP this must become `catch Type:Error:Trace ->'.
{error, {Type, Error, erlang:get_stacktrace()}}
end.
%% @doc Check that the arg is a valid arity-4 function. See {@link
%% riak_pipe_v:validate_function/3}.
-spec validate_arg(term()) -> ok | {error, iolist()}.
validate_arg(Fun) when is_function(Fun) ->
riak_pipe_v:validate_function("arg", 4, Fun);
validate_arg(Fun) ->
%% Non-function args are rejected with a descriptive message.
{error, io_lib:format("~p requires a function as argument, not a ~p",
[?MODULE, riak_pipe_v:type_of(Fun)])}.
%% @doc The preferred hashing function. Chooses a partition based
%% on the hash of the `Key' (the value part of the input is
%% ignored), so all inputs for a key land on the same worker.
-spec chashfun({term(), term()}) -> riak_pipe_vnode:chash().
chashfun({Key,_}) ->
chash:key_of(Key).
-module(watts_file_util).
%%
%% Copyright 2016 SCC/KIT
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0 (see also the LICENSE file)
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-author("<NAME>, Bas.Wegh<at><EMAIL>").
-export([read_pem_entries/1]).
-export([to_abs/1]).
-export([to_abs/2]).
-include("watts.hrl").
%% @doc convert a maybe relative path to an absolute path
%% An `undefined' base makes combine_or_home/2 fall back to the
%% configured config_path.
-spec to_abs(FileName :: string() | binary()) -> string() | binary().
to_abs(FileName) ->
combine_or_home(FileName, undefined).
%% @doc convert a maybe relative path to an absolute path, Base beeing the root.
%% Paths starting with "~/" are still resolved against the user's
%% home directory, not against BaseDirectory.
-spec to_abs(Path :: string() | binary(),
BasePath :: string() | binary() | undefined)
-> string() | binary().
to_abs(FileName, BaseDirectory) ->
combine_or_home(FileName, BaseDirectory).
%% @doc check if the path starts with ~ and use home as root then.
%% Handles both list and binary paths; a leading "~/" is stripped and
%% resolved against $HOME, otherwise the path is joined to BaseDir
%% (or to the configured config_path when BaseDir is `undefined').
-spec combine_or_home(Path :: string() | binary(),
BasePath :: string() | binary() | undefined)
-> string() | binary().
%% list path starting with "~/"
combine_or_home([ $~, $/ | Relative ], _BaseDir) ->
convert_home(Relative);
%% binary path starting with <<"~/">>
combine_or_home(<< $~:8, $/:8, Relative/binary >>, _BaseDir) ->
convert_home(Relative);
%% no base given: default to the application's config_path
combine_or_home(NonHome, undefined) ->
combine_or_home(NonHome, ?CONFIG(config_path, ""));
combine_or_home(NonHome, BaseDir) ->
filename:join(BaseDir, NonHome).
%% @doc append the path to the home dir.
%% The home directory comes from the emulator's -home argument
%% (init:get_argument/1), which is always set by the erl launcher.
-spec convert_home(RelativePath :: string() | binary()) -> string() | binary().
convert_home(Relative) ->
{ok, [[Home]]} = init:get_argument(home),
filename:join(Home, Relative).
%% @doc helper function to read pem encoded files
%% Returns the decoded PEM entries, or [] (with an error log) if the
%% file cannot be read.
-spec read_pem_entries(Path :: binary()) -> [tuple()].
read_pem_entries(Path) ->
extract_pem(file:read_file(Path), Path).
%% @doc helper function to decode pem entries
%% Any file-read failure is logged and mapped to an empty list so
%% callers never crash on a missing/unreadable file.
-spec extract_pem({ok, binary()} | any(), binary()) -> [tuple()].
extract_pem({ok, PemBin}, _) ->
public_key:pem_decode(PemBin);
extract_pem(Error, Path) ->
lager:error("Init: error reading file ~p: ~p", [Path, Error]),
[].
%%% @doc Calculate the date of Trinidad Carnival
-module(jouvert).
-export([countdown/0,
jouvert/0,
jouvert/1,
easter/1,
%% easter_test/0,
%% Make module usable as an escript
main/1,
format_daystime/1
]).
%% for testing
-export([jouvert_after/1, daystime_diff/2]).
%% Types not exported from the `calendar' module
-type days() :: 0..366.
-type year() :: 1970..10000.
%% @doc Time to wait till next year's carnival opens
%% Computed in UTC: the difference between now and the next J'ouvert.
-spec countdown() -> {days(), calendar:time()}.
countdown() ->
UTCNow = calendar:universal_time(),
Jouvert = jouvert_after(UTCNow),
daystime_diff(Jouvert, UTCNow).
%% @doc Difference between two datetimes, as {Days, {H, M, S}}.
%% Assumes Later >= Earlier; otherwise the difference is negative and
%% seconds_to_daystime/1 returns negative components.
-spec daystime_diff(calendar:datetime(), calendar:datetime()) ->
        {days(), calendar:time()}.
daystime_diff(Later, Earlier) ->
    LaterSecs = calendar:datetime_to_gregorian_seconds(Later),
    EarlierSecs = calendar:datetime_to_gregorian_seconds(Earlier),
    calendar:seconds_to_daystime(LaterSecs - EarlierSecs).
%% @doc Date and time of the coming J'ouvert
%% Convenience wrapper over jouvert_after/1 using the current UTC time.
-spec jouvert() -> calendar:datetime().
jouvert() ->
UTCNow = calendar:universal_time(),
jouvert_after(UTCNow).
%% @doc Date of the coming J'ouvert strictly after the given datetime.
%% If this year's J'ouvert has already started (or is exactly now),
%% next year's date is returned instead.
-spec jouvert_after(calendar:datetime()) -> calendar:datetime().
jouvert_after({{Year, _M, _D}, _Time} = DateTime) ->
    ThisYears = jouvert(Year),
    if
        DateTime < ThisYears -> ThisYears;
        true -> jouvert(Year + 1)
    end.
%% @doc Calculate the start of J'ouvert in the given year
%%
%% The date for J'ouvert is the Monday before Ash Wednesday,
%% 48 days before Easter Sunday.
-spec jouvert(year()) -> calendar:datetime().
jouvert(Year) ->
Easter = easter(Year),
JouvertMon = days_before(Easter, 48),
%% Pair the Carnival Monday date with the fixed UTC start time.
{JouvertMon, jouvert_time()}.
%% @doc Start time of J'ouvert in UTC
%%
%% this is a constant function.
%% 4:00am Trinidad time(-4:00) is 8:00 UTC.
-spec jouvert_time() -> calendar:time().
jouvert_time() ->
{8, 0, 0}. %% in UTC
%% @doc Subtract a number of days from a date.
%%
%% Returns a plain calendar date. (The previous spec wrongly declared
%% calendar:datetime(); calendar:gregorian_days_to_date/1 yields a
%% {Year, Month, Day} date tuple, which is exactly what jouvert/1
%% pairs with jouvert_time/0.)
-spec days_before(calendar:date(), days()) -> calendar:date().
days_before(Date, Days) ->
    GD = calendar:date_to_gregorian_days(Date),
    calendar:gregorian_days_to_date(GD - Days).
%% @doc Easter date calculation (anonymous Gregorian computus).
%%
%% The single-letter intermediates follow the published algorithm;
%% see http://en.wikipedia.org/wiki/Computus
%% and https://www.drupal.org/node/1180480
-spec easter(year()) -> calendar:date().
easter(Year) ->
    A = Year rem 19,                       % year's place in the 19-year Metonic cycle
    B = Year div 100,
    C = Year rem 100,
    D = B div 4,
    E = B rem 4,
    F = (B + 8) div 25,
    G = (B - F + 1) div 3,
    H = (19 * A + B - D - G + 15) rem 30,  % epact-derived correction
    I = C div 4,
    K = C rem 4,
    L = (32 + 2 * E + 2 * I - H - K) rem 7,
    M = (A + 11 * H + 22 * L) div 451,
    %% Fold the final offset once, then split it into month and day.
    N = H + L - 7 * M + 114,
    {Year, N div 31, N rem 31 + 1}.
%%% Functions for standalone use
%% @doc Escript entry point
%% Prints the countdown to the next J'ouvert; command-line arguments
%% are ignored.
-spec main([string()]) -> no_return().
main(_) ->
io:format("~s till J'ouvert\n",
[format_daystime(countdown())]).
%% @doc Format a {Days, Time} tuple as English text,
%% e.g. {2, {4, 5, 6}} -> "2 days and 4:05:06" (iolist result;
%% minutes and seconds are zero-padded, hours are not).
-spec format_daystime({days(), calendar:time()}) -> iolist().
format_daystime({Days, {Hours, Mins, Secs}}) ->
    Plural = if Days =:= 1 -> ""; true -> "s" end,
    io_lib:format("~w day~s and ~w:~2..0w:~2..0w",
                  [Days, Plural, Hours, Mins, Secs]).
%% Computus test. OK.
%%
%% easter_test() ->
%% lists:foreach(fun ({Y, _M, _D} = Easter) ->
%% Easter = easter(Y)
%% end, easter_data()),
%% ok.
%%
%% %% easter_data() ->
%% [{2015, 4, 5},
%% {2016, 3, 27},
%% {2017, 4, 16},
%% {2018, 4, 1},
%% {2019, 4, 21},
%% {2020, 4, 12},
%% {2021, 4, 4},
%% {2022, 4, 17},
%% {2023, 4, 9},
%% {2024, 3, 31},
%% {2025, 4, 20},
%% {2026, 4, 5},
%% {2027, 3, 28},
%% {2028, 4, 16},
%% {2029, 4, 1},
%% {2030, 4, 21},
%% {2031, 4, 13},
%% {2032, 3, 28},
%% {2033, 4, 17},
%% {2034, 4, 9},
%% {2035, 3, 25}]. | src/jouvert.erl | 0.695648 | 0.614163 | jouvert.erl | starcoder |
%% Use this module to run some basic query against the chain
%% For example:
%%
%% - Lookup last 500 blocks for payment_v2 transactions:
%% lookup_txns_by_type(500, blockchain_txn_payment_v2).
%%
%% - Lookup last 500 blocks for particular txn hash:
%% lookup_txns_by_type(500, <<some_txn_hash>>).
-module(miner_query).
-export([poc_analyze/2,
txns/2, txns/3,
blocks/2,
lookup_txns_by_hash/2,
lookup_txns_by_type/2]).
%% Placeholder: PoC analysis over a height range is not implemented yet;
%% always returns ok.
poc_analyze(_Start, _End) ->
ok.
%% Return all transactions in blocks Start..End whose type is in Types.
%% A single type atom is normalised to a one-element list first.
txns(Type, Start, End) when is_atom(Type) ->
txns([Type], Start, End);
txns(Types, Start, End) ->
Txns = txns(Start, End),
lists:filter(fun(T) -> lists:member(blockchain_txn:type(T), Types) end, Txns).
%% Collect every transaction from blocks Start..End into a single
%% flat list.
txns(Start, End) ->
    lists:flatten([blockchain_block:transactions(B) || B <- blocks(Start, End)]).
%% Fetch blocks at heights Start..End (inclusive) from the local chain.
%% The {ok, B} match crashes if any block in the range is missing.
blocks(Start, End) ->
Chain = blockchain_worker:blockchain(),
[begin
{ok, B} = blockchain:get_block(N, Chain),
B
end
|| N <- lists:seq(Start, End)].
%% Scan the last LastXBlocks blocks (plus the current one -- the seq is
%% inclusive at both ends) for transactions of TxnType; returns
%% [{Height, [Txn]}] with empty heights filtered out.
lookup_txns_by_type(LastXBlocks, TxnType) ->
C = blockchain_worker:blockchain(),
{ok, Current} = blockchain:height(C),
Range = lists:seq(Current - LastXBlocks, Current),
%% element(2, ...) assumes get_block returns {ok, Block} for every height.
Blocks = [{I, element(2, blockchain:get_block(I, C))} || I <- Range],
Txns = lists:map(fun({I, B}) -> Ts = blockchain_block:transactions(B), {I, Ts} end, Blocks),
X = lists:map(fun({I, Ts}) -> {I, lists:filter(fun(T) ->
blockchain_txn:type(T) == TxnType
end, Ts)} end, Txns),
lists:filter(fun({_I, List}) -> length(List) /= 0 end, X).
%% Same scan as lookup_txns_by_type/2, but matching on the transaction
%% hash; returns [{Height, [Txn]}] for heights containing the hash.
lookup_txns_by_hash(LastXBlocks, TxnHash) ->
C = blockchain_worker:blockchain(),
{ok, Current} = blockchain:height(C),
Range = lists:seq(Current - LastXBlocks, Current),
%% element(2, ...) assumes get_block returns {ok, Block} for every height.
Blocks = [{I, element(2, blockchain:get_block(I, C))} || I <- Range],
Txns = lists:map(fun({I, B}) -> Ts = blockchain_block:transactions(B), {I, Ts} end, Blocks),
X = lists:map(fun({I, Ts}) -> {I, lists:filter(fun(T) -> blockchain_txn:hash(T) == TxnHash
end, Ts)} end, Txns),
lists:filter(fun({_I, List}) -> length(List) /= 0 end, X).
% @copyright 2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%%% @author <NAME> <<EMAIL>>
%%% @doc Basic Histogram.
%%% @end
%% @version $Id$
-module(histogram).
-author('<EMAIL>').
-vsn('$Id$').
-ifdef(with_export_type_support).
-export_type([histogram/0]).
-endif.
% external API
-export([create/1, add/2, add/3, get_data/1]).
% private API
-export([resize/1, insert/2, find_smallest_interval/1, merge_interval/2]).
-include("record_helpers.hrl").
-type data_item() :: {float(), pos_integer()}.
-type data_list() :: list(data_item()).
-record(histogram, {size = ?required(histogram, size):: non_neg_integer(),
data = [] :: data_list()}).
-opaque histogram() :: #histogram{}.
-spec create(Size::non_neg_integer()) -> histogram().
create(Size) ->
#histogram{size = Size}.
-spec add(Value::float(), Histogram::histogram()) -> histogram().
add(Value, Histogram) ->
add(Value, 1, Histogram).
-spec add(Value::float(), Count::pos_integer(), Histogram::histogram()) -> histogram().
add(_Value, _Count, Histogram = #histogram{size = 0}) ->
Histogram;
add(Value, Count, Histogram = #histogram{data = OldData}) ->
resize(Histogram#histogram{data = insert({Value, Count}, OldData)}).
-spec get_data(Histogram::histogram()) -> data_list().
get_data(Histogram) ->
Histogram#histogram.data.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
% private
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec resize(Histogram::histogram()) -> histogram().
resize(Histogram = #histogram{data = Data, size = ExpectedSize}) ->
ActualSize = length(Data),
if
ActualSize > ExpectedSize ->
SmallestInterval = find_smallest_interval(Data),
NewHistogram = Histogram#histogram{data = merge_interval(SmallestInterval, Data)},
resize(NewHistogram);
true ->
Histogram
end.
-spec insert(Value::data_item(), Data::data_list()) -> data_list().
insert({Value, Count}, []) ->
[{Value, Count}];
insert({Value, Count}, [{Value2, Count2} | Rest]) ->
case Value < Value2 of
true ->
[{Value, Count}, {Value2, Count2} | Rest];
false ->
[{Value2, Count2} | insert({Value, Count}, Rest)]
end.
%@doc PRE: length(Data) >= 2
-spec find_smallest_interval(Data::data_list()) -> float().
find_smallest_interval([{Value, _}, {Value2, _} | Rest]) ->
find_smallest_interval_loop(Value2 - Value, Value2, Rest).
find_smallest_interval_loop(MinInterval, _LastValue, []) ->
MinInterval;
find_smallest_interval_loop(MinInterval, LastValue, [{Value, _} | Rest]) ->
find_smallest_interval_loop(util:min(MinInterval, Value - LastValue), Value, Rest).
%@doc PRE: length(Data) >= 2
-spec merge_interval(Interval::float(), Data::data_list()) -> data_list().
merge_interval(Interval, [{Value, Count}, {Value2, Count2} | Rest]) ->
case Value2 - Value of
Interval ->
[{(Value*Count + Value2*Count2) / (Count + Count2), Count + Count2} | Rest];
_ ->
[{Value, Count} | merge_interval(Interval, [{Value2, Count2} | Rest])]
end. | src/histogram.erl | 0.565659 | 0.490541 | histogram.erl | starcoder |
%% @doc
%% A module to encode and decode Erlang's Record.
%%
%% The rmapper module aims to take away the boilerplate of implementing
%% mapping functions from and to Erlang's records.
%%
%% This module is based on the idea that a Record should have a unique
%% mapping specification for encoding and decoding, per format to encode/decode.
%% A same record can then easily be re-used to encode from/to JSON
%% but also from/to your database layer, only the specification would change.
%%
%% @end
%%
%% @author <NAME>
-module(rmapper).
-export([
field_spec/3,
field_spec/2
]).
-export([
encode/2,
decode/3
]).
-type field_name() :: any().
-type field_value() :: any().
-type mapper_function() :: fun((field_value()) -> field_value()).
-record(field_spec, {
name :: field_name(),
index :: non_neg_integer(),
mapper :: mapper_function()
}).
%%%----------------------------------------------------------------------------
%%% Functions to build the specification.
%%%----------------------------------------------------------------------------
%% @doc
%% Create the specification for a field.
%%
%% The specification created with this function will use
%% the MapperFunction during the encoding/decoding
%% to transform the data into the right format.
%%
%% Example:
%% ```
%% rmapper:field_spec(<<"name">>, #country.name, fun string:titlecase/1)
%% '''
%%
%% @end
-spec field_spec(field_name(), non_neg_integer(), mapper_function()) -> #field_spec{}.
field_spec(Name, Index, MapperFunction) ->
%% Index is the record field position (e.g. #country.name).
#field_spec{name = Name, index = Index, mapper = MapperFunction}.
%% @doc
%% Create the specification for a field.
%%
%% The specification created with this function will
%% not transform the data during encoding/decoding
%% (the identity function is used as the mapper).
%%
%% Example:
%% ```
%% rmapper:field_spec(<<"name">>, #country.name)
%% '''
%%
%% @end
-spec field_spec(field_name(), non_neg_integer()) -> #field_spec{}.
field_spec(Name, Index) ->
field_spec(Name, Index, fun id/1).
%%%----------------------------------------------------------------------------
%%% Encoding and decoding functions.
%%%----------------------------------------------------------------------------
-type record() :: tuple().
-type property() :: {field_name(), field_value()}.
%% @doc
%% Encode the Record to a PropList.
%%
%% Fields whose value is `undefined' are skipped; every other value is
%% passed through the field's mapper. Because the accumulator is built
%% by prepending, the resulting proplist is in reverse field-spec
%% order, e.g.:
%% ```
%% Record = #country{name = <<"Netherlands">>, country_code = <<"NL">>, currency_code = <<"EUR">>},
%% Spec = [
%%   rmapper:field_spec(<<"name">>, #country.name),
%%   rmapper:field_spec(<<"country_code">>, #country.country_code),
%%   rmapper:field_spec(<<"currency_code">>, #country.currency_code)
%% ],
%% rmapper:encode(Record, Spec).
%%
%% [{<<"currency_code">>, <<"EUR">>}, {<<"country_code">>, <<"NL">>}, {<<"name">>, <<"Netherlands">>}]
%% '''
%%
%% @end
-spec encode(record(), list(#field_spec{})) -> list(property()).
encode(Record, FieldSpecs) ->
    Collect =
        fun(#field_spec{name = Name, index = Index, mapper = Mapper}, Acc) ->
                case element(Index, Record) of
                    undefined -> Acc;
                    Value -> [{Name, Mapper(Value)} | Acc]
                end
        end,
    lists:foldl(Collect, [], FieldSpecs).
%% @doc
%% Decode the Record from a PropList.
%%
%% Example:
%% ```
%% PropList = [{<<"currency_code">>, <<"EUR">>}, {<<"country_code">>, <<"NL">>}, {<<"name">>, <<"Netherlands">>}],
%% Spec = [
%%   rmapper:field_spec(<<"name">>, #country.name),
%%   rmapper:field_spec(<<"country_code">>, #country.country_code),
%%   rmapper:field_spec(<<"currency_code">>, #country.currency_code)
%% ],
%% rmapper:decode(#country{}, Spec, PropList).
%%
%% #country{name = <<"Netherlands">>, country_code = <<"NL">>, currency_code = <<"EUR">>}
%% '''
%%
%% Properties without a matching field spec are silently ignored.
%% @end
-spec decode(record(), list(#field_spec{}), list(property())) -> record().
decode(Record, FieldSpecs, PropList) ->
%% Index specs by field name once for O(1) lookups per property.
SpecMap = spec_to_map(FieldSpecs),
lists:foldl(
fun({Key, Value}, NewRecord) ->
case maps:get(Key, SpecMap, not_found) of
not_found ->
NewRecord;
#field_spec{index = Index, mapper = Mapper} ->
setelement(Index, NewRecord, Mapper(Value))
end
end,
Record,
PropList
).
%%%----------------------------------------------------------------------------
%%% Private functions
%%%----------------------------------------------------------------------------
-spec id(any()) -> any().
%% Identity function; the default mapper used by field_spec/2.
id(Term) -> Term.
%% Fixed the -spec: the map values are the #field_spec{} records
%% themselves, not field_value(); also use '=>' (optional association)
%% since the map may be empty or have any subset of names.
-spec spec_to_map(list(#field_spec{})) -> #{ field_name() => #field_spec{} }.
%% Build a name -> #field_spec{} index for O(1) lookup in decode/3.
%% If the same name occurs more than once, the last spec wins (same
%% behaviour as the previous foldl/maps:put implementation).
spec_to_map(FieldSpecs) ->
    maps:from_list([{Name, Spec} || #field_spec{name = Name} = Spec <- FieldSpecs]).
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Transform Core Erlang to Kernel Erlang
%% Kernel erlang is like Core Erlang with a few significant
%% differences:
%%
%% 1. It is flat! There are no nested calls or sub-blocks.
%%
%% 2. All variables are unique in a function. There is no scoping, or
%% rather the scope is the whole function.
%%
%% 3. Pattern matching (in cases and receives) has been compiled.
%%
%% 4. The annotations contain variable usages. Seeing we have to work
%% this out anyway for funs we might as well pass it on for free to
%% later passes.
%%
%% 5. All remote-calls are to statically named m:f/a. Meta-calls are
%% passed via erlang:apply/3.
%%
%% The translation is done in two passes:
%%
%% 1. Basic translation, translate variable/function names, flatten
%% completely, pattern matching compilation.
%%
%% 2. Fun-lifting (lambda-lifting), variable usage annotation and
%% last-call handling.
%%
%% All new Kexprs are created in the first pass, they are just
%% annotated in the second.
%%
%% Functions and BIFs
%%
%% Functions are "call"ed or "enter"ed if it is a last call, their
%% return values may be ignored. BIFs are things which are known to
%% be internal by the compiler and can only be called, their return
%% values cannot be ignored.
%%
%% Letrec's are handled rather naively. All the functions in one
%% letrec are handled as one block to find the free variables. While
%% this is not optimal it reflects how letrec's often are used. We
%% don't have to worry about variable shadowing and nested letrec's as
%% this is handled in the variable/function name translation. There
%% is a little bit of trickery to ensure letrec transformations fit
%% into the scheme of things.
%%
%% To ensure unique variable names we use a variable substitution
%% table and keep the set of all defined variables. The nested
%% scoping of Core means that we must also nest the substitution
%% tables, but the defined set must be passed through to match the
%% flat structure of Kernel and to make sure variables with the same
%% name from different scopes get different substitutions.
%%
%% We also use these substitutions to handle the variable renaming
%% necessary in pattern matching compilation.
%%
%% The pattern matching compilation assumes that the values of
%% different types don't overlap. This means that as there is no
%% character type yet in the machine all characters must be converted
%% to integers!
-module(v3_kernel).
-export([module/2,format_error/1]).
-import(lists, [map/2,foldl/3,foldr/3,mapfoldl/3,splitwith/2,member/2,
keymember/3,keyfind/3,partition/2,droplast/1,last/1,sort/1,
reverse/1]).
-import(ordsets, [add_element/2,del_element/2,union/2,union/1,subtract/2]).
-import(cerl, [c_tuple/1]).
-include("core_parse.hrl").
-include("v3_kernel.hrl").
%% These are not defined in v3_kernel.hrl.

%% get_kanno(Kthing) -> Anno.
%%  Return the annotation of a kernel construct. By convention the
%%  annotation field is ALWAYS the second element of the record tuple
%%  (see the internal record definitions above), so a generic
%%  element/2 access works for every construct.
get_kanno(Kthing) -> element(2, Kthing).

%% set_kanno(Kthing, Anno) -> Kthing'.
%%  Replace the annotation of a kernel construct.
set_kanno(Kthing, Anno) -> setelement(2, Kthing, Anno).

%% copy_anno(Kdst, Ksrc) -> Kdst'.
%%  Copy the annotation from Ksrc onto Kdst.
copy_anno(Kdst, Ksrc) ->
    Anno = get_kanno(Ksrc),
    set_kanno(Kdst, Anno).
%% Internal kernel expressions and help functions.
%% N.B. the annotation field is ALWAYS the first field!
-record(ivalues, {anno=[],args}).
-record(ifun, {anno=[],vars,body}).
-record(iset, {anno=[],vars,arg,body}).
-record(iletrec, {anno=[],defs}).
-record(ialias, {anno=[],vars,pat}).
-record(iclause, {anno=[],isub,osub,pats,guard,body}).
-record(ireceive_accept, {anno=[],arg}).
-record(ireceive_next, {anno=[],arg}).
-record(ignored, {anno=[]}).
-type warning() :: term(). % XXX: REFINE
%% State record for kernel translator.
-record(kern, {func, %Current host function
ff, %Current function
vcount=0, %Variable counter
fcount=0, %Fun counter
ds=cerl_sets:new() :: cerl_sets:set(), %Defined variables
funs=[], %Fun functions
free=#{}, %Free variables
ws=[] :: [warning()], %Warnings.
guard_refc=0}). %> 0 means in guard
-spec module(cerl:c_module(), [compile:option()]) ->
{'ok', #k_mdef{}, [warning()]}.
%% module(CoreModule, Options) -> {ok,KernelModule,Warnings}.
%%  Translate a complete Core Erlang module to Kernel Erlang.
%%  Lifted funs accumulated in the state (St#kern.funs) are appended
%%  to the translated function definitions.
module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, _Options) ->
    Kas = attributes(As),
    %% Exports are #c_var{} records whose names are {Name,Arity} pairs.
    Kes = map(fun (#c_var{name={_,_}=Fname}) -> Fname end, Es),
    St0 = #kern{},
    {Kfs,St} = mapfoldl(fun function/2, St0, Fs),
    {ok,#k_mdef{anno=A,name=M#c_literal.val,exports=Kes,attributes=Kas,
                body=Kfs ++ St#kern.funs},lists:sort(St#kern.ws)}.
%% attributes([{NameLiteral,ValLiteral}]) -> [{Name,Val}].
%%  Unwrap the literal attribute pairs, keeping only the attributes
%%  that should survive into Kernel Erlang (see include_attribute/1).
attributes([{#c_literal{val=Name},#c_literal{val=Val}}|Rest]) ->
    Tail = attributes(Rest),
    case include_attribute(Name) of
        true -> [{Name,Val}|Tail];
        false -> Tail
    end;
attributes([]) -> [].
%% include_attribute(Name) -> true | false.
%%  Compile-time-only attributes are dropped from the Kernel module;
%%  everything else (e.g. vsn, author) is kept.
include_attribute(Name) ->
    Excluded = [type,spec,callback,opaque,export_type,record,
                optional_callbacks,file,compile],
    not lists:member(Name, Excluded).
%% function({FunVar,Body}, State) -> {#k_fdef{},State}.
%%  Translate one Core function to Kernel Erlang, running both the
%%  flattening pass (expr/3) and the variable-usage annotation pass
%%  (ubody/3). On any crash, report which function failed and re-raise
%%  with the original stacktrace.
function({#c_var{name={F,Arity}=FA},Body}, St0) ->
    %%io:format("~w/~w~n", [F,Arity]),
    try
        %% Find a suitable starting value for the variable counter. Note
        %% that this pass assumes that new_var_name/1 returns a variable
        %% name distinct from any variable used in the entire body of
        %% the function. We use integers as variable names to avoid
        %% filling up the atom table when compiling huge functions.
        Count = cerl_trees:next_free_variable_name(Body),
        St1 = St0#kern{func=FA,ff=undefined,vcount=Count,fcount=0,ds=cerl_sets:new()},
        {#ifun{anno=Ab,vars=Kvs,body=B0},[],St2} = expr(Body, new_sub(), St1),
        {B1,_,St3} = ubody(B0, return, St2),
        %%B1 = B0, St3 = St2,                           %Null second pass
        {make_fdef(#k{us=[],ns=[],a=Ab}, F, Arity, Kvs, B1),St3}
    catch
        Class:Error:Stack ->
            io:fwrite("Function: ~w/~w\n", [F,Arity]),
            erlang:raise(Class, Error, Stack)
    end.
%% body(Cexpr, Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Do the main sequence of a body. A body ends in an atomic value or
%%  values. Must check if vector first so do expr.
body(#c_values{anno=A,es=Ces}, Sub, St0) ->
    %% Do this here even if only in bodies.
    {Kes,Pe,St1} = atomic_list(Ces, Sub, St0),
    %%{Kes,Pe,St1} = expr_list(Ces, Sub, St0),
    {#ivalues{anno=A,args=Kes},Pe,St1};
body(#ireceive_next{anno=A}, _, St) ->
    {#k_receive_next{anno=A},[],St};
body(Ce, Sub, St0) ->
    %% Anything else is handled by the general expression translation.
    expr(Ce, Sub, St0).

%% guard(Cexpr, Sub, State) -> {Kexpr,State}.
%%  We handle guards almost as bodies. The only special thing we
%%  must do is to make the final Kexpr a #k_test{}.
%%  Also, we wrap the entire guard in a try/catch which is
%%  not strictly needed, but makes sure that every 'bif' instruction
%%  will get a proper failure label.
guard(G0, Sub, St0) ->
    {G1,St1} = wrap_guard(G0, St0),
    {Ge0,Pre,St2} = expr(G1, Sub, St1),
    {Ge1,St3} = gexpr_test(Ge0, St2),
    {Ge,St} = guard_opt(Ge1, St3),
    {pre_seq(Pre, Ge),St}.
%% guard_opt(Kexpr, State) -> {Kexpr,State}.
%%  Optimize the Kexpr for the guard. Instead of evaluating a boolean
%%  expression comparing it to 'true' in a final #k_test{},
%%  replace BIF calls with #k_test{} in the expression.
%%
%%  As an example, take the guard:
%%
%%     when is_integer(V0), is_atom(V1) ->
%%
%%  The unoptimized Kexpr translated to pseudo BEAM assembly
%%  code would look like:
%%
%%     bif is_integer V0 => Bool0
%%     bif is_atom V1    => Bool1
%%     bif and Bool0 Bool1 => Bool
%%     test Bool =:= true else goto Fail
%%     ...
%%   Fail:
%%     ...
%%
%%  The optimized code would look like:
%%
%%     test is_integer V0 else goto Fail
%%     test is_atom V1    else goto Fail
%%     ...
%%   Fail:
%%     ...
%%
%%  An 'or' operation is only slightly more complicated:
%%
%%     test is_integer V0 else goto NotFailedYet
%%     goto Success
%%
%%   NotFailedYet:
%%     test is_atom V1 else goto Fail
%%
%%   Success:
%%     ...
%%   Fail:
%%     ...
guard_opt(G, St0) ->
    %% Turn the nested isets of the guard into a "forest" keyed on the
    %% bound variables, then rewrite the final boolean expression into
    %% a sequence of #k_test{} operations.
    {Root,Forest0,St1} = make_forest(G, St0),
    {Exprs,Forest,St} = rewrite_bool(Root, Forest0, false, St1),
    E = forest_pre_seq(Exprs, Forest),
    {G#k_try{arg=E},St}.
%% rewrite_bool(Kexpr, Forest, Inv, St) -> {[Kexpr],Forest,St}.
%%  Rewrite Kexpr to use #k_test{} operations instead of comparison
%%  and type test BIFs. Inv is true when the expression occurs in an
%%  inverted ('not') context.
%%
%%  If Kexpr is a #k_test{} operation, the call will always
%%  succeed. Otherwise, a 'not_possible' exception will be
%%  thrown if Kexpr cannot be rewritten.

%% 'V =:= true': the result is the boolean value of V itself, so try
%% rewriting V's defining expression directly; keep the test if that
%% turns out not to be possible.
rewrite_bool(#k_test{op=#k_remote{mod=#k_atom{val=erlang},name=#k_atom{val='=:='}},
                     args=[#k_var{}=V,#k_atom{val=true}]}=Test, Forest0, Inv, St0) ->
    try rewrite_bool_var(V, Forest0, Inv, St0) of
        {_,_,_}=Res ->
            Res
    catch
        throw:not_possible ->
            {[Test],Forest0,St0}
    end;
%% 'V =:= false' is the same as 'not (V =:= true)'.
rewrite_bool(#k_test{op=#k_remote{mod=#k_atom{val=erlang},name=#k_atom{val='=:='}},
                     args=[#k_var{}=V,#k_atom{val=false}]}=Test, Forest0, Inv, St0) ->
    try rewrite_bool_var(V, Forest0, not Inv, St0) of
        {_,_,_}=Res ->
            Res
    catch
        throw:not_possible ->
            {[Test],Forest0,St0}
    end;
%% Comparison of two atom literals: fold it to an always-succeeding or
%% always-failing test at compile time.
rewrite_bool(#k_test{op=#k_remote{mod=#k_atom{val=erlang},name=#k_atom{val='=:='}},
                     args=[#k_atom{val=V1},#k_atom{val=V2}]}, Forest0, false, St0) ->
    case V1 =:= V2 of
        true ->
            {[make_test(is_boolean, [#k_atom{val=true}])],Forest0,St0};
        false ->
            {[make_failing_test()],Forest0,St0}
    end;
rewrite_bool(#k_test{}=Test, Forest, false, St) ->
    {[Test],Forest,St};
%% A guard-style protected expression (try ... catch -> false).
rewrite_bool(#k_try{vars=[#k_var{name=X}],body=#k_var{name=X},
                    handler=#k_atom{val=false},ret=[]}=Prot,
             Forest0, Inv, St0) ->
    {Root,Forest1,St1} = make_forest(Prot, Forest0, St0),
    {Exprs,Forest2,St} = rewrite_bool(Root, Forest1, Inv, St1),
    %% Split the forest: expressions created inside the protected
    %% context stay inside it; the rest is passed back to the caller.
    InnerForest = maps:without(maps:keys(Forest0), Forest2),
    Forest = maps:without(maps:keys(InnerForest), Forest2),
    E = forest_pre_seq(Exprs, InnerForest),
    {[Prot#k_try{arg=E}],Forest,St};
rewrite_bool(#k_match{body=Body,ret=[]}, Forest, Inv, St) ->
    rewrite_match(Body, Forest, Inv, St);
rewrite_bool(Other, Forest, Inv, St) ->
    case extract_bif(Other) of
        {Name,Args} ->
            rewrite_bif(Name, Args, Forest, Inv, St);
        error ->
            throw(not_possible)
    end.

%% rewrite_bool_var(Var, Forest, Inv, St) -> {[Kexpr],Forest,St}.
%%  Rewrite the boolean expression whose key in Forest is
%%  given by Var. Throw a 'not_possible' expression if something
%%  prevents the rewriting.
rewrite_bool_var(Arg, Forest0, Inv, St) ->
    {Expr,Forest} = forest_take_expr(Arg, Forest0),
    rewrite_bool(Expr, Forest, Inv, St).

%% rewrite_bool_args([Kexpr], Forest, Inv, St) -> {[[Kexpr]],Forest,St}.
%%  Rewrite each Kexpr in the list. The input Kexpr should be variables
%%  or boolean values. Throw a 'not_possible' expression if something
%%  prevents the rewriting.
%%
%%  This function is suitable for handling the arguments for both
%%  'and' and 'or'.
rewrite_bool_args([#k_atom{val=B}=A|Vs], Forest0, false=Inv, St0) when is_boolean(B) ->
    {Tail,Forest1,St1} = rewrite_bool_args(Vs, Forest0, Inv, St0),
    Bif = make_bif('=:=', [A,#k_atom{val=true}]),
    {Exprs,Forest,St} = rewrite_bool(Bif, Forest1, Inv, St1),
    {[Exprs|Tail],Forest,St};
rewrite_bool_args([#k_var{}=Var|Vs], Forest0, false=Inv, St0) ->
    {Tail,Forest1,St1} = rewrite_bool_args(Vs, Forest0, Inv, St0),
    {Exprs,Forest,St} =
        case is_bool_expr(Var, Forest0) of
            true ->
                %% Known boolean; no '=:= true' test needed.
                rewrite_bool_var(Var, Forest1, Inv, St1);
            false ->
                Bif = make_bif('=:=', [Var,#k_atom{val=true}]),
                rewrite_bool(Bif, Forest1, Inv, St1)
        end,
    {[Exprs|Tail],Forest,St};
rewrite_bool_args([_|_], _Forest, _Inv, _St) ->
    throw(not_possible);
rewrite_bool_args([], Forest, _Inv, St) ->
    {[],Forest,St}.

%% rewrite_bif(Name, [Kexpr], Forest, Inv, St) -> {[Kexpr],Forest,St}.
%%  Rewrite a BIF. Throw a 'not_possible' expression if something
%%  prevents the rewriting.

%% De Morgan: not (A or B) is (not A) and (not B), and vice versa.
rewrite_bif('or', Args, Forest, true, St) ->
    rewrite_not_args('and', Args, Forest, St);
rewrite_bif('and', Args, Forest, true, St) ->
    rewrite_not_args('or', Args, Forest, St);
rewrite_bif('and', [#k_atom{val=Val},Arg], Forest0, Inv, St0) ->
    false = Inv,                                %Assertion.
    case Val of
        true ->
            %% The result only depends on Arg.
            rewrite_bool_var(Arg, Forest0, Inv, St0);
        _ ->
            %% Will fail. There is no need to evaluate the expression
            %% represented by Arg. Take it out from the forest and
            %% discard the expression.
            Failing = make_failing_test(),
            try rewrite_bool_var(Arg, Forest0, Inv, St0) of
                {_,Forest,St} ->
                    {[Failing],Forest,St}
            catch
                throw:not_possible ->
                    try forest_take_expr(Arg, Forest0) of
                        {_,Forest} ->
                            {[Failing],Forest,St0}
                    catch
                        throw:not_possible ->
                            %% Arg is probably a variable bound in an
                            %% outer scope.
                            {[Failing],Forest0,St0}
                    end
            end
    end;
rewrite_bif('and', [Arg,#k_atom{}=Atom], Forest, Inv, St) ->
    false = Inv,                                %Assertion.
    %% Normalize: put the atom operand first and retry.
    rewrite_bif('and', [Atom,Arg], Forest, Inv, St);
rewrite_bif('and', Args, Forest0, Inv, St0) ->
    false = Inv,                                %Assertion.
    %% Both tests in sequence; either failing branches to Fail.
    {[Es1,Es2],Forest,St} = rewrite_bool_args(Args, Forest0, Inv, St0),
    {Es1 ++ Es2,Forest,St};
rewrite_bif('or', Args, Forest0, Inv, St0) ->
    false = Inv,                                %Assertion.
    {[First,Then],Forest,St} = rewrite_bool_args(Args, Forest0, Inv, St0),
    Alt = make_alt(First, Then),
    {[Alt],Forest,St};
rewrite_bif('xor', [_,_], _Forest, _Inv, _St) ->
    %% Rewriting 'xor' is not practical. Fortunately, 'xor' is
    %% almost never used in practice.
    throw(not_possible);
rewrite_bif('not', [Arg], Forest0, Inv, St) ->
    {Expr,Forest} = forest_take_expr(Arg, Forest0),
    rewrite_bool(Expr, Forest, not Inv, St);
rewrite_bif(Op, Args, Forest, Inv, St) ->
    case is_test(Op, Args) of
        true ->
            rewrite_bool(make_test(Op, Args, Inv), Forest, false, St);
        false ->
            throw(not_possible)
    end.

%% rewrite_not_args(Op, [Kexpr], Forest, St) -> {[Kexpr],Forest,St}.
%%  Negate both arguments (by wrapping them in 'not' BIF calls added
%%  to the forest) and rewrite the dual operation Op.
rewrite_not_args(Op, [A0,B0], Forest0, St0) ->
    {A,Forest1,St1} = rewrite_not_args_1(A0, Forest0, St0),
    {B,Forest2,St2} = rewrite_not_args_1(B0, Forest1, St1),
    rewrite_bif(Op, [A,B], Forest2, false, St2).

rewrite_not_args_1(Arg, Forest, St) ->
    Not = make_bif('not', [Arg]),
    forest_add_expr(Not, Forest, St).

%% rewrite_match(KAlt, Forest, Inv, St) ->
%%      {[Kexpr],Forest,St}.
%%  Try to rewrite a #k_match{} originating from an 'andalso' or an 'orelse'.
rewrite_match(#k_alt{first=First,then=Then}, Forest, Inv, St) ->
    case {First,Then} of
        {#k_select{var=#k_var{name=V}=Var,types=[TypeClause]},#k_var{name=V}} ->
            rewrite_match_1(Var, TypeClause, Forest, Inv, St);
        {_,_} ->
            throw(not_possible)
    end.

rewrite_match_1(Var, #k_type_clause{values=Cs0}, Forest0, Inv, St0) ->
    %% We expect exactly one clause for 'false' and one for 'true';
    %% sorting puts them in that order.
    Cs = sort([{Val,B} || #k_val_clause{val=#k_atom{val=Val},body=B} <- Cs0]),
    case Cs of
        [{false,False},{true,True}] ->
            rewrite_match_2(Var, False, True, Forest0, Inv, St0);
        _ ->
            throw(not_possible)
    end.

rewrite_match_2(Var, False, #k_atom{val=true}, Forest0, Inv, St0) ->
    %% Originates from an 'orelse'.
    case False of
        #k_atom{val=NotBool} when not is_boolean(NotBool) ->
            rewrite_bool(Var, Forest0, Inv, St0);
        _ ->
            {CodeVar,Forest1,St1} = add_protected_expr(False, Forest0, St0),
            rewrite_bif('or', [Var,CodeVar], Forest1, Inv, St1)
    end;
rewrite_match_2(Var, #k_atom{val=false}, True, Forest0, Inv, St0) ->
    %% Originates from an 'andalso'.
    {CodeVar,Forest1,St1} = add_protected_expr(True, Forest0, St0),
    rewrite_bif('and', [Var,CodeVar], Forest1, Inv, St1);
rewrite_match_2(_V, _, _, _Forest, _Inv, _St) ->
    throw(not_possible).

%% is_bool_expr(#k_var{}, Forest) -> true|false.
%%  Return true if the variable refers to a boolean expression
%%  that does not need an explicit '=:= true' test.
is_bool_expr(V, Forest) ->
    case forest_peek_expr(V, Forest) of
        error ->
            %% Defined outside of the guard. We can't know.
            false;
        Expr ->
            case extract_bif(Expr) of
                {Name,Args} ->
                    is_test(Name, Args) orelse
                        erl_internal:bool_op(Name, length(Args));
                error ->
                    %% Not a BIF. Should be possible to rewrite
                    %% to a boolean. Definitely does not need
                    %% a '=:= true' test.
                    true
            end
    end.
%% make_bif(Op, Args) -> #k_bif{}.
%%  Build a BIF-call node for erlang:Op/Arity with the given arguments.
make_bif(Op, Args) ->
    Remote = #k_remote{mod=#k_atom{val=erlang},
                       name=#k_atom{val=Op},
                       arity=length(Args)},
    #k_bif{op=Remote,args=Args}.
%% extract_bif(Kexpr) -> {Name,Args} | error.
%%  Match out the name and arguments of a call to an erlang BIF;
%%  'error' for any other construct.
extract_bif(#k_bif{op=#k_remote{mod=#k_atom{val=erlang},
                                name=#k_atom{val=Name}},
                   args=Args}) ->
    {Name,Args};
extract_bif(_) ->
    error.

%% make_alt(First, Then) -> KMatch.
%%  Make a #k_alt{} within a #k_match{} to implement
%%  'or' or 'orelse'.
make_alt(First0, Then0) ->
    First1 = pre_seq(droplast(First0), last(First0)),
    Then1 = pre_seq(droplast(Then0), last(Then0)),
    %% Both branches must be protected so that a failure in one
    %% branch falls through to the next instead of aborting the guard.
    First2 = make_protected(First1),
    Then2 = make_protected(Then1),
    Body = #ignored{},
    First3 = #k_guard_clause{guard=First2,body=Body},
    Then3 = #k_guard_clause{guard=Then2,body=Body},
    First = #k_guard{clauses=[First3]},
    Then = #k_guard{clauses=[Then3]},
    Alt = #k_alt{first=First,then=Then},
    #k_match{vars=[],body=Alt}.

%% add_protected_expr(Kexpr, Forest, St) -> {KvarOrAtom,Forest,St}.
%%  Add Kexpr, wrapped in a protected context, to the forest and return
%%  the variable bound to it. Atoms and variables need no wrapping and
%%  are returned as-is.
add_protected_expr(#k_atom{}=Atom, Forest, St) ->
    {Atom,Forest,St};
add_protected_expr(#k_var{}=Var, Forest, St) ->
    {Var,Forest,St};
add_protected_expr(E0, Forest, St) ->
    E = make_protected(E0),
    forest_add_expr(E, Forest, St).

%% make_protected(Kexpr) -> KTry.
%%  Wrap Kexpr in a guard-style try whose handler converts any
%%  exception to 'false' (already-protected expressions are left alone).
make_protected(#k_try{}=Try) ->
    Try;
make_protected(B) ->
    #k_try{arg=B,vars=[#k_var{name=''}],body=#k_var{name=''},
           handler=#k_atom{val=false}}.
%% make_failing_test() -> #k_test{}.
%%  A test that is guaranteed to fail at run time, since the 'fail'
%%  atom given as the argument is not a boolean.
make_failing_test() ->
    make_test(is_boolean, [#k_atom{val=fail}]).

%% make_test(Op, Args) -> #k_test{}.
%%  Build a non-inverted test; see make_test/3.
make_test(Op, Args) ->
    make_test(Op, Args, false).
%% make_test(Op, Args, Inverted) -> #k_test{}.
%%  Build a #k_test{} node for the BEAM test erlang:Op/Arity; the
%%  'inverted' flag negates the sense of the test.
make_test(Op, Args, Inv) ->
    Arity = length(Args),
    Remote = #k_remote{mod=#k_atom{val=erlang},
                       name=#k_atom{val=Op},
                       arity=Arity},
    #k_test{op=Remote,args=Args,inverted=Inv}.
%% is_test(Op, Args) -> true | false.
%%  True if erlang:Op/Arity is recognised by BEAM as a test
%%  instruction (a type test or a comparison operator).
is_test(Op, Args) ->
    Arity = length(Args),
    erl_internal:new_type_test(Op, Arity)
        orelse erl_internal:comp_op(Op, Arity).
%% make_forest(Kexpr, St) -> {RootKexpr,Forest,St}.
%%  Build a forest out of Kexpr. RootKexpr is the final expression
%%  nested inside Kexpr.
make_forest(G, St) ->
    make_forest_1(G, #{}, 0, St).

%% make_forest(Kexpr, Forest, St) -> {RootKexpr,Forest,St}.
%%  Add to Forest from Kexpr. RootKexpr is the final expression
%%  nested inside Kexpr.
make_forest(G, Forest0, St) ->
    N = forest_next_index(Forest0),
    make_forest_1(G, Forest0, N, St).

%% make_forest_1(Kexpr, Forest, Index, St) -> {RootKexpr,Forest,St}.
%%  Walk the nested #iset{} chain, mapping each bound variable to
%%  {Index,Arg} (Index preserves evaluation order) plus an {untaken,V}
%%  marker (see forest_take_expr/2 and forest_pre_seq/2).
make_forest_1(#k_try{arg=B}, Forest, I, St) ->
    make_forest_1(B, Forest, I, St);
make_forest_1(#iset{vars=[]}=Iset0, Forest, I, St0) ->
    %% An iset without a variable cannot be keyed in the forest;
    %% give it a fresh (unreferenced) variable.
    {UnrefVar,St} = new_var(St0),
    Iset = Iset0#iset{vars=[UnrefVar]},
    make_forest_1(Iset, Forest, I, St);
make_forest_1(#iset{vars=[#k_var{name=V}],arg=Arg,body=B}, Forest0, I, St) ->
    Forest = Forest0#{V => {I,Arg}, {untaken,V} => true},
    make_forest_1(B, Forest, I+1, St);
make_forest_1(Innermost, Forest, _I, St) ->
    {Innermost,Forest,St}.

%% forest_take_expr(Kexpr, Forest) -> {Expr,Forest}.
%%  If Kexpr is a variable, take out the expression corresponding
%%  to variable in Forest. Expressions that have been taken out
%%  of the forest will not be included the Kexpr returned
%%  by forest_pre_seq/2.
%%
%%  Throw a 'not_possible' exception if Kexpr is not a variable or
%%  if the name of the variable is not a key in Forest.
forest_take_expr(#k_var{name=V}, Forest0) ->
    %% v3_core currently always generates guard expressions that can
    %% be represented as a tree. Other code generators (such as LFE)
    %% could generate guard expressions that can only be represented
    %% as a DAG (i.e. some nodes are referenced more than once). To
    %% handle DAGs, we must never remove a node from the forest, but
    %% just remove the {untaken,V} marker. That will effectively convert
    %% the DAG to a tree by duplicating the shared nodes and their
    %% descendants.
    case maps:find(V, Forest0) of
        {ok,{_,Expr}} ->
            Forest = maps:remove({untaken,V}, Forest0),
            {Expr,Forest};
        error ->
            throw(not_possible)
    end;
forest_take_expr(_, _) ->
    throw(not_possible).
%% forest_peek_expr(Kvar, Forest) -> Kexpr | error.
%%  Return the expression corresponding to Kvar in Forest, or
%%  return 'error' if there is no corresponding expression.
%% Look up the expression bound to the variable without removing it
%% (and without touching the {untaken,V} marker); 'error' when the
%% variable is not a key in the forest.
forest_peek_expr(#k_var{name=V}, Forest) ->
    case Forest of
        #{V := {_,Expr}} -> Expr;
        #{} -> error
    end.
%% forest_add_expr(Kexpr, Forest, St) -> {Kvar,Forest,St}.
%%  Add a new expression to Forest, keyed on a freshly created
%%  variable, and return that variable. Note: no {untaken,V} marker is
%%  added, so the expression is only emitted if it ends up referenced.
forest_add_expr(Expr, Forest0, St0) ->
    {#k_var{name=V}=Var,St} = new_var(St0),
    N = forest_next_index(Forest0),
    Forest = Forest0#{V => {N,Expr}},
    {Var,Forest,St}.
%% Return one more than the highest evaluation-order index currently
%% stored in the forest (values of the form {Index,Expr}); 1 for an
%% empty forest. {untaken,V} markers (value 'true') are ignored.
forest_next_index(Forest) ->
    Indices = [N || {N,_} <- maps:values(Forest), is_integer(N)],
    1 + lists:max([0|Indices]).
%% forest_pre_seq([Kexpr], Forest) -> Kexpr.
%%  Package the list of Kexprs into a nested Kexpr, prepending all
%%  expressions in Forest that have not been taken out using
%%  forest_take_expr/2.
forest_pre_seq(Exprs, Forest) ->
    %% Untaken expressions must still be evaluated; reference them so
    %% that the transitive closure below picks them up.
    Es0 = [#k_var{name=V} || {untaken,V} <- maps:keys(Forest)],
    Es = Es0 ++ Exprs,
    Vs = extract_all_vars(Es, Forest, []),
    %% Sorting on {Index,_} restores the original evaluation order.
    Pre0 = sort([{maps:get(V, Forest),V} || V <- Vs]),
    Pre = [#iset{vars=[#k_var{name=V}],arg=A} ||
              {{_,A},V} <- Pre0],
    pre_seq(Pre++droplast(Exprs), last(Exprs)).

%% extract_all_vars([Kexpr], Forest, Acc) -> ordset of variable names.
%%  Compute the transitive closure of all forest variables referenced
%%  from the given expressions.
extract_all_vars(Es, Forest, Acc0) ->
    case extract_var_list(Es) of
        [] ->
            Acc0;
        [_|_]=Vs0 ->
            %% Only variables bound in the forest are of interest.
            Vs = [V || V <- Vs0, maps:is_key(V, Forest)],
            NewVs = ordsets:subtract(Vs, Acc0),
            NewEs = [begin
                         {_,E} = maps:get(V, Forest),
                         E
                     end || V <- NewVs],
            Acc = union(NewVs, Acc0),
            extract_all_vars(NewEs, Forest, Acc)
    end.

%% extract_vars(Kexpr) -> ordset of the variable names directly
%%  referenced by Kexpr (one structural level of recursion per
%%  construct).
extract_vars(#iset{arg=A,body=B}) ->
    union(extract_vars(A), extract_vars(B));
extract_vars(#k_bif{args=Args}) ->
    ordsets:from_list(lit_list_vars(Args));
extract_vars(#k_call{}) ->
    [];
extract_vars(#k_test{args=Args}) ->
    ordsets:from_list(lit_list_vars(Args));
extract_vars(#k_match{body=Body}) ->
    extract_vars(Body);
extract_vars(#k_alt{first=First,then=Then}) ->
    union(extract_vars(First), extract_vars(Then));
extract_vars(#k_guard{clauses=Cs}) ->
    extract_var_list(Cs);
extract_vars(#k_guard_clause{guard=G}) ->
    extract_vars(G);
extract_vars(#k_select{var=Var,types=Types}) ->
    union(ordsets:from_list(lit_vars(Var)),
          extract_var_list(Types));
extract_vars(#k_type_clause{values=Values}) ->
    extract_var_list(Values);
extract_vars(#k_val_clause{body=Body}) ->
    extract_vars(Body);
extract_vars(#k_try{arg=Arg}) ->
    extract_vars(Arg);
extract_vars(Lit) ->
    ordsets:from_list(lit_vars(Lit)).

%% extract_var_list([Kexpr]) -> ordset of variable names.
extract_var_list(L) ->
    union([extract_vars(E) || E <- L]).
%% wrap_guard(Cexpr, St) -> {Cexpr,St}.
%%  Wrap the entire guard in a try/catch if needed (an exception in a
%%  guard simply makes the guard evaluate to 'false').
wrap_guard(#c_try{}=Try, St) -> {Try,St};
wrap_guard(Core, St0) ->
    {VarName,St} = new_var_name(St0),
    Var = #c_var{name=VarName},
    Try = #c_try{arg=Core,vars=[Var],body=Var,evars=[],handler=#c_literal{val=false}},
    {Try,St}.

%% gexpr_test(Kexpr, State) -> {Kexpr,State}.
%%  Builds the final boolean test from the last Kexpr in a guard test.
%%  Must enter try blocks and isets and find the last Kexpr in them.
%%  This must end in a recognised BEAM test!
gexpr_test(#k_bif{anno=A,op=#k_remote{mod=#k_atom{val=erlang},
                                      name=#k_atom{val=F},arity=Ar}=Op,
                  args=Kargs}=Ke, St) ->
    %% Either convert to test if ok, or add test.
    %% At this stage, erlang:float/1 is not a type test. (It should
    %% have been converted to erlang:is_float/1.)
    case erl_internal:new_type_test(F, Ar) orelse
        erl_internal:comp_op(F, Ar) of
        true -> {#k_test{anno=A,op=Op,args=Kargs},St};
        false -> gexpr_test_add(Ke, St)         %Add equality test
    end;
gexpr_test(#k_try{arg=B0,vars=[#k_var{name=X}],body=#k_var{name=X},
                  handler=#k_atom{val=false}}=Try, St0) ->
    %% Recurse into the protected expression.
    {B,St} = gexpr_test(B0, St0),
    %%ok = io:fwrite("~w: ~p~n", [?LINE,{B0,B}]),
    {Try#k_try{arg=B},St};
gexpr_test(#iset{body=B0}=Iset, St0) ->
    %% Recurse into the innermost expression of the iset chain.
    {B1,St1} = gexpr_test(B0, St0),
    {Iset#iset{body=B1},St1};
gexpr_test(Ke, St) -> gexpr_test_add(Ke, St).   %Add equality test

%% gexpr_test_add(Kexpr, State) -> {Kexpr,State}.
%%  Add an explicit "Kexpr =:= true" test around the expression.
gexpr_test_add(Ke, St0) ->
    Test = #k_remote{mod=#k_atom{val='erlang'},
                     name=#k_atom{val='=:='},
                     arity=2},
    {Ae,Ap,St1} = force_atomic(Ke, St0),
    {pre_seq(Ap, #k_test{anno=get_kanno(Ke),
                         op=Test,args=[Ae,#k_atom{val='true'}]}),St1}.
%% expr(Cexpr, Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Convert a Core expression, flattening it at the same time.
%%  Sub-expressions that must be evaluated first are returned as the
%%  list of pre-expressions.
expr(#c_var{anno=A,name={_Name,Arity}}=Fname, Sub, St) ->
    %% A local in an expression.
    %% For now, these are wrapped into a fun by reverse
    %% eta-conversion, but really, there should be exactly one
    %% such "lambda function" for each escaping local name,
    %% instead of one for each occurrence as done now.
    Vs = [#c_var{name=list_to_atom("V" ++ integer_to_list(V))} ||
             V <- integers(1, Arity)],
    Fun = #c_fun{anno=A,vars=Vs,body=#c_apply{anno=A,op=Fname,args=Vs}},
    expr(Fun, Sub, St);
expr(#c_var{anno=A,name=V}, Sub, St) ->
    %% An ordinary variable: apply the current substitution.
    {#k_var{anno=A,name=get_vsub(V, Sub)},[],St};
expr(#c_literal{anno=A,val=V}, _Sub, St) ->
    Klit = case V of
               [] ->
                   #k_nil{anno=A};
               V when is_integer(V) ->
                   #k_int{anno=A,val=V};
               V when is_float(V) ->
                   #k_float{anno=A,val=V};
               V when is_atom(V) ->
                   #k_atom{anno=A,val=V};
               _ ->
                   #k_literal{anno=A,val=V}
           end,
    {Klit,[],St};
expr(#c_cons{anno=A,hd=Ch,tl=Ct}, Sub, St0) ->
    %% Do cons in two steps, first the expressions left to right, then
    %% any remaining literals right to left.
    {Kh0,Hp0,St1} = expr(Ch, Sub, St0),
    {Kt0,Tp0,St2} = expr(Ct, Sub, St1),
    {Kt1,Tp1,St3} = force_atomic(Kt0, St2),
    {Kh1,Hp1,St4} = force_atomic(Kh0, St3),
    {#k_cons{anno=A,hd=Kh1,tl=Kt1},Hp0 ++ Tp0 ++ Tp1 ++ Hp1,St4};
expr(#c_tuple{anno=A,es=Ces}, Sub, St0) ->
    {Kes,Ep,St1} = atomic_list(Ces, Sub, St0),
    {#k_tuple{anno=A,es=Kes},Ep,St1};
expr(#c_map{anno=A,arg=Var,es=Ces}, Sub, St0) ->
    expr_map(A, Var, Ces, Sub, St0);
expr(#c_binary{anno=A,segments=Cv}, Sub, St0) ->
    try atomic_bin(Cv, Sub, St0) of
        {Kv,Ep,St1} ->
            {#k_binary{anno=A,segs=Kv},Ep,St1}
    catch
        throw:bad_element_size ->
            %% Bad segment size: warn and replace the whole binary
            %% construction with a call to erlang:error(badarg).
            St1 = add_warning(get_line(A), bad_segment_size, A, St0),
            Erl = #c_literal{val=erlang},
            Name = #c_literal{val=error},
            Args = [#c_literal{val=badarg}],
            Error = #c_call{anno=A,module=Erl,name=Name,args=Args},
            expr(Error, Sub, St1)
    end;
expr(#c_fun{anno=A,vars=Cvs,body=Cb}, Sub0, #kern{ff=OldFF,func=Func}=St0) ->
    %% Determine a name for the fun (for error reporting), preferring
    %% the 'id' annotation, then 'letrec_name', then unknown_fun.
    FA = case OldFF of
             undefined ->
                 Func;
             _ ->
                 case lists:keyfind(id, 1, A) of
                     {id,{_,_,Name}} -> Name;
                     _ ->
                         case lists:keyfind(letrec_name, 1, A) of
                             {letrec_name,Name} -> Name;
                             _ -> unknown_fun
                         end
                 end
         end,
    {Kvs,Sub1,St1} = pattern_list(Cvs, Sub0, St0#kern{ff=FA}),
    %%ok = io:fwrite("~w: ~p~n", [?LINE,{{Cvs,Sub0,St0},{Kvs,Sub1,St1}}]),
    {Kb,Pb,St2} = body(Cb, Sub1, St1#kern{ff=FA}),
    {#ifun{anno=A,vars=Kvs,body=pre_seq(Pb, Kb)},[],St2#kern{ff=OldFF}};
expr(#c_seq{arg=Ca,body=Cb}, Sub, St0) ->
    {Ka,Pa,St1} = body(Ca, Sub, St0),
    {Kb,Pb,St2} = body(Cb, Sub, St1),
    {Kb,Pa ++ [Ka] ++ Pb,St2};
expr(#c_let{anno=A,vars=Cvs,arg=Ca,body=Cb}, Sub0, St0) ->
    %%ok = io:fwrite("~w: ~p~n", [?LINE,{Cvs,Sub0,St0}]),
    {Ka,Pa,St1} = body(Ca, Sub0, St0),
    {Kps,Sub1,St2} = pattern_list(Cvs, Sub0, St1),
    %%ok = io:fwrite("~w: ~p~n", [?LINE,{Kps,Sub1,St1,St2}]),
    %% Break known multiple values into separate sets.
    Sets = case Ka of
               #ivalues{args=Kas} ->
                   foldr2(fun (V, Val, Sb) ->
                                  [#iset{vars=[V],arg=Val}|Sb] end,
                          [], Kps, Kas);
               _Other ->
                   [#iset{anno=A,vars=Kps,arg=Ka}]
           end,
    {Kb,Pb,St3} = body(Cb, Sub1, St2),
    {Kb,Pa ++ Sets ++ Pb,St3};
expr(#c_letrec{anno=A,defs=Cfs,body=Cb}, Sub0, St0) ->
    %% Make new function names and store substitution.
    {Fs0,{Sub1,St1}} =
        mapfoldl(fun ({#c_var{name={F,Ar}},B0}, {Sub,S0}) ->
                         {N,St1} = new_fun_name(atom_to_list(F)
                                                ++ "/" ++
                                                    integer_to_list(Ar),
                                                S0),
                         B = set_kanno(B0, [{letrec_name,N}]),
                         {{N,B},{set_fsub(F, Ar, N, Sub),St1}}
                 end, {Sub0,St0}, Cfs),
    %% Run translation on functions and body.
    {Fs1,St2} = mapfoldl(fun ({N,Fd0}, S1) ->
                                 {Fd1,[],St2} = expr(Fd0, Sub1, S1#kern{ff=N}),
                                 Fd = set_kanno(Fd1, A),
                                 {{N,Fd},St2}
                         end, St1, Fs0),
    {Kb,Pb,St3} = body(Cb, Sub1, St2#kern{ff=St1#kern.ff}),
    {Kb,[#iletrec{anno=A,defs=Fs1}|Pb],St3};
expr(#c_case{arg=Ca,clauses=Ccs}, Sub, St0) ->
    {Ka,Pa,St1} = body(Ca, Sub, St0),           %This is a body!
    {Kvs,Pv,St2} = match_vars(Ka, St1),         %Must have variables here!
    {Km,St3} = kmatch(Kvs, Ccs, Sub, St2),
    Match = flatten_seq(build_match(Kvs, Km)),
    {last(Match),Pa ++ Pv ++ droplast(Match),St3};
expr(#c_receive{anno=A,clauses=Ccs0,timeout=Ce,action=Ca}, Sub, St0) ->
    {Ke,Pe,St1} = atomic(Ce, Sub, St0),         %Force this to be atomic!
    {Rvar,St2} = new_var(St1),
    %% Need to massage accept clauses and add reject clause before matching.
    Ccs1 = map(fun (#c_clause{anno=Banno,body=B0}=C) ->
                       B1 = #c_seq{arg=#ireceive_accept{anno=A},body=B0},
                       C#c_clause{anno=Banno,body=B1}
               end, Ccs0),
    {Mpat,St3} = new_var_name(St2),
    Rc = #c_clause{anno=[compiler_generated|A],
                   pats=[#c_var{name=Mpat}],guard=#c_literal{anno=A,val=true},
                   body=#ireceive_next{anno=A}},
    {Km,St4} = kmatch([Rvar], Ccs1 ++ [Rc], Sub, add_var_def(Rvar, St3)),
    {Ka,Pa,St5} = body(Ca, Sub, St4),
    {#k_receive{anno=A,var=Rvar,body=Km,timeout=Ke,action=pre_seq(Pa, Ka)},
     Pe,St5};
expr(#c_apply{anno=A,op=Cop,args=Cargs}, Sub, St) ->
    c_apply(A, Cop, Cargs, Sub, St);
expr(#c_call{anno=A,module=#c_literal{val=erlang},name=#c_literal{val=is_record},
             args=[_,Tag,Sz]=Args0}, Sub, St0) ->
    {Args,Ap,St} = atomic_list(Args0, Sub, St0),
    Remote = #k_remote{mod=#k_atom{val=erlang},name=#k_atom{val=is_record},arity=3},
    case {Tag,Sz} of
        {#c_literal{val=Atom},#c_literal{val=Int}}
          when is_atom(Atom), is_integer(Int) ->
            %% Tag and size are literals. Make it a BIF, which will actually
            %% be expanded out in a later pass.
            {#k_bif{anno=A,op=Remote,args=Args},Ap,St};
        {_,_} ->
            %% (Only in bodies.) Make it into an actual call to the BIF.
            {#k_call{anno=A,op=Remote,args=Args},Ap,St}
    end;
expr(#c_call{anno=A,module=M0,name=F0,args=Cargs}, Sub, St0) ->
    Ar = length(Cargs),
    {Type,St1} = case call_type(M0, F0, Ar) of
                     error ->
                         %% Invalid call (e.g. M:42/3). Issue a warning,
                         %% and let the generated code use the old explicit apply.
                         {old_apply,add_warning(get_line(A), bad_call, A, St0)};
                     Type0 ->
                         {Type0,St0}
                 end,
    case Type of
        old_apply ->
            Call = #c_call{anno=A,
                           module=#c_literal{val=erlang},
                           name=#c_literal{val=apply},
                           args=[M0,F0,cerl:make_list(Cargs)]},
            expr(Call, Sub, St1);
        _ ->
            {[M1,F1|Kargs],Ap,St} = atomic_list([M0,F0|Cargs], Sub, St1),
            Call = case Type of
                       bif ->
                           #k_bif{anno=A,op=#k_remote{mod=M1,name=F1,arity=Ar},
                                  args=Kargs};
                       call ->
                           #k_call{anno=A,op=#k_remote{mod=M1,name=F1,arity=Ar},
                                   args=Kargs};
                       apply ->
                           #k_call{anno=A,op=#k_remote{mod=M1,name=F1,arity=Ar},
                                   args=Kargs}
                   end,
            {Call,Ap,St}
    end;
expr(#c_primop{anno=A,name=#c_literal{val=match_fail},args=Cargs0}, Sub, St0) ->
    %% A match_fail primop becomes a call to erlang:error/1,2.
    Cargs = translate_match_fail(Cargs0, Sub, A, St0),
    {Kargs,Ap,St} = atomic_list(Cargs, Sub, St0),
    Ar = length(Cargs),
    Call = #k_call{anno=A,op=#k_remote{mod=#k_atom{val=erlang},
                                       name=#k_atom{val=error},
                                       arity=Ar},args=Kargs},
    {Call,Ap,St};
expr(#c_primop{anno=A,name=#c_literal{val=N},args=Cargs}, Sub, St0) ->
    %% Other primops become internal BIFs.
    {Kargs,Ap,St1} = atomic_list(Cargs, Sub, St0),
    Ar = length(Cargs),
    {#k_bif{anno=A,op=#k_internal{name=N,arity=Ar},args=Kargs},Ap,St1};
expr(#c_try{anno=A,arg=Ca,vars=Cvs,body=Cb,evars=Evs,handler=Ch}, Sub0, St0) ->
    %% The normal try expression. The body and exception handler
    %% variables behave as let variables.
    {Ka,Pa,St1} = body(Ca, Sub0, St0),
    {Kcvs,Sub1,St2} = pattern_list(Cvs, Sub0, St1),
    {Kb,Pb,St3} = body(Cb, Sub1, St2),
    {Kevs,Sub2,St4} = pattern_list(Evs, Sub0, St3),
    {Kh,Ph,St5} = body(Ch, Sub2, St4),
    {#k_try{anno=A,arg=pre_seq(Pa, Ka),
            vars=Kcvs,body=pre_seq(Pb, Kb),
            evars=Kevs,handler=pre_seq(Ph, Kh)},[],St5};
expr(#c_catch{anno=A,body=Cb}, Sub, St0) ->
    {Kb,Pb,St1} = body(Cb, Sub, St0),
    {#k_catch{anno=A,body=pre_seq(Pb, Kb)},[],St1};
%% Handle internal expressions.
expr(#ireceive_accept{anno=A}, _Sub, St) -> {#k_receive_accept{anno=A},[],St}.
%% translate_match_fail(Args, Sub, Anno, State) -> Args'.
%%  Translate a function_clause exception to a case_clause exception
%%  if it has been moved into another function. (A function_clause
%%  exception will not work correctly if it is moved into another
%%  function, or even if it is invoked not from the top level in the
%%  correct function.)
translate_match_fail([#c_tuple{es=[#c_literal{val=function_clause}|As]}],
                     Sub, Anno, St) ->
    translate_match_fail_1(Anno, As, Sub, St);
translate_match_fail([#c_literal{val=Tuple}]=Args, Sub, Anno, St)
  when is_tuple(Tuple) ->
    %% The inliner may have created a literal out of
    %% the original #c_tuple{}.
    case tuple_to_list(Tuple) of
        [function_clause|As0] ->
            As = [#c_literal{val=E} || E <- As0],
            translate_match_fail_1(Anno, As, Sub, St);
        _ ->
            Args
    end;
translate_match_fail(Args, _Sub, _Anno, _St) ->
    %% Not a function_clause exception; leave the arguments alone.
    Args.
%% translate_match_fail_1(Anno, Args, Sub, State) -> [Arg].
%%  Decide whether the function_clause exception is still being raised
%%  from the function it was originally defined in. If so, keep it;
%%  otherwise rewrite it to a case_clause exception.
translate_match_fail_1(Anno, As, Sub, #kern{ff=FF}) ->
    %% Determine the (possibly renamed) function the annotation refers to.
    AnnoFunc = case keyfind(function_name, 1, Anno) of
                   false ->
                       none;                    %Force rewrite.
                   {function_name,{Name,Arity}} ->
                       {get_fsub(Name, Arity, Sub),Arity}
               end,
    case {AnnoFunc,FF} of
        {Same,Same} ->
            %% Still in the correct function.
            translate_fc(As);
        {{F,_},F} ->
            %% Still in the correct function.
            translate_fc(As);
        _ ->
            %% Wrong function or no function_name annotation.
            %%
            %% The inliner has copied the match_fail(function_clause)
            %% primop from another function (or from another instance of
            %% the current function). match_fail(function_clause) will
            %% only work at the top level of the function it was originally
            %% defined in, so we will need to rewrite it to a case_clause.
            [c_tuple([#c_literal{val=case_clause},c_tuple(As)])]
    end.
%% translate_fc([Arg]) -> [CoreTerm].
%%  Build the argument list for a function_clause exception:
%%  the 'function_clause' atom followed by the arguments as a
%%  Core Erlang list.
translate_fc(As) ->
    ArgList = cerl:make_list(As),
    [#c_literal{val=function_clause},ArgList].
%% expr_map(Anno, MapVar, [Cpair], Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Convert a map construction/update expression: first the map
%%  argument itself, then its pairs.
expr_map(A, Var0, Ces, Sub, St0) ->
    {Mvar,Mpre,St1} = expr(Var0, Sub, St0),
    {Kmap,Ppre,St2} = map_split_pairs(A, Mvar, Ces, Sub, St1),
    {Kmap,Ppre ++ Mpre,St2}.
%% map_split_pairs(Anno, Var, [Cpair], Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Convert the pairs of a map operation:
%%  1. Force variables.
%%  2. Group adjacent pairs with literal keys.
%%  3. Within each such group, remove multiple assignments to the same key.
%%  4. Partition each group according to operator ('=>' and ':=').
map_split_pairs(A, Var, Ces, Sub, St0) ->
    Pairs0 = [{Op,K,V} ||
                 #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
    %% Force all keys and values to atomics, collecting the lifted
    %% pre-expressions.
    {Pairs,Esp,St1} = foldr(fun
            ({Op,K0,V0}, {Ops,Espi,Sti0}) when Op =:= assoc; Op =:= exact ->
                {K,Eps1,Sti1} = atomic(K0, Sub, Sti0),
                {V,Eps2,Sti2} = atomic(V0, Sub, Sti1),
                {[{Op,K,V}|Ops],Eps1 ++ Eps2 ++ Espi,Sti2}
        end, {[],[],St0}, Pairs0),
    map_split_pairs_1(A, Var, Pairs, Esp, St1).
%% map_split_pairs_1(Anno, Map, [{Op,Key,Val}], [PreKexpr], State) ->
%%      {Kexpr,[PreKexpr],State}.
%%  Split the pairs into a chain of #k_map{} update operations.
%%  A variable key gets an update of its own; runs of literal keys
%%  are grouped and handled together by map_group_pairs/5.
map_split_pairs_1(A, Map0, [{Op,Key,Val}|Pairs1]=Pairs0, Esp0, St0) ->
    {Map1,Em,St1} = force_atomic(Map0, St0),
    case Key of
        #k_var{} ->
            %% Don't combine variable keys with other keys.
            Kes = [#k_map_pair{key=Key,val=Val}],
            Map = #k_map{anno=A,op=Op,var=Map1,es=Kes},
            map_split_pairs_1(A, Map, Pairs1, Esp0 ++ Em, St1);
        _ ->
            %% Literal key. Split off all literal keys.
            {L,Pairs} = splitwith(fun({_,#k_var{},_}) -> false;
                                     ({_,_,_}) -> true
                                  end, Pairs0),
            {Map,Esp,St2} = map_group_pairs(A, Map1, L, Esp0 ++ Em, St1),
            map_split_pairs_1(A, Map, Pairs, Esp, St2)
    end;
map_split_pairs_1(_, Map, [], Esp, St0) ->
    {Map,Esp,St0}.
%% map_group_pairs(Anno, Var, [{Op,Key,Val}], [PreKexpr], State) ->
%%      {Kexpr,[PreKexpr],State}.
%%  Partition a group of literal-key pairs by operator. If both
%%  assoc ('=>') and exact (':=') pairs are present, the assoc
%%  update is performed first and its result is fed into the
%%  exact update.
map_group_pairs(A, Var, Pairs0, Esp, St0) ->
    Pairs = map_remove_dup_keys(Pairs0),
    Assoc = [#k_map_pair{key=K,val=V} || {_,{assoc,K,V}} <- Pairs],
    Exact = [#k_map_pair{key=K,val=V} || {_,{exact,K,V}} <- Pairs],
    case {Assoc,Exact} of
        {[_|_],[]} ->
            {#k_map{anno=A,op=assoc,var=Var,es=Assoc},Esp,St0};
        {[],[_|_]} ->
            {#k_map{anno=A,op=exact,var=Var,es=Exact},Esp,St0};
        {[_|_],[_|_]} ->
            Map = #k_map{anno=A,op=assoc,var=Var,es=Assoc},
            {Mvar,Em,St1} = force_atomic(Map, St0),
            {#k_map{anno=A,op=exact,var=Mvar,es=Exact},Esp ++ Em,St1}
    end.
%% map_remove_dup_keys([{Op,Key,Val}]) -> [{CleanKey,{Op,Key,Val}}].
%%  Remove multiple assignments to the same key; the last value
%%  written wins. The result order is that of dict:to_list/1.
map_remove_dup_keys(Es) ->
    Deduped = map_remove_dup_keys(Es, dict:new()),
    dict:to_list(Deduped).
%% map_remove_dup_keys(Pairs, Dict) -> Dict.
%%  Accumulate pairs keyed on the cleaned key. When a key is
%%  assigned more than once, the latest value is kept but the
%%  operator of the first assignment is retained.
map_remove_dup_keys([{Op0,K0,V}|Es], Used0) when Op0 =:= assoc; Op0 =:= exact ->
    K = map_key_clean(K0),
    Op = case {Op0,dict:find(K, Used0)} of
             {assoc,{ok,{exact,_,_}}} -> exact;
             {exact,{ok,{assoc,_,_}}} -> assoc;
             {_,_} -> Op0
         end,
    Used1 = dict:store(K, {Op,K0,V}, Used0),
    map_remove_dup_keys(Es, Used1);
map_remove_dup_keys([], Used) -> Used.
%% map_key_clean(Kexpr) -> {var,Name} | {lit,Value}.
%%  Normalize a map key for comparison, stripping annotations.
%%  Be explicit instead of using set_kanno(K, []).
map_key_clean(#k_var{name=V}) -> {var,V};
map_key_clean(#k_literal{val=V}) -> {lit,V};
map_key_clean(#k_int{val=V}) -> {lit,V};
map_key_clean(#k_float{val=V}) -> {lit,V};
map_key_clean(#k_atom{val=V}) -> {lit,V};
map_key_clean(#k_nil{}) -> {lit,[]}.
%% call_type(Module, Function, Arity) -> call | bif | apply | error.
%%  Classify the call: a known remote BIF is 'bif', any other static
%%  remote call is 'call', a call with a variable module and/or
%%  function name is 'apply', and anything else (e.g. M:42/3) is an
%%  'error'.
call_type(#c_literal{val=M}, #c_literal{val=F}, Ar) when is_atom(M), is_atom(F) ->
    case is_remote_bif(M, F, Ar) of
        false -> call;
        true -> bif
    end;
call_type(#c_var{}, #c_literal{val=A}, _) when is_atom(A) -> apply;
call_type(#c_literal{val=A}, #c_var{}, _) when is_atom(A) -> apply;
call_type(#c_var{}, #c_var{}, _) -> apply;
call_type(_, _, _) -> error.
%% match_vars(Kexpr, State) -> {[Kvar],[PreKexpr],State}.
%%  Force the return value of a body into a list of variables,
%%  lifting any non-variable results into pre-expressions.
match_vars(#ivalues{args=As}, St) ->
    foldr(fun (Arg, {Vars,Pre,S0}) ->
                  {Var,Vp,S1} = force_variable(Arg, S0),
                  {[Var|Vars],Vp ++ Pre,S1}
          end, {[],[],St}, As);
match_vars(Ke, St0) ->
    {Var,Pre,St1} = force_variable(Ke, St0),
    {[Var],Pre,St1}.
%% c_apply(A, Op, [Carg], Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Transform an application. An apply of a known local function
%%  becomes a direct #k_local{} call; any other operator is forced
%%  into a variable and called indirectly.
c_apply(A, #c_var{anno=Ra,name={F0,Ar}}, Cargs, Sub, St0) ->
    {Kargs,Ap,St1} = atomic_list(Cargs, Sub, St0),
    F1 = get_fsub(F0, Ar, Sub),                 %Has it been rewritten?
    {#k_call{anno=A,op=#k_local{anno=Ra,name=F1,arity=Ar},args=Kargs},
     Ap,St1};
c_apply(A, Cop, Cargs, Sub, St0) ->
    {Kop,Op,St1} = variable(Cop, Sub, St0),
    {Kargs,Ap,St2} = atomic_list(Cargs, Sub, St1),
    {#k_call{anno=A,op=Kop,args=Kargs},Op ++ Ap,St2}.
%% flatten_seq(Kexpr) -> [Kexpr].
%%  Flatten a nested chain of #iset{} bodies into a flat list;
%%  the sets in the result carry no bodies of their own.
flatten_seq(#iset{anno=A,vars=Vs,arg=Arg,body=B}) ->
    [#iset{anno=A,vars=Vs,arg=Arg}|flatten_seq(B)];
flatten_seq(Ke) -> [Ke].
%% pre_seq([PreKexpr], Kexpr) -> Kexpr.
%%  Rebuild a body by chaining the pre-expressions in front of the
%%  final expression through #iset{} bodies. An #iset{} in the
%%  pre-expression list must not already have a body.
pre_seq([#iset{anno=A,vars=Vs,arg=Arg,body=B}|Ps], K) ->
    B = undefined,                              %Assertion.
    #iset{anno=A,vars=Vs,arg=Arg,body=pre_seq(Ps, K)};
pre_seq([P|Ps], K) ->
    #iset{vars=[],arg=P,body=pre_seq(Ps, K)};
pre_seq([], K) -> K.
%% atomic(Cexpr, Sub, State) -> {Katomic,[PreKexpr],State}.
%%  Convert a Core expression, making sure the result is atomic
%%  (a literal or variable); anything else is lifted out into the
%%  returned pre-expressions.
atomic(Ce, Sub, St0) ->
    {Ke,Pre0,St1} = expr(Ce, Sub, St0),
    {Ka,Pre1,St2} = force_atomic(Ke, St1),
    {Ka,Pre0 ++ Pre1,St2}.
%% force_atomic(Kexpr, State) -> {Katomic,[PreKexpr],State}.
%%  If Kexpr is not already atomic, bind it to a fresh variable in
%%  a pre-expression and return the variable instead.
force_atomic(Ke, St) ->
    force_atomic_1(is_atomic(Ke), Ke, St).

%% Dispatch on the result of the is_atomic/1 test.
force_atomic_1(true, Ke, St) ->
    {Ke,[],St};
force_atomic_1(false, Ke, St0) ->
    {Var,St1} = new_var(St0),
    {Var,[#iset{vars=[Var],arg=Ke}],St1}.
% force_atomic_list(Kes, St) ->
% foldr(fun (Ka, {As,Asp,St0}) ->
% {A,Ap,St1} = force_atomic(Ka, St0),
% {[A|As],Ap ++ Asp,St1}
% end, {[],[],St}, Kes).
%% atomic_bin([Cbitstr], Sub, State) -> {Kexpr,[PreKexpr],State}.
%%  Convert the segments of a binary construction, forcing values
%%  and sizes to atomics. Throws 'bad_element_size' if a segment
%%  size is invalid.
atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U0,type=T,flags=Fs0}|Es0],
           Sub, St0) ->
    {E,Ap1,St1} = atomic(E0, Sub, St0),
    {S1,Ap2,St2} = atomic(S0, Sub, St1),
    validate_bin_element_size(S1),
    U1 = cerl:concrete(U0),
    Fs1 = cerl:concrete(Fs0),
    {Es,Ap3,St3} = atomic_bin(Es0, Sub, St2),
    {#k_bin_seg{anno=A,size=S1,
                unit=U1,
                type=cerl:concrete(T),
                flags=Fs1,
                seg=E,next=Es},
     Ap1++Ap2++Ap3,St3};
atomic_bin([], _Sub, St) -> {#k_bin_end{},[],St}.
%% validate_bin_element_size(Kexpr) -> ok.
%%  A segment size must be a variable, a non-negative integer, or
%%  one of the atoms 'all' or 'undefined'; anything else throws
%%  'bad_element_size'.
validate_bin_element_size(#k_var{}) -> ok;
validate_bin_element_size(#k_int{val=V}) when V >= 0 -> ok;
validate_bin_element_size(#k_atom{val=all}) -> ok;
validate_bin_element_size(#k_atom{val=undefined}) -> ok;
validate_bin_element_size(_) -> throw(bad_element_size).
%% atomic_list([Cexpr], Sub, State) -> {[Kexpr],[PreKexpr],State}.
%%  Convert a list of Core expressions to atomics, accumulating
%%  the pre-expressions in order.
atomic_list(Ces, Sub, St) ->
    foldr(fun (Ce, {Atomics,Pre,S0}) ->
                  {Ka,Ap,S1} = atomic(Ce, Sub, S0),
                  {[Ka|Atomics],Ap ++ Pre,S1}
          end, {[],[],St}, Ces).
%% is_atomic(Kexpr) -> boolean().
%%  Is a Kexpr atomic? Strings are NOT considered atomic!
is_atomic(#k_literal{}) -> true;
is_atomic(#k_int{}) -> true;
is_atomic(#k_float{}) -> true;
is_atomic(#k_atom{}) -> true;
%%is_atomic(#k_char{}) -> true;                 %No characters
is_atomic(#k_nil{}) -> true;
is_atomic(#k_var{}) -> true;
is_atomic(_) -> false.
%% variable(Cexpr, Sub, State) -> {Kvar,[PreKexpr],State}.
%%  Convert a Core expression, making sure the result is a variable.
variable(Ce, Sub, St0) ->
    {Ke,Pre0,St1} = expr(Ce, Sub, St0),
    {Var,Pre1,St2} = force_variable(Ke, St1),
    {Var,Pre0 ++ Pre1,St2}.
%% force_variable(Kexpr, State) -> {Kvar,[PreKexpr],State}.
%%  If Kexpr is not a variable, bind it to a fresh one in a
%%  pre-expression.
force_variable(#k_var{}=Var, St) ->
    {Var,[],St};
force_variable(Ke, St0) ->
    {Var,St1} = new_var(St0),
    {Var,[#iset{vars=[Var],arg=Ke}],St1}.
%% pattern(Cpat, Isub, Osub, State) -> {Kpat,Sub,State}.
%%  Convert patterns. Variables shadow so rename variables that are
%%  already defined.
%%
%%  Patterns are complicated by sizes in binaries. These are pure
%%  input variables which create no bindings. We, therefore, need to
%%  carry around the original substitutions to get the correct
%%  handling.
pattern(#c_var{anno=A,name=V}, _Isub, Osub, St0) ->
    case cerl_sets:is_element(V, St0#kern.ds) of
        true ->
            %% The variable is already defined; rename it and record
            %% the substitution so later references follow along.
            {New,St1} = new_var_name(St0),
            {#k_var{anno=A,name=New},
             set_vsub(V, New, Osub),
             St1#kern{ds=cerl_sets:add_element(New, St1#kern.ds)}};
        false ->
            {#k_var{anno=A,name=V},Osub,
             St0#kern{ds=cerl_sets:add_element(V, St0#kern.ds)}}
    end;
pattern(#c_literal{anno=A,val=Val}, _Isub, Osub, St) ->
    {#k_literal{anno=A,val=Val},Osub,St};
pattern(#c_cons{anno=A,hd=Ch,tl=Ct}, Isub, Osub0, St0) ->
    {Kh,Osub1,St1} = pattern(Ch, Isub, Osub0, St0),
    {Kt,Osub2,St2} = pattern(Ct, Isub, Osub1, St1),
    {#k_cons{anno=A,hd=Kh,tl=Kt},Osub2,St2};
pattern(#c_tuple{anno=A,es=Ces}, Isub, Osub0, St0) ->
    {Kes,Osub1,St1} = pattern_list(Ces, Isub, Osub0, St0),
    {#k_tuple{anno=A,es=Kes},Osub1,St1};
pattern(#c_map{anno=A,es=Ces}, Isub, Osub0, St0) ->
    {Kes,Osub1,St1} = pattern_map_pairs(Ces, Isub, Osub0, St0),
    %% Map patterns always match exactly (':=').
    {#k_map{anno=A,op=exact,es=Kes},Osub1,St1};
pattern(#c_binary{anno=A,segments=Cv}, Isub, Osub0, St0) ->
    {Kv,Osub1,St1} = pattern_bin(Cv, Isub, Osub0, St0),
    {#k_binary{anno=A,segs=Kv},Osub1,St1};
pattern(#c_alias{anno=A,var=Cv,pat=Cp}, Isub, Osub0, St0) ->
    %% Flatten nested aliases; all alias variables are converted
    %% before the innermost pattern itself.
    {Cvs,Cpat} = flatten_alias(Cp),
    {Kvs,Osub1,St1} = pattern_list([Cv|Cvs], Isub, Osub0, St0),
    {Kpat,Osub2,St2} = pattern(Cpat, Isub, Osub1, St1),
    {#ialias{anno=A,vars=Kvs,pat=Kpat},Osub2,St2}.
%% flatten_alias(Pattern) -> {[AliasVar],Pattern}.
%%  Collect the variables of nested aliases, outermost first,
%%  together with the innermost (non-alias) pattern.
flatten_alias(#c_alias{var=Var,pat=Inner}) ->
    {MoreVars,Pat} = flatten_alias(Inner),
    {[Var|MoreVars],Pat};
flatten_alias(Pat) ->
    {[],Pat}.
%% pattern_map_pairs([Cpair], Isub, Osub, State) -> {[Kpair],Osub,State}.
%%  Convert map pattern pairs. Keys are expressions converted with
%%  the input substitutions (and must produce no pre-expressions);
%%  values are patterns.
pattern_map_pairs(Ces0, Isub, Osub0, St0) ->
    %% pattern the pair keys and values as normal
    {Kes,{Osub1,St1}} = lists:mapfoldl(fun
            (#c_map_pair{anno=A,key=Ck,val=Cv},{Osubi0,Sti0}) ->
                {Kk,[],Sti1} = expr(Ck, Isub, Sti0),
                {Kv,Osubi2,Sti2} = pattern(Cv, Isub, Osubi0, Sti1),
                {#k_map_pair{anno=A,key=Kk,val=Kv},{Osubi2,Sti2}}
        end, {Osub0, St0}, Ces0),
    %% It is later assumed that these keys are term sorted,
    %% so we need to sort them here.
    Kes1 = lists:sort(fun
            (#k_map_pair{key=KkA},#k_map_pair{key=KkB}) ->
                A = map_key_clean(KkA),
                B = map_key_clean(KkB),
                erts_internal:cmp_term(A,B) < 0
        end, Kes),
    {Kes1,Osub1,St1}.
%% pattern_bin([Cbitstr], Isub, Osub, State) -> {Kbin,Osub,State}.
%%  Convert the segments of a binary pattern, discarding the final
%%  input substitutions that were threaded for segment sizes.
pattern_bin(Es, Isub, Osub0, St0) ->
    {Kbin,{_Isub,Osub1},St1} = pattern_bin_1(Es, Isub, Osub0, St0),
    {Kbin,Osub1,St1}.
%% pattern_bin_1([Cbitstr], Isub, Osub, State) ->
%%      {Kexpr,{Isub,Osub},State}.
%%  Convert binary pattern segments. A variable bound by a segment
%%  is also added to the input substitutions, since the size
%%  expression of a later segment may refer to it.
pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
              Isub0, Osub0, St0) ->
    {S1,[],St1} = expr(S0, Isub0, St0),
    S = case S1 of
            #k_int{} -> S1;
            #k_var{} -> S1;
            #k_atom{} -> S1;
            _ ->
                %% Bad size (coming from an optimization or Core Erlang
                %% source code) - replace it with a known atom because
                %% a literal or bit syntax construction can cause further
                %% problems.
                #k_atom{val=bad_size}
        end,
    U0 = cerl:concrete(U),
    Fs0 = cerl:concrete(Fs),
    {E,Osub1,St2} = pattern(E0, Isub0, Osub0, St1),
    Isub1 = case E0 of
                #c_var{name=V} ->
                    set_vsub(V, E#k_var.name, Isub0);
                _ -> Isub0
            end,
    {Es,{Isub,Osub},St3} = pattern_bin_1(Es0, Isub1, Osub1, St2),
    {#k_bin_seg{anno=A,size=S,
                unit=U0,
                type=cerl:concrete(T),
                flags=Fs0,
                seg=E,next=Es},
     {Isub,Osub},St3};
pattern_bin_1([], Isub, Osub, St) -> {#k_bin_end{},{Isub,Osub},St}.
%% pattern_list([Cexpr], Sub, State) -> {[Kexpr],Sub,State}.
%%  Convert a list of patterns, using the same substitutions as
%%  both input and initial output substitutions.
pattern_list(Ces, Sub, St) ->
    pattern_list(Ces, Sub, Sub, St).
%% pattern_list([Cexpr], Isub, Osub, State) -> {[Kexpr],Osub,State}.
%%  Convert a list of patterns, threading the output substitutions
%%  from pattern to pattern.
pattern_list(Ces, Isub, Osub, St) ->
    foldr(fun (Ce, {Pats,Osub0,S0}) ->
                  {Pat,Osub1,S1} = pattern(Ce, Isub, Osub0, S0),
                  {[Pat|Pats],Osub1,S1}
          end, {[],Osub,St}, Ces).
%% new_sub() -> Subs.
%% set_vsub(Name, Sub, Subs) -> Subs.
%% subst_vsub(Name, Sub, Subs) -> Subs.
%% get_vsub(Name, Subs) -> SubName.
%%  Add/get substitute Sub for Name to VarSub. Use orddict so we know
%%  the format is a list of {Name,Sub} pairs. When adding a new
%%  substitute we fold substitute chains so we never have to search
%%  more than once.
new_sub() -> orddict:new().
%% get_vsub(Name, Subs) -> SubName.
%%  Look up the substitute for a variable; a variable without a
%%  substitution maps to itself.
get_vsub(V, Vsub) ->
    case orddict:is_key(V, Vsub) of
        true -> orddict:fetch(V, Vsub);
        false -> V
    end.
%% set_vsub(Name, Sub, Subs) -> Subs.
%%  Store (or replace) the substitute for Name.
set_vsub(Name, Sub, Vsub) ->
    orddict:store(Name, Sub, Vsub).
%% subst_vsub(Key, New, Subs) -> Subs.
%%  Add the substitution Key -> New, folding existing substitution
%%  chains so that a lookup never needs more than one step.
subst_vsub(Key, New, Vsub) ->
    orddict:from_list(subst_vsub_1(Key, New, Vsub)).
%% subst_vsub_1(Key, New, [{K,V}]) -> [{K,V}].
%%  Walk the (sorted) pair list: rewrite every value equal to Key
%%  to New (folding chains) and insert {Key,New} at its sorted
%%  position.
subst_vsub_1(Key, New, [{K,Key}|Dict]) ->
    %% Fold chained substitution.
    [{K,New}|subst_vsub_1(Key, New, Dict)];
subst_vsub_1(Key, New, [{K,_}|_]=Dict) when Key < K ->
    %% Insert the new substitution here, and continue
    %% looking for chained substitutions.
    [{Key,New}|subst_vsub_2(Key, New, Dict)];
subst_vsub_1(Key, New, [{K,_}=E|Dict]) when Key > K ->
    [E|subst_vsub_1(Key, New, Dict)];
subst_vsub_1(Key, New, []) -> [{Key,New}].
%% subst_vsub_2(Key, New, [{K,V}]) -> [{K,V}].
%%  The insertion point has already been passed; only fold the
%%  remaining chained substitutions.
subst_vsub_2(V, S, [{K,V}|Dict]) ->
    %% Fold chained substitution.
    [{K,S}|subst_vsub_2(V, S, Dict)];
subst_vsub_2(V, S, [E|Dict]) ->
    [E|subst_vsub_2(V, S, Dict)];
subst_vsub_2(_, _, []) -> [].
%% get_fsub(Name, Arity, Fsub) -> Name'.
%%  Look up the substitute name for a local function; default to
%%  the original name when there is none.
get_fsub(F, A, Fsub) ->
    case orddict:is_key({F,A}, Fsub) of
        true -> orddict:fetch({F,A}, Fsub);
        false -> F
    end.
%% set_fsub(Name, Arity, Sub, Fsub) -> Fsub.
%%  Record a substitute name for a local function.
set_fsub(Name, Arity, Sub, Fsub) ->
    orddict:store({Name,Arity}, Sub, Fsub).
%% new_fun_name(State) -> {FunName,State}.
%%  Create a new name for an anonymous fun.
new_fun_name(St) ->
    new_fun_name("anonymous", St).
%% new_fun_name(Type, State) -> {FunName,State}.
%%  Create a unique name for a lifted fun, encoding the enclosing
%%  function and arity, the fun type, and a running counter.
new_fun_name(Type, #kern{func={F,Arity},fcount=C}=St) ->
    Name = lists:concat(["-",F,"/",Arity,"-",Type,"-",C,"-"]),
    {list_to_atom(Name),St#kern{fcount=C+1}}.
%% new_var_name(State) -> {VarName,State}.
%%  Variable names are simply increasing integers.
new_var_name(#kern{vcount=C}=St) ->
    {C,St#kern{vcount=C+1}}.
%% new_var(State) -> {#k_var{},State}.
%%  Create a fresh Kernel variable.
new_var(St0) ->
    {Name,St1} = new_var_name(St0),
    {#k_var{name=Name},St1}.
%% new_vars(Count, State) -> {[#k_var{}],State}.
%%  Make Count new variables. Note that the most recently created
%%  variable ends up first in the result list.
new_vars(N, St) -> new_vars(N, St, []).

new_vars(N, St0, Vs) when N > 0 ->
    {V,St1} = new_var(St0),
    new_vars(N-1, St1, [V|Vs]);
new_vars(0, St, Vs) -> {Vs,St}.
%% make_vars([Name]) -> [#k_var{}].
%%  Wrap variable names in #k_var{} records.
make_vars(Vs) ->
    map(fun (V) -> #k_var{name=V} end, Vs).
%% add_var_def(Kvar, State) -> State.
%%  Record a variable as defined in the current scope.
add_var_def(V, St) ->
    St#kern{ds=cerl_sets:add_element(V#k_var.name, St#kern.ds)}.
%%add_vars_def(Vs, St) ->
%% Ds = foldl(fun (#k_var{name=V}, Ds) -> add_element(V, Ds) end,
%% St#kern.ds, Vs),
%% St#kern{ds=Ds}.
%% is_remote_bif(Mod, Name, Arity) -> true | false.
%%  Test if function is really a BIF. Guard BIFs and the
%%  arithmetic/boolean/comparison operators count; the list and
%%  send operators do not.
is_remote_bif(erlang, get, 1) -> true;
is_remote_bif(erlang, N, A) ->
    case erl_internal:guard_bif(N, A) of
        true -> true;
        false ->
            try erl_internal:op_type(N, A) of
                arith -> true;
                bool -> true;
                comp -> true;
                list -> false;
                send -> false
            catch
                _:_ -> false                    %Not an operator.
            end
    end;
is_remote_bif(_, _, _) -> false.
%% bif_vals(Name, Arity) -> integer().
%% bif_vals(Mod, Name, Arity) -> integer().
%%  Determine how many return values a BIF has. Provision for BIFs to
%%  return multiple values. Only used in bodies where a BIF may be
%%  called for effect only.
bif_vals(dsetelement, 3) -> 0;
bif_vals(bs_context_to_binary, 1) -> 0;
bif_vals(_, _) -> 1.

bif_vals(_, _, _) -> 1.
%% foldr2(Fun, Acc, List1, List2) -> Acc.
%%  Fold over two lists of equal length. Note that, despite the
%%  name, elements are combined head first (like foldl/3). Lists of
%%  different lengths raise a function_clause error.
foldr2(Fun, Acc, [X|Xs], [Y|Ys]) ->
    foldr2(Fun, Fun(X, Y, Acc), Xs, Ys);
foldr2(_Fun, Acc, [], []) -> Acc.
%% This code implements the algorithm for an optimizing compiler for
%% pattern matching given "The Implementation of Functional
%% Programming Languages" by <NAME>. The code is much
%% longer as the meaning of constructors is different from the book.
%%
%% In Erlang many constructors can have different values, e.g. 'atom'
%% or 'integer', whereas in the original algorithm these would be
%% different constructors. Our view makes it easier in later passes to
%% handle indexing over each type.
%%
%% Patterns are complicated by having alias variables. The form of a
%% pattern is Pat | {alias,Pat,[AliasVar]}. This is hidden by access
%% functions to pattern arguments but the code must be aware of it.
%%
%% The compilation proceeds in two steps:
%%
%% 1. The patterns in the clauses are converted to lists of kernel
%% patterns. The Core clause is now hybrid, this is easier to work
%% with. Remove clauses with trivially false guards, this simplifies
%% later passes. Add locally defined vars and variable subs to each
%% clause for later use.
%%
%% 2. The pattern matching is optimised. Variable substitutions are
%% added to the VarSub structure and new variables are made visible.
%% The guard and body are then converted to Kernel form.
%% kmatch([Var], [Clause], Sub, State) -> {Kexpr,State}.
%%  Compile a pattern match over the variables Us. The default
%%  action is 'fail'; handling of a failed match is left to the
%%  surrounding code.
kmatch(Us, Ccs, Sub, St0) ->
    {Cs,St1} = match_pre(Ccs, Sub, St0),        %Convert clauses
    Def = fail,
    match(Us, Cs, Def, St1).                    %Do the match.
%% match_pre([Cclause], Sub, State) -> {[Clause],State}.
%%  Convert the patterns of each clause to Kernel form, pairing
%%  every clause with its input and output substitutions for later
%%  use. Must be careful not to generate new substitutions here now!
match_pre(Cs, Sub, St) ->
    foldr(fun (#c_clause{anno=A,pats=Ps,guard=G,body=B}, {Acc,S0}) ->
                  {Kps,Osub,S1} = pattern_list(Ps, Sub, S0),
                  Ic = #iclause{anno=A,isub=Sub,osub=Osub,
                                pats=Kps,guard=G,body=B},
                  {[Ic|Acc],S1}
          end, {[],St}, Cs).
%% match([Var], [Clause], Default, State) -> {MatchExpr,State}.
%%  Compile the match. The clauses are partitioned into groups of
%%  variable clauses and constructor clauses; each group is compiled
%%  with the following groups' result as its default. When no
%%  variables are left, only the guards remain to be tested.
match([_U|_Us] = L, Cs, Def, St0) ->
    Pcss = partition(Cs),
    foldr(fun (Pcs, {D,St}) -> match_varcon(L, Pcs, D, St) end,
          {Def,St0}, Pcss);
match([], Cs, Def, St) ->
    match_guard(Cs, Def, St).
%% match_guard([Clause], Default, State) -> {IfExpr,State}.
%%  Build a guard to handle guards. A guard *ALWAYS* fails if no
%%  clause matches; there will be a surrounding 'alt' to catch the
%%  failure. Drop redundant cases, i.e. those after a true guard.
match_guard(Cs0, Def0, St0) ->
    {Cs,Def,St} = match_guard_1(Cs0, Def0, St0),
    {build_alt(build_guard(Cs), Def),St}.
%% match_guard_1([Clause], Default, State) -> {[GuardClause],Default,State}.
%%  Convert the clauses to guard clauses. A clause with a trivially
%%  true guard makes its body the new default; all following clauses
%%  are dropped (with shadowing warnings where appropriate).
match_guard_1([#iclause{anno=A,osub=Osub,guard=G,body=B}|Cs0], Def0, St0) ->
    case is_true_guard(G) of
        true ->
            %% The true clause body becomes the default.
            {Kb,Pb,St1} = body(B, Osub, St0),
            St2 = maybe_add_warning(Cs0, A, St1),
            St = maybe_add_warning(Def0, A, St2),
            {[],pre_seq(Pb, Kb),St};
        false ->
            {Kg,St1} = guard(G, Osub, St0),
            {Kb,Pb,St2} = body(B, Osub, St1),
            {Cs1,Def1,St3} = match_guard_1(Cs0, Def0, St2),
            {[#k_guard_clause{guard=Kg,body=pre_seq(Pb, Kb)}|Cs1],
             Def1,St3}
    end;
match_guard_1([], Def, St) -> {[],Def,St}.
%% maybe_add_warning(ClausesOrExpr, MatchAnno, State) -> State.
%%  Add a nomatch_shadow warning for the first clause (or the
%%  default expression) made unreachable by a preceding true guard,
%%  unless it is compiler generated.
maybe_add_warning([C|_], MatchAnno, St) ->
    maybe_add_warning(C, MatchAnno, St);
maybe_add_warning([], _MatchAnno, St) -> St;
maybe_add_warning(fail, _MatchAnno, St) -> St;
maybe_add_warning(Ke, MatchAnno, St) ->
    case is_compiler_generated(Ke) of
        true ->
            St;
        false ->
            Anno = get_kanno(Ke),
            Line = get_line(Anno),
            MatchLine = get_line(MatchAnno),
            Warn = case MatchLine of
                       none -> nomatch_shadow;
                       _ -> {nomatch_shadow,MatchLine}
                   end,
            add_warning(Line, Warn, Anno, St)
    end.
%% get_line(Anno) -> Line | none.
%%  Return the first integer (the line number) found in an
%%  annotation list.
get_line([H|T]) ->
    if
        is_integer(H) -> H;
        true -> get_line(T)
    end;
get_line([]) -> none.
%% get_file(Anno) -> Filename.
%%  Return the file name from an annotation list. The "no_file"
%%  fallback should not happen in practice.
get_file([{file,Name}|_]) -> Name;
get_file([_|Rest]) -> get_file(Rest);
get_file([]) -> "no_file".
%% is_true_guard(Guard) -> boolean().
%%  Test if a guard is trivially true (the literal 'true').
is_true_guard(#c_literal{val=true}) -> true;
is_true_guard(_) -> false.
%% partition([Clause]) -> [[Clause]].
%%  Partition a list of clauses into consecutive groups which either
%%  all have a variable first argument or all have a "constructor".
partition([First|Rest]) ->
    FirstIsVar = is_var_clause(First),
    {Same,Other} = splitwith(fun (C) -> is_var_clause(C) =:= FirstIsVar end,
                             Rest),
    [[First|Same]|partition(Other)];
partition([]) -> [].
%% match_varcon([Var], [Clause], Def, State) -> {MatchExpr,State}.
%%  Dispatch on whether the group consists of variable clauses or
%%  constructor clauses; partition/1 guarantees the group is
%%  homogeneous, so only the first clause needs to be examined.
match_varcon(Us, [First|_]=Cs, Def, St) ->
    Match = case is_var_clause(First) of
                true -> fun match_var/4;
                false -> fun match_con/4
            end,
    Match(Us, Cs, Def, St).
%% match_var([Var], [Clause], Def, State) -> {MatchExpr,State}.
%%  Build a call to "select" from a list of clauses all containing a
%%  variable as the first argument. We must rename the variable in
%%  each clause to be the match variable as these clauses will share
%%  this variable and may have different names for it. Rename aliases
%%  as well. Both the input and output substitutions are updated.
match_var([U|Us], Cs0, Def, St) ->
    Cs1 = map(fun (#iclause{isub=Isub0,osub=Osub0,pats=[Arg|As]}=C) ->
                      Vs = [arg_arg(Arg)|arg_alias(Arg)],
                      Osub1 = foldl(fun (#k_var{name=V}, Acc) ->
                                            subst_vsub(V, U#k_var.name, Acc)
                                    end, Osub0, Vs),
                      Isub1 = foldl(fun (#k_var{name=V}, Acc) ->
                                            subst_vsub(V, U#k_var.name, Acc)
                                    end, Isub0, Vs),
                      C#iclause{isub=Isub1,osub=Osub1,pats=As}
              end, Cs0),
    match(Us, Cs1, Def, St).
%% match_con(Variables, [Clause], Default, State) -> {SelectExpr,State}.
%%  Build call to "select" from a list of clauses all containing a
%%  constructor/constant as first argument. Top-level literal
%%  patterns are expanded to structured form before grouping.
match_con(Us, Cs0, Def, St) ->
    Cs = map(fun expand_pat_lit_clause/1, Cs0),
    match_con_1(Us, Cs, Def, St).
%% match_con_1([Var], [Clause], Default, State) -> {SelectExpr,State}.
%%  Group the clauses by constructor type and build a type clause
%%  for each group. Binary clauses are handled first; the remaining
%%  types are disjoint, so their order is really irrelevant but
%%  tries to be smart.
match_con_1([U|_Us] = L, Cs, Def, St0) ->
    %% Extract clauses for different constructors (types).
    Ttcs0 = select_types([k_binary], Cs) ++ select_bin_con(Cs) ++
        select_types([k_cons,k_tuple,k_map,k_atom,k_float,
                      k_int,k_nil], Cs),
    Ttcs = opt_single_valued(Ttcs0),
    {Scs,St1} =
        mapfoldl(fun ({T,Tcs}, St) ->
                         {[S|_]=Sc,S1} = match_value(L, T, Tcs, fail, St),
                         Anno = get_kanno(S),
                         {#k_type_clause{anno=Anno,type=T,values=Sc},S1} end,
                 St0, Ttcs),
    {build_alt_1st_no_fail(build_select(U, Scs), Def),St1}.
%% select_types([Type], [Clause]) -> [{Type,[Clause]}].
%%  Pair each type with its matching clauses, dropping types that
%%  have no clauses.
select_types(Types, Cs) ->
    foldr(fun (T, Acc) ->
                  case select(T, Cs) of
                      [] -> Acc;
                      Tcs -> [{T,Tcs}|Acc]
                  end
          end, [], Types).
%% expand_pat_lit_clause(Clause) -> Clause.
%%  Expand a top-level literal pattern (also when inside an alias)
%%  into its structured form so that it can be grouped by type.
expand_pat_lit_clause(#iclause{pats=[#ialias{pat=#k_literal{anno=A,val=Val}}=Alias|Ps]}=C) ->
    P = expand_pat_lit(Val, A),
    C#iclause{pats=[Alias#ialias{pat=P}|Ps]};
expand_pat_lit_clause(#iclause{pats=[#k_literal{anno=A,val=Val}|Ps]}=C) ->
    P = expand_pat_lit(Val, A),
    C#iclause{pats=[P|Ps]};
expand_pat_lit_clause(C) -> C.
%% expand_pat_lit(Value, Anno) -> Kpat.
%%  Expand the top level of a literal value into a cons or tuple
%%  node with literal children; other values become simple nodes.
expand_pat_lit([H|T], A) ->
    #k_cons{anno=A,hd=literal(H, A),tl=literal(T, A)};
expand_pat_lit(Tuple, A) when is_tuple(Tuple) ->
    #k_tuple{anno=A,es=[literal(E, A) || E <- tuple_to_list(Tuple)]};
expand_pat_lit(Lit, A) ->
    literal(Lit, A).
%% literal(Value, Anno) -> Kpat.
%%  Construct the Kernel node for a literal value. Non-empty lists
%%  and tuples are kept as #k_literal{} nodes.
literal([], A) ->
    #k_nil{anno=A};
literal(Val, A) when is_integer(Val) ->
    #k_int{anno=A,val=Val};
literal(Val, A) when is_float(Val) ->
    #k_float{anno=A,val=Val};
literal(Val, A) when is_atom(Val) ->
    #k_atom{anno=A,val=Val};
literal(Val, A) when is_list(Val); is_tuple(Val) ->
    #k_literal{anno=A,val=Val}.
%% opt_single_valued([{Type,Clauses}]) -> [{Type,Clauses}].
%%  If a type only has one clause and if the pattern is literal,
%%  the matching can be done more efficiently by directly comparing
%%  with the literal (that is especially true for binaries).
opt_single_valued(Ttcs) ->
    opt_single_valued(Ttcs, [], []).
%% opt_single_valued([{Type,Clauses}], TypeAcc, LiteralAcc) ->
%%      [{Type,Clauses}].
%%  Collect single-clause types whose pattern can be combined into
%%  one literal under a k_literal group; combine_lit_pat/1 throws
%%  'not_possible' for patterns that cannot be combined.
opt_single_valued([{_,[#iclause{pats=[P0|Ps]}=Tc]}=Ttc|Ttcs], TtcAcc, LitAcc) ->
    try combine_lit_pat(P0) of
        P ->
            LitTtc = Tc#iclause{pats=[P|Ps]},
            opt_single_valued(Ttcs, TtcAcc, [LitTtc|LitAcc])
    catch
        not_possible ->
            opt_single_valued(Ttcs, [Ttc|TtcAcc], LitAcc)
    end;
opt_single_valued([Ttc|Ttcs], TtcAcc, LitAcc) ->
    opt_single_valued(Ttcs, [Ttc|TtcAcc], LitAcc);
opt_single_valued([], TtcAcc, []) ->
    reverse(TtcAcc);
opt_single_valued([], TtcAcc, LitAcc) ->
    Literals = {k_literal,reverse(LitAcc)},
    %% Test the literals as early as possible.
    case reverse(TtcAcc) of
        [{k_binary,_}=Bin|Ttcs] ->
            %% The delayed creation of sub binaries requires
            %% bs_start_match2 to be the first instruction in the
            %% function.
            [Bin,Literals|Ttcs];
        Ttcs ->
            [Literals|Ttcs]
    end.
%% combine_lit_pat(Kpat) -> Kpat.
%%  Combine a structured pattern into a single #k_literal{} when
%%  possible. Throws 'not_possible' when it is not, or when the
%%  result would be an atom, a number, or [] (those are already
%%  matched efficiently as simple types).
combine_lit_pat(#ialias{pat=Pat0}=Alias) ->
    Pat = combine_lit_pat(Pat0),
    Alias#ialias{pat=Pat};
combine_lit_pat(Pat) ->
    case do_combine_lit_pat(Pat) of
        #k_literal{val=Val} when is_atom(Val) ->
            throw(not_possible);
        #k_literal{val=Val} when is_number(Val) ->
            throw(not_possible);
        #k_literal{val=[]} ->
            throw(not_possible);
        #k_literal{}=Lit ->
            Lit
    end.
%% do_combine_lit_pat(Kpat) -> #k_literal{}.
%%  Recursively fold a pattern built from constants into a single
%%  literal value. Throws 'not_possible' for variables and any
%%  other non-constant sub-pattern.
do_combine_lit_pat(#k_atom{anno=A,val=Val}) ->
    #k_literal{anno=A,val=Val};
do_combine_lit_pat(#k_float{anno=A,val=Val}) ->
    #k_literal{anno=A,val=Val};
do_combine_lit_pat(#k_int{anno=A,val=Val}) ->
    #k_literal{anno=A,val=Val};
do_combine_lit_pat(#k_nil{anno=A}) ->
    #k_literal{anno=A,val=[]};
do_combine_lit_pat(#k_binary{anno=A,segs=Segs}) ->
    Bin = combine_bin_segs(Segs),
    #k_literal{anno=A,val=Bin};
do_combine_lit_pat(#k_cons{anno=A,hd=Hd0,tl=Tl0}) ->
    #k_literal{val=Hd} = do_combine_lit_pat(Hd0),
    #k_literal{val=Tl} = do_combine_lit_pat(Tl0),
    #k_literal{anno=A,val=[Hd|Tl]};
do_combine_lit_pat(#k_literal{}=Lit) ->
    Lit;
do_combine_lit_pat(#k_tuple{anno=A,es=Es0}) ->
    Es = [begin
              #k_literal{val=Lit} = do_combine_lit_pat(El),
              Lit
          end || El <- Es0],
    #k_literal{anno=A,val=list_to_tuple(Es)};
do_combine_lit_pat(_) ->
    throw(not_possible).
%% combine_bin_segs(KbinSegs) -> bitstring().
%%  Combine a chain of constant unsigned big-endian integer
%%  segments into a single bitstring literal. Throws
%%  'not_possible' for any other kind of segment, if a value does
%%  not fit in its segment, or if a segment is 64 bits or wider
%%  (to avoid creating huge binary literals).
combine_bin_segs(#k_bin_seg{size=Size0,unit=Unit,type=integer,
                            flags=[unsigned,big],seg=Seg,next=Next}) ->
    #k_literal{val=Size1} = do_combine_lit_pat(Size0),
    #k_literal{val=Int} = do_combine_lit_pat(Seg),
    Size = Size1 * Unit,
    if
        0 < Size, Size < 64 ->
            Bin = <<Int:Size>>,
            case Bin of
                <<Int:Size>> ->
                    NextBin = combine_bin_segs(Next),
                    <<Bin/bits,NextBin/bits>>;
                _ ->
                    %% The integer Int does not fit in the segment,
                    %% thus it will not match.
                    throw(not_possible)
            end;
        true ->
            %% Avoid creating huge binary literals.
            throw(not_possible)
    end;
combine_bin_segs(#k_bin_end{}) ->
    <<>>;
combine_bin_segs(_) ->
    throw(not_possible).
%% select_bin_con([Clause]) -> [{Type,[Clause]}].
%%  Extract clauses for the k_bin_seg constructor. As k_bin_seg
%%  matching can overlap, the k_bin_seg constructors cannot be
%%  reordered, only grouped.
select_bin_con(Cs0) ->
    Cs = [C || C <- Cs0,
               begin
                   Con = clause_con(C),
                   Con =:= k_bin_seg orelse Con =:= k_bin_end
               end],
    select_bin_con_1(Cs).
%% select_bin_con_1([Clause]) -> [{Type,[Clause]}].
%%  Try the k_bin_int optimization; fall back to plain grouping
%%  if it is not possible.
select_bin_con_1(Cs) ->
    try
        %% The usual way to match literals is to first extract the
        %% value to a register, and then compare the register to the
        %% literal value. Extracting the value is good if we need
        %% compare it more than once.
        %%
        %% But we would like to combine the extracting and the
        %% comparing into a single instruction if we know that
        %% a binary segment must contain specific integer value
        %% or the matching will fail, like in this example:
        %%
        %%    <<42:8,...>> ->
        %%    <<42:8,...>> ->
        %%    .
        %%    .
        %%    .
        %%    <<42:8,...>> ->
        %%    <<>> ->
        %%
        %% The first segment must either contain the integer 42
        %% or the binary must end for the match to succeed.
        %%
        %% The way we do it is to replace the generic #k_bin_seg{}
        %% record with a #k_bin_int{} record if all clauses will
        %% select the same literal integer (except for one or more
        %% clauses that will end the binary).
        {BinSegs0,BinEnd} =
            partition(fun (C) ->
                              clause_con(C) =:= k_bin_seg
                      end, Cs),
        BinSegs = select_bin_int(BinSegs0),
        case BinEnd of
            [] -> BinSegs;
            [_|_] -> BinSegs ++ [{k_bin_end,BinEnd}]
        end
    catch
        throw:not_possible ->
            select_bin_con_2(Cs)
    end.
%% select_bin_con_2([Clause]) -> [{Con,[Clause]}].
%%  Group neighbouring clauses with the same constructor without
%%  reordering them.
select_bin_con_2([First|Rest]) ->
    Con = clause_con(First),
    {Same,Other} = splitwith(fun (C) -> clause_con(C) =:= Con end, Rest),
    [{Con,[First|Same]}|select_bin_con_2(Other)];
select_bin_con_2([]) -> [].
%% select_bin_int([Clause]) -> [{k_bin_int,[Clause]}]
%%  If the first pattern in each clause selects the same integer,
%%  rewrite all clauses to use #k_bin_int{} (which will later be
%%  translated to a bs_match_string/4 instruction).
%%
%%  If it is not possible to do this rewrite, a 'not_possible'
%%  exception is thrown.
select_bin_int([#iclause{pats=[#k_bin_seg{anno=A,type=integer,
                                          size=#k_int{val=Bits0}=Sz,unit=U,
                                          flags=Fl,seg=#k_literal{val=Val},
                                          next=N}|Ps]}=C|Cs0])
  when is_integer(Val) ->
    Bits = U * Bits0,
    if
        Bits > 1024 -> throw(not_possible); %Expands the code too much.
        true -> ok
    end,
    select_assert_match_possible(Bits, Val, Fl),
    P = #k_bin_int{anno=A,size=Sz,unit=U,flags=Fl,val=Val,next=N},
    case member(native, Fl) of
        true -> throw(not_possible);
        false -> ok
    end,
    Cs = select_bin_int_1(Cs0, Bits, Fl, Val),
    [{k_bin_int,[C#iclause{pats=[P|Ps]}|Cs]}];
select_bin_int([#iclause{pats=[#k_bin_seg{anno=A,type=utf8,
                                          flags=[unsigned,big]=Fl,
                                          seg=#k_literal{val=Val0},
                                          next=N}|Ps]}=C|Cs0])
  when is_integer(Val0) ->
    %% A literal utf8 segment is equivalent to a fixed-width
    %% integer segment containing its UTF-8 encoding.
    {Val,Bits} = select_utf8(Val0),
    P = #k_bin_int{anno=A,size=#k_int{val=Bits},unit=1,
                   flags=Fl,val=Val,next=N},
    Cs = select_bin_int_1(Cs0, Bits, Fl, Val),
    [{k_bin_int,[C#iclause{pats=[P|Ps]}|Cs]}];
select_bin_int(_) -> throw(not_possible).
%% select_bin_int_1([Clause], Bits, Flags, Val) -> [Clause].
%%  Verify that every remaining clause selects the same integer
%%  (same width, flags and value) and rewrite it to #k_bin_int{};
%%  otherwise throw 'not_possible'.
select_bin_int_1([#iclause{pats=[#k_bin_seg{anno=A,type=integer,
                                            size=#k_int{val=Bits0}=Sz,
                                            unit=U,
                                            flags=Fl,seg=#k_literal{val=Val},
                                            next=N}|Ps]}=C|Cs],
                 Bits, Fl, Val) when is_integer(Val) ->
    if
        Bits0*U =:= Bits -> ok;
        true -> throw(not_possible)
    end,
    P = #k_bin_int{anno=A,size=Sz,unit=U,flags=Fl,val=Val,next=N},
    [C#iclause{pats=[P|Ps]}|select_bin_int_1(Cs, Bits, Fl, Val)];
select_bin_int_1([#iclause{pats=[#k_bin_seg{anno=A,type=utf8,
                                            flags=Fl,
                                            seg=#k_literal{val=Val0},
                                            next=N}|Ps]}=C|Cs],
                 Bits, Fl, Val) when is_integer(Val0) ->
    case select_utf8(Val0) of
        {Val,Bits} -> ok;
        {_,_} -> throw(not_possible)
    end,
    P = #k_bin_int{anno=A,size=#k_int{val=Bits},unit=1,
                   flags=[unsigned,big],val=Val,next=N},
    [C#iclause{pats=[P|Ps]}|select_bin_int_1(Cs, Bits, Fl, Val)];
select_bin_int_1([], _, _, _) -> [];
select_bin_int_1(_, _, _, _) -> throw(not_possible).
%% select_assert_match_possible(Size, Value, Flags) -> ok.
%%  Assert that matching Value against a Size-bit segment with the
%%  given flags can succeed, by constructing the segment and
%%  re-matching it with the bit-syntax evaluator. Throws
%%  'not_possible' if the match would fail.
select_assert_match_possible(Sz, Val, Fs) ->
    EmptyBindings = erl_eval:new_bindings(),
    MatchFun = match_fun(Val),
    EvalFun = fun({integer,_,S}, B) -> {value,S,B} end,
    Expr = [{bin_element,0,{integer,0,Val},{integer,0,Sz},[{unit,1}|Fs]}],
    {value,Bin,EmptyBindings} = eval_bits:expr_grp(Expr, EmptyBindings, EvalFun),
    try
        {match,_} = eval_bits:match_bits(Expr, Bin,
                                         EmptyBindings,
                                         EmptyBindings,
                                         MatchFun, EvalFun),
        ok                    %This is just an assertion (no return value).
    catch
        throw:nomatch ->
            throw(not_possible)
    end.
%% match_fun(Value) -> MatchFun.
%%  Return a match fun for eval_bits that only accepts the given
%%  integer value.
match_fun(Val) ->
    fun(match, {{integer,_,_},NewV,Bs}) when NewV =:= Val ->
            {match,Bs}
    end.
%% select_utf8(CodePoint) -> {Value,BitSize}.
%%  Encode a code point as UTF-8 and return the encoding as an
%%  integer together with its size in bits. Throws 'not_possible'
%%  for invalid code points.
select_utf8(CodePoint) ->
    try
        Encoded = <<CodePoint/utf8>>,
        NumBits = bit_size(Encoded),
        <<Int:NumBits>> = Encoded,
        {Int,NumBits}
    catch
        error:_ ->
            throw(not_possible)
    end.
%% select(Con, [Clause]) -> [Clause].
%%  Return the clauses whose constructor is Con.
select(T, Cs) ->
    lists:filter(fun (C) -> clause_con(C) =:= T end, Cs).
%% match_value([Var], Con, [Clause], Default, State) -> {SelectExpr,State}.
%%  At this point all the clauses have the same constructor; we must
%%  now separate them according to value and build a match for each
%%  group.
match_value(Us0, T, Cs0, Def, St0) ->
    {Us,Cs,St1} = partition_intersection(T, Us0, Cs0, St0),
    Groups = group_value(T, Us, Cs),
    mapfoldl(fun ({Vs,Cls}, S) -> match_clause(Vs, Cls, Def, S) end,
             St1, Groups).
%% partition_intersection(Type, [Var], [Clause], State) ->
%%      {[Var],[Clause],State}.
%%  Partitions a map into two maps with the most common keys to the
%%  first map.
%%      case <M> of
%%          <#{a}>
%%          <#{a,b}>
%%          <#{a,c}>
%%          <#{c}>
%%      end
%%  becomes
%%      case <M,M> of
%%          <#{a}, #{ }>
%%          <#{a}, #{b}>
%%          <#{ }, #{c}>
%%          <#{a}, #{c}>
%%      end
%%  The intention is to group as many keys together as possible and
%%  thus reduce the number of lookups to that key. Only map matches
%%  with at least two clauses are considered.
partition_intersection(k_map, [U|_]=Us0, [_,_|_]=Cs0,St0) ->
    Ps = [clause_val(C) || C <- Cs0],
    case find_key_partition(Ps) of
        no_partition ->
            {Us0,Cs0,St0};
        Ks ->
            %% Duplicate the match variable and split each clause's
            %% first pattern on the shared keys.
            {Cs1,St1} = mapfoldl(fun(#iclause{pats=[Arg|Args]}=C, Sti) ->
                                         {{Arg1,Arg2},St} = partition_key_intersection(Arg, Ks, Sti),
                                         {C#iclause{pats=[Arg1,Arg2|Args]}, St}
                                 end, St0, Cs0),
            {[U|Us0],Cs1,St1}
    end;
partition_intersection(_, Us, Cs, St) ->
    {Us,Cs,St}.
partition_key_intersection(#k_map{es=Pairs}=Map,Ks,St0) ->
F = fun(#k_map_pair{key=Key}) -> member(map_key_clean(Key), Ks) end,
{Ps1,Ps2} = partition(F, Pairs),
{{Map#k_map{es=Ps1},Map#k_map{es=Ps2}},St0};
partition_key_intersection(#ialias{pat=Map}=Alias,Ks,St0) ->
%% only alias one of them
{{Map1,Map2},St1} = partition_key_intersection(Map, Ks, St0),
{{Map1,Alias#ialias{pat=Map2}},St1}.
%% find_key_partition([Keys]) -> [Key] | no_partition.
%%  Only check for the complete intersection of keys and not commonality.
%%  Returns the common keys of all clauses, or 'no_partition' when the
%%  clauses share no key, or when every clause uses exactly the common
%%  keys (splitting would then only add is_map tests for no gain).
find_key_partition(Ps) ->
    KeySets = [sets:from_list(Keys) || Keys <- Ps],
    Common = sets:intersection(KeySets),
    case sets:to_list(Common) of
        [] ->
            no_partition;
        KeyIntersection ->
            %% Don't split if the intersection covers every clause's
            %% full key set.
            case lists:all(fun(Set) -> sets:is_subset(Set, Common) end,
                           KeySets) of
                true -> no_partition;
                false -> KeyIntersection
            end
    end.
%% group_value([Clause]) -> [[Clause]].
%%  Group clauses according to value.  Here we know that
%%  1. Some types are singled valued
%%  2. The clauses in bin_segs cannot be reordered only grouped
%%  3. Other types are disjoint and can be reordered
group_value(k_cons, Us, Cs) -> [{Us,Cs}];       %These are single valued
group_value(k_nil, Us, Cs) -> [{Us,Cs}];
group_value(k_binary, Us, Cs) -> [{Us,Cs}];
group_value(k_bin_end, Us, Cs) -> [{Us,Cs}];
group_value(k_bin_seg, Us, Cs) -> group_bin_seg(Us,Cs);
group_value(k_bin_int, Us, Cs) -> [{Us,Cs}];
group_value(k_map, Us, Cs) -> group_map(Us,Cs);
group_value(_, Us, Cs) ->
    %% group_value(Cs).
    %% Disjoint values: bucket by value using a dict; the resulting
    %% group order comes from dict:fold/3 and is not significant here.
    Cd = foldl(fun (C, Gcs0) -> dict:append(clause_val(C), C, Gcs0) end,
               dict:new(), Cs),
    dict:fold(fun (_, Vcs, Css) -> [{Us,Vcs}|Css] end, [], Cd).

%% Group *adjacent* bin_seg clauses with equal value; order must be
%% preserved, so only neighbouring runs are merged.  Note '==' is used
%% here (numeric comparison of sizes), unlike group_map below.
group_bin_seg(Us, [C1|Cs]) ->
    V1 = clause_val(C1),
    {More,Rest} = splitwith(fun (C) -> clause_val(C) == V1 end, Cs),
    [{Us,[C1|More]}|group_bin_seg(Us,Rest)];
group_bin_seg(_, []) -> [].

%% Same adjacent-run grouping for map clauses, with exact equality on
%% the sorted key lists.
group_map(Us, [C1|Cs]) ->
    V1 = clause_val(C1),
    {More,Rest} = splitwith(fun (C) -> clause_val(C) =:= V1 end, Cs),
    [{Us,[C1|More]}|group_map(Us,Rest)];
group_map(_, []) -> [].
%% Profiling shows that this quadratic implementation account for a big amount
%% of the execution time if there are many values.
% group_value([C|Cs]) ->
% V = clause_val(C),
% Same = [ Cv || Cv <- Cs, clause_val(Cv) == V ], %Same value
% Rest = [ Cv || Cv <- Cs, clause_val(Cv) /= V ], % and all the rest
% [[C|Same]|group_value(Rest)];
% group_value([]) -> [].
%% match_clause([Var], [Clause], Default, State) -> {Clause,State}.
%%  At this point all the clauses have the same "value".  Build one
%%  select clause for this value and continue matching.  Rename
%%  aliases as well.
match_clause([U|Us], [C|_]=Cs0, Def, St0) ->
    Anno = get_kanno(C),
    {Match0,Vs,St1} = get_match(get_con(Cs0), St0),
    Match = sub_size_var(Match0, Cs0),
    {Cs1,St2} = new_clauses(Cs0, U, St1),
    {B,St3} = match(Vs ++ Us, Cs1, Def, St2),
    {#k_val_clause{anno=Anno,val=Match,body=B},St3}.

%% Rename a variable used as a binary-segment size through the input
%% substitution of the first clause; other patterns pass through.
sub_size_var(#k_bin_seg{size=#k_var{name=Name}=Kvar}=BinSeg, [#iclause{isub=Sub}|_]) ->
    BinSeg#k_bin_seg{size=Kvar#k_var{name=get_vsub(Name, Sub)}};
sub_size_var(K, _) -> K.

get_con([C|_]) -> arg_arg(clause_arg(C)).      %Get the constructor
%% get_match(Constructor, State) -> {MatchPattern,[NewVar],State}.
%%  Build the pattern used in the generated select clause for a
%%  constructor, returning the fresh variables that stand in for its
%%  sub-patterns (they become the new match variables).
get_match(#k_cons{}, St0) ->
    {[H,T]=L,St1} = new_vars(2, St0),
    {#k_cons{hd=H,tl=T},L,St1};
get_match(#k_binary{}, St0) ->
    {[V]=Mes,St1} = new_vars(1, St0),
    {#k_binary{segs=V},Mes,St1};
get_match(#k_bin_seg{size=#k_atom{val=all},next={k_bin_end,[]}}=Seg, St0) ->
    %% A final all-sized segment has no continuation to match.
    {[S]=Vars,St1} = new_vars(1, St0),
    {Seg#k_bin_seg{seg=S,next=[]},Vars,St1};
get_match(#k_bin_seg{}=Seg, St0) ->
    {[S,N0],St1} = new_vars(2, St0),
    %% The continuation variable is internal; mark it so usage
    %% analysis does not count it.
    N = set_kanno(N0, [no_usage]),
    {Seg#k_bin_seg{seg=S,next=N},[S,N],St1};
get_match(#k_bin_int{}=BinInt, St0) ->
    {N0,St1} = new_var(St0),
    N = set_kanno(N0, [no_usage]),
    {BinInt#k_bin_int{next=N},[N],St1};
get_match(#k_tuple{es=Es}, St0) ->
    {Mes,St1} = new_vars(length(Es), St0),
    {#k_tuple{es=Mes},Mes,St1};
get_match(#k_map{op=exact,es=Es0}, St0) ->
    %% One fresh variable per pair value; keys stay as they are.
    {Mes,St1} = new_vars(length(Es0), St0),
    {Es,_} = mapfoldl(fun
                          (#k_map_pair{}=Pair, [V|Vs]) ->
                              {Pair#k_map_pair{val=V},Vs}
                      end, Mes, Es0),
    {#k_map{op=exact,es=Es},Mes,St1};
get_match(M, St) ->
    {M,[],St}.
%% new_clauses([Clause], Var, State) -> {[Clause],State}.
%%  Peel the outer constructor off the head pattern of every clause,
%%  promoting its sub-patterns to new head patterns, and record alias
%%  substitutions (alias variable -> matched variable U) in both the
%%  input and output substitutions.
new_clauses(Cs0, U, St) ->
    Cs1 = map(fun (#iclause{isub=Isub0,osub=Osub0,pats=[Arg|As]}=C) ->
                      Head = case arg_arg(Arg) of
                                 #k_cons{hd=H,tl=T} -> [H,T|As];
                                 #k_tuple{es=Es} -> Es ++ As;
                                 #k_binary{segs=E} -> [E|As];
                                 #k_bin_seg{size=#k_atom{val=all},
                                            seg=S,next={k_bin_end,[]}} ->
                                     [S|As];
                                 #k_bin_seg{seg=S,next=N} ->
                                     [S,N|As];
                                 #k_bin_int{next=N} ->
                                     [N|As];
                                 #k_map{op=exact,es=Es} ->
                                     Vals = [V || #k_map_pair{val=V} <- Es],
                                     Vals ++ As;
                                 _Other ->
                                     %% Atomic constructors have no
                                     %% sub-patterns to promote.
                                     As
                             end,
                      Vs = arg_alias(Arg),
                      Osub1 = foldl(fun (#k_var{name=V}, Acc) ->
                                            subst_vsub(V, U#k_var.name, Acc)
                                    end, Osub0, Vs),
                      Isub1 = foldl(fun (#k_var{name=V}, Acc) ->
                                            subst_vsub(V, U#k_var.name, Acc)
                                    end, Isub0, Vs),
                      C#iclause{isub=Isub1,osub=Osub1,pats=Head}
              end, Cs0),
    {Cs1,St}.
%% build_guard([GuardClause]) -> GuardExpr.
%%  An empty guard clause list means the guard can never succeed.
build_guard([]) -> fail;
build_guard(Cs) -> #k_guard{clauses=Cs}.

%% build_select(Var, [ConClause]) -> SelectExpr.
%%  The annotation is copied from the first type clause.
build_select(V, [Tc|_]=Tcs) ->
    copy_anno(#k_select{var=V,types=Tcs}, Tc).

%% build_alt(First, Then) -> AltExpr.
%%  Build an alt, attempt some simple optimisation: drop a 'fail'
%%  branch on either side instead of emitting it.
build_alt(fail, Then) -> Then;
build_alt(First,Then) -> build_alt_1st_no_fail(First, Then).

build_alt_1st_no_fail(First, fail) -> First;
build_alt_1st_no_fail(First, Then) ->
    copy_anno(#k_alt{first=First,then=Then}, First).

%% build_match([MatchVar], MatchExpr) -> Kexpr.
%%  Build a match expr if there is a match; anything that is not an
%%  alt/select/guard is already a plain expression and passes through.
build_match(Us, #k_alt{}=Km) -> copy_anno(#k_match{vars=Us,body=Km}, Km);
build_match(Us, #k_select{}=Km) -> copy_anno(#k_match{vars=Us,body=Km}, Km);
build_match(Us, #k_guard{}=Km) -> copy_anno(#k_match{vars=Us,body=Km}, Km);
build_match(_, Km) -> Km.
%% clause_arg(Clause) -> FirstArg.
%% clause_con(Clause) -> Constructor.
%% clause_val(Clause) -> Value.
%% is_var_clause(Clause) -> boolean().
%%  Accessors for the first (head) pattern of a clause.
clause_arg(#iclause{pats=[Arg|_]}) -> Arg.

clause_con(C) -> arg_con(clause_arg(C)).

clause_val(C) -> arg_val(clause_arg(C), C).

is_var_clause(C) -> clause_con(C) =:= k_var.

%% arg_arg(Arg) -> Arg.
%% arg_alias(Arg) -> Aliases.
%% arg_con(Arg) -> Constructor.
%% arg_val(Arg) -> Value.
%%  These are the basic functions for obtaining fields in an argument.
%%  arg_arg/1 strips an alias wrapper; arg_alias/1 returns its
%%  alias variables (or [] when the pattern is not aliased).
arg_arg(#ialias{pat=Con}) -> Con;
arg_arg(Con) -> Con.

arg_alias(#ialias{vars=As}) -> As;
arg_alias(_Con) -> [].
%% arg_con(Arg) -> Constructor.
%%  Map a (possibly aliased) pattern to the atom naming its
%%  constructor class; used to group clauses for select generation.
arg_con(Arg) ->
    case arg_arg(Arg) of
        #k_literal{} -> k_literal;
        #k_int{} -> k_int;
        #k_float{} -> k_float;
        #k_atom{} -> k_atom;
        #k_nil{} -> k_nil;
        #k_cons{} -> k_cons;
        #k_tuple{} -> k_tuple;
        #k_map{} -> k_map;
        #k_binary{} -> k_binary;
        #k_bin_end{} -> k_bin_end;
        #k_bin_seg{} -> k_bin_seg;
        #k_var{} -> k_var
    end.
%% arg_val(Arg, Clause) -> Value.
%%  The "value" of a pattern within its constructor class: the literal
%%  itself, the tuple arity, the (size,unit,type,flags) signature of a
%%  binary segment, or the sorted clean keys of an exact map match.
arg_val(Arg, C) ->
    case arg_arg(Arg) of
        #k_literal{val=Lit} -> Lit;
        #k_int{val=I} -> I;
        #k_float{val=F} -> F;
        #k_atom{val=A} -> A;
        #k_tuple{es=Es} -> length(Es);
        #k_bin_seg{size=S,unit=U,type=T,flags=Fs} ->
            case S of
                #k_var{name=V} ->
                    %% A variable size must be renamed through the
                    %% clause's input substitution to compare equal
                    %% across clauses.
                    #iclause{isub=Isub} = C,
                    {#k_var{name=get_vsub(V, Isub)},U,T,Fs};
                _ ->
                    %% Strip annotations so equal sizes compare equal.
                    {set_kanno(S, []),U,T,Fs}
            end;
        #k_map{op=exact,es=Es} ->
            lists:sort(fun(A,B) ->
                               %% on the form K :: {'lit' | 'var', term()}
                               %% lit < var as intended
                               erts_internal:cmp_term(A,B) < 0
                       end, [map_key_clean(Key) || #k_map_pair{key=Key} <- Es])
    end.
%% ubody_used_vars(Expr, State) -> [UsedVar]
%%  Return all used variables for the body sequence. Much more
%%  efficient than using ubody/3 if the body contains nested letrecs,
%%  because funs=ignore suppresses regeneration of local functions.
ubody_used_vars(Expr, St) ->
    {_,Used,_} = ubody(Expr, return, St#kern{funs=ignore}),
    Used.
%% ubody(Expr, Break, State) -> {Expr,[UsedVar],State}.
%%  Tag the body sequence with its used variables.  These bodies
%%  either end with a #k_break{}, or with #k_return{} or an expression
%%  which itself can return, #k_enter{}, #k_match{} ... .
ubody(#iset{vars=[],arg=#iletrec{}=Let,body=B0}, Br, St0) ->
    %% An iletrec{} should never be last.
    St = iletrec_funs(Let, St0),
    ubody(B0, Br, St);
ubody(#iset{anno=A,vars=Vs,arg=E0,body=B0}, Br, St0) ->
    %% A binding: tag the bound expression with a break to Vs, then
    %% the rest of the body; used vars are the union minus new names.
    {E1,Eu,St1} = uexpr(E0, {break,Vs}, St0),
    {B1,Bu,St2} = ubody(B0, Br, St1),
    Ns = lit_list_vars(Vs),
    Used = union(Eu, subtract(Bu, Ns)),         %Used external vars
    {#k_seq{anno=#k{us=Used,ns=Ns,a=A},arg=E1,body=B1},Used,St2};
ubody(#ivalues{anno=A,args=As}, return, St) ->
    Au = lit_list_vars(As),
    {#k_return{anno=#k{us=Au,ns=[],a=A},args=As},Au,St};
ubody(#ivalues{anno=A,args=As}, {break,_Vbs}, St) ->
    Au = lit_list_vars(As),
    %% Inside guards, breaks are guard breaks.
    case is_in_guard(St) of
        true ->
            {#k_guard_break{anno=#k{us=Au,ns=[],a=A},args=As},Au,St};
        false ->
            {#k_break{anno=#k{us=Au,ns=[],a=A},args=As},Au,St}
    end;
ubody(E, return, St0) ->
    %% Enterable expressions need no trailing return.
    case is_enter_expr(E) of
        true -> uexpr(E, return, St0);
        false ->
            {Ea,Pa,St1} = force_atomic(E, St0),
            ubody(pre_seq(Pa, #ivalues{args=[Ea]}), return, St1)
    end;
ubody(#ignored{}, {break,_} = Break, St) ->
    %% An ignored result breaks with no values.
    ubody(#ivalues{args=[]}, Break, St);
ubody(E, {break,[_]} = Break, St0) ->
    %%ok = io:fwrite("ubody ~w:~p~n", [?LINE,{E,Br}]),
    %% Exiting expressions need no trailing break.
    case is_exit_expr(E) of
        true -> uexpr(E, return, St0);
        false ->
            {Ea,Pa,St1} = force_atomic(E, St0),
            ubody(pre_seq(Pa, #ivalues{args=[Ea]}), Break, St1)
    end;
ubody(E, {break,Rs}=Break, St0) ->
    %% Multiple break values: bind them to fresh variables first.
    case is_exit_expr(E) of
        true ->
            uexpr(E, return, St0);
        false ->
            {Vs,St1} = new_vars(length(Rs), St0),
            Iset = #iset{vars=Vs,arg=E},
            PreSeq = pre_seq([Iset], #ivalues{args=Vs}),
            ubody(PreSeq, Break, St1)
    end.
%% iletrec_funs(#iletrec{}, State) -> State.
%%  Process the local (letrec-bound) functions: compute the union of
%%  all their free variables, record it per function, then regenerate
%%  the functions so they take the free variables as extra arguments.
iletrec_funs(#iletrec{defs=Fs}, St0) ->
    %% Use union of all free variables.
    %% First just work out free variables for all functions.
    Free = foldl(fun ({_,#ifun{vars=Vs,body=Fb0}}, Free0) ->
                         Fbu = ubody_used_vars(Fb0, St0),
                         Ns = lit_list_vars(Vs),
                         Free1 = subtract(Fbu, Ns),
                         union(Free1, Free0)
                 end, [], Fs),
    FreeVs = make_vars(Free),
    %% Add this free info to State.
    St1 = foldl(fun ({N,#ifun{vars=Vs}}, Lst) ->
                        store_free(N, length(Vs), FreeVs, Lst)
                end, St0, Fs),
    iletrec_funs_gen(Fs, FreeVs, St1).
%% iletrec_funs_gen([Def], [FreeVar], State) -> State.
%%  Now regenerate local functions to use free variable information.
iletrec_funs_gen(_, _, #kern{funs=ignore}=St) ->
    %% Optimization. The ultimate caller is only interested in the used variables,
    %% not the updated state. Makes a difference if there are nested letrecs.
    St;
iletrec_funs_gen(Fs, FreeVs, St) ->
    foldl(fun ({N,#ifun{anno=Fa,vars=Vs,body=Fb0}}, Lst0) ->
                  Arity0 = length(Vs),
                  %% ff tracks the current function for reuse-anno
                  %% handling (see handle_reuse_anno_1/2).
                  {Fb1,_,Lst1} = ubody(Fb0, return, Lst0#kern{ff={N,Arity0}}),
                  Arity = Arity0 + length(FreeVs),
                  Fun = make_fdef(#k{us=[],ns=[],a=Fa}, N, Arity,
                                  Vs++FreeVs, Fb1),
                  Lst1#kern{funs=[Fun|Lst1#kern.funs]}
          end, St, Fs).
%% is_exit_expr(Kexpr) -> boolean().
%%  Test whether Kexpr always exits and never returns.
is_exit_expr(#k_receive_next{}) -> true;
is_exit_expr(_) -> false.

%% is_enter_expr(Kexpr) -> boolean().
%%  Test whether Kexpr is "enterable", i.e. can handle return from
%%  within itself without extra #k_return{}.
is_enter_expr(#k_try{}) -> true;
is_enter_expr(#k_call{}) -> true;
is_enter_expr(#k_match{}) -> true;
is_enter_expr(#k_receive{}) -> true;
is_enter_expr(#k_receive_next{}) -> true;
is_enter_expr(_) -> false.
%% uexpr(Expr, Break, State) -> {Expr,[UsedVar],State}.
%%  Tag an expression with its used variables.
%%  Break = return | {break,[RetVar]}.
uexpr(#k_test{anno=A,op=Op,args=As}=Test, {break,Rs}, St) ->
    [] = Rs,                                    %Sanity check
    Used = union(op_vars(Op), lit_list_vars(As)),
    {Test#k_test{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A}},
     Used,St};
uexpr(#iset{anno=A,vars=Vs,arg=E0,body=B0}, {break,_}=Br, St0) ->
    %% A binding in expression position.
    Ns = lit_list_vars(Vs),
    {E1,Eu,St1} = uexpr(E0, {break,Vs}, St0),
    {B1,Bu,St2} = uexpr(B0, Br, St1),
    Used = union(Eu, subtract(Bu, Ns)),
    {#k_seq{anno=#k{us=Used,ns=Ns,a=A},arg=E1,body=B1},Used,St2};
uexpr(#k_call{anno=A,op=#k_local{name=F,arity=Ar}=Op,args=As0}=Call, Br, St) ->
    %% Local call: append the function's recorded free variables as
    %% extra arguments and bump the arity accordingly.
    Free = get_free(F, Ar, St),
    As1 = As0 ++ Free,                          %Add free variables LAST!
    Used = lit_list_vars(As1),
    {case Br of
         {break,Rs} ->
             Call#k_call{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},
                         op=Op#k_local{arity=Ar + length(Free)},
                         args=As1,ret=Rs};
         return ->
             %% In tail position a call becomes an enter.
             #k_enter{anno=#k{us=Used,ns=[],a=A},
                      op=Op#k_local{arity=Ar + length(Free)},
                      args=As1}
     end,Used,St};
uexpr(#k_call{anno=A,op=Op,args=As}=Call, {break,Rs}, St) ->
    Used = union(op_vars(Op), lit_list_vars(As)),
    {Call#k_call{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},ret=Rs},
     Used,St};
uexpr(#k_call{anno=A,op=Op,args=As}, return, St) ->
    Used = union(op_vars(Op), lit_list_vars(As)),
    {#k_enter{anno=#k{us=Used,ns=[],a=A},op=Op,args=As},
     Used,St};
uexpr(#k_bif{anno=A,op=Op,args=As}=Bif, {break,Rs}, St0) ->
    Used = union(op_vars(Op), lit_list_vars(As)),
    %% BIFs may return more values than the caller asked for; pad the
    %% return variable list (see bif_returns/3).
    {Brs,St1} = bif_returns(Op, Rs, St0),
    {Bif#k_bif{anno=#k{us=Used,ns=lit_list_vars(Brs),a=A},ret=Brs},
     Used,St1};
uexpr(#k_match{anno=A,vars=Vs0,body=B0}, Br, St0) ->
    Vs = handle_reuse_annos(Vs0, St0),
    Rs = break_rets(Br),
    {B1,Bu,St1} = umatch(B0, Br, St0),
    case is_in_guard(St1) of
        true ->
            {#k_guard_match{anno=#k{us=Bu,ns=lit_list_vars(Rs),a=A},
                            vars=Vs,body=B1,ret=Rs},Bu,St1};
        false ->
            {#k_match{anno=#k{us=Bu,ns=lit_list_vars(Rs),a=A},
                      vars=Vs,body=B1,ret=Rs},Bu,St1}
    end;
uexpr(#k_receive{anno=A,var=V,body=B0,timeout=T,action=A0}, Br, St0) ->
    Rs = break_rets(Br),
    Tu = lit_vars(T),                           %Timeout is atomic
    {B1,Bu,St1} = umatch(B0, Br, St0),
    {A1,Au,St2} = ubody(A0, Br, St1),
    %% The message variable V is bound by the receive itself.
    Used = del_element(V#k_var.name, union(Bu, union(Tu, Au))),
    {#k_receive{anno=#k{us=Used,ns=lit_list_vars(Rs),a=A},
                var=V,body=B1,timeout=T,action=A1,ret=Rs},
     Used,St2};
uexpr(#k_receive_accept{anno=A}, _, St) ->
    {#k_receive_accept{anno=#k{us=[],ns=[],a=A}},[],St};
uexpr(#k_receive_next{anno=A}, _, St) ->
    {#k_receive_next{anno=#k{us=[],ns=[],a=A}},[],St};
uexpr(#k_try{anno=A,arg=A0,vars=Vs,body=B0,evars=Evs,handler=H0},
      {break,Rs0}=Br, St0) ->
    case is_in_guard(St0) of
        true ->
            %% In a guard a try is a "protected" expression with a
            %% fixed shape; assert it before converting.
            {[#k_var{name=X}],#k_var{name=X}} = {Vs,B0}, %Assertion.
            #k_atom{val=false} = H0,            %Assertion.
            {A1,Bu,St1} = uexpr(A0, Br, St0),
            {#k_protected{anno=#k{us=Bu,ns=lit_list_vars(Rs0),a=A},
                          arg=A1,ret=Rs0},Bu,St1};
        false ->
            {Avs,St1} = new_vars(length(Vs), St0),
            {A1,Au,St2} = ubody(A0, {break,Avs}, St1),
            {B1,Bu,St3} = ubody(B0, Br, St2),
            {H1,Hu,St4} = ubody(H0, Br, St3),
            Used = union([Au,subtract(Bu, lit_list_vars(Vs)),
                          subtract(Hu, lit_list_vars(Evs))]),
            {#k_try{anno=#k{us=Used,ns=lit_list_vars(Rs0),a=A},
                    arg=A1,vars=Vs,body=B1,evars=Evs,handler=H1,ret=Rs0},
             Used,St4}
    end;
uexpr(#k_try{anno=A,arg=A0,vars=Vs,body=B0,evars=Evs,handler=H0},
      return, St0) ->
    {Avs,St1} = new_vars(length(Vs), St0),      %Need dummy names here
    {A1,Au,St2} = ubody(A0, {break,Avs}, St1),  %Must break to clean up here!
    {B1,Bu,St3} = ubody(B0, return, St2),
    {H1,Hu,St4} = ubody(H0, return, St3),
    Used = union([Au,subtract(Bu, lit_list_vars(Vs)),
                  subtract(Hu, lit_list_vars(Evs))]),
    {#k_try_enter{anno=#k{us=Used,ns=[],a=A},
                  arg=A1,vars=Vs,body=B1,evars=Evs,handler=H1},
     Used,St4};
uexpr(#k_catch{anno=A,body=B0}, {break,Rs0}, St0) ->
    {Rb,St1} = new_var(St0),
    {B1,Bu,St2} = ubody(B0, {break,[Rb]}, St1),
    %% Guarantee ONE return variable.
    {Ns,St3} = new_vars(1 - length(Rs0), St2),
    Rs1 = Rs0 ++ Ns,
    {#k_catch{anno=#k{us=Bu,ns=lit_list_vars(Rs1),a=A},body=B1,ret=Rs1},Bu,St3};
uexpr(#ifun{anno=A,vars=Vs,body=B0}, {break,Rs}, St0) ->
    {B1,Bu,St1} = ubody(B0, return, St0),       %Return out of new function
    Ns = lit_list_vars(Vs),
    Free = subtract(Bu, Ns),                    %Free variables in fun
    Fvs = make_vars(Free),
    Arity = length(Vs) + length(Free),
    {Fname,St} =
        case lists:keyfind(id, 1, A) of
            {id,{_,_,Fname0}} ->
                {Fname0,St1};
            false ->
                %% No id annotation. Must invent a fun name.
                new_fun_name(St1)
        end,
    %% The fun becomes a lifted local function plus a make_fun BIF
    %% call that closes over the free variables.
    Fun = make_fdef(#k{us=[],ns=[],a=A}, Fname, Arity, Vs++Fvs, B1),
    {#k_bif{anno=#k{us=Free,ns=lit_list_vars(Rs),a=A},
            op=#k_internal{name=make_fun,arity=length(Free)+2},
            args=[#k_atom{val=Fname},#k_int{val=Arity}|Fvs],
            ret=Rs},
     Free,add_local_function(Fun, St)};
uexpr(Lit, {break,Rs0}, St0) ->
    %% Transform literals to puts here.
    %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,Lit]),
    Used = lit_vars(Lit),
    {Rs,St1} = ensure_return_vars(Rs0, St0),
    {#k_put{anno=#k{us=Used,ns=lit_list_vars(Rs),a=get_kanno(Lit)},
            arg=Lit,ret=Rs},Used,St1}.
%% add_local_function(Fdef, State) -> State.
%%  Collect a lifted local function, unless function collection is
%%  suppressed (funs=ignore; see ubody_used_vars/2).
add_local_function(_, #kern{funs=ignore}=St) -> St;
add_local_function(F, #kern{funs=Funs}=St) -> St#kern{funs=[F|Funs]}.

%% Make a #k_fdef{}, making sure that the body is always a #k_match{}.
make_fdef(Anno, Name, Arity, Vs, #k_match{}=Body) ->
    #k_fdef{anno=Anno,func=Name,arity=Arity,vars=Vs,body=Body};
make_fdef(Anno, Name, Arity, Vs, Body) ->
    %% Wrap a non-match body in a trivial match, reusing its
    %% used-variable annotation.
    Ka = get_kanno(Body),
    Match = #k_match{anno=#k{us=Ka#k.us,ns=[],a=Ka#k.a},
                     vars=Vs,body=Body,ret=[]},
    #k_fdef{anno=Anno,func=Name,arity=Arity,vars=Vs,body=Match}.
%% handle_reuse_annos([#k_var{}], State) -> State.
%%  In general, it is only safe to reuse a variable for a match context
%%  if the original value of the variable will no longer be needed.
%%
%%  If a variable has been bound in an outer letrec and is therefore
%%  free in the current function, the variable may still be used.
%%  We don't bother to check whether the variable is actually used,
%%  but simply clears the 'reuse_for_context' annotation for any variable
%%  that is free.
handle_reuse_annos(Vs, St) ->
    [handle_reuse_anno(V, St) || V <- Vs].

handle_reuse_anno(#k_var{anno=A}=V, St) ->
    case member(reuse_for_context, A) of
        false -> V;
        true -> handle_reuse_anno_1(V, St)
    end.

%% Clear 'reuse_for_context' when the variable occurs among the free
%% variables of the current function (tracked in #kern.ff).
handle_reuse_anno_1(#k_var{anno=Anno,name=Vname}=V, #kern{ff={F,A}}=St) ->
    FreeVs = get_free(F, A, St),
    case keymember(Vname, #k_var.name, FreeVs) of
        true -> V#k_var{anno=Anno--[reuse_for_context]};
        false -> V
    end;
handle_reuse_anno_1(V, _St) -> V.
%% get_free(Name, Arity, State) -> [Free].
%%  Look up the recorded free variables of local function Name/Arity;
%%  defaults to [] when nothing has been stored yet.
get_free(F, A, #kern{free=FreeMap}) ->
    maps:get({F,A}, FreeMap, []).
%% store_free(Name, Arity, [Free], State) -> State.
%%  Record the free variables of local function Name/Arity.
store_free(F, A, Free, #kern{free=FreeMap}=St) ->
    St#kern{free=FreeMap#{{F,A} => Free}}.
%% break_rets(Break) -> [RetVar].
%%  The return variables of a break context; none for tail position.
break_rets({break,Rs}) -> Rs;
break_rets(return) -> [].

%% bif_returns(Op, [Ret], State) -> {[Ret],State}.
%%  Pad the return-variable list with fresh variables so that it has
%%  exactly as many elements as the BIF produces values.
bif_returns(#k_remote{mod=M,name=N,arity=Ar}, Rs, St0) ->
    %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,{M,N,Ar,Rs}]),
    {Ns,St1} = new_vars(bif_vals(M, N, Ar) - length(Rs), St0),
    {Rs ++ Ns,St1};
bif_returns(#k_internal{name=N,arity=Ar}, Rs, St0) ->
    %%ok = io:fwrite("uexpr ~w:~p~n", [?LINE,{N,Ar,Rs}]),
    {Ns,St1} = new_vars(bif_vals(N, Ar) - length(Rs), St0),
    {Rs ++ Ns,St1}.

%% ensure_return_vars([Ret], State) -> {[Ret],State}.
%%  Guarantee exactly one return variable, inventing one if missing.
ensure_return_vars([], St) -> new_vars(1, St);
ensure_return_vars([_]=Rs, St) -> {Rs,St}.
%% umatch(Match, Break, State) -> {Match,[UsedVar],State}.
%%  Tag a match expression with its used variables.
umatch(#k_alt{anno=A,first=F0,then=T0}, Br, St0) ->
    {F1,Fu,St1} = umatch(F0, Br, St0),
    {T1,Tu,St2} = umatch(T0, Br, St1),
    Used = union(Fu, Tu),
    {#k_alt{anno=#k{us=Used,ns=[],a=A},first=F1,then=T1},
     Used,St2};
umatch(#k_select{anno=A,var=V0,types=Ts0}, Br, St0) ->
    V = handle_reuse_anno(V0, St0),
    {Ts1,Tus,St1} = umatch_list(Ts0, Br, St0),
    %% The selected variable only counts as used unless it is marked
    %% no_usage (an internal continuation variable).
    Used = case member(no_usage, get_kanno(V)) of
               true -> Tus;
               false -> add_element(V#k_var.name, Tus)
           end,
    {#k_select{anno=#k{us=Used,ns=[],a=A},var=V,types=Ts1},Used,St1};
umatch(#k_type_clause{anno=A,type=T,values=Vs0}, Br, St0) ->
    {Vs1,Vus,St1} = umatch_list(Vs0, Br, St0),
    {#k_type_clause{anno=#k{us=Vus,ns=[],a=A},type=T,values=Vs1},Vus,St1};
umatch(#k_val_clause{anno=A,val=P0,body=B0}, Br, St0) ->
    %% The pattern both uses variables (sizes, map keys) and binds new
    %% ones; bound names are subtracted from the body's usage.
    {U0,Ps} = pat_vars(P0),
    P = set_kanno(P0, #k{us=U0,ns=Ps,a=get_kanno(P0)}),
    {B1,Bu,St1} = umatch(B0, Br, St0),
    Used = union(U0, subtract(Bu, Ps)),
    {#k_val_clause{anno=#k{us=Used,ns=[],a=A},val=P,body=B1},
     Used,St1};
umatch(#k_guard{anno=A,clauses=Gs0}, Br, St0) ->
    {Gs1,Gus,St1} = umatch_list(Gs0, Br, St0),
    {#k_guard{anno=#k{us=Gus,ns=[],a=A},clauses=Gs1},Gus,St1};
umatch(#k_guard_clause{anno=A,guard=G0,body=B0}, Br, St0) ->
    %%ok = io:fwrite("~w: ~p~n", [?LINE,G0]),
    %% The guard itself is processed with guard_refc bumped so that
    %% is_in_guard/1 holds inside it.
    {G1,Gu,St1} = uexpr(G0, {break,[]},
                        St0#kern{guard_refc=St0#kern.guard_refc+1}),
    %%ok = io:fwrite("~w: ~p~n", [?LINE,G1]),
    {B1,Bu,St2} = umatch(B0, Br, St1#kern{guard_refc=St1#kern.guard_refc-1}),
    Used = union(Gu, Bu),
    {#k_guard_clause{anno=#k{us=Used,ns=[],a=A},guard=G1,body=B1},Used,St2};
umatch(B0, Br, St0) -> ubody(B0, Br, St0).
%% umatch_list([Match], Break, State) -> {[Match],[UsedVar],State}.
%%  Tag each match in the list, accumulating the union of all used
%%  variables; foldr preserves the original order of the matches.
umatch_list(Ms0, Br, St) ->
    foldr(fun (M, {MsAcc,UsedAcc,StAcc0}) ->
                  {M1,Mu,StAcc} = umatch(M, Br, StAcc0),
                  {[M1|MsAcc],union(Mu, UsedAcc),StAcc}
          end, {[],[],St}, Ms0).
%% op_vars(Op) -> [VarName].
%%  Variables referenced by a call/BIF operator; for remote calls the
%%  module and function positions may themselves be variables.
op_vars(#k_remote{mod=Mod,name=Name}) ->
    ordsets:from_list([V || #k_var{name=V} <- [Mod,Name]]);
op_vars(#k_internal{}) -> [];
op_vars(Atomic) -> lit_vars(Atomic).

%% lit_vars(Literal) -> [VarName].
%%  Return the variables in a literal.
lit_vars(#k_var{name=N}) -> [N];
lit_vars(#k_int{}) -> [];
lit_vars(#k_float{}) -> [];
lit_vars(#k_atom{}) -> [];
%%lit_vars(#k_char{}) -> [];
lit_vars(#k_nil{}) -> [];
lit_vars(#k_cons{hd=H,tl=T}) ->
    union(lit_vars(H), lit_vars(T));
lit_vars(#k_map{var=Var,es=Es}) ->
    lit_list_vars([Var|Es]);
lit_vars(#k_map_pair{key=K,val=V}) ->
    union(lit_vars(K), lit_vars(V));
lit_vars(#k_binary{segs=V}) -> lit_vars(V);
lit_vars(#k_bin_end{}) -> [];
lit_vars(#k_bin_seg{size=Size,seg=S,next=N}) ->
    union(lit_vars(Size), union(lit_vars(S), lit_vars(N)));
lit_vars(#k_tuple{es=Es}) ->
    lit_list_vars(Es);
lit_vars(#k_literal{}) -> [].

%% Union of the variables of a list of literals.
lit_list_vars(Ps) ->
    foldl(fun (P, Vs) -> union(lit_vars(P), Vs) end, [], Ps).
%% pat_vars(Pattern) -> {[UsedVarName],[NewVarName]}.
%%  Return variables in a pattern.  All variables are new variables
%%  except those in the size field of binary segments.
%%  and map_pair keys
pat_vars(#k_var{name=N}) -> {[],[N]};
%%pat_vars(#k_char{}) -> {[],[]};
pat_vars(#k_literal{}) -> {[],[]};
pat_vars(#k_int{}) -> {[],[]};
pat_vars(#k_float{}) -> {[],[]};
pat_vars(#k_atom{}) -> {[],[]};
pat_vars(#k_nil{}) -> {[],[]};
pat_vars(#k_cons{hd=H,tl=T}) ->
    pat_list_vars([H,T]);
pat_vars(#k_binary{segs=V}) ->
    pat_vars(V);
pat_vars(#k_bin_seg{size=Size,seg=S}) ->
    %% Size variables are *used*, not bound, by the pattern.
    {U1,New} = pat_list_vars([S]),
    {[],U2} = pat_vars(Size),
    {union(U1, U2),New};
pat_vars(#k_bin_int{size=Size}) ->
    {[],U} = pat_vars(Size),
    {U,[]};
pat_vars(#k_bin_end{}) -> {[],[]};
pat_vars(#k_tuple{es=Es}) ->
    pat_list_vars(Es);
pat_vars(#k_map{es=Es}) ->
    pat_list_vars(Es);
pat_vars(#k_map_pair{key=K,val=V}) ->
    %% Key variables are used; value variables are new bindings.
    {U1,New} = pat_vars(V),
    {[], U2} = pat_vars(K),
    {union(U1,U2),New}.

%% Accumulate {Used,New} over a list of patterns.
pat_list_vars(Ps) ->
    foldl(fun (P, {Used0,New0}) ->
                  {Used,New} = pat_vars(P),
                  {union(Used0, Used),union(New0, New)} end,
          {[],[]}, Ps).
%% integers(N, M) -> [integer()].
%%  List of the integers in the closed interval [N,M]; the empty list
%%  when N > M.  (lists:seq/2 is not used because it raises badarg
%%  when M < N - 1, while this function must return [].)
integers(N, M) when N > M -> [];
integers(N, M) -> [N | integers(N + 1, M)].
%% is_in_guard(State) -> true|false.
%%  True while processing inside a guard (guard_refc is incremented on
%%  entry to a guard clause and decremented on exit).
is_in_guard(#kern{guard_refc=Refc}) ->
    Refc > 0.
%%%
%%% Handling of errors and warnings.
%%%

%% error() enumerates every warning/error term this module produces.
%% Fix: 'bad_segment_size' was missing from the type even though
%% format_error/1 has a clause for it — added for spec/Dialyzer
%% consistency.
-type error() :: 'bad_call' | 'nomatch_shadow' | {'nomatch_shadow', integer()}
               | 'bad_segment_size'.

%% Format an error/warning term as a human-readable string.
-spec format_error(error()) -> string().

format_error({nomatch_shadow,Line}) ->
    M = io_lib:format("this clause cannot match because a previous clause at line ~p "
                      "always matches", [Line]),
    lists:flatten(M);
format_error(nomatch_shadow) ->
    "this clause cannot match because a previous clause always matches";
format_error(bad_call) ->
    "invalid module and/or function name; this call will always fail";
format_error(bad_segment_size) ->
    "binary construction will fail because of a type mismatch".
%% add_warning(Line | none, Term, Anno, State) -> State.
%%  Accumulate a compiler warning tagged with the source file taken
%%  from the annotation.  Note: the 'none' clause behaves identically
%%  to the general clause with Line = none; it is kept for clarity.
add_warning(none, Term, Anno, #kern{ws=Ws}=St) ->
    File = get_file(Anno),
    St#kern{ws=[{File,[{none,?MODULE,Term}]}|Ws]};
add_warning(Line, Term, Anno, #kern{ws=Ws}=St) ->
    File = get_file(Anno),
    St#kern{ws=[{File,[{Line,?MODULE,Term}]}|Ws]}.
%% is_compiler_generated(Kexpr) -> boolean().
%%  True when the expression's annotation marks it as generated by the
%%  compiler (suppresses warnings for such code).
is_compiler_generated(Ke) ->
    Anno = get_kanno(Ke),
    member(compiler_generated, Anno).
%%%
%%% Copyright 2011, Boundary
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%%-------------------------------------------------------------------
%%% File: bear.erl
%%% @author <NAME> <<EMAIL>>
%%% @doc
%%% statistics functions for calculating statistics based on an id and a list of values
%%% @end
%%%------------------------------------------------------------------
-module(bear).

%% NOTE(review): -compile(export_all) makes the explicit export list
%% below redundant and exposes every internal helper; kept as-is to
%% avoid breaking unknown external callers, but consider removing it.
-compile([export_all]).

-export([
         get_statistics/1,
         get_statistics/2
        ]).

%% Number of bins used when building a histogram.
-define(HIST_BINS, 10).

%% Minimum number of samples required to compute real statistics;
%% below this threshold all statistics are reported as 0.0.
-define(STATS_MIN, 5).

%% First-pass accumulator: count, sum, sum of squares, sum of
%% inverses, sum of logs, max and min of the values seen so far.
-record(scan_result, {n=0, sumX=0, sumXX=0, sumInv=0, sumLog, max, min}).

%% Second-pass accumulator: sums of the squared/cubed/fourth-power
%% deviations from the mean (for variance, skewness and kurtosis).
-record(scan_result2, {x2=0, x3=0, x4=0}).
%% get_statistics([number()]) -> proplist().
%%  Compute descriptive statistics for a list of values.  With fewer
%%  than ?STATS_MIN samples every statistic is reported as 0.0 (same
%%  proplist shape, so callers can pattern match either way).
get_statistics(Values) when length(Values) < ?STATS_MIN ->
    [
     {min, 0.0},
     {max, 0.0},
     {arithmetic_mean, 0.0},
     {geometric_mean, 0.0},
     {harmonic_mean, 0.0},
     {median, 0.0},
     {variance, 0.0},
     {standard_deviation, 0.0},
     {skewness, 0.0},
     {kurtosis, 0.0},
     {percentile,
      [
       {50, 0.0},
       {75, 0.0},
       {90, 0.0},
       {95, 0.0},
       {99, 0.0},
       {999, 0.0}
      ]
     },
     {histogram, [{0, 0}]},
     {n, 0}
    ];
get_statistics(Values) ->
    %% Two scans: the first collects sums/extremes, the second collects
    %% central moments about the mean derived from the first scan.
    Scan_res = scan_values(Values),
    Scan_res2 = scan_values2(Values, Scan_res),
    Variance = variance(Scan_res, Scan_res2),
    SortedValues = lists:sort(Values),
    [
     {min, Scan_res#scan_result.min},
     {max, Scan_res#scan_result.max},
     {arithmetic_mean, arithmetic_mean(Scan_res)},
     {geometric_mean, geometric_mean(Scan_res)},
     {harmonic_mean, harmonic_mean(Scan_res)},
     {median, percentile(SortedValues, Scan_res, 0.5)},
     {variance, Variance},
     {standard_deviation, std_deviation(Scan_res, Scan_res2)},
     {skewness, skewness(Scan_res, Scan_res2)},
     {kurtosis, kurtosis(Scan_res, Scan_res2)},
     {percentile,
      [
       {50, percentile(SortedValues, Scan_res, 0.50)},
       {75, percentile(SortedValues, Scan_res, 0.75)},
       {90, percentile(SortedValues, Scan_res, 0.90)},
       {95, percentile(SortedValues, Scan_res, 0.95)},
       {99, percentile(SortedValues, Scan_res, 0.99)},
       {999, percentile(SortedValues, Scan_res, 0.999)}
      ]
     },
     {histogram, get_histogram(Values, Scan_res, Scan_res2)},
     {n, Scan_res#scan_result.n}
    ].
%% get_statistics([number()], [number()]) -> proplist() | 0.0.
%%  Correlation statistics between two equal-length series.
%%  NOTE(review): the guard clauses return the bare float 0.0 while
%%  the success clause returns a proplist — an inconsistent return
%%  type, kept as-is for compatibility with existing callers.
get_statistics(Values, _) when length(Values) < ?STATS_MIN ->
    0.0;
get_statistics(_, Values) when length(Values) < ?STATS_MIN ->
    0.0;
get_statistics(Values1, Values2) when length(Values1) /= length(Values2) ->
    0.0;
get_statistics(Values1, Values2) ->
    [
     {covariance, get_covariance(Values1, Values2)},
     {tau, get_kendall_correlation(Values1, Values2)},
     {rho, get_pearson_correlation(Values1, Values2)},
     {r, get_spearman_correlation(Values1, Values2)}
    ].
%%%===================================================================
%%% Internal functions
%%%===================================================================

%% scan_values([number()]) -> #scan_result{}.
%%  Single pass over the values accumulating count, sum, sum of
%%  squares, sum of logs, sum of inverses, max and min.  The first
%%  element seeds the accumulator so max/min start defined.
scan_values([X|Values]) ->
    scan_values(Values, #scan_result{n=1, sumX=X, sumXX=X*X,
                                     sumLog=math_log(X),
                                     max=X, min=X, sumInv=inverse(X)}).

scan_values([X|Values],
            #scan_result{n=N, sumX=SumX, sumXX=SumXX, sumLog=SumLog,
                         max=Max, min=Min, sumInv=SumInv}=Acc) ->
    scan_values(Values,
                Acc#scan_result{n=N+1, sumX=SumX+X, sumXX=SumXX+X*X,
                                sumLog=SumLog+math_log(X),
                                max=max(X,Max), min=min(X,Min),
                                sumInv=SumInv+inverse(X)});
scan_values([], Acc) ->
    Acc.
%% Second scan pass: accumulates the 2nd, 3rd and 4th central moments
%% (sums of powers of deviations from the mean found in pass one).
scan_values2(Values, #scan_result{n=N, sumX=SumX}) ->
    scan_values2(Values, SumX/N, #scan_result2{}).

scan_values2([X|Values], Mean, #scan_result2{x2=X2, x3=X3, x4=X4}=Acc) ->
    Diff = X-Mean,
    Diff2 = Diff*Diff,
    Diff3 = Diff2*Diff,
    %% Diff^4 computed as (Diff^2)^2 to reuse the squared value.
    Diff4 = Diff2*Diff2,
    scan_values2(Values, Mean, Acc#scan_result2{x2=X2+Diff2, x3=X3+Diff3,
                                                x4=X4+Diff4});
scan_values2([], _, Acc) ->
    Acc.
%% Arithmetic mean: sum / count.
arithmetic_mean(#scan_result{n=N, sumX=Sum}) ->
    Sum/N.

%% Geometric mean via exp(mean(log(X))). Note math_log/1 maps 0 -> 1,
%% so zero-valued samples skew this result instead of crashing.
geometric_mean(#scan_result{n=N, sumLog=SumLog}) ->
    math:exp(SumLog/N).
%% Harmonic mean: N / sum(1/X). Returns 0 when the accumulated sum of
%% inverses is zero (inverse/1 maps 0 -> 0, so an all-zero sample
%% yields sumInv == 0 and the mean would otherwise divide by zero).
%%
%% Fixed: the original matched the integer pattern `sumInv=0', which
%% missed a float 0.0 sum produced by cancellation of positive and
%% negative inverses and then crashed with badarith in the second
%% clause. The arithmetic `==' guard catches both representations.
harmonic_mean(#scan_result{sumInv = Sum}) when Sum == 0 ->
    %% Protect against divide by 0 if we have all 0 values
    0;
harmonic_mean(#scan_result{n = N, sumInv = Sum}) ->
    N / Sum.
%% Nearest-rank percentile over a pre-sorted sample. The 1-based rank
%% is round(Percentile * N); note that for Percentile * N < 0.5 this
%% yields 0 and lists:nth/2 would crash — presumably the ?STATS_MIN
%% size guard in the callers keeps N large enough (confirm).
percentile(SortedValues, #scan_result{n=N}, Percentile)
  when is_list(SortedValues) ->
    Element = round(Percentile * N),
    lists:nth(Element, SortedValues).
%% Two pass variance
%% Results match those given by the 'var' function in R
%% Sample variance (N-1 denominator) from the precomputed second
%% central moment. Crashes with badarith when N == 1.
variance(#scan_result{n=N}, #scan_result2{x2=X2}) ->
    X2/(N-1).
%% Sample standard deviation: square root of the two-pass variance.
std_deviation(Scan_res, Scan_res2) ->
    math:sqrt(variance(Scan_res, Scan_res2)).
%% http://en.wikipedia.org/wiki/Skewness
%%
%% skewness results should match this R function:
%% skewness <- function(x) {
%% m3 <- mean((x - mean(x))^3)
%% skew <- m3 / (sd(x)^3)
%% skew
%% }
%% Third standardized moment: (m3) / sd^3. Returns 0.0 for
%% zero-variance samples to avoid dividing by zero.
skewness(#scan_result{n=N}=Scan_res, #scan_result2{x3=X3}=Scan_res2) ->
    case math:pow(std_deviation(Scan_res,Scan_res2), 3) of
        0.0 ->
            0.0; %% Is this really the correct thing to do here?
        Else ->
            (X3/N)/Else
    end.
%% http://en.wikipedia.org/wiki/Kurtosis
%%
%% results should match this R function:
%% kurtosis <- function(x) {
%% m4 <- mean((x - mean(x))^4)
%% kurt <- m4 / (sd(x)^4) - 3
%% kurt
%% }
%% Excess kurtosis: (m4) / sd^4 - 3. Returns 0.0 for zero-variance
%% samples to avoid dividing by zero.
kurtosis(#scan_result{n=N}=Scan_res, #scan_result2{x4=X4}=Scan_res2) ->
    case math:pow(std_deviation(Scan_res,Scan_res2), 4) of
        0.0 ->
            0.0; %% Is this really the correct thing to do here?
        Else ->
            ((X4/N)/Else) - 3
    end.
%% Build a histogram as a sorted [{UpperBound, Count}] list: derive
%% bin upper bounds from min/max/stddev/count, seed every bin with a
%% zero count, then bump the first bin covering each value.
get_histogram(Values, Scan_res, Scan_res2) ->
    Bins = get_hist_bins(Scan_res#scan_result.min,
                         Scan_res#scan_result.max,
                         std_deviation(Scan_res, Scan_res2),
                         length(Values)
                        ),
    Dict = lists:foldl(fun (Value, Dict) ->
                               update_bin(Value, Bins, Dict)
                       end,
                       dict:from_list([{Bin, 0} || Bin <- Bins]),
                       Values),
    lists:sort(dict:to_list(Dict)).
%% Increment the counter of the first bin whose upper bound is >=
%% Value; Bins must be sorted ascending. Exhausting the bin list
%% raises function_clause (cannot happen for bins produced by
%% get_hist_bins/4, whose last bin covers the maximum).
update_bin(Value, [Bound | RemainingBins], Counters) ->
    case Value =< Bound of
        true ->
            dict:update_counter(Bound, 1, Counters);
        false ->
            update_bin(Value, RemainingBins, Counters)
    end.
%% two pass covariance
%% (http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Covariance)
%% matches results given by excel's 'covar' function
%% Two-pass population covariance (divides by N, matching Excel's
%% COVAR). The guard clauses return the float 0.0 rather than an
%% error term for undersized or length-mismatched inputs, mirroring
%% the other bivariate helpers in this module.
get_covariance(Values, _) when length(Values) < ?STATS_MIN ->
    0.0;
get_covariance(_, Values) when length(Values) < ?STATS_MIN ->
    0.0;
get_covariance(Values1, Values2) when length(Values1) /= length(Values2) ->
    0.0;
get_covariance(Values1, Values2) ->
    %% Pass one: sums and the (shared) element count.
    {SumX, SumY, N} = foldl2(fun (X, Y, {SumX, SumY, N}) ->
                                     {SumX+X, SumY+Y, N+1}
                             end, {0,0,0}, Values1, Values2),
    MeanX = SumX/N,
    MeanY = SumY/N,
    %% Pass two: sum of products of deviations from the means.
    Sum = foldl2(fun (X, Y, Sum) ->
                         Sum + ((X - MeanX) * (Y - MeanY))
                 end,
                 0, Values1, Values2),
    Sum/N.
%% Guarded entry point for Kendall's tau; same 0.0-on-bad-input
%% convention as the other bivariate helpers. The implementation is
%% reached via a module-qualified call (bear:), presumably so the
%% exported, latest-loaded version is used — confirm intent.
get_kendall_correlation(Values, _) when length(Values) < ?STATS_MIN ->
    0.0;
get_kendall_correlation(_, Values) when length(Values) < ?STATS_MIN ->
    0.0;
get_kendall_correlation(Values1, Values2) when length(Values1) /= length(Values2) ->
    0.0;
get_kendall_correlation(Values1, Values2) ->
    bear:kendall_correlation(Values1, Values2).
%% Spearman's rho over fractional (tie-averaged) ranks:
%%   r = 1 - 6 * sum(d^2) / (n^3 - n)
%% where d is the per-position rank difference. NOTE(review): this is
%% the simplified no-ties formula applied to tie-averaged ranks, so it
%% is approximate when ties are present — confirm acceptability.
get_spearman_correlation(Values, _) when length(Values) < ?STATS_MIN ->
    0.0;
get_spearman_correlation(_, Values) when length(Values) < ?STATS_MIN ->
    0.0;
get_spearman_correlation(Values1, Values2) when length(Values1) /= length(Values2) ->
    0.0;
get_spearman_correlation(Values1, Values2) ->
    TR1 = ranks_of(Values1),
    TR2 = ranks_of(Values2),
    Numerator = 6 * foldl2(fun (X, Y, Acc) ->
                                   Diff = X-Y,
                                   Acc + Diff*Diff
                           end, 0, TR1,TR2),
    N = length(Values1),
    Denominator = math:pow(N,3)-N,
    1-(Numerator/Denominator).
%% Fractional ranking: ranks values against their descending sort,
%% ties sharing the average of their positional ranks, then maps the
%% ranks back onto the original element order via a gb_tree lookup.
ranks_of(Values) when is_list(Values) ->
    [Fst|Rest] = revsort(Values),
    TRs = ranks_of(Rest, [], 2, Fst, 1),
    %% Built by prepending over a descending scan, so TRs comes out
    %% ascending by value as gb_trees:from_orddict/1 requires.
    Dict = gb_trees:from_orddict(TRs),
    L = lists:foldl(fun (Val, Acc) ->
                            Rank = gb_trees:get(Val, Dict),
                            [Rank|Acc]
                    end, [], Values),
    lists:reverse(L).

%% Run-length scan of the descending values: N is the next positional
%% rank, P the value of the current tie run, S the rank where the run
%% started; each completed run emits {Value, (S+N-1)/2}.
ranks_of([E|Es],Acc, N, E, S) ->
    ranks_of(Es, Acc, N+1, E, S);
ranks_of([E|Es], Acc, N, P, S) ->
    ranks_of(Es,[{P,(S+N-1)/2}|Acc], N+1, E, N);
ranks_of([], Acc, N, P, S) ->
    [{P,(S+N-1)/2}|Acc].
%% Pearson's product-moment correlation via single-pass running sums.
%% Returns 0.0 for undersized/mismatched inputs (guards) and for a
%% zero denominator (either sample has zero variance).
get_pearson_correlation(Values, _) when length(Values) < ?STATS_MIN ->
    0.0;
get_pearson_correlation(_, Values) when length(Values) < ?STATS_MIN ->
    0.0;
get_pearson_correlation(Values1, Values2) when length(Values1) /= length(Values2) ->
    0.0;
get_pearson_correlation(Values1, Values2) ->
    {SumX, SumY, SumXX, SumYY, SumXY, N} =
        foldl2(fun (X,Y,{SX, SY, SXX, SYY, SXY, N}) ->
                       {SX+X, SY+Y, SXX+X*X, SYY+Y*Y, SXY+X*Y, N+1}
               end, {0,0,0,0,0,0}, Values1, Values2),
    Numer = (N*SumXY) - (SumX * SumY),
    case math:sqrt(((N*SumXX)-(SumX*SumX)) * ((N*SumYY)-(SumY*SumY))) of
        0.0 ->
            0.0; %% Is this really the correct thing to do here?
        Denom ->
            Numer/Denom
    end.
%% Sort a list into descending order (ascending sort, then reverse —
%% this keeps the exact element order lists:sort/1 produces for
%% equal-comparing terms, just reversed).
revsort(List) ->
    Ascending = lists:sort(List),
    lists:reverse(Ascending).
%% Foldl over two lists
%% Fold Fun over two lists in lock-step, threading the accumulator.
%% Lists of unequal length crash with function_clause once the
%% shorter one is exhausted.
foldl2(Fun, Acc0, [Head1 | Tail1], [Head2 | Tail2]) when is_function(Fun, 3) ->
    Acc1 = Fun(Head1, Head2, Acc0),
    foldl2(Fun, Acc1, Tail1, Tail2);
foldl2(_Fun, FinalAcc, [], []) ->
    FinalAcc.
%% wrapper for math:log/1 to avoid dividing by zero
%% Natural logarithm wrapper that maps the integer 0 to 1 instead of
%% crashing; used when accumulating sumLog over samples that may
%% contain zeroes. (The substitute value 1 is historical bear
%% behaviour, not ln(0); a float 0.0 still reaches math:log/1.)
math_log(X) ->
    case X of
        0 -> 1;
        _ -> math:log(X)
    end.
%% wrapper for calculating inverse to avoid dividing by zero
%% Multiplicative inverse that maps the integer 0 to 0 instead of
%% crashing; used when accumulating sumInv for the harmonic mean.
inverse(X) ->
    case X of
        0 -> 0;
        _ -> 1 / X
    end.
%% Compute histogram bin upper bounds, offset so the first bin starts
%% at Min. Falls back to a single bin at Max when rounding collapses
%% the candidate bounds to one (or zero) distinct values.
get_hist_bins(Min, Max, StdDev, Count) ->
    BinWidth = get_bin_width(StdDev, Count),
    BinCount = get_bin_count(Min, Max, BinWidth),
    case get_bin_list(BinWidth, BinCount, []) of
        List when length(List) =< 1 ->
            [Max];
        Bins ->
            %% add Min to Bins
            [Bin + Min || Bin <- Bins]
    end.
%% Accumulate bin bounds 1*Width, 2*Width, ... (each rounded to a
%% "nice" value) until Bins bounds exist, then usort to drop
%% duplicates introduced by rounding. Note length(Acc) in the guard
%% makes this O(Bins^2); tolerable since bin counts stay small.
get_bin_list(Width, Bins, Acc) when Bins > length(Acc) ->
    Bin = ((length(Acc) + 1) * Width ),
    get_bin_list(Width, Bins, [round_bin(Bin)| Acc]);
get_bin_list(_, _, Acc) ->
    lists:usort(Acc).
%% Round a bin bound up to a "nice" multiple of a power of ten one
%% order of magnitude below the bound itself (minimum base of 1).
round_bin(Bin) ->
    Magnitude = round(math:log10(Bin) - 1),
    Base =
        case erlang:trunc(math:pow(10, Magnitude)) of
            0 -> 1;
            Nonzero -> Nonzero
        end,
    round_bin(Bin, Base).

%% Round Bin up to the next multiple of Base; identity when Bin is
%% already a multiple.
round_bin(Bin, Base) ->
    case Bin rem Base of
        0 -> Bin;
        Remainder -> Bin + Base - Remainder
    end.
% the following is up for debate as far as what the best method
% of choosing bin counts and widths. these seem to work *good enough*
% in my testing
% bin width based on Sturges
% http://www.jstor.org/pss/2965501
%% Bin width heuristic: 3.5 * sd / n^(1/3) (Scott's-rule shape, per
%% the comment block above), clamped to a minimum width of 1 so a
%% zero standard deviation cannot produce zero-width bins.
get_bin_width(StdDev, Count) ->
    Width = round((3.5 * StdDev) / math:pow(Count, 0.3333333)),
    erlang:max(Width, 1).
% based on the simple ceiling function at
% http://en.wikipedia.org/wiki/Histograms#Number_of_bins_and_width
% with a modification to attempt to get one bin beyond the max value
%% Number of bins needed to span [Min, Max] at the given width, plus
%% one extra so the final bin reaches past the maximum value.
get_bin_count(Minimum, Maximum, Width) ->
    Span = Maximum - Minimum,
    1 + round(Span / Width).
%% taken from http://crunchyd.com/scutil/
%% All code here is MIT Licensed
%% http://scutil.com/license.html
% seems to match the value returned by the 'cor' (method="kendal") R function
% http://en.wikipedia.org/wiki/Kendall_tau_rank_correlation_coefficient
%% Kendall rank correlation: ranks both lists, orders B's ranks by
%% A's, counts smaller-to-the-right pairs P via kendall_right_of/2,
%% then computes -((4P / (N(N-1))) - 1). Matches R's
%% cor(method="kendall") per the comment block above.
kendall_correlation(List1, List2) when is_list(List1), is_list(List2) ->
    {RA,_} = lists:unzip(tied_ordered_ranking(List1)),
    {RB,_} = lists:unzip(tied_ordered_ranking(List2)),
    Ordering = lists:keysort(1, lists:zip(RA,RB)),
    {_,OrdB} = lists:unzip(Ordering),
    N = length(List1),
    P = lists:sum(kendall_right_of(OrdB, [])),
    -(( (4*P) / (N * (N - 1))) - 1).
%% Pair each value with a provisional 1-based rank, largest value
%% first; ties receive distinct consecutive ranks at this stage (they
%% are averaged later by tied_rank_worker/3).
simple_ranking(List) when is_list(List) ->
    Descending = lists:reverse(lists:sort(List)),
    Positions = lists:seq(1, length(List)),
    lists:zip(Positions, Descending).
%% Ranking with ties averaged: runs the tie-merging state machine
%% over the provisional ranks from simple_ranking/1.
tied_ranking(List) ->
    tied_rank_worker(simple_ranking(List), [], no_prev_value).
%% Reorder the tied ranking so ranks appear in the original element
%% order of List. Uses keysearch + list subtraction per element, so
%% this is O(n^2); acceptable for the sample sizes involved.
tied_ordered_ranking(List) when is_list(List) ->
    tied_ordered_ranking(List, tied_ranking(List), []).

tied_ordered_ranking([], [], Work) ->
    lists:reverse(Work);
tied_ordered_ranking([Front|Rem], Ranks, Work) ->
    %% Find Front's {Rank, Value} pair, consume it from Ranks (so
    %% duplicate values take successive entries), and emit it.
    {value,Item} = lists:keysearch(Front,2,Ranks),
    {IRank,Front} = Item,
    tied_ordered_ranking(Rem, Ranks--[Item], [{IRank,Front}]++Work).
%% For each element, count how many elements strictly smaller than it
%% appear to its right; returns the counts in input order.
kendall_right_of([], Counts) ->
    lists:reverse(Counts);
kendall_right_of([Head | Rest], Counts) ->
    Count = kendall_right_of_item(Head, Rest),
    kendall_right_of(Rest, [Count | Counts]).

%% Count the elements of Rem strictly less than B.
kendall_right_of_item(B, Rem) ->
    length([R || R <- Rem, R < B]).
%% Flush a tie run: emit one {AverageRank, Value} entry per position
%% recorded in FoundAt, prepended to the work list.
tied_add_prev(Work, {FoundAt, NewValue}) ->
    RunLength = length(FoundAt),
    AverageRank = lists:sum(FoundAt) / RunLength,
    Entries = lists:duplicate(RunLength, {AverageRank, NewValue}),
    Entries ++ Work.
%% State machine over simple_ranking/1 output (descending values with
%% provisional ranks). PrevValue tracks the current tie run as
%% {RanksSoFar, Value}; when the value changes, the run is flushed
%% via tied_add_prev/2, and the final run is flushed at end of input.
%% NOTE(review): an empty input with no_prev_value would crash inside
%% tied_add_prev/2 — callers appear to always pass non-empty
%% rankings; confirm.
tied_rank_worker([], Work, PrevValue) ->
    lists:reverse(tied_add_prev(Work, PrevValue));
tied_rank_worker([Item|Remainder], Work, PrevValue) ->
    case PrevValue of
        no_prev_value ->
            %% First item starts the initial run.
            {BaseRank,BaseVal} = Item,
            tied_rank_worker(Remainder, Work, {[BaseRank],BaseVal});
        {FoundAt,OldVal} ->
            case Item of
                {Id,OldVal} ->
                    %% Same value: extend the current tie run.
                    tied_rank_worker(Remainder, Work, {[Id]++FoundAt,OldVal});
                {Id,NewVal} ->
                    %% Value changed: flush the run, start a new one.
                    tied_rank_worker(Remainder, tied_add_prev(Work, PrevValue), {[Id],NewVal})
            end
    end.
%%%-------------------------------------------------------------------
%%% @doc
%%% A lazily evaluated lists module. This module provides an iterator
%%% type which is an opaque record wrapped around a list continuation.
%%% These iterators are then used to provide a version of the stdlib
%%% `lists' functions which only evaluate elements of the iterator
%%% when demanded.
%%%
%%% Several simple iterator constructors are provided as well as a
%%% general purpose `unfold/2' constructor. Conversion to built in
%%% types for lists and maps as the `from_list/1', `to_list/1',
%%% `from_map/1' and `to_map/1' functions.
%%%
%%% Iterators are evaluated using the `next/1' function to evaluate
%%% the next element of the iterator. The output of the next function
%%% is a lazy list: either an improper list of the element and the
%%% continuation or an empty list. Many additional iterator
%%% transformation and evaluation functions are also present.
%%%
%%% In general, iterators are not expected to be pure functions.
%%% Iterator transformations and evaluations should all evaluate each
%%% element exactly once per output iterator (though not all elements
%%% may be returned, depending on the function). This implies that
%%% impure iterators should not be used with functions which return
%%% multiple iterators if all iterators are to be evaluated.
%%%
%%% Many of the functions here are unsafe to use with infinite
%%% iterators and will either fail to return on the initial call or on
%%% the first attempt to evaluate an element of the iterator. Read the
%%% documentation carefully when working with such iterators.
%%%
%%% The interface for this module attempts to follow the `lists'
%%% behaviour as closely as possible. Guidelines for how past and
%%% future translation is performed is as follows:
%%%
%%% <ul>
%%% <li>Any input lists are changed to expect iterators.</li>
%%% <li>Any output lists are changed to be iterators.</li>
%%% <li>Elements of input iterators should be evaluated exactly
%%% once per output iterator.</li>
%%% <li>Any numeric counts for repetition are changed to allow
%%% 'infinity' as values and to be able to return infinite
%%% iterators.</li>
%%% <li>On error, the same exception should be raised, though it may
%%% not be raised until the triggering element of an iterator is
%%% evaluated.</li>
%%% <li>Iteration evaluation behaviour is documented.</li>
%%% </ul>
%%%
%%% As few functions outside of `lists' have been implemented as
%%% possible in order to have the best chance of keeping the namespace
%%% clean for future additions to the `lists' module. New
%%% functionality is instead implemented in the `llists_utils' module.
%%% @end
%%%-------------------------------------------------------------------
-module(llists).
-record(iterator, {next}).
-type iterator() :: iterator(any()).
-opaque iterator(Over) :: #iterator{next :: fun(() -> lazy_list(Over))}.
-type tuple_iterator() :: iterator(tuple()).
-type lazy_list(Over) :: nonempty_improper_list(Over, iterator(Over)) | [].
-type accumulator() :: any().
-type combine(A, B, Out) :: fun((A, B) -> Out).
-type combine3(A, B, C, Out) :: fun((A, B, C) -> Out).
-type compare(A, B) :: fun((A, B) -> boolean()).
-type filtermap(A, B) :: fun((A) -> boolean() | {true, B}).
-type fold(Elem, AccIn, AccOut) :: fun((Elem, AccIn) -> AccOut).
-type map(A, B) :: fun((A) -> B).
-type mapfold(A, AccIn, B, AccOut) :: fun((A, AccIn) -> {B, AccOut}).
-type predicate(Elem) :: fun((Elem) -> boolean()).
-type unfold(Elem, AccIn, AccOut) :: fun((AccIn) -> {Elem, AccOut} | none).
%% API
% Iterator construction.
-export([
is_iterator/1,
from_list/1,
from_map/1,
unfold/2,
duplicate/2,
seq/2,
seq/3,
% Iterator utilities.
next/1,
hd/1,
tl/1,
append/1,
append/2,
delete/2,
droplast/1,
dropwhile/2,
filter/2,
filtermap/2,
flatlength/1,
flatmap/2,
flatten/1,
flatten/2,
join/2,
keydelete/3,
keymap/3,
keymerge/3,
keyreplace/4,
keysort/2,
keystore/4,
keytake/3,
map/2,
merge/1,
merge/2,
merge/3,
merge3/3,
nthtail/2,
reverse/1,
reverse/2,
sublist/2,
sublist/3,
takewhile/2,
partition/2,
sort/1,
sort/2,
split/2,
splitwith/2,
subtract/2,
ukeymerge/3,
ukeysort/2,
umerge/1,
umerge/2,
umerge/3,
umerge3/3,
unzip/1,
unzip3/1,
usort/1,
usort/2,
zip/2,
zip3/3,
zipwith/3,
zipwith3/4,
% Iterator evaluation.
to_list/1,
to_map/1,
length/1,
all/2,
any/2,
concat/1,
foldl/3,
foldr/3,
foreach/2,
keyfind/3,
keymember/3,
keysearch/3,
last/1,
mapfoldl/3,
mapfoldr/3,
max/1,
member/2,
min/1,
nth/2,
prefix/2,
search/2,
suffix/2,
sum/1
]).
-export_type([
iterator/1,
lazy_list/1,
combine/3,
combine3/4,
compare/2,
filtermap/2,
fold/3,
map/2,
mapfold/4,
predicate/1,
unfold/3
]).
-compile(
{no_auto_import, [
hd/1,
length/1,
max/1,
min/1,
tl/1
]}
).
%%%===================================================================
%%% API - Iterator Construction
%%%===================================================================
%% @doc
%% Tests if the given `Candidate' is an iterator, returns `true' if it
%% and `false' otherwise.
%% @end
-spec is_iterator(Candidate) -> boolean() when Candidate :: any().
%% Recognition is purely structural: any #iterator{} record matches.
is_iterator(#iterator{}) ->
    true;
is_iterator(_) ->
    false.
%% @doc
%% Construct a new iterator from an existing list. Each element of the
%% list will be returned in order by the returned iterator.
%% @end
-spec from_list(List) -> Iterator when
    List :: list(Elem),
    Iterator :: iterator(Elem).
from_list(List) when is_list(List) ->
    %% The remaining tail of the list serves as the unfold
    %% accumulator; no copy of the list is made.
    unfold(
        fun
            ([]) -> none;
            ([Head | Tail]) -> {Head, Tail}
        end,
        List
    ).
%% @doc
%% Construct a new iterator from an existing map. Each `{Key, Value}'
%% tuple of the map will be returned in an arbitrary order by the
%% returned iterator.
%% @end
-spec from_map(Map) -> Iterator when
    Map :: maps:map(Key, Value),
    Iterator :: iterator({Key, Value}).
from_map(Map) when is_map(Map) ->
    %% The maps:iterator/1 handle is the unfold accumulator; element
    %% order is whatever maps:next/1 yields (arbitrary).
    unfold(
        fun(MapIterator) ->
            case maps:next(MapIterator) of
                none ->
                    none;
                {Key, Value, NextIterator} ->
                    {{Key, Value}, NextIterator}
            end
        end,
        maps:iterator(Map)
    ).
%% @doc
%% Construct a new iterator from a `Fun(AccIn)' function and an
%% initial accumulator value `Acc0'. When an element is demanded of
%% the iterator, `Fun' will be invoked with the current accumulator to
%% produce a value. `Fun' is expected to return a tuple of
%% `{Elem, AccOut}': the element to produce and the new accumulator
%% value. If iteration is complete, `Fun' should return `none'.
%% @end
-spec unfold(Fun, Acc0) -> Iterator when
    Fun :: unfold(Elem, AccIn :: Acc0 | AccOut, AccOut),
    Acc0 :: accumulator(),
    Iterator :: iterator(Elem).
unfold(Fun, Acc) when is_function(Fun, 1) ->
    %% Fun is not invoked until next/1 forces the iterator, so
    %% construction itself never evaluates an element.
    new(fun() ->
        case Fun(Acc) of
            {Elem, NewAcc} ->
                [Elem | unfold(Fun, NewAcc)];
            none ->
                []
        end
    end).
%% @doc
%% Returns an iterator containing `N' copies of term `Elem'. If `N' is
%% `infinity' iterator will return infinite copies of `Elem'.
%% @end
-spec duplicate(N, Elem) -> Iterator when
    N :: infinity | non_neg_integer(),
    Elem :: any(),
    Iterator :: iterator(Elem).
%% Infinite variant: a self-referential iterator with no accumulator
%% to exhaust.
duplicate(infinity, Elem) ->
    new(fun() ->
        [Elem | duplicate(infinity, Elem)]
    end);
%% Bounded variant: counts down from N to 0.
duplicate(N, Elem) when is_integer(N), N >= 0 ->
    unfold(
        fun
            (0) -> none;
            (Count) -> {Elem, Count - 1}
        end,
        N
    ).
%% @see seq/3
-spec seq(From, To) -> Iterator when
    From :: integer(),
    To :: integer(),
    Iterator :: iterator(integer()).
%% Delegates to seq/3 with the default increment of 1.
seq(From, To) ->
    seq(From, To, 1).
%% @doc
%% Returns an iterator over a sequence of integers that starts with
%% `From' and contains the successive results of adding `Incr' to the
%% previous element, until `To' is reached or passed (in the latter
%% case, `To' is not an element of the sequence). `Incr' defaults to
%% 1.
%%
%% Failures:
%% <ul>
%% <li>If `To < From - Incr' and `Incr > 0'.</li>
%% <li>If `To > From - Incr' and `Incr < 0'.</li>
%% <li>If `Incr =:= 0' and `From =/= To'.</li>
%% </ul>
%% The following equalities hold for all sequences:
%% ```
%% length(lists:seq(From, To)) =:= To - From + 1
%% length(lists:seq(From, To, Incr)) =:= (To - From + Incr) div Incr
%% '''
%% @end
-spec seq(From, To, Incr) -> Iterator when
    From :: integer(),
    To :: infinity | '-infinity' | integer(),
    Incr :: integer(),
    Iterator :: iterator(integer()).
%% Unbounded ascending / descending sequences never terminate.
seq(From, infinity, Incr) when is_integer(From), is_integer(Incr), Incr > 0 ->
    unfold(fun(Acc) -> {Acc, Acc + Incr} end, From);
seq(From, '-infinity', Incr) when is_integer(From), is_integer(Incr), Incr < 0 ->
    unfold(fun(Acc) -> {Acc, Acc + Incr} end, From);
%% Bounded sequences: the `From - Incr =< To' / `>= To' guards admit
%% valid empty sequences while rejecting impossible ones, matching
%% the failure conditions documented above.
seq(From, To, Incr) when
    is_integer(From), is_integer(To), is_integer(Incr), Incr > 0, From - Incr =< To
->
    unfold(
        fun
            (Acc) when Acc > To -> none;
            (Acc) -> {Acc, Acc + Incr}
        end,
        From
    );
seq(From, To, Incr) when
    is_integer(From), is_integer(To), is_integer(Incr), Incr < 0, From - Incr >= To
->
    unfold(
        fun
            (Acc) when Acc < To -> none;
            (Acc) -> {Acc, Acc + Incr}
        end,
        From
    );
%% Zero increment is only legal when From =:= To: a singleton.
seq(From, From, 0) when is_integer(From) ->
    from_list([From]).
%%%===================================================================
%%% API - Iterator Utilities
%%%===================================================================
%% @doc
%% Demand an element from `Iterator'. Will return either an improper
%% list containing the next element and an iterator as a continuation,
%% or an empty list if iteration is complete.
%%
%% Examples:
%%
%% ```
%% > llists:next(llists:seq(1, 5)).
%% [1|{iterator,#Fun<llists.1.134155648>}]
%% > llists:next(llists:from_list([])).
%% []
%% '''
%% @end
-spec next(Iterator) -> LazyList when
    Iterator :: iterator(Elem),
    LazyList :: lazy_list(Elem).
next(#iterator{next = Next}) ->
    %% Forcing the continuation evaluates exactly one element.
    Next().
%% @doc
%% Returns the head of `Iterator', that is, the first element, for
%% example:
%%
%% ```
%% > llists:hd(llists:seq(1, 5)).
%% 1
%% '''
%%
%% Failure: `badarg' if `Iterator' is empty.
%% @end
-spec hd(Iterator) -> Elem when Iterator :: iterator(Elem).
hd(#iterator{} = Iterator) ->
    %% erlang:hd/1 raises badarg when the forced lazy list is [].
    erlang:hd(next(Iterator)).
%% @doc
%% Returns the tail of `Iterator1', that is, the iterator minus the
%% first element, for example:
%%
%% ```
%% > llists:to_list(
%% > llists:tl(
%% > llists:from_list([geesties, guilies, beasties]))).
%% [guilies, beasties]
%% '''
%%
%% Failure: `badarg' if `Iterator1' is empty.
%% @end
-spec tl(Iterator1) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
tl(#iterator{} = Iterator) ->
    %% Forces one element; erlang:tl/1 raises badarg on [].
    erlang:tl(next(Iterator)).
%% @doc
%% Returns an iterator in which all the subiterators of
%% `IteratorOfIterators' have been appended.
%% @end
-spec append(IteratorOfIterators) -> Iterator when
    IteratorOfIterators :: iterator(iterator()),
    Iterator :: iterator().
append(#iterator{} = Iterator) ->
    %% State is {Inner, Outer}: the subiterator currently being
    %% drained and the outer iterator of remaining subiterators.
    %% Either slot may hold an unforced iterator or an already-forced
    %% lazy list; the named fun re-dispatches after each forcing
    %% step until an element can be emitted.
    unfold(
        fun
            Next({[], []}) ->
                none;
            Next({#iterator{} = HeadIterator, IofI}) ->
                Next({next(HeadIterator), IofI});
            Next({[Head | HeadIterator], IofI}) ->
                {Head, {HeadIterator, IofI}};
            Next({[], #iterator{} = IofI}) ->
                Next({[], next(IofI)});
            Next({[], [Head | IofI]}) ->
                Next({Head, IofI})
        end,
        {[], Iterator}
    ).
%% @doc
%% Returns a new iterator `Iterator3', which is made from the elements
%% of `Iterator1' followed by the elements of `Iterator2'.
%% @end
-spec append(Iterator1, Iterator2) -> Iterator3 when
    Iterator1 :: iterator(Elem1),
    Iterator2 :: iterator(Elem2),
    Iterator3 :: iterator(Elem1 | Elem2).
append(#iterator{} = Iterator1, #iterator{} = Iterator2) ->
    %% Special case of append/1 over a two-element outer iterator.
    append(from_list([Iterator1, Iterator2])).
%% @doc
%% Returns a copy of `Iterator1' where the first element matching
%% `Elem' is deleted, if there is such an element.
%% @end
-spec delete(Elem1, Iterator1) -> Iterator2 when
    Elem1 :: any(),
    Iterator1 :: iterator(Elem2),
    Iterator2 :: iterator(Elem2),
    Elem2 :: any().
delete(Elem, #iterator{} = Iterator) ->
    new(fun() ->
        case next(Iterator) of
            %% Bound-variable match: removes the first element that
            %% is exactly equal (=:=) to Elem and passes the rest of
            %% the iterator through untouched.
            [Elem | #iterator{} = NextIterator] ->
                next(NextIterator);
            [Other | #iterator{} = NextIterator] ->
                [Other | delete(Elem, NextIterator)];
            [] ->
                []
        end
    end).
%% @doc
%% Drops the last element of a `Iterator1'. The `Iterator1' is to be
%% non-empty, otherwise the function crashes with a `function_clause'.
%%
%% Evaluates one element further in the iterator than the current
%% value.
%% @end
-spec droplast(Iterator1) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
droplast(#iterator{} = Iterator) ->
    % Notice the missing case for [] here, which forces the expected
    % error on an empty list.
    %% Works by one-element lookahead: an element is only emitted
    %% once its successor is known to exist.
    unfold(
        fun
            ([Elem | #iterator{} = FoldIterator]) ->
                case next(FoldIterator) of
                    [] ->
                        none;
                    [NextElem | #iterator{} = NextIterator] ->
                        {Elem, [NextElem | NextIterator]}
                end;
            %% NOTE(review): this clause appears unreachable for
            %% well-formed iterators, whose lazy-list tails are
            %% always #iterator{} records — confirm before removing.
            ([Elem | Tail]) ->
                {Elem, Tail}
        end,
        next(Iterator)
    ).
%% @doc
%% Drops elements `Elem' from `Iterator1' while `Pred(Elem)' returns
%% true and returns the remaining iterator.
%% @end
-spec dropwhile(Pred, Iterator1) -> Iterator2 when
    Pred :: predicate(Elem),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
dropwhile(Pred, #iterator{} = Iterator) when is_function(Pred, 1) ->
    new(fun() ->
        case next(Iterator) of
            [Elem | #iterator{} = NextIterator] ->
                case Pred(Elem) of
                    true ->
                        %% Keep skipping until the predicate fails.
                        next(dropwhile(Pred, NextIterator));
                    false ->
                        %% The failing element becomes the new head;
                        %% it is not evaluated a second time.
                        [Elem | NextIterator]
                end;
            [] ->
                []
        end
    end).
%% @doc
%% `Filtered' is an iterator of all elements `Elem' in `Iterator1' for
%% which `Pred(Elem)' returns `true'.
%% @end
-spec filter(Pred, Iterator1) -> Iterator2 when
    Pred :: predicate(Elem),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
filter(Pred, #iterator{} = Iterator) when is_function(Pred, 1) ->
    %% Lazy filter: each next/1 on the result evaluates source
    %% elements until one satisfies Pred (or the source ends).
    new(fun() ->
        case next(Iterator) of
            [Elem | #iterator{} = NextIterator] ->
                case Pred(Elem) of
                    true ->
                        [Elem | filter(Pred, NextIterator)];
                    false ->
                        next(filter(Pred, NextIterator))
                end;
            [] ->
                []
        end
    end).
%% @doc
%% Calls `Fun(Elem)' on successive elements `Elem' of `Iterator1'.
%% `Fun/1' must return either a Boolean or a tuple `{true, Value}'.
%% The function returns the iterator of elements for which `Fun'
%% returns a new value, where a value of `true' is synonymous with
%% `{true, Elem}'.
%%
%% That is, filtermap behaves as if it had been defined as follows,
%% except that the iterator is not fully evaluated before elements are
%% returned:
%%
%% ```
%% filtermap(Fun, Iterator) ->
%% llists:foldr(fun(Elem, Acc) ->
%% case Fun(Elem) of
%% false -> Acc;
%% true -> [Elem|Acc];
%% {true,Value} -> [Value|Acc]
%% end
%% end, [], Iterator).
%% '''
%%
%% Example:
%% ```
%% > llists:to_list(
%% > llists:filtermap(
%% > fun(X) -> case X rem 2 of 0 -> {true, X div 2}; _ -> false end end,
%% > llists:seq(1, 5))).
%% [1,2]
%% '''
%% @end
-spec filtermap(Fun, Iterator1) -> Iterator2 when
    Fun :: filtermap(Elem, Value),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem | Value).
filtermap(Fun, #iterator{} = Iterator) when is_function(Fun, 1) ->
    %% Lazy combined filter+map: `true' keeps the element as-is,
    %% `{true, Value}' substitutes Value, `false' skips and forces
    %% the next source element.
    new(fun() ->
        case next(Iterator) of
            [Elem | #iterator{} = NextIterator] ->
                case Fun(Elem) of
                    false ->
                        next(filtermap(Fun, NextIterator));
                    true ->
                        [Elem | filtermap(Fun, NextIterator)];
                    {true, Value} ->
                        [Value | filtermap(Fun, NextIterator)]
                end;
            [] ->
                []
        end
    end).
%% @doc
%% Equivalent to `length(flatten(DeepIterator))'.
%% @end
-spec flatlength(DeepIterator) -> Length when
    DeepIterator :: iterator(any() | iterator()),
    Length :: non_neg_integer().
flatlength(#iterator{} = DeepIterator) ->
    %% Fully evaluates the iterator; never returns for infinite ones.
    length(flatten(DeepIterator)).
%% @doc
%% Takes a function from `A's to iterators of `B's, and an iterator of
%% `A's (`Iterator1') and produces an iterator of `B's (`Iterator2')
%% by applying the function to every element in `Iterator1' and
%% appending the resulting iterators.
%%
%% That is, flatmap behaves as if it had been defined as follows:
%%
%% ```
%% llists:flatmap(Fun, Iterator) ->
%% llists:append(llists:map(Fun, Iterator)).
%% '''
%%
%% Example:
%% ```
%% > llists:to_list(
%% > llists:flatmap(
%% > fun(X)->llists:from_list([X,X]) end,
%% > llists:from_list([a,b,c]))).
%% [a,a,b,b,c,c]
%% '''
%% @end
-spec flatmap(Fun, Iterator1) -> Iterator2 when
    Fun :: map(A, iterator(B)),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B).
flatmap(Fun, #iterator{} = Iterator) when is_function(Fun, 1) ->
    %% Lazy map followed by lazy concatenation of the result
    %% iterators, exactly as documented above.
    llists:append(llists:map(Fun, Iterator)).
%% @doc
%% Returns a flattened version of `DeepIterator'.
%% @end
-spec flatten(DeepIterator) -> Iterator when
    DeepIterator :: iterator(any() | iterator()),
    Iterator :: iterator().
flatten(#iterator{} = DeepIterator) ->
    %% Depth-first walk driven by an explicit stack of frames, each
    %% frame an unforced iterator or an already-forced lazy list.
    %% Nested iterator elements are pushed as new frames; plain
    %% elements are emitted.
    unfold(
        fun
            Next([]) ->
                none;
            Next([#iterator{} = Iterator | Tail]) ->
                Next([next(Iterator) | Tail]);
            Next([[] | Tail]) ->
                Next(Tail);
            Next([[#iterator{} = NestedIterator | #iterator{} = Iterator] | Tail]) ->
                Next([NestedIterator, Iterator | Tail]);
            Next([[Elem | #iterator{} = Iterator] | Tail]) ->
                {Elem, [Iterator | Tail]}
        end,
        [DeepIterator]
    ).
%% @doc
%% Returns a flattened version of `DeepIterator' with tail `Tail'
%% appended.
%% @end
-spec flatten(DeepIterator, TailIterator) -> Iterator when
    DeepIterator :: iterator(any() | iterator()),
    TailIterator :: iterator(),
    Iterator :: iterator().
flatten(#iterator{} = DeepIterator, #iterator{} = Tail) ->
    %% Tail is appended as-is: it is NOT itself flattened.
    append(flatten(DeepIterator), Tail).
%% @doc
%% Inserts `Sep' between each element in `Iterator1'. Has no effect on
%% an empty iterator or on a singleton iterator. For example:
%%
%% ```
%% > llists:to_list(llists:join(x, llists:from_list([a,b,c]))).
%% [a,x,b,x,c]
%% > llists:to_list(llists:join(x, llists:from_list([a]))).
%% [a]
%% > llists:to_list(llists:join(x, llists:from_list([]))).
%% []
%% '''
%%
%% Evaluates one element further in the iterator than the current
%% value.
%% @end
-spec join(Sep, Iterator1) -> Iterator2 when
    Sep :: any(),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Sep | Elem).
join(Sep, #iterator{} = Iterator) ->
    %% One-element lookahead decides whether a separator follows the
    %% element about to be emitted; when it does, the accumulator is
    %% pre-loaded as [Sep, NextElem | NextIterator] so both are
    %% emitted before forcing the source again.
    unfold(
        fun
            ([]) ->
                none;
            ([Elem | #iterator{} = FoldIterator]) ->
                case next(FoldIterator) of
                    [] ->
                        {Elem, []};
                    [NextElem | #iterator{} = NextIterator] ->
                        {Elem, [Sep, NextElem | NextIterator]}
                end;
            %% Emits the pre-loaded separator/lookahead pair above.
            ([Elem | Tail]) ->
                {Elem, Tail}
        end,
        next(Iterator)
    ).
%% @doc
%% Returns a copy of `TupleIterator1' where the first occurrence of a tuple
%% whose `N'th element compares equal to `Key' is deleted, if there is
%% such a tuple.
%% @end
-spec keydelete(Key, N, TupleIterator1) -> TupleIterator2 when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    TupleIterator2 :: iterator(Elem).
keydelete(Key, N, #iterator{} = Iterator) when N > 0 ->
    new(fun() ->
        case next(Iterator) of
            %% `==' gives arithmetic equality (1 matches 1.0),
            %% matching the lists:keydelete/3 comparison semantics.
            [Elem | #iterator{} = NextIterator] when Key == element(N, Elem) ->
                next(NextIterator);
            [Elem | #iterator{} = NextIterator] ->
                [Elem | keydelete(Key, N, NextIterator)];
            [] ->
                []
        end
    end).
%% @doc
%% Returns an iterator of tuples where, for each tuple in
%% `TupleIterator1', the `N'th element `Term1' of the tuple has been
%% replaced with the result of calling `Fun(Term1)'.
%%
%% Examples:
%% ```
%% > Fun = fun(Atom) -> atom_to_list(Atom) end.
%% #Fun<erl_eval.6.10732646>
%% 2> llists:to_list(
%% 2> llists:keymap(
%% 2> Fun,
%% 2> 2,
%% 2> llists:from_list([{name,jane,22},{name,lizzie,20},{name,lydia,15}]))).
%% [{name,"jane",22},{name,"lizzie",20},{name,"lydia",15}]
%% '''
%% @end
%% Fixed spec: `N' was typed as the bare atom `pos_integer', which
%% Dialyzer reads as a singleton atom type rather than the built-in
%% pos_integer() integer type.
-spec keymap(Fun, N, TupleIterator1) -> TupleIterator2 when
    Fun :: map(Term1 :: any(), Term2 :: any()),
    N :: pos_integer(),
    TupleIterator1 :: tuple_iterator(),
    TupleIterator2 :: tuple_iterator().
keymap(Fun, N, #iterator{} = Iterator) when is_function(Fun, 1), N > 0 ->
    %% Lazily rewrite the N'th element of each tuple via map/2.
    map(
        fun(Tuple) ->
            Modified = Fun(element(N, Tuple)),
            setelement(N, Tuple, Modified)
        end,
        Iterator
    ).
%% @doc
%% Returns the sorted iterator formed by merging `TupleIterator1' and
%% `TupleIterator2'. The merge is performed on the `N'th element of
%% each tuple. Both `TupleIterator1' and `TupleIterator2' must be
%% key-sorted before evaluating this function. When two tuples compare
%% equal, the tuple from `TupleIterator1' is picked before the tuple
%% from `TupleIterator2'.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec keymerge(N, TupleIterator1, TupleIterator2) -> TupleIterator3 when
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem1),
    TupleIterator2 :: iterator(Elem2),
    TupleIterator3 :: iterator(Elem1 | Elem2),
    Elem1 :: tuple(),
    Elem2 :: tuple().
keymerge(N, #iterator{} = Iterator1, #iterator{} = Iterator2) when N > 0 ->
    %% The =< comparison makes the merge stable: on equal keys the
    %% element from Iterator1 wins, as documented above.
    Compare = fun(A, B) -> element(N, A) =< element(N, B) end,
    fmerge(Compare, [Iterator1, Iterator2]).
%% @doc
%% Returns a copy of `TupleIterator1' where the first occurrence of a
%% tuple `T' whose `N'th element compares equal to `Key' is replaced with
%% `NewTuple', if there is such a tuple `T'.
%% @end
-spec keyreplace(Key, N, TupleIterator1, NewTuple) -> TupleIterator2 when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    NewTuple :: tuple(),
    TupleIterator2 :: iterator(Elem | NewTuple).
keyreplace(Key, N, #iterator{} = Iterator, NewTuple) when N > 0, is_tuple(NewTuple) ->
    new(fun() ->
        case next(Iterator) of
            %% Arithmetic key comparison (==), as in lists:keyreplace/4.
            [Elem | #iterator{} = NextIterator] when Key == element(N, Elem) ->
                [NewTuple | NextIterator];
            [Elem | #iterator{} = NextIterator] ->
                [Elem | keyreplace(Key, N, NextIterator, NewTuple)];
            [] ->
                []
        end
    end).
%% @doc
%% Returns an iterator containing the sorted elements of iterator
%% `TupleIterator1'. Sorting is performed on the `N'th element of the
%% tuples. The sort is stable.
%%
%% The iterator is fully evaluated, infinite iterators will never
%% return.
%% @end
-spec keysort(N, TupleIterator1) -> TupleIterator2 when
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    TupleIterator2 :: iterator(Elem),
    Elem :: tuple().
keysort(N, #iterator{} = Iterator) when N > 0 ->
    %% Round-trips through a plain list: fully evaluates the source
    %% and delegates the stable sort to lists:keysort/2.
    list_wrap(fun(I) -> lists:keysort(N, I) end, Iterator).
%% @doc
%% Returns a copy of `TupleIterator1' where the first occurrence of a
%% tuple `T' whose `N'th element compares equal to `Key' is replaced
%% with `NewTuple', if there is such a tuple `T'. If there is no such
%% tuple `T', a copy of `TupleIterator1' where `NewTuple' has been
%% appended to the end is returned.
%% @end
-spec keystore(Key, N, TupleIterator1, NewTuple) -> TupleIterator2 when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    NewTuple :: tuple(),
    TupleIterator2 :: iterator(Elem | NewTuple).
keystore(Key, N, #iterator{} = Iterator, NewTuple) when N > 0, is_tuple(NewTuple) ->
    new(fun() ->
        case next(Iterator) of
            %% Arithmetic key comparison (==), as in lists:keystore/4.
            [Elem | #iterator{} = NextIterator] when Key == element(N, Elem) ->
                [NewTuple | NextIterator];
            [Elem | #iterator{} = NextIterator] ->
                [Elem | keystore(Key, N, NextIterator, NewTuple)];
            [] ->
                %% No match anywhere: append NewTuple at the end,
                %% followed by an empty iterator as the tail.
                [NewTuple | from_list([])]
        end
    end).
%% @doc
%% Searches the iterator of tuples `TupleIterator1' for a tuple whose
%% `N'th element compares equal to `Key'. Returns
%% `{value, Tuple, TupleIterator2}' if such a tuple is found,
%% otherwise `false'. `TupleIterator2' is a copy of `TupleIterator1'
%% where the first occurrence of `Tuple' has been removed.
%%
%% Evaluates `TupleIterator1' until a match is found. Iterating over
%% `TupleIterator2' will evaluate the same elements again. If no match
%% is found, infinite iterators will never return.
%% @end
%% Fixed spec: the original omitted the `false' alternative even
%% though both the body below and the @doc above return `false' when
%% no tuple matches.
-spec keytake(Key, N, TupleIterator1) -> {value, Tuple, TupleIterator2} | false when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    Tuple :: tuple(),
    TupleIterator2 :: iterator(Elem).
keytake(Key, N, #iterator{} = Iterator) when N > 0 ->
    %% The search evaluates Iterator until a match; consuming the
    %% returned keydelete/3 iterator re-evaluates those elements, as
    %% documented above.
    case keysearch(Key, N, Iterator) of
        {value, Tuple} ->
            {value, Tuple, keydelete(Key, N, Iterator)};
        false ->
            false
    end.
%% @doc
%% Takes a function `Fun' from `A's to `B's, and an `Iterator1' of
%% `A's and produces an `Iterator2' of `B's by applying the function
%% to every element in the iterator.
%% @end
-spec map(Fun, Iterator1) -> Iterator2 when
    Fun :: map(A, B),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B).
map(Fun, #iterator{} = Iterator) when is_function(Fun, 1) ->
    %% Lazy map: Fun is applied to each element only when the result
    %% iterator is forced.
    new(fun() ->
        case next(Iterator) of
            [Elem | #iterator{} = NextIterator] ->
                [Fun(Elem) | map(Fun, NextIterator)];
            [] ->
                []
        end
    end).
%% @doc
%% Returns the sorted iterator formed by merging all the subiterators
%% of `IteratorOfIterators'. All subiterators must be sorted before
%% evaluating this function. When two elements compare equal, the
%% element from the subiterator with the lowest position in
%% `IteratorOfIterators' is picked before the other element.
%%
%% The first element of each subiterator will be evaluated.
%% @end
-spec merge(IteratorOfIterators) -> Iterator when
    IteratorOfIterators :: iterator(iterator()),
    Iterator :: iterator().
merge(#iterator{} = IteratorOfIterators) ->
    %% The OUTER iterator is fully evaluated up front (to_list/1); the
    %% subiterators stay lazy beyond their first elements, per the
    %% @doc above.
    fmerge(to_list(IteratorOfIterators)).
%% @doc
%% Returns the sorted iterator formed by merging `Iterator1' and
%% `Iterator2'. Both `Iterator1' and `Iterator2' must be sorted before
%% evaluating this function. When two elements compare equal, the
%% element from `Iterator1' is picked before the element from
%% `Iterator2'.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec merge(Iterator1, Iterator2) -> Iterator3 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(A | B).
merge(#iterator{} = Iterator1, #iterator{} = Iterator2) ->
    %% Stable two-way merge under the default term order.
    fmerge([Iterator1, Iterator2]).
%% @doc
%% Returns the sorted iterator formed by merging `Iterator1' and
%% `Iterator2'. Both `Iterator1' and `Iterator2' must be sorted
%% according to the ordering function `Fun' before evaluating this
%% function. `Fun(A, B)' is to return `true' if `A' compares less than
%% or equal to `B' in the ordering, otherwise `false'. When two
%% elements compare equal, the element from `Iterator1' is picked
%% before the element from `Iterator2'.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec merge(Fun, Iterator1, Iterator2) -> Iterator3 when
    Fun :: compare(A, B),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(A | B).
merge(Fun, #iterator{} = Iterator1, #iterator{} = Iterator2) ->
    %% Stable two-way merge under the caller-supplied ordering; Fun
    %% must implement `less than or equal', per the @doc above.
    fmerge(Fun, [Iterator1, Iterator2]).
%% @doc
%% Returns the sorted iterator formed by merging `Iterator1',
%% `Iterator2', and `Iterator3'. All of `Iterator1', `Iterator2', and
%% `Iterator3' must be sorted before evaluating this function. When
%% two elements compare equal, the element from `Iterator1', if there
%% is such an element, is picked before the other element, otherwise
%% the element from `Iterator2' is picked before the element from
%% `Iterator3'.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec merge3(Iterator1, Iterator2, Iterator3) -> Iterator4 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(C),
    Iterator4 :: iterator(A | B | C).
merge3(
    #iterator{} = Iterator1,
    #iterator{} = Iterator2,
    #iterator{} = Iterator3
) ->
    %% Stable three-way merge; ties resolve by argument position.
    fmerge([Iterator1, Iterator2, Iterator3]).
%% @doc
%% Skips the first `N' elements of `Iterator1' and returns the rest,
%% that is, the subiterator starting at position `N'+1 and continuing
%% to the end of the iterator.
%% @end
-spec nthtail(N, Iterator1) -> Iterator2 when
    N :: non_neg_integer(),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
nthtail(N, #iterator{} = From) when is_integer(N), N >= 0 ->
    nthtail_loop(N, From).
%% @doc
%% Returns an iterator producing the elements of `Iterator1' in
%% reverse order.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec reverse(Iterator1) -> Iterator2 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(A).
reverse(#iterator{} = Forward) ->
    list_wrap(fun lists:reverse/1, Forward).
%% @doc
%% Returns an iterator with the elements in `Iterator1' in reverse order,
%% with tail `TailIterator' appended.
%%
%% Example:
%% ```
%% > llists:to_list(llists:reverse(
%% >     llists:from_list([1, 2, 3, 4]),
%% >     llists:from_list([a, b, c]))).
%% [4,3,2,1,a,b,c]
%% '''
%%
%% The iterator `Iterator1' will be fully evaluated, infinite
%% iterators will never return.
%% @end
-spec reverse(Iterator1, TailIterator) -> Iterator2 when
    Iterator1 :: iterator(A),
    TailIterator :: iterator(B),
    Iterator2 :: iterator(A | B).
reverse(#iterator{} = Iterator, #iterator{} = Tail) ->
    append(reverse(Iterator), Tail).
%% @doc
%% Returns an iterator of, at most, the first `Len' elements of
%% `Iterator1'. It is not an error for `Len' to exceed the length of
%% the iterator, in that case the whole iterator is returned.
%% @end
%% @see sublist/3
-spec sublist(Iterator1, Len) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Len :: non_neg_integer(),
    Iterator2 :: iterator(Elem).
sublist(#iterator{} = Iterator, Len) when is_integer(Len), Len >= 0 ->
    new(fun() ->
        % Check Len before touching the underlying iterator: the
        % previous implementation evaluated next/1 even when taking
        % zero elements, needlessly forcing (possibly expensive) lazy
        % evaluation of an element that was then discarded.
        case Len of
            0 ->
                [];
            _ ->
                case next(Iterator) of
                    [] ->
                        [];
                    [Elem | #iterator{} = NextIterator] ->
                        [Elem | sublist(NextIterator, Len - 1)]
                end
        end
    end).
%% @doc
%% Returns the portion of `Iterator1' starting at position `Start'
%% with at most `Len' elements. `Start' defaults to 1. It is not an
%% error for `Start+Len' to exceed the length of the iterator; in that
%% case the whole remaining iterator is returned.
%% @end
-spec sublist(Iterator1, Start, Len) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Start :: pos_integer(),
    Len :: non_neg_integer(),
    Iterator2 :: iterator(Elem).
sublist(#iterator{} = Iterator, Start, Len) when
    is_integer(Start), is_integer(Len), Start > 0, Len >= 0
->
    Suffix = nthtail(Start - 1, Iterator),
    sublist(Suffix, Len).
%% @doc
%% Returns the longest prefix of `Iterator1' in which every element
%% `Elem' satisfies `Pred(Elem)'. Evaluation stops at the first
%% element for which the predicate returns `false'.
%% @end
-spec takewhile(Pred, Iterator1) -> Iterator2 when
    Pred :: predicate(Elem),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
takewhile(Pred, #iterator{} = Iterator) when is_function(Pred, 1) ->
    new(fun() ->
        case next(Iterator) of
            [] ->
                [];
            [Head | #iterator{} = Rest] ->
                case Pred(Head) of
                    true -> [Head | takewhile(Pred, Rest)];
                    false -> []
                end
        end
    end).
%% @doc
%% Splits `Iterator1' into two iterators: the first produces every
%% element for which `Pred(Elem)' returns `true', the second every
%% element for which it returns `false'.
%%
%% Examples:
%% ```
%% > {Satisfying, NotSatisfying} = llists:partition(
%% >     fun(A) -> A rem 2 == 1 end,
%% >     llists:seq(1, 7)),
%% > {llists:to_list(Satisfying), llists:to_list(NotSatisfying)}.
%% {[1,3,5,7],[2,4,6]}
%% > {Satisfying, NotSatisfying} = llists:partition(
%% >     fun(A) -> is_atom(A) end,
%% >     llists:from_list([a,b,1,c,d,2,3,4,e])),
%% > {llists:to_list(Satisfying), llists:to_list(NotSatisfying)}.
%% {[a,b,c,d,e],[1,2,3,4]}
%% '''
%%
%% For a different way to partition a list, see splitwith/2.
%%
%% The two result iterators evaluate the original independently, so
%% evaluating both runs every element through `Pred' twice.
%% @end
%% @see splitwith/2
-spec partition(Pred, Iterator1) -> {Satisfying, NotSatisfying} when
    Pred :: predicate(Elem),
    Iterator1 :: iterator(Elem),
    Satisfying :: iterator(Elem),
    NotSatisfying :: iterator(Elem).
partition(Pred, #iterator{} = Iterator) ->
    Complement = fun(Elem) -> not Pred(Elem) end,
    {filter(Pred, Iterator), filter(Complement, Iterator)}.
%% @doc
%% Returns an iterator producing the elements of `Iterator1' in sorted
%% order.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec sort(Iterator1) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
sort(#iterator{} = Unsorted) ->
    list_wrap(fun lists:sort/1, Unsorted).
%% @doc
%% Returns an iterator producing the elements of `Iterator1' sorted
%% according to the ordering function `Fun'. `Fun(A, B)' must return
%% `true' when `A' compares less than or equal to `B', otherwise
%% `false'.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec sort(Fun, Iterator1) -> Iterator2 when
    Fun :: compare(A, B),
    B :: A,
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(A).
sort(Fun, #iterator{} = Unsorted) when is_function(Fun, 2) ->
    list_wrap(fun(List) -> lists:sort(Fun, List) end, Unsorted).
%% @doc
%% Splits `Iterator1' into `Iterator2', producing the first `N'
%% elements, and `Iterator3', producing the remainder (the `N'th
%% tail).
%%
%% Evaluates the first `N' elements of `Iterator1' to construct
%% `Iterator3'.
%% @end
-spec split(N, Iterator1) -> {Iterator2, Iterator3} when
    N :: non_neg_integer(),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem),
    Iterator3 :: iterator(Elem).
split(N, #iterator{} = Iterator) when is_integer(N), N >= 0 ->
    Prefix = sublist(Iterator, N),
    Suffix = nthtail(N, Iterator),
    {Prefix, Suffix}.
%% @doc
%% Partitions `Iterator1' into two iterators according to `Pred'.
%% `splitwith/2' behaves as if it is defined as follows:
%%
%% ```
%% llists:splitwith(Pred, Iterator) ->
%%     {llists:takewhile(Pred, Iterator),
%%      llists:dropwhile(Pred, Iterator)}.
%% '''
%%
%% Examples:
%% ```
%% > {Before, After} = llists:splitwith(fun(A) -> A rem 2 == 1 end, llists:seq(1, 7)),
%% > {llists:to_list(Before), llists:to_list(After)}.
%% {[1],[2,3,4,5,6,7]}
%% > {Before, After} = llists:splitwith(
%% >     fun(A) -> is_atom(A) end,
%% >     llists:from_list([a,b,1,c,d,2,3,4,e])),
%% > {llists:to_list(Before), llists:to_list(After)}.
%% {[a,b],[1,c,d,2,3,4,e]}
%% '''
%%
%% For a different way to partition an iterator, see partition/2.
%%
%% Evaluates the elements of `Iterator' for which `Pred(Elem)' returns
%% `false'. If `Pred' never returns `false', infinite iterators will
%% not return.
%% @end
%% @see partition/2
-spec splitwith(Pred, Iterator1) -> {Iterator2, Iterator3} when
    Pred :: predicate(Elem),
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem),
    Iterator3 :: iterator(Elem).
splitwith(Pred, #iterator{} = Iterator) when is_function(Pred, 1) ->
    {takewhile(Pred, Iterator), dropwhile(Pred, Iterator)}.
%% @doc
%% Returns a new iterator `Iterator3' that is a copy of `Iterator1',
%% subjected to the following procedure: for each element in
%% `Iterator2', its first occurrence in `Iterator1' is deleted.
%%
%% Example:
%% ```
%% > llists:to_list(llists:subtract(
%% >     llists:from_list("123212"),
%% >     llists:from_list("212"))).
%% "312"
%% '''
%%
%% `Iterator2' is fully evaluated, infinite iterators will never return.
%% @end
-spec subtract(Iterator1, Iterator2) -> Iterator3 when
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(),
    Iterator3 :: iterator(Elem).
subtract(#iterator{} = BaseIterator, #iterator{} = RemoveIterator) ->
    unfold(
        % The accumulator pairs the remaining base iterator with the
        % (shrinking) list of elements still to be removed.
        fun Next({#iterator{} = FoldIterator, Remove}) ->
            case next(FoldIterator) of
                [] ->
                    none;
                [Elem | #iterator{} = NextIterator] ->
                    case drop(Elem, Remove) of
                        none ->
                            % Not scheduled for removal; emit it.
                            {Elem, {NextIterator, Remove}};
                        {dropped, NewRemove} ->
                            % Removed one occurrence; skip this element
                            % and continue with the shortened list.
                            Next({NextIterator, NewRemove})
                    end
            end
        end,
        {BaseIterator, to_list(RemoveIterator)}
    ).
%% @doc
%% Merges the key-sorted tuple iterators `TupleIterator1' and
%% `TupleIterator2' into one sorted iterator, comparing on the `N'th
%% element of each tuple. Both inputs must be key-sorted and free of
%% duplicates before evaluation. When two tuples compare equal the one
%% from `TupleIterator1' is kept and the one from `TupleIterator2' is
%% discarded.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec ukeymerge(N, TupleIterator1, TupleIterator2) -> TupleIterator3 when
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem1),
    TupleIterator2 :: iterator(Elem2),
    TupleIterator3 :: iterator(Elem1 | Elem2),
    Elem1 :: tuple(),
    Elem2 :: tuple().
ukeymerge(N, #iterator{} = Left, #iterator{} = Right) ->
    KeyEqual = fun(A, B) -> element(N, A) == element(N, B) end,
    KeyLessEqual = fun(A, B) -> element(N, A) =< element(N, B) end,
    ufmerge(KeyEqual, KeyLessEqual, [Left, Right]).
%% @doc
%% Returns an iterator containing the sorted elements of
%% `TupleIterator1' with all but the first of any tuples comparing
%% equal deleted. Sorting compares the `N'th element of each tuple.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec ukeysort(N, TupleIterator1) -> TupleIterator2 when
    N :: pos_integer(),
    TupleIterator1 :: iterator(Elem),
    TupleIterator2 :: iterator(Elem),
    Elem :: tuple().
ukeysort(N, #iterator{} = Iterator) when N > 0 ->
    list_wrap(fun(List) -> lists:ukeysort(N, List) end, Iterator).
%% @doc
%% Merges every subiterator of `IteratorOfIterators' into one sorted,
%% duplicate-free iterator. Every subiterator must already be sorted
%% and contain no duplicates. When elements compare equal, the one
%% from the subiterator earliest in `IteratorOfIterators' is kept and
%% the rest are discarded.
%%
%% The first element of each subiterator will be evaluated.
%% @end
-spec umerge(IteratorOfIterators) -> Iterator when
    IteratorOfIterators :: iterator(iterator()),
    Iterator :: iterator().
umerge(#iterator{} = Iterators) ->
    ufmerge(to_list(Iterators)).
%% @doc
%% Merges the sorted, duplicate-free iterators `Iterator1' and
%% `Iterator2' into one sorted iterator. When two elements compare
%% equal the one from `Iterator1' is kept and the one from
%% `Iterator2' is discarded.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec umerge(Iterator1, Iterator2) -> Iterator3 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(A | B).
umerge(#iterator{} = Left, #iterator{} = Right) ->
    ufmerge([Left, Right]).
%% @doc
%% Merges the iterators `Iterator1' and `Iterator2', both sorted by
%% the ordering function `Fun' and free of duplicates, into one sorted
%% iterator. `Fun(A, B)' must return `true' when `A' compares less
%% than or equal to `B', otherwise `false'. When two elements compare
%% equal the one from `Iterator1' is kept and the one from
%% `Iterator2' is discarded.
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec umerge(Fun, Iterator1, Iterator2) -> Iterator3 when
    Fun :: compare(A, B),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(A | B).
umerge(Fun, #iterator{} = Left, #iterator{} = Right) ->
    %% Two elements are equal under `Fun' exactly when each compares
    %% less than or equal to the other.
    Equal = fun(A, B) -> Fun(A, B) andalso Fun(B, A) end,
    ufmerge(Equal, Fun, [Left, Right]).
%% @doc
%% Merges the sorted, duplicate-free iterators `Iterator1',
%% `Iterator2' and `Iterator3' into one sorted iterator. When elements
%% compare equal the one from the leftmost iterator is kept and the
%% others are discarded (`Iterator1' wins over `Iterator2', which wins
%% over `Iterator3').
%%
%% The first element of each iterator will be evaluated.
%% @end
-spec umerge3(Iterator1, Iterator2, Iterator3) -> Iterator4 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(C),
    Iterator4 :: iterator(A | B | C).
umerge3(#iterator{} = First, #iterator{} = Second, #iterator{} = Third) ->
    ufmerge([First, Second, Third]).
%% @doc
%% "Unzips" an iterator of two-tuples into two iterators: the first
%% produces the first element of every tuple, the second produces the
%% second element.
%% @end
-spec unzip(Iterator1) -> {Iterator2, Iterator3} when
    Iterator1 :: iterator({A, B}),
    Iterator2 :: iterator(A),
    Iterator3 :: iterator(B).
unzip(#iterator{} = Iterator) ->
    Firsts = map(fun({A, _}) -> A end, Iterator),
    Seconds = map(fun({_, B}) -> B end, Iterator),
    {Firsts, Seconds}.
%% @doc
%% "Unzips" an iterator of three-tuples into three iterators: the
%% first produces the first element of every tuple, the second the
%% second element, and the third the third element.
%% @end
-spec unzip3(Iterator1) -> {Iterator2, Iterator3, Iterator4} when
    Iterator1 :: iterator({A, B, C}),
    Iterator2 :: iterator(A),
    Iterator3 :: iterator(B),
    Iterator4 :: iterator(C).
unzip3(#iterator{} = Iterator) ->
    Firsts = map(fun({A, _, _}) -> A end, Iterator),
    Seconds = map(fun({_, B, _}) -> B end, Iterator),
    Thirds = map(fun({_, _, C}) -> C end, Iterator),
    {Firsts, Seconds, Thirds}.
%% @doc
%% Returns an iterator containing the sorted elements of `Iterator1'
%% with all but the first of any elements comparing equal deleted.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec usort(Iterator1) -> Iterator2 when
    Iterator1 :: iterator(Elem),
    Iterator2 :: iterator(Elem).
usort(#iterator{} = Unsorted) ->
    list_wrap(fun lists:usort/1, Unsorted).
%% @doc
%% Returns an iterator containing the elements of `Iterator1' sorted
%% according to the ordering function `Fun', with all but the first of
%% any elements comparing equal deleted. `Fun(A, B)' must return
%% `true' when `A' compares less than or equal to `B', otherwise
%% `false'.
%%
%% The input is fully evaluated first, so infinite iterators will
%% never return.
%% @end
-spec usort(Fun, Iterator1) -> Iterator2 when
    Fun :: compare(A, B),
    B :: A,
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(A).
usort(Fun, #iterator{} = Unsorted) when is_function(Fun, 2) ->
    list_wrap(fun(List) -> lists:usort(Fun, List) end, Unsorted).
%% @doc
%% "Zips" two iterators of equal length into one iterator of
%% two-tuples, pairing each element of the first iterator with the
%% element at the same position in the second iterator.
%% @end
-spec zip(Iterator1, Iterator2) -> Iterator3 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator({A, B}).
zip(#iterator{} = Left, #iterator{} = Right) ->
    zipwith(fun(X, Y) -> {X, Y} end, Left, Right).
%% @doc
%% "Zips" three iterators of equal length into one iterator of
%% three-tuples, combining the elements at the same position of each
%% of the three iterators.
%% @end
-spec zip3(Iterator1, Iterator2, Iterator3) -> Iterator4 when
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(C),
    Iterator4 :: iterator({A, B, C}).
zip3(#iterator{} = First, #iterator{} = Second, #iterator{} = Third) ->
    zipwith3(fun(X, Y, Z) -> {X, Y, Z} end, First, Second, Third).
%% @doc
%% Combines the elements of two iterators of equal length into one iterator.
%% For each pair `X, Y' of iterator elements from the two iterators, the element
%% in the result iterator is `Combine(X, Y)'.
%%
%% `llists:zipwith(fun(X, Y) -> {X, Y} end, Iterator1, Iterator2)' is
%% equivalent to `llists:zip(Iterator1, Iterator2)'.
%%
%% Example:
%% ```
%% > llists:to_list(
%% >     llists:zipwith(fun(X, Y) -> X + Y end, llists:seq(1, 3), llists:seq(4, 6))).
%% [5,7,9]
%% '''
%% @end
-spec zipwith(Combine, Iterator1, Iterator2) -> Iterator3 when
    Combine :: combine(X, Y, Out),
    Iterator1 :: iterator(X),
    Iterator2 :: iterator(Y),
    Iterator3 :: iterator(Out).
zipwith(Combine, #iterator{} = Iterator1, #iterator{} = Iterator2) when is_function(Combine, 2) ->
    new(fun() ->
        case {next(Iterator1), next(Iterator2)} of
            {[Elem1 | #iterator{} = NextIterator1], [Elem2 | #iterator{} = NextIterator2]} ->
                [Combine(Elem1, Elem2) | zipwith(Combine, NextIterator1, NextIterator2)];
            {[], []} ->
                [];
            _ ->
                % Because that's how lists:zip* crashes on
                % unequal length lists.
                error(function_clause)
        end
    end).
%% @doc
%% Combines the elements of three iterators of equal length into one
%% iterator. For each triple `X, Y, Z' of iterator elements from the
%% three iterators, the element in the result iterator is
%% `Combine(X, Y, Z)'.
%%
%% `zipwith3(fun(X, Y, Z) -> {X, Y, Z} end, Iterator1, Iterator2, Iterator3)'
%% is equivalent to `zip3(Iterator1, Iterator2, Iterator3)'.
%%
%% Examples:
%% ```
%% > llists:to_list(
%% >     llists:zipwith3(
%% >         fun(X, Y, Z) -> X + Y + Z end,
%% >         llists:seq(1, 3),
%% >         llists:seq(4, 6),
%% >         llists:seq(7, 9))).
%% [12,15,18]
%% > llists:to_list(
%% >     llists:zipwith3(
%% >         fun(X, Y, Z) -> [X, Y, Z] end,
%% >         llists:from_list([a,b,c]),
%% >         llists:from_list([x,y,z]),
%% >         llists:seq(1, 3))).
%% [[a,x,1],[b,y,2],[c,z,3]]
%% '''
%% @end
-spec zipwith3(Combine, Iterator1, Iterator2, Iterator3) -> Iterator4 when
    Combine :: combine3(A, B, C, Out),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B),
    Iterator3 :: iterator(C),
    Iterator4 :: iterator(Out).
zipwith3(Combine, #iterator{} = I1, #iterator{} = I2, #iterator{} = I3) when
    is_function(Combine, 3)
->
    new(fun() ->
        % Evaluate one step of each input, left to right.
        Next1 = next(I1),
        Next2 = next(I2),
        Next3 = next(I3),
        case {Next1, Next2, Next3} of
            {[E1 | #iterator{} = R1], [E2 | #iterator{} = R2], [E3 | #iterator{} = R3]} ->
                [Combine(E1, E2, E3) | zipwith3(Combine, R1, R2, R3)];
            {[], [], []} ->
                [];
            _ ->
                % Because that's how lists:zip* crashes on unequal
                % length lists.
                error(function_clause)
        end
    end).
%%%===================================================================
%%% API - Iterator Evaluation
%%%===================================================================
%% @doc
%% Evaluates `Iterator' completely and returns every produced element
%% as a list. Infinite iterators will never return.
%% @end
-spec to_list(Iterator) -> List when
    Iterator :: iterator(Elem),
    List :: [Elem].
to_list(#iterator{} = Iter) ->
    to_list_loop(next(Iter)).
%% @doc
%% Evaluates an `Iterator' of `{Key, Value}' tuples completely and
%% returns every produced pair in a map. Infinite iterators will never
%% return.
%%
%% When the iterator yields duplicate `Key's it is undefined which
%% value ends up in the final map.
%% @end
-spec to_map(Iterator) -> Map when
    Iterator :: iterator({Key, Value}),
    Key :: any(),
    Value :: any(),
    Map :: maps:map(Key, Value).
to_map(#iterator{} = Iter) ->
    to_map_loop(next(Iter), maps:new()).
%% @doc
%% Counts the elements produced by `Iterator', for example:
%%
%% ```
%% > llists:length(llists:seq(1, 9)).
%% 9
%% '''
%%
%% The iterator will be fully evaluated, infinite iterators will never
%% return.
%% @end
-spec length(Iterator) -> Length when
    Iterator :: iterator(),
    Length :: non_neg_integer().
length(#iterator{} = Iterator) ->
    foldl(fun(_Elem, Count) -> Count + 1 end, 0, Iterator).
%% @doc
%% Returns `true' when `Pred(Elem)' is `true' for every element `Elem'
%% of `Iterator'.
%%
%% Evaluation stops as soon as `Pred(Elem)' returns `false' or the
%% iterator is exhausted.
%% @end
-spec all(Pred, Iterator) -> boolean() when
    Pred :: predicate(Elem),
    Iterator :: iterator(Elem).
all(Pred, #iterator{} = Iter) when is_function(Pred, 1) ->
    all_loop(Pred, next(Iter)).
%% @doc
%% Returns `true' when `Pred(Elem)' is `true' for at least one element
%% `Elem' of `Iterator'.
%%
%% Evaluation stops as soon as `Pred(Elem)' returns `true' or the
%% iterator is exhausted.
%% @end
-spec any(Pred, Iterator) -> boolean() when
    Pred :: predicate(Elem),
    Iterator :: iterator(Elem).
any(Pred, #iterator{} = Iter) when is_function(Pred, 1) ->
    any_loop(Pred, next(Iter)).
%% @doc
%% Concatenates the text representation of the elements of `Iterator';
%% elements may be atoms, integers, floats, or strings. The iterator
%% will be fully evaluated, infinite iterators will never return.
%% @end
-spec concat(Iterator) -> string() when
    Iterator :: iterator(atom() | integer() | float() | string()).
concat(#iterator{} = Iterator) ->
    Elements = to_list(Iterator),
    lists:concat(Elements).
%% @doc
%% Calls `Fun(Elem, AccIn)' on successive elements of `Iterator',
%% starting with `AccIn' == `Acc0'. `Fun/2' must return the next
%% accumulator, which is threaded into the following call. The final
%% accumulator is returned; `Acc0' is returned for an empty iterator.
%%
%% The iterator will be fully evaluated, infinite iterators will never
%% return.
%% @end
-spec foldl(Fun, Acc0, Iterator) -> AccOut when
    Fun :: fold(A, AccIn :: Acc0 | AccOut, AccOut),
    Acc0 :: any(),
    Iterator :: iterator(A).
foldl(Fun, InitialAcc, #iterator{} = Iter) when is_function(Fun, 2) ->
    foldl_loop(Fun, InitialAcc, next(Iter)).
%% @doc
%% Like `foldl/3', but the list is traversed from right to left.
%%
%% Example:
%% ```
%% > P = fun(A, AccIn) -> io:format("~p ", [A]), AccIn end.
%% #Fun<erl_eval.12.2225172>
%% > llists:foldl(P, void, llists:seq(1, 3)).
%% 1 2 3 void
%% > llists:foldr(P, void, llists:seq(1, 3)).
%% 3 2 1 void
%% '''
%%
%% The iterator is fully evaluated before the fold begins, infinite
%% iterators will never return. `foldl/3' does not fully evaluate the
%% iterator and is usually preferred to `foldr/3'.
%% @end
%% @see foldl/3
-spec foldr(Fun, Acc0, Iterator) -> AccOut when
    Fun :: fold(A, AccIn :: Acc0 | AccOut, AccOut),
    Acc0 :: any(),
    Iterator :: iterator(A).
foldr(Fun, Acc0, #iterator{} = Iterator) when is_function(Fun, 2) ->
    foldl(Fun, Acc0, reverse(Iterator)).
%% @doc
%% Calls `Fun(Elem)' on each element `Elem' of `Iterator', in iterator
%% order, purely for its side effects.
%%
%% The iterator will be fully evaluated, infinite iterators will never
%% return.
%% @end
-spec foreach(Fun, Iterator) -> ok when
    Fun :: map(Elem, any()),
    Iterator :: iterator(Elem).
foreach(Fun, #iterator{} = Iter) when is_function(Fun, 1) ->
    foreach_loop(Fun, next(Iter)).
%% @doc
%% Searches the iterator of tuples `TupleIterator' for a tuple whose
%% `N'th element compares equal to `Key'. Returns the matching
%% `Tuple', or `false' when there is none.
%%
%% The iterator is evaluated until a match is found; when no match
%% exists, infinite iterators will never return.
%% @end
-spec keyfind(Key, N, TupleIterator) -> Tuple | false when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator :: iterator(),
    Tuple :: tuple() | false.
keyfind(Key, N, #iterator{} = Iterator) when N > 0 ->
    %% The guard on element/2 silently fails (and so skips) any
    %% non-tuple element.
    Match = fun
        (Tuple) when element(N, Tuple) == Key -> true;
        (_) -> false
    end,
    case search(Match, Iterator) of
        {value, Tuple} when is_tuple(Tuple) ->
            Tuple;
        false ->
            false
    end.
%% @doc
%% Returns `true' when `TupleIterator' contains a tuple whose `N'th
%% element compares equal to `Key', otherwise `false'.
%%
%% The iterator is evaluated until a match is found; when no match
%% exists, infinite iterators will never return.
%% @end
-spec keymember(Key, N, TupleIterator) -> boolean() when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator :: iterator().
keymember(Key, N, #iterator{} = Iterator) when N > 0 ->
    Match = fun
        (Tuple) when element(N, Tuple) == Key -> true;
        (_) -> false
    end,
    any(Match, Iterator).
%% @doc
%% Searches the iterator of tuples `TupleIterator' for a tuple whose
%% `N'th element compares equal to `Key'. Returns `{value, Tuple}' if
%% such a tuple is found, otherwise `false'.
%%
%% The iterator will be evaluated until a match is found. If no match
%% is found, infinite iterators will never return.
%%
%% Function keyfind/3 is usually more convenient.
%% @end
%% @see keyfind/3
-spec keysearch(Key, N, TupleIterator) -> {value, Tuple} | false when
    Key :: any(),
    N :: pos_integer(),
    TupleIterator :: iterator(),
    Tuple :: tuple().
keysearch(Key, N, #iterator{} = Iterator) when N > 0 ->
    % Guard on a positive index for consistency with keyfind/3 and
    % keymember/3: without it an invalid index (e.g. 0) silently
    % scanned the whole iterator - never returning for infinite
    % input - instead of failing fast with function_clause.
    search(
        fun
            (Elem) when element(N, Elem) == Key -> true;
            (_) -> false
        end,
        Iterator
    ).
%% @doc
%% Returns the final element produced by `Iterator'.
%%
%% The iterator will be fully evaluated, infinite iterators will never
%% return.
%% @end
-spec last(Iterator) -> Elem when Iterator :: iterator(Elem).
last(#iterator{} = Iterator) ->
    %% Tag each element so that an iterator actually containing the
    %% atom `undefined' cannot be confused with the initial
    %% accumulator.
    Final = foldl(fun(Elem, _Acc) -> {last, Elem} end, undefined, Iterator),
    case Final of
        {last, Elem} ->
            Elem;
        undefined ->
            % Mirror how lists:last([]) crashes.
            error(function_clause)
    end.
%% @doc
%% Combines the operations of `map/2' and `foldl/3' into one pass.
%%
%% Example:
%% ```
%% > % Summing the elements in an iterator and double them at the same time:
%% > DoubleAndSum = fun(X, Sum) -> {2*X, X+Sum} end,
%% > {Mapped, Acc} = llists:mapfoldl(DoubleAndSum, 0, llists:seq(1,5)),
%% > {llists:to_list(Mapped), Acc}.
%% {[2,4,6,8,10],15}
%% '''
%%
%% The iterator is fully evaluated before the mapfold begins, infinite
%% iterators will never return.
%% @end
-spec mapfoldl(Fun, Acc0, Iterator1) -> {Iterator2, AccOut} when
    Fun :: mapfold(A, AccIn :: Acc0 | AccOut, B, AccOut),
    Acc0 :: accumulator(),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B).
mapfoldl(Fun, Acc0, #iterator{} = Iterator) when is_function(Fun, 2) ->
    % Arity guard added for consistency with foldl/3, which validates
    % its fold function the same way.
    {Mapped, AccOut} = lists:mapfoldl(Fun, Acc0, to_list(Iterator)),
    {llists:from_list(Mapped), AccOut}.
%% @doc
%% Combines the operations of map/2 and foldr/3 into one pass.
%%
%% The iterator is fully evaluated before the mapfold begins, infinite
%% iterators will never return.
%% @end
-spec mapfoldr(Fun, Acc0, Iterator1) -> {Iterator2, AccOut} when
    Fun :: mapfold(A, AccIn :: Acc0 | AccOut, B, AccOut),
    Acc0 :: accumulator(),
    Iterator1 :: iterator(A),
    Iterator2 :: iterator(B).
mapfoldr(Fun, Acc0, #iterator{} = Iterator) when is_function(Fun, 2) ->
    % Arity guard added for consistency with foldr/3, which validates
    % its fold function the same way.
    {Mapped, AccOut} = lists:mapfoldr(Fun, Acc0, to_list(Iterator)),
    {llists:from_list(Mapped), AccOut}.
%% @doc
%% Returns the first element of `Iterator' that compares greater than
%% or equal to all other elements of `Iterator'.
%%
%% The iterator is fully evaluated, infinite iterators will never
%% return.
%% @end
-spec max(Iterator) -> Elem when Iterator :: iterator(Elem).
max(#iterator{} = Iterator) ->
    case next(Iterator) of
        [] ->
            % Mirror how lists:max([]) crashes.
            error(function_clause);
        [First | #iterator{} = Rest] ->
            % Seed the fold with the first element; replace only on a
            % strictly greater element so the earliest maximum wins.
            foldl(
                fun
                    (Elem, Largest) when Elem > Largest -> Elem;
                    (_Elem, Largest) -> Largest
                end,
                First,
                Rest
            )
    end.
%% @doc
%% Returns `true' when some element of `Iterator' matches `Elem'
%% exactly (`=:='), otherwise `false'.
%%
%% Evaluation stops at the first match or when the iterator is
%% exhausted.
%% @end
-spec member(Elem, Iterator) -> boolean() when
    Elem :: any(),
    Iterator :: iterator().
member(Elem, #iterator{} = Iterator) ->
    IsMatch = fun
        (Candidate) when Candidate =:= Elem -> true;
        (_) -> false
    end,
    any(IsMatch, Iterator).
%% @doc
%% Returns the first element of `Iterator' that compares less than or
%% equal to all other elements of `Iterator'.
%%
%% The iterator is fully evaluated, infinite iterators will never
%% return.
%% @end
-spec min(Iterator) -> Elem when Iterator :: iterator(Elem).
min(#iterator{} = Iterator) ->
    case next(Iterator) of
        [] ->
            % Mirror how lists:min([]) crashes.
            error(function_clause);
        [First | #iterator{} = Rest] ->
            % Seed the fold with the first element; replace only on a
            % strictly smaller element so the earliest minimum wins.
            foldl(
                fun
                    (Elem, Smallest) when Elem < Smallest -> Elem;
                    (_Elem, Smallest) -> Smallest
                end,
                First,
                Rest
            )
    end.
%% @doc
%% Returns the `N'th element of `Iterator'.
%%
%% Example:
%% ```
%% > llists:nth(3, llists:from_list([a, b, c, d, e])).
%% c
%% '''
%%
%% The first `N' elements of the iterator are evaluated.
%% @end
-spec nth(N, Iterator) -> Elem when
    N :: pos_integer(),
    Iterator :: iterator(Elem).
nth(N, #iterator{} = Iterator) when N > 0 ->
    Tail = nthtail(N - 1, Iterator),
    case next(Tail) of
        [] ->
            % Because that's how lists:nth/2 & llists:nthtail/2 crash.
            error(function_clause);
        [Elem | #iterator{}] ->
            Elem
    end.
%% @doc
%% Returns `true' if `Iterator1' is a prefix of `Iterator2', otherwise
%% `false'.
%%
%% Both iterators are evaluated up to the point where they diverge; if
%% both are identical and infinite, this will never return.
%% @end
-spec prefix(Iterator1, Iterator2) -> boolean() when
    Iterator1 :: iterator(),
    Iterator2 :: iterator().
prefix(#iterator{} = Candidate, #iterator{} = Full) ->
    prefix_loop(next(Candidate), next(Full)).
%% @doc
%% Returns `{value, Value}' for the first element `Value' of
%% `Iterator' for which `Pred(Value)' returns `true', or `false' when
%% no such element exists.
%%
%% Evaluation stops at the first match; if no match is ever found,
%% infinite iterators will never return.
%% @end
-spec search(Pred, Iterator) -> {value, Value} | false when
    Pred :: predicate(Value),
    Iterator :: iterator().
search(Pred, #iterator{} = Iter) when is_function(Pred, 1) ->
    search_loop(Pred, next(Iter)).
%% @doc
%% Returns `true' if `Iterator1' is a suffix of `Iterator2', otherwise
%% `false'.
%%
%% Both `Iterator1' and `Iterator2' are fully evaluated, infinite
%% iterators will never return.
%% @end
-spec suffix(Iterator1, Iterator2) -> boolean() when
    Iterator1 :: iterator(),
    Iterator2 :: iterator().
suffix(#iterator{} = Candidate, #iterator{} = Full) ->
    %% A suffix is a prefix of the reversed sequences.
    prefix(reverse(Candidate), reverse(Full)).
%% @doc
%% Returns the sum of the elements in `Iterator'.
%%
%% The iterator is fully evaluated, infinite iterators will never
%% return.
%% @end
-spec sum(Iterator) -> Sum when
    Iterator :: iterator(Elem),
    Sum :: Elem,
    Elem :: number().
sum(#iterator{} = Iterator) ->
    foldl(fun(Number, Total) -> Number + Total end, 0, Iterator).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
-spec new(fun(() -> lazy_list(Elem))) -> iterator(Elem).
%% Wrap a zero-arity continuation as an iterator record.
new(NextFun) ->
    #iterator{next = NextFun}.
list_wrap(ListFun, Iterator) ->
llists:from_list(ListFun(llists:to_list(Iterator))).
%% @private
%% @doc
%% Removes the first occurrence of `Elem' from the list (matching with
%% `=:='). Returns `{dropped, List2}' on success, or `none' when the
%% element is absent.
%% @end
drop(Elem, List) ->
    case lists:member(Elem, List) of
        false ->
            none;
        true ->
            {dropped, lists:delete(Elem, List)}
    end.
%% @private
%% @doc Merge a list of iterators using the default `=<' ordering.
%% @see fmerge/2
fmerge(IteratorOfIterators) ->
    fmerge(fun(X, Y) -> X =< Y end, IteratorOfIterators).
%% @private
%% @doc
%% Merge a list of iterators according to an ordering function which
%% returns `true' when the first element is less than or equal to the
%% second, `false' otherwise. All iterators are expected to be already
%% ordered.
%%
%% This preserves the `lists' invariant that the leftmost iterator is
%% chosen when values are equal. It is not able to replicate the
%% behaviour of merge when given (invalid) unordered lists. Divergence
%% appears to be easiest to replicate with input like:
%% `[[0], [0], [0], [1, 0]]'.
%%
%% The name comes from the internals of the lists module, which
%% delegates to a similarly named function to handle merging with an
%% ordering function.
%% @end
fmerge(Compare, ListOfIterators) when is_function(Compare, 2) ->
    %% Realize the head of every iterator, discarding those that are
    %% already empty.
    LazyLists = [
        Next
        || Iterator <- ListOfIterators,
           Next <- [next(Iterator)],
           Next /= []
    ],
    unfold(
        fun
            ([]) ->
                none;
            ([_ | _] = Lists) ->
                %% Pick a smallest head element, then advance only the
                %% iterator it came from.
                {Smallest, {Found, FoundBefore, FoundAfter}} = fmerge_sort(Compare, Lists),
                {Smallest, fmerge_next(Found, FoundBefore, FoundAfter)}
        end,
        %% Reversal interacts with fmerge_sort's tie handling so that
        %% the leftmost original iterator wins on equal elements.
        lists:reverse(LazyLists)
    ).

%% @private
%% @doc Compare the head elements of two lazy lists with `Fun'.
fmerge_compare(Fun, [Elem1 | _], [Elem2 | _]) ->
    Fun(Elem1, Elem2).

%% @private
%% @doc Advance `Iterator' and splice its next lazy list back between
%% `Before' and `After'; drops it entirely when exhausted.
fmerge_next(Iterator, Before, After) ->
    case next(Iterator) of
        [] ->
            Before ++ After;
        [_ | _] = Next ->
            Before ++ [Next | After]
    end.

%% @private
%% @doc Find a lazy list with the smallest head element, returning that
%% element together with its iterator and the remaining lazy lists
%% split around it.
fmerge_sort(Fun, [Smallest | FoundAfter] = After) ->
    fmerge_sort(Fun, [], After, {Smallest, [], FoundAfter}).

fmerge_sort(_Fun, _Before, [], {[Smallest | Iterator], FoundBefore, FoundAfter}) ->
    {Smallest, {Iterator, lists:reverse(FoundBefore), FoundAfter}};
fmerge_sort(Fun, Before, [Compare | After], {Smallest, _, _} = Found) ->
    case fmerge_compare(Fun, Compare, Smallest) of
        true ->
            fmerge_sort(Fun, [Compare | Before], After, {Compare, Before, After});
        false ->
            fmerge_sort(Fun, [Compare | Before], After, Found)
    end.
%% @private
%% @doc Merge with duplicate removal, using default `==' equality and
%% `=<' ordering.
%% @see ufmerge/3
ufmerge(IteratorOfIterators) ->
    ufmerge(
        fun(X, Y) -> X == Y end,
        fun(X, Y) -> X =< Y end,
        IteratorOfIterators
    ).
%% @private
%% @doc
%% Merge a list of iterators according to an ordering function which
%% returns `true' when the first element is less than or equal to the
%% second, `false' otherwise. Discards duplicates across iterators
%% according to an equality function. All iterators are expected to be
%% already ordered and without duplicates.
%%
%% This preserves the `lists' invariant that the leftmost iterator is
%% chosen when values are equal. It is not able to replicate the
%% behaviour of merge when given (invalid) unordered lists or
%% (invalid) lists with duplicates. Divergence appears to be easiest
%% to replicate with input like:
%% `[[0, 0], [0, 0]]'.
%%
%% It was very tempting to instead name this fumerge, as replicating
%% undocumented merge behaviour from the `lists' module has proven to
%% be far more difficult than expected.
%% @end
ufmerge(Equal, Compare, ListOfIterators) when is_function(Equal, 2), is_function(Compare, 2) ->
    %% Realize the head of each iterator, dropping empty ones.
    LazyLists = [
        Next
        || Iterator <- ListOfIterators,
           Next <- [next(Iterator)],
           Next /= []
    ],
    unfold(
        fun
            ([]) ->
                none;
            ([_ | _] = Lists) ->
                {Smallest, {Found, FoundBefore, FoundAfter}} = fmerge_sort(Compare, Lists),
                %% Advance past duplicates of Smallest in every other
                %% lazy list before emitting it.
                UniqueBefore = ufmerge_skip(Equal, Smallest, FoundBefore),
                UniqueAfter = ufmerge_skip(Equal, Smallest, FoundAfter),
                {Smallest, fmerge_next(Found, UniqueBefore, UniqueAfter)}
        end,
        lists:reverse(LazyLists)
    ).

%% @private
%% @doc Advance every lazy list whose head is `Equal' to `Smallest',
%% dropping any list that becomes exhausted in the process.
ufmerge_skip(Equal, Smallest, LazyLists) ->
    [
        LazyList
        || [Elem | Iterator] <- LazyLists,
           LazyList <- [
               case Equal(Elem, Smallest) of
                   true ->
                       next(Iterator);
                   false ->
                       [Elem | Iterator]
               end
           ],
           LazyList /= []
    ].
%% @private
%% @doc Step an iterator N elements forward. Crashes with
%% function_clause when the iterator runs out before N reaches 0,
%% mirroring how lists:nthtail/2 fails.
nthtail_loop(0, #iterator{} = Iterator) ->
    Iterator;
nthtail_loop(N, #iterator{} = Iterator) ->
    nthtail_loop(N - 1, next(Iterator));
nthtail_loop(N, [_Head | #iterator{} = Iterator]) ->
    nthtail_loop(N, Iterator).

%% @private
%% @doc Realize a lazy list into a plain list; never returns for
%% infinite iterators.
to_list_loop([]) ->
    [];
to_list_loop([Head | #iterator{} = Iterator]) ->
    [Head | to_list_loop(next(Iterator))].

%% @private
%% @doc Fold {Key, Value} pairs into a map; later keys overwrite
%% earlier ones.
to_map_loop([], Acc) ->
    Acc;
to_map_loop([{Key, Value} | #iterator{} = Iterator], Acc) ->
    to_map_loop(next(Iterator), Acc#{Key => Value}).

%% @private
%% @doc True when Pred holds for every element; stops at the first
%% false result.
all_loop(_Pred, []) ->
    true;
all_loop(Pred, [Head | #iterator{} = Iterator]) ->
    case Pred(Head) of
        true ->
            all_loop(Pred, next(Iterator));
        false ->
            false
    end.

%% @private
%% @doc True when Pred holds for any element; stops at the first true
%% result.
any_loop(_Pred, []) ->
    false;
any_loop(Pred, [Head | #iterator{} = Iterator]) ->
    case Pred(Head) of
        true ->
            true;
        false ->
            any_loop(Pred, next(Iterator))
    end.

%% @private
%% @doc Left fold over a lazy list.
foldl_loop(_Fun, Acc, []) ->
    Acc;
foldl_loop(Fun, Acc, [Elem | #iterator{} = Iterator]) ->
    foldl_loop(Fun, Fun(Elem, Acc), next(Iterator)).

%% @private
%% @doc Apply Fun to each element for its side effects only; the
%% return value of Fun is discarded.
foreach_loop(_Fun, []) ->
    ok;
foreach_loop(Fun, [Elem | #iterator{} = Iterator]) ->
    _ = Fun(Elem),
    foreach_loop(Fun, next(Iterator)).

%% @private
%% @doc Element-wise prefix comparison of two lazy lists.
prefix_loop([], _) ->
    true;
prefix_loop(_, []) ->
    % If first iterator is longer.
    false;
prefix_loop([Elem | #iterator{} = Iterator1], [Elem | #iterator{} = Iterator2]) ->
    prefix_loop(next(Iterator1), next(Iterator2));
prefix_loop([_Elem1 | #iterator{}], [_Elem2 | #iterator{}]) ->
    % If elements differ.
    false.

%% @private
%% @doc Linear search for the first element satisfying Pred.
search_loop(_Pred, []) ->
    false;
search_loop(Pred, [Value | #iterator{} = Iterator]) ->
    case Pred(Value) of
        true ->
            {value, Value};
        false ->
            search_loop(Pred, next(Iterator))
    end. | src/llists.erl | 0.681833 | 0.646704 | llists.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @copyright 2010-2017 <NAME>
%% @doc Simple server to manage the translations, owns the ets table containing all translations.
%% When new translations are read then the previous table is kept and the one before the previous is deleted.
%% Copyright 2010-2017 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(z_trans_server).
-behaviour(gen_server).
%% gen_server exports
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([start_tests/0,start_link/1]).
%% interface functions
-export([
load_translations/1,
load_translations/2,
table/1,
set_context_table/1,
observe_module_ready/2
]).
-include_lib("zotonic.hrl").
-record(state, {table, site}).
%%====================================================================
%% API
%%====================================================================

%% @doc Start a trans server registered locally as 'z_trans_server$test',
%% for use in tests.
%% NOTE(review): init/1 below only matches a {Site, Name} tuple, so
%% passing the bare atom `test' here looks like it would crash in
%% init/1 — confirm.
-spec start_tests() -> {ok, pid()} | {error, term()}.
start_tests() ->
    io:format("Starting trans server.~n"),
    gen_server:start_link({local, 'z_trans_server$test'}, ?MODULE, test, []).

%% @doc Starts the server, registered under a per-site name.
-spec start_link(Site :: atom()) -> {ok, pid()} | {error, term()}.
start_link(Site) ->
    Name = z_utils:name_for_site(?MODULE, Site),
    gen_server:start_link({local, Name}, ?MODULE, {Site, Name}, []).

%% @doc Parse all .po files and reload the found translations in the
%% trans server (asynchronously, via load_translations/2).
-spec load_translations(z:context()) -> ok.
load_translations(Context) ->
    Ts = z_trans:parse_translations(Context),
    load_translations(Context),
    ok.
%%====================================================================
%% gen_server callbacks
%%====================================================================

%% @doc Initiates the server: attaches the module_ready observer and
%% creates the named, protected ets table holding the translations.
-spec init({ Site :: atom(), Name :: atom() }) -> {ok, #state{}}.
init({Site, Name}) ->
    lager:md([
        {site, Site},
        {module, ?MODULE}
    ]),
    process_flag(trap_exit, true),
    z_notifier:observe(module_ready, {?MODULE, observe_module_ready}, Site),
    Table = ets:new(Name, [named_table, set, protected, {read_concurrency, true}]),
    {ok, #state{table=Table, site=Site}}.

%% @doc Trap unknown calls
handle_call(Message, _From, State) ->
    {stop, {unknown_call, Message}, State}.

%% @doc Rebuild the translations table. Call the template flush routines afterwards.
%% Trans is a map with all translations per translatable string.
handle_cast({load_translations, Trans}, State) ->
    %% Ensure every entry has an 'en' translation, defaulting to the
    %% key (the source string) itself.
    F = fun(Key, Value, Acc) ->
            Value1 = case proplists:get_value(en, Value) of
                        undefined -> [{en,Key}|Value];
                        _ -> Value
                     end,
            [{Key,Value1}|Acc]
        end,
    List = maps:fold(F, [], Trans),
    sync_to_table(List, State#state.table),
    z_template:reset(State#state.site),
    {noreply, State};
%% @doc Trap unknown casts
handle_cast(Message, State) ->
    {stop, {unknown_cast, Message}, State}.

%% @doc Handling all non call/cast messages
handle_info(_Info, State) ->
    {noreply, State}.

%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
terminate(_Reason, State) ->
    z_notifier:detach(module_ready, {?MODULE, observe_module_ready}, State#state.site),
    ok.

%% @doc Convert process state when code is changed.
%% NOTE(review): upgrading from the old {state, Table, OldTable} tuple
%% leaves #state.site undefined, which terminate/2 then passes to
%% z_notifier:detach/3 — confirm that is acceptable.
code_change(_OldVsn, State, _Extra) ->
    case State of
        {state, Table, _OldTable} ->
            {ok, #state{table=Table}};
        _ ->
            {ok, State}
    end.
%%====================================================================
%% support functions
%%====================================================================

%% @doc Sync a list of translations to the ets table containing all
%% translations. Performs a sorted-merge diff so that only changed
%% keys are inserted or deleted.
sync_to_table(List, Table) ->
    LT = lists:sort(ets:tab2list(Table)),
    List1 = lists:sort(List),
    sync(List1, LT, Table).

%% Merge-walk the sorted new and old entries:
%% - both exhausted: done
%% - old exhausted: insert all remaining new entries
%% - new exhausted: delete all remaining old keys
%% - identical entry: skip
%% - same key, different value: overwrite
%% - new key sorts first: it is missing from the table, insert it
%% - old key sorts first: it no longer exists, delete it
sync([], [], _Table) ->
    ok;
sync(L, [], Table) ->
    ets:insert(Table, L);
sync([], L, Table) ->
    lists:map(fun({Key,_}) -> ets:delete(Table, Key) end, L);
sync([H|NewList], [H|OldList], Table) ->
    sync(NewList, OldList, Table);
sync([{K,V}|NewList], [{K,_}|OldList], Table) ->
    ets:insert(Table, [{K,V}]),
    sync(NewList, OldList, Table);
sync([{K1,V1}|NewList], [{K2,_}|_] = OldList, Table) when K1 < K2 ->
    ets:insert(Table, [{K1,V1}]),
    sync(NewList, OldList, Table);
sync([{K1,_}|_] = NewList, [{K2,_}|OldList], Table) when K1 > K2 ->
    ets:delete(Table, K2),
    sync(NewList, OldList, Table). | apps/zotonic_core/src/i18n/z_trans_server.erl | 0.538255 | 0.417034 | z_trans_server.erl | starcoder |
%% Copyright (c) 2019 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : luerl_anno.erl
%% Author : <NAME>
%% Purpose : Handle annotations in the Luerl abstract code.
%% We keep the same standard as in the Erlang AST:
%%
%% - annotations with just the line number can be just the integer.
%% - in an aonnotation list the line number is just an integer while
%% all other annotations have the format {Key,Value}.
-module(luerl_anno).
-export([new/0,new/1,new/2,set_line/2,line/1,set/3,get/2]).
%% new() -> Anno.
%% new(Line) -> Anno.
%% new(Key, Val) -> Anno.
%% Create an empty annotation, one containing Line and one containing
%% a general Key/Val.
new() -> [].
new(Line) -> Line.
new(Key, Val) -> set(Key, Val, new()).
%% set_line(Line, Anno) -> Anno.
%% line(Anno) -> Line | undefined.
%% Specific functions for accessing line numbers in the anno.
set_line(Line, Anno) when is_integer(Anno) -> Line;
set_line(Line, Anno) -> set_line1(Line, Anno).
set_line1(Line, [Old|Anno]) when is_integer(Old) -> [Line|Anno];
set_line1(Line, [A|Anno]) ->
[A|set_line1(Line, Anno)];
set_line1(Line, []) -> [Line].
line(Anno) when is_integer(Anno) -> Anno;
line(Anno) -> line1(Anno).
line1([Line|_]) when is_integer(Line) -> Line;
line1([_|Anno]) -> line1(Anno);
line1([]) -> undefined.
%% set(Key, Value, Anno) -> Anno.
%% get(Key, Anno) -> Value | undefined.
%% Generic accessing functions for the anno.
set(line, Val, Anno) -> set_line(Val, Anno);
set(Key, Val, Anno) when is_integer(Anno) ->
[Anno,{Key,Val}];
set(Key, Val, Anno) -> set1(Key, Val, Anno).
set1(Key, Val, [{Key,_Old}|Anno]) ->
[{Key,Val}|Anno];
set1(Key, Val, [A|Anno]) ->
[A|set1(Key, Val, Anno)];
set1(Key, Val, []) ->
[{Key,Val}].
get(line, Anno) -> line(Anno); %This is untagged
get(_Key, Anno) when is_integer(Anno) -> %This is untagged so not Key
undefined;
get(Key, Anno) -> get1(Key, Anno).
get1(Key, [{Key,Val}|_Anno]) -> Val;
get1(Key, [_|Anno]) -> get1(Key, Anno);
get1(_Key, []) -> undefined. | src/luerl_anno.erl | 0.545044 | 0.436682 | luerl_anno.erl | starcoder |
-module(sqerl).
-export([sql/1, sql/2, unsafe_sql/1, unsafe_sql/2, encode/1]).
%% @doc Generate an iolist (a tree of strings and/or binaries)
%% for a literal SQL statement that corresponds to the Sqerl
%% structure. If the structure is invalid, this function would
%% crash.
%%
%% This function does not allow writing literal `WHERE', `LIMIT'
%% and other trailing clauses. To write such clauses,
%% call `unsafe_sql/1' or `unsafe_sql/2'.
-spec sql(Sqerl::term()) -> iolist().
sql(Sqerl) ->
    sql2(Sqerl, true).

%% @doc Similar to sql/1, but accepts a boolean parameter
%% indicating if the return value should be a single binary
%% rather than an iolist.
-spec sql(Sqerl::term(), boolean()) -> binary() | iolist().
sql(Sqerl, true) ->
    iolist_to_binary(sql(Sqerl));
sql(Sqerl, false) ->
    sql(Sqerl).

%% @doc Generate an iolist (a tree of strings and/or binaries)
%% for a literal SQL statement that corresponds to the Sqerl
%% structure. If the structure is invalid, this function
%% throws an exception.
%%
%% This function allows writing literal `WHERE', `LIMIT'
%% and other trailing clauses, such as `{where, "a=" ++ Val}',
%% or `"WHERE a=" ++ Str ++ " LIMIT 5"'.
%%
%% Such clauses are unsafe because they expose you to SQL
%% injection attacks. When you use `unsafe_sql', make sure to
%% quote all your strings using the `encode/1' function.
%%
%% @throws {error, {unsafe_expression, Expr}}
-spec unsafe_sql(Sqerl::term()) -> iolist().
unsafe_sql(Sqerl) ->
    sql2(Sqerl, false).

%% @doc Similar to `unsafe_sql/1', but accepts a boolean parameter
%% indicating if the return value should be a binary or an iolist.
%%
%% @throws {error, {unsafe_expression, Expr}}
-spec unsafe_sql(Sqerl::term(), AsBinary::boolean()) -> binary() | iolist().
unsafe_sql(Sqerl, true) ->
    iolist_to_binary(unsafe_sql(Sqerl));
unsafe_sql(Sqerl, false) ->
    unsafe_sql(Sqerl).
%% @doc Calls `encode(Val, true)'.
-spec encode(Val::term()) -> binary().
encode(Val) ->
    encode(Val, true).

%% @doc Encode a value as a string or a binary to be embedded in
%% a SQL statement.
%%
%% This function can encode numbers, atoms, date/time/datetime values,
%% strings and binaries (which it escapes automatically).
%%
%% NOTE(review): despite the spec, unrecognized values fall through to
%% the last clause, which returns `{error, {unrecognized_value, {Val}}}'
%% instead of raising.
-spec encode(Val::term(), AsBinary::boolean()) -> string() | binary().
%% undefined/null encode as SQL NULL.
encode(Val, false) when Val =:= undefined; Val =:= null ->
    "null";
encode(Val, true) when Val =:= undefined; Val =:= null ->
    <<"null">>;
encode(Val, false) when is_binary(Val) ->
    binary_to_list(quote(Val));
encode(Val, true) when is_binary(Val) ->
    quote(Val);
%% Generic binary request: encode as a string first, then convert.
%% This clause also routes date/time tuples requested as binaries.
encode(Val, true) ->
    list_to_binary(encode(Val,false));
encode(Val, false) when is_atom(Val) ->
    quote(atom_to_list(Val));
encode(Val, false) when is_list(Val) ->
    quote(Val);
encode(Val, false) when is_integer(Val) ->
    integer_to_list(Val);
encode(Val, false) when is_float(Val) ->
    nicedecimal:format(Val);
%% Date/time tuples render as quoted, zero-padded SQL literals.
encode({datetime, Val}, AsBinary) ->
    encode(Val, AsBinary);
encode({{Year,Month,Day}, {Hour,Minute,Second}}, false) ->
    [Year1,Month1,Day1,Hour1,Minute1,Second1] =
        lists:map(fun two_digits/1,[Year, Month, Day, Hour, Minute,Second]),
    lists:flatten(io_lib:format("'~s-~s-~s ~s:~s:~s'",
                                [Year1,Month1,Day1,Hour1,Minute1,Second1]));
encode({date, {Year, Month, Day}}, false) ->
    [Year1,Month1,Day1] =
        lists:map(fun two_digits/1,[Year, Month, Day]),
    lists:flatten(io_lib:format("'~s-~s-~s'",[Year1,Month1,Day1]));
encode({time, {Hour, Minute, Second}}, false) ->
    [Hour1,Minute1,Second1] =
        lists:map(fun two_digits/1,[Hour, Minute, Second]),
    lists:flatten(io_lib:format("'~s:~s:~s'",[Hour1,Minute1,Second1]));
encode(Val, _AsBinary) ->
    {error, {unrecognized_value, {Val}}}.
%% Zero-pad a non-negative integer to at least two characters, or map
%% the padding over a list of integers.
two_digits(Values) when is_list(Values) ->
    [two_digits(V) || V <- Values];
two_digits(Value) ->
    case integer_to_list(Value) of
        [Single] -> [$0, Single];
        Longer -> Longer
    end.
%% @doc Render one Sqerl statement term to an iolist. Dispatches on
%% the statement type: select, union/union_all, insert, update and
%% delete. `Safe' controls whether literal string/binary expressions
%% are rejected (true for sql/1,2; false for unsafe_sql/1,2).
sql2({select, Tables}, Safe)->
    select(Tables, Safe);
sql2({select, Fields, {from, Tables}}, Safe) ->
    select(Fields, Tables, Safe);
sql2({select, Fields, {from, Tables}, {where, WhereExpr}}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, undefined, Safe);
sql2({select, Fields, {from, Tables}, {where, WhereExpr}, Extras}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Fields, {from, Tables}, WhereExpr, Extras}, Safe) ->
    select(undefined, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Fields, {from, Tables}, Extras}, Safe) ->
    select(undefined, Fields, Tables, undefined, Extras, Safe);
sql2({select, Tables, {where, WhereExpr}}, Safe) ->
    select(undefined, undefined, Tables, WhereExpr, Safe);
sql2({select, Tables, WhereExpr}, Safe) ->
    select(undefined, undefined, Tables, WhereExpr, Safe);
sql2({select, Modifier, Fields, {from, Tables}}, Safe) ->
    select(Modifier, Fields, Tables, Safe);
sql2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}}, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Safe);
sql2({select, Modifier, Fields, {from, Tables}, Extras}, Safe) ->
    select(Modifier, Fields, Tables, undefined, Extras, Safe);
sql2({select, Modifier, Fields, {from, Tables}, {where, WhereExpr}, Extras},
     Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Safe);
sql2({select, Modifier, Fields, {from, Tables}, WhereExpr, Extras}, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, Extras, Safe);
sql2({Select1, union, Select2}, Safe) ->
    [$(, sql2(Select1, Safe), <<") UNION (">>, sql2(Select2, Safe), $)];
sql2({Select1, union, Select2, {where, WhereExpr}}, Safe) ->
    [sql2({Select1, union, Select2}, Safe), where(WhereExpr, Safe)];
sql2({Select1, union, Select2, Extras}, Safe) ->
    [sql2({Select1, union, Select2}, Safe), extra_clause(Extras, Safe)];
sql2({Select1, union, Select2, {where, _} = Where, Extras}, Safe) ->
    [sql2({Select1, union, Select2, Where}, Safe), extra_clause(Extras, Safe)];
sql2({Select1, union_all, Select2}, Safe) ->
    [$(, sql2(Select1, Safe), <<") UNION ALL (">>, sql2(Select2, Safe), $)];
%% BUG FIX: the three union_all clauses below previously delegated to
%% plain `union', silently emitting "UNION" (which deduplicates rows)
%% instead of "UNION ALL" whenever a WHERE clause or extras were given.
sql2({Select1, union_all, Select2, {where, WhereExpr}}, Safe) ->
    [sql2({Select1, union_all, Select2}, Safe), where(WhereExpr, Safe)];
sql2({Select1, union_all, Select2, Extras}, Safe) ->
    [sql2({Select1, union_all, Select2}, Safe), extra_clause(Extras, Safe)];
sql2({Select1, union_all, Select2, {where, _} = Where, Extras}, Safe) ->
    [sql2({Select1, union_all, Select2, Where}, Safe), extra_clause(Extras, Safe)];
sql2({insert, Table, Params}, _Safe) ->
    insert(Table, Params);
sql2({insert, Table, Params, Returning}, Safe) ->
    insert(Table, Params, Returning, Safe);
sql2({update, Table, Props}, Safe) ->
    update(Table, Props, Safe);
sql2({update, Table, Props, {where, Where}}, Safe) ->
    update(Table, Props, Where, Safe);
sql2({update, Table, Props, Where}, Safe) ->
    update(Table, Props, Where, Safe);
sql2({delete, {from, Table}}, Safe) ->
    delete(Table, Safe);
sql2({delete, Table}, Safe) ->
    delete(Table, Safe);
sql2({delete, {from, Table}, {where, Where}}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, {where, Where}}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, Where}, Safe) ->
    delete(Table, undefined, Where, Safe);
sql2({delete, Table, Using, Where}, Safe) ->
    delete(Table, Using, Where, Safe);
sql2({delete, Table, Using, Where, Extras}, Safe) ->
    delete(Table, Using, Where, Extras, Safe).
%% select/2..select/5 fill in defaults for the omitted clauses and
%% delegate to select/6, which renders the statement.
select(Fields, Safe) ->
    select(undefined, Fields, undefined, undefined, undefined, Safe).

select(Fields, Tables, Safe) ->
    select(undefined, Fields, Tables, undefined, undefined, Safe).

select(Modifier, Fields, Tables, Safe) ->
    select(Modifier, Fields, Tables, undefined, undefined, Safe).

select(Modifier, Fields, Tables, WhereExpr, Safe) ->
    select(Modifier, Fields, Tables, WhereExpr, undefined, Safe).

%% Render: SELECT [Modifier] Fields [FROM Tables] [WHERE ...] [Extras].
select(Modifier, Fields, Tables, WhereExpr, Extras, Safe) ->
    S1 = <<"SELECT ">>,
    S2 = case Modifier of
             undefined -> S1;
             Modifier ->
                 %% distinct/all are uppercased; other modifiers pass through.
                 Modifier1 = case Modifier of
                                 distinct -> 'DISTINCT';
                                 'all' -> 'ALL';
                                 Other -> Other
                             end,
                 [S1, convert(Modifier1), $\s]
         end,
    S3 = [S2, make_list(Fields, fun(Val) -> expr2(Val, Safe) end)],
    S4 = case Tables of
             undefined -> S3;
             _Other -> [S3, <<" FROM ">>,
                        make_list(Tables, fun(Val) -> join(Val, Safe) end)]
         end,
    %% Note: where/2 returns [] (not 'undefined') for an absent where
    %% expression, so the 'undefined' branch is effectively dead;
    %% appending the empty iolist is harmless.
    S5 = case where(WhereExpr, Safe) of
             undefined -> S4;
             WhereClause -> [S4, WhereClause]
         end,
    case extra_clause(Extras, Safe) of
        undefined -> S5;
        Expr -> [S5, Expr]
    end.

%% Render a table reference, which may carry one or more JOIN clauses.
join({Table, Join, Table2, JoinExpr}, Safe) ->
    [ expr2(Table, Safe),
      join(Join),
      expr2(Table2, Safe),
      <<" ON ">>,
      make_list(JoinExpr, fun(Val) -> expr(Val, Safe) end) ];
join({Table, Joins}, Safe) when is_list(Joins) ->
    S1 = lists:map(fun({Join, Table2, JoinExpr}) ->
                       [ join(Join),
                         expr2(Table2, Safe),
                         <<" ON ">>,
                         make_list(JoinExpr, fun(Val) -> expr(Val, Safe) end)
                       ]
                   end, Joins),
    [expr2(Table, Safe), S1];
join(Table, Safe) ->
    expr2(Table, Safe).

%% Map join-type terms onto their SQL keywords.
join(join) ->
    <<" JOIN ">>;
join({left, join}) ->
    <<" LEFT JOIN ">>;
join({inner, join}) ->
    <<" INNER JOIN ">>;
join({right, join}) ->
    <<" RIGHT JOIN ">>;
join({left, outer, join}) ->
    <<" LEFT OUTER JOIN ">>;
join({right, outer, join}) ->
    <<" RIGHT OUTER JOIN ">>;
join({full, outer, join}) ->
    <<" FULL OUTER JOIN ">>;
join({cross, join}) ->
    <<" CROSS JOIN ">>.
%% Render the WHERE clause. Returns [] when WhereExpr is undefined.
%% Literal strings/binaries are only accepted in unsafe mode; an
%% existing leading "WHERE "/"where " prefix is normalized.
where(undefined, _) -> [];
where(Expr, true) when is_list(Expr); is_binary(Expr) ->
    throw({error, {unsafe_expression, Expr}});
where(Expr, false) when is_binary(Expr) ->
    Res = case Expr of
              <<"WHERE ", _Rest/binary>> = Expr1 ->
                  Expr1;
              <<"where ", Rest/binary>> ->
                  <<"WHERE ", Rest/binary>>;
              Expr1 ->
                  <<"WHERE ", Expr1/binary>>
          end,
    [$\s, Res];
where(Exprs, false) when is_list(Exprs)->
    where(list_to_binary(Exprs), false);
where(Expr, Safe) when is_tuple(Expr) ->
    case expr(Expr, Safe) of
        undefined -> [];
        Other -> [<<" WHERE ">>, Other]
    end.

%% Render trailing clauses: LIMIT, GROUP BY [... HAVING], ORDER BY,
%% literal strings (unsafe mode only), or a list of any of these.
extra_clause(undefined, _Safe) -> undefined;
extra_clause(Expr, true) when is_binary(Expr) ->
    throw({error, {unsafe_expression, Expr}});
extra_clause(Expr, false) when is_binary(Expr) -> [$\s, Expr];
extra_clause([Expr], false) when is_binary(Expr) -> [$\s, Expr];
extra_clause(Exprs, Safe) when is_list(Exprs) ->
    case is_tuple(hd(Exprs)) of
        true ->
            %% BUG FIX: this previously called extra_clause2(Exprs, false),
            %% hard-coding unsafe mode and thereby letting literal
            %% sub-clauses through even in safe mode. Propagate Safe so
            %% nested literals are rejected under sql/1,2, as the
            %% (previously unreachable) clause below intended.
            extra_clause2(Exprs, Safe);
        false ->
            if not Safe ->
                   [$\s, list_to_binary(Exprs)];
               true ->
                   throw({error, {unsafe_expression, Exprs}})
            end
    end;
%% Shadowed by the preceding list clause; kept for documentation of intent.
extra_clause(Exprs, true) when is_list(Exprs) ->
    extra_clause2(Exprs, true);
extra_clause({limit, Num}, _Safe) ->
    [<<" LIMIT ">>, encode(Num)];
extra_clause({limit, Offset, Num}, _Safe) ->
    [<<" LIMIT ">>, encode(Offset), <<", ">> , encode(Num)];
extra_clause({group_by, ColNames}, _Safe) ->
    [<<" GROUP BY ">>, make_list(ColNames, fun convert/1)];
extra_clause({group_by, ColNames, having, Expr}, Safe) ->
    [extra_clause({group_by, ColNames}, Safe), <<" HAVING ">>,
     expr(Expr, Safe)];
extra_clause({order_by, ColNames}, Safe) ->
    [<<" ORDER BY ">>,
     make_list(ColNames, fun({Name, Modifier}) when Modifier =:= 'asc' ->
                             [expr(Name, Safe), $\s, convert('ASC')];
                            ({Name, Modifier}) when Modifier =:= 'desc' ->
                             [expr(Name, Safe), $\s, convert('DESC')];
                            (Name) ->
                             expr(Name, Safe)
                         end)].

%% Render a list of extra clauses, skipping undefined entries.
extra_clause2(Exprs, Safe) ->
    Res = [extra_clause(Expr,Safe) || Expr <- Exprs, Expr =/= undefined],
    [Res].
%% Render INSERT for a property list of {Column, Value} pairs, or for
%% a {Columns, Records} batch where each record is a tuple or a list.
insert(Table, Params) when is_list(Params) ->
    Names = make_list(Params, fun({Name, _}) -> convert(Name) end),
    Values = [$(, make_list(Params, fun({_, Val}) -> encode(Val) end), $)],
    make_insert_query(Table, Names, Values);
insert(Table, {Fields, Records}) ->
    Names = make_list(Fields, fun convert/1),
    Values = make_list(Records, fun(Record) ->
                                    Record1 = if is_tuple(Record) -> tuple_to_list(Record);
                                                 true -> Record
                                              end,
                                    [$(, make_list(Record1, fun encode/1), $)]
                                end),
    make_insert_query(Table, Names, Values).

%% INSERT with an optional RETURNING clause.
insert(Table, Params, undefined, _Safe) ->
    insert(Table, Params);
insert(Table, Params, {returning, Ret}, Safe) ->
    insert(Table, Params, Ret, Safe);
insert(Table, Params, Returning, Safe) ->
    [insert(Table, Params), <<" RETURNING ">>, expr(Returning, Safe)].

%% Assemble "INSERT INTO Table(Names) VALUES Values".
make_insert_query(Table, Names, Values) ->
    [<<"INSERT INTO ">>, convert(Table),
     <<"(">>, Names, <<") VALUES ">>, Values].

%% Render "UPDATE Table SET f1 = v1, ... [WHERE ...]".
update(Table, Props, Safe) ->
    update(Table, Props, undefined, Safe).

update(Table, Props, Where, Safe) when not is_list(Props) ->
    update(Table, [Props], Where, Safe);
update(Table, Props, Where, Safe) ->
    %% A tuple table reference carries join information.
    S1 = case Table of
             Table when is_tuple(Table) ->
                 join(Table, Safe);
             _Other ->
                 convert(Table)
         end,
    S2 = make_list(Props, fun({Field, Val}) ->
                              [convert(Field), <<" = ">>, expr(Val, Safe)]
                          end),
    [<<"UPDATE ">>, S1, <<" SET ">>, S2, where(Where, Safe)].

%% Render "DELETE FROM Table [USING ...] [WHERE ...] [Extras]".
delete(Table, Safe) ->
    delete(Table, undefined, undefined, undefined, Safe).

delete(Table, Using, WhereExpr, Safe) ->
    delete(Table, Using, WhereExpr, undefined, Safe).

delete(Table, Using, WhereExpr, Extras, Safe) ->
    S1 = case Table of
             Table when is_tuple(Table) ->
                 join(Table, Safe);
             _Other ->
                 convert(Table)
         end,
    S2 = [<<"DELETE FROM ">>, S1],
    S3 = if Using =:= undefined -> S2;
            true -> [S2, <<" USING ">>, make_list(Using, fun convert/1)]
         end,
    %% where/2 returns [] for an absent expression, so the 'undefined'
    %% branch is effectively dead; appending [] is harmless.
    S4 = case where(WhereExpr, Safe) of
             undefined -> S3;
             WhereClause -> [S3, WhereClause]
         end,
    if Extras =:= undefined ->
           S4;
       true ->
           [S4, extra_clause(Extras, Safe)]
    end.
%% Convert an atom (table, column or keyword name) to a binary.
convert(Val) when is_atom(Val)->
    atom_to_binary(Val, utf8).

%% Apply ConvertFun to every element and join the results with ", ";
%% a non-list argument is converted on its own.
make_list(Vals, ConvertFun) when is_list(Vals) ->
    string:join([[ConvertFun(Val)] || Val <- Vals],", ");
make_list(Val, ConvertFun) ->
    ConvertFun(Val).

%% Render a Sqerl expression term. Order of clauses matters: special
%% forms (NOT, qualified names, AS, calls, subqueries, IN lists) are
%% tried before the generic binary-operator and literal fallbacks.
expr(undefined, _Safe) -> <<"NULL">>;
expr({Not, Expr}, Safe) when (Not =:= 'not' orelse Not =:= '!') ->
    [<<"NOT ">>, check_expr(Expr, Safe)];
expr({Table, Field}, _Safe) when is_atom(Table), is_atom(Field) ->
    [convert(Table), $., convert(Field)];
expr({Expr1, as, Alias}, Safe) when is_atom(Alias) ->
    [expr2(Expr1, Safe), <<" AS ">>, convert(Alias)];
expr({call, FuncName, []}, _Safe) ->
    [convert(FuncName), <<"()">>];
expr({call, FuncName, Params}, _Safe) ->
    [convert(FuncName), $(, make_list(Params, fun param/1), $)];
%% Subquery right-hand sides, one clause per recognized statement shape.
expr({Val, Op, {select, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {select, _, _, _, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
expr({Val, Op, {_, union, _, _, _} = Subquery}, Safe) ->
    subquery(Val, Op, Subquery, Safe);
%% "x IN ()" is always false; render the SQL constant 0.
expr({_, in, []}, _Safe) -> <<"0">>;
expr({Val, Op, Values}, Safe) when (Op =:= in orelse
                                    Op =:= any orelse
                                    Op =:= some) andalso is_list(Values) ->
    [expr2(Val, Safe), subquery_op(Op), make_list(Values, fun encode/1), $)];
%% Drop an undefined operand of a binary logical operator.
%% NOTE(review): the guard accepts 'and' and 'not'; 'not' is unary, so
%% this looks like it was meant to be 'or' — confirm before changing.
expr({undefined, Op, Expr2}, Safe) when Op =:= 'and'; Op =:= 'not' ->
    expr(Expr2, Safe);
expr({Expr1, Op, undefined}, Safe) when Op =:= 'and'; Op =:= 'not' ->
    expr(Expr1, Safe);
expr({Expr1, Op, Expr2}, Safe) ->
    %% Logical operands go through check_expr/2 (which enforces Safe);
    %% everything else through expr2/2.
    {B1, B2} = if (Op =:= 'and' orelse Op =:= 'or') ->
                      {check_expr(Expr1, Safe), check_expr(Expr2, Safe)};
                  true ->
                      {expr2(Expr1, Safe), expr2(Expr2, Safe)}
               end,
    [$(, B1, $\s, op(Op), $\s, B2, $)];
expr({list, Vals}, _Safe) when is_list(Vals) ->
    [$(, make_list(Vals, fun encode/1), $)];
expr({Op, Exprs}, Safe) when is_list(Exprs) ->
    %% N-ary operator: join the rendered sub-expressions with Op.
    Res = [[expr(Expr,Safe)] || Expr <- Exprs ],
    [$(, string:join(Res,[$\s, op(Op), $\s]), $)];
expr('?', _Safe) -> $?;
expr(null, _Safe) -> <<"NULL">>;
expr(Val, _Safe) when is_atom(Val) -> convert(Val);
expr(Val, _Safe) -> encode(Val).
%% Literal string/binary sub-expressions are only allowed in unsafe
%% mode; when allowed they are parenthesized verbatim.
check_expr(Expr, Safe) when is_list(Expr); is_binary(Expr) ->
    if Safe -> throw({error, {unsafe_expression, Expr}});
       true -> iolist_to_binary([$(, Expr, $)])
    end;
check_expr(Expr, Safe) -> expr(Expr, Safe).

%% Map Erlang operator atoms onto their SQL keyword spellings.
op(Op) -> convert(op1(Op)).

op1('and') -> 'AND';
op1('or') -> 'OR';
op1(like) -> 'LIKE';
op1(Op) -> Op.

%% Render `Val Op (Subquery)'; subquery_op/1 supplies the opening
%% parenthesis, closed here.
subquery(Val, Op, Subquery, Safe) ->
    [expr2(Val, Safe), subquery_op(Op), sql2(Subquery, Safe), $)].

subquery_op(in) -> <<" IN (">>;
subquery_op(any) -> <<" ANY (">>;
subquery_op(some) -> <<" SOME (">>.

%% Like expr/2, but bare atoms are treated as identifiers, not quoted
%% literals.
expr2(undefined, _Safe) -> <<"NULL">>;
expr2(Expr, _Safe) when is_atom(Expr) -> convert(Expr);
expr2(Expr, Safe) -> expr(Expr, Safe).

%% Encode a function-call parameter: nested calls, named parameters
%% (Key := Value), bare identifiers, or literal values.
param({call, FuncName, []}) ->
    [convert(FuncName), <<"()">>];
param({call, FuncName, Params}) ->
    [convert(FuncName), $(, make_list(Params, fun param/1), $)];
param({Key, Value}) when is_atom(Key) ->
    [convert(Key), <<" := ">>, encode(Value)];
param(Key) when is_atom(Key) ->
    convert(Key);
param(Value) ->
    encode(Value).
%% Single-quote and escape a string or binary so it can be embedded in
%% a SQL statement. Binaries round-trip through a list and back.
quote(String) when is_list(String) ->
    % The loop builds the escaped text in reverse; reverse once here
    % and wrap in single quotes.
    [$' | lists:reverse([$' | quote(String, [])])];
quote(Bin) when is_binary(Bin) ->
    list_to_binary(quote(binary_to_list(Bin))).

%% Escape loop: backslash-escape NUL, newline, carriage return,
%% backslash, quotes and Ctrl-Z, accumulating in reverse onto Acc.
quote([], Acc) ->
    Acc;
quote([$\0 | Rest], Acc) ->
    quote(Rest, [$0, $\\ | Acc]);
quote([$\n | Rest], Acc) ->
    quote(Rest, [$n, $\\ | Acc]);
quote([$\r | Rest], Acc) ->
    quote(Rest, [$r, $\\ | Acc]);
quote([$\\ | Rest], Acc) ->
    quote(Rest, [$\\ , $\\ | Acc]);
quote([$' | Rest], Acc) ->
    quote(Rest, [$', $\\ | Acc]);
quote([$" | Rest], Acc) ->
    quote(Rest, [$", $\\ | Acc]);
quote([$\^Z | Rest], Acc) ->
    quote(Rest, [$Z, $\\ | Acc]);
quote([C | Rest], Acc) ->
    quote(Rest, [C | Acc]). | src/sqerl.erl | 0.611498 | 0.590455 | sqerl.erl | starcoder |
-module(guard).
-compile([export_all, nowarn_export_all]).
%% NOTE(review): judging by the path (test/should_pass) this module is
%% a typechecker test fixture: each function narrows a union type with
%% a type-test guard BIF and falls back to a distinct atom. The exact
%% guard forms are the point of the fixture, so the code is untouched.
-spec atom(atom() | integer()) -> atom() | not_atom.
atom(A) when is_atom(A) -> A;
atom(_) -> not_atom.

%% Same as atom/1 but with the guard BIF fully qualified.
-spec atom2(atom() | integer()) -> atom() | not_atom.
atom2(A) when erlang:is_atom(A) -> A;
atom2(_) -> not_atom.

-spec binary(binary() | atom()) -> binary() | not_binary.
binary(B) when is_binary(B) -> B;
binary(_) -> not_binary.

-spec bitstring(bitstring() | atom()) -> bitstring() | not_bitstring.
bitstring(B) when is_bitstring(B) -> B;
bitstring(_) -> not_bitstring.

-spec boolean(boolean() | atom()) -> boolean() | not_boolean.
boolean(B) when is_boolean(B) -> B;
boolean(_) -> not_boolean.

-spec float(float() | atom()) -> float() | not_float.
float(F) when is_float(F) -> F;
float(_) -> not_float.

-spec integer(integer() | atom()) -> integer() | not_integer.
integer(I) when is_integer(I) -> I;
integer(_) -> not_integer.

-spec number(number() | atom()) -> number() | not_number.
number(N) when is_number(N) -> N;
number(_) -> not_number.

-spec list(list() | atom()) -> list() | not_list.
list(L) when is_list(L) -> L;
list(_) -> not_list.

-spec map(map() | atom()) -> map() | not_map.
map(M) when is_map(M) -> M;
map(_) -> not_map.

-spec pid(pid() | atom()) -> pid() | not_pid.
pid(P) when is_pid(P) -> P;
pid(_) -> not_pid.

-spec port(port() | atom()) -> port() | not_port.
port(P) when is_port(P) -> P;
port(_) -> not_port.

-spec reference(reference() | atom()) -> reference() | not_reference.
reference(R) when is_reference(R) -> R;
reference(_) -> not_reference.

-spec tuple(tuple() | atom()) -> tuple() | not_tuple.
tuple(T) when is_tuple(T) -> T;
tuple(_) -> not_tuple.

-spec function(function() | atom()) -> function() | not_function.
function(F) when is_function(F) -> F;
function(_) -> not_function.

%% is_function/2 additionally constrains the arity (1 here).
-spec function2(fun((any()) -> any()) | atom()) -> fun((any()) -> any()) | not_function.
function2(F) when is_function(F, 1) -> F;
function2(_) -> not_function.
%% Single-field record used by the is_record/2,3 narrowing tests below.
-record(r, {
    f
}).

-spec record(#r{} | atom()) -> #r{} | not_r.
record(R) when is_record(R, r) -> R;
record(_) -> not_r.

%% NOTE(review): is_record/3's third argument is the tuple size; #r{}
%% has size 2 (tag + one field), so `is_record(R, r, 1)` is never true
%% at runtime. Presumably only the typechecking of the guard is under
%% test here -- confirm the size argument is deliberate.
-spec record2(#r{} | atom()) -> #r{} | not_r.
record2(R) when is_record(R, r, 1) -> R;
record2(_) -> not_r.
%% Narrowing across several clauses and via the guard connectives:
%% `or`/`orelse`/`;` (alternatives) and `and`/`andalso`/`,`
%% (conjunction within one guard sequence).
-spec multiple(integer() | float() | atom()) -> integer() | float() | not_number.
multiple(I) when is_integer(I) -> I;
multiple(F) when is_float(F) -> F;
multiple(_) -> not_number.

-spec or_test(integer() | float() | atom()) -> integer() | float() | not_number.
or_test(N) when is_integer(N) or is_float(N) -> N;
or_test(_) -> not_number.

-spec orelse1(integer() | float() | atom()) -> integer() | float() | not_number.
orelse1(N) when is_integer(N) orelse is_float(N) -> N;
orelse1(_) -> not_number.

%% `;` between guard sequences is the declarative form of `orelse`.
-spec orelse2(integer() | float() | atom()) -> integer() | float() | not_number.
orelse2(N) when is_integer(N); is_float(N) -> N;
orelse2(_) -> not_number.

-spec and_test(integer() | atom()) -> integer() | not_integer.
and_test(N) when is_integer(N) and is_number(N) -> N;
and_test(_) -> not_integer.

-spec andalso1(integer() | atom()) -> integer() | not_integer.
andalso1(N) when is_integer(N) andalso is_number(N) -> N;
andalso1(_) -> not_integer.

%% `,` between guard tests is the declarative form of `andalso`.
-spec andalso2(integer() | atom()) -> integer() | not_integer.
andalso2(N) when is_integer(N), is_number(N) -> N;
andalso2(_) -> not_integer.

%% Both arguments must be narrowed to integer() for X + Y to typecheck.
-spec good_andalso(integer() | atom(), integer() | atom()) -> integer().
good_andalso(X, Y) when is_integer(X) andalso is_integer(Y) -> X + Y;
good_andalso(_, _) -> 42.
%% One clause per type-test guard BIF; the spec promises the same wide
%% union out as in, so every narrowed clause must still satisfy it.
-spec all(All) -> All when
    All :: atom()
         | binary()
         | bitstring()
         | boolean()
         | float()
         | function()
         | integer()
         | list()
         | map()
         | number()
         | pid()
         | port()
         | #r{}
         | reference()
         | tuple().
all(X) when is_atom(X) -> X;
all(X) when is_binary(X) -> X;
all(X) when is_bitstring(X) -> X;
all(X) when is_boolean(X) -> X;
all(X) when is_float(X) -> X;
all(X) when is_function(X) -> X;
all(X) when is_function(X, 1) -> X;
all(X) when is_integer(X) -> X;
all(X) when is_list(X) -> X;
all(X) when is_map(X) -> X;
all(X) when is_number(X) -> X;
all(X) when is_pid(X) -> X;
all(X) when is_port(X) -> X;
all(X) when is_record(X, r) -> X;
all(X) when is_record(X, r, 1) -> X;
all(X) when is_reference(X) -> X;
all(X) when is_tuple(X) -> X. | test/should_pass/guard.erl | 0.522689 | 0.499695 | guard.erl | starcoder |
%% Commont Tests Data helper
-module(ctdh).
-export([
match/2,
like/2
]).
-export([
strict_diff/2,
soft_diff/2
]).
-export([
eq/1,
map_like/1,
map_equals/1,
list_equals/1,
list_contains_all/1,
list_contains_exact/1,
check/1,
check/2,
pipe/2,
any/0
]).
-export([
strict/1,
soft/1,
change_type/2
]).
%% =============================================================================
%% Ct API
%% =============================================================================
%% Assert that Given matches Expected under strict semantics (see
%% strict_diff/2); on mismatch the computed diff is reported through
%% cth:assert/3 (a project helper -- presumably it fails the running
%% test case when the condition is false). Returns Given so the call
%% can sit inside a pipeline.
match(Expected, Given) ->
    Diff = strict_diff(Expected, Given),
    cth:assert(Diff =:= [],
        "given value is not equal to expected~n"
        "diff : ~p", [Diff]),
    Given.

%% Like match/2 but with soft (containment) semantics: extra map keys
%% and extra list elements in Given are tolerated.
like(Expected, Given) ->
    Diff = soft_diff(Expected, Given),
    cth:assert(Diff =:= [],
        "given value is not equal to expected~n"
        "diff : ~p", [Diff]),
    Given.
%% Compute a JSON-Patch-like diff (list of op maps) between Expected
%% and Given; plain maps and lists in Expected compare exactly.
strict_diff(Expected, Given) ->
    diff(Expected, Given, fun strict/1).

%% Same, but plain maps compare as "contains these keys" and plain
%% lists as "contains all these elements" (order-insensitive).
soft_diff(Expected, Given) ->
    diff(Expected, Given, fun soft/1).

%% Comparator factories: promote a plain expected term to the
%% comparator fun the diff engine expects.
strict(M) when is_map(M) -> map_equals(M);
strict(L) when is_list(L) -> list_equals(L);
strict(V) -> eq(V).

soft(M) when is_map(M) -> map_like(M);
soft(L) when is_list(L) -> list_contains_all(L);
soft(V) -> eq(V).
%% =============================================================================
%% Diff API
%% =============================================================================
%% Diff engine entry point. A comparator is a
%%   fun(To, Path, Log, DiffF) -> Log'
%% where Path is the reversed path accumulator, Log the reversed list
%% of op maps collected so far, and DiffF the factory (strict/1 or
%% soft/1) used to promote plain nested terms. The final op list is
%% reversed back into emission order.
diff(Expected, Given, DiffF) -> lists:reverse(diff_(Expected, Given, [], [], DiffF)).

%% An expected value that is already a fun is used as the comparator
%% directly; any other term is promoted via DiffF first.
diff_(Fun, To, Path, Log, DiffF) when is_function(Fun) -> Fun(To, Path, Log, DiffF);
diff_(From, To, Path, Log, DiffF) -> diff_(DiffF(From), To, Path, Log, DiffF).
%% =============================================================================
%% Comparators
%% =============================================================================
%% Comparator for exact equality: logs a `replace` op whenever the
%% given value differs (=:=) from the expected one.
eq(Expected) ->
    fun(Actual, Path, Log, _DiffF) ->
        if
            Expected =:= Actual ->
                Log;
            true ->
                [#{op => replace, path => path(Path), value => Actual} | Log]
        end
    end.
%% Strict map comparator: every key of From must exist in To with a
%% matching value; extra keys in To become `add` ops, and a removed key
%% whose value reappears under a new key is reported as a `move`.
map_equals(From) -> change_type(map_equals_(From), fun strict/1).

map_equals_(From) when is_map(From) ->
    fun (To, Path, Log, DiffF) when is_map(To) ->
            FromKeys = maps:keys(From),
            %% Pairs present in To but not in From: candidates for move
            %% detection; whatever is left unconsumed becomes `add` ops.
            NewPairs = maps:to_list(maps:without(FromKeys, To)),
            {Log2, NewPairs2} = maps:fold(
                fun(K, FromV, {L, New}) ->
                    case maps:find(K, To) of
                        {ok, ToV} -> {diff_(FromV, ToV, [K|Path], L, DiffF), New};
                        %% BUGFIX: thread the *remaining* candidates
                        %% (New) through, not the original NewPairs, so
                        %% a pair consumed by one move cannot be matched
                        %% again by a later missing key.
                        error -> maybe_moved(K, FromV, New, Path, L)
                    end
                end, {Log, NewPairs}, From),
            lists:foldl(fun({K, V}, L) ->
                [#{op => add, path => path([K|Path]), value => V}|L]
            end, Log2, NewPairs2);
        (To, Path, Log, DiffF) ->
            %% Non-map given value: degrade to a plain equality check.
            (eq(From))(To, Path, Log, DiffF)
    end.
%% Soft map comparator: only the keys present in From are compared;
%% extra keys in To are stripped before delegating to the strict map
%% comparator, which is re-typed so nested values also recurse softly.
map_like(From) when is_map(From) ->
    fun (To, Path, Log, DiffF) when is_map(To) ->
            FromKeys = maps:keys(From),
            Fun = change_type(map_equals_(From), fun soft/1),
            Fun(maps:with(FromKeys, To), Path, Log, DiffF);
        (To, Path, Log, DiffF) ->
            %% Non-map given value: degrade to a plain equality check.
            (eq(From))(To, Path, Log, DiffF)
    end.
%% Comparator from a validation fun: Fun(To) must return ok or
%% {error, Reason}; on error a `check` op carrying the reason and the
%% offending value is logged.
check(Fun) -> fun(To, Path, Log, _DiffF) ->
    case Fun(To) of
        ok -> Log;
        {error, Reason} -> [#{op => check, path => path(Path), value => To, error => Reason}|Log]
    end
end.

%% Comparator from a boolean predicate plus a fixed error message.
check(BoolFun, Msg) ->
    check(fun(To) ->
        case BoolFun(To) of
            true -> ok;
            false -> {error, Msg}
        end
    end).

%% Decode the given value first (DecoderFun returns {ok, Decoded} or
%% {error, Reason}) and then diff the decoded value against Expected;
%% a decoding failure is logged as a `check` op.
pipe(DecoderFun, Expected) ->
    fun(To, Path, Log, DiffF) ->
        case DecoderFun(To) of
            {ok, DecodedTo} ->
                diff_(Expected, DecodedTo, Path, Log, DiffF);
            {error, Reason} ->
                [#{op => check, path => path(Path), value => To, error => Reason}|Log]
        end
    end.

%% Comparator that accepts any value.
any() -> check(fun(_) -> ok end).
%% Strict list comparator: element-wise, position-sensitive diff.
list_equals(From) when is_list(From) ->
    fun(To, Path, Log, DiffF) when is_list(To) -> list_diff_(From, To, Path, Log, DiffF, 0);
       (To, Path, Log, DiffF) -> (eq(From))(To, Path, Log, DiffF)
    end.

list_diff_([From|RestF], [To|RestT], Path, Log, DiffF, Cnt) ->
    list_diff_(RestF, RestT, Path, diff_(From, To, [Cnt|Path], Log, DiffF), DiffF, Cnt+1);
list_diff_([_|Rest], [], Path, Log, DiffF, Cnt) ->
    %% Expected list is longer: report each missing index as a remove.
    NewLog = [#{op => remove, path => path([Cnt|Path])}|Log],
    list_diff_(Rest, [], Path, NewLog, DiffF, Cnt+1);
list_diff_([], Rest, Path, Log, _DiffF, _Cnt) ->
    %% Given list is longer: report the surplus elements with the
    %% JSON-Patch end-of-array marker "-".
    lists:foldl(fun(V, L) ->
        [#{op => add, path => path(["-"|Path]), value => V}|L]
    end, Log, Rest).
%% Soft list comparator: every expected element must match some still
%% unconsumed element of the given list, in any order; extras in the
%% given list are ignored. Each unmatched expected element is logged
%% as a `remove` op at its own index.
list_contains_all(From) when is_list(From) ->
    fun(To, Path, Log, DiffF) when is_list(To) -> list_contains_all_(From, To, Path, Log, DiffF, 0);
       (To, Path, Log, DiffF) -> (eq(From))(To, Path, Log, DiffF)
    end.

list_contains_all_([], _To, _Path, Log, _DiffF, _Cnt) -> Log;
list_contains_all_([H|T], To, Path, Log, DiffF, Cnt) ->
    %% take_first/2 returns {Match, Rest} on success or the unchanged
    %% list on failure; Rest is bound in both case branches, so it is
    %% safely exported for the recursive call.
    NewLog = case take_first(fun(V) -> diff(H, V, DiffF) =:= [] end, To) of
        {_, Rest} -> Log;
        Rest -> [#{op => remove, path => path([Cnt|Path])}|Log]
    end,
    list_contains_all_(T, Rest, Path, NewLog, DiffF, Cnt + 1).
%% Exact multiset list comparator: like list_contains_all/1 (order
%% independent) but surplus elements of the given list are reported as
%% `add` ops instead of being ignored.
list_contains_exact(From) when is_list(From) ->
    fun(To, Path, Log, DiffF) when is_list(To) -> list_contains_exact_(From, To, Path, Log, DiffF, 0);
       (To, Path, Log, DiffF) -> (eq(From))(To, Path, Log, DiffF)
    end.

list_contains_exact_([], [], _Path, Log, _DiffF, _Cnt) -> Log;
list_contains_exact_([], To, Path, Log, _DiffF, _Cnt) ->
    %% All expected elements consumed: what remains of the given list
    %% is surplus, reported with the end-of-array marker "-".
    lists:foldl(fun(V, L) ->
        [#{op => add, path => path(["-"|Path]), value => V}|L]
    end, Log, To);
list_contains_exact_([H|T], To, Path, Log, DiffF, Cnt) ->
    %% Same matching scheme as list_contains_all_/6 (see above).
    NewLog = case take_first(fun(V) -> diff(H, V, DiffF) =:= [] end, To) of
        {_, Rest} -> Log;
        Rest -> [#{op => remove, path => path([Cnt|Path])}|Log]
    end,
    list_contains_exact_(T, Rest, Path, NewLog, DiffF, Cnt + 1).
%% =============================================================================
%% Internal functions
%% =============================================================================
%% Rebuild a comparator so that nested values recurse with DiffFun
%% instead of whatever diff strategy the caller threads through; used
%% to flip a strict map comparison to soft recursion and vice versa.
%% (Arguments are purely positional: To, Path, Log, DiffF.)
change_type(Comparator, DiffFun) ->
    fun(To, Path, Log, _OuterDiffF) ->
        Comparator(To, Path, Log, DiffFun)
    end.
%% A key missing from the given map may in fact have been renamed:
%% scan the surplus (new) pairs for one carrying the same value and,
%% if found, log a `move` op and consume that pair; otherwise log a
%% `remove`. Returns {Log', RemainingPairs}.
maybe_moved(K, FromV, Pairs, Path, L) ->
    maybe_moved_(K, FromV, Pairs, Path, L, []).

maybe_moved_(K, _V, [], Path, Log, Acc) ->
    %% No pair with the same value: the key was genuinely removed.
    {[#{op => remove, path => path([K|Path])}|Log], Acc};
maybe_moved_(K, V, [{NewK, V}|Rest], Path, Log, Acc) ->
    %% Same value under a new key: a move. Note the scanned prefix Acc
    %% is re-appended in reversed order (pair order is not significant
    %% to the callers).
    {[#{op => move, path => path([NewK|Path]), from => path([K|Path])}|Log],
     Acc ++ Rest};
maybe_moved_(K, V, [Other|Rest], Path, Log, Acc) ->
    maybe_moved_(K, V, Rest, Path, Log, [Other|Acc]).
%% Render a reversed path accumulator (innermost segment first) as a
%% JSON-Pointer-like binary, e.g. [c, 0, a] -> <<"/a/0/c">>.
path(RevPath) ->
    Segments = lists:foldl(
        fun(Seg, Acc) -> [$/, to_iodata(Seg) | Acc] end,
        [], RevPath),
    iolist_to_binary(Segments).

%% Convert one path segment to iodata; binaries/strings pass through.
to_iodata(Seg) ->
    if
        is_atom(Seg)    -> atom_to_list(Seg);
        is_integer(Seg) -> integer_to_list(Seg);
        true            -> Seg
    end.
%% Find the first element satisfying Pred. Returns {Element, Rest}
%% (Rest in original order, element removed) on success; returns the
%% whole list unchanged when nothing matches. Note the asymmetric
%% return shape -- callers distinguish the two by pattern matching.
take_first(Pred, List) -> take_first_(Pred, List, []).

take_first_(_Pred, [], Seen) -> lists:reverse(Seen);
take_first_(Pred, [H | T], Seen) ->
    case Pred(H) of
        %% lists:reverse/2 splices the already-scanned prefix back in
        %% front of the tail, preserving the original order.
        true -> {H, lists:reverse(Seen, T)};
        false -> take_first_(Pred, T, [H | Seen])
    end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Strict diff op generation: replace, move, add, remove, and the
%% nested map/list combinations.
diff_test_() ->
    [
     ?_test(?assertEqual([], strict_diff(#{a => 1}, #{a => 1}))),
     ?_test(?assertEqual(
        [
         #{ op => replace, path => <<"/a">>, value => 2}
        ],
        strict_diff(#{a => 1}, #{a => 2}))),
     ?_test(?assertEqual(
        [
         #{ op => move, path => <<"/b">>, from => <<"/a">>}
        ],
        strict_diff(#{a => 1}, #{b => 1}))),
     ?_test(?assertEqual(
        [
         #{ op => add, path => <<"/a">>, value => 1}
        ],
        strict_diff(#{}, #{a => 1}))),
     ?_test(?assertEqual(
        [
         #{ op => remove, path => <<"/a">>}
        ],
        strict_diff(#{a => 1}, #{}))),
     ?_test(?assertEqual(
        [
         #{op => remove,path => <<"/a">>},
         #{op => add,path => <<"/c">>,value => 3},
         #{op => add,path => <<"/b">>,value => 2}
        ],
        strict_diff(#{a => 1}, #{b => 2, c => 3}))),
     ?_test(?assertEqual(
        [
         #{op => move,path => <<"/e">>,from => <<"/a">>},
         #{op => replace,path => <<"/b/0">>,value => 2},
         #{op => replace,path => <<"/b/1/c">>,value => 4},
         #{op => add,path => <<"/b/-">>,value => 7},
         #{op => add,path => <<"/k">>,value => #{l => 1}}
        ],
        strict_diff(#{a => 1, b => [1, #{c => 3}], d => 4},
                    #{e => 1, b => [2, #{c => 4}, 7], d => 4, k => #{l => 1}}))),
     ?_test(?assertEqual(
        [
         #{op => move,path => <<"/e">>,from => <<"/a">>},
         #{op => replace,path => <<"/b/0">>,value => 2},
         #{op => replace,path => <<"/b/1/c">>,value => 4},
         #{op => remove,path => <<"/b/2">>},
         #{op => add,path => <<"/k">>,value => #{l => 1}}
        ],
        strict_diff(#{a => 1, b => [1, #{c => 3}, 7], d => 4},
                    #{e => 1, b => [2, #{c => 4}], d => 4, k => #{l => 1}})))
    ].
%% map_like/1 embedded inside a strict diff: extra keys in the given
%% map are tolerated, mismatching keys still produce replace ops.
map_soft_strict_diff_test_() ->
    [
     ?_test(?assertEqual([], strict_diff(map_like(#{a => 1}), #{a => 1}))),
     ?_test(?assertEqual([], strict_diff(map_like(#{a => 1}), #{a => 1, b => 1}))),
     ?_test(?assertEqual([], strict_diff(#{a => map_like(#{b => 1})}, #{a => #{b => 1}}))),
     ?_test(?assertEqual(
        [
         #{op => replace,path => <<"/a">>,value => 1}
        ],
        strict_diff(map_like(#{a => 2}), #{a => 1, b => 1}))),
     ?_test(?assertEqual(
        [
         #{op => replace,path => <<"/a">>,value => 1}
        ],
        strict_diff(map_like(#{a => #{b => 1}}), #{a => 1, b => 1})))
    ].
%% list_contains_all/1: order-insensitive containment; unmatched
%% expected elements surface as remove ops.
list_contains_all_test_() ->
    [
     ?_test(?assertEqual([], strict_diff(list_contains_all([1,2,3]), [1,2,3]))),
     ?_test(?assertEqual([], strict_diff(list_contains_all([1,2,3]), [3,2,1]))),
     ?_test(?assertEqual([], strict_diff(list_contains_all([1,2,3]), [1,2,3,4]))),
     ?_test(?assertEqual([], strict_diff(list_contains_all([1,2,3]), [1,2,3,4]))),
     ?_test(?assertEqual([], strict_diff(list_contains_all([
        map_like(#{a => 1}),
        map_like(#{b => 2}),
        map_like(#{c => 3})
     ]), [#{a => 1}, #{ b => 2 }, #{ c => 3}, #{ d => 4}]))),
     ?_test(?assertEqual([
        #{op => remove,path => <<"/1">>},
        #{op => remove,path => <<"/2">>}
     ], strict_diff(list_contains_all([
        map_like(#{a => 1}),
        map_like(#{b => 2}),
        map_like(#{c => 3})
     ]), [#{a => 1, b => 2 , c => 3, d => 4}]))),
     ?_test(?assertEqual([
        #{op => remove, path => <<"/0">>},
        #{op => remove, path => <<"/1">>}
     ], strict_diff(list_contains_all([1,2,3]), [3])))
    ].
%% check/1: validation-fun comparator, both standalone and nested
%% inside lists/maps.
check_test_() ->
    IsBinary =
        fun(V) when is_binary(V) -> ok;
           (_) -> {error, <<"not is binary">>}
        end,
    [
     ?_test(?assertEqual([], strict_diff(check(IsBinary), <<"asd">>))),
     ?_test(?assertEqual([], strict_diff([check(IsBinary)], [<<"asd">>]))),
     ?_test(?assertEqual([], strict_diff([#{a => check(IsBinary)}], [#{a => <<"asd">>}]))),
     ?_test(?assertEqual([
        #{op => check, path => <<>>, value => 1, error => <<"not is binary">>}
     ], strict_diff(check(IsBinary), 1))),
     ?_test(?assertEqual([
        #{op => check, path => <<"/0/a">>, value => 1, error => <<"not is binary">>}
     ], strict_diff([#{a => check(IsBinary)}], [#{a => 1}])))
    ].
%% pipe/2: decode-then-diff comparator, including decoder failures and
%% a post-decode mismatch.
pipe_test_() ->
    IntDecoder =
        fun (V) when is_integer(V) -> {ok, V};
            (V) when is_binary(V) ->
                try binary_to_integer(V) of Int -> {ok, Int}
                catch _:_ -> {error, <<"can't decode integer">>}
                end;
            (_) -> {error, <<"bad integer">>}
        end,
    [
     ?_test(?assertEqual([], strict_diff(pipe(IntDecoder, 1), 1))),
     ?_test(?assertEqual([], strict_diff(pipe(IntDecoder, 1), <<"1">>))),
     ?_test(?assertEqual([], strict_diff([pipe(IntDecoder, 2)], [<<"2">>]))),
     ?_test(?assertEqual([
        #{op => check, path => <<>>, value => atom, error => <<"bad integer">>}
     ], strict_diff(pipe(IntDecoder, 0), atom))),
     ?_test(?assertEqual([
        #{op => check, path => <<>>, value => <<"str">>, error => <<"can't decode integer">>}
     ], strict_diff(pipe(IntDecoder, 0), <<"str">>))),
     ?_test(?assertEqual([
        #{op => replace, path => <<>>, value => 10}
     ], strict_diff(pipe(IntDecoder, 0), <<"10">>)))
    ].
%% list_contains_exact/1: order-insensitive exact multiset match;
%% surplus given elements become add ops.
list_contains_exact_test_() ->
    [
     ?_test(?assertEqual([], strict_diff(list_contains_exact([1,2,3]), [1,2,3]))),
     ?_test(?assertEqual([], strict_diff(list_contains_exact([1,2,3]), [3,2,1]))),
     ?_test(?assertEqual([], strict_diff(list_contains_exact([
        map_like(#{a => 1}),
        map_like(#{b => 2}),
        map_like(#{c => 3})
     ]), [#{a => 1}, #{ b => 2 }, #{ c => 3}]))),
     ?_test(?assertEqual([
        #{op => add,path => <<"/-">>,value => 4}
     ], strict_diff(list_contains_exact([1,2,3]), [1,2,3,4]))),
     ?_test(?assertEqual([
        #{op => remove,path => <<"/1">>},
        #{op => remove,path => <<"/2">>},
        #{op => add,path => <<"/-">>,value => #{d => 4}}
     ], strict_diff(list_contains_exact([
        map_like(#{a => 1}),
        map_like(#{b => 2}),
        map_like(#{c => 3})
     ]), [#{a => 1, b => 2 , c => 3}, #{d => 4}]))),
     ?_test(?assertEqual([
        #{op => remove, path => <<"/0">>},
        #{op => remove, path => <<"/1">>}
     ], strict_diff(list_contains_exact([1,2,3]), [3])))
    ].
-endif. | src/ctdh.erl | 0.541409 | 0.663166 | ctdh.erl | starcoder |
-module(oc_metrics_SUITE).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
-include("oc_metrics.hrl").
%% Common Test callback: the test cases in this suite.
all() ->
    [self_producer_default].
%% Load and start the opencensus application for the whole suite;
%% start results are intentionally ignored (best effort).
init_per_suite(Config) ->
    _ = application:load(opencensus),
    _ = application:ensure_all_started(opencensus),
    Config.

%% Stop and unload opencensus after the suite.
end_per_suite(_Config) ->
    application:stop(opencensus),
    application:unload(opencensus),
    ok.
%% Register oc_self_producer in a producer registry and verify the
%% self-instrumentation metrics it exports: a gauge for the span ETS
%% table size in bytes (oc_span_buffer_bytes) and a gauge for the
%% number of buffered spans (oc_span_buffer_size). The span count must
%% track spans being started, and removing the producer must leave the
%% registry read empty again.
self_producer_default(_Config) ->
    %% No producers registered yet: the read is empty.
    ?assertMatch([], lists:sort(oc_producer_registry:read_to_list(test_registry))),
    oc_producer_registry:add_producer(test_registry, oc_self_producer),
    SpanName1 = <<"span-1">>,
    Span1 = oc_trace:start_span(SpanName1, undefined),
    %% One live span: count gauge is 1; the byte-size gauge must exceed
    %% 5000 (presumably ETS table overhead plus one span -- confirm the
    %% threshold if this assertion ever flaps).
    ?assertMatch([#oc_metric{descriptor=#oc_metric_descriptor{
                                name="oc_span_buffer_bytes",
                                description="Size of the spans ETS table",
                                unit= <<"1">>,
                                type='GAUGE_INT64',
                                label_keys=[]},
                             timeseries=[#oc_time_series{
                                            start_timestamp=undefined,
                                            label_values=[],
                                            points=[#oc_point{
                                                       timestamp={_, _},
                                                       value=S1}]}],
                             resource=undefined},
                  #oc_metric{descriptor=#oc_metric_descriptor{
                                name="oc_span_buffer_size",
                                description="Count of spans in the ETS table",
                                unit= <<"1">>,
                                type='GAUGE_INT64',
                                label_keys=[]},
                             timeseries=[#oc_time_series{
                                            start_timestamp=undefined,
                                            label_values=[],
                                            points=[#oc_point{
                                                       timestamp={_, _},
                                                       value=1}]}],
                             resource=undefined}] when S1 > 5000,
                 lists:sort(oc_producer_registry:read_to_list(test_registry))),
    ChildSpanName1 = <<"child-span-1">>,
    ChildSpan1 = oc_trace:start_span(ChildSpanName1, Span1, #{}),
    %% Two live spans (parent + child): count 2, byte size > 10000.
    ?assertMatch([#oc_metric{descriptor=#oc_metric_descriptor{
                                name="oc_span_buffer_bytes",
                                description="Size of the spans ETS table",
                                unit= <<"1">>,
                                type='GAUGE_INT64',
                                label_keys=[]},
                             timeseries=[#oc_time_series{
                                            start_timestamp=undefined,
                                            label_values=[],
                                            points=[#oc_point{
                                                       timestamp={_, _},
                                                       value=S2}]}],
                             resource=undefined},
                  #oc_metric{descriptor=#oc_metric_descriptor{
                                name="oc_span_buffer_size",
                                description="Count of spans in the ETS table",
                                unit= <<"1">>,
                                type='GAUGE_INT64',
                                label_keys=[]},
                             timeseries=[#oc_time_series{
                                            start_timestamp=undefined,
                                            label_values=[],
                                            points=[#oc_point{
                                                       timestamp={_, _},
                                                       value=2}]}],
                             resource=undefined}] when S2 > 10000,
                 lists:sort(oc_producer_registry:read_to_list(test_registry))),
    %% Deregistering the producer empties the registry read again.
    oc_producer_registry:remove_producer(test_registry, oc_self_producer),
    ?assertMatch([], lists:sort(oc_producer_registry:read_to_list(test_registry))),
    oc_trace:finish_span(ChildSpan1),
    oc_trace:finish_span(Span1). | test/oc_metrics_SUITE.erl | 0.580233 | 0.47457 | oc_metrics_SUITE.erl | starcoder |
%% Copyright (c) 2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : lfe_shell_docs.erl
%% Author : <NAME>
%% Purpose : Render LFE docs for output in shell.
%% The interface is loosely modelled on the shell_docs module.
-module(lfe_shell_docs).
-export([render/2,render/3,render/4]).
-include("lfe.hrl").
-include("lfe_docs.hrl").
%% Coloured strings for the LFE banner, red, green, yellow and blue.
-define(RED(Str), "\e[31m" ++ Str ++ "\e[0m").
-define(GRN(Str), "\e[1;32m" ++ Str ++ "\e[0m").
-define(YLW(Str), "\e[1;33m" ++ Str ++ "\e[0m").
-define(BLU(Str), "\e[1;34m" ++ Str ++ "\e[0m").
%% render(Module, Docs) -> unicode:chardata().
%% Render the module-level doc header. When given a compiled module
%% binary, the module name is recovered via beam_lib:chunks/3 with an
%% empty chunk list -- only the name from the result tuple is used
%% (hence the "Sneaky!" remark).
render(Bin, Docs) when is_binary(Bin) ->
    {ok,{Mod,_}} = beam_lib:chunks(Bin, [], []), %Sneaky!
    render(Mod, Docs);
render(Mod, #docs_v1{format = ?LFE_FORMAT, module_doc=Mdoc}) ->
    %% Blue module name, blank line, then the module docstring.
    [lfe_io:format1(?BLU("~p")++"\n\n", [Mod]), return_doc(Mdoc)].
%% render(Module, Function, Docs) -> unicode:chardata().
%% Render the docs of every function or macro named Name (any arity),
%% each headed by its signature in blue with a defun/defmacro prefix.
%% Returns {error, function_missing} when nothing matches.
render(_Mod, Name, #docs_v1{format = ?LFE_FORMAT, docs = Docs}) ->
    %% Select by name only; the kind (function vs macro) and arity are
    %% ignored at this stage and dispatched on below.
    Fns = [ F || {{_,N,_},_,_,_,_}=F <- Docs, N =:= Name ],
    Ret = lists:map(fun ({{function,_,_},_,Sig,Doc,_}) ->
                            [lfe_io:format1(?BLU("defun ~s")++"\n\n", [Sig]),
                             return_doc(Doc)];
                        ({{macro,_,_},_,Sig,Doc,_}) ->
                            [lfe_io:format1(?BLU("defmacro ~s")++"\n\n", [Sig]),
                             return_doc(Doc)]
                    end, Fns),
    return_render(Ret, function_missing).
%% render(Module, Function, Arity, Docs) -> unicode:chardata().
%% Render the docs of the function Name/Arity. NOTE(review): unlike
%% render/3 this only selects {function,...} entries, so macros are
%% never found by an arity-qualified lookup -- presumably intentional,
%% but worth confirming against the shell's doc-lookup commands.
render(_Mod, Name, Arity, #docs_v1{format = ?LFE_FORMAT, docs = Docs}) ->
    Fns = [ F || {{function,N,A},_,_,_,_}=F <- Docs, N =:= Name, A =:= Arity ],
    Ret = lists:map(fun ({{function,_,_},_,Sig,Doc,_}) ->
                            [lfe_io:format1(?BLU("defun ~s")++"\n\n", [Sig]),
                             return_doc(Doc)]
                    end, Fns),
    return_render(Ret, function_missing).
%% Extract and format the English docstring from a doc map; anything
%% else (no "en" entry, hidden/none markers) renders as a blank line.
return_doc(#{<<"en">> := Dv}) -> lfe_io:format1("~s\n\n", [Dv]);
return_doc(_) -> "\n".

%% An empty render result means nothing matched: report Error instead.
return_render([], Error) -> {error,Error};
return_render(FDocs, _Error) -> FDocs. | src/lfe_shell_docs.erl | 0.533154 | 0.41052 | lfe_shell_docs.erl | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.