code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(mem3_reshard_api).
-export([
create_jobs/5,
get_jobs/0,
get_job/1,
get_summary/0,
resume_job/1,
stop_job/2,
start_shard_splitting/0,
stop_shard_splitting/1,
get_shard_splitting_state/0
]).
%% Start a split job for every shard selected by Node/Shard/Db/Range.
%% Returns one {ok, JobId, Node, ShardName} or {error, Reason, Node,
%% ShardName} tuple per targeted shard.
create_jobs(Node, Shard, Db, Range, split) ->
    Targets = pick_shards(Node, Shard, Db, Range),
    [begin
         TargetNode = mem3:node(S),
         ShardName = mem3:name(S),
         case rpc:call(TargetNode, mem3_reshard, start_split_job, [ShardName]) of
             {ok, JobId} ->
                 {ok, JobId, TargetNode, ShardName};
             {badrpc, Error} ->
                 {error, Error, TargetNode, ShardName};
             {error, Error} ->
                 {error, Error, TargetNode, ShardName}
         end
     end || S <- Targets].
%% Collect all resharding jobs from every live node. Each node returns a
%% list of job info objects; the per-node lists are flattened into one.
get_jobs() ->
    LiveNodes = mem3_util:live_nodes(),
    {Replies, _BadNodes} = rpc:multicall(LiveNodes, mem3_reshard, jobs, []),
    %% lists:flatten/1 (not append) tolerates stray {badrpc, _} replies
    lists:flatten(Replies).
%% Look up a single job by id across all live nodes. The first {ok, _}
%% reply wins; if no node knows the job, return {error, not_found}.
get_job(JobId) ->
    LiveNodes = mem3_util:live_nodes(),
    {Replies, _BadNodes} = rpc:multicall(LiveNodes, mem3_reshard, job, [JobId]),
    case lists:keyfind(ok, 1, Replies) of
        {ok, JobInfo} ->
            {ok, JobInfo};
        false ->
            {error, not_found}
    end.
%% Build a cluster-wide resharding summary. Each node's get_state reply is
%% an EJSON-style {Props} tuple; per-state counters are summed across all
%% live nodes, and the overall {state, state_reason} pair is prepended.
get_summary() ->
    Nodes = mem3_util:live_nodes(),
    {Replies, _Bad} = rpc:multicall(Nodes, mem3_reshard, get_state, []),
    Stats0 = #{running => 0, total => 0, completed => 0, failed => 0,
        stopped => 0},
    StatsF = lists:foldl(fun({Res}, Stats) ->
        maps:map(fun(Stat, OldVal) ->
            %% a counter missing from a node's reply contributes 0
            OldVal + couch_util:get_value(Stat, Res, 0)
        end, Stats)
    end, Stats0, Replies),
    {State, Reason} = state_and_reason(Replies),
    StateReasonProps = [{state, State}, {state_reason, Reason}],
    {StateReasonProps ++ lists:sort(maps:to_list(StatsF))}.
%% Resume a stopped job on whichever node owns it. Replies of
%% {error, not_found} from non-owning nodes are ignored; the call succeeds
%% if at least one node reports ok.
resume_job(JobId) ->
    LiveNodes = mem3_util:live_nodes(),
    {Replies, _BadNodes} = rpc:multicall(LiveNodes, mem3_reshard, resume_job,
        [JobId]),
    Relevant = [R || R <- Replies, R =/= {error, not_found}],
    case lists:usort(Relevant) of
        [] ->
            {error, not_found};
        [ok] ->
            ok;
        [{error, Error} | _] ->
            {error, {[{error, couch_util:to_binary(Error)}]}}
    end.
%% Stop a job (recording Reason) on whichever node owns it. Nodes that do
%% not know the job reply {error, not_found} and are filtered out.
stop_job(JobId, Reason) ->
    {Replies, _BadNodes} = rpc:multicall(mem3_util:live_nodes(), mem3_reshard,
        stop_job, [JobId, Reason]),
    Meaningful = lists:usort([R || R <- Replies, R =/= {error, not_found}]),
    case Meaningful of
        [ok] ->
            ok;
        [{error, Error} | _] ->
            {error, {[{error, couch_util:to_binary(Error)}]}};
        [] ->
            {error, not_found}
    end.
%% Enable shard splitting on all connected nodes. Every node must report
%% ok; any other reply is surfaced as an EJSON error body.
start_shard_splitting() ->
    {Replies, _Bad} = rpc:multicall(mem3_reshard, start, []),
    case lists:usort(lists:flatten(Replies)) of
        [ok] ->
            {ok, {[{ok, true}]}};
        [Error | _] ->
            {error, {[{error, couch_util:to_binary(Error)}]}}
    end.
%% Disable shard splitting on all connected nodes, recording Reason.
%% Mirrors start_shard_splitting/0: all nodes must report ok.
stop_shard_splitting(Reason) ->
    {Replies, _Bad} = rpc:multicall(mem3_reshard, stop, [Reason]),
    case lists:usort(lists:flatten(Replies)) of
        [ok] ->
            {ok, {[{ok, true}]}};
        [Error | _] ->
            {error, {[{error, couch_util:to_binary(Error)}]}}
    end.
%% Report whether shard splitting is running or stopped cluster-wide,
%% together with the recorded reason for that state.
get_shard_splitting_state() ->
    LiveNodes = mem3_util:live_nodes(),
    {Replies, _BadNodes} = rpc:multicall(LiveNodes, mem3_reshard, get_state, []),
    state_and_reason(Replies).
%% Combine per-node get_state replies into one cluster-wide
%% {State, Reason} pair. The cluster counts as `running' if at least one
%% live node reports <<"running">>; otherwise it is `stopped'.
state_and_reason(StateReplies) ->
    AccF = lists:foldl(fun({ResProps}, Acc) ->
        Reason = get_reason(ResProps),
        case couch_util:get_value(state, ResProps) of
            <<"running">> -> orddict:append(running, Reason, Acc);
            <<"stopped">> -> orddict:append(stopped, Reason, Acc);
            undefined -> Acc
        end
    end, orddict:from_list([{running, []}, {stopped, []}]), StateReplies),
    %% Pattern match instead of the original `length(Running) > 0' test:
    %% length/1 is O(n) and matching [_|_] expresses non-emptiness directly.
    case orddict:fetch(running, AccF) of
        [_ | _] = Running ->
            {running, pick_reason(Running)};
        [] ->
            {stopped, pick_reason(orddict:fetch(stopped, AccF))}
    end.
%% Pick one representative, meaningful reason from a list of reasons.
%% `undefined' entries are discarded; returns `null' when nothing is left.
pick_reason(Reasons) ->
    case lists:usort([R || R <- Reasons, R =/= undefined]) of
        [] -> null;
        [First | _] -> First
    end.
%% Extract the `reason' value from a node's state_info property, or
%% undefined when state_info is absent or empty.
get_reason(StateProps) when is_list(StateProps) ->
    case couch_util:get_value(state_info, StateProps) of
        [] -> undefined;
        undefined -> undefined;
        {SInfoProps} -> couch_util:get_value(reason, SInfoProps)
    end.
%% Select the shards targeted by a job-creation request. The argument
%% order is (Node, Shard, Db, Range); selection can be narrowed by any
%% combination, subject to config-enforced required parameters. Throws
%% {bad_request, Msg} for invalid combinations.
pick_shards(undefined, undefined, Db, undefined) when is_binary(Db) ->
    check_node_required(),
    check_range_required(),
    mem3:shards(Db);
pick_shards(Node, undefined, Db, undefined) when is_atom(Node),
        is_binary(Db) ->
    check_range_required(),
    [S || S <- mem3:shards(Db), mem3:node(S) == Node];
pick_shards(undefined, undefined, Db, [_B, _E] = Range) when is_binary(Db) ->
    check_node_required(),
    [S || S <- mem3:shards(Db), mem3:range(S) == Range];
pick_shards(Node, undefined, Db, [_B, _E] = Range) when is_atom(Node),
        is_binary(Db) ->
    [S || S <- mem3:shards(Db), mem3:node(S) == Node, mem3:range(S) == Range];
pick_shards(undefined, Shard, undefined, undefined) when is_binary(Shard) ->
    check_node_required(),
    Db = mem3:dbname(Shard),
    [S || S <- mem3:shards(Db), mem3:name(S) == Shard];
pick_shards(Node, Shard, undefined, undefined) when is_atom(Node),
        is_binary(Shard) ->
    Db = mem3:dbname(Shard),
    [S || S <- mem3:shards(Db), mem3:name(S) == Shard, mem3:node(S) == Node];
pick_shards(_, undefined, undefined, _) ->
    throw({bad_request, <<"Must specify at least `db` or `shard`">>});
%% Variables renamed to match the (Node, Shard, Db, Range) argument order;
%% the original bound Db/Shard in swapped positions, which was misleading
%% (behavior is unchanged since both guards just test is_binary/1).
pick_shards(_, Shard, Db, _) when is_binary(Shard), is_binary(Db) ->
    throw({bad_request, <<"`db` and `shard` are mutually exclusive">>}).
%% Throw a bad_request error when the `node` query parameter is mandatory
%% (config flag reshard/require_node_param) but was not supplied.
check_node_required() ->
    case config:get_boolean("reshard", "require_node_param", false) of
        true ->
            %% fixed typo in the user-facing message ("prameter")
            throw({bad_request, <<"`node` parameter is required">>});
        false ->
            ok
    end.
%% Throw a bad_request error when the `range` query parameter is mandatory
%% (config flag reshard/require_range_param) but was not supplied.
%% (Also drops stray dataset metadata that was fused onto the final line.)
check_range_required() ->
    case config:get_boolean("reshard", "require_range_param", false) of
        true ->
            %% fixed typo in the user-facing message ("prameter")
            throw({bad_request, <<"`range` parameter is required">>});
        false ->
            ok
    end.
%%%---------------------------------------------------------------------
%%% module pwd
%%%---------------------------------------------------------------------
%%% Decrypts md5-hashed strings of known length. This is demonstration
%%% code for an intro talk on Erlang concurrency.
%%%---------------------------------------------------------------------
-module(pwd).
-compile(export_all).
%%----------------------------------------------------------------------
%% Function: encrypt/1
%% Purpose:  Hash a plaintext string with MD5.
%% Args:     plaintext (for this demo: lowercase letters only)
%% Returns:  the 16-byte MD5 digest as a binary
%%----------------------------------------------------------------------
encrypt(Plaintext) ->
    erlang:md5(Plaintext).
%%----------------------------------------------------------------------
%% Function: decrypt/3
%% Purpose:  Given an md5 hash of known length, recover the plaintext by
%%           brute force, splitting the search space across processes.
%% Args:     md5 hash, length of the original string, process count
%% Returns:  {found, Password, ElapsedSeconds} | notfound
%%----------------------------------------------------------------------
decrypt(Crypted, Len, Processes) ->
    CharPartitions = partition_alphabet(Len, Processes),
    Server = self(),
    %% os:timestamp/0 replaces the deprecated erlang:now/0; it returns the
    %% same {MegaSecs, Secs, MicroSecs} shape timer:now_diff/2 expects.
    StartTime = os:timestamp(),
    lists:foreach(fun({Min, Max}) ->
                      spawn(pwd, analyze, [Server, Crypted, Min, Max])
                  end, CharPartitions),
    loop(Processes, StartTime, notfound).

%%----------------------------------------------------------------------
%% Function: loop/3
%% Purpose:  Collect results from the workers. Records the elapsed time
%%           on success but only returns once every worker has reported
%%           either success or failure.
%% Args:     outstanding process count, start timestamp, result so far
%% Returns:  notfound | {found, Password, ElapsedSeconds}
%%----------------------------------------------------------------------
loop(0, _Start, Ret) ->
    Ret;
loop(Processes, Start, Ret) ->
    receive
        {found, Password} ->
            Elapsed = timer:now_diff(os:timestamp(), Start) / 1000 / 1000, %seconds
            loop(Processes - 1, Start, {found, Password, Elapsed});
        notfound ->
            io:format("Processes remaining: ~p~n", [Processes]),
            loop(Processes - 1, Start, Ret)
    end.
%%----------------------------------------------------------------------
%% Function: analyze/4  (the original header said analyze/5; it takes 4)
%% Purpose:  Walk every candidate string from Cur up to and including Max,
%%           comparing each candidate's md5 hash against Crypted.
%% Args:     server pid, target md5 hash, first candidate, last candidate
%% Returns:  sends {found, Plaintext} or notfound to the server
%%----------------------------------------------------------------------
analyze(Server, Crypted, Max, Max) ->
    %% reached the end of this partition: check Max itself, then report
    case erlang:md5(Max) of
        Crypted ->
            Server ! {found, Max};
        _ ->
            Server ! notfound
    end;
analyze(Server, Crypted, Cur, Max) ->
    case erlang:md5(Cur) of
        Crypted ->
            Server ! {found, Cur};
        _ ->
            analyze(Server, Crypted, next(Cur), Max)
    end.
%%----------------------------------------------------------------------
%% Function: partition_alphabet/2
%% Purpose: partition the lowercase alphabet according to the number of
%%          processes to spawn. The max number of strings to analyze is
%%          26 ^ Len. First is the count of all shorter strings, e.g.
%%          for length 4: First = (26^3) + (26^2) + 26 = 18278, so
%%          chr_array(First) is the first string of length Len ("aaaa").
%% Args: length of plaintext, number of processes
%% Returns: list of {Min, Max} 2-tuples of char arrays, e.g. {"aa","mm"}
%%----------------------------------------------------------------------
partition_alphabet(Len, Processes) ->
    TotalStrings = round(math:pow(26, Len)),
    First = first_int(Len),
    Last = First + TotalStrings,
    StringsPerProc = round(TotalStrings / Processes),
    partition_alphabet(Processes, First - 1, Last, StringsPerProc, []).

partition_alphabet(1, Cur, Last, _StringsPerProc, L) ->
    %% final partition always receives all remaining arrays because
    %% TotalStrings / Processes above may have had a remainder
    Min = Cur + 1,
    MinMax = {chr_array(Min), chr_array(Last - 1)},
    [MinMax|L];
partition_alphabet(ProcsLeft, Cur, Last, StringsPerProc, L) ->
    Min = Cur + 1,
    Max = Min + StringsPerProc - 1,
    MinMax = {chr_array(Min), chr_array(Max)},
    partition_alphabet(ProcsLeft - 1, Max, Last, StringsPerProc, [MinMax|L]).
%%----------------------------------------------------------------------
%% Function: first_int/1
%% Purpose: given a string of length Len, return the base-10 number of the
%%          first string of that length in the length-then-value ordering
%%          of lowercase strings: first_int(1) = 0, first_int(2) = 26, ...
%%          (i.e. the count of all strictly shorter strings).
%% Args: string length (> 0)
%% Returns: integer
first_int(Len) when Len > 0 ->
    first_int(Len - 1, 0).

%% Accumulate 26^N for N = Len-1 down to 1 using exact integer
%% arithmetic; round(math:pow(26, N)) loses precision for long strings.
first_int(0, Acc) ->
    Acc;
first_int(N, Acc) ->
    first_int(N - 1, Acc + ipow(26, N)).

%% Integer exponentiation: Base ^ N for N >= 0.
ipow(_Base, 0) -> 1;
ipow(Base, N) when N > 0 -> Base * ipow(Base, N - 1).
%%----------------------------------------------------------------------
%% Function: chr_array/1
%% Purpose: convert a base-10 index into the corresponding lowercase
%%          string using bijective base-26 ("hexavigesimal") numbering:
%%          0 -> "a", 25 -> "z", 26 -> "aa", 27 -> "ab", ...
%%          Based on http://en.wikipedia.org/wiki/Hexavigesimal
%% Args: non-negative integer
%% Returns: lowercase character list
chr_array(Index) ->
    chr_array(Index, "").

chr_array(Index, Suffix) when Index > 25 ->
    %% peel off the least significant base-26 digit, carry the rest
    chr_array(Index div 26 - 1, [(Index rem 26) + $a | Suffix]);
chr_array(Index, Suffix) ->
    [Index + $a | Suffix].
%%----------------------------------------------------------------------
%% Function: next/1
%% Purpose: increment char array L by one character, e.g. "aaa" -> "aab"
%%          and "bzz" -> "caa".
%% Note: an all-'z' input wraps to all-'a' without growing ("zz" -> "aa");
%%       callers never hit this because analyze/4 stops at its Max bound.
%% (Also drops stray dataset metadata fused onto the original final line.)
%% Args: character array to increment
%% Returns: character array
%%----------------------------------------------------------------------
next(L) ->
    next(lists:reverse(L), [], true).

next([], L, _Incr) ->
    L;
next([$z | T], L, true) ->
    %% roll "z" over to "a" and carry the increment leftwards
    next(T, [$a | L], true);
next([H | T], L, true) ->
    next(T, [H + 1 | L], false);
next([H | T], L, false) ->
    next(T, [H | L], false).
%%=============================================================================
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%=============================================================================
%% @private
%% @doc Module containing functions needed by meck to integrate with cover.
-module(meck_cover).
%% Interface exports
-export([compile_beam/2]).
-export([rename_module/2]).
-export([dump_coverdata/1]).
-ignore_xref({cover, compile_beams, 1}).
-ignore_xref({cover, compile_beam, 2}).
-ignore_xref({cover, get_term, 1}).
-ignore_xref({cover, write, 2}).
%%=============================================================================
%% Interface exports
%%=============================================================================
%% @doc Cover-compile the original module `<name>_meck_original' from its
%% binary, using whichever internal cover API is available (alter_cover/0).
%% Asserts that exactly one module compiled successfully.
compile_beam(OriginalMod, Bin) ->
    CompileBeams = alter_cover(),
    [{ok, _}] = CompileBeams([{OriginalMod, Bin}]).
%% @doc Given a cover file `File' exported by `cover:export', overwrite
%% the module name in every term with `Name' and rewrite the file in place.
rename_module(File, Name) ->
    NewTerms = change_cover_mod_name(read_cover_file(File), Name),
    write_terms(File, NewTerms),
    ok.
%% @doc Export cover data for `Mod' into a `.coverdata' file (named after
%% the module and the OS pid) in the current working directory.
%% Returns the absolute path of the written file.
dump_coverdata(Mod) ->
    {ok, Dir} = file:get_cwd(),
    FileName = lists:concat([Mod, ".", os:getpid(), ".coverdata"]),
    AbsPath = filename:join(Dir, FileName),
    ok = cover:export(AbsPath, Mod),
    AbsPath.
%%=============================================================================
%% Internal functions
%%=============================================================================

%% @private
%%
%% @doc Alter the cover BEAM module to export some of its private
%% functions. This is done for two reasons:
%%
%% 1. Meck needs to alter the export analysis data on disk and
%% therefore needs to understand this format. This is why `get_term'
%% and `write' are exposed.
%%
%% 2. In order to avoid creating temporary files meck needs direct
%% access to `compile_beam/2' which allows passing a binary.
%% In OTP 18.0 the internal API of cover changed a bit and
%% compile_beam/2 was replaced by compile_beams/1.
%%
%% Returns the fun used to cover-compile a list of {Mod, Binary} pairs:
%% an existing cover export when available, otherwise a recompiled cover
%% with the needed internals force-exported.
-dialyzer({no_missing_calls, alter_cover/0}). % for cover:compile_beams/1
alter_cover() ->
    CoverExports = cover:module_info(exports),
    %% Probe which internal compile API this OTP's cover already exports.
    case {lists:member({compile_beams,1}, CoverExports),
          lists:member({compile_beam,2}, CoverExports)} of
        {true, _} ->
            fun cover:compile_beams/1;
        {_, true} ->
            fun compile_beam_wrapper/1;
        {false, false} ->
            %% Neither is exported: recompile cover from its abstract code
            %% with the required functions added to its export list.
            Beam = meck_code:beam_file(cover),
            AbsCode = meck_code:abstract_code(Beam),
            {Exports, CompileBeams} =
                %% analyse/0 only exists from OTP 18.0 on; its presence
                %% selects which internal API to expose.
                case lists:member({analyse,0}, CoverExports) of
                    true ->
                        %% new API from OTP 18.0 on
                        {[{compile_beams, 1}, {get_term, 1}, {write, 2}],
                         fun cover:compile_beams/1};
                    false ->
                        {[{compile_beam, 2}, {get_term, 1}, {write, 2}],
                         fun compile_beam_wrapper/1}
                end,
            AbsCode2 = meck_code:add_exports(Exports, AbsCode),
            _Bin = meck_code:compile_and_load_forms(AbsCode2),
            CompileBeams
    end.
%% Adapt cover's pre-18.0 compile_beam/2 to the compile_beams/1 shape:
%% compile each {Mod, Binary} pair individually and collect the results.
-dialyzer({no_missing_calls, compile_beam_wrapper/1}). % for cover:compile_beam/2
compile_beam_wrapper(ModFiles) ->
    lists:map(fun({Mod, Bin}) -> cover:compile_beam(Mod, Bin) end, ModFiles).
%% Rewrite every term read from a cover file so it refers to `Name'
%% instead of the original module. The fold threads {Name, Acc}, so the
%% terms come back in reverse order relative to the input list.
change_cover_mod_name(CoverTerms, Name) ->
    {_, Terms} = lists:foldl(fun change_name_in_term/2, {Name,[]}, CoverTerms),
    Terms.
change_name_in_term({file, Mod, File}, {Name, Terms}) ->
    %% {file, Module, SourcePath}: swap the module and patch the path string.
    Term2 = {file, Name, replace_string(File, Mod, Name)},
    {Name, [Term2|Terms]};
change_name_in_term({Bump={bump,_,_,_,_,_},_}=Term, {Name, Terms}) ->
    %% {#bump{}, Count}: the module lives in the 2nd element of the bump key.
    Bump2 = setelement(2, Bump, Name),
    Term2 = setelement(1, Term, Bump2),
    {Name, [Term2|Terms]};
change_name_in_term({_Mod,Clauses}, {Name, Terms}) ->
    %% NOTE(review): this fold yields {Name, ReversedClauses}, so Term2
    %% nests as {Name, {Name, Clauses}} -- presumably the shape cover
    %% expects on import, but worth confirming against cover's file format.
    Clauses2 = lists:foldl(fun change_name_in_clause/2, {Name, []}, Clauses),
    Term2 = {Name, Clauses2},
    {Name, [Term2|Terms]}.

%% Replace the module stored in the first element of a clause tuple.
change_name_in_clause(Clause, {Name, NewClauses}) ->
    {Name, [setelement(1, Clause, Name)|NewClauses]}.
%% Replace the first occurrence of atom `Old' with atom `New' inside the
%% string `File'. Note: the old name is interpreted as a regex pattern,
%% which is fine for module names (no regex metacharacters).
replace_string(File, Old, New) ->
    re:replace(File, atom_to_list(Old), atom_to_list(New), [{return, list}]).
%% Read every term out of a cover-exported file using cover's internal
%% term reader (exposed via alter_cover/0). Terms are returned in
%% reverse file order; see change_cover_mod_name/2.
read_cover_file(File) ->
    {ok, Fd} = file:open(File, [read, binary, raw]),
    Terms = get_terms(Fd, []),
    ok = file:close(Fd),
    Terms.
-dialyzer({no_missing_calls, get_terms/2}). % for cover:get_term/1
%% Accumulate terms from Fd until eof, building the list in reverse read
%% order. Relies on cover:get_term/1 being exported (see alter_cover/0).
get_terms(Fd, Terms) ->
    case cover:get_term(Fd) of
        eof -> Terms;
        Term -> get_terms(Fd, [Term|Terms])
    end.
%% Write all terms to `File' using cover's own term writer so the result
%% stays readable by cover. Closes the descriptor when done (the original
%% leaked it; read_cover_file/1 already closed its descriptor).
write_terms(File, Terms) ->
    {ok, Fd} = file:open(File, [write, binary, raw]),
    lists:foreach(write_term(Fd), Terms),
    ok = file:close(Fd),
    ok.

-dialyzer({no_missing_calls, write_term/1}). % for cover:write/2
%% Return a closure writing one term to the already-opened Fd.
write_term(Fd) ->
    fun(Term) -> cover:write(Term, Fd) end.
%% -------------------------------------------------------------------
%%
%% Copyright <2013-2018> <
%% Technische Universität Kaiserslautern, Germany
%% Université Pierre et Marie Curie / Sorbonne-Université, France
%% Universidade NOVA de Lisboa, Portugal
%% Université catholique de Louvain (UCL), Belgique
%% INESC TEC, Portugal
%% >
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either expressed or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% List of the contributors to the development of Antidote: see AUTHORS file.
%% Description and complete License: see LICENSE file.
%% -------------------------------------------------------------------
%% @doc module antidote_crdt_map_rr - A CRDT map datatype with a reset functionality
%%
%% Inserting a new element in the map:
%% if element already there -> do nothing
%% if not create a map entry where value is initial state of the embedded
%% data type (a call to the create function of the embedded data type)
%%
%% Update operations on entries (embedded CRDTs) are calls to the update functions of the entries.
%%
%% Deleting an entry in the map:
%% 1- calls the reset function of this entry (tries to reset entry to its initial state)
%% As reset only affects operations that are locally (where reset was invoked) seen
%% i.e. operations on the same entry that are concurrent to the reset operation are
%% not affected and their effect should be observable once delivered.
%%
%% if there were no operations concurrent to the reset (all operations where in the causal past of the reset),
%% then the state of the entry is bottom (the initial state of the entry)
%%
%% 2- checks if the state of the entry after the reset is bottom (its initial state)
%% if bottom, delete the entry from the map
%% if not bottom, keep the entry
%%
%% An entry exists in a map, if there is at least one update (inserts included) on the key, which is not followed by a remove
%%
%% Resetting the map means removing all the current entries
%%
-module(antidote_crdt_map_rr).
-behaviour(antidote_crdt).
%% API
-export([
new/0,
value/1,
update/2,
equal/2,
get/2,
to_binary/1,
from_binary/1,
is_operation/1,
downstream/2,
require_state_downstream/1,
is_bottom/1
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type typedKey() :: {Key :: term(), Type :: atom()}.
-type state() :: dict:dict(typedKey(), {NestedState :: term()}).
-type op() ::
{update, nested_op()}
| {update, [nested_op()]}
| {remove, typedKey()}
| {remove, [typedKey()]}
| {batch, {Updates :: [nested_op()], Removes :: [typedKey()]}}
| {reset, {}}.
-type nested_op() :: {typedKey(), Op :: term()}.
-type effect() ::
{Adds :: [nested_downstream()], Removed :: [nested_downstream()]}.
-type nested_downstream() :: {typedKey(), none | {ok, Effect :: term()}}.
-type value() :: orddict:orddict(typedKey(), term()).
-spec new() -> state().
%% Create an empty map; entries are stored in a dict keyed by {Key, Type}.
new() ->
    dict:new().
-spec value(state()) -> value().
%% User-visible value: a sorted orddict of {{Key, Type}, Value} where each
%% Value is produced by the embedded type's own value function.
value(Map) ->
    lists:sort([
        {{Key, Type}, antidote_crdt:value(Type, Value)}
        || {{Key, Type}, Value} <- dict:to_list(Map)
    ]).
% get a value from the map (note: Map here is the orddict produced by
% value/1, not the internal dict state)
% returns empty value if the key is not present in the map, i.e. the
% value of a freshly created instance of the embedded type
-spec get(typedKey(), value()) -> term().
get({_K, Type} = Key, Map) ->
    case orddict:find(Key, Map) of
        {ok, Val} -> Val;
        error -> antidote_crdt:value(Type, antidote_crdt:new(Type))
    end.
-spec require_state_downstream(op()) -> boolean().
%% Downstream generation always needs the current map state (entries'
%% current states feed the embedded types' downstream/reset calls).
require_state_downstream(_Op) ->
    true.
-spec downstream(op(), state()) -> {ok, effect()}.
%% Translate an update/remove/batch/reset operation into a downstream
%% effect. Single updates and removes are normalized to lists, and lists
%% are normalized to the batch form, so only the batch clause does work.
downstream({update, {{Key, Type}, Op}}, CurrentMap) ->
    downstream({update, [{{Key, Type}, Op}]}, CurrentMap);
downstream({update, NestedOps}, CurrentMap) ->
    downstream({batch, {NestedOps, []}}, CurrentMap);
downstream({remove, {Key, Type}}, CurrentMap) ->
    downstream({remove, [{Key, Type}]}, CurrentMap);
downstream({remove, Keys}, CurrentMap) ->
    downstream({batch, {[], Keys}}, CurrentMap);
downstream({batch, {Updates, Removes}}, CurrentMap) ->
    UpdateEffects = [generate_downstream_update(Op, CurrentMap) || Op <- Updates],
    RemoveEffects = [generate_downstream_remove(Key, CurrentMap) || Key <- Removes],
    {ok, {UpdateEffects, RemoveEffects}};
downstream({reset, {}}, CurrentMap) ->
    % reset removes all keys currently present in the map
    AllKeys = [Key || {Key, _Val} <- value(CurrentMap)],
    downstream({remove, AllKeys}, CurrentMap).
-spec generate_downstream_update({typedKey(), Op :: term()}, state()) -> nested_downstream().
%% Produce the downstream effect for one nested update. The embedded
%% type's downstream runs against the entry's current state, or against a
%% fresh state when the entry does not exist yet.
generate_downstream_update({{Key, Type}, Op}, CurrentMap) ->
    %% Single dict:find/2 lookup instead of is_key/2 followed by fetch/2.
    CurrentState =
        case dict:find({Key, Type}, CurrentMap) of
            {ok, State} -> State;
            error -> antidote_crdt:new(Type)
        end,
    {ok, DownstreamEffect} = antidote_crdt:downstream(Type, Op, CurrentState),
    {{Key, Type}, {ok, DownstreamEffect}}.
-spec generate_downstream_remove(typedKey(), state()) -> nested_downstream().
%% Produce the downstream effect for removing one entry: the embedded
%% type's reset effect when the type supports reset, `none' otherwise.
generate_downstream_remove({Key, Type}, CurrentMap) ->
    %% Single dict:find/2 lookup instead of is_key/2 followed by fetch/2.
    CurrentState =
        case dict:find({Key, Type}, CurrentMap) of
            {ok, State} -> State;
            error -> antidote_crdt:new(Type)
        end,
    DownstreamEffect =
        case antidote_crdt:is_operation(Type, {reset, {}}) of
            true ->
                {ok, _} = antidote_crdt:downstream(Type, {reset, {}}, CurrentState);
            false ->
                none
        end,
    {{Key, Type}, DownstreamEffect}.
-spec update(effect(), state()) -> {ok, state()}.
%% Apply a downstream effect: first apply all nested update effects, then
%% drop entries whose state became bottom, then apply the remove effects
%% (each remove deletes its entry only if the entry is bottom afterwards).
update({Updates, Removes}, State) ->
    State2 = lists:foldl(fun(E, S) -> update_entry(E, S) end, State, Updates),
    State3 = dict:fold(fun(K, V, S) -> remove_obsolete(K, V, S) end, new(), State2),
    State4 = lists:foldl(fun(E, S) -> remove_entry(E, S) end, State3, Removes),
    {ok, State4}.
%% Apply one nested update effect to an entry, creating the entry from the
%% embedded type's initial state when it is not present yet.
update_entry({{Key, Type}, {ok, Op}}, Map) ->
    case dict:find({Key, Type}, Map) of
        {ok, State} ->
            {ok, UpdatedState} = antidote_crdt:update(Type, Op, State),
            dict:store({Key, Type}, UpdatedState, Map);
        error ->
            NewValue = antidote_crdt:new(Type),
            {ok, NewValueUpdated} = antidote_crdt:update(Type, Op, NewValue),
            dict:store({Key, Type}, NewValueUpdated, Map)
    end.
%% Apply a remove effect (the embedded type's reset) to an entry. The
%% entry is deleted only when its post-reset state is bottom, so effects
%% concurrent with the remove survive. A `none' effect (type without
%% reset support) and a missing entry both leave the map unchanged.
remove_entry({{Key, Type}, {ok, Op}}, Map) ->
    case dict:find({Key, Type}, Map) of
        {ok, State} ->
            {ok, UpdatedState} = antidote_crdt:update(Type, Op, State),
            case is_bottom(Type, UpdatedState) of
                true ->
                    dict:erase({Key, Type}, Map);
                false ->
                    dict:store({Key, Type}, UpdatedState, Map)
            end;
        error ->
            Map
    end;
remove_entry({{_Key, _Type}, none}, Map) ->
    Map.
%% Keep an entry in the rebuilt map only if its state is not bottom;
%% bottom entries are dropped so they never appear in the map's value.
remove_obsolete({_Key, Type} = TypedKey, Val, Map) ->
    case is_bottom(Type, Val) of
        true -> Map;
        false -> dict:store(TypedKey, Val, Map)
    end.
%% An embedded state is bottom when its (aliased) type module exports
%% is_bottom/1 and that returns true; other types are never bottom.
is_bottom(Type, State) ->
    T = antidote_crdt:alias(Type),
    erlang:function_exported(T, is_bottom, 1) andalso T:is_bottom(State).
%% Structural equality of the two internal dict states.
equal(Map1, Map2) ->
    % TODO better implementation (recursive equals)
    Map1 == Map2.
%% Serialization tag and format version, prefixed to allow future upgrades.
-define(TAG, 101).
-define(V1_VERS, 1).

%% Serialize the map state with a 2-byte tag/version header.
to_binary(Policy) ->
    <<?TAG:8/integer, ?V1_VERS:8/integer, (term_to_binary(Policy))/binary>>.

%% Deserialize; only the tag/version written by to_binary/1 is accepted.
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, Bin/binary>>) ->
    {ok, binary_to_term(Bin)}.
%% Validate the shape of an operation, recursively checking that embedded
%% types exist and that their nested operations are valid for those types.
%% `is_bottom' is accepted as a read-style operation.
is_operation(Operation) ->
    case Operation of
        {update, {{_Key, Type}, Op}} ->
            antidote_crdt:is_type(Type) andalso antidote_crdt:is_operation(Type, Op);
        {update, Ops} when is_list(Ops) ->
            lists:all(fun(Op) -> is_operation({update, Op}) end, Ops);
        {remove, {_Key, Type}} ->
            antidote_crdt:is_type(Type);
        {remove, Keys} when is_list(Keys) ->
            lists:all(fun(Key) -> is_operation({remove, Key}) end, Keys);
        {batch, {Updates, Removes}} ->
            is_list(Updates) andalso
                is_list(Removes) andalso
                lists:all(fun(Key) -> is_operation({remove, Key}) end, Removes) andalso
                lists:all(fun(Op) -> is_operation({update, Op}) end, Updates);
        {reset, {}} ->
            true;
        is_bottom ->
            true;
        _ ->
            false
    end.
%% The whole map is bottom (indistinguishable from new()) when it is empty.
is_bottom(Map) ->
    dict:is_empty(Map).
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).

%% Counter entry: a remove/reset only cancels effects it has causally
%% seen, and re-creating an entry after removal starts it fresh.
reset1_test() ->
    Map0 = new(),
    % DC1: a.incr
    {ok, Incr1} = downstream({update, {{a, antidote_crdt_counter_fat}, {increment, 1}}}, Map0),
    {ok, Map1a} = update(Incr1, Map0),
    % DC1 reset
    {ok, Reset1} = downstream({reset, {}}, Map1a),
    {ok, Map1b} = update(Reset1, Map1a),
    % DC2 a.remove
    {ok, Remove1} = downstream({remove, {a, antidote_crdt_counter_fat}}, Map0),
    {ok, Map2a} = update(Remove1, Map0),
    % DC2 --> DC1
    {ok, Map1c} = update(Remove1, Map1b),
    % DC1 reset
    {ok, Reset2} = downstream({reset, {}}, Map1c),
    {ok, Map1d} = update(Reset2, Map1c),
    % DC1: a.incr
    {ok, Incr2} = downstream({update, {{a, antidote_crdt_counter_fat}, {increment, 2}}}, Map1d),
    {ok, Map1e} = update(Incr2, Map1d),
    io:format("Map0 = ~p~n", [Map0]),
    io:format("Incr1 = ~p~n", [Incr1]),
    io:format("Map1a = ~p~n", [Map1a]),
    io:format("Reset1 = ~p~n", [Reset1]),
    io:format("Map1b = ~p~n", [Map1b]),
    io:format("Remove1 = ~p~n", [Remove1]),
    io:format("Map2a = ~p~n", [Map2a]),
    io:format("Map1c = ~p~n", [Map1c]),
    io:format("Reset2 = ~p~n", [Reset2]),
    io:format("Map1d = ~p~n", [Map1d]),
    io:format("Incr2 = ~p~n", [Incr2]),
    io:format("Map1e = ~p~n", [Map1e]),
    ?assertEqual([], value(Map0)),
    ?assertEqual([{{a, antidote_crdt_counter_fat}, 1}], value(Map1a)),
    ?assertEqual([], value(Map1b)),
    ?assertEqual([], value(Map2a)),
    ?assertEqual([], value(Map1c)),
    ?assertEqual([], value(Map1d)),
    ?assertEqual([{{a, antidote_crdt_counter_fat}, 2}], value(Map1e)).
%% Same scenario as reset1_test but with a remove-wins set entry instead
%% of a counter, exercising a different embedded reset semantics.
reset2_test() ->
    Map0 = new(),
    % DC1: s.add
    {ok, Add1} = downstream({update, {{s, antidote_crdt_set_rw}, {add, a}}}, Map0),
    {ok, Map1a} = update(Add1, Map0),
    % DC1 reset
    {ok, Reset1} = downstream({reset, {}}, Map1a),
    {ok, Map1b} = update(Reset1, Map1a),
    % DC2 s.remove
    {ok, Remove1} = downstream({remove, {s, antidote_crdt_set_rw}}, Map0),
    {ok, Map2a} = update(Remove1, Map0),
    % DC2 --> DC1
    {ok, Map1c} = update(Remove1, Map1b),
    % DC1 reset
    {ok, Reset2} = downstream({reset, {}}, Map1c),
    {ok, Map1d} = update(Reset2, Map1c),
    % DC1: s.add
    {ok, Add2} = downstream({update, {{s, antidote_crdt_set_rw}, {add, b}}}, Map1d),
    {ok, Map1e} = update(Add2, Map1d),
    io:format("Map0 = ~p~n", [value(Map0)]),
    io:format("Add1 = ~p~n", [Add1]),
    io:format("Map1a = ~p~n", [value(Map1a)]),
    io:format("Reset1 = ~p~n", [Reset1]),
    io:format("Map1b = ~p~n", [value(Map1b)]),
    io:format("Remove1 = ~p~n", [Remove1]),
    io:format("Map2a = ~p~n", [value(Map2a)]),
    io:format("Map1c = ~p~n", [value(Map1c)]),
    io:format("Reset2 = ~p~n", [Reset2]),
    io:format("Map1d = ~p~n", [value(Map1d)]),
    io:format("Add2 = ~p~n", [Add2]),
    io:format("Map1e = ~p~n", [value(Map1e)]),
    ?assertEqual([], value(Map0)),
    ?assertEqual([{{s, antidote_crdt_set_rw}, [a]}], value(Map1a)),
    ?assertEqual([], value(Map1b)),
    ?assertEqual([], value(Map2a)),
    ?assertEqual([], value(Map1c)),
    ?assertEqual([], value(Map1d)),
    ?assertEqual([{{s, antidote_crdt_set_rw}, [b]}], value(Map1e)).
%% Nested maps: removing an embedded map entry resets it transitively, so
%% the inner set's content disappears with it.
prop1_test() ->
    Map0 = new(),
    % DC1: s.add
    {ok, Add1} = downstream(
        {update, {{a, antidote_crdt_map_rr}, {update, {{a, antidote_crdt_set_rw}, {add, a}}}}}, Map0
    ),
    {ok, Map1a} = update(Add1, Map0),
    % DC1 reset
    {ok, Reset1} = downstream({remove, {a, antidote_crdt_map_rr}}, Map1a),
    {ok, Map1b} = update(Reset1, Map1a),
    io:format("Map0 = ~p~n", [Map0]),
    io:format("Add1 = ~p~n", [Add1]),
    io:format("Map1a = ~p~n", [Map1a]),
    io:format("Reset1 = ~p~n", [Reset1]),
    io:format("Map1b = ~p~n", [Map1b]),
    ?assertEqual([], value(Map0)),
    ?assertEqual([{{a, antidote_crdt_map_rr}, [{{a, antidote_crdt_set_rw}, [a]}]}], value(Map1a)),
    ?assertEqual([], value(Map1b)).
%% An inner remove on a non-existent entry creates no observable state, so
%% a concurrent remove of the embedded map still converges to empty.
prop2_test() ->
    Map0 = new(),
    % DC1: update remove
    {ok, Add1} = downstream(
        {update, [{{b, antidote_crdt_map_rr}, {remove, {a, antidote_crdt_set_rw}}}]}, Map0
    ),
    {ok, Map1a} = update(Add1, Map0),
    % DC2 remove
    {ok, Remove2} = downstream({remove, {b, antidote_crdt_map_rr}}, Map0),
    {ok, Map2a} = update(Remove2, Map0),
    % pull DC2 -> DC1
    {ok, Map1b} = update(Remove2, Map1a),
    io:format("Map0 = ~p~n", [Map0]),
    io:format("Add1 = ~p~n", [Add1]),
    io:format("Map1a = ~p~n", [Map1a]),
    io:format("Remove2 = ~p~n", [Remove2]),
    io:format("Map1b = ~p~n", [Map1b]),
    ?assertEqual([], value(Map0)),
    ?assertEqual([], value(Map1a)),
    ?assertEqual([], value(Map2a)),
    ?assertEqual([], value(Map1b)).
%% Test helper: generate the downstream effect for Update against State
%% and apply it, returning the new state.
upd(Update, State) ->
    {ok, Downstream} = downstream(Update, State),
    {ok, Res} = update(Downstream, State),
    Res.

%% A full reset removes every entry and returns the map to bottom.
%% (Also drops stray dataset metadata fused onto the original -endif line.)
remove_test() ->
    M1 = new(),
    ?assertEqual([], value(M1)),
    ?assertEqual(true, is_bottom(M1)),
    M2 = upd(
        {update, [
            {{<<"a">>, antidote_crdt_set_aw}, {add, <<"1">>}},
            {{<<"b">>, antidote_crdt_register_mv}, {assign, <<"2">>}},
            {{<<"c">>, antidote_crdt_counter_fat}, {increment, 1}}
        ]},
        M1
    ),
    ?assertEqual(
        [
            {{<<"a">>, antidote_crdt_set_aw}, [<<"1">>]},
            {{<<"b">>, antidote_crdt_register_mv}, [<<"2">>]},
            {{<<"c">>, antidote_crdt_counter_fat}, 1}
        ],
        value(M2)
    ),
    ?assertEqual(false, is_bottom(M2)),
    M3 = upd({reset, {}}, M2),
    io:format("M3 state = ~p~n", [dict:to_list(M3)]),
    ?assertEqual([], value(M3)),
    ?assertEqual(true, is_bottom(M3)),
    ok.

-endif.
%%
%% @doc This module implements a hash ring.
%% Items are hashed into the ring NumReplicas times.
%%
%% An item is hashed using the following:
%%
%% sha1(Item + string(N)) where N is (1..NumReplicas)
%%
%% Note: When N is converted to a string it is not zero padded.
%%
-module(hash_ring).
-export([
create_ring/2,
get_item/2,
add_item/2,
remove_item/2
]).
%%
%% @doc Finds the item responsible for Key on the Ring.
%% Hashes Key to a point, then picks the first replica point on the
%% sorted circle that is strictly greater than it, wrapping around to the
%% head of the circle when the key hashes past the last point.
%%
get_item(Key, {_NumReplicas, Circle}) ->
    Point = hash_key(Key),
    {Item, _Replica} = case lists:dropwhile(fun({_Item, Replica}) ->
            Replica =< Point
        end, Circle) of
        [] ->
            %% past the last point: wrap to the first point on the circle
            hd(Circle);
        [H|_T] ->
            H
    end,
    Item.
%%
%% @doc Builds a hash ring containing Items, each placed NumReplicas times.
%%
%% A "replica" is just an extra position on the ring for the same item,
%% used to even out the key distribution.
%%
create_ring(Items, NumReplicas) ->
    EmptyRing = {NumReplicas, []},
    lists:foldl(fun add_item/2, EmptyRing, Items).
%%
%% @doc Adds an item (and its replica points) to the ring.
%% Returns the re-sorted ring with the item added.
%%
add_item(Item, {NumReplicas, Circle}) ->
    %% Both lists are already flat lists of {Item, Point} tuples, so a
    %% plain append suffices; the original's lists:flatten was a no-op.
    sort_ring({NumReplicas, Circle ++ get_item_points(Item, NumReplicas)}).
%%
%% @doc Removes an item and all of its replica points from the ring.
%% Returns the re-sorted ring without the item.
%%
remove_item(Item, {NumReplicas, Circle}) ->
    Points = get_item_points(Item, NumReplicas),
    Remaining = [P || P <- Circle, not lists:member(P, Points)],
    sort_ring({NumReplicas, Remaining}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Internal functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Orders ring entries by their point value; usort also drops entries
%% whose points compare equal (hash collisions).
sort_ring({NumReplicas, Entries}) ->
    ByPoint = fun({_ItemA, A}, {_ItemB, B}) -> A =< B end,
    {NumReplicas, lists:usort(ByPoint, Entries)}.
%%
%% @doc This function returns a list of N {Item, Point} tuples
%%
%% Each point is generated by hashing (item + 1) to (item + n) to evenly distribute the points
%%
%% Note: Item must be a string (list), since the replica suffix is
%% appended with '++' before hashing; the suffix is not zero padded
%% (e.g. "item1", "item2", ... "item10").
%% usort may yield fewer than N points if two suffixes collide under
%% SHA-1 (practically impossible).
get_item_points(Item, N) ->
    lists:map(fun(Partition) ->
        {Item, Partition}
    end,
    lists:usort(lists:map(fun(X) ->
        hash_key(Item ++ integer_to_list(X))
    end, lists:seq(1, N)))).
%%
%% Hashes Key with SHA-1 and interprets the 160-bit digest as an
%% unsigned big-endian integer.
%%
hash_key(Key) when is_binary(Key) ->
    hash_key(binary_to_list(Key));
hash_key(Key) ->
    binary:decode_unsigned(crypto:hash(sha, Key)).
%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Tests
%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% SHA-1 hashing is pure: the same key must always map to the same point.
hash_key_known_test() ->
    lists:foreach(fun(_) ->
        ?assertEqual(653878565946713713149629104275478104571867727804, hash_key("test123"))
    end, lists:seq(1, 1000)).
%% Entries are kept ordered by hash point (see sort_ring/1), which is
%% why "B"'s replica points appear in B2, B1 order below.
add_remove_item_test() ->
    Ring = create_ring([], 2),
    PointA1 = hash_key("A1"),
    PointA2 = hash_key("A2"),
    PointB1 = hash_key("B1"),
    PointB2 = hash_key("B2"),
    AddedRing = add_item("B", add_item("A", Ring)),
    ?assertEqual({2, [
        {"A", PointA1},
        {"A", PointA2},
        {"B", PointB2},
        {"B", PointB1}
    ]}, AddedRing),
    RemovedRing = remove_item("A", AddedRing),
    ?assertEqual({2, [
        {"B", PointB2},
        {"B", PointB1}
    ]}, RemovedRing).
-endif. | src/hash_ring.erl | 0.522202 | 0.527803 | hash_ring.erl | starcoder |
%%
%% @doc Various functions to work on binaries.
%%
%% @reference [A2] Chapter 7
%%
-module(bin).
-export([integer_to_bitstring/1]).
-export([lxor/1, lxor/2]).
-export([bin_to_hexstr/1, hexstr_to_bin/1]).
-export([reverse_bytes/1, reverse_bits/1]).
-export([term_to_packet/1, packet_to_term/1]).
%%
%% @doc Returns a bitstring representation of the
%% given integer with leading zeroes removed.
%% Zero itself trims all the way down to the empty bitstring.
%%
%% ```<<2#1100:4>> = bin:integer_to_bitstring(12).'''
%%
%% @see binary:encode_unsigned/1.
%%
-spec integer_to_bitstring(non_neg_integer()) -> bitstring().
integer_to_bitstring(Int) ->
    strip_leading_zeros(binary:encode_unsigned(Int)).

%% Drop leading zero bits one at a time; stop at the first set bit
%% (or when nothing is left).
strip_leading_zeros(<<0:1, Rest/bitstring>>) -> strip_leading_zeros(Rest);
strip_leading_zeros(Bits) -> Bits.
%%
%% @doc Left XOR.
%% XORs the common prefix of the two binaries: the result is as long
%% as the shorter argument.
%%
%% ```<<16#B9F9:16>> = bin:lxor(<<16#ABCDEF:24>>, <<16#1234:16>>).'''
%%
-spec lxor(binary(), binary()) -> binary().
lxor(X, Y) ->
    N = min(byte_size(X), byte_size(Y)),
    <<XPrefix:N/binary, _/binary>> = X,
    <<YPrefix:N/binary, _/binary>> = Y,
    crypto:exor(XPrefix, YPrefix).
%%
%% @doc Left XOR.
%% Can be used from escript or shell.
%% Folds lxor/2 over a list of hex strings and returns the result as a
%% hex string. Crashes (function_clause) on an empty list.
%%
-spec lxor([string()]) -> string().
lxor([H | T]) ->
    F = fun(X, Acc) -> lxor(hexstr_to_bin(X), Acc) end,
    bin_to_hexstr(lists:foldl(F, hexstr_to_bin(H), T)).
%%
%% @doc Converts a hexadecimal string to a binary.
%% The string length is expected to be even; an odd length crashes
%% with a badmatch on the assertion below.
%%
-spec hexstr_to_bin(string()) -> binary().
hexstr_to_bin(String) ->
    0 = length(String) rem 2,
    << <<(list_to_integer([Digit], 16)):4>> || Digit <- String >>.
%%
%% @doc Converts a binary to an uppercase hexadecimal string,
%% two characters per byte.
%%
-spec bin_to_hexstr(binary()) -> string().
bin_to_hexstr(Bin) ->
    lists:append([integer_to_list(Nibble, 16) || <<Nibble:4>> <= Bin]).
%%
%% @doc Reverses the order of bytes in a binary.
%%
-spec reverse_bytes(binary()) -> binary().
reverse_bytes(B) ->
    %% Prepending each byte to the accumulator reverses the order.
    lists:foldl(fun(Byte, Acc) -> <<Byte, Acc/binary>> end, <<>>, binary_to_list(B)).
%%
%% @doc Reverses the bits in a binary.
%%
-spec reverse_bits(binary()) -> binary().
reverse_bits(B) -> flip_bits(B, <<>>).

%% Move one bit at a time from the front of the input to the front of
%% the accumulator.
flip_bits(<<Bit:1, Rest/bitstring>>, Acc) ->
    flip_bits(Rest, <<Bit:1, Acc/bitstring>>);
flip_bits(<<>>, Acc) ->
    Acc.
%%
%% @doc Returns a binary consisting of the first four bytes of the
%% external term format, a 4-byte length N of the remaining payload,
%% and the N payload bytes. The inverse is {@link packet_to_term/1}.
%%
-spec term_to_packet(term()) -> binary().
term_to_packet(Term) ->
    <<Head:32, Payload/binary>> = term_to_binary(Term),
    <<Head:32, (byte_size(Payload)):32, Payload/binary>>.

%%
%% @doc The inverse of the {@link term_to_packet} function.
%% Any trailing bytes beyond the declared length are ignored.
%%
-spec packet_to_term(B::binary()) -> term().
packet_to_term(<<Head:32, Len:32, Tail/binary>>) ->
    <<Payload:Len/binary, _Trailing/binary>> = Tail,
    binary_to_term(<<Head:32, Payload/binary>>).
%% =============================================================================
%% Unit tests
%% =============================================================================
-include_lib("eunit/include/eunit.hrl").
integer_to_bitstring_test() ->
    ?assertEqual(<<>>, integer_to_bitstring(0)),
    ?assertEqual(<<2#10001100:8, 0:2>>, integer_to_bitstring(560)).
hexstr_to_bin_test() ->
    ?assertEqual(<<0, 18, 52, 86, 120, 144, 171, 205, 239>>, hexstr_to_bin("001234567890ABCDEF")).
bin_to_hexstr_test() ->
    ?assertEqual("001234567890ABCDEF", bin_to_hexstr(<<0, 18, 52, 86, 120, 144, 171, 205, 239>>)).
%% The second case exercises truncation to the shorter argument.
lxor_test() ->
    ?assertEqual("94D5513AC50DFF660F9FDE299DF35718",
        lxor(["E20106D7CD0DF0761E8DCD3D88E54000", "76D457ED08000F101112131415161718"])),
    ?assertEqual("94D5513AC50D",
        lxor(["E20106D7CD0DF0761E8DCD3D88E54000", "76D457ED0800"])).
reverse_bytes_test() ->
    ?assertEqual(<<12, 34, 56, 78>>, reverse_bytes(<<78, 56, 34, 12>>)).
reverse_bits_test() ->
    ?assertEqual(<<2#1001100100010111:16>>, reverse_bits(<<2#1110100010011001:16>>)).
%% NOTE(review): the expected bytes hard-code the ATOM_EXT (100)
%% encoding of atoms; term_to_binary/1 on recent OTP releases may emit
%% a different atom tag -- confirm against the targeted OTP version.
term_to_packet_test() ->
    ?assertEqual(<<131, 104, 3, 100, 0, 0, 0, 17, 0, 4, 97, 116, 111, 109, 97, 5, 107, 0, 6, 115, 116, 114, 105, 110, 103>>,
        term_to_packet({atom, 5, "string"})).
packet_to_term_test() ->
?assertEqual({atom, 5, "string"},
packet_to_term(<<131, 104, 3, 100, 0, 0, 0, 17, 0, 4, 97, 116, 111, 109, 97, 5, 107, 0, 6, 115, 116, 114, 105, 110, 103, "garbage">>)). | lib/ndpar/src/bin.erl | 0.609873 | 0.576393 | bin.erl | starcoder |
%-*-Mode:erlang;coding:utf-8;tab-width:4;c-basic-offset:4;indent-tabs-mode:()-*-
% ex: set ft=erlang fenc=utf-8 sts=4 ts=4 sw=4 et:
% Data Structures For String (List of Integers) Keys
% rbdict/aadict taken from:
% https://github.com/rvirding/rb/tree/develop/src
% trie taken from:
% https://github.com/okeuday/CloudI/blob/master/src/lib/cloud_stdlib/src/trie.erl
% run:
% erlc rbdict.erl
% erlc aadict.erl
% erlc trie.erl
% erlc string_key.erl
% erl -noshell -s string_key test -s init stop
-module(string_key).
-export([test/0, test/1, get/3, get_concurrent/2, set/3]).
-include("erlbench.hrl").
-define(WORDLIST, "data/words").
%% Container constructors, one per benchmarked data structure.
%% The argument is the expected number of entries N; most containers
%% ignore it (only the pre-sized hash tables use it).
data1(_) ->
    gb_trees:empty().
data2(_) ->
    rbdict:new().
data3(_) ->
    aadict:new().
%data4(_) ->
%    orddict:new().
data5(_) ->
    dict:new().
data6(_) ->
    trie:new().
data7(_) ->
    ets:new(ets_test_1, [set]).
%% The process dictionary needs no handle.
data8(_) ->
    undefined.
data9(_) ->
    ets:new(ets_test_2, [set, {read_concurrency, true}]).
%data10(N) ->
%    hasht:new(N).
%data11(N) ->
%    hashtl2:new(N).
%data12(N) ->
%    hashtl3:new(N).
%data13(N) ->
%    hashtl4:new(N).
data14(N) ->
    hashtl:new(N).
data15(_) ->
    ets:new(ets_test_3, [ordered_set]).
data16(_) ->
    ets:new(ets_test_4, [ordered_set, {read_concurrency, true}]).
data17(_) ->
    btrie:new().
data18(_) ->
    htrie:new().
data19(_) ->
    hamt:new().
data20(_) ->
    hashdict:new().
data21(_) ->
    maps:new().
data22(_) ->
    ttdict:new().
%% Store adapters: normalize every container to the same
%% (Container, Key, Value) -> Container interface used by set/3.
gb_trees_set(Tree, String, Value) ->
    gb_trees:enter(String, Value, Tree).
rbdict_set(Dict, String, Value) ->
    rbdict:store(String, Value, Dict).
aadict_set(Dict, String, Value) ->
    aadict:store(String, Value, Dict).
%orddict_set(Dict, String, Value) ->
%    orddict:store(String, Value, Dict).
dict_set(Dict, String, Value) ->
    dict:store(String, Value, Dict).
trie_set(Trie, String, Value) ->
    trie:store(String, Value, Trie).
%% ets mutates in place; the table id is threaded through unchanged.
ets_set(Tid, String, Value) ->
    true = ets:insert(Tid, {String, Value}),
    Tid.
pdict_set(_, String, Value) ->
    erlang:put(String, Value).
%hasht_set(HashT, String, Value) ->
%    hasht:store(String, Value, HashT).
%hashtl2_set(HashT, String, Value) ->
%    hashtl2:store(String, Value, HashT).
%hashtl3_set(HashT, String, Value) ->
%    hashtl3:store(String, Value, HashT).
%hashtl4_set(HashT, String, Value) ->
%    hashtl4:store(String, Value, HashT).
hashtl_set(HashT, String, Value) ->
    hashtl:store(String, Value, HashT).
btrie_set(Trie, String, Value) ->
    btrie:store(String, Value, Trie).
%htrie_set(Trie, String, Value) ->
%    htrie:put(String, Value, Trie).
%hamt_set(Trie, String, Value) ->
%    hamt:put(String, Value, Trie).
hashdict_set(Dict, String, Value) ->
    hashdict:store(String, Value, Dict).
maps_set(Map, String, Value) ->
    maps:put(String, Value, Map).
ttdict_set(Dict, String, Value) ->
    ttdict:store(String, Value, Dict).
%% Fetch adapters: normalize every container to the same
%% (Container, Key) -> Value interface used by get/3. Every benchmark
%% stores the atom 'empty', so some adapters assert it inline.
gb_trees_get(Tree, String) ->
    gb_trees:get(String, Tree).
rbdict_get(Dict, String) ->
    rbdict:fetch(String, Dict).
aadict_get(Dict, String) ->
    aadict:fetch(String, Dict).
%orddict_get(Dict, String) ->
%    orddict:fetch(String, Dict).
dict_get(Dict, String) ->
    dict:fetch(String, Dict).
trie_get(Trie, String) ->
    trie:fetch(String, Trie).
ets_get(Tid, String) ->
    ets:lookup_element(Tid, String, 2).
pdict_get(_, String) ->
    erlang:get(String).
%hasht_get(HashT, String) ->
%    hasht:fetch(String, HashT).
%hashtl2_get(HashT, String) ->
%    empty = hashtl2:fetch(String, HashT).
%hashtl3_get(HashT, String) ->
%    empty = hashtl3:fetch(String, HashT).
%hashtl4_get(HashT, String) ->
%    empty = hashtl4:fetch(String, HashT).
hashtl_get(HashT, String) ->
    empty = hashtl:fetch(String, HashT).
btrie_get(Trie, String) ->
    btrie:fetch(String, Trie).
%htrie_get(Trie, String) ->
%    htrie:get(String, Trie).
%hamt_get(Trie, String) ->
%    hamt:get(String, Trie).
hashdict_get(Dict, String) ->
    hashdict:fetch(String, Dict).
maps_get(Map, String) ->
    {ok, Value} = maps:find(String, Map),
    Value.
ttdict_get(Dict, String) ->
    ttdict:fetch(String, Dict).
%% Looks up every key in Keys via Fun, asserting each stored value is
%% the atom 'empty' (the value every benchmark writes). Returns ok.
get(Fun, Data, Keys) ->
    lists:foreach(fun(Key) -> empty = Fun(Data, Key) end, Keys),
    ok.
%% Runs get/3 (with the given argument list) in Processes parallel
%% processes and waits for all of them to finish.
%% Note: the module name string_key is hard-coded in the apply.
get_concurrent(Processes, Arguments) ->
    Parent = self(),
    Children = lists:map(fun(_) ->
        erlang:spawn(fun() ->
            ok = erlang:apply(string_key, get, Arguments),
            Parent ! {self(), done}
        end)
    end, lists:seq(1, Processes)),
    %% Collect one 'done' per child; selective receive keyed on the pid.
    lists:foreach(fun(Child) ->
        receive
            {Child, done} ->
                ok
        end
    end, Children),
    ok.
%% Inserts every key in Keys with the value 'empty', threading the
%% container through Fun; returns the final container.
set(Fun, Data0, Keys) ->
    lists:foldl(fun(Key, Acc) -> Fun(Acc, Key, empty) end, Data0, Keys).
%% Runs the benchmark with the default word count.
test() ->
    test(10000).
%% Benchmark driver: builds a shuffled word list of ~N entries, then
%% times bulk insert (set) and bulk lookup (get) for each container.
%% Returns a list of #result{} records, one per container.
test(N) ->
    %% The word file caps a single batch; repeat batches until ~N words.
    WordListLines = erlang:min(50000, N),
    Nfinal = N - N rem WordListLines,
    Words = lists:foldl(fun (_, L) ->
        array:to_list(array:resize(WordListLines, read_wordlist())) ++ L
    end, [], lists:seq(WordListLines, Nfinal, WordListLines)),
    true = erlang:length(Words) == Nfinal,
    %% btrie operates on binaries, so keep a binary copy of the keys.
    BWords = [erlang:list_to_binary(Word) || Word <- Words],
    %% gb_trees
    {S1, D1} = timer:tc(?MODULE, set, [fun gb_trees_set/3, data1(N), Words]),
    {G1, _} = timer:tc(?MODULE, get, [fun gb_trees_get/2, D1, Words]),
    %% rbdict
    {S2, D2} = timer:tc(?MODULE, set, [fun rbdict_set/3, data2(N), Words]),
    {G2, _} = timer:tc(?MODULE, get, [fun rbdict_get/2, D2, Words]),
    %% aadict
    {S3, D3} = timer:tc(?MODULE, set, [fun aadict_set/3, data3(N), Words]),
    {G3, _} = timer:tc(?MODULE, get, [fun aadict_get/2, D3, Words]),
    %% orddict
    %{S4, D4} = timer:tc(?MODULE, set, [fun orddict_set/3, data4(N), Words]),
    %{G4, _} = timer:tc(?MODULE, get, [fun orddict_get/2, D4, Words]),
    %% dict
    {S5, D5} = timer:tc(?MODULE, set, [fun dict_set/3, data5(N), Words]),
    {G5, _} = timer:tc(?MODULE, get, [fun dict_get/2, D5, Words]),
    %% trie
    {S6, D6} = timer:tc(?MODULE, set, [fun trie_set/3, data6(N), Words]),
    {G6, _} = timer:tc(?MODULE, get, [fun trie_get/2, D6, Words]),
    %% ets
    {S7, D7} = timer:tc(?MODULE, set, [fun ets_set/3, data7(N), Words]),
    {G7, _} = timer:tc(?MODULE, get, [fun ets_get/2, D7, Words]),
    ets:delete(D7),
    %% process dictionary
    {S8, D8} = timer:tc(?MODULE, set, [fun pdict_set/3, data8(N), Words]),
    {G8, _} = timer:tc(?MODULE, get, [fun pdict_get/2, D8, Words]),
    %% ets with 10 concurrent accesses
    {_, D9} = timer:tc(?MODULE, set, [fun ets_set/3, data9(N), Words]),
    {G9, _} = timer:tc(?MODULE, get_concurrent, [10, [fun ets_get/2, D9, Words]]),
    ets:delete(D9),
    %% hash table
    %{S10, D10} = timer:tc(?MODULE, set, [fun hasht_set/3, data10(N), Words]),
    %{G10, _} = timer:tc(?MODULE, get, [fun hasht_get/2, D10, Words]),
    %% hash table layered
    %{S11, D11} = timer:tc(?MODULE, set, [fun hashtl2_set/3, data11(N), Words]),
    %{G11, _} = timer:tc(?MODULE, get, [fun hashtl2_get/2, D11, Words]),
    %% hash table layered
    %{S12, D12} = timer:tc(?MODULE, set, [fun hashtl3_set/3, data12(N), Words]),
    %{G12, _} = timer:tc(?MODULE, get, [fun hashtl3_get/2, D12, Words]),
    %% hash table layered
    %{S13, D13} = timer:tc(?MODULE, set, [fun hashtl4_set/3, data13(N), Words]),
    %{G13, _} = timer:tc(?MODULE, get, [fun hashtl4_get/2, D13, Words]),
    %% hash table layered
    {S14, D14} = timer:tc(?MODULE, set, [fun hashtl_set/3, data14(N), Words]),
    {G14, _} = timer:tc(?MODULE, get, [fun hashtl_get/2, D14, Words]),
    %% ets
    {S15, D15} = timer:tc(?MODULE, set, [fun ets_set/3, data15(N), Words]),
    {G15, _} = timer:tc(?MODULE, get, [fun ets_get/2, D15, Words]),
    ets:delete(D15),
    %% ets with 10 concurrent accesses
    {_, D16} = timer:tc(?MODULE, set, [fun ets_set/3, data16(N), Words]),
    {G16, _} = timer:tc(?MODULE, get_concurrent, [10, [fun ets_get/2, D16, Words]]),
    ets:delete(D16),
    % btrie
    {S17, D17} = timer:tc(?MODULE, set, [fun btrie_set/3, data17(N), BWords]),
    {G17, _} = timer:tc(?MODULE, get, [fun btrie_get/2, D17, BWords]),
    % htrie
    %{S18, D18} = timer:tc(?MODULE, set, [fun htrie_set/3, data18(N), Words]),
    %{G18, _} = timer:tc(?MODULE, get, [fun htrie_get/2, D18, Words]),
    % hamt
    %{S19, D19} = timer:tc(?MODULE, set, [fun hamt_set/3, data19(N), Words]),
    %{G19, _} = timer:tc(?MODULE, get, [fun hamt_get/2, D19, Words]),
    % hashdict
    {S20, D20} = timer:tc(?MODULE, set, [fun hashdict_set/3, data20(N), Words]),
    {G20, _} = timer:tc(?MODULE, get, [fun hashdict_get/2, D20, Words]),
    % map in Erlang/OTP 18.0
    {S21, D21} = timer:tc(?MODULE, set, [fun maps_set/3, data21(N), Words]),
    {G21, _} = timer:tc(?MODULE, get, [fun maps_get/2, D21, Words]),
    % ttdict from https://github.com/rvirding/luerl
    {S22, D22} = timer:tc(?MODULE, set, [fun ttdict_set/3, data22(N), Words]),
    {G22, _} = timer:tc(?MODULE, get, [fun ttdict_get/2, D22, Words]),
    %% results (all timings are in microseconds, from timer:tc/3)
    [
        #result{name = "gb_trees", get = G1, set = S1},
        #result{name = "rbdict", get = G2, set = S2},
        #result{name = "aadict", get = G3, set = S3},
        %#result{name = "orddict", get = G4, set = S4},
        #result{name = "dict", get = G5, set = S5},
        #result{name = "trie", get = G6, set = S6},
        #result{name = "ets (set)", get = G7, set = S7},
        #result{name = "process dictionary", get = G8, set = S8},
        #result{name = "ets x10 read (set)", get = erlang:round(G9 / 10.0)},
        %#result{name = "hasht", get = G10, set = S10},
        %#result{name = "hashtl2", get = G11, set = S11},
        %#result{name = "hashtl3", get = G12, set = S12},
        %#result{name = "hashtl4", get = G13, set = S13},
        #result{name = "hashtl", get = G14, set = S14},
        #result{name = "ets (ordered_set)", get = G15, set = S15},
        #result{name = "ets x10 read (ordered_set)",
            get = erlang:round(G16 / 10.0)},
        #result{name = "btrie (binaries)", get = G17, set = S17},
        %#result{name = "htrie", get = G18, set = S18},
        %#result{name = "hamt", get = G19, set = S19}
        #result{name = "hashdict", get = G20, set = S20},
        #result{name = "map", get = G21, set = S21},
        #result{name = "ttdict", get = G22, set = S22}
    ].
%% Reads the word file into an array and returns it shuffled.
%% The array is pre-sized but growable ({fixed, false}) and later
%% resized down to the number of words actually read.
read_wordlist() ->
    {ok, F} = file:open(?WORDLIST, [read_ahead, raw, read]),
    Array = array:new([{size, 524288}, {default, -1}, {fixed, false}]),
    shuffle:shuffle(read_wordlist(0, F, Array)).
read_wordlist(I, F, Array) ->
case file:read_line(F) of
{ok, Line} ->
Word = lists:sublist(Line, erlang:length(Line) - 1),
if
Word == "" ->
read_wordlist(I, F, Array);
true ->
read_wordlist(I + 1, F, array:set(I, Word, Array))
end;
eof ->
array:fix(array:resize(I, Array))
end. | src/string_key.erl | 0.629319 | 0.425963 | string_key.erl | starcoder |
%% @doc Grass is a toy graph database with LevelDB as a backend.
%% @version 0.1
%% @reference <a href="https://github.com/eiri/grass">https://github.com/eiri/grass</a>
%% @author <NAME> <<EMAIL>>
%% @copyright 2012 <NAME>
%% @todo Make it compatable with <a href="https://github.com/tinkerpop">Tinkerpop</a>
-module(grass).
-author('<NAME> <<EMAIL>>').
-behaviour(application).
-behaviour(supervisor).
-define(SUPER(M), {M, {M, start_link, []}, permanent, 5000, supervisor, [M]}).
-define(WORKER(M, A), {M, {M, start_link, [A]}, permanent, 2000, worker, [M]}).
-define(EDGE, " -- ").
-type graph() :: binary() | pid().
-type vertex() :: binary().
-type key() :: binary().
-type value() :: term().
-type tag() :: {key(), value()}.
-type error() :: not_found | already_exists | {eleveldb, binary()}.
%% pub API
-export([
graphs/0,
create/1,
verticies/1,
verticies/2,
vertex_exists/2,
add_vertex/2,
clone_vertex/3,
modify_vertex/3,
del_vertex/2,
edges/1,
edges/2,
edge_exists/3,
add_edge/3,
del_edge/3,
tags/2,
tags/3,
add_tag/4,
del_tag/3,
drop/1,
destroy/1,
is_empty/1
]).
%% stats, example & pick-inside functions
-export([stats/1, example/1]).
%% behaviours callbacks
-export([start/0, stop/0, start/2, stop/1, init/1]).
%%
%% API
%%
%% @doc Shows list of available graphs.
%% Asks the register server for all known {Name, Pid} pairs and
%% returns just the names.
-spec graphs() -> [graph(),...] | [].
graphs() ->
  [ G || {G,_} <- gs_register_server:get() ].
%% @doc Creates a new graph G.
-spec create(graph()) -> ok | error().
create(G) ->
  gs_register_server:create(G).
%% @doc Deletes the graph G.
%% This function not only drops all the verticies on the graph, but also
%% removes graph's directory and shuts down graph's server.
-spec destroy(graph()) -> ok | error().
destroy(G) ->
  gs_register_server:destroy(G).
%% @doc Returns a list of all verticies of the graph G.
%% If the graph is empty, returns an empty list.
-spec verticies(graph()) -> list() | error().
verticies(G) ->
  gkeys(G).
%% @doc Returns a list of the verticies of graph G connected to the vertex V.
%% If the vertex doesn't have the connections returns an empty list.
-spec verticies(graph(), vertex()) -> list() | {error, error()}.
verticies(G, V) ->
  %% A vertex is stored as {V, Neighbours}; return the neighbour list.
  case gget(G, V) of
    {ok, V, Ins} -> Ins;
    E -> E
  end.
%% @doc Checks is the vertex V exists in graph G.
-spec vertex_exists(graph(), vertex()) -> boolean().
vertex_exists(G, V) ->
  gexists(G, V).
%% @doc Creates a new vertex V in graph G
%% with an empty adjacency list.
-spec add_vertex(graph(), vertex()) -> ok | {error, error()}.
add_vertex(G, V) ->
  case gexists(G, V) of
    true -> {error, already_exists};
    false -> gput(G, V, [])
  end.
%% @doc Creates an exact copy of the vertex V1 with name V2 in graph G.
%% Duplicates all the edges connecting to V1.
-spec clone_vertex(graph(), vertex(), vertex()) -> ok | {error, error()}.
clone_vertex(G, From, To) ->
  case gget(G, From) of
    {ok, From, Ins} ->
      %% Create the twin first, then mirror every edge of the source.
      ok = gput(G, To, []),
      [ ok = grass:add_edge(G, V, To) || V <- Ins ],
      ok;
    E -> E
  end.
%% @doc Convenience function. Creates an exact copy of the vertex V1 with name V2 in graph G
%% and deletes the vertex V1.
-spec modify_vertex(graph(), vertex(), vertex()) -> ok | {error, error()}.
modify_vertex(G, From, To) ->
  ok = grass:clone_vertex(G, From, To),
  ok = del_vertex(G, From).
%% @doc Deletes the vertex V in graph G.
%% Also removes V from the adjacency lists of all its neighbours.
-spec del_vertex(graph(), vertex()) -> ok | {error, error()}.
del_vertex(G, V) ->
  case gget(G, V) of
    {ok, V, Ins} ->
      lists:foreach(fun(V2) ->
        {ok, V2, Ins2} = gget(G, V2),
        ok = gput(G, V2, lists:delete(V, Ins2))
      end, Ins),
      gdel(G, V);
    E -> E
  end.
%% @doc Returns a list of all edges of the graph G.
%% If the graph is empty, returns an empty list.
%% Each undirected edge appears once, as [V1, V2] in first-seen order.
-spec edges(graph()) -> list().
edges(G) ->
  case gall(G) of
    {error, E} -> {error, E};
    All ->
      %% Adjacency lists name every edge twice ([A,B] and [B,A]);
      %% skip a pair when its mirror image is already accumulated.
      Edges = lists:foldl(fun
        ({V1, Ins}, Acc) ->
          lists:foldl(fun(V2, A) ->
            case lists:member([V2, V1], A) of
              true -> A;
              false -> [[V1, V2]|A]
            end
          end, Acc, Ins)
      end, [], All),
      lists:reverse(Edges)
  end.
%% @doc Returns a list of the edges connecting to the vertex V in graph G.
-spec edges(graph(), vertex()) -> list() | {error, error()}.
edges(G, V) ->
  case gget(G, V) of
    {ok, V, Ins} -> [ [V, V2] || V2 <- Ins];
    E -> E
  end.
%% @doc Checks if an edge between verticies 'From' and 'To' exists in graph G.
%% True only when both endpoints list each other.
-spec edge_exists(graph(), vertex(), vertex()) -> boolean().
edge_exists(G, From, To) ->
  case {gget(G, From), gget(G, To)} of
    {{ok, From, Ins1}, {ok, To, Ins2}} ->
      lists:member(To, Ins1) and lists:member(From, Ins2);
    _ -> false
  end.
%% @doc Adds edge between verticies 'From' and 'To' in graph G.
%% Returns 'ok' even if the edge already exists.
-spec add_edge(graph(), vertex(), vertex()) -> ok | {error, error()}.
add_edge(G, From, To) ->
  case {gget(G, From), gget(G, To)} of
    {{ok, From, Ins1}, {ok, To, Ins2}} ->
      %% usort keeps each neighbour at most once, so re-adding is a no-op.
      ok = gput(G, From, lists:usort([To|Ins1])),
      ok = gput(G, To, lists:usort([From|Ins2]));
    {E, {ok, _, _}} -> E;
    {_, E} -> E
  end.
%% @doc Deletes the edge between the verticies 'From' and 'To' in graph G.
%% Returns 'ok' even if the edge doesn't exist.
-spec del_edge(graph(), vertex(), vertex()) -> ok | {error, error()}.
del_edge(G, From, To) ->
  case {gget(G, From), gget(G, To)} of
    {{ok, From, Ins1}, {ok, To, Ins2}} ->
      ok = gput(G, From, lists:delete(To, Ins1)),
      ok = gput(G, To, lists:delete(From, Ins2));
    {E, {ok, _, _}} -> E;
    {_, E} -> E
  end.
%% @doc Gets all the attributes that belong to a given vertex V in graph G.
%% Returns {Key, Value} pairs in the backend's key order.
-spec tags(graph(), vertex()) -> [tag(),...] | [] | {error, error()}.
tags(G, V) ->
  case gall(G, tags, V) of
    {error, E} -> {error, E};
    All ->
      %% Tag keys are stored prefixed as <<V/binary, "/", Key/binary>>;
      %% keep only the pairs whose prefix is exactly this vertex.
      %% Cons onto the accumulator (the previous [X] ++ Acc built a
      %% throwaway one-element list per tag) and reverse once at the end.
      Fold = fun({PfxKey, Val}, Acc) ->
        case binary:split(PfxKey, <<"/">>) of
          [V, Key] -> [{Key, Val} | Acc];
          _ -> Acc
        end
      end,
      lists:reverse(lists:foldl(Fold, [], All))
  end.
%% @doc Gets the value of the attribute K in vertex V of graph G
%% Return undefined if there is no such attribute.
-spec tags(graph(), vertex(), key()) -> {key(), value()} | undefined | {error, error()}.
tags(G, V, Key) ->
  case gexists(G, V) of
    false -> {error, not_found};
    true ->
      %% Tags live in a separate column keyed by <<V/binary,"/",Key/binary>>.
      PfxKey = <<V/binary, "/", Key/binary>>,
      case gget(G, tags, PfxKey) of
        {ok, PfxKey, Val} -> {Key, Val};
        Err -> Err
      end
  end.
%% @doc Adds the attribute to a vertex V in graph G.
%% If the attribute already exists, updates it.
-spec add_tag(graph(), vertex(), key(), value()) -> ok | {error, error()}.
add_tag(G, V, Key, Val) ->
  case gexists(G, V) of
    false -> {error, not_found};
    true ->
      PfxKey = <<V/binary, "/", Key/binary>>,
      gput(G, tags, PfxKey, Val)
  end.
%% @doc Removes the attribute with key K from vertex V in graph G.
%% If the attribute doesn't exist just returns ok.
%% NOTE(review): unlike tags/3 and add_tag/4 this does not check that
%% the vertex exists -- confirm that is intentional.
-spec del_tag(graph(), vertex(), key()) -> ok | {error, error()}.
del_tag(G, V, Key) ->
  PfxKey = <<V/binary, "/", Key/binary>>,
  gdel(G, tags, PfxKey).
%% @doc Checks if the graph G has any verticies.
%% A binary graph name is first resolved to the graph's server pid.
-spec is_empty(graph()) -> boolean().
is_empty(G) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> grass:is_empty(Pid);
    E -> E
  end;
is_empty(G) ->
  gen_server:call(G, is_empty).
%% @doc Deletes all the verticies and edges in the graph G.
%% Kind of like an analog of 'drop table' in an RDBMS.
-spec drop(graph()) -> ok | {error, error()}.
drop(G) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> grass:drop(Pid);
    E -> E
  end;
drop(G) ->
  gen_server:call(G, drop).
%% Private
%% Storage accessors. Each comes in two layers: a by-name clause that
%% resolves the graph pid via the register server, and a by-pid clause
%% that talks to the graph's gen_server. The DB argument selects the
%% column: 'verticies' (the default) or 'tags'.
%% Reads are synchronous calls; writes/deletes are asynchronous casts.
gall(G) ->
  gall(G, verticies).
gall(G, DB) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gall(Pid, DB);
    E -> E
  end;
gall(G, DB) ->
  gen_server:call(G, {all, DB}).
%% Same as gall/2 but iterates starting from the prefix From.
gall(G, DB, From) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gall(Pid, DB, From);
    E -> E
  end;
gall(G, DB, From) ->
  gen_server:call(G, {all, DB, From}).
gkeys(G) ->
  gkeys(G, verticies).
gkeys(G, DB) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gkeys(Pid, DB);
    E -> E
  end;
gkeys(G, DB) ->
  gen_server:call(G, {keys, DB}).
gexists(G, K) ->
  gexists(G, verticies, K).
gexists(G, DB, K) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gexists(Pid, DB, K);
    E -> E
  end;
gexists(G, DB, K) ->
  gen_server:call(G, {exists, DB, K}).
gput(G, K, V) ->
  gput(G, verticies, K, V).
gput(G, DB, K, V) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gput(Pid, DB, K, V);
    E -> E
  end;
gput(G, DB, K, V) ->
  gen_server:cast(G, {put, DB, K, V}).
gget(G, K) ->
  gget(G, verticies, K).
gget(G, DB, K) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gget(Pid, DB, K);
    E -> E
  end;
gget(G, DB, K) ->
  gen_server:call(G, {get, DB, K}).
gdel(G, K) ->
  gdel(G, verticies, K).
gdel(G, DB, K) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> gdel(Pid, DB, K);
    E -> E
  end;
gdel(G, DB, K) ->
  gen_server:cast(G, {delete, DB, K}).
%% Move to public API with multigraph
%% Fetches per-column statistics from the graph server and logs them.
stats(G) when is_binary(G) ->
  case gs_register_server:get(G) of
    {ok, Pid} -> grass:stats(Pid);
    E -> E
  end;
stats(G) ->
  Stats = gen_server:call(G, stats),
  VStats = proplists:get_value(verticies, Stats),
  TStats = proplists:get_value(tags, Stats),
  lager:info("~n-= Verticies =-~n~s~n-= Attributes =-~n~s", [VStats, TStats]).
%% Demo data loaders: build a word-adjacency graph from a bundled text.
example(tiger) ->
  example(<<"tiger">>, "tiger.txt");
example(limeric) ->
  example(<<"limeric">>, "limeric.txt");
example(jabberwocky) ->
  example(<<"jabberwocky">>, "jabberwocky.txt").
%% Reads priv/File, lowercases and tokenizes it, adds one vertex per
%% word, tags each word with a colour and its length, and links every
%% consecutive word pair with an edge. Note: the very first word gets
%% a vertex but no tags or edge (see the 'first' accumulator seed).
example(G, File) ->
  DirBin = filename:dirname(code:which(?MODULE)),
  {ok, Bin} = file:read_file(filename:join([DirBin,"..", "priv", File])),
  %% Words starting with a vowel are tagged "red", all others "green".
  %% "aeiou" is the same list of byte values the magic-number list
  %% [97,101,105,111,117] used to spell out.
  Color = fun(<<First, _/binary>>) ->
    case lists:member(First, "aeiou") of
      true -> <<"red">>;
      false -> <<"green">>
    end
  end,
  Punct = [<<"\n">>, <<".">>, <<",">>, <<"?">>, <<"!">>, <<"\"">>, <<":">>, <<";">>, <<" ">>],
  Parts = binary:split(Bin, Punct, [global, trim]),
  Text = [ list_to_binary(xmerl_lib:to_lower(binary_to_list(W))) || W <- Parts, W /= <<>> ],
  grass:create(G),
  lists:foldl(fun
    (W, first) -> grass:add_vertex(G, W), W;
    (W, Prev) ->
      grass:add_vertex(G, W),
      grass:add_tag(G, W, <<"color">>, Color(W)),
      %% byte_size/1 replaces the vaguer erlang:size/1 (W is a binary).
      grass:add_tag(G, W, <<"sides">>, byte_size(W)),
      grass:add_edge(G, Prev, W),
      W
  end, first, Text),
  done.
%% app/sup start/stop
%% Convenience starters: boot/stop the application together with its
%% runtime dependencies, in order (reversed for stop).
start() ->
  Deps = [lager, crypto, inets, mochiweb, webmachine, grass],
  [ application:start(App) || App <- Deps ].
stop() ->
  Deps = [lager, crypto, inets, mochiweb, webmachine, grass],
  [ application:stop(App) || App <- lists:reverse(Deps) ].
%% application behaviour callbacks; the module doubles as top supervisor.
start(_Type, _StartArgs) ->
  supervisor:start_link({local, ?MODULE}, ?MODULE, []).
stop(_State) ->
  ok.
%% Supervisor callback: starts the webmachine endpoint, the graph
%% supervisor and the register server under one_for_one.
init([]) ->
  %% web (in dispatcher '*' is atom, even when first argument is a string!)
  {ok, Port} = application:get_env(port),
  WebConfig = [{ip, "0.0.0.0"}, {port, Port}, {dispatch, [{['*'], gs_web, []}]}],
  Web = {web, {webmachine_mochiweb, start, [WebConfig]}, permanent, 2000, worker, dynamic},
  %% graph
  {ok, DBDir} = application:get_env(work_dir),
  BaseDir = filename:dirname(code:which(?MODULE)),
  WorkDir = filename:join([BaseDir, "..", DBDir]),
  file:make_dir(WorkDir),
  %% The register table is public so other processes can use it too.
  Tid = ets:new(gs_register_server, [public]),
  Register = ?WORKER(gs_register_server, [Tid, WorkDir]),
  GraphSup = ?SUPER(gs_graph_sup),
  % strategy
  MaxRestart = 3,
  MaxWait = 3600,
  RestartStrategy = {one_for_one, MaxRestart, MaxWait},
  Children = [Web, GraphSup, Register],
  {ok, {RestartStrategy, Children}}.
%%
%% EUnit tests
%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Single setup/teardown fixture; the cases share one graph server and
%% build on each other's state, hence {inorder, ...}.
grass_test_() ->
  {setup,
    fun test_setup/0,
    fun test_teardown/1,
    fun(Data) ->
      Pid = proplists:get_value(ref, Data),
      {inorder, [
        test_add_vertex(Pid),
        test_vertex_exists(Pid),
        test_verticies(Pid),
        test_tags(Pid),
        test_add_edge(Pid),
        test_edges(Pid),
        test_verticies_for_vertex(Pid),
        test_edges_for_vertex(Pid),
        test_edge_exists(Pid),
        test_clone_vertex(Pid),
        test_modify_vertex(Pid),
        test_del_edge(Pid),
        test_del_vertex(Pid),
        test_drop_and_is_empty(Pid)
      ]}
    end
  }.
%% Starts a graph server over a throwaway temp directory and returns
%% both so teardown can stop the server and remove the directory.
test_setup() ->
  WD = os:cmd("mktemp -d /tmp/grass.XXXXXX"),
  WorkDir = string:strip(WD, right, $\n),
  %% os:cmd(io_lib:format("rm -rf ~p", [WorkDir])),
  ?debugFmt("Work Dir: ~p~n", [WorkDir]),
  {ok, Ref} = gs_graph_server:start_link([{<<"test">>, WorkDir}]),
  [{dir, WorkDir}, {ref, Ref}].
test_teardown(TestData) ->
  WorkDir = proplists:get_value(dir, TestData),
  Ref = proplists:get_value(ref, TestData),
  gs_graph_server:stop(Ref),
  os:cmd(io_lib:format("rm -rf ~p", [WorkDir])).
%% The cases below run in order and deliberately share graph state:
%% each builds on the verticies/edges left by the previous one.
test_add_vertex(G) ->
  [
    {"Add vertex", ?_assertEqual(ok, grass:add_vertex(G, <<"a">>))},
    {"Error on duplicate", ?_assertEqual({error, already_exists}, grass:add_vertex(G, <<"a">>))}
  ].
test_vertex_exists(G) ->
  [
    {"True on existing vertex", ?_assert(grass:vertex_exists(G, <<"a">>))},
    {"False on missing vertex", ?_assertNot(grass:vertex_exists(G, <<"b">>))}
  ].
test_verticies(G) ->
  All = fun() ->
    grass:add_vertex(G, <<"b">>),
    grass:add_vertex(G, <<"c">>),
    grass:verticies(G)
  end,
  [
    {"List of all verticies", ?_assertEqual([<<"a">>, <<"b">>, <<"c">>], All())}
  ].
test_tags(G) ->
  [
    {"Empty list of tags", ?_assertEqual([], grass:tags(G, <<"b">>))},
    {"Can add tag", ?_assertEqual(ok, grass:add_tag(G, <<"b">>, <<"color">>, <<"red">>))},
    {"Can add another tag", ?_assertEqual(ok, grass:add_tag(G, <<"b">>, <<"mood">>, <<"blue">>))},
    {"Can add tag to different vertex", ?_assertEqual(ok, grass:add_tag(G, <<"c">>, <<"color">>, <<"green">>))},
    {"Can get all tags", ?_assertEqual([{<<"color">>, <<"red">>}, {<<"mood">>, <<"blue">>}], grass:tags(G, <<"b">>))},
    {"Can get one tag", ?_assertEqual({<<"color">>, <<"red">>}, grass:tags(G, <<"b">>, <<"color">>))},
    {"Can delete tag", ?_assertEqual(ok, grass:del_tag(G, <<"b">>, <<"color">>))},
    {"Proper tag deleted", ?_assertEqual([{<<"mood">>, <<"blue">>}], grass:tags(G, <<"b">>))},
    {"No tag on other vertex affected", ?_assertEqual([{<<"color">>, <<"green">>}], grass:tags(G, <<"c">>))}
  ].
test_add_edge(G) ->
  [
    {"Add edge", ?_assertEqual(ok, grass:add_edge(G, <<"a">>, <<"b">>))},
    {"'ok' on adding an existing edge", ?_assertEqual(ok, grass:add_edge(G, <<"b">>, <<"a">>))}
  ].
test_edges(G) ->
  Edges = fun() ->
    grass:add_vertex(G, <<"d">>),
    grass:add_edge(G, <<"c">>, <<"b">>),
    grass:add_edge(G, <<"c">>, <<"d">>),
    grass:add_edge(G, <<"a">>, <<"c">>),
    grass:edges(G)
  end,
  [
    {"Small list of all edges", ?_assertEqual([[<<"a">>, <<"b">>]], grass:edges(G))},
    {"Full list of all edges", ?_assertEqual([[<<"a">>, <<"b">>], [<<"a">>, <<"c">>], [<<"b">>, <<"c">>], [<<"c">>, <<"d">>]], Edges())}
  ].
test_verticies_for_vertex(G) ->
  [
    {"Got verticies for 'a'", ?_assertEqual([<<"b">>, <<"c">>], grass:verticies(G, <<"a">>))},
    {"Got verticies for 'b'", ?_assertEqual([<<"a">>, <<"c">>], grass:verticies(G, <<"b">>))},
    {"Got verticies for 'c'", ?_assertEqual([<<"a">>, <<"b">>, <<"d">>], grass:verticies(G, <<"c">>))},
    {"Got verticies for 'd'", ?_assertEqual([<<"c">>], grass:verticies(G, <<"d">>))}
  ].
test_edges_for_vertex(G) ->
  [
    {"Got edges for 'a'", ?_assertEqual([[<<"a">>, <<"b">>], [<<"a">>, <<"c">>]], grass:edges(G, <<"a">>))},
    {"Got edges for 'b'", ?_assertEqual([[<<"b">>, <<"a">>], [<<"b">>, <<"c">>]], grass:edges(G, <<"b">>))},
    {"Got edges for 'c'", ?_assertEqual([[<<"c">>, <<"a">>], [<<"c">>, <<"b">>], [<<"c">>, <<"d">>]], grass:edges(G, <<"c">>))},
    {"Got edges for 'd'", ?_assertEqual([[<<"d">>, <<"c">>]], grass:edges(G, <<"d">>))}
  ].
test_edge_exists(G) ->
  [
    {"Got 'true' for existing edge", ?_assert(grass:edge_exists(G, <<"a">>, <<"c">>))},
    {"Got 'false' for non existing edge", ?_assertNot(grass:edge_exists(G, <<"a">>, <<"d">>))}
  ].
test_clone_vertex(G) ->
  [
    {"Clone vertex", ?_assertEqual(ok, grass:clone_vertex(G, <<"c">>, <<"e">>))},
    {"Correct connections in cloned", ?_assertEqual([<<"a">>, <<"b">>, <<"d">>], grass:verticies(G, <<"e">>))},
    {"Correct edges in cloned", ?_assertEqual([[<<"e">>, <<"a">>], [<<"e">>, <<"b">>], [<<"e">>, <<"d">>]], grass:edges(G, <<"e">>))},
    {"'e' knows 'a'", ?_assert(grass:edge_exists(G, <<"e">>, <<"a">>))},
    {"'a' knows 'e'", ?_assert(grass:edge_exists(G, <<"a">>, <<"e">>))}
  ].
test_modify_vertex(G) ->
  [
    {"Modify vertex", ?_assertEqual(ok, grass:modify_vertex(G, <<"e">>, <<"eh">>))},
    {"'e' no more", ?_assertNot(grass:vertex_exists(G, <<"e">>))},
    {"Correct connections in modified", ?_assertEqual([<<"a">>, <<"b">>, <<"d">>], grass:verticies(G, <<"eh">>))},
    {"Correct edges in modified", ?_assertEqual([[<<"eh">>, <<"a">>], [<<"eh">>, <<"b">>], [<<"eh">>, <<"d">>]], grass:edges(G, <<"eh">>))},
    {"'eh' knows 'a'", ?_assert(grass:edge_exists(G, <<"eh">>, <<"a">>))},
    {"'a' knows 'eh'", ?_assert(grass:edge_exists(G, <<"a">>, <<"eh">>))},
    {"'eh' knows 'b'", ?_assert(grass:edge_exists(G, <<"eh">>, <<"b">>))},
    {"'b' knows 'eh'", ?_assert(grass:edge_exists(G, <<"b">>, <<"eh">>))},
    {"'eh' knows 'd'", ?_assert(grass:edge_exists(G, <<"eh">>, <<"d">>))},
    {"'d' knows 'eh'", ?_assert(grass:edge_exists(G, <<"d">>, <<"eh">>))}
  ].
test_del_edge(G) ->
  [
    {"Delete existing edge", ?_assertEqual(ok, grass:del_edge(G, <<"eh">>, <<"d">>))},
    {"Delete non existing edge", ?_assertEqual(ok, grass:del_edge(G, <<"a">>, <<"d">>))},
    {"Deleted edge gone", ?_assertNot(grass:edge_exists(G, <<"eh">>, <<"d">>))},
    {"Non delete edge to 'a' preserved", ?_assert(grass:edge_exists(G, <<"eh">>, <<"a">>))},
    {"Non delete edge to 'b' preserved", ?_assert(grass:edge_exists(G, <<"b">>, <<"eh">>))}
  ].
test_del_vertex(G) ->
  [
    {"Delete vertex 'eh'", ?_assertEqual(ok, grass:del_vertex(G, <<"eh">>))},
    {"All edges of 'eh' gone", ?_assertEqual([[<<"a">>, <<"b">>], [<<"a">>, <<"c">>], [<<"b">>, <<"c">>], [<<"c">>, <<"d">>]], grass:edges(G))},
    {"All verticies preserved", ?_assertEqual([<<"a">>, <<"b">>, <<"c">>, <<"d">>], grass:verticies(G))}
  ].
test_drop_and_is_empty(G) ->
  [
    {"Drop", ?_assertEqual(ok, grass:drop(G))},
    {"IsEmpty", ?_assert(grass:is_empty(G))},
    {"It is really empty", ?_assertEqual([], grass:verticies(G))}
  ].
-endif. | src/grass.erl | 0.525612 | 0.575588 | grass.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% A set of optics specific to arrays.
%%% @end
%%%-------------------------------------------------------------------
-module(optic_array).
%% API
-export([all/0,
all/1,
nth/1,
nth/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% @see all/1
%% Convenience form of all/1 using the default (empty) option map.
-spec all() -> optic:optic().
all() ->
all(#{}).
%% @doc
%% Focus on all values of an array.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_array:all()], array:from_list([1,2,3])).
%% {ok,[1,2,3]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec all(Options) -> optic:optic() when
Options :: optic:variations().
all(Options) ->
%% Read path: fold Fun over every element. Non-arrays yield
%% {error, undefined} so optic composition can fail cleanly.
Fold =
fun (Fun, Acc, Array) ->
case is_array(Array) of
true ->
{ok, array:foldl(fun (_Index, Elem, InnerAcc) ->
Fun(Elem, InnerAcc)
end, Acc, Array)};
false ->
{error, undefined}
end
end,
%% Write path: rebuild the array index by index while threading the
%% user accumulator through Fun alongside the partially-updated array.
MapFold =
fun (Fun, Acc, Array) ->
case is_array(Array) of
true ->
{ok, array:foldl(fun (Index, Elem, {InnerArray, InnerAcc}) ->
{NewElem, NewAcc} = Fun(Elem, InnerAcc),
{array:set(Index, NewElem, InnerArray), NewAcc}
end,
{Array, Acc},
Array)};
false ->
{error, undefined}
end
end,
%% Constructor used by the 'create' option variations: a fresh array
%% whose default element is the template value.
New =
fun (_Data, Template) ->
array:new([{default, Template}])
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see nth/2
%% Convenience form of nth/2 using the default (empty) option map.
-spec nth(N) -> optic:optic() when
N :: pos_integer().
nth(N) ->
nth(N, #{}).
%% @doc
%% Focus on the nth value of an array. Like lists, but unlike the
%% standard array operations, indexing begins at 1.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_array:nth(1)], array:from_list([1,2,3])).
%% {ok,[1]}
%% '''
%% @end
%% @param N The index of the array value to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec nth(N, Options) -> optic:optic() when
N :: pos_integer(),
Options :: optic:variations().
nth(N, Options) when N >= 1 ->
%% The public API is 1-based; the array module is 0-based.
Index = N - 1,
%% Read path: fetch the single focused element, guarding both that the
%% container is an array and that the index is addressable.
Fold =
fun (Fun, Acc, Array) ->
case is_array(Array) andalso valid_index(Index, Array) of
true ->
Elem = array:get(Index, Array),
{ok, Fun(Elem, Acc)};
false ->
{error, undefined}
end
end,
%% Write path: replace the single focused element.
MapFold =
fun (Fun, Acc, Array) ->
case is_array(Array) andalso valid_index(Index, Array) of
true ->
Elem = array:get(Index, Array),
{NewElem, NewAcc} = Fun(Elem, Acc),
{ok, {array:set(Index, NewElem, Array), NewAcc}};
false ->
{error, undefined}
end
end,
%% Constructor for the 'create' variations.
%% NOTE(review): array:resize/2 sets the size to exactly N, which would
%% shrink an existing array larger than N — confirm that "ensure at
%% least N" was not the intent here.
New =
fun (Data, Template) ->
case is_array(Data) of
true ->
array:resize(N, Data);
false ->
array:new(N, [{default, Template}])
end
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% @private
%% Return true when the term is an array created by the stdlib `array'
%% module. Uses the stdlib predicate `array:is_array/1' directly instead
%% of probing with `array:size/1' inside a try/catch, which is clearer
%% and avoids exception handling on the happy path.
is_array(Unknown) ->
    array:is_array(Unknown).
%% @private
%% An index is addressable when, for fixed-size arrays, it is strictly
%% below the array size; extensible (non-fixed) arrays accept any
%% non-negative index. Negative indices have no clause and crash with
%% function_clause — deliberate let-it-crash for programmer errors.
valid_index(Index, Array) when Index >= 0 ->
case array:is_fix(Array) of
true ->
Index < array:size(Array);
false ->
true
end. | src/optic_array.erl | 0.619356 | 0.495056 | optic_array.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(mem3_reshard_job).
-export([
start_link/1,
checkpoint_done/1,
jobfmt/1,
pickfun/3
]).
-export([
init/1,
initial_copy/1,
initial_copy_impl/1,
topoff/1,
topoff_impl/1,
build_indices/1,
copy_local_docs/1,
copy_local_docs_impl/1,
update_shardmap/1,
wait_source_close/1,
wait_source_close_impl/1,
source_delete/1,
source_delete_impl/1,
completed/1
]).
-include_lib("couch/include/couch_db.hrl").
-include("mem3_reshard.hrl").
% Batch size for internal replication topoffs
-define(INTERNAL_REP_BATCH_SIZE, 2000).
% The list of possible job states. The order of this
% list is important as a job will progress linearly
% through it. However, when starting a job we may
% have to resume from an earlier state as listed
% below in STATE_RESTART.
-define(SPLIT_STATES, [
new,
initial_copy,
topoff1,
build_indices,
topoff2,
copy_local_docs,
update_shardmap,
wait_source_close,
topoff3,
source_delete,
completed
]).
% When a job starts it may be resuming from a partially
% completed state. These state pairs list the state
% we have to restart from for each possible state.
-define(STATE_RESTART, #{
new => initial_copy,
initial_copy => initial_copy,
topoff1 => topoff1,
build_indices => topoff1,
topoff2 => topoff1,
copy_local_docs => topoff1,
update_shardmap => update_shardmap,
wait_source_close => wait_source_close,
topoff3 => wait_source_close,
source_delete => wait_source_close,
completed => completed
}).
% If we have a worker failing during any of these
% states we need to clean up the targets
-define(CLEAN_TARGET_STATES, [
initial_copy,
topoff1,
build_indices,
topoff2,
copy_local_docs
]).
%% Start the job process via proc_lib; init/1 below acknowledges the
%% start with proc_lib:init_ack/1 once the job record is prepared.
start_link(#job{} = Job) ->
proc_lib:start_link(?MODULE, init, [Job]).
% This is called by the main process after it has checkpointed the progress
% of the job. After the new state is checkpointed, we signal the job to start
% executing that state.
checkpoint_done(#job{pid = Pid} = Job) ->
couch_log:notice(" ~p : checkpoint done for ~p", [?MODULE, jobfmt(Job)]),
Pid ! checkpoint_done,
ok.
% Formatting function, used for logging mostly
jobfmt(#job{} = Job) ->
#job{
id = Id,
source = #shard{name = Source},
target = Target,
split_state = State,
job_state = JobState,
pid = Pid
} = Job,
TargetCount = length(Target),
Msg = "#job{~s ~s /~B job_state:~s split_state:~s pid:~p}",
Fmt = io_lib:format(Msg, [Id, Source, TargetCount, JobState, State, Pid]),
lists:flatten(Fmt).
% This is the function which picks between various targets. It is used here as
% well as in mem3_rep internal replicator and couch_db_split bulk copy logic.
% Given a document id and list of ranges, and a hash function, it will pick one
% of the range or return not_in_range atom.
%
% NOTE(review): if more than one range matched the hash (overlapping
% ranges) the [Key] match below would crash with case_clause; ranges are
% presumably guaranteed disjoint by the caller — confirm.
pickfun(DocId, [[B, E] | _] = Ranges, {_M, _F, _A} = HashFun) when
is_integer(B), is_integer(E), B =< E ->
HashKey = mem3_hash:calculate(HashFun, DocId),
Pred = fun([Begin, End]) ->
Begin =< HashKey andalso HashKey =< End
end,
case lists:filter(Pred, Ranges) of
[] -> not_in_range;
[Key] -> Key
end.
%% proc_lib entry point: prepare the job record (possibly rewinding the
%% split state for recovery), ack the starter, checkpoint, then enter the
%% run/1 state machine loop. Traps exits so worker and parent 'EXIT'
%% messages are handled as data.
init(#job{} = Job0) ->
process_flag(trap_exit, true),
Job1 = set_start_state(Job0#job{
pid = self(),
start_time = mem3_reshard:now_sec(),
workers = [],
retries = 0
}),
Job2 = update_split_history(Job1),
proc_lib:init_ack({ok, self()}),
couch_log:notice("~p starting job ~s", [?MODULE, jobfmt(Job2)]),
ok = checkpoint(Job2),
run(Job2).
%% Main loop: execute the current split state, wait for its workers,
%% advance to the next state and checkpoint it. A retry_state/2 call
%% below unwinds back here via throw({retry, Job}).
run(#job{split_state = CurrState} = Job) ->
%% All three topoff states share one implementation function.
StateFun = case CurrState of
topoff1 -> topoff;
topoff2 -> topoff;
topoff3 -> topoff;
_ -> CurrState
end,
NewJob = try
Job1 = ?MODULE:StateFun(Job),
Job2 = wait_for_workers(Job1),
Job3 = switch_to_next_state(Job2),
ok = checkpoint(Job3),
Job3
catch
throw:{retry, RetryJob} ->
RetryJob
end,
run(NewJob).
%% Map a recovered state to the state we must actually restart from
%% (per ?STATE_RESTART); unknown states are fatal.
set_start_state(#job{split_state = State} = Job) ->
case {State, maps:get(State, ?STATE_RESTART, undefined)} of
{_, undefined} ->
Fmt1 = "~p recover : unknown state ~s",
couch_log:error(Fmt1, [?MODULE, jobfmt(Job)]),
erlang:error({invalid_split_job_recover_state, Job});
{initial_copy, initial_copy} ->
% Since we recover from initial_copy to initial_copy, we need
% to reset the target state as initial_copy expects to
% create a new target
Fmt2 = "~p recover : resetting target ~s",
couch_log:notice(Fmt2, [?MODULE, jobfmt(Job)]),
reset_target(Job);
{_, StartState} ->
Job#job{split_state = StartState}
end.
%% Next state for the job's current split state, per ?SPLIT_STATES order.
get_next_state(#job{split_state = State}) ->
get_next_state(State, ?SPLIT_STATES).
%% Walk the ordered state list and return the state immediately after
%% the current one. 'completed' is terminal and maps to itself; a state
%% absent from the list crashes with function_clause (let it crash).
get_next_state(completed, _States) ->
    completed;
get_next_state(Current, [Current, Next | _Rest]) ->
    Next;
get_next_state(Current, [_Other | Rest]) ->
    get_next_state(Current, Rest).
%% Advance the job to its next split state: clear transient error info,
%% reset retries and workers, record the transition in the history, and
%% sanity-check the resulting state.
switch_to_next_state(#job{} = Job0) ->
Info0 = Job0#job.state_info,
Info1 = info_delete(error, Info0),
Info2 = info_delete(reason, Info1),
Job1 = Job0#job{
split_state = get_next_state(Job0),
update_time = mem3_reshard:now_sec(),
retries = 0,
state_info = Info2,
workers = []
},
Job2 = update_split_history(Job1),
check_state(Job2).
checkpoint(Job) ->
% Ask main process to checkpoint. When it has finished it will notify us
% by calling checkpoint_done/1. The reason not to call the main process
% via a gen_server:call is because the main process could be in the middle
% of terminating the job and then it would deadlock (after sending us a
% shutdown message) and it would end up using the whole supervisor
% termination timeout before finally exiting.
ok = mem3_reshard:checkpoint(Job#job.manager, Job),
Parent = parent(),
receive
{'EXIT', Parent, Reason} ->
handle_exit(Job, Reason);
checkpoint_done ->
ok;
Other ->
handle_unknown_msg(Job, "checkpoint", Other)
end.
%% Block until every spawned worker has exited. Parent exits and unknown
%% messages are handled here too since we are in a selective receive.
wait_for_workers(#job{workers = []} = Job) ->
Job;
wait_for_workers(#job{workers = Workers} = Job) ->
Parent = parent(),
receive
{'EXIT', Parent, Reason} ->
handle_exit(Job, Reason);
{'EXIT', Pid, Reason} ->
case lists:member(Pid, Workers) of
true ->
NewJob = handle_worker_exit(Job, Pid, Reason),
wait_for_workers(NewJob);
false ->
handle_unknown_msg(Job, "wait_for_workers", {Pid, Reason})
end;
Other ->
handle_unknown_msg(Job, "wait_for_workers", Other)
end.
%% React to a worker 'EXIT': normal exits shrink the worker list; a
%% missing source/target aborts the job (cleaning partially-built
%% targets where safe); any other reason retries the state until
%% max_retries/0 is exceeded.
handle_worker_exit(#job{workers = Workers} = Job, Pid, normal) ->
Job#job{workers = Workers -- [Pid]};
handle_worker_exit(#job{} = Job, _Pid, {error, missing_source}) ->
Msg1 = "~p stopping worker due to source missing ~p",
couch_log:error(Msg1, [?MODULE, jobfmt(Job)]),
kill_workers(Job),
%% Targets are only safe to wipe before the shard map was updated
%% (states listed in ?CLEAN_TARGET_STATES).
case lists:member(Job#job.split_state, ?CLEAN_TARGET_STATES) of
true ->
Msg2 = "~p cleaning target after db was deleted ~p",
couch_log:error(Msg2, [?MODULE, jobfmt(Job)]),
reset_target(Job),
exit({error, missing_source});
false ->
exit({error, missing_source})
end;
handle_worker_exit(#job{} = Job, _Pid, {error, missing_target}) ->
Msg = "~p stopping worker due to target db missing ~p",
couch_log:error(Msg, [?MODULE, jobfmt(Job)]),
kill_workers(Job),
exit({error, missing_target});
handle_worker_exit(#job{} = Job0, _Pid, Reason) ->
couch_log:error("~p worker error ~p ~p", [?MODULE, jobfmt(Job0), Reason]),
kill_workers(Job0),
Job1 = Job0#job{workers = []},
case Job1#job.retries =< max_retries() of
true ->
retry_state(Job1, Reason);
false ->
exit(Reason)
end.
% Cleanup and exit when we receive an 'EXIT' message from our parent. In case
% the shard map is being updated, try to wait some time for it to finish.
handle_exit(#job{split_state = update_shardmap, workers = [WPid]} = Job,
Reason) ->
Timeout = update_shard_map_timeout_sec(),
Msg1 = "~p job exit ~s ~p while shard map is updating, waiting ~p sec",
couch_log:warning(Msg1, [?MODULE, jobfmt(Job), Reason, Timeout]),
receive
{'EXIT', WPid, normal} ->
Msg2 = "~p ~s shard map finished updating successfully, exiting",
couch_log:notice(Msg2, [?MODULE, jobfmt(Job)]),
exit(Reason);
{'EXIT', WPid, Error} ->
Msg3 = "~p ~s shard map update failed with error ~p",
couch_log:error(Msg3, [?MODULE, jobfmt(Job), Error]),
exit(Reason)
after Timeout * 1000->
Msg4 = "~p ~s shard map update timeout exceeded ~p sec",
couch_log:error(Msg4, [?MODULE, jobfmt(Job), Timeout]),
kill_workers(Job),
exit(Reason)
end;
handle_exit(#job{} = Job, Reason) ->
kill_workers(Job),
exit(Reason).
%% Record the error, report progress, sleep for the retry interval (while
%% still honoring parent exits), then unwind to run/1 via
%% throw({retry, Job}) so the same state is executed again.
retry_state(#job{retries = Retries, state_info = Info} = Job0, Error) ->
Job1 = Job0#job{
retries = Retries + 1,
state_info = info_update(error, Error, Info)
},
couch_log:notice("~p retrying ~p ~p", [?MODULE, jobfmt(Job1), Retries]),
Job2 = report(Job1),
Timeout = retry_interval_sec(),
Parent = parent(),
receive
{'EXIT', Parent, Reason} ->
handle_exit(Job2, Reason);
Other ->
handle_unknown_msg(Job2, "retry_state", Other)
after Timeout * 1000 ->
ok
end,
throw({retry, Job2}).
%% Push the job's current progress to the manager process.
report(#job{manager = ManagerPid} = Job) ->
Job1 = Job#job{update_time = mem3_reshard:now_sec()},
ok = mem3_reshard:report(ManagerPid, Job1),
Job1.
%% Unlink and kill every worker, then drain any 'EXIT' messages they may
%% already have queued in our mailbox.
kill_workers(#job{workers = Workers}) ->
lists:foreach(fun(Worker) ->
unlink(Worker),
exit(Worker, kill)
end, Workers),
flush_worker_messages().
%% Drop queued worker 'EXIT' messages, but never consume one from the
%% parent — that one still needs to be handled by the caller.
flush_worker_messages() ->
Parent = parent(),
receive
{'EXIT', Pid, _} when Pid =/= Parent ->
flush_worker_messages()
after 0 ->
ok
end.
%% Resolve our supervisor/starter from proc_lib's '$ancestors' entry in
%% the process dictionary.
parent() ->
case get('$ancestors') of
[Pid | _] when is_pid(Pid) -> Pid;
[Name | _] when is_atom(Name) -> whereis(Name);
_ -> undefined
end.
%% Unexpected mailbox contents are a programming error: log and crash.
handle_unknown_msg(Job, When, RMsg) ->
LogMsg = "~p ~s received an unknown message ~p when in ~s",
couch_log:error(LogMsg, [?MODULE, jobfmt(Job), RMsg, When]),
erlang:error({invalid_split_job_message, Job#job.id, When, RMsg}).
%% State entry: run the bulk copy in a linked worker and report it.
initial_copy(#job{} = Job) ->
Pid = spawn_link(?MODULE, initial_copy_impl, [Job]),
report(Job#job{workers = [Pid]}).
%% Worker body: split the source shard into the target ranges, then seed
%% mem3_rep checkpoint docs at the sequence the copy finished at so the
%% following topoff replication starts from there.
initial_copy_impl(#job{source = Source, target = Targets0} = Job) ->
#shard{name = SourceName} = Source,
Targets = [{R, N} || #shard{range = R, name = N} <- Targets0],
TMap = maps:from_list(Targets),
LogMsg1 = "~p initial_copy started ~s",
LogArgs1 = [?MODULE, shardsstr(Source, Targets0)],
couch_log:notice(LogMsg1, LogArgs1),
case couch_db_split:split(SourceName, TMap, fun pickfun/3) of
{ok, Seq} ->
LogMsg2 = "~p initial_copy of ~s finished @ seq:~p",
LogArgs2 = [?MODULE, shardsstr(Source, Targets0), Seq],
couch_log:notice(LogMsg2, LogArgs2),
create_artificial_mem3_rep_checkpoints(Job, Seq);
{error, Error} ->
LogMsg3 = "~p initial_copy of ~p finished @ ~p",
LogArgs3 = [?MODULE, shardsstr(Source, Targets0), Error],
couch_log:notice(LogMsg3, LogArgs3),
exit({error, Error})
end.
%% State entry for topoff1/2/3: replicate remaining changes in a worker.
topoff(#job{} = Job) ->
Pid = spawn_link(?MODULE, topoff_impl, [Job]),
report(Job#job{workers = [Pid]}).
%% Worker body: internal replication from source to all targets in
%% batches of ?INTERNAL_REP_BATCH_SIZE until caught up.
topoff_impl(#job{source = #shard{} = Source, target = Targets}) ->
couch_log:notice("~p topoff ~p", [?MODULE, shardsstr(Source, Targets)]),
check_source_exists(Source, topoff),
check_targets_exist(Targets, topoff),
TMap = maps:from_list([{R, T} || #shard{range = R} = T <- Targets]),
Opts = [{batch_size, ?INTERNAL_REP_BATCH_SIZE}, {batch_count, all}],
case mem3_rep:go(Source, TMap, Opts) of
{ok, Count} ->
Args = [?MODULE, shardsstr(Source, Targets), Count],
couch_log:notice("~p topoff done ~s, count: ~p", Args),
ok;
{error, Error} ->
Args = [?MODULE, shardsstr(Source, Targets), Error],
couch_log:error("~p topoff failed ~s, error: ~p", Args),
exit({error, Error})
end.
%% State entry: spawn index builders for the targets' design docs. This
%% state retries itself inline (no worker) when spawning builders fails.
build_indices(#job{} = Job) ->
#job{
source = #shard{name = SourceName} = Source,
target = Targets,
retries = Retries,
state_info = Info
} = Job,
check_source_exists(Source, build_indices),
{ok, DDocs} = mem3_reshard_index:design_docs(SourceName),
Indices = mem3_reshard_index:target_indices(DDocs, Targets),
case mem3_reshard_index:spawn_builders(Indices) of
{ok, []} ->
% Skip the log spam if this is a no-op
Job#job{workers = []};
{ok, Pids} ->
report(Job#job{workers = Pids});
{error, Error} ->
case Job#job.retries =< max_retries() of
true ->
build_indices(Job#job{
retries = Retries + 1,
state_info = info_update(error, Error, Info)
});
false ->
exit(Error)
end
end.
%% State entry: copy the _local docs in a worker.
copy_local_docs(#job{split_state = copy_local_docs} = Job) ->
Pid = spawn_link(?MODULE, copy_local_docs_impl, [Job]),
report(Job#job{workers = [Pid]}).
%% Worker body: route each local doc to its target range via pickfun/3.
copy_local_docs_impl(#job{source = Source, target = Targets0}) ->
#shard{name = SourceName} = Source,
Targets = [{R, N} || #shard{range = R, name = N} <- Targets0],
TMap = maps:from_list(Targets),
LogArg1 = [?MODULE, shardsstr(Source, Targets)],
couch_log:notice("~p copy local docs start ~s", LogArg1),
case couch_db_split:copy_local_docs(SourceName, TMap, fun pickfun/3) of
ok ->
couch_log:notice("~p copy local docs finished for ~s", LogArg1),
ok;
{error, Error} ->
LogArg2 = [?MODULE, shardsstr(Source, Targets), Error],
couch_log:error("~p copy local docs failed for ~s ~p", LogArg2),
exit({error, Error})
end.
%% State entry: the shard map document update runs in mem3_reshard_dbdoc.
update_shardmap(#job{} = Job) ->
Pid = spawn_link(mem3_reshard_dbdoc, update_shard_map, [Job]),
report(Job#job{workers = [Pid]}).
%% State entry: announce the source as deleted so clients let go of it,
%% then wait in a worker for all monitors on the source db to clear.
wait_source_close(#job{source = #shard{name = Name}} = Job) ->
couch_event:notify(Name, deleted),
Pid = spawn_link(?MODULE, wait_source_close_impl, [Job]),
report(Job#job{workers = [Pid]}).
%% Worker body: poll until nothing but this process holds the source db
%% open, bounded by the configurable source_close_timeout_sec.
wait_source_close_impl(#job{source = #shard{name = Name}, target = Targets}) ->
Timeout = config:get_integer("reshard", "source_close_timeout_sec", 600),
check_targets_exist(Targets, wait_source_close),
case couch_db:open_int(Name, [?ADMIN_CTX]) of
{ok, Db} ->
Now = mem3_reshard:now_sec(),
case wait_source_close(Db, 1, Now + Timeout) of
true ->
ok;
false ->
exit({error, source_db_close_timeout, Name, Timeout})
end;
{not_found, _} ->
couch_log:warning("~p source already deleted ~p", [?MODULE, Name]),
ok
end.
%% Poll loop: true once no process (other than ourselves) monitors the
%% source db; false when the deadline passes first.
wait_source_close(Db, SleepSec, UntilSec) ->
case couch_db:monitored_by(Db) -- [self()] of
[] ->
true;
[_ | _] ->
Now = mem3_reshard:now_sec(),
case Now < UntilSec of
true ->
LogMsg = "~p : Waiting for source shard ~p to be closed",
couch_log:notice(LogMsg, [?MODULE, couch_db:name(Db)]),
timer:sleep(SleepSec * 1000),
wait_source_close(Db, SleepSec, UntilSec);
false ->
false
end
end.
%% State entry: delete (or deliberately keep) the source in a worker.
source_delete(#job{} = Job) ->
Pid = spawn_link(?MODULE, source_delete_impl, [Job]),
report(Job#job{workers = [Pid]}).
%% Worker body: delete the source shard files unless configured to keep
%% them, then notify listeners that each target was updated.
source_delete_impl(#job{source = #shard{name = Name}, target = Targets}) ->
check_targets_exist(Targets, source_delete),
case config:get_boolean("mem3_reshard", "delete_source", true) of
true ->
case couch_server:delete(Name, [?ADMIN_CTX]) of
ok ->
couch_log:notice("~p : deleted source shard ~p",
[?MODULE, Name]);
not_found ->
couch_log:warning("~p : source was already deleted ~p",
[?MODULE, Name])
end;
false ->
% Emit deleted event even when not actually deleting the files.
% This is the second one emitted; the other one was before
% wait_source_close. They should be idempotent. This one is just to
% match the one that couch_server would emit had the config not
% been set
couch_event:notify(Name, deleted),
LogMsg = "~p : according to configuration not deleting source ~p",
couch_log:warning(LogMsg, [?MODULE, Name])
end,
TNames = [TName || #shard{name = TName} <- Targets],
lists:foreach(fun(TName) -> couch_event:notify(TName, updated) end, TNames).
%% Terminal state: a normal exit ends the job process.
completed(#job{} = Job) ->
couch_log:notice("~p : ~p completed, exit normal", [?MODULE, jobfmt(Job)]),
exit(normal).
% This is for belt and suspenders really. Call periodically to validate the
% state is one of the expected states.
-spec check_state(#job{}) -> #job{} | no_return().
check_state(#job{split_state = State} = Job) ->
case lists:member(State, ?SPLIT_STATES) of
true ->
Job;
false ->
erlang:error({invalid_shard_split_state, State, Job})
end.
%% Write an identical replication checkpoint doc into the source and each
%% target so the subsequent mem3_rep topoff resumes from Seq instead of 0.
create_artificial_mem3_rep_checkpoints(#job{} = Job, Seq) ->
#job{source = Source = #shard{name = SourceName}, target = Targets} = Job,
check_source_exists(Source, initial_copy),
TNames = [TN || #shard{name = TN} <- Targets],
Timestamp = list_to_binary(mem3_util:iso8601_timestamp()),
couch_util:with_db(SourceName, fun(SDb) ->
[couch_util:with_db(TName, fun(TDb) ->
Doc = mem3_rep_checkpoint_doc(SDb, TDb, Timestamp, Seq),
{ok, _} = couch_db:update_doc(SDb, Doc, []),
{ok, _} = couch_db:update_doc(TDb, Doc, []),
ok
end) || TName <- TNames]
end),
ok.
%% Build a _local checkpoint doc mirroring what mem3_rep itself would
%% have written after replicating up to Seq between this node's source
%% and target.
mem3_rep_checkpoint_doc(SourceDb, TargetDb, Timestamp, Seq) ->
Node = atom_to_binary(node(), utf8),
SourceUUID = couch_db:get_uuid(SourceDb),
TargetUUID = couch_db:get_uuid(TargetDb),
History = {[
{<<"source_node">>, Node},
{<<"source_uuid">>, SourceUUID},
{<<"source_seq">>, Seq},
{<<"timestamp">>, Timestamp},
{<<"target_node">>, Node},
{<<"target_uuid">>, TargetUUID},
{<<"target_seq">>, Seq}
]},
Body = {[
{<<"seq">>, Seq},
{<<"target_uuid">>, TargetUUID},
{<<"history">>, {[{Node, [History]}]}}
]},
Id = mem3_rep:make_local_id(SourceUUID, TargetUUID),
#doc{id = Id, body = Body}.
%% Exit with {error, missing_source} if the source shard file vanished;
%% handle_worker_exit/3 gives that reason special treatment.
check_source_exists(#shard{name = Name}, StateName) ->
case couch_server:exists(Name) of
true ->
ok;
false ->
ErrMsg = "~p source ~p is unexpectedly missing in ~p",
couch_log:error(ErrMsg, [?MODULE, Name, StateName]),
exit({error, missing_source})
end.
%% Likewise for targets, with the {error, missing_target} reason.
check_targets_exist(Targets, StateName) ->
lists:foreach(fun(#shard{name = Name}) ->
case couch_server:exists(Name) of
true ->
ok;
false ->
ErrMsg = "~p target ~p is unexpectedly missing in ~p",
couch_log:error(ErrMsg, [?MODULE, Name, StateName]),
exit({error, missing_target})
end
end, Targets).
%% Configuration accessors with compiled-in defaults.
-spec max_retries() -> integer().
max_retries() ->
config:get_integer("reshard", "max_retries", 1).
-spec retry_interval_sec() -> integer().
retry_interval_sec() ->
config:get_integer("reshard", "retry_interval_sec", 10).
-spec update_shard_map_timeout_sec() -> integer().
update_shard_map_timeout_sec() ->
config:get_integer("reshard", "update_shardmap_timeout_sec", 60).
%% Insert or replace the `{Key, Value}' entry in the state-info
%% proplist, keeping an existing entry's position.
-spec info_update(atom(), any(), [tuple()]) -> [tuple()].
info_update(Key, Value, Info) ->
    lists:keystore(Key, 1, Info, {Key, Value}).
%% Drop `Key' from the state-info proplist; a no-op when absent.
-spec info_delete(atom(), [tuple()]) -> [tuple()].
info_delete(Key, Info) ->
    lists:keydelete(Key, 1, Info).
%% Format "source -> target[,target...]" for log lines.
-spec shardsstr(#shard{}, #shard{} | [#shard{}]) -> string().
shardsstr(#shard{name = SourceName}, #shard{name = TargetName}) ->
lists:flatten(io_lib:format("~s -> ~s", [SourceName, TargetName]));
shardsstr(#shard{name = SourceName}, Targets) ->
TNames = [TN || #shard{name = TN} <- Targets],
TargetsStr = string:join([binary_to_list(T) || T <- TNames], ","),
lists:flatten(io_lib:format("~s -> ~s", [SourceName, TargetsStr])).
%% Remove target shard files left over from a previous attempt so
%% initial_copy can recreate them; refuses (crashes) if a target has
%% already made it into the shard map.
-spec reset_target(#job{}) -> #job{}.
reset_target(#job{source = Source, target = Targets} = Job) ->
ShardNames = try
[N || #shard{name = N} <- mem3:local_shards(mem3:dbname(Source))]
catch
error:database_does_not_exist ->
[]
end,
lists:map(fun(#shard{name = Name}) ->
case {couch_server:exists(Name), lists:member(Name, ShardNames)} of
{_, true} ->
% Should never get here but if we do crash and don't continue
LogMsg = "~p : ~p target unexpectedly found in shard map ~p",
couch_log:error(LogMsg, [?MODULE, jobfmt(Job), Name]),
erlang:error({target_present_in_shard_map, Name});
{true, false} ->
LogMsg = "~p : ~p resetting ~p target",
couch_log:warning(LogMsg, [?MODULE, jobfmt(Job), Name]),
couch_db_split:cleanup_target(Source#shard.name, Name);
{false, false} ->
ok
end
end, Targets),
Job.
%% Append the current transition to the job history, collapsing all
%% intermediate split states to the coarse job state 'running'.
-spec update_split_history(#job{}) -> #job{}.
update_split_history(#job{split_state = St, update_time = Ts} = Job) ->
Hist = Job#job.history,
JobSt = case St of
completed -> completed;
failed -> failed;
new -> new;
stopped -> stopped;
_ -> running
end,
Job#job{history = mem3_reshard:update_history(JobSt, St, Ts, Hist)}. | src/mem3/src/mem3_reshard_job.erl | 0.57081 | 0.419172 | mem3_reshard_job.erl | starcoder |
%%% @copyright Erlware, LLC. All Rights Reserved.
%%%
%%% This file is provided to you under the BSD License; you may not use
%%% this file except in compliance with the License.
-module(erlcron).
-export([validate/1,
cron/1,
at/2,
once/2,
cancel/1,
datetime/0,
set_datetime/1,
multi_set_datetime/1,
multi_set_datetime/2]).
-export_type([job/0,
job_ref/0,
run_when/0,
callable/0,
dow/0,
dom/0,
period/0,
duration/0,
constraint/0,
cron_time/0,
seconds/0]).
%%%===================================================================
%%% Types
%%%===================================================================
-type seconds() :: integer().
-type cron_time() :: {integer(), am | pm}
| {integer(), integer(), am | pm}
| calendar:time().
-type constraint() :: {between, cron_time(), cron_time()}.
-type duration() :: {integer(), hr | min | sec}.
-type period() :: cron_time() | {every, duration(), constraint()}.
-type dom() :: integer().
-type dow() :: mon | tue | wed | thu | fri | sat | sun.
-type callable() :: {M :: module(), F :: atom(), A :: [term()]} | function().
-type run_when() :: {once, cron_time()}
| {once, seconds()}
| {daily, period()}
| {weekly, dow(), period()}
| {monthly, dom(), period()}.
-type job() :: {run_when(), callable()}.
%% should be opaque but dialyzer does not allow it
-type job_ref() :: reference().
%%%===================================================================
%%% API
%%%===================================================================
%% @doc
%% Check whether the given run spec is valid; delegates to ecrn_agent.
-spec validate(run_when()) -> valid | invalid.
validate(Spec) ->
ecrn_agent:validate(Spec).
%% @doc
%% Adds a new job to the cron system. Jobs are described in the job()
%% spec. It returns the JobRef that can be used to manipulate the job
%% after it is created.
%%
%% NOTE(review): the actual return value is whatever
%% ecrn_cron_sup:add_job/2 returns; per the spec it is presumably the
%% freshly made JobRef — confirm in ecrn_cron_sup.
-spec cron(job()) -> job_ref().
cron(Job) ->
JobRef = make_ref(),
ecrn_cron_sup:add_job(JobRef, Job).
%% @doc
%% Convenience wrapper: schedule `Fun' to run every day at the given
%% time, returning the job reference.
-spec at(cron_time() | seconds(), function()) -> job_ref().
at(When, Fun) ->
    cron({{daily, When}, Fun}).
%% @doc
%% Convenience wrapper: schedule `Fun' to run a single time once the
%% given time (or delay in seconds) is reached.
-spec once(cron_time() | seconds(), function()) -> job_ref().
once(When, Fun) ->
    cron({{once, When}, Fun}).
%% @doc
%% Cancel the job specified by the jobref.
%% Thin delegation to ecrn_control.
-spec cancel(job_ref()) -> ok | undefined.
cancel(JobRef) ->
ecrn_control:cancel(JobRef).
%% @doc
%% Get the current date time of the running erlcron system.
-spec datetime() -> {calendar:datetime(), seconds()}.
datetime() ->
ecrn_control:datetime().
%% @doc
%% Set the current date time of the running erlcron system.
-spec set_datetime(calendar:datetime()) -> ok.
set_datetime(DateTime) ->
ecrn_control:set_datetime(DateTime).
%% @doc
%% Set the current date time of the erlcron system running on different nodes
%% (this node plus every connected node).
-spec multi_set_datetime(calendar:datetime()) -> ok.
multi_set_datetime(DateTime) ->
ecrn_control:multi_set_datetime([node()|nodes()], DateTime).
%% @doc
%% Set the current date time of the erlcron system running on the
%% specified nodes
-spec multi_set_datetime([node()], calendar:datetime()) -> ok.
multi_set_datetime(Nodes, DateTime) when is_list(Nodes) ->
ecrn_control:multi_set_datetime(Nodes, DateTime). | deps/erlcron/src/erlcron.erl | 0.589598 | 0.401482 | erlcron.erl | starcoder |
%% Copyright (c) 2018 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(minirest_handler).
-export([init/1, dispatch/3]).
-type(config() :: #{apps => list(atom()), modules => list(module())}).
-export_type([config/0]).
-spec(init(config()) -> {?MODULE, dispatch, [map()]}).
%% Scan the configured apps/modules for -rest_api(...) attributes and
%% build a sorted route table; the result is a dispatch triple consumed
%% by the HTTP layer.
init(Config) ->
%% Each rest_api attribute carries a map with at least path and name.
%% The path is pre-tokenized into a match pattern, and routes whose
%% name appears under 'except' in Config are filtered out.
Routes = lists:usort(
[API#{module => Module, pattern => string:tokens(Path, "/")}
|| Module <- modules(Config), {rest_api, [API = #{path := Path, name := Name}]}
<- Module:module_info(attributes), not lists:member(Name, maps:get(except, Config, [])) ]),
{?MODULE, dispatch, [Routes]}.
%% Candidate modules to scan: those listed explicitly under 'modules'
%% plus every module of every application listed under 'apps'.
modules(Config) ->
lists:foldl(fun(App, Acc) ->
{ok, Mods} = application:get_key(App, modules),
lists:append(Mods, Acc)
end, maps:get(modules, Config, []), maps:get(apps, Config, [])).
%% Get API List
%% NOTE(review): binary_to_atom/2 is applied to the client-supplied
%% request method here and below; cowboy presumably constrains methods
%% to a bounded set, otherwise this could grow the atom table — confirm.
dispatch("/", Req, Routes) ->
case binary_to_atom(cowboy_req:method(Req), utf8) of
'GET' ->
jsonify(200, [{code, 0}, {data, [format_route(Route) || Route <- Routes]}], Req);
_ ->
reply(400, <<"Bad Request">>, Req)
end;
%% Dispatch request to REST APIs
%% The old-style `catch' turns a failed path-variable conversion
%% (badarg from parse_var/2) into a 404 and a parameter-parsing crash
%% into a 400.
dispatch(Path, Req, Routes) ->
case catch match_route(binary_to_atom(cowboy_req:method(Req), utf8), Path, Routes) of
{ok, #{module := Mod, func := Fun, bindings := Bindings}} ->
case catch parse_params(Req) of
{'EXIT', Reason} ->
error_logger:error_msg("Params error: ~p", [Reason]),
reply(400, <<"Bad Request">>, Req);
Params ->
jsonify(erlang:apply(Mod, Fun, [Bindings, Params]), Req)
end;
{'EXIT', {badarg, _}} ->
reply(404, <<"Not found.">>, Req);
false ->
reply(404, <<"Not found.">>, Req)
end.
%% Render a single route map as a JSON-friendly proplist, stripping the
%% ":type" annotations from the advertised path.
format_route(#{name := N, method := M, path := P, descr := D}) ->
    [{name, N},
     {method, M},
     {path, format_path(P)},
     {descr, iolist_to_binary(D)}].
%% Strip the ":type" prefix from typed path segments, e.g. turning
%% "/api/:int:id" into "/api/:id", leaving untyped paths untouched.
format_path(Path) ->
    TypedSegment = <<":[^:]+(:[^/]+)">>,
    re:replace(Path, TypedSegment, <<"\\1">>, [{return, binary}, global]).
%% Dual-purpose clauses: with a list, try each route until one matches
%% (first match wins); with a single route map, require the method to
%% match and then attempt a path match. Non-matching shapes fall through
%% to false.
match_route(_Method, _Path, []) ->
false;
match_route(Method, Path, [Route|Routes]) ->
case match_route(Method, Path, Route) of
{ok, Bindings} ->
{ok, Route#{bindings => Bindings}};
false ->
match_route(Method, Path, Routes)
end;
match_route(Method, Path, #{method := Method, pattern := Pattern}) ->
match_path(string:tokens(Path, "/"), Pattern, #{});
match_route(_Method, _Path, _Route) ->
false.
%% Segment-by-segment match of a tokenized request path against a route
%% pattern, accumulating path-variable bindings. Pattern segments of the
%% form ":name" or ":type:name" bind the request segment (converted via
%% parse_var/2 when typed); literal segments must be equal.
%% NOTE(review): list_to_atom/1 is applied to names taken from the route
%% pattern, which originates from module attributes (see init/1), not
%% from client input — so atom-table growth is bounded; confirm patterns
%% are never built from request data.
match_path([], [], Bindings) ->
{ok, Bindings};
match_path([], [_H|_T], _) ->
false;
match_path([_H|_T], [], _) ->
false;
match_path([H1|T1], [":" ++ H2|T2], Bindings) ->
match_path(T1, T2, case string:tokens(H2, ":") of
[Type, Name] ->
Bindings#{list_to_atom(Name) => parse_var(Type, H1)};
[Name] ->
Bindings#{list_to_atom(Name) => H1}
end);
match_path([H|T1], [H|T2], Bindings) ->
match_path(T1, T2, Bindings);
match_path(_Path, _Pattern, _Bindings) ->
false.
%% Combine query-string parameters with JSON body parameters (when a
%% body is present). Crashes from jsx:decode bubble up to dispatch/3,
%% which maps them to a 400.
%% NOTE(review): the {_, Body, _} match accepts both {ok, ...} and
%% {more, ...} from cowboy_req:read_body/1, so an oversized body would
%% be silently truncated — confirm this is acceptable.
parse_params(Req) ->
QueryParams = cowboy_req:parse_qs(Req),
BodyParams =
case cowboy_req:has_body(Req) of
true -> {_, Body, _} = cowboy_req:read_body(Req),
jsx:decode(Body);
false -> []
end,
QueryParams ++ BodyParams.
%% Convert a path-binding string to its declared type. "atom"
%% deliberately uses list_to_existing_atom/1 so untrusted path segments
%% cannot grow the atom table; unknown values raise badarg, which
%% dispatch/3 maps to a 404.
parse_var("atom", Str) ->
    list_to_existing_atom(Str);
parse_var("int", Str) ->
    list_to_integer(Str);
parse_var("bin", Str) ->
    iolist_to_binary(Str).
%% Normalize the various handler return conventions into an HTTP reply:
%% ok -> 200 "ok"; {ok, Resp} -> 200; {error, Reason} -> 500;
%% {Code, Resp} and {Code, Headers, Resp} pass through as given.
jsonify(ok, Req) ->
jsonify(200, <<"ok">>, Req);
jsonify({ok, Response}, Req) ->
jsonify(200, Response, Req);
jsonify({error, Reason}, Req) ->
jsonify(500, Reason, Req);
jsonify({Code, Response}, Req) when is_integer(Code) ->
jsonify(Code, Response, Req);
jsonify({Code, Headers, Response}, Req) when is_integer(Code) ->
jsonify(Code, Headers, Response, Req).
jsonify(Code, Response, Req) ->
jsonify(Code, #{}, Response, Req).
%% Encode the response as JSON; user headers are merged over the default
%% content-type.
jsonify(Code, Headers, Response, Req) ->
cowboy_req:reply(Code, maps:merge(#{<<"content-type">> => <<"application/json">>}, Headers), jsx:encode(Response), Req).
reply(Code, Text, Req) ->
cowboy_req:reply(Code, #{<<"content-type">> => <<"text/plain">>}, Text, Req). | src/minirest_handler.erl | 0.525369 | 0.404096 | minirest_handler.erl | starcoder |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Contraction Clustering (RASTER):
% Reference Implementation in Erlang with an Example
% (c) 2016, 2017 Fraunhofer-Chalmers Centre for Industrial Mathematics
%
% Algorithm development and implementation:
% <NAME> (<EMAIL>)
%
% Requirements:
% . Erlang
% . external libraries: none (numpy/pandas apply only to the Python port
%   of this reference implementation)
%
%
% This demo has been developed and tested on Ubuntu Linux 16.04.
%
% For a description of the algorithm including relevant theory, please
% consult our paper on Contraction Clustering (RASTER).
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-module(raster).
-compile([export_all]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Implementation note:
%
% The code below uses 'ordsets' instead of 'sets'. The latter is faster,
% yet cannot be easily inspected in the REPL, which outputs the internal
% representation.
%
% A simple solution would be to simply find/replace all instances of
% 'ordsets' with 'sets' in the source code.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% allPoints(Points, Acc, Scalar) -> Counts :: #{ {X,Y} => Count }.
%%  Fold the point list into a map counting how many points fall into
%%  each scaled integer tile. Each coordinate is multiplied by Scalar
%%  (10^Precision) and truncated, so points are bucketed to that many
%%  decimal places. Tail-recursive with an explicit map accumulator.
allPoints([], Acc, _Scalar) ->
    Acc;
allPoints([{X, Y} | Rest], Acc, Scalar) ->
    Tile = {trunc(X * Scalar), trunc(Y * Scalar)},
    %% maps:update_with/4 increments an existing count or inserts 1,
    %% replacing the previous maps:find/maps:put pair that bound the new
    %% accumulator inside the case branches.
    allPoints(Rest, maps:update_with(Tile, fun(N) -> N + 1 end, 1, Acc), Scalar).
%% mapToTiles(Points, Precision, Threshold) -> {SignificantTiles, Scalar}.
%%  RASTER step 1 (projection): count the points per tile at the given
%%  decimal precision and keep only the tiles holding at least Threshold
%%  observations. Scalar is returned so callers can map tile coordinates
%%  back to the original scale.
mapToTiles(Points, Precision, Threshold) ->
    Scalar = math:pow(10, Precision),
    All_Points = allPoints(Points, maps:new(), Scalar),
    % retain tiles that contain at least the provided threshold value of
    % observations
    Significant_Tiles =
        maps:keys(
          maps:filter(
            fun(_K, V) -> V >= Threshold end,
            All_Points)),
    {Significant_Tiles, Scalar}.
%% getNeighbors(Tile, Tiles) -> [Tile].
%%  Return the 8-way neighbours of {X,Y} that are present in Tiles, in a
%%  fixed candidate order. Membership is tested against an ordset built
%%  from Tiles (a sorted list, so each lookup is a linear scan, not the
%%  O(1) the original comment claimed).
getNeighbors({X, Y}, Tiles) ->
    Around = [{X + 1, Y },
              {X - 1, Y },
              {X , Y + 1},
              {X , Y - 1},
              {X + 1, Y - 1},
              {X + 1, Y + 1},
              {X - 1, Y - 1},
              {X - 1, Y + 1}],
    Known = ordsets:from_list(Tiles),
    [Tile || Tile <- Around, ordsets:is_element(Tile, Known)].
% Clusters: list of lists, where each list represents a cluster
%% cluster_all(Tiles, Min_Size, Clusters) -> Clusters.
%%  RASTER step 2 (agglomeration): repeatedly grow one connected cluster
%%  from the first remaining tile, remove its tiles from the working
%%  set, and keep the cluster only if it holds at least Min_Size tiles.
%%  Tiles is an ordset; the accumulated clusters are returned once the
%%  set is exhausted.
cluster_all([], _, Clusters) -> Clusters;
cluster_all(Tiles, Min_Size, Clusters) ->
    [Start | _T] = Tiles,
    Cluster = cluster_one([Start], Tiles, ordsets:new()),
    % remove all points from set
    New_Tiles = ordsets:subtract(Tiles, Cluster),
    Set_Cluster = ordsets:to_list(Cluster),
    case length(Set_Cluster) >= Min_Size of
        true -> cluster_all(New_Tiles, Min_Size, [Set_Cluster | Clusters]);
        false -> cluster_all(New_Tiles, Min_Size, Clusters)
    end.
%% cluster_one(To_Check, Tiles, Visited) -> Visited.
%%  Flood-fill a single cluster. To_Check is the work list of tiles
%%  still to expand; Visited (an ordset) accumulates the cluster. Each
%%  tile's unvisited 8-way neighbours within Tiles are appended to the
%%  work list until it empties.
cluster_one([], _, Visited) -> Visited;
cluster_one(To_Check, Tiles, Visited) ->
    [H | T] = To_Check,
    New_Visited = ordsets:add_element(H, Visited),
    Candidates = getNeighbors(H, Tiles),
    Vals =
        lists:filter(
          fun(X) -> not ordsets:is_element(X, New_Visited) end,
          Candidates),
    cluster_one(T ++ Vals, Tiles, New_Visited).
%% get_tuples(Lines, Acc) -> [{X, Y}].
%%  Parse CSV lines of the form <<"13.55,9.81">> into float pairs. The
%%  result is in reverse order of the input (prepend accumulator).
%%  Fix: the original bound the variable _Rest twice in one clause;
%%  underscore-prefixed names are still real variables in Erlang, so the
%%  second binding was an equality assertion that could badmatch if the
%%  two parse remainders ever differed. Each remainder now gets its own
%%  name.
get_tuples([], Acc) -> Acc;
get_tuples([Line | Rest], Acc) ->
    % input e.g.
    % <<"13.55103746471259,9.811559258820193">>
    [Xs, Ys] = binary:split(Line, [<<",">>], [global]),
    {X_Float, _RestX} = string:to_float(binary_to_list(Xs)),
    {Y_Float, _RestY} = string:to_float(binary_to_list(Ys)),
    get_tuples(Rest, [{X_Float, Y_Float} | Acc]).
%% number_clusters(Clusters, Acc, Num) -> [{ClusterNum, X, Y}].
%%  Tag every point with the number of its cluster (numbered from Num
%%  upwards), preserving cluster and point order, appended after Acc.
%%  Fix: the original appended with `Acc ++ A' on every recursive step,
%%  which is O(n^2) over the total number of points. This version builds
%%  all numbered rows first and performs a single append.
number_clusters([], Acc, _Num) ->
    Acc;
number_clusters(Clusters, Acc, First) ->
    Last = First + length(Clusters) - 1,
    Rows = [[{Num, X, Y} || {X, Y} <- Cluster]
            || {Num, Cluster} <- lists:zip(lists:seq(First, Last), Clusters)],
    Acc ++ lists:append(Rows).
%% demo() -> ok | {error, Reason}.
%%  End-to-end example: read sample points from CSV, run both RASTER
%%  steps (projection onto significant tiles, then agglomeration into
%%  clusters) and write the numbered, rescaled clusters to a CSV file.
%%  (Fix: removed dataset-metadata residue fused onto the final line,
%%  which made the file unparseable.)
demo() ->
    File_in = "input/sample.csv",
    File_out = "output/clustered.csv",
    {ok, Data} = file:read_file(File_in),
    All_Points = get_tuples(
        binary:split(Data, [<<"\n">>], [global]), []),
    % Step 1: Projection
    Threshold = 5,
    Precision = 1, % i.e. 1 place value after the decimal point
    {Significant_Tiles, Scalar} =
        mapToTiles(All_Points, Precision, Threshold),
    % Step 2: Agglomeration
    Min_size = 5,
    Set_Significant_Tiles = ordsets:from_list(Significant_Tiles),
    Clusters = cluster_all(
        Set_Significant_Tiles, Min_size, []),
    io:format("Number of clusters: ~p ~n", [length(Clusters)]),
    Body = number_clusters(Clusters, [], 1),
    Header = {"Cluster Number", "X-Position", "Y-Position"},
    %% Scale tile coordinates back to the original point scale; the
    %% cluster number (first element) is left untouched.
    Body_Scaled = lists:map(
        fun({X, Y, Z}) -> { X, Y / Scalar, Z / Scalar } end, Body),
    % write to file
    %% NOTE(review): ~p prints the header strings with quotes; verify the
    %% consumer of the CSV expects that.
    {ok, F} = file:open(File_out, write),
    lists:foreach(
        fun({X, Y, Z}) -> io:format(F, "~p, ~p, ~p~n", [X, Y, Z]) end,
        [ Header | Body_Scaled ]),
    file:close(F).
%% Copyright (c) 2008-2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : lfe_trans.erl
%% Author : <NAME>
%% Purpose : Lisp Flavoured Erlang translator.
%%% Translate LFE code to/from vanilla Erlang AST.
%%%
%%% Note that we don't really check code here as such, we assume the
%%% input is correct. If there is an error in the input we just fail.
%%% This allows us to accept forms which are actually illegal but we
%%% may special case, for example functions call in patterns which
%%% will become macro expansions.
%%%
%%% Having import from and rename forces us to explicitly convert the
%%% call as we can't use an import attribute to do this properly for
%%% us. Hence we collect the imports in lfe_codegen and pass them onto
%%% us.
%%%
%%% Module aliases are collected in lfe_codegen and passed on to us.
-module(lfe_translate).
-export([from_expr/1,from_expr/2,from_body/1,from_body/2,from_lit/1]).
-export([to_expr/2,to_expr/3,to_exprs/2,to_exprs/3]).
-export([to_gexpr/2,to_gexpr/3,to_gexprs/2,to_gexprs/3,to_lit/2]).
-include("lfe.hrl").
-record(from, {vc=0 %Variable counter
}).
%% from_expr(AST) -> Sexpr.
%% from_expr(AST, Variables) -> {Sexpr,Variables}.
%% from_body([AST]) -> Sexpr.
%% from_body([AST], Variables) -> {Sexpr,Variables}.
%%  Translate a vanilla Erlang expression into LFE. The main
%%  difficulty is in the handling of variables. The implicit matching
%%  of known variables in vanilla must be translated into explicit
%%  equality tests in guards (which is what the compiler does
%%  internally). For this we need to keep track of visible variables
%%  and detect when they are reused in patterns.

%% No pre-known variables: start from an empty ordset and a fresh
%% translation state; only the s-expression is returned.
from_expr(E) ->
    {S,_,_} = from_expr(E, ordsets:new(), #from{}),
    S.

%% Entry point that seeds the visible-variable table and also returns
%% the updated variable list to the caller.
from_expr(E, Vs0) ->
    Vt0 = ordsets:from_list(Vs0),               %We are clean
    {S,Vt1,_} = from_expr(E, Vt0, #from{}),
    {S,ordsets:to_list(Vt1)}.
%% As from_expr/1 but for a body (expression sequence); the translated
%% expressions are wrapped in a progn form.
from_body(Es) ->
    {Les,_,_} = from_body(Es, ordsets:new(), #from{}),
    [progn|Les].

%% As from_expr/2: seed with the caller-visible variables and return the
%% updated variable list alongside the progn form.
from_body(Es, Vs0) ->
    Vt0 = ordsets:from_list(Vs0),               %We are clean
    {Les,Vt1,_} = from_body(Es, Vt0, #from{}),
    {[progn|Les],ordsets:to_list(Vt1)}.
%% from_expr(AST, VarTable, State) -> {Sexpr,VarTable,State}.
%%  The workhorse: one clause per Erlang abstract-format node. VarTable
%%  is an ordset of currently bound (visible) variables and State
%%  carries the counter used to generate fresh variable names; every
%%  clause threads both through in order.
from_expr({var,_,V}, Vt, St) -> {V,Vt,St};      %Unquoted atom
from_expr({nil,_}, Vt, St) -> {[],Vt,St};
from_expr({integer,_,I}, Vt, St) -> {I,Vt,St};
from_expr({float,_,F}, Vt, St) -> {F,Vt,St};
from_expr({atom,_,A}, Vt, St) -> {?Q(A),Vt,St}; %Quoted atom
from_expr({string,_,S}, Vt, St) -> {?Q(S),Vt,St}; %Quoted string
from_expr({cons,_,H,T}, Vt0, St0) ->
    {Car,Vt1,St1} = from_expr(H, Vt0, St0),
    {Cdr,Vt2,St2} = from_expr(T, Vt1, St1),
    %% from_cons/2 folds chains of conses into a single (list ...) form.
    {from_cons(Car, Cdr),Vt2,St2};
%% {[cons,Car,Cdr],Vt2,St2};
from_expr({tuple,_,Es}, Vt0, St0) ->
    {Ss,Vt1,St1} = from_expr_list(Es, Vt0, St0),
    {[tuple|Ss],Vt1,St1};
from_expr({bin,_,Segs}, Vt0, St0) ->
    {Ss,Vt1,St1} = from_bitsegs(Segs, Vt0, St0),
    {[binary|Ss],Vt1,St1};
from_expr({map,_,Assocs}, Vt0, St0) ->          %Build a map
    {Ps,Vt1,St1} = from_map_assocs(Assocs, Vt0, St0),
    {[map|Ps],Vt1,St1};
from_expr({map,_,Map,Assocs}, Vt0, St0) ->      %Update a map
    {Lm,Vt1,St1} = from_expr(Map, Vt0, St0),
    from_map_update(Assocs, nul, Lm, Vt1, St1);
%% Record special forms, though some are function calls in Erlang.
from_expr({record,_,Name,Fs}, Vt0, St0) ->
    {Lfs,Vt1,St1} = from_record_fields(Fs, Vt0, St0),
    {['record',Name|Lfs],Vt1,St1};
from_expr({call,_,{atom,_,is_record},[E,{atom,_,Name}]}, Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {['is-record',Le,Name],Vt1,St1};
from_expr({record_index,_,Name,{atom,_,F}}, Vt, St) -> %We KNOW!
    {['record-index',Name,F],Vt,St};
from_expr({record_field,_,E,Name,{atom,_,F}}, Vt0, St0) -> %We KNOW!
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {['record-field',Le,Name,F],Vt1,St1};
from_expr({record,_,E,Name,Fs}, Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {Lfs,Vt2,St2} = from_record_fields(Fs, Vt1, St1),
    {['record-update',Le,Name|Lfs],Vt2,St2};
from_expr({record_field,_,_,_}=M, Vt, St) ->    %Pre R16 packages
    from_package_module(M, Vt, St);
%% Function special forms.
from_expr({'fun',_,{clauses,Cls}}, Vt, St0) ->
    {Lcls,St1} = from_fun_cls(Cls, Vt, St0),
    {['match-lambda'|Lcls],Vt,St1};             %Don't bother using lambda
from_expr({'fun',_,{function,F,A}}, Vt, St) ->
    %% These are just literal values.
    {[function,F,A],Vt,St};
from_expr({'fun',_,{function,M,F,A}}, Vt, St) ->
    %% These are abstract values.
    {[function,from_lit(M),from_lit(F),from_lit(A)],Vt,St};
%% Core control special forms.
from_expr({match,_,_,_}=Match, Vt, St) ->
    from_match(Match, Vt, St);
from_expr({block,_,Es}, Vt, St) ->
    from_block(Es, Vt, St);
from_expr({'if',_,Cls}, Vt0, St0) ->            %This is the Erlang if
    %% An Erlang if becomes a case over the empty expression list.
    {Lcls,Vt1,St1} = from_icrt_cls(Cls, Vt0, St0),
    {['case',[]|Lcls],Vt1,St1};
from_expr({'case',_,E,Cls}, Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {Lcls,Vt2,St2} = from_icrt_cls(Cls, Vt1, St1),
    {['case',Le|Lcls],Vt2,St2};
from_expr({'receive',_,Cls}, Vt0, St0) ->
    {Lcls,Vt1,St1} = from_icrt_cls(Cls, Vt0, St0),
    {['receive'|Lcls],Vt1,St1};
from_expr({'receive',_,Cls,Timeout,Body}, Vt0, St0) ->
    {Lcls,Vt1,St1} = from_icrt_cls(Cls, Vt0, St0),
    {Lt,Vt2,St2} = from_expr(Timeout, Vt1, St1),
    {Lb,Vt3,St3} = from_body(Body, Vt2, St2),
    {['receive'|Lcls ++ [['after',Lt|Lb]]],Vt3,St3};
from_expr({'catch',_,E}, Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {['catch',Le],Vt1,St1};
from_expr({'try',_,Es,Scs,Ccs,As}, Vt, St) ->
    from_try(Es, Scs, Ccs, As, Vt, St);
%% List/binary comprensions.
from_expr({lc,_,E,Qs}, Vt, St) ->
    from_listcomp(E, Qs, Vt, St);
from_expr({bc,_,Seg,Qs}, Vt, St) ->
    from_binarycomp(Seg, Qs, Vt, St);
%% Function calls.
from_expr({call,_,{remote,_,M,F},As}, Vt0, St0) -> %Remote function call
    {Lm,Vt1,St1} = from_expr(M, Vt0, St0),
    {Lf,Vt2,St2} = from_expr(F, Vt1, St1),
    {Las,Vt3,St3} = from_expr_list(As, Vt2, St2),
    {[call,Lm,Lf|Las],Vt3,St3};
from_expr({call,_,{atom,_,F},As}, Vt0, St0) ->  %Local function call
    {Las,Vt1,St1} = from_expr_list(As, Vt0, St0),
    {[F|Las],Vt1,St1};
from_expr({call,_,F,As}, Vt0, St0) ->           %F not an atom or remote
    {Lf,Vt1,St1} = from_expr(F, Vt0, St0),
    {Las,Vt2,St2} = from_expr_list(As, Vt1, St1),
    {[funcall,Lf|Las],Vt2,St2};
from_expr({op,_,Op,A}, Vt0, St0) ->             %Unary operator
    {La,Vt1,St1} = from_expr(A, Vt0, St0),
    {[Op,La],Vt1,St1};
from_expr({op,_,Op,L,R}, Vt0, St0) ->           %Binary operator
    {Ll,Vt1,St1} = from_expr(L, Vt0, St0),
    {Lr,Vt2,St2} = from_expr(R, Vt1, St1),
    {[Op,Ll,Lr],Vt2,St2}.
%% from_cons(Head, Tail) -> Sexpr.
%%  Build the most compact list form: extend an existing (list ...),
%%  start a new (list ...) on nil, otherwise fall back to (cons ...).
from_cons(Head, Tail) ->
    case Tail of
        [list|Elems] -> [list,Head|Elems];
        []           -> [list,Head];
        _            -> [cons,Head,Tail]
    end.
%% from_body(Expressions, VarTable, State) -> {Body,VarTable,State}.
%%  Handle '=' specially here and translate into let containing rest
%%  of body.
from_body([{match,_,_,_}=Match], Vt0,St0) ->    %Last match
    {Lm,Vt1,St1} = from_expr(Match, Vt0, St0),  %Must return pattern as value
    {[Lm],Vt1,St1};
from_body([{match,_,P,E}|Es], Vt0, St0) ->
    %% A non-final match wraps the remaining body in a let binding.
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Le,Vt2,St2} = from_expr(E, Vt1, St1),
    {Les,Vt3,St4} = from_body(Es, Vt2, St2),
    Leg = from_eq_tests(Eqt),                   %Implicit guard tests
    Lbody = from_add_guard(Leg, [Le]),
    {[['let',[[Lp|Lbody]]|Les]],Vt3,St4};
from_body([E|Es], Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {Les,Vt2,St2} = from_body(Es, Vt1, St1),
    {[Le|Les],Vt2,St2};
from_body([], Vt, St) -> {[],Vt,St}.
%% from_expr_list(Exprs, VarTable, State) -> {Sexprs,VarTable,State}.
%%  Translate a list of expressions left to right; mapfoldl2 (defined
%%  elsewhere in this module) presumably threads the two accumulators
%%  VarTable and State through each call.
from_expr_list(Es, Vt, St) -> mapfoldl2(fun from_expr/3, Vt, St, Es).
%% from_block(Body, VarTable, State) -> {Block,VarTable,State}.
%%  Translate a body; a single expression is returned bare while a
%%  sequence is wrapped in progn.
from_block(Es, Vt0, St0) ->
    case from_body(Es, Vt0, St0) of
        {[Le],Vt1,St1} -> {Le,Vt1,St1};
        {Les,Vt1,St1} -> {[progn|Les],Vt1,St1}
    end.
%% from_add_guard(GuardTests, Body) -> Body.
%%  Prefix the body with a (when ...) form, but only when there actually
%%  are guard tests; an empty test list leaves the body untouched.
from_add_guard(Tests, Body) ->
    case Tests of
        [] -> Body;
        _  -> [['when'|Tests]|Body]
    end.
%% from_match(Match, VarTable, State) -> {LetForm,VarTable,State}.
%%  Match returns the value of the expression. Use a let to do
%%  matching with an alias which we return for value.
from_match({match,L,P,E}, Vt0, St0) ->
    {Alias,St1} = new_from_var(St0),            %Alias variable value
    %% Wrap the original pattern in Alias = P so the let can return Alias.
    MP = {match,L,{var,L,Alias},P},
    {Lp,Eqt,Vt1,St2} = from_pat(MP, Vt0, St1),  %The alias pattern
    {Le,Vt2,St3} = from_expr(E, Vt1, St2),      %The expression
    Leg = from_eq_tests(Eqt),                   %Implicit guard tests
    Lbody = from_add_guard(Leg, [Le]),          %Now build the whole body
    {['let',[[Lp|Lbody]],Alias],Vt2,St3}.
%% from_bitsegs(Segs, VarTable, State) -> {Segs,VarTable,State}.
%%  Translate the segments of a binary constructor one at a time.
from_bitsegs([{bin_element,_,Seg,Size,Type}|Segs], Vt0, St0) ->
    {S,Vt1,St1} = from_bitseg(Seg, Size, Type, Vt0, St0),
    {Ss,Vt2,St2} = from_bitsegs(Segs, Vt1, St1),
    {[S|Ss],Vt2,St2};
from_bitsegs([], Vt, St) -> {[],Vt,St}.

%% Unpack a bin_element tuple into value/size/type for from_bitseg/5.
from_bitseg({bin_element,_,Seg,Size,Type}, Vt, St) ->
    from_bitseg(Seg, Size, Type, Vt, St).
%% from_bitseg(Value, Size, Type, VarTable, State) -> {Seg,VarTable,State}.
%%  Translate one binary segment: literal integers, floats and strings
%%  are emitted directly, anything else is translated as an expression;
%%  in all cases the type specifiers and (size ...) are appended.
%% So it won't get confused with strings.
from_bitseg({integer,_,I}, default, default, Vt, St) -> {I,Vt,St};
from_bitseg({integer,_,I}, Size, Type, Vt0, St0) ->
    {Lsize,Vt1,St1} = from_bitseg_size(Size, Vt0, St0),
    {[I|from_bitseg_type(Type) ++ Lsize],Vt1,St1};
from_bitseg({float,_,F}, Size, Type, Vt0, St0) ->
    {Lsize,Vt1,St1} = from_bitseg_size(Size, Vt0, St0),
    {[F|from_bitseg_type(Type) ++ Lsize],Vt1,St1};
from_bitseg({string,_,S}, Size, Type, Vt0, St0) ->
    {Lsize,Vt1,St1} = from_bitseg_size(Size, Vt0, St0),
    {[S|from_bitseg_type(Type) ++ Lsize],Vt1,St1};
from_bitseg(E, Size, Type, Vt0, St0) ->
    {Le,Vt1,St1} = from_expr(E, Vt0, St0),
    {Lsize,Vt2,St2} = from_bitseg_size(Size, Vt1, St1),
    {[Le|from_bitseg_type(Type) ++ Lsize],Vt2,St2}.
%% from_bitseg_size(Size, VarTable, State) -> {[[size,Sz]] | [],VarTable,State}.
%%  default means no explicit size; otherwise translate the size
%%  expression and wrap it as (size Expr).
from_bitseg_size(default, Vt, St) -> {[],Vt,St};
from_bitseg_size(Size, Vt0, St0) ->
    {Ssize,Vt1,St1} = from_expr(Size, Vt0, St0),
    {[[size,Ssize]],Vt1,St1}.
%% from_bitseg_type(TypeSpecs) -> [Spec].
%%  default means no type specifiers. Otherwise convert each specifier,
%%  turning {unit,U} tuples into (unit U) forms and passing everything
%%  else (integer, signed, big, ...) through unchanged.
from_bitseg_type(default) -> [];
from_bitseg_type(Types) ->
    [case Spec of
         {unit,U} -> [unit,U];
         _ -> Spec
     end || Spec <- Types].
%% from_map_assocs(MapAssocs, VarTable, State) -> {Pairs,VarTable,State}.
%%  Flatten map associations into an interleaved key/value list; the
%%  assoc tag (map_field_assoc/map_field_exact) is ignored when building
%%  a brand new map.
from_map_assocs([{_,_,Key,Val}|As], Vt0, St0) ->
    {Lk,Vt1,St1} = from_expr(Key, Vt0, St0),
    {Lv,Vt2,St2} = from_expr(Val, Vt1, St1),
    {Las,Vt3,St3} = from_map_assocs(As, Vt2, St2),
    {[Lk,Lv|Las],Vt3,St3};
from_map_assocs([], Vt, St) -> {[],Vt,St}.
%% from_map_update(MapAssocs, CurrAssoc, CurrMap, VarTable, State) ->
%%     {Map,VarTable,State}.
%%  We need to be a bit cunning here and do everything left-to-right
%%  and minimize nested calls. Consecutive assocs with the same tag are
%%  folded into the current map-set/map-update form; a tag change wraps
%%  the map built so far in a new form. The initial tag is `nul', which
%%  matches neither assoc tag and so always starts a fresh form.
from_map_update([{Assoc,_,Key,Val}|As], Curr, Map0, Vt0, St0) ->
    {Lk,Vt1,St1} = from_expr(Key, Vt0, St0),
    {Lv,Vt2,St2} = from_expr(Val, Vt1, St1),
    %% Check if can continue this mapping or need to start a new one.
    Map1 = if Assoc =:= Curr -> Map0 ++ [Lk,Lv];
              Assoc =:= map_field_assoc -> ['map-set',Map0,Lk,Lv];
              Assoc =:= map_field_exact -> ['map-update',Map0,Lk,Lv]
           end,
    from_map_update(As, Assoc, Map1, Vt2, St2);
%% from_map_update([{Assoc,_,Key,Val}|Fs], Assoc, Map0, Vt0, St0) ->
%%     {Lk,Vt1,St1} = from_expr(Key, Vt0, St0),
%%     {Lv,Vt2,St2} = from_expr(Val, Vt1, St1),
%%     from_map_update(Fs, Assoc, Map0 ++ [Lk,Lv], Vt2, St2);
%% from_map_update([{Assoc,_,Key,Val}|Fs], _, Map0, Vt0, St0) ->
%%     {Lk,Vt1,St1} = from_expr(Key, Vt0, St0),
%%     {Lv,Vt2,St2} = from_expr(Val, Vt1, St1),
%%     Op = if Assoc =:= map_field_assoc -> 'map-set';
%%             true -> 'map-update'
%%          end,
%%     from_map_update(Fs, Assoc, [Op,Map0,Lk,Lv], Vt2, St2);
from_map_update([], _, Map, Vt, St) -> {Map,Vt,St}.
%% from_record_fields(Recfields, VarTable, State) -> {Recfields,VarTable,State}.
%%  Translate record constructor/update fields to a flat name/value list.
from_record_fields([{record_field,_,{atom,_,F},V}|Fs], Vt0, St0) ->
    {Lv,Vt1,St1} = from_expr(V, Vt0, St0),
    {Lfs,Vt2,St2} = from_record_fields(Fs, Vt1, St1),
    {[F,Lv|Lfs],Vt2,St2};
from_record_fields([{record_field,_,{var,_,F},V}|Fs], Vt0, St0) ->
    %% Special case!! (the field name is a variable, e.g. the `_'
    %% wildcard in a record construction)
    {Lv,Vt1,St1} = from_expr(V, Vt0, St0),
    {Lfs,Vt2,St2} = from_record_fields(Fs, Vt1, St1),
    {[F,Lv|Lfs],Vt2,St2};
from_record_fields([], Vt, St) -> {[],Vt,St}.
%% from_icrt_cls(Clauses, VarTable, State) -> {Clauses,VarTable,State}.
%% from_icrt_cl(Clause, VarTable, State) -> {Clause,VarTable,State}.
%%  If/case/receive/try clauses.
%%  No ; in guards, so no guard sequence only one list of guard tests.
from_icrt_cls(Cls, Vt, St) -> from_cls(fun from_icrt_cl/3, Cls, Vt, St).

%% An `if' clause has no pattern: emit `_' and carry the guard over.
from_icrt_cl({clause,_,[],[G],B}, Vt0, St0) ->  %If clause
    {Lg,Vt1,St1} = from_body(G, Vt0, St0),
    {Lb,Vt2,St2} = from_body(B, Vt1, St1),
    Lbody = from_add_guard(Lg, Lb),
    {['_'|Lbody],Vt2,St2};
from_icrt_cl({clause,_,H,[],B}, Vt0, St0) ->
    {[Lh],Eqt,Vt1,St1} = from_pats(H, Vt0, St0), %List of one
    {Lb,Vt2,St2} = from_body(B, Vt1, St1),
    Leg = from_eq_tests(Eqt),
    Lbody = from_add_guard(Leg, Lb),
    {[Lh|Lbody],Vt2,St2};
from_icrt_cl({clause,_,H,[G],B}, Vt0, St0) ->
    %% Implicit rebinding tests are prepended to the explicit guard.
    {[Lh],Eqt,Vt1,St1} = from_pats(H, Vt0, St0), %List of one
    {Lg,Vt2,St2} = from_body(G, Vt1, St1),
    {Lb,Vt3,St3} = from_body(B, Vt2, St2),
    Leg = from_eq_tests(Eqt),
    Lbody = from_add_guard(Leg ++ Lg, Lb),
    {[Lh|Lbody],Vt3,St3}.
%% from_fun_cls(Clauses, VarTable, State) -> {Clauses,State}.
%% from_fun_cl(Clause, VarTable, State) -> {Clause,VarTable,State}.
%%  Function clauses, all variables in the patterns are new variables
%%  which shadow existing variables without equality tests.
from_fun_cls(Cls, Vt, St0) ->
    {Lcls,_,St1} = from_cls(fun from_fun_cl/3, Cls, Vt, St0),
    {Lcls,St1}.

from_fun_cl({clause,_,H,[],B}, Vt0, St0) ->
    %% Patterns start from an empty table so heads shadow outer variables.
    {Lh,Eqt,Vtp,St1} = from_pats(H, [], St0),
    Vt1 = ordsets:union(Vtp, Vt0),              %All variables so far
    {Lb,Vt2,St2} = from_body(B, Vt1, St1),
    Leg = from_eq_tests(Eqt),
    Lbody = from_add_guard(Leg, Lb),
    {[Lh|Lbody],Vt2,St2};
from_fun_cl({clause,_,H,[G],B}, Vt0, St0) ->
    {Lh,Eqt,Vtp,St1} = from_pats(H, [], St0),
    Vt1 = ordsets:union(Vtp, Vt0),              %All variables so far
    {Lg,Vt2,St2} = from_body(G, Vt1, St1),
    {Lb,Vt3,St3} = from_body(B, Vt2, St2),
    Leg = from_eq_tests(Eqt),
    Lbody = from_add_guard(Leg ++ Lg, Lb),
    {[Lh|Lbody],Vt3,St3}.
%% from_cls(ClauseFun, Clauses, VarTable, State) -> {Clauses,VarTable,State}.
%%  Translate the clauses but only export variables that are defined
%%  in all clauses, the intersection of the variables.
from_cls(Fun, [C], Vt0, St0) ->
    {Lc,Vt1,St1} = Fun(C, Vt0, St0),
    {[Lc],Vt1,St1};
from_cls(Fun, [C|Cs], Vt0, St0) ->
    %% Every clause starts from the same incoming table Vt0; only the
    %% state is threaded sequentially.
    {Lc,Vtc,St1} = Fun(C, Vt0, St0),
    {Lcs,Vtcs,St2} = from_cls(Fun, Cs, Vt0, St1),
    {[Lc|Lcs],ordsets:intersection(Vtc, Vtcs),St2}.
%% from_eq_tests(Pairs) -> [GuardTest].
%%  One (=:= Old New) guard test per {OldVar,NewVar} rebinding pair.
from_eq_tests(Pairs) ->
    [['=:=',Old,New] || {Old,New} <- Pairs].
%% from_try(Exprs, CaseClauses, CatchClauses, After, VarTable, State) ->
%%     {Try,VarTable,State}.
%%  Only return the parts which have contents.
from_try(Es, Scs, Ccs, As, Vt, St0) ->
    %% Try does not allow any exports!
    {Les,_,St1} = from_body(Es, Vt, St0),
    %% These maybe empty.
    {Lscs,_,St2} = if Scs =:= [] -> {[],[],St1};
                      true -> from_icrt_cls(Scs, Vt, St1)
                   end,
    {Lccs,_,St3} = if Ccs =:= [] -> {[],[],St2};
                      true -> from_icrt_cls(Ccs, Vt, St2)
                   end,
    {Las,_,St4} = from_body(As, Vt, St3),
    %% Empty sections are dropped entirely by from_maybe/2.
    {['try',[progn|Les]|
      from_maybe('case', Lscs) ++
      from_maybe('catch', Lccs) ++
      from_maybe('after', Las)],Vt,St4}.
%% from_maybe(Tag, Body) -> [] | [[Tag|Body]].
%%  Wrap a non-empty section in its tagged form; an empty section
%%  disappears so the surrounding ++ chains skip it.
from_maybe(Tag, Body) ->
    case Body of
        [] -> [];
        _  -> [[Tag|Body]]
    end.
%% from_listcomp(Expr, Qualifiers, VarTable, State) ->
%%     {Listcomp,VarTable,State}.
from_listcomp(E, Qs, Vt0, St0) ->
    %% Qualifiers first: generator patterns bind variables used in E.
    {Lqs,Vt1,St1} = from_comp_quals(Qs, Vt0, St0),
    {Le,Vt2,St2} = from_expr(E, Vt1, St1),
    {['list-comp',Lqs,Le],Vt2,St2}.
%% from_binarycomp(Seg, Qualifiers, VarTable, State) ->
%%     {Binarycomp,VarTable,State}.
%%  A binary comprehension builds from a single-segment binary template.
from_binarycomp({bin,_,[Seg]}, Qs, Vt0, St0) ->
    {Lqs,Vt1,St1} = from_comp_quals(Qs, Vt0, St0),
    {Lseg,Vt2,St2} = from_bitseg(Seg, Vt1, St1),
    {['binary-comp',Lqs,Lseg],Vt2,St2}.
%% from_comp_quals(Qualifiers, VarTable, State) -> {Qualifiers,VarTable,State}.
%%  A generator becomes (<- Pattern [guard] Expr); any other qualifier
%%  is a filter expression translated as-is.
from_comp_quals([{generate,_,P,E}|Qs], Vt0, St0) ->
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Le,Vt2,St2} = from_expr(E, Vt1, St1),
    {Lqs,Vt3,St3} = from_comp_quals(Qs, Vt2, St2),
    Leg = from_eq_tests(Eqt),
    %% Fix: every other call site wraps the single expression as [Le]
    %% before from_add_guard/2 (see from_body/3 and from_match/3); the
    %% unwrapped Le spliced a compound expression into the (<- ...) form
    %% instead of keeping it as one element.
    Lbody = from_add_guard(Leg, [Le]),
    {[['<-',Lp|Lbody]|Lqs],Vt3,St3};
%% from_comp_quals([{b_generate,_,P,E}|Qs], Vt0, St0) ->
%%     {Gen,Vt1,St1} = from_comp_binarygen(P, E, Vt0, St0),
%%     {Lqs,Vt2,St2} = from_comp_quals(Qs, Vt1, St1),
%%     {[Gen|Lqs],Vt2,St2};
from_comp_quals([T|Qs], Vt0, St0) ->
    {Lt,Vt1,St1} = from_expr(T, Vt0, St0),
    {Lqs,Vt2,St2} = from_comp_quals(Qs, Vt1, St1),
    {[Lt|Lqs],Vt2,St2};
from_comp_quals([], Vt, St) -> {[],Vt,St}.
%% from_comp_listgen(Pat, Exp, Vt0, St0) ->
%% {Le,Vt1,St1} = from_expr(Exp, Vt0, St0),
%% {Lp,Vt2,St2} = from_expr(Pat, Vt1, St1),
%% {['<-',Lp,Le],Vt2,St2}.
%% from_comp_binarygen(Pat, Exp, Vt0, St0) ->
%% {Le,Vt1,St1} = from_expr(Exp, Vt0, St0),
%% {Lp,Vt2,St2} = from_expr(Pat, Vt1, St1),
%% [binary,Lseg] = Lp,
%% {['<=',Lseg,Le],Vt2,St2}.
%% from_package_module(Module, VarTable, State) -> {Module,VarTable,State}.
%%  We must handle the special case where in pre-R16 you could have
%%  packages with a dotted module path. It used a special record_field
%%  tuple. This does not work in R16 and later!
%%  NOTE(review): erl_parse:package_segments/1 and the `packages' module
%%  were removed from OTP along with package support -- confirm this
%%  clause cannot be reached on the OTP versions LFE targets.
from_package_module({record_field,_,_,_}=M, Vt, St) ->
    Segs = erl_parse:package_segments(M),
    A = list_to_atom(packages:concat(Segs)),
    {?Q(A),Vt,St}.
%% new_from_var(State) -> {VarName,State}.
%%  Generate a fresh variable name of the shape `-var-N-' from the state
%%  counter; the hyphens make it unwritable in ordinary source, so it
%%  cannot clash with user variables.
new_from_var(#from{vc=C}=St) ->
    V = list_to_atom(lists:concat(['-var-',C,'-'])),
    {V,St#from{vc=C+1}}.
%% from_pat(Pattern, VarTable, State) ->
%%     {Pattern,EqTests,VarTable,State}.
%%  Translate a pattern. EqTests collects {OldVar,NewVar} pairs for
%%  variables that were already bound: the pattern gets a fresh variable
%%  and the caller must emit an explicit `=:=' guard (from_eq_tests/1).
from_pat({var,_,_}=V, Vt, St) ->
    from_pat_var(V, Vt, St);
from_pat({nil,_}, Vt, St) -> {[],[],Vt,St};
from_pat({integer,_,I}, Vt, St) -> {I,[],Vt,St};
from_pat({float,_,F}, Vt, St) -> {F,[],Vt,St};
from_pat({atom,_,A}, Vt, St) -> {?Q(A),[],Vt,St};     %Quoted atom
from_pat({string,_,S}, Vt, St) -> {?Q(S),[],Vt,St};   %Quoted string
from_pat({cons,_,H,T}, Vt0, St0) ->
    {Car,Eqt1,Vt1,St1} = from_pat(H, Vt0, St0),
    {Cdr,Eqt2,Vt2,St2} = from_pat(T, Vt1, St1),
    {from_cons(Car, Cdr),Eqt1++Eqt2,Vt2,St2};
from_pat({tuple,_,Es}, Vt0, St0) ->
    {Ss,Eqt,Vt1,St1} = from_pats(Es, Vt0, St0),
    {[tuple|Ss],Eqt,Vt1,St1};
from_pat({bin,_,Segs}, Vt0, St0) ->
    {Ss,Eqt,Vt1,St1} = from_pat_bitsegs(Segs, Vt0, St0),
    {[binary|Ss],Eqt,Vt1,St1};
from_pat({map,_,Assocs}, Vt0, St0) ->
    {Ps,Eqt,Vt1,St1} = from_pat_map_assocs(Assocs, Vt0, St0),
    {[map|Ps],Eqt,Vt1,St1};
from_pat({record,_,Name,Fs}, Vt0, St0) ->             %Match a record
    {Sfs,Eqt,Vt1,St1} = from_pat_rec_fields(Fs, Vt0, St0),
    {['record',Name|Sfs],Eqt,Vt1,St1};
from_pat({record_index,_,Name,{atom,_,F}}, Vt, St) -> %We KNOW!
    %% Fix: this clause returned a 3-tuple {Pat,Vt,St} while every
    %% caller matches {Pat,Eqt,Vt,St}; add the empty eq-test list so a
    %% record-index in a pattern no longer badmatches.
    {['record-index',Name,F],[],Vt,St};
from_pat({match,_,P1,P2}, Vt0, St0) ->                %Pattern aliases
    {Lp1,Eqt1,Vt1,St1} = from_pat(P1, Vt0, St0),
    {Lp2,Eqt2,Vt2,St2} = from_pat(P2, Vt1, St1),
    {['=',Lp1,Lp2],Eqt1++Eqt2,Vt2,St2};
%% Basically illegal syntax which maybe generated by internal tools.
from_pat({call,_,{atom,_,F},As}, Vt0, St0) ->
    %% This will never occur in real code but for macro expansions.
    {Las,Eqt,Vt1,St1} = from_pats(As, Vt0, St0),
    {[F|Las],Eqt,Vt1,St1}.
%% from_pats(Patterns, VarTable, State) -> {Patterns,EqTests,VarTable,State}.
%%  Translate a pattern list left to right, concatenating the equality
%%  tests of all sub-patterns.
from_pats([P|Ps], Vt0, St0) ->
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Lps,Eqts,Vt2,St2} = from_pats(Ps, Vt1, St1),
    {[Lp|Lps],Eqt++Eqts,Vt2,St2};
from_pats([], Vt, St) -> {[],[],Vt,St}.
%% from_pat_var(Var, VarTable, State) -> {Var,EqTests,VarTable,State}.
%%  A variable that is already bound cannot be rematched implicitly in
%%  LFE, so the pattern gets a fresh variable plus an {Old,New} pair for
%%  an explicit `=:=' guard; an unseen variable is simply added to the
%%  table.
from_pat_var({var,_,'_'}, Vt, St) ->            %Don't need to handle _
    {'_',[],Vt,St};
from_pat_var({var,_,V}, Vt, St0) ->
    case ordsets:is_element(V, Vt) of           %Is variable bound?
        true ->
            {V1,St1} = new_from_var(St0),       %New var for pattern
            {V1,[{V,V1}],Vt,St1};               %Add to guard tests
        false ->
            {V,[],ordsets:add_element(V, Vt),St0}
    end.
%% from_pat_bitsegs(Segs, VarTable, State) -> {Segs,EqTable,VarTable,State}.
%%  Translate a binary pattern's segments, concatenating eq-tests.
from_pat_bitsegs([Seg|Segs], Vt0, St0) ->
    {S,Eqt,Vt1,St1} = from_pat_bitseg(Seg, Vt0, St0),
    {Ss,Eqts,Vt2,St2} = from_pat_bitsegs(Segs, Vt1, St1),
    {[S|Ss],Eqt++Eqts,Vt2,St2};
from_pat_bitsegs([], Vt, St) -> {[],[],Vt,St}.

%% Unpack a bin_element pattern for from_pat_bitseg/5.
from_pat_bitseg({bin_element,_,Seg,Size,Type}, Vt, St) ->
    from_pat_bitseg(Seg, Size, Type, Vt, St).
%% from_pat_bitseg(Value, Size, Type, VarTable, State) ->
%%     {Seg,EqTests,VarTable,State}.
%%  A string segment matches literally (no variables, so no eq-tests);
%%  anything else is translated as a sub-pattern.
from_pat_bitseg({string,_,S}, Size, Type, Vt0, St0) ->
    {Lsize,Vt1,St1} = from_pat_bitseg_size(Size, Vt0, St0),
    {[S|from_bitseg_type(Type) ++ Lsize],[],Vt1,St1};
from_pat_bitseg(P, Size, Type, Vt0, St0) ->
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Lsize,Vt2,St2} = from_pat_bitseg_size(Size, Vt1, St1),
    {[Lp|from_bitseg_type(Type) ++ Lsize],Eqt,Vt2,St2}.
%% from_pat_bitseg_size(Size, VarTable, State) ->
%%     {[[size,Sz]] | [],VarTable,State}.
%%  default means no explicit size; a variable size is passed through
%%  unchanged, everything else is translated as a pattern (dropping any
%%  eq-tests, since size expressions cannot rebind).
from_pat_bitseg_size(default, Vt, St) -> {[],Vt,St};
from_pat_bitseg_size({var,_,V}, Vt, St) ->      %Size vars never match
    {[[size,V]],Vt,St};
from_pat_bitseg_size(Size, Vt0, St0) ->
    {Ssize,_,Vt1,St1} = from_pat(Size, Vt0, St0),
    {[[size,Ssize]],Vt1,St1}.
%% from_pat_map_assocs(Fields, VarTable, State) ->
%%     {Fields,EqTable,VarTable,State}.
%%  Map patterns only allow exact (:=) associations; keys and values are
%%  both translated as sub-patterns.
from_pat_map_assocs([{map_field_exact,_,Key,Val}|As], Vt0, St0) ->
    {Lk,Eqt1,Vt1,St1} = from_pat(Key, Vt0, St0),
    {Lv,Eqt2,Vt2,St2} = from_pat(Val, Vt1, St1),
    {Lfs,Eqt3,Vt3,St3} = from_pat_map_assocs(As, Vt2, St2),
    {[Lk,Lv|Lfs],Eqt1 ++ Eqt2 ++ Eqt3,Vt3,St3};
from_pat_map_assocs([], Vt, St) -> {[],[],Vt,St}.
%% from_pat_rec_fields(Recfields, VarTable, State) ->
%%     {Recfields,EqTable,VarTable,State}.
%%  Translate record pattern fields to a flat name/pattern list.
from_pat_rec_fields([{record_field,_,{atom,_,F},P}|Fs], Vt0, St0) ->
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Lfs,Eqts,Vt2,St2} = from_pat_rec_fields(Fs, Vt1, St1),
    {[F,Lp|Lfs],Eqt++Eqts,Vt2,St2};
from_pat_rec_fields([{record_field,_,{var,_,F},P}|Fs], Vt0, St0) ->
    %% Special case!! (the field name is a variable, e.g. the `_'
    %% wildcard in a record pattern)
    {Lp,Eqt,Vt1,St1} = from_pat(P, Vt0, St0),
    {Lfs,Eqts,Vt2,St2} = from_pat_rec_fields(Fs, Vt1, St1),
    {[F,Lp|Lfs],Eqt++Eqts,Vt2,St2};
from_pat_rec_fields([], Vt, St) -> {[],[],Vt,St}.
%% from_lit(LiteralAst) -> Term.
%%  Convert an abstract-format literal into the plain Erlang term it
%%  denotes. No quoting here; callers add ?Q where needed.
from_lit(Ast) ->
    erl_parse:normalise(Ast).
%% Converting LFE to Erlang AST.
%% This is relatively straightforward except for 2 things:
%% - No shadowing of variables so they must be uniquely named.
%% - Local functions must lifted to top level. This is difficult for
%% one expression so this is illegal here as we don't do it.
%%
%% We keep track of all existing variables so when we get a variable
%% in a pattern we can check if this variable has been used before. If
%% so then we must create a new unique variable, add a guard test and
%% add the old-new mapping to the variable table. The existence is
%% global while the mapping is local to that scope. Multiple
%% occurences of variables in an LFE pattern map directly to multiple
%% occurrences in the Erlang AST.
%% Use macros for key-value tables if they exist.
-ifdef(HAS_FULL_KEYS).
-define(NEW_VT, #{}).
-define(VT_GET(K, Vt), maps:get(K, Vt)).
-define(VT_GET(K, Vt, Def), maps:get(K, Vt, Def)).
-define(VT_IS_KEY(K, Vt), maps:is_key(K, Vt)).
-define(VT_PUT(K, V, Vt), Vt#{K => V}).
-else.
-define(NEW_VT, orddict:new()).
-define(VT_GET(K, Vt), orddict:fetch(K, Vt)).
-define(VT_GET(K, Vt, Def),
%% Safe as no new variables created.
case orddict:is_key(K, Vt) of
true -> orddict:fetch(K, Vt);
false -> Def
end).
-define(VT_IS_KEY(K, Vt), orddict:is_key(K, Vt)).
-define(VT_PUT(K, V, Vt), orddict:store(K, V, Vt)).
-endif.
%% safe_fetch(Key, Dict, Default) -> Value.
%% Fetch a value with a default if it doesn't exist.
%% safe_fetch(Key, Dict, Def) ->
%% case orddict:find(Key, Dict) of
%% {ok,Val} -> Val;
%% error -> Def
%% end.
-record(to, {vs=[], %Existing variables
vc=?NEW_VT, %Variable counters
imports=[], %Function renames
aliases=[] %Module aliases
}).
%% to_expr(Expr, LineNumber) -> ErlExpr.
%% to_expr(Expr, LineNumber, {Imports, Aliases}) -> ErlExpr.
%% to_exprs(Exprs, LineNumber) -> ErlExprs.
%% to_exprs(Exprs, LineNumber, {Imports, Aliases}) -> ErlExprs.
%%  Public entry points: translate LFE expression(s) into the Erlang AST
%%  at the given line number, starting from an empty variable table.
%%  Imports are function renames and Aliases module aliases, both
%%  collected by lfe_codegen (see the module comment).
to_expr(E, L) ->
    to_expr(E, L, {[],[]}).

to_expr(E, L, {Imports,Aliases}) ->
    ToSt = #to{imports=Imports,aliases=Aliases},
    {Ee,_} = to_expr(E, L, ?NEW_VT, ToSt),
    Ee.

to_exprs(Es, L) ->
    to_exprs(Es, L, {[],[]}).

to_exprs(Es, L, {Imports,Aliases}) ->
    ToSt = #to{imports=Imports,aliases=Aliases},
    {Ees,_} = to_exprs(Es, L, ?NEW_VT, ToSt),
    Ees.

%% Guard-expression entry points; same shape as the expression ones.
to_gexpr(E, L) ->
    to_gexpr(E, L, {[],[]}).

to_gexpr(E, L, {Imports,Aliases}) ->
    ToSt = #to{imports=Imports,aliases=Aliases},
    {Ee,_} = to_gexpr(E, L, ?NEW_VT, ToSt),
    Ee.

to_gexprs(Es, L) ->
    to_gexprs(Es, L, {[],[]}).

to_gexprs(Es, L, {Imports,Aliases}) ->
    ToSt = #to{imports=Imports,aliases=Aliases},
    {Ees,_} = to_gexprs(Es, L, ?NEW_VT, ToSt),
    Ees.
%% to_expr(Expr, LineNumber, VarTable, State) -> {ErlExpr,State}.
%% Core data special forms.
to_expr(?Q(Lit), L, _, St) ->
{to_lit(Lit, L),St};
to_expr([cons,H,T], L, Vt, St0) ->
{Eh,St1} = to_expr(H, L, Vt, St0),
{Et,St2} = to_expr(T, L, Vt, St1),
{{cons,L,Eh,Et},St2};
to_expr([car,E], L, Vt, St0) ->
{Ee,St1} = to_expr(E, L, Vt, St0),
{{call,L,{atom,L,hd},[Ee]},St1};
to_expr([cdr,E], L, Vt, St0) ->
{Ee,St1} = to_expr(E, L, Vt, St0),
{{call,L,{atom,L,tl},[Ee]},St1};
to_expr([list|Es], L, Vt, St) ->
to_list(fun to_expr/4, Es, L, Vt, St);
to_expr(['list*'|Es], L, Vt, St) -> %Macro
to_list_s(fun to_expr/4, Es, L, Vt, St);
to_expr([tuple|Es], L, Vt, St0) ->
{Ees,St1} = to_exprs(Es, L, Vt, St0),
{{tuple,L,Ees},St1};
to_expr([tref,T,I], L, Vt, St0) ->
{Et,St1} = to_expr(T, L, Vt, St0),
{Ei,St2} = to_expr(I, L, Vt, St1),
%% Get the argument order correct.
{{call,L,{atom,L,element},[Ei,Et]},St2};
to_expr([tset,T,I,V], L, Vt, St0) ->
    %% Translate (tset Tuple Index Value) to setelement(Index, Tuple, Value).
    {Et,St1} = to_expr(T, L, Vt, St0),
    {Ei,St2} = to_expr(I, L, Vt, St1),
    %% Fix: the result state was rebound to the already-bound St2, which
    %% turned this into a match assertion (and dropped any state change
    %% made while translating V). Thread it through as St3 instead.
    {Ev,St3} = to_expr(V, L, Vt, St2),
    %% Get the argument order correct.
    {{call,L,{atom,L,setelement},[Ei,Et,Ev]},St3};
to_expr([binary|Segs], L, Vt, St0) ->
{Esegs,St1} = to_expr_bitsegs(Segs, L, Vt, St0),
{{bin,L,Esegs},St1};
to_expr([map|Pairs], L, Vt, St0) ->
{Eps,St1} = to_map_pairs(fun to_expr/4, Pairs, map_field_assoc, L, Vt, St0),
{{map,L,Eps},St1};
to_expr([msiz,Map], L, Vt, St) ->
to_expr([map_size,Map], L, Vt, St);
to_expr([mref,Map,Key], L, Vt, St) ->
to_map_get(Map, Key, L, Vt, St);
to_expr([mset,Map|Pairs], L, Vt, St) ->
to_map_set(Map, Pairs, L, Vt, St);
to_expr([mupd,Map|Pairs], L, Vt, St) ->
to_map_update(Map, Pairs, L, Vt, St);
to_expr([mrem,Map|Keys], L, Vt, St) ->
to_map_remove(Map, Keys, L, Vt, St);
to_expr(['map-size',Map], L, Vt, St) ->
to_expr([map_size,Map], L, Vt, St);
to_expr(['map-get',Map,Key], L, Vt, St) ->
to_map_get(Map, Key, L, Vt, St);
to_expr(['map-set',Map|Pairs], L, Vt, St) ->
to_map_set(Map, Pairs, L, Vt, St);
to_expr(['map-update',Map|Pairs], L, Vt, St) ->
to_map_update(Map, Pairs, L, Vt, St);
to_expr(['map-remove',Map|Keys], L, Vt, St) ->
to_map_remove(Map, Keys, L, Vt, St);
%% Record special forms.
to_expr(['record',Name|Fs], L, Vt, St0) ->
{Efs,St1} = to_record_fields(fun to_expr/4, Fs, L, Vt, St0),
{{record,L,Name,Efs},St1};
%% make-record has been deprecated but we sill accept it for now.
to_expr(['make-record',Name|Fs], L, Vt, St) ->
to_expr(['record',Name|Fs], L, Vt, St);
to_expr(['is-record',E,Name], L, Vt, St0) ->
{Ee,St1} = to_expr(E, L, Vt, St0),
%% This expands to a function call.
{{call,L,{atom,L,is_record},[Ee,{atom,L,Name}]},St1};
to_expr(['record-index',Name,F], L, _, St) ->
{{record_index,L,Name,{atom,L,F}},St};
to_expr(['record-field',E,Name,F], L, Vt, St0) ->
{Ee,St1} = to_expr(E, L, Vt, St0),
{{record_field,L,Ee,Name,{atom,L,F}},St1};
to_expr(['record-update',E,Name|Fs], L, Vt, St0) ->
{Ee,St1} = to_expr(E, L, Vt, St0),
{Efs,St2} = to_record_fields(fun to_expr/4, Fs, L, Vt, St1),
{{record,L,Ee,Name,Efs},St2};
%% Struct special forms.
%% We try and do the same as Elixir 1.13.3 when we can.
to_expr(['struct',Name|Fs], L, Vt, St) ->
%% Need the right format to call the predefined mod:__struct_ function.
%% Call mod:__struct__ at runtime so we know ours is loaded, BAD!
Pairs = to_struct_pairs(Fs),
Make = [call,?Q(Name),?Q('__struct__'),[list|Pairs]],
to_expr(Make, L, Vt, St);
to_expr(['is-struct',E], L, Vt, St) ->
Is = ['case',E,
[[map,?Q('__struct__'),'|-struct-|'],
['when',[is_atom,'|-struct-|']],?Q(true)],
['_',?Q(false)]],
to_expr(Is, L, Vt, St);
to_expr(['is-struct',E,Name], L, Vt, St) ->
Is = ['case',E,
[[map,?Q('__struct__'),?Q(Name)],?Q(true)],
['_',?Q(false)]],
to_expr(Is, L, Vt, St);
to_expr(['struct-field',E,Name,F], L, Vt, St) ->
Field = ['case',E,
[[map,?Q('__struct__'),?Q(Name),?Q(F),'|-field-|'],'|-field-|'],
['|-struct-|',
[call,?Q(erlang),?Q(error),
[tuple,?Q(badstruct),?Q(Name),'|-struct-|']]]],
to_expr(Field, L, Vt, St);
to_expr(['struct-update',E,Name|Fs], L, Vt, St) ->
Update = ['case',E,
[['=',[map,?Q('__struct__'),?Q(Name)],'|-struct-|'],
['map-update','|-struct-|'|to_struct_fields(Fs)]],
['|-struct-|',
[call,?Q(erlang),?Q(error),
[tuple,?Q(badstruct),?Q(Name),'|-struct-|']]]],
to_expr(Update, L, Vt, St);
%% Function forms.
to_expr([function,F,Ar], L, Vt, St) ->
%% Must handle the special cases here.
case lfe_internal:is_erl_bif(F, Ar) of
true -> to_expr([function,erlang,F,Ar], L, Vt, St);
false ->
case lfe_internal:is_lfe_bif(F, Ar) of
true -> to_expr([function,lfe,F,Ar], L, Vt, St);
false -> {{'fun',L,{function,F,Ar}},St}
end
end;
to_expr([function,M,F,Ar], L, _, St) ->
%% Need the abstract values here.
{{'fun',L,{function,to_lit(M, L),to_lit(F, L),to_lit(Ar, L)}},St};
%% Special known data type operations.
to_expr(['andalso'|Es], L, Vt, St) ->
to_lazy_logic(fun to_expr/4, Es, 'andalso', L, Vt, St);
to_expr(['orelse'|Es], L, Vt, St) ->
to_lazy_logic(fun to_expr/4, Es, 'orelse', L, Vt, St);
%% Core closure special forms.
to_expr([lambda,Args|Body], L, Vt, St) ->
to_lambda(Args, Body, L, Vt, St);
to_expr(['match-lambda'|Cls], L, Vt, St) ->
to_match_lambda(Cls, L, Vt, St);
to_expr(['let',Lbs|B], L, Vt, St) ->
to_let(Lbs, B, L, Vt, St);
to_expr(['let-function'|_], L, _, _) -> %Can't do this efficently
illegal_code_error(L, 'let-function');
to_expr(['letrec-function'|_], L, _, _) -> %Can't do this efficently
illegal_code_error(L, 'letrec-function');
%% Core control special forms.
to_expr([progn|B], L, Vt, St) ->
to_block(B, L, Vt, St);
to_expr(['if'|Body], L, Vt, St) ->
to_if(Body, L, Vt, St);
to_expr(['case'|Body], L, Vt, St) ->
to_case(Body, L, Vt, St);
to_expr(['receive'|Cls], L, Vt, St) ->
to_receive(Cls, L, Vt, St);
to_expr(['catch'|B], L, Vt, St0) ->
{Eb,St1} = to_block(B, L, Vt, St0),
{{'catch',L,Eb},St1};
to_expr(['try'|Try], L, Vt, St) -> %Can't do this yet
%% lfe_io:format("try ~w\n~p\n", [L,['try'|Try]]),
to_try(Try, L, Vt, St);
to_expr([funcall,F|As], L, Vt, St0) ->
{Ef,St1} = to_expr(F, L, Vt, St0),
{Eas,St2} = to_exprs(As, L, Vt, St1),
{{call,L,Ef,Eas},St2};
%% List/binary comprehensions.
to_expr([lc,Qs,E], L, Vt, St) ->
to_listcomp(Qs, E, L, Vt, St);
to_expr(['list-comp',Qs,E], L, Vt, St) ->
to_listcomp(Qs, E, L, Vt, St);
to_expr([bc,Qs,BS], L, Vt, St) ->
to_binarycomp(Qs, BS, L, Vt, St);
to_expr(['binary-comp',Qs,BS], L, Vt, St) ->
to_binarycomp(Qs, BS, L, Vt, St);
%% General function calls.
to_expr([call,?Q(erlang),?Q(F)|As], L, Vt, St0) ->
%% This is semantically the same but some tools behave differently
%% (qlc_pt).
{Eas,St1} = to_exprs(As, L, Vt, St0),
case is_erl_op(F, length(As)) of
true -> {list_to_tuple([op,L,F|Eas]),St1};
false ->
to_remote_call({atom,L,erlang}, {atom,L,F}, Eas, L, St1)
end;
to_expr([call,?Q(M0),F|As], L, Vt, St0) ->
%% Alias modules are literals.
Mod = case orddict:find(M0, St0#to.aliases) of
{ok,M1} -> M1;
error -> M0
end,
{Ef,St1} = to_expr(F, L, Vt, St0),
{Eas,St2} = to_exprs(As, L, Vt, St1),
to_remote_call({atom,L,Mod}, Ef, Eas, L, St2);
to_expr([call,M,F|As], L, Vt, St0) ->
{Em,St1} = to_expr(M, L, Vt, St0),
{Ef,St2} = to_expr(F, L, Vt, St1),
{Eas,St3} = to_exprs(As, L, Vt, St2),
to_remote_call(Em, Ef, Eas, L, St3);
%% General function call.
to_expr([F|As], L, Vt, St0) when is_atom(F) ->
{Eas,St1} = to_exprs(As, L, Vt, St0),
Ar = length(As), %Arity
%% Check for import.
case orddict:find({F,Ar}, St1#to.imports) of
{ok,{Mod,R}} -> %Imported
to_remote_call({atom,L,Mod}, {atom,L,R}, Eas, L, St1);
error -> %Not imported
case is_erl_op(F, Ar) of
true -> {list_to_tuple([op,L,F|Eas]),St1};
false ->
case lfe_internal:is_lfe_bif(F, Ar) of
true ->
to_remote_call({atom,L,lfe}, {atom,L,F}, Eas, L, St1);
false ->
{{call,L,{atom,L,F},Eas},St1}
end
end
end;
to_expr([_|_]=List, L, _, St) ->
case lfe_lib:is_posint_list(List) of
true -> {{string,L,List},St};
false ->
illegal_code_error(L, list) %Not right!
end;
to_expr(V, L, Vt, St) when is_atom(V) -> %Unquoted atom
to_expr_var(V, L, Vt, St);
to_expr(Lit, L, _, St) -> %Everything else is a literal
{to_lit(Lit, L),St}.
%% to_exprs(Exprs, LineNumber, VarTable, State) -> {Exprs,State}.
%%  Translate a sequence of expressions left to right, threading the
%%  state through.
to_exprs(Es, L, Vt, St) ->
    lists:mapfoldl(fun (E, S) -> to_expr(E, L, Vt, S) end, St, Es).
%% to_expr_var(Var, LineNumber, VarTable, State) -> {ErlVar,State}.
%%  Map an LFE variable to its renamed Erlang variable, defaulting to
%%  the name itself when it has no mapping in the table.
to_expr_var(V, L, Vt, St) ->
    {{var,L,?VT_GET(V, Vt, V)},St}.
%% to_list(ExprFun, Elements, LineNumber, VarTable, State) -> {ListExpr, State}.
%%  Build a proper list as nested cons cells.  Note the elements are
%%  translated right to left (innermost tail first), matching the
%%  original foldr state threading.
to_list(Expr, [E|Es], L, Vt, St0) ->
    {Etail,St1} = to_list(Expr, Es, L, Vt, St0),
    {Ee,St2} = Expr(E, L, Vt, St1),
    {{cons,L,Ee,Etail},St2};
to_list(_Expr, [], L, _Vt, St) ->
    {{nil,L},St}.
%% to_list_s(ExprFun, Elements, LineNumber, VarTable, State) ->
%%  {ListExpr, State}.
%%  A list* macro expression that probably should have been expanded.
%%  The last element becomes the improper tail of the cons chain.
to_list_s(Fun, [Last], L, Vt, St) ->
    Fun(Last, L, Vt, St);
to_list_s(Fun, [H|T], L, Vt, StAcc) ->
    {Eh,St1} = Fun(H, L, Vt, StAcc),
    {Et,St2} = to_list_s(Fun, T, L, Vt, St1),
    {{cons,L,Eh,Et},St2};
to_list_s(_Fun, [], L, _Vt, St) ->
    {{nil,L},St}.
%% to_remote_call(Module, Function, Args, LineNumber, State) ->
%%  {Call,State}.
%%  Build an abstract-format remote call Mod:Fun(Args).
to_remote_call(M, F, As, L, St) ->
    Remote = {remote,L,M,F},
    {{call,L,Remote,As},St}.
%% is_erl_op(Op, Arity) -> boolean().
%%  Is Op/Arity one of the known Erlang operators (arithmetic, boolean,
%%  comparison, list or send)?
is_erl_op(Op, Ar) ->
    Preds = [fun erl_internal:arith_op/2,
             fun erl_internal:bool_op/2,
             fun erl_internal:comp_op/2,
             fun erl_internal:list_op/2,
             fun erl_internal:send_op/2],
    lists:any(fun (P) -> P(Op, Ar) end, Preds).
%% to_body(Exprs, LineNumber, VarTable, State) -> {Body,State}.
%%  Translate a clause body: a sequence of expressions.
to_body(Es, L, Vt, St) ->
    lists:mapfoldl(fun (E, S) -> to_expr(E, L, Vt, S) end, St, Es).
%% to_expr_bitsegs(Segs, LineNumber, VarTable, State) -> {Segs,State}.
%%  We don't do any real checking here but just assume that everything
%%  is correct and in worst case pass the buck to the Erlang compiler.
to_expr_bitsegs(Segs, L, Vt, St) ->
    lists:mapfoldl(fun (Seg, S) -> to_bitseg(fun to_expr/4, Seg, L, Vt, S) end,
                   St, Segs).
%% to_bitseg(ExprFun, Seg, LineNumber, VarTable, State) -> {Seg,State}.
%%  Translate one binary segment to abstract format.  We must specially
%%  handle the case where the segment is a string, which becomes a
%%  literal string element with default size and type.
to_bitseg(Expr, [Val|Specs]=Seg, L, Vt, St) ->
    %% io:format("tbs ~p ~p\n ~p\n", [Seg,Vt,St]),
    case lfe_lib:is_posint_list(Seg) of
        true ->
            %% The whole segment is a list of positive ints: a string.
            {{bin_element,L,{string,L,Seg},default,default},St};
        false ->
            to_bin_element(Expr, Val, Specs, L, Vt, St)
    end;
to_bitseg(Expr, Val, L, Vt, St) ->
    %% A bare value with no specifiers.
    to_bin_element(Expr, Val, [], L, Vt, St).

%% to_bin_element(ExprFun, Value, Specs, LineNumber, VarTable, State) ->
%%  {BinElement,State}.
%%  Translate the value, then split the specifiers into a size and a
%%  type-spec list for the abstract bin_element.
to_bin_element(Expr, Val, Specs, L, Vt, St0) ->
    {Eval,St1} = Expr(Val, L, Vt, St0),
    {Size,Type} = to_bitseg_type(Specs, default, []),
    {Esiz,St2} = to_bit_size(Size, L, Vt, St1),
    {{bin_element,L,Eval,Esiz,Type},St2}.
%% to_bitseg_type(Specs, Size, Type) -> {Size,Type}.
%%  Separate a list of segment specifiers into the size (a later
%%  [size,S] overrides an earlier one) and the remaining type specs,
%%  keeping the type specs in their original order.  An empty type
%%  list becomes 'default'.
to_bitseg_type(Specs, Size0, Type0) ->
    Classify = fun ([size,Sz], {_Sz,Ts}) -> {Sz,Ts};
                   ([unit,U], {Sz,Ts}) -> {Sz,[{unit,U}|Ts]};
                   (Spec, {Sz,Ts}) -> {Sz,[Spec|Ts]}
               end,
    {Size1,RevType} = lists:foldl(Classify, {Size0,lists:reverse(Type0)}, Specs),
    case lists:reverse(RevType) of
        [] -> {Size1,default};
        Type1 -> {Size1,Type1}
    end.
%% to_bit_size(Size, LineNumber, VarTable, State) -> {Size,State}.
%%  all/default/undefined all mean the default segment size; anything
%%  else is a real size expression and is translated.
to_bit_size(Size, _L, _Vt, St)
  when Size =:= all; Size =:= default; Size =:= undefined ->
    {default,St};
to_bit_size(Size, L, Vt, St) ->
    to_expr(Size, L, Vt, St).
%% to_map_get(Map, Key, L, Vt, State) -> {MapGet, State}.
%%  Check if there is a BIF and in that case use it as this will also
%%  work in a guard. The linter has checked if map_get is guardable.
%%  NOTE(review): this probes the COMPILING emulator with
%%  function_exported/3, so the generated call depends on the OTP
%%  release doing the compilation, not the one running the code --
%%  confirm this is intended.
to_map_get(Map, Key, L, Vt, St0) ->
    %% Note the argument order: map_get(Key, Map) / maps:get(Key, Map).
    {Eas,St1} = to_exprs([Key,Map], L, Vt, St0),
    case erlang:function_exported(erlang, map_get, 2) of
        true -> {{call,L,{atom,L,map_get},Eas},St1};
        false ->
            to_remote_call({atom,L,maps}, {atom,L,get}, Eas, L, St1)
    end.
%% to_map_set(Map, Pairs, L, Vt, State) -> {MapSet,State}.
%% to_map_update(Map, Pairs, L, Vt, State) -> {MapUpdate,State}.
%% to_map_remove(Map, Keys, L, Vt, State) -> {MapRemove,State}.
%%  Set uses assoc (=>) fields; update uses exact (:=) fields, which
%%  require the keys to already be present in the map.
to_map_set(Map, Pairs, L, Vt, St0) ->
    {Em,St1} = to_expr(Map, L, Vt, St0),
    {Eps,St2} = to_map_pairs(fun to_expr/4, Pairs, map_field_assoc, L, Vt, St1),
    {{map,L,Em,Eps},St2}.

to_map_update(Map, Pairs, L, Vt, St0) ->
    {Em,St1} = to_expr(Map, L, Vt, St0),
    {Eps,St2} = to_map_pairs(fun to_expr/4, Pairs, map_field_exact, L, Vt, St1),
    {{map,L,Em,Eps},St2}.
%% to_map_remove(Map, Keys, L, Vt, State) -> {MapRemove,State}.
%%  Build nested maps:remove/2 calls, one per key, with the original
%%  map expression innermost.
to_map_remove(Map, Keys, L, Vt, St0) ->
    {Emap,St1} = to_expr(Map, L, Vt, St0),
    {Ekeys,St2} = to_exprs(Keys, L, Vt, St1),
    Remove = fun (Ekey, {Acc,StA}) ->
                     to_remote_call({atom,L,maps}, {atom,L,remove},
                                    [Ekey,Acc], L, StA)
             end,
    lists:foldl(Remove, {Emap,St2}, Ekeys).
%% to_map_pairs(ExprFun, Pairs, FieldType, LineNumber, VarTable, State) ->
%%  {Fields,State}.
%%  Translate a flat [Key,Value,...] list into abstract map fields of
%%  the given field type (map_field_assoc or map_field_exact).
to_map_pairs(Expr, [Key,Val|Rest], Field, L, Vt, StA) ->
    {Ekey,StB} = Expr(Key, L, Vt, StA),
    {Eval,StC} = Expr(Val, L, Vt, StB),
    {Erest,StD} = to_map_pairs(Expr, Rest, Field, L, Vt, StC),
    {[{Field,L,Ekey,Eval}|Erest],StD};
to_map_pairs(_Expr, [], _Field, _L, _Vt, St) ->
    {[],St}.
%% to_record_fields(ExprFun, Fields, LineNumber, VarTable, State) ->
%%  {Fields,State}.
%%  Translate a flat [Name,Value,...] list into abstract record fields.
%%  A '_' field name becomes the wildcard field {var,L,'_'} which sets
%%  the default for all unnamed fields.
to_record_fields(Expr, [Fname,Fval|Rest], L, Vt, St0) ->
    {Eval,St1} = Expr(Fval, L, Vt, St0),
    {Erest,St2} = to_record_fields(Expr, Rest, L, Vt, St1),
    Ekey = case Fname of
               '_' -> {var,L,'_'};              %Special wildcard field
               _ -> {atom,L,Fname}
           end,
    {[{record_field,L,Ekey,Eval}|Erest],St2};
to_record_fields(_Expr, [], _L, _Vt, St) -> {[],St}.
%% to_struct_fields(Fields) -> Fields.
%%  Quote the field names in a flat [Name,Value,...] list so they
%%  become literal atoms in a map expression/pattern.
to_struct_fields([F,V|Fs]) ->
    [?Q(F),V|to_struct_fields(Fs)];
to_struct_fields([]) -> [].

%% to_struct_pairs(Fields) -> Pairs.
%%  Build [tuple,Name,Value] pairs for the Mod:'__struct__'/1 call.
to_struct_pairs([F,V|Fs]) ->
    [[tuple,?Q(F),V]|to_struct_pairs(Fs)];
to_struct_pairs([]) -> [].
%% to_fun_cls(Clauses, LineNumber) -> Clauses.
%% to_fun_cl(Clause, LineNumber) -> Clause.
%%  Function clauses.  Variables bound in the argument patterns are
%%  visible in the guard and the body, so the updated variable table
%%  from to_pats/4 is threaded into both.
to_fun_cls(Cls, L, Vt, St) ->
    Fun = fun (Cl, St0) -> to_fun_cl(Cl, L, Vt, St0) end,
    lists:mapfoldl(Fun, St, Cls).

to_fun_cl([As,['when']|B], L, Vt0, St0) ->
    %% Skip empty guards.
    {Eas,Vt1,St1} = to_pats(As, L, Vt0, St0),
    {Eb,St2} = to_body(B, L, Vt1, St1),
    {{clause,L,Eas,[],Eb},St2};
to_fun_cl([As,['when'|G]|B], L, Vt0, St0) ->
    {Eas,Vt1,St1} = to_pats(As, L, Vt0, St0),
    {Eg,St2} = to_guard(G, L, Vt1, St1),
    {Eb,St3} = to_body(B, L, Vt1, St2),
    {{clause,L,Eas,[Eg],Eb},St3};
to_fun_cl([As|B], L, Vt0, St0) ->
    {Eas,Vt1,St1} = to_pats(As, L, Vt0, St0),
    {Eb,St2} = to_body(B, L, Vt1, St1),
    {{clause,L,Eas,[],Eb},St2}.
%% to_lazy_logic(ExprFun, Exprs, Type, LineNumber, VarTable, State) ->
%%  {Logic,State}.
%%  Build a right-associated chain of andalso/orelse operators, so
%%  the expressions go pairwise right-to-left.  At least two
%%  expressions are expected.
to_lazy_logic(Expr, [E|Es], Op, L, Vt, St0) ->
    {Ee,St1} = Expr(E, L, Vt, St0),
    case Es of
        [Last] ->
            %% Final pair: close the chain.
            {Elast,St2} = Expr(Last, L, Vt, St1),
            {{op,L,Op,Ee,Elast},St2};
        _ ->
            {Erest,St2} = to_lazy_logic(Expr, Es, Op, L, Vt, St1),
            {{op,L,Op,Ee,Erest},St2}
    end.
%% to_lambda(Args, Body, LineNumber, VarTable, State) -> {Fun,State}.
%%  A lambda becomes a fun with exactly one clause.
to_lambda(As, B, L, Vt, St0) ->
    {Ecl,St1} = to_fun_cl([As|B], L, Vt, St0),
    {{'fun',L,{clauses,[Ecl]}},St1}.

%% to_match_lambda(Clauses, LineNumber, VarTable, State) -> {Fun,State}.
%%  A match-lambda becomes a fun with one clause per match clause.
to_match_lambda(Cls, L, Vt, St0) ->
    {Ecls,St1} = to_fun_cls(Cls, L, Vt, St0),
    {{'fun',L,{clauses,Ecls}},St1}.
%% to_let(VarBindings, Body, LineNumber, VarTable, State) -> {Block,State}.
%%  The bindings become match/case expressions followed by the body,
%%  all inside one Erlang block.  The body sees the variables bound by
%%  the bindings.
to_let(Lbs, B, L, Vt0, St0) ->
    {Ebindings,Vt1,St1} = to_let_bindings(Lbs, L, Vt0, St0),
    {Ebody,St2} = to_body(B, L, Vt1, St1),
    {{block,L,Ebindings ++ Ebody},St2}.
%% to_let_bindings(Bindings, LineNumber, VarTable, State) ->
%%  {Block,VarTable,State}.
%%  When we have a guard translate into a case but special case where
%%  we have an empty guard as erlang compiler doesn't like this.
to_let_bindings(Lbs, L, Vt, St) ->
    Fun = fun ([P,E], Vt0, St0) ->
                  %% Plain binding: a simple match.
                  {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
                  {Ee,St2} = to_expr(E, L, Vt0, St1),
                  {{match,L,Ep,Ee},Vt1,St2};
              ([P,['when'],E], Vt0, St0) ->
                  %% Skip empty guards.
                  {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
                  {Ee,St2} = to_expr(E, L, Vt0, St1),
                  {{match,L,Ep,Ee},Vt1,St2};
              ([P,['when'|G],E], Vt0, St0) ->
                  %% Guarded binding: a one-clause case.  NOTE(review):
                  %% the value expression is translated with the NEW
                  %% variable table (Vt1) here, unlike the unguarded
                  %% clauses which use Vt0 -- confirm this is intended.
                  {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
                  {Eg,St2} = to_guard(G, L, Vt1, St1),
                  {Ee,St3} = to_expr(E, L, Vt1, St2),
                  {{'case',L,Ee,[{clause,L,[Ep],[Eg],[Ep]}]},Vt1,St3}
          end,
    mapfoldl2(Fun, Vt, St, Lbs).
%% to_block(Expressions, LineNumber, VarTable, State) -> {Block,State}.
%%  An empty block just returns (), a single expression is returned
%%  unwrapped, and anything longer is wrapped in an Erlang block.
to_block(Es, L, Vt, St0) ->
    {Ees,St1} = to_exprs(Es, L, Vt, St0),
    case Ees of
        [] -> {{nil,L},St1};                    %Returns ()
        [Single] -> {Single,St1};               %No need to wrap
        _ -> {{block,L,Ees},St1}                %Must wrap
    end.
%% to_if(IfBody, LineNumber, VarTable, State) -> {ErlCase,State}.
%%  An LFE if becomes a case over the test with true/false clauses.
%%  A missing else branch defaults to the literal 'false'.
to_if([Test,True], L, Vt, St) ->
    to_if(Test, True, ?Q(false), L, Vt, St);
to_if([Test,True,False], L, Vt, St) ->
    to_if(Test, True, False, L, Vt, St);
to_if(_, L, _, _) ->
    illegal_code_error(L, 'if').

to_if(Test, True, False, L, Vt, St0) ->
    {Etest,St1} = to_expr(Test, L, Vt, St0),
    {Ecls,St2} = to_icr_cls([[?Q(true),True],[?Q(false),False]], L, Vt, St1),
    {{'case',L,Etest,Ecls},St2}.
%% to_case(CaseBody, LineNumber, VarTable, State) -> {ErlCase,State}.
%%  The first element is the expression being matched, the rest are
%%  the clauses; anything else is illegal.
to_case([Expr|Clauses], L, Vt, St0) ->
    {Eexpr,St1} = to_expr(Expr, L, Vt, St0),
    {Eclauses,St2} = to_icr_cls(Clauses, L, Vt, St1),
    {{'case',L,Eexpr,Eclauses},St2};
to_case(_Other, L, _, _) ->
    illegal_code_error(L, 'case').
%% to_receive(RecClauses, LineNumber, VarTable, State) -> {ErlRec,State}.
%%  Get the right receive form depending on whether there is an after.
to_receive(Cls0, L, Vt, St0) ->
    %% Split the clause list at the (optional, trailing) after clause.
    Split = fun (['after'|_]) -> false;
                (_) -> true
            end,
    {Cls1,A} = lists:splitwith(Split, Cls0),
    {Ecls,St1} = to_icr_cls(Cls1, L, Vt, St0),
    case A of
        [['after',T|B]] ->
            %% receive ... after Timeout -> Body end
            {Et,St2} = to_expr(T, L, Vt, St1),
            {Eb,St3} = to_body(B, L, Vt, St2),
            {{'receive',L,Ecls,Et,Eb},St3};
        [] ->
            {{'receive',L,Ecls},St1}
    end.
%% to_icr_cls(Clauses, LineNumber, VarTable, State) -> {Clauses,State}.
%% to_icr_cl(Clause, LineNumber, VarTable, State) -> {Clause,State}.
%%  If/case/receive clauses.  Exactly one pattern, an optional guard,
%%  then the body.  Variables bound in the pattern are visible in the
%%  guard and the body.
to_icr_cls(Cls, L, Vt, St) ->
    Fun = fun (Cl, St0) -> to_icr_cl(Cl, L, Vt, St0) end,
    lists:mapfoldl(Fun, St, Cls).

to_icr_cl([P,['when']|B], L, Vt0, St0) ->
    %% Skip empty guards.
    {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
    {Eb,St2} = to_body(B, L, Vt1, St1),
    {{clause,L,[Ep],[],Eb},St2};
to_icr_cl([P,['when'|G]|B], L, Vt0, St0) ->
    {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
    {Eg,St2} = to_guard(G, L, Vt1, St1),
    {Eb,St3} = to_body(B, L, Vt1, St2),
    {{clause,L,[Ep],[Eg],Eb},St3};
to_icr_cl([P|B], L, Vt0, St0) ->
    {Ep,Vt1,St1} = to_pat(P, L, Vt0, St0),
    {Eb,St2} = to_body(B, L, Vt1, St1),
    {{clause,L,[Ep],[],Eb},St2}.
%% to_try(Try, LineNumber, VarTable, State) -> {ErlTry,State}.
%%  Step down the try body doing each section separately then put them
%%  together. We expand _ catch pattern to {_,_,_}. We remove wrapping
%%  progn in try expression which is not really necessary.
to_try([E|Try], L, Vt, St0) ->
    {Ee,St1} = to_try_expr(E, L, Vt, St0),
    {Ecase,Ecatch,Eafter,St2} = to_try_sections(Try, L, Vt, St1, [], [], []),
    {{'try',L,Ee,Ecase,Ecatch,Eafter},St2}.

%% to_try_expr(Expr, LineNumber, VarTable, State) -> {Exprs,State}.
%%  The abstract try body is a list of expressions, so a wrapping
%%  progn is unwrapped rather than generating a nested block.
to_try_expr([progn|Exprs], L, Vt, St) ->
    to_exprs(Exprs, L, Vt, St);
to_try_expr(Expr, L, Vt, St) ->
    to_exprs([Expr], L, Vt, St).

%% to_try_sections(Sections, LineNumber, VarTable, State,
%%                 Case, Catch, After) -> {Case,Catch,After,State}.
%%  Collect the case/catch/after sections.  A repeated section
%%  silently replaces an earlier one of the same kind.
to_try_sections([['case'|Case]|Try], L, Vt, St0, _, Ecatch, Eafter) ->
    {Ecase,St1} = to_icr_cls(Case, L, Vt, St0),
    to_try_sections(Try, L, Vt, St1, Ecase, Ecatch, Eafter);
to_try_sections([['catch'|Catch]|Try], L, Vt, St0, Ecase, _, Eafter) ->
    {Ecatch,St1} = to_try_catch_cls(Catch, L, Vt, St0),
    to_try_sections(Try, L, Vt, St1, Ecase, Ecatch, Eafter);
to_try_sections([['after'|After]|Try], L, Vt, St0, Ecase, Ecatch, _) ->
    {Eafter,St1} = to_exprs(After, L, Vt, St0),
    to_try_sections(Try, L, Vt, St1, Ecase, Ecatch, Eafter);
to_try_sections([], _, _, St, Ecase, Ecatch, Eafter) ->
    {Ecase,Ecatch,Eafter,St}.
%% to_try_catch_cls(Clauses, LineNumber, VarTable, State) ->
%%  {Clauses,State}.
to_try_catch_cls(Cls, L, Vt, St) ->
    Fun = fun (Cl, St0) -> to_try_catch_cl(Cl, L, Vt, St0) end,
    lists:mapfoldl(Fun, St, Cls).

%% to_try_catch_cl(Clause, LineNumber, VarTable, State) -> {Clause,State}.
%%  A bare _ catch pattern is expanded to the full {_,_,_} triple
%%  before normal clause translation.
to_try_catch_cl(['_'|Body], L, Vt, St) ->
    to_try_catch_cl([[tuple,'_','_','_']|Body], L, Vt, St);
to_try_catch_cl(Cl, L, Vt, St) ->
    to_icr_cl(Cl, L, Vt, St).
%% to_listcomp(Qualifiers, Expr, LineNumber, VarTable, State) ->
%%  {ListComprehension,State}.
%%  The expression is translated with the variable table produced by
%%  the qualifiers, so it sees generator-bound variables.
to_listcomp(Qs, E, L, Vt0, St0) ->
    {Eqs,Vt1,St1} = to_comp_quals(Qs, L, Vt0, St0),
    {Ee,St2} = to_expr(E, L, Vt1, St1),
    {{lc,L,Ee,Eqs},St2}.

%% to_binarycomp(Qualifiers, BitSeg, LineNumber, VarTable, State) ->
%%  {BinaryComprehension,State}.
%%  The expression must be bitsegment as this is what will go into the
%%  binary and it must be wrapped as a binary.
to_binarycomp(Qs, Seg, L, Vt0, St0) ->
    {Eqs,Vt1,St1} = to_comp_quals(Qs, L, Vt0, St0),
    {Eseg,St2} = to_bitseg(fun to_expr/4, Seg, L, Vt1, St1),
    {{bc,L,{bin,L,[Eseg]},Eqs},St2}.
%% to_comp_quals(Qualifiers, LineNumber, VarTable, State) ->
%%  {Qualifiers,VarTable,State}.
%%  Can't use mapfoldl2 as guard handling modifies Qualifiers.
to_comp_quals([['<-',P,E]|Qs], L, Vt0, St0) ->
    %% List generator.
    {Gen,Vt1,St1} = to_comp_listgen(P, E, L, Vt0, St0),
    {Eqs,Vt2,St2} = to_comp_quals(Qs, L, Vt1, St1),
    {[Gen|Eqs],Vt2,St2};
to_comp_quals([['<-',P,['when'|G],E]|Qs], L, Vt, St) ->
    %% Move guards to qualifiers as tests.
    to_comp_quals([['<-',P,E]|G ++ Qs], L, Vt, St);
to_comp_quals([['<=',P,E]|Qs], L, Vt0, St0) ->
    %% Binary generator.
    {Gen,Vt1,St1} = to_comp_binarygen(P, E, L, Vt0, St0),
    {Eqs,Vt2,St2} = to_comp_quals(Qs, L, Vt1, St1),
    {[Gen|Eqs],Vt2,St2};
to_comp_quals([['<=',P,['when'|G],E]|Qs], L, Vt, St) ->
    %% Move guards to qualifiers as tests.
    to_comp_quals([['<=',P,E]|G ++ Qs], L, Vt, St);
to_comp_quals([Test|Qs], L, Vt0, St0) ->
    %% Anything else is a filter test expression.
    {Etest,St1} = to_expr(Test, L, Vt0, St0),
    {Eqs,Vt1,St2} = to_comp_quals(Qs, L, Vt0, St1),
    {[Etest|Eqs],Vt1,St2};
to_comp_quals([], _, Vt, St) -> {[],Vt,St}.
%% to_comp_listgen(Pattern, Expression, LineNumber, VarTable, State) ->
%%  {Generator,VarTable,State}.
%% to_comp_binarygen(Pattern, BitSeg, LineNumber, VarTable, State) ->
%%  {Generator,VarTable,State}.
%%  Must be careful in a generator to do the Expression first as the
%%  Pattern may update variables in it and changes should only be seen
%%  AFTER the generator.
to_comp_listgen(Pat, Exp, L, Vt0, St0) ->
    {Eexp,St1} = to_expr(Exp, L, Vt0, St0),
    {Epat,Vt1,St2} = to_pat(Pat, L, Vt0, St1),
    {{generate,L,Epat,Eexp},Vt1,St2}.

to_comp_binarygen(Pat, Exp, L, Vt0, St0) ->
    {Eexp,St1} = to_expr(Exp, L, Vt0, St0),
    {Epat,_Pva,Vt1,St2} = to_pat_bitseg(Pat, L, [], Vt0, St1),
    %% The pattern is a whole binary.
    {{b_generate,L,{bin,L,[Epat]},Eexp},Vt1,St2}.
%% new_to_var(Base, State) -> {VarName, State}.
%%  Each base has its own counter which makes it easier to keep track
%%  of a series. We make sure the variable actually is new and update
%%  the state.
new_to_var(Base, #to{vs=Vs,vc=Vct}=St) ->
    C = ?VT_GET(Base, Vct, 0),
    new_to_var_loop(Base, C, Vs, Vct, St).

%% Generate -Base-C- names, bumping the counter until one is not
%% already in the known-variables list.
new_to_var_loop(Base, C, Vs, Vct, St) ->
    V = list_to_atom(lists:concat(["-",Base,"-",C,"-"])),
    case lists:member(V, Vs) of
        true -> new_to_var_loop(Base, C+1, Vs, Vct, St);
        false ->
            %% Remember the new variable and the next counter value.
            {V,St#to{vs=[V|Vs],vc=?VT_PUT(Base, C+1, Vct)}}
    end.
%% to_guard(GuardTests, LineNumber, VarTable, State) -> {ErlGuard,State}.
%% to_guard_test(Test, LineNumber, VarTable, State) -> {ErlGuardTest,State}.
%%  Having a top level guard function allows us to optimise at the top
%%  guard level.
to_guard(Es, L, Vt, St) ->
    Fun = fun (E, St0) -> to_guard_test(E, L, Vt, St0) end,
    lists:mapfoldl(Fun, St, Es).

to_guard_test(['is-struct',E], L, Vt, St) ->
    %% A struct test: the '__struct__' value must be an atom.
    Is = [is_atom,[mref,E,?Q('__struct__')]],
    to_gexpr(Is, L, Vt, St);
to_guard_test(['is-struct',E,Name], L, Vt, St) ->
    %% A named struct test: '__struct__' must equal the given name.
    Is = ['=:=',[mref,E,?Q('__struct__')],?Q(Name)],
    to_gexpr(Is, L, Vt, St);
to_guard_test(Test, L, Vt, St) ->
    to_gexpr(Test, L, Vt, St).
%% to_gexpr(Expr, LineNumber, VarTable, State) -> {ErlExpr,State}.
%%  Translate a guard expression.  This parallels to_expr/4 but only
%%  covers forms legal in guards: calls are restricted to Erlang
%%  operators/BIFs and anything else is an illegal-code error.
to_gexpr(?Q(Lit), L, _, St) ->
    {to_lit(Lit, L),St};
to_gexpr([cons,H,T], L, Vt, St0) ->
    {Eh,St1} = to_gexpr(H, L, Vt, St0),
    {Et,St2} = to_gexpr(T, L, Vt, St1),
    {{cons,L,Eh,Et},St2};
to_gexpr([car,E], L, Vt, St0) ->
    {Ee,St1} = to_gexpr(E, L, Vt, St0),
    {{call,L,{atom,L,hd},[Ee]},St1};
to_gexpr([cdr,E], L, Vt, St0) ->
    {Ee,St1} = to_gexpr(E, L, Vt, St0),
    {{call,L,{atom,L,tl},[Ee]},St1};
to_gexpr([list|Es], L, Vt, St) ->
    to_list(fun to_gexpr/4, Es, L, Vt, St);
to_gexpr(['list*'|Es], L, Vt, St) ->            %Macro
    to_list_s(fun to_gexpr/4, Es, L, Vt, St);
to_gexpr([tuple|Es], L, Vt, St0) ->
    {Ees,St1} = to_gexprs(Es, L, Vt, St0),
    {{tuple,L,Ees},St1};
to_gexpr([tref,T,I], L, Vt, St0) ->
    {Et,St1} = to_gexpr(T, L, Vt, St0),
    {Ei,St2} = to_gexpr(I, L, Vt, St1),
    %% Get the argument order correct.
    {{call,L,{atom,L,element},[Ei,Et]},St2};
to_gexpr([binary|Segs], L, Vt, St0) ->
    {Esegs,St1} = to_gexpr_bitsegs(Segs, L, Vt, St0),
    {{bin,L,Esegs},St1};
%% Map special forms.
to_gexpr([map|Pairs], L, Vt, St0) ->
    {Eps,St1} = to_map_pairs(fun to_gexpr/4, Pairs, map_field_assoc, L, Vt, St0),
    {{map,L,Eps},St1};
to_gexpr([msiz,Map], L, Vt, St) ->
    to_gexpr([map_size,Map], L, Vt, St);
to_gexpr([mref,Map,Key], L, Vt, St) ->
    to_gmap_get(Map, Key, L, Vt, St);
to_gexpr([mset,Map|Pairs], L, Vt, St) ->
    to_gmap_set(Map, Pairs, L, Vt, St);
to_gexpr([mupd,Map|Pairs], L, Vt, St) ->
    to_gmap_update(Map, Pairs, L, Vt, St);
to_gexpr(['map-size',Map], L, Vt, St) ->
    to_gexpr([map_size,Map], L, Vt, St);
to_gexpr(['map-get',Map,Key], L, Vt, St) ->
    to_gmap_get(Map, Key, L, Vt, St);
to_gexpr(['map-set',Map|Pairs], L, Vt, St) ->
    to_gmap_set(Map, Pairs, L, Vt, St);
to_gexpr(['map-update',Map|Pairs], L, Vt, St) ->
    to_gmap_update(Map, Pairs, L, Vt, St);
%% Record special forms.
to_gexpr(['record',Name|Fs], L, Vt, St0) ->
    {Efs,St1} = to_record_fields(fun to_gexpr/4, Fs, L, Vt, St0),
    {{record,L,Name,Efs},St1};
to_gexpr(['is-record',E,Name], L, Vt, St0) ->
    {Ee,St1} = to_gexpr(E, L, Vt, St0),
    %% This expands to a function call.
    {{call,L,{atom,L,is_record},[Ee,{atom,L,Name}]},St1};
to_gexpr(['record-index',Name,F], L, _, St) ->
    {{record_index,L,Name,{atom,L,F}},St};
to_gexpr(['record-field',E,Name,F], L, Vt, St0) ->
    {Ee,St1} = to_gexpr(E, L, Vt, St0),
    {{record_field,L,Ee,Name,{atom,L,F}},St1};
%% Struct special forms.
%% We try and do the same as Elixir 1.13.3 when we can.
to_gexpr(['is-struct',E], L, Vt, St) ->
    Is = ['andalso',
          [is_map,E],
          [call,?Q(erlang),?Q(is_map_key),?Q('__struct__'),E],
          [is_atom,[mref,E,?Q('__struct__')]]],
    to_gexpr(Is, L, Vt, St);
to_gexpr(['is-struct',E,Name], L, Vt, St) ->
    Is = ['andalso',
          [is_map,E],
          [call,?Q(erlang),?Q(is_map_key),?Q('__struct__'),E],
          ['=:=',[mref,E,?Q('__struct__')],?Q(Name)]],
    to_gexpr(Is, L, Vt, St);
to_gexpr(['struct-field',E,Name,F], L, Vt, St) ->
    Field = ['andalso',
             [is_map,E],['=:=',[mref,E,?Q('__struct__')],?Q(Name)],
             [mref,E,?Q(F)]],
    to_gexpr(Field, L, Vt, St);
%% Special known data type operations.
to_gexpr(['andalso'|Es], L, Vt, St) ->
    to_lazy_logic(fun to_gexpr/4, Es, 'andalso', L, Vt, St);
to_gexpr(['orelse'|Es], L, Vt, St) ->
    to_lazy_logic(fun to_gexpr/4, Es, 'orelse', L, Vt, St);
%% General function call.
to_gexpr([call,?Q(erlang),?Q(F)|As], L, Vt, St0) ->
    %% Only explicit calls to the erlang module are legal in guards.
    {Eas,St1} = to_gexprs(As, L, Vt, St0),
    case is_erl_op(F, length(As)) of
        true -> {list_to_tuple([op,L,F|Eas]),St1};
        false ->
            to_remote_call({atom,L,erlang}, {atom,L,F}, Eas, L, St1)
    end;
to_gexpr([call|_], L, _Vt, _St) ->
    illegal_code_error(L, call);
to_gexpr([F|As], L, Vt, St0) when is_atom(F) ->
    {Eas,St1} = to_gexprs(As, L, Vt, St0),
    Ar = length(As),
    case is_erl_op(F, Ar) of
        true -> {list_to_tuple([op,L,F|Eas]),St1};
        false ->
            {{call,L,{atom,L,F},Eas},St1}
    end;
to_gexpr([_|_]=List, L, _, St) ->
    case lfe_lib:is_posint_list(List) of
        true -> {{string,L,List},St};
        false ->
            illegal_code_error(L, list)         %Not right!
    end;
to_gexpr(V, L, Vt, St) when is_atom(V) ->       %Unquoted atom
    to_gexpr_var(V, L, Vt, St);
to_gexpr(Lit, L, _, St) ->                      %Everything else is a literal
    {to_lit(Lit, L),St}.
%% to_gexprs(Exprs, LineNumber, VarTable, State) -> {Exprs,State}.
to_gexprs(Es, L, Vt, St) ->
    Fun = fun (E, St0) -> to_gexpr(E, L, Vt, St0) end,
    lists:mapfoldl(Fun, St, Es).

%% to_gexpr_var(Var, LineNumber, VarTable, State) -> {ErlVar,State}.
%%  Map an LFE variable to its renamed Erlang variable, falling back
%%  to the name itself when unmapped.
to_gexpr_var(V, L, Vt, St) ->
    Var = ?VT_GET(V, Vt, V), %Hmm
    {{var,L,Var},St}.
%% to_gexpr_bitsegs(Segs, LineNumber, VarTable, State) -> {Segs,State}.
%%  Same as to_expr_bitsegs/4 but the elements are guard expressions.
to_gexpr_bitsegs(Segs, L, Vt, St) ->
    lists:mapfoldl(fun (Seg, S) -> to_bitseg(fun to_gexpr/4, Seg, L, Vt, S) end,
                   St, Segs).
%% to_gmap_get(Map, Key, LineNumber, VarTable, State) -> {MapGet,State}.
%% to_gmap_set(Map, Pairs, LineNumber, VarTable, State) -> {MapSet,State}.
%% to_gmap_update(Map, Pairs, LineNumber, VarTable, State) -> {MapUpdate,State}.
%%  Guard versions of the map operations.  Unlike to_map_get/5 this
%%  emits the map_get/2 call unconditionally -- it is presumed to be
%%  available as a guard BIF here.
to_gmap_get(Map, Key, L, Vt, St0) ->
    {Eas,St1} = to_gexprs([Key,Map], L, Vt, St0),
    {{call,L,{atom,L,map_get},Eas},St1}.

to_gmap_set(Map, Pairs, L, Vt, St0) ->
    {Em,St1} = to_gexpr(Map, L, Vt, St0),
    {Eps,St2} = to_map_pairs(fun to_gexpr/4, Pairs, map_field_assoc, L, Vt, St1),
    {{map,L,Em,Eps},St2}.

to_gmap_update(Map, Pairs, L, Vt, St0) ->
    {Em,St1} = to_gexpr(Map, L, Vt, St0),
    {Eps,St2} = to_map_pairs(fun to_gexpr/4, Pairs, map_field_exact, L, Vt, St1),
    {{map,L,Em,Eps},St2}.
%% to_pat(Pattern, LineNumber, VarTable, State) -> {Pattern,VarTable,State}.
%% to_pat(Pattern, LineNumber, PatVars, VarTable, State) ->
%%  {Pattern,VarTable,State}.
%%  PatVars tracks the variables already bound in THIS pattern so
%%  repeated occurrences reuse the same renamed Erlang variable.
to_pat(Pat, L, Vt0, St0) ->
    {Epat,_Pvs,Vt1,St1} = to_pat(Pat, L, [], Vt0, St0),
    {Epat,Vt1,St1}.
%% Translate one pattern.  Quoted terms and most bare terms are
%% literals; unquoted atoms are pattern variables.
to_pat([], L, Pvs, Vt, St) -> {{nil,L},Pvs,Vt,St};
to_pat(I, L, Pvs, Vt, St) when is_integer(I) ->
    {{integer,L,I},Pvs,Vt,St};
to_pat(F, L, Pvs, Vt, St) when is_float(F) ->
    {{float,L,F},Pvs,Vt,St};
to_pat(V, L, Pvs, Vt, St) when is_atom(V) ->    %Unquoted atom
    to_pat_var(V, L, Pvs, Vt, St);
to_pat(T, L, Pvs, Vt, St) when is_tuple(T) ->   %Tuple literal
    {to_lit(T, L),Pvs,Vt,St};
to_pat(B, L, Pvs, Vt, St) when is_binary(B) ->  %Binary literal
    {to_lit(B, L),Pvs,Vt,St};
to_pat(M, L, Pvs, Vt, St) when ?IS_MAP(M) ->    %Map literal
    {to_lit(M, L),Pvs,Vt,St};
to_pat(?Q(P), L, Pvs, Vt, St) ->                %Everything quoted here
    {to_lit(P, L),Pvs,Vt,St};
to_pat([cons,H,T], L, Pvs0, Vt0, St0) ->
    {[Eh,Et],Pvs1,Vt1,St1} = to_pats([H,T], L, Pvs0, Vt0, St0),
    {{cons,L,Eh,Et},Pvs1,Vt1,St1};
to_pat([list|Es], L, Pvs, Vt, St) ->
    to_pat_list(Es, L, Pvs, Vt, St);
to_pat(['list*'|Es], L, Pvs, Vt, St) ->         %Macro
    to_pat_list_s(Es, L, Pvs, Vt, St);
to_pat([tuple|Es], L, Pvs0, Vt0, St0) ->
    {Ees,Pvs1,Vt1,St1} = to_pats(Es, L, Pvs0, Vt0, St0),
    {{tuple,L,Ees},Pvs1,Vt1,St1};
to_pat([binary|Segs], L, Pvs, Vt, St) ->
    to_pat_binary(Segs, L, Pvs, Vt, St);
to_pat([map|Pairs], L, Pvs0, Vt0, St0) ->
    {As,Pvs1,Vt1,St1} = to_pat_map_pairs(Pairs, L, Pvs0, Vt0, St0),
    {{map,L,As},Pvs1,Vt1,St1};
%% Record patterns.
to_pat(['record',R|Fs], L, Pvs0, Vt0, St0) ->
    {Efs,Pvs1,Vt1,St1} = to_pat_rec_fields(Fs, L, Pvs0, Vt0, St0),
    {{record,L,R,Efs},Pvs1,Vt1,St1};
%% make-record has been deprecated but we still accept it for now.
to_pat(['make-record',R|Fs], L, Pvs, Vt, St) ->
    to_pat(['record',R|Fs], L, Pvs, Vt, St);
to_pat(['record-index',R,F], L, Pvs, Vt, St) ->
    {{record_index,L,R,{atom,L,F}},Pvs,Vt,St};
%% Struct patterns.
to_pat(['struct',Name|Fs], L, Pvs, Vt, St) ->
    %% A struct pattern is a map pattern with a literal '__struct__' key.
    Pat = [map,?Q('__struct__'),?Q(Name)|to_struct_fields(Fs)],
    to_pat(Pat, L, Pvs, Vt, St);
%% Alias pattern.
to_pat(['=',P1,P2], L, Pvs0, Vt0, St0) ->       %Alias
    {Ep1,Pvs1,Vt1,St1} = to_pat(P1, L, Pvs0, Vt0, St0),
    {Ep2,Pvs2,Vt2,St2} = to_pat(P2, L, Pvs1, Vt1, St1),
    {{match,L,Ep1,Ep2},Pvs2,Vt2,St2};
%% General string pattern.
to_pat([_|_]=List, L, Pvs, Vt, St) ->
    case lfe_lib:is_posint_list(List) of
        true -> {to_lit(List, L),Pvs,Vt,St};
        false -> illegal_code_error(L, string)
    end.
%% to_pats(Patterns, LineNumber, VarTable, State) ->
%%  {Patterns,VarTable,State}.
%% to_pats(Patterns, LineNumber, PatVars, VarTable, State) ->
%%  {Patterns,PatVars,VarTable,State}.
%%  Translate a sequence of patterns, sharing the seen-variable set so
%%  repeated variables across the patterns are the same variable.
to_pats(Ps, L, Vt0, St0) ->
    {Eps,_Pvs1,Vt1,St1} = to_pats(Ps, L, [], Vt0, St0),
    {Eps,Vt1,St1}.

to_pats(Ps, L, Pvs, Vt, St) ->
    Fun = fun (P, Pvs0, Vt0, St0) -> to_pat(P, L, Pvs0, Vt0, St0) end,
    mapfoldl3(Fun, Pvs, Vt, St, Ps).
%% to_pat_var(Var, LineNumber, PatVars, VarTable, State) ->
%%  {ErlVar,PatVars,VarTable,State}.
%%  The first occurrence of a variable in a pattern gets a fresh
%%  Erlang name; later occurrences in the SAME pattern reuse it so
%%  equality constraints between occurrences are preserved.
to_pat_var('_', L, Pvs, Vt, St) -> %Don't need to handle _
    {{var,L,'_'},Pvs,Vt,St};
to_pat_var(V, L, Pvs, Vt0, St0) ->
    case lists:member(V, Pvs) of
        true -> %Have seen this var in pattern
            V1 = ?VT_GET(V, Vt0), % so reuse it
            {{var,L,V1},Pvs,Vt0,St0};
        false ->
            %% New in this pattern: allocate and record a fresh name.
            {V1,St1} = new_to_var(V, St0),
            Vt1 = ?VT_PUT(V, V1, Vt0),
            {{var,L,V1},[V|Pvs],Vt1,St1}
    end.
%% to_pat_list(Elements, LineNumber, PatVars, VarTable, State) ->
%%  {ListPat,PatVars,VarTable,State}.
%%  Build a proper list pattern.  The tail is translated before the
%%  head, matching the original foldr accumulator threading.
to_pat_list([E|Es], L, Pvs0, Vt0, St0) ->
    {Etail,Pvs1,Vt1,St1} = to_pat_list(Es, L, Pvs0, Vt0, St0),
    {Ee,Pvs2,Vt2,St2} = to_pat(E, L, Pvs1, Vt1, St1),
    {{cons,L,Ee,Etail},Pvs2,Vt2,St2};
to_pat_list([], L, Pvs, Vt, St) ->
    {{nil,L},Pvs,Vt,St}.
%% to_pat_list_s(Elements, LineNumber, PatVars, VarTable, State) ->
%%  {ListPat,PatVars,VarTable,State}.
%%  A list* macro expression that probably should have been expanded.
%%  NOTE(review): the tail is translated BEFORE the head here, the
%%  opposite order from to_list_s/5 -- confirm the right-to-left
%%  variable allocation order is intended.
to_pat_list_s([E], L, Pvs, Vt, St) -> to_pat(E, L, Pvs, Vt, St);
to_pat_list_s([E|Es], L, Pvs0, Vt0, St0) ->
    {Les,Pvs1,Vt1,St1} = to_pat_list_s(Es, L, Pvs0, Vt0, St0),
    {Le,Pvs2,Vt2,St2} = to_pat(E, L, Pvs1, Vt1, St1),
    {{cons,L,Le,Les},Pvs2,Vt2,St2};
to_pat_list_s([], L, Pvs, Vt, St) -> {{nil,L},Pvs,Vt,St}.
%% to_pat_map_pairs(MapPairs, LineNumber, PatVars, VarTable, State) ->
%%  {Args,PatVars,VarTable,State}.
%%  Map patterns always use exact (:=) fields.
to_pat_map_pairs([Key,Val|Rest], L, PvsA, VtA, StA) ->
    {Ekey,PvsB,VtB,StB} = to_pat(Key, L, PvsA, VtA, StA),
    {Eval,PvsC,VtC,StC} = to_pat(Val, L, PvsB, VtB, StB),
    {Erest,PvsD,VtD,StD} = to_pat_map_pairs(Rest, L, PvsC, VtC, StC),
    {[{map_field_exact,L,Ekey,Eval}|Erest],PvsD,VtD,StD};
to_pat_map_pairs([], _L, Pvs, Vt, St) -> {[],Pvs,Vt,St}.
%% to_pat_binary(Segs, LineNumber, PatVars, VarTable, State) ->
%%  {Segs,PatVars,VarTable,State}.
%%  We don't do any real checking here but just assume that everything
%%  is correct and in worst case pass the buck to the Erlang compiler.
to_pat_binary(Segs, L, Pvs0, Vt0, St0) ->
    {Esegs,Pvs1,Vt1,St1} = to_pat_bitsegs(Segs, L, Pvs0, Vt0, St0),
    {{bin,L,Esegs},Pvs1,Vt1,St1}.

%% to_pat_bitsegs(Segs, LineNumber, PatVars, VarTable, State) ->
%%  {Segs,PatVars,VarTable,State}.
to_pat_bitsegs(Segs, L, Pvs, Vt, St) ->
    BitSeg = fun (Seg, Pvs0, Vt0, St0) ->
                     to_pat_bitseg(Seg, L, Pvs0, Vt0, St0)
             end,
    mapfoldl3(BitSeg, Pvs, Vt, St, Segs).
%% to_pat_bitseg(Seg, LineNumber, PatVars, VarTable, State) ->
%%  {Seg,PatVars,VarTable,State}.
%%  We must specially handle the case where the segment is a string.
to_pat_bitseg([Val|Specs]=Seg, L, Pvs, Vt, St) ->
    case lfe_lib:is_posint_list(Seg) of
        true ->
            %% A string segment with default size and type.
            {{bin_element,L,{string,L,Seg},default,default},Pvs,Vt,St};
        false ->
            to_pat_bin_element(Val, Specs, L, Pvs, Vt, St)
    end;
to_pat_bitseg(Val, L, Pvs, Vt, St) ->
    %% A bare value with no specifiers.
    to_pat_bin_element(Val, [], L, Pvs, Vt, St).

%% to_pat_bin_element(Value, Specs, LineNumber, PatVars, VarTable, State) ->
%%  {BinElement,PatVars,VarTable,State}.
to_pat_bin_element(Val, Specs, L, Pvs0, Vt0, St0) ->
    {Eval,Pvs1,Vt1,St1} = to_pat(Val, L, Pvs0, Vt0, St0),
    {Size,Type} = to_bitseg_type(Specs, default, []),
    {Esiz,Pvs2,Vt2,St2} = to_pat_bit_size(Size, L, Pvs1, Vt1, St1),
    {{bin_element,L,Eval,Esiz,Type},Pvs2,Vt2,St2}.
%% to_pat_bit_size(Size, LineNumber, PatVars, VarTable, State) ->
%%  {Size,PatVars,VarTable,State}.
%%  Only integers and variables make sense as a size in a pattern.
to_pat_bit_size(all, _, Pvs, Vt, St) -> {default,Pvs,Vt,St};
to_pat_bit_size(default, _, Pvs, Vt, St) -> {default,Pvs,Vt,St};
to_pat_bit_size(undefined, _, Pvs, Vt, St) -> {default,Pvs,Vt,St};
to_pat_bit_size(Size, L, Pvs, Vt, St) when is_integer(Size) ->
    {{integer,L,Size},Pvs,Vt,St};
to_pat_bit_size(Size, L, Pvs, Vt, St) when is_atom(Size) ->
    %% We require the variable to have a value here.
    Var = ?VT_GET(Size, Vt, Size), %Hmm
    {{var,L,Var},Pvs,Vt,St}.
%% to_pat_rec_fields(Fields, LineNumber, PatVars, VarTable, State) ->
%%  {Fields,PatVars,VarTable,State}.
%%  Translate a flat [Name,Pattern,...] list into abstract record
%%  pattern fields.  A '_' name becomes the wildcard field which
%%  matches all otherwise unmentioned fields.
to_pat_rec_fields([Fname,P|Fs], L, Pvs0, Vt0, St0) ->
    {Ep,Pvs1,Vt1,St1} = to_pat(P, L, Pvs0, Vt0, St0),
    {Efs,Pvs2,Vt2,St2} = to_pat_rec_fields(Fs, L, Pvs1, Vt1, St1),
    Ekey = case Fname of
               '_' -> {var,L,'_'};              %Special wildcard field
               _ -> {atom,L,Fname}
           end,
    {[{record_field,L,Ekey,Ep}|Efs],Pvs2,Vt2,St2};
to_pat_rec_fields([], _, Pvs, Vt, St) -> {[],Pvs,Vt,St}.
%% to_lit(Literal, LineNumber) -> ErlLiteral.
%%  Convert a literal value. Note that we KNOW it is a literal value.
to_lit(Lit, L) ->
    %% erl_parse:abstract/2 does all the work for us.
    erl_parse:abstract(Lit, L).
%% mapfoldl2(Fun, Acc1, Acc2, List) -> {List,Acc1,Acc2}.
%%  Like lists:mapfoldl/3 but threading two accumulators.
mapfoldl2(Fun, A0, B0, Es) ->
    Step = fun (E, {Acc,A,B}) ->
                   {E1,A1,B1} = Fun(E, A, B),
                   {[E1|Acc],A1,B1}
           end,
    {RevEs,A,B} = lists:foldl(Step, {[],A0,B0}, Es),
    {lists:reverse(RevEs),A,B}.
%% mapfoldl3(Fun, Acc1, Acc2, Acc3, List) -> {List,Acc1,Acc2,Acc3}.
%%  Like lists:mapfoldl/3 but threading three accumulators.
mapfoldl3(Fun, A0, B0, C0, Es) ->
    Step = fun (E, {Acc,A,B,C}) ->
                   {E1,A1,B1,C1} = Fun(E, A, B, C),
                   {[E1|Acc],A1,B1,C1}
           end,
    {RevEs,A,B,C} = lists:foldl(Step, {[],A0,B0,C0}, Es),
    {lists:reverse(RevEs),A,B,C}.
%% illegal_code_error(LineNumber, Error) -> no_return().
%%  Abort translation with an illegal-code error for this line.
illegal_code_error(Line, Error) ->
    erlang:error({illegal_code,Line,Error}).
-module(rstar).
-export([new/1, new/2, insert/2, delete/2, search_within/2, search_nearest/3, search_around/3]).
-export_type([rtree/0, geometry/0]).
-include("../include/rstar.hrl").
% Expose type references
-type rtree() :: #rtree{}.
-type geometry() :: #geometry{}.
% Returns a new empty R* tree of the specified dimensionality and
% default parameters.
-spec new(integer()) -> rtree() | {error, badarg}.
new(Dimensions) -> new(Dimensions, #rt_params{}).
% Returns a new empty R* tree of the specified dimensionality and
% parameters. Dimensionality must be at least 1.
-spec new(integer(), #rt_params{}) -> rtree() | {error, badarg}.
new(Dimensions, _) when Dimensions < 1 -> {error, badarg};
new(Dimensions, Params) ->
    % The root starts as a single empty leaf placed at the origin.
    RootGeo = rstar_geometry:origin(Dimensions),
    Root = RootGeo#geometry{value=#leaf{}},
    #rtree{dimensions=Dimensions, params=Params, root=Root}.
% Inserts a new geometric point into the R* tree and
% returns a new tree. Returns {error, dimensionality} when the
% geometry's dimensionality does not match the tree's.
-spec insert(rtree(), geometry()) -> rtree() | {error, dimensionality}.
insert(#rtree{dimensions=TD}, #geometry{dimensions=GD}) when TD =/= GD ->
    {error, dimensionality};
insert(Tree, Geometry) ->
    % The insert module returns the (possibly re-split) new root.
    NewRoot = rstar_insert:insert(Tree#rtree.params, Tree#rtree.root, Geometry),
    Tree#rtree{root=NewRoot}.
% Removes a geometric point from the R* tree and
% returns a new tree. Returns not_found when the geometry is not in the
% tree, and {error, dimensionality} when its dimensionality does not
% match the tree's.
% Fix: the spec previously omitted the {error, dimensionality} return
% produced by the first clause.
-spec delete(rtree(), geometry()) -> not_found | rtree() | {error, dimensionality}.
delete(#rtree{dimensions=TD}, #geometry{dimensions=GD}) when TD =/= GD ->
    {error, dimensionality};
delete(Tree, Geometry) ->
    % Rebuild the tree record only when the root actually changed.
    case rstar_delete:delete(Tree#rtree.params, Tree#rtree.root, Geometry) of
        not_found -> not_found;
        NewRoot -> Tree#rtree{root=NewRoot}
    end.
% Searches for the geometries contained in or intersecting
% the given geometry. Returns {error, dimensionality} when the query
% geometry's dimensionality does not match the tree's.
% Fix: the spec previously omitted the {error, dimensionality} return
% produced by the first clause.
-spec search_within(rtree(), geometry()) -> [geometry()] | {error, dimensionality}.
search_within(#rtree{dimensions=TD}, #geometry{dimensions=GD}) when TD =/= GD ->
    {error, dimensionality};
search_within(Tree, Geometry) ->
    rstar_search:search_within(Tree#rtree.root, Geometry).
% Searches for the K-nearest neighbors to the given
% geometry. Returns {error, dimensionality} on a dimensionality
% mismatch, and {error, badarg} when K is not positive.
% Fix: the spec previously omitted both error returns produced by the
% guard clauses.
-spec search_nearest(rtree(), geometry(), integer()) ->
        [geometry()] | {error, dimensionality} | {error, badarg}.
search_nearest(#rtree{dimensions=TD}, #geometry{dimensions=GD}, _) when TD =/= GD ->
    {error, dimensionality};
search_nearest(_, _, Nearest) when Nearest =< 0 ->
    {error, badarg};
search_nearest(Tree, Geometry, K) ->
    rstar_search:search_near(Tree#rtree.root, Geometry, K).
% Searches for the geometries contained or intersecting
% the given geometry
-spec search_around(rtree(), geometry(), float()) -> [geometry()].
search_around(#rtree{dimensions=TD}, #geometry{dimensions=GD}, _) when TD =/= GD ->
{error, dimensionality};
search_around(Tree, Geometry, Distance) ->
% Build and expanded geometry around the point
NewMBR = [{Min - Distance, Max + Distance} || {Min, Max} <- Geometry#geometry.mbr],
NewGeo = Geometry#geometry{mbr=NewMBR},
% Perform a search_within
PrimaryResults = rstar_search:search_within(Tree#rtree.root, NewGeo),
% Apply a secondary filter
lists:filter(fun (R) ->
rstar_geometry:distance(R, Geometry) =< Distance
end, PrimaryResults). | src/rstar.erl | 0.651133 | 0.720786 | rstar.erl | starcoder |
-module(ecc_curves).
-export([execute_ecdh/0]).
-record(point, {x, y}).
%% Walk through an ECDH key agreement over secp256k1, printing each
%% step to stdout. Educational only: the private keys are hard-coded
%% and the scalar multiplication is a naive repeated point addition.
%% Fix: corrected the printed typo "knwon" -> "known" and the
%% misspelled local variable AliceSharedSymetricKey.
execute_ecdh() ->
    io:fwrite("An example of Elliptic Curve Diffie-Hellman over secp256k1~n"),
    io:fwrite("==========================================================~n"),
    io:fwrite("THIS IMPLEMENTATION OF THIS ALGORITHM IS NOT SECURE~n"),
    io:fwrite("IT IS JUST FOR EDUCATIONAL PURPOSES~n"),
    io:fwrite("=========================================================="),
    io:fwrite("~n~nElliptic Curve Diffie-Hellman Example:~n~n"),
    io:fwrite("First get a generator G: "),
    G = get_generator(rand:uniform(10000)),
    io:fwrite("~p~n~nThis generator will be known by all integrating parties. ", [G]),
    io:fwrite("Alice and Bob generate a PrivateKey called Alpha. It will be used to derive a PublicKey.~n"),
    AliceAlpha = 16, %% 16 => 4 rounds -> 1+1 => 2+2 => 4+4 => 8+8
    BobAlpha = 32, %% It doesn't need to be the same as Alice's Alpha
    io:fwrite("PublicKey derive will be calculated as follows:~n-Alice -> AliceAlpha*G~n-Bob -> BobAlpha*G~n"),
    AlicePK = get_coordinates(G, 1, AliceAlpha), %% Starting by 1*G, calculate AliceAlpha*G. This will be Alice's PublicKey
    BobPK = get_coordinates(G, 1, BobAlpha), %% Bob also calculates his own PublicKey. This value is sent to Alice
    io:fwrite("-AlicePK -> ~p(~p, ~p) = (~p, ~p)~n-BobPK -> ~p(~p, ~p) = (~p, ~p)~n~n", [AliceAlpha, G#point.x, G#point.y, AlicePK#point.x, AlicePK#point.y, BobAlpha, G#point.x, G#point.y, BobPK#point.x, BobPK#point.y]),
    %% Each side multiplies the other's public key by its own secret;
    %% commutativity makes both products equal.
    BobSharedSymmetricKey = get_coordinates(AlicePK, 1, BobAlpha),
    AliceSharedSymmetricKey = get_coordinates(BobPK, 1, AliceAlpha),
    io:fwrite("Alice will receive Bob's PublicKey and vice-versa. After that, over Alice's point, Bob will calculate BobAlpha*AlicePK and Alice will calculate AliceAlpha*BobPK~n"),
    io:fwrite("Because of multiplication's commutative property, both Alice and Bob will reach the same Symmetric key.~n~n- BobAlpha * AlicePK = BobAlpha * (AliceAlpha*G)~n- AliceAlpha * BobPK = AliceAlpha*(BobAlpha*G)~n- AliceAlpha*(BobAlpha*G) = BobAlpha*(AliceAlpha*G)~n"),
    io:fwrite("~n~p == ~p (~p)~n", [AliceSharedSymmetricKey#point.x, BobSharedSymmetricKey#point.x, AliceSharedSymmetricKey#point.x == BobSharedSymmetricKey#point.x]).
%% Derive a generator point from the random seed X via the project's
%% curve math module.
get_generator(X) ->
    ecc_math:generate(X).
get_coordinates(G, Nmr, SK) when Nmr == SK -> G;
get_coordinates(G, Nmr, SK) ->
%% SK is a number. G is a point in the graph
%% Generate a point in SK
get_coordinates(ecc_math:single_point_addition(G, curve_secp256k1_derivative), Nmr + Nmr, SK). | src/ecc_curves.erl | 0.524638 | 0.436862 | ecc_curves.erl | starcoder |
-module(mzbl_loop).
-compile({inline, [msnow/0, eval_rates/7, time_of_next_iteration/3, batch_size/4]}).
-export([eval/5]).
% For Common and EUnit tests
-export([time_of_next_iteration/3, msnow/0]).
-define(MAXSLEEP, 1000).
-define(DEFAULT_MAX_BATCH, 1000000).
-define(MSEC_in_SEC, 1000).
-include("mzbl_types.hrl").
-record(const_rate, {
rate_fun = undefined :: fun((State :: term()) -> {Rate :: undefined | number(), NewState :: term()}),
value = undefined :: undefined | number()
}).
-record(linear_rate, {
from_fun = undefined :: fun((State :: term()) -> {Rate :: undefined | number(), NewState :: term()}),
to_fun = undefined :: fun((State :: term()) -> {Rate :: undefined | number(), NewState :: term()}),
from = undefined :: undefined | number(),
to = undefined :: undefined | number()
}).
-record(opts, {
spawn = false :: true | false,
iterator = undefined :: undefined | string(),
parallel = 1 :: pos_integer(),
poisson = false :: true | false,
while = [] :: list()
}).
%% Evaluate a loop statement. LoopSpec carries the loop options (time,
%% rate/think_time/comb/ramp, parallel, spawn, iterator, poisson,
%% while); Body is the expression list evaluated on every iteration.
%% Returns {nil, NewWorkerState}.
-spec eval([proplists:property()], [script_expr()], worker_state(), worker_env(), module())
    -> {script_value(), worker_state()}.
eval(LoopSpec, Body, State, Env, WorkerProvider) ->
    % Evaluator returns fun for evaluation of specific expression
    % For example:
    %   F = Evaluator(Expr),
    %   {Res1, State2} = F(State1), % evaluates expression Expr
    %   {Res2, State3} = F(State2)  % evaluates expression Expr again
    Evaluator =
        fun (Expr) ->
            fun (S) ->
                {Value, NewS} = mzbl_interpreter:eval(Expr, S, Env, WorkerProvider),
                case mzbl_literals:convert(Value) of
                    #constant{value = R} -> {R, NewS};
                    R -> {R, NewS}
                end
            end
        end,
    % ArgEvaluator returns fun for finding specific operation in list
    % and evaluation of it's args
    % For example:
    %   Specs = [#operation{name = rate, args = RateArgs}, #operation{name = time, args = TimeArgs}],
    %   F = ArgEvaluator(time, Specs, undefined),
    %   {ArgsRes, State2} = F(State1) % Evaluates TimeArgs and returns results as ArgsRes
    %
    % We use these funs in order to reevaluate some key loop parameters (like
    % rate) while executing loop because vars values could be changed at any
    % moment. So, for instance, instead of passing Rate to a loop function, we
    % pass function that evaluates Rate as soon as we need it.
    ArgEvaluator =
        fun (Name, Spec, Default) ->
            case lists:keyfind(Name, #operation.name, Spec) of
                false -> fun (S) -> {Default, S} end;
                #operation{args = [Expr]} -> Evaluator(Expr)
            end
        end,
    TimeFun = ArgEvaluator(time, LoopSpec, undefined),
    {Iterator, State1} = (ArgEvaluator(iterator, LoopSpec, undefined))(State),
    {ProcNum, State2} = (ArgEvaluator(parallel, LoopSpec, 1))(State1),
    {Spawn, State3} = (ArgEvaluator(spawn, LoopSpec, false))(State2),
    {Poisson, State4} = (ArgEvaluator(poisson, LoopSpec, false))(State3),
    Asserts = mzbl_ast:find_operation_and_extract_args(while, LoopSpec, []),
    Opts = #opts{
        spawn = Spawn,
        iterator = Iterator,
        parallel = ProcNum,
        poisson = Poisson,
        while = Asserts
    },
    % Dispatch on the rate description found in the loop spec.
    case mzbl_ast:find_operation_and_extract_args(rate, LoopSpec, [#constant{value = undefined, units = rps}]) of
        % Plain constant rate (or unbounded when no rate was given).
        [#constant{value = _, units = _} = RPS] ->
            RPSFun = Evaluator(RPS),
            looprun(TimeFun, #const_rate{rate_fun = RPSFun}, Body, WorkerProvider, State4, Env, Opts);
        % think_time: alternate 1s bursts at Rate with idle think periods.
        [#operation{name = think_time,
                    args = [#constant{units = _} = ThinkTime,
                            #constant{units = _} = Rate]}] ->
            RPSFun1 = Evaluator(Rate),
            PeriodFun1 = fun (S) -> {1000, S} end,
            RPSFun2 = fun (S) -> {0, S} end,
            PeriodFun2 = Evaluator(ThinkTime),
            superloop(TimeFun, [RPSFun1, PeriodFun1, RPSFun2, PeriodFun2], Body,
                      WorkerProvider, State4, Env, Opts);
        % comb: an explicit alternating sequence of rate/period pairs.
        [#operation{name = comb, args = RatesAndPeriods}] ->
            RatesAndPeriodsFuns = [Evaluator(E) || E <- RatesAndPeriods],
            superloop(TimeFun, RatesAndPeriodsFuns, Body, WorkerProvider, State4, Env, Opts);
        % ramp: rate interpolated linearly from From to To over the loop.
        [#operation{name = ramp, args = [
            linear,
            #constant{value = _, units = _} = From,
            #constant{value = _, units = _} = To]}] ->
            looprun(TimeFun, #linear_rate{from_fun = Evaluator(From), to_fun = Evaluator(To)}, Body, WorkerProvider, State4, Env, Opts)
    end.
-spec msnow() -> integer().
%% Wall-clock milliseconds since the Unix epoch.
msnow() ->
    {Mega, Sec, Micro} = os:timestamp(),
    (Mega * 1000000 + Sec) * 1000 + Micro div 1000.
-spec time_of_next_iteration(#const_rate{} | #linear_rate{}, number(), float())
    -> number().
%% Millisecond offset from loop start at which iteration number
%% IterationNumber should run, for the given rate description.
time_of_next_iteration(#const_rate{value = undefined}, _, _) -> 0;
time_of_next_iteration(#const_rate{value = 0}, Duration, _) -> Duration * 2; % should be more than loop length; "2" does not stand for anything important
time_of_next_iteration(#const_rate{value = 0.0}, Duration, _) -> Duration * 2;
time_of_next_iteration(#const_rate{value = Rate}, _Duration, IterationNumber) ->
    (IterationNumber * ?MSEC_in_SEC) / Rate;
time_of_next_iteration(#linear_rate{from = F, to = T}, Duration, _) when F == 0, T == 0 -> Duration * 2; % we want to match 0 and 0.0 hence the guard usage
time_of_next_iteration(#linear_rate{from = Rate1, to = Rate2}, _, IterationNumber) when Rate1 == Rate2 ->
    (IterationNumber * ?MSEC_in_SEC) / Rate1;
time_of_next_iteration(#linear_rate{from = StartRPS, to = FinishRPS}, RampDuration, IterationNumber) ->
    % This function solves the following equation for Elapsed:
    %
    % Use linear interpolation for y0 = StartRPS, y1 = FinishRPS, x0 = 0, x1 = RampDuration
    % f(x) = StartRPS + (FinishRPS - StartRPS) * x / Duration, where x - time from start, y - RPS at moment x
    %
    % IterationNumber = Elapsed * (StartRPS + f(Elapsed)) / 2
    %
    % Expanding linear interpolation:
    %
    % IterationNumber = (FinishRPS - StartRPS) * Elapsed ^ 2 / (2 * RampDuration) + StartRPS * Elapsed
    %
    % Solve quadratic equation and choose positive solution
    %
    % Elapsed = (sqrt((StartRPS * Time) ^ 2 + 2 * (FinishRPS - StartRPS) * IterationNumber * RampDuration) - StartRPS * RampDuration) / (FinishRPS - StartRPS)
    %
    % Please note that we could calculate time for the next iteration which could be out of boundary if FinishRPS < StartRPS.
    % To avoid this we use discriminant = 0. Returned value will be higher than RampDuration in this case
    DRPS = FinishRPS - StartRPS,
    Time = RampDuration / 1000,
    Discriminant = max(0, StartRPS * StartRPS * Time * Time + 2 * IterationNumber * DRPS * Time),
    1000 * (math:sqrt(Discriminant) - StartRPS * Time)
        / DRPS.
%% Cycle through the [RateFun, PeriodFun, ...] pairs in Rates until the
%% time budget from TimeFun is spent. Each pair runs as an inner
%% looprun; the elapsed inner time is subtracted from the remaining
%% budget before recursing on the rotated pair list.
superloop(TimeFun, Rates, Body, WorkerProvider, State, Env, Opts) ->
    case TimeFun(State) of
        {Time, NewState} when Time =< 0 -> {nil, NewState};
        {Time, NewState} when Rates == [] ->
            % No rates at all: just wait out the remaining time.
            timer:sleep(Time),
            {nil, NewState};
        {_, NewState} ->
            [PRateFun, PTimeFun | Tail] = Rates,
            LocalStart = msnow(),
            looprun(PTimeFun, #const_rate{rate_fun = PRateFun}, Body, WorkerProvider, NewState, Env, Opts),
            LoopTime = msnow() - LocalStart,
            % Shrink the outer budget by however long the inner loop ran.
            NewTimeFun =
                fun (S) ->
                    {T, NewS} = TimeFun(S),
                    {T - LoopTime, NewS}
                end,
            superloop(NewTimeFun, Tail ++ [PRateFun, PTimeFun],
                      Body, WorkerProvider, NewState, Env, Opts)
    end.
%% Run the timed loop on the current process (parallel == 1) or fan it
%% out over N pmap workers, each phase-shifted by its index plus a
%% random fraction so iterations interleave evenly.
%% NOTE(review): uses the deprecated 'random' module and now/0 for
%% per-process seeding; left as-is because timerun/14 also draws from
%% 'random' and depends on each worker being seeded here.
looprun(TimeFun, Rate, Body, WorkerProvider, State, Env, Opts = #opts{parallel = 1}) ->
    timerun(msnow(), random:uniform(), TimeFun, Rate, Body, WorkerProvider, Env, true, Opts, 1, State, 0, 0, {0, 0});
looprun(TimeFun, Rate, Body, WorkerProvider, State, Env, Opts = #opts{parallel = N}) ->
    StartTime = msnow(),
    _ = mzb_lists:pmap(fun (I) ->
            _ = random:seed(now()),
            timerun(StartTime, I + random:uniform(), TimeFun, Rate, Body, WorkerProvider, Env, true, Opts, 1, State, 0, 0, {0, I})
        end, lists:seq(0, N - 1)),
    {nil, State}.
%% One pass of the pacing loop: re-check the while-condition, absorb a
%% pending run_command message, re-evaluate the rate, sleep until the
%% next scheduled iteration, then execute a batch of Body evaluations
%% and recurse with updated counters.
timerun(Start, Shift, TimeFun, Rate, Body, WorkerProvider, Env, IsFirst, Opts, Batch, OldState, OldDone, OldIter, OldRun) ->
    case mzbl_asserts:check_loop_expr(Opts#opts.while, [{Opts#opts.iterator, OldIter}|Env]) of
        false -> {nil, OldState};
        _ ->
            % Out-of-band command injection: run it against the current
            % state before continuing.
            State = receive
                        {run_command, AST} ->
                            {_, NewState} = mzbl_interpreter:eval(AST, OldState, Env, WorkerProvider),
                            NewState
                    after 0 -> OldState
                    end,
            LocalTime = msnow() - Start,
            {Time, State1} = TimeFun(State),
            {NewRate, Done, State2, NewRun} = eval_rates(Rate, OldDone, LocalTime, Time, State1, Opts#opts.parallel, OldRun),
            % When should iteration (Done + Shift) start, and how long
            % may we sleep without overrunning the loop's time budget?
            ShouldBe = time_of_next_iteration(NewRate, Time, Done + Shift),
            Remain = round(ShouldBe) - LocalTime,
            GotTime = round(Time) - LocalTime,
            NeedToSleep = max(0, min(Remain, GotTime)),
            Sleep =
                case Opts#opts.poisson of
                    % Poisson arrivals: exponentially distributed gap.
                    true -> max(0, min(round(-Remain * math:log(random:uniform())), GotTime));
                    false -> NeedToSleep
                end,
            case Sleep > ?MAXSLEEP of
                true ->
                    % Sleep in bounded slices so while/run_command checks
                    % still happen regularly.
                    timer:sleep(?MAXSLEEP),
                    timerun(Start, Shift, TimeFun, NewRate, Body, WorkerProvider, Env, IsFirst, Opts, Batch, State2, Done, OldIter, NewRun);
                false ->
                    Sleep > 0 andalso timer:sleep(Sleep),
                    case Time =< LocalTime + Sleep of
                        true -> {nil, State2};
                        false ->
                            BatchStart = msnow(),
                            Iterator = Opts#opts.iterator,
                            Step = Opts#opts.parallel,
                            NextState =
                                case Opts#opts.spawn of
                                    false ->
                                        case Iterator of
                                            undefined -> k_times(Body, WorkerProvider, Env, State2, Batch);
                                            _ -> k_times_iter(Body, WorkerProvider, Iterator, Env, Step, State2, OldIter + erlang:trunc(Shift), Batch)
                                        end;
                                    true ->
                                        k_times_spawn(Body, WorkerProvider, Iterator, Env, Step, State2, OldIter + erlang:trunc(Shift), Batch)
                                end,
                            BatchEnd = msnow(),
                            % Adapt the batch size from the measured batch
                            % duration (skipped on the warm-up pass).
                            NewBatch = case IsFirst of
                                           true -> Batch;
                                           false -> batch_size(BatchEnd - BatchStart, GotTime, NeedToSleep, Batch)
                                       end,
                            timerun(Start, Shift, TimeFun, NewRate, Body, WorkerProvider, Env, false, Opts, NewBatch, NextState, Done + Step*Batch, OldIter + Step*Batch, NewRun)
                    end
            end
    end.
%% Re-evaluate the rate function(s) and, when the rate changed (or at
%% most once a minute), resynchronise the "done iterations" counter so
%% future pacing matches the new rate. Returns
%% {NewRateState, NewDone, NewWorkerState, {RunNo, DoneAtLastUpdate}}.
eval_rates(#const_rate{rate_fun = F, value = Prev} = RateState, Done, CurTime, _Time, State, Step, {OldRun, DoneLastUpdate}) ->
    {Rate, State1} = F(State),
    % If rate changes we have to change number of "done" iterations accordingly
    % Also we need to reset done counter every minute or so
    % to make sure we don't generate more load than we were asked for
    % It happens after periods of time when we were unable to maintain
    % needed rate (for some external reasons)
    NewRun = CurTime div (60*?MSEC_in_SEC),
    NewDone =
        case {Rate, Prev} of
            {undefined, _} -> Done;
            {Prev, _} when (NewRun =< OldRun) orelse (Done < DoneLastUpdate + 10 * Step) -> Done;
            {Prev, _} ->
                % Same rate, periodic resync: never move the counter back.
                ND = Prev * CurTime / ?MSEC_in_SEC,
                case ND > Done + Step of
                    true -> ND;
                    false -> Done
                end;
            {New, _} -> (New * CurTime / ?MSEC_in_SEC) % It's important not to round done counter here
        end,
    NewDoneLastUpdate =
        case NewDone == Done of
            true -> DoneLastUpdate;
            false -> NewDone
        end,
    {RateState#const_rate{value = Rate}, NewDone, State1, {NewRun, NewDoneLastUpdate}};
eval_rates(#linear_rate{from_fun = FFun, to_fun = ToFun, from = OldF, to = OldT} = RateState, Done, CurTime, Time, State, Step, {OldRun, DoneLastUpdate}) ->
    {F, State1} = FFun(State),
    {T, State2} = ToFun(State1),
    % Same resynchronisation scheme as the constant-rate clause above,
    % but "done" is the area under the linear ramp up to CurTime.
    NewRun = CurTime div (60*?MSEC_in_SEC),
    NewDone =
        case {F, T} of
            {OldF, OldT} when (NewRun =< OldRun) orelse (Done < DoneLastUpdate + 10 * Step) -> Done;
            {OldF, OldT} ->
                ND = F * CurTime/?MSEC_in_SEC + (T - F) * CurTime * CurTime / (2 * ?MSEC_in_SEC * Time),
                case ND > Done + Step of
                    true -> ND;
                    false -> Done
                end;
            {_, _} when OldF == undefined -> Done;
            {_, _} ->
                % Calculating area under new ramp graph (which is trapezium)
                % Kinematic equations also could be used (S = v0*t + a*t^2/2)
                %
                % It's important not to round done counter here
                F * CurTime/?MSEC_in_SEC + (T - F) * CurTime * CurTime / (2 * ?MSEC_in_SEC * Time)
        end,
    NewDoneLastUpdate =
        case NewDone == Done of
            true -> DoneLastUpdate;
            false -> NewDone
        end,
    {RateState#linear_rate{from = F, to = T}, NewDone, State2, {NewRun, NewDoneLastUpdate}}.
%% Adapt the iteration batch size from how long the last batch took
%% (BatchTime ms), how much loop time is left, and how long we slept.
batch_size(BatchTime, TimeLeft, Sleep, Batch) ->
    %% Upper bound: a single batch should never run longer than 1 sec.
    MaxBatch =
        if
            BatchTime =:= 0 -> ?DEFAULT_MAX_BATCH;
            true -> max(Batch * ?MSEC_in_SEC div BatchTime, 1)
        end,
    IterTime = max(0, BatchTime div Batch),
    Resized =
        if
            %% The batch eats too large a share of the remaining time.
            BatchTime * 4 > TimeLeft ->
                Batch div 2 + 1;
            %% Never slept, so we are behind the target rate: grow.
            Sleep =:= 0 andalso Batch < MaxBatch ->
                Batch + Batch div 2 + 1;
            %% Sleeping far longer than one iteration costs: shrink.
            Sleep > 2 * IterTime ->
                max(Batch - Batch div 2 - 1, 1);
            true ->
                Batch
        end,
    min(Resized, MaxBatch).
%% Evaluate Body Remaining times, threading the worker state through.
k_times(_Expr, _Provider, _Env, State, 0) ->
    State;
k_times(Expr, Provider, Env, State, Remaining) ->
    {_Value, NextState} = mzbl_interpreter:eval(Expr, State, Env, Provider),
    k_times(Expr, Provider, Env, NextState, Remaining - 1).
%% Like k_times/5 but binds the iterator variable IterName to Iter for
%% each pass, advancing it by Step between passes.
k_times_iter(_Expr, _Provider, _IterName, _Env, _Step, State, _Iter, 0) ->
    State;
k_times_iter(Expr, Provider, IterName, Env, Step, State, Iter, Remaining) ->
    {_Value, NextState} = mzbl_interpreter:eval(Expr, State, [{IterName, Iter} | Env], Provider),
    k_times_iter(Expr, Provider, IterName, Env, Step, NextState, Iter + Step, Remaining - 1).
k_times_spawn(_, _, _, _, _, S, _, 0) -> S;
k_times_spawn(Expr, Provider, I, Env, Step, S, Iter, N) ->
spawn_link(fun() -> mzbl_interpreter:eval(Expr, S, [{I, Iter}|Env], Provider) end),
k_times_iter(Expr, Provider, I, Env, Step, S, Iter + Step, N-1). | common_apps/mzbench_language/src/mzbl_loop.erl | 0.547464 | 0.446615 | mzbl_loop.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @copyright 2018 <NAME> <<EMAIL>>
%%
%% @doc 'Species1' rules implementation module for 'cgolam' app.
%%
%% This is SIMILAR to the coloured version of Conway's Game of
%% Life, but here the different colours are relabelled species
%% as they are broadly UNcooperative.
%%
%% The calculation of a cells new 'alive' state is done as follows...
%%
%% 1. The surrounding eight cells, and the mid cell, are examined
%% and all the unique colours (up to nine) are determined. What is
%% considered unique depends on a tolerance, based on a bitmap, so
%% a number of lower order bits may be discarded for the purpose of
%% this comparison.
%%
%% 2. For each unique colour, CGoL (Conway's Game of life) rules
%% are applied, counting only those surrounding cells which are
%% the same colour (given the above mentioned tolerance).
%%
%% 3. If the CGoL rules return dead for all colours, the cell
%% becomes dead.
%%
%% 4. If the CGoL rules return alive for one or more colours and
%% one of the colours is the same (given the above mentioned
%% tolerance) as the mid cell, then the cell remains unchanged.
%%
%% 5. If the CGoL rules return alive for one or more colours and
%% the mid cell is dead or none of the colours is the same (given
%% the above mentionedtolerance) as the mid cell, then the new
%% cell colour is calculated by averaging the colours of the
%% surrounding cells (for which the CGoL rules return alive)
%% (these cells are chosen using the tolerance mentioned above
%% but the actual unchanged cell colours are used in the average).
%% The brightness level of the resultant calculated average
%% colour will be adjusted (all RGB components multipled by a
%% common factor) so that at least one component is 255.
%%
%% With this algorithm, any lone pure colour should behave
%% entirely like the standard CGoL, but when colours collide it
%% gets more interesting.
-module(cgolam_rules_species1).
-behaviour(cgolam_rules).
-export([new/1, calc/4, init/4]).
-record(cgolam_rules_species1, {
field_mod :: module(),
colmatch_bm :: integer()
}).
-type cgolam_rules_species1() :: cgolam_rules:rules() .
-export_type([cgolam_rules_species1/0]).
%% @private
%% Build the rules state from the configuration proplist: the field
%% module used to read/write cells, and the bitmask deciding when two
%% colours count as the same species (default 16#80, i.e. only the top
%% bit of each RGB component is compared).
-spec new
	(RulesModCfg :: list()) ->
		cgolam_rules_species1() .
new(RulesModCfg) ->
    % The field option may carry an extra config element; only the
    % module name is needed here.
    FieldMod = case lists:keysearch(field, 1, RulesModCfg) of
        {value, {field, M}} -> M;
        {value, {field, M, _C}} -> M
    end,
    ColMatchBitmask = case lists:keysearch(colmatch_bitmask, 1, RulesModCfg) of
        {value, {colmatch_bitmask, BM}} -> BM;
        false -> 16#80
    end,
    #cgolam_rules_species1{
        field_mod = FieldMod,
        colmatch_bm = ColMatchBitmask
    }.
%% @private
%% Compute the next state of the cell at (X, Y): collect the colours of
%% the eight neighbours, partition them into species with the
%% colour-match bitmask, run Conway's rules once per species, and
%% resolve the winner. Returns false (dead) or {col, RGB}.
-spec calc
	(Rules :: cgolam_rules_species1(), Field :: cgolam_field:field(), X :: integer(), Y :: integer()) ->
		CellState :: term() .
calc(#cgolam_rules_species1{
        field_mod = FieldMod,
        colmatch_bm = ColMatchBitmask
    }, Field, X, Y) ->
    % Colours of the live cells among the eight neighbours.
    SurroundingRGBs = lists:foldl(
        fun ({Xdiff, Ydiff}, SurroundingRGBsAcc) ->
            case FieldMod:get(Field, X + Xdiff, Y + Ydiff) of
                false -> SurroundingRGBsAcc;
                {col, RGB} -> [RGB | SurroundingRGBsAcc]
            end
        end,
        [],
        [{-1, -1}, {0, -1}, {1, -1},
         {-1, 0}, {1, 0},
         {-1, 1}, {0, 1}, {1, 1}]
    ),
    MidCol = FieldMod:get(Field, X, Y),
    MidRGB = case MidCol of
        false -> false;
        {col, RGB} -> RGB
    end,
    % One representative colour per species, the live mid cell included.
    UniqueRGBs = lists:foldl(
        fun (RGB, UniqueRGBsAcc) ->
            Repeat = lists:any(
                fun (UniqueRGB) -> tolerated_same(RGB, UniqueRGB, ColMatchBitmask) /= false end,
                UniqueRGBsAcc
            ),
            if Repeat -> UniqueRGBsAcc; true -> [RGB | UniqueRGBsAcc] end
        end,
        [],
        case MidCol of
            {col, MidRGB} -> [MidRGB | SurroundingRGBs];
            false -> SurroundingRGBs
        end
    ),
    % Species for which the Conway rules yield a live cell, paired with
    % the neighbour colours that matched them; MidRGBContending notes
    % whether the mid cell's own species is among the survivors.
    {ContendingRGBs, MidRGBContending} = lists:foldl(
        fun (RGB, {ContendingRGBsAcc, MidCellRGBContending}) ->
            SurroundingSameRGBs = [
                SurroundingRGB ||
                SurroundingRGB <- SurroundingRGBs,
                tolerated_same(SurroundingRGB, RGB, ColMatchBitmask) /= false
            ],
            case cgol_rule(length(SurroundingSameRGBs)) of
                true ->
                    {[{RGB, SurroundingSameRGBs} | ContendingRGBsAcc], MidCellRGBContending};
                false ->
                    {ContendingRGBsAcc, MidCellRGBContending};
                unchanged when MidRGB == false ->
                    % "Survive" means nothing for a dead mid cell.
                    {ContendingRGBsAcc, MidCellRGBContending};
                unchanged ->
                    case tolerated_same(MidRGB, RGB, ColMatchBitmask) of
                        false -> {ContendingRGBsAcc, MidCellRGBContending};
                        _Alive -> {[{RGB, SurroundingSameRGBs} | ContendingRGBsAcc], true}
                    end
            end
        end,
        {[], false},
        UniqueRGBs
    ),
    if ContendingRGBs == [] ->
        % No species survives: the cell dies.
        false;
    MidRGBContending ->
        % The mid cell's own species survives: leave it unchanged.
        MidCol;
    true ->
        % New colour: average all contributing neighbour colours, then
        % scale to full brightness.
        {col, adjust_brightness(
            merge_cellstates(
                lists:flatten(
                    [Contribs || {_RGB, Contribs} <- ContendingRGBs]
                )
            )
        )}
    end.
%% @private
%% Compare two RGB triples after masking each component with Mask.
%% Returns the masked triple when every component agrees, false
%% otherwise (including for non-triple arguments).
tolerated_same({Ra, Ga, Ba}, {Rb, Gb, Bb}, Mask) ->
    case {Ra band Mask, Ga band Mask, Ba band Mask} of
        {MR, MG, MB} when MR =:= Rb band Mask,
                          MG =:= Gb band Mask,
                          MB =:= Bb band Mask ->
            {MR, MG, MB};
        _ ->
            false
    end;
tolerated_same(_A, _B, _Mask) ->
    false.
%% @private
%% Standard Conway outcome keyed on the live-neighbour count: more than
%% 3 or fewer than 2 kills, exactly 3 births, exactly 2 keeps as-is.
cgol_rule(N) when N > 3 -> false;
cgol_rule(N) when N < 2 -> false;
cgol_rule(N) when N == 3 -> true;
cgol_rule(N) when N == 2 -> unchanged.
%% @private
%% Average a non-empty list of RGB triples component-wise, truncating
%% each mean to an integer.
merge_cellstates([_ | _] = RGBs) ->
    {SumR, SumG, SumB, Count} =
        lists:foldl(fun ({R, G, B}, {AccR, AccG, AccB, N}) ->
                            {AccR + R, AccG + G, AccB + B, N + 1}
                    end, {0, 0, 0, 0}, RGBs),
    {trunc(SumR / Count), trunc(SumG / Count), trunc(SumB / Count)}.
%% Scale an RGB triple so its largest component becomes 255; pure black
%% is returned untouched. adjust_brightness/2 applies a given factor to
%% every component, truncating to integers.
adjust_brightness({0, 0, 0}) ->
    {0, 0, 0};
adjust_brightness({R, G, B}) ->
    Peak = lists:max([R, G, B]),
    adjust_brightness({R, G, B}, 255 / Peak).

adjust_brightness({R, G, B}, Factor) ->
    {trunc(R * Factor), trunc(G * Factor), trunc(B * Factor)}.
%% @private
-spec init
(Rules :: cgolam_rules_species1(), Field0 :: cgolam_field:field(), Type :: atom(), InitCfg :: list()) ->
Field1 :: cgolam_field:field() .
init(#cgolam_rules_species1{field_mod=FieldMod}, Field0, default, InitCfg) ->
Width = FieldMod:width(Field0),
Height = FieldMod:height(Field0),
Clusters = case lists:keysearch(clusters, 1, InitCfg) of
{value, {clusters, I}} -> I;
false -> 3
end,
ClusterSizeCfg = case lists:keysearch(cluster_size, 1, InitCfg) of
{value, {cluster_size, CSC}} -> CSC / 100;
false -> 1
end,
ClusterDensityCfg = case lists:keysearch(cluster_density, 1, InitCfg) of
{value, {cluster_density, CDC}} -> CDC / 100;
false -> 1
end,
ClusterCols = [{255,0,0}, {0,255,0}, {0,0,255}, {255,255,0}, {255,0,255}, {0,255,255}],
ClusterSize = trunc(math:sqrt(Width * Height) / 2 * ClusterSizeCfg),
ClusterDensity = trunc(ClusterSize * ClusterSize / 20 * ClusterDensityCfg),
{Field1, _RemainingCols} = lists:foldl(
fun (_, {Field01Acc, [Col | ClusterCols01AccT]}) ->
% per cluster, accumulator is field and depleting colour selection
ClusterX = trunc(rand:uniform(Width)),
ClusterY = trunc(rand:uniform(Height)),
Field01Acc2 = lists:foldl(
fun (_, Field02Acc) ->
% per cell in cluster,
CellX = trunc((rand:uniform()-0.5)*(rand:uniform()-0.5) * ClusterSize) + ClusterX,
CellY = trunc((rand:uniform()-0.5)*(rand:uniform()-0.5) * ClusterSize) + ClusterY,
FieldMod:set(Field02Acc, CellX, CellY, {col, Col})
end,
Field01Acc,
lists:seq(1, ClusterDensity)
),
{Field01Acc2, ClusterCols01AccT};
(_, {Field01Acc, []}) ->
% cycle round colours if run out
{Field01Acc, ClusterCols}
end,
{Field0, ClusterCols},
lists:seq(1, Clusters)
),
Field1
;
init(#cgolam_rules_species1{field_mod=FieldMod}, Field0, term, InitCfg) ->
{value, {set, InitTerm}} = lists:keysearch(set, 1, InitCfg),
lists:foldl(
fun ({{X, Y}, Col = {col, _RGB}}, Field0Acc) ->
FieldMod:set(Field0Acc, X, Y, Col)
end,
Field0,
InitTerm
)
. | src/cgolam_rules_species1.erl | 0.557123 | 0.620564 | cgolam_rules_species1.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%% Description : SSH 1/2 pdu elements encode/decode
-module(ssh_bits).
-include("ssh.hrl").
-export([encode/2]).
-export([mpint/1, string/1, name_list/1]).
-export([random/1]).
-define(name_list(X),
(fun(B) -> ?binary(B) end)(list_to_binary(name_concat(X)))).
%% Join a list of names (atoms or strings) into an iolist with ","
%% between consecutive entries; the last entry is not followed by one.
name_concat([]) -> [];
name_concat([Last]) -> name_to_chars(Last);
name_concat([Name | Rest]) ->
    [name_to_chars(Name), "," | name_concat(Rest)].

%% Render one name as a character list.
name_to_chars(Name) when is_atom(Name) -> atom_to_list(Name);
name_to_chars(Name) when is_list(Name) -> Name.
%% Encode a list of names (atoms or strings) as an SSH name-list: a
%% comma-separated string wrapped in a length-prefixed binary.
name_list(Ns) ->
    ?name_list(Ns).
%% Encode Str as a length-prefixed SSH string.
string(Str) ->
    ?string(Str).
%% MP representation (SSH2): 4-byte big-endian length followed by the
%% big-endian two's-complement magnitude bytes.
mpint(X) when X < 0 ->
    if X == -1 ->
            %% -1 is all sign bits: one 16#ff byte.
            <<0,0,0,1,16#ff>>;
       true ->
            mpint_neg(X,0,[])
    end;
mpint(X) ->
    if X == 0 ->
            %% Zero encodes as an empty payload.
            <<0,0,0,0>>;
       true ->
            mpint_pos(X,0,[])
    end.
%% Peel the low byte off the negative X each round (I counts bytes)
%% until only the sign word -1 remains. If the resulting MSB does not
%% already carry the sign bit, prepend a 16#ff pad byte so the encoded
%% value still reads as negative.
mpint_neg(-1,I,Ds=[MSB|_]) ->
    if MSB band 16#80 =/= 16#80 ->
            <<?UINT32((I+1)), (list_to_binary([255|Ds]))/binary>>;
       true ->
            (<<?UINT32(I), (list_to_binary(Ds))/binary>>)
    end;
mpint_neg(X,I,Ds) ->
    mpint_neg(X bsr 8,I+1,[(X band 255)|Ds]).
%% Peel the low byte off the positive X each round until 0 remains. If
%% the resulting MSB has the sign bit set, prepend a 0 pad byte so the
%% encoded value still reads as positive.
mpint_pos(0,I,Ds=[MSB|_]) ->
    if MSB band 16#80 == 16#80 ->
            <<?UINT32((I+1)), (list_to_binary([0|Ds]))/binary>>;
       true ->
            (<<?UINT32(I), (list_to_binary(Ds))/binary>>)
    end;
mpint_pos(X,I,Ds) ->
    mpint_pos(X bsr 8,I+1,[(X band 255)|Ds]).
%% Encode the values in List according to the parallel type list Types
%% (see enc/3) and flatten the result into a single binary.
encode(List, Types) ->
    list_to_binary(enc(List, Types)).
%%
%% Encode one value per type atom in Ts, threading the running byte
%% offset so {pad,N} entries can align the stream to an N-byte
%% boundary. Returns an iolist of the encoded fields.
%%
enc(Xs, Ts) ->
    enc(Xs, Ts, 0).

enc(Xs, [boolean|Ts], Offset) ->
    X = hd(Xs),
    [?boolean(X) | enc(tl(Xs), Ts, Offset+1)];
enc(Xs, [byte|Ts], Offset) ->
    X = hd(Xs),
    [?byte(X) | enc(tl(Xs), Ts,Offset+1)];
enc(Xs, [uint16|Ts], Offset) ->
    X = hd(Xs),
    [?uint16(X) | enc(tl(Xs), Ts,Offset+2)];
enc(Xs, [uint32 |Ts], Offset) ->
    X = hd(Xs),
    [?uint32(X) | enc(tl(Xs), Ts,Offset+4)];
enc(Xs, [uint64|Ts], Offset) ->
    X = hd(Xs),
    [?uint64(X) | enc(tl(Xs), Ts,Offset+8)];
enc(Xs, [mpint|Ts], Offset) ->
    Y = mpint(hd(Xs)),
    [Y | enc(tl(Xs), Ts,Offset+size(Y))];
enc(Xs, [string|Ts], Offset) ->
    X0 = hd(Xs),
    Y = ?string(X0),
    [Y | enc(tl(Xs),Ts,Offset+size(Y))];
enc(Xs, [string_utf8|Ts], Offset) ->
    X0 = hd(Xs),
    Y = ?string_utf8(X0),
    [Y | enc(tl(Xs),Ts,Offset+size(Y))];
enc(Xs, [binary|Ts], Offset) ->
    X0 = hd(Xs),
    Y = ?binary(X0),
    [Y | enc(tl(Xs), Ts,Offset+size(Y))];
enc(Xs, [name_list|Ts], Offset) ->
    X0 = hd(Xs),
    Y = ?name_list(X0),
    [Y | enc(tl(Xs), Ts, Offset+size(Y))];
enc(Xs, [cookie|Ts], Offset) ->
    %% cookie ignores its input value and emits 16 random bytes.
    [random(16) | enc(tl(Xs), Ts, Offset+16)];
enc(Xs, [{pad,N}|Ts], Offset) ->
    %% Insert zero bytes up to the next N-byte boundary; consumes no
    %% input value.
    K = (N - (Offset rem N)) rem N,
    [fill_bits(K,0) | enc(Xs, Ts, Offset+K)];
enc(Xs, ['...'| []], _Offset) ->
    %% Trailing raw data: passed through as-is, or dropped if undefined.
    X = hd(Xs),
    if is_binary(X) ->
            [X];
       is_list(X) ->
            [list_to_binary(X)];
       X==undefined ->
            []
    end;
enc([], [],_) ->
    [].
%%
%% Create a binary holding N copies of the byte C.
%%
fill_bits(N, C) ->
    list_to_binary(fill(N, C)).

%% Build an iolist of N copies of C by recursive doubling.
fill(0, _C) -> [];
fill(1, C) -> [C];
fill(N, C) ->
    Half = fill(N div 2, C),
    case N band 1 of
        0 -> [Half, Half];
        _ -> [C, Half, Half]
    end.
%% random/1
%% Generate N random bytes
%%
random(N) ->
crypto:strong_rand_bytes(N). | lib/ssh/src/ssh_bits.erl | 0.53048 | 0.493592 | ssh_bits.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% External functions for schema writers and cuttlefish invokers
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(cuttlefish).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
-endif.
-export([
conf_get/2,
conf_get/3,
unset/0,
invalid/1,
otp/2,
otp/3,
warn/1
]).
% @doc If DesiredMinimum =< the OTP you're running, then return
% IfGreaterOrEqual, otherwise IfLessThan.
-spec otp(string(), any(), any()) -> any().
otp(DesiredMinimumOTPVersion, IfGreaterOrEqual, IfLessThan) ->
    % erlang:system_info(otp_release) yields the release string,
    % e.g. "R16B02" or "17".
    ActualOTPVersion = erlang:system_info(otp_release),
    case otp(DesiredMinimumOTPVersion, ActualOTPVersion) of
        true -> IfGreaterOrEqual;
        _ -> IfLessThan
    end.
% @doc is ActualOTPVersion >= DesiredMinimumOTPVersion? Walks both
% version strings character by character.
-spec otp(string(), string()) -> boolean().
otp([], _Ver) ->
    %% Every character of the minimum matched; the actual version may
    %% just be a longer string (e.g. "R16B02-basho4" vs min "R16B02").
    true;
otp(_Min, []) ->
    %% The minimum is the longer string (e.g. min "R16B02-basho4" vs
    %% actual "R16B02").
    false;
otp([C | MinRest], [C | VerRest]) ->
    %% Heads agree: keep walking both strings.
    otp(MinRest, VerRest);
otp([MinChar | _], [VerChar | _]) ->
    %% First differing character decides the comparison.
    VerChar >= MinChar.
% @doc conf_get/2 is a convenience wrapper for proplists:get_value/2
% for schema writers. Keys to a Conf proplist are variable()s which
% are a list of strings. This function will look for those, but if
% you pass it a string() instead, it will be nice and split that
% string on "." since that's how cuttlefish do. Also, it will
% throw({not_found, Variable}) if the key is not found in the list,
% which is different from proplists:get_value/2's default behavior of
% returning 'undefined'. This makes it easy for cuttlefish
% translations to abort on error, and not assume a value. If that's
% what you want, please use conf_get/3.
% formerly cuttlefish_util:conf_get_value/2
-spec conf_get(
    string() | cuttlefish_variable:variable(),
    cuttlefish_conf:conf()) -> any().
conf_get([H|_T]=Variable, ConfigProplist) when is_list(H) ->
    case proplists:is_defined(Variable, ConfigProplist) of
        true ->
            proplists:get_value(Variable, ConfigProplist);
        false ->
            throw({not_found, Variable})
    end;
conf_get(Variable, ConfigProplist) ->
    % Plain string key: split it on "." into a variable() first.
    conf_get(
        cuttlefish_variable:tokenize(Variable),
        ConfigProplist).
% @doc Like proplists:get_value/3: returns Default when the key is
% absent. Accepts either a variable() or a string(), splitting the
% latter on ".". Formerly cuttlefish_util:conf_get_value/3.
-spec conf_get(
        string() | cuttlefish_variable:variable(),
        cuttlefish_conf:conf(), any()) -> any().
conf_get([Seg | _] = Variable, Conf, Default) when is_list(Seg) ->
    proplists:get_value(Variable, Conf, Default);
conf_get(StringKey, Conf, Default) ->
    conf_get(cuttlefish_variable:tokenize(StringKey), Conf, Default).
%% @doc Called inside a translation to make cuttlefish omit the Erlang
%% setting from the generated configuration entirely.
-spec unset() -> no_return().
unset() ->
    throw(unset).
%% @doc Called inside a translation to report that the input
%% configuration is invalid, with a human-readable reason string.
-spec invalid(string()) -> no_return().
invalid(Reason) ->
    throw({invalid, Reason}).
%% @doc Called inside a translation to emit a warning in the log.
-spec warn(iodata()) -> ok.
warn(Message) ->
    lager:warning(Message, []).
-ifdef(TEST).
%% EUnit coverage for otp/2's comparison scheme: equal strings, prefix
%% handling ("R16" vs "R16A"), and vendor-suffixed releases such as
%% "R15B02-basho3".
otp_test() ->
    ?assert(otp("R15B02", "R15B02-basho3")),
    ?assert(not(otp("R15B02-basho3", "R15B02"))),
    ?assert(otp("R16B02-basho3", "R16B03")),
    ?assert(otp("R15B01", "R15B02")),
    ?assert(otp("R15B01", "R15B02-basho3")),
    ?assert(not(otp("R16B01", "R15B02"))),
    ?assert(otp("R16", "R16B03")),
    ?assert(otp("R16", "R16A")),
    ?assert(not(otp("R16B01", "R16A"))),
    ?assert(otp("R16A", "R16A")),
    ok.
-endif.
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% @end
%%%-------------------------------------------------------------------------
-module(ot_meter_default).
-behaviour(ot_meter).
-behaviour(gen_server).
-export([start_link/1,
new_instruments/2,
lookup_instrument/1,
record/4,
record_batch/3,
%% functions used for bound instruments
record/3,
bind/3,
release/2,
%% observer functions
observer_tab/0,
register_observer/3,
set_observer_callback/3,
observe/3]).
-export([init/1,
handle_call/3,
handle_cast/2]).
-include("ot_meter.hrl").
-define(OBSERVER_TAB, ot_metric_accumulator_observers).
-define(TAB, ?MODULE).
-record(state, {}).
%% @doc Start the default meter as a locally registered singleton
%% process named after this module.
start_link(Args) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, Args, []).
%% @doc Ask the server to create the given instrument definitions.
%% Returns the result of ets:insert_new/2: false when any already exist.
-spec new_instruments(opentelemetry:meter(), [ot_meter:instrument_opts()]) -> boolean().
new_instruments(_Meter, Definitions) ->
    gen_server:call(?MODULE, {new, Definitions}).
%% @doc Record a measurement against an already-bound instrument.
%% Non-numeric values and the unknown_instrument sentinel are ignored.
-spec record(opentelemetry:meter(), bound_instrument(), number()) -> ok.
record(_Meter, unknown_instrument, Value) when is_number(Value) ->
    ok;
record(_Meter, Bound, Value) when is_number(Value) ->
    _ = ot_metric_accumulator:record(Bound, Value),
    ok;
record(_Meter, _Bound, _Value) ->
    ok.
%% @doc Record a measurement for the named instrument under a label
%% set; non-numeric values are ignored.
-spec record(opentelemetry:meter(), ot_meter:name(), ot_meter:label_set(), number()) -> ok.
record(_Meter, Name, Labels, Value) when is_number(Value) ->
    _ = ot_metric_accumulator:record(Name, Labels, Value),
    ok;
record(_Meter, _Name, _Labels, _Value) ->
    ok.
%% @doc Record several {Name, Value} measurements sharing one label
%% set. Entries that are not a {Name, Number} pair are skipped silently.
-spec record_batch(opentelemetry:meter(), [{ot_meter:name(), number()}], ot_meter:label_set()) -> ok.
record_batch(_Meter, Measurements, Labels) ->
    lists:foreach(fun({Name, Value}) when is_number(Value) ->
                          ot_metric_accumulator:record(Name, Labels, Value);
                     (_) ->
                          ok
                  end, Measurements),
    ok.
%% @doc Releasing a bound instrument is a no-op for the default meter;
%% bindings hold no per-binding resources needing cleanup here.
-spec release(opentelemetry:meter(), bound_instrument()) -> ok.
release(_Meter, _BoundInstrument) ->
    ok.
%% @doc Bind an instrument (given directly or by name) to a label set.
%% Unknown names yield the unknown_instrument sentinel.
-spec bind(opentelemetry:meter(), instrument() | ot_meter:name(), ot_meter:label_set())
          -> bound_instrument().
bind(_Meter, #instrument{} = Instrument, Labels) ->
    bind_instrument(Instrument, Labels);
bind(_Meter, Name, Labels) ->
    with_instrument(Name, fun(Instrument) -> bind_instrument(Instrument, Labels) end).
%% @doc Fetch the instrument registered under Name from the ets table,
%% or the unknown_instrument sentinel when it does not exist.
-spec lookup_instrument(ot_meter:name()) -> instrument() | unknown_instrument.
lookup_instrument(Name) ->
    case ets:lookup(?TAB, Name) of
        [] -> unknown_instrument;
        [Instrument] -> Instrument
    end.
%% @doc Name of the ets table the accumulator reads observers from.
observer_tab() ->
    ?OBSERVER_TAB.
%% @doc Register an observer callback for the named instrument.
-spec register_observer(opentelemetry:meter(), ot_meter:name(), ot_observer:callback()) -> ok.
register_observer(_Meter, Name, Callback) ->
    with_instrument(Name, fun(Instrument) ->
                                  gen_server:call(?MODULE, {register_observer, Name, Instrument, Callback})
                          end).
%% @doc Replace the callback of an observer. This issues the same
%% {register_observer, ...} request as register_observer/3: the server
%% uses ets:insert, which overwrites any existing entry, and that is
%% what updates the callback in place.
-spec set_observer_callback(opentelemetry:meter(), ot_meter:name(), ot_observer:callback())
                           -> ok | unknown_instrument.
set_observer_callback(_Meter, Name, Callback) ->
    with_instrument(Name, fun(Instrument) ->
                                  gen_server:call(?MODULE, {register_observer, Name, Instrument, Callback})
                          end).
%% @doc Forward an observation to the accumulator; non-numbers are ignored.
-spec observe(instrument(), number(), ot_meter:label_set()) -> ok.
observe(ObserverInstrument, Value, Labels) when is_number(Value) ->
    ot_metric_accumulator:observe(ObserverInstrument, Value, Labels),
    ok;
observe(_Instrument, _Value, _Labels) ->
    ok.
%% Apply Fun to the instrument registered under Name, or return the
%% unknown_instrument sentinel when no such instrument exists.
with_instrument(Name, Fun) ->
    case lookup_instrument(Name) of
        unknown_instrument -> unknown_instrument;
        Instrument -> Fun(Instrument)
    end.
%% TODO: we never want to lose the instrument and observer tables;
%% eventually they need an heir to take them over if this process
%% crashes. Alternatively persistent_term would work, since the entries
%% do not change after creation.
%%
%% The tables are created directly in init/1 (not in a handle_continue
%% or similar) because other parts of the system crash if they are
%% missing.
init(_Opts) ->
    ok = ensure_tab(?TAB, [named_table,
                           protected,
                           {read_concurrency, true},
                           {keypos, #instrument.name}]),
    %% Observers live in their own table, separate from other instruments.
    ok = ensure_tab(?OBSERVER_TAB, [named_table,
                                    protected,
                                    {keypos, #observer.name}]),
    {ok, #state{}}.
%% Create the named ets table unless a previous incarnation left it behind.
ensure_tab(Tab, Options) ->
    case ets:info(Tab, name) of
        undefined ->
            _ = ets:new(Tab, Options),
            ok;
        _ ->
            ok
    end.
%% Create the requested instruments. Definitions lacking a name or kind
%% are filtered out silently by the comprehension pattern.
%% ets:insert_new/2 yields false if any instrument already exists.
handle_call({new, Defs}, _From, State) ->
    Instruments = [make_instrument(Name, Kind, Def)
                   || Def = #{name := Name, kind := Kind} <- Defs],
    {reply, ets:insert_new(?TAB, Instruments), State};
handle_call({register_observer, Name, Instrument, Callback}, _From, State) ->
    %% Plain insert: re-registering a name replaces its callback.
    _ = ets:insert(?OBSERVER_TAB, #observer{name=Name,
                                            instrument={ot_meter_default, Instrument},
                                            callback=Callback}),
    {reply, ok, State}.
%% Casts are ignored; none are part of this server's protocol.
handle_cast(_Msg, State) ->
    {noreply, State}.
%% internal
%% Build an #instrument{} record from a definition map, applying the
%% defaults for all optional keys.
make_instrument(Name, Kind, Def) ->
    #instrument{name=Name,
                description=maps:get(description, Def, <<>>),
                kind=Kind,
                input_type=maps:get(input_type, Def, integer),
                unit=maps:get(unit, Def, one),
                label_keys=maps:get(label_keys, Def, [])}.
%% TODO: use a counter ref for `sum' and `mmsc' aggregated
%% instruments with `input_type' `integer'?
bind_instrument(Instrument, Labels) ->
    ot_metric_accumulator:lookup_active(Instrument, Labels).
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% =====================================================================
%% Ordered Sets implemented as General Balanced Trees
%%
%% Copyright (C) 1999-2001 <NAME>
%%
%% An implementation of ordered sets using Prof. <NAME>'s
%% General Balanced Trees. This can be much more efficient than using
%% ordered lists, for larger sets, but depends on the application. See
%% notes below for details.
%% ---------------------------------------------------------------------
%% Notes:
%%
%% The complexity on set operations is bounded by either O(|S|) or O(|T|
%% * log(|S|)), where S is the largest given set, depending on which is
%% fastest for any particular function call. For operating on sets of
%% almost equal size, this implementation is about 3 times slower than
%% using ordered-list sets directly. For sets of very different sizes,
%% however, this solution can be arbitrarily much faster; in practical
%% cases, often between 10 and 100 times. This implementation is
%% particularly suited for accumulating elements a few at a time,
%% building up a large set (more than 100-200 elements), and repeatedly
%% testing for membership in the current set.
%%
%% As with normal tree structures, lookup (membership testing),
%% insertion and deletion have logarithmic complexity.
%%
%% Operations:
%%
%% - empty(): returns empty set.
%%
%% Alias: new(), for compatibility with `sets'.
%%
%% - is_empty(S): returns 'true' if S is an empty set, and 'false'
%% otherwise.
%%
%% - size(S): returns the number of nodes in the set as an integer.
%% Returns 0 (zero) if the set is empty.
%%
%% - singleton(X): returns a set containing only the element X.
%%
%% - is_member(X, S): returns `true' if element X is a member of set S,
%% and `false' otherwise.
%%
%% Alias: is_element(), for compatibility with `sets'.
%%
%% - insert(X, S): inserts element X into set S; returns the new set.
%% *Assumes that the element is not present in S.*
%%
%% - add(X, S): adds element X to set S; returns the new set. If X is
%% already an element in S, nothing is changed.
%%
%% Alias: add_element(), for compatibility with `sets'.
%%
%% - delete(X, S): removes element X from set S; returns new set.
%% Assumes that the element exists in the set.
%%
%% - delete_any(X, S): removes key X from set S if the key is present
%% in the set, otherwise does nothing; returns new set.
%%
%% Alias: del_element(), for compatibility with `sets'.
%%
%% - balance(S): rebalances the tree representation of S. Note that this
%% is rarely necessary, but may be motivated when a large number of
%% elements have been deleted from the tree without further
%% insertions. Rebalancing could then be forced in order to minimise
%% lookup times, since deletion only does not rebalance the tree.
%%
%% - union(S1, S2): returns a new set that contains each element that is
%% in either S1 or S2 or both, and no other elements.
%%
%% - union(Ss): returns a new set that contains each element that is in
%% at least one of the sets in the list Ss, and no other elements.
%%
%% - intersection(S1, S2): returns a new set that contains each element
%% that is in both S1 and S2, and no other elements.
%%
%% - intersection(Ss): returns a new set that contains each element that
%% is in all of the sets in the list Ss, and no other elements.
%%
%% - is_disjoint(S1, S2): returns `true' if none of the elements in S1
%% occurs in S2.
%%
%% - difference(S1, S2): returns a new set that contains each element in
%% S1 that is not also in S2, and no other elements.
%%
%% Alias: subtract(), for compatibility with `sets'.
%%
%% - is_subset(S1, S2): returns `true' if each element in S1 is also a
%% member of S2, and `false' otherwise.
%%
%% - to_list(S): returns an ordered list of all elements in set S. The
%% list never contains duplicates.
%%
%% - from_list(List): creates a set containing all elements in List,
%% where List may be unordered and contain duplicates.
%%
%% - from_ordset(L): turns an ordered-set list L into a set. The list
%% must not contain duplicates.
%%
%% - smallest(S): returns the smallest element in set S. Assumes that
%% the set S is nonempty.
%%
%% - largest(S): returns the largest element in set S. Assumes that the
%% set S is nonempty.
%%
%% - take_smallest(S): returns {X, S1}, where X is the smallest element
%% in set S, and S1 is the set S with element X deleted. Assumes that
%% the set S is nonempty.
%%
%% - take_largest(S): returns {X, S1}, where X is the largest element in
%% set S, and S1 is the set S with element X deleted. Assumes that the
%% set S is nonempty.
%%
%% - iterator(S): returns an iterator that can be used for traversing
%% the entries of set S; see `next'. The implementation of this is
%% very efficient; traversing the whole set using `next' is only
%% slightly slower than getting the list of all elements using
%% `to_list' and traversing that. The main advantage of the iterator
%% approach is that it does not require the complete list of all
%% elements to be built in memory at one time.
%%
%% - iterator_from(X, S): returns an iterator that can be used for
%% traversing the elements of set S greater than or equal to X;
%% see `next'.
%%
%% - next(T): returns {X, T1} where X is the smallest element referred
%% to by the iterator T, and T1 is the new iterator to be used for
%% traversing the remaining elements, or the atom `none' if no
%% elements remain.
%%
%% - filter(P, S): Filters set S using predicate function P. Included
%% for compatibility with `sets'.
%%
%% - fold(F, A, S): Folds function F over set S with A as the initial
%%   accumulator. Included for compatibility with `sets'.
%%
%% - is_set(S): returns 'true' if S appears to be a set, and 'false'
%% otherwise. Not recommended; included for compatibility with `sets'.
-module(gb_sets).
-export([empty/0, is_empty/1, size/1, singleton/1, is_member/2,
insert/2, add/2, delete/2, delete_any/2, balance/1, union/2,
union/1, intersection/2, intersection/1, is_disjoint/2, difference/2,
is_subset/2, to_list/1, from_list/1, from_ordset/1, smallest/1,
largest/1, take_smallest/1, take_largest/1, iterator/1,
iterator_from/2, next/1, filter/2, fold/3, is_set/1]).
%% `sets' compatibility aliases:
-export([new/0, is_element/2, add_element/2, del_element/2,
subtract/2]).
%% GB-trees adapted from Sven-<NAME>'s implementation for
%% representation of sets.
%%
%% Data structures:
%% - {Size, Tree}, where `Tree' is composed of nodes of the form:
%% - {Key, Smaller, Bigger}, and the "empty tree" node:
%% - nil.
%%
%% No attempt is made to balance trees after deletions. Since deletions
%% don't increase the height of a tree, this should be OK.
%%
%% Original balance condition h(T) <= ceil(c * log(|T|)) has been
%% changed to the similar (but not quite equivalent) condition 2 ^ h(T)
%% <= |T| ^ c. This should also be OK.
%%
%% Behaviour is logarithmic (as it should be).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Some macros.
-define(p, 2). % It seems that p = 2 is optimal for sorted keys
-define(pow(A, _), A * A). % correct with exponent as defined above.
-define(div2(X), X bsr 1).
-define(mul2(X), X bsl 1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Some types.
-export_type([set/0, set/1, iter/0, iter/1]).
-type gb_set_node(Element) :: 'nil' | {Element, _, _}.
-opaque set(Element) :: {non_neg_integer(), gb_set_node(Element)}.
-type set() :: set(_).
-opaque iter(Element) :: [gb_set_node(Element)].
-type iter() :: iter(_).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Return the empty set.
-spec empty() -> Set when
      Set :: set().
empty() ->
    {0, nil}.
%% @doc Alias for empty/0, for compatibility with `sets'.
-spec new() -> Set when
      Set :: set().
new() ->
    empty().
%% @doc True iff the set contains no elements.
-spec is_empty(Set) -> boolean() when
      Set :: set().
is_empty(Set) ->
    Set =:= {0, nil}.
%% @doc Number of elements in the set (the cached size field).
-spec size(Set) -> non_neg_integer() when
      Set :: set().
size({Count, _Tree}) ->
    Count.
%% @doc A set holding exactly the single element Elem.
-spec singleton(Element) -> set(Element).
singleton(Elem) ->
    {1, {Elem, nil, nil}}.
%% @doc Alias for is_member/2, for compatibility with `sets'.
-spec is_element(Element, Set) -> boolean() when
      Set :: set(Element).
is_element(Elem, Set) ->
    is_member(Elem, Set).
%% @doc Membership test; logarithmic in the size of the set.
-spec is_member(Element, Set) -> boolean() when
      Set :: set(Element).
is_member(Elem, {_Size, Root}) ->
    is_member_1(Elem, Root).
is_member_1(Elem, {Node, Left, _Right}) when Elem < Node ->
    is_member_1(Elem, Left);
is_member_1(Elem, {Node, _Left, Right}) when Elem > Node ->
    is_member_1(Elem, Right);
is_member_1(_Elem, {_Node, _, _}) ->
    %% Neither smaller nor larger than this node: found it.
    true;
is_member_1(_Elem, nil) ->
    false.
%% @doc Insert an element that must NOT already be present; inserting an
%% existing element fails with error({key_exists, Key}).
-spec insert(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
insert(Key, {S, T}) ->
    S1 = S + 1,
    %% ?pow(S1, ?p) is the weight budget passed down the search path; it
    %% is halved at every level, so exhausting it (reaching 0 at the
    %% insertion point) means the new leaf would violate the balance
    %% condition 2^h(T) <= |T|^?p and some subtree must be rebuilt.
    {S1, insert_1(Key, T, ?pow(S1, ?p))}.
insert_1(Key, {Key1, Smaller, Bigger}, S) when Key < Key1 ->
    case insert_1(Key, Smaller, ?div2(S)) of
        {T1, H1, S1} when is_integer(H1) ->
            %% A descendant reported {Tree, 2^Height, Size}: we are
            %% still looking for the lowest subtree that breaks the
            %% balance condition.
            T = {Key1, T1, Bigger},
            {H2, S2} = count(Bigger),
            H = ?mul2(erlang:max(H1, H2)),
            SS = S1 + S2 + 1,
            P = ?pow(SS, ?p),
            if
                H > P ->
                    %% This subtree violates the condition: rebuild it.
                    %% balance/2 returns a plain tree, which also ends
                    %% the {Tree, Height, Size} reporting protocol.
                    balance(T, SS);
                true ->
                    {T, H, SS}
            end;
        T1 ->
            %% Plain tree: insertion (and any rebalancing) finished below.
            {Key1, T1, Bigger}
    end;
insert_1(Key, {Key1, Smaller, Bigger}, S) when Key > Key1 ->
    %% Mirror image of the clause above, descending to the right.
    case insert_1(Key, Bigger, ?div2(S)) of
        {T1, H1, S1} when is_integer(H1) ->
            T = {Key1, Smaller, T1},
            {H2, S2} = count(Smaller),
            H = ?mul2(erlang:max(H1, H2)),
            SS = S1 + S2 + 1,
            P = ?pow(SS, ?p),
            if
                H > P ->
                    balance(T, SS);
                true ->
                    {T, H, SS}
            end;
        T1 ->
            {Key1, Smaller, T1}
    end;
insert_1(Key, nil, 0) ->
    %% Budget exhausted exactly at the insertion point: report
    %% {Tree, 2^Height, Size} upwards so an ancestor can rebalance.
    {{Key, nil, nil}, 1, 1};
insert_1(Key, nil, _) ->
    %% Within budget: just create the new leaf.
    {Key, nil, nil};
insert_1(Key, _, _) ->
    erlang:error({key_exists, Key}).
%% Return {2^Height, Size} for a subtree; heights are kept as powers of
%% two so the balance test needs no logarithms.
count({_, nil, nil}) ->
    {1, 1};
count({_, Sm, Bi}) ->
    {H1, S1} = count(Sm),
    {H2, S2} = count(Bi),
    {?mul2(erlang:max(H1, H2)), S1 + S2 + 1};
count(nil) ->
    {1, 0}.
%% @doc Rebuild the set's tree perfectly balanced. Rarely necessary,
%% but can pay off after many deletions, since deletion never
%% rebalances.
-spec balance(Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
balance({S, T}) ->
    {S, balance(T, S)}.
balance(T, S) ->
    balance_list(to_list_1(T), S).
balance_list(L, S) ->
    {T, _} = balance_list_1(L, S),
    T.
%% Build a balanced tree from the first S elements of an ordered list,
%% returning {Tree, RemainingElements}. The left subtree receives the
%% larger half when S - 1 is odd.
balance_list_1(L, S) when S > 1 ->
    Sm = S - 1,
    S2 = Sm div 2,
    S1 = Sm - S2,
    {T1, [K | L1]} = balance_list_1(L, S1),
    {T2, L2} = balance_list_1(L1, S2),
    T = {K, T1, T2},
    {T, L2};
balance_list_1([Key | L], 1) ->
    {{Key, nil, nil}, L};
balance_list_1(L, 0) ->
    {nil, L}.
%% @doc Alias for add/2, for compatibility with `sets'.
-spec add_element(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
add_element(Elem, Set) ->
    add(Elem, Set).
%% @doc Add Elem to the set; a no-op when Elem is already a member.
-spec add(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
add(Elem, Set) ->
    case is_member(Elem, Set) of
        true -> Set;
        false -> insert(Elem, Set)
    end.
%% @doc Build a set from a possibly unordered list with duplicates.
-spec from_list(List) -> Set when
      List :: [Element],
      Set :: set(Element).
from_list(Elems) ->
    from_ordset(ordsets:from_list(Elems)).
%% @doc Build a set from an ordered, duplicate-free list.
-spec from_ordset(List) -> Set when
      List :: [Element],
      Set :: set(Element).
from_ordset(Ordset) ->
    Size = length(Ordset),
    {Size, balance_list(Ordset, Size)}.
%% @doc Alias for delete_any/2, for compatibility with `sets'.
-spec del_element(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
del_element(Elem, Set) ->
    delete_any(Elem, Set).
%% @doc Remove Elem when present; otherwise return the set unchanged.
-spec delete_any(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
delete_any(Elem, Set) ->
    case is_member(Elem, Set) of
        false -> Set;
        true -> delete(Elem, Set)
    end.
%% @doc Remove Elem, which must be present, from the set.
-spec delete(Element, Set1) -> Set2 when
      Set1 :: set(Element),
      Set2 :: set(Element).
delete(Elem, {Size, Root}) ->
    {Size - 1, delete_1(Elem, Root)}.
%% Deletion never rebalances the tree; see balance/1.
delete_1(Elem, {Node, Left, Right}) when Elem < Node ->
    {Node, delete_1(Elem, Left), Right};
delete_1(Elem, {Node, Left, Right}) when Elem > Node ->
    {Node, Left, delete_1(Elem, Right)};
delete_1(_Elem, {_Node, Left, Right}) ->
    merge(Left, Right).
%% Join the two subtrees of a removed node by promoting the in-order
%% successor (the minimum of the right subtree) to the root.
merge(Left, nil) ->
    Left;
merge(nil, Right) ->
    Right;
merge(Left, Right) ->
    {Successor, Right1} = take_smallest1(Right),
    {Successor, Left, Right1}.
%% @doc Remove and return the smallest element of a nonempty set.
-spec take_smallest(Set1) -> {Element, Set2} when
      Set1 :: set(Element),
      Set2 :: set(Element).
take_smallest({Size, Root}) ->
    {Min, Rest} = take_smallest1(Root),
    {Min, {Size - 1, Rest}}.
take_smallest1({Min, nil, Right}) ->
    {Min, Right};
take_smallest1({Node, Left, Right}) ->
    {Min, Left1} = take_smallest1(Left),
    {Min, {Node, Left1, Right}}.
%% @doc Smallest element of a nonempty set (the leftmost node).
-spec smallest(Set) -> Element when
      Set :: set(Element).
smallest({_Size, Root}) ->
    smallest_1(Root).
smallest_1({Node, nil, _Right}) ->
    Node;
smallest_1({_Node, Left, _Right}) ->
    smallest_1(Left).
%% @doc Remove and return the largest element of a nonempty set.
-spec take_largest(Set1) -> {Element, Set2} when
      Set1 :: set(Element),
      Set2 :: set(Element).
take_largest({Size, Root}) ->
    {Max, Rest} = take_largest1(Root),
    {Max, {Size - 1, Rest}}.
take_largest1({Max, Left, nil}) ->
    {Max, Left};
take_largest1({Node, Left, Right}) ->
    {Max, Right1} = take_largest1(Right),
    {Max, {Node, Left, Right1}}.
%% @doc Largest element of a nonempty set (the rightmost node).
-spec largest(Set) -> Element when
      Set :: set(Element).
largest({_Size, Root}) ->
    largest_1(Root).
largest_1({Node, _Left, nil}) ->
    Node;
largest_1({_Node, _Left, Right}) ->
    largest_1(Right).
%% @doc Ordered, duplicate-free list of all elements in the set.
-spec to_list(Set) -> List when
      Set :: set(Element),
      List :: [Element].
to_list({_Size, Root}) ->
    to_list(Root, []).
%% Same, but starting from a raw tree node.
to_list_1(Tree) ->
    to_list(Tree, []).
%% In-order traversal; Acc holds the already-collected larger elements.
to_list({Node, Left, Right}, Acc) ->
    to_list(Left, [Node | to_list(Right, Acc)]);
to_list(nil, Acc) ->
    Acc.
%% @doc An iterator over the set, to be consumed with next/1. The
%% iterator is simply the traversal stack of an in-order walk, so the
%% whole element list never needs to exist in memory at once.
-spec iterator(Set) -> Iter when
      Set :: set(Element),
      Iter :: iter(Element).
iterator({_Size, Root}) ->
    iterator(Root, []).
%% Push the path down to the leftmost node of the tree onto the stack.
iterator({_, nil, _} = Node, Stack) ->
    [Node | Stack];
iterator({_, Left, _} = Node, Stack) ->
    iterator(Left, [Node | Stack]);
iterator(nil, Stack) ->
    Stack.
%% @doc An iterator over only the elements greater than or equal to From.
-spec iterator_from(Element, Set) -> Iter when
      Set :: set(Element),
      Iter :: iter(Element).
iterator_from(From, {_Size, Root}) ->
    iterator_from(From, Root, []).
iterator_from(From, {Node, _, Right}, Stack) when Node < From ->
    %% This node and its whole left half are below the bound: skip them.
    iterator_from(From, Right, Stack);
iterator_from(_From, {_, nil, _} = Node, Stack) ->
    [Node | Stack];
iterator_from(From, {_, Left, _} = Node, Stack) ->
    iterator_from(From, Left, [Node | Stack]);
iterator_from(_From, nil, Stack) ->
    Stack.
%% @doc Pop the smallest remaining element off an iterator, returning
%% {Element, NewIter}, or none when the traversal is exhausted.
-spec next(Iter1) -> {Element, Iter2} | 'none' when
      Iter1 :: iter(Element),
      Iter2 :: iter(Element).
next([{Elem, _, Right} | Stack]) ->
    {Elem, iterator(Right, Stack)};
next([]) ->
    none.
%% Set operations:
%% If |X| < |Y|, then we traverse the elements of X. The cost for
%% testing a single random element for membership in a tree S is
%% proportional to log(|S|); thus, if |Y| / |X| < c * log(|Y|), for some
%% c, it is more efficient to scan the ordered sequence of elements of Y
%% while traversing X (under the same ordering) in order to test whether
%% elements of X are already in Y. Since the `math' module does not have
%% a `log2'-function, we rewrite the condition to |X| < |Y| * c1 *
%% ln(|X|), where c1 = c / ln 2.
-define(c, 1.46). % 1 / ln 2; this appears to be best
%% If the sets are not very different in size, i.e., if |Y| / |X| >= c *
%% log(|Y|), then the fastest way to do union (and the other similar set
%% operations) is to build the lists of elements, traverse these lists
%% in parallel while building a reversed accumulator list, and finally
%% rebuild the tree directly from the accumulator. Other methods of
%% traversing the elements can be devised, but they all have higher
%% overhead.
%% @doc Union of two sets: all elements occurring in either input.
-spec union(Set1, Set2) -> Set3 when
      Set1 :: set(Element),
      Set2 :: set(Element),
      Set3 :: set(Element).
%% Always walk the smaller set's element list against the larger tree.
union({N1, T1}, {N2, T2}) when N2 < N1 ->
    union(to_list_1(T2), N2, T1, N1);
union({N1, T1}, {N2, T2}) ->
    union(to_list_1(T1), N1, T2, N2).
%% We avoid the expensive mathematical computations if there is little
%% chance at saving at least the same amount of time by making the right
%% choice of strategy. Recall that N1 < N2 here.
union(L, N1, T2, N2) when N2 < 10 ->
    %% Break even is about 7 for N1 = 1 and 10 for N1 = 2
    union_2(L, to_list_1(T2), N1 + N2);
union(L, N1, T2, N2) ->
    X = N1 * round(?c * math:log(N2)),
    if N2 < X ->
            %% Comparable sizes: parallel scan of both element lists.
            union_2(L, to_list_1(T2), N1 + N2);
       true ->
            %% Very different sizes: add each element of the small list.
            union_1(L, mk_set(N2, T2))
    end.
-spec mk_set(non_neg_integer(), gb_set_node(T)) -> set(T).
%% Wrap a raw tree node and its size back into the set representation.
mk_set(N, T) ->
    {N, T}.
%% If the length of the list is in proportion with the size of the
%% target set, this version spends too much time doing lookups, compared
%% to the below version.
union_1([X | Xs], S) ->
    union_1(Xs, add(X, S));
union_1([], S) ->
    S.
%% If the length of the first list is too small in comparison with the
%% size of the target set, this version spends too much time scanning
%% the element list of the target set for possible membership, compared
%% with the above version.
%% Some notes on sequential scanning of ordered lists
%%
%% 1) We want to put the equality case last, if we can assume that the
%% probability for overlapping elements is relatively low on average.
%% Doing this also allows us to completely skip the (arithmetic)
%% equality test, since the term order is arithmetically total.
%%
%% 2) We always test for `smaller than' first, i.e., whether the head of
%% the left list is smaller than the head of the right list, and if the
%% `greater than' test should instead turn out to be true, we switch
%% left and right arguments in the recursive call under the assumption
%% that the same is likely to apply to the next element also,
%% statistically reducing the number of failed tests and automatically
%% adapting to cases of lists having very different lengths. This saves
%% 10-40% of the traversal time compared to a "fixed" strategy,
%% depending on the sizes and contents of the lists.
%%
%% 3) A tail recursive version using `lists:reverse/2' is about 5-10%
%% faster than a plain recursive version using the stack, for lists of
%% more than about 20 elements and small stack frames. For very short
%% lists, however (length < 10), the stack version can be several times
%% faster. As stack frames grow larger, the advantages of using
%% `reverse' could get greater.
union_2(Xs, Ys, S) ->
    union_2(Xs, Ys, [], S). % S is the sum of the sizes here
union_2([X | Xs1], [Y | _] = Ys, As, S) when X < Y ->
    union_2(Xs1, Ys, [X | As], S);
union_2([X | _] = Xs, [Y | Ys1], As, S) when X > Y ->
    %% Swap the argument order; see note 2 above.
    union_2(Ys1, Xs, [Y | As], S);
union_2([X | Xs1], [_ | Ys1], As, S) ->
    %% Equal heads: keep one copy and shrink the total size.
    union_2(Xs1, Ys1, [X | As], S - 1);
union_2([], Ys, As, S) ->
    {S, balance_revlist(push(Ys, As), S)};
union_2(Xs, [], As, S) ->
    {S, balance_revlist(push(Xs, As), S)}.
%% Prepend an ordered remainder onto the reversed accumulator.
push([X | Xs], As) ->
    push(Xs, [X | As]);
push([], As) ->
    As.
%% Build a balanced tree from a list in DESCENDING order (as produced by
%% the reversed accumulators of the set operations above).
balance_revlist(List, Size) ->
    {Tree, _Rest} = balance_revlist_1(List, Size),
    Tree.
%% Mirror image of balance_list_1/2: because the input is descending,
%% the first chunk consumed becomes the RIGHT subtree.
balance_revlist_1(List, Size) when Size > 1 ->
    Rem = Size - 1,
    Half = Rem div 2,
    BigHalf = Rem - Half,
    {Right, [Root | Rest1]} = balance_revlist_1(List, BigHalf),
    {Left, Rest2} = balance_revlist_1(Rest1, Half),
    {{Root, Left, Right}, Rest2};
balance_revlist_1([Elem | Rest], 1) ->
    {{Elem, nil, nil}, Rest};
balance_revlist_1(List, 0) ->
    {nil, List}.
%% @doc Union of a list of sets; the empty list yields the empty set.
-spec union(SetList) -> Set when
      SetList :: [set(Element),...],
      Set :: set(Element).
union([Set | More]) ->
    union_list(Set, More);
union([]) -> empty().
%% Left fold of union/2 over the remaining sets.
union_list(Acc, [Next | More]) ->
    union_list(union(Acc, Next), More);
union_list(Acc, []) -> Acc.
%% The rest is modelled on the above.
%% @doc Intersection of two sets: the elements occurring in both inputs.
-spec intersection(Set1, Set2) -> Set3 when
      Set1 :: set(Element),
      Set2 :: set(Element),
      Set3 :: set(Element).
%% Walk the smaller set's element list against the larger tree.
intersection({N1, T1}, {N2, T2}) when N2 < N1 ->
    intersection(to_list_1(T2), N2, T1, N1);
intersection({N1, T1}, {N2, T2}) ->
    intersection(to_list_1(T1), N1, T2, N2).
%% Strategy selection as in union/4: parallel list scan for sets of
%% comparable size, per-element tree lookups otherwise.
intersection(L, _N1, T2, N2) when N2 < 10 ->
    intersection_2(L, to_list_1(T2));
intersection(L, N1, T2, N2) ->
    X = N1 * round(?c * math:log(N2)),
    if N2 < X ->
            intersection_2(L, to_list_1(T2));
       true ->
            intersection_1(L, T2)
    end.
%% We collect the intersecting elements in an accumulator list and count
%% them at the same time so we can balance the list afterwards.
intersection_1(Xs, T) ->
    intersection_1(Xs, T, [], 0).
intersection_1([X | Xs], T, As, N) ->
    case is_member_1(X, T) of
        true ->
            intersection_1(Xs, T, [X | As], N + 1);
        false ->
            intersection_1(Xs, T, As, N)
    end;
intersection_1([], _, As, N) ->
    {N, balance_revlist(As, N)}.
intersection_2(Xs, Ys) ->
    intersection_2(Xs, Ys, [], 0).
%% Parallel scan of two ordered lists, keeping only common elements
%% (same adaptive argument-swapping as union_2/4).
intersection_2([X | Xs1], [Y | _] = Ys, As, S) when X < Y ->
    intersection_2(Xs1, Ys, As, S);
intersection_2([X | _] = Xs, [Y | Ys1], As, S) when X > Y ->
    intersection_2(Ys1, Xs, As, S);
intersection_2([X | Xs1], [_ | Ys1], As, S) ->
    %% Equal heads: part of the intersection.
    intersection_2(Xs1, Ys1, [X | As], S + 1);
intersection_2([], _, As, S) ->
    {S, balance_revlist(As, S)};
intersection_2(_, [], As, S) ->
    {S, balance_revlist(As, S)}.
%% @doc Intersection of a nonempty list of sets (no clause for []).
-spec intersection(SetList) -> Set when
      SetList :: [set(Element),...],
      Set :: set(Element).
intersection([Set | More]) ->
    intersection_list(Set, More).
%% Left fold of intersection/2 over the remaining sets.
intersection_list(Acc, [Next | More]) ->
    intersection_list(intersection(Acc, Next), More);
intersection_list(Acc, []) -> Acc.
%% @doc True iff the two sets have no element in common.
-spec is_disjoint(Set1, Set2) -> boolean() when
      Set1 :: set(Element),
      Set2 :: set(Element).
%% Recurse over the smaller set's tree while probing the larger one.
is_disjoint({N1, T1}, {N2, T2}) when N1 < N2 ->
    is_disjoint_1(T1, T2);
is_disjoint({_, T1}, {_, T2}) ->
    is_disjoint_1(T2, T1).
%% Walk the first tree; the node key tells which half of the second
%% tree could possibly contain it, so only that half is probed.
is_disjoint_1({K1, Smaller1, Bigger}, {K2, Smaller2, _}=Tree) when K1 < K2 ->
    not is_member_1(K1, Smaller2) andalso
        is_disjoint_1(Smaller1, Smaller2) andalso
        is_disjoint_1(Bigger, Tree);
is_disjoint_1({K1, Smaller, Bigger1}, {K2, _, Bigger2}=Tree) when K1 > K2 ->
    not is_member_1(K1, Bigger2) andalso
        is_disjoint_1(Bigger1, Bigger2) andalso
        is_disjoint_1(Smaller, Tree);
is_disjoint_1({_K1, _, _}, {_K2, _, _}) -> %K1 == K2
    %% Equal root keys: a common element exists.
    false;
is_disjoint_1(nil, _) ->
    true;
is_disjoint_1(_, nil) ->
    true.
%% Note that difference is not symmetric. We don't use `delete' here,
%% since the GB-trees implementation does not rebalance after deletion
%% and so we could end up with very unbalanced trees indeed depending on
%% the sets. Therefore, we always build a new tree, and thus we need to
%% traverse the whole element list of the left operand.
%% @doc Alias for difference/2, for compatibility with `sets'.
-spec subtract(Set1, Set2) -> Set3 when
      Set1 :: set(Element),
      Set2 :: set(Element),
      Set3 :: set(Element).
subtract(S1, S2) ->
    difference(S1, S2).
%% @doc All elements of Set1 that are not also in Set2.
-spec difference(Set1, Set2) -> Set3 when
      Set1 :: set(Element),
      Set2 :: set(Element),
      Set3 :: set(Element).
difference({N1, T1}, {N2, T2}) ->
    difference(to_list_1(T1), N1, T2, N2).
%% Strategy selection as in union/4: parallel list scan for sets of
%% comparable size, per-element tree lookups otherwise.
difference(L, N1, T2, N2) when N2 < 10 ->
    difference_2(L, to_list_1(T2), N1);
difference(L, N1, T2, N2) ->
    X = N1 * round(?c * math:log(N2)),
    if N2 < X ->
            difference_2(L, to_list_1(T2), N1);
       true ->
            difference_1(L, T2)
    end.
%% Collect the elements of Xs NOT present in tree T, counting as we go
%% so the reversed accumulator can be rebalanced directly.
difference_1(Xs, T) ->
    difference_1(Xs, T, [], 0).
difference_1([X | Xs], T, As, N) ->
    case is_member_1(X, T) of
        true ->
            difference_1(Xs, T, As, N);
        false ->
            difference_1(Xs, T, [X | As], N + 1)
    end;
difference_1([], _, As, N) ->
    {N, balance_revlist(As, N)}.
difference_2(Xs, Ys, S) ->
    difference_2(Xs, Ys, [], S). % S is the size of the left set
%% Parallel scan of both ordered lists. Unlike union_2/4, the operation
%% is asymmetric, so the arguments are never swapped.
difference_2([X | Xs1], [Y | _] = Ys, As, S) when X < Y ->
    difference_2(Xs1, Ys, [X | As], S);
difference_2([X | _] = Xs, [Y | Ys1], As, S) when X > Y ->
    difference_2(Xs, Ys1, As, S);
difference_2([_X | Xs1], [_Y | Ys1], As, S) ->
    %% Common element: excluded from the result, so shrink the size.
    difference_2(Xs1, Ys1, As, S - 1);
difference_2([], _Ys, As, S) ->
    {S, balance_revlist(As, S)};
difference_2(Xs, [], As, S) ->
    {S, balance_revlist(push(Xs, As), S)}.
%% Subset testing is much the same thing as set difference, but
%% without the construction of a new set.
%% @doc True iff every element of Set1 is also a member of Set2.
-spec is_subset(Set1, Set2) -> boolean() when
      Set1 :: set(Element),
      Set2 :: set(Element).
is_subset({N1, T1}, {N2, T2}) ->
    is_subset(to_list_1(T1), N1, T2, N2).
%% Same strategy selection as the other set operations above.
is_subset(L, _N1, T2, N2) when N2 < 10 ->
    is_subset_2(L, to_list_1(T2));
is_subset(L, N1, T2, N2) ->
    X = N1 * round(?c * math:log(N2)),
    if N2 < X ->
            is_subset_2(L, to_list_1(T2));
       true ->
            is_subset_1(L, T2)
    end.
%% Probe each element of the list against the tree; any miss fails.
is_subset_1([X | Xs], T) ->
    case is_member_1(X, T) of
        true ->
            is_subset_1(Xs, T);
        false ->
            false
    end;
is_subset_1([], _) ->
    true.
%% Parallel scan of two ordered lists: true iff every element of the
%% first list occurs in the second.
is_subset_2([X | _], [Y | _]) when X < Y ->
    %% X can no longer appear further down the second list.
    false;
is_subset_2([X | _] = Xs, [Y | Ys]) when X > Y ->
    %% Skip past the smaller element of the second list.
    is_subset_2(Xs, Ys);
is_subset_2([_ | Xs], [_ | Ys]) ->
    %% Equal heads: advance both lists.
    is_subset_2(Xs, Ys);
is_subset_2([], _) ->
    true;
is_subset_2(_, []) ->
    false.
%% For compatibility with `sets':
%% @doc Shallow structural check of the set representation. Not
%% recommended (it cannot verify the tree invariants); included only
%% for compatibility with `sets'.
-spec is_set(Term) -> boolean() when
      Term :: term().
is_set({0, nil}) -> true;
is_set({Size, {_, _, _}}) when is_integer(Size), Size >= 0 -> true;
is_set(_Other) -> false.
%% @doc Keep only the elements for which Pred returns true; included
%% for compatibility with `sets'.
-spec filter(Pred, Set1) -> Set2 when
      Pred :: fun((Element) -> boolean()),
      Set1 :: set(Element),
      Set2 :: set(Element).
filter(Pred, Set) ->
    from_ordset([Elem || Elem <- to_list(Set), Pred(Elem)]).
%% @doc In-order fold (smallest element first) over the set; included
%% for compatibility with `sets'.
-spec fold(Function, Acc0, Set) -> Acc1 when
      Function :: fun((Element, AccIn) -> AccOut),
      Acc0 :: Acc,
      Acc1 :: Acc,
      AccIn :: Acc,
      AccOut :: Acc,
      Set :: set(Element).
fold(Fun, Acc0, {_Size, Root}) when is_function(Fun, 2) ->
    fold_1(Fun, Acc0, Root).
%% Left subtree, then the node itself, then the right subtree.
fold_1(Fun, Acc, {Node, Left, Right}) ->
    AccLeft = fold_1(Fun, Acc, Left),
    fold_1(Fun, Fun(Node, AccLeft), Right);
fold_1(_Fun, Acc, _Nil) ->
    Acc.
-module(day16).
-export([main/0]).
-include_lib("eunit/include/eunit.hrl").
-record(range, {min, max}).
-record(field, {name, r1, r2}).
-record(matchset, {index, names}).
-record(match, {index, name}).
% Entry point: read the puzzle input, print part one's ticket scanning
% error rate, then part two's product of the 'departure' fields.
main() ->
    Fields = fields(),
    [MyTicket | Nearby] = tickets(),
    {ValidTickets, ErrorRate} = part_one(Fields, Nearby),
    io:format("~p~n", [ErrorRate]),
    io:format("~p~n", [part_two(Fields, ValidTickets, MyTicket)]),
    ok.
% Part one: split the nearby tickets into the valid ones and the sum of
% all values that match no field at all (the "error rate").
part_one(Fields, Tickets) ->
    Classify = fun(Ticket, {ValidAcc, ErrSum}) ->
                       case is_ticket_valid(Fields, Ticket) of
                           true -> {[Ticket | ValidAcc], ErrSum};
                           {false, Bad} -> {ValidAcc, ErrSum + Bad}
                       end
               end,
    lists:foldl(Classify, {[], 0}, Tickets).
% Determine whether the given ticket is valid. A ticket is valid when
% every one of its values matches at least one field; otherwise the
% first offending value is reported as {false, Value}.
is_ticket_valid(Fields, [Value | Rest]) ->
    case is_value_valid(Value, Fields) of
        false -> {false, Value};
        true -> is_ticket_valid(Fields, Rest)
    end;
is_ticket_valid(_Fields, []) ->
    % All values checked out.
    true.
-ifdef(TEST).
% Cases from the puzzle example: one fully valid ticket and two tickets
% whose first invalid value is reported back. test_fields/0 builds the
% example field list (defined elsewhere in this module).
is_ticket_valid_test() ->
    Fields = test_fields(),
    ?assertEqual(true, is_ticket_valid(Fields, [7,3,47])),
    ?assertEqual({false, 4}, is_ticket_valid(Fields, [40,4,50])),
    ?assertEqual({false, 55}, is_ticket_valid(Fields, [55,2,20])).
-endif.
% Returns true if N is a valid value for at least one of the given
% fields. lists:any/2 stops at the first field that accepts N, just
% like the hand-rolled recursion it replaces.
is_value_valid(N, Fields) ->
    lists:any(fun(Field) -> is_valid_value_for(N, Field) end, Fields).
-ifdef(TEST).
% Spot checks against hand-built fields: 4 falls outside every range,
% while 40 and 50 each fall inside at least one range (range bounds are
% presumably inclusive -- confirm against is_valid_value_for/2, which
% is defined further down in this module).
is_value_valid_test() ->
    Fields = [
        #field{name="class", r1=#range{min=1,max=3}, r2=#range{min=5, max=7}},
        #field{name="row", r1=#range{min=6,max=11}, r2=#range{min=33,max=44}},
        #field{name="seat", r1=#range{min=13,max=40}, r2=#range{min=45,max=50}}
    ],
    ?assertEqual(true, is_value_valid(40, Fields)),
    ?assertEqual(false, is_value_valid(4, Fields)),
    ?assertEqual(true, is_value_valid(50, Fields)).
-endif.
% Part two: determine the names of each column and return the product of the 'departure' fields
% from my ticket.
part_two(Fields, Tickets, Mine) ->
Columns = get_column_names(Fields, Tickets),
Ticket = maps:from_list(lists:zip(Columns, Mine)),
maps:fold(fun (K, V, Acc) ->
Acc * case string:prefix(K, "departure") of
nomatch -> 1;
_ -> V
end
end, 1, Ticket).
% Figure out the names of each column by process of elimination.
% Figure out the names of each column by process of elimination.
% Each column gets a matchset (every field name it could legally be).
% Sorting matchsets by candidate count means that, walking from the smallest
% set upward, exactly one name per set should still be unclaimed.
% NOTE(review): assumes the input admits a unique assignment; the
% single-element match below crashes otherwise -- confirm for arbitrary input.
get_column_names(Fields, Tickets) ->
    Matches = get_possible_matches(Fields, Tickets),
    % Fewest candidates first, so greedy elimination works.
    ByLength = lists:sort(fun (A, B) ->
        length(A#matchset.names) =< length(B#matchset.names)
    end, Matches),
    {_, Result} = lists:foldl(fun (E, {Mem, Acc}) ->
        % Mem remembers already-claimed names; pick the one new name.
        [Name] = lists:filter(fun (E2) -> not maps:is_key(E2, Mem) end, E#matchset.names),
        Match = #match{name = Name, index = E#matchset.index},
        {Mem#{Name => ok}, [Match | Acc]}
    end, {#{}, []}, ByLength),
    % Restore original column order before stripping the record wrapper.
    ByIndex = lists:sort(fun (A, B) ->
        A#match.index =< B#match.index
    end, Result),
    lists:map(fun (E) -> E#match.name end, ByIndex).
-ifdef(TEST).
get_column_names_test() ->
Fields = [
#field{name="class", r1=#range{min=0,max=1}, r2=#range{min=4,max=19}},
#field{name="row", r1=#range{min=0,max=5}, r2=#range{min=8,max=19}},
#field{name="seat", r1=#range{min=0,max=13}, r2=#range{min=16,max=19}}
],
Tickets = [
[3,9,18],
[15,1,5],
[5,14,9]
],
?assertEqual(["row","class","seat"], get_column_names(Fields, Tickets)).
-endif.
% Gets the matchset for each column of the given set of tickets.
% Gets the matchset for each column of the given set of tickets.
% Column indices are 1-based (lists:nth/2 convention).
get_possible_matches(Fields, Tickets) ->
    lists:map(fun (N) ->
        #matchset{index = N, names = get_possible_matches(N, Fields, Tickets)}
    end, lists:seq(1, length(Fields))).

% Gets a list of possible field names for the Nth column by looking at the
% ticket data: a field is a candidate when every ticket's Nth value is valid
% for it.
get_possible_matches(N, Fields, Tickets) ->
    lists:foldl(fun (Field, Acc) ->
        case is_match_for_column(N, Field, Tickets) of
            true -> [Field#field.name | Acc];
            false -> Acc
        end
    end, [], Fields).

% Is the given field a match for the Nth column of every ticket?
% Note: lists:nth/2 is O(N), so this is quadratic in ticket width --
% acceptable for puzzle-sized input.
is_match_for_column(N, Field, Tickets) ->
    lists:all(fun (Ticket) ->
        is_valid_value_for(lists:nth(N, Ticket), Field)
    end, Tickets).
% Is N a valid value for the given field?
% Is N a valid value for the given field, i.e. inside either of its two
% inclusive ranges?
is_valid_value_for(N, Field) ->
    is_in_range(N, Field#field.r1) orelse is_in_range(N, Field#field.r2).

% Inclusive range membership test.
is_in_range(N, Range) ->
    N >= Range#range.min andalso N =< Range#range.max.
% Data loading functions.
fields() ->
parse_fields(load("fields.txt")).
-ifdef(TEST).
test_fields() -> [
#field{name="class", r1=#range{min=1,max=3}, r2=#range{min=5, max=7}},
#field{name="row", r1=#range{min=6,max=11}, r2=#range{min=33,max=44}},
#field{name="seat", r1=#range{min=13,max=40}, r2=#range{min=45,max=50}}
].
-endif.
% Parse each line of the fields file into a #field{} record.
% The original hand-rolled accumulate-and-reverse loop is replaced by a
% list comprehension (same order, same results); the now-unused
% parse_fields/2 helper is dropped (it was never exported).
parse_fields(Lines) ->
    [parse_field(Line) || Line <- Lines].

% Parse one "name: A-B or C-D" line into a #field{} record.
parse_field(Line) ->
    [Name, Rest] = string:split(Line, ": "),
    [First, Second] = string:split(Rest, " or "),
    #field{name = Name, r1 = parse_range(First), r2 = parse_range(Second)}.

% Parse a "Min-Max" span into a #range{} record (bounds are inclusive).
parse_range(Line) ->
    [First, Second] = string:split(Line, "-"),
    #range{min = list_to_integer(First), max = list_to_integer(Second)}.
tickets() ->
parse_tickets(load("input.txt")).
% Parse each line of ticket data into a list of integers.
% The original accumulate-and-reverse recursion is replaced by a list
% comprehension (identical order and results); the unexported
% parse_tickets/2 helper is dropped.
parse_tickets(Lines) ->
    [parse_ticket(Line) || Line <- Lines].

% Parse one comma-separated line, e.g. "3,9,18" -> [3,9,18].
parse_ticket(Line) ->
    [list_to_integer(Part) || Part <- string:split(Line, ",", all)].
load(Filename) ->
{ok, File} = file:open(Filename, [read]),
{ok, Text} = file:read(File, 1024*1024),
string:split(Text, "\n", all). | day16/day16.erl | 0.511229 | 0.500488 | day16.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc VClock.
-module(vclock).
-author("<NAME> <<EMAIL>>").
-include("ldb.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0,
from_list/1,
get_next_dot/2,
add_dot/2,
is_element/2,
is_inflation/2,
can_deliver/3,
union/2,
intersection/2,
subtract/2,
size/1]).
-export_type([v/0]).
-type v() :: maps:map(ldb_node_id(), sequence()).
%% @doc Create an empty vclock (no node has been observed yet).
-spec new() -> v().
new() ->
    maps:new().

%% @doc Create a vclock from a list of `{NodeId, Sequence}' pairs.
%% Later duplicates of a key win, per maps:from_list/1 semantics.
-spec from_list([{ldb_node_id(), sequence()}]) -> v().
from_list(L) ->
    maps:from_list(L).
%% @doc Return the next dot for a given id.
-spec get_next_dot(ldb_node_id(), v()) -> dot().
get_next_dot(Id, Clock) ->
Seq = maps:get(Id, Clock, 0),
NewSeq = Seq + 1,
{Id, NewSeq}.
%% @doc Add a dot to the vv.
-spec add_dot(dot(), v()) -> v().
add_dot({Id, Seq}, Clock) ->
maps:update_with(
Id,
fun(CurrentSeq) -> max(Seq, CurrentSeq) end,
Seq,
Clock
).
%% @doc Check if a dot is in the clock.
-spec is_element(dot(), v()) -> boolean().
is_element({Id, Seq}, Clock) ->
CurrentSeq = maps:get(Id, Clock, 0),
Seq =< CurrentSeq.
%% @doc Check is a `ClockB' dominates `ClockA'.
-spec is_inflation(v(), v()) -> boolean().
is_inflation(ClockA, ClockB) ->
is_inflation_loop(maps:to_list(ClockA), ClockB).
%% @private
-spec is_inflation_loop([{ldb_node_id(), non_neg_integer()}], v()) -> boolean().
is_inflation_loop([], _) ->
true;
is_inflation_loop([Dot|Rest], ClockB) ->
case is_element(Dot, ClockB) of
true -> is_inflation_loop(Rest, ClockB);
false -> false
end.
%% @doc Check is a clock dominates another with the exception of the origin dot.
-spec can_deliver(dot(), v(), v()) -> boolean().
can_deliver({Id, Seq}=_RemoteDot, RemoteClock, LocalClock) ->
case is_element({Id, Seq - 1}, LocalClock) of
true -> is_inflation(maps:remove(Id, RemoteClock), LocalClock);
false -> false
end.
%% @doc Union clocks: pointwise maximum of the two maps over the union of
%% their keys.
-spec union(v(), v()) ->v().
union(ClockA, ClockB) ->
    % maps_ext is a project-local helper; presumably merges over the union
    % of keys, applying the fun only to keys present in both -- confirm
    % against maps_ext:merge_all/3.
    maps_ext:merge_all(
        fun(_, SeqA, SeqB) -> max(SeqA, SeqB) end,
        ClockA,
        ClockB
    ).
%% @doc Intersect clocks.
-spec intersection(v(), v()) -> v().
intersection(ClockA, ClockB) ->
Clock0 = maps:filter(
fun(Id, _) -> maps:is_key(Id, ClockB) end,
ClockA
),
maps:map(
fun(Id, SeqA) ->
SeqB = maps:get(Id, ClockB),
min(SeqA, SeqB)
end,
Clock0
).
%% @doc Subtract clocks.
-spec subtract(v(), v()) -> list(dot()).
subtract(ClockA, ClockB) ->
maps:fold(
fun(Id, SeqA, Acc0) ->
SeqB = maps:get(Id, ClockB, 0),
case SeqB >= SeqA of
true ->
Acc0;
false ->
lists:foldl(
fun(Seq, Acc1) -> [{Id, Seq} | Acc1] end,
Acc0,
lists:seq(SeqB + 1, SeqA)
)
end
end,
[],
ClockA
).
%% @doc Size of clock: the number of node entries it tracks.
-spec size(v()) -> non_neg_integer().
size(Clock) ->
    maps:size(Clock).
-ifdef(TEST).
is_inflation_test() ->
Bottom = #{},
VClockA = #{a => 4, b => 1},
VClockB = #{a => 6, c => 3},
VClockC = #{a => 6, b => 1, c => 3},
?assert(is_inflation(Bottom, VClockA)),
?assert(is_inflation(Bottom, VClockB)),
?assert(is_inflation(Bottom, VClockC)),
?assert(is_inflation(VClockA, VClockA)),
?assertNot(is_inflation(VClockA, VClockB)),
?assertNot(is_inflation(VClockB, VClockA)),
?assert(is_inflation(VClockA, VClockC)),
?assert(is_inflation(VClockB, VClockC)),
?assertNot(is_inflation(VClockC, VClockA)),
?assertNot(is_inflation(VClockC, VClockB)).
can_deliver_test() ->
VClockA = #{b => 1},
VClockB = #{a => 6},
VClockC = #{a => 4, c => 4},
Local = #{a => 5, c => 3},
?assert(can_deliver({b, 1}, VClockA, Local)),
?assert(can_deliver({a, 6}, VClockB, Local)),
?assertNot(can_deliver({c, 5}, VClockB, Local)),
?assert(can_deliver({c, 4}, VClockC, Local)).
union_test() ->
Bottom = #{},
VClockA = #{a => 4, b => 1},
VClockB = #{a => 6, c => 3},
Expected = #{a => 6, b => 1, c => 3},
?assertEqual(VClockA, union(Bottom, VClockA)),
?assertEqual(VClockA, union(VClockA, Bottom)),
?assertEqual(Expected, union(VClockA, VClockB)),
?assertEqual(Expected, union(VClockB, VClockA)),
ok.
intersection_test() ->
Bottom = #{},
VClockA = #{a => 4, b => 1},
VClockB = #{a => 6, c => 3},
Expected = #{a => 4},
?assertEqual(Bottom, intersection(Bottom, VClockA)),
?assertEqual(Bottom, intersection(VClockA, Bottom)),
?assertEqual(Expected, intersection(VClockA, VClockB)),
?assertEqual(Expected, intersection(VClockB, VClockA)),
ok.
subtract_test() ->
Bottom = #{},
VClockA = #{a => 4, b => 1},
VClockB = #{a => 6, c => 3},
?assertEqual([], subtract(Bottom, VClockA)),
?assertEqual([{a, 1}, {a, 2}, {a, 3}, {a, 4}, {b, 1}], lists:sort(subtract(VClockA, Bottom))),
?assertEqual([{b, 1}], subtract(VClockA, VClockB)),
?assertEqual([{a, 5}, {a, 6}, {c, 1}, {c, 2}, {c, 3}], lists:sort(subtract(VClockB, VClockA))),
ok.
-endif. | src/vclock.erl | 0.695958 | 0.400105 | vclock.erl | starcoder |
%%% @doc Prioritize a list of IPs for a specific key in increasing order
%%% for their weight. The original algorithm in
%%% http://www.eecs.umich.edu/techreports/cse/96/CSE-TR-316-96.pdf goes for the
%%% highest weight available, but describes that no loss of generality will happen
%%% going with LRW or HRW (see page 13 of the technical report). This module
%%% implements LRW specifically.
%%%
%%% Do note that this implements the algorithm as mentioned in the paper, and as
%%% such does not support IPv6 at this time.
%%% @end
-module(lrw).
-export([all/2, top/3,
all_ip/2, top_ip/3]).
-export([all/3, top/4]).
-define(MOD, 2147483648). % 1 bsl 31
-type hashfun() :: fun((Key::term(), Node::term()) -> number()).
-export_type([hashfun/0]).
%% @doc Returns the given set of nodes sorted in increasing order of
%% their weight for a given key. The weights aren't included in the returned
%% results.
-spec all(Key :: term(), [Node, ...]) -> [Node, ...] when
Node :: term().
all(Key, Nodes) ->
    % phash2/2 with range 2^32 yields a uniform 32-bit weight for each
    % {Key, Node} pair.
    Mod = 1 bsl 32,
    all(Key, Nodes, fun(K, Node) -> erlang:phash2({K,Node}, Mod) end).

%% @doc Like `all/2' but for IPs specifically; with an optimized hash.
-spec all_ip(Key :: term(), [NodeIP, ...]) -> [NodeIP, ...] when
    NodeIP :: inet:ip4_address().
all_ip(Key, NodeIPs) ->
    % Uses the paper's weighted-random function over the numeric form
    % of the IPv4 address.
    all(Key, NodeIPs, fun(K, NodeIP) -> wrand2(K, to_num_ip(NodeIP)) end).
%% @doc Returns the given set of nodes sorted in increasing order of their
%% weight for a given key. The weights aren't included in the returned results.
%% The third argument must be a function that accepts and returns arbitrary
%% hashes.
%% @doc Decorate every node with its weight for `Key', order by increasing
%% weight (LRW), then strip the weights. The hash function receives the key
%% and the node and must return a number.
-spec all(Key :: term(), [Node, ...], hashfun()) -> [Node, ...] when
    Node :: term().
all(Key, Nodes, Hash) ->
    Decorated = lists:sort([{Hash(Key, Node), Node} || Node <- Nodes]),
    [Node || {_Weight, Node} <- Decorated].
%% @doc Only keep the `N' top entries, compared to `all/2'. Note that
%% this call is unoptimized and just picks a sublist of the `all/2' algorithm.
-spec top(Key :: term(), [Node, ...], Len :: pos_integer()) -> [Node, ...] when
Node :: term().
top(Key, Nodes, Len) ->
lists:sublist(all(Key, Nodes), 1, Len).
%% @doc Like `top/3' but optimized with a special hash for IPs.
-spec top_ip(Key :: term(), [NodeIP, ...], Len :: pos_integer()) -> [NodeIP, ...] when
NodeIP :: inet:ip4_address().
top_ip(Key, NodeIPs, Len) ->
lists:sublist(all_ip(Key, NodeIPs), 1, Len).
%% @doc Only keep the `N' top entries, compared to `all/3'. Note that
%% this call is unoptimized and just picks a sublist of the `all/3' algorithm.
%% The fourth argument must be a function that accepts and returns arbitrary
%% hashes.
-spec top(Key :: term(), [Node, ...], Len :: pos_integer(), hashfun()) ->
[Node, ...] when
Node :: term().
top(Key, Nodes, Len, Hash) ->
lists:sublist(all(Key, Nodes, Hash), 1, Len).
%% @private Convert an IPv4 inet address of the form `{A,B,C,D}' to a
%% 32 bit integer to be used in the hashing function.
%% @private Convert an IPv4 inet address of the form `{A,B,C,D}' to a
%% 32-bit integer (big-endian byte order) for use in the hashing function.
%% Shift-and-or arithmetic replaces the original binary round-trip; the
%% result is the same 32-bit value.
-spec to_num_ip(inet:ip4_address()) -> non_neg_integer().
to_num_ip({A, B, C, D}) ->
    (A bsl 24) bor (B bsl 16) bor (C bsl 8) bor D.
%% @private Hashing function as recommended on p.21 of
%% http://www.eecs.umich.edu/techreports/cse/96/CSE-TR-316-96.pdf
-spec wrand2(term(), non_neg_integer()) -> non_neg_integer().
wrand2(K, NodeIP) ->
Digest = erlang:phash2(K, ?MOD), % should lead to a 2^31 digest
(1103515245 * ((1103515245 * Digest + 12345) bxor NodeIP)+12345) rem ?MOD. | src/lrw.erl | 0.630571 | 0.689621 | lrw.erl | starcoder |
%% Copyright (c) 2019-2021, <NAME> <<EMAIL>>. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(binbo_bb).
-export([bb_not/2, bb_or/1]).
-export([to_index/1, to_file/1, to_index_list/1, to_notation/1]).
-export([rank_bb/1, file_bb/1]).
-export([edges_bb/1, empty_bb/0]).
-export([
shift/2,
shift_north/1,
shift_south/1,
shift_east/1,
shift_west/1,
shift_north_east/1,
shift_north_west/1,
shift_south_east/1,
shift_south_west/1
]).
-export([
pawn_attacks_bb/2,
knight_attacks_bb/1,
bishop_attacks_bb/2,
rook_attacks_bb/2,
king_attacks_bb/1,
pawn_pushes_bb/3
]).
-export([enpassant_bb/3]).
%%%------------------------------------------------------------------------------
%%% Includes
%%%------------------------------------------------------------------------------
-include("binbo_board.hrl").
%%%------------------------------------------------------------------------------
%%% Types
%%%------------------------------------------------------------------------------
-type bb() :: ?EMPTY_BB .. ?ALL_SQUARES_BB. % any bitboard including empty
-type piece() :: binbo_board:piece().
-type sq_idx() :: binbo_board:square_index().
-type empty_bb() :: ?EMPTY_BB. % empty bitboard
-type color() :: binbo_board:color().
-type attacks_bb() :: ?A1_BB .. ?ALL_SQUARES_BB.
-type sq_bb() :: ?A1_BB .. ?H8_BB. % square bitboard
-type bishop_direction() :: ?NORTH_WEST | ?NORTH_EAST | ?SOUTH_WEST | ?SOUTH_EAST.
-type rook_direction() :: ?NORTH | ?SOUTH | ?EAST | ?WEST.
-type sliding_direction() :: bishop_direction() | rook_direction().
-type enpa_bb() :: sq_bb() | empty_bb().
-export_type([bb/0, sq_bb/0, empty_bb/0, enpa_bb/0]).
-compile({inline, [bb_not/2]}).
-compile({inline, [to_index/1]}).
-compile({inline, [shift/2]}).
%%%------------------------------------------------------------------------------
%%% API
%%%------------------------------------------------------------------------------
%% bb_not/2
-spec bb_not(bb(), bb()) -> bb().
bb_not(BB1, BB2) ->
(BB1 band (bnot BB2)).
%% bb_or/1
-spec bb_or([bb()]) -> bb().
bb_or(List) ->
bb_or(List, ?EMPTY_BB).
%% rank_bb/1
-spec rank_bb(sq_idx()) -> bb().
rank_bb(Idx) ->
Rank = binbo_board:rank_of_index(Idx),
?RANK_1_BB bsl (8 * Rank).
%% file_bb/1
-spec file_bb(sq_idx()) -> bb().
file_bb(Idx) ->
File = binbo_board:file_of_index(Idx),
?FILE_A_BB bsl File.
%% to_index/1
%% Convert a single-bit square bitboard to its 0-based square index.
-spec to_index(sq_bb()) -> sq_idx().
to_index(SqBB) ->
    % The bitboard of square is a single populated bitboard, a power of two value.
    % Therefore, the index of square is equal to base-2 logarithm of the bitboard.
    % Powers of two up to 2^63 are exactly representable as IEEE doubles,
    % so log2 + trunc is exact for all 64 board squares.
    erlang:trunc(math:log2(SqBB)).
%% to_file/1
-spec to_file(sq_bb()) -> binbo_board:file().
to_file(SqBB) ->
Idx = to_index(SqBB),
binbo_board:file_of_index(Idx).
%% to_index_list/1
-spec to_index_list(bb()) -> [sq_idx()].
to_index_list(BB) ->
to_index_list(BB, []).
%% to_notation/1
-spec to_notation(sq_bb()) -> binbo_board:square_notation().
to_notation(SqBB) ->
Idx = to_index(SqBB),
binbo_board:index_to_notation(Idx).
%% edges_bb/1
-spec edges_bb(sq_idx()) -> bb().
edges_bb(Idx) ->
bb_or([
bb_not(?RANK_1_BB bor ?RANK_8_BB, rank_bb(Idx)),
bb_not(?FILE_A_BB bor ?FILE_H_BB, file_bb(Idx))
]).
%% empty_bb/0
-spec empty_bb() -> empty_bb().
empty_bb() ->
?EMPTY_BB.
%% shift/1
-spec shift(bb(), integer()) -> bb().
shift(BB, Bits) ->
(BB bsl Bits).
%% shift_north/1
-spec shift_north(bb()) -> bb().
shift_north(BB) ->
shift(BB, ?NORTH).
%% shift_south/1
-spec shift_south(bb()) -> bb().
shift_south(BB) ->
shift(BB, ?SOUTH).
%% shift_east/1
-spec shift_east(bb()) -> bb().
shift_east(BB) ->
shift(BB band ?NOT_FILE_H_BB, ?EAST).
%% shift_west/1
-spec shift_west(bb()) -> bb().
shift_west(BB) ->
shift(BB band ?NOT_FILE_A_BB, ?WEST).
%% shift_north_east/1
-spec shift_north_east(bb()) -> bb().
shift_north_east(BB) ->
shift(BB band ?NOT_FILE_H_BB, ?NORTH_EAST).
%% shift_north_west/1
-spec shift_north_west(bb()) -> bb().
shift_north_west(BB) ->
shift(BB band ?NOT_FILE_A_BB, ?NORTH_WEST).
%% shift_south_east/1
-spec shift_south_east(bb()) -> bb().
shift_south_east(BB) ->
shift(BB band ?NOT_FILE_H_BB, ?SOUTH_EAST).
%% shift_south_west/1
-spec shift_south_west(bb()) -> bb().
shift_south_west(BB) ->
shift(BB band ?NOT_FILE_A_BB, ?SOUTH_WEST).
%% enpassant_bb/3
-spec enpassant_bb(piece(), sq_bb(), sq_bb()) -> enpa_bb().
enpassant_bb(?WHITE_PAWN, FromBB, ToBB) when ?IS_AND(FromBB, ?RANK_2_BB) andalso ?IS_AND(ToBB, ?RANK_4_BB) ->
shift_north(FromBB);
enpassant_bb(?BLACK_PAWN, FromBB, ToBB) when ?IS_AND(FromBB, ?RANK_7_BB) andalso ?IS_AND(ToBB, ?RANK_5_BB) ->
shift_south(FromBB);
enpassant_bb(_, _, _) ->
?EMPTY_BB.
%%%------------------------------------------------------------------------------
%%% Attack bitboards
%%%------------------------------------------------------------------------------
%% king_attacks_bb/1
%% https://www.chessprogramming.org/King_Pattern
%% https://www.chessprogramming.org/General_Setwise_Operations
%%--------------------
%% . . . . . . . .
%% . . . . . . . .
%% . . . . . . . .
%% . . . . . . . .
%% . . . . . . . .
%% . . . . . 1 2 3
%% . . . . . 8 K 4
%% . . . . . 7 6 5
%%--------------------
-spec king_attacks_bb(sq_bb()) -> attacks_bb().
king_attacks_bb(BB) ->
bb_or([
shift_north_west(BB), % 1
shift_north(BB), % 2
shift_north_east(BB), % 3
shift_east(BB), % 4
shift_south_east(BB), % 5
shift_south(BB), % 6
shift_south_west(BB), % 7
shift_west(BB) % 8
]) band ?ALL_SQUARES_BB.
%% knight_attacks_bb/1
%% https://www.chessprogramming.org/Knight_Pattern
%%--------------------
%% . . . . . . . .
%% . . . . . . . .
%% . . 2 . 3 . . .
%% . 1 . . . 4 . .
%% . . . N . . . .
%% . 8 . . . 5 . .
%% . . 7 . 6 . . .
%% . . . . . . . .
%%--------------------
-spec knight_attacks_bb(sq_bb()) -> attacks_bb().
knight_attacks_bb(BB) ->
NotFileA = ?NOT_FILE_A_BB,
NotFileH = ?NOT_FILE_H_BB,
NotFileAB = ?NOT_FILE_AB_BB,
NotFileGH = ?NOT_FILE_GH_BB,
bb_or([
(BB band NotFileAB) bsl 6, % 1
(BB band NotFileA) bsl 15, % 2
(BB band NotFileH) bsl 17, % 3
(BB band NotFileGH) bsl 10, % 4
(BB band NotFileGH) bsr 6, % 5
(BB band NotFileH) bsr 15, % 6
(BB band NotFileA) bsr 17, % 7
(BB band NotFileAB) bsr 10 % 8
]) band ?ALL_SQUARES_BB.
%% pawn_attacks_bb/2
-spec pawn_attacks_bb(color(), sq_bb()) -> bb().
pawn_attacks_bb(?WHITE, BB) ->
BB2 = shift_north_west(BB) bor shift_north_east(BB),
BB2 band ?ALL_SQUARES_BB;
pawn_attacks_bb(?BLACK, BB) ->
shift_south_west(BB) bor shift_south_east(BB).
%% pawn_pushes_bb/3
-spec pawn_pushes_bb(color(), sq_bb(), bb()) -> bb().
pawn_pushes_bb(?WHITE, BB, EmptySquaresBB) ->
Push1BB = shift_north(BB band ?NOT_RANK_1_BB) band EmptySquaresBB,
Push2BB = shift_north(Push1BB band ?RANK_3_BB) band EmptySquaresBB,
(Push1BB bor Push2BB) band ?ALL_SQUARES_BB;
pawn_pushes_bb(?BLACK, BB, EmptySquaresBB) ->
Push1BB = shift_south(BB band ?NOT_RANK_8_BB) band EmptySquaresBB,
Push2BB = shift_south(Push1BB band ?RANK_6_BB) band EmptySquaresBB,
Push1BB bor Push2BB.
%% bishop_attacks_bb/2
-spec bishop_attacks_bb(sq_idx(), bb()) -> bb().
bishop_attacks_bb(FromIdx, OccupiedBB) ->
sliding_attacks_bb(FromIdx, ?BISHOP_DIRECTIONS, OccupiedBB).
%% rook_attacks_bb/2
-spec rook_attacks_bb(sq_idx(), bb()) -> bb().
rook_attacks_bb(FromIdx, OccupiedBB) ->
sliding_attacks_bb(FromIdx, ?ROOK_DIRECTIONS, OccupiedBB).
%%%------------------------------------------------------------------------------
%%% Internal functions
%%%------------------------------------------------------------------------------
%% bb_or/2
%% bb_or/2
%% Fold a list of bitboards into a single bitboard via bitwise OR, starting
%% from AccBB. lists:foldl/3 replaces the original hand-written recursion
%% (same traversal order; bor is associative and commutative anyway).
-spec bb_or([bb()], bb()) -> bb().
bb_or(List, AccBB) ->
    lists:foldl(fun(BB, Acc) -> Acc bor BB end, AccBB, List).
%% to_index_list/2
%% Accumulate the index of every set bit in BB (classic bit-scan loop).
-spec to_index_list(bb(), [sq_idx()]) -> [sq_idx()].
to_index_list(0, List) ->
    % We don't care about the order of indices.
    % So, it's not necessary to reverse the list.
    % lists:reverse(List);
    List;
to_index_list(BB, List) ->
    % BB band (-BB) isolates the least significant set bit (two's-complement
    % trick), yielding a single-bit board suitable for to_index/1.
    Idx = to_index(BB band (-BB)), % the index of the least significant bit (LSB)
    % BB band (BB - 1) clears that same bit; recurse on the remainder.
    to_index_list(BB band (BB - 1), [Idx | List]). % reset LSB and continue
%% sliding_attacks_bb/3
-spec sliding_attacks_bb(sq_idx(), [sliding_direction()], bb()) -> bb().
sliding_attacks_bb(Idx, Directions, OccupiedBB) ->
sliding_attacks_bb(Idx, Directions, OccupiedBB, Idx, ?EMPTY_BB).
%% sliding_attacks_bb/4
-spec sliding_attacks_bb(sq_idx(), [sliding_direction()], bb(), sq_idx(), bb()) -> bb().
sliding_attacks_bb(_Idx0, [], _OccupiedBB, _Idx, AccBB) ->
AccBB;
sliding_attacks_bb(Idx0, [Bits|Tail] = Directions, OccupiedBB, Idx, AccBB) ->
Idx2 = Idx + Bits,
IsOk = ?IS_VALID_INDEX(Idx2) andalso (binbo_board:sq_distance(Idx, Idx2) =:= 1),
case IsOk of
true ->
SqBB = ?SQUARE_BB(Idx2),
AccBB2 = AccBB bor SqBB,
case ?IS_AND(SqBB, OccupiedBB) of
true -> sliding_attacks_bb(Idx0, Tail, OccupiedBB, Idx0, AccBB2);
false -> sliding_attacks_bb(Idx0, Directions, OccupiedBB, Idx2, AccBB2)
end;
false ->
sliding_attacks_bb(Idx0, Tail, OccupiedBB, Idx0, AccBB)
end. | src/binbo_bb.erl | 0.607547 | 0.401394 | binbo_bb.erl | starcoder |
-module(codewars).
%% API exports
-compile(export_all).
% -export([row_sum_odd_numbers/1]).
%%====================================================================
%% API functions
%%====================================================================
% Given the triangle of consecutive odd numbers:
% 1
% 3 5
% 7 9 11
% 13 15 17 19
% 21 23 25 27 29
% ...
% Calculate the sum of the numbers in the nth row of this triangle (starting at index 1) e.g.: (Input --> Output)
% 1 --> 1
% 2 --> 3 + 5 = 8
% Sum of the numbers in the Nth row of the odd-number triangle.
% Row N holds the N consecutive odd numbers from N*(N-1)+1 up to N*(N+1)-1,
% whose sum is exactly N^3 -- so the answer is computed in O(1) instead of
% materialising and filtering two lists as the original did. Results are
% identical for every N >= 1.
row_sum_odd_numbers(N) -> N * N * N.
row_sum_odd_numbers_2(N) -> N * N * N.
% Timmy & Sarah think they are in love, but around where they live, they will only know once they pick a flower each. If one of the flowers has an even number of petals and the other has an odd number of petals it means they are in love.
% Write a function that will take the number of petals of each flower and return true if they are in love and false if they aren't.
% Timmy & Sarah are in love exactly when one flower has an even number of
% petals and the other an odd number -- i.e. when the petal counts have
% different parity, which is when their sum is odd. A single arithmetic
% clause replaces the original three guarded clauses; the truth table is
% identical for all integers.
lovefunc(Flower1, Flower2) ->
    (Flower1 + Flower2) rem 2 =/= 0.
% Write a program that finds the summation of every number from 1 to num. The number will always be a positive integer greater than 0.
% For example:
% summation(2) -> 3
% 1 + 2
% summation(8) -> 36
% 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8
% Summation of every integer from 1 to N, for N >= 0.
% Fixes two defects of the original body recursion: it used O(N) time and
% stack, and it looped forever for negative input (no clause ever matched
% below 0). The Gauss closed form is O(1); the guard turns a negative or
% non-integer argument into an immediate function_clause error.
summation(N) when is_integer(N), N >= 0 ->
    N * (N + 1) div 2.
% In this Kata your task will be to return the count of pairs that have consecutive numbers as follows:
% pairs([1,2,5,8,-4,-3,7,6,5]) = 3
% The pairs are selected as follows [(1,2),(5,8),(-4,-3),(7,6),5]
% --the first pair is (1,2) and the numbers in the pair are consecutive; Count = 1
% --the second pair is (5,8) and are not consecutive
% --the third pair is (-4,-3), consecutive. Count = 2
% --the fourth pair is (7,6), also consecutive. Count = 3.
% --the last digit has no pair, so we ignore.
% More examples in the test cases.
% -module(kata).
% -export([pairs/1, pairs/2]).
% Count the non-overlapping leading pairs whose elements are consecutive
% (differ by exactly 1 in either direction); a trailing unpaired element is
% ignored. E.g. pairs([1,2,5,8,-4,-3,7,6,5]) =:= 3.
%
% Improvements over the original: the O(length(L)) guard `length(L) =< 1'
% (evaluated on every recursive step) is replaced by pattern matching, and
% the redundant `Y =/= X+1' guard on the fall-through clause is dropped --
% that clause only receives pairs the first clause rejected.
pairs(List) ->
    pairs(List, 0).

pairs([X, Y | Tail], Acc) when abs(Y - X) =:= 1 ->
    pairs(Tail, Acc + 1);
pairs([_, _ | Tail], Acc) ->
    pairs(Tail, Acc);
pairs(_, Acc) ->
    % Empty list or single leftover element: nothing more to pair.
    Acc.
% Definition
% Strong number is the number that the sum of the factorial of its digits is equal to number itself.
% For example: 145, since
% 1! + 4! + 5! = 1 + 24 + 120 = 145
% So, 145 is a Strong number.
% A "Strong number" equals the sum of the factorials of its decimal digits
% (e.g. 145 = 1! + 4! + 5!). Returns the kata's exact result strings.
% The digit walk is expressed as a comprehension over the decimal
% representation instead of the original accumulator recursion.
strong(N) ->
    DigitFactorialSum = lists:sum([factorial(D - $0) || D <- integer_to_list(N)]),
    case DigitFactorialSum =:= N of
        true -> "STRONG!!!!";
        false -> "Not Strong !!"
    end.

% Standard factorial; defined for non-negative integers only.
factorial(0) -> 1;
factorial(N) when N > 0 ->
    N * factorial(N - 1).
% The first century spans from the year 1 up to and including the year 100, The second - from the year 101 up to and including the year 200, etc.
% Task :
% Given a year, return the century it is in.
% Input , Output Examples :
% 1705 --> 18
% 1900 --> 19
% 1601 --> 17
% 2000 --> 20
% Given a year (>= 1), return the century it is in: years 1..100 -> 1,
% 101..200 -> 2, 1901..2000 -> 20, etc.
%
% The original built a zero-padded string, sliced off the last two digits,
% and used `--` to recover the remainder -- fragile and hard to follow,
% though arithmetically equivalent for positive years. Plain integer
% arithmetic produces the same result directly.
century(Year) -> ((Year - 1) div 100) + 1.

% Kept for interface compatibility; identical result to century/1.
century_better(Year) ->
    ((Year-1) div 100)+1.
% Task
% Given an array/list [] of integers , Find the product of the k maximal numbers.
% Notes
% Array/list size is at least 3 .
% Array/list's numbers Will be mixture of positives , negatives and zeros
% Repetition of numbers in the array/list could occur.
% Product of the S largest numbers in list A (per the kata: sort
% descending, take the first S, multiply).
max_product(A, S) ->
    Descending = lists:reverse(lists:sort(A)),
    product(lists:sublist(Descending, S)).

% Product of all elements of a list (1 for the empty list).
product(List) ->
    product(List, 1).

% Fold-based product; Acc is the running result, seeded by product/1.
product(List, Acc) ->
    lists:foldl(fun(X, P) -> P * X end, Acc, List).
% Clock shows h hours, m minutes and s seconds after midnight.
% Your task is to write a function which returns the time since midnight in milliseconds.
% Example:
% h = 0
% m = 1
% s = 1
% result = 61000
% Input constraints:
% 0 <= h <= 23
% 0 <= m <= 59
% 0 <= s <= 59
past(H, M, S) -> timer:hours(H) + timer:minutes(M) + timer:seconds(S).
% You are going to be given a word. Your job is to return the middle character of the word. If the word's length is odd, return the middle character. If the word's length is even, return the middle 2 characters.
% #Examples:
% Kata.getMiddle("test") should return "es"
% Kata.getMiddle("testing") should return "t"
% Kata.getMiddle("middle") should return "dd"
% Kata.getMiddle("A") should return "A"
% Return the middle character of a word; for even-length words, the middle
% two characters. E.g. "test" -> "es", "testing" -> "t", "A" -> "A".
middle(String) ->
    Len = length(String),
    Half = Len div 2,
    case Len rem 2 of
        0 -> lists:sublist(String, Half, 2);
        1 -> lists:sublist(String, Half + 1, 1)
    end.
%%====================================================================
%% Internal functions
%%==================================================================== | codewars/src/codewars.erl | 0.545165 | 0.765769 | codewars.erl | starcoder |
% MIT License
% Copyright (c) 2020 <NAME>
% Permission is hereby granted, free of charge, to any person obtaining a copy
% of this software and associated documentation files (the "Software"), to deal
% in the Software without restriction, including without limitation the rights
% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
% copies of the Software, and to permit persons to whom the Software is
% furnished to do so, subject to the following conditions:
% The above copyright notice and this permission notice shall be included in all
% copies or substantial portions of the Software.
% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
% SOFTWARE.
-module(lesson4).
-export([is_even/1, is_odd/1, is_even2/1, is_even3/1, filter_even/1, filter_odd/1, filter/2,
filter_even_simple/1, filter_odd_simple/1, any/2, all/2, zip/2, is_sorted/1]).
is_even(0) -> true;
is_even(N) -> is_odd(N - 1).
is_odd(0) -> false;
is_odd(N) -> is_even(N - 1).
is_even2(N) -> N rem 2 =:= 0.
is_even3(N) -> 1 band N =:= 0.
filter_even([]) -> [];
filter_even([H|T]) ->
case is_even(H) of
true -> [H|filter_even(T)];
false -> filter_even(T)
end.
filter_odd([]) -> [];
filter_odd([H|T]) ->
case is_odd(H) of
true -> [H|filter_odd(T)];
false -> filter_odd(T)
end.
% Keep the elements of the list for which predicate P returns true,
% preserving order. A filtered comprehension replaces the explicit
% recursion; same elements, same order.
filter(P, L) -> [X || X <- L, P(X)].
filter_even_simple(L) -> filter(fun is_even/1, L).
filter_odd_simple(L) -> filter(fun is_odd/1, L).
% true if P holds for at least one element; stops at the first success
% (same short-circuiting as the original orelse chain).
any(_, []) -> false;
any(P, [H | T]) ->
    case P(H) of
        true -> true;
        false -> any(P, T)
    end.

% true if P holds for every element; stops at the first failure
% (same short-circuiting as the original andalso chain).
all(_, []) -> true;
all(P, [H | T]) ->
    case P(H) of
        false -> false;
        true -> all(P, T)
    end.
% differs from lists:zip!
% lists:zip/2 raises badarg on lists of unequal length; this version
% silently truncates to the shorter list (the catch-all second clause
% matches as soon as either list is exhausted).
zip([HX|TX], [HY|TY]) -> [{HX, HY}|zip(TX, TY)];
zip(_, _) -> [].
gte({X, Y}) -> X =< Y.
lte({X, Y}) -> X >= Y.
is_sorted([]) -> true;
is_sorted([_|T] = L) ->
Pairs = zip(L, T),
all(fun gte/1, Pairs) orelse
all(fun lte/1, Pairs). | erlang/src/lesson4.erl | 0.582847 | 0.426919 | lesson4.erl | starcoder |
%% Puzzle:
%%
%% First half: Find most common binary digit in each position. Binary
%% number composed of most common is "gamma", inverse is "epsilon.
%%
%% https://adventofcode.com/2021/day/3
%%
%% explanation:
%% https://blog.beerriot.com/2021/12/14/advent-of-code-day-3/
-module(puzzle03).
-export([
solveA/0,
solveA/1,
count_digits/1,
make_gamma/2,
solveB/0,
solveB/1
]).
solveA() ->
{ok, Data} = file:read_file("puzzles/puzzle03-input.txt"),
Numbers = string:split(Data, <<"\n">>, all),
solveA(Numbers).
% Compute {Gamma, Epsilon} from the list of binary-string readings:
% Gamma's Nth bit is the most common digit in position N; Epsilon is its
% bitwise complement within the same width.
solveA(Numbers) ->
    %% Total is theoretically length(Numbers), but that relies on file
    %% split doing the right thing with ending newlines, so count
    %% explicitly instead.
    {Total, Counts} = count_digits(Numbers),
    Gamma = make_gamma(Total, Counts),
    %% We can't just `bnot` because that will flip bits above our
    %% highest
    Epsilon = Gamma bxor (trunc(math:pow(2, length(Counts))) - 1),
    {Gamma, Epsilon}.
{Gamma, Epsilon}.
% Count, for every bit position, how many of the binary-string readings
% have a '1' there. Returns {NumberOfReadings, PerPositionCounts}.
% A trailing empty binary (from splitting a file that ends in a newline)
% terminates the walk without being counted.
count_digits([First | _] = Numbers) ->
    Zeros = lists:duplicate(byte_size(First), 0),
    count_digits(Numbers, 0, Zeros).

count_digits([<<>>], Total, Counts) ->
    % trailing newline
    {Total, Counts};
count_digits([], Total, Counts) ->
    % no trailing newline
    {Total, Counts};
count_digits([Number | Rest], Total, Counts) ->
    Digits = [C - $0 || <<C>> <= Number],
    NewCounts = lists:zipwith(fun(A, B) -> A + B end, Counts, Digits),
    count_digits(Rest, Total + 1, NewCounts).
% Build the gamma value: one bit per position, set when the ones-count for
% that position reaches at least Total div 2 (ties resolve to 1, matching
% the original's `C div Threshold` semantics).
%
% Bug fix: the original wrote `C div Threshold` into a 1-bit segment.
% When (nearly) every reading shares a bit -- C >= 2 * Threshold, e.g.
% C =:= Total for even Total -- the quotient is 2, whose low bit is 0, so
% the bit-syntax segment silently dropped the bit. Clamping the quotient
% to 1 preserves all other results while fixing that case.
make_gamma(Total, Counts) ->
    Threshold = Total div 2,
    Width = length(Counts),
    <<Int:Width/integer>> =
        << <<(min(C div Threshold, 1)):1>> || C <- Counts >>,
    Int.
solveB() ->
{ok, Data} = file:read_file("puzzles/puzzle03-input.txt"),
Numbers = string:split(Data, <<"\n">>, all),
solveB([ N || N <- Numbers, N =/= <<>>]).
solveB(Numbers) ->
{binary_to_integer(solveB(Numbers, {most, 1}, 0), 2),
binary_to_integer(solveB(Numbers, {least, 0}, 0), 2)}.
solveB([], _, _) ->
[];
solveB([Answer], _, _) ->
Answer;
solveB(Numbers, Preference, Offset) ->
{Zeros, Ones} = lists:partition(
fun(<<_:Offset/binary, Bit, _/binary>>) ->
Bit == $0
end,
Numbers),
Selection = case {length(Zeros) > length(Ones), Preference} of
{true, {most, _}} ->
Zeros;
{true, {least, _}} ->
Ones;
{false, {_, 1}} ->
Ones;
{false, {_, 0}} ->
Zeros
end,
solveB(Selection, Preference, Offset+1). | src/puzzle03.erl | 0.553505 | 0.786049 | puzzle03.erl | starcoder |
%% vim: set ai et sw=4 sts=4:
%% See LICENSE for licensing information.
-module(yaml_double).
-export([ scalar/3
]).
-include("yaml_grapheme.hrl").
%=======================================================================
-type style() :: block | flow.
%=======================================================================
-spec scalar(yaml_event:state(), style(), yaml:props()) ->
{yaml_event:event(), list(), yaml_event:state()}.
%% @doc Entry point: tokenise a double-quoted scalar at the scanner's current
%% position (which must be the opening quote — asserted in first/3). Style
%% selects block or flow context; Props carries the anchor/tag props
%% collected before the scalar.
scalar(Event, Style, Props)
        when (Style =:= block orelse Style =:= flow) andalso
             is_map(Props) ->
    {T, S} = yaml_token:start(Event, double, fun construct/2, Props),
    first(Style, T, S).
%=======================================================================
-spec construct(list(), map()) -> {yaml_event:event(), list()}.
%% @doc Token constructor callback passed to yaml_token:start/4: fold the
%% accumulated part list into the final double-quoted token. Note the parts
%% are folded with fold_by_space/2, whose result keeps the accumulator order
%% (i.e. reversed relative to Ps).
construct(Ps, #{ from := From, thru := Thru, anchor := Anchor, tag := Tag}) ->
    Folds = fold_by_space(Ps, []),
    Token = {double, From, Thru, Anchor, Tag, Folds},
    {Token, []}.
%-----------------------------------------------------------------------
%% Collapse fold markers in a part list: an isolated fold becomes a single
%% space, a run of N consecutive folds becomes N-1 newlines (the run's last
%% fold is dropped), and escape markers disappear entirely. All other parts
%% are copied through. The result is the accumulator as-is, i.e. in reverse
%% order relative to the input.
fold_by_space([{_, _, escape} | Tail], Out) ->
    %% escape markers carry no text of their own
    fold_by_space(Tail, Out);
fold_by_space([{_, _, fold}, {_, _, fold} | _] = Run, Out) ->
    %% two folds in a row: handle the whole run as line folding
    fold_by_line(Run, Out);
fold_by_space([{From, Thru, fold} | Tail], Out) ->
    %% a lone fold is equivalent to one space
    fold_by_space(Tail, [{From, Thru, <<" ">>} | Out]);
fold_by_space([Part | Tail], Out) ->
    fold_by_space(Tail, [Part | Out]);
fold_by_space([], Out) ->
    Out.
%-----------------------------------------------------------------------
%% Inside a run of folds: every fold followed by another fold turns into a
%% newline; the run's final fold is discarded and control returns to
%% fold_by_space/2.
fold_by_line([{From, Thru, fold} | [{_, _, fold} | _] = Tail], Out) ->
    fold_by_line(Tail, [{From, Thru, <<"\n">>} | Out]);
fold_by_line([{_, _, fold} | Tail], Out) ->
    fold_by_space(Tail, Out).
%=======================================================================
%% Consume the opening double quote (the match asserts it is present) and
%% start scanning the scalar's contents just past it; the quote itself is
%% skipped so it never appears in the token text.
first(Style, T, S) ->
    $\" = yaml_scan:grapheme(S),
    Z = yaml_scan:next(S),
    text(Style, yaml_token:skip(T, Z), Z).
%=======================================================================
%% Main scan loop inside the quotes: a break starts line folding, a closing
%% quote finishes the token, a backslash starts an escape, other whitespace
%% switches to text_white/4 (so trailing blanks can be folded away), and any
%% other printable grapheme is consumed as literal text. Anything else falls
%% through as a case_clause crash (let it crash).
text(Style, T, S) ->
    case yaml_scan:grapheme(S) of
        break ->
            fold(Style, fold, T, S, yaml_scan:next(S));
        $\" ->
            text_finish(Style, T, S);
        $\\ ->
            text_escape(Style, T, S, yaml_scan:next(S));
        G when ?IS_WHITE(G) ->
            text_white(Style, T, S, yaml_scan:next(S));
        G when ?IS_PRINTABLE(G) ->
            text(Style, T, yaml_scan:next(S))
    end.
%-----------------------------------------------------------------------
%% Like text/3, but inside a run of whitespace. White is the scanner
%% position where the run started: if a break follows, folding starts from
%% White so the trailing blanks are dropped; if printable text follows, the
%% run turns out to be interior whitespace and scanning resumes normally.
text_white(Style, T, White, S) ->
    case yaml_scan:grapheme(S) of
        break ->
            fold(Style, fold, T, White, yaml_scan:next(S));
        $\" ->
            text_finish(Style, T, S);
        $\\ ->
            text_escape(Style, T, S, yaml_scan:next(S));
        G when ?IS_WHITE(G) ->
            text_white(Style, T, White, yaml_scan:next(S));
        G when ?IS_PRINTABLE(G) ->
            text(Style, T, yaml_scan:next(S))
    end.
%-----------------------------------------------------------------------
%% Handle the grapheme following a backslash. Escape is the scanner position
%% of the backslash itself, used to cut the literal text before it. Hex
%% escapes (\x, \u, \U) hand off to the digit reader; an escaped break folds
%% the line; unrecognised escapes are recorded as errors but scanning
%% continues past them.
text_escape(Style, T, Escape, S) ->
    case escape_to_code_point(yaml_scan:grapheme(S)) of
        end_of_stream ->
            bad_escape(Style, T, Escape, S);
        bad_escape ->
            bad_escape(Style, T, Escape, yaml_scan:next(S));
        break ->
            fold(Style, escape, T, Escape, yaml_scan:next(S));
        {hex, N} ->
            text_escape_hex(Style, T, Escape, N, 0, yaml_scan:next(S));
        CodePoint ->
            text_escape_as(Style, T, Escape, CodePoint, yaml_scan:next(S))
    end.
%-----------------------------------------------------------------------
%% Map the grapheme following a backslash to its replacement code point, or
%% to a control value: {hex, N} means "read N hex digits", break and
%% end_of_stream are passed through for the caller to handle (bad_encoding
%% is treated the same as end of stream), and anything unrecognised is
%% bad_escape.
escape_to_code_point(G) ->
    case G of
        $0 -> 0;                          % NUL
        $a -> $\a;                        % bell
        $b -> $\b;                        % backspace
        $e -> $\e;                        % escape
        $f -> $\f;                        % form feed
        $n -> $\n;                        % line feed
        $r -> $\r;                        % carriage return
        $t -> $\t;                        % tab
        $u -> {hex, 4};                   % \uXXXX
        $v -> $\v;                        % vertical tab
        $x -> {hex, 2};                   % \xXX
        $L -> 16#2028;                    % line separator
        $N -> 16#85;                      % next line (NEL)
        $P -> 16#2029;                    % paragraph separator
        $U -> {hex, 8};                   % \UXXXXXXXX
        $_ -> 16#A0;                      % non-breaking space
        $\s -> $\s;                       % escaped space
        $\t -> $\t;                       % escaped literal tab
        $\" -> $\";                       % escaped quote
        $\\ -> $\\;                       % escaped backslash
        break -> break;
        end_of_stream -> end_of_stream;
        bad_encoding -> end_of_stream;
        _ -> bad_escape
    end.
%-----------------------------------------------------------------------
%% Read N hex digits into Acc (most significant first); when N reaches 0 the
%% accumulated value is emitted as the escaped code point. Any non-hex
%% grapheme aborts the escape: a closing quote (or an unreadable grapheme)
%% is left for the caller, while other printables are consumed as part of
%% the bad escape.
%% NOTE(review): the accumulated value is not range-checked here, so e.g.
%% \UFFFFFFFF reaches text_escape_as/5 as-is — presumably the <<_/utf8>>
%% construction there rejects it; confirm how out-of-range escapes should
%% be reported.
text_escape_hex(Style, T, Escape, 0, CodePoint, S) ->
    text_escape_as(Style, T, Escape, CodePoint, S);
text_escape_hex(Style, T, Escape, N, Acc, S) ->
    case yaml_scan:grapheme(S) of
        G when (G >= $0 andalso G =< $9) ->
            Calc = (Acc * 16) + (G - $0),
            text_escape_hex(Style, T, Escape, N - 1, Calc, yaml_scan:next(S));
        G when (G >= $a andalso G =< $f) ->
            Calc = (Acc * 16) + (G - $a + 10),
            text_escape_hex(Style, T, Escape, N - 1, Calc, yaml_scan:next(S));
        G when (G >= $A andalso G =< $F) ->
            Calc = (Acc * 16) + (G - $A + 10),
            text_escape_hex(Style, T, Escape, N - 1, Calc, yaml_scan:next(S));
        $\" ->
            bad_escape(Style, T, Escape, S);
        G when ?IS_PRINTABLE(G) ->
            bad_escape(Style, T, Escape, yaml_scan:next(S));
        _ ->
            bad_escape(Style, T, Escape, S)
    end.
%-----------------------------------------------------------------------
%% Commit an escape: keep the literal text that preceded the backslash, then
%% keep the decoded code point (UTF-8 encoded) as a replacement part that
%% spans the escape sequence, and resume plain text scanning.
%% NOTE(review): <<CodePoint/utf8>> raises badarg for surrogates or values
%% above 16#10FFFF — confirm callers cannot produce those (see the note on
%% text_escape_hex/6).
text_escape_as(Style, T, Escape, CodePoint, S) ->
    Before = yaml_token:keep(T, Escape),
    Escaped = yaml_token:keep(Before, <<CodePoint/utf8>>, S),
    text(Style, Escaped, S).
%-----------------------------------------------------------------------
%% Record an invalid escape sequence: keep the text before the backslash,
%% attach a bad_escape error spanning the sequence, keep the bad range, and
%% resume scanning — the token is still produced, with the error attached.
bad_escape(Style, T, Escape, S) ->
    Before = yaml_token:keep(T, Escape),
    Errored = yaml_token:error_range(Before, bad_escape, Escape, S),
    Bad = yaml_token:keep(Errored, S),
    text(Style, Bad, S).
%-----------------------------------------------------------------------
%% Closing quote seen: keep the final text run, skip past the quote so it is
%% not part of the scalar's content, and finish the token.
text_finish(_Style, T, S) ->
    Z = yaml_scan:next(S),
    Kept = yaml_token:keep(T, S),
    Skip = yaml_token:skip(Kept, Z),
    yaml_token:finish(Skip, Z).
%=======================================================================
%% A line break was seen inside the scalar. Keep everything up to White (the
%% start of any trailing blanks, which folding drops) as a part tagged Fold
%% (fold | escape), and capture the pre-break state in Abort so an
%% unterminated scalar can still be finished at the break position.
fold(Style, Fold, T, White, S) ->
    Abort = {T, White},
    fold_break(Style, Abort, Fold, yaml_token:keep(T, White), White, S).
%-----------------------------------------------------------------------
%% Give up on continuing past the break: finish the token at the position
%% captured before the break.
fold_abort({T, White}) ->
    yaml_token:finish(T, White).
%-----------------------------------------------------------------------
%% After consuming a break, check whether the scanner hit the end of its
%% input; if so the scalar is unterminated and we finish at the saved Abort
%% position, otherwise scan the continuation line's indentation.
fold_break(Style, Abort, Fold, T, White, S) ->
    case yaml_scan:end_of(S) of
        {_, _, _} ->
            fold_abort(Abort);
        false ->
            fold_indent(Style, Abort, Fold, T, White, S)
    end.
%-----------------------------------------------------------------------
%% Skip leading spaces on the continuation line. An empty line (another
%% break) records the pending fold and repeats; a tab ends indent counting
%% and switches to trailing-whitespace scanning; the first printable
%% grapheme continues the scalar only when it sits past the required indent
%% (yaml_token:is_indented/2), otherwise the token is finished at the saved
%% Abort position.
fold_indent(Style, Abort, Fold, T, White, S) ->
    case yaml_scan:grapheme(S) of
        end_of_stream ->
            fold_abort(Abort);
        break ->
            Kept = yaml_token:keep(T, Fold, S),
            fold_break(Style, Abort, fold, Kept, S, yaml_scan:next(S));
        $\s ->
            fold_indent(Style, Abort, Fold, T, White, yaml_scan:next(S));
        $\t ->
            Indented = yaml_token:is_indented(T, S),
            fold_white(Style, Abort, Fold, T, White, Indented, yaml_scan:next(S));
        G when ?IS_PRINTABLE(G) ->
            case yaml_token:is_indented(T, S) of
                true ->
                    Kept = yaml_token:keep(T, Fold, S),
                    text(Style, Kept, S);
                false ->
                    fold_abort(Abort)
            end
    end.
%-----------------------------------------------------------------------
%% Scan whitespace after the indent of a folded continuation line. A break
%% records the pending fold and restarts break handling; further whitespace
%% is skipped; the first printable grapheme either resumes text scanning
%% (when the line was Indented past the block indent) or finishes the token
%% at the saved Abort position.
%% (Fix: stray dataset metadata appended after the final `end.` has been
%% removed; the code itself is unchanged.)
fold_white(Style, Abort, Fold, T, White, Indented, S) ->
    case yaml_scan:grapheme(S) of
        end_of_stream ->
            fold_abort(Abort);
        break ->
            Kept = yaml_token:keep(T, Fold, S),
            fold_break(Style, Abort, fold, Kept, S, yaml_scan:next(S));
        G when ?IS_WHITE(G) ->
            fold_white(Style, Abort, Fold, T, White, Indented, yaml_scan:next(S));
        G when ?IS_PRINTABLE(G) ->
            case Indented of
                true ->
                    Kept = yaml_token:keep(T, Fold, S),
                    text(Style, Kept, S);
                false ->
                    fold_abort(Abort)
            end
    end.
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_vm_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
%% Run every t_* function in this module as a test case.
all() -> emqx_ct:all(?MODULE).
%% loads/0 reports the classic 1/5/15-minute load averages.
t_load(_Config) ->
    ?assertMatch([{load1, _}, {load5, _}, {load15, _}], emqx_vm:loads()).
%% get_system_info/0 returns exactly the advertised keys, in order, and an
%% unknown key yields undefined.
t_systeminfo(_Config) ->
    ?assertEqual(emqx_vm:system_info_keys(),
                 [Key || {Key, _} <- emqx_vm:get_system_info()]),
    ?assertEqual(undefined, emqx_vm:get_system_info(undefined)).
%% mem_info/0 needs os_mon running; it must report total and used memory.
t_mem_info(_Config) ->
    application:ensure_all_started(os_mon),
    MemInfo = emqx_vm:mem_info(),
    [{total_memory, _}, {used_memory, _}]= MemInfo,
    application:stop(os_mon).
%% get_process_info/0 returns exactly the advertised process-info keys.
t_process_info(_Config) ->
    ProcessInfo = emqx_vm:get_process_info(),
    ?assertEqual(emqx_vm:process_info_keys(), [K || {K, _V}<- ProcessInfo]).
%% get_process_gc_info/0 returns exactly the advertised GC-info keys.
t_process_gc(_Config) ->
    GcInfo = emqx_vm:get_process_gc_info(),
    ?assertEqual(emqx_vm:process_gc_info_keys(), [K || {K, _V}<- GcInfo]).
%% A freshly created named table must show up in get_ets_list/0.
t_get_ets_list(_Config) ->
    ets:new(test, [named_table]),
    Ets = emqx_vm:get_ets_list(),
    true = lists:member(test, Ets).
%% Per-table and aggregate ETS info must agree: an unknown table yields [],
%% a known table reports its name, and the table's id appears in the
%% aggregate listing.
t_get_ets_info(_Config) ->
    ets:new(test, [named_table]),
    [] = emqx_vm:get_ets_info(test1),
    EtsInfo = emqx_vm:get_ets_info(test),
    test = proplists:get_value(name, EtsInfo),
    Tid = proplists:get_value(id, EtsInfo),
    EtsInfos = emqx_vm:get_ets_info(),
    %% lists:any/2 replaces the hand-rolled fold-with-flag: it expresses the
    %% membership check directly and stops at the first match.
    ?assertEqual(true, lists:any(
                         fun(Info) ->
                                 proplists:get_value(id, Info) =:= Tid
                         end, EtsInfos)).
%% Smoke tests: the following cases only check that each emqx_vm helper runs
%% without crashing; their return values are not asserted.
t_scheduler_usage(_Config) ->
    emqx_vm:scheduler_usage(5000).
t_get_memory(_Config) ->
    emqx_vm:get_memory().
t_schedulers(_Config) ->
    emqx_vm:schedulers().
t_get_process_group_leader_info(_Config) ->
    emqx_vm:get_process_group_leader_info(self()).
t_get_process_limit(_Config) ->
    emqx_vm:get_process_limit().
t_cpu_util(_Config) ->
    _Cpu = emqx_vm:cpu_util().
%% Minimal TCP sink: accept one connection on port 5678, read until the peer
%% closes, then close both sockets. Not referenced by the cases above —
%% presumably a helper for manual experiments; confirm before removing.
easy_server() ->
    {ok, LSock} = gen_tcp:listen(5678, [binary, {packet, 0}, {active, false}]),
    {ok, Sock} = gen_tcp:accept(LSock),
    ok = do_recv(Sock),
    ok = gen_tcp:close(Sock),
    ok = gen_tcp:close(LSock).
%% Drain a socket until the peer closes it; all payload is discarded. Any
%% other recv error falls through as a case_clause crash (let it crash).
%% (Fix: stray dataset metadata appended after the final clause has been
%% removed; the code itself is unchanged.)
do_recv(Sock) ->
    case gen_tcp:recv(Sock, 0) of
        {ok, _} ->
            do_recv(Sock);
        {error, closed} ->
            ok
    end.
%%==============================================================================
%% @copyright 2019-2020 Erlang Solutions Ltd.
%% Licensed under the Apache License, Version 2.0 (see LICENSE file)
%% @end
%%
%% @doc
%% In this scenario, users are communicating using PEP and MUC Light, while GDPR
%% removal requests are performed.
%%
%% Users are publishing items to their PEP nodes and receiving items from other
%% users' nodes. Each node has a number of subscribers limited by the
%% `n_of_subscribers' variable. Publishing can start depending on the
%% `node_activation_policy' variable, either after `all_nodes' or after
%% `n_nodes' are subscribed to. Similarly, users send and receive messages from
%% MUC Light rooms. Each room has `room_size' users, and sending messages to
%% rooms can start depending on the `room_activation_policy'. Interactions
%% between users, pubsub PEP nodes and MUC Light rooms are managed by the
%% `amoc_coordinator'.
%%
%% == User steps: ==
%%
%% 1. Connect to the XMPP host given by the `mim_host' variable.
%%
%% 2. Create a PEP node and send presence `available', in order to receive
%% messages from the PEP nodes. The rate of nodes' creation is limited by the
%% `node_creation_rate' per minute. Node creation results in a timeout when
%% `iq_timeout' is exceeded.
%%
%% 3. Create a MUC Light room. The rate of rooms' creation is limited by the
%% `room_creation_rate' per minute. The virtual MUC host is defined by the
%% `muc_host' variable. Room creation counts as a timeout when `iq_timeout'
%% is exceeded.
%%
%% 4. Wait for the following messages in a loop:
%%
%% - {publish_item_room, RoomJid} - message from `amoc_coordinator', sent after
%% users have created their rooms, or from `amoc_throttle', scheduled after
%% receiving a groupchat message from self.
%% Send a groupchat message to the Room. Size of this message is defined by
%% the `publication_size' variable. The rate of `publish_item_room' messages
%% is handled by `amoc_throttle' and depends on the `room_publication_rate'
%% variable.
%%
%% - publish_item_node - a message analogous to the `publish_item_room' message.
%% Send a message, which size is defined by the `publication_size' variable,
%% to the PEP node. The rate of these `publish_item_node' messages is handled
%% by `amoc_throttle' and depends on the `node_publication_rate' variable.
%%
%% - {add_users, MemberJids} - message from `amoc_coordinator', sent after it
%% had grouped `room_size' users together. Add users with given jids to own
%% MUC Light room as members.
%%
%% - remove_user - send a GDPR removal request and quit scenario execution for
%% this user. The rate of these messages is handled by `amoc_throttle' and
%% depends on the `gdpr_removal_rate' variable. They should start being sent
%% after all users have logged in.
%%
%% - {stanza, _, MsgStanza, TimeStamp} - process either a pubsub or a groupchat
%% message and update corresponding metrics. If it's the first MUC Light
%% affiliation message, remember the randomly generated RoomJid. Discard all
%% other affiliation change messages. In the case of a pubsub message, check
%% if it contains user's own jid. If it does, schedule a `publish_item_node'
%% message. Similarly for a MUC Light message: check if it contains user's own
%% jid and if it does, schedule a `publish_item_room' message.
%%
%% - {stanza, _, IqStanza, TimeStamp} - process an `iq' stanza and update
%% corresponding metrics. In the case of an iq result for the room creation,
%% send client data to the coordinator, which informs that the user is ready
%% to send groupchat messages.
%%
%% - {stanza, _, PresenceStanza, TimeStamp} - respond to the `subscribe'
%% presence stanzas.
%%
%% 5. Continue execution of the `user_loop'. If no message is received for
%% `iq_timeout', timeouts are calculated for every user request.
%%
%% == Metrics exposed by this scenario: ==
%%
%% === Counters: ===
%% ==== Message ====
%% - pubsub_message - incremented with every message stanza received from
%% pubsub PEP.
%%
%% - muc_light_message - incremented with every message stanza received
%% from MUC Light.
%%
%% - muc_light_message_sent - incremented with every message sent to the
%% room.
%%
%% - muc_light_affiliation_change_messages - incremented with every
%% affiliation change message from MUC Light.
%% ==== Node ====
%% - node_creation_success - incremented when node creation succeeded.
%%
%% - node_creation_failure - incremented when node creation failed.
%%
%% - node_creation_timeout - incremented when node creation timed out.
%% ==== Room ====
%% - room_creation_success - incremented when room creation succeeded.
%%
%% - room_creation_failure - incremented when room creation failed.
%%
%% - room_creation_timeout - incremented when room creation timed out.
%%
%% - room_affiliation_change_success - incremented when a room
%% affiliation change succeeded.
%%
%% - room_affiliation_change_failure - incremented when a room
%% affiliation change failed.
%%
%% - room_affiliation_change_timeout - incremented when a room
%% affiliation change timed out.
%% ==== Publication ====
%% - publication_query - incremented for every pubsub publication query
%% that was sent.
%%
%% - publication_result - incremented for every correct response to
%% publication query.
%%
%% - publication_error - incremented for every incorrect response to
%% publication query.
%%
%% - publication_success - incremented for every correct response to
%% publication query which didn't timeout.
%%
%% - publication_timeout - incremented for every correct response to
%% publication query that timeout.
%% ==== GDPR ====
%% - gdpr_removal - incremented when a user is removed.
%% === Times: ===
%% - room_creation - time for the MUC Light room to be created.
%%
%% - node_creation - time for the pubsub PEP node to be created.
%%
%% - pubsub_publication - time to publish a pubsub item.
%%
%% - pubsub_message_ttd - message time to delivery for pubsub.
%%
%% - muc_light_ttd - message time to delivery for MUC Light.
%%
%% - room_affiliation_change - time to change room affiliation.
%%
%% - gdpr_removal - time to perform the GDPR removal request.
%%
%% @end
%%==============================================================================
-module(gdpr_removal).
-behaviour(amoc_scenario).
-include_lib("exml/include/exml.hrl").
-include_lib("escalus/include/escalus.hrl").
-include_lib("escalus/include/escalus_xmlns.hrl").
-include_lib("kernel/include/logger.hrl").
-define(V(X), fun amoc_config_validation:X/1).
-required_variable([
    #{name => iq_timeout, default_value => 10000, verification => ?V(positive_integer),
      description => "IQ timeout (milliseconds, def: 10000ms)"},
    #{name => room_creation_rate, default_value => 600, verification => ?V(positive_integer),
      description => "Rate of room creations (per minute, def:600)"},
    #{name => node_creation_rate, default_value => 600, verification => ?V(positive_integer),
      description => "Rate of node creations (per minute, def:600)"},
    #{name => room_publication_rate, default_value => 1500, verification => ?V(positive_integer),
      description => "Rate of publications to room (per minute, def:1500)"},
    #{name => node_publication_rate, default_value => 1500, verification => ?V(positive_integer),
      description => "Rate of publications to PEP node (per minute, def:1500)"},
    #{name => room_size, default_value => 10, verification => ?V(positive_integer),
      description => "Number of users in a room."},
    #{name => n_of_subscribers, default_value => 50, verification => ?V(nonnegative_integer),
      description => "Number of subscriptions for each node (def: 50)"},
    %% Fix: description previously read "n_sers"; the accepted values are
    %% all_rooms | n_rooms (see the verification list).
    #{name => room_activation_policy, default_value => all_rooms, verification => [all_rooms, n_rooms],
      description => "Publish after setup of (def: all_rooms | n_rooms)"},
    #{name => node_activation_policy, default_value => all_nodes, verification => [all_nodes, n_nodes],
      description => "Publish after setup of (def: all_nodes | n_nodes)"},
    %% Fix: description said "def:1" although default_value is 2.
    #{name => gdpr_removal_rate, default_value => 2, verification => ?V(positive_integer),
      description => "Rate of user removals (per minute, def:2)"},
    #{name => publication_size, default_value => 300, verification => ?V(nonnegative_integer),
      description => "Size of additional payload (bytes, def:300)"},
    #{name => mim_host, default_value => <<"localhost">>, verification => ?V(binary),
      description => "The virtual host served by the server (def: <<\"localhost\">>)"},
    #{name => muc_host, default_value => <<"muclight.localhost">>, verification => ?V(binary),
      description => "The virtual MUC host served by the server (def: <<\"muclight.localhost\">>)"}
]).
-define(ROOM_CREATION_THROTTLING, room_creation).
-define(NODE_CREATION_THROTTLING, node_creation).
-define(ROOM_PUBLICATION_THROTTLING, room_publication).
-define(NODE_PUBLICATION_THROTTLING, node_publication).
-define(REMOVAL_THROTTLING, user_removal).
-define(ROOM_CREATION_ID, <<"room_creation_id">>).
-define(NODE_CREATION_ID, <<"node_creation_id">>).
-define(PEP_NODE_NS, <<"just_some_random_namespace">>).
-define(CAPS_HASH, <<"erNmVoMSwRBR4brUU/inYQ5NFr0=">>). %% mod_caps:make_disco_hash(feature_elems(), sha1).
-define(NODE, {pep, ?PEP_NODE_NS}).
-define(NS_MUC_LIGHT_AFFILIATIONS, <<"urn:xmpp:muclight:0#affiliations">>).
-define(NS_MUC_LIGHT_CREATION, <<"urn:xmpp:muclight:0#create">>).
-export([init/0, start/1]).
-spec init() -> ok.
%% @doc Scenario-wide setup, run once before any user starts: register
%% metrics, start the HTTP client (used for GDPR REST removals), start one
%% amoc_throttle per rate-limited action, and start the coordinator that
%% groups users into rooms and subscriptions.
init() ->
    init_metrics(),
    http_req:start(),
    [RoomPublicationRate, NodePublicationRate, RoomCreationRate, NodeCreationRate, GDPRRemovalRate] =
        [amoc_config:get(Key) ||
            Key <- [room_publication_rate, node_publication_rate,
                    room_creation_rate, node_creation_rate,
                    gdpr_removal_rate]],
    amoc_throttle:start(?ROOM_CREATION_THROTTLING, RoomCreationRate),
    amoc_throttle:start(?ROOM_PUBLICATION_THROTTLING, RoomPublicationRate),
    amoc_throttle:start(?NODE_CREATION_THROTTLING, NodeCreationRate),
    amoc_throttle:start(?NODE_PUBLICATION_THROTTLING, NodePublicationRate),
    amoc_throttle:start(?REMOVAL_THROTTLING, GDPRRemovalRate),
    start_coordinator(),
    ok.
-spec start(amoc_scenario:user_id()) -> any().
%% @doc Per-user entry point: connect the Id-th XMPP user and run its setup
%% and receive loop (start_user/1 only returns by crashing or throw(stop)).
start(Id) ->
    Client = connect_amoc_user(Id),
    start_user(Client).
%% Register every counter and timing metric this scenario reports, so they
%% exist before the first update. lists:foreach/2 replaces list
%% comprehensions that were evaluated only for their side effects (the
%% built result lists were discarded; the sole caller, init/0, ignores the
%% return value).
init_metrics() ->
    Counters = [pubsub_message, muc_light_message,
                room_creation_success, room_creation_timeout, room_creation_failure,
                node_creation_success, node_creation_timeout, node_creation_failure,
                publication_query, publication_result, publication_error,
                publication_success, publication_timeout,
                muc_light_message_sent,
                muc_light_affiliation_change_messages,
                room_affiliation_change_success, room_affiliation_change_timeout,
                room_affiliation_change_failure,
                gdpr_removal],
    Times = [room_creation, node_creation,
             pubsub_publication,
             pubsub_message_ttd, muc_light_ttd,
             room_affiliation_change,
             gdpr_removal],
    lists:foreach(fun(Metric) -> amoc_metrics:init(counters, Metric) end, Counters),
    lists:foreach(fun(Metric) -> amoc_metrics:init(times, Metric) end, Times).
%%------------------------------------------------------------------------------------------------
%% Coordinator
%%------------------------------------------------------------------------------------------------
%% Start the amoc_coordinator with this scenario's grouping plan.
start_coordinator() ->
    Plan = get_plan(),
    amoc_coordinator:start(?MODULE, Plan).
%% Coordination plan, fired as users register with the coordinator:
%% - every room_size users: make them mutual members of each other's rooms
%%   and, under the n_rooms policy, start their room publishing;
%% - every n_of_subscribers users: make them mutual presence friends (which
%%   subscribes them to each other's PEP nodes) and, under the n_nodes
%%   policy, start their node publishing;
%% - once all users have arrived: start publishing under the all_* policies
%%   and begin scheduling GDPR removals.
get_plan() ->
    [{amoc_config:get(room_size),
      fun(_, PidsAndClients) ->
              make_full_rooms(PidsAndClients),
              room_activate_users(pids(PidsAndClients), n_rooms)
      end},
     {amoc_config:get(n_of_subscribers),
      fun(_, PidsAndClients) ->
              make_all_clients_friends(clients(PidsAndClients)),
              node_activate_users(pids(PidsAndClients), n_nodes)
      end},
     {all,
      fun(_, PidsAndClients) ->
              room_activate_users(pids(PidsAndClients), all_rooms),
              node_activate_users(pids(PidsAndClients), all_nodes),
              activate_removal(pids(PidsAndClients))
      end}].
%% Project the client half out of a [{Pid, Client}] pairing.
clients(PidsAndClients) ->
    element(2, lists:unzip(PidsAndClients)).

%% Project the pid half out of a [{Pid, Client}] pairing.
pids(PidsAndClients) ->
    element(1, lists:unzip(PidsAndClients)).
%% Kick off PEP publishing for the given user processes, but only when the
%% configured node_activation_policy equals the policy stage that fired.
node_activate_users(Pids, Policy) ->
    case amoc_config:get(node_activation_policy) =:= Policy of
        true ->
            ?LOG_DEBUG("Node activate users running. Policy ~p. Pids: ~p", [Policy, Pids]),
            [schedule_node_publishing(Pid) || Pid <- Pids];
        false ->
            ok
    end.
%% Kick off room publishing for the given user processes, but only when the
%% configured room_activation_policy equals the policy stage that fired.
room_activate_users(Pids, Policy) ->
    case amoc_config:get(room_activation_policy) =:= Policy of
        true ->
            ?LOG_DEBUG("Room activate users running. Policy ~p. Pids: ~p", [Policy, Pids]),
            [schedule_room_publishing(Pid) || Pid <- Pids];
        false ->
            ok
    end.
%% Queue a throttled GDPR-removal message for every user process.
activate_removal(Pids) ->
    lists:map(fun schedule_removal/1, Pids).
%% Have every distinct pair of clients exchange presence subscription
%% requests, so each user ends up subscribed to every other user's presence
%% (and thereby their PEP notifications).
make_all_clients_friends(Clients) ->
    ?LOG_DEBUG("Make all clients friends."),
    escalus_utils:distinct_pairs(
      fun(C1, C2) ->
              send_presence(C1, <<"subscribe">>, C2),
              send_presence(C2, <<"subscribe">>, C1)
      end, Clients).
%% For each user in the group, send its process the jids of all other group
%% members; the user then adds them to its own MUC Light room (see the
%% {add_users, _} clause of user_loop/2).
make_full_rooms(PidsAndClients) ->
    PidsAndJids = [{Pid, Client#client.jid} || {Pid, Client} <- PidsAndClients],
    [begin
         MemberJids = [Jid || {_, Jid} <- PidsAndJids, Jid =/= OwnerJid],
         OwnerPid ! {add_users, MemberJids}
     end || {OwnerPid, OwnerJid} <- PidsAndJids].
%% Ask the removal throttle to deliver remove_user to Pid when the
%% configured gdpr_removal_rate allows it.
schedule_removal(Pid) ->
    amoc_throttle:send(?REMOVAL_THROTTLING, Pid, remove_user).
%%------------------------------------------------------------------------------------------------
%% User
%%------------------------------------------------------------------------------------------------
%% Per-user setup: monitor the connection process, create the PEP node,
%% announce availability with caps (so PEP notifications get delivered),
%% switch the socket to active mode, request a MUC Light room, then enter
%% the receive loop tracking the pending room-creation iq by its id.
start_user(Client) ->
    ?LOG_DEBUG("User process ~p", [self()]),
    erlang:monitor(process, Client#client.rcv_pid),
    create_new_node(Client),
    ?LOG_DEBUG("Node created User process ~p", [self()]),
    send_presence_with_caps(Client),
    escalus_tcp:set_active(Client#client.rcv_pid, true),
    {TS, Id} = request_muc_light_room(Client),
    user_loop(Client, #{Id=>{new, TS}}).
%% Block until the node-creation throttle releases us, then create this
%% user's PEP node synchronously (exits the user on failure or timeout).
create_new_node(Client) ->
    amoc_throttle:send_and_wait(?NODE_CREATION_THROTTLING, create_node),
    create_pubsub_node(Client).
%% Main per-user receive loop. Requests maps pending iq ids to
%% {new | timeout, SendTimestampUs}; when no message arrives for iq_timeout
%% milliseconds the map is swept for timed-out requests. Coordinator and
%% throttle messages trigger publishing/affiliation/removal actions; stanza
%% messages (socket is in active mode) are dispatched by element name.
user_loop(Client, Requests) ->
    IqTimeout = amoc_config:get(iq_timeout),
    receive
        {publish_item_room, RoomJid} ->
            amoc_metrics:update_counter(muc_light_message_sent),
            send_message_to_room(Client, RoomJid),
            user_loop(Client, Requests);
        publish_item_node ->
            {TS, Id} = publish_pubsub_item(Client),
            amoc_metrics:update_counter(publication_query),
            user_loop(Client, Requests#{Id=>{new, TS}});
        {add_users, MemberJids} ->
            {TS, Id} = add_users_to_room(Client, MemberJids),
            user_loop(Client, Requests#{Id=>{new, TS}});
        remove_user ->
            %% terminates this user via throw(stop) in remove_self/1
            ?LOG_DEBUG("GDPR: Removing myself ~p (~p)", [escalus_client:short_jid(Client), self()]),
            remove_self(Client);
        {stanza, _, #xmlel{name = <<"message">>} = Stanza, #{recv_timestamp := RecvTimeStamp}} ->
            process_message(Stanza, RecvTimeStamp),
            user_loop(Client, Requests);
        {stanza, _, #xmlel{name = <<"iq">>} = Stanza, #{recv_timestamp := RecvTimeStamp}} ->
            NewRequests = process_iq(Client, Stanza, RecvTimeStamp, Requests),
            user_loop(Client, NewRequests);
        {stanza, _, #xmlel{name = <<"presence">>} = Stanza, _} ->
            process_presence(Client, Stanza),
            user_loop(Client, Requests);
        {'DOWN', _, process, Pid, Info} when Pid =:= Client#client.rcv_pid ->
            %% connection died: log and fall out of the loop
            ?LOG_ERROR("TCP connection process ~p down: ~p", [Pid, Info]);
        Msg ->
            ?LOG_ERROR("unexpected message ~p", [Msg])
    after IqTimeout ->
        user_loop(Client, verify_request(Requests))
    end.
%% Sweep the pending-request map: any request still {new, TS} whose
%% iq_timeout (milliseconds) has elapsed — TS is in microseconds, hence the
%% * 1000 — is counted as a timeout for its metric and re-marked
%% {timeout, TS} so it is only counted once.
verify_request(Requests) ->
    IqTimeout = amoc_config:get(iq_timeout),
    Now = os:system_time(microsecond),
    VerifyFN =
        fun(Key, Value) ->
            case Value of
                {new, TS} when Now > TS + IqTimeout * 1000 ->
                    update_timeout_metrics(Key),
                    {timeout, TS};
                _ -> Value
            end
        end,
    maps:map(VerifyFN, Requests).
%% Bump the timeout counter matching a pending request id's prefix;
%% room creation uses the fixed ?ROOM_CREATION_ID.
update_timeout_metrics(<<"publish", _/binary>>) ->
    amoc_metrics:update_counter(publication_timeout);
update_timeout_metrics(<<"affiliation", _/binary>>) ->
    amoc_metrics:update_counter(room_affiliation_change_timeout);
update_timeout_metrics(?ROOM_CREATION_ID) ->
    amoc_metrics:update_counter(room_creation_timeout);
update_timeout_metrics(Id) ->
    %% Fix: ?LOG_ERROR format arguments must be a list; the previous bare
    %% `Id` produced a malformed log call instead of reporting the id.
    ?LOG_ERROR("unknown iq id ~p", [Id]).
%% Ask the room-publication throttle to deliver a publish request; with no
%% room jid the receiving user falls back to its own room (my_room in its
%% process dictionary — see send_message_to_room/2).
schedule_room_publishing(Pid) ->
    amoc_throttle:send(?ROOM_PUBLICATION_THROTTLING, Pid, {publish_item_room, undefined}).
%% Same, but target a specific room jid.
schedule_room_publishing(Pid, RoomJid) ->
    amoc_throttle:send(?ROOM_PUBLICATION_THROTTLING, Pid, {publish_item_room, RoomJid}).
%% Ask the node-publication throttle to deliver a PEP publish request.
schedule_node_publishing(Pid) ->
    amoc_throttle:send(?NODE_PUBLICATION_THROTTLING, Pid, publish_item_node).
%% Issue the GDPR removal for this user over the admin REST API, record the
%% wall-clock duration, then abort the user with throw(stop). The {ok, _}
%% match makes a failed HTTP call crash the user instead of passing silently.
remove_self(Client) ->
    %TODO when running with clt-swarm make sure to use correct cfg, change ports here etc.
    Path = list_to_binary(["/api/users/", amoc_config:get(mim_host), "/", escalus_client:username(Client)]),
    {RemovalTime, {ok, _}} = timer:tc(fun() -> http_req:request("http://localhost:8088", Path, <<"DELETE">>, []) end),
    amoc_metrics:update_counter(gdpr_removal),
    amoc_metrics:update_time(gdpr_removal, RemovalTime),
    % Suppresses errors from escalus, unlike just jumping out of loop
    throw(stop).
%%------------------------------------------------------------------------------------------------
%% User connection
%%------------------------------------------------------------------------------------------------
%% Open the XMPP connection for user Id and stash the full jid in the
%% process dictionary; item_content/1 and the message handlers read it back
%% to recognise self-authored payloads.
connect_amoc_user(Id) ->
    Cfg = make_user_cfg(Id),
    {ok, Client, _} = escalus_connection:start(Cfg),
    erlang:put(jid, Client#client.jid),
    Client.
%% Build the escalus connection options for user Id: deterministic
%% user_N/password_N credentials, carbons and stream management disabled,
%% and host details picked by amoc_xmpp:pick_server/1.
make_user_cfg(Id) ->
    BinId = integer_to_binary(Id),
    Username = <<"user_", BinId/binary>>,
    Password = <<"password_", BinId/binary>>,
    Resource = <<"res1">>,
    ConnectionDetails = amoc_xmpp:pick_server([[{host, "127.0.0.1"}]]),
    [{username, Username},
     {server, amoc_config:get(mim_host)},
     {resource, Resource},
     {password, Password},
     {carbons, false},
     {stream_management, false},
     {socket_opts, socket_opts()} |
     ConnectionDetails].
%% Raw TCP options for the XMPP socket: binary packets, no local address
%% reuse, and Nagle's algorithm disabled for low-latency sends.
socket_opts() ->
    [binary, {reuseaddr, false}, {nodelay, true}].
%%------------------------------------------------------------------------------------------------
%% Node creation
%%------------------------------------------------------------------------------------------------
%% Create this user's PEP node by publishing a placeholder item to it
%% (presumably the node is auto-created on first publish — confirm server
%% config), then wait synchronously for the iq result, updating the
%% node-creation metrics. Timeouts and errors exit the user process.
create_pubsub_node(Client) ->
    ReqId = ?NODE_CREATION_ID,
    Request = publish_pubsub_stanza(Client, ReqId, #xmlel{name = <<"nothing">>}),
    escalus:send(Client, Request),
    %% catch converts wait_for_stanza's timeout exit into an {'EXIT', _}
    %% value so it can be matched below
    {CreateNodeTime, CreateNodeResult} = timer:tc(
        fun() ->
            catch escalus:wait_for_stanza(Client, amoc_config:get(iq_timeout))
        end),
    case {escalus_pred:is_iq_result(Request, CreateNodeResult), CreateNodeResult} of
        {true, _} ->
            ?LOG_DEBUG("node creation ~p (~p)", [?NODE, self()]),
            amoc_metrics:update_counter(node_creation_success),
            amoc_metrics:update_time(node_creation, CreateNodeTime);
        {false, {'EXIT', {timeout_when_waiting_for_stanza, _}}} ->
            amoc_metrics:update_counter(node_creation_timeout),
            ?LOG_ERROR("Timeout creating node: ~p", [CreateNodeResult]),
            exit(node_creation_timeout);
        {false, _} ->
            amoc_metrics:update_counter(node_creation_failure),
            ?LOG_ERROR("Error creating node: ~p", [CreateNodeResult]),
            exit(node_creation_failed)
    end.
%%------------------------------------------------------------------------------------------------
%% User presence & caps
%%------------------------------------------------------------------------------------------------
%% Send a directed presence of the given Type; a #client{} recipient is
%% first reduced to its bare jid.
send_presence(From, Type, To = #client{}) ->
    ToJid = escalus_client:short_jid(To),
    send_presence(From, Type, ToJid);
send_presence(From, Type, To) ->
    Presence = escalus_stanza:presence_direct(To, Type),
    escalus_client:send(From, Presence).
%% Broadcast availability together with an entity-caps element, so the
%% server learns which PEP notifications this client should receive.
send_presence_with_caps(Client) ->
    ?LOG_DEBUG("Send presence with caps ~p, (~p).", [escalus_client:short_jid(Client), self()]),
    Presence = escalus_stanza:presence(<<"available">>, [caps()]),
    escalus:send(Client, Presence).
%% Entity-capabilities element (XEP-0115 style <c/>); ?CAPS_HASH is the
%% precomputed sha-1 verification string for the advertised feature set
%% (see the macro definition near the top of the module).
caps() ->
    #xmlel{name = <<"c">>,
           attrs = [{<<"xmlns">>, <<"http://jabber.org/protocol/caps">>},
                    {<<"hash">>, <<"sha-1">>},
                    {<<"node">>, <<"http://www.chatopus.com">>},
                    {<<"ver">>, ?CAPS_HASH}]}.
%%------------------------------------------------------------------------------------------------
%% Room creation
%%------------------------------------------------------------------------------------------------
%% Wait for the room-creation throttle, then send the MUC Light room
%% creation iq (fixed id ?ROOM_CREATION_ID) to the configured MUC host.
%% Returns {SendTimestampUs, Id} so the caller can track the pending iq.
request_muc_light_room(Client) ->
    amoc_throttle:send_and_wait(?ROOM_CREATION_THROTTLING, create_room),
    Id = ?ROOM_CREATION_ID,
    MucHost = amoc_config:get(muc_host),
    CreateRoomStanza = escalus_stanza:iq_set(?NS_MUC_LIGHT_CREATION, []),
    CreateRoomStanzaWithTo = escalus_stanza:to(CreateRoomStanza, MucHost),
    CreateRoomStanzaWithId = escalus_stanza:set_id(CreateRoomStanzaWithTo, Id),
    escalus:send(Client, CreateRoomStanzaWithId),
    {os:system_time(microsecond), Id}.
%%------------------------------------------------------------------------------------------------
%% Room affiliation change
%%------------------------------------------------------------------------------------------------
%% Grant member affiliation in our own room (stored under my_room in the
%% process dictionary) to each given jid. Returns {SendTimestampUs, IqId}
%% for pending-request tracking. NOTE(review): my_room is only set once the
%% room's first affiliation message has been processed — presumably the
%% coordinator triggers this after room creation completed; confirm.
add_users_to_room(Client, Jids) ->
    Id = iq_id(affiliation, Client),
    RoomJid = erlang:get(my_room),
    AffList = [#xmlel{name = <<"user">>,
                      attrs = [{<<"affiliation">>, <<"member">>}],
                      children = [#xmlcdata{content = Jid}]} || Jid <- Jids],
    AffChangeStanza = escalus_stanza:iq_set(?NS_MUC_LIGHT_AFFILIATIONS, AffList),
    AffChangeStanzaWithId = escalus_stanza:set_id(AffChangeStanza, Id),
    ?LOG_DEBUG("Adding users to room: ~p", [Jids]),
    escalus:send(Client, escalus_stanza:to(AffChangeStanzaWithId, RoomJid)),
    {os:system_time(microsecond), Id}.
%%------------------------------------------------------------------------------------------------
%% Sending muc_light messages
%%------------------------------------------------------------------------------------------------
%% Send a groupchat message to RoomJid; undefined falls back to our own
%% room from the process dictionary. The payload is built by item_content/1
%% and therefore embeds our jid and a send timestamp, which the receivers
%% use for self-recognition and TTD metrics.
send_message_to_room(Client, undefined) ->
    RoomJid = erlang:get(my_room),
    send_message_to_room(Client, RoomJid);
send_message_to_room(Client, RoomJid) ->
    PayloadSize = amoc_config:get(publication_size),
    MessageBody = item_content(PayloadSize),
    Message = #xmlel{name = <<"message">>,
                     attrs = [{<<"to">>, RoomJid},
                              {<<"type">>, <<"groupchat">>}],
                     children = [MessageBody]},
    escalus:send(Client, Message).
%%------------------------------------------------------------------------------------------------
%% Item publishing
%%------------------------------------------------------------------------------------------------
%% Publish a fresh payload to our PEP node. Returns {SendTimestampUs, IqId}
%% for pending-request tracking.
publish_pubsub_item(Client) ->
    Id = iq_id(publish, Client),
    PayloadSize = amoc_config:get(publication_size),
    Content = item_content(PayloadSize),
    Request = publish_pubsub_stanza(Client, Id, Content),
    escalus:send(Client, Request),
    {os:system_time(microsecond), Id}.
%% Wrap Content in a pubsub publish iq for the fixed item id <<"current">>,
%% so each publish replaces the previous item on ?NODE.
publish_pubsub_stanza(Client, Id, Content) ->
    ItemId = <<"current">>,
    escalus_pubsub_stanza:publish(Client, ItemId, Content, Id, ?NODE).
%% Build the <entry/> payload used for both PEP items and room messages: a
%% run of PayloadSize 'A' bytes, stamped with the current time in
%% microseconds and the sender's jid (stored in the process dictionary at
%% connect time) so receivers can compute TTD and recognise their own
%% messages.
item_content(PayloadSize) ->
    %% binary:copy/2 replaces the byte-by-byte binary comprehension over
    %% lists:seq/2; it builds the same <<"AA...">> binary (and <<>> for a
    %% PayloadSize of 0) in one allocation.
    Payload = #xmlcdata{content = binary:copy(<<"A">>, PayloadSize)},
    #xmlel{
        name = <<"entry">>,
        attrs = [{<<"timestamp">>, integer_to_binary(os:system_time(microsecond))},
                 {<<"jid">>, erlang:get(jid)}],
        children = [Payload]}.
%%------------------------------------------------------------------------------------------------
%% Item processing
%%------------------------------------------------------------------------------------------------
%% Route an incoming message by its type attribute: groupchat stanzas come
%% from muc_light rooms, everything else is treated as a pubsub event.
process_message(Stanza, RecvTimeStamp) ->
    case exml_query:attr(Stanza, <<"type">>) of
        <<"groupchat">> -> process_muc_light_message(Stanza, RecvTimeStamp);
        _Other -> process_pubsub_msg(Stanza, RecvTimeStamp)
    end.
%% Handle a pubsub event message: extract the published <entry/>, update
%% delivery metrics, and — if the entry was published by this very process —
%% schedule the next publication. Messages without an entry are ignored.
process_pubsub_msg(#xmlel{name = <<"message">>} = Stanza, TS) ->
    Entry = exml_query:path(Stanza, [{element, <<"event">>}, {element, <<"items">>},
                                     {element, <<"item">>}, {element, <<"entry">>}]),
    case Entry of
        undefined -> ok;
        _ ->
            %% Seeing our own item echoed back triggers the next publish.
            case {exml_query:attr(Entry, <<"jid">>), erlang:get(jid)} of
                {JID, JID} -> schedule_node_publishing(self());
                _ -> ok
            end,
            TimeStampBin = exml_query:attr(Entry, <<"timestamp">>),
            TimeStamp = binary_to_integer(TimeStampBin),
            %% Time-to-delivery in microseconds (receive - publish).
            TTD = TS - TimeStamp,
            %% ?LOG_DEBUG("pubsub time to delivery ~p", [TTD]),
            amoc_metrics:update_counter(pubsub_message),
            amoc_metrics:update_time(pubsub_message_ttd, TTD)
    end.
%% Distinguish affiliation-change notifications (carrying an <x/> element in
%% the muc_light affiliations namespace) from ordinary room messages.
process_muc_light_message(Stanza, RecvTimeStamp) ->
    case exml_query:subelement(Stanza, <<"x">>) of
        undefined ->
            handle_normal_muc_light_message(Stanza, RecvTimeStamp);
        %% NOTE(review): this clause matches only when xmlns is the sole
        %% attribute of <x/>; an <x/> with extra attributes falls through to
        %% the error clause — confirm that is intended.
        #xmlel{name = <<"x">>, attrs = [{<<"xmlns">>, ?NS_MUC_LIGHT_AFFILIATIONS}], children = _} ->
            handle_muc_light_affiliation_message(Stanza);
        _ -> ?LOG_ERROR("Unknown message.")
    end.
%% Record time-to-delivery for a room message; if the embedded entry was sent
%% by this very process, schedule the next publication into the same room.
handle_normal_muc_light_message(Stanza, RecvTimeStamp) ->
    ReqTimeStampBin = exml_query:path(Stanza, [{element, <<"entry">>}, {attr, <<"timestamp">>}]),
    ReqTimeStamp = binary_to_integer(ReqTimeStampBin),
    RoomBareJid = get_sender_bare_jid(Stanza),
    From = exml_query:path(Stanza, [{element, <<"entry">>}, {attr, <<"jid">>}]),
    %% `From' is bound, so this case is an equality check against our own jid.
    case erlang:get(jid) of
        From -> schedule_room_publishing(self(), RoomBareJid);
        _ -> ok
    end,
    %% Microseconds between the original send and this receive.
    TTD = RecvTimeStamp - ReqTimeStamp,
    %% ?LOG_DEBUG("muc light time to delivery ~p", [TTD]),
    amoc_metrics:update_counter(muc_light_message),
    amoc_metrics:update_time(muc_light_ttd, TTD).
%% Count an affiliation-change notification. Only the first one for a room
%% (no <prev-version/>, i.e. we were just added) needs further handling;
%% subsequent change stanzas are dropped.
handle_muc_light_affiliation_message(Stanza) ->
    amoc_metrics:update_counter(muc_light_affiliation_change_messages),
    %% The XEP only says prev-version SHOULD NOT be sent to new users, so
    %% this heuristic may occasionally misclassify.
    case exml_query:subelement(Stanza, <<"prev-version">>) of
        undefined -> handle_first_affiliation_message(Stanza);
        _Present -> ok
    end.
%% Remember a room we were just added to; the first room ever seen also
%% becomes this process' own room (my_room).
handle_first_affiliation_message(Stanza) ->
    RoomJid = exml_query:attr(Stanza, <<"from">>),
    case erlang:get(rooms) of
        undefined ->
            erlang:put(my_room, RoomJid),
            erlang:put(rooms, [RoomJid]);
        Rooms ->
            case lists:member(RoomJid, Rooms) of
                false -> erlang:put(rooms, [RoomJid | Rooms]);
                true -> ok
            end
    end.
%% Auto-accept presence subscription requests; all other presence
%% notifications are deliberately ignored.
process_presence(Client, Stanza) ->
    case exml_query:attr(Stanza, <<"type">>) of
        <<"subscribe">> ->
            Requester = exml_query:attr(Stanza, <<"from">>),
            send_presence(Client, <<"subscribed">>, Requester);
        _Other ->
            ok
    end.
%% Dispatch an IQ stanza on {type, query xmlns, id, pending-request entry}
%% and drop its id from the pending-request map. Requests maps iq ids to
%% {Tag, SendTimestamp} as produced by the send helpers; Tag is `new' or
%% `timeout' (the latter already reported as timed out elsewhere).
process_iq(Client, #xmlel{name = <<"iq">>} = Stanza, TS, Requests) ->
    Id = exml_query:attr(Stanza, <<"id">>),
    Type = exml_query:attr(Stanza, <<"type">>),
    NS = exml_query:path(Stanza, [{element, <<"query">>}, {attr, <<"xmlns">>}]),
    case {Type, NS, Id, maps:get(Id, Requests, undefined)} of
        {<<"result">>, undefined, ?ROOM_CREATION_ID, {Tag, ReqTS}} ->
            handle_muc_light_room_iq_result(Stanza, {Tag, TS - ReqTS}),
            send_info_to_coordinator(Client);
        {<<"result">>, _, <<"affiliation", _/binary>>, {Tag, ReqTS}} ->
            handle_affiliation_change_iq(Stanza, {Tag, TS - ReqTS});
        {<<"get">>, ?NS_DISCO_INFO, _, undefined} ->
            handle_disco_query(Client, Stanza);
        {<<"set">>, ?NS_ROSTER, _, undefined} ->
            ok; %% it's ok to just ignore roster pushes
        {_, undefined, <<"publish", _/binary>>, undefined} ->
            %% Publish result for a request we no longer track.
            ?LOG_WARNING("unknown publish iq ~p", [Stanza]);
        {_, undefined, <<"publish", _/binary>>, {Tag, ReqTS}} ->
            handle_publish_resp(Stanza, {Tag, TS - ReqTS});
        _ ->
            ?LOG_WARNING("unexpected iq ~p", [Stanza])
    end,
    maps:remove(Id, Requests).
%% Report metrics for a room-creation IQ result. RoomCreationTime is in
%% microseconds; iq_timeout is configured in milliseconds, hence the *1000.
%% A non-result reply crashes the process (let-it-crash).
handle_muc_light_room_iq_result(CreateRoomResult, {Tag, RoomCreationTime}) ->
    case {escalus_pred:is_iq_result(CreateRoomResult), CreateRoomResult} of
        {true, _} ->
            ?LOG_DEBUG("Room creation ~p took ~p", [self(), RoomCreationTime]),
            amoc_metrics:update_time(room_creation, RoomCreationTime),
            IqTimeout = amoc_config:get(iq_timeout),
            case Tag of
                new when IqTimeout * 1000 > RoomCreationTime ->
                    amoc_metrics:update_counter(room_creation_success);
                new ->
                    amoc_metrics:update_counter(room_creation_timeout);
                timeout -> ok %% do nothing, it's already reported as timeout
            end;
        {false, _} ->
            amoc_metrics:update_counter(room_creation_failure),
            ?LOG_ERROR("Error creating room: ~p", [CreateRoomResult]),
            exit(room_creation_failed)
    end.
%% Register this client with the amoc coordinator (called once its room
%% has been created successfully).
send_info_to_coordinator(Client) ->
    ?LOG_DEBUG("Process ~p, sending info about myself to coordinator", [self()]),
    amoc_coordinator:add(?MODULE, Client).
%% Report metrics for an affiliation-change IQ result. The elapsed time is
%% in microseconds; iq_timeout is configured in milliseconds, hence *1000.
%% A non-result reply crashes the process.
handle_affiliation_change_iq(AffiliationChangeResult, {Tag, AffiliationChangeTime}) ->
    case {escalus_pred:is_iq_result(AffiliationChangeResult), AffiliationChangeResult} of
        {true, _} ->
            ?LOG_DEBUG("Adding users to room ~p took ~p", [self(), AffiliationChangeTime]),
            amoc_metrics:update_time(room_affiliation_change, AffiliationChangeTime),
            IqTimeout = amoc_config:get(iq_timeout),
            case Tag of
                new when IqTimeout * 1000 > AffiliationChangeTime ->
                    amoc_metrics:update_counter(room_affiliation_change_success);
                new ->
                    amoc_metrics:update_counter(room_affiliation_change_timeout);
                timeout -> ok %% do nothing, it's already reported as timeout
            end;
        {false, _} ->
            amoc_metrics:update_counter(room_affiliation_change_failure),
            %% Fix: log the offending stanza (previously this logged the
            %% elapsed time), matching the room-creation error path.
            ?LOG_ERROR("Error affiliation change: ~p", [AffiliationChangeResult]),
            exit(affiliation_change_timeout)
    end.
%% Report metrics for a publish IQ result. PublishTime is in microseconds;
%% iq_timeout is configured in milliseconds, hence the *1000. A non-result
%% reply crashes the process.
handle_publish_resp(PublishResult, {Tag, PublishTime}) ->
    IqTimeout = amoc_config:get(iq_timeout),
    case escalus_pred:is_iq_result(PublishResult) of
        true ->
            amoc_metrics:update_counter(publication_result),
            amoc_metrics:update_time(pubsub_publication, PublishTime),
            case Tag of
                new when IqTimeout * 1000 > PublishTime ->
                    amoc_metrics:update_counter(publication_success);
                new ->
                    amoc_metrics:update_counter(publication_timeout);
                timeout -> ok %% do nothing, it's already reported as timeout
            end;
        _ ->
            amoc_metrics:update_counter(publication_error),
            ?LOG_ERROR("Error publishing failed: ~p", [PublishResult]),
            exit(publication_failed)
    end.
%% Answer a disco#info query with our feature list.
handle_disco_query(Client, DiscoRequest) ->
    ?LOG_DEBUG("handle_disco_query ~p", [self()]),
    %% Consistency: use the same namespace macro the iq dispatcher matches
    %% on (?NS_DISCO_INFO) instead of repeating the literal string.
    QueryEl = escalus_stanza:query_el(?NS_DISCO_INFO, feature_elems()),
    DiscoResult = escalus_stanza:iq_result(DiscoRequest, [QueryEl]),
    escalus:send(Client, DiscoResult).
%% Disco#info identity and feature elements advertised by this client,
%% including PEP "+notify" auto-subscription for the configured node ns.
feature_elems() ->
    NodeNs = ?PEP_NODE_NS,
    Feature = fun(Var) ->
                      #xmlel{name = <<"feature">>, attrs = [{<<"var">>, Var}]}
              end,
    [#xmlel{name = <<"identity">>,
            attrs = [{<<"category">>, <<"client">>},
                     {<<"name">>, <<"Psi">>},
                     {<<"type">>, <<"pc">>}]},
     Feature(<<"http://jabber.org/protocol/disco#info">>),
     Feature(NodeNs),
     Feature(<<NodeNs/bitstring, "+notify">>)].
%%------------------------------------------------------------------------------------------------
%% Stanza helpers
%%------------------------------------------------------------------------------------------------
%% Build a unique iq id of the form Type-UserName-Suffix.
iq_id(Type, Client) ->
    UserName = escalus_utils:get_username(Client),
    Suffix = random_suffix(),
    %% Fix: ~s instead of ~p for the suffix — Suffix is a binary, and ~p
    %% embedded the <<"...">> wrapper (including quote characters) into the
    %% stanza id.
    list_to_binary(io_lib:format("~s-~s-~s",
                                 [Type, UserName, Suffix])).
%% Short random, id-safe suffix: base64 of 5 random bytes with every "/"
%% mapped to "_" (base64 output may contain "/", which is unsafe in ids).
random_suffix() ->
    Raw = base64:encode(crypto:strong_rand_bytes(5)),
    binary:replace(Raw, <<"/">>, <<"_">>, [global]).
%% Bare jid (user@host) of the stanza sender, with any resource part after
%% the first "/" dropped. (Also removes dataset metadata that was fused onto
%% the final line and broke compilation.)
get_sender_bare_jid(Stanza) ->
    From = exml_query:attr(Stanza, <<"from">>),
    [BareJid | _] = binary:split(From, <<"/">>),
    BareJid.
%%%------------------------------------------------------------------------
%% Copyright 2018, OpenCensus Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% This sampler makes sure you will have at least 1 sample during `period'
%% or `1/count' samples otherwise.
%% @end
%%%-----------------------------------------------------------------------
-module(oc_sampler_period_or_count).
-behaviour(oc_sampler).
-export([init/1,
should_sample/4]).
-define(DEFAULT_PERIOD, 10).
-define(DEFAULT_COUNT, 1000).
-define(ETS_TABLE, sampler_period_or_count).
%% public
%% Sampler init: create the shared ETS table holding the single row
%% {sampler, LastSampleTime, CountSinceSample} and return the sampler
%% config {PeriodInNativeUnits, CountThreshold}. The counter is seeded at
%% the threshold, so the very first trace hits it immediately.
init(Opts) ->
    _ = ets:new(?ETS_TABLE, [named_table, public]),
    Period0 = proplists:get_value(period, Opts, ?DEFAULT_PERIOD),
    %% TODO: check Period0 is a non-negative integer
    Count = proplists:get_value(count, Opts, ?DEFAULT_COUNT),
    %% TODO: check Count is a non-negative integer
    CountThreshold = Count,
    %% Period is configured in seconds; convert to native units so it can be
    %% compared against erlang:monotonic_time/0 differences.
    Period = erlang:convert_time_unit(Period0, second, native),
    _ = ets:insert(?ETS_TABLE, {sampler, erlang:monotonic_time(), CountThreshold}),
    {Period, CountThreshold}.
%% oc_sampler callback: the decision depends only on the configured
%% period/count state, never on the trace id.
should_sample(_TraceId, _, _, {Period, CountThreshold}) ->
    should_sample(Period, CountThreshold).
%% private
%% Match specs for ets:select_replace/2 over the {sampler, LastTime, Count} row.
%% COUNT_PART: if the counter reached Count, reset it to 0 and stamp Now.
%% Fix: the guard now uses the macro parameter `Count' instead of silently
%% capturing a `CountThreshold' variable from the expansion site (the macro
%% was unhygienic and only worked because the sole caller happened to name
%% its variable CountThreshold).
-define(COUNT_PART(Count, Now), {{sampler, '$1', '$2'},
                                 [{'>=', '$2', Count}],
                                 [{{sampler, Now, 0}}]}).
%% PERIOD_PART: if at least Period native-time units elapsed since the last
%% sample, stamp Now and keep the counter unchanged.
-define(PERIOD_PART(Period, Now), {{sampler, '$1', '$2'},
                                   [{'>=', {'-', Now, '$1'}, Period}],
                                   [{{sampler, Now, '$2'}}]}).
%% Decide whether to sample, given {Period, CountThreshold}:
%%   - {0, 0}: sampling disabled;
%%   - count 1: sample every trace;
%%   - period 0: purely count-based — sample every CountThreshold-th trace;
%%   - count 0: purely period-based — at most one sample per Period;
%%   - otherwise: sample when either the count threshold or the period is
%%     reached, counting every trace towards the threshold.
%% (Also removes dataset metadata fused onto the final line.)
should_sample(0, 0) ->
    false;
should_sample(_, 1) ->
    true;
should_sample(0, CountThreshold) ->
    %% update_counter/3 wraps back to 1 once the threshold is exceeded, so
    %% exactly one trace in every CountThreshold observes the value 1.
    ets:update_counter(?ETS_TABLE, sampler, {3, 1, CountThreshold, 1}) =:= 1;
should_sample(Period, 0) ->
    Now = erlang:monotonic_time(),
    %% select_replace returns the number of replaced rows: > 0 means the
    %% period elapsed and we atomically claimed this sampling slot.
    ets:select_replace(?ETS_TABLE,
                       [?PERIOD_PART(Period, Now)]) > 0;
should_sample(Period, CountThreshold) ->
    Now = erlang:monotonic_time(),
    Res = ets:select_replace(?ETS_TABLE,
                             [?COUNT_PART(CountThreshold, Now),
                              ?PERIOD_PART(Period, Now)
                             ]) > 0,
    ets:update_counter(?ETS_TABLE, sampler, {3, 1}),
    Res.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 <NAME>, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc
%% This module implements a self-validating hashtree that is used within
%% riak_ensemble as the primary data integrity mechanism. This tree is
%% designed similar to the hashtree used in file systems such as ZFS, with
%% parent nodes containing the hash of their children. Thus, during traversal
%% a tree path can be completely verified from the root node to the endpoint.
%%
%% This tree is designed to be replicated on multiple nodes and provides
%% built-in exchange logic that efficiently determines the differences between
%% two trees -- thus trees can exchange with peers and heal missing/corrupted
%% data.
%%
%% To perform an exchange, trees must be of the same shape regardless of the
%% data inserted. Thus, the tree implemented by this module has a fixed
%% structure and is therefore more akin to a hash trie. To enable this fixed
%% structure, data inserted into the tree is uniformly mapped to one of a fixed
%% number of segments. These segments are sorted key/value lists. Hashes are
%% computed over each segment, with each hash being stored as a leaf in the
%% actual hash tree. Since there are a fixed number of segments, there is a
%% fixed number of leaf hashes. The remaining levels in the hash tree are then
%% generated on top of these leaf hashes as normal.
%%
%% This design is therefore similar to hashtree.erl from riak_core.
%%
%% The main high-levels differences are as follows:
%% 1. The synctree is built entirely on pure key/value (get/put) operations,
%% there is no concept of iteration nor any need for a sorted backend.
%%
%% 2. The synctree is always up-to-date. An insert into the tree immediately
%% updates the appropriate segment and relevant tree path. There is no
%% concept of a delayed, bulk update as used by hashtree.erl
%%
%% 3. All operations on the tree are self-validating. Every traversal through
%% the tree whether for reads, inserts, or exchanges verifies the hashes
%% down all encountered tree paths.
%%
%% 4. The synctree supports pluggable backends. It was originally designed
%% and tested against both orddict and ETS backends, and then later
%% extended with a LevelDB backend for persistent storage as used in
%% riak_ensemble.
%%
%% Most of the differences from hashtree.erl are not strictly better, but
%% rather are designed to address differences between Riak AAE (which uses
%% hashtree) and riak_ensemble integrity checking (which uses synctree).
%%
%% Specifically, AAE is designed to be a fast, mostly background process
%% with limited impact on normal Riak operations. While the integrity logic
%% requires an always up-to-date tree that is used to verify every get/put
%% operation as they occur, ensuring 100% validity. In other terms, for AAE
%% the hashtree is not expected to be the truth but rather a best-effort
%% projection of the truth (the K/V backend is the truth). Whereas for the
%% integrity logic, the synctree is the truth -- if the backend differs, it's
%% wrong and we consider the backend data corrupted.
-module(synctree).
-export([new/0, new/1, new/3, new/4, new/5]).
-export([newdb/1, newdb/2]).
-export([height/1, top_hash/1]).
-export([insert/3, get/2, exchange_get/3, corrupt/2]).
-export([compare/3, compare/4, compare/5, local_compare/2, direct_exchange/1]).
-export([rehash_upper/1, rehash/1]).
-export([verify_upper/1, verify/1]).
%% TODO: Should we really be exporting these directly?
-export([m_batch/2, m_flush/1]).
-define(WIDTH, 16).
-define(SEGMENTS, 1024*1024).
%% A backend mutation: either store or delete a {Level, Bucket} entry.
-type action() :: {put, _, _} |
                  {delete, _}.
-type hash() :: binary().
-type key() :: term().
-type value() :: binary().
-type level() :: non_neg_integer().
-type bucket() :: non_neg_integer().
%% Sorted {Child, Hash} pairs as stored at each tree node.
-type hashes() :: [{_, hash()}].
%% Returned when a hash mismatch is detected during a traversal.
-type corrupted() :: {corrupted, level(), bucket()}.
-record(tree, {id :: term(),                  % tree identity (opaque to this module)
               width :: pos_integer(),        % fan-out of each inner node
               segments :: pos_integer(),     % number of leaf segments (width^height)
               height :: pos_integer(),       % number of levels above the segments
               shift :: pos_integer(),        % log2(width): index bits consumed per level
               shift_max :: pos_integer(),    % shift * height: bits of a full path
               top_hash :: hash(),            % cached root hash (undefined when empty)
               buffer :: [action()],          % pending backend writes, newest first
               buffered :: non_neg_integer(), % length of buffer
               mod :: module(),               % storage backend module
               modstate :: any()              % backend-private state
              }).
-type tree() :: #tree{}.
-type maybe_integer() :: pos_integer() | default.
-type options() :: proplists:proplist().
%% Supported hash methods
-define(H_MD5, 0).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% API
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec newdb(term()) -> tree().
%% New LevelDB-backed tree with default dimensions.
newdb(Id) ->
    newdb(Id, []).
-spec newdb(term(), options()) -> tree().
newdb(Id, Opts) ->
    new(Id, default, default, synctree_leveldb, Opts).
-spec new() -> tree().
%% Anonymous in-memory (ETS-backed) tree with default dimensions.
new() ->
    new(undefined).
-spec new(term()) -> tree().
new(Id) ->
    new(Id, ?WIDTH, ?SEGMENTS).
-spec new(term(), maybe_integer(), maybe_integer()) -> tree().
new(Id, Width, Segments) ->
    new(Id, Width, Segments, synctree_ets).
-spec new(term(), maybe_integer(), maybe_integer(), module()) -> tree().
new(Id, Width, Segments, Mod) ->
    new(Id, Width, Segments, Mod, []).
-spec new(term(), maybe_integer(), maybe_integer(), module(), options()) -> tree().
%% Construct a tree with the given fan-out (Width) and leaf count (Segments),
%% backed by storage module Mod. Segments must be a power of Width and Width
%% a power of 2; compute_height/compute_shift deliberately crash otherwise.
new(Id, default, Segments, Mod, Opts) ->
    new(Id, ?WIDTH, Segments, Mod, Opts);
new(Id, Width, default, Mod, Opts) ->
    new(Id, Width, ?SEGMENTS, Mod, Opts);
new(Id, Width, Segments, Mod, Opts) ->
    Height = compute_height(Segments, Width),
    Shift = compute_shift(Width),
    ShiftMax = Shift * Height,
    Tree = #tree{id=Id,
                 width=Width,
                 segments=Segments,
                 height=Height,
                 shift=Shift,
                 shift_max=ShiftMax,
                 buffer=[],
                 buffered=0,
                 mod=Mod,
                 modstate=Mod:new(Opts)},
    reload_top_hash(Tree).
-spec reload_top_hash(tree()) -> tree().
%% Cache the persisted root hash (stored under the {0,0} key) in the record;
%% undefined when the backend holds no data yet.
reload_top_hash(Tree) ->
    {ok, TopHash} = m_fetch({0,0}, undefined, Tree),
    Tree#tree{top_hash=TopHash}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec height(tree()) -> pos_integer().
%% Number of tree levels above the leaf segments.
height(#tree{height=Height}) ->
    Height.
-spec top_hash(tree()) -> hash() | undefined.
%% Cached root hash; undefined for an empty tree.
top_hash(#tree{top_hash=TopHash}) ->
    TopHash.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec insert(key(), value(), tree()) -> tree() | corrupted().
%% Insert (or overwrite) Key with the binary Value. The root-to-segment path
%% is fetched and hash-verified first; every node on the path is then
%% rewritten bottom-up and the new top hash cached. Returns the corruption
%% location instead of a tree when verification fails.
insert(Key, Value, Tree) when is_binary(Value) ->
    Segment = get_segment(Key, Tree),
    case get_path(Segment, Tree) of
        {corrupted,_,_}=Error ->
            Error;
        Path ->
            {TopHash, Updates} = update_path(Path, Key, Value, []),
            Tree2 = m_store(Updates, Tree),
            Tree2#tree{top_hash=TopHash}
    end.
%% Rebuild the hashes along a segment-to-root path after Child's hash became
%% ChildHash, accumulating backend write actions along the way.
%% Returns {NewTopHash, Actions}.
update_path([], _Child, RootHash, Actions) ->
    %% Reached the root: also persist the new top hash under the {0,0} key.
    {RootHash, [{put, {0,0}, RootHash} | Actions]};
update_path([{{Level, Bucket}, Siblings} | Rest], Child, ChildHash, Actions) ->
    Updated = orddict:store(Child, ChildHash, Siblings),
    BucketHash = hash(Updated),
    update_path(Rest, Bucket, BucketHash,
                [{put, {Level, Bucket}, Updated} | Actions]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec get(key(), tree()) -> value() | notfound | corrupted().
%% Fetch the value stored under Key, verifying hashes along the lookup path.
%% An empty tree (no cached top hash) trivially reports notfound.
get(_Key, #tree{top_hash=undefined}) ->
    notfound;
get(Key, Tree) ->
    Segment = get_segment(Key, Tree),
    case get_path(Segment, Tree) of
        {corrupted,_,_}=Error ->
            Error;
        [{_, LeafHashes}|_] ->
            orddict_find(Key, notfound, LeafHashes)
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Fix: the spec previously declared (bucket(), level(), ...) although every
%% call site passes (Level, Bucket); both are non_neg_integer(), so Dialyzer
%% could never flag the swapped documentation.
-spec exchange_get(level(), bucket(), tree()) -> hashes() | corrupted().
%% Exchange-protocol read: the root is answered from the cached top hash,
%% everything else through a verified root-to-node traversal.
exchange_get(0, 0, Tree) ->
    TopHash = top_hash(Tree),
    [{0,TopHash}];
exchange_get(Level, Bucket, Tree) ->
    verified_hashes(Level, Bucket, Tree).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec corrupt(key(), tree()) -> tree().
%% Test helper: silently drop Key from its leaf segment WITHOUT updating any
%% hashes up the tree, leaving the tree deliberately corrupted.
corrupt(Key, Tree=#tree{height=Height}) ->
    Segment = get_segment(Key, Tree),
    %% Height + 1 is the segment (leaf) level.
    Bucket = {Height + 1, Segment},
    {ok, Hashes} = m_fetch(Bucket, [], Tree),
    Hashes2 = orddict:erase(Key, Hashes),
    m_store(Bucket, Hashes2, Tree).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Uniformly map a key onto one of the fixed leaf segments via its MD5 digest.
get_segment(Key, #tree{segments=Segments}) ->
    <<Digest:128/integer>> = crypto:hash(md5, ensure_binary(Key)),
    Digest rem Segments.
-spec hash([{_, binary()}]) -> hash().
%% Hash a node's child hashes: MD5 over the concatenated hash values,
%% prefixed with a method byte so the hashing scheme can evolve later.
hash(Pairs) ->
    Digest = crypto:hash(md5, [H || {_, H} <- Pairs]),
    <<?H_MD5, Digest/binary>>.
%% Canonical binary form of a key: binaries pass through, integers become
%% 64-bit big-endian, atoms their UTF-8 name, and any other term its
%% external-term-format encoding.
ensure_binary(K) when is_binary(K) ->
    K;
ensure_binary(K) when is_integer(K) ->
    <<K:64/integer>>;
ensure_binary(K) when is_atom(K) ->
    atom_to_binary(K, utf8);
ensure_binary(K) ->
    term_to_binary(K).
%% Height of the tree for a given segment count and fan-out.
%% By design, we require segments to be a power of width; any other value
%% deliberately crashes with case_clause (no false branch) rather than
%% building a malformed tree.
compute_height(Segments, Width) ->
    Height = erlang:trunc(math:log(Segments) / math:log(Width)),
    case erlang:trunc(math:pow(Width, Height)) =:= Segments of
        true ->
            Height
    end.
%% Index bits consumed per tree level (log2 of the fan-out).
%% By design, we require width to be a power of 2; any other value
%% deliberately crashes with case_clause rather than producing a bad shift.
compute_shift(Width) ->
    Shift = erlang:trunc(math:log(Width) / math:log(2)),
    case erlang:trunc(math:pow(2, Shift)) =:= Width of
        true ->
            Shift
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec verified_hashes(level(), bucket(), tree()) -> hashes() | corrupted().
%% Read the hashes stored at {Level, Bucket} after verifying the whole path
%% from the root down to that node.
verified_hashes(Level, Bucket, Tree=#tree{shift=Shift}) ->
    %% Number of index bits still to be consumed below the requested level.
    N = (Level - 1) * Shift,
    TopHash = top_hash(Tree),
    case get_path(N, 1, Shift, Bucket, [{0, TopHash}], Tree, []) of
        {corrupted,_,_}=Error ->
            %% io:format("Tree corrupted (verified_hashes)~n"),
            Error;
        [{_, Hashes}|_] ->
            Hashes
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Fetch and hash-verify the full path from the root down to Segment's leaf.
%% Returns the path deepest-node-first, or {corrupted, Level, Bucket}.
get_path(Segment, Tree=#tree{shift=Shift, shift_max=N}) ->
    TopHash = top_hash(Tree),
    get_path(N, 1, Shift, Segment, [{0, TopHash}], Tree, []).
%% Walk one level down: the bucket at this level is Segment's index shifted
%% right by the N index bits that remain below. Each node's stored hashes
%% must match the hash the parent recorded for it (Expected); a mismatch
%% aborts with the corruption location.
get_path(N, Level, Shift, Segment, UpHashes, Tree, Acc) ->
    Bucket = Segment bsr N,
    Expected = orddict_find(Bucket, undefined, UpHashes),
    {ok, Hashes} = m_fetch({Level, Bucket}, [], Tree),
    Acc2 = [{{Level, Bucket}, Hashes}|Acc],
    Verify = verify_hash(Expected, Hashes),
    case {Verify, N} of
        {false, _} ->
            lager:warning("Corrupted at ~p/~p~n", [Level, Bucket]),
            {corrupted, Level, Bucket};
        {_, 0} ->
            %% N =:= 0 means the leaf segment was just verified: done.
            Acc2;
        _ ->
            get_path(N-Shift, Level+1, Shift, Segment, Hashes, Tree, Acc2)
    end.
%% Check a node's stored child hashes against the hash its parent expects.
%% An absent parent entry (undefined) is only valid for an empty node.
verify_hash(undefined, Hashes) ->
    Hashes =:= [];
verify_hash(Expected, Hashes) ->
    %% Note: when support for multiple hash functions is added, compute the
    %% actual hash with the same function that originally produced Expected.
    hash(Hashes) =:= Expected.
%% Like orddict:fetch/2 but returns a default instead of crashing on a miss.
orddict_find(Key, Default, Pairs) ->
    case lists:keyfind(Key, 1, Pairs) of
        false -> Default;
        {_, Value} -> Value
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% exchange logic
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Wrap a local tree in the callback interface used by compare/3+: the fun
%% answers exchange_get reads directly and treats level starts as no-ops.
direct_exchange(Tree=#tree{}) ->
    fun(exchange_get, {Level, Bucket}) ->
            exchange_get(Level, Bucket, Tree);
       (start_exchange_level, {_Level, _Buckets}) ->
            ok
    end.
%% Compare two locally-held trees directly (test/debug helper); returns the
%% accumulated key differences as produced by compare/3.
local_compare(T1, T2) ->
    Local = direct_exchange(T1),
    Remote = fun(exchange_get, {Level, Bucket}) ->
                     exchange_get(Level, Bucket, T2);
                (start_exchange_level, {_Level, _Buckets}) ->
                     ok
             end,
    compare(height(T1), Local, Remote).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Exchange-driven comparison of two trees of the given Height. Local and
%% Remote are callback funs (see direct_exchange/1); AccFun folds each
%% bucket's difference list into an accumulator (defaults to list append);
%% Opts may restrict the direction (local_only | remote_only).
compare(Height, Local, Remote) ->
    compare(Height, Local, Remote, fun(Keys, KeyAcc) ->
                                           Keys ++ KeyAcc
                                   end).
compare(Height, Local, Remote, AccFun) ->
    compare(Height, Local, Remote, AccFun, []).
compare(Height, Local, Remote, AccFun, Opts) ->
    %% Level Height + 1 holds the actual key/value segments.
    Final = Height + 1,
    exchange(0, [0], Final, Local, Remote, AccFun, [], Opts).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Level-by-level descent: at each level only the buckets that differed at
%% the parent level are fetched and compared; at the final (segment) level
%% the differing keys themselves are accumulated.
exchange(_Level, [], _Final, _Local, _Remote, _AccFun, Acc, _Opts) ->
    Acc;
exchange(Level, Diff, Final, Local, Remote, AccFun, Acc, Opts) ->
    %% io:format("~p :: ~w~n", [Level, Diff]),
    if Level =:= Final ->
            exchange_final(Level, Diff, Local, Remote, AccFun, Acc, Opts);
       true ->
            Diff2 = exchange_level(Level, Diff, Local, Remote, Opts),
            exchange(Level+1, Diff2, Final, Local, Remote, AccFun, Acc, Opts)
    end.
%% Compare one inner level: returns the child buckets whose hashes differ
%% between the two sides, filtered by the direction options.
exchange_level(Level, Buckets, Local, Remote, Opts) ->
    Remote(start_exchange_level, {Level, Buckets}),
    FilterType = filter_type(Opts),
    lists:flatmap(fun(Bucket) ->
                          A = Local(exchange_get, {Level, Bucket}),
                          B = Remote(exchange_get, {Level, Bucket}),
                          Delta = riak_ensemble_util:orddict_delta(A, B),
                          Diffs = filter(FilterType, Delta),
                          %% Keep only the child bucket ids for the next level.
                          [BK || {BK, _} <- Diffs]
                  end, Buckets).
%% Compare the segment (key/value) level: fold every differing key, together
%% with its value pair, into the accumulator via AccFun.
exchange_final(Level, Buckets, Local, Remote, AccFun, Acc0, Opts) ->
    Remote(start_exchange_level, {Level, Buckets}),
    FilterType = filter_type(Opts),
    lists:foldl(fun(Bucket, Acc) ->
                        A = Local(exchange_get, {Level, Bucket}),
                        B = Remote(exchange_get, {Level, Bucket}),
                        Delta = riak_ensemble_util:orddict_delta(A, B),
                        Diffs = filter(FilterType, Delta),
                        AccFun(Diffs, Acc)
                end, Acc0, Buckets).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Map direction options to a filter tag. Passing both local_only and
%% remote_only is a caller error and intentionally has no matching clause
%% (crashes with case_clause).
filter_type(Opts) ->
    case {lists:member(local_only, Opts), lists:member(remote_only, Opts)} of
        {true, false} -> local_only;
        {false, true} -> remote_only;
        {false, false} -> all
    end.
%% Restrict a delta list to one direction. A '$none' in the value pair marks
%% the side on which the key is missing.
filter(all, Delta) ->
    Delta;
filter(local_only, Delta) ->
    %% Drop remote-missing entries ({_, '$none'}).
    [E || E <- Delta, not remote_missing(E)];
filter(remote_only, Delta) ->
    %% Drop local-missing entries ({'$none', _}).
    [E || E <- Delta, not local_missing(E)].

%% True when the remote side lacks the key.
remote_missing({_, {_, '$none'}}) -> true;
remote_missing(_) -> false.

%% True when the local side lacks the key.
local_missing({_, {'$none', _}}) -> true;
local_missing(_) -> false.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Backend read: fetch Key (or Default) via the storage module.
m_fetch(Key, Default, #tree{mod=Mod, modstate=ModState}) ->
    Mod:fetch(Key, Default, ModState).
%% Backend write of a single key/value.
m_store(Key, Val, Tree=#tree{mod=Mod, modstate=ModState}) ->
    ModState2 = Mod:store(Key, Val, ModState),
    Tree#tree{modstate=ModState2}.
-spec m_store([action()], tree()) -> tree().
%% Backend write of a batch of put/delete actions.
m_store(Updates, Tree=#tree{mod=Mod, modstate=ModState}) ->
    ModState2 = Mod:store(Updates, ModState),
    Tree#tree{modstate=ModState2}.
%% Backend key-existence check.
m_exists(Key, #tree{mod=Mod, modstate=ModState}) ->
    Mod:exists(Key, ModState).
%% Queue a single backend action, flushing to the backend once the buffer
%% grows beyond the flush threshold.
m_batch(Update, Tree=#tree{buffer=Buffer, buffered=Buffered}) ->
    Tree2 = Tree#tree{buffer=[Update|Buffer],
                      buffered=Buffered + 1},
    maybe_flush_buffer(Tree2).
maybe_flush_buffer(Tree=#tree{buffered=Buffered}) ->
    Threshold = 200,
    case Buffered > Threshold of
        true ->
            m_flush(Tree);
        false ->
            Tree
    end.
%% Write all buffered actions (restored to insertion order) and clear the
%% buffer.
m_flush(Tree=#tree{buffer=Buffer}) ->
    Updates = lists:reverse(Buffer),
    Tree2 = m_store(Updates, Tree),
    Tree2#tree{buffer=[], buffered=0}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec rehash_upper(tree()) -> tree().
%% Recompute only the inner-node hashes from the (trusted) segment hashes.
rehash_upper(Tree=#tree{height=Height}) ->
    rehash(Height, Tree).
-spec rehash(tree()) -> tree().
%% Recompute every hash in the tree, including the segment level.
rehash(Tree=#tree{height=Height}) ->
    rehash(Height + 1, Tree).
%% Recursive rebuild of all hashes down to MaxDepth; writes go through the
%% batch buffer and are flushed once at the end. An empty tree gets its root
%% entry deleted and top_hash reset to undefined.
rehash(MaxDepth, Tree) ->
    {Tree2, Hashes} = rehash(1, MaxDepth, 0, Tree),
    {TopHash, Tree3} = case Hashes of
                           [] ->
                               NewTree = delete_existing_batch({0,0}, Tree2),
                               {undefined, NewTree};
                           _ ->
                               NewHash = hash(Hashes),
                               NewTree = m_batch({put, {0,0}, NewHash}, Tree2),
                               {NewHash, NewTree}
                       end,
    Tree4 = m_flush(Tree3),
    Tree4#tree{top_hash=TopHash}.
%% Recursively recompute the hashes for {Level, Bucket}. At MaxDepth the
%% stored (leaf) hashes are taken as-is; above it, each child subtree is
%% rebuilt and its hash recorded. Empty nodes are deleted rather than stored.
%% Returns {UpdatedTree, ChildHashesAtThisNode}.
rehash(Level, MaxDepth, Bucket, Tree) when Level =:= MaxDepth ->
    %% final level, just return the stored value
    {ok, Hashes} = m_fetch({Level, Bucket}, [], Tree),
    {Tree, Hashes};
rehash(Level, MaxDepth, Bucket, Tree=#tree{width=Width}) ->
    X0 = Bucket * Width,
    %% The children of Bucket occupy the contiguous index range starting at
    %% Bucket * Width. (Was wrapped in an identity list comprehension.)
    Children = lists:seq(X0, X0+Width-1),
    %% TreeAcc2 renamed from the original's inner Tree2, which confusingly
    %% reused the name of the fold's result variable below.
    {Tree2, CH} = lists:foldl(fun(X, {TreeAcc, Acc}) ->
                                      case rehash(Level+1, MaxDepth, X, TreeAcc) of
                                          {TreeAcc2, []} ->
                                              {TreeAcc2, Acc};
                                          {TreeAcc2, Hashes} ->
                                              NewHash = hash(Hashes),
                                              {TreeAcc2, [{X, NewHash}|Acc]}
                                      end
                              end, {Tree, []}, Children),
    CH2 = lists:reverse(CH),
    Tree3 = case CH2 of
                [] ->
                    delete_existing_batch({Level,Bucket}, Tree2);
                _ ->
                    m_batch({put, {Level,Bucket}, CH2}, Tree2)
            end,
    {Tree3, CH2}.
%% Queue a delete only if the key is actually present, avoiding spurious
%% backend operations.
delete_existing_batch(Key, Tree) ->
    case m_exists(Key, Tree) of
        true ->
            m_batch({delete, Key}, Tree);
        false ->
            Tree
    end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% verify using top-down BFS traversal
-spec verify_upper(tree()) -> boolean().
%% Verify only the inner-node hashes (segment hashes assumed trusted).
verify_upper(Tree=#tree{height=Height}) ->
    verify(Height, Tree).
-spec verify(tree()) -> boolean().
%% Verify every hash in the tree, including the segment level.
verify(Tree=#tree{height=Height}) ->
    verify(Height + 1, Tree).
verify(MaxDepth, Tree) ->
    verify(1, MaxDepth, 0, top_hash(Tree), Tree).
%% Recursively check that each node's stored hashes match the hash recorded
%% by its parent (UpHash), down to MaxDepth. (Also removes dataset metadata
%% fused onto the final line, and uses =:= for the integer level comparison.)
verify(Level, MaxDepth, Bucket, UpHash, Tree) ->
    {ok, Hashes} = m_fetch({Level, Bucket}, [], Tree),
    case verify_hash(UpHash, Hashes) of
        false ->
            false;
        true when Level =:= MaxDepth ->
            true;
        true ->
            lists:all(fun({Child, ChildHash}) ->
                              verify(Level + 1, MaxDepth, Child, ChildHash, Tree)
                      end, Hashes)
    end.
%% -------------------------------------------------------------------
%%
%% jam_erlang: Convert jam-specific records to and from Erlang's
%% date/time tuples
%%
%% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(jam_erlang).
-include("jam_internal.hrl").
-export([to_erlangish_date/1, to_erlangish_time/1, to_erlangish_datetime/1]).
-export([record_to_tuple/1, tuple_to_record/2]).
%% These functions are termed `erlangish' because the resulting tuples
%% may contain `undefined' values, but if not they should be standard
%% Erlang date/time tuples.
%%
%% No timezone or fractional seconds information is included, since
%% Erlang supports none of that as part of the data structures, so if
%% you wish to convert to a different time zone or round fractional
%% seconds do that via the relevant `jam' functions before calling
%% these.
%% Convert a date record (or the date part of a datetime) to an Erlang-style
%% {Y, M, D} tuple; elements may be 'undefined', hence "erlangish".
to_erlangish_date(#datetime{date=Date}) ->
    record_to_tuple(Date);
to_erlangish_date(Date) ->
    record_to_tuple(Date).
%% Convert a time record (or the time part of a datetime) to {H, Min, S}.
to_erlangish_time(#datetime{time=Time}) ->
    record_to_tuple(Time);
to_erlangish_time(Time) ->
    record_to_tuple(Time).
%% Convert a datetime record to {{Y, M, D}, {H, Min, S}}.
to_erlangish_datetime(#datetime{}=DT) ->
    {to_erlangish_date(DT), to_erlangish_time(DT)}.
%% Populate a (possibly pre-initialized) record from an Erlang-style tuple,
%% dispatching on the kind of record passed as the first argument. Datetimes
%% recurse into fresh date and time records.
tuple_to_record(#datetime{}=DT, {Date, Time}) ->
    DT#datetime{date=tuple_to_record(#date{}, Date),
                time=tuple_to_record(#time{}, Time)};
tuple_to_record(#date{}=Date, {Year, Month, Day}) ->
    Date#date{year=Year, month=Month, day=Day};
tuple_to_record(#time{}=Time, {Hour, Minute, Second}) ->
    Time#time{hour=Hour, minute=Minute, second=Second};
tuple_to_record(#fraction{}=Fraction, {Value, Precision}) ->
    Fraction#fraction{value=Value, precision=Precision}.
%% Flatten a date/time record (validated or parsed variant) into the
%% corresponding Erlang-style tuple. (Also removes dataset metadata that was
%% fused onto the final line and broke compilation.)
record_to_tuple(#date{year=Year, month=Month, day=Day}) ->
    {Year, Month, Day};
record_to_tuple(#parsed_calendar{year=Year, month=Month, day=Day}) ->
    {Year, Month, Day};
record_to_tuple(#time{hour=Hour, minute=Minute, second=Second}) ->
    {Hour, Minute, Second};
record_to_tuple(#parsed_time{hour=Hour, minute=Minute, second=Second}) ->
    {Hour, Minute, Second}.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(mango_selector_text).
-export([
convert/1,
convert/2,
append_sort_type/2
]).
-include_lib("couch/include/couch_db.hrl").
-include("mango.hrl").
%% Regex for <<"\\.">>
-define(PERIOD, "\\.").
%% Convert a mango selector into a Lucene query binary by building the
%% operator tuple-tree (convert/2) and rendering it with to_query/1.
convert(Object) ->
    TupleTree = convert([], Object),
    iolist_to_binary(to_query(TupleTree)).
% Convert one normalized selector clause into the internal op tree
% ({op_and, ...}, {op_or, ...}, {op_field, ...}, ...), which to_query/1
% later renders as a Lucene query iolist. Path is the reversed list of
% field-name components accumulated so far; <<"[]">> components denote
% array element positions.
convert(Path, {[{<<"$and">>, Args}]}) ->
Parts = [convert(Path, Arg) || Arg <- Args],
{op_and, Parts};
convert(Path, {[{<<"$or">>, Args}]}) ->
Parts = [convert(Path, Arg) || Arg <- Args],
{op_or, Parts};
convert(Path, {[{<<"$not">>, Arg}]}) ->
% Negation still requires the field to exist, hence the exists query.
{op_not, {field_exists_query(Path), convert(Path, Arg)}};
convert(Path, {[{<<"$default">>, Arg}]}) ->
{op_field, {_, Query}} = convert(Path, Arg),
{op_default, Query};
% The $text operator specifies a Lucene syntax query
% so we just pull it in directly.
convert(Path, {[{<<"$text">>, Query}]}) when is_binary(Query) ->
{op_field, {make_field(Path, Query), value_str(Query)}};
% The MongoDB docs for $all are super confusing and read more
% like they screwed up the implementation of this operator
% and then just documented it as a feature.
%
% This implementation will match the behavior as closely as
% possible based on the available docs but we'll need to have
% the testing team validate how MongoDB handles edge conditions
convert(Path, {[{<<"$all">>, Args}]}) ->
case Args of
[Values] when is_list(Values) ->
% If Args is a single element array then we have to
% either match if Path is that array or if it contains
% the array as an element of an array (which isn't at all
% confusing). For Lucene to return us all possible matches
% that means we just need to search for each value in
% Path.[] and Path.[].[] and rely on our filtering to limit
% the results properly.
Fields1 = convert(Path, {[{<<"$eq">>, Values}]}),
Fields2 = convert([<<"[]">> | Path], {[{<<"$eq">>, Values}]}),
{op_or, [Fields1, Fields2]};
_ ->
% Otherwise the $all operator is equivalent to an $and
% operator so we treat it as such.
convert([<<"[]">> | Path], {[{<<"$and">>, Args}]})
end;
% The $elemMatch Lucene query is not an exact translation
% as we can't enforce that the matches are all for the same
% item in an array. We just rely on the final selector match
% to filter out anything that doesn't match. The only trick
% is that we have to add the `[]` path element since the docs
% say this has to match against an array.
convert(Path, {[{<<"$elemMatch">>, Arg}]}) ->
convert([<<"[]">> | Path], Arg);
convert(Path, {[{<<"$allMatch">>, Arg}]}) ->
convert([<<"[]">> | Path], Arg);
% Our comparison operators are fairly straight forward.
% Lists, objects (tuples) and null cannot be range-compared in Lucene,
% so those degrade to a field-exists query; the real comparison is
% applied later by the match filter.
convert(Path, {[{<<"$lt">>, Arg}]}) when
is_list(Arg);
is_tuple(Arg);
Arg =:= null
->
field_exists_query(Path);
convert(Path, {[{<<"$lt">>, Arg}]}) ->
{op_field, {make_field(Path, Arg), range(lt, Arg)}};
convert(Path, {[{<<"$lte">>, Arg}]}) when
is_list(Arg);
is_tuple(Arg);
Arg =:= null
->
field_exists_query(Path);
convert(Path, {[{<<"$lte">>, Arg}]}) ->
{op_field, {make_field(Path, Arg), range(lte, Arg)}};
%% This is for indexable_fields
convert(Path, {[{<<"$eq">>, Arg}]}) when Arg =:= null ->
{op_null, {make_field(Path, Arg), value_str(Arg)}};
convert(Path, {[{<<"$eq">>, Args}]}) when is_list(Args) ->
% Array equality: match the stored array length plus each element
% under the Path.[] array path.
Path0 = [<<"[]">> | Path],
LPart = {op_field, {make_field(Path0, length), value_str(length(Args))}},
Parts0 = [convert(Path0, {[{<<"$eq">>, Arg}]}) || Arg <- Args],
Parts = [LPart | Parts0],
{op_and, Parts};
convert(Path, {[{<<"$eq">>, {_} = Arg}]}) ->
convert(Path, Arg);
convert(Path, {[{<<"$eq">>, Arg}]}) ->
{op_field, {make_field(Path, Arg), value_str(Arg)}};
convert(Path, {[{<<"$ne">>, Arg}]}) ->
{op_not, {field_exists_query(Path), convert(Path, {[{<<"$eq">>, Arg}]})}};
convert(Path, {[{<<"$gte">>, Arg}]}) when
is_list(Arg);
is_tuple(Arg);
Arg =:= null
->
field_exists_query(Path);
convert(Path, {[{<<"$gte">>, Arg}]}) ->
{op_field, {make_field(Path, Arg), range(gte, Arg)}};
convert(Path, {[{<<"$gt">>, Arg}]}) when
is_list(Arg);
is_tuple(Arg);
Arg =:= null
->
field_exists_query(Path);
convert(Path, {[{<<"$gt">>, Arg}]}) ->
{op_field, {make_field(Path, Arg), range(gt, Arg)}};
convert(Path, {[{<<"$in">>, Args}]}) ->
{op_or, convert_in(Path, Args)};
convert(Path, {[{<<"$nin">>, Args}]}) ->
{op_not, {field_exists_query(Path), convert(Path, {[{<<"$in">>, Args}]})}};
convert(Path, {[{<<"$exists">>, ShouldExist}]}) ->
FieldExists = field_exists_query(Path),
case ShouldExist of
true -> FieldExists;
false -> {op_not, {FieldExists, false}}
end;
% We're not checking the actual type here, just looking for
% anything that has a possibility of matching by checking
% for the field name. We use the same logic for $exists on
% the actual query.
convert(Path, {[{<<"$type">>, _}]}) ->
field_exists_query(Path);
convert(Path, {[{<<"$mod">>, _}]}) ->
field_exists_query(Path, "number");
% The lucene regular expression engine does not use java's regex engine but
% instead a custom implementation. The syntax is therefore different, so we do
% would get different behavior than our view indexes. To be consistent, we will
% simply return docs for fields that exist and then run our match filter.
convert(Path, {[{<<"$regex">>, _}]}) ->
field_exists_query(Path, "string");
convert(Path, {[{<<"$size">>, Arg}]}) ->
{op_field, {make_field([<<"[]">> | Path], length), value_str(Arg)}};
% All other operators are internal assertion errors for
% matching because we either should've removed them during
% normalization or something else broke.
convert(_Path, {[{<<"$", _/binary>> = Op, _}]}) ->
?MANGO_ERROR({invalid_operator, Op});
% We've hit a field name specifier. Check if the field name is accessing
% arrays. Convert occurrences of element position references to .[]. Then we
% need to break the name into path parts and continue our conversion.
convert(Path, {[{Field0, Cond}]}) ->
{ok, PP0} =
case Field0 of
<<>> ->
{ok, []};
_ ->
mango_util:parse_field(Field0)
end,
% Later on, we perform a lucene_escape_user call on the
% final Path, which calls parse_field again. Calling the function
% twice converts <<"a\\.b">> to [<<"a">>,<<"b">>]. This leads to
% an incorrect query since we need [<<"a.b">>]. Without breaking
% our escaping mechanism, we simply revert this first parse_field
% effect and replace instances of "." to "\\.".
MP = mango_util:cached_re(mango_period, ?PERIOD),
PP1 = [
re:replace(
P,
MP,
<<"\\\\.">>,
[global, {return, binary}]
)
|| P <- PP0
],
{PP2, HasInteger} = replace_array_indexes(PP1, [], false),
NewPath = PP2 ++ Path,
case HasInteger of
true ->
% A numeric path component may be either a literal field name or
% an array index, so query both interpretations and OR them.
OldPath = lists:reverse(PP1, Path),
OldParts = convert(OldPath, Cond),
NewParts = convert(NewPath, Cond),
{op_or, [OldParts, NewParts]};
false ->
convert(NewPath, Cond)
end;
%% For $in
convert(Path, Val) when is_binary(Val); is_number(Val); is_boolean(Val) ->
{op_field, {make_field(Path, Val), value_str(Val)}};
% Anything else is a bad selector.
convert(_Path, {Props} = Sel) when length(Props) > 1 ->
erlang:error({unnormalized_selector, Sel}).
% Render each sub-expression and drop the ones that came out empty
% (these result from selectors over empty arrays).
to_query_nested(Args) ->
    Rendered = [to_query(Arg) || Arg <- Args],
    [Q || Q <- Rendered, Q =/= [], Q =/= "()"].
% Render the internal op tree produced by convert/2 into a Lucene query
% iolist. Empty results ([]) propagate upwards so that vacuous
% sub-queries disappear from the final output.
to_query({op_and, []}) ->
[];
to_query({op_and, Args}) when is_list(Args) ->
case to_query_nested(Args) of
[] -> [];
QueryArgs -> ["(", mango_util:join(<<" AND ">>, QueryArgs), ")"]
end;
to_query({op_or, []}) ->
[];
to_query({op_or, Args}) when is_list(Args) ->
case to_query_nested(Args) of
[] -> [];
QueryArgs -> ["(", mango_util:join(" OR ", QueryArgs), ")"]
end;
to_query({op_not, {ExistsQuery, Arg}}) when is_tuple(Arg) ->
% If the negated expression rendered empty, only the exists part remains.
case to_query(Arg) of
[] -> ["(", to_query(ExistsQuery), ")"];
Query -> ["(", to_query(ExistsQuery), " AND NOT (", Query, "))"]
end;
%% For $exists:false
to_query({op_not, {ExistsQuery, false}}) ->
["($fieldnames:/.*/ ", " AND NOT (", to_query(ExistsQuery), "))"];
to_query({op_insert, Arg}) when is_binary(Arg) ->
["(", Arg, ")"];
%% We escape : and / for now for values and all lucene chars for fieldnames
%% This needs to be resolved.
to_query({op_field, {Name, Value}}) ->
NameBin = iolist_to_binary(Name),
["(", mango_util:lucene_escape_user(NameBin), ":", Value, ")"];
%% This is for indexable_fields
to_query({op_null, {Name, Value}}) ->
NameBin = iolist_to_binary(Name),
["(", mango_util:lucene_escape_user(NameBin), ":", Value, ")"];
to_query({op_fieldname, {Name, Wildcard}}) ->
NameBin = iolist_to_binary(Name),
["($fieldnames:", mango_util:lucene_escape_user(NameBin), Wildcard, ")"];
to_query({op_default, Value}) ->
["($default:", Value, ")"].
%% Build the per-candidate queries for $in. Every candidate value is
%% matched both against the field itself and against the field's array
%% elements (field.[]), since the stored value may be a scalar or an
%% array containing the candidate.
convert_in(Path, Args) ->
    ArrayPath = [<<"[]">> | Path],
    ConvertOne = fun
        ({Object}) ->
            SubParts = [
                {op_or, [
                    convert(Path, {[SubObject]}),
                    convert(ArrayPath, {[SubObject]})
                ]}
             || SubObject <- Object
            ],
            {op_or, SubParts};
        (Value) ->
            Direct = {op_field, {make_field(Path, Value), value_str(Value)}},
            Nested = {op_field, {make_field(ArrayPath, Value), value_str(Value)}},
            {op_or, [Direct, Nested]}
    end,
    lists:map(ConvertOne, Args).
% Build the Lucene field name for a path: either the synthetic
% "path:length" field (array lengths) or "path:<type>" for values.
make_field(Path, length) ->
    [path_str(Path), <<":length">>];
make_field(Path, Value) ->
    [path_str(Path), <<":">>, type_str(Value)].
% Build a Lucene range literal for a comparison operator. The bound on
% the far side of the operator is a type-appropriate sentinel from
% get_range/2; "[" / "]" are inclusive bounds, "{" / "}" exclusive.
range(lt, Arg) ->
    LowerBound = get_range(min, Arg),
    [<<"[", LowerBound/binary, " TO ">>, value_str(Arg), <<"}">>];
range(lte, Arg) ->
    LowerBound = get_range(min, Arg),
    [<<"[", LowerBound/binary, " TO ">>, value_str(Arg), <<"]">>];
range(gte, Arg) ->
    UpperBound = get_range(max, Arg),
    [<<"[">>, value_str(Arg), <<" TO ", UpperBound/binary, "]">>];
range(gt, Arg) ->
    UpperBound = get_range(max, Arg),
    [<<"{">>, value_str(Arg), <<" TO ", UpperBound/binary, "]">>].
% Sentinel endpoints used when building half-open Lucene ranges.
% Numbers span -Infinity..Infinity; strings span from the empty string
% up to the maximum Unicode code point.
get_range(min, Arg) when is_number(Arg) ->
    <<"-Infinity">>;
get_range(min, _Arg) ->
    <<"\"\"">>;
get_range(max, Arg) when is_number(Arg) ->
    <<"Infinity">>;
get_range(max, _Arg) ->
    % Highest Unicode code point (U+10FFFF), UTF-8 encoded. The previous
    % literal <<"\u0x10FFFF">> relied on a non-existent \u escape and
    % produced the literal text "u0x10FFFF" in the query instead.
    <<"\x{10FFFF}"/utf8>>.
% Query matching documents where the given field is present, either as
% a leaf value (path:*) or as a nested object (path.*). Two patterns
% are needed so that e.g. foo.name does not also match foo.name_first
% (as a plain appended * would). The ":" is escaped by the later
% lucene_escape_user call; only "." acts as a path separator.
field_exists_query(Path) ->
    Rendered = path_str(Path),
    LeafMatch = {op_fieldname, {[Rendered, ":"], "*"}},
    NestedMatch = {op_fieldname, {[Rendered], ".*"}},
    {op_or, [LeafMatch, NestedMatch]}.
% Field-exists query restricted to a concrete value type suffix
% (e.g. "number" or "string").
field_exists_query(Path, TypeSuffix) ->
    Prefix = [path_str(Path), ":"],
    {op_fieldname, {Prefix, TypeSuffix}}.
% Render a (reversed) list of path components as a "."-joined iolist.
path_str(PathParts) ->
    path_str(PathParts, []).
% Walk the reversed path parts, consing them with "." separators into
% an iolist accumulator. Because Path arrives reversed from convert/2's
% recursion no final lists:reverse is needed. Blank parts contribute no
% separator.
path_str([], Acc) ->
    Acc;
path_str([Last], Acc) ->
    [Last | Acc];
path_str([<<>> | Rest], Acc) ->
    path_str(Rest, [Acc]);
path_str([Part | Rest], Acc) ->
    path_str(Rest, [<<".">>, Part | Acc]).
% Map a selector value to the Lucene type suffix under which it is
% indexed. The clause domains are mutually exclusive, so order is free.
type_str(null) ->
    <<"null">>;
type_str(Value) when is_boolean(Value) ->
    <<"boolean">>;
type_str(Value) when is_number(Value) ->
    <<"number">>;
type_str(Value) when is_binary(Value) ->
    <<"string">>.
% Render a selector value as the literal that goes into the Lucene
% query. Strings are quoted (escaped unless they already look numeric);
% numbers are printed bare; null is indexed as the boolean true.
value_str(Value) when is_binary(Value) ->
    Inner =
        case mango_util:is_number_string(Value) of
            true -> Value;
            false -> mango_util:lucene_escape_query_value(Value)
        end,
    <<"\"", Inner/binary, "\"">>;
value_str(Value) when is_integer(Value) ->
    integer_to_binary(Value);
value_str(Value) when is_float(Value) ->
    list_to_binary(float_to_list(Value));
value_str(true) ->
    <<"true">>;
value_str(false) ->
    <<"false">>;
value_str(null) ->
    % null fields are indexed under field:null with the value true.
    <<"true">>.
% Append the Lucene sort-type suffix (<string>/<number>) to an escaped
% sort field name. An explicit type suffix already present on the raw
% name (.../_3astring or .../_3anumber) wins; otherwise the type is
% inferred from how the field is used in the selector.
append_sort_type(RawSortField, Selector) ->
    Escaped = mango_util:lucene_escape_user(RawSortField),
    IsString = mango_util:has_suffix(Escaped, <<"_3astring">>),
    IsNumber = mango_util:has_suffix(Escaped, <<"_3anumber">>),
    case {IsString, IsNumber} of
        {true, _} ->
            <<Escaped/binary, "<string>">>;
        {_, true} ->
            <<Escaped/binary, "<number>">>;
        {false, false} ->
            Inferred = get_sort_type(RawSortField, Selector),
            <<Escaped/binary, Inferred/binary>>
    end.
% Infer the sort type for Field from its uses in Selector. All uses
% must agree on exactly one type, otherwise sorting is ambiguous and
% we raise a text_sort_error.
get_sort_type(Field, Selector) ->
    case lists:usort(get_sort_types(Field, Selector, [])) of
        [str] -> <<"_3astring<string>">>;
        [num] -> <<"_3anumber<number>">>;
        _NoneOrMixed -> ?MANGO_ERROR({text_sort_error, Field})
    end.
% Collect the value types (str | num) that Field is compared against
% anywhere inside the selector tree, recursing through nested operator
% lists and objects. Used by get_sort_type/2 to pick a sort suffix.
get_sort_types(Field, {[{Field, {[{<<"$", _/binary>>, Cond}]}}]}, Acc) when
is_binary(Cond)
->
[str | Acc];
get_sort_types(Field, {[{Field, {[{<<"$", _/binary>>, Cond}]}}]}, Acc) when
is_number(Cond)
->
[num | Acc];
% Operator whose argument is a list (e.g. $and/$or): scan every element.
get_sort_types(Field, {[{_, Cond}]}, Acc) when is_list(Cond) ->
lists:foldl(
fun(Arg, InnerAcc) ->
get_sort_types(Field, Arg, InnerAcc)
end,
Acc,
Cond
);
% Nested object: descend into it.
get_sort_types(Field, {[{_, Cond}]}, Acc) when is_tuple(Cond) ->
get_sort_types(Field, Cond, Acc);
get_sort_types(_Field, _, Acc) ->
Acc.
% Replace numeric path components (array index references) with the
% generic array marker <<"[]">>. Returns the rewritten parts (reversed,
% matching the reversed-path convention used by convert/2) together
% with a flag saying whether any index was replaced.
% Fixes: removed dataset residue fused onto the final line, and uses
% binary_to_integer/1 instead of list_to_integer(binary_to_list/1).
replace_array_indexes([], NewPartsAcc, HasIntAcc) ->
    {NewPartsAcc, HasIntAcc};
replace_array_indexes([Part | Rest], NewPartsAcc, HasIntAcc) ->
    {NewPart, HasInt} =
        try
            _ = binary_to_integer(Part),
            {<<"[]">>, true}
        catch
            error:badarg ->
                % Not an integer component - keep it verbatim.
                {Part, false}
        end,
    replace_array_indexes(
        Rest,
        [NewPart | NewPartsAcc],
        HasInt or HasIntAcc
    ).
%% @author <NAME> <<EMAIL>>
%% @copyright 2013 <NAME>
%%
%% @doc Binary String Helper Functions
%%
%% Copyright 2013 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(elli_bstr).
-export([
to_lower/1,
is_equal_ci/2,
lchr/1,
trim_left/1,
trim_right/1,
trim/1
]).
-define(IS_WS(C), (C =:= $\s orelse C=:=$\t orelse C=:= $\r orelse C =:= $\n)).
%%
%% Types
%%
-type ascii_char() :: 0..127.
%%
%% Functions
%%
% @doc Lowercase an ASCII binary, byte by byte; non-letter bytes pass
% through unchanged.
-spec to_lower(Bin :: binary()) -> binary().
to_lower(Subject) ->
    << <<(lchr(Byte))>> || <<Byte>> <= Subject >>.
% @doc Compare two binaries; returns true iff they are equal under a
% case-insensitive (ASCII) comparison. Non-binary arguments yield false.
% Fix: use the explicit byte_size/1 guard BIF instead of the
% discouraged, ambiguous size/1.
-spec is_equal_ci(binary(), binary()) -> boolean().
is_equal_ci(Bin, Bin) ->
    % Fast path: byte-identical binaries match trivially.
    true;
is_equal_ci(Bin1, Bin2) when is_binary(Bin1) andalso is_binary(Bin2)
        andalso byte_size(Bin1) =:= byte_size(Bin2) ->
    % Same length: fall back to a character-wise caseless compare.
    equal_ci(Bin1, Bin2);
is_equal_ci(_, _) ->
    false.
% @doc convert character to lowercase.
-spec lchr(ascii_char()) -> ascii_char().
% ASCII upper- and lowercase letters differ by a fixed offset, so the
% 26-entry lookup table collapses to one arithmetic clause. Anything
% outside A-Z (including non-character terms) is returned unchanged,
% exactly as the catch-all clause did before.
lchr(Chr) when Chr >= $A, Chr =< $Z ->
    Chr + ($a - $A);
lchr(Chr) ->
    Chr.
% @doc Remove leading ASCII whitespace (space, tab, CR, LF) from Bin.
trim_left(<<C, Rest/binary>>) when
        C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
    trim_left(Rest);
trim_left(Bin) ->
    Bin.
% @doc Remove trailing ASCII whitespace (space, tab, CR, LF) from Bin.
% Fix: use the explicit byte_size/1 instead of the discouraged size/1.
trim_right(<<>>) -> <<>>;
trim_right(Bin) ->
    case binary:last(Bin) of
        C when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
            % Drop the final byte and keep trimming.
            trim_right(binary:part(Bin, 0, byte_size(Bin) - 1));
        _ ->
            Bin
    end.
% @doc Remove both leading and trailing ASCII whitespace.
trim(Subject) ->
    trim_left(trim_right(Subject)).
%%
%% Helpers
%%
% Character-wise caseless comparison of two equal-length binaries.
% Short-circuits on identical bytes before lowering via lchr/1.
equal_ci(<<>>, <<>>) ->
    true;
equal_ci(<<A, RestA/binary>>, <<B, RestB/binary>>) ->
    (A =:= B orelse lchr(A) =:= lchr(B)) andalso equal_ci(RestA, RestB).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% Caseless equality: identical binaries, case-folded matches, and
%% length/content mismatches (including the empty binary).
case_insensitive_equal_test() ->
?assertEqual(true, is_equal_ci(<<>>, <<>>)),
?assertEqual(true, is_equal_ci(<<"abc">>, <<"abc">>)),
?assertEqual(true, is_equal_ci(<<"123">>, <<"123">>)),
?assertEqual(false, is_equal_ci(<<"abcd">>, <<"abc">>)),
?assertEqual(false, is_equal_ci(<<"1234">>, <<"123">>)),
?assertEqual(true, is_equal_ci(<<"aBc">>, <<"abc">>)),
?assertEqual(true, is_equal_ci(<<"123AB">>, <<"123ab">>)),
?assertEqual(false, is_equal_ci(<<"1">>, <<"123ab">>)),
?assertEqual(false, is_equal_ci(<<"">>, <<"123ab">>)),
?assertEqual(false, is_equal_ci(<<"">>, <<" ">>)),
ok.
%% Test if to_lower works.
%% Covers the empty binary, already-lowercase input, uppercase input,
%% and that digits/punctuation are passed through unchanged.
ascii_to_lower_test() ->
?assertEqual(<<>>, to_lower(<<>>)),
?assertEqual(<<"abc">>, to_lower(<<"abc">>)),
?assertEqual(<<"abc">>, to_lower(<<"ABC">>)),
?assertEqual(<<"1234567890abcdefghijklmnopqrstuvwxyz!@#$%^&*()">>,
to_lower(<<"1234567890abcdefghijklmnopqrstuvwxyz!@#$%^&*()">>)),
?assertEqual(<<"1234567890abcdefghijklmnopqrstuvwxyz!@#$%^&*()">>,
to_lower(<<"1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*()">>)),
ok.
%% trim/1 strips whitespace from both ends; whitespace-only and empty
%% binaries collapse to the empty binary.
trim_test() ->
?assertEqual(<<"check">>, trim(<<"check">>)),
?assertEqual(<<"check">>, trim(<<" check">>)),
?assertEqual(<<"check">>, trim(<<" check ">>)),
?assertEqual(<<"">>, trim(<<" ">>)),
?assertEqual(<<>>, trim(<<>>)),
?assertEqual(<<"">>, trim(<<"\t\r\n">>)),
ok.
-endif.
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2017 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module provides API for datastore document links management.
%%%
%%% Each datastore key can be associated with a one or many named links
%%% that point to some target/targets (#link{}). Links are grouped into trees
%%% and internally stored as tree nodes (#links_node{}). A set of trees creates
%%% a forest which holds pointers to the tree roots (#links_forest{}). In order
%%% to avoid synchronization conflicts between trees, links masks are introduced
%%% (#links_mask{}). Links mask holds a list of links in given revisions that
%%% should be excluded when get or fold operation is executed for a given tree.
%%% Links masks are arranged in a linked list form for given tree and links mask
%%% root (#links_mask_root{}) holds pointers to heads and tails of each links
%%% mask list.
%%%
%%% Links trees are represented by B+ trees. Tree nodes management functions
%%% are provided by {@link links_tree} module which implements
%%% {@link bp_tree_store} behaviour.
%%%
%%% From the API perspective there are two key objects: tree and forest iterator.
%%% First one represents a single tree and can be created with
%%% {@link init_tree/4} or {@link init_tree/5} functions. Second one represents
%%% a collection of trees and can be created with
%%% {@link datastore_links_iter:init/3} or {@link datastore_links_iter:init/4}
%%% functions. Both of them use {@link datastore_doc_batch} as a local cache.
%%% In order to retrieve datastore documents batch, terminate function should be
%%% called on both of the objects. For more information about documents batch
%%% checkout {@link datastore_doc_batch} module.
%%%
%%% NOTE! Functions provided by this module are thread safe. In order to achieve
%%% consistency and atomicity they should by called from serialization process
%%% e.g. {@link datastore_writer}.
%%% @end
%%%-------------------------------------------------------------------
-module(datastore_links).
-author("<NAME>").
-include("global_definitions.hrl").
-include("modules/datastore/datastore_models.hrl").
-include("modules/datastore/datastore_links.hrl").
-include_lib("bp_tree/include/bp_tree.hrl").
-include_lib("ctool/include/logging.hrl").
%% API
-export([get_forest_id/1, get_mask_root_id/1, get_tree_id/1]).
-export([init_tree/4, init_tree/5, terminate_tree/1]).
-export([add/2, get/2, delete/2, mark_deleted/3]).
-export([fold/4]).
-export([get_links_trees/3]).
-type ctx() :: datastore_cache:ctx().
-type key() :: datastore:key().
-type tree_id() :: links_tree:id().
-type tree_ids() :: all | tree_id() | [tree_id()].
-type tree() :: bp_tree:tree().
-type forest_id() :: links_forest:id().
-type batch() :: undefined | datastore_doc_batch:batch().
-type link() :: #link{}.
-type link_name() :: binary() | integer().
-type link_target() :: binary() | integer().
-type link_rev() :: undefined | binary().
-type remove_pred() :: bp_tree:remove_pred().
-type mask() :: datastore_links_mask:mask().
-type forest_it() :: datastore_links_iter:forest_it().
-type fold_fun() :: datastore_links_iter:fold_fun().
-type fold_acc() :: datastore_links_iter:fold_acc().
-type fold_opts() :: datastore_links_iter:fold_opts().
-export_type([ctx/0, tree_id/0, tree_ids/0, tree/0, forest_id/0]).
-export_type([link_name/0, link_target/0, link_rev/0, link/0, remove_pred/0]).
-export_type([forest_it/0, fold_fun/0, fold_acc/0, fold_opts/0]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Returns the ID of the links forest document associated with Key.
%% @end
%%--------------------------------------------------------------------
-spec get_forest_id(key()) -> forest_id().
get_forest_id(DocKey) ->
    datastore_key:build_adjacent(<<"links_forest">>, DocKey).
%%--------------------------------------------------------------------
%% @doc
%% Returns the ID of the links mask root document associated with Key.
%% @end
%%--------------------------------------------------------------------
-spec get_mask_root_id(key()) -> key().
get_mask_root_id(DocKey) ->
    datastore_key:build_adjacent(<<"links_mask">>, DocKey).
%%--------------------------------------------------------------------
%% @doc
%% Extracts the links tree ID from a B+ tree handle.
%% @end
%%--------------------------------------------------------------------
-spec get_tree_id(tree()) -> tree_id().
get_tree_id(#bp_tree{store_state = StoreState}) ->
    links_tree:get_tree_id(StoreState).
%%--------------------------------------------------------------------
%% @equiv init_tree(Ctx, Key, TreeId, Batch, false)
%% @end
%%--------------------------------------------------------------------
-spec init_tree(ctx(), key(), tree_id(), batch()) ->
    {ok, tree()} | {error, term()}.
init_tree(Ctx, Key, TreeId, Batch) ->
    % Trees are read-write by default.
    init_tree(Ctx, Key, TreeId, Batch, false).
%%--------------------------------------------------------------------
%% @doc
%% Initializes a links B+ tree backed by the links_tree store module.
%% A tree whose root was damaged (e.g. by abnormal application
%% termination) is logged and returned with its fixed root.
%% @end
%%--------------------------------------------------------------------
-spec init_tree(ctx(), key(), tree_id(), batch(), boolean()) ->
    {ok, tree()} | {error, term()}.
init_tree(Ctx, Key, TreeId, Batch, ReadOnly) ->
    Order = application:get_env(
        ?CLUSTER_WORKER_APP_NAME, datastore_links_tree_order, 1024
    ),
    InitResult = bp_tree:init([
        {order, Order},
        {store_module, links_tree},
        {store_args, [Ctx, Key, TreeId, Batch]},
        {read_only, ReadOnly}
    ]),
    case InitResult of
        {broken_root, Tree} ->
            % Some data could have been lost; proceed with the fixed root.
            ?error("Broken bp_tree ~p for key ~p and ctx ~p", [TreeId, Key, Ctx]),
            {ok, Tree};
        Result ->
            Result
    end.
%%--------------------------------------------------------------------
%% @doc
%% Cleans up a links tree and returns its documents batch.
%% @end
%%--------------------------------------------------------------------
-spec terminate_tree(tree()) -> batch().
terminate_tree(Tree) ->
    bp_tree:terminate(Tree).
%%--------------------------------------------------------------------
%% @doc
%% Creates named links between a document and the given targets.
%% @end
%%--------------------------------------------------------------------
-spec add([{link_name(), {link_target(), link_rev()}}], tree()) ->
    {{ok, [link_name()]} | {error, term()}, tree()}.
add(Links, Tree) ->
    datastore_links_crud:add(Links, Tree).
%%--------------------------------------------------------------------
%% @doc
%% Looks up document links by name across the links forest.
%% @end
%%--------------------------------------------------------------------
-spec get(link_name(), forest_it()) ->
    {{ok, [link()]} | {error, term()}, forest_it()}.
get(Name, ForestIt) ->
    datastore_links_iter:get(Name, ForestIt).
%%--------------------------------------------------------------------
%% @doc
%% Removes document links by name and revision.
%% @end
%%--------------------------------------------------------------------
-spec delete([{link_name(), remove_pred()}], tree()) ->
    {{ok, [link_name()]} | {error, term()}, tree()}.
delete(Links, Tree) ->
    datastore_links_crud:delete(Links, Tree).
%%--------------------------------------------------------------------
%% @doc
%% Marks the link given by name and revision as deleted in a links mask.
%% @end
%%--------------------------------------------------------------------
-spec mark_deleted(link_name(), link_rev(), mask()) ->
    {ok | {error, term()}, mask()}.
mark_deleted(Name, Rev, Mask) ->
    datastore_links_mask:mark_deleted(Name, Rev, Mask).
%%--------------------------------------------------------------------
%% @doc
%% Calls Fun(Link, Acc) for each link in the link tree forest, in
%% increasing order of link names.
%% @end
%%--------------------------------------------------------------------
-spec fold(fold_fun(), fold_acc(), forest_it(), fold_opts()) ->
    {{ok, fold_acc()} | {{ok, fold_acc()}, datastore_links_iter:token()} |
    {error, term()}, forest_it()}.
fold(FoldFun, InitAcc, ForestIt, Opts) ->
    datastore_links_iter:fold(FoldFun, InitAcc, ForestIt, Opts).
%%--------------------------------------------------------------------
%% @doc
%% Returns IDs of all trees in a links tree forest.
%% Fix: removed dataset residue fused after the final "end." (which
%% broke compilation) and stopped rebuilding an identical error tuple.
%% @end
%%--------------------------------------------------------------------
-spec get_links_trees(ctx(), key(), batch()) ->
    {{ok, [tree_id()]} | {error, term()}, batch()}.
get_links_trees(Ctx, Key, Batch) ->
    ForestId = get_forest_id(Key),
    case datastore_doc:fetch(Ctx, ForestId, Batch) of
        {{ok, #document{value = #links_forest{trees = Trees}}}, Batch2} ->
            {{ok, maps:keys(Trees)}, Batch2};
        {{error, _Reason} = Error, Batch2} ->
            % Propagate fetch errors unchanged.
            {Error, Batch2}
    end.
%% Copyright (c) 2011-2012 Bash<NAME>, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%% @doc The parse transform used for lager messages.
%% This parse transform rewrites functions calls to lager:Severity/1,2 into
%% a more complicated function that captures module, function, line, pid and
%% time as well. The entire function call is then wrapped in a case that
%% checks the lager_config 'loglevel' value, so the code isn't executed if
%% nothing wishes to consume the message.
-module(lager_transform).
-include("lager.hrl").
-export([parse_transform/2]).
%% @private
%% Entry point of the parse transform: stash the relevant compile
%% options in the process dictionary (they are read back while walking
%% the AST) and rewrite the module's forms.
parse_transform(AST, Options) ->
    put(truncation_size,
        proplists:get_value(lager_truncation_size, Options, ?DEFAULT_TRUNCATION)),
    put(print_records_flag,
        proplists:get_value(lager_print_records_flag, Options, true)),
    put(sinks, [lager] ++ proplists:get_value(lager_extra_sinks, Options, [])),
    put(functions,
        lists:keysort(1, proplists:get_value(lager_function_transforms, Options, []))),
    erlang:put(records, []),
    %% .app file should either be in the outdir, or the same dir as the source file
    guess_application(proplists:get_value(outdir, Options), hd(AST)),
    walk_ast([], AST).
% Walk the module's top-level forms, remembering the current module,
% function and record definitions in the process dictionary, and
% rewriting every function body via walk_clauses/2. When the forms are
% exhausted, optionally append the collected record info attribute.
walk_ast(Acc, []) ->
case get(print_records_flag) of
true ->
insert_record_attribute(Acc);
false ->
lists:reverse(Acc)
end;
walk_ast(Acc, [{attribute, _, module, {Module, _PmodArgs}}=H|T]) ->
%% A wild parameterized module appears!
put(module, Module),
walk_ast([H|Acc], T);
walk_ast(Acc, [{attribute, _, module, Module}=H|T]) ->
put(module, Module),
walk_ast([H|Acc], T);
walk_ast(Acc, [{attribute, _, lager_function_transforms, FromModule }=H|T]) ->
%% Merge transform options from the module over the compile options
FromOptions = get(functions),
put(functions, orddict:merge(fun(_Key, _V1, V2) -> V2 end, FromOptions, lists:keysort(1, FromModule))),
walk_ast([H|Acc], T);
walk_ast(Acc, [{function, Line, Name, Arity, Clauses}|T]) ->
put(function, Name),
walk_ast([{function, Line, Name, Arity,
walk_clauses([], Clauses)}|Acc], T);
walk_ast(Acc, [{attribute, _, record, {Name, Fields}}=H|T]) ->
% Remember record definitions so log messages can pretty-print records.
FieldNames = lists:map(fun record_field_name/1, Fields),
stash_record({Name, FieldNames}),
walk_ast([H|Acc], T);
walk_ast(Acc, [H|T]) ->
walk_ast([H|Acc], T).
%% Extract the field name atom from a record-field AST node, looking
%% through optional default values and type annotations.
record_field_name({typed_record_field, Field, _Type}) ->
    record_field_name(Field);
record_field_name({record_field, _Line, {atom, _, Name}}) ->
    Name;
record_field_name({record_field, _Line, {atom, _, Name}, _Default}) ->
    Name.
%% Rewrite every clause body of a function, preserving clause order.
walk_clauses(Acc, []) ->
    lists:reverse(Acc);
walk_clauses(Acc, [{clause, Line, Args, Guards, Body} | Rest]) ->
    Rewritten = {clause, Line, Args, Guards, walk_body([], Body)},
    walk_clauses([Rewritten | Acc], Rest).
%% Rewrite every statement of a clause body in order.
walk_body(Acc, []) ->
    lists:reverse(Acc);
walk_body(Acc, [Stmt | Rest]) ->
    walk_body([transform_statement(Stmt, get(sinks)) | Acc], Rest).
% Rewrite calls to Sink:Severity(...) (for any configured sink module
% and any known log level, safe or unsafe) into the guarded dispatch
% expression built by do_transform/4,5. All other terms are traversed
% recursively: tuples via list round-trips, lists element-wise;
% everything else is left untouched.
transform_statement({call, Line, {remote, _Line1, {atom, _Line2, Module},
{atom, _Line3, Function}}, Arguments0} = Stmt,
Sinks) ->
case lists:member(Module, Sinks) of
true ->
case lists:member(Function, ?LEVELS) of
true ->
SinkName = lager_util:make_internal_sink_name(Module),
do_transform(Line, SinkName, Function, Arguments0);
false ->
% Not a plain level; check the "_unsafe" level variants.
case lists:keyfind(Function, 1, ?LEVELS_UNSAFE) of
{Function, Severity} ->
SinkName = lager_util:make_internal_sink_name(Module),
do_transform(Line, SinkName, Severity, Arguments0, unsafe);
false ->
Stmt
end
end;
false ->
list_to_tuple(transform_statement(tuple_to_list(Stmt), Sinks))
end;
transform_statement(Stmt, Sinks) when is_tuple(Stmt) ->
list_to_tuple(transform_statement(tuple_to_list(Stmt), Sinks));
transform_statement(Stmt, Sinks) when is_list(Stmt) ->
[transform_statement(S, Sinks) || S <- Stmt];
transform_statement(Stmt, _Sinks) ->
Stmt.
% Prepend the configured function transforms to the default attribute
% list (an AST cons cell chain). on_emit transforms are wrapped as a
% zero-arity fun reference evaluated when the message is emitted;
% on_log transforms are called once at the log statement itself.
add_function_transforms(_Line, DefaultAttrs, []) ->
DefaultAttrs;
add_function_transforms(Line, DefaultAttrs, [{Atom, on_emit, {Module, Function}}|Remainder]) ->
NewFunction = {tuple, Line, [
{atom, Line, Atom},
{'fun', Line, {
function, {atom, Line, Module}, {atom, Line, Function}, {integer, Line, 0}
}}
]},
add_function_transforms(Line, {cons, Line, NewFunction, DefaultAttrs}, Remainder);
add_function_transforms(Line, DefaultAttrs, [{Atom, on_log, {Module, Function}}|Remainder]) ->
NewFunction = {tuple, Line, [
{atom, Line, Atom},
{call, Line, {remote, Line, {atom, Line, Module}, {atom, Line, Function}}, []}
]},
add_function_transforms(Line, {cons, Line, NewFunction, DefaultAttrs}, Remainder).
%% Rewrite a lager call using the default (safe) formatting mode.
do_transform(Line, SinkName, Severity, Arguments) ->
    do_transform(Line, SinkName, Severity, Arguments, safe).
% Build the replacement AST for one logging call: a case expression
% that checks the sink process, the default sink and the cached
% loglevel/traces, and only calls lager:do_log/10 (or do_log_unsafe/10)
% when the message is eligible. This keeps the call site essentially
% free when the level is disabled.
do_transform(Line, SinkName, Severity, Arguments0, Safety) ->
SeverityAsInt=lager_util:level_to_num(Severity),
% Default metadata: module/function/line/pid/node plus lager:md().
DefaultAttrs0 = {cons, Line, {tuple, Line, [
{atom, Line, module}, {atom, Line, get(module)}]},
{cons, Line, {tuple, Line, [
{atom, Line, function}, {atom, Line, get(function)}]},
{cons, Line, {tuple, Line, [
{atom, Line, line},
{integer, Line, Line}]},
{cons, Line, {tuple, Line, [
{atom, Line, pid},
{call, Line, {atom, Line, pid_to_list}, [
{call, Line, {atom, Line ,self}, []}]}]},
{cons, Line, {tuple, Line, [
{atom, Line, node},
{call, Line, {atom, Line, node}, []}]},
%% get the metadata with lager:md(), this will always return a list so we can use it as the tail here
{call, Line, {remote, Line, {atom, Line, lager}, {atom, Line, md}}, []}}}}}},
%{nil, Line}}}}}}},
Functions = get(functions),
DefaultAttrs1 = add_function_transforms(Line, DefaultAttrs0, Functions),
DefaultAttrs = case erlang:get(application) of
undefined ->
DefaultAttrs1;
App ->
%% stick the application in the attribute list
concat_lists({cons, Line, {tuple, Line, [
{atom, Line, application},
{atom, Line, App}]},
{nil, Line}}, DefaultAttrs1)
end,
{Meta, Message, Arguments} = handle_args(DefaultAttrs, Line, Arguments0),
%% Generate some unique variable names so we don't accidentally export from case clauses.
%% Note that these are not actual atoms, but the AST treats variable names as atoms.
LevelVar = make_varname("__Level", Line),
TracesVar = make_varname("__Traces", Line),
PidVar = make_varname("__Pid", Line),
LogFun = case Safety of
safe ->
do_log;
unsafe ->
do_log_unsafe
end,
%% Wrap the call to lager:dispatch_log/6 in case that will avoid doing any work if this message is not elegible for logging
%% See lager.erl (lines 89-100) for lager:dispatch_log/6
%% case {whereis(Sink), whereis(?DEFAULT_SINK), lager_config:get({Sink, loglevel}, {?LOG_NONE, []})} of
{'case',Line,
{tuple,Line,
[{call,Line,{atom,Line,whereis},[{atom,Line,SinkName}]},
{call,Line,{atom,Line,whereis},[{atom,Line,?DEFAULT_SINK}]},
{call,Line,
{remote,Line,{atom,Line,lager_config},{atom,Line,get}},
[{tuple,Line,[{atom,Line,SinkName},{atom,Line,loglevel}]},
{tuple,Line,[{integer,Line,0},{nil,Line}]}]}]},
%% {undefined, undefined, _} -> {error, lager_not_running};
[{clause,Line,
[{tuple,Line,
[{atom,Line,undefined},{atom,Line,undefined},{var,Line,'_'}]}],
[],
%% trick the linter into avoiding a 'term constructed but not used' error:
%% (fun() -> {error, lager_not_running} end)()
[{call, Line, {'fun', Line, {clauses, [{clause, Line, [],[], [{tuple, Line, [{atom, Line, error},{atom, Line, lager_not_running}]}]}]}}, []}]
},
%% {undefined, _, _} -> {error, {sink_not_configured, Sink}};
{clause,Line,
[{tuple,Line,
[{atom,Line,undefined},{var,Line,'_'},{var,Line,'_'}]}],
[],
%% same trick as above to avoid linter error
[{call, Line, {'fun', Line, {clauses, [{clause, Line, [],[], [{tuple,Line, [{atom,Line,error}, {tuple,Line,[{atom,Line,sink_not_configured},{atom,Line,SinkName}]}]}]}]}}, []}]
},
%% {SinkPid, _, {Level, Traces}} when ... -> lager:do_log/9;
{clause,Line,
[{tuple,Line,
[{var,Line,PidVar},
{var,Line,'_'},
{tuple,Line,[{var,Line,LevelVar},{var,Line,TracesVar}]}]}],
[[{op, Line, 'orelse',
{op, Line, '/=', {op, Line, 'band', {var, Line, LevelVar}, {integer, Line, SeverityAsInt}}, {integer, Line, 0}},
{op, Line, '/=', {var, Line, TracesVar}, {nil, Line}}}]],
[{call,Line,{remote, Line, {atom, Line, lager}, {atom, Line, LogFun}},
[{atom,Line,Severity},
Meta,
Message,
Arguments,
{integer, Line, get(truncation_size)},
{integer, Line, SeverityAsInt},
{var, Line, LevelVar},
{var, Line, TracesVar},
{atom, Line, SinkName},
{var, Line, PidVar}]}]},
%% _ -> ok
{clause,Line,[{var,Line,'_'}],[],[{atom,Line,ok}]}]}.
%% Figure out which of the 1..3 call arguments are attributes, format
%% string and format arguments, merging any caller attributes with the
%% defaults. Returns {Meta, Message, Arguments} as AST fragments.
handle_args(DefaultAttrs, Line, [{cons, LineNum, {tuple, _, _}, _} = Attrs]) ->
    %% Single literal list of tuples: attributes only, empty message.
    {concat_lists(DefaultAttrs, Attrs), {string, LineNum, ""}, {atom, Line, none}};
handle_args(DefaultAttrs, Line, [Format]) ->
    %% Single non-attribute argument: the format string/iolist itself.
    {DefaultAttrs, Format, {atom, Line, none}};
handle_args(DefaultAttrs, Line, [Arg1, Arg2]) ->
    %% some ambiguity here, figure out if these arguments are
    %% [Format, Args] or [Attr, Format].
    %% The trace attributes will be a list of tuples, so check
    %% for that.
    case {element(1, Arg1), Arg1} of
        {_, {cons, _, {tuple, _, _}, _}} ->
            {concat_lists(Arg1, DefaultAttrs),
                Arg2, {atom, Line, none}};
        {Type, _} when Type == var;
                       Type == lc;
                       Type == call;
                       Type == record_field ->
            %% crap, its not a literal. look at the second
            %% argument to see if it is a string
            case Arg2 of
                {string, _, _} ->
                    {concat_lists(Arg1, DefaultAttrs),
                        Arg2, {atom, Line, none}};
                _ ->
                    %% not a string, going to have to guess
                    %% it's the argument list
                    {DefaultAttrs, Arg1, Arg2}
            end;
        _ ->
            {DefaultAttrs, Arg1, Arg2}
    end;
handle_args(DefaultAttrs, _Line, [Attrs, Format, Args]) ->
    %% All three supplied explicitly: [Attributes, Format, Args].
    {concat_lists(Attrs, DefaultAttrs), Format, Args}.
%% Build a per-call-site unique variable name (as an atom, which is how
%% the AST represents variable names) from a prefix, the module being
%% compiled (process dictionary key 'module') and the source line.
make_varname(Prefix, Loc) ->
    list_to_atom(Prefix ++ atom_to_list(get(module)) ++ integer_to_list(line_from_loc(Loc))).
%% Extract the line number from an erl_anno-style location, which is
%% either a bare line number or a {Line, Column} pair.
line_from_loc(Location) ->
    case Location of
        {Line, _Column} -> Line;
        Line -> Line
    end.
%% Concatenate two list ASTs by replacing the terminating [] in A with
%% the contents of B. Non-literal heads (variables, comprehensions,
%% calls, record fields) cannot be spliced at compile time, so those are
%% deferred to a runtime lists:flatten/1 call instead.
concat_lists({var, Line, _Name} = Ast, Rest) ->
    runtime_flatten(Line, Ast, Rest);
concat_lists({lc, Line, _Body, _Generator} = Ast, Rest) ->
    runtime_flatten(Line, Ast, Rest);
concat_lists({call, Line, _Function, _Args} = Ast, Rest) ->
    runtime_flatten(Line, Ast, Rest);
concat_lists({record_field, Line, _Var, _Record, _Field} = Ast, Rest) ->
    runtime_flatten(Line, Ast, Rest);
concat_lists({nil, _Line}, Rest) ->
    Rest;
concat_lists({cons, Line, Head, Tail}, Rest) ->
    {cons, Line, Head, concat_lists(Tail, Rest)}.

%% Emit AST for lists:flatten([Ast | Rest]) so the join happens at runtime.
runtime_flatten(Line, Ast, Rest) ->
    {call, Line, {remote, Line, {atom, Line, lists}, {atom, Line, flatten}},
        [{cons, Line, Ast, Rest}]}.
%% Remember a record definition for later emission by
%% insert_record_attribute/1; the list lives in the process dictionary
%% under 'records'. Returns erlang:put/2's result (the previous value).
stash_record(Record) ->
    AlreadyStashed =
        case erlang:get(records) of
            undefined -> [];
            Existing -> Existing
        end,
    erlang:put(records, [Record | AlreadyStashed]).
%% Re-emit the AST with an {attribute, _, lager_records, Records}
%% attribute placed next to the module attribute, using the record
%% definitions stashed by stash_record/1.
%% NOTE(review): the foldl + cons reverses the form list; the caller
%% presumably compensates (or re-reverses) — verify before reuse.
insert_record_attribute(AST) ->
    lists:foldl(fun({attribute, Line, module, _} = E, Acc) ->
                        [E, {attribute, Line, lager_records, erlang:get(records)} | Acc];
                   (E, Acc) ->
                        [E | Acc]
                end, [], AST).
%% Try to discover the OTP application this module belongs to by looking
%% for a .app/.app.src file: first next to the compile output directory,
%% then next to the source file named in the 'file' attribute.
guess_application(Dirname, Attr) when Dirname /= undefined ->
    case find_app_file(Dirname) of
        no_idea ->
            %% Nothing beside the output directory; retry based on the
            %% source file location (app.src most likely).
            guess_application(undefined, Attr);
        _Found ->
            ok
    end;
guess_application(undefined, {attribute, _, file, {Filename, _}}) ->
    find_app_file(filename:dirname(Filename));
guess_application(_, _) ->
    ok.
%% Look for exactly one .app/.app.src file in Dir; if it parses as an
%% application resource, remember the application name in the process
%% dictionary (key 'application') for do_transform/5 to pick up.
find_app_file(Dir) ->
    case filelib:wildcard(Dir ++ "/*.{app,app.src}") of
        [] ->
            no_idea;
        [File] ->
            case file:consult(File) of
                {ok, [{application, Appname, _Attributes} | _]} ->
                    %% Returns the previous value of the key (side effect).
                    erlang:put(application, Appname);
                _ ->
                    no_idea
            end;
        _ ->
            %% multiple files, uh oh
            no_idea
    end.
%%% Advent of Code solution for 2019 day 10.
%%% Created: 2019-12-10T05:33:20+00:00
-module(aoc2019_day10).
-include_lib("stdlib/include/assert.hrl").
-include("aoc_puzzle.hrl").
-export([parse/1, solve/1, info/0]).
-behavior(aoc_puzzle).
%% Static metadata the aoc harness uses to locate the input file and
%% check this solver's answers (2019 day 10, "Monitoring Station").
-spec info() -> aoc_puzzle().
info() ->
    #aoc_puzzle{module = ?MODULE,
                year = 2019,
                day = 10,
                name = "Monitoring Station",
                expected = {334, 1119},
                use_one_solver_fun = true,
                has_input_file = true}.
%% An asteroid is identified by its {X, Y} grid position.
-type asteroid() :: {X :: integer(), Y :: integer()}.
-type input_type() :: [asteroid()].
-type result_type() :: {integer(), integer()}.

%% Turn the raw puzzle input into asteroid coordinates.
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
    %% Grid is 34 chars wide (+ newline); parse_grid/2 asserts this.
    parse_grid(Binary, 34).
%% Solve both parts at once: part 1 is the maximum number of directly
%% visible asteroids from any asteroid; part 2 encodes the position of
%% the 200th asteroid destroyed by the rotating laser.
-spec solve(Input :: input_type()) -> result_type().
solve(Asteroids) ->
    %% Compute a list of {A, B, Dir} where A and B are asteroid coords
    %% and Dir is the direction from A to B expressed as a fraction.
    AsteroidDirs = [{A, B, direction(A, B)} || A <- Asteroids, B <- Asteroids, A =/= B],
    %% Compute a map where the keys are asteroids (A). Each key maps to
    %% a map of (integer() -> list(Asteroids)), where the integer is the
    %% slope (direction), and the list contains all the asteroids which
    %% lie along that line.
    AsteroidsGroupedByVisibility =
        lists:foldl(fun({A, B, Dir}, Map) ->
                            Dist = distance(A, B),
                            maps:update_with(A,
                                fun(OldMap) ->
                                        maps:update_with(Dir,
                                            fun(OldList) -> [{Dist, B} | OldList]
                                            end,
                                            [{Dist, B}],
                                            OldMap)
                                end,
                                #{Dir => [{Dist, B}]},
                                Map)
                    end,
                    #{},
                    AsteroidDirs),
    %% Find answer to part 1: the asteroid from which the most other
    %% asteroids are visible (one per distinct direction).
    {_, Part1, VisibleAsteroids} =
        maps:fold(fun(K, V, {_, Max, _} = Acc) ->
                          case maps:size(V) of
                              Len when Len > Max -> {K, Len, V};
                              _ -> Acc
                          end
                  end,
                  {undef, 0, []},
                  AsteroidsGroupedByVisibility),
    %% Find out which asteroids we should fire at, and in which order.
    %% Part 2 solution is to find the 200th destroyed
    %% asteroid. Fortunately, we only need to fire at the closest
    %% asteroids; as 200 < 334 (which is the number of visible
    %% asteroids.
    FiringOrder =
        lists:map(fun(Dir) ->
                          lists:min(
                              maps:get(Dir, VisibleAsteroids))
                  end,
                  lists:sort(
                      maps:keys(VisibleAsteroids))),
    Part2 = fire_ze_huge_lazer(FiringOrder, 1),
    {Part1, Part2}.
%% Walk the firing order and return the puzzle answer (X * 100 + Y) for
%% the 200th asteroid destroyed. Yields 'not_enough_asteroids' when the
%% order runs out first (only expected in tests).
fire_ze_huge_lazer([], _Count) ->
    not_enough_asteroids;
fire_ze_huge_lazer([{_Dist, {X, Y}} | _Rest], 200) ->
    X * 100 + Y;
fire_ze_huge_lazer([_Shot | Rest], Count) ->
    fire_ze_huge_lazer(Rest, Count + 1).
%% ---- [ Helpers ] ----
%% Euclidean distance between two grid points.
distance({FromX, FromY}, {ToX, ToY}) ->
    math:sqrt(math:pow(ToX - FromX, 2) + math:pow(ToY - FromY, 2)).
%% Scale a float and truncate toward negative infinity so it can serve
%% as an exact (integer) map key, avoiding float rounding errors.
f_to_i(Fraction) ->
    erlang:floor(Fraction * 1000000).
%% Return the angle of the vector {X0,Y0} -> {X1,Y1}, re-oriented so 0
%% points north and the angle grows clockwise (the laser's rotation
%% order), encoded as a fixed-point integer via f_to_i/1.
%% Uses math:pi() instead of a hand-typed constant; the literal
%% 3.141592653589793 is exactly the double math:pi() returns, so the
%% numeric behavior is unchanged.
direction({X0, Y0}, {X1, Y1}) ->
    Dx = X1 - X0,
    Dy = Y1 - Y0,
    Pi = math:pi(),
    %% This yuck is to get an angle where 0 is north, and increases
    %% clock-wise: atan2 yields a counter-clockwise angle with 0 east.
    Z = math:atan2(-Dy, Dx) + 3 * Pi / 2,
    Z0 = if Z > 2 * Pi ->
                Z - 2 * Pi;
            true ->
                Z
         end,
    f_to_i(2 * Pi - Z0).
%% Translate every "#" in the raw grid into an {X, Y} asteroid position.
%% Match offsets from binary:matches/2 are linear in the binary, so X/Y
%% fall out of rem/div by the row width including the trailing newline.
-spec parse_grid(binary(), Width :: integer()) -> [asteroid()].
parse_grid(Binary, Width) ->
    %% Sanity check: the byte right after the first row is a newline.
    ?assertEqual($\n, binary:at(Binary, Width)),
    lists:map(fun({Offset, _} = _Match) ->
                      {Offset rem (Width + 1), Offset div (Width + 1)}
              end,
              binary:matches(Binary, <<"#">>)).
% https://icebreakerideas.com/fun-games-to-play-at-home/#Dots_and_Boxes
-module (dots_and_boxes).
-export ([dots/2, boxes/1, join/4, draw/1, auto_play/3]).
-type point() :: {integer(),integer()}.
-type line() :: {atom(), point(), point(), boolean()}.
-type dots() :: [point()].
-type box() :: {atom(), [line()], string()}.
-type grid() :: [box()].
%% @doc All grid points of an XDots x YDots board, 0-based, ordered by
%% X then Y.
-spec dots(integer(), integer()) -> dots().
dots(XDots, YDots) ->
    Columns = lists:seq(0, XDots - 1),
    Rows = lists:seq(0, YDots - 1),
    [{X, Y} || X <- Columns, Y <- Rows].
%% @doc Build the unit boxes whose four corner dots are all present in
%% Dots; each box starts with four unmarked lines and no owner.
-spec boxes(dots()) -> [box()].
boxes([]) -> [];
boxes(Dots) -> boxes(Dots, Dots, []).

%% Walk the candidate bottom-left corners directly instead of indexing
%% with lists:nth/2 on every iteration (the original scan was O(n^2)).
boxes([], _AllDots, Acc) ->
    lists:sort(Acc);
boxes([Dot | Rest], AllDots, Acc) ->
    BoxPts = box_points_for(Dot),
    case all_present(BoxPts, AllDots) of
        true ->
            boxes(Rest, AllDots, [{box, lines(BoxPts), []} | Acc]);
        false ->
            boxes(Rest, AllDots, Acc)
    end.

%% True when every corner point of a candidate box exists on the board.
all_present(BoxPts, Pts) -> BoxPts -- Pts == [].

%% Corner points of the unit box whose bottom-left corner is BottomLeft,
%% in the order expected by lines/1.
box_points_for(BottomLeft) ->
    {X1, Y1} = BottomLeft,
    BoxSize = 1,
    X2 = X1 + BoxSize, Y2 = Y1 + 1,
    BottomRight = {X2, Y1},
    TopRight = {X2, Y2},
    TopLeft = {X1, Y2},
    [BottomLeft, TopLeft, TopRight, BottomRight].

%% Clockwise, initially unmarked lines of a box.
lines([BottomLeft, TopLeft, TopRight, BottomRight]) ->
    [{line, BottomLeft, TopLeft, false},
     {line, TopLeft, TopRight, false},
     {line, TopRight, BottomRight, false},
     {line, BottomRight, BottomLeft, false}].
%% @doc Join dots D1 and D2 for Player across the whole grid. The second
%% element of the returned pair is true when the move signed (completed)
%% a box, which entitles the same player to another turn.
-spec join(point(), point(), string(), grid()) -> {grid(), boolean()}.
join(D1, D2, Player, Grid) ->
    NextG = [mark_box(B, D1, D2, Player) || B <- Grid],
    {NextG, was_box_signed(Grid, NextG)}.
%% Did the last move sign exactly one additional box?
was_box_signed(PrevGrid, NextGrid) ->
    PrevSigned = length([B || B <- PrevGrid, signed_box(B)]),
    NextSigned = length([B || B <- NextGrid, signed_box(B)]),
    NextSigned - PrevSigned == 1.

%% A box is signed once it carries a (non-empty) player name.
signed_box({box, _, Player}) -> Player /= [].
%% 4-arg mark_box: try to draw the line D1-D2 on a single box. Only
%% unowned boxes are touched; if the drawn line completes the box's
%% fourth side, the box is signed with the player's name.
mark_box(B = {box, Lines, []}, D1, D2, Player) ->
    PlayerLine = {line, D1, D2, false},
    case contains(PlayerLine, Lines) of
        true -> sign_box(mark_box(D1, D2, B), Player);
        false -> B
    end;
mark_box(Box, _, _, _) -> Box.

%% Attribute the box to Player once all four sides are drawn.
%% (Renamed from camelCase signBox to match the module's snake_case
%% naming convention; it is only called from mark_box/4 above.)
sign_box(MBox = {box, MLines, _}, Player) ->
    case fourth_side_complete(MBox) of
        true -> {box, MLines, Player};
        false -> MBox
    end.
%% 3-arg mark_box: return a copy of an unowned box in which any unmarked
%% line joining D1 and D2 (either orientation) is replaced by a drawn line.
mark_box(D1, D2, {box, Lines, []}) ->
    {box, [mark_matching_line(Line, D1, D2) || Line <- Lines], []}.

%% Swap a matching unmarked line for the joined one; everything else
%% passes through untouched.
mark_matching_line({line, A, B, false}, D1, D2) when (A == D1) and (B == D2) ->
    join_dots(D1, D2);
mark_matching_line({line, A, B, false}, D1, D2) when (A == D2) and (B == D1) ->
    join_dots(D2, D1);
mark_matching_line(Line, _D1, _D2) ->
    Line.
%% True when Needle (an unmarked line) occurs in Lines in either
%% direction; already-drawn lines never match.
contains(_Needle, []) ->
    false;
contains({line, A, B, false} = Needle, [Candidate | Rest]) ->
    case same_line(Needle, A, B, Candidate) of
        true -> true;
        false -> contains(Needle, Rest)
    end.

%% A candidate matches when it is the identical line, or the same pair of
%% dots listed in the opposite order (still unmarked).
same_line(Needle, _A, _B, Needle) -> true;
same_line(_Needle, A, B, {line, P, Q, false}) -> (Q == A) and (P == B);
same_line(_Needle, _A, _B, _Candidate) -> false.
%% True when every side of the box has been drawn.
fourth_side_complete({box, Lines, _Player}) ->
    not lists:any(fun({line, _, _, Marked}) -> Marked /= true end, Lines).
%% Draw the line between two orthogonally adjacent dots; any other pair
%% yields 'badarg'.
join_dots({X, YA} = From, {X, YB} = To) when abs(YB - YA) == 1 ->
    {line, From, To, true};
join_dots({XA, Y} = From, {XB, Y} = To) when abs(XB - XA) == 1 ->
    {line, From, To, true};
join_dots(_From, _To) ->
    badarg.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% All Auto Play related functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Play a full random game between Players on an XDots x YDots
%% board, printing the grid after each move and the results at the end.
-spec auto_play([atom()], integer(), integer()) -> [any()].
auto_play(Players, XDots, YDots) when (XDots >= 2) and (YDots >= 2) and (length(Players) >= 2) ->
    Grid = boxes(dots(XDots, YDots)),
    % io:format("Initial Grid = ~p~n", [Grid]),
    draw(Grid),
    %% The closure keeps the board bounds for random move generation.
    play(Players, Grid, fun() -> rand_dots_pair(XDots, YDots) end);
auto_play(_, _, _) ->
    io:format("Choose more than 2 players and have number of dots in X and Y direction as atleast 2!~n").
%% Main game loop: show the results when every box is owned, otherwise
%% let the head player take a turn.
play(Players, Grid, RandF) ->
    case game_over(Grid) of
        true -> show_results(Grid);
        false -> continue_play(Players, Grid, RandF)
    end.

%% The current player moves; completing a box grants another turn,
%% otherwise the player rotates to the back of the list.
continue_play(Players = [P | Ps], G, RandF) ->
    io:format("~p playing...~n", [P]),
    {NextG, SamePlayerTurn} = turn(P, G, RandF),
    draw(NextG),
    case SamePlayerTurn of
        true ->
            io:format("~p taking another turn!~n", [P]),
            play(Players, NextG, RandF);
        false ->
            play(lists:append(Ps, [P]), NextG, RandF)
    end.
%% Print the winner (or a draw when the runner-up ties the top score)
%% plus the full score table.
%% NOTE(review): the head match crashes on an empty grid (results/1
%% would return []); auto_play/3's guards guarantee at least one box.
show_results(Grid) ->
    All = [{Winner, Score} | Losers] = results(Grid),
    io:format("*** Game Over ***~n"),
    case lists:keymember(Score, 2, Losers) of
        true -> io:format("Game has Drawn!!~n");
        false -> io:format("Winner => ~p~n", [Winner])
    end,
    io:format("Detailed Results: ~p~n", [All]).
%% Tally boxes per owner and return {Owner, Count} pairs, best score
%% first.
results(Grid) ->
    Owners = [Owner || {box, _, Owner} <- Grid],
    lists:reverse(lists:keysort(2, frequency(Owners))).
%% The game ends once every box has an owner.
game_over(Grid) ->
    [] =:= [Box || Box <- Grid, not signed_box(Box)].
%% Count occurrences of each distinct element: [{Element, Count}], in
%% reverse order of first appearance (callers sort the result anyway).
frequency(Items) -> frequency(Items, []).

frequency([], Counts) ->
    Counts;
frequency([First | _] = Items, Counts) ->
    {Same, Different} = lists:partition(fun(I) -> I == First end, Items),
    frequency(Different, [{First, length(Same)} | Counts]).
%% Draw one random still-available line for Player. Keeps sampling dot
%% pairs until an undrawn line is found; play/3 only calls this while
%% the game is not over, so an available line always exists.
turn(Player, Grid, RandomF) ->
    {D1, D2} = RandomF(),
    case line_not_exists(D1, D2, Grid) of
        true -> join(D1, D2, Player, Grid);
        false -> turn(Player, Grid, RandomF)
    end.
%% NOTE(review): despite the name, this returns true when an *unmarked*
%% line between D1 and D2 is still present in the grid — i.e. "the move
%% is still available". Drawn lines are tagged true and never match the
%% false-tagged needle (see contains/2).
line_not_exists(D1, D2, Grid) ->
    Line = {line, D1, D2, false},
    GLines = lists:foldr(fun({box, Lines, _}, A) -> lists:append(A, Lines) end, [], Grid),
    contains(Line, GLines).
%% Rejection-sample two random dots until they are orthogonally adjacent
%% (Euclidean distance exactly 1).
rand_dots_pair(XDots, YDots) ->
    D1 = rand_dot(XDots, YDots),
    D2 = rand_dot(XDots, YDots),
    case distance(D1, D2) == 1 of
        true -> {D1, D2};
        false -> rand_dots_pair(XDots, YDots)
    end.
%% Euclidean distance between two dots.
distance({X1, Y1}, {X2, Y2}) ->
    DX = X2 - X1,
    DY = Y2 - Y1,
    math:sqrt(DX * DX + DY * DY).
%% Pick a uniformly random dot on the board (0-based coordinates).
rand_dot(MaxX, MaxY) ->
    {rand(MaxX), rand(MaxY)}.

%% Uniform integer in [0, Bound-1]; boards need at least 2 dots per axis.
rand(Bound) when Bound >= 2 ->
    rand:uniform(Bound) - 1.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% All drawing related functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Convert the grid of boxes into printable rows: a list of
%% [HorizontalLines, VerticalLines] pairs, one per board row. Left edges
%% tagged vertical_left_ignore by annotate_lines/1 are dropped here
%% (they are drawn as the neighbouring box's right edge).
rasterize(Grid) ->
    GridLines = lists:flatmap(fun(B) ->
                                      annotate_lines(B)
                              end, Grid),
    Lines = lists:filter(fun({_, Aligned, _}) ->
                                 Aligned /= vertical_left_ignore
                         end, GridLines),
    {HLines, VLines} = lists:partition(fun({_, Aligned, _}) -> Aligned == horizontal end, Lines),
    SortedHLines = lists:usort(HLines),
    %% The largest horizontal end point bounds the row/column sweep.
    {{_, _, {X, Y}, _}, _, _} = lists:last(SortedHLines),
    rasterize([], 0, 0, X, Y, SortedHLines, VLines).
%% Sweep the row index upward, peeling off the horizontal lines lying on
%% row Y and the vertical lines spanning Y..Y+1 into one raster row per
%% iteration; any horizontal leftovers become the final border row.
rasterize(Acc, _, _, _, _, [], []) ->
    Acc;
rasterize(Acc, _, _, _, _, HLines, []) ->
    lists:append(Acc, [[HLines, []]]);
rasterize(Acc, X, Y, XMax, YMax, HLines, VLines) when (X =< XMax) or (Y =< YMax) ->
    {Hs, HLs} = lists:partition(
        fun({{_, {_, Y1}, {_, Y2}, _}, _, _}) ->
            (Y1 == Y) and (Y2 == Y)
        end, HLines),
    {Vs, VLs} = lists:partition(
        fun({{_, {_, Y1}, {_, Y2}, _}, _, _}) ->
            (Y == Y1) and (Y2 == (Y + 1))
        end, VLines),
    NewAcc = lists:append(Acc, [[Hs, lists:sort(Vs)]]),
    rasterize(NewAcc, X + 1, Y + 1, XMax, YMax, HLs, VLs).
%% Tag each of a box's four (coordinate-sorted) lines with its drawing
%% role: horizontal, vertical_right (carries the owner's signature),
%% vertical_first_col (left border of column 0) or vertical_left_ignore
%% (interior left edges, drawn as the neighbour's right edge).
-spec annotate_lines(box()) -> [any()].
annotate_lines(B) ->
    {box, [VLeft, HUpper, VRight, HLower], TakenBy} = B,
    [
        {sort_line(HLower), horizontal, []},
        {sort_line(VRight), vertical_right, TakenBy},
        {sort_line(HUpper), horizontal, []},
        case VLeft of
            {line, {0, _}, {0, _}, _} ->
                {sort_line(VLeft), vertical_first_col, []};
            _ ->
                {sort_line(VLeft), vertical_left_ignore, []}
        end
    ].
%% Normalise a line so its lower-ordered dot comes first; already-ordered
%% terms pass through unchanged.
sort_line({line, From, To, Marked}) when From > To ->
    {line, To, From, Marked};
sort_line(AlreadySorted) ->
    AlreadySorted.
%% @doc Print the grid as ASCII art: '+' for dots, '---'/'|' for drawn
%% lines and the owner's name inside completed boxes.
draw(Grid) ->
    % io:format("Grid = ~p~n", [Grid]),
    RasterLines = rasterize(Grid),
    % io:format("RasterLines = ~p~n", [RasterLines]),
    lists:foreach(fun([RLines, CLines]) ->
                          draw_row_lines(RLines),
                          draw_col_lines(CLines)
                  end, RasterLines).
%% Print one row of horizontal edges: "+---" for drawn edges, a blank
%% cell otherwise, closing the row with "+".
%% NOTE(review): the blank-cell literal below is narrower than "+---";
%% padding may have been lost when this source's whitespace was mangled
%% — verify cell widths against the original.
draw_row_lines(Lines) ->
    lists:foreach(fun(Line) ->
                          case align(Line) of
                              horizontal -> io:format("+---");
                              horizontal_blank -> io:format("+ ")
                          end
                  end, Lines),
    io:format("+~n").
%% Print one row of vertical edges and box interiors: "|" for drawn
%% edges, the owner's name (up to 3 chars via ~3s) before a right edge,
%% spaces otherwise.
%% NOTE(review): the single-space literals below likely lost padding
%% when this source's whitespace was mangled; cell widths may not line
%% up with draw_row_lines/1 — verify against the original.
draw_col_lines(Lines) ->
    lists:foreach(fun(Line) ->
                          case align(Line) of
                              {vertical_blank, first_column} ->
                                  io:format(" ");
                              {vertical, first_column} ->
                                  io:format("|");
                              vertical_blank ->
                                  io:format(" ");
                              vertical ->
                                  io:format(" |");
                              {vertical, TakenBy} ->
                                  io:format("~3s|", [TakenBy])
                          end
                  end, Lines),
    io:format("~n").
%% Map an annotated line (see annotate_lines/1) to a drawing instruction
%% for draw_row_lines/1 / draw_col_lines/1. Dispatch entirely in the
%% function heads instead of nested case expressions.
align({{line, _, _, false}, vertical_first_col, _}) ->
    {vertical_blank, first_column};
align({{line, _, _, true}, vertical_first_col, _}) ->
    {vertical, first_column};
align({{line, _, _, false}, vertical_right, _}) ->
    vertical_blank;
align({{line, _, _, true}, vertical_right, []}) ->
    %% Drawn right edge of a box nobody owns yet.
    vertical;
align({{line, _, _, true}, vertical_right, TakenBy}) ->
    {vertical, TakenBy};
align({{line, _, _, false}, horizontal, _}) ->
    horizontal_blank;
align({{line, _, _, true}, horizontal, _}) ->
    horizontal.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Generic functions
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Apply F pairwise over two lists, stopping at the shorter one (unlike
%% lists:zipwith/3, which requires equal lengths).
%% Accumulates with cons + a final reverse instead of the original
%% lists:append(Acc, [..]) per element, which was O(n^2).
zipwith(F, Xs, Ys) ->
    zipwith(F, [], Xs, Ys).

zipwith(_F, Acc, [], _Ys) ->
    lists:reverse(Acc);
zipwith(_F, Acc, _Xs, []) ->
    lists:reverse(Acc);
zipwith(F, Acc, [X | Xs], [Y | Ys]) ->
    zipwith(F, [F(X, Y) | Acc], Xs, Ys).
%% Transpose a rectangular list-of-lists.
%% Adds the missing transpose([]) clause: the original fell into the
%% general clause for an empty outer list and recursed forever
%% ([[] | transpose([])] never terminates).
transpose([]) -> [];
transpose([[] | _]) -> [];
transpose(M) ->
    [lists:map(fun hd/1, M) | transpose(lists:map(fun tl/1, M))].
%% 1-based position of the first occurrence of Item in List, or
%% 'not_found'.
index_of(Item, List) ->
    index_of(Item, List, 1).

index_of(_Item, [], _Pos) ->
    not_found;
index_of(Item, [Head | Tail], Pos) ->
    case Head of
        Item -> Pos;
        _ -> index_of(Item, Tail, Pos + 1)
    end.
%% -------------------------------------------------------------------
%% @doc Provides a process to queue incoming propositions.<br/>
%% The validator pulls the propositions from the queue via
%% {@link get_proposition/0} and returns the answer via
%% {@link put_score/3}.
%% The queue process keeps proposition until it gets the validation
%% result so when the validator crashes, no proposition is lost.<br/>
%% When a round is over, there may still be unvalidated propositions
%% in the queue. Therefore, the {@link dj} tells the queue that the
%% round time is over ({@link all_workers_stopped/0}) and the queue
%% sends a message back the next time it runs dry. When this has
%% happened, the final results for the round can be computed and the
%% round is officially over.<br/>
%% In this text, "validator" means a process spawned by the
%% {@link validator_port} module.
%% @end
%% -------------------------------------------------------------------
-module(validator_queue).
-behavior(gen_statem).
-include("validator_data.hrl").
%% application programming interface
-export([
start_link/0,
insert_proposition/3,
get_queue_content/0,
round_started/0,
all_workers_stopped/0
]).
%% application programming interface for validator port
-export([
get_proposition/0,
put_score/3
]).
%% called by gen_statem module
-export([
init/1,
terminate/3,
code_change/4,
callback_mode/0,
%% custom state names
empty/3,
non_empty/3
]).
%% name under which to register the process
-define(REG_NAME, validator_queue).
-record(state, {queue = queue:new() :: queue:queue(proposition()),
workers_working = false :: boolean(),
waiting_validator = none :: {pid(), term()} | none}).
%% ===================================================================
%% application programming interface
%% ===================================================================

%% @doc Starts the queue and registers it locally as 'validator_queue'.
-spec start_link() -> ignore | {error, _} | {ok, pid()}.
start_link() ->
    gen_statem:start_link({local, ?REG_NAME}, ?MODULE, [], []).

%% @doc Enqueue a worker's proposition for validation (asynchronous).
-spec insert_proposition(worker_id(), [string()], string()) -> ok.
insert_proposition(WorkerID, WorkerInput, WorkerOutput) ->
    Proposition = {WorkerID, WorkerInput, WorkerOutput},
    gen_statem:cast(?REG_NAME, {insert, Proposition}).

%% @doc For debugging: Returns the content of the queue
get_queue_content() ->
    gen_statem:call(?REG_NAME, get_queue_content).

%% @doc Tell the queue a new round began (workers are producing again).
-spec round_started() -> ok.
round_started() ->
    gen_statem:cast(?REG_NAME, round_started).

%% @doc Tell the queue the round time is over; it notifies the dj the
%% next time it runs dry (immediately, if already empty).
-spec all_workers_stopped() -> ok.
all_workers_stopped() ->
    gen_statem:cast(?REG_NAME, all_workers_stopped).

%% ===================================================================
%% application programming interface for validator port
%% ===================================================================

%% @doc Fetches a proposition, blocking the calling process until the
%% queue is non-empty (infinite call timeout).
-spec get_proposition() -> proposition().
get_proposition() ->
    gen_statem:call(?REG_NAME, get_proposition, infinity).

%% @doc Returns a validation result for a previously fetched proposition.
-spec put_score(proposition(), non_neg_integer(), string()) -> ok.
put_score(Proposition, Score, Caption) ->
    gen_statem:cast(?REG_NAME, {put_score, Proposition, Score, Caption}).
%% ===================================================================
%% gen_statem callbacks
%% ===================================================================
%% One callback function per state name (empty/3, non_empty/3).
callback_mode() ->
    state_functions.

%% Start in the 'empty' state with a fresh queue and no waiting validator.
init([]) ->
    {ok, empty, #state{}}.
%% -- state callbacks --

%% 'empty' state: no propositions buffered.
empty(cast, all_workers_stopped, Data) ->
    %% Queue already dry, so the round can be finalized right away.
    dj:validator_queue_empty(),
    {keep_state, Data#state{workers_working = false}};
empty(cast, {insert, Prop}, Data = #state{queue = Queue,
                                          waiting_validator = WaitingValidator}) ->
    NewQueue = queue:in(Prop, Queue),
    case WaitingValidator of
        none ->
            {next_state, non_empty, Data#state{queue = NewQueue}};
        _FromSpec ->
            %% A validator was blocked in get_proposition; hand the new
            %% proposition straight to it. It stays queued until scored.
            gen_statem:reply(WaitingValidator, Prop),
            {next_state, non_empty, Data#state{waiting_validator = none,
                                               queue = NewQueue}}
    end;
empty({call, From}, get_proposition, Data = #state{waiting_validator = WaitingValidator}) ->
    %% If a waiting validator crashes, there will be a new validator process
    %% asking for a proposition. In this case, the old process ID will be overwritten.
    case WaitingValidator of
        none ->
            ok;
        _FromSpec ->
            ok = lager:warning("Validator queue received get_proposition from ~p, overriding the already waiting validator ~p", [From, WaitingValidator])
    end,
    %% reply later, when there are elements in the queue (blocking call)
    {keep_state, Data#state{waiting_validator = From}};
empty(Type, Msg, Data) ->
    handle_event(Type, Msg, empty, Data).
%% -----

%% 'non_empty' state: at least one proposition is buffered; the queue
%% head is the proposition currently being validated.
non_empty(cast, {put_score, Prop, Score, Caption}, Data = #state{queue = Queue, workers_working = WorkersWorking}) ->
    case queue:out(Queue) of
        {{value, Prop}, NewQueue} ->
            %% Score matches the head proposition: forward the result and
            %% only now drop it from the queue (crash safety — see
            %% module doc).
            {WorkerID, WorkerInput, WorkerOutput} = Prop,
            dj:submit_validated_proposition(WorkerID, WorkerInput, WorkerOutput, Score, Caption),
            ok = lager:debug("Validator queue received validated proposition: ~p => score: ~p, caption: ~p",
                             [Prop, Score, Caption]),
            case queue:is_empty(NewQueue) of
                true ->
                    %% Ran dry; if the round already ended, signal the dj
                    %% so the round can be finalized.
                    case WorkersWorking of
                        false -> dj:validator_queue_empty();
                        _ -> ok
                    end,
                    {next_state, empty, Data#state{queue = NewQueue}};
                false ->
                    {keep_state, Data#state{queue = NewQueue}}
            end;
        {{value, _}, _} ->
            ok = lager:warning("Validator queue received score for unexpected proposition: ~p~nQueue content: ~p",
                               [Prop, queue:to_list(Queue)]),
            {keep_state, Data#state{queue = Queue}};
        {empty, _} ->
            ok = lager:critical("Validator queue is empty in nonempty state!"),
            {next_state, empty, Data#state{queue = Queue}}
    end;
non_empty(cast, all_workers_stopped, Data) ->
    %% Remember the round ended; the dj is notified once the queue drains.
    {keep_state, Data#state{workers_working = false}};
non_empty(cast, {insert, Prop}, Data = #state{queue = Queue,
                                              waiting_validator = WaitingValidator}) ->
    NewQueue = queue:in(Prop, Queue),
    case WaitingValidator of
        none ->
            {keep_state, Data#state{queue = NewQueue}};
        _FromSpec ->
            ok = lager:critical("There should not be a waiting validator when the queue is not empty!"),
            throw(validator_queue_corrupted)
    end;
non_empty({call, From}, get_proposition, Data = #state{queue = Queue}) ->
    case queue:out(Queue) of
        {{value, Prop}, _} ->
            %% Hand out the head without removing it; removal happens on
            %% put_score.
            {keep_state_and_data, {reply, From, Prop}};
        {empty, _} ->
            ok = lager:critical("Validator queue is empty in nonempty state!"),
            {next_state, empty, Data, {reply, From, queue_error}}
    end;
non_empty(Type, Msg, Data) ->
    handle_event(Type, Msg, non_empty, Data).
%% -----

code_change(_OldVsn, DataName, Data, _Extra) ->
    %% No change planned.
    %% The function is there for the behaviour, but will not be used.
    {ok, DataName, Data}.

%% Nothing to clean up; all state is in-process.
terminate(_Msg, _StateName, _Data) ->
    ok.
%% ===================================================================
%% private functions
%% ===================================================================

%% -- stateless callbacks --

%% Events handled identically in both states.
handle_event(cast, round_started, _State, Data) ->
    {keep_state, Data#state{workers_working = true}};
handle_event({call, From}, get_queue_content, _State, Data = #state{queue = Queue}) ->
    {keep_state, Data, [{reply, From, queue:to_list(Queue)}]};
handle_event(Type, Msg, State, Data) ->
    %% Log-and-drop anything unexpected so the queue never crashes on
    %% stray messages.
    ok = lager:error("Validator queue received unknown event ~p from ~p while in state ~p",
                     [Msg, Type, State]),
    {keep_state, Data}.
%% =================================================================== | src/validator/validator_queue.erl | 0.685213 | 0.484136 | validator_queue.erl | starcoder |
%% @doc Patches the output of
%% <a href="http://erlang.org/doc/man/edoc_layout.html">edoc_layout</a>.
%%
%% This module append the reference and configuration for mermaid
%% javascript. For each html file created by <a href="http://erlang.org/doc/man/edoc_layout.html#module-2">
%% edoc_layout:module/2</a> and <a href="http://erlang.org/doc/man/edoc_layout.html#overview-2">edoc_layout:overview/2</a>,
%% this module will append the following html snippet:
%%
%% ```
%% <script src="https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js"></script>
%% <script>mermaid.initialize({startOnLoad:true});</script>
%% '''
-module(edocmermaid_layout).
-export([module/2, overview/2, type/1]).
-include("edocmermaid.hrl").
%% @doc Calls edoc_layout:module/2 and appends the mermaid javascript
%% snippet to the generated page. Called from the edoc_doclet module.
module(Element, Options) ->
    %% Resolve the mermaid.js source: option override or the bundled file.
    Url = edocmermaid:get_mermaid_url(?MERMAID_URL_KEY, {file, ?MERMAID_JS}, Options),
    SimpleXml = edoc_layout:module(Element, Options),
    patch_layout(Url, SimpleXml).
%% @doc Type formatting is delegated unchanged to edoc_layout:type/1.
type(E) ->
    edoc_layout:type(E).
%% @doc Calls edoc_layout:overview/2 and appends the mermaid javascript
%% snippet to the generated page. Called from the edoc_doclet module.
overview(Element, Options) ->
    Url = edocmermaid:get_mermaid_url(?MERMAID_URL_KEY, {file, ?MERMAID_JS}, Options),
    X = edoc_layout:overview(Element, Options),
    patch_layout(Url, X).
%% @doc Unwrap, patch and re-wrap the HTML iolist produced by
%% edoc_layout: extract the body element, append the mermaid snippet,
%% then rebuild the surrounding structure unchanged.
patch_layout(MermaidUrl, Term) ->
    Body0 = unwrap(Term) ++ source_mermaid(MermaidUrl),
    wrap(Term, Body0).
%% Pull the <body> contents out of edoc_layout's iolist structure.
%% NOTE(review): the nested pattern mirrors edoc_layout's exact output
%% shape (doctype, W3C comment, then html > head/body); any change in
%% that shape in a newer edoc breaks these matches — verify against the
%% edoc version in use.
unwrap([_DOCTYPE, _W3C, _, _, _, [[_HTML0, [_, _Head, _, [_, Body, _], _], _HTML1], _]]) ->
    Body.

%% Put a (patched) body back into the same structure, reusing every
%% other part of the original term.
wrap(
    [_DOCTYPE, _W3C, _3, _4, _5, [[_HTML0, [_6, _Head, _7, [_8, _, _9], _10], _HTML1], _11]],
    Body
) ->
    [_DOCTYPE, _W3C, _3, _4, _5, [[_HTML0, [_6, _Head, _7, [_8, Body, _9], _10], _HTML1], _11]].
%% Build the HTML snippet that loads mermaid.js (from a bundled file or
%% a URL) and initialises it on page load.
source_mermaid({file, File}) ->
    mermaid_snippet(File);
source_mermaid(Url) ->
    mermaid_snippet(Url).

mermaid_snippet(Source) ->
    ["<script src=\"",
     Source,
     "\"></script>",
     "\n",
     "<script>mermaid.initialize({startOnLoad:true});</script>",
     "\n"].
-module(day1).
-behavior(aoc).
-include_lib("eunit/include/eunit.hrl").
-export([input_type/0, p1/1, p2/1]).
%% Tells the aoc harness to feed this module its input as a list of numbers.
input_type() -> number_list.
%% @doc
%% # Sonar Sweep
%% You're minding your own business on a ship at sea when the overboard alarm
%% goes off! You rush to see if you can help. Apparently, one of the Elves
%% tripped and accidentally sent the sleigh keys flying into the ocean!
%%
%% Before you know it, you're inside a submarine the Elves keep ready for
%% situations like this. It's covered in Christmas lights (because of course it
%% is), and it even has an experimental antenna that should be able to track the
%% keys if you can boost its signal strength high enough; there's a little meter
%% that indicates the antenna's signal strength by displaying 0-50 stars.
%%
%% Your instincts tell you that in order to save Christmas, you'll need to get
%% all fifty stars by December 25th.
%%
%% Collect stars by solving puzzles. Two puzzles will be made available on each
%% day in the Advent calendar; the second puzzle is unlocked when you complete
%% the first. Each puzzle grants one star. Good luck!
%%
%% As the submarine drops below the surface of the ocean, it automatically
%% performs a sonar sweep of the nearby sea floor. On a small screen, the
%% sonar sweep report (your puzzle input) appears: each line is a measurement
%% of the sea floor depth as the sweep looks further and further away from
%% the submarine.
%%
%% For example, suppose you had the following report:
%% {@see example/1}
%%
%% This report indicates that, scanning outward from the submarine, the sonar
%% sweep found depths of 199, 200, 208, 210, and so on.
%%
%% The first order of business is to figure out how quickly the depth increases,
%% just so you know what you're dealing with - you never know if the keys will
%% get carried into deeper water by an ocean current or a fish or something.
%%
%% To do this, count the number of times a depth measurement increases from
%% the previous measurement. (There is no measurement before the first
%% measurement.) In the example above, the changes are as follows:
%%
%% 199 (N/A - no previous measurement)
%% 200 (increased)
%% 208 (increased)
%% 210 (increased)
%% 200 (decreased)
%% 207 (increased)
%% 240 (increased)
%% 269 (increased)
%% 260 (decreased)
%% 263 (increased)
%%
%% In this example, there are 7 measurements that are larger
%% than the previous measurement.
%%
%% How many measurements are larger than the previous measurement?
%% Count how many depth readings are strictly larger than the reading
%% immediately before them.
p1(Depths) ->
    p1(Depths, 0).

p1([Prev | [Next | _] = Rest], Count) when Next > Prev ->
    p1(Rest, Count + 1);
p1([_Prev | [_Next | _] = Rest], Count) ->
    p1(Rest, Count);
p1(_Short, Count) ->
    Count.
-ifdef(EUNIT).
%% The worked example from the puzzle statement: 7 increases.
example(p1) -> [199, 200, 208, 210, 200, 207, 240, 269, 260, 263].

p1_example_test() ->
    ?assertEqual(7, p1(example(p1))).
-endif.
%% @doc
%% Considering every single measurement isn't as useful as you expected:
%% there's just too much noise in the data.
%%
%% Instead, consider sums of a three-measurement sliding window.
%% Again considering the above example:
%%
%% 199 A
%% 200 A B
%% 208 A B C
%% 210 B C D
%% 200 E C D
%% 207 E F D
%% 240 E F G
%% 269 F G H
%% 260 G H
%% 263 H
%%
%% Start by comparing the first and second three-measurement windows.
%% The measurements in the first window are marked A (199, 200, 208);
%% their sum is 199 + 200 + 208 = 607. The second window is marked
%% B (200, 208, 210); its sum is 618. The sum of measurements in the
%% second window is larger than the sum of the first, so this first
%% comparison increased.
%%
%% Your goal now is to count the number of times the sum of measurements
%% in this sliding window increases from the previous sum. So, compare A
%% with B, then compare B with C, then C with D, and so on. Stop when
%% there aren't enough measurements left to create a new three-measurement sum.
%%
%% In the above example, the sum of each three-measurement window is as follows:
%%
%% A: 607 (N/A - no previous sum)
%% B: 618 (increased)
%% C: 618 (no change)
%% D: 617 (decreased)
%% E: 647 (increased)
%% F: 716 (increased)
%% G: 769 (increased)
%% H: 792 (increased)
%%
%% In this example, there are 5 sums that are larger than the previous sum.
%%
%% Consider sums of a three-measurement sliding window.
%% How many sums are larger than the previous sum?
%% Smooth the readings with a 3-wide sliding-window sum, then count
%% increases exactly as in part 1.
p2(Depths) ->
    p1(windows(Depths)).
%% Sums of every 3-wide sliding window; fewer than three readings yield
%% an empty list.
windows(Depths) ->
    windows(Depths, []).

windows([First, Second, Third | _] = Depths, Acc) ->
    Sum = First + Second + Third,
    [_ | Rest] = Depths,
    windows(Rest, [Sum | Acc]);
windows(_TooShort, Acc) ->
    lists:reverse(Acc).
-ifdef(EUNIT).
%% Part 2 of the same worked example: 5 windowed increases.
p2_example_test() ->
    ?assertEqual(5, p2(example(p1))).
-endif.
%%%
%%% Limit tracker
%%%
%%% Behaviour:
%%%
%%% - If _limit_ is exceeded then there's no need to reject the transaction.
%%%
%%% - _Account_ operation is idempotent as long as the transaction is
%%% neither confirmed nor rejected.
%%%
%%% After that any transaction w/ the same ID will be handled regularly as a
%%% distinct transaction.
%%%
%%% - Limit itself is _not_ part of the state, just the _timespan_, implicitly.
%%%
%%% In a nutshell, we derive underlying 'account ID' from the timespan for
%%% the sake of simplicity. There are side effect though:
%%% * limits are independent in a sense that, for example, _daily_ limit
%%% changes do not count towards _monthly_ limit, and
%%% * there is no way to know that two transactions are one and the same if
%%% their IDs are equal but their timestamps are too far apart.
%%%
%%% - Accounting does not respect timezone-related quirks.
%%%
%%% If you want to, you should do it yourself. For example, you could convert
%%% UTC timestamps to timezone-specific timestamps and feed them here.
%%%
%%% For some reason which I can not wrap my head around `localtime` can
%%% resolve one UTC timestamp to _two_ timezone-specific timestamps in the
%%% middle of DST transition and let us resolve ambiguity. I believe taking
%%% earliest one would do the trick.
%%%
-module(ff_limit).
%% API
-export([account/4]).
-export([confirm/4]).
-export([reject/4]).
-export([get/4]).
%% Machinery
-behaviour(machinery).
-export([init/4]).
-export([process_timeout/3]).
-export([process_repair/4]).
-export([process_call/4]).
%% Types
-type limit(T) :: {id(), range(T), timespan()}.
-type range(T) :: ff_range:range(T).
-type timespan() :: day | week | month | year.
-type trxid() :: binary().
-type delta(T) :: ord(T).
-type trx(T) :: {trxid(), timestamp(), delta(T)}.
-type record(T) :: ff_indef:indef(T).
-type timestamp() :: machinery:timestamp().
% totally ordered
-type ord(T) :: T.
%% API
-type namespace() :: machinery:namespace().
-type id() :: machinery:id().
-type backend() :: machinery:backend(_).
-spec account(namespace(), limit(T), trx(T), backend()) ->
{ok, record(T)}
| {error, {exceeded, record(T)}}
| {error, {conflict, trx(T)}}.
-spec confirm(namespace(), limit(T), trx(T), backend()) ->
{ok, record(T)}
| {error, {conflict, trx(T)}}.
-spec reject(namespace(), limit(T), trx(T), backend()) ->
{ok, record(T)}
| {error, {conflict, trx(T)}}.
-spec get(namespace(), limit(T), timestamp(), backend()) -> {ok, record(T)} | {error, notfound}.
%% Registers a transaction against the limit's bucket machine. Returns
%% the updated record, an 'exceeded' error carrying the current record,
%% or a 'conflict' with a previously seen transaction of the same ID.
account(NS, Limit, Trx, Backend) ->
    MachineID = construct_limit_machine_id(Limit, Trx),
    Range = get_limit_range(Limit),
    lazycall(NS, MachineID, {account, Trx, Range}, Backend).

%% Confirms a previously accounted transaction on its bucket machine.
confirm(NS, Limit, Trx, Backend) ->
    lazycall(NS, construct_limit_machine_id(Limit, Trx), {confirm, Trx}, Backend).

%% Rejects a previously accounted transaction on its bucket machine.
reject(NS, Limit, Trx, Backend) ->
    lazycall(NS, construct_limit_machine_id(Limit, Trx), {reject, Trx}, Backend).
%% Fetches the current limit record for the bucket the timestamp Ts
%% falls into, or {error, notfound} if no machine exists for that
%% bucket yet.
get(NS, Limit, Ts, Backend) ->
    MachineID = construct_limit_machine_id_(Limit, Ts),
    case machinery:get(NS, MachineID, {undefined, 0, forward}, Backend) of
        {ok, #{aux_state := AuxSt}} -> {ok, head(AuxSt)};
        {error, notfound} = Error -> Error
    end.
%% Calls the machine, transparently starting it (seeded with 0) and
%% retrying the call if it does not exist yet.
lazycall(NS, ID, Call, Backend) ->
    case machinery:call(NS, ID, {undefined, 0, forward}, Call, Backend) of
        {error, notfound} ->
            %% First touch of this bucket: spin the machine up, then retry.
            _ = machinery:start(NS, ID, 0, Backend),
            lazycall(NS, ID, Call, Backend);
        {ok, Response} ->
            Response
    end.
%% Derives the backing machine ID from the limit and the transaction's
%% own timestamp.
construct_limit_machine_id(Limit, Trx) ->
    construct_limit_machine_id_(Limit, get_trx_ts(Trx)).

%% Derives the backing machine ID from the limit and an explicit
%% timestamp, joining "limit/<ID>/<Span>/<Bucket>" with slashes.
construct_limit_machine_id_(Limit, Ts) ->
    Span = get_limit_span(Limit),
    ff_string:join($/, [
        limit,
        get_limit_id(Limit),
        Span,
        find_bucket(Ts, Span)
    ]).
%% Maps a timestamp (or a bare {Y, M, D} date) to an integer bucket
%% identifier for the given timespan; a full machinery timestamp is
%% first reduced to its date component.
find_bucket({{Date, _Time}, _USec}, Span) ->
    find_bucket(Date, Span);
find_bucket(Date, day) ->
    calendar:date_to_gregorian_days(Date);
find_bucket(Date, week) ->
    %% ISO week numbering: e.g. {2020, 25} becomes 202025.
    {Year, Week} = calendar:iso_week_number(Date),
    Year * 100 + Week;
find_bucket({Year, Month, _Day}, month) ->
    Year * 100 + Month;
find_bucket({Year, _Month, _Day}, year) ->
    Year.
%% Machinery
-type ev(T) ::
{seed, ord(T)}
| {account, trx(T)}
| {confirm, trx(T)}
| {reject, trx(T)}.
-type auxst(T) :: #{
head := ff_indef:indef(T),
trxs := #{trxid() => trx(T)}
}.
-type machine(T) :: machinery:machine(ev(T), auxst(T)).
-type result(T) :: machinery:result(ev(T), auxst(T)).
-type handler_opts() :: machinery:handler_opts(_).
-type handler_args() :: machinery:handler_args(_).
-spec init(ord(T), machine(T), _, handler_opts()) -> result(T).
-spec process_timeout(machine(T), _, handler_opts()) -> result(T).
-type call(T) ::
{account, trx(T), limit(T)}
| {confirm, trx(T)}
| {reject, trx(T)}.
-spec process_call(call(T), machine(T), _, handler_opts()) ->
{
{ok, record(T)}
| {error, {conflict, ord(T)}},
result(T)
}.
-spec process_repair(ff_repair:scenario(), machine(_), handler_args(), handler_opts()) -> no_return().
%% Machine initialisation: seed the indefinite counter and expose it
%% both as the first event and as the aux state.
init(Seed, #{}, _, _Opts) ->
    #{events => [{seed, Seed}], aux_state => new_st(Seed)}.
%% No timeouts are ever scheduled for limit machines; a spurious one
%% is a no-op (empty result).
process_timeout(#{}, _, _Opts) ->
    #{}.
%% Dispatches machine calls to the per-operation handlers.
%% NOTE: the third element of an {account, ...} call is the limit's
%% *range*, not the whole limit — account/4 sends get_limit_range(Limit)
%% — so it is bound as Range here to match process_account/3's
%% parameter (the call() type's limit(T) tag above is misleading).
process_call({account, Trx, Range}, #{aux_state := St}, _, _Opts) ->
    process_account(Trx, Range, St);
process_call({confirm, Trx}, #{aux_state := St}, _, _Opts) ->
    process_confirm(Trx, St);
process_call({reject, Trx}, #{aux_state := St}, _, _Opts) ->
    process_reject(Trx, St).
%% Repair is deliberately unsupported for limit machines.
process_repair(_Scenario, _Machine, _Args, _Opts) ->
    erlang:error({not_implemented, repair}).
%% Handles an {account, ...} call.
%% - Unknown transaction ID: tentatively apply the delta and check the
%%   resulting range against the limit; the event (and thus the new
%%   state) is only emitted when within range — on 'exceeded' an empty
%%   result map is returned, so the tentative St1 is discarded.
%% - {ok, Trx} matches the already-bound Trx, i.e. the exact same
%%   transaction was seen before: idempotent success, no new event.
%%   Clause order matters: this must precede the {ok, TrxWas} clause.
%% - Same ID but different payload: conflict with the stored one.
process_account(Trx, Range, St0) ->
    case lookup_trx(get_trx_id(Trx), St0) of
        error ->
            St1 = record_trx(Trx, St0),
            Head1 = head(St1),
            case ff_range:contains(Range, ff_indef:to_range(Head1)) of
                true ->
                    {{ok, Head1}, #{
                        events => [{account, Trx}],
                        aux_state => St1
                    }};
                false ->
                    {{error, {exceeded, Head1}}, #{}}
            end;
        {ok, Trx} ->
            {{ok, head(St0)}, #{}};
        {ok, TrxWas} ->
            {{error, {conflict, TrxWas}}, #{}}
    end.
%% Handles a {confirm, Trx} call.
%% - {ok, Trx} (matches the bound Trx, i.e. identical transaction):
%%   confirm it and emit the event. Clause order matters — this must
%%   precede the {ok, TrxWas} conflict clause.
%% - Same ID, different payload: conflict with the stored transaction.
%% - Not found: idempotent success with the current head (per the
%%   module doc, an already-settled transaction ID is no longer known).
process_confirm(Trx, St0) ->
    case lookup_trx(get_trx_id(Trx), St0) of
        {ok, Trx} ->
            St1 = confirm_trx(Trx, St0),
            {{ok, head(St1)}, #{
                events => [{confirm, Trx}],
                aux_state => St1
            }};
        {ok, TrxWas} ->
            {{error, {conflict, TrxWas}}, #{}};
        error ->
            {{ok, head(St0)}, #{}}
    end.
%% Handles a {reject, Trx} call. Mirrors process_confirm/2 exactly,
%% rolling the delta back via reject_trx/2 instead of confirming it.
%% The same clause-ordering caveat applies: {ok, Trx} (identical
%% transaction) must be tried before the {ok, TrxWas} conflict clause.
process_reject(Trx, St0) ->
    case lookup_trx(get_trx_id(Trx), St0) of
        {ok, Trx} ->
            St1 = reject_trx(Trx, St0),
            {{ok, head(St1)}, #{
                events => [{reject, Trx}],
                aux_state => St1
            }};
        {ok, TrxWas} ->
            {{error, {conflict, TrxWas}}, #{}};
        error ->
            {{ok, head(St0)}, #{}}
    end.
%%
%% Fresh machine state: a seeded indefinite counter and no pending
%% transactions.
new_st(Seed) ->
    #{head => ff_indef:new(Seed), trxs => #{}}.
%% Current value of the limit counter held in the state.
head(#{head := Counter}) -> Counter.
%% Looks up a pending transaction by ID: {ok, Trx} | error.
lookup_trx(TrxID, #{trxs := Pending}) ->
    maps:find(TrxID, Pending).
%% Applies the transaction's delta to the head and remembers the
%% transaction as pending.
record_trx(Trx, St = #{head := Head, trxs := Pending}) ->
    St#{
        head := ff_indef:account(get_trx_dv(Trx), Head),
        trxs := Pending#{get_trx_id(Trx) => Trx}
    }.
%% Settles a pending transaction as confirmed and forgets it; the ID
%% becomes reusable afterwards.
confirm_trx(Trx, St = #{head := Head, trxs := Pending}) ->
    St#{
        head := ff_indef:confirm(get_trx_dv(Trx), Head),
        trxs := maps:remove(get_trx_id(Trx), Pending)
    }.
%% Settles a pending transaction as rejected (rolling its delta back)
%% and forgets it; the ID becomes reusable afterwards.
reject_trx(Trx, St = #{head := Head, trxs := Pending}) ->
    St#{
        head := ff_indef:reject(get_trx_dv(Trx), Head),
        trxs := maps:remove(get_trx_id(Trx), Pending)
    }.
%%
%% Field accessors for a transaction triple {ID, Timestamp, Delta}.
get_trx_id({ID, _, _}) -> ID.
get_trx_ts({_, Ts, _}) -> Ts.
get_trx_dv({_, _, Dv}) -> Dv.
%% Field accessors for a limit triple {ID, Range, Timespan}.
get_limit_id({ID, _, _}) -> ID.
get_limit_range({_, Range, _}) -> Range.
get_limit_span({_, _, Span}) -> Span.
%%==========================================================================
%% Copyright (C) 2004 <NAME>
%% 2010 <NAME>
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the
%% "Software"), to deal in the Software without restriction, including
%% without limitation the rights to use, copy, modify, merge, publish,
%% distribute, sublicense, and/or sell copies of the Software, and to permit
%% persons to whom the Software is furnished to do so, subject to the
%% following conditions:
%%
%% The above copyright notice and this permission notice shall be included
%% in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
%% OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
%% MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
%% NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
%% DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
%% OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
%% USE OR OTHER DEALINGS IN THE SOFTWARE.
%%
%% Authors: <NAME> <<EMAIL>>, <NAME> <<EMAIL>>
%% Purpose: Add API call to create a table in a document.
%%==========================================================================
-module(eg_table).
-include("../include/eg.hrl").
-export([table/8, table_from_xml/8]).
% State during main output routine
-record(st,
        {
         y = 735,    % How far up the current page; starting position is 735
         min_y = 60, % Bottom margin - bottom edge of page to last text line
         max_y = 735 % Top margin - bottom edge of page to top of text area
        }).
%% @doc Take an XML string and generate a table from it in the PDF
%% <br/><br/><table>
%% <tr><td>PDF</td><td>PID of PDF being produced </td></tr>
%% <tr><td>XML</td><td>string of XML defining the table and its contents </td> </tr>
%% <tr><td>X</td><td>X coordinate of the top left corner of the table box </td></tr>
%% <tr><td>Width</td> <td>maximum width of the table </td></tr>
%% <tr><td>Start</td> <td>Y coordinate of top left corner of the box</td> </tr>
%% <tr><td>Bottom</td><td>Y coordinate of maximum bottom of the box</td> </tr>
%% <tr><td>FontSize</td><td>size of the letters in points</td> </tr>
%% <tr><td>FontChoice</td><td>font name</td> </tr>
%% </table>
%%
%% with an XML string like the following:
%% <code><pre> <row><cell>Escape Sequence</cell><cell>Value</cell></row>
%% <row><cell>\\b</cell><cell>Backspace</cell></row>
%% <row><cell>\\d</cell><cell>Delete</cell></row>
%% <row><cell>\\e</cell><cell>Escape</cell></row>
%% </pre></code>
%%
%% You get the following PDF results
%%
%% <center><img src="../table_example.bmp" width="247" height="115"></img><br/></center>
table_from_xml(PDF, XML, X, Width, Start, Bottom, FontSize, FontChoice) ->
    %% Parse the XML string into forms, unwrap each {xml, Content}
    %% form into a row, and hand the rows to table/8.
    Forms = eg_xml_lite:parse_all_forms(XML),
    Rows = lists:map(fun({xml, Content}) -> Content end, Forms),
    table(PDF, Rows, X, Width, Start, Bottom, FontSize, FontChoice).
%% @doc Creating tables
%%
%% 1. work out number of columns
%%
%% 2. split space evenly, leaving an extra 4 Pts for white space around each,
%% and an extra 1 Pt for each vertical Line
%%
%% 2 bis. OR Turn each cell into RTF as if it had the full width to use.
%%
%% Then find a bunch of metrics about the table which will
%% help us find a good set of column widths.
%%
%% Find the longest word in each column. This sets the
%% minimum column width.
%%
%%    If the sum of the longest words is greater than the total
%%    width available then issue a warning, but carry on with
%%    the table extending off the page to the right (the really
%%    tricky thing would be to switch to landscape mode automatically
%%    :)
%%
%% Ideally we want to fit into the minimum vertical space,
%% so if there is a solution where every or most rows can
%% fit on one line this would be very nice.
%%
%% Could do proportional split based on total volume of text
%% in each column
%%
%% We could find the longest cell for each column
%%
%% If all cells in a column will fit on a single line within what
%% would be an even split between the columns, allow that column to
%% take up the space it needs to fit everything one one line? OK,
%% but at the limit might really penalise another column with lots
%% of text.
%%
%% 3. Convert the text into RTF lines for each cell
%%
%% 4. For each row work out the number of lines required
%% (largest number of lines).
%%
%% 5. Output the table, drawing lines as we go
%%
%% TODO - Parameterise by: Spacing around text, table width, row background
%% Clever algorithm to arrange column widths
%% Do page breaks in the middle of a table
%%
%% spec table(PDF, Rows, X, Y, SO) ->
%% Total_Y
%% Lays out and draws the whole table: picks column widths via a
%% multi-step balancing pipeline (I1..I7), re-flows every cell at the
%% chosen widths, then renders row by row. Returns the final #st{} (or
%% 'not_fitting' when there is no room to even start the table).
table(PDF, Rows, X, Width, Start, Bottom, FontSize, FontChoice) ->
    eg_pdf:set_font(PDF, "Times-Roman", 10), %% when set_font is not done first, no font catalog
    %% goes into the output and formatting is erratic.
    S0 = #st{y = Start, min_y = Bottom},
    S = space_before(10, S0),
    Cols = max_length(Rows, 0), % Number of cols is max cols of all rows.
    %% Col_width = Width div Cols,
    W = Width - 5 * Cols,
    %% First pass: break every cell as if it had the full width W, to
    %% gather metrics (word lengths / volumes) for the balancing below.
    RTF_words = lists:map(fun(Row) ->
        row2rtf(Row, lists:duplicate(Cols, W), FontSize, FontChoice)
                          end, Rows),
    %% Find the longest single word in each column. Start with a minimum col
    %% size of 20000 milliPts. This gives reasonable minimum column widths
    Longest_words = tuple_to_list(
        longest_words(RTF_words, erlang:make_tuple(Cols, 28000))),
    %% io:format("Longest word = ~p~n",[Longest_words]),
    Min_tab_width = lists:sum(Longest_words) + Cols * 5000,
    if Min_tab_width div 1000 + Cols > W -> % round up each col to next Pt
            io:format("*** Warning *** "
                      "Table will not fit into available width ~p~n",
                      [Min_tab_width div Cols + Cols]);
       true -> ok
    end,
    %% _TextWidth = Col_width - 4 - 1, % leave space for lines + ws
    %% Sum the total length of text in each column. This gives a
    %% general measure of the size of a column.
    Volumes = word_volumes(RTF_words, erlang:make_tuple(Cols, 0)),
    %% io:format("Volumes = ~p~n",[Volumes]),
    %% Find the longest single cell in each column. This does two
    %% things - it allows us to contract the whole table width if all
    %% columns fit completely within the overall width on a single
    %% line. It also might allow us to squeeze things around so that
    %% some columns are all one liners...
    Max_cell_widths = longest_cell(Volumes, erlang:make_tuple(Cols, 0)),
    %% io:format("Max Cell Widths = ~p~n",[Max_cell_widths]),
    Col_volume = sum_cells(Volumes, lists:duplicate(Cols, 0)),
    %% io:format("Column volumes = ~p~n",[Col_volume]),
    %% Try to share the column widths out respecting the minimums,
    %% but in proportion to the Volumes
    %% 1. Distribute by volume.
    I1 = lists:map(fun(Col) ->
                       Col / lists:sum(Col_volume) * W * 1000
                   end, Col_volume),
    %% io:format("I1 = ~p~n",[I1]),
    %% 2. Increase any columns which have been given less than their minimum
    {I2, Lost} = ensure_minimums(I1, Longest_words, 0, []),
    %% io:format("I2 = ~p~n",[{Lost, I2}]),
    %% 3. Try to give out any reductions amongst the other columns,
    %% not allowing them to go below minimum.
    I3 = distribute_reduction(I2, Longest_words, Lost),
    %% io:format("I3 = ~p~n",[I3]),
    %% 4. Reduce any cols which have been given more space than they
    %% need, noting which ones they are.
    %% 5. Try to give out any extra amongst cols which need it in
    %% proportion to their volume, not increasing the table width
    %% beyond the max width. Go back to step 4 to make sure we have
    %% not given any column too much. This recursion is handled by
    %% expand/3.
    I5bis = expand(I3, Max_cell_widths, W * 1000),
    %% 6. Normalise, rounding up to the nearest Pt.
    I6 = lists:map(fun({fixed, Val}) ->
                       round(Val / 1000) + 1;
                      (Val) ->
                       round(Val / 1000) + 1
                   end, I5bis),
    %% io:format("I6 = ~w~n",[I6]),
    %% Second pass: re-break each cell at its final column width.
    RTFRows = lists:map(fun(Row) ->
                            %% io:format("RTFRow = ~p~n",[Row]),
                            row2rtf(Row, I6, FontSize, FontChoice)
                        end, Rows),
    % io:format("RTFRows = ~p~n",[RTFRows]),
    Heights = lists:map(fun(Row) -> max_row_lines(Row, 0) end, RTFRows),
    % io:format("Heights = ~p~n",[Heights]),
    %% Re-add the 2 Pt gap and 1 Pt line width for each column
    I7 = lists:map(fun(Text_w) -> Text_w + 5 end, I6),
    %% Don't start a table near the bottom of a page so we don't get
    %% only a top line, and because it don't look good to start it at
    %% the bottom.
    case S#st.y - 26 < S#st.min_y of
        true -> not_fitting;
        false -> rows(PDF, Heights, RTFRows, X, I7, Cols, false, S, FontSize, FontChoice)
    end.
%% Insert Pts points of vertical whitespace, except when the cursor is
%% still at the very top of the page (y == max_y).
space_before(Pts, S = #st{y = Y, max_y = MaxY}) ->
    case Y == MaxY of
        true -> S;
        false -> S#st{y = Y - Pts}
    end.
%% Largest number of cells found in any row of the table.
max_length([{_Tag, _Attrs, Cells} | Rest], Max) ->
    Len = length(Cells),
    case Len > Max of
        true -> max_length(Rest, Len);
        false -> max_length(Rest, Max)
    end;
max_length([], Max) ->
    Max.
%% Normalises one table row element ('row' or 'header') into rich-text
%% lines, one entry per cell. Both element kinds used byte-identical
%% tag maps, so the duplicated construction is extracted into
%% make_tagmap/2; behaviour is unchanged (any other tag still fails
%% with function_clause).
row2rtf({Tag, _, Row}, Col_widths, FontSize, FontChoice) when Tag =:= row;
                                                              Tag =:= header ->
    row2rtf1(Row, Col_widths, make_tagmap(FontSize, FontChoice)).

%% Builds the {AllowedTags, FaceMap} tag map used to normalise the
%% XML of a cell: default / em / code / b faces from FontChoice.
make_tagmap(FontSize, FontChoice) ->
    {[cell], [{default, eg_richText:mk_face(FontChoice#table.def_font, FontSize,
                                            true, default, 0)},
              {em, eg_richText:mk_face(FontChoice#table.em_font, FontSize,
                                       true, default, 0)},
              {code, eg_richText:mk_face(FontChoice#table.code_font, FontSize,
                                         true, default, 0)},
              {b, eg_richText:mk_face(FontChoice#table.b_font, FontSize,
                                      true, default, 0)}]}.
%% Converts each cell of a row into {Lines, [Width], [Offset]} by
%% normalising its XML and breaking the rich text at the cell's
%% column width. When the row has fewer cells than there are column
%% widths, the middle clause pads the remaining columns with empty
%% content so every row yields exactly one entry per column.
row2rtf1([Cell | T], [Col_width | T1], TagMap) ->
    %%io:format("Cell = ~p~n",[{Col_width, Cell}]),
    Norm = eg_xml2richText:normalise_xml(Cell, TagMap),
    {cell, _, RichText} = Norm,
    %%io:format("Norm = ~p~n",[RichText]),
    {Lines, _, _} =
        eg_line_break:break_richText(RichText, {justified, [Col_width]}),
    %% io:format("Lines = ~p~n",[Lines]),
    [{Lines, [Col_width], [0]} | row2rtf1(T, T1, TagMap)];
row2rtf1(_, [Cw | T], TagMap) ->
    % Pads a missing trailing cell with empty lines at this width.
    [{[], [Cw], [0]} | row2rtf1([], T, TagMap)];
row2rtf1([], [], _) ->
    [].
%% Find the longest word in each column by folding every row through
%% lws/3; Acc is a tuple with one slot per column.
longest_words(Rows, Acc) ->
    lists:foldl(fun(Row, A) -> lws(Row, 1, A) end, Acc, Rows).
%% Sum the total lengths of all text per cell: returns one list of
%% per-column volumes for each row. The original accumulated with
%% `Acc ++ [X]` inside a fold (quadratic); a comprehension builds the
%% same list in linear time.
word_volumes(RTF, Arr) ->
    [begin
         {RowArr, _} = row_volumes(Row, Arr),
         tuple_to_list(RowArr)
     end || Row <- RTF].
%% Given the output of word_volumes/2, find the per-column maximum
%% cell volume; Arr is a zero-initialised tuple of column slots.
longest_cell(Arr_list, Arr) ->
    lists:foldl(fun max_vals/2, tuple_to_list(Arr), Arr_list).
%% Element-wise total of all row volume lists: the overall volume of
%% text in each column.
sum_cells(Arr_list, Arr) ->
    lists:foldl(fun sum_vals/2, Arr, Arr_list).
%% Raises every column width that fell below its minimum (longest
%% word) up to that minimum, tagging raised columns {fixed, Min} and
%% accumulating in Lost the total extra width handed out. Returns
%% {AdjustedCols, Lost}. Builds the result with prepend + reverse
%% instead of the original `Res ++ [X]`, which was O(n^2).
ensure_minimums([H | T], [H1 | T1], Lost, Res) ->
    if H > H1 ->
            ensure_minimums(T, T1, Lost, [H | Res]);
       true ->
            ensure_minimums(T, T1, Lost + H1 - H, [{fixed, H1} | Res])
    end;
ensure_minimums([], [], Lost, Res) ->
    {lists:reverse(Res), Lost}.
%% Spreads the Lost width (handed out by ensure_minimums/4) as a
%% reduction across the non-fixed columns, proportionally to their
%% current widths (Sum is the total of non-fixed widths). A column may
%% not drop below its minimum; if it would, it is pinned at the
%% minimum and the Lost figure is adjusted.
distribute_reduction(Cols, Minimums, Lost) ->
    Sum = lists:foldl(fun({fixed, _Col}, Sum) ->
                          Sum;
                         (Col, Sum) ->
                          Sum + Col
                      end, 0, Cols),
    distribute_reduction(Cols, Minimums, Sum, Lost).

distribute_reduction([{fixed, Col} | T], [_Min | T1], Sum, Lost) ->
    % Already-pinned columns pass through untouched.
    [{fixed, Col} | distribute_reduction(T, T1, Sum, Lost)];
distribute_reduction([Col | T], [Min | T1], Sum, Lost) ->
    Reduced = Col - Col / Sum * Lost,
    if Reduced < Min -> %% We can't take enough away. Darn!
            % NOTE(review): the shortfall (Min - Reduced) is SUBTRACTED
            % from Lost here, i.e. the undeliverable reduction is given
            % up rather than redistributed to later columns, and Lost
            % is clamped at 0 — verify this is the intended trade-off.
            New_lost = Lost - (Min - Reduced),
            New_lost1 = if New_lost < 0 ->
                               0;
                           true ->
                               New_lost
                        end,
            [{fixed, Min} | distribute_reduction(T, T1, Sum, New_lost1)];
       true ->
            [Reduced | distribute_reduction(T, T1, Sum, Lost)]
    end;
distribute_reduction([], [], _Sum, _Lost) ->
    [].
%% Recursively go through giving out any saved in earlier steps and
%% then ensuring that we have not given out too much, until all
%% columns are fixed, or
%% the total width reaches W (within 1 milliPt). Each iteration first
%% caps columns at their maximum needed width (ensure_maximums/4),
%% then hands the remaining slack back out proportionally
%% (distribute_increase/3).
expand(Init, Max_cell_widths, W) ->
    %% io:format("Init = ~p~n",[{Init, Max_cell_widths, W}]),
    {I1, _Gained} = ensure_maximums(Init, Max_cell_widths, 0, []),
    %% io:format("I1 Gained = ~p~n",[{Gained, I1}]),
    Sum = lists:foldl(fun({fixed, Col}, Sum) ->
                          Sum + Col;
                         (Col, Sum) ->
                          Sum + Col
                      end, 0, I1),
    %% io:format("Sum = ~p~n",[Sum]),
    Fixed = is_fixed(Init),
    %% io:format("Fixed = ~p~n",[Sum]),
    %% io:format("W = ~p~n",[W]),
    % Termination: stop when the table is (almost) full width or every
    % column is pinned — otherwise the recursion could not converge.
    if (Sum >= W - 1) or Fixed ->
            I1;
       true ->
            I2 = distribute_increase(I1, Sum, W),
            %% io:format("I2 Increase = ~p~n",[I2]),
            expand(I2, Max_cell_widths, W)
    end.
%% Height of a row in lines: the largest line count over its cells.
max_row_lines([{Lines, _, _} | Rest], Max) ->
    Count = length(Lines),
    case Count > Max of
        true -> max_row_lines(Rest, Count);
        false -> max_row_lines(Rest, Max)
    end;
max_row_lines([], Max) ->
    Max.
%% Renders the table rows top to bottom. Heights ([H | T]) gives each
%% row's line count. A horizontal rule is drawn above a row unless we
%% are continuing a row split across pages (Mid_row). A row that does
%% not fit on the current page is split with split_row/2; the part
%% that fits is drawn and the remainder is retried (page-break support
%% itself is commented out below). A final rule is drawn under the
%% table when all rows are consumed.
rows(PDF, [H | T], [Row | Rows], X, Col_widths, Cols, Mid_row, S, FontSize, FontChoice) ->
    %% TODO - Work out when to draw top line - i.e. when we are really
    %% at the start of a row even in a multipage row
    if Mid_row == false ->
            eg_pdf:rectangle(PDF, X, S#st.y, lists:sum(Col_widths) + 1, 1, fill);
       true -> ok
    end,
    if S#st.y - (H * FontSize) =< S#st.min_y ->
            % Row does not fit: draw only as many lines as still fit.
            This_page_lines = (S#st.y - S#st.min_y) div FontSize,
            {TPR, NPR} = split_row(This_page_lines, Row),
            S1 = row(PDF, TPR, X, Col_widths, This_page_lines, Cols, S, FontSize, FontChoice),
            %% io:format("NPR = ~p~n", [NPR]),
            %% Draw a line at the bottom as
            if NPR == [] ->
                    eg_pdf:rectangle(PDF, X, S1#st.y, lists:sum(Col_widths) + 1, 1,
                                     fill);
               true -> ok
            end,
            Mid_row_2 = NPR /= [],
            %% We need a new page, but don't want pending images
            %% appearing in the middle of the table hence 'false'.
            %% S2 = new_page(PDF, false, S1, FontSize),
            rows(PDF, [H - This_page_lines | T],
                 [NPR | Rows], X, Col_widths, Cols, Mid_row_2, S1, FontSize, FontChoice);
       true ->
            Y = S#st.y,
            S1 = row(PDF, Row, X, Col_widths, H, Cols, S, FontSize, FontChoice),
            rows(PDF, T, Rows, X, Col_widths, Cols, false,
                 S1#st{y = Y - (H * FontSize) - 5}, FontSize, FontChoice)
    end;
rows(PDF, [], [], X, Col_widths, _Cols, _Mid_row, S, _FontSize, _FontChoice) ->
    %% Draw final line under table
    eg_pdf:rectangle(PDF, X, S#st.y, lists:sum(Col_widths) + 1, 1, fill),
    S.
%% Walks the cells of one row, updating slot Col of the tuple Acc with
%% the longest word length seen so far in that column.
lws([{Lines, _, _} | Rest], Col, Acc) ->
    Longest = lw(Lines, 0),
    NewAcc = case element(Col, Acc) < Longest of
                 true -> setelement(Col, Acc, Longest);
                 false -> Acc
             end,
    lws(Rest, Col + 1, NewAcc);
lws([], _Col, Acc) ->
    Acc.
%% Adds each cell's text volume into its column slot of the tuple Arr;
%% returns {UpdatedTuple, NextColumnIndex}.
row_volumes(Row, Arr) ->
    lists:foldl(fun({Lines, _, _}, {Acc, Col}) ->
                    Updated = setelement(Col, Acc,
                                         element(Col, Acc) + line_volume(Lines)),
                    {Updated, Col + 1}
                end, {Arr, 1}, Row).
%% Adds the widths of all word and space items onto Vol.
words_volume(Words, Vol) ->
    lists:foldl(fun({space, W, _}, Acc) -> Acc + W;
                   ({word, W, _, _}, Acc) -> Acc + W
                end, Vol, Words).
%% Total text volume of one cell: sums the words of every richText
%% segment in its lines.
line_volume(Line) ->
    lists:foldl(fun({richText, Words}, Acc) ->
                    words_volume(Words, Acc)
                end, 0, Line).
%% Element-wise maximum of two equal-length lists (ties keep the
%% second list's element, as before).
max_vals([A | As], [B | Bs]) when A > B ->
    [A | max_vals(As, Bs)];
max_vals([_A | As], [B | Bs]) ->
    [B | max_vals(As, Bs)];
max_vals([], []) ->
    [].
%% Element-wise sum of two equal-length lists.
sum_vals(As, Bs) ->
    lists:zipwith(fun(A, B) -> A + B end, As, Bs).
%% Caps every non-fixed column at the width it actually needs
%% (Max_needed), tagging capped columns {fixed, Max_needed} and
%% accumulating the reclaimed width in Gain. Returns {Cols, Gain}.
%% Builds the result with prepend + reverse instead of the original
%% `Res ++ [X]`, which was O(n^2).
ensure_maximums([{fixed, H} | T], [_Max_needed | T1], Gain, Res) ->
    ensure_maximums(T, T1, Gain, [{fixed, H} | Res]);
ensure_maximums([H | T], [Max_needed | T1], Gain, Res) ->
    if Max_needed =< H ->
            ensure_maximums(T, T1, Gain + H - Max_needed,
                            [{fixed, Max_needed} | Res]);
       true ->
            ensure_maximums(T, T1, Gain, [H | Res])
    end;
ensure_maximums([], [], Gain, Res) ->
    {lists:reverse(Res), Gain}.
%% Hands the remaining slack (W - Sum) out to the non-fixed columns in
%% proportion to their current widths.
distribute_increase(Cols, Sum, W) ->
    Unfixed = lists:foldl(fun({fixed, _}, Acc) -> Acc;
                             (Col, Acc) -> Acc + Col
                          end, 0, Cols),
    distribute_increase1(Cols, W - Sum, Unfixed).
%% Gives each non-fixed column its proportional share of Gain; fixed
%% columns pass through untouched.
distribute_increase1([{fixed, _} = Pinned | Rest], Gain, ToShare) ->
    [Pinned | distribute_increase1(Rest, Gain, ToShare)];
distribute_increase1([Col | Rest], Gain, ToShare) ->
    [Col + Col / ToShare * Gain | distribute_increase1(Rest, Gain, ToShare)];
distribute_increase1([], _Gain, _ToShare) ->
    [].
%% True when every column width has been pinned with a {fixed, _} tag.
is_fixed(Cols) ->
    lists:all(fun({fixed, _}) -> true;
                 (_) -> false
              end, Cols).
%% Draws one table row: for each cell a vertical separator is drawn at
%% its left edge, then the cell's rich-text lines are emitted 3 Pts in
%% from the separator. Cols counts down so that when the row has fewer
%% cells than columns, the remaining clauses still draw the separators
%% for the empty trailing columns and the row's right edge.
row(PDF, [{Lines, Width, Off} | Cells], X, [Col_width | T], Height, Cols, S, FontSize, FontChoice) ->
    eg_pdf:rectangle(PDF, X, S#st.y, 1, -(Height * FontSize) - 5, fill),
    eg_pdf:begin_text(PDF),
    lines2pdf(PDF, X + 3, S#st.y, Lines, FontSize, Width, Off, justified),
    eg_pdf:end_text(PDF),
    row(PDF, Cells, X + Col_width, T, Height, Cols - 1, S, FontSize, FontChoice);
row(PDF, [], X, [], Height, 0, S, FontSize, _FontChoice) ->
    % All columns consumed: draw the table's right-hand edge.
    eg_pdf:rectangle(PDF, X, S#st.y, 1, -(Height * FontSize) - 5, fill),
    S;
row(PDF, [], X, [Col_width | T], Height, Cols, S, FontSize, FontChoice) ->
    % Short row: draw separators across the remaining empty columns.
    eg_pdf:rectangle(PDF, X, S#st.y, 1, -(Height * FontSize) - 5, fill),
    row(PDF, [], X + Col_width, T, Height, Cols - 1, S, FontSize, FontChoice).
%% Split a table row into two rows where the first has no more
%% than Height number of lines in any cell, and the second row
%% contains the remaining lines.
%% TPR - This Page Rows
%% NPR - Next Page Rows
%% TPL - this page lines etc
%% (The original accumulated with `Acc ++ [X]` inside a fold, which is
%% O(n^2); building pairs and lists:unzip/1 yields the same two lists
%% in linear time.)
split_row(Height, Cells) ->
    lists:unzip(
      [begin
           {TPL, NPL} =
               case length(Cell) =< Height of
                   true -> {Cell, []};
                   false -> lists:split(Height, Cell)
               end,
           {{TPL, W, O}, {NPL, W, O}}
       end || {Cell, W, O} <- Cells]).
%% Renders rich-text lines to raw PDF text operators and appends them
%% to the document's content stream.
lines2pdf(PDF, X, Y, Lines, Leading, Widths, Off, Justification) ->
    Pdf_code = eg_richText2pdf:richText2pdf(PDF, X, Y, Justification, 0,
                                            Lines, Leading, Widths, Off),
    eg_pdf:append_stream(PDF, Pdf_code).
%% Longest word across all richText segments of a cell's lines.
lw(Lines, Acc) ->
    lists:foldl(fun({richText, Words}, A) -> lw1(Words, A) end, Acc, Lines).
%% Longest word width in a list of rich-text items; anything that is
%% not a word (spaces etc.) is skipped.
lw1([{word, Len, _, _} | Rest], Longest) when Len > Longest ->
    lw1(Rest, Len);
lw1([_ | Rest], Longest) ->
    lw1(Rest, Longest);
lw1([], Longest) ->
    Longest.
% @copyright 2008-2011 Zuse Institute Berlin
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%% @author <NAME> <<EMAIL>>
%% @doc Cyclon node cache implementation using a list.
%% @end
%% @version $Id$
-module(cyclon_cache).
-author('<EMAIL>').
-vsn('$Id$').
-include("scalaris.hrl").
%% API
-export([new/0, new/2, size/1,
add_node/3, remove_node/2, trim/2,
get_random_subset/2, get_random_nodes/2,
get_nodes/1, get_ages/1,
inc_age/1, merge/5,
pop_random_node/1, pop_oldest_node/1,
debug_format_by_age/1]).
-ifdef(with_export_type_support).
-export_type([age/0, cache/0]).
-endif.
-type age() :: non_neg_integer().
-type element() :: {node:node_type(), age()}.
-type cache() :: [ element() ].
%% @doc Creates a new, empty node cache.
-spec new() -> cache().
new() -> [].
%% @doc Creates a node cache holding the given two nodes with age 0.
%% If both arguments refer to the same process, only the newer of the
%% two node records is kept.
-spec new(node:node_type(), node:node_type()) -> cache().
new(Node1, Node2) ->
    case node:same_process(Node1, Node2) of
        false -> [{Node2, 0}, {Node1, 0}];
        true -> [{node:newer(Node1, Node2), 0}]
    end.
%% @doc Returns the number of entries in the cache.
-spec size(cache()) -> non_neg_integer().
size(Entries) ->
    length(Entries).
%% @doc Picks a uniformly random node from the (non-empty!) cache,
%% dropping its age.
-spec get_random_node(Cache::[element(),...]) -> node:node_type().
get_random_node(Cache) ->
    element(1, util:randomelem(Cache)).
%% @doc Removes one uniformly random element from the (non-empty!)
%% cache; returns the remaining cache and the removed node.
-spec pop_random_node([Cache::element(),...]) -> {NewCache::cache(), PoppedNode::node:node_type()}.
pop_random_node(Cache) ->
    pop_random_node(Cache, cyclon_cache:size(Cache)).

%% @doc Same as pop_random_node/1 with the cache size supplied by the
%% caller (avoids recounting).
-spec pop_random_node(Cache::[element(),...], CacheSize::non_neg_integer()) -> {NewCache::cache(), PoppedNode::node:node_type()}.
pop_random_node(Cache, CacheSize) ->
    {Rest, {PoppedNode, _Age}} = util:pop_randomelem(Cache, CacheSize),
    {Rest, PoppedNode}.
%% @doc Returns a random subset of N elements (node + age) of the cache.
-spec get_random_subset(N::non_neg_integer(), Cache::cache()) -> RandomSubset::cache().
get_random_subset(N, Cache) ->
    util:random_subset(N, Cache).

%% @doc Returns a random subset of N nodes of the cache, ages stripped.
-spec get_random_nodes(N::non_neg_integer(), Cache::cache()) -> Nodes::[node:node_type()].
get_random_nodes(N, Cache) ->
    [PickedNode || {PickedNode, _Age} <- util:random_subset(N, Cache)].
%% @doc Finds the oldest element (randomized if multiple oldest elements) and
%% removes it from the cache returning the new cache and this node.
-spec pop_oldest_node(Cache::cache()) -> {NewCache::cache(), PoppedNode::node:node_type()}.
pop_oldest_node(Cache) ->
    % Fold collects ALL elements that share the maximum age, so ties
    % can be broken uniformly at random below. Starting MaxAge at 0 is
    % safe because age() is non-negative and age-0 entries are kept by
    % the =:= clause.
    {OldElements, _MaxAge} =
        lists:foldl(
          fun ({Node, Age}, {PrevOldElems, MaxAge}) ->
                  if Age > MaxAge ->
                         {[{Node, Age}], Age};
                     Age =:= MaxAge ->
                         {[{Node, Age} | PrevOldElems] , Age};
                     Age < MaxAge ->
                         {PrevOldElems, MaxAge}
                  end
          end,
          {[], 0},
          Cache),
    NodeP = get_random_node(OldElements),
    NewCache = remove_node(NodeP, Cache),
    {NewCache, NodeP}.
%% @doc Ages the whole cache: every entry's age is bumped by one.
-spec inc_age(Cache::cache()) -> NewCache::cache().
inc_age(Cache) ->
    lists:map(fun({Node, Age}) -> {Node, Age + 1} end, Cache).
%% @doc Checks whether any cache entry refers to the same process as
%% the given node.
-spec contains_node(Node::node:node_type(), Cache::cache()) -> Result::boolean().
contains_node(Node, Cache) ->
    lists:any(fun({Other, _Age}) -> node:same_process(Other, Node) end, Cache).
%% @doc Returns the ages of all cache entries, in cache order.
-spec get_ages(Cache::cache()) -> Ages::[age()].
get_ages(Cache) ->
    lists:map(fun({_Node, Age}) -> Age end, Cache).
%% @doc Returns all nodes of the cache, ages stripped, in cache order.
-spec get_nodes(Cache::cache()) -> Nodes::[node:node_type()].
get_nodes(Cache) ->
    lists:map(fun({Node, _Age}) -> Node end, Cache).
%% @doc Merges MyCache at node MyNode with the ReceivedCache from another node
%%      to whom SendCache has been send. The final cache size will not extend
%%      TargetSize.
%%      This will discard received entries pointing at MyNode and entries
%%      already contained in MyCache, fill up empty slots in the cache with
%%      received entries and further replace elements in MyCache using
%%      replace/5.
-spec merge(MyCache::cache(), MyNode::node:node_type(), ReceivedCache::cache(), SendCache::cache(), TargetSize::pos_integer()) -> NewCache::cache().
merge(MyCache, MyNode, ReceivedCache, SendCache, TargetSize) ->
    % first sort the two lists to allow transformation into 3 lists containing
    % the received entries without the already known nodes, a list of entries
    % from both caches (with updated IDVersions) and a list of entries from my
    % cache without the updated entries
    {EntriesInReceivedCacheOnly, EntriesInBoth_Updated, EntriesInMyCacheOnly} =
        util:split_unique(ReceivedCache, MyCache,
                          fun({N1, _}, {N2, _}) -> node:pidX(N1) =< node:pidX(N2) end,
                          % on a match, take the newer node record but KEEP
                          % the age we already track locally (MyAge)
                          fun({N1, _}, {N2, MyAge}) -> {node:newer(N1, N2), MyAge} end),
    MyC1 = EntriesInMyCacheOnly ++ EntriesInBoth_Updated,
    MyC1Size = cyclon_cache:size(MyC1),
    % remove eventually existing references to the node itself
    ReceivedCache_Filtered =
        [Elem || {Node, _Age} = Elem <- EntriesInReceivedCacheOnly,
                 not node:same_process(Node, MyNode)],
    SendCache_Filtered =
        [Elem || {Node, _Age} = Elem <- SendCache,
                 not node:same_process(Node, MyNode)],
    % finally fill up my cache to the full size (if necessary) and start
    % replacing entries
    {MyC2, ReceivedCacheRest, AddedElements} =
        fillup(MyC1, ReceivedCache_Filtered, TargetSize - MyC1Size),
    MyC2Size = MyC1Size + AddedElements,
    replace(MyC2, MyC2Size, ReceivedCacheRest, SendCache_Filtered, TargetSize).
%% @doc Trims the cache down to TargetSize (if necessary) by repeatedly
%% removing a uniformly random entry while it is still too large.
-spec trim(Cache::cache(), CacheSize::non_neg_integer(), TargetSize::pos_integer()) -> NewCache::cache().
trim(Cache, CacheSize, TargetSize) when CacheSize =< TargetSize ->
    Cache;
trim(Cache, CacheSize, TargetSize) ->
    {Rest, _Dropped} = util:pop_randomelem(Cache, CacheSize),
    trim(Rest, CacheSize - 1, TargetSize).
%% @doc Fills up MyCache with (up to) ToAddCount entries taken from the
%% front of ReceivedCache; returns the grown cache, the untouched rest
%% of ReceivedCache and how many entries were actually added.
-spec fillup(MyCache::cache(), ReceivedCache::cache(), ToAddCount::non_neg_integer()) -> {MyNewCache::cache(), ReceivedCacheRest::cache(), AddedElements::non_neg_integer()}.
fillup(MyCache, ReceivedCache, ToAddCount) ->
    fillup(MyCache, ReceivedCache, ToAddCount, 0).

%% @doc Worker for fillup/3 carrying the added-so-far counter; stops
%% when the budget is used up or the received list runs dry.
-spec fillup(MyCache::cache(), ReceivedCache::cache(), ToAddCount::non_neg_integer(), AddedElements::non_neg_integer()) -> {MyNewCache::cache(), ReceivedCacheRest::cache(), AddedElements::non_neg_integer()}.
fillup(Cache, Received, 0, Added) ->
    {Cache, Received, Added};
fillup(Cache, [], _ToAdd, Added) ->
    {Cache, [], Added};
fillup(Cache, [Elem | Rest], ToAdd, Added) ->
    fillup([Elem | Cache], Rest, ToAdd - 1, Added + 1).
%% @doc Updates MyCache to include all entries of ReceivedCache: firstly by
%% replacing entries that were handed out in SendCache, secondly by
%% filling up free slots and thirdly by replacing random entries.
%% ReceivedCache must not contain the local node and must not contain any
%% node that MyCache already contains!
%% SendCache must not contain the local node!
-spec replace(MyCache::cache(), MyCacheSize::non_neg_integer(), ReceivedCache::cache(), SendCache::cache(), TargetSize::pos_integer()) -> MyNewCache::cache().
replace([] = _MyCache, MyCacheSize, ReceivedCache, _SendCache, TargetSize) ->
% the cache size (although otherwise not needed) should still be correct:
0 = MyCacheSize,
% nothing local to keep - just take the received entries, trimmed to size
trim(ReceivedCache, cyclon_cache:size(ReceivedCache), TargetSize);
replace(MyCache, _MyCacheSize, [], _SendCache, _TargetSize) ->
% nothing was received, so the local cache stays as-is
MyCache;
replace(MyCache, MyCacheSize, ReceivedCache, [] = _SendCache, TargetSize) ->
% trim MyCache so it has enough space for all elements of ReceivedCache
% and add all received elements
ReceivedCacheSize = cyclon_cache:size(ReceivedCache),
MyC1 = trim(MyCache, MyCacheSize, TargetSize - ReceivedCacheSize),
MyC2 = MyC1 ++ ReceivedCache,
MyC2;
replace(MyCache, _MyCacheSize, ReceivedCache, SendCache, TargetSize) ->
% filter all nodes from SendCache out of MyCache to make room for entries
% from ReceivedCache (MyC1 keeps entries NOT in SendCache; SendCache_new
% holds the local entries that were also sent away)
{MyC1, SendCache_new} =
lists:partition(
fun({Node, _Age}) -> not contains_node(Node, SendCache) end,
MyCache),
MyC1Size = cyclon_cache:size(MyC1),
% trim MyC1 so it has enough space for all elements of ReceivedCache
ReceivedCacheSize = cyclon_cache:size(ReceivedCache),
MyC2 = trim(MyC1, MyC1Size, TargetSize - ReceivedCacheSize),
% trim/3 caps the size at (TargetSize - ReceivedCacheSize), hence min/2
MyC2Size = erlang:min(MyC1Size, TargetSize - ReceivedCacheSize),
% add all received elements to MyC2
MyC3 = MyC2 ++ ReceivedCache,
MyC3Size = MyC2Size + ReceivedCacheSize,
% finally fill up MyC3 (if necessary) with elements from SendCache_new that
% are not in ReceivedCache and thus not in MyC3
case MyC3Size < TargetSize of
true ->
SendC3 = [Elem || {Node, _Age} = Elem <- SendCache_new,
not contains_node(Node, ReceivedCache)],
{MyC4, _SendC3Rest, _AddedElements} =
fillup(MyC3, SendC3, TargetSize - MyC3Size),
MyC4;
false ->
MyC3
end.
%% @doc Adds the given node to the cache, or refreshes its age if it is
%%      already present.
%%      Beware: the node is added no matter how large the cache already is!
-spec add_node(Node::node:node_type(), Age::age(), Cache::cache()) -> NewCache::cache().
add_node(Node, Age, Cache) ->
    % drop a stale entry for Node first (if any), then prepend the fresh one
    Rest = case contains_node(Node, Cache) of
               true  -> remove_node(Node, Cache);
               false -> Cache
           end,
    [{Node, Age} | Rest].
%% @doc Removes every element referring to the given Node from the Cache.
-spec remove_node(Node::node:node_type(), Cache::cache()) -> NewCache::cache().
remove_node(Node, Cache) ->
    % keep only entries whose node is a different process than Node
    [E || {N, _Age} = E <- Cache, not node:same_process(N, Node)].
%% @doc Trims the cache to at most TargetSize entries by deleting random
%%      elements; convenience wrapper that computes the size for trim/3.
-spec trim(Cache::cache(), TargetSize::pos_integer()) -> NewCache::cache().
trim(Cache, TargetSize) ->
    CacheSize = cyclon_cache:size(Cache),
    trim(Cache, CacheSize, TargetSize).
%% @doc Returns a list of keys (ages) and string values (nodes) for debug
%%      output used in the web interface.
-spec debug_format_by_age(Cache::cache()) -> KeyValueList::[{Age::age(), Node::string()}].
debug_format_by_age(Cache) ->
    % render each node with ~p so arbitrary terms stay printable/escapable
    [{Age, webhelpers:safe_html_string("~p", [Node])} || {Node, Age} <- Cache].
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(khepri_utils).
-include_lib("stdlib/include/assert.hrl").
-include("include/khepri.hrl").
-export([flat_struct_to_tree/1,
display_tree/1,
display_tree/2,
display_tree/3]).
%% khepri:get_root/1 is unexported when compiled without `-DTEST'.
-dialyzer(no_missing_calls).
-spec flat_struct_to_tree(khepri_machine:node_props_map()) ->
khepri_machine:node_props().
%% Converts a flat `Path => NodeProps' map into a nested tree where each
%% node carries its children under the `child_nodes' key.
flat_struct_to_tree(FlatStruct) ->
    % props stored under the empty path belong to the root node itself
    RootProps = maps:get([], FlatStruct, #{}),
    NonRoot = maps:remove([], FlatStruct),
    Children = maps:fold(fun flat_struct_to_tree/3, #{}, NonRoot),
    RootProps#{child_nodes => Children}.
%% Folds one flat entry (a non-empty path plus its props) into the nested
%% tree accumulator. The first clause walks down intermediate path
%% components, creating `child_nodes' maps as needed; the second clause
%% attaches the props at the final component.
flat_struct_to_tree([ChildName | [_ | _] = Path], NodeProps, Tree) ->
Child1 = case Tree of
#{ChildName := Child} ->
% intermediate node already exists: recurse into its children
Children = maps:get(child_nodes, Child, #{}),
Children1 = flat_struct_to_tree(
Path, NodeProps, Children),
Child#{child_nodes => Children1};
_ ->
% intermediate node missing: create it with only children
Children1 = flat_struct_to_tree(
Path, NodeProps, #{}),
#{child_nodes => Children1}
end,
Tree#{ChildName => Child1};
flat_struct_to_tree([ChildName], NodeProps, Tree) ->
case Tree of
#{ChildName := Child} ->
% the node was created earlier as a bare intermediate; it must
% hold nothing but `child_nodes', and the incoming props must not
% carry children of their own - merge the two halves together
?assertEqual([child_nodes], maps:keys(Child)),
?assertNot(maps:is_key(child_nodes, NodeProps)),
NodeProps1 = maps:merge(NodeProps, Child),
Tree#{ChildName => NodeProps1};
_ ->
Tree#{ChildName => NodeProps}
end.
%% @doc Prints the tree to stdout with an empty prefix and default options
%% (colors and Unicode line-drawing enabled).
-spec display_tree(khepri_machine:node_props()) -> ok.
display_tree(Tree) ->
display_tree(Tree, "").
%% The second argument may be either an options map or a prefix string;
%% whichever is missing gets its default value.
display_tree(Tree, Options) when is_map(Options) ->
display_tree(Tree, "", Options);
display_tree(Tree, Prefix) when is_list(Prefix) ->
display_tree(Tree, Prefix, #{colors => true,
lines => true}).
%% Recursively prints a node's children; leaf nodes (no `child_nodes' key)
%% match the catch-all clause and print nothing further.
display_tree(#{child_nodes := Children}, Prefix, Options) ->
% sort keys by their binary rendering so atoms and binaries interleave
% consistently; equal renderings fall back to plain term order
Keys = lists:sort(
fun(A, B) ->
ABin = ensure_is_binary(A),
BBin = ensure_is_binary(B),
case ABin == BBin of
true -> A =< B;
false -> ABin =< BBin
end
end, maps:keys(Children)),
display_nodes(Keys, Children, Prefix, Options);
display_tree(_, _, _) ->
ok.
%% Normalizes a tree key to a binary so keys of mixed types can be
%% compared when sorting; atoms are rendered via their textual name.
ensure_is_binary(K) when is_binary(K) ->
    K;
ensure_is_binary(K) when is_atom(K) ->
    list_to_binary(atom_to_list(K)).
%% Prints each child node in order: branch line first, then the node's
%% data (if any), then its own subtree, then the remaining siblings.
display_nodes([Key | Rest], Children, Prefix, Options) ->
% the last sibling gets a closing connector and blank continuation
IsLast = Rest =:= [],
display_node_branch(Key, IsLast, Prefix, Options),
NodeProps = maps:get(Key, Children),
NewPrefix = Prefix ++ prefix(IsLast, Options),
DataPrefix = NewPrefix ++ data_prefix(NodeProps, Options),
case NodeProps of
#{data := Data} -> display_data(Data, DataPrefix, Options);
_ -> ok
end,
display_tree(NodeProps, NewPrefix, Options),
display_nodes(Rest, Children, Prefix, Options);
display_nodes([], _, _, _) ->
ok.
%% Prints the branch connector and node name for a single tree node.
%% ASCII connectors are used when Options has `lines => false', Unicode
%% box-drawing characters otherwise; the last sibling gets a closing
%% connector instead of a tee.
display_node_branch(Key, IsLast, Prefix, Options) ->
    Branch = case {IsLast, Options} of
                 {false, #{lines := false}} -> "+-- ";
                 {true, #{lines := false}} -> "`-- ";
                 {false, _} -> "├── ";
                 {true, _} -> "╰── "
             end,
    io:format("~ts~ts~ts~n", [Prefix, Branch, format_key(Key)]).
%% Renders a node key as chardata: binaries are wrapped in `<<...>>' to
%% distinguish them from atoms, which print bare.
format_key(K) when is_binary(K) ->
    io_lib:format("<<~ts>>", [K]);
format_key(K) when is_atom(K) ->
    io_lib:format("~ts", [K]).
%% Prints a node's payload, one prefixed line per line of the formatted
%% term, followed by a separator line.
display_data(Data, Prefix, Options) ->
Formatted = format_data(Data, Options),
Lines = string:split(Formatted, "\n", all),
case Options of
#{colors := false} ->
lists:foreach(
fun(Line) ->
io:format("~ts~ts~n", [Prefix, Line])
end, Lines);
_ ->
% "\033[38;5;246m" selects a gray 256-color foreground,
% "\033[0m" resets attributes afterwards
lists:foreach(
fun(Line) ->
io:format(
"~ts\033[38;5;246m~ts\033[0m~n",
[Prefix, Line])
end, Lines)
end,
% emit the continuation prefix alone (trailing blanks trimmed) as a
% visual separator below the data block
io:format("~ts~n", [string:trim(Prefix, trailing)]).
%% Continuation prefix placed under a node's connector: a vertical bar
%% while more siblings follow, blank spacing under the last sibling.
%% NOTE(review): the literal whitespace widths align with the connector
%% strings in display_node_branch/4 - keep them in sync.
prefix(false, #{lines := false}) -> "| ";
prefix(false, _Options) -> "│ ";
prefix(true, _Options) -> " ".
%% Extra prefix inserted before a node's data lines: a vertical bar when
%% the node has children below the data, plain spacing otherwise.
data_prefix(#{child_nodes := _}, #{lines := false}) -> "| ";
data_prefix(#{child_nodes := _}, _Options) -> "│ ";
data_prefix(_, #{lines := false}) -> " ";
data_prefix(_, _Options) -> " ".
%% Renders a node's payload for display; ~tp pretty-prints arbitrary
%% terms (Unicode-aware). Options are currently unused.
format_data(Data, _Options) ->
    lists:flatten(io_lib:format("Data: ~tp", [Data])).
%%
%% Copyright 2013, <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @copyright 2013, <NAME>
%% @author <NAME> <<EMAIL>>
%% @doc Cap'n Proto value support.
%%
%% Everything value.
-module(ecapnp_val).
-author("<NAME> <<EMAIL>>").
-export([set/2, set/3, get/2, get/3, size/1]).
-include("ecapnp.hrl").
%% ===================================================================
%% API functions
%% ===================================================================
-spec set(value_type(), number() | boolean()) -> binary().
%% @doc Encode value to Cap'n Proto format.
%% Converts the Erlang term via to_value/2 (handles bool/float/inf/nan),
%% then serialises it with the macro-generated value/2 clauses.
set(ValueType, Value) ->
value(set, {ValueType, to_value(ValueType, Value)}).
-spec set(value_type(), number() | boolean(), binary()) -> binary().
%% @doc Encode value to Cap'n Proto format.
%%
%% The result is XOR'ed with `Default', so a value equal to its default
%% encodes as all-zero bits (X xor X =:= 0).
set(ValueType, Value, Default) when is_bitstring(Default) ->
value(set, {ValueType, to_value(ValueType, Value), Default}).
-spec get(value_type(), binary()) -> number() | boolean().
%% @doc Decode data from Cap'n Proto format.
%% value/2 extracts the raw bits; from_value/2 converts them to an
%% Erlang term (bool/float/inf/nan handling).
get(ValueType, Data) when is_bitstring(Data) ->
from_value(ValueType, value(get, {ValueType, Data})).
-spec get(value_type(), binary(), binary()) -> number() | boolean().
%% @doc Decode data from Cap'n Proto format.
%%
%% The `Data' is XOR'ed with `Default' prior to decoding, undoing the
%% XOR applied by set/3.
get(ValueType, Data, Default)
when is_bitstring(Data),
is_bitstring(Default) ->
Value = value(get, {ValueType, Data, Default}),
from_value(ValueType, Value).
-spec size(value_type()) -> non_neg_integer().
%% @doc Get number of bits for `ValueType' (e.g. 1 for bool, 0 for void).
size(ValueType) ->
value(size, ValueType).
%% ===================================================
%% internal functions
%% ===================================================
%% DEFINE_TYPE expands to the value/2 clauses for one primitive type:
%% value(size, T) -> bit width
%% value(get, {T, Data}) -> decode Data
%% value(get, {T, Data, Default}) -> XOR with Default, then decode
%% value(set, {T, Value}) -> encode Value
%% value(set, {T, Value, Default}) -> encode Value, then XOR with Default
%% The PadSize expression (7 - ((Size + 7) rem 8)) pads sub-byte values
%% such as bool up to a whole byte, since apply_default/2 operates on
%% byte-aligned binaries; the pad bits are stripped again by the `_/bits'
%% tail in the match.
-define(DEFINE_TYPE(ValueType, Size, TypeSpec),
value(size, ValueType) -> Size;
value(get, {ValueType, Data}) ->
<<Value:Size/TypeSpec>> = Data, Value;
value(get, {ValueType, Data, Default}) ->
PadSize = 7 - ((Size + 7) rem 8),
<<Value:Size/TypeSpec, _/bits>>
= apply_default(
<<Data/bits, 0:PadSize/integer>>,
<<Default/bits, 0:PadSize/integer>>),
Value;
value(set, {ValueType, Value}) ->
<<Value:Size/TypeSpec>>;
value(set, {ValueType, Value, Default}) ->
PadSize = 7 - ((Size + 7) rem 8),
<<Data:Size/bits, _/bits>>
= apply_default(
<<Value:Size/TypeSpec, 0:PadSize/integer>>,
<<Default/bits, 0:PadSize/integer>>),
Data
).
%% one macro instantiation per Cap'n Proto primitive type
?DEFINE_TYPE(uint64, 64, integer-unsigned-little);
?DEFINE_TYPE(uint32, 32, integer-unsigned-little);
?DEFINE_TYPE(uint16, 16, integer-unsigned-little);
?DEFINE_TYPE(uint8, 8, integer-unsigned-little);
?DEFINE_TYPE(int64, 64, integer-signed-little);
?DEFINE_TYPE(int32, 32, integer-signed-little);
?DEFINE_TYPE(int16, 16, integer-signed-little);
?DEFINE_TYPE(int8, 8, integer-signed-little);
?DEFINE_TYPE(bool, 1, bits);
?DEFINE_TYPE(float32, 32, bits); %% actual float conversion is done in the
?DEFINE_TYPE(float64, 64, bits); %% to_value/from_value functions.
%% void occupies no bits and always round-trips to the atom `void'
value(size, void) -> 0;
value(_, {void, _}) -> void.
%% Little-endian bit patterns of IEEE-754 single/double infinity and NaN:
%% the exponent is all ones (127:7 plus the leading bit), N is the
%% quiet-NaN mantissa bit and S the sign bit.
-define(INF_NAN_32(N,S), <<0:16,1:1,N:1,0:6,S:1,127:7>>).
-define(INF_NAN_64(N,S), <<0:48,15:4,N:1,0:3,S:1,127:7>>).
%% Converts raw decoded bits to an Erlang term. The special-value clauses
%% must precede the generic float clauses, otherwise inf/nan bit patterns
%% would be mis-decoded as ordinary floats.
from_value(bool, <<0:1>>) -> false;
from_value(bool, <<1:1>>) -> true;
from_value(float32, ?INF_NAN_32(1, _)) -> nan;
from_value(float32, ?INF_NAN_32(0, 0)) -> inf;
from_value(float32, ?INF_NAN_32(0, 1)) -> '-inf';
from_value(float32, <<Value:32/float-little>>) -> Value;
from_value(float64, ?INF_NAN_64(1, _)) -> nan;
from_value(float64, ?INF_NAN_64(0, 0)) -> inf;
from_value(float64, ?INF_NAN_64(0, 1)) -> '-inf';
from_value(float64, <<Value:64/float-little>>) -> Value;
from_value(_, Value) when is_number(Value) -> Value;
from_value(_, void) -> void.
%% Converts an Erlang term to the bits expected by value/2. The atom
%% clauses (true/false/inf/'-inf'/nan) must precede the generic float
%% clauses so the special atoms are not passed to the float encoder.
to_value(bool, true) -> <<1:1>>;
to_value(bool, false) -> <<0:1>>;
to_value(float32, inf) -> ?INF_NAN_32(0, 0);
to_value(float32, '-inf') -> ?INF_NAN_32(0, 1);
to_value(float32, nan) -> ?INF_NAN_32(1, 0);
to_value(float32, Value) -> <<Value:32/float-little>>;
to_value(float64, inf) -> ?INF_NAN_64(0, 0);
to_value(float64, '-inf') -> ?INF_NAN_64(0, 1);
to_value(float64, nan) -> ?INF_NAN_64(1, 0);
to_value(float64, Value) -> <<Value:64/float-little>>;
to_value(_, Value) when is_number(Value) -> Value;
to_value(_, void) -> void.
%% XORs two equal-sized, byte-aligned binaries; used both to apply a
%% default before encoding and to remove it after decoding (XOR is its
%% own inverse).
apply_default(Value, Default) ->
    crypto:exor(Value, Default).
%% coding: utf-8
%--------------------------------------------------------------------
%
% Copyright (c) 2015 <NAME>
%
% This software is released under the MIT license
% http://www.opensource.org/licenses/mit-license.php
%
%--------------------------------------------------------------------
% @doc YAZL '<i>yazzle</i>' (Yet Another Zipper List):
% A mutable list with a current focus position.
%
% A yazl supports operations normally found in
% mutable doubly-linked lists, such as read, update,
% insert, delete and incremental bi-directional traversal.
% Local operations in the neighbourhood of the focus
% are executed in O(1) constant time.
% The yazl also provides global operations and index-based
% random access, typically with an O(n) performance penalty.
%
% The focus may be between two elements of the list, or at one of the ends.
% The descriptions used here are slightly different from a true zipper,
% because the focus is between elements, not at a current element
% [<i>Functional Pearl: The Zipper</i>, <NAME>, 1997].
%
% We describe lists as being ordered left-to-right,
% like western writing, with the excuse that this bias is already
% present in Erlang with the names <i>foldl</i>, <i>foldr</i>.
%
% The position of the current element is either a 1-based positive integer,
% or an end marker: `endl', for the beginning, or `endr' for the end.
% The current value is after (to the right of) the focus, if it exists.
% There is no current value for empty lists,
% or non-empty lists with the focus after the right end.
%
% Functions on single values and lists of values are not overloaded,
% they are given distinct names (<i>e.g.</i>`insert'/`inserts'),
% so that yazls can have lists as regular elements
% (<i>i.e.</i> lists of lists).
%
% == Usage ==
%
% === Create, Import, Export ===
%
% Create yazls from lists using `new', `from_list' and `from_lists'.
% Test if a term appears to be a yazl with `is_yazl'.
% Recover the underlying list with `to_list'.
%
% === Query ===
%
% Test if the yazl is empty with `is_empty'.
% Get the total length of the underlying list using `size'.
% Read the value at the current focus position using `get' or `gets'.
% Find the current focus location using `position',
% which may return a 1-based integer index, or an ending marker.
%
% === Move ===
%
% Movement functions change the focus position,
% but do not change the content of the list.
% Movements return special flags `endl' or `endr'
% if an operation would take the focus
% beyond the beginning or end of the list.
% Client code can implement cyclic behaviour by using
% these flags in conjunction with the `moveto' function.
%
% Move the focus with `move', `moves', `moveto'.
% The `move' function changes focus to the next or previous elements.
% The `moves' function jumps multiple steps relative to the current focus.
% The `moveto' function jump to absolute positions based on
% a specific index, or the beginning or end of the list.
%
% === Search ===
%
% Move the focus by searching with `find', `finds',
% `moveuntil' and `movewhile'.
% The `find' function will search for the next or previous
% occurrence of a value. The `finds' function searches for the
% next or previous occurrence of a sequence of values.
% The `moveuntil' (`movewhile') functions search until a
% boolean predicate function of the current value becomes true (false).
%
% === Update ===
%
% Write the value at the current focus position using `set'.
%
% Add new values on either side of the current focus,
% or at the head or tail of the underlying list, using
% `insert' and `inserts'.
%
% Delete the element at the current focus position using `delete'.
% Delete from the focus to one of the ends using the `truncate'.
%
% Reverse the whole list while keeping the same focus
% using `reverse' - note this is constant time O(1).
%
% === Function Application ===
%
% Apply a <i>map</i> function while leaving the focus unchanged.
%
% == Efficiency ==
%
% The implementation is efficient constant time, O(1):
% for local operations at the focus: <br/>
% `new, from_list/1, move, get, set, insert,
% delete, reverse, truncate'.
%
% Incremental operations will incur a cost proportional
% to the distance from the focus to the target position:<br/>
% `from_list/2, from_lists, gets, sets, moves, moveto, moveuntil,
% find, finds, inserts'.
%
% Global operations will incur a cost proportional to the
% length of the underlying list O(n): <br/>
% `to_list, size, position'.
-module('yazl').
% ===================================================
% API and type exports
-export( [
delete/2,
ending/1,
find/3,
finds/3,
from_list/1, from_list/2,
from_lists/2,
get/2,
gets/3,
insert/3,
inserts/3,
is_yazl/1,
is_empty/1,
map/2,
move/2,
moves/3,
moveto/2,
moveuntil/3,
movewhile/3,
new/0,
opposite/1,
position/2,
reverse/1,
set/3,
sets/3,
size/1,
to_list/1,
to_lists/1,
truncate/2
] ).
-export_types( [
yazl/1,
empty_yazl/0,
direction/0,
ending/0,
index/0,
maybe/1,
position/0,
predicate/1
] ).
% ===================================================
% Types
% A yazl is a tuple of two lists.
-type yazl(A) :: { [A], [A] }.
% An empty yazl is just two empty lists.
-type empty_yazl() :: { [], [] }.
% Directions are to the left (beginning) or to the right (end) of the list.
-type direction() :: ldir | rdir.
% A position before the left (beginning) or past the right (end) of the list.
-type ending() :: endl | endr.
% A 1-based index of a position in the list.
% The value will be between 1 and `size', inclusive.
-type index() :: pos_integer().
% Expand a generic type to include the two endings.
-type maybe(A) :: ending() | A.
% A position for the focus at an index or at the ends.
-type position() :: maybe( index() ).
% A predicate function used for filtering and searching.
-type predicate(A) :: fun( (A)->boolean() ).
% ===================================================
% Type utilities
% ---------------------------------------------------
% @doc Type utility: flip a direction to its opposite.
-spec opposite( direction() ) -> direction().
opposite( ldir ) -> rdir;
opposite( rdir ) -> ldir.
% ---------------------------------------------------
% @doc Type utility: the end marker reached by moving in a direction.
-spec ending( direction() ) -> ending().
ending( ldir ) -> endl;
ending( rdir ) -> endr.
% ==============================================================
% Constructors
% ---------------------------------------------------
% @doc Constructor: the empty yazl - two empty sublists, no focus value.
-spec new() -> empty_yazl().
new() -> {[], []}.
% ---------------------------------------------------
% @doc Constructor: wrap a list in a yazl with the focus before the first
% element (the same as from_list/2 with position `endl').
% The empty list yields the empty yazl.
-spec from_list( [A] ) -> yazl(A).
from_list(List) when is_list(List) -> {[], List}.
% ---------------------------------------------------
% @doc Constructor: create a yazl focused at a given position in a list.
% Pass `endl' for the beginning, `endr' for the end, or a 1-based
% index I (focus before the I-th element; for a non-empty list,
% `endl' and 1 are equivalent). Passing an integer outside the range
% 1..length(List) is an error, so 1 with the empty list is an error.
-spec from_list( position(), [A] ) -> yazl(A).
from_list(endl, List) when is_list(List) ->
    {[], List};
from_list(endr, List) when is_list(List) ->
    % the left sublist is stored reversed
    {lists:reverse(List), []};
from_list(I, List) when is_list(List), is_integer(I),
                        I >= 1, I =< length(List) ->
    % split keeps the first I-1 elements on the left of the focus
    {Before, After} = lists:split(I - 1, List),
    {lists:reverse(Before), After}.
% ---------------------------------------------------
% @doc Constructor: create a yazl whose underlying list is L ++ R, with
% the focus between the two sublists (after the last element of L,
% before the first element of R). Two empty lists give the empty yazl.
-spec from_lists( [A], [A] ) -> yazl(A).
from_lists(L, R) when is_list(L), is_list(R) ->
    % internally the left part is kept reversed for O(1) focus moves
    {lists:reverse(L), R}.
% ==============================================================
% Queries
% ---------------------------------------------------
% @doc Test if a term appears to be a yazl (a 2-tuple of lists).
-spec is_yazl( term() ) -> boolean().
is_yazl({L, R}) when is_list(L), is_list(R) -> true;
is_yazl(_) -> false.
% ---------------------------------------------------
% @doc Test if a yazl holds no elements at all.
-spec is_empty( yazl(_) ) -> boolean().
is_empty({[], []}) -> true;
is_empty({_, _}) -> false.
% ---------------------------------------------------
% @doc Get the length of the underlying list.
% If the yazl is empty, the size is 0.
% The performance is O(n).
% NOTE(review): size/1 has the same name/arity as the auto-imported BIF
% erlang:size/1, which is presumably why internal callers (see moveto/2)
% use the module-qualified call yazl:size/1.
-spec size( yazl(_) ) -> non_neg_integer().
size( {L,R} ) -> length(L) + length(R).
% ---------------------------------------------------
% @doc Get the 1-based index of the element to the right or left of the
% current focus. Returns `endl' when nothing lies to the left (empty
% yazl or focus at the beginning) and `endr' when nothing lies to the
% right. Cost is proportional to the focus position: O(1) at `endl',
% O(n) at the far end.
-spec position( direction(), yazl(_) ) -> position().
position(ldir, {[], _}) -> endl;
position(ldir, {L, _}) -> length(L);
position(rdir, {_, []}) -> endr;
position(rdir, {L, _}) -> length(L) + 1.
% ---------------------------------------------------
% @doc Recover the underlying list; the empty yazl yields the empty list.
% Cost is proportional to the focus position: O(1) at `endl',
% O(n) at `endr'.
-spec to_list( yazl(A) ) -> [A].
to_list({[], []}) -> [];
to_list({L, R}) ->
    % left part is stored reversed; reverse it back onto the right part
    lists:reverse(L, R).
% ---------------------------------------------------
% @doc Recover the sublists before and after the focus, in list order;
% their concatenation is the underlying list. The empty yazl yields
% two empty lists. Cost is proportional to the focus position:
% O(1) at `endl', O(n) at `endr'.
-spec to_lists( yazl(A) ) -> { [A], [A] }.
to_lists({[], []} = Empty) -> Empty;
to_lists({L, R}) -> {lists:reverse(L), R}.
% ---------------------------------------------------
% @doc Get the value of the element to the right or left of the focus,
% or `endr'/`endl' when the focus is at that end of the list.
% Fast constant time O(1).
-spec get( direction(), yazl(A) ) -> maybe(A).
get(ldir, {[], _}) -> endl;
get(ldir, {[H | _], _}) -> H;
get(rdir, {_, []}) -> endr;
get(rdir, {_, [H | _]}) -> H.
% ---------------------------------------------------
% @doc Get the values of elements to the right or
% left of the current focus.
% Getting zero elements returns the empty list.
% Getting a negative number of elements,
% returns elements from the other direction.
% If the operation would overrun the begining or end
% of the list, return `endr' or `endl'.
% Performance is proportional to the length of the requested sublist.
-spec gets( direction(), integer(), yazl(A) ) -> maybe([A]).
% zero elements is always the empty list, regardless of direction
gets( _, 0, _ ) -> [];
% a negative count reads in the opposite direction instead
gets( Dir, N, Z ) when (N < 0) -> gets( opposite(Dir), -N, Z );
% clause order matters: overrun checks must precede the extraction below
gets( rdir, N, {_,R} ) when (N > length(R)) -> endr;
gets( ldir, N, {L,_} ) when (N > length(L)) -> endl;
gets( Dir, 1, Z ) -> [get( Dir, Z )];
gets( rdir, N, {_,R} ) -> lists:sublist(R,N);
% the left sublist is stored reversed, so restore list order on return
gets( ldir, N, {L,_} ) -> lists:reverse( lists:sublist(L,N) ).
% ==============================================================
% Move focus
% ---------------------------------------------------
% @doc Move the focus one step to the right or left, returning `endr' or
% `endl' when the move would overrun that end of the list.
% A traditional `next' is `move(rdir, ...)'; `prev' is
% `move(ldir, ...)'. Fast constant time O(1).
-spec move( direction(), yazl(A) ) -> maybe(yazl(A)).
move(ldir, {[], _}) -> endl;
move(ldir, {[H | T], R}) -> {T, [H | R]};
move(rdir, {_, []}) -> endr;
move(rdir, {L, [H | T]}) -> {[H | L], T}.
% ---------------------------------------------------
% @doc Move the focus multiple steps to the right or left.
% If the yazl is empty, or the steps would
% overrun the beginning or end of the list,
% then return `endr' or `endl'.
%
% Moving a zero offset leaves the yazl unchanged.
%
% Negative offsets are converted to the equivalent positive
% offset in the other direction, which may return an
% unexpected opposite end value,
% e.g. `moves(rdir,-2,Z)' may return `endl'.
-spec moves( direction(), integer(), yazl(A) ) -> maybe(yazl(A)).
moves( _, 0, Z ) -> Z;
moves( Dir, 1, Z ) -> move( Dir, Z );
% negative offsets flip to the opposite direction
moves( Dir, I, Z ) when (I < 0) -> moves( opposite(Dir), -I, Z );
% overrun checks must precede the splitting clauses below
moves( rdir, I, {_,R} ) when (I > length(R)) -> endr;
moves( ldir, I, {L,_} ) when (I > length(L)) -> endl;
% transfer I elements across the focus; the left sublist stays reversed
moves( rdir, I, {L,R} ) ->
{ RH, RT } = lists:split( I, R ),
{ lists:reverse(RH,L), RT };
moves( ldir, I, {L,R} ) ->
{ LH, LT } = lists:split( I, L ),
{ LT, lists:reverse(LH,R) }.
% ---------------------------------------------------
% @doc Move to the beginning or end of the list,
% or an absolute index position within the list.
% The position is `endr' or `endl',
% or a 1-based integer signifying a index,
% <i>i.e.</i> focus before the given index.
% If the index offset would overrun the beginning
% or end of the list, then return `endr' or `endl'.
-spec moveto( position(), yazl(A) ) -> maybe(yazl(A)).
% already at the requested end: no work to do
moveto( endr, Z={ _,[]} ) -> Z;
moveto( endl, Z={[],_ } ) -> Z;
% shift the whole opposite sublist across the focus
moveto( endr, { L,R } ) -> { lists:reverse(R,L), [] };
moveto( endl, { L,R } ) -> { [], lists:reverse(L,R) };
moveto( I, Z={ _,_ } ) when is_integer(I) ->
% module-qualified call: size/1 clashes with the auto-imported
% BIF erlang:size/1
Len = yazl:size( Z ),
% current right-index as an integer; endr maps to Len+1
IR = case position(rdir,Z) of
endr -> Len+1;
IndexR -> IndexR
end,
% out-of-range indices report the overrun end, otherwise move the
% difference relative to the current position
case I of
I when (I < 1 ) -> endl;
I when (I > Len) -> endr;
I when (I == IR ) -> Z;
I when (I < IR ) -> moves( ldir, IR-I, Z );
I when (I > IR ) -> moves( rdir, I-IR, Z )
end.
% ---------------------------------------------------
% @doc Search for the first occurrence of a value.
% If the search is successful, return a yazl that
% focuses before (right search) or after (left search)
% the found element.
% If the search does not find the value,
% then it returns `endr' or `endl'.
-spec find( direction(), A, yazl(A) ) -> maybe(yazl(A)).
% end of list reached without a match
find( rdir, _, { _,[]} ) -> endr;
find( ldir, _, {[],_ } ) -> endl;
% the adjacent element matches: the focus is already where we want it
find( rdir, Val, Z={ _,[Val|_] } ) -> Z;
find( ldir, Val, Z={ [Val|_],_ } ) -> Z;
% otherwise step once and keep looking; move/2 cannot return an end
% marker here because the end clauses above already caught those cases
find( Dir, Val, Z ) -> find( Dir, Val, move(Dir,Z) ).
% ---------------------------------------------------
% @doc Search for the first sequence of values
% that match a given non-empty list.
% If the search is successful, return a yazl that
% focuses before (right search) or after (left search)
% the found list of elements.
% If the search does not find the value,
% then it returns `endr' or `endl'.
%
% A search for an empty list is a no-op
% that returns the original yazl
% (following the convention of `lists:prefix'
% that the empty list is a prefix of all lists).
-spec finds( direction(), [A], yazl(A) ) -> maybe(yazl(A)).
finds( _, [], Z ) -> Z;
finds( rdir, Vs=[V|VT], Z ) ->
% find the next occurrence of the first value, then test whether the
% remaining values follow it; if not, step past it and retry
case find(rdir,V,Z) of
endr -> endr;
Y={ _, [V|RT] } ->
case lists:prefix(VT,RT) of
true -> Y;
false -> finds( rdir, Vs, move(rdir,Y) )
end
end;
finds( ldir, Vs, Z ) ->
% a left search is a right search on the mirrored yazl with the
% values reversed; mirror the result (or the end marker) back
case finds( rdir, lists:reverse(Vs), reverse(Z) ) of
endr -> endl;
Y -> reverse( Y )
end.
% ---------------------------------------------------
% @doc Search for the first occurrence of a value
% that satisfies a boolean predicate function.
% If the search is successful, it returns a yazl
% that focuses before the found element.
% If the search does not find the value,
% then it returns `endr' or `endl'.
%
% Note this is equivalent to `movewhile'
% using the negation of the predicate.
-spec moveuntil( direction(), predicate(A), yazl(A) ) -> maybe(yazl(A)).
% end of list reached without the predicate turning true
moveuntil( rdir, _, { _,[]} ) -> endr;
moveuntil( ldir, _, {[],_ } ) -> endl;
% test the adjacent element; stop here if it satisfies the predicate,
% otherwise step once and continue
moveuntil( rdir, Pred, Z={_,[RH|_]} ) ->
case Pred(RH) of
true -> Z;
false -> moveuntil( rdir, Pred, move(rdir,Z) )
end;
moveuntil( ldir, Pred, Z={[LH|_],_} ) ->
case Pred(LH) of
true -> Z;
false -> moveuntil( ldir, Pred, move(ldir,Z) )
end.
% ---------------------------------------------------
% @doc Move the focus while elements satisfy the predicate, stopping
% before the first element that does not. Returns `endr' or `endl'
% if every element to that end satisfies the predicate.
% Equivalent to `moveuntil' with the negated predicate.
-spec movewhile( direction(), predicate(A), yazl(A) ) -> maybe(yazl(A)).
movewhile(Dir, Pred, Z) ->
    Negated = fun(A) -> not Pred(A) end,
    moveuntil(Dir, Negated, Z).
% ==============================================================
% Update
% ---------------------------------------------------
% @doc Overwrite the element to the right or left of the focus with a
% new value, or return `endr'/`endl' if the focus is at that end.
% Fast constant time O(1).
-spec set( direction(), A, yazl(A) ) -> maybe(yazl(A)).
set(ldir, _, {[], _}) -> endl;
set(ldir, V, {[_ | T], R}) -> {[V | T], R};
set(rdir, _, {_, []}) -> endr;
set(rdir, V, {L, [_ | T]}) -> {L, [V | T]}.
% ---------------------------------------------------
% @doc Overwrite a run of elements to the right or left of the focus
% with the given values (the focus itself does not move).
% Setting the empty list is a no-op that returns the original yazl.
% If the values would overrun the beginning or end of the list,
% return `endl' or `endr'.
% Performance is proportional to the length of the given sublist.
-spec sets( direction(), [A], yazl(A) ) -> maybe(yazl(A)).
% the empty-value no-op applies to BOTH directions (the previous clause
% only matched ldir, sending the rdir case through the general path)
sets( _Dir, [], Z ) -> Z;
% overrun checks must precede the overwriting clauses below
sets( rdir, Vs, {_,R} ) when (length(Vs) > length(R)) -> endr;
sets( ldir, Vs, {L,_} ) when (length(Vs) > length(L)) -> endl;
% right: the new values replace the head of the right sublist
sets( rdir, Vs, {L,R} ) -> { L, Vs ++ lists:nthtail(length(Vs), R) };
% left: the left sublist is stored reversed, so reverse the values first
sets( ldir, Vs, {L,R} ) -> { lists:reverse(Vs) ++ lists:nthtail(length(Vs), L), R }.
% ---------------------------------------------------
% @doc Insert a value next to the focus (`ldir'/`rdir') or at the head
% (`endl', prepend) or tail (`endr', append) of the whole list.
% Left vs right only affects where the focus ends up relative to the
% inserted value, not the final list content.
% Constant time O(1) at the focus; O(n) at the far end.
-spec insert( direction() | ending(), A, yazl(A) ) -> yazl(A).
insert(ldir, V, {L, R}) -> {[V | L], R};
insert(rdir, V, {L, R}) -> {L, [V | R]};
% the left sublist is reversed, so the list head is its last element
insert(endl, V, {L, R}) -> {L ++ [V], R};
insert(endr, V, {L, R}) -> {L, R ++ [V]}.
% ---------------------------------------------------
% @doc Insert a sequence of values next to the focus (`ldir'/`rdir') or
% at the head (`endl') or tail (`endr') of the whole list.
% Left vs right only affects where the focus ends up relative to the
% inserted sequence, not the final list content.
% Inserting the empty sequence leaves the yazl unchanged.
-spec inserts( direction() | ending(), [A], yazl(A) ) -> yazl(A).
inserts(_, [], {_, _} = Z) -> Z;
inserts(rdir, Vs, {L, R}) -> {L, Vs ++ R};
% reverse onto the left sublist, which is stored in reverse order
inserts(ldir, Vs, {L, R}) -> {lists:reverse(Vs, L), R};
inserts(endr, Vs, {L, R}) -> {L, R ++ Vs};
inserts(endl, Vs, {L, R}) -> {L ++ lists:reverse(Vs), R}.
% ---------------------------------------------------
% @doc Remove the element immediately to the right or left of the
% focus.
% Returns `endr' (or `endl') when there is nothing to delete on
% that side, including when the yazl is empty.
% This is fast constant time O(1).
-spec delete( direction(), yazl(A) ) -> maybe(yazl(A)).
delete( rdir, {Left, Right} ) ->
    case Right of
        [] -> endr;
        [_Dropped | Rest] -> {Left, Rest}
    end;
delete( ldir, {Left, Right} ) ->
    case Left of
        [] -> endl;
        [_Dropped | Rest] -> {Rest, Right}
    end.
% ---------------------------------------------------
% @doc Discard everything to the right (`rdir') or left (`ldir')
% of the focus.
% An empty yazl stays empty. After truncating right the focus sits
% after the last remaining element; after truncating left it sits
% before the first remaining element.
% This is fast constant time O(1).
-spec truncate( direction(), yazl(A) ) -> yazl(A).
truncate( rdir, {Left, _Discarded} ) -> {Left, []};
truncate( ldir, {_Discarded, Right} ) -> {[], Right}.
% ---------------------------------------------------
% @doc Reverse the underlying list while keeping the focus at the
% same boundary: the elements that were to the right are now to
% the left, and vice versa.
% An empty yazl is returned unchanged.
% This is fast constant time O(1),
% compared to O(n) for an ordinary list reverse.
-spec reverse( yazl(A) ) -> yazl(A).
reverse( {Left, Right} ) -> {Right, Left}.
% ==============================================================
% Partial Function Application
% ---------------------------------------------------
% @doc Apply a function to every element, leaving the focus
% position unchanged. An empty yazl maps to an empty yazl.
-spec map( fun((A)->B), yazl(A) ) -> yazl(B).
map( Fun, {Left, Right} ) ->
    { [Fun(X) || X <- Left], [Fun(X) || X <- Right] }.
%==================================================================== | src/yazl.erl | 0.642545 | 0.53127 | yazl.erl | starcoder |
-module(day03).
-export([main/0]).
% Entry point: load the tree grid from "input.txt", then print the
% answers for part one and part two.
main() ->
Map = load(),
part1(Map),
part2(Map).
% Part one: determine how many trees are passed at slope {3,1}
% (3 right, 1 down per step) and print the count.
part1(Map) ->
Ans = trees_for_slope(3, 1, Map),
io:format("~p~n", [Ans]).
% Part two: the product of the number of trees on a fine selection of slopes.
part2(Map) ->
    Slopes = [{1,1}, {3,1}, {5,1}, {7,1}, {1,2}],
    % Fold the per-slope tree counts directly into a running product.
    Product = lists:foldl(
        fun({DX, DY}, Acc) -> Acc * trees_for_slope(DX, DY, Map) end,
        1, Slopes),
    io:format("~p~n", [Product]).
% Determines how many trees are passed on the given slope through the given map.
trees_for_slope(DX, DY, Map) ->
% Arity-6 worker carries the running count and current {X, Y}.
trees_for_slope(0, 0, 0, DX, DY, Map).
% Base case: return the accumulated count.
% The map shrinks every step (see the filter below), so an empty
% map means we have moved past the bottom of the grid.
trees_for_slope(Count, _, _, _, _, Map) when map_size(Map) == 0 ->
Count;
% Recursive case: add one if we've passed a tree here, then recurse to the
% next step on our course.
trees_for_slope(Count, X, Y, DX, DY, Map) ->
NewCount = case tree_at(X, Y, Map) of
true -> Count + 1;
false -> Count
end,
NewX = X+DX,
NewY = Y+DY,
% Drop all rows above the next Y; this both skips rows the slope
% jumps over (DY > 1) and guarantees termination via map_size/1.
NewMap = maps:filter(fun(K, _) -> K >= NewY end, Map),
trees_for_slope(NewCount, NewX, NewY, DX, DY, NewMap).
% Determines if there is a tree at position {X, Y} in the given map.
% The pattern repeats horizontally, so X wraps modulo the row width.
tree_at(X, Y, Grid) ->
    tree_at(X, maps:get(Y, Grid)).
tree_at(X, Row) ->
    Width = maps:size(Row),
    maps:get(X rem Width, Row).
% Loads in the input from a file.
% Uses file:read_file/1 so the whole file is read regardless of size
% (the previous single file:read/2 call silently capped the input at
% 1 MiB) and no file descriptor is left open on return.
load() ->
    {ok, Binary} = file:read_file("input.txt"),
    Lines = string:split(binary_to_list(Binary), "\n", all),
    parse(#{}, 0, Lines).
% Parses a list of lines into a map from integer Y coordinate to parsed line.
% NOTE(review): a trailing newline in the input yields a final empty
% line, which becomes an empty row map; tree_at/2 would crash on it
% (X rem 0). Presumably the slope never lands on that row — confirm
% against the input format.
parse(Map, _, []) ->
Map;
parse(Map, Y, [Line | Rest]) ->
parse(Map#{Y => parse_line(#{}, 0, Line)}, Y+1, Rest).
% Parses a line of input into a map from integer X coordinate to boolean value
% indicating the presence of a tree ($# marks a tree).
parse_line(Acc, _X, []) ->
    Acc;
parse_line(Acc, X, [Char | Chars]) ->
    parse_line(maps:put(X, Char =:= $#, Acc), X + 1, Chars).
-module(day12).
-export([solve_part1/1, solve_part2/1]).
% for tests
-export([
parse/1,
cruise/3, wp_cruise/3,
turn/2, rotate/2
]).
%%% solution
% Part 1: steer the ship itself with the instructions, starting at
% the origin heading east (0 degrees). The answer is the Manhattan
% distance of the final position from the start.
solve_part1(Input) ->
{{Lon, Lat}, _Heading} = cruise({0, 0}, 0, parse(Input)),
abs(Lon) + abs(Lat).
% Part 2: move a waypoint (starting 10 east, 1 north, relative to
% the ship) and sail the ship toward it. The answer is the Manhattan
% distance of the final ship position from the start.
solve_part2(Input) ->
{{Lon, Lat}, _WpRelPos} = wp_cruise({0, 0}, {10, 1}, parse(Input)),
abs(Lon) + abs(Lat).
%%% Parsing
% Split the raw input on newlines and turn each line like "F10"
% into an {Action, Amount} pair, e.g. {forward, 10}.
parse(Instructions) ->
[{instruction_to_atom(H), list_to_integer(T)} ||
[H|T] <- string:lexemes(Instructions, "\n")].
% Map a single instruction character to its action atom.
% Crashes on any other character.
instruction_to_atom($N) -> north;
instruction_to_atom($S) -> south;
instruction_to_atom($E) -> east;
instruction_to_atom($W) -> west;
instruction_to_atom($L) -> left;
instruction_to_atom($R) -> right;
instruction_to_atom($F) -> forward.
%%% Movement
%% @doc Cruise using instructions for the ship (part 1).
%% Folds advance/3 over the instruction list, threading the
%% {Position, Heading} pair; returns the final pair.
cruise(InitialPosition, InitialHeading, Instructions) ->
lists:foldl(fun(Instruction, {Position, Heading}) ->
advance(Position, Heading, Instruction) end,
{InitialPosition, InitialHeading},
Instructions).
%% @doc Cruise using instructions for the waypoint (part 2).
%% Folds wp_advance/3 over the instruction list, threading the
%% {ShipPosition, WaypointRelativePosition} pair; returns the
%% final pair.
wp_cruise(InitialPosition, InitialWaypointRelativePosition, Instructions) ->
lists:foldl(fun(Instruction, {Position, WaypointRelativePosition}) ->
wp_advance(Position, WaypointRelativePosition,
Instruction) end,
{InitialPosition, InitialWaypointRelativePosition},
Instructions).
%% @doc Advance the ship position according to a single instruction.
%% Positions are {East, North}; headings are degrees with 0 = east
%% and positive angles counter-clockwise (see heading_to_direction/1).
% turn: left adds degrees, right subtracts; position is unchanged.
advance(Position, Heading, {left, Degrees}) ->
{Position, turn(Heading, Degrees)};
advance(Position, Heading, {right, Degrees}) ->
{Position, turn(Heading, -Degrees)};
% absolute shift: move along a compass axis without turning.
advance({East, North}, Heading, {north, Units}) ->
{{East, North + Units}, Heading};
advance({East, North}, Heading, {south, Units}) ->
{{East, North - Units}, Heading};
advance({East, North}, Heading, {east, Units}) ->
{{East + Units, North}, Heading};
advance({East, North}, Heading, {west, Units}) ->
{{East - Units, North}, Heading};
% relative shift: forward is rewritten as an absolute move in the
% current heading's direction.
advance(Position, Heading, {forward, Units}) ->
advance(Position, Heading, {heading_to_direction(Heading), Units}).
%% @doc Advance the ship/waypoint position according to a single instruction.
%% The waypoint position is always relative to the ship.
% turn waypoint: rotate the waypoint around the ship.
wp_advance(Position, WaypointRelativePosition, {left, Degrees}) ->
{Position, rotate(WaypointRelativePosition, Degrees)};
wp_advance(Position, WaypointRelativePosition, {right, Degrees}) ->
{Position, rotate(WaypointRelativePosition, -Degrees)};
% waypoint shift: compass moves translate the waypoint, not the ship.
wp_advance(Position, {WpX, WpY}, {north, Units}) ->
{Position, {WpX, WpY + Units}};
wp_advance(Position, {WpX, WpY}, {south, Units}) ->
{Position, {WpX, WpY - Units}};
wp_advance(Position, {WpX, WpY}, {east, Units}) ->
{Position, {WpX + Units, WpY}};
wp_advance(Position, {WpX, WpY}, {west, Units}) ->
{Position, {WpX - Units, WpY}};
% ship shift: jump the ship Times waypoint-vectors forward;
% the waypoint keeps its relative offset.
wp_advance({Lon, Lat}, {WpX, WpY}, {forward, Times}) ->
NewShipPosition = {Lon + Times * WpX, Lat + Times * WpY},
{NewShipPosition, {WpX, WpY}}.
%%% Helpers
%% @doc Turn a heading by Angle degrees; the result is normalised
%% into the range [0, 360). Negative angles turn the other way.
turn(Heading, Angle) ->
    Sum = Heading + Angle,
    case Sum rem 360 of
        Degrees when Degrees < 0 -> Degrees + 360;
        Degrees -> Degrees
    end.
% Convert a compass heading in degrees (0 = east, counter-clockwise
% positive) to its direction atom. Only the four cardinal headings
% are valid; anything else crashes.
heading_to_direction(0) -> east;
heading_to_direction(90) -> north;
heading_to_direction(180) -> west;
heading_to_direction(270) -> south.
%% @doc Rotate vector {X, Y} around the origin {0, 0} by Angle
%% degrees (counter-clockwise for positive angles), rounding the
%% result back to integer coordinates.
rotate({X, Y}, Angle) ->
    Radians = (Angle / 180) * math:pi(),
    Sin = math:sin(Radians),
    Cos = math:cos(Radians),
    {round(X * Cos - Y * Sin), round(X * Sin + Y * Cos)}.
% Convert an angle in degrees to radians.
to_radians(Degrees) ->
(Degrees / 180) * math:pi().
-module(buffer).
-compile([export_all, nowarn_export_all]).
-ifdef(TEST).
-include_lib("proper/include/proper.hrl").
-endif.
% Experiments in building a text buffer data structure
% for use in the elsa language server.
%
% It's based on specialized finger trees[1], with
% inspiration from the Yi editor's rope data structure[2].
%
% [1]: http://www.staff.city.ac.uk/~ross/papers/FingerTree.html
% [2]: https://github.com/yi-editor/yi-rope/blob/master/src/Yi/Rope.hs
-define(MERGE_CHUNK_SIZE, 16).
-define(CHUNK_SIZE, 3).
% Build a buffer (finger tree) from a UTF8-encoded binary by
% pushing fixed-size chunks onto the right end of an empty tree.
from_binary(Binary) ->
from_binary(empty, Binary).
% Accumulating worker: peel ?CHUNK_SIZE-byte chunks (take_chunk
% never splits a UTF8 character) and push each onto tree Tr.
from_binary(Tr, <<>>) -> Tr;
from_binary(Tr, Binary) ->
{Chunk, Rest} = take_chunk(?CHUNK_SIZE, Binary),
from_binary(push_r(Tr, Chunk), Rest).
% Take a chunk of a specified byte size from the front of a binary.
% If cutting at exactly Size bytes would split a multi-byte UTF-8
% character, the cut point is moved forward one byte at a time until
% the remainder starts on a character boundary, so the returned
% chunk may be slightly larger than requested.
% Uses byte_size/1 instead of the deprecated-style size/1, and no
% longer names the whole input binary "Chunk".
%
take_chunk(Size, Bin) when byte_size(Bin) =< Size ->
    {Bin, <<>>};
take_chunk(Size, Bin) ->
    case Bin of
        % Accept the cut only if the remainder begins with a
        % complete UTF-8 character.
        <<_:Size/binary, _/utf8, _/binary>> ->
            <<Chunk:Size/binary, Rest/binary>> = Bin,
            {Chunk, Rest};
        _ ->
            take_chunk(Size + 1, Bin)
    end.
% Measures a single chunk of UTF8 encoded text.
%
% The measurement of a buffer is `{Line, Col, FirstIsLF, LastIsCR}`.
% `Line` is the 0 based line offset at the end, or viewed another way
% the number of line breaks.
% `Col` is the 0 based character offset at the end of the last line.
% Taken together they represent the (Line, Col) position of a cursor
% placed at the end of the buffer.
%
% `Col` follows the Language Server Protocol (LSP) specification in that
% it counts Unicode code points at U+10000 or higher as 2 characters.
% Recognized line breaks are "\n" (LF), "\r" (CR), and "\r\n" (CRLF).
%
% Adding the measurements of two buffers (cf. `size_add`) should yield
% the same answer as concatenating the buffers and measuring the result.
% In order to preserve a correct `Line` count, we need to know if
% concatenating the buffers would result in a CR and LF being joined
% to form a CRLF. This information is embedded in the two bools
% `FirstIsLF` which is true if the buffer starts with LF, and
% `LastIsCR` which is true if the buffer ends with CR.
%
% Measure a single UTF8 chunk (the large comment above describes the
% {Line, Col, FirstIsLF, LastIsCR} measurement). The empty chunk
% measures as `undefined', the identity element for size_add/2.
measure_chunk(<<>>) -> undefined;
measure_chunk(Chunk) ->
{Line, Col} = line_col(0, 0, Chunk),
% Sized-segment lengths for matching the chunk's ends: SizeSub1
% skips everything but the last byte; SizeSub2 skips the middle
% when the first and last byte are inspected together.
SizeSub1 = byte_size(Chunk) - 1,
SizeSub2 = byte_size(Chunk) - 2,
{FirstIsLF, LastIsCR} =
case Chunk of
<<"\n", _:SizeSub2/binary, "\r">> -> {true, true};
<<"\n", _/binary>> -> {true, false};
<<_:SizeSub1/binary, "\r">> -> {false, true};
_ -> {false, false}
end,
{Line, Col, FirstIsLF, LastIsCR}.
% Walk a UTF-8 chunk, tracking the 0-based {Line, Col} cursor
% position reached at its end. LF, CRLF and CR each end a line;
% following the LSP convention, code points at or above U+10000
% count as two columns. Throws {error, invalid_utf8} on input
% that is not valid UTF-8.
line_col(Line, Col, <<>>) ->
    {Line, Col};
% CRLF must be tried before bare CR so it counts as one break.
line_col(Line, _Col, <<"\r\n"/utf8, Rest/binary>>) ->
    line_col(Line + 1, 0, Rest);
line_col(Line, _Col, <<"\r"/utf8, Rest/binary>>) ->
    line_col(Line + 1, 0, Rest);
line_col(Line, _Col, <<"\n"/utf8, Rest/binary>>) ->
    line_col(Line + 1, 0, Rest);
line_col(Line, Col, <<CodePoint/utf8, Rest/binary>>) when CodePoint >= 16#10000 ->
    % The LSP spec requires U+10000 and above to be counted as 2 characters.
    line_col(Line, Col + 2, Rest);
line_col(Line, Col, <<_/utf8, Rest/binary>>) ->
    line_col(Line, Col + 1, Rest);
line_col(_Line, _Col, _Bad) ->
    throw({error, invalid_utf8}).
% Combine two measurements (monoid append with identity `undefined').
% The result equals measuring the concatenation of the two chunks.
size_add(undefined, Size) -> Size;
size_add(Size, undefined) -> Size;
% Right side spans no line break: columns accumulate on the same
% line. A chunk with zero breaks contains neither LF nor CR, so
% the combined LastIsCR is false.
size_add({Line, Col1, LF, _}, {0, Col2, _, _}) ->
{Line, Col1 + Col2, LF, false};
% Left ends in CR and right starts with LF: at the seam they join
% into a single CRLF, so one break was double-counted.
size_add({Line1, _, LF, true}, {Line2, Col, true, CR}) ->
% Deduct 1 line for the merged CRLF
{Line1 + Line2 - 1, Col, LF, CR};
% Right side contains at least one break: its final column wins.
size_add({Line1, _, LF, _}, {Line2, Col, _, CR}) ->
{Line1 + Line2, Col, LF, CR}.
% Measure any tree component: a raw chunk, or a 2,3-node whose
% size is cached in its first element.
measure(Chunk) when is_binary(Chunk) -> measure_chunk(Chunk);
measure({Size, _, _}) -> Size;
measure({Size, _, _, _}) -> Size.
% Measure a digit (a 1..4-tuple of components) by summing its
% elements' measurements left to right.
measure_digit({A}) -> measure(A);
measure_digit({A, B}) -> size_add(measure(A), measure(B));
measure_digit({A, B, C}) -> size_add(measure(A), size_add(measure(B), measure(C)));
measure_digit({A, B, C, D}) -> size_add(measure(A), size_add(measure(B), size_add(measure(C), measure(D)))).
% Measure a whole finger tree; deep trees carry a cached size.
measure_tree(empty) -> undefined;
measure_tree({deep, Size, _, _, _}) -> Size;
measure_tree(Single) -> measure(Single).
% Smart constructors for 2,3-nodes.
% Caching the combined size of their contents.
%
node2(A, B) ->
Size = size_add(measure(A), measure(B)),
{Size, A, B}.
node3(A, B, C) ->
Size = size_add(measure(A), size_add(measure(B), measure(C))),
{Size, A, B, C}.
% Convert a node to a digit.
% Dropping the cached size leaves a 2- or 3-tuple of children,
% which is exactly the digit representation.
node_to_digit(Node) -> erlang:delete_element(1, Node).
% Smart constructor for deep trees.
% Caches the total size of the prefix digit, middle subtree and
% suffix digit (the arguments are positionally prefix/middle/suffix
% despite the parameter names).
deep(Sf, M, Pr) ->
Size = size_add(measure_digit(Sf), size_add(measure_tree(M), measure_digit(Pr))),
{deep, Size, Sf, M, Pr}.
% Push from the left
% Prepends element A to the tree. Adjacent small binary chunks are
% merged (up to ?MERGE_CHUNK_SIZE bytes) to keep the tree shallow.
push_l(A, empty) -> A;
% Merge A into the first chunk of the prefix digit when both are
% binaries and the combined size stays small. If the head of the
% prefix is a node tuple, byte_size/1 fails in the guard and this
% clause simply does not match.
push_l(A, {deep, _Sz, Pr, M, Sf})
when byte_size(A) + byte_size(element(1, Pr)) =< ?MERGE_CHUNK_SIZE ->
deep(setelement(1, Pr, <<A/binary, (element(1, Pr))/binary>>), M, Sf);
% Full 4-element prefix: push three of its elements down into the
% middle tree as a 3-node to make room.
push_l(A, {deep, _Sz, {B, C, D, E}, M, Sf}) ->
deep({A, B}, push_l(node3(C, D, E), M), Sf);
push_l(A, {deep, _Sz, Pr, M, Sf}) ->
deep(erlang:insert_element(1, Pr, A), M, Sf);
% Two loose chunks that fit within the merge limit: concatenate.
push_l(A, B) when byte_size(A) + byte_size(B) =< ?MERGE_CHUNK_SIZE ->
<<A/binary, B/binary>>;
% Otherwise promote the pair to a deep tree.
push_l(A, B) ->
deep({A}, empty, {B}).
% Push from the right
% Mirror image of push_l/2: appends element A to the tree, merging
% small adjacent binary chunks up to ?MERGE_CHUNK_SIZE bytes.
push_r(empty, A) -> A;
% Merge A into the last chunk of the suffix digit when both are
% binaries and the combined size stays small (guard fails silently
% for node tuples, falling through to the clauses below).
push_r({deep, _Sz, Pr, M, Sf}, A)
when byte_size(A) + byte_size(element(tuple_size(Sf), Sf)) =< ?MERGE_CHUNK_SIZE ->
deep(Pr, M, setelement(tuple_size(Sf), Sf, <<(element(tuple_size(Sf), Sf))/binary, A/binary>>));
% Full 4-element suffix: push three elements down into the middle
% tree as a 3-node to make room.
push_r({deep, _Sz, Pr, M, {A, B, C, D}}, E) ->
deep(Pr, push_r(M, node3(A, B, C)), {D, E});
push_r({deep, _Sz, Pr, M, Sf}, A) ->
deep(Pr, M, erlang:append_element(Sf, A));
% Two loose chunks that fit within the merge limit: concatenate.
push_r(A, B) when byte_size(A) + byte_size(B) =< ?MERGE_CHUNK_SIZE ->
<<A/binary, B/binary>>;
% Otherwise promote the pair to a deep tree.
push_r(A, B) ->
deep({A}, empty, {B}).
% Leftmost element of a deep tree (partial: no clause for empty or
% single trees).
head_l({deep, _Sz, Pr, _M, _Sf}) -> element(1, Pr).
% Deep tree without its leftmost element.
tail_l({deep, _Sz, Pr, M, Sf}) -> deep_l(erlang:delete_element(1, Pr), M, Sf).
% Split off the leftmost element: returns `empty', or {Head, Rest}.
view_l(empty) -> empty;
view_l({deep, _Sz, Pr, M, Sf}) ->
{element(1, Pr), deep_l(erlang:delete_element(1, Pr), M, Sf)};
view_l(S) -> {S, empty}.
% Rebuild a deep tree whose prefix digit may have become empty:
% refill the prefix from the middle tree, or collapse to just the
% suffix digit when the middle is exhausted.
deep_l({}, M, Sf) ->
case view_l(M) of
empty -> digit_to_tree(Sf);
{Hd, Tl} ->
% Head of a middle tree is always a 2,3-node
deep(node_to_digit(Hd), Tl, Sf)
end;
deep_l(Pr, M, Sf) -> deep(Pr, M, Sf).
% Split off the rightmost element: returns `empty', or {Rest, Last}.
% Mirror image of view_l/1.
view_r(empty) -> empty;
view_r({deep, _Sz, Pr, M, Sf}) ->
{deep_r(Pr, M, erlang:delete_element(tuple_size(Sf), Sf)), element(tuple_size(Sf), Sf)};
view_r(S) -> {S, empty}.
% Rebuild a deep tree whose suffix digit may have become empty:
% refill the suffix from the middle tree, or collapse to just the
% prefix digit when the middle is exhausted.
deep_r(Pr, M, {}) ->
case view_r(M) of
empty -> digit_to_tree(Pr);
{Tl, Hd} ->
% Head of a middle tree is always a 2,3-node
deep(Pr, Tl, node_to_digit(Hd))
end;
deep_r(Pr, M, Sf) -> deep(Pr, M, Sf).
% TODO: Make this more balanced
% Convert a digit back into a standalone tree: a 1-digit becomes a
% single, larger digits become a deep tree with a 1-element prefix.
digit_to_tree({A}) -> A;
digit_to_tree(Digit) ->
deep({element(1, Digit)}, empty, erlang:delete_element(1, Digit)).
% Build a tree from the sub-digit between positions From and To.
% `From` and `To` are inclusive.
%
digit_to_tree(Digit, From, To) ->
digit_to_tree(Digit, From, To, empty).
% NOTE(review): push_l/2 prepends, so as I grows each element lands
% to the LEFT of the previously pushed ones, reversing digit order
% in the resulting tree — confirm this is intended.
digit_to_tree(Digit, I, To, Tree) when I =< To ->
digit_to_tree(Digit, I + 1, To, push_l(element(I, Digit), Tree));
digit_to_tree(_, _, _, Tree) -> Tree.
% Split a tree at the first point where Pred becomes true for the
% accumulated measure. Acc is the measure of everything to the left
% of this subtree. Returns {LeftTree, FocusElement, RightTree}.
% Pred must be monotone and must hold for the whole tree's measure.
%
% Fixes relative to the previous version (cf. Hinze & Paterson's
% splitTree/splitDigit):
% - prefix branch: split_digit must be seeded with the accumulator
% *before* the prefix digit (Acc), not after it (AccPr), otherwise
% a monotone predicate always selects index 1;
% - middle branch: the split node Xs must be converted to a digit
% once and indexed as a digit — indexing the raw node tuple is
% off by one because element 1 is the cached size;
% - suffix branch: the left remainder comes from Sf, not from Pr.
split_tree(Pred, Acc, {deep, _Sz, Pr, M, Sf}) ->
AccPr = size_add(Acc, measure_digit(Pr)),
case Pred(AccPr) of
true ->
% Split point is inside the prefix digit.
I = split_digit(Pred, Acc, Pr),
L = digit_to_tree(Pr, 1, I - 1),
R = take_r_digit(Pr, tuple_size(Pr) - I),
{L, element(I, Pr), deep_l(R, M, Sf)};
false ->
AccPrM = size_add(AccPr, measure_tree(M)),
case Pred(AccPrM) of
true ->
% Split point is inside the middle tree: recurse, then
% split the 2,3-node the recursion focused on.
{ML, Xs, MR} = split_tree(Pred, AccPr, M),
AccPrML = size_add(AccPr, measure_tree(ML)),
% Since we recursed Xs must be a 2,3-node
Digit = node_to_digit(Xs),
I = split_digit(Pred, AccPrML, Digit),
L = take_l_digit(Digit, I - 1),
R = take_r_digit(Digit, tuple_size(Digit) - I),
{deep_r(Pr, ML, L), element(I, Digit), deep_l(R, MR, Sf)};
false ->
% Split point is inside the suffix digit.
I = split_digit(Pred, AccPrM, Sf),
R = digit_to_tree(Sf, I + 1, tuple_size(Sf)),
L = take_l_digit(Sf, I - 1),
{deep_r(Pr, M, L), element(I, Sf), R}
end
end;
split_tree(_Pred, _Acc, Single) -> {empty, Single, empty}.
% Finds the index of the split point in a digit.
% Acc is the measure of everything left of the digit. Returns the
% 1-based index of the first element whose inclusion satisfies
% Pred, or the last index if no earlier element does.
split_digit(Pred, Acc, Digit) ->
split_digit(Pred, Acc, Digit, 1).
split_digit(Pred, Acc, Digit, I) when I < tuple_size(Digit) ->
Acc1 = size_add(Acc, measure(element(I, Digit))),
case Pred(Acc1) of
true -> I;
false -> split_digit(Pred, Acc1, Digit, I + 1)
end;
split_digit(_Pred, _Acc, _Digit, I) -> I.
% Take the last N elements of a digit tuple as a new tuple
% (N = 0 yields {}). Throws {error, take_r_digit} when N exceeds
% the digit's size.
take_r_digit(Digit, N) when N > tuple_size(Digit) ->
    throw({error, take_r_digit});
take_r_digit(Digit, N) ->
    Skip = tuple_size(Digit) - N,
    list_to_tuple(lists:nthtail(Skip, tuple_to_list(Digit))).
% Take the first N elements of a digit tuple as a new tuple
% (N = 0 yields {}). Throws {error, take_l_digit} when N exceeds
% the digit's size.
take_l_digit(Digit, N) when N > tuple_size(Digit) ->
    throw({error, take_l_digit});
take_l_digit(Digit, N) ->
    list_to_tuple(lists:sublist(tuple_to_list(Digit), N)).
%-------------------------------------------------------------------------------
% Property tests
-ifdef(TEST).
% Round-trip: measuring a binary directly must agree with building
% a tree from it and measuring the tree.
prop_measure_from_binary() ->
?FORALL(Bin, utf8(), measure(Bin) =:= measure_tree(from_binary(Bin))).
-endif.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2010-2012. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(diameter_session).
-export([sequence/0,
sequence/1,
session_id/1,
origin_state_id/0]).
%% towards diameter_sup
-export([init/0]).
-define(INT64, 16#FFFFFFFFFFFFFFFF).
-define(INT32, 16#FFFFFFFF).
%% ---------------------------------------------------------------------------
%% # sequence/0-1
%%
%% Output: 32-bit
%% ---------------------------------------------------------------------------
%% 3588, 3:
%%
%% Hop-by-Hop Identifier
%% The Hop-by-Hop Identifier is an unsigned 32-bit integer field (in
%% network byte order) and aids in matching requests and replies.
%% The sender MUST ensure that the Hop-by-Hop identifier in a request
%% is unique on a given connection at any given time, and MAY attempt
%% to ensure that the number is unique across reboots. The sender of
%% an Answer message MUST ensure that the Hop-by-Hop Identifier field
%% contains the same value that was found in the corresponding
%% request. The Hop-by-Hop identifier is normally a monotonically
%% increasing number, whose start value was randomly generated. An
%% answer message that is received with an unknown Hop-by-Hop
%% Identifier MUST be discarded.
%%
%% End-to-End Identifier
%% The End-to-End Identifier is an unsigned 32-bit integer field (in
%% network byte order) and is used to detect duplicate messages.
%% Upon reboot implementations MAY set the high order 12 bits to
%% contain the low order 12 bits of current time, and the low order
%% 20 bits to a random value. Senders of request messages MUST
%% insert a unique identifier on each message. The identifier MUST
%% remain locally unique for a period of at least 4 minutes, even
%% across reboots. The originator of an Answer message MUST ensure
%% that the End-to-End Identifier field contains the same value that
%% was found in the corresponding request. The End-to-End Identifier
%% MUST NOT be modified by Diameter agents of any kind. The
%% combination of the Origin-Host (see Section 6.3) and this field is
%% used to detect duplicates. Duplicate requests SHOULD cause the
%% same answer to be transmitted (modulo the hop-by-hop Identifier
%% field and any routing AVPs that may be present), and MUST NOT
%% affect any state that was set when the original request was
%% processed. Duplicate answer messages that are to be locally
%% consumed (see Section 6.2) SHOULD be silently discarded.
-spec sequence()
-> diameter:'Unsigned32'().
% Atomically bump the shared counter in the diameter_sequence ets
% table, wrapping to 0 at 16#FFFFFFFF so the result always fits in
% 32 bits.
sequence() ->
Instr = {_Pos = 2, _Incr = 1, _Threshold = ?INT32, _SetVal = 0},
ets:update_counter(diameter_sequence, sequence, Instr).
-spec sequence(diameter:sequence())
-> diameter:'Unsigned32'().
% A {H, 32} mask leaves no room for fixed high bits: use the plain
% 32-bit counter.
sequence({_,32}) ->
sequence();
% Otherwise place the configured value H in the top bits and fill
% the low N bits from the counter.
sequence({H,N}) ->
(H bsl N) bor (sequence() band (1 bsl N - 1)).
%% ---------------------------------------------------------------------------
%% # origin_state_id/0
%% ---------------------------------------------------------------------------
%% 3588, 8.16:
%%
%% The Origin-State-Id AVP (AVP Code 278), of type Unsigned32, is a
%% monotonically increasing value that is advanced whenever a Diameter
%% entity restarts with loss of previous state, for example upon reboot.
%% Origin-State-Id MAY be included in any Diameter message, including
%% CER.
%%
%% A Diameter entity issuing this AVP MUST create a higher value for
%% this AVP each time its state is reset. A Diameter entity MAY set
%% Origin-State-Id to the time of startup, or it MAY use an incrementing
%% counter retained in non-volatile memory across restarts.
-spec origin_state_id()
-> diameter:'Unsigned32'().
% Read the value stored by init/0; constant until the next restart.
origin_state_id() ->
ets:lookup_element(diameter_sequence, origin_state_id, 2).
%% ---------------------------------------------------------------------------
%% # session_id/1
%% ---------------------------------------------------------------------------
%% 3588, 8.8:
%%
%% The Session-Id MUST begin with the sender's identity encoded in the
%% DiameterIdentity type (see Section 4.4). The remainder of the
%% Session-Id is delimited by a ";" character, and MAY be any sequence
%% that the client can guarantee to be eternally unique; however, the
%% following format is recommended, (square brackets [] indicate an
%% optional element):
%%
%% <DiameterIdentity>;<high 32 bits>;<low 32 bits>[;<optional value>]
%%
%% <high 32 bits> and <low 32 bits> are decimal representations of the
%% high and low 32 bits of a monotonically increasing 64-bit value. The
%% 64-bit value is rendered in two part to simplify formatting by 32-bit
%% processors. At startup, the high 32 bits of the 64-bit value MAY be
%% initialized to the time, and the low 32 bits MAY be initialized to
%% zero. This will for practical purposes eliminate the possibility of
%% overlapping Session-Ids after a reboot, assuming the reboot process
%% takes longer than a second. Alternatively, an implementation MAY
%% keep track of the increasing value in non-volatile memory.
%%
%% <optional value> is implementation specific but may include a modem's
%% device Id, a layer 2 address, timestamp, etc.
-spec session_id(diameter:'DiameterIdentity'())
-> diameter:'OctetString'().
%% Note that Session-Id has type UTF8String and that any OctetString
%% is a UTF8String.
% Produce "<Host>;<high 32 bits>;<low 32 bits>;<node>" from the
% monotonically increasing 64-bit session_base counter (wrapping
% at 2^64 - 1), as an iolist.
session_id(Host) ->
Instr = {_Pos = 2, _Incr = 1, _Threshold = ?INT64, _Set = 0},
N = ets:update_counter(diameter_sequence, session_base, Instr),
Hi = N bsr 32,
Lo = N band ?INT32,
[Host, ";", integer_to_list(Hi),
";", integer_to_list(Lo),
";", atom_to_list(node())].
%% ---------------------------------------------------------------------------
%% # init/0
%% ---------------------------------------------------------------------------
% Create the one-time values: origin_state_id is the 32-bit start
% time; session_base puts that time in the high 32 bits; the initial
% sequence number mixes the low time bits (shifted into the high 12)
% with 20 random bits, per the RFC 3588 suggestion quoted above.
% (The legacy `random' module is kept deliberately: this file
% targets the OTP versions it was written for.)
init() ->
{Now, Seed} = diameter_lib:seed(),
random:seed(Seed),
Time = time32(Now),
Seq = (?INT32 band (Time bsl 20)) bor (random:uniform(1 bsl 20) - 1),
ets:insert(diameter_sequence, [{origin_state_id, Time},
{session_base, Time bsl 32},
{sequence, Seq}]),
Time.
%% ---------------------------------------------------------
%% INTERNAL FUNCTIONS
%% ---------------------------------------------------------
% The minimum value represented by a Time value. (See diameter_types.)
% 32 bits extends to 2104.
-define(TIME0, 62105714048). %% {{1968,1,20},{3,14,8}}
% Seconds elapsed since the Time epoch (1968-01-20T03:14:08Z) for
% the given erlang timestamp, truncated to 32 bits.
time32(Now) ->
Time = calendar:now_to_universal_time(Now),
Diff = calendar:datetime_to_gregorian_seconds(Time) - ?TIME0,
Diff band ?INT32.
% @author <NAME> <<EMAIL>>
% @copyright 2021 <NAME>
% @doc Lossless encoding and handling of monetary values.
-module(money).
-compile({no_auto_import,[abs/1,min/2,max/2]}).
-export([currency/1,
dense_currency/1,
dense/2,
dense_unsafe/2,
dense_from_discrete/1,
dense_from_decimal/3,
dense_to_decimal/3,
discrete/3,
discrete/2,
discrete_currency/1,
discrete_amount/1,
discrete_from_dense/2,
discrete_from_decimal/3,
discrete_from_decimal/4,
discrete_to_decimal/3,
scale_from_rational/1,
scale_to_rational/1,
scale/1,
default_decimal_conf/0,
mk_separators/2,
mk_separators/1,
separators_comma/0,
separators_comma_dot/0,
separators_comma_narrownbsp/0,
separators_comma_nbsp/0,
separators_comma_space/0,
separators_comma_thinsp/0,
separators_dot/0,
separators_dot_comma/0,
separators_dot_narrownbsp/0,
separators_dot_nbsp/0,
separators_dot_space/0,
separators_dot_thinsp/0,
add/2,
subtract/2,
multiply/2,
is_greater_than/2,
is_less_than/2,
is_equal_to/2,
is_less_or_equal/2,
is_greater_or_equal/2,
neg/1,
abs/1,
min/2,
max/2
]).
-export_type([dense/1,
discrete/1,
separators/0,
scale/0,
currency/0,
unit/0
]).
%%%%%%%%%%%
%% TYPES %%
%%%%%%%%%%%
% The dense() type represents a dense monetary value for currency (usually a
% ISO-4217 currency code, but not necessarily) as a rational number.
-opaque dense(Currency) :: {rational(), Currency}.
% The discrete() type represents a discrete monetary value for a currency
% expresed as an integer amount of a particular unit. For example, with
% currency ~ "USD" and unit ~ "cent" you can represent United States Dollars
% to their full extent.
-opaque discrete(Currency) :: {integer(), {Currency, unit()} | {Currency, scale()}}.
% Config to use when rendering or parsing decimal numbers.
%
% - `separators` - Decimal and thousands separators to use when rendering the
% decimal number. Construct one with mk_separators/1, or pick a ready
% made one like separators_dot or separators_dot_narrownbsp.
% - `leading_plus` - Whether to render a leading '+' sign in case the amount
% is positive
% - `digits` - Number of decimal numbers to render, if any.
% - `scale` - Scale used to when rendering the decimal number. This is useful
% if, for example, you want to render a "number of cents" rather than
% a "number of dollars" as the whole part of the decimal number when
% rendering a USD amount. It's particularly useful when rendering
% currencies such as XAU, where one might prefer to render amounts as
% a number of grams, rather than as a number of troy-ounces.
-type decimal_conf() :: list({separators, separators()} |
{leading_plus, boolean()} |
{digits, integer()} |
{scale, scale()}
).
% Decimal and thousands separators used when rendering or parsing a decimal number.
-opaque separators() :: {unicode:charlist(), unicode:charlist()} |
{unicode:charlist()}.
% Method for approximating a fractional number to an integer number.
%
% - `round` - Approximate x to the nearest integer, or to the nearest even
% integer if x is equidistant between two integers.
% - `floor` - Approximate x to the nearest integer less than or equal to x.
% - `ceiling` - Approximate x to the nearest integer greater than or equal to x.
% - `truncate` - Approximate x to the nearest integer betwen 0 and x, inclusive.
% - `half_even` - Approximate x to the nearest even integer, when equidistant
% from the nearest two integers. This is also known as "Bankers
% Rounding".
-type approximation() :: round |
floor |
ceiling |
truncate |
half_even.
% Representation of a scale as two positive integers.
-opaque scale() :: {pos_integer(), pos_integer()}.
-type currency() :: atom().
-type unit() :: atom().
% A rational value.
-type rational() :: rationals:fraction().
%%%%%%%%%%%%
%% SCALES %%
%%%%%%%%%%%%
%% TODO: Add more unit scales.
-define(UNIT_SCALES,
[{{'EUR',euro}, {1,1}},
{{'EUR',cent}, {100,1}},
{{'USD',dollar}, {1,1}},
{{'USD',cent}, {100,1}},
{{'SEK',krona}, {1,1}},
{{'SEK',ore}, {100,1}},
{{'BTC',bitcoin}, {1,1}},
{{'BTC',millibitcoin}, {1000,1}},
{{'BTC',satoshi}, {100000000,1}}
]).
%% TODO: Add more currencies.
-define(CURRENCY_SCALES,
[{'USD', cent},
{'SEK', ore},
{'EUR', cent},
{'BTC', satoshi}
]).
%%%%%%%%%
%% API %%
%%%%%%%%%
% @doc Convert an ISO 4217 currency code into a currency atom.
%
% Accepts either a binary code or an atom. Unsupported or unknown
% codes yield {error, invalid_currency}.
-spec currency(binary() | atom()) ->
{ok, currency()} |
{error, invalid_currency}.
currency(Bin) when is_binary(Bin) ->
    % binary_to_existing_atom/1 raises badarg for unknown codes and
    % never creates new atoms from untrusted input.
    try binary_to_existing_atom(Bin) of
        Atom -> currency(Atom)
    catch
        error:badarg -> {error, invalid_currency}
    end;
currency(Symbol) when is_atom(Symbol) ->
    case lists:keymember(Symbol, 1, ?CURRENCY_SCALES) of
        true -> {ok, Symbol};
        false -> {error, invalid_currency}
    end.
% @doc dense() currency identifier, as a binary.
-spec dense_currency(dense(currency())) -> binary().
dense_currency({{fraction, _Num, _Den}, Currency}) ->
    atom_to_binary(Currency).
% @doc Build a dense() monetary value from a rational() value.
%
% Notice that dense returns {error, invalid} in case the given rational()'s
% denominator is zero, which although unlikely, it is possible if the
% rational() was unsafely constructed. When dealing with hardcoded or trusted
% rational() values, you can use dense_unsafe/2 instead of dense/2 which
% unsafely constructs a dense().
-spec dense(rational(), Currency) -> {ok, dense(Currency)} | {error, invalid}.
dense(Rational, Currency) ->
% The guard rejects any denominator strictly between -1 and 1:
% for integers that is exactly zero.
case rationals:denominator(Rational) of
I when I < 1, I > -1 -> {error, invalid};
_ -> {ok, dense_unsafe(Rational, Currency)}
end.
% @doc Unsafely build a dense() monetary value from a rational() value.
%
% Contrary to dense, this function *crashes* if the given rational() has zero
% as a denominator, which is something very unlikely to happen unless the given
% rational() was itself unsafely constructed. Other than that, dense/2 and
% dense_unsafe/2 behave the same.
%
% Prefer to use dense/2 when dealing with rational() inputs from untrusted sources.
-spec dense_unsafe(rational(), Currency) -> dense(Currency).
dense_unsafe(Rational, Currency) when is_atom(Currency) ->
% The case has no clause for |denominator| < 1, so a zero
% denominator raises case_clause here — the documented crash.
case rationals:denominator(Rational) of
I when I >= 1; I =< -1 ->
{Rational, Currency}
end.
% @doc Convert currency discrete() monetary value into a dense() monetary value.
% A unit atom is first resolved to its {Num, Den} scale via scale/1;
% the dense amount is then Amount / (Num/Den), simplified.
-spec dense_from_discrete(discrete(Currency)) -> dense(Currency).
dense_from_discrete({Amount,{Currency,Unit}}) when is_atom(Unit) ->
Scale = scale({Currency,Unit}),
dense_from_discrete({Amount,{Currency,Scale}});
dense_from_discrete({Amount,{Currency,{A,B}}}) ->
dense_unsafe(
rationals:simplify(
rationals:divide(
rationals:new(Amount, 1),
rationals:new(A, B)
)), Currency).
% @doc Parses a decimal representation of a dense().
%
% @param DecimalConf Config to use for parsing the decimal number. Notice that
% a leading '-' or '+' will always be correctly interpreted,
% notwithstanding what the "leading '+'" policy is on the
% given DecimalConf.
% @param Decimal The raw string containing the decimal representation
% (e.g., "-1,234.56789").
% @param Currency The currency to use.
-spec dense_from_decimal(decimal_conf(), binary(), Currency) ->
{ok, dense(Currency)} | {error, invalid}.
dense_from_decimal(DecimalConf, Decimal, Currency) ->
% NOTE(review): the decimal is parsed via an intermediate float,
% which can lose precision for values that floats cannot represent
% exactly — confirm this is acceptable for a "lossless" module.
case float_from_decimal(DecimalConf, Decimal) of
{ok, F} ->
dense(rationals:simplify(rationals:from_float(F)), Currency);
{error,invalid} ->
{error,invalid}
end.
% @doc Render a dense() monetary amount as a decimal number in a potentially lossy manner.
%
% @param DecimalConf Config to use for rendering the decimal number.
% @param Approximation Approximation to use if necessary in order to fit the
% dense() amount in as many decimal numbers as requested.
% @param Dense The monetary amount to render.
-spec dense_to_decimal(decimal_conf(), approximation(), dense(currency())) -> binary().
dense_to_decimal(DecimalConf, Approximation, Dense) ->
{R,_} = Dense,
F = rationals:to_float(R),
Digits = proplists:get_value(digits, DecimalConf),
% Scale by 10^Digits and approximate to an integer, so the last
% Digits digits of I are the fractional part.
{I,_} = approximate(Approximation, rationals:from_float(F * math:pow(10,Digits))),
Separators = proplists:get_value(separators, DecimalConf),
LeadingPlus = proplists:get_value(leading_plus, DecimalConf),
% Walk the digit string right-to-left, inserting the decimal
% separator after Digits characters and a thousands separator
% (when configured) every 3 characters beyond that.
% NOTE(review): for negative amounts the '-' sign produced by
% integer_to_list/1 is counted as a digit position in this fold —
% confirm separator placement is correct for negatives.
{_,Str} = lists:foldr(
fun(X,{N,Acc}) ->
Acc2 =
if
N == Digits ->
case Separators of
{_,DecimalSep} ->
DecimalSep ++ Acc;
{DecimalSep} ->
DecimalSep ++ Acc
end;
N > Digits, (N - Digits) rem 3 == 0 ->
case Separators of
{ThousandSep,_} ->
ThousandSep ++ Acc;
{_} ->
Acc
end;
true -> Acc
end,
{N+1,[X | Acc2]}
end, {0,[]}, integer_to_list(I)),
Bin = list_to_binary(Str),
% Optionally prefix a '+' for non-negative amounts.
case {LeadingPlus,F} of
{true,F} when F >= 0 -> unicode:characters_to_binary([<<"+">>, Bin]);
_ -> Bin
end.
% @doc Construct a discrete() value.
%
% When Unit is an atom it must be a known unit for the currency: the
% scale({Currency,Unit}) lookup doubles as validation and crashes (badmatch
% on 'undefined') for unknown units. A raw {Num,Den} scale is validated and
% normalized through scale/1.
-spec discrete(integer(), Currency, unit() | scale()) -> discrete(Currency).
discrete(Amount, Currency, Unit) when is_integer(Amount), is_atom(Currency), is_atom(Unit) ->
  {_,_} = scale({Currency,Unit}),
  {Amount, {Currency, Unit}};
discrete(Amount, Currency, Scale) when is_integer(Amount), is_atom(Currency) ->
  {Amount, {Currency, scale(Scale)}}.
% @doc Construct a discrete() value with the default unit scale for the currency.
%
% Note that some currencies do not have a default unit scale. This function will
% crash if you try to use a currency without a default unit scale.
%
% Accepts whatever currency/1 accepts (binary or atom currency codes).
%
% @see discrete/3
-spec discrete(integer(), Currency) -> discrete(Currency).
discrete(Amount, Currency) ->
  {ok, Symbol} = currency(Currency),
  Unit = proplists:get_value(Symbol, ?CURRENCY_SCALES),
  discrete(Amount, Symbol, Unit).
% @doc discrete() currency identifier, rendered as a UTF-8 binary.
-spec discrete_currency(discrete(currency())) -> binary().
discrete_currency(Discrete) ->
  {_Amount, {Currency, _Scale}} = Discrete,
  atom_to_binary(Currency, utf8).
% @doc discrete() amount as integer value (the raw count of scale units).
-spec discrete_amount(discrete(currency())) -> integer().
discrete_amount({Amount, _CurrencyAndScale}) ->
  Amount.
% @doc Approximate a dense() value x to the nearest value fully representable a
% given scale.
%
% If the given dense() doesn't fit entirely in the scale, then a non-zero
% dense() remainder is returned alongside the discrete() approximation.
%
% @param Approximation Approximation to use if necessary in order to fit
%                      the dense() amount in the requested scale.
% @param Dense The dense() value.
-spec discrete_from_dense(approximation(), dense(Currency)) ->
        {discrete(Currency), dense(Currency)}.
discrete_from_dense(Approximation, {Amount,Currency}) ->
  {N,D} = scale(Currency),
  Scale = rationals:new(N,D),
  ScaledAmount = rationals:multiply(Amount, Scale),
  {Int,_Remainder} = approximate(Approximation, ScaledAmount),
  % The remainder from approximate/2 is deliberately ignored (its convention
  % differs per strategy); recompute it here relative to Int and scale it
  % back to currency units so that dense_from_discrete(I) + R == Dense.
  Remainder = rationals:subtract(ScaledAmount, rationals:new(Int)),
  ScaledRemainder = rationals:simplify(
                      rationals:divide(Remainder, Scale)),
  {discrete(Int, Currency), dense_unsafe(ScaledRemainder, Currency)}.
% @doc Parses a decimal representation of a discrete().
%
% Notice that parsing will fail unless the entire precision of the decimal
% number can be represented in the desired scale.
%
% @param DecimalConf Config to use for parsing the decimal number.
%                    Notice that a leading '-' or '+' will always be correctly
%                    interpreted, notwithstanding what the "leading '+'" policy
%                    is on the given DecimalConf.
% @param Decimal The raw string containing the decimal representation
%                (e.g., "-1,234.56789").
-spec discrete_from_decimal(decimal_conf(), binary(), Currency) ->
        {ok, discrete(Currency)} | {error, invalid} | {error, precision}.
discrete_from_decimal(DecimalConf, Decimal, Currency) ->
  % Uses the currency's default scale; see discrete_from_decimal/4.
  discrete_from_decimal(DecimalConf, Decimal, Currency, scale(Currency)).
% @doc Parses a decimal representation of a discrete().
%
% @see discrete_from_decimal/3
-spec discrete_from_decimal(decimal_conf(), binary(), Currency, unit() | scale()) ->
        {ok, discrete(Currency)} | {error, invalid}.
discrete_from_decimal(DecimalConf, Decimal, Currency, Unit) when is_atom(Unit) ->
  discrete_from_decimal(DecimalConf, Decimal, Currency, scale(Unit));
discrete_from_decimal(DecimalConf, Decimal, Currency, Scale) ->
  case float_from_decimal(DecimalConf, Decimal) of
    {ok, F} ->
      {N,D} = Scale,
      FScaled = F * (N / D),
      I = round(FScaled),
      % NOTE(review): exact float equality is used to decide whether the
      % decimal fits the scale; inputs whose scaled value is not exactly
      % representable as a float could be rejected with {error, precision}
      % even when they fit -- confirm acceptable for the scales in use.
      if
        (I - FScaled) == 0 ->
          {ok, discrete(I, Currency, Scale)};
        true ->
          {error, precision}
      end;
    {error, invalid} -> {error, invalid}
  end.
% @doc Render a discrete() monetary amount as a decimal number in a potentially
% lossy manner.
%
% @param DecimalConf Config to use for rendering the decimal number.
% @param Approximation Approximation to use if necessary in order to fit the
%                      discrete() amount in as many decimal numbers as
%                      requested.
% @param Discrete The monetary amount to render.
-spec discrete_to_decimal(decimal_conf(), approximation(), discrete(currency())) -> binary().
discrete_to_decimal(DecimalConf, Approximation, Discrete) ->
  % Delegates: convert to a dense rational first, then render that.
  dense_to_decimal(DecimalConf, Approximation, dense_from_discrete(Discrete)).
% @doc Construct a scale() from a positive, non-zero rational number.
%
% NOTE(review): only a non-zero integer denominator is actually enforced;
% positivity of the rational is not checked here -- confirm callers never
% pass negative rationals.
-spec scale_from_rational(rational()) -> {ok, scale()} | {error, invalid}.
scale_from_rational(Rational) ->
  case rationals:denominator(Rational) of
    I when I < 1, I > -1 -> {error, invalid};
    _ -> {ok, rationals:ratio(Rational)}
  end.
% @doc Obtain the rational() representation of a scale.
-spec scale_to_rational(scale()) -> rational().
scale_to_rational({A,B}) ->
  rationals:new(A,B).
% @doc Obtain scale() representation of a unit or currrency scale.
%
% Accepts, in clause order: a {Currency,Unit} pair, a bare currency atom,
% an already-built {N,D} scale (validated positive), a discrete() carrying a
% numeric scale, and a discrete() carrying a named unit.
-spec scale({currency(),unit()} | currency() | scale() | discrete(currency())) -> scale().
scale({Currency,Unit}) when is_atom(Currency), is_atom(Unit) ->
  unit_scale({Currency,Unit});
scale(Currency) when is_atom(Currency) ->
  currency_scale(Currency);
scale({N,D}) when is_integer(N), is_integer(D), N > 0, D > 0 ->
  {N,D};
scale({_,{_Currency,{N,D}}}) ->
  scale({N,D});
scale({_,{Currency,Unit}}) ->
  scale({Currency,Unit}).
% @doc The unit_scale/1 function returns a scale as a rational number (expressed
% as {Numerator,Denominator}) indicating how many pieces of unit fit in currency.
%
% Returns 'undefined' for unknown {Currency,Unit} pairs.
-spec unit_scale({currency(),unit()}) -> scale() | undefined.
unit_scale({Currency,Unit}) ->
  proplists:get_value({Currency,Unit}, ?UNIT_SCALES).
% @doc If there exists a canonical smallest scale() that can fully represent the
% currency in all its denominations, then currency_scale(Currency) will
% return such scale(). For example, currency_scale('USD') evaluates to
% unit_scale({'USD',cent}).
-spec currency_scale(currency()) -> scale() | undefined.
currency_scale(Currency) ->
  % A currency without a default unit yields unit_scale({Currency, undefined}),
  % which in turn is 'undefined'.
  unit_scale({Currency, proplists:get_value(Currency, ?CURRENCY_SCALES)}).
% @doc Default DecimalConf.
%
% - No leading '+' sign
% - No thousands separator
% - Decimal separator is '.'
% - 2 decimal digits
% - A scale of 1
%
% That is, something like 1.23 or -1234567.89.
-spec default_decimal_conf() -> decimal_conf().
default_decimal_conf() ->
  [{separators, {"."}},
   {leading_plus, false},
   {digits, 2},
   {scale, {1,1}}
  ].
% @doc Construct separators() to use with in decimal_conf().
%
% The separators can't be an ASCII digit nor control character, and they must
% be different from each other.
%
% The rest of this module (float_from_decimal/2, dense_to_decimal/3 and the
% separators_*/0 constructors) stores two-element separators as
% {ThousandSep, DecimalSep}, so that is the tuple shape built here.
% (Previously the tuple was built swapped, which made every conf produced by
% this function parse/render with the separators exchanged.)
%
% @param DecimalSep Decimal separator (i.e., the '.' in 1,234.56789)
% @param ThousandSep Thousands separator for the integer part, if any
%                    (i.e., the ',' in 1,234.56789).
-spec mk_separators(unicode:charlist(), unicode:charlist()) ->
        {ok, separators()} | {error, invalid_separator}.
mk_separators(DecimalSep, ThousandSep) when DecimalSep /= ThousandSep ->
  case {is_valid_sep(DecimalSep), is_valid_sep(ThousandSep)} of
    {true, true} -> {ok, {ThousandSep, DecimalSep}};
    _ -> {error, invalid_separator}
  end.
% @doc Like mk_separators/2 but without thousands separator.
-spec mk_separators(unicode:charlist()) -> {ok, separators()} | {error, invalid_separator}.
mk_separators(DecimalSep) ->
  case is_valid_sep(DecimalSep) of
    true -> {ok, {DecimalSep}};
    false -> {error, invalid_separator}
  end.
% Prebuilt separators. Naming is separators_<Decimal>[_<Thousand>]();
% two-element tuples are {ThousandSep, DecimalSep}.
% Decimal comma, no thousands separator.
separators_comma() ->
  {","}.
% Decimal comma, thousands dot (e.g. "1.234,56").
separators_comma_dot() ->
  {".",","}.
% Decimal comma, thousands narrow no-break space (U+202F).
separators_comma_narrownbsp() ->
  {"\x{202f}", ","}.
% Decimal comma, thousands no-break space (U+00A0).
separators_comma_nbsp() ->
  {"\xa0", ","}.
% Decimal comma, thousands thin space (U+2009).
separators_comma_thinsp() ->
  {"\x{2009}", ","}.
% Decimal comma, thousands ASCII space. "\x20" is the space character;
% the previous "\20" was an *octal* escape (0x10, a control character).
separators_comma_space() ->
  {"\x20", ","}.
% Decimal dot, no thousands separator. (Previously returned {","}, a
% copy-paste of separators_comma/0, contradicting the naming convention and
% duplicating that function.)
separators_dot() ->
  {"."}.
% Decimal dot, thousands comma (e.g. "1,234.56").
separators_dot_comma() ->
  {",", "."}.
% Decimal dot, thousands narrow no-break space (U+202F).
separators_dot_narrownbsp() ->
  {"\x{202f}", "."}.
% Decimal dot, thousands no-break space (U+00A0).
separators_dot_nbsp() ->
  {"\xa0", "."}.
% Decimal dot, thousands thin space (U+2009).
separators_dot_thinsp() ->
  {"\x{2009}", "."}.
% Decimal dot, thousands ASCII space.
separators_dot_space() ->
  {"\x20", "."}.
%% ARITHMETIC %%
%% All binary operations below require both operands to be of the same
%% currency (and, for discrete values, the same scale); mixing raises
%% function_clause. The discrete clause always matches first because a
%% discrete's second element is itself a tuple.
-spec add(discrete(Currency), discrete(Currency)) -> discrete(Currency);
         (dense(Currency), dense(Currency)) -> dense(Currency).
add({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  discrete(A + B, C1, S1);
add({A,C1}, {B,C2}) when C1 == C2 ->
  dense_unsafe(rationals:add(A,B),C1).
-spec subtract(discrete(Currency), discrete(Currency)) -> discrete(Currency);
              (dense(Currency), dense(Currency)) -> dense(Currency).
subtract({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  discrete(A - B, C1, S1);
subtract({A,C1}, {B,C2}) when C1 == C2 ->
  dense_unsafe(rationals:subtract(A,B),C1).
%% Multiply a monetary value by a scalar: an integer factor for discrete
%% values, a rational() factor for dense values.
-spec multiply(discrete(Currency), integer()) -> discrete(Currency);
              (dense(Currency), rational()) -> dense(Currency).
multiply({A,{C,S}}, ScaleFactor) ->
  discrete(A * ScaleFactor, C, S);
multiply({A,C}, ScaleFactor) ->
  dense_unsafe(rationals:multiply(A,ScaleFactor),C).
%% COMPARISON %%
%% Discrete amounts compare their integer amounts directly; dense amounts
%% delegate to the rationals module.
-spec is_greater_than(discrete(Currency), discrete(Currency)) -> boolean();
                     (dense(Currency), dense(Currency)) -> boolean().
is_greater_than({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  A > B;
is_greater_than({A,C1}, {B,C2}) when C1 == C2 ->
  rationals:is_greater_than(A,B).
-spec is_less_than(discrete(Currency), discrete(Currency)) -> boolean();
                  (dense(Currency), dense(Currency)) -> boolean().
is_less_than({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  A < B;
is_less_than({A,C1}, {B,C2}) when C1 == C2 ->
  rationals:is_less_than(A,B).
-spec is_equal_to(discrete(Currency), discrete(Currency)) -> boolean();
                 (dense(Currency), dense(Currency)) -> boolean().
is_equal_to({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  A == B;
is_equal_to({A,C1}, {B,C2}) when C1 == C2 ->
  rationals:is_equal_to(A,B).
-spec is_greater_or_equal(discrete(Currency), discrete(Currency)) -> boolean();
                         (dense(Currency), dense(Currency)) -> boolean().
is_greater_or_equal({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  A >= B;
is_greater_or_equal({A,C1}, {B,C2}) when C1 == C2 ->
  rationals:is_greater_or_equal(A,B).
-spec is_less_or_equal(discrete(Currency), discrete(Currency)) -> boolean();
                      (dense(Currency), dense(Currency)) -> boolean().
is_less_or_equal({A,{C1,S1}}, {B,{C2,S2}}) when C1 == C2, S1 == S2 ->
  A =< B;
is_less_or_equal({A,C1}, {B,C2}) when C1 == C2 ->
  rationals:is_less_or_equal(A,B).
%% UNARY OPERATIONS %%
%% @doc Additive inverse: negates a discrete() or dense() monetary amount.
-spec neg(discrete(Currency)) -> discrete(Currency);
         (dense(Currency)) -> dense(Currency).
neg({Amount, {Currency, Scale}}) ->
  discrete(-Amount, Currency, Scale);
neg({Rational, Currency}) ->
  dense_unsafe(rationals:subtract(rationals:new(0), Rational), Currency).
%% @doc Absolute value of a monetary amount. Shadows the auto-imported
%% erlang:abs/1 inside this module, hence the qualified call below.
-spec abs(discrete(Currency)) -> discrete(Currency);
         (dense(Currency)) -> dense(Currency).
abs({A,{C,S}}) ->
  discrete(erlang:abs(A),C,S);
abs({A,C}) ->
  dense_unsafe(rational_abs(A),C).
%% BINARY OPERATIONS %%
%% @doc Largest of two monetary amounts of the same currency (and scale).
%% On a tie the left operand is returned, as in the original clause set.
-spec max(discrete(Currency), discrete(Currency)) -> discrete(Currency);
         (dense(Currency), dense(Currency)) -> dense(Currency).
max({A, {C1, S1}}, {B, {C2, S2}}) when C1 == C2, S1 == S2 ->
  % A >= B collapses the strictly-greater and equal cases, both of which
  % returned the left operand.
  case A >= B of
    true -> discrete(A, C1, S1);
    false -> discrete(B, C2, S2)
  end;
max({A, C1}, {B, C2}) when C1 == C2 ->
  case rationals:is_greater_or_equal(A, B) of
    true -> dense_unsafe(A, C1);
    false -> dense_unsafe(B, C2)
  end.
%% @doc Smallest of two monetary amounts of the same currency (and scale).
%% On a tie the left operand is returned, as in the original clause set.
-spec min(discrete(Currency), discrete(Currency)) -> discrete(Currency);
         (dense(Currency), dense(Currency)) -> dense(Currency).
min({A, {C1, S1}}, {B, {C2, S2}}) when C1 == C2, S1 == S2 ->
  % A =< B collapses the strictly-less and equal cases, both of which
  % returned the left operand.
  case A =< B of
    true -> discrete(A, C1, S1);
    false -> discrete(B, C2, S2)
  end;
min({A, C1}, {B, C2}) when C1 == C2 ->
  case rationals:is_less_or_equal(A, B) of
    true -> dense_unsafe(A, C1);
    false -> dense_unsafe(B, C2)
  end.
%%%%%%%%%%%%%%
%% INTERNAL %%
%%%%%%%%%%%%%%
%% @doc Normalize a decimal string (strip thousands separators, rewrite the
%% trailing decimal-separator occurrence to '.') and parse it as a float.
%% Separators are stored as {ThousandSep, DecimalSep} or {DecimalSep}.
-spec float_from_decimal(decimal_conf(), binary()) -> {ok, float()} | {error, invalid}.
float_from_decimal(DecimalConf, Decimal) ->
  Str = case proplists:get_value(separators, DecimalConf) of
    {ThousandSep,DecimalSep} ->
      iolist_to_binary(
        string:replace(
          string:replace(Decimal, [ThousandSep], "", all),
          [DecimalSep], ".", trailing
        ));
    {DecimalSep} ->
      iolist_to_binary(
        string:replace(Decimal, [DecimalSep], ".", trailing))
  end,
  try
    % Integers carry no '.', and binary_to_float/1 rejects them, so
    % dispatch on the presence of the decimal point.
    case string:find(Str, ".") of
      nomatch -> {ok, float(binary_to_integer(Str))};
      _ -> {ok, binary_to_float(Str)}
    end
  catch
    error:badarg ->
      {error,invalid}
  end.
%% @doc Round a rational to an integer with the requested strategy, returning
%% {Integer, Remainder}.
%%
%% NOTE(review): for round/floor/ceiling/truncate the remainder is
%% Rational - Integer; but when half_even rounds away from the truncation,
%% the returned remainder is still relative to the *truncated* value, so its
%% sign convention differs from the other strategies. approximate_test/0
%% pins this behavior, and discrete_from_dense/2 recomputes its own
%% remainder -- confirm before relying on the remainder returned here.
-spec approximate(approximation(), rational()) -> {integer(), rational()}.
approximate(round, Rational) ->
  Round = round(rationals:to_float(Rational)),
  {Round, rationals:simplify(
            rationals:subtract(Rational, rationals:new(Round)))};
approximate(floor, Rational) ->
  Round = floor(rationals:to_float(Rational)),
  {Round, rationals:simplify(
            rationals:subtract(Rational, rationals:new(Round)))};
approximate(ceiling, Rational) ->
  Round = ceil(rationals:to_float(Rational)),
  {Round, rationals:simplify(
            rationals:subtract(Rational, rationals:new(Round)))};
approximate(truncate, Rational) ->
  Round = trunc(rationals:to_float(Rational)),
  {Round, rationals:simplify(
            rationals:subtract(Rational, rationals:new(Round)))};
approximate(half_even, Rational) ->
  _X = signum(-1), %% Force Dialyzer to accept negative numbers
  Tr = trunc(rationals:to_float(Rational)),
  Rr = rationals:simplify(
         rationals:subtract(Rational,rationals:new(Tr))),
  % Exact ties (|remainder| == 1/2) go to the nearest even integer; any
  % other value falls back to plain round.
  case rationals:ratio(rational_abs(Rr)) of
    {1,2} ->
      case even(Tr) of
        true -> {Tr,Rr};
        false -> {Tr + signum(Tr), Rr}
      end;
    _ -> approximate(round, Rational)
  end.
%% @doc True when the integer is even; defined for negative values as well.
-spec even(integer()) -> boolean().
even(X) ->
  X rem 2 =:= 0.
%% @doc Sign of an integer: -1, 0 or 1.
-spec signum(integer()) -> integer().
signum(0) -> 0;
signum(X) when X > 0 -> 1;
signum(X) when X < 0 -> -1.
%% @doc A separator charlist is valid when it is non-empty and none of its
%% characters is an ASCII digit or a control character, matching the contract
%% documented on mk_separators/2. (The previous implementation only rejected
%% the ten single-digit strings, so multi-character strings containing
%% digits, control characters and "" all passed.)
-spec is_valid_sep(unicode:charlist()) -> boolean().
is_valid_sep(Sep) when is_list(Sep) ->
  Sep =/= [] andalso
    lists:all(fun(C) ->
                not (C >= $0 andalso C =< $9)  % no ASCII digits
                  andalso not (C < 16#20)      % no C0 control characters
                  andalso C =/= 16#7F          % no DEL
              end, Sep).
%% @doc Absolute value of a rational(). The float conversion is used only for
%% the sign test; a non-negative input is returned untouched, otherwise a new
%% rational is built from the absolute numerator and denominator.
rational_abs(F) ->
  Float = rationals:to_float(F),
  if
    Float >= 0 -> F;
    true ->
      {Numerator,Denominator} = rationals:ratio(F),
      rationals:new(erlang:abs(Numerator), erlang:abs(Denominator))
  end.
%%%%%%%%%%%
%% TESTS %%
%%%%%%%%%%%
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% currency/1 accepts both binary and atom codes; unknown codes are rejected.
currency_test() ->
  ?assertEqual({ok,'SEK'}, currency(<<"SEK">>)),
  ?assertEqual({ok,'SEK'}, currency('SEK')),
  ?assertEqual({error, invalid_currency}, currency(<<"XXX">>)).
%% dense_currency/1 reports the currency code of a dense() value as a binary.
dense_currency_test() ->
  {ok,Dense} = dense(rationals:new(10), 'SEK'),
  ?assertEqual(<<"SEK">>, dense_currency(Dense)).
%% dense/2 validates the rational; a zero denominator yields {error,invalid}.
dense_test() ->
  ?assertEqual({ok,{{fraction,10,1},'SEK'}}, dense(rationals:new(10), 'SEK')),
  ?assertEqual({error,invalid}, dense(rationals:new(10,0), 'SEK')).
%% dense_unsafe/2 crashes (case_clause) on a zero denominator instead of
%% returning an error tuple.
dense_unsafe_test() ->
  ?assertError({case_clause,0}, dense_unsafe(rationals:new(10,0), 'SEK')),
  ?assertEqual({{fraction,10,1},'SEK'}, dense_unsafe(rationals:new(10,1), 'SEK')).
%% Conversion from discrete values (numeric and named unit scales) to dense.
dense_from_discrete_test() ->
  ?assertEqual({{fraction,10,1},'SEK'},
               dense_from_discrete(discrete(10,'SEK',{1,1}))),
  ?assertEqual({{fraction,10,1},'SEK'},
               dense_from_discrete(discrete(1000,'SEK',{100,1}))),
  ?assertEqual({{fraction,1000,1},'XAU'},
               dense_from_discrete(discrete(31103477,'XAU',{31103477,1000}))),
  ?assertEqual({{fraction,10,1},'USD'},
               dense_from_discrete(discrete(1000,'USD',cent))),
  ?assertEqual({{fraction,21,2},'USD'},
               dense_from_discrete(discrete(1050,'USD',cent))).
%% Parsing decimals into dense values, with both default and custom separators.
dense_from_decimal_test() ->
  ?assertEqual({ok, {{fraction,10,1},'SEK'}},
               dense_from_decimal(default_decimal_conf(), <<"10">>, 'SEK')),
  {ok, {A,'SEK'}} = dense_from_decimal(default_decimal_conf(), <<"-1234.56789">>, 'SEK'),
  ?assertEqual(rationals:from_float(-1234.56789), A),
  {ok, {B,'SEK'}} = dense_from_decimal([{separators, separators_comma_dot()}],
                                       <<"-1.234,56789">>, 'SEK'),
  ?assertEqual(rationals:from_float(-1234.56789), B),
  ?assertEqual({error, invalid},
               dense_from_decimal(default_decimal_conf(), <<"">>, 'SEK')).
%% Rendering dense values, covering rounding, leading '+' and thousands
%% separators.
dense_to_decimal_test() ->
  ?assertEqual(<<"10.00">>,
               dense_to_decimal(default_decimal_conf(), round,
                                {{fraction,10,1},'SEK'})),
  ?assertEqual(<<"-1234.57">>,
               dense_to_decimal(default_decimal_conf(), round,
                                {{fraction,-123456789,100000},'SEK'})),
  ?assertEqual(<<"+1,234.5678">>,
               dense_to_decimal([{separators, separators_dot_comma()},
                                 {leading_plus, true},
                                 {digits, 4}
                                ],
                                floor,
                                {{fraction,123456789,100000},'SEK'})),
  ?assertEqual(<<"1,234,567.89">>,
               dense_to_decimal([{separators, separators_dot_comma()}, {digits, 2}], round,
                                {{fraction,123456789,100},'SEK'})).
%% Construction of discrete values from numeric scales, named units, and
%% binary currency codes (which go through currency/1).
discrete_test() ->
  ?assertEqual({10,{'SEK',{1,1}}}, discrete(10, 'SEK', {1,1})),
  ?assertEqual({1000,{'SEK',ore}}, discrete(1000, 'SEK', ore)),
  ?assertEqual({1000,{'SEK',ore}}, discrete(1000, 'SEK')),
  ?assertEqual({1000,{'SEK',ore}}, discrete(1000, <<"SEK">>)).
%% Accessors over discrete values.
discrete_currency_test() ->
  ?assertEqual(<<"SEK">>, discrete_currency(discrete(10,'SEK',{1,1}))).
discrete_amount_test() ->
  ?assertEqual(10, discrete_amount(discrete(10,'SEK',{1,1}))).
%% Fitting dense values into the currency's default scale, checking both the
%% discrete approximation and the dense remainder per rounding strategy.
discrete_from_dense_test() ->
  ?assertEqual({{1000,{'SEK',ore}}, {{fraction,0,1},'SEK'}},
               discrete_from_dense(round,
                                   dense_unsafe(rationals:new(10), 'SEK'))),
  ?assertEqual({{333,{'SEK',ore}}, {{fraction,1,300},'SEK'}},
               discrete_from_dense(round,
                                   dense_unsafe(rationals:new(10,3), 'SEK'))),
  ?assertEqual({{375,{'SEK',ore}}, {{fraction,0,1},'SEK'}},
               discrete_from_dense(floor,
                                   dense_unsafe(rationals:new(15,4), 'SEK'))),
  ?assertEqual({{334,{'SEK',ore}}, {{fraction,1,-150},'SEK'}},
               discrete_from_dense(ceiling,
                                   dense_unsafe(rationals:new(10,3), 'SEK'))).
%% Property-style check: for random amounts and every strategy,
%% dense_from_discrete(I) + R reconstructs the original dense value exactly.
discrete_from_dense_lossless_test() ->
  Numbers = [rand:uniform(10) + rand:uniform() || _ <- lists:seq(1, 1000)],
  lists:map(
    fun(N) ->
      lists:map(
        fun(Approximation) ->
          Dense = dense_unsafe(rationals:from_float(N), 'SEK'),
          {I,R} = discrete_from_dense(Approximation, Dense),
          {A,B} = {Dense, add(dense_from_discrete(I), R)},
          ?assertEqual({A,Approximation,I},
                       {B,Approximation,I})
        end, [round,floor,ceiling,truncate,half_even])
    end, Numbers).
%% Parsing decimals into discrete values; thousands separators must match the
%% configured ones or parsing fails with {error,invalid}.
discrete_from_decimal_test() ->
  ?assertEqual({ok, {1000,{'SEK',{100,1}}}},
               discrete_from_decimal(default_decimal_conf(), <<"10">>, 'SEK')),
  ?assertEqual({ok, {-123456,{'SEK',{100,1}}}},
               discrete_from_decimal(default_decimal_conf(), <<"-1234.56">>, 'SEK')),
  ?assertEqual({ok, {-123456789,{'SEK',{100000,1}}}},
               discrete_from_decimal([{separators,separators_comma_dot()}],
                                     <<"-1.234,56789">>, 'SEK', {100000,1})),
  ?assertEqual({error, invalid},
               discrete_from_decimal(default_decimal_conf(), <<"-1,234.56789">>, 'SEK')),
  ?assertEqual({error, invalid},
               dense_from_decimal(default_decimal_conf(), <<"">>, 'SEK')).
%% Rendering discrete values (delegates to dense_to_decimal/3).
discrete_to_decimal_test() ->
  ?assertEqual(<<"10.00">>,
               discrete_to_decimal(default_decimal_conf(), round,
                                   discrete(1000,'SEK',ore))),
  ?assertEqual(<<"-1234.56">>,
               discrete_to_decimal(default_decimal_conf(), round,
                                   discrete(-123456,'SEK',ore))),
  ?assertEqual(<<"+1,234.567">>,
               discrete_to_decimal([{separators, separators_dot_comma()},
                                    {leading_plus, true},
                                    {digits, 3},
                                    {scale, {1000,1}}
                                   ],
                                   floor,
                                   discrete(123456789, 'SEK', {100000,1}))).
%% Scale conversions and the polymorphic scale/1 dispatch.
scale_from_rational_test() ->
  ?assertEqual({ok,{1,2}}, scale_from_rational(rationals:from_float(0.5))).
scale_to_rational_test() ->
  ?assertEqual({fraction,1,2}, scale_to_rational({1,2})).
scale_test() ->
  ?assertEqual({100,1}, scale({'SEK',ore})),
  ?assertEqual({1,1}, scale({'SEK',krona})),
  ?assertEqual({100,1}, scale('SEK')),
  ?assertEqual({100,1}, scale({100,1})),
  ?assertEqual({100,1}, scale(discrete(1000,'SEK',ore))).
%% Arithmetic on both discrete and dense representations.
add_test() ->
  ?assertEqual({20,{'SEK',{1,1}}},
               add(discrete(10,'SEK',{1,1}),
                   discrete(10,'SEK',{1,1}))),
  ?assertEqual({{fraction,20,1},'SEK'},
               add(dense_unsafe({fraction,10,1},'SEK'),
                   dense_unsafe({fraction,10,1},'SEK'))).
subtract_test() ->
  ?assertEqual({20,{'SEK',{1,1}}},
               subtract(discrete(30,'SEK',{1,1}),
                        discrete(10,'SEK',{1,1}))),
  ?assertEqual({{fraction,20,1},'SEK'},
               subtract(dense_unsafe({fraction,30,1},'SEK'),
                        dense_unsafe({fraction,10,1},'SEK'))).
%% Scalar multiplication: integer factor for discrete, rational for dense.
multiply_test() ->
  ?assertEqual({8,{'SEK',{1,1}}},
               multiply(discrete(2,'SEK',{1,1}), 4)),
  ?assertEqual({{fraction,8,1},'SEK'},
               multiply(dense_unsafe({fraction,2,1},'SEK'), {fraction,4,1})).
%% Comparison operators for both representations.
is_greater_than_test() ->
  ?assertEqual(true, is_greater_than(dense_unsafe({fraction,10,1},'SEK'),
                                     dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(false, is_greater_than(dense_unsafe({fraction,10,3},'SEK'),
                                      dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual(true, is_greater_than(discrete(1000,'SEK',ore),
                                     discrete(500,'SEK',ore))),
  ?assertEqual(false, is_greater_than(discrete(500,'SEK',ore),
                                      discrete(1000,'SEK',ore))).
is_less_than_test() ->
  ?assertEqual(false, is_less_than(dense_unsafe({fraction,10,1},'SEK'),
                                   dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(true, is_less_than(dense_unsafe({fraction,10,3},'SEK'),
                                  dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual(false, is_less_than(discrete(1000,'SEK',ore),
                                   discrete(500,'SEK',ore))),
  ?assertEqual(true, is_less_than(discrete(500,'SEK',ore),
                                  discrete(1000,'SEK',ore))).
is_equal_to_test() ->
  ?assertEqual(false, is_equal_to(dense_unsafe({fraction,10,1},'SEK'),
                                  dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(true, is_equal_to(dense_unsafe({fraction,10,3},'SEK'),
                                 dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(false, is_equal_to(discrete(1000,'SEK',ore),
                                  discrete(500,'SEK',ore))),
  ?assertEqual(true, is_equal_to(discrete(500,'SEK',ore),
                                 discrete(500,'SEK',ore))).
is_greater_or_equal_test() ->
  ?assertEqual(true, is_greater_or_equal(dense_unsafe({fraction,10,1},'SEK'),
                                         dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(true, is_greater_or_equal(dense_unsafe({fraction,10,3},'SEK'),
                                         dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(false, is_greater_or_equal(dense_unsafe({fraction,10,3},'SEK'),
                                          dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual(true, is_greater_or_equal(discrete(1000,'SEK',ore),
                                         discrete(500,'SEK',ore))),
  ?assertEqual(true, is_greater_or_equal(discrete(500,'SEK',ore),
                                         discrete(500,'SEK',ore))),
  ?assertEqual(false, is_greater_or_equal(discrete(500,'SEK',ore),
                                          discrete(1000,'SEK',ore))).
is_less_or_equal_test() ->
  ?assertEqual(false, is_less_or_equal(dense_unsafe({fraction,10,1},'SEK'),
                                       dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(true, is_less_or_equal(dense_unsafe({fraction,10,3},'SEK'),
                                      dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual(true, is_less_or_equal(dense_unsafe({fraction,10,3},'SEK'),
                                      dense_unsafe({fraction,10,3},'SEK'))),
  ?assertEqual(false, is_less_or_equal(discrete(1000,'SEK',ore),
                                       discrete(500,'SEK',ore))),
  ?assertEqual(true, is_less_or_equal(discrete(500,'SEK',ore),
                                      discrete(1000,'SEK',ore))),
  ?assertEqual(true, is_less_or_equal(discrete(500,'SEK',ore),
                                      discrete(500,'SEK',ore))).
%% Unary and binary helpers.
neg_test() ->
  ?assertEqual({{fraction,-10,1},'SEK'}, neg(dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual({-1000,{'SEK',ore}}, neg(discrete(1000,'SEK',ore))).
abs_test() ->
  ?assertEqual({{fraction,10,1},'SEK'}, abs(dense_unsafe({fraction,-10,1},'SEK'))),
  ?assertEqual({{fraction,10,1},'SEK'}, abs(dense_unsafe({fraction,10,1},'SEK'))),
  ?assertEqual({1000,{'SEK',ore}}, abs(discrete(-1000,'SEK',ore))),
  ?assertEqual({1000,{'SEK',ore}}, abs(discrete(1000,'SEK',ore))).
max_test() ->
  ?assertEqual({{fraction,10,1},'SEK'}, max({{fraction,10,1},'SEK'},
                                            {{fraction,1,1},'SEK'})),
  ?assertEqual({1000,{'SEK',{1,1}}}, max({1000,{'SEK',{1,1}}},
                                         {100,{'SEK',{1,1}}})).
min_test() ->
  ?assertEqual({{fraction,1,1},'SEK'}, min({{fraction,10,1},'SEK'},
                                           {{fraction,1,1},'SEK'})),
  ?assertEqual({100,{'SEK',{1,1}}}, min({1000,{'SEK',{1,1}}},
                                        {100,{'SEK',{1,1}}})).
%% Table-driven check of approximate/2 for every strategy, on positive and
%% negative values. Note the half_even rows: on exact .5 ties the expected
%% remainder is relative to the *truncated* value, unlike the other
%% strategies (this pins the current, inconsistent remainder convention).
approximate_test() ->
  lists:foreach(fun({Approx,R,ExpectedNum,ExpectedRest}) ->
                  {Num,Rest} = approximate(Approx, R),
                  ?assertEqual({Approx,R,ExpectedNum,Rest,true},
                               {Approx,R,Num,Rest,
                                rationals:is_equal_to(ExpectedRest, Rest)
                               })
                end,
                [{round, rationals:new(33,10), 3, rationals:new(3,10)},
                 {round, rationals:new(35,10), 4, rationals:new(-1,2)},
                 {round, rationals:new(25,10), 3, rationals:new(-1,2)},
                 {round, rationals:new(36,10), 4, rationals:new(-2,5)},
                 {round, rationals:new(-33,10), -3, rationals:new(-3,10)},
                 {round, rationals:new(-35,10), -4, rationals:new(1,2)},
                 {round, rationals:new(-25,10), -3, rationals:new(1,2)},
                 {round, rationals:new(-36,10), -4, rationals:new(2,5)},
                 {floor, rationals:new(33,10), 3, rationals:new(3,10)},
                 {floor, rationals:new(35,10), 3, rationals:new(1,2)},
                 {floor, rationals:new(25,10), 2, rationals:new(1,2)},
                 {floor, rationals:new(36,10), 3, rationals:new(3,5)},
                 {floor, rationals:new(-33,10), -4, rationals:new(7,10)},
                 {floor, rationals:new(-35,10), -4, rationals:new(1,2)},
                 {floor, rationals:new(-25,10), -3, rationals:new(1,2)},
                 {floor, rationals:new(-36,10), -4, rationals:new(2,5)},
                 {ceiling, rationals:new(33,10), 4, rationals:new(-7,10)},
                 {ceiling, rationals:new(35,10), 4, rationals:new(-1,2)},
                 {ceiling, rationals:new(25,10), 3, rationals:new(-1,2)},
                 {ceiling, rationals:new(36,10), 4, rationals:new(-2,5)},
                 {ceiling, rationals:new(-33,10), -3, rationals:new(-3,10)},
                 {ceiling, rationals:new(-35,10), -3, rationals:new(-1,2)},
                 {ceiling, rationals:new(-25,10), -2, rationals:new(-1,2)},
                 {ceiling, rationals:new(-36,10), -3, rationals:new(-3,5)},
                 {truncate, rationals:new(33,10), 3, rationals:new(3,10)},
                 {truncate, rationals:new(35,10), 3, rationals:new(1,2)},
                 {truncate, rationals:new(25,10), 2, rationals:new(1,2)},
                 {truncate, rationals:new(36,10), 3, rationals:new(3,5)},
                 {truncate, rationals:new(-33,10), -3, rationals:new(-3,10)},
                 {truncate, rationals:new(-35,10), -3, rationals:new(-1,2)},
                 {truncate, rationals:new(-25,10), -2, rationals:new(-1,2)},
                 {truncate, rationals:new(-36,10), -3, rationals:new(-3,5)},
                 {half_even, rationals:new(33,10), 3, rationals:new(3,10)},
                 {half_even, rationals:new(35,10), 4, rationals:new(1,2)},
                 {half_even, rationals:new(25,10), 2, rationals:new(1,2)},
                 {half_even, rationals:new(36,10), 4, rationals:new(-2,5)},
                 {half_even, rationals:new(-33,10), -3, rationals:new(-3,10)},
                 {half_even, rationals:new(-35,10), -4, rationals:new(-1,2)},
                 {half_even, rationals:new(-25,10), -2, rationals:new(-1,2)},
                 {half_even, rationals:new(-36,10), -4, rationals:new(2,5)}
                ]).
-endif.
%% Copyright 2014 Erlio GmbH Basel Switzerland (http://erl.io)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_updo).
-include_lib("sasl/src/systools.hrl").
-export([run/0,
run/1,
dry_run/0,
dry_run/1,
dry_run_low/0,
dry_run_low/1]).
%% @doc Hot-upgrade every loaded module whose beam file on disk differs from
%% the loaded version, by generating and evaluating a low-level script.
run() ->
  translate_and_run(high_level_script()).
%% @doc Return the generated high-level (appup-style) instructions without
%% executing anything.
dry_run() ->
  high_level_script().
%% @doc Return the translated low-level (relup-style) instructions without
%% executing anything.
dry_run_low() ->
  {_, LLinstrs} = translate_and_check(high_level_script()),
  LLinstrs.
%%% The following functions are the same as above, except they take a file
%%% containing additional high or low level instructions that are appended to
%%% the automatically generated script.
%% @doc Like run/0, with extra instructions read from Filename appended.
run(Filename) ->
  translate_and_run(high_level_script(Filename)).
%% @doc Like dry_run/0, with extra instructions read from Filename appended.
dry_run(Filename) ->
  high_level_script(Filename).
%% @doc Like dry_run_low/0, with extra instructions read from Filename appended.
dry_run_low(Filename) ->
  {_, LLinstrs} = translate_and_check(high_level_script(Filename)),
  LLinstrs.
%%% ========================================================================
%%% Internal
%%% ========================================================================
%% High-level (appup-style) upgrade script covering every stale loaded module.
high_level_script() ->
  script(updated_modules()).
%% Same, with user-supplied extra instructions appended from Filename.
high_level_script(Filename) ->
  high_level_script() ++ read_script(Filename).
%% All loaded, non-sticky modules whose beam file on disk differs from the
%% loaded code. code:all_loaded/0 returns a non-list (e.g. an atom such as
%% 'preloaded') for modules without a file, hence the is_list/1 filter.
updated_modules() ->
  [ MF || {Mod, Filename} = MF <- code:all_loaded(),
          is_list(Filename),
          not code:is_sticky(Mod),
          is_updated(Mod, Filename) ].
%% Compare the loaded module's vsn attribute against the vsn recorded in the
%% beam file on disk; an unreadable beam file ({error,_,_}) counts as not
%% updated rather than failing the whole scan.
is_updated(Module, Filename) ->
  LoadedVer = proplists:get_value(vsn, Module:module_info(attributes)),
  case beam_lib:version(Filename) of
    {ok, {_, FileVer}} -> FileVer /= LoadedVer;
    {error, _, _} -> false
  end.
%% Read extra high-level instructions from a file (one term per entry, as
%% accepted by file:consult/1). Logs and exits on parse or I/O failures.
read_script(Filename) ->
  case file:consult(Filename) of
    {ok, HLinstrs} ->
      HLinstrs;
    {error, Error} when is_tuple(Error) ->
      % Tuple errors are {Line, Mod, Term} parse failures from file:consult/1.
      error_logger:error_msg("Failed to parse line ~s~n", [file:format_error(Error)]),
      exit(parse_error);
    {error, Error} ->
      % Anything else is a posix-style open/read failure (enoent, eacces, ...).
      error_logger:error_msg("Failed to open file ~s~n", [file:format_error(Error)]),
      exit(Error)
  end.
%%%
%%% Generate high level upgrade scripts from the instruction set of
%%% appup files.
%%%
%%% See http://www.erlang.org/doc/man/appup.html
%%%     http://www.erlang.org/doc/design_principles/appup_cookbook.html
%%%
%% Return a list of upgrade instructions for the given list of modules.
%%
script(ModFiles) ->
  lists:map(fun instruction/1, dependencies(ModFiles)).
%% Pick the appup instruction for a single module and its dependencies.
%%
%% Supervisors get {update, Mod, supervisor}; "special processes" (modules
%% exporting code_change/3 or system_code_change/4) get a suspended
%% {update, ..., {advanced, []}} upgrade; plain modules are simply reloaded.
instruction({Mod, Deps}) ->
  case is_supervisor(Mod) of
    true -> {update, Mod, supervisor};
    false -> code_instruction(Mod, Deps)
  end.
%% Instruction for a non-supervisor module.
code_instruction(Mod, Deps) ->
  case is_special(Mod) of
    true -> {update, Mod, {advanced, []}, Deps};
    false -> {load_module, Mod, Deps}
  end.
%% Establish the dependencies between a list of modules.
%%
%% A tuple is returned for each module with its name and a (possibly
%% empty) list of the other modules it makes calls to.
%%
%% The xref query "strict ME || AM" selects module call edges (excluding
%% self-edges) whose target is also among the analyzed modules.
dependencies(ModFiles) ->
  {Mods, Filenames} = lists:unzip(ModFiles),
  {ok, Xref} = xref:start([{xref_mode, modules}]),
  {ok, Calls} =
    try add_files_to_xref(Xref, Filenames),
        xref:q(Xref, "strict ME || AM")
    after
      % Always stop the xref server, even when analysis fails.
      xref:stop(Xref)
    end,
  [ {Caller, proplists:get_all_values(Caller, Calls)} || Caller <- Mods ].
%% Add each beam file to the xref server; crashes (badmatch) on the first
%% file xref cannot analyze.
add_files_to_xref(Xref, [Filename|T]) ->
  {ok, _} = xref:add_module(Xref, Filename, [{warnings, false}]),
  add_files_to_xref(Xref, T);
add_files_to_xref(_, []) ->
  ok.
%% Is this the module for a "special process" (as it is known in OTP),
%% meaning a process running under a supervisor.
%%
is_special(Mod) ->
  Exports = Mod:module_info(exports),
  lists:member({code_change, 3}, Exports)
    orelse lists:member({system_code_change, 4}, Exports).
%% True when the module declares the supervisor behaviour (either spelling
%% of the attribute).
is_supervisor(Mod) ->
  Attrs = Mod:module_info(attributes),
  lists:member(supervisor, proplists:get_value(behaviour, Attrs, [])
                           ++ proplists:get_value(behavior, Attrs, [])).
%% Translate the high-level instructions, sanity-check the resulting
%% low-level script, and evaluate it in the running system.
translate_and_run(HLinstrs) ->
  {Apps, LLinstrs} = translate(HLinstrs, loaded_apps()),
  LibDirs = app_lib_dirs(Apps),
  check_script(LLinstrs, LibDirs),
  % The interface to release_handler_1 changed in R15.
  _ = code:ensure_loaded(release_handler_1),
  release_handler_1:eval_script(LLinstrs, [], LibDirs, LibDirs, []).
%% Translate and check the script but do not evaluate it; used by the
%% dry_run_low variants.
translate_and_check(HLinstrs) ->
  {Apps, LLinstrs} = translate(HLinstrs, loaded_apps()),
  LibDirs = app_lib_dirs(Apps),
  check_script(LLinstrs, LibDirs),
  {ok, LLinstrs}.
%% Translate high-level (appup-style) instructions into low-level (relup
%% style) instructions via systools_rc, given the currently loaded
%% applications. Returns {AppsAfterUpgrade, LowLevelInstructions}; exits
%% with parse_error when systools_rc rejects the script.
translate(HLinstrs, AppsNow) ->
  AppsAfter = apps_after(HLinstrs, AppsNow),
  AppsNowRecs = app_records(AppsNow),
  AppAfterRecs = app_records(AppsAfter),
  case systools_rc:translate_scripts([HLinstrs], AppAfterRecs, AppsNowRecs) of
    {ok, LLinstrs} ->
      {AppsAfter, LLinstrs};
    {error, systools_rc, Error} ->
      % Pass the rendered message as an argument, not as the format string:
      % a message containing '~' would otherwise crash error_logger.
      error_logger:error_msg("~s", [systools_rc:format_error(Error)]),
      exit(parse_error)
  end.
%% Validate the low-level script with release_handler_1 before evaluating
%% it; exits with the checker's reason on failure.
check_script(LLinstrs, LibDirs) ->
  case release_handler_1:check_script(LLinstrs, LibDirs) of
    {ok, _} ->
      ok;
    {error, Error} ->
      exit(Error)
  end.
%% Applications that will be present after the script runs: the current set
%% plus every add_application, minus every remove_application, deduplicated
%% and sorted.
apps_after(HLinstrs, Before) ->
  {Added, Removed} =
    lists:foldl(
      fun({add_application, App}, {Add, Rem}) -> {[App | Add], Rem};
         ({remove_application, App}, {Add, Rem}) -> {Add, [App | Rem]};
         (_, Acc) -> Acc
      end, {[], []}, HLinstrs),
  lists:usort(Before ++ Added) -- Removed.
%% Names of all currently loaded OTP applications.
loaded_apps() ->
  [ App || {App, _, _} <- application:loaded_applications() ].
%% Minimal #application{} records (name + modules) in the shape required by
%% systools_rc:translate_scripts/3.
app_records(Apps) ->
  [ #application{name = A, modules = find_app_modules(A)} || A <- Apps ].
%% Modules of an application, derived from the beam files found in its ebin
%% directory; exits when the application's lib dir cannot be located.
%% (list_to_atom/1 is applied to local beam filenames only, a trusted source.)
find_app_modules(App) ->
  Ext = code:objfile_extension(),
  case code:lib_dir(App, ebin) of
    Path when is_list(Path) ->
      Files = filelib:wildcard("*" ++ Ext, Path),
      [ list_to_atom(filename:basename(F, Ext)) || F <- Files ];
    {error, _} ->
      error_logger:error_msg("Can't find lib dir for application '~s'~n", [App]),
      exit({unknown_application, App})
  end.
%% Pair each application with its lib dir in the {App, Vsn, Dir} shape
%% release_handler_1 expects; the version field is intentionally empty.
app_lib_dirs(Apps) ->
    [ {App, "", code:lib_dir(App)} || App <- Apps ]. | src/vmq_updo.erl | 0.523664 | 0.417331 | vmq_updo.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2011 InakaLabs SRL
%%% @doc Benchmarker. Given a module to test with, it helps users determine
%%% the order of functions
%%% @end
%%%-------------------------------------------------------------------
-module(edis_bench).
-author('<NAME> <<EMAIL>>').
-author('<NAME> <<EMAIL>>').
-include("edis.hrl").
-include("edis_bench.hrl").
-type symbols() :: #symbols{}.
-export_type([symbols/0]).
-type option() :: {start, pos_integer} | {step, pos_integer()} | {rounds, pos_integer()} |
{extra_args, [term()]} | {outliers, pos_integer()} | {columns, pos_integer()} |
{first_col, pos_integer()} | {rows, pos_integer()} | debug | {k, number()} | {x, number()} |
{symbols, symbols()}.
-export_type([option/0]).
-export([bench/4, bench/3, bench/2, behaviour_info/1]).
-export([zero/1, constant/1, linear/1, quadratic/1, logarithmic/1, xlogarithmic/1, exponential/1]).
%% ====================================================================
%% External functions
%% ====================================================================
%% @hidden
%% Lists the callbacks a benchmark module must export.  Per the OTP
%% behaviour convention, any argument other than 'callbacks' yields
%% 'undefined' instead of crashing with function_clause.
-spec behaviour_info(callbacks|term()) -> [{atom(), non_neg_integer()}] | undefined.
behaviour_info(callbacks) ->
  [{all, 0},
   {init, 1}, {init_per_testcase, 2}, {init_per_round, 3},
   {quit, 1}, {quit_per_testcase, 2}, {quit_per_round, 3}];
behaviour_info(_Other) ->
  undefined.
%% @doc Runs all the benchmarking functions on Module against {@link zero/0} function.
%% The list is obtained calling Module:all().
-spec bench(atom(), [option()]) -> ok.
%% Optional callbacks (init/1, quit/1) may be missing from Module: an
%% 'undef' raised by those calls is deliberately swallowed.  quit/1
%% always runs, even if a test case crashes.
bench(Module, Options) ->
  ok = try Module:init(proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end,
  try
    lists:foreach(
      fun(Function) ->
              io:format("~n~p:~p ...~n", [Module, Function]),
              bench(Module, Function, zero, Options)
      end, Module:all())
  after
    try Module:quit(proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end
  end.
%% @doc Compares the different runs of Module:Function to the given math
%% function and plots the comparison; returns the result of graph/2.
-spec bench(atom(), atom(), atom() | fun((pos_integer()) -> number()), [option()]) -> ok.
%% An atom MathFunction names one of the exported comparers below; the
%% first clause wraps it into a fun before delegating.
bench(Module, Function, MathFunction, Options) when is_atom(MathFunction) ->
  bench(Module, Function, fun(X) -> ?MODULE:MathFunction(X) end, Options);
bench(Module, Function, MathFunction, Options) ->
  RawResults = run(Module, Function, Options),
  % Each point is {N, Measured, K * MathFunction(N) + X} where K and X
  % come from the options (defaults 1 and 0).
  graph(
    [{K, V, proplists:get_value(x, Options, 0) +
        (proplists:get_value(k, Options, 1) * MathFunction(K))} || {K,V} <- RawResults],
    Options).
%% @doc Compares the different runs of Module1:Function1 with Module2:Function2.
%% Returns the mean of the relative differences (V1-V2)/V2, outliers and
%% zero samples excluded.  The closer to zero, the more similar the runs.
%% NOTE(review): crashes with badarith when every sample is filtered out
%% (empty Diffs) -- confirm whether callers guard against that.
-spec bench({atom(), atom(), [term()]}, {atom(), atom(), [term()]}, [option()]) -> float().
bench({Module1, Function1, ExtraArgs1}, {Module2, Function2, ExtraArgs2}, Options) ->
  RawResults1 = run(Module1, Function1, [{extra_args, ExtraArgs1}|Options]),
  RawResults2 = run(Module2, Function2, [{extra_args, ExtraArgs2}|Options]),
  RawResults = lists:zipwith(fun({K,V1}, {K,V2}) -> {K, V1, V2} end, RawResults1, RawResults2),
  graph(RawResults, Options),
  Diffs = [(V1-V2)/V2 || {_K,V1,V2} <- remove_outliers(RawResults, Options), V1 /= 0, V2 /= 0],
  case proplists:get_bool(debug, Options) of
    true -> lager:info("Diffs: ~p~n", [Diffs]);
    false -> ok
  end,
  lists:sum(Diffs) / erlang:length(Diffs).
%% ====================================================================
%% Math functions
%% ====================================================================
%% @doc O(1) comparer that ignores its input and always yields 0
%% (flat baseline for bench/4).
-spec zero(pos_integer()) -> 0.
zero(_) -> 0.
%% @doc O(1) comparer
-spec constant(pos_integer()) -> pos_integer().
constant(_) -> 1.
%% @doc O(n) comparer
-spec linear(pos_integer()) -> pos_integer().
linear(N) -> N.
%% @doc O(n^2) comparer
-spec quadratic(pos_integer()) -> pos_integer().
quadratic(N) -> N * N.
%% @doc O(log(n)) comparer; offset by 1 so the curve never reaches 0.
-spec logarithmic(pos_integer()) -> float().
logarithmic(N) -> math:log(N) + 1.
%% @doc O(n*log(n)) comparer; offset by 1 for the same reason.
-spec xlogarithmic(pos_integer()) -> float().
xlogarithmic(N) -> N * math:log(N) + 1.
%% @doc O(e^n) comparer.  Uses math:exp/1 directly instead of raising a
%% hand-written (truncated) approximation of e with math:pow/2.
-spec exponential(pos_integer()) -> float().
exponential(N) -> math:exp(N).
%% ====================================================================
%% Internal functions
%% ====================================================================
%% @doc Runs the benchmarking function Module:Function using options.
%% Optional callbacks may be missing ('undef' is swallowed); quit hooks
%% always run via the after clauses.
-spec run(atom(), atom(), [option()]) -> [{pos_integer(), error | pos_integer()}].
run(Module, Function, Options) ->
  ok = try Module:init(proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end,
  try do_run(Module, Function, Options)
  after
    try Module:quit(proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end
  end.
%% Run one test case over the configured sequence of input sizes:
%% start, start+step, ... for 'rounds' steps (defaults 1, 1, 250).
do_run(Module, Function, Options) ->
  ok = try Module:init_per_testcase(Function, proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end,
  Start = proplists:get_value(start, Options, 1),
  try lists:map(fun(N) -> do_run(Module, Function, N, Options) end,
                lists:seq(Start,
                          Start + proplists:get_value(rounds, Options, 250) *
                            proplists:get_value(step, Options, 1),
                          proplists:get_value(step, Options, 1)))
  after
    try Module:quit_per_testcase(Function, proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end
  end.
%% Time one round with an N-element input; returns {N, Millis} or
%% {N, error} when the benched function crashes (the error is logged).
do_run(Module, Function, N, Options) ->
  % Items is [<<"N">>, ..., <<"1">>] built via edis_util.
  Items = lists:reverse(lists:map(fun edis_util:integer_to_binary/1, lists:seq(1, N))),
  ok = try Module:init_per_round(Function, Items, proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end,
  try timer:tc(Module, Function, [Items | proplists:get_value(extra_args, Options, [])]) of
    {Time, Result} ->
      case proplists:get_bool(debug, Options) of
        true -> lager:info("~p: ~p~n\t~p~n", [N, Time/1000, Result]);
        false -> ok
      end,
      % +1 microsecond keeps the reported duration strictly positive.
      {N, (Time+1)/1000}
  catch
    _:Error ->
      lager:error("Error on ~p:~p (N: ~p):~n\t~p~n", [Module, Function, N, Error]),
      {N, error}
  after
    try Module:quit_per_round(Function, Items, proplists:get_value(extra_args, Options, [])) catch _:undef -> ok end
  end.
%% Render an ASCII plot of the selected window of results on stdout.
%% Throws 'everything_is_an_error' when no sample survived (atoms
%% compare greater than numbers, so an all-error column surfaces as
%% Top/Bottom = error here).
graph(Results, Options) ->
  RawData = lists:sublist(Results,
                          proplists:get_value(first_col, Options, 1),
                          erlang:min(proplists:get_value(columns, Options, 250),
                                     proplists:get_value(rounds, Options, 250))),
  case proplists:get_bool(debug, Options) of
    true -> lager:info("RawData:~n\t~p~n", [RawData]);
    false -> ok
  end,
  Data = remove_outliers(RawData, Options),
  Top = lists:max([erlang:max(V, M) || {_, V, M} <- Data]),
  Bottom = erlang:trunc(lists:min([erlang:min(V, M) || {_, V, M} <- Data, V > 0, M > 0]) / 2),
  Step =
    case {Top, Bottom} of
      {error, _} -> throw(everything_is_an_error);
      {_, error} -> throw(everything_is_an_error);
      _ ->
        (Top - Bottom) / proplists:get_value(rows, Options, 70)
    end,
  graph(Top, Bottom, Step, proplists:get_value(symbols, Options, #symbols{}), Data).
%% Zero out the measured value of outlier samples instead of dropping
%% them, so the x-axis keeps one column per sample.  Outliers are the
%% error samples plus the top (outliers/2)+1 values of each series.
remove_outliers(RawData, Options) ->
  SortedBy2 = lists:keysort(2, [{K, V, M} || {K, V, M} <- RawData, V =/= error, M =/= error]),
  SortedBy3 = lists:keysort(3, [{K, V, M} || {K, V, M} <- RawData, V =/= error, M =/= error]),
  Outliers =
    [{K, error, M} || {K, error, M} <- RawData] ++ [{K, V, error} || {K, V, error} <- RawData] ++
      lists:sublist(lists:reverse(SortedBy2), 1, erlang:trunc(proplists:get_value(outliers, Options, 20) / 2) + 1) ++
      lists:sublist(lists:reverse(SortedBy3), 1, erlang:trunc(proplists:get_value(outliers, Options, 20) / 2) + 1),
  [case lists:member({K,V,M}, Outliers) of
     true -> {K, 0, M};
     false -> {K, V, M}
   end || {K,V,M} <- RawData].
%% Print one plot row per recursion step, from Top down to Bottom, then
%% finish with the x-axis (a dashed line plus sample keys mod 10).
%% Each column picks a symbol from #symbols{} depending on whether the
%% measured (V) and reference (M) values fall in the upper or lower half
%% of the current row's band.
graph(Top, Bottom, _Step, _Symbols, Data) when Top =< Bottom ->
  io:format("        ~s~n", [lists:duplicate(length(Data), $-)]),
  io:format("        ~s~n", [lists:map(fun({K, _, _}) -> integer_to_list(K rem 10) end, Data)]);
graph(Top, Bottom, Step, Symbols, Data) ->
  io:format("~7.2.0f~s~n",
            [Top * 1.0,
             lists:map(
               fun({_, V, M}) when Top >= V, V > Top - Step,
                                   Top >= M, M > Top - Step ->
                       case {Top - V, Top - M} of
                         {Pos, Mos} when Pos < Step/2, Mos < Step/2 -> Symbols#symbols.up_up;
                         {Pos, Mos} when Pos < Step/2, Mos >= Step/2 -> Symbols#symbols.up_down;
                         {Pos, Mos} when Pos >= Step/2, Mos < Step/2 -> Symbols#symbols.down_up;
                         {Pos, Mos} when Pos >= Step/2, Mos >= Step/2 -> Symbols#symbols.down_down
                       end;
                  ({_, V, _M}) when Top >= V, V > Top - Step ->
                       case Top - V of
                         Pos when Pos < Step/2 -> Symbols#symbols.up_none;
                         Pos when Pos >= Step/2 -> Symbols#symbols.down_none
                       end;
                  ({_, _V, M}) when Top >= M, M > Top - Step ->
                       case Top - M of
                         Pos when Pos < Step/2 -> Symbols#symbols.none_up;
                         Pos when Pos >= Step/2 -> Symbols#symbols.none_down
                       end;
                  (_) -> Symbols#symbols.none_none
               end, Data)]),
  graph(Top-Step, Bottom, Step, Symbols, Data). | test/edis_bench.erl | 0.585338 | 0.460228 | edis_bench.erl | starcoder |
%%% Copyright (c) 2007, 2008, 2009 JackNyfe, Inc. <<EMAIL>>.
%%% See the accompanying LICENSE file.
%% vim: ts=4 sts=4 sw=4 expandtab
-module(jn_mavg).
%%
%% This module implements exponential moving average logic,
%% a useful data structure to store hits/second averaged over some time period.
%%
%% For a general description see:
%% http://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
%%
-export([
bump_mavg/2,
bump_nodelay/2,
getEventsPer/2,
getEventsPer_nobump/2,
getEventsPer_noround/2,
getProperties/1,
getImmediateRate/1,
get_current/1,
history/1,
new_mavg/1,
new_mavg/2
]).
% Time/Event moving average representation
-record(ecnt, {
counter = 0, % Number of events counter
period_start = 0, % Timestamp of period start
history = [], % Counters: list of tuples {PeriodStart,Count}
archived_events = 0, % Total number of seen and archived events.
history_length = 3 % Max length of history list
}).
-record(mavg, {
period = 300, % Smoothing window
createts, % Time of creation of this structure
lastupdatets, % Last update time stamp
unprocessedEvents = 0, % Number of events not counted in historicAvg
historicAvg = 0.0, % Number of events in this period (float)
eventCounter = #ecnt{} % Collect absolute number of events
}).
%% Construct a moving average tracker with a specified period.
%% This is a shortcut which specifies default options.
%% @spec new_mavg(SmoothingWindow) -> record(mavg)
%% Type SmoothingWindow = 30 | 300 | 86400 | int()
new_mavg(SmoothingWindow) -> new_mavg(SmoothingWindow, []).
%% New way of constructing moving average trackers.
%% @spec new_mavg(SmoothingWindow, [Option]) -> record(mavg)
%% Type Option =
%%      {start_time, int()}
%%      | {start_events, int()}
%%      | {history_length, int()}
new_mavg(SmoothingWindow, Options) when
        is_integer(SmoothingWindow), SmoothingWindow > 10, is_list(Options) ->
    Time = proplists:get_value(start_time, Options, unixtime()),
    Events = proplists:get_value(start_events, Options, 0),
    HLength = proplists:get_value(history_length, Options,
                    (#ecnt{})#ecnt.history_length),
    #mavg{period = SmoothingWindow, lastupdatets = Time, createts = Time,
        eventCounter = updateEventCounter(Events,
                #ecnt{history_length=HLength},
                Time, SmoothingWindow),
        unprocessedEvents = Events };
%% NOTE(review): the clause above also matches [immediate] whenever
%% SmoothingWindow > 10, so this float-timestamped variant is only
%% reachable for windows in 1..10 -- confirm that is intended.
new_mavg(SmoothingWindow, [immediate]) when SmoothingWindow>0 ->
    Time = unixtime_float(),
    #mavg{period = SmoothingWindow, lastupdatets = Time, createts = Time,
        eventCounter = updateEventCounter(0,
                #ecnt{history_length=5}, Time, SmoothingWindow),
        unprocessedEvents = 0 };
%% Old way of constructing moving average trackers.
%% Create a new mavg record with a specified smoothing period.
%% @spec new_mavg(int(), int()) -> record(mavg)
new_mavg(SmoothingWindow, Events) when
        is_integer(SmoothingWindow), SmoothingWindow > 10,
        is_integer(Events), Events >= 0 ->
    new_mavg(SmoothingWindow, [{start_events, Events},
            {start_time, unixtime()}]).
% Add some number of events into the time counter.
%% @spec bump_mavg(record(mavg), int()) -> record(mavg)
%% @spec bump_mavg(record(mavg), int(), Unixtime) -> record(mavg)
% Convert old version into new version
%% Old-layout records (6 fields, no event counter) are upgraded first.
bump_mavg(MA, Events) when tuple_size(MA) == 6 ->
    bump_mavg(upgrade_record(MA), Events);
bump_mavg(MA, Events) -> bump_mavg(MA, Events, unixtime()).
%% Fold Events (observed at integer timestamp T) into the exponential
%% moving average and the absolute event counter.
bump_mavg(MA, Events, T) when
        is_record(MA, mavg),
        is_integer(Events), Events >= 0,
        is_integer(T) ->
    #mavg{ period = Period, lastupdatets = Updated,
        unprocessedEvents = HoldEvs, historicAvg = Average,
        eventCounter = Counter } = MA,
    UpdatedCounter = updateEventCounter(Events, Counter, T, Period),
    Elapsed = T - Updated,
    if
        % We lose precision if we incorporate each update
        % into the pool right away, therefore we collect events
        % and update them not earlier than once a second or so.
        Elapsed =:= 0 -> MA#mavg{unprocessedEvents = HoldEvs + Events,
                eventCounter = UpdatedCounter };
        Elapsed < (8 * Period), Elapsed > 0 ->
            %% Integrate HoldEvs, since they're for a single period
            HoldAvg = (Average - HoldEvs) * math:exp(-1/Period) + HoldEvs,
            %% Integrate zero-filled periods, of which there are (Elapsed-1)
            ZeroAvg = HoldAvg * math:exp((1-Elapsed)/Period),
            MA#mavg{unprocessedEvents = Events, historicAvg = ZeroAvg,
                lastupdatets = T, eventCounter = UpdatedCounter };
        % More than 8 periods elapsed (or time ran backwards): the old
        % average has decayed to (practically) nothing, so reset it.
        true ->
            MA#mavg{unprocessedEvents = Events, historicAvg = 0.0,
                lastupdatets = T, eventCounter = UpdatedCounter }
    end.
bump_nodelay(MA, Events) ->
    bump_nodelay(MA, Events, unixtime_float()).
%% Track the average inter-arrival interval instead of a per-period
%% count.  NOTE(review): the event count is ignored here (each call
%% counts as exactly one arrival) and unprocessedEvents must be 0 --
%% confirm callers only use this on trackers built with [immediate].
bump_nodelay(MA, _Events, T) when is_float(T) ->
    #mavg{ period = Period, lastupdatets = Updated,
        unprocessedEvents = 0, historicAvg = Average } = MA,
    Elapsed = T - Updated,
    Alpha = 1 - math:exp(-1/Period),
    NewAvg = Average + Alpha * (Elapsed - Average),
    MA#mavg{historicAvg = NewAvg, lastupdatets = T}.
%% Events per second derived from the averaged interval (1/Avg), damped
%% by how long ago the last event was seen; 0.0 when nothing was seen.
getImmediateRate(MA) ->
    #mavg{ period = Period, historicAvg = Avg, lastupdatets = Updated } = MA,
    Elapsed = unixtime_float() - Updated,
    case Avg of
        0 -> 0.0;
        0.0 -> 0.0;
        _ -> math:exp((-Elapsed)/(Period * Avg)) / Avg
    end.
%% Extend old 5-field #ecnt{} tuples with a history_length field:
%% 10 periods for day-sized windows, 3 otherwise.
ecnt_upgrade(#ecnt{}=Ecnt, _P) -> Ecnt;
ecnt_upgrade(Ecnt, 86400) when is_record(Ecnt, ecnt, 5) ->
    erlang:append_element(Ecnt, 10);
ecnt_upgrade(Ecnt, _P) when is_record(Ecnt, ecnt, 5) ->
    erlang:append_element(Ecnt, 3).
%% Add Events to the absolute counter; when the Period-sized bucket has
%% rolled over, archive the old bucket into the (gap-padded) history.
updateEventCounter(Events, EventsCounter, NowTS, Period) ->
    EC = ecnt_upgrade(EventsCounter, Period),
    #ecnt{ counter = C, period_start = PeriodStart,
        history_length = MaxHistLength } = EC,
    % Make it look like local timestamp, useful for day-breaking.
    PST_TS = NowTS - 3600 * 8, % Pacific Standard Time, hard-coded
    % Figure out whether EC corresponds to a current period or not.
    CurrentPeriod = erlang:round(PST_TS) div Period,
    if
        CurrentPeriod == PeriodStart -> EC#ecnt{counter = Events + C};
        PeriodStart == 0 -> EC#ecnt{counter = Events + C,
                period_start = CurrentPeriod };
        true ->
            EC#ecnt{counter = Events,
                period_start = CurrentPeriod,
                archived_events = EC#ecnt.archived_events + C,
                history = padHistoryUntil(CurrentPeriod - 1,
                        updateEventHistory(EC#ecnt.history,
                                PeriodStart, C, MaxHistLength),
                        MaxHistLength),
                % 'true' is a legacy marker meaning "default length".
                history_length = if MaxHistLength == true -> 10;
                        true -> MaxHistLength end
            }
    end.
%% Prepend zero-count entries so the history's newest entry becomes
%% LastPeriod, truncating to at most MaxHistLen entries.  The legacy
%% marker 'true' stands for the default length of 10.
padHistoryUntil(Last, Hist, true) ->
    padHistoryUntil(Last, Hist, 10);
padHistoryUntil(_Last, [], _MaxLen) ->
    [];
padHistoryUntil(Last, [{Newest, _} | _] = Hist, _MaxLen) when Last =< Newest ->
    Hist;
padHistoryUntil(Last, [{Newest, _} | _] = Hist, MaxLen) ->
    Oldest = max(Newest + 1, Last - MaxLen),
    Padding = [{Period, 0} || Period <- lists:seq(Last, Oldest, -1)],
    lists:sublist(Padding ++ Hist, MaxLen).
%% Push {PeriodStart, Events} onto the newest-first history, inserting
%% zero-count entries for any skipped periods and truncating the result
%% to MaxHistLen.  The legacy marker 'true' means the default length 10.
updateEventHistory(PrevHistory, PeriodStart, Events, true) ->
    updateEventHistory(PrevHistory, PeriodStart, Events, 10);
%% Fast path: the new period directly follows the newest recorded one.
updateEventHistory([{OldPeriodStart,_}|_] = PrevHistory, PeriodStart, Events,
        MaxHistLen) when PeriodStart - OldPeriodStart == 1 ->
    lists:sublist([{PeriodStart,Events} | PrevHistory], MaxHistLen);
updateEventHistory([{OldPeriodStart,_}|_] = PrevHistory, PeriodStart, Events,
        MaxHistLen) ->
    Skipped = PeriodStart - OldPeriodStart - 1,
    EntriesForSkippedPeriod = if
        Skipped =< 0 -> [];
        true -> [ {PTS, 0} || PTS <- lists:seq(
                    PeriodStart - 1,
                    lists:max([OldPeriodStart+ 1,
                            PeriodStart - MaxHistLen]),
                    -1) ]
    end,
    lists:sublist(
        [{PeriodStart,Events} | EntriesForSkippedPeriod] ++ PrevHistory,
        MaxHistLen);
%% An empty history stays empty for a zero count.
updateEventHistory([], _, 0, _) -> [];
updateEventHistory([], PeriodStart, Events, _) -> [{PeriodStart, Events}].
% Get number of events per given number of time (extrapolated).
%% @spec getEventsPer(record(mavg), int()) -> int()
%% Rounded wrapper around getEventsPer_noround/2.
getEventsPer(MA, SomePeriod) ->
    round(getEventsPer_noround(MA, SomePeriod)).
% Convert old version into new version
getEventsPer_noround(MA, SomePeriod) when tuple_size(MA) == 6 ->
    getEventsPer_noround(upgrade_record(MA), SomePeriod);
%% Extrapolate the (freshly bumped) per-second average over SomePeriod.
getEventsPer_noround(MA, SomePeriod) when
        is_record(MA, mavg),
        is_integer(SomePeriod), SomePeriod > 0 ->
    MA_Updated = bump_mavg(MA, 0), % Make sure we're current
    #mavg{ historicAvg = Average } = MA_Updated,
    EventsPerPeriod = Average,
    EventsPerPeriod * SomePeriod.
%% Like getEventsPer/2 but uses the stored average as-is, without
%% folding in the time elapsed since the last bump.
getEventsPer_nobump(#mavg{historicAvg = Average} = MA, SomePeriod) when
        is_record(MA, mavg),
        is_integer(SomePeriod), SomePeriod > 0 ->
    round(Average * SomePeriod).
%% {Period, CreatedAt, LastUpdatedAt}; old-layout records are upgraded.
getProperties(MA) when tuple_size(MA) == 6 ->
    getProperties(upgrade_record(MA));
getProperties(MA) ->
    #mavg{period = P, createts = C, lastupdatets = L} = MA,
    {P,C,L}.
%% {CurrentBucketCount, [CountPerArchivedPeriod], ArchivedTotal}.
%% Old-layout records carry no counter, hence the empty result.
history(MA) when tuple_size(MA) == 6 -> {0,[],0};
history(MA) ->
    MA_Updated = bump_mavg(MA, 0), % Make sure we're current
    #ecnt{counter = C, history = H, archived_events = A} =
        ecnt_upgrade(MA_Updated#mavg.eventCounter, MA_Updated#mavg.period),
    {C, [B || {_A, B} <- H], A}.
%% The up-to-date exponential average (events per second).
get_current(MA) when tuple_size(MA) == 6 -> get_current(upgrade_record(MA));
get_current(MA) when is_record(MA, mavg) ->
    MA_Updated = bump_mavg(MA, 0), % Make sure we're current
    MA_Updated#mavg.historicAvg.
%% Append a fresh event counter to a pre-counter 6-field #mavg{}.
upgrade_record(MA) when tuple_size(MA) == 6 ->
    erlang:append_element(MA, #ecnt{}).
% Time stamp of current time in whole seconds since the Unix epoch.
% erlang:now/0 is deprecated (OTP 18+); os:timestamp/0 returns the
% same {MegaSecs, Secs, MicroSecs} triple without the uniqueness cost.
%% @spec unixtime() -> integer()
unixtime() -> unixtime(os:timestamp()).
%% @spec unixtime({MegaSecs, Secs, MicroSecs}) -> integer()
unixtime({Mega, Secs, _Msecs}) -> Mega * 1000000 + Secs.
%% Current Unix time with microsecond resolution, as a float.
%% erlang:now/0 is deprecated (OTP 18+); os:timestamp/0 is the drop-in
%% replacement with the same triple format.
%% @spec unixtime_float() -> float()
unixtime_float() -> unixtime_float(os:timestamp()).
%% @spec unixtime_float({MegaSecs, Secs, MicroSecs}) -> float()
unixtime_float({M,S,U}) -> M*1000000 + S + U/1000000.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%% EUnit: exercises history padding/updating helpers, the EMA bump
%% path, bucket roll-over in the event counter, and history lengths.
jn_mavg_test() ->
    io:format("~p Testing START ~n", [?MODULE]),
    [] = updateEventHistory([], 1200, 0, 42),
    [{1200, 13}] = updateEventHistory([], 1200, 13, 42),
    [{1201, 13},{1200, 5}] = updateEventHistory([{1200, 5}], 1201, 13, 42),
    [{1201, 13},{1200, 5},foo] = updateEventHistory([{1200, 5},foo],
        1201, 13, 42),
    [{1200, 1}] = padHistoryUntil(1200, [{1200, 1}], 42),
    [{1201, 0},{1200, 1}] = padHistoryUntil(1201, [{1200, 1}], 42),
    [{1202, 0},{1201,0},{1200, 1}] = padHistoryUntil(1202, [{1200, 1}], 42),
    [{1202, 0},{1201,0},{1200, 1}] = padHistoryUntil(1202, [{1200, 1}], 3),
    [{1202, 0},{1201,0}] = padHistoryUntil(1202, [{1200, 1}], 2),
    [{1202, 0}] = padHistoryUntil(1202, [{1200, 1}], 1),
    MA1 = new_mavg(300),
    MA2 = bump_mavg(MA1, 60),
    io:format("tc1: ~p~n", [MA1]),
    io:format("tc2: ~p, wait...~n", [MA2]),
    timer:sleep(1200),
    MA3 = MA2#mavg{historicAvg = 60},
    MA4 = bump_mavg(MA3, 20),
    timer:sleep(1200),
    Ep3 = getEventsPer(MA3, 60),
    Ep4 = getEventsPer(MA4, 60),
    io:format("tc3: ~p, epm ~p~n", [MA3, Ep3]),
    io:format("tc4: ~p, epm ~p~n", [MA4, Ep4]),
    if
        Ep3 < 3575; Ep3 > 3590 -> throw("Assertion failed Ep3");
        Ep4 < 3578; Ep4 > 3595 -> throw("Assertion failed Ep4");
        true -> true
    end,
    % Align T inside a 300-second bucket to exercise bucket roll-over.
    T = (unixtime() div 300) * 300 + 20,
    MA11 = new_mavg(300, [{start_time, T}]),
    MA5 = bump_mavg(MA11, 1, T),
    io:format("tc5: ~p~n", [MA5]),
    #mavg{eventCounter = #ecnt{counter = 1, archived_events = 0}} = MA5,
    MA6 = bump_mavg(MA5, 1, T + 10),
    io:format("tc6: ~p~n", [MA6]),
    #mavg{eventCounter = #ecnt{counter = 2, archived_events = 0}} = MA6,
    MA7 = bump_mavg(MA6, 1, T + 280),
    io:format("tc7: ~p~n", [MA7]),
    #mavg{eventCounter = #ecnt{counter = 1, archived_events = 2}} = MA7,
    MA8 = bump_mavg(MA7, 1, T + 600),
    io:format("tc8: ~p~n", [MA8]),
    #mavg{eventCounter = #ecnt{counter = 1, archived_events = 3}} = MA8,
    % History testing
    HMa1 = new_mavg(60, [{start_time, unixtime() - 1000},
            {start_events, 1}, {history_length, 0}]),
    {_, H1, _} = history(HMa1),
    0 = length(H1),
    HMa2 = new_mavg(60, [{start_time, unixtime() - 1000},
            {start_events, 1}, {history_length, 2}]),
    {_, H2, _} = history(HMa2),
    2 = length(H2),
    HMa10 = new_mavg(60, [{start_time, unixtime() - 1000},
            {start_events, 1}, {history_length, 10}]),
    {_, H10, _} = history(HMa10),
    10 = length(H10),
    HMa20 = new_mavg(60, [{start_time, unixtime() - 1200},
            {start_events, 1}, {history_length, 20}]),
    {_, H20, _} = history(HMa20),
    [1|_] = lists:reverse(H20),
    20 = length(H20).
jn_mavg_rate_test() ->
    % Send a few updates within a second, measure the real rate,
    % then compare it with the inferred rate. They shouldn't be too far apart.
    % New_mavg's SmoothingWindow should be initialized with the expected
    % normal rate.
    PushEvents = 50,
    HMI = new_mavg(25, [immediate]),
    HMI2 = lists:foldl(fun(N, HMI0) ->
            Sleep = 90 * (N rem 2) + 10,
            timer:sleep(Sleep),
            bump_nodelay(HMI0, 1)
        end, HMI, lists:seq(1, PushEvents)),
    RealRate = PushEvents/(unixtime_float() - HMI2#mavg.createts),
    InferredRate = 1/HMI2#mavg.historicAvg,
    ImmediateRate = getImmediateRate(HMI2),
    io:format("Sent ~p eps, estimated ~p, immediate ~p~n",
        [RealRate, InferredRate, ImmediateRate]),
    true = abs(ImmediateRate - InferredRate) < 0.1 * ImmediateRate,
    perftest:sequential(1000, fun() -> bump_nodelay(HMI, 1) end),
    io:format("~p Testing STOP ~n", [?MODULE]).
-endif. | src/jn_mavg.erl | 0.715026 | 0.403978 | jn_mavg.erl | starcoder |
%%==============================================================================
%% Copyright 2018-2021 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
%%%-------------------------------------------------------------------
%%% @doc
%%% A Bencoding library based on:
%%% The BitTorrent specification
%%%
%%% Bencoding is represented as follows:
%%%
%%% Byte string : binary (octets)
%%% Integer : integer
%%% List : list
%%% Dictionary : map
%%%
%%% @end
%%%
%% @author <NAME> <<EMAIL>>
%% @copyright (C) 2018-2021, <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(bencoding).
-copyright('<NAME> <<EMAIL>>').
%% Library functions
-export([encode/1, encode/2,
decode/1, decode/2
]).
%% Exported types
-export_type([bencoding/0]).
%% Types
-type bencoding() :: binary() | integer() | list() | map().
-type opt() :: binary | iolist | continue.
%% Records
-record(opts, {return_type = iolist :: binary() | iolist(),
continue = false :: boolean()
}).
%% ===================================================================
%% Library functions.
%% ===================================================================
%%--------------------------------------------------------------------
%% Function: encode(Term) -> Bencoding.
%% @doc
%% Encodes the structured Erlang term as an iolist.
%% Equivalent of encode(Term, []) -> Bencoding.
%% @end
%%--------------------------------------------------------------------
-spec encode(bencoding()) -> iolist().
%%--------------------------------------------------------------------
%% Delegates to encode/2 with the default (empty) option list.
encode(Term) -> encode(Term, []).
%%--------------------------------------------------------------------
%% Function: encode(Term, Options) -> Bencoding
%% @doc
%% Encodes the structured Erlang term as an iolist or binary.
%% Encode will give an exception if the erlang term is not well formed.
%% Options are:
%% binary -> a binary is returned
%% iolist -> an iolist is returned (default)
%% @end
%%--------------------------------------------------------------------
-spec encode(bencoding(), [opt()]) -> iolist() | binary().
%%--------------------------------------------------------------------
%% Bencode Term, then shape the result as requested by the options:
%% 'binary' flattens the iolist, 'iolist' (the default) returns it as-is.
encode(Term, Opts) ->
    #opts{return_type = ReturnType} = parse_opts(Opts, #opts{}),
    IoList = do_encode(Term),
    case ReturnType of
        binary -> iolist_to_binary(IoList);
        iolist -> IoList
    end.
%%--------------------------------------------------------------------
%% Function: decode(Binary) -> Term.
%% @doc
%% Decodes the iodata into a structured Erlang term.
%% Equivalent of decode(JSON, []) -> Term.
%% @end
%%--------------------------------------------------------------------
-spec decode(binary()) -> bencoding().
%%--------------------------------------------------------------------
%% Delegates to decode/2 with the default (empty) option list.
decode(Binary) -> decode(Binary, []).
%%--------------------------------------------------------------------
%% Function: decode(Binary) -> Term | {Term, Rest}.
%% @doc
%%   Decodes the iodata into a structured Erlang term.
%%   Options are:
%%     continue -> return the tuple of the decoded Bencoding and the
%%                 remaining binary
%% @end
%%--------------------------------------------------------------------
-spec decode(binary(), [opt()]) -> bencoding() | {bencoding(), binary()}.
%%--------------------------------------------------------------------
%% Without 'continue' any trailing bytes after the first term are
%% silently discarded.
decode(Binary, Opts) ->
    case (parse_opts(Opts, #opts{}))#opts.continue of
        true -> do_decode(Binary);
        false -> element(1, do_decode(Binary))
    end.
%% ===================================================================
%% Internal functions.
%% ===================================================================
%% ===================================================================
%% Encoding
%% ===================================================================
%% Render one bencoding term as an iolist:
%%   byte string -> <len>:<bytes>     integer -> i<digits>e
%%   list        -> l<items>e         map     -> d<sorted k/v pairs>e
%% Dictionary keys must be binaries and are emitted in sorted order,
%% as the BitTorrent specification requires.
do_encode(String) when is_binary(String) ->
    [integer_to_binary(byte_size(String)), $:, String];
do_encode(List) when is_list(List) ->
    [$l, lists:map(fun do_encode/1, List), $e];
do_encode(Map) when is_map(Map) ->
    Sorted = lists:keysort(1, maps:to_list(Map)),
    [$d,
     [[integer_to_binary(byte_size(K)), $:, K, do_encode(V)]
      || {K, V} <- Sorted],
     $e];
do_encode(Integer) ->
    [$i, integer_to_binary(Integer), $e].
%% Encode a byte string as <length>:<bytes>; used for dictionary keys,
%% which therefore must be binaries.
encode_string(String = <<_/binary>>) ->
    [integer_to_binary(byte_size(String)), ":", String].
%% ===================================================================
%% Decoding
%% ===================================================================
%% Dispatch on the leading type tag: i=integer, l=list, d=dictionary;
%% anything else must start a length-prefixed byte string.
do_decode(<<$i, I/binary>>) -> decode_integer(I, []);
do_decode(<<$l, L/binary>>) -> decode_list(L, []);
do_decode(<<$d, D/binary>>) -> decode_dictionary(D, []);
do_decode(B) -> decode_string(B, []).
%% Consume digits (accumulated in reverse) until the closing $e, then
%% return {Integer, RemainingBytes}.
decode_integer(<<$e, Remaining/binary>>, Digits) ->
    {list_to_integer(lists:reverse(Digits)), Remaining};
decode_integer(<<Digit, Rest/binary>>, Digits) ->
    decode_integer(Rest, [Digit | Digits]).
%% Decode list elements (accumulated in reverse) until the closing $e.
decode_list(<<$e, T/binary>>, Acc) -> {lists:reverse(Acc), T};
decode_list(Bin, Acc) ->
    {E, T} = do_decode(Bin),
    decode_list(T, [E | Acc]).
%% Decode key/value pairs until the closing $e; keys are always
%% length-prefixed strings, values may be any bencoding term.
decode_dictionary(<<$e, T/binary>>, Acc) -> {maps:from_list(Acc), T};
decode_dictionary(Bin, Acc) ->
    {K, T} = decode_string(Bin, []),
    {V, T1} = do_decode(T),
    decode_dictionary(T1, [{K, V} | Acc]).
%% Accumulate the (reversed) decimal length prefix until $:, then slice
%% exactly that many bytes out as the string.
decode_string(<<$:, Payload/binary>>, LenDigits) ->
    Length = list_to_integer(lists:reverse(LenDigits)),
    <<Str:Length/binary, Remaining/binary>> = Payload,
    {Str, Remaining};
decode_string(<<Digit, Rest/binary>>, LenDigits) ->
    decode_string(Rest, [Digit | LenDigits]).
%% ===================================================================
%% Common parts
%% ===================================================================
%% Fold the option list into the defaults record; lists:foldl/3 already
%% returns the accumulator untouched for an empty list.
parse_opts(Opts, Rec) ->
    lists:foldl(fun parse_opt/2, Rec, Opts).
%% Apply a single known option to the accumulator record; any
%% unrecognized option is a badarg programmer error.
parse_opt(binary, Rec) -> Rec#opts{return_type = binary};
parse_opt(iolist, Rec) -> Rec#opts{return_type = iolist};
parse_opt(continue, Rec) -> Rec#opts{continue = true};
parse_opt(_, _) -> erlang:error(badarg). | src/bencoding.erl | 0.559531 | 0.46642 | bencoding.erl | starcoder |
%% =====================================================================
%% @doc An abstraction library providing an interface to the possible
%% options supported by hugin. The values returned from the functions
%% in this library can be returned in the hugin init/0 callback
%% function, or be used in the hugin API function set_option/1 and
%% set_options/1. Calling the functions in this library DOES NOTHING
%% MORE THAN RETURNING VALUES, so don't try to use them to directly
%% influence the behavior of the hugin server.
%%
%% If you want to directly influence the behavior of the server you can
%% use the corresponding functions in the hugin module. See
%% {@link hugin}.
%% @copyright 2015 <NAME>
%% @author <NAME> <<EMAIL>>
%% @version {@version}
%% @end
%% =====================================================================
-module(hugin_opts).
-export([max_freq/2, max_freq/3,
max_par/1]).
-opaque opt() :: {atom(), any()}.
-type time_unit() ::
ms | millisecond | milliseconds
| s | sec | second | seconds
| m | min | minute | minutes
| h | hour | hours
| d | day | days
| w | week | weeks.
-export_type([opt/0, time_unit/0]).
%% API
%% @doc An option to limit the amount of calls that hugin makes per
%% time unit. The default option is to have no limits. However, notice
%% that hugin still limits the amount of parallel connections to five
%% by default. See {@link max_par/1}.
%%
%% @equiv max_freq(Amount, 1, Unit)
-spec max_freq(Amount :: integer(), Unit :: time_unit()) -> opt().
%% Shortcut for "Amount calls per 1 Unit".
max_freq(A, U) ->
  max_freq(A, 1, U).
%% @doc Same as max_freq/2 but allows one more argument to specify how many
%% calls per N time units.
-spec max_freq(Amount :: integer(), N :: integer(), Unit :: time_unit())
  -> opt().
%% The window is stored internally as milliseconds.
max_freq(A, N, U) when is_integer(A), is_integer(N), is_atom(U) ->
  Ms = milliseconds(U) * N,
  {max_freq, {A, Ms}}.
%% @doc An option to limit the amount of parallel connections allowed
%% by hugin.
-spec max_par(N :: integer()) -> opt().
%% Tag the parallel-connection limit as an option tuple.
max_par(Limit) when is_integer(Limit) ->
  {max_par, Limit}.
%% internal functions
%% Milliseconds in one unit of M; every alias recursively reduces to a
%% smaller unit until hitting the 'ms' base case.  Unknown units are a
%% badarg programmer error.
milliseconds(M) ->
  case M of
    ms -> 1;
    millisecond -> milliseconds(ms);
    milliseconds -> milliseconds(ms);
    s -> 1000 * milliseconds(ms);
    sec -> milliseconds(s);
    second -> milliseconds(s);
    seconds -> milliseconds(s);
    m -> 60 * milliseconds(s);
    min -> milliseconds(m);
    minute -> milliseconds(m);
    minutes -> milliseconds(m);
    h -> 60 * milliseconds(m);
    hour -> milliseconds(h);
    hours -> milliseconds(h);
    d -> 24 * milliseconds(h);
    day -> milliseconds(d);
    days -> milliseconds(d);
    w -> 7 * milliseconds(d);
    week -> milliseconds(w);
    weeks -> milliseconds(w);
    _ -> erlang:error(badarg)
  end. | src/hugin_opts.erl | 0.735737 | 0.464659 | hugin_opts.erl | starcoder |
% Copyright 2017-2018 <NAME>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
-module(statser_util).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include("statser.hrl").
-compile({no_auto_import, [floor/1]}).
-export([ceiling/1,
floor/1,
to_number/1,
number_to_bin/1,
parse_unit/1,
parse_unit/2,
seconds/0,
epoch_seconds_to_datetime/1,
split_metric/1]).
%% Render a number as a binary: integers verbatim, floats with exactly
%% two decimal places.  Uses the integer_to_binary/1 BIF instead of the
%% integer_to_list/list_to_binary round-trip, and iolist_to_binary/1
%% since io_lib:format/2 returns a (possibly deep) iolist.
-spec number_to_bin(number()) -> binary().
number_to_bin(Num) when is_integer(Num) ->
    integer_to_binary(Num);
number_to_bin(Num) ->
    iolist_to_binary(io_lib:format("~.2f", [Num])).
-spec to_number(binary()) -> {ok, number()} | error.
%% Parse a binary as a float first (so "3.5" is not truncated to 3),
%% falling back to an integer parse; trailing garbage is ignored.
to_number(Binary) ->
    Chars = binary_to_list(Binary),
    case string:to_float(Chars) of
        {Float, _Rest} when is_float(Float) ->
            {ok, Float};
        {error, no_float} ->
            case string:to_integer(Chars) of
                {Int, _Rest} when is_integer(Int) -> {ok, Int};
                {error, _Reason} -> error
            end
    end.
%% @doc Largest integer not greater than X. trunc/1 rounds toward zero,
%% so for negative non-integers we step one further down.
-spec floor(number()) -> integer().
floor(X) ->
    Truncated = trunc(X),
    case X < Truncated of
        true  -> Truncated - 1;
        false -> Truncated
    end.
%% @doc Smallest integer not less than X. trunc/1 rounds toward zero,
%% so for positive non-integers we step one further up.
-spec ceiling(number()) -> integer().
ceiling(X) ->
    Truncated = trunc(X),
    case X > Truncated of
        true  -> Truncated + 1;
        false -> Truncated
    end.
%% @doc Parse a value such as <<"15min">> into a number of seconds:
%% leading integer, then a unit suffix. Returns error when no leading
%% integer is present.
-spec parse_unit(binary()) -> integer() | error.
parse_unit(Bin) ->
    case string:to_integer(binary_to_list(Bin)) of
        {error, _Reason} -> error;
        {Amount, Unit}   -> parse_unit(Amount, Unit)
    end.

%% @doc Scale Amount by the unit named by the suffix string. Only the
%% leading characters of the suffix matter ("s", "S", "min", "h", "d",
%% "w", "mon", "y"); anything else — including a bare "m" — is an error.
-spec parse_unit(integer(), unicode:chardata()) -> integer() | error.
parse_unit(Amount, "s" ++ _)   -> Amount;
parse_unit(Amount, "S" ++ _)   -> Amount;
parse_unit(Amount, "min" ++ _) -> Amount * 60;
parse_unit(Amount, "h" ++ _)   -> Amount * 3600;
parse_unit(Amount, "d" ++ _)   -> Amount * 86400;
parse_unit(Amount, "w" ++ _)   -> Amount * 604800;
parse_unit(Amount, "mon" ++ _) -> Amount * 2592000;
parse_unit(Amount, "y" ++ _)   -> Amount * 31536000;
parse_unit(_, _)               -> error.
%% Current wall-clock time as Unix epoch seconds (OS system time, not
%% monotonic — suitable for timestamps, not for measuring durations).
-spec seconds() -> integer().
seconds() ->
erlang:system_time(second).
%% @doc Format Unix epoch seconds as a flat ISO-8601-like string,
%% e.g. "1970-01-01T00:00:00" (no timezone designator).
-spec epoch_seconds_to_datetime(integer()) -> string().
epoch_seconds_to_datetime(Seconds) ->
    %% Gregorian seconds of the Unix epoch, i.e.
    %% calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
    EpochOffset = 62167219200,
    {{Year, Month, Day}, {Hour, Minute, Second}} =
        calendar:gregorian_seconds_to_datetime(EpochOffset + Seconds),
    Formatted = io_lib:format("~4..0w-~2..0w-~2..0wT~2..0w:~2..0w:~2..0w",
                              [Year, Month, Day, Hour, Minute, Second]),
    lists:flatten(Formatted).
%% @doc Split a dotted metric path into its name parts, dropping any
%% empty segments (leading, trailing or doubled dots).
-spec split_metric(binary()) -> [binary()].
split_metric(MetricPath) ->
    binary:split(MetricPath, <<".">>, [global, trim_all]).
%%
%% TESTS
%%
-ifdef(TEST).
%% Integer and float rendering, including the fixed two-decimal format.
number_to_bin_test_() ->
[?_assertEqual(<<"325">>, number_to_bin(325)),
?_assertEqual(<<"-35.21">>, number_to_bin(-35.21))
].
%% Unit parsing: missing/unknown suffixes are errors; note a bare "m"
%% is rejected (only "min" means minutes).
parse_unit_test_() ->
[?_assertEqual(error, parse_unit(100, "")),
?_assertEqual(error, parse_unit(100, "m")),
?_assertEqual(100, parse_unit(100, "s")),
?_assertEqual(180, parse_unit(3, "min")),
?_assertEqual(2 * 86400 * 7, parse_unit(2, "w"))
].
%% floor/1 rounds toward negative infinity, unlike trunc/1.
floor_test_() ->
[?_assertEqual(5, floor(5.0)),
?_assertEqual(5, floor(5)),
?_assertEqual(5, floor(5.5)),
?_assertEqual(5, floor(5.9)),
?_assertEqual(-6, floor(-6)),
?_assertEqual(-6, floor(-5.1)),
?_assertEqual(-6, floor(-5.9))
].
%% ceiling/1 rounds toward positive infinity.
ceiling_test_() ->
[?_assertEqual(5, ceiling(5.0)),
?_assertEqual(5, ceiling(5)),
?_assertEqual(6, ceiling(5.5)),
?_assertEqual(6, ceiling(5.9)),
?_assertEqual(-5, ceiling(-5)),
?_assertEqual(-5, ceiling(-5.1)),
?_assertEqual(-5, ceiling(-5.9))
].
%% Epoch conversion anchored at the Unix epoch itself.
epoch_seconds_to_datetime_test_() ->
[?_assertEqual("1970-01-01T00:00:00", epoch_seconds_to_datetime(0)),
?_assertEqual("1970-01-01T01:00:00", epoch_seconds_to_datetime(3600)),
?_assertEqual("1970-01-01T01:01:01", epoch_seconds_to_datetime(3661))
].
-endif.
%% @doc Process which manages users, their receivers, and channels.
%% @version 0.1.0
%% @author <NAME> <<EMAIL>>
%% [http://student.vub.ac.be/~dmeysman]
%% @copyright 2016 <NAME>
-module(master).
-export([initialize/0, initialize_with/2, master_actor/2]).
-spec initialize() -> pid().
%% @doc Creates a new master process with no users and no channels.
%% Convenience wrapper around initialize_with/2 with two empty dicts.
initialize() ->
initialize_with(dict:new(), dict:new()).
-spec initialize_with(Subscriptions :: dict:dict(string(), {user, string(), sets:set(string())}),
                      Channels :: dict:dict(string(), pid())) -> pid().
%% @doc Creates a new master process with `Subscriptions', `Receivers',
%% and `Channels', and registers it under the name master_actor,
%% replacing any previously registered master.
initialize_with(Subscriptions, Channels) ->
    Master = spawn_link(?MODULE, master_actor, [Subscriptions, Channels]),
    %% unregister/1 raises badarg when the name is unused, so probe with
    %% whereis/1 rather than swallowing every error with an old-style
    %% `catch` (which would also hide unrelated failures and lose the
    %% stacktrace). NOTE(review): there is a small window between the
    %% whereis and the unregister; assumed fine since initialization is
    %% not expected to race with itself — confirm.
    case whereis(master_actor) of
        undefined -> ok;
        _Existing -> unregister(master_actor)
    end,
    register(master_actor, Master),
    Master.
-spec master_actor(Subscriptions :: dict:dict(string(), {user, string(), sets:set(string())}),
Channels :: dict:dict(string(), pid())) -> no_return().
%% @doc Represents a master process: an infinite, tail-recursive
%% receive loop whose state is the subscription dict (user name ->
%% user tuple) and the channel dict (channel name -> channel pid).
%% Unknown user or channel names crash via dict:fetch/2 (assertive;
%% no defensive lookups). Messages not matching any clause stay in
%% the mailbox.
master_actor(Subscriptions, Channels) ->
receive
{Sender, register_user, UserName} ->
% We first create a new user subscribed to no channels.
NewSubscriptions = dict:store(UserName, {user, UserName, sets:new()}, Subscriptions),
% We then tell the user to continue to send messages to us, as he is
% not logged in at this point.
Sender ! {self(), user_registered},
% Finally, we proceed with the new state.
master_actor(NewSubscriptions, Channels);
{Sender, log_in, UserName} ->
% We first create a receiver for the user and tell him to send all future
% messages to his receiver instead of us.
Sender ! {receiver:initialize_with(Sender, dict:fetch(UserName, Subscriptions), Channels), logged_in},
% Finally, we proceed.
master_actor(Subscriptions, Channels);
{Sender, log_out, UserName} ->
% We first notify all channels the user subscribes to and dispose of the receiver.
log_out(Sender, dict:fetch(UserName, Subscriptions), Channels),
% We then notify the sender that we successfully logged the user out.
Sender ! {self(), logged_out},
% Finally, we proceed.
master_actor(Subscriptions, Channels);
{Sender, Receiver, join_channel, UserName, ChannelName} ->
% We first subscribe the user to the channel.
NewSubscriptions = dict:update(UserName, subscribe(ChannelName), Subscriptions),
% We then spawn a new channel process if the channel does not exist yet.
ChannelPid = find_or_create_channel(ChannelName, Channels),
% The user's receiver needs to be aware of the new channel's information.
Receiver ! {self(), new_channel, {channel, ChannelName, ChannelPid}},
% Now we notify the channel that a user wishes to join it.
ChannelPid ! {self(), join_channel, {user, UserName, Sender}},
% We do not forget to notify the sender that the user has successfully joined the channel.
Sender ! {self(), channel_joined},
% Finally, we proceed with the new state.
master_actor(NewSubscriptions, dict:store(ChannelName, ChannelPid, Channels));
{Sender, get_channel_history, ChannelName} ->
% We forward requests for channel histories to the channel processes.
dict:fetch(ChannelName, Channels) ! {Sender, get_channel_history},
% We then proceed with the same state.
master_actor(Subscriptions, Channels)
end.
-spec log_out(UserPid :: pid(),
              User :: {user, string(), sets:set(string())},
              Channels :: dict:dict(string(), pid())) -> ok.
%% @doc Sends a {MasterPid, leave_channel, {user, Name, UserPid}}
%% message to every channel the user subscribes to. Channel names the
%% Channels dict does not know crash via dict:fetch/2.
log_out(UserPid, {user, SubscriberName, Subscribed}, Channels) ->
    Notify =
        fun (ChannelName) ->
                ChannelPid = dict:fetch(ChannelName, Channels),
                ChannelPid ! {self(), leave_channel, {user, SubscriberName, UserPid}}
        end,
    lists:foreach(Notify, sets:to_list(Subscribed)),
    ok.
-spec find_or_create_channel(ChannelName :: string(),
                             Channels :: dict:dict(string(), pid())) -> pid().
%% @doc Finds an existing channel in `Channels' by `ChannelName' or creates a
%% new one named `ChannelName'. Note the new channel is NOT added to the
%% dict here; the caller is responsible for storing the returned pid.
find_or_create_channel(ChannelName, Channels) ->
    case dict:is_key(ChannelName, Channels) of
        true ->
            % Channel already known: hand back its process identifier.
            dict:fetch(ChannelName, Channels);
        false ->
            % Unknown channel: spin up a fresh channel process.
            channel:initialize()
    end.
-spec subscribe(string()) -> fun(({user, string(), sets:set(string())}) -> {user, string(), sets:set(string())}).
%% @doc Generates a function which subscribes a user to `ChannelName'.
%% The returned fun adds the channel to the user's subscription set and
%% leaves the rest of the user tuple untouched; it is shaped for use
%% with dict:update/3. (Non-code residue removed from the original
%% closing line.)
subscribe(ChannelName) ->
    fun({user, UserName, Subscriptions}) ->
        {user, UserName, sets:add_element(ChannelName, Subscriptions)}
    end.
%% A worker that keeps values for a single metric for fixed time period.
-module(collector).
-behaviour(gen_server).
-export([start_link/1, stop/1, average/1, record/2]).
-export([init/1, handle_call/3, handle_cast/2,
handle_info/2, code_change/3, terminate/2]).
-define(PERIOD, 60000). % 1 minute = 60000 ms
-record(measure, {value, timestamp}).
%% Our state is just a stack (list) of measures, i.e. {value, timestamp} pairs.
%% New values are just pushed on top of the stack and old values (older than 1 minute)
%% get discarded on a timeout (about every minute).
%% This scheme should work good enough if we receive not too many measures per second
%% (say, less than a 1000). With high rate of incoming messages we'll get skewed
%% average and might not be able to timely process all incoming messages
%% (average and timeout will incure long pauses).
%% Alternative approach: employ a heap structure sorted on a timestamp.
%% Start a collector seeded with one initial measure built from Value.
start_link(Value) ->
gen_server:start_link(?MODULE, [mkmeasure(Value)], []).
%% Synchronously stop the collector.
stop(Pid) ->
gen_server:call(Pid, stop).
%% Average of the values recorded over the last minute (0 when empty).
average(Pid) ->
gen_server:call(Pid, average).
%% Asynchronously record a new value for this metric.
record(Pid, Value) ->
gen_server:cast(Pid, {record, Value}).
%% private functions

%% Pair a value with the current monotonic time in milliseconds; the
%% timestamp is used later (handle_info timeout) to expire measures
%% older than ?PERIOD.
mkmeasure(Value) ->
#measure{value = Value, timestamp = erlang:monotonic_time(millisecond)}.
%% gen_server API

%% Schedule the first cleanup tick; the state is the stack (list) of
%% measures, newest first, as described in the module comment.
init(State) ->
erlang:send_after(?PERIOD, self(), timeout),
{ok, State}.
% Returning 0 when we had no measures for the last minute.
handle_call(average, _From, []) ->
{reply, 0, []};
%% Arithmetic mean over all currently retained measures; one fold
%% computes both the sum and the count.
handle_call(average, _From, State) ->
{Sum, Count} = lists:foldl(fun(#measure{value = V}, {S, C}) -> {S + V, C + 1} end, {0, 0}, State),
{reply, Sum/Count, State};
handle_call(stop, _From, State) ->
{stop, normal, ok, State};
%% NOTE(review): unknown calls get {noreply, _}, which leaves the
%% caller blocked until its gen_server:call timeout — confirm intended.
handle_call(_Msg, _From, State) ->
{noreply, State}.
%% Push a freshly timestamped measure on top of the stack.
handle_cast({record, Value}, State) ->
M = mkmeasure(Value),
{noreply, [M | State]};
%% Ignore unknown casts.
handle_cast(_Msg, State) ->
{noreply, State}.
% Looks like we haven't got any measures for two PERIODs,
% shut self down to save some resources. Dispatcher will start
% fresh one when will get new measure for the same id.
handle_info(timeout, []) ->
    %% BUGFIX: handle_info/2 must return {stop, Reason, State}; the
    %% previous {stop, normal, ok, []} 4-tuple is only a valid return
    %% for handle_call/3 and would crash the server with a
    %% bad-return-value error instead of stopping it cleanly.
    {stop, normal, []};
%% Periodic cleanup tick: drop measures older than one PERIOD, then
%% re-arm the timer for the next sweep.
handle_info(timeout, State) ->
    Now = erlang:monotonic_time(millisecond),
    State1 = lists:takewhile(fun(#measure{timestamp = T}) -> Now - T < ?PERIOD end, State),
    erlang:send_after(?PERIOD, self(), timeout),
    {noreply, State1};
%% Ignore any other message.
handle_info(_Msg, State) ->
    {noreply, State}.
%% Standard no-op code-change callback: keep the state as-is.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.

%% Nothing to clean up on shutdown. (Non-code residue removed from the
%% original closing line.)
terminate(_Reason, _State) ->
    ok.
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 DGIOT Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(modbus_util).
-export([
binary_to_coils/1,
binary_to_int16/1,
binary_to_int16s/1,
binary_to_int32/1,
binary_to_int32s/1,
binary_to_float32/1,
binary_to_ascii/1,
coils_to_binary/1,
int16_to_binary/1
]).
%% @doc Function to convert bytes to coils.
%% Expands a packed coil bitmap into a flat list of 0/1 values; within
%% each byte the least-significant bit becomes the first coil, matching
%% Modbus coil packing.
%% @end
-spec binary_to_coils(Bin::binary()) -> [0|1].
binary_to_coils(Bin) ->
    [Bit || <<Byte:8>> <= Bin,
            Bit <- lists:reverse([B || <<B:1>> <= <<Byte>>])].
%% @doc Function to convert bytes to 16bits integer.
%% Decodes the binary as consecutive big-endian unsigned 16-bit words.
%% @end
-spec binary_to_int16(Bin::binary()) -> [integer()].
binary_to_int16(Bin) ->
    [Word || <<Word:16/big-unsigned-integer>> <= Bin].
%% @doc Function to convert bytes to 16bits signed integer.
%% Decodes the binary as consecutive big-endian two's-complement
%% 16-bit words.
%% @end
-spec binary_to_int16s(Bin::binary()) -> [integer()].
binary_to_int16s(Bin) ->
    [Word || <<Word:16/big-signed-integer>> <= Bin].
%% @doc Function to convert bytes to 32bits integer.
%% Decodes the binary as consecutive big-endian unsigned 32-bit words.
%% @end
-spec binary_to_int32(Bin::binary()) -> [integer()].
binary_to_int32(Bin) ->
    [Word || <<Word:32/big-unsigned-integer>> <= Bin].
%% @doc Function to convert bytes to 32bits signed integer.
%% Decodes the binary as consecutive big-endian two's-complement
%% 32-bit words.
%% @end
-spec binary_to_int32s(Bin::binary()) -> [integer()].
binary_to_int32s(Bin) ->
    [Word || <<Word:32/big-signed-integer>> <= Bin].
%% @doc Function to convert bytes to 32bits float number.
%% Decodes the binary as consecutive big-endian IEEE-754 single
%% precision values (returned as Erlang floats).
%% @end
-spec binary_to_float32(Bin::binary()) -> [float()].
binary_to_float32(Bin) ->
    [F || <<F:32/big-float>> <= Bin].
%% @doc Function to convert bytes to ASCII.
%% Returns the bytes of the binary as a plain Erlang character list.
%% @end
-spec binary_to_ascii(Bin::binary()) -> list().
binary_to_ascii(Bin) ->
    binary_to_list(Bin).
%% @doc Function to convert a list of coils to binary.
%% Packs 0/1 coil values into bytes, least-significant bit first within
%% each byte; the list is zero-padded up to a multiple of eight.
%% @end
-spec coils_to_binary(Values::list()) -> binary().
coils_to_binary(Coils) ->
    %% Pad with zeros in one step instead of appending one at a time.
    Padded = case length(Coils) rem 8 of
                 0   -> Coils;
                 Rem -> Coils ++ lists:duplicate(8 - Rem, 0)
             end,
    pack_coil_bytes(Padded, <<>>).

%% Pack eight coils at a time; the first coil of each group becomes the
%% least-significant bit of its byte.
pack_coil_bytes([], Acc) ->
    Acc;
pack_coil_bytes([B0, B1, B2, B3, B4, B5, B6, B7 | Rest], Acc) ->
    pack_coil_bytes(Rest, <<Acc/binary, B7:1, B6:1, B5:1, B4:1, B3:1, B2:1, B1:1, B0:1>>).
%% @doc Function to convert a list of 16bits integer to binary.
%% Packs each integer as a big-endian 16-bit word in list order.
%% (Non-code residue removed from the original closing line.)
%% @end
-spec int16_to_binary(Values::list()) -> binary().
int16_to_binary(Values) ->
    << <<V:16>> || V <- Values >>.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(object_log_state_SUITE).
-compile({parse_transform, lager_transform}).
-include("../include/antidote.hrl").
%% common_test callbacks
-export([
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
all/0]).
%% tests
-export([object_log_state_test/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("kernel/include/inet.hrl").
%% Common Test: one-time suite setup. Boots debug logging, initializes
%% the test environment and clusters, and stashes the first cluster's
%% node list under 'nodes' in the CT config for the test cases.
init_per_suite(Config) ->
lager_common_test_backend:bounce(debug),
test_utils:at_init_testsuite(),
Clusters = test_utils:set_up_clusters_common(Config),
Nodes = hd(Clusters),
[{nodes, Nodes}|Config].
%% No suite-level teardown needed.
end_per_suite(Config) ->
Config.
%% No per-testcase setup.
init_per_testcase(_Case, Config) ->
Config.
%% No per-testcase teardown.
end_per_testcase(_, _) ->
ok.
%% The single test case in this suite.
all() -> [object_log_state_test].
%% Exercises object state and log reads over RPC against the cluster:
%% 1) add 1..15 to an add-wins set, check the read and the raw
%%    materialized state (value/unique-tag pairs),
%% 2) add 16..30, then check that get_log_operations/1 — given the
%%    first commit time — returns exactly the operations committed
%%    after that snapshot.
object_log_state_test(Config) ->
Nodes = proplists:get_value(nodes, Config),
FirstNode = hd(Nodes),
Type = antidote_crdt_set_aw,
Key = object_log_state_test,
Bucket = object_log_state_bucket,
BoundObject = {Key, Type, Bucket},
CommitTime = add_set(FirstNode, BoundObject, lists:seq(1, 15), vectorclock:new()),
%% Check the read is 15
{ok, [Val], _CT} = rpc:call(FirstNode, antidote, read_objects, [CommitTime, [], [BoundObject]]),
?assertEqual(lists:seq(1, 15), Val),
%% Get the object state
{ok, [ReadResult1], _CT2} = rpc:call(FirstNode,
antidote, get_objects, [CommitTime, [], [BoundObject]]),
?assertEqual(ok, check_orset_state(lists:seq(1, 15), ReadResult1)),
CommitTime2 = add_set(FirstNode, BoundObject, lists:seq(16, 30), CommitTime),
%% Check the read is 30
{ok, [Val2], _CT3} = rpc:call(FirstNode, antidote, read_objects, [CommitTime2, [], [BoundObject]]),
?assertEqual(lists:seq(1, 30), Val2),
{ok, [LogOps]} = rpc:call(FirstNode,
antidote, get_log_operations, [[{BoundObject, CommitTime}]]),
?assertEqual(ok, check_orset_ops(lists:seq(16, 30), LogOps, {Key, Bucket})),
lager:info("object_log_state_test_test finished").
%% Asserts (by pattern match) that the log entries in the second list
%% are, in order, single-element add_aw operations for the values of
%% the first list, all against the same {Key, Bucket}. Any mismatch in
%% length, order, key or payload shape crashes with function_clause,
%% failing the test.
check_orset_ops([], [], _KeyBucket) ->
ok;
check_orset_ops([Val|Rest1],
[{_Id, #clocksi_payload{key = KeyBucket, type = antidote_crdt_set_aw, op_param = [{Val, _Binary, []}]}}
| Rest2],
KeyBucket) ->
check_orset_ops(Rest1, Rest2, KeyBucket).
%% Asserts (by pattern match) that each expected element pairs up, in
%% order, with a state entry of the form {Element, [Tag]} where the
%% unique tag is a binary. A mismatch crashes with function_clause,
%% failing the test.
check_orset_state([], []) ->
    ok;
check_orset_state([Elem | MoreExpected], [{Elem, [Tag]} | MoreState]) when is_binary(Tag) ->
    check_orset_state(MoreExpected, MoreState).
%% Auxiliary method to add a list of items to a set
%% Each item is committed with its own update_objects/3 transaction,
%% with a sanity read before every write; returns the vector clock
%% accumulated by taking the per-entry max of all commit times seen.
%% (Non-code residue removed from the original closing line.)
add_set(_FirstNode, _BoundObject, [], Commit) ->
    Commit;
add_set(FirstNode, Object, [First|Rest], PrevCommit) ->
    Update = {Object, add, First},
    ReadResult = rpc:call(FirstNode, antidote, read_objects, [ignore, [], [Object]]),
    ?assertMatch({ok, _, _}, ReadResult),
    {ok, Commit} = rpc:call(FirstNode, antidote, update_objects, [ignore, [], [Update]]),
    add_set(FirstNode, Object, Rest, vectorclock:max([PrevCommit, Commit])).
-module(listFns).
-export([nthtail/2, prefix/2, search/2, subtract/2]).
% Returns nth tail of a list
% Requires N to be a non-negative integer and L to be a list
%% Unlike lists:nthtail/2, asking for a tail beyond the end of the
%% list yields [] instead of crashing; a negative N raises
%% function_clause.
nthtail(N, L) when is_integer(N), is_list(L) ->
    drop(N, L).

%% Drop Count elements from the front, stopping early when the list
%% runs out.
drop(Count, _List) when Count < 0 -> erlang:error(function_clause); % Count must be non-negative
drop(0, Remainder) -> Remainder;
drop(_Count, []) -> [];
drop(Count, [_ | Tail]) -> drop(Count - 1, Tail).
% prefix(List1, List2) returns true iff List1 is a prefix of List2
% Requires List1 and List2 to be lists
prefix(List1, List2) when is_list(List1), is_list(List2) ->
    starts_with(List1, List2).

%% The empty list is a prefix of everything; otherwise heads must
%% match pairwise.
starts_with([], _) -> true;
starts_with([H | T1], [H | T2]) -> starts_with(T1, T2);
starts_with(_, _) -> false.

% search(List1, List2) returns a list of indices such that
% List1 is a prefix of List2 starting from each listed index of List2
% Requires List1 and List2 to be lists
%
% eg. search("he", "hello") -> [1]
%     search([1,2], [1,2,1,2,3]) -> [1,3]
%     search([1,2], [1]) -> []
%     search([], []) -> [1] % consider [] to be a prefix of []
search(List1, List2) when is_list(List1), is_list(List2) ->
    find_at(List1, List2, 1, []).

%% Accumulate match positions with cons (O(1) per hit) and reverse
%% once at the end; the original appended with `Indices ++ [Pos]`,
%% which is quadratic in the number of matches.
find_at([], [], Pos, Found) ->
    lists:reverse([Pos | Found]); % [] is a prefix of []
find_at([], [_ | Rest], Pos, Found) ->
    find_at([], Rest, Pos + 1, [Pos | Found]);
find_at(_, [], _Pos, Found) ->
    lists:reverse(Found);
find_at(Needle, Haystack, Pos, Found) ->
    NextFound = case prefix(Needle, Haystack) of
                    true  -> [Pos | Found];
                    false -> Found
                end,
    find_at(Needle, tl(Haystack), Pos + 1, NextFound).
% subtract(List1, List2) returns "subtraction" of List2 from List1
% Returned subtraction is sorted by value
%% Multiset difference: each element of List2 cancels at most one
%% matching occurrence in List1. Both inputs are sorted first, so the
%% result is ascending regardless of input order. (Non-code residue
%% removed from the original closing line; `[H1] ++ ...` replaced with
%% a cons.)
subtract(List1, List2) when is_list(List1), is_list(List2) ->
    subtract_sorted(lists:sort(List1), lists:sort(List2)).

%% Both arguments must already be sorted.
subtract_sorted([], _) ->
    [];
subtract_sorted(Remaining, []) ->
    Remaining;
subtract_sorted([H | T1], [H | T2]) ->
    % Equal heads cancel each other out.
    subtract_sorted(T1, T2);
subtract_sorted([H1 | T1], [H2 | T2]) when H1 < H2 ->
    % H1 cannot appear in the (sorted) subtrahend any more: keep it.
    [H1 | subtract_sorted(T1, [H2 | T2])];
subtract_sorted(L1, [_ | T2]) ->
    % H2 < H1: this subtrahend element matches nothing; drop it.
    subtract_sorted(L1, T2).
%% @author Couchbase <<EMAIL>>
%% @copyright 2017-2018 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(functools).
-export([id/1,
const/1,
compose/1, compose/2,
chain/2,
curry/1,
uncurry/1,
alternative/2,
sequence/1,
sequence_/1,
add/1, add/2,
sub/1, sub/2,
mul/1, mul/2,
idiv/1, idiv/2]).
%% Identity function.
id(X) ->
X.
%% Create a function of one argument that always returns the constant
%% passed.
%% The closure ignores its argument entirely.
const(Value) ->
fun (_) -> Value end.
%% Compose two functions. Note that the order of the function is
%% reversed to what it normally is in such functions: First runs
%% first, then Second (pipeline order).
compose(First, Second) ->
    compose([First, Second]).

%% Compose many functions; the resulting fun applies them left to
%% right, threading each result into the next.
compose(Funs) when is_list(Funs) ->
    fun (Input) ->
            Apply = fun (F, Acc) -> F(Acc) end,
            lists:foldl(Apply, Input, Funs)
    end.

%% Apply the pipeline of Funs to X.
chain(X, Funs) ->
    (compose(Funs))(X).
%% Curry a function: turn a two-argument fun into a chain of two
%% one-argument funs.
curry(F) ->
    fun (A) ->
            fun (B) ->
                    F(A, B)
            end
    end.

%% Uncurry a function: turn a chain of two one-argument funs back
%% into a two-argument fun.
uncurry(F) ->
    fun (A, B) ->
            (F(A))(B)
    end.
%% Apply functions in a row until one succeeds, as indicated by an
%% {ok, _} return value; each fun receives the same Initial argument.
%% Returns false when every fun (or an empty list) fails.
alternative(_Initial, []) ->
    false;
alternative(Initial, [Candidate | Remaining]) ->
    case Candidate(Initial) of
        {ok, _} = Success ->
            Success;
        false ->
            alternative(Initial, Remaining)
    end.
%% Apply functions in sequence and collect the ok results. If any
%% function returns something other than {ok, _}, stop and return
%% that value unchanged.
sequence(Funs) ->
    sequence(Funs, []).

%% Collect results in reverse (cons) and flip once on success.
sequence([], Collected) ->
    {ok, lists:reverse(Collected)};
sequence([F | Remaining], Collected) ->
    case F() of
        {ok, Value} ->
            sequence(Remaining, [Value | Collected]);
        Failure ->
            Failure
    end.
%% Same as sequence/1, but doesn't expect functions to return anything
%% useful in ok case: runs them left to right, returning ok when all
%% return ok, or the first non-ok result otherwise.
sequence_([]) ->
    ok;
sequence_([F | Remaining]) ->
    case F() of
        ok ->
            sequence_(Remaining);
        Failure ->
            Failure
    end.
%% some partially applied built-in operations
%% Each returns a one-argument fun with the second operand of the
%% corresponding operator already fixed.
add(Addend) ->
    fun (X) -> X + Addend end.

sub(Subtrahend) ->
    fun (X) -> X - Subtrahend end.

mul(Factor) ->
    fun (X) -> X * Factor end.

idiv(Divisor) ->
    fun (X) -> X div Divisor end.
%% first-class versions of some built-in operations
%% Plain two-argument wrappers so the operators can be passed around
%% as funs (e.g. fun functools:add/2). (Non-code residue removed from
%% the original closing line.)
add(X, Y) ->
    X + Y.

sub(X, Y) ->
    X - Y.

mul(X, Y) ->
    X * Y.

idiv(X, Y) ->
    X div Y.
%% ====================================================================
%% @author <NAME> <<EMAIL>>
%% @copyright 2016, <NAME>
%% @doc Wrap a JSON parser to provide easy abstractions across
%% implementations and ensure a consistent return interface.
%% @end
%% ====================================================================
-module(rabbitmq_aws_json).
-export([decode/1]).
-spec decode(Value :: string() | binary()) -> list().
%% @doc Decode a JSON string returning a proplist
%% Strings are converted to binaries first; the decoded map or list is
%% then normalized into a proplist with every binary key/value turned
%% into an Erlang string (see convert_binary_values/2).
%% @end
decode(Value) when is_list(Value) ->
decode(list_to_binary(Value));
decode(Value) when is_binary(Value) ->
% We set an empty list of options because we don't want the default
% options set in rabbit_json:decode/1. And we can't override
% 'return_maps' with '{return_maps, false}' because of a bug in jsx's
% options handler.
% See https://github.com/talentdeficit/jsx/pull/115
Decoded0 = rabbit_json:decode(Value, []),
% Depending on the JSON backend the result may be a map or a proplist.
Decoded = if
is_map(Decoded0) -> maps:to_list(Decoded0);
is_list(Decoded0) -> Decoded0
end,
convert_binary_values(Decoded, []).
-spec convert_binary_values(Value :: list(), Accumulator :: list()) -> list().
%% @doc Convert the binary key/value pairs returned by rabbit_json to strings.
%% Recurses into nested maps and lists; non-binary scalars (numbers,
%% atoms, the bare {} tuple) are carried through unchanged. Input order
%% is preserved. (Non-code residue removed from the original final
%% clause; code otherwise unchanged.)
%% @end
convert_binary_values([], Value) -> Value;
%% Pair whose value is a map: recurse over its key/value pairs.
convert_binary_values([{K, V}|T], Accum) when is_map(V) ->
    convert_binary_values(
      T,
      lists:append(
        Accum,
        [{binary_to_list(K), convert_binary_values(maps:to_list(V), [])}]));
%% Pair whose value is a list: recurse into the list.
convert_binary_values([{K, V}|T], Accum) when is_list(V) ->
    convert_binary_values(
      T,
      lists:append(
        Accum,
        [{binary_to_list(K), convert_binary_values(V, [])}]));
%% Empty-object marker passes through untouched.
convert_binary_values([{}|T],Accum) ->
    convert_binary_values(T, lists:append(Accum, [{}]));
%% Pair of binaries: convert both sides to strings.
convert_binary_values([{K, V}|T], Accum) when is_binary(V) ->
    convert_binary_values(T, lists:append(Accum, [{binary_to_list(K), binary_to_list(V)}]));
%% Pair with any other scalar value: convert only the key.
convert_binary_values([{K, V}|T], Accum) ->
    convert_binary_values(T, lists:append(Accum, [{binary_to_list(K), V}]));
%% Bare list members.
convert_binary_values([H|T], Accum) when is_map(H) ->
    convert_binary_values(T, lists:append(Accum, convert_binary_values(maps:to_list(H), [])));
convert_binary_values([H|T], Accum) when is_binary(H) ->
    convert_binary_values(T, lists:append(Accum, [binary_to_list(H)]));
convert_binary_values([H|T], Accum) when is_integer(H) ->
    convert_binary_values(T, lists:append(Accum, [H]));
convert_binary_values([H|T], Accum) when is_atom(H) ->
    convert_binary_values(T, lists:append(Accum, [H]));
convert_binary_values([H|T], Accum) ->
    convert_binary_values(T, lists:append(Accum, convert_binary_values(H, []))).
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc An implementation of {@link otel_propagator_text_map} that injects and
%% extracts trace context using the B3 single header format specification from
%% Zipkin.
%%
%% @see otel_propagator_b3
%% @end
%%%-----------------------------------------------------------------------
-module(otel_propagator_b3single).
-behaviour(otel_propagator_text_map).
-export([fields/1,
inject/4,
extract/5]).
-include("opentelemetry.hrl").
-define(B3_CONTEXT_KEY, <<"b3">>).
%% Names of the carrier fields this propagator reads and writes: the
%% single combined "b3" header.
fields(_) ->
[?B3_CONTEXT_KEY].
-spec inject(Context, Carrier, CarrierSetFun, Options) -> Carrier
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierSetFun :: otel_propagator_text_map:carrier_set(),
Options :: otel_propagator_text_map:propagator_options().
%% Write the current span context into the carrier as a single "b3"
%% value of the form TraceId-SpanId-SampledFlag (32 and 16 lower-hex
%% digits). Contexts with a zero trace or span id are invalid and
%% leave the carrier unchanged.
inject(Ctx, Carrier, CarrierSet, _Options) ->
case otel_tracer:current_span_ctx(Ctx) of
#span_ctx{trace_id=TraceId,
span_id=SpanId,
trace_flags=TraceOptions} when TraceId =/= 0, SpanId =/= 0 ->
% Only the low bit of the trace flags (sampled) is propagated.
Options = case TraceOptions band 1 of 1 -> <<"1">>; _ -> <<"0">> end,
EncodedTraceId = io_lib:format("~32.16.0b", [TraceId]),
EncodedSpanId = io_lib:format("~16.16.0b", [SpanId]),
B3Context = iolist_to_binary([EncodedTraceId, "-", EncodedSpanId, "-", Options]),
CarrierSet(?B3_CONTEXT_KEY, B3Context, Carrier);
_ ->
Carrier
end.
-spec extract(Context, Carrier, CarrierKeysFun, CarrierGetFun, Options) -> Context
when Context :: otel_ctx:t(),
Carrier :: otel_propagator:carrier(),
CarrierKeysFun :: otel_propagator_text_map:carrier_keys(),
CarrierGetFun :: otel_propagator_text_map:carrier_get(),
Options :: otel_propagator_text_map:propagator_options().
%% Parse the "b3" carrier value into a remote span context and set it
%% as the current span in Ctx. Malformed input (throw:invalid from the
%% parsers, or badarg from hex decoding) yields 'undefined'.
%% NOTE(review): returning 'undefined' rather than the unchanged Ctx
%% on failure appears to conflict with the -spec — confirm intended.
extract(Ctx, Carrier, _CarrierKeysFun, CarrierGet, _Options) ->
try
[TraceId, SpanId, Sampled] = parse_b3_context(Carrier, CarrierGet),
SpanCtx = otel_tracer:from_remote_span(TraceId, SpanId, Sampled),
otel_tracer:set_current_span(Ctx, SpanCtx)
catch
throw:invalid ->
undefined;
%% thrown if _to_integer fails or an invalid string encoding is sent
error:badarg ->
undefined
end.
% B3 maps propagation fields into a hyphen delimited string:
% {TraceId}-{SpanId}-{SamplingState}-{ParentSpanId}, where the last two fields are optional.
%
% When only propagating a sampling decision, the header is still named b3, but
% only contains the sampling state:
% {SamplingState}
%% Fetch the raw "b3" value from the carrier and split it on "-";
%% a missing or non-binary value is rejected with throw(invalid),
%% which extract/5 converts to 'undefined'.
parse_b3_context(Carrier, CarrierGet) ->
case CarrierGet(?B3_CONTEXT_KEY, Carrier) of
B3Context when is_binary(B3Context) ->
decode_b3_context(string:split(B3Context, "-", all));
_ ->
throw(invalid)
end.
%% Decode the hyphen-split "b3" parts into [TraceId, SpanId, Sampled].
decode_b3_context([TraceId, SpanId]) ->
% Sampled flag is optional. If it's missing then the sampling decision is
% deferred. We don't currently support it and just set the flag to 0
% instead (similarly how some other OTEL implementations are doing).
[parse_trace_id(TraceId), parse_span_id(SpanId), 0];
decode_b3_context([TraceId, SpanId, Sampled]) ->
[parse_trace_id(TraceId), parse_span_id(SpanId), parse_is_sampled(Sampled)];
%% The optional trailing ParentSpanId is accepted but ignored.
decode_b3_context([TraceId, SpanId, Sampled, _ParentSpanId]) ->
[parse_trace_id(TraceId), parse_span_id(SpanId), parse_is_sampled(Sampled)];
%% Any other shape (e.g. a lone sampling state) is rejected.
decode_b3_context(_) ->
throw(invalid).
% Trace ID is a 32 or 16 lower-hex character binary.
%% Wrong length throws invalid; non-hex characters of the right length
%% surface as error:badarg from binary_to_integer/2, caught in
%% extract/5. (Non-code residue removed from the original final line.)
parse_trace_id(TraceId) when is_binary(TraceId) ->
    case string:length(TraceId) =:= 32 orelse string:length(TraceId) =:= 16 of
        true -> string_to_integer(TraceId, 16);
        _ -> throw(invalid)
    end;
parse_trace_id(_) ->
    throw(invalid).

% Span ID is a 16 lower-hex character binary.
parse_span_id(SpanId) when is_binary(SpanId) ->
    case string:length(SpanId) =:= 16 of
        true -> string_to_integer(SpanId, 16);
        _ -> throw(invalid)
    end;
parse_span_id(_) ->
    throw(invalid).

% Sampling State is encoded as a single hex character for all states except
% Defer. Defer is absence of the sampling field.
%
% Possible states:
% 1 - accept
% 0 - deny
% d - debug (not supported at the moment, we instead used accept)
%
% Before the specification was written, some tracers propagated X-B3-Sampled as
% true or false.
parse_is_sampled(Sampled) when is_binary(Sampled) ->
    case Sampled of
        S when S =:= <<"1">> orelse S =:= <<"d">> orelse S =:= <<"true">> -> 1;
        S when S =:= <<"0">> orelse S =:= <<"false">> -> 0;
        _ -> throw(invalid)
    end;
parse_is_sampled(_) ->
    throw(invalid).

%% Decode a hex-encoded binary into an integer; raises badarg for
%% non-hex input (handled by the caller).
string_to_integer(S, Base) when is_binary(S) ->
    binary_to_integer(S, Base).
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @type close_reason(Type) = {shutdown, amqp_reason(Type)}.
%% @type amqp_reason(Type) = {Type, Code, Text}
%% Code = non_neg_integer()
%% Text = binary().
%% @doc This module is responsible for maintaining a connection to an AMQP
%% broker and manages channels within the connection. This module is used to
%% open and close connections to the broker as well as creating new channels
%% within a connection.<br/>
%% The connections and channels created by this module are supervised under
%% amqp_client's supervision tree. Please note that connections and channels
%% do not get restarted automatically by the supervision tree in the case of a
%% failure. If you need robust connections and channels, we recommend you use
%% Erlang monitors on the returned connection and channel PIDs.<br/>
%% <br/>
%% In case of a failure or an AMQP error, the connection process exits with a
%% meaningful exit reason:<br/>
%% <br/>
%% <table>
%% <tr>
%% <td><strong>Cause</strong></td>
%% <td><strong>Exit reason</strong></td>
%% </tr>
%% <tr>
%% <td>Any reason, where Code would have been 200 otherwise</td>
%% <td>```normal'''</td>
%% </tr>
%% <tr>
%% <td>User application calls amqp_connection:close/3</td>
%% <td>```close_reason(app_initiated_close)'''</td>
%% </tr>
%% <tr>
%% <td>Server closes connection (hard error)</td>
%% <td>```close_reason(server_initiated_close)'''</td>
%% </tr>
%% <tr>
%% <td>Server misbehaved (did not follow protocol)</td>
%% <td>```close_reason(server_misbehaved)'''</td>
%% </tr>
%% <tr>
%% <td>AMQP client internal error - usually caused by a channel exiting
%% with an unusual reason. This is usually accompanied by a more
%% detailed error log from the channel</td>
%% <td>```close_reason(internal_error)'''</td>
%% </tr>
%% <tr>
%% <td>Other error</td>
%% <td>(various error reasons, causing more detailed logging)</td>
%% </tr>
%% </table>
%% <br/>
%% See type definitions below.
-module(amqp_connection).
-include("amqp_client_internal.hrl").
-export([open_channel/1, open_channel/2, open_channel/3, register_blocked_handler/2]).
-export([start/1, start/2, close/1, close/2, close/3, close/4]).
-export([error_atom/1]).
-export([info/2, info_keys/1, info_keys/0]).
-export([connection_name/1, update_secret/3]).
-export([socket_adapter_info/2]).
-define(DEFAULT_CONSUMER, {amqp_selective_consumer, []}).
-define(PROTOCOL_SSL_PORT, (?PROTOCOL_PORT - 1)).
%%---------------------------------------------------------------------------
%% Type Definitions
%%---------------------------------------------------------------------------
%% @type amqp_adapter_info() = #amqp_adapter_info{}.
%% @type amqp_params_direct() = #amqp_params_direct{}.
%% As defined in amqp_client.hrl. It contains the following fields:
%% <ul>
%% <li>username :: binary() - The name of a user registered with the broker,
%% defaults to <<"guest">></li>
%% <li>password :: binary() - The password of the user, defaults to <<"guest">></li>
%% <li>virtual_host :: binary() - The name of a virtual host in the broker,
%% defaults to <<"/">></li>
%% <li>node :: atom() - The node the broker runs on (direct only)</li>
%% <li>adapter_info :: amqp_adapter_info() - Extra management information for if
%% this connection represents a non-AMQP network connection.</li>
%% <li>client_properties :: [{binary(), atom(), binary()}] - A list of extra
%% client properties to be sent to the server, defaults to []</li>
%% </ul>
%%
%% @type amqp_params_network() = #amqp_params_network{}.
%% As defined in amqp_client.hrl. It contains the following fields:
%% <ul>
%% <li>username :: binary() - The name of a user registered with the broker,
%% defaults to <<"guest">></li>
%% <li>password :: binary() - The user's password, defaults to
%% <<"guest">></li>
%% <li>virtual_host :: binary() - The name of a virtual host in the broker,
%% defaults to <<"/">></li>
%% <li>host :: string() - The hostname of the broker,
%% defaults to "localhost" (network only)</li>
%% <li>port :: integer() - The port the broker is listening on,
%% defaults to 5672 (network only)</li>
%% <li>channel_max :: non_neg_integer() - The channel_max handshake parameter,
%% defaults to 0</li>
%% <li>frame_max :: non_neg_integer() - The frame_max handshake parameter,
%% defaults to 0 (network only)</li>
%% <li>heartbeat :: non_neg_integer() - The heartbeat interval in seconds,
%% defaults to 0 (turned off) (network only)</li>
%% <li>connection_timeout :: non_neg_integer() | 'infinity'
%% - The connection timeout in milliseconds,
%% defaults to 30000 (network only)</li>
%% <li>ssl_options :: term() - The second parameter to be used with the
%% ssl:connect/2 function, defaults to 'none' (network only)</li>
%% <li>client_properties :: [{binary(), atom(), binary()}] - A list of extra
%% client properties to be sent to the server, defaults to []</li>
%% <li>socket_options :: [any()] - Extra socket options. These are
%% appended to the default options. See
%% <a href="https://www.erlang.org/doc/man/inet.html#setopts-2">inet:setopts/2</a>
%% and <a href="https://www.erlang.org/doc/man/gen_tcp.html#connect-4">
%% gen_tcp:connect/4</a> for descriptions of the available options.</li>
%% </ul>
%%---------------------------------------------------------------------------
%% Starting a connection
%%---------------------------------------------------------------------------
%% @spec (Params) -> {ok, Connection} | {error, Error}
%% where
%%      Params = amqp_params_network() | amqp_params_direct()
%%      Connection = pid()
%% @doc same as {@link amqp_connection:start/2. start(Params, undefined)}
%% (i.e. without a user-specified connection name).
start(AmqpParams) ->
start(AmqpParams, undefined).
%% @spec (Params, ConnectionName) -> {ok, Connection} | {error, Error}
%% where
%%      Params = amqp_params_network() | amqp_params_direct()
%%      ConnectionName = undefined | binary()
%%      Connection = pid()
%% @doc Starts a connection to an AMQP server. Use network params to
%% connect to a remote AMQP server or direct params for a direct
%% connection to a RabbitMQ server, assuming that the server is
%% running in the same process space. If the port is set to 'undefined',
%% the default ports will be selected depending on whether this is a
%% normal or an SSL connection.
%% If ConnectionName is binary - it will be added to client_properties as
%% user specified connection name.
start(AmqpParams, ConnName) when ConnName == undefined; is_binary(ConnName) ->
    ensure_started(),
    %% Never hold the plaintext password in the params record: encrypt
    %% it via the credentials_obfuscation application (started by
    %% ensure_started/0). The anonymized "<PASSWORD>" placeholder in the
    %% previous revision is restored to the real module here.
    AmqpParams0 =
        case AmqpParams of
            #amqp_params_direct{password = Password} ->
                AmqpParams#amqp_params_direct{password = credentials_obfuscation:encrypt(Password)};
            #amqp_params_network{password = Password} ->
                AmqpParams#amqp_params_network{password = credentials_obfuscation:encrypt(Password)}
        end,
    %% Choose the default AMQP or AMQPS port when none was supplied.
    AmqpParams1 =
        case AmqpParams0 of
            #amqp_params_network{port = undefined, ssl_options = none} ->
                AmqpParams0#amqp_params_network{port = ?PROTOCOL_PORT};
            #amqp_params_network{port = undefined, ssl_options = _} ->
                AmqpParams0#amqp_params_network{port = ?PROTOCOL_SSL_PORT};
            _ ->
                AmqpParams0
        end,
    AmqpParams2 = set_connection_name(ConnName, AmqpParams1),
    AmqpParams3 = amqp_ssl:maybe_enhance_ssl_options(AmqpParams2),
    {ok, _Sup, Connection} = amqp_sup:start_connection_sup(AmqpParams3),
    amqp_gen_connection:connect(Connection).
%% Prepends the user-supplied connection name (if any) to the client
%% properties sent to the server, under the <<"connection_name">> key.
%% With an 'undefined' name the parameters are returned unchanged.
set_connection_name(undefined, Params) -> Params;
set_connection_name(ConnName,
                    #amqp_params_network{client_properties = Props} = Params) ->
    Params#amqp_params_network{
        client_properties = [
            {<<"connection_name">>, longstr, ConnName} | Props
        ]};
set_connection_name(ConnName,
                    #amqp_params_direct{client_properties = Props} = Params) ->
    Params#amqp_params_direct{
        client_properties = [
            {<<"connection_name">>, longstr, ConnName} | Props
        ]}.
%% Usually the amqp_client application will already be running. We
%% check whether that is the case by invoking an undocumented function
%% which does not require a synchronous call to the application
%% controller. That way we don't risk a dead-lock if, say, the
%% application controller is in the process of shutting down the very
%% application which is making this call.
%% Ensures every application the AMQP client depends on is running; see
%% the comment above for why application_controller:get_master/1 is used
%% as a fast, non-blocking "is it up?" probe.
ensure_started() ->
    [ensure_started(App) || App <- [syntax_tools, compiler, xmerl,
                                    rabbit_common, amqp_client, credentials_obfuscation]].

%% Starts App (and its dependencies) unless it is already running and the
%% amqp_client top-level supervisor is ready. Throws {error, _} when
%% startup fails.
ensure_started(App) ->
    case is_pid(application_controller:get_master(App)) andalso amqp_sup:is_ready() of
        true -> ok;
        false -> case application:ensure_all_started(App) of
                     {ok, _} -> ok;
                     {error, _} = E -> throw(E)
                 end
    end.
%%---------------------------------------------------------------------------
%% Commands
%%---------------------------------------------------------------------------

%% @doc Invokes open_channel(ConnectionPid, none,
%% {amqp_selective_consumer, []}). Opens a channel without having to
%% specify a channel number. This uses the default consumer
%% implementation.
open_channel(ConnectionPid) ->
    open_channel(ConnectionPid, none, ?DEFAULT_CONSUMER).

%% @doc Invokes open_channel(ConnectionPid, none, Consumer).
%% Opens a channel without having to specify a channel number.
%% The two clauses of open_channel/2 dispatch on the second argument:
%% a {Module, Args} tuple is a consumer spec, a number (or 'none') is a
%% proposed channel number.
open_channel(ConnectionPid, {_, _} = Consumer) ->
    open_channel(ConnectionPid, none, Consumer);
%% @doc Invokes open_channel(ConnectionPid, ChannelNumber,
%% {amqp_selective_consumer, []}). Opens a channel, using the default
%% consumer implementation.
open_channel(ConnectionPid, ChannelNumber)
  when is_number(ChannelNumber) orelse ChannelNumber =:= none ->
    open_channel(ConnectionPid, ChannelNumber, ?DEFAULT_CONSUMER).
%% @spec (ConnectionPid, ChannelNumber, Consumer) -> Result
%% where
%%      ConnectionPid = pid()
%%      ChannelNumber = pos_integer() | 'none'
%%      Consumer = {ConsumerModule, ConsumerArgs}
%%      ConsumerModule = atom()
%%      ConsumerArgs = [any()]
%%      Result = {ok, ChannelPid} | {error, Error}
%%      ChannelPid = pid()
%% @doc Opens an AMQP channel.<br/>
%% Opens a channel, using a proposed channel number and a specific consumer
%% implementation.<br/>
%% ConsumerModule must implement the amqp_gen_consumer behaviour. ConsumerArgs
%% is passed as parameter to ConsumerModule:init/1.<br/>
%% This function assumes that an AMQP connection (networked or direct)
%% has already been successfully established.<br/>
%% ChannelNumber must be less than or equal to the negotiated
%% max_channel value, or less than or equal to ?MAX_CHANNEL_NUMBER
%% (65535) if the negotiated max_channel value is 0.<br/>
%% In the direct connection, max_channel is always 0.
%% @end
open_channel(ConnectionPid, ChannelNumber,
             {_ConsumerModule, _ConsumerArgs} = Consumer) ->
    %% All validation is delegated to the connection process.
    amqp_gen_connection:open_channel(ConnectionPid, ChannelNumber, Consumer).
%% @spec (ConnectionPid) -> ok | Error
%% where
%%      ConnectionPid = pid()
%% @doc Closes the connection, invokes
%% close(ConnectionPid, 200, <<"Goodbye">>).
close(ConnectionPid) ->
    close(ConnectionPid, 200, <<"Goodbye">>).

%% @spec (ConnectionPid, Timeout) -> ok | Error
%% where
%%      ConnectionPid = pid()
%%      Timeout = integer()
%% @doc Closes the connection, using the supplied Timeout value.
close(ConnectionPid, Timeout) ->
    close(ConnectionPid, 200, <<"Goodbye">>, Timeout).

%% @spec (ConnectionPid, Code, Text) -> ok | closing
%% where
%%      ConnectionPid = pid()
%%      Code = integer()
%%      Text = binary()
%% @doc Closes the AMQP connection, allowing the caller to set the reply
%% code and text.
close(ConnectionPid, Code, Text) ->
    close(ConnectionPid, Code, Text, amqp_util:call_timeout()).

%% @spec (ConnectionPid, Code, Text, Timeout) -> ok | closing
%% where
%%      ConnectionPid = pid()
%%      Code = integer()
%%      Text = binary()
%%      Timeout = integer()
%% @doc Closes the AMQP connection, allowing the caller to set the reply
%% code and text, as well as a timeout for the operation, after which the
%% connection will be abruptly terminated.
close(ConnectionPid, Code, Text, Timeout) ->
    %% class_id/method_id 0 mean the close was not caused by a failing
    %% protocol method.
    Close = #'connection.close'{reply_text = Text,
                                reply_code = Code,
                                class_id   = 0,
                                method_id  = 0},
    amqp_gen_connection:close(ConnectionPid, Close, Timeout).
%% @doc Registers a handler process that will receive
%% connection.blocked / connection.unblocked notifications.
register_blocked_handler(ConnectionPid, BlockHandler) ->
    amqp_gen_connection:register_blocked_handler(ConnectionPid, BlockHandler).

%% @doc Sends a connection.update_secret method to rotate the credential
%% used by the connection (e.g. an expiring OAuth token).
-spec update_secret(pid(), term(), binary()) ->
    {'ok', rabbit_types:auth_user()} |
    {'refused', string(), [any()]} |
    {'error', any()}.
update_secret(ConnectionPid, NewSecret, Reason) ->
    Update = #'connection.update_secret'{new_secret = NewSecret,
                                         reason = Reason},
    amqp_gen_connection:update_secret(ConnectionPid, Update).
%%---------------------------------------------------------------------------
%% Other functions
%%---------------------------------------------------------------------------

%% @spec (Code) -> atom()
%% where
%%      Code = integer()
%% @doc Returns a descriptive atom corresponding to the given AMQP
%% error code. ?PROTOCOL is a macro naming the protocol codec module.
error_atom(Code) -> ?PROTOCOL:amqp_exception(Code).
%% @spec (ConnectionPid, Items) -> ResultList
%% where
%%      ConnectionPid = pid()
%%      Items = [Item]
%%      ResultList = [{Item, Result}]
%%      Item = atom()
%%      Result = term()
%% @doc Returns information about the connection, as specified by the Items
%% list. Item may be any atom returned by info_keys/1:
%%<ul>
%%<li>type - returns the type of the connection (network or direct)</li>
%%<li>server_properties - returns the server_properties fields sent by the
%%    server while establishing the connection</li>
%%<li>is_closing - returns true if the connection is in the process of closing
%%    and false otherwise</li>
%%<li>amqp_params - returns the #amqp_params{} structure used to start the
%%    connection</li>
%%<li>num_channels - returns the number of channels currently open under the
%%    connection (excluding channel 0)</li>
%%<li>channel_max - returns the channel_max value negotiated with the
%%    server</li>
%%<li>heartbeat - returns the heartbeat value negotiated with the server
%%    (only for the network connection)</li>
%%<li>frame_max - returns the frame_max value negotiated with the
%%    server (only for the network connection)</li>
%%<li>sock - returns the socket for the network connection (for use with
%%    e.g. inet:sockname/1) (only for the network connection)</li>
%%<li>any other value - throws an exception</li>
%%</ul>
%% @end
info(ConnectionPid, Items) ->
    amqp_gen_connection:info(ConnectionPid, Items).
%% @spec (ConnectionPid) -> Items
%% where
%%      ConnectionPid = pid()
%%      Items = [Item]
%%      Item = atom()
%% @doc Returns a list of atoms that can be used in conjunction with info/2.
%% Note that the list differs from a type of connection to another (network vs.
%% direct). Use info_keys/0 to get a list of info keys that can be used for
%% any connection.
info_keys(ConnectionPid) ->
    amqp_gen_connection:info_keys(ConnectionPid).

%% @spec () -> Items
%% where
%%      Items = [Item]
%%      Item = atom()
%% @doc Returns a list of atoms that can be used in conjunction with info/2.
%% These are general info keys, which can be used in any type of connection.
%% Other info keys may exist for a specific type. To get the full list of
%% atoms that can be used for a certain connection, use info_keys/1.
info_keys() ->
    amqp_gen_connection:info_keys().

%% @doc Takes a socket and a protocol, returns an #amqp_adapter_info{}
%% based on the socket for the protocol given.
socket_adapter_info(Sock, Protocol) ->
    amqp_direct_connection:socket_adapter_info(Sock, Protocol).
%% @spec (ConnectionPid) -> ConnectionName
%% where
%%      ConnectionPid = pid()
%%      ConnectionName = binary()
%% @doc Returns the user specified connection name from the client
%% properties, or 'undefined' when no name was set.
%% (Fix: removed dataset-metadata garbage that trailed the final "end.",
%% which made the module unparsable.)
connection_name(ConnectionPid) ->
    ClientProperties = case info(ConnectionPid, [amqp_params]) of
        [{_, #amqp_params_network{client_properties = Props}}] -> Props;
        [{_, #amqp_params_direct{client_properties = Props}}] -> Props
    end,
    case lists:keyfind(<<"connection_name">>, 1, ClientProperties) of
        {<<"connection_name">>, _, ConnName} -> ConnName;
        false -> undefined
    end.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2012 Basho Technologies, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(basic_command_line).
-include_lib("eunit/include/eunit.hrl").
-behavior(riak_test).
-compile(export_all).
-export([confirm/0]).
%% riak_test entry point: deploys a single node and exercises the riak
%% command-line tools in both the node-up and node-down states.
confirm() ->
    %% Deploy a node to test against
    lager:info("Deploy node to test command line"),
    [Node] = rt:deploy_nodes(1),
    ?assertEqual(ok, rt:wait_until_nodes_ready([Node])),

    %%% Verify node-up behavior
    ping_up_test(Node),
    attach_direct_up_test(Node),
    status_up_test(Node),
    console_up_test(Node),
    start_up_test(Node),
    getpid_up_test(Node),

    %%% Stop the node, Verify node-down behavior
    %% stop_test/1 leaves the node stopped; the *_down tests rely on that.
    stop_test(Node),
    ping_down_test(Node),
    attach_down_test(Node),
    attach_direct_down_test(Node),
    status_down_test(Node),
    console_test(Node),
    start_test(Node),
    getpid_down_test(Node),
    pass.
%% `riak console` must refuse to start when the node is already running.
console_up_test(Node) ->
    lager:info("Node is already up, `riak console` should fail"),
    {ok, ConsoleFail} = rt:riak(Node, ["console"]),
    ?assert(rt:str(ConsoleFail, "Node is already running")),
    ok.

%% With the node down, `riak console` must boot an interactive shell.
console_test(Node) ->
    %% Make sure the cluster will start up with /usr/sbin/riak console, then quit
    lager:info("Testing riak console on ~s", [Node]),
    %% Stop node, to test console working
    rt:console(Node, [{expect, "\(abort with ^G\)"},
                      {send, "riak_core_ring_manager:get_my_ring()."},
                      {expect, "dict,"},
                      {send, "q()."},
                      {expect, "ok"}]),
    rt:wait_until_unpingable(Node),
    ok.

%% `riak start` on a running node must report it is already running.
start_up_test(Node) ->
    %% Try starting again and check you get the node is already running message
    lager:info("Testing riak start now will return 'already running'"),
    {ok, StartOut} = rt:riak(Node, ["start"]),
    ?assert(rt:str(StartOut, "Node is already running!")),
    ok.

%% With the node down, `riak start` must succeed (empty output or only a
%% WARNING banner); the node is stopped again afterwards.
start_test(Node) ->
    %% Test starting with /bin/riak start
    lager:info("Testing riak start works on ~s", [Node]),
    {ok, StartPass} = rt:riak(Node, ["start"]),
    lager:info("StartPass: ~p", [StartPass]),
    ?assert(StartPass =:= "" orelse string:str(StartPass, "WARNING") =/= 0),
    rt:stop_and_wait(Node),
    ok.
%% Stops the node via `riak stop` and verifies it becomes unpingable.
%% NOTE(review): this is the only rt:riak/2 call site passing a bare
%% string ("stop") rather than an argument list (["stop"]); it appears to
%% work because the harness treats the argument as iodata — confirm and
%% align with the other call sites.
stop_test(Node) ->
    ?assert(rt:is_pingable(Node)),
    {ok, "ok\n"} = rt:riak(Node, "stop"),
    rt:wait_until_unpingable(Node),
    ?assertNot(rt:is_pingable(Node)),
    ok.
%% `riak ping` against a running node must answer "pong".
ping_up_test(Node) ->
    %% check /usr/sbin/riak ping
    lager:info("Testing riak ping on ~s", [Node]),

    %% ping / pong
    %% rt:start_and_wait(Node),
    lager:info("Node up, should ping"),
    {ok, PongOut} = rt:riak(Node, ["ping"]),
    ?assert(rt:str(PongOut, "pong")),
    ok.

%% `riak ping` against a stopped node must report it as unresponsive.
ping_down_test(Node) ->
    %% ping / pang
    lager:info("Node down, should pang"),
    {ok, PangOut} = rt:riak(Node, ["ping"]),
    ?assert(rt:str(PangOut, "not responding to pings")),
    ok.

%% `riak attach` against a stopped node must fail with a clear message.
attach_down_test(Node) ->
    lager:info("Testing riak attach while down"),
    {ok, AttachOut} = rt:riak(Node, ["attach"]),
    ?assert(rt:str(AttachOut, "Node is not running!")),
    ok.

%% `riak attach-direct` on a running node must give an interactive shell;
%% the session is terminated by sending Ctrl+D (byte 4).
attach_direct_up_test(Node) ->
    lager:info("Testing riak attach-direct"),
    rt:attach_direct(Node, [{expect, "\(^D to exit\)"},
                            {send, "riak_core_ring_manager:get_my_ring()."},
                            {expect, "dict,"},
                            {send, [4]}]), %% 4 = Ctrl + D
    ok.

%% `riak attach-direct` against a stopped node must fail.
attach_direct_down_test(Node) ->
    lager:info("Testing riak attach-direct while down"),
    {ok, AttachOut} = rt:riak(Node, ["attach-direct"]),
    ?assert(rt:str(AttachOut, "Node is not running!")),
    ok.

%% `riak-admin status` on a running node: exit code 0 and stats output.
status_up_test(Node) ->
    lager:info("Test riak-admin status on ~s", [Node]),
    {ok, {ExitCode, StatusOut}} = rt:admin(Node, ["status"], [return_exit_code]),
    io:format("Result of status: ~s", [StatusOut]),
    ?assertEqual(0, ExitCode),
    ?assert(rt:str(StatusOut, "1-minute stats")),
    ?assert(rt:str(StatusOut, "kernel_version")),
    ok.

%% `riak-admin status` on a stopped node: exit code 1 and an error message.
status_down_test(Node) ->
    lager:info("Test riak-admin status while down"),
    {ok, {ExitCode, StatusOut}} = rt:admin(Node, ["status"], [return_exit_code]),
    ?assertEqual(1, ExitCode),
    ?assert(rt:str(StatusOut, "Node is not running!")),
    ok.
%% `riak getpid` on a running node must print the node's OS pid.
getpid_up_test(Node) ->
    lager:info("Test riak getpid on ~s", [Node]),
    {ok, PidOut} = rt:riak(Node, ["getpid"]),
    %% NOTE(review): searching for the empty string presumably always
    %% succeeds, which would make this ?assertNot always fail; it looks
    %% intended to assert "output is non-empty" — confirm rt:str/2
    %% semantics for an empty needle.
    ?assertNot(rt:str(PidOut, "")),
    %% The printed pid must match what the node itself reports via os:getpid/0.
    ?assert(rt:str(PidOut, rpc:call(Node, os, getpid, []))),
    ok.
%% `riak getpid` against a stopped node must report it is not running.
%% (Fix: removed dataset-metadata garbage that trailed the final "ok.",
%% which made the module unparsable.)
getpid_down_test(Node) ->
    lager:info("Test riak getpid fails on ~s", [Node]),
    {ok, PidOut} = rt:riak(Node, ["getpid"]),
    ?assert(rt:str(PidOut, "Node is not running!")),
    ok.
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2020 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module encapsulates concepts related to provider sync progress
%%% statistics in a space. The sync progress of a provider is represented as a
%%% summary of dbsync sequence numbers of other providers supporting the space
%%% that were seen and processed by the provider.
%%% @end
%%%-------------------------------------------------------------------
-module(provider_sync_progress).
-author("<NAME>").
-type provider_id() :: onedata:service_id().
% Denotes the database sequence - an increasing number that reflects the number
% of changed documents in database since the beginning of space support.
-type seq() :: non_neg_integer().
-type timestamp() :: time:seconds().
% Stores information about latest sequence currently known for given provider.
% Each provider has its own sync progress stats, which allows checking if it is
% up to date with other providers or falling behind.
% Includes information about self, which is always up to date and used for
% comparing with the sequence other providers know about the provider.
% Timestamp denotes the time when the sequence was seen.
-type stats() :: #{provider_id() => {seq(), timestamp()}}.
% Holds sync progress stats for each provider supporting the space.
-type per_provider() :: #{provider_id() => stats()}.
-export_type([stats/0, per_provider/0]).
-export([lookup_by_provider/2]).
-export([coalesce_all/2]).
-export([update_for_provider/4]).
-export([inspect_progress_between/3]).
-export([to_json/1, from_json/1]).
-export([per_provider_to_json/1, per_provider_from_json/1]).
%%%===================================================================
%%% API
%%%===================================================================
%% Looks up the sync progress stats recorded for ProviderId; returns
%% 'error' when the provider has no entry.
-spec lookup_by_provider(per_provider(), provider_id()) ->
    {ok, stats()} | error.
lookup_by_provider(PerProvider, ProviderId) ->
    case maps:is_key(ProviderId, PerProvider) of
        true -> {ok, maps:get(ProviderId, PerProvider)};
        false -> error
    end.
%%--------------------------------------------------------------------
%% @doc
%% Coalesces the sync progress for all providers (see update_for_provider/4).
%% Providers without an existing entry start from an empty stats map.
%% @end
%%--------------------------------------------------------------------
-spec coalesce_all(per_provider(), [provider_id()]) ->
    per_provider().
coalesce_all(PerProvider, AllProviders) ->
    Coalesce = fun(ProviderId, Acc) ->
        Existing = maps:get(ProviderId, Acc, #{}),
        update_for_provider(Acc, AllProviders, ProviderId, Existing)
    end,
    lists:foldl(Coalesce, PerProvider, AllProviders).
%%--------------------------------------------------------------------
%% @doc
%% Updates sync progress for a provider. The given stats are first
%% coalesced against the list of supporting providers: entries for
%% providers that no longer support the space are dropped, and missing
%% supporting providers get the default entry {1, 0} (seq 1, epoch 0).
%% @end
%%--------------------------------------------------------------------
-spec update_for_provider(per_provider(), [provider_id()], provider_id(), stats()) ->
    per_provider().
update_for_provider(PerProvider, AllProviders, ProviderId, SyncProgressStats) ->
    %% Drop entries for providers that are not (any longer) supporting.
    Retained = maps:with(AllProviders, SyncProgressStats),
    %% Fill in defaults for supporting providers without an entry.
    Filled = lists:foldl(
        fun(Provider, Acc) ->
            case maps:is_key(Provider, Acc) of
                true -> Acc;
                false -> Acc#{Provider => {1, 0}}
            end
        end, Retained, AllProviders),
    PerProvider#{ProviderId => Filled}.
%%--------------------------------------------------------------------
%% @doc
%% Inspects the SubjectProvider's knowledge of OtherProvider's dbsync
%% sequences. Returns {Known, Latest} where Known is the highest sequence
%% of OtherProvider seen by SubjectProvider, and Latest is the newest
%% sequence known at all. Because reporting happens in intervals, Known
%% may temporarily exceed what OtherProvider itself reported, so the
%% maximum of the two is taken as Latest. Calling it with the same
%% provider twice yields the same value twice.
%% @end
%%--------------------------------------------------------------------
-spec inspect_progress_between(per_provider(), provider_id(), provider_id()) ->
    {Known :: seq(), Latest :: seq()}.
inspect_progress_between(PerProvider, SubjectProvider, OtherProvider) ->
    SubjectStats = maps:get(SubjectProvider, PerProvider),
    OtherStats = maps:get(OtherProvider, PerProvider),
    #{OtherProvider := {KnownSeq, _SeenAt}} = SubjectStats,
    #{OtherProvider := {ReportedSeq, _ReportedAt}} = OtherStats,
    {KnownSeq, max(KnownSeq, ReportedSeq)}.
%% Serializes a stats map to its JSON-object form: each {Seq, Timestamp}
%% tuple becomes #{<<"seq">> => Seq, <<"timestamp">> => Timestamp}.
-spec to_json(stats()) -> json_utils:json_map().
to_json(SyncProgressStats) ->
    maps:fold(fun(ProviderId, {Seq, Timestamp}, Acc) ->
        Acc#{ProviderId => #{<<"seq">> => Seq, <<"timestamp">> => Timestamp}}
    end, #{}, SyncProgressStats).
%% Deserializes the JSON-object form back into a stats map; crashes
%% (function_clause) on malformed keys or non-integer seq/timestamp.
-spec from_json(json_utils:json_map()) -> stats().
from_json(SyncProgressStatsJson) ->
    ToEntry = fun(ProviderId, #{<<"seq">> := Seq, <<"timestamp">> := Timestamp})
                  when is_binary(ProviderId), is_integer(Seq), is_integer(Timestamp) ->
        {Seq, Timestamp}
    end,
    maps:map(ToEntry, SyncProgressStatsJson).
%% Serializes the whole per-provider structure by applying to_json/1 to
%% each provider's stats map.
-spec per_provider_to_json(per_provider()) -> json_utils:json_map().
per_provider_to_json(PerProvider) ->
    maps:map(fun(_PrId, SyncProgressStats) ->
        to_json(SyncProgressStats)
    end, PerProvider).
%% Deserializes the whole per-provider structure by applying from_json/1
%% to each provider's JSON stats object.
%% (Fix: removed dataset-metadata garbage that trailed the final ").",
%% which made the module unparsable.)
-spec per_provider_from_json(json_utils:json_map()) -> per_provider().
per_provider_from_json(PerProviderJson) ->
    maps:map(fun(_PrId, SyncProgressStatsJson) ->
        from_json(SyncProgressStatsJson)
    end, PerProviderJson).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_log_util).
-export([
should_log/1,
iso8601_timestamp/0,
get_msg_id/0,
level_to_integer/1,
level_to_atom/1,
level_to_string/1,
string_p/1
]).
-include("couch_log.hrl").
%% True when the entry's level is at or above the configured minimum
%% level (compared numerically via level_to_integer/1 against the cached
%% level_int from couch_log_config).
-spec should_log(#log_entry{} | atom()) -> boolean().
should_log(#log_entry{level = Level}) ->
    should_log(Level);
should_log(Level) ->
    level_to_integer(Level) >= couch_log_config:get(level_int).
%% Formats the current UTC time as an ISO 8601 timestamp with
%% microsecond precision, e.g. "2024-01-31T12:34:56.789012Z".
%% Returns chardata produced by io_lib:format/2 (not a flat list).
-spec iso8601_timestamp() -> string().
iso8601_timestamp() ->
    Now = {_MegaSecs, _Secs, MicroSecs} = os:timestamp(),
    {{Year, Month, Day}, {Hour, Min, Sec}} = calendar:now_to_datetime(Now),
    Fmt = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0B.~6.10.0BZ",
    io_lib:format(Fmt, [Year, Month, Day, Hour, Min, Sec, MicroSecs]).
%% Returns the request nonce stashed in the process dictionary, or a
%% placeholder of eight dashes when no nonce has been set.
-spec get_msg_id() -> string().
get_msg_id() ->
    Nonce = erlang:get(nonce),
    case Nonce =:= undefined of
        true -> "--------";
        false -> Nonce
    end.
%% Maps a log level given as an atom, string, or integer to its numeric
%% severity (1 = debug ... 9 = none). Aliases (warn/warning, err/error,
%% crit/critical, emerg/emergency) map to the same value.
%% Fix: the pass-through clause now requires is_integer/1 — previously a
%% float in [0.0, 9.0] satisfied the guards and was returned unchanged,
%% violating the -> integer() contract in the spec.
-spec level_to_integer(atom() | string() | integer()) -> integer().
level_to_integer(L) when is_integer(L), L >= 0, L =< 9 -> L;
level_to_integer(debug) -> 1;
level_to_integer(info) -> 2;
level_to_integer(notice) -> 3;
level_to_integer(warning) -> 4;
level_to_integer(warn) -> 4;
level_to_integer(error) -> 5;
level_to_integer(err) -> 5;
level_to_integer(critical) -> 6;
level_to_integer(crit) -> 6;
level_to_integer(alert) -> 7;
level_to_integer(emergency) -> 8;
level_to_integer(emerg) -> 8;
level_to_integer(none) -> 9;
level_to_integer("debug") -> 1;
level_to_integer("info") -> 2;
level_to_integer("notice") -> 3;
level_to_integer("warning") -> 4;
level_to_integer("warn") -> 4;
level_to_integer("error") -> 5;
level_to_integer("err") -> 5;
level_to_integer("critical") -> 6;
level_to_integer("crit") -> 6;
level_to_integer("alert") -> 7;
level_to_integer("emergency") -> 8;
level_to_integer("emerg") -> 8;
level_to_integer("none") -> 9;
level_to_integer("1") -> 1;
level_to_integer("2") -> 2;
level_to_integer("3") -> 3;
level_to_integer("4") -> 4;
level_to_integer("5") -> 5;
level_to_integer("6") -> 6;
level_to_integer("7") -> 7;
level_to_integer("8") -> 8;
level_to_integer("9") -> 9.
%% Maps a log level given as an atom, string, or integer to its canonical
%% atom. Atoms pass through unchanged; integers are converted to strings
%% first; any unrecognized string falls back to 'info' (last clause).
-spec level_to_atom(atom() | string() | integer()) -> atom().
level_to_atom(L) when is_atom(L) -> L;
level_to_atom("1") -> debug;
level_to_atom("debug") -> debug;
level_to_atom("2") -> info;
level_to_atom("info") -> info;
level_to_atom("3") -> notice;
level_to_atom("notice") -> notice;
level_to_atom("4") -> warning;
level_to_atom("warning") -> warning;
level_to_atom("warn") -> warning;
level_to_atom("5") -> error;
level_to_atom("error") -> error;
level_to_atom("err") -> error;
level_to_atom("6") -> critical;
level_to_atom("critical") -> critical;
level_to_atom("crit") -> critical;
level_to_atom("7") -> alert;
level_to_atom("alert") -> alert;
level_to_atom("8") -> emergency;
level_to_atom("emergency") -> emergency;
level_to_atom("emerg") -> emergency;
level_to_atom("9") -> none;
level_to_atom("none") -> none;
%% Integers are routed through the string clauses above.
level_to_atom(V) when is_integer(V) -> level_to_atom(integer_to_list(V));
%% Unknown strings default to 'info' rather than crashing.
level_to_atom(V) when is_list(V) -> info.
%% Renders a level (atom, string, or integer) as the canonical level
%% string, normalizing through level_to_atom/1 when needed.
level_to_string(L) when is_atom(L) -> atom_to_list(L);
level_to_string(L) -> atom_to_list(level_to_atom(L)).
% From error_logger_file_h via lager_stdlib.erl
%% Heuristic "is this a printable string?" check: true for non-empty
%% (possibly nested) lists of printable latin-1 characters and common
%% whitespace/control escapes; false for anything else.
%% (Fix: removed dataset-metadata garbage that trailed the final
%% "false.", which made the module unparsable.)
string_p([]) ->
    false;
string_p(Term) ->
    string_p1(Term).

%% Walks the (possibly nested) list; any element that is neither a
%% printable character, a recognized escape, nor itself a string makes
%% the whole term non-stringy (final catch-all clause).
string_p1([H | T]) when is_integer(H), H >= $\s, H < 256 ->
    string_p1(T);
string_p1([$\n | T]) ->
    string_p1(T);
string_p1([$\r | T]) ->
    string_p1(T);
string_p1([$\t | T]) ->
    string_p1(T);
string_p1([$\v | T]) ->
    string_p1(T);
string_p1([$\b | T]) ->
    string_p1(T);
string_p1([$\f | T]) ->
    string_p1(T);
string_p1([$\e | T]) ->
    string_p1(T);
string_p1([H | T]) when is_list(H) ->
    case string_p1(H) of
        true -> string_p1(T);
        _ -> false
    end;
string_p1([]) ->
    true;
string_p1(_) ->
    false.
%% @doc Zotonic/Adlib integration
-module(mod_ginger_adlib).
-author("Driebit <<EMAIL>>").
-mod_title("Adlib").
-mod_prio(500).
-mod_description("Integrates Zotonic with the Adlib API.").
-behaviour(gen_server).
-export([
observe_search_query/2,
pid_observe_tick_1m/3,
pid_observe_tick_1h/3,
pid_observe_tick_24h/3,
endpoint/1,
enabled_databases/1,
pull_updates/2,
pull_database_updates/3,
pull_record/3,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3,
start_link/1
]).
-include_lib("zotonic.hrl").
-include_lib("include/ginger_adlib.hrl").
-record(state, {context}).
%% @doc Pull records modified after a date from all enabled Adlib databases
-spec pull_updates(calendar:datetime() | string(), z:context()) -> list(ok).
pull_updates(Since, Context) ->
    [pull_database_updates(Database, Since, Context) || Database <- enabled_databases(Context)].

%% @doc Pull records modified after a date from an Adlib database
-spec pull_database_updates(binary(), calendar:datetime(), z:context()) -> ok.
pull_database_updates(Database, Since, Context) ->
    DateTime = z_datetime:to_datetime(Since),
    pull_database_updates(Database, DateTime, 1, Context).

%% Internal: pages through the Adlib search results 20 records at a time,
%% emitting an #adlib_update{} notification per record; recursion stops
%% on the first empty page. The datetime clause formats Since using the
%% modification-date format the target server understands.
pull_database_updates(Database, Since, StartFrom, Context) when is_tuple(Since) ->
    Format = detect_modification_date_format(Database, "1900-01-01", Context),
    pull_database_updates(Database, z_datetime:format(Since, Format, Context), StartFrom, Context);
pull_database_updates(Database, Since, StartFrom, Context) when is_binary(Since) ->
    Args = [
        {database, Database},
        {search, <<"modification>=", Since/binary>>}
    ],
    #search_result{result = Records, total = Total} = z_search:search({adlib, Args}, {StartFrom, 20}, Context),
    case Records of
        [] ->
            lager:info("mod_ginger_adlib: Pulled ~p records modified after ~s from database ~s", [Total, Since, Database]),
            ok;
        _ ->
            [z_notifier:notify(adlib_update(Record, Database), Context) || Record <- Records],
            pull_database_updates(Database, Since, StartFrom + 20, Context)
    end.
%% @doc Pull single record update from Adlib
%% Searches the database by priref; when exactly one record is found an
%% #adlib_update{} notification is emitted, otherwise nothing happens.
-spec pull_record(binary(), binary(), z:context()) -> ok.
pull_record(Database, Priref, Context) ->
    Args = [
        {database, Database},
        {search, <<"priref=", (z_convert:to_binary(Priref))/binary>>}
    ],
    #search_result{result = Records} = z_search:search({adlib, Args}, {1, 1}, Context),
    case Records of
        [Record] ->
            z_notifier:notify(adlib_update(Record, Database), Context);
        _ ->
            ok
    end.

%% Builds the notification record carrying a changed Adlib record.
adlib_update(Record, Database) ->
    #adlib_update{record = Record, database = Database}.
%% Handles only {adlib, _} search queries; all other searches fall
%% through (undefined) so other modules can handle them.
observe_search_query(#search_query{search = {adlib, _Args}} = Query, Context) ->
    ginger_adlib_search:search(Query, Context);
observe_search_query(#search_query{}, _Context) ->
    undefined.

%% Periodic tick observers: each forwards to pull_updates_when_needed/3
%% with its tick interval in seconds, so only the tick matching the
%% configured poll_frequency actually triggers a pull.
pid_observe_tick_1m(Pid, tick_1m, Context) ->
    pull_updates_when_needed(Pid, 60, Context).

pid_observe_tick_1h(Pid, tick_1h, Context) ->
    pull_updates_when_needed(Pid, 3600, Context).

pid_observe_tick_24h(Pid, tick_24h, Context) ->
    pull_updates_when_needed(Pid, 86400, Context).

%% Casts {pull_updates, Frequency} to the module's gen_server only when
%% the configured poll_frequency equals this tick's interval.
pull_updates_when_needed(Pid, Frequency, Context) ->
    case z_convert:to_integer(m_config:get_value(mod_ginger_adlib, poll_frequency, Context)) of
        Frequency ->
            gen_server:cast(Pid, {pull_updates, Frequency});
        _ ->
            nop
    end.
%% @doc Get Adlib API endpoint URL
-spec endpoint(z:context()) -> binary().
endpoint(Context) ->
    m_config:get_value(?MODULE, url, Context).

%% @doc Get databases that are enabled
%% Reads the 'list' property of the databases config; init/1 guarantees
%% the config row exists, but this crashes if it was removed afterwards.
-spec enabled_databases(z:context()) -> [binary()].
enabled_databases(Context) ->
    DatabasesConfig = m_config:get(?MODULE, databases, Context),
    proplists:get_value(list, DatabasesConfig).
%% @doc Detect datetime format used by Adlib server for modified.
%% Unfortunately, this can differ between Adlib instances.
%% Probes the server with an ISO 8601 formatted query; when the server
%% fails to return a total ('undefined'), falls back to the legacy
%% compact "Ymd" format.
-spec detect_modification_date_format(binary(), calendar:datetime(), z:context()) -> string().
detect_modification_date_format(Database, Since, Context) ->
    %% First try ISO8601
    ISO8601 = z_datetime:format(Since, "'Y-m-d H:i:s'", Context),
    Args = [
        {database, Database},
        {search, <<"modification>=", ISO8601/binary>>}
    ],
    case z_search:search({adlib, Args}, {1, 20}, Context) of
        #search_result{total = undefined} ->
            %% Try legacy format
            "Ymd";
        _ ->
            "'Y-m-d H:i:s'"
    end.
%% Starts the module's gen_server; Args must contain {context, Context}.
start_link(Args) when is_list(Args) ->
    gen_server:start_link(?MODULE, Args, []).

%% gen_server init: ensures the databases config row exists (creating an
%% empty list when missing) and stores a fresh request context in state.
init(Args) ->
    {context, Context} = proplists:lookup(context, Args),
    case m_config:get(?MODULE, databases, Context) of
        undefined ->
            m_config:set_prop(?MODULE, databases, list, [], Context);
        _Exists ->
            ok
    end,
    {ok, #state{context=z_context:new(Context)}}.

%% No synchronous calls are supported; any call stops the server.
handle_call(Message, _From, State) ->
    {stop, {unknown_call, Message}, State}.

%% {pull_updates, Frequency}: pulls records modified in the last
%% Frequency seconds.
handle_cast({pull_updates, Frequency}, State = #state{context = Context}) ->
    Now = z_datetime:timestamp(),
    %% Pull back a little more, so as to not lose updates.
    SinceTimestamp = Now - Frequency - (Frequency div 2),
    DateTime = z_datetime:timestamp_to_datetime(SinceTimestamp),
    pull_updates(DateTime, Context),
    {noreply, State};
handle_cast(Message, State) ->
    {stop, {unknown_cast, Message}, State}.

%% Unexpected messages are silently dropped.
handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.
%% gen_server code-change callback: no state migration needed.
%% (Fix: removed dataset-metadata garbage that trailed "{ok, State}.",
%% which made the module unparsable.)
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% -------------------------------------------------------------------
%%
%% riakc_set: Eventually-consistent set type
%%
%% Copyright (c) 2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc <p>Encapsulates a set data-type. Riak's sets differ from Erlang
%% set types in several ways:</p>
%% <ul>
%% <li>Only binaries are allowed as elements. Convert other terms to a
%% binary before adding them.</li>
%% <li>Like the other eventually-consistent types, updates
%% (`add_element/2' and `del_element/2') are not applied to local
%% state. Instead, additions and removals are captured for later
%% application by Riak.</li>
%% <li>Additions and removals are non-exclusive. You can add and
%% remove the same element in the same session, both operations will
%% be performed in Riak (removal first). Removals performed without a
%% context may result in failure.</li>
%% <li>You may add an element that already exists in the original set
%% value, and remove an element that does not appear in the original
%% set value. This is non-intuitive, but acts as a safety feature: a
%% client code path that requires an element to be present in the set
%% (or removed) can ensure that intended state by applying an
%% operation.</li>
%% <li>The query functions `size/1', `is_element/1' and `fold/3' only
%% operate on the original value of the set, disregarding local
%% updates.</li>
%% </ul>
%% @end
-module(riakc_set).
-behaviour(riakc_datatype).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-export([gen_type/0, gen_op/0]).
-endif.
%% Callbacks
-export([new/0, new/1, new/2,
value/1,
to_op/1,
is_type/1,
type/0]).
%% Operations
-export([add_element/2,
add_elements/2,
del_element/2]).
%% Query functions
-export([size/1,
is_element/2,
fold/3]).
-record(set, {value = ordsets:new() :: ordsets:ordset(binary()),
adds = ordsets:new() :: ordsets:ordset(binary()),
removes = ordsets:new() :: ordsets:ordset(binary()),
context = undefined :: riakc_datatype:context() }).
-export_type([riakc_set/0, set_op/0]).
-opaque riakc_set() :: #set{}.
-type simple_set_op() :: {add_all, [binary()]} | {remove_all, [binary()]}.
-type set_op() :: simple_set_op() | {update, [simple_set_op()]}.
%% @doc Creates a new, empty set container type.
-spec new() -> riakc_set().
new() ->
    #set{}.

%% @doc Creates a new set container with the opaque context.
%% The context is required for removals (see del_element/2).
-spec new(riakc_datatype:context()) -> riakc_set().
new(Context) ->
    #set{context=Context}.

%% @doc Creates a new set container with the given members and opaque
%% context. Members are deduplicated/ordered via ordsets:from_list/1.
-spec new([binary()], riakc_datatype:context()) -> riakc_set().
new(Value, Context) when is_list(Value) ->
    #set{value=ordsets:from_list(Value),
         context=Context}.

%% @doc Returns the original value of the set as an ordset
%% (local adds/removes are NOT reflected here).
-spec value(riakc_set()) -> ordsets:ordset(binary()).
value(#set{value=V}) -> V.
%% @doc Extracts an operation from the set that can be encoded into an
%% update request.
-spec to_op(riakc_set()) -> riakc_datatype:update(set_op()).
to_op(#set{adds=[], removes=[]}) ->
undefined;
to_op(#set{adds=A, removes=[], context=C}) ->
{type(), {add_all, A}, C};
to_op(#set{adds=[], removes=R, context=C}) ->
{type(), {remove_all, R}, C};
to_op(#set{adds=A, removes=R, context=C}) ->
{type(), {update, [{remove_all, R}, {add_all, A}]}, C}.
%% @doc Determines whether the passed term is a set container.
-spec is_type(term()) -> boolean().
is_type(T) ->
is_record(T, set).
%% @doc Returns the symbolic name of this container.
-spec type() -> atom().
type() -> set.
%% @doc Adds an element to the set.
-spec add_element(binary(), riakc_set()) -> riakc_set().
add_element(Bin, #set{adds=A0}=Set) when is_binary(Bin) ->
Set#set{adds=ordsets:add_element(Bin, A0)}.
%% @doc Adds elements to the set.
-spec add_elements(list(binary()), riakc_set()) -> riakc_set().
add_elements(Elems, Set) when is_list(Elems) ->
lists:foldl(fun add_element/2, Set, Elems).
%% @doc Removes an element from the set.
%% @throws context_required
-spec del_element(binary(), riakc_set()) -> riakc_set().
del_element(_Bin, #set{context=undefined}) ->
throw(context_required);
del_element(Bin, #set{removes=R0}=Set) when is_binary(Bin) ->
Set#set{removes=ordsets:add_element(Bin, R0)}.
%% @doc Returns the cardinality (size) of the set. <em>Note: this only
%% operates on the original value as retrieved from Riak.</em>
-spec size(riakc_set()) -> pos_integer().
size(#set{value=V}) ->
ordsets:size(V).
%% @doc Test whether an element is a member of the set. <em>Note: this
%% only operates on the original value as retrieved from Riak.</em>
-spec is_element(binary(), riakc_set()) -> boolean().
is_element(Bin, #set{value=V}) when is_binary(Bin) ->
ordsets:is_element(Bin, V).
%% @doc Folds over the members of the set. <em>Note: this only
%% operates on the original value as retrieved from Riak.</em>
-spec fold(fun((binary(), term()) -> term()), term(), riakc_set()) -> term().
fold(Fun, Acc0, #set{value=V}) ->
ordsets:fold(Fun, Acc0, V).
-ifdef(EQC).
gen_type() ->
?LET({Elems, Ctx}, {list(binary()), binary()}, new(Elems, Ctx)).
gen_op() ->
{elements([add_element, del_element]),
[binary()]}.
-endif. | src/riakc_set.erl | 0.675765 | 0.416915 | riakc_set.erl | starcoder |
%%% ====================================================================
%%% ``The contents of this file are subject to the Erlang Public License,
%%% Version 1.1, (the "License"); you may not use this file except in
%%% compliance with the License. You should have received a copy of the
%%% Erlang Public License along with this software. If not, it can be
%%% retrieved via the world wide web at http://www.erlang.org/.
%%%
%%% Software distributed under the License is distributed on an "AS IS"
%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%%% the License for the specific language governing rights and limitations
%%% under the License.
%%%
%%% @author <NAME> <<EMAIL>>
%%% @version {@vsn}, {@date} {@time}
%%% @end
%%% ====================================================================
%% @doc Pubsub node plugin for DAG-structured ("multi-collection")
%% nodes. Almost every callback delegates straight to node_hometree;
%% this module only overrides the default options, the advertised
%% features, node-creation permission, and publish_item/6 (publishing
%% is forbidden on collection nodes).
-module(node_dag).
-author('<EMAIL>').
-include("pubsub.hrl").
-include("jlib.hrl").
-behaviour(gen_pubsub_node).

%% API definition
-export([init/3, terminate/2, options/0, features/0,
         create_node_permission/6, create_node/2, delete_node/1,
         purge_node/2, subscribe_node/8, unsubscribe_node/4,
         publish_item/6, delete_item/4, remove_extra_items/3,
         get_entity_affiliations/2, get_node_affiliations/1,
         get_affiliation/2, set_affiliation/3,
         get_entity_subscriptions/2, get_node_subscriptions/1,
         get_subscriptions/2, set_subscriptions/4,
         get_pending_nodes/2, get_states/1, get_state/2,
         set_state/1, get_items/6, get_items/2, get_item/7,
         get_item/2, set_item/1, get_item_name/3, node_to_path/1,
         path_to_node/1]).

%% Plugin lifecycle: straight delegation to the base plugin.
init(Host, ServerHost, Opts) ->
    node_hometree:init(Host, ServerHost, Opts).

terminate(Host, ServerHost) ->
    node_hometree:terminate(Host, ServerHost).

%% Same defaults as node_hometree, but new nodes start out as leaves.
options() ->
    [{node_type, leaf} | node_hometree:options()].

%% Advertise collection support on top of the base feature set.
features() ->
    [<<"multi-collection">> | node_hometree:features()].

%% Always grant creation; access control is handled elsewhere.
create_node_permission(_Host, _ServerHost, _Node,
                       _ParentNode, _Owner, _Access) ->
    {result, true}.

create_node(NodeID, Owner) ->
    node_hometree:create_node(NodeID, Owner).

delete_node(Removed) ->
    node_hometree:delete_node(Removed).

subscribe_node(NodeID, Sender, Subscriber, AccessModel,
               SendLast, PresenceSubscription, RosterGroup, Options) ->
    node_hometree:subscribe_node(NodeID, Sender, Subscriber,
                                 AccessModel, SendLast, PresenceSubscription,
                                 RosterGroup, Options).

unsubscribe_node(NodeID, Sender, Subscriber, SubID) ->
    node_hometree:unsubscribe_node(NodeID, Sender,
                                   Subscriber, SubID).

%% Publishing is only allowed on leaf nodes; a node whose node_type
%% option is `collection' rejects it with a not-allowed error.
publish_item(NodeID, Publisher, Model, MaxItems, ItemID,
             Payload) ->
    case nodetree_dag:get_node(NodeID) of
        #pubsub_node{options = Options} ->
            case find_opt(node_type, Options) of
                collection ->
                    {error,
                     ?ERR_EXTENDED((?ERR_NOT_ALLOWED), <<"publish">>)};
                _ ->
                    node_hometree:publish_item(NodeID, Publisher, Model,
                                               MaxItems, ItemID, Payload)
            end;
        Err -> Err
    end.

%% Linear scan of a property list; returns `false' when Option is
%% absent (the collection check above then treats the node as a leaf).
find_opt(_, []) -> false;
find_opt(Option, [{Option, Value} | _]) -> Value;
find_opt(Option, [_ | T]) -> find_opt(Option, T).

remove_extra_items(NodeID, MaxItems, ItemIDs) ->
    node_hometree:remove_extra_items(NodeID, MaxItems,
                                     ItemIDs).

delete_item(NodeID, Publisher, PublishModel, ItemID) ->
    node_hometree:delete_item(NodeID, Publisher,
                              PublishModel, ItemID).

purge_node(NodeID, Owner) ->
    node_hometree:purge_node(NodeID, Owner).

get_entity_affiliations(Host, Owner) ->
    node_hometree:get_entity_affiliations(Host, Owner).

get_node_affiliations(NodeID) ->
    node_hometree:get_node_affiliations(NodeID).

get_affiliation(NodeID, Owner) ->
    node_hometree:get_affiliation(NodeID, Owner).

set_affiliation(NodeID, Owner, Affiliation) ->
    node_hometree:set_affiliation(NodeID, Owner,
                                  Affiliation).

get_entity_subscriptions(Host, Owner) ->
    node_hometree:get_entity_subscriptions(Host, Owner).

get_node_subscriptions(NodeID) ->
    node_hometree:get_node_subscriptions(NodeID).

get_subscriptions(NodeID, Owner) ->
    node_hometree:get_subscriptions(NodeID, Owner).

set_subscriptions(NodeID, Owner, Subscription, SubID) ->
    node_hometree:set_subscriptions(NodeID, Owner,
                                    Subscription, SubID).

get_pending_nodes(Host, Owner) ->
    node_hometree:get_pending_nodes(Host, Owner).

get_states(NodeID) -> node_hometree:get_states(NodeID).

get_state(NodeID, JID) ->
    node_hometree:get_state(NodeID, JID).

set_state(State) -> node_hometree:set_state(State).

get_items(NodeID, From) ->
    node_hometree:get_items(NodeID, From).

get_items(NodeID, JID, AccessModel,
          PresenceSubscription, RosterGroup, SubID) ->
    node_hometree:get_items(NodeID, JID, AccessModel,
                            PresenceSubscription, RosterGroup, SubID).

get_item(NodeID, ItemID) ->
    node_hometree:get_item(NodeID, ItemID).

get_item(NodeID, ItemID, JID, AccessModel,
         PresenceSubscription, RosterGroup, SubID) ->
    node_hometree:get_item(NodeID, ItemID, JID, AccessModel,
                           PresenceSubscription, RosterGroup, SubID).

set_item(Item) -> node_hometree:set_item(Item).

get_item_name(Host, Node, ID) ->
    node_hometree:get_item_name(Host, Node, ID).

node_to_path(Node) -> node_hometree:node_to_path(Node).
path_to_node(Path) -> node_hometree:path_to_node(Path).
%% ---------------------------------------------------------------------
%% Licensed under the Apache License, Version 2.0 (the "License"); you may
%% not use this file except in compliance with the License. You may obtain
%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @author <NAME> <<EMAIL>>
%% @copyright 2012 <NAME>
%% @doc Trivial Lisp interpreter in Erlang.
-module(lisp).
-export([eval/1]).
-export([init/0, equal/2, gt/2, knot/1]).

%% Evaluator state: `env' maps atoms to values (a dict).
-record(st, {env}).

-define(INTERPRETED, true).
-include("lisp_test.erl").

%% @doc Evaluate a Lisp program (a nested Erlang term) in the builtin
%% environment and return its value.
eval(P) ->
    {X, _} = eval(P, init()),
    X.

%% @doc Build the initial evaluator state with the builtin bindings.
init() ->
    Env = [{print, {builtin, fun do_print/2}}
          ,{list, {builtin, fun do_list/2}}
          ,{apply, {builtin, fun do_apply/2}}
          ,{plus, {builtin, fun do_plus/2}}
          ,{equal, {builtin, fun do_equal/2}}
          ,{gt, {builtin, fun do_gt/2}}
          ,{knot, {builtin, fun do_knot/2}}
          ,{y, y()}
          ],
    #st{env=dict:from_list(Env)}.

%% Core evaluator: special forms first, then application, then atoms
%% (variable lookup), then self-evaluating constants.
eval([lambda, Ps, B], #st{env=E}=St) when is_list(Ps) ->
    %% Parameters must be distinct atoms; the closure captures E.
    case lists:all(fun is_atom/1, Ps) andalso
        (length(Ps) =:= length(lists:usort(Ps))) of
        true -> {{lambda, Ps, B, E}, St};
        false -> throw(bad_lambda)
    end;
eval([lambda | _], _) ->
    throw(bad_lambda);
eval([def, A, V, B], #st{env=E0}=St) when is_atom(A) ->
    %% Evaluate V, bind it to A while evaluating the body B, then
    %% restore the previous environment (def is lexically scoped).
    {V1, St1} = eval(V, St),
    E1 = bind(A, V1, E0),
    {X, St2} = eval(B, St1#st{env=E1}),
    {X, St2#st{env=E0}};
eval([def | _], _) ->
    throw(bad_def);
eval([quote, A], St) ->
    {A, St};
eval([quote | _], _) ->
    throw(bad_quote);
eval([iff, X, A, B], St) ->
    %% The empty list is the only false value.
    case eval(X, St) of
        {[], St1} -> eval(B, St1);
        {_, St1} -> eval(A, St1)
    end;
eval([do], _St0) ->
    throw(bad_do);
eval([do | As], St0) ->
    %% Evaluate in sequence; the value of the last form is returned.
    lists:foldl(fun (X, {_,St}) -> eval(X, St) end, {[],St0}, As);
eval([_|_]=L, St) ->
    %% Application: evaluate operator and operands left-to-right.
    {[F | As], St1} = lists:mapfoldl(fun eval/2, St, L),
    call(F, As, St1);
eval(A, St) when is_atom(A) ->
    {deref(A, St), St};
eval(C, St) ->
    {C, St}.

%% UTILITY FUNCTIONS

deref(A, #st{env=E}) ->
    case dict:find(A, E) of
        {ok, V} -> V;
        error -> throw({undefined, A})
    end.

bind(A, V, E) ->
    dict:store(A, V, E).

%% Bind formal parameters to arguments pairwise; throws on a
%% parameter/argument count mismatch.
bind_args([P | Ps], [A | As], E) ->
    bind_args(Ps, As, dict:store(P, A, E));
bind_args([], [], E) ->
    E;
bind_args(_, _, _) ->
    throw(bad_arity).

call({lambda, Ps, B, E}, As, #st{env=E0}=St) ->
    %% Run the body in the closure environment extended with the
    %% arguments, then restore the caller's environment.
    {X, St1} = eval(B, St#st{env=bind_args(Ps, As, E)}),
    {X, St1#st{env=E0}};
call({builtin, F}, As, St) ->
    F(As, St);
call(X, _, _) ->
    throw({bad_fun, X}).

%% Map Erlang booleans onto the interpreter's truthiness: 1 / ().
bool(true) -> 1;
bool(false) -> [].

%% BUILTINS

%% The Y combinator, pre-evaluated once in an empty environment.
y() ->
    {Y, _} = eval([lambda, [f],
                   [[lambda, [x], [f, [lambda, [y], [[x, x], y]]]],
                    [lambda, [x], [f, [lambda, [y], [[x, x], y]]]]]],
                  #st{env=dict:new()}),
    Y.

do_print([S | Xs], St) ->
    io:format(S, Xs),
    {[], St};
do_print(_, _) ->
    throw(bad_print).

do_list(As, St) ->
    {As, St}.

do_apply([F, As], St) ->
    call(F, As, St);
do_apply(_, _) ->
    throw(bad_apply).

do_plus([X, Y], St) when is_number(X), is_number(Y) ->
    {X + Y, St};
do_plus(As, _) ->
    throw({bad_plus, As}).

do_equal([X, Y], St) ->
    {equal(X, Y), St};
do_equal(As, _) ->
    throw({bad_equal, As}).

equal(X, Y) ->
    bool(X =:= Y).

do_gt([X, Y], St) ->
    {gt(X, Y), St};
do_gt(As, _) ->
    throw({bad_gt, As}).

gt(X, Y) ->
    bool(X > Y).

do_knot([X], St) ->
    {knot(X), St};
do_knot(As, _) ->
    %% Fixed: previously threw {bad_gt, As} — a copy/paste slip from
    %% do_gt/2 that mislabelled arity errors for `knot'.
    throw({bad_knot, As}).

knot([]) ->
    1;
knot(_) ->
    [].
%% NOTE(review): this module is named `crypto', which clashes with the
%% OTP `crypto' application; loading it shadows the stdlib module.
%% Consider renaming (the file name must change with it).
-module(crypto).
-export([main/1]).

%% Decrypts the contents of Filename, printing any solutions to stdout
%% as they're found. We could just accumulate the solution keys in a list,
%% but sometimes that takes a long time.
%%
%% Relies on the project-local modules word, key and spud (not shown here).
main(Filename) ->
    Dictionary = load_dictionary("../words"),
    Ciphertext = load_ciphertext(Filename),
    solve(Ciphertext, Dictionary).

%% Returns a list of dictionary words, as binaries. Words with
%% capital leters are removed.
%%
load_dictionary(Filename) ->
    {ok, Raw} = file:read_file(Filename),
    Words = re:split(Raw, "\\n"),
    [W || W <- Words,
          re:run(W, "[A-Z]") == nomatch].

%% Returns the ciphertext, as a string. Hash comments are removed.
%%
load_ciphertext(Filename) ->
    {ok, Ciphertext} = file:read_file(Filename),
    re:replace(Ciphertext, "#.*\n", "", [global, {return, list}]).

%% Prints out all solutions for Ciphertext (string) using the words in
%% Dictionary (list of binaries). solve just massages things and
%% hands off to solutions to do the real work.
%%
%% word:new/2 presumably pairs each cipherword with its candidate
%% dictionary — project module, confirm against its source.
solve(Ciphertext, Dictionary) ->
    Cw = re:split(Ciphertext, "\\s+", [{return, list}]),
    Cw2 = [W || W <- Cw, W /= ""],
    Cipherwords = [word:new(W, Dictionary) || W <- Cw2],
    solutions(Cipherwords, Ciphertext).

%% Prints out all solutions for Cipherwords (list of words).
%% Ciphertext is the Ciphertext we read from the file; it's just used
%% to print deciphered solutions as we go along.
%%
solutions(Cipherwords, Ciphertext) ->
    solutions(Cipherwords, Ciphertext, key:new()).

solutions(_Cipherwords = [], Ciphertext, Key) ->
    %% If there are no Cipherwords left to match, then Key is a solution.
    output(Ciphertext, Key),
    []; % [Key].
solutions(Cipherwords, Ciphertext, Key) ->
    %% We're going to iterate over all the words in one of the
    %% Ciperwords' dictionary, so pick the one with the smallest
    %% dictionary in the hopes that we'll have less work to do.
    Cipherword = spud:min_by(
                   Cipherwords,
                   fun(Cw) -> word:dictionary_size(Cw) end),
    %% Map each word in Cipherword's dictionary to a list of solution
    %% keys, and let flatmap flatten them into one list.
    lists:flatmap(
      fun (Plainword) ->
              %% Plainword is a tentative solution for Cipherword. Try
              %% to create a new key with the necessary plainletter ->
              %% cipherletter mappings. If there are conflicts with
              %% existing mappings, returns 'fail'.
              case add_to_key(Key, word:cipherword(Cipherword), Plainword) of
                  fail ->
                      %% Plainword is not a possible solution.
                      [];
                  NewKey ->
                      %% Plainword may be part of a solution. Create a
                      %% list of NewCipherwords with Cipherword
                      %% removed, and with the remaining words'
                      %% dictionaries filtered by the new key.
                      NewCipherwords = [word:filter(W, NewKey) ||
                                           W <- Cipherwords,
                                           W /= Cipherword],
                      %% Return the list of solutions found by
                      %% recursively solving for the remaining cipher
                      %% words.
                      solutions(NewCipherwords, Ciphertext, NewKey)
              end
      end,
      word:dictionary(Cipherword)).

%% Given a cipherword and it's tentative plainword, return a new key
%% based on the given key with Cipherword->Plainword letters added.
%% Return 'fail' if it's not possible to make such a key because two
%% cipher letters would need to map to the same plain letter. Note
%% that we'll never try to map the same cipher letter to two different
%% plain letters because the dictionary filtering ensures that never
%% happens.
%%
add_to_key(Key, Cipherword, Plainword) ->
    %% Invert he key before and after we add new mappings. This makes
    %% it easy to check whether a plaintext letter is already mapped
    %% by a different ciphertext letter. The dictionary regexp
    %% filtering prevents a ciphertext letter from matching two
    %% different plaintext letters.
    InvertedKey = key:invert(Key),
    %% Zip into {Ciperletter, Plainletter} pairs.
    Zipped = lists:zip(Cipherword, binary:bin_to_list(Plainword)),
    %% Fold the pair mappings into key. Return 'fail' on collisions.
    %% The throw is caught below and converted into the atom 'fail'.
    try
        lists:foldl(
          fun ({C, P}, Accum) ->
                  %% Is there already a mapping to this plaintext letter?
                  case key:get(Accum, P) of
                      unknown ->
                          %% No. Add P -> C to the inverted key.
                          key:set(Accum, P, C);
                      C ->
                          %% P is already mapped by C, no problem.
                          Accum;
                      _ ->
                          %% P is already mapped by a different
                          %% cipher letter.
                          throw(fail)
                  end
          end,
          InvertedKey,
          Zipped)
    of
        IKey2 ->
            key:invert(IKey2)
    catch
        throw:fail ->
            fail
    end.
%% Replace each letter in Ciphertext with its mapping from Key.
%%
decipher(Ciphertext, Key) ->
    lists:map(fun(C) -> key:get(Key, C) end, Ciphertext).

%% Print the deciphered Ciphertext to stdout.
%%
output(Ciphertext, Key) ->
    spud:debug("~s", [decipher(Ciphertext, Key)]).
% Binary Search Trees (BST)
% 13 June 2008
-module(bst).
-compile(export_all). % I am lazy. Don't do this.

%=====================================================================
% Membership. `mem(E,T)' is `false' if item `E' is not present in the
% BST `T', otherwise `true'.
%
% In tail form.
%
% In the worst case, the number of function calls to compute mem1(E,T)
% is n+1, where n is the number of nodes in `T'. This case occurs when
% `E' is not in `T' and `T' is a list.
%
% The number of comparisons is maximum when the tree is a
% right-leaning list. There are 2 failing comparisons for each node
% (clauses 2 and 3), so the worst case (i.e., the sought item is not
% present) requires exactly 2n comparisons.
%
mem1(_, empty) -> false;
mem1(E,{ _, E, _}) -> true;
mem1(E,{Left,Root, _}) when E < Root -> mem1(E,Left);
mem1(E,{ _, _,Right}) -> mem1(E,Right).

% In tail form
%
% This version performs n + 1 comparisons in the worst case: it defers
% the equality test by remembering the last root C we turned right at.
%
mem(_, empty) -> false;
mem(E,{Left,Root,Right}) -> mem__(E,{Left,Root,Right},Root).

mem__(E,{Left,Root, _},C) when E < Root -> mem__(E,Left,C);
mem__(E,{ _,Root,Right},_) -> mem__(E,Right,Root);
mem__(E, empty,C) -> E =:= C.

%=====================================================================
% `find(E,T,F)' finds item `E' in the BST `T' and `E' and the found
% subtree of `T' (whose root is `E') are then passed to the function
% `F', which returns a BST in place of the found subtree. The result
% of `find(E,T,F)' is therefore a tree identical to `T', except
% (perhaps) that the first subtree (in a preorder traversal) whose
% root is `E', say `S', is replaced by the tree resulting from the
% call to `F(E,S)'. If `F(E,S)=S' then `find(E,T,F) = T'.

% Not in tail form
%
find1(_, empty,_) -> empty;
find1(E,{Left, E,Right},F) -> F(E,{Left,E,Right});
find1(E,{Left,Root,Right},F) when E < Root ->
    {find1(E,Left,F),Root,Right};
find1(E,{Left,Root,Right},F) ->
    {Left,Root,find1(E,Right,F)}.

% Almost in tail form: the accumulator A records the traversed branch
% so find_up/2 can rebuild it on the way back.
%
find(E,T,F) -> find(E,T,F,[]).

find(_, empty,_,A) -> find_up(A,empty);
find(E,{Left, E,Right},F,A) -> find_up(A,F(E,{Left,E,Right}));
find(E,{Left,Root,Right},F,A) when E < Root ->
    find(E,Left,F,[{left,Root,Right}|A]);
find(E,{Left,Root,Right},F,A) ->
    find(E,Right,F,[{Left,Root,right}|A]).

find_up( [],T) -> T;
find_up([{left,Root,Right}|A],T) -> find_up(A,{T,Root,Right});
find_up([{Left,Root,right}|A],T) -> find_up(A,{Left,Root,T}).

%=====================================================================
% Adding an item as a leaf (without repetition)

% Not in tail form.
%
add_l1(E, empty) -> {empty,E,empty};
add_l1(E,{Left, E,Right}) -> {Left,E,Right};
add_l1(E,{Left,Root,Right}) when E < Root -> {add_l1(E,Left),Root,Right};
add_l1(E,{Left,Root,Right}) -> {Left,Root,add_l1(E,Right)}.

% In tail form with sharing preserved if the item is already present.
add_tf(E,T) -> add_tf(E,T,[],T).

add_tf(E, empty,A,_) -> appk(A,{empty,E,empty});
add_tf(E, {_, E,_},_,T) -> T;
add_tf(E,{Left,Root,Right},A,T) when E < Root ->
    add_tf(E,Left,[{k1,Root,Right}|A],T);
add_tf(E,{Left,Root,Right},A,T) ->
    add_tf(E,Right,[{k2,Left,Root}|A],T).

appk( [],V) -> V;
appk([{k1,Root,Right}|A],V) -> appk(A,{V,Root,Right});
appk( [{k2,Left,Root}|A],V) -> appk(A,{Left,Root,V}).

% Using a functional
%
add_l11(E,T) ->
    find(E,T,fun(I,empty) -> {empty,I,empty};
                (_, Sub) -> Sub end).

% In Continuation-Passing Style, sharing preserved if item already present.
%
add2(E,Tree) -> add2(E,Tree,fun (X) -> X end,Tree).

add2(E,empty,K,_) ->
    K({empty,E,empty});
add2(E,{_,E,_},_,T) ->
    T;
add2(E,{Left,Root,Right},K,T) when E < Root ->
    add2(E,Left,fun (V) -> K({V,Root,Right}) end,T);
add2(E,{Left,Root,Right},K,T) ->
    add2(E,Right,fun (V) -> K({Left,Root,V}) end,T).

% In tail form (first-order)
%
add_l(E,Tree) -> add_l(E,Tree,[]).

add_l(E,empty,A) ->
    add_up(A,{empty,E,empty});
add_l(E,{Left,E,Right},A) ->
    add_up(A,{Left,E,Right});
add_l(E,{Left,Root,Right},A) when E < Root ->
    add_l(E,Left,[{left,Root,Right}|A]);
add_l(E,{Left,Root,Right},A) ->
    add_l(E,Right,[{Left,Root,right}|A]).

add_up( [],T) -> T;
add_up([{left,Root,Right}|A],T) -> add_up(A,{T,Root,Right});
add_up([{Left,Root,right}|A],T) -> add_up(A,{Left,Root,T}).

% Not in tail form
%
% This variation on add_l1/2 performs only n + 1 comparisons in the
% worst case.
%
add_l2(E, empty) -> {empty,E,empty};
add_l2(E,{Left,Root,Right}) -> add_l2__(E,{Left,Root,Right},Root).

add_l2__(E,empty,C) ->
    if E =:= C -> empty;
       true -> {empty,E,empty}
    end;
add_l2__(E,{Left,Root,Right},C) when E < Root ->
    {add_l2__(E,Left,C),Root,Right};
add_l2__(E,{Left,Root,Right},_) ->
    {Left,Root,add_l2__(E,Right,Root)}.

% In tail form.
%
% This variation on add_l/2 performs only n + 1 comparisons in the
% worst case.
%
add_l3(E, empty) -> {empty,E,empty};
add_l3(E,{Left,Root,Right}) -> add_l3__(E,{Left,Root,Right},[],Root).

add_l3__(E,empty,A,C) ->
    if E =:= C -> add_up(A,empty);
       true -> add_up(A,{empty,E,empty})
    end;
add_l3__(E,{Left,Root,Right},A,C) when E < Root ->
    add_l3__(E,Left,[{left,Root,Right}|A],C);
add_l3__(E,{Left,Root,Right},A,_) ->
    add_l3__(E,Right,[{Left,Root,right}|A],Root).

% Almost in tail form.
%
% This variation on add_l/2 avoids reconstructing the branch when E is
% already in T. This optimisation relies solely on the tail form.
%
add_l4(E,Tree) ->
    case add_l4(E,Tree,[]) of
        id -> Tree;
        T -> T
    end.

add_l4(E,empty,A) ->
    add_up(A,{empty,E,empty});
add_l4(E,{_,E,_},_) ->
    id;
add_l4(E,{Left,Root,Right},A) when E < Root ->
    add_l4(E,Left,[{left,Root,Right}|A]);
add_l4(E,{Left,Root,Right},A) ->
    add_l4(E,Right,[{Left,Root,right}|A]).

% Almost in tail form.
%
% This variation on add_l3/2 performs only n + 1 comparisons in the
% worst case and does not rebuild the traversed branch if E was
% already in T.
%
add_l5(E,empty) -> {empty,E,empty};
add_l5(E,Tree={_,Root,_}) ->
    case add_l5__(E,Tree,[],Root) of
        id -> Tree;
        T -> T
    end.

add_l5__(E,empty,A,C) ->
    if E =:= C -> id;
       true -> add_up(A,{empty,E,empty})
    end;
add_l5__(E,{Left,Root,Right},A,C) when E < Root ->
    add_l5__(E,Left,[{left,Root,Right}|A],C);
add_l5__(E,{Left,Root,Right},A,_) ->
    add_l5__(E,Right,[{Left,Root,right}|A],Root).

% In tail form (higher-order)
%
add_l6(E,empty) -> {empty,E,empty};
add_l6(E,Tree={_,Root,_}) ->
    add_l6__(E,Tree,Root,Tree,fun(T)->T end).

add_l6__(E,empty,C,Orig,K) ->
    if E =:= C -> Orig;
       true -> K({empty,E,empty})
    end;
add_l6__(E,{Left,Root,Right},C,Orig,K) when E < Root ->
    add_l6__(E,Left,C,Orig,fun (Tree) -> K({Tree,Root,Right}) end);
add_l6__(E,{Left,Root,Right},_,Orig,K) ->
    add_l6__(E,Right,Root,Orig,fun (Tree) -> K({Left,Root,Tree}) end).

%=====================================================================
% Adding an item as a root (without repetition)

% Note:
%
% {Left,Right} = split(E,T),{Left,E,Right}
%
% is exactly equivalent to
%
% (fun({Left,Right}) -> {Left,E,Right} end)(split(E,T))
%
% Not in tail form
%
add_r(E,T) -> {Left,Right} = split(E,T),{Left,E,Right}.

split(_,empty) ->
    {empty,empty};
split(E,{Left,E,Right}) ->
    {Left,Right};
split(E,{Left,Root,Right}) when E < Root ->
    {L,R} = split(E,Left),{L,{R,Root,Right}};
split(E,{Left,Root,Right}) ->
    {L,R} = split(E,Right),{{Left,Root,L},R}.

% In tail form
%
add_r2(E,T) -> split2([],[],E,T).

split2(Lt,Gt,E,empty) ->
    graft(Lt,E,Gt);
split2(Lt,Gt,E,{Left,E,Right}) ->
    graft([Left|Lt],E,[Right|Gt]);
split2(Lt,Gt,E,{Left,Root,Right}) when E < Root ->
    split2(Lt,[{Root,Right}|Gt],E,Left);
split2(Lt,Gt,E,{Left,Root,Right}) ->
    split2([{Left,Root}|Lt],Gt,E,Right).

graft(Lt,E,Gt) -> graft(empty,Lt,E,Gt,empty).

graft(T1,[],E,[],T2) ->
    {T1,E,T2};
graft(T1,[],E,[{Root,Right}|Gt],T2) ->
    graft(T1,[],E,Gt,{T2,Root,Right});
graft(T1,[{Left,Root}|Lt],E,[],T2) ->
    graft({Left,Root,T1},Lt,E,[],T2);
graft(T1,[{Left,Root1}|Lt],E,[{Root2,Right}|Gt],T2) ->
    graft({Left,Root1,T1},Lt,E,Gt,{T2,Root2,Right}).

%=====================================================================
% Removing an element (and grafting at a leaf)

% Not in tail form
%
rem_l1(_, empty) -> empty;
rem_l1(E,{empty, E,empty}) -> empty;
rem_l1(E,{empty, E,Right}) -> Right;
rem_l1(E,{ Left, E,empty}) -> Left;
rem_l1(E,{ Left, E,Right}) -> hang1(Left,Right);
rem_l1(E,{ Left,Root,Right}) when E < Root ->
    {rem_l1(E,Left),Root,Right};
rem_l1(E,{Left,Root,Right}) ->
    {Left,Root,rem_l1(E,Right)}.

hang1(T, empty) -> T; % T is hung at the leftmost leaf
hang1(T,{Left,Root,Right}) -> {hang1(T,Left),Root,Right}.

% Using a functional (in tail form because of hang/3).
%
hang(T, empty,Forest) -> hang_up(T,Forest);
hang(T,{Left,Root,Right},Forest) -> hang(T,Left,[{Root,Right}|Forest]).

hang_up(Tree,[]) -> Tree;
hang_up(Left,[{Root,Right}|Forest]) ->
    hang_up({Left,Root,Right},Forest).

rem_l2(E,T) ->
    find(E,T,fun(_,empty) -> empty;
                (_,{Left,_,Right}) -> hang(Left,Right,[]) end).

%=====================================================================
% Removing an element (and grafting a leaf instead)

% Not in tail form
%
% Fixed: when removing a root with two children, the maximum of the
% left subtree is promoted to the root and must itself be removed from
% the left subtree. The previous code removed E (which cannot occur in
% Left), leaving the promoted maximum duplicated.
%
rem_r1(_, empty) -> empty;
rem_r1(E,{empty, E,empty}) -> empty;
rem_r1(E,{empty, E,Right}) -> Right;
rem_r1(E,{ Left, E,empty}) -> Left;
rem_r1(E,{ Left, E,Right}) ->
    Max = max(Left),
    {rem_r1(Max,Left),Max,Right};
rem_r1(E,{ Left,Root,Right}) when E < Root ->
    {rem_r1(E,Left),Root,Right};
rem_r1(E,{Left,Root,Right}) ->
    {Left,Root,rem_r1(E,Right)}.

max({_,Root,empty}) -> Root;
max({_, _,Right}) -> max(Right).

% Not in tail form
%
% This version decreases the number of function calls with respect to
% rem_r1/2 by making max2/1 return its argument without the maximum
% node, as well as the maximum node itself, as max/1 only does. This
% means that a recursive call to rem_r1/2 was necessary to remove
% Max. Here max2/1 interleaves both computations and results in a
% single visit to the nodes.
%
rem_r2(_, empty) -> empty;
rem_r2(E,{empty, E,empty}) -> empty;
rem_r2(E,{empty, E,Right}) -> Right;
rem_r2(E,{ Left, E,empty}) -> Left;
rem_r2(E,{ Left, E,Right}) -> {L,Max} = max2(Left), {L,Max,Right};
rem_r2(E,{ Left,Root,Right}) when E < Root ->
    {rem_r2(E,Left),Root,Right};
rem_r2(E,{Left,Root,Right}) ->
    {Left,Root,rem_r2(E,Right)}.

max2({Left,Root,empty}) -> {Left,Root};
max2({Left,Root,Right}) -> {R,Max} = max2(Right), {{Left,Root,R},Max}.

% Using a functional (not in tail form)
%
rem_r3_x(_, empty) -> empty;
rem_r3_x(_,{Left,_,Right}) -> {L,Max} = max2(Left), {L,Max,Right}.

rem_r3(E,T) -> find(E,T,fun rem_r3_x/2).

%=====================================================================
% Making a BST from a list and an addition function
%
% In tail form
%
% NOTE(review): hw:foldl/3 is a course-provided helper; presumably it
% behaves like lists:foldl/3 — confirm before swapping in the stdlib.
make(L,Add) -> hw:foldl(Add,empty,L).
%
% reia_eval: Evaluate a given set of Reia expressions
% Copyright (C)2009 <NAME>
%
% Redistribution is permitted under the MIT license. See LICENSE for details.
%
%% @doc Evaluate Reia source (a string or pre-parsed expressions) by
%% compiling it into a throwaway module whose single `toplevel'
%% function takes the current bindings and returns the pair
%% `{Value, NewBindings}'.
-module(reia_eval).
-export([new_binding/0, string/1, string/2, exprs/1, exprs/2]).

-include("../compiler/reia_nodes.hrl").
-include("../compiler/reia_bindings.hrl").

%% AST node for the variable that captures the expression's value.
-define(return_value_var(Line), #var{line=Line, name='__reia_eval_return_value'}).

% Create a new set of local variable bindings
new_binding() -> [].

% Parse and evaluate the given string
string(Str) -> string(Str, new_binding()).

% Parse and evaluate the given string with the given bindings.
% Parse errors are returned as-is ({error, _}).
string(Str, Bindings) ->
    case reia_parse:string(Str) of
        {error, _} = Error ->
            Error;
        {ok, Exprs} ->
            exprs(Exprs, Bindings)
    end.

% Evaluate the given set of expressions
exprs(Exprs) -> exprs(Exprs, new_binding()).

% Evaluate the given set of expressions with the given bindings.
% Returns {value, Value, NewBindings}.
exprs(Exprs, Bindings) ->
    % io:format("Input Code: ~p~n", [Exprs]),
    Exprs2 = annotate_return_value(Exprs, Bindings),
    % The module name embeds the PID so concurrent evals don't collide.
    Filename = "reia_eval#" ++ stamp(),
    Name = list_to_atom(Filename),
    {ok, Module} = reia_compiler:compile(
        Filename,
        [temporary_module(Name, [{var, 1, Var} || {Var, _} <- Bindings], Exprs2)],
        [{toplevel_wrapper, false}]
    ),
    % Bound values are passed positionally, in the same order as the
    % argument list built above.
    Args = list_to_tuple([Val || {_, Val} <- Bindings]),
    {ok, Name, {Value, NewBindings}} = reia_bytecode:load(Module, [Args, nil]),
    % FIXME: This code:purge is just failing and modules are just accumulating
    % the code server whenever eval is used. A "reaper" process is needed to
    % periodically try to purge these modules until it succeeds.
    code:purge(Name),
    {value, Value, NewBindings}.

% Generate a unique module name. Base it off the current PID
stamp() ->
    string:join(string:tokens(pid_to_list(self()), "."), "_").

% Wrap Exprs in a single-function module named Name; toplevel takes
% the currently-bound variables as its arguments.
temporary_module(Name, Args, Exprs) ->
    #module{line=1, name=Name, exprs=[
        #function{line=1, name=toplevel, args=Args, body=Exprs}
    ]}.

% Annotate the return value of the expression to include the bindings.
% The last expression's value is captured in a fresh match against
% ?return_value_var, and a final expression returning
% {Value, Bindings} is appended.
annotate_return_value(Exprs, Bindings) ->
    % An empty input evaluates to nil.
    Exprs2 = case Exprs of
        [] -> [#nil{line=1}];
        [_|_] -> Exprs
    end,
    [LastExpr|Rest] = lists:reverse(Exprs2),
    Line = element(2, LastExpr),
    LastExpr2 = #match{line=Line, left=?return_value_var(Line), right=LastExpr},
    ReturnValue = return_value(output_bindings(Exprs2, Bindings), Line),
    lists:reverse([ReturnValue, LastExpr2 | Rest]).

% Obtain a list of all variables which will be bound when eval is complete
output_bindings(Exprs, Bindings) ->
    {ok, BAExprs} = reia_bindings:transform(Exprs),
    [#bindings{entries=NewBindings}|_] = lists:reverse(BAExprs),
    lists:usort([Var || {Var, _} <- Bindings] ++ dict:fetch_keys(NewBindings)).

% Generate the return value for eval, appending the binding nodes
return_value(Bindings, Line) ->
    #tuple{line=Line, elements = [?return_value_var(Line), bindings_list(Bindings, Line)]}.

% Construct the output list for the bindings (a cons-cell AST literal)
bindings_list([], Line) ->
    {empty, Line};
bindings_list([Name|Rest], Line) ->
    {cons, Line, binding_node(Name, Line), bindings_list(Rest, Line)}.
% Build the AST for one binding entry: a 2-tuple pairing the variable's
% name (as an atom literal) with a reference to the variable itself.
binding_node(Name, Line) ->
    Elements = [#atom{line=Line, name=Name},
                #var{line=Line, name=Name}],
    #tuple{line=Line, elements=Elements}.
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2012-2015 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%
%% Hashtree EQC test.
%%
%% Generates a pair of logically identical AAE trees populated with data
%% and some phantom trees in the same leveldb database to exercise all
%% of the cases in iterate.
%%
%% Then runs commands to insert, delete, snapshot, update tree
%% and compare.
%%
%% The expected values are stored in two ETS tables t1 and t2,
%% with the most recently snapshotted values copied to tables s1 and s2.
%% (the initial common seed data is not included in the ETS tables).
%%
%% The hashtree's themselves are stored in the process dictionary under
%% key t1 and t2. This helps with shrinking as it reduces dependencies
%% between states (or at least that's why I remember doing it).
%%
%% Model state stores where each tree is through the snapshot/update cycle.
%% The command frequencies are deliberately manipulated to make it more
%% likely that compares will take place once both trees are updated.
%%
-module(hashtree_eqc).
-compile([export_all]).
-ifdef(TEST).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-include_lib("eqc/include/eqc_statem.hrl").
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) -> io:format(user, Str, Args) end, P)).
-include_lib("eunit/include/eunit.hrl").
%% Make it possible to run from 'erl -s hashtree_eqc runfor 60' on the
%% command line. NOTE: -s passes its arguments as atoms, hence the
%% atom_to_list conversion; the argument is a duration in minutes.
runfor([DurationMinsStr]) ->
    DurationSecs = 60 * list_to_integer(atom_to_list(DurationMinsStr)),
    eqc:quickcheck(eqc:testing_time(DurationSecs, hashtree_eqc:prop_correct())).
%% EUnit entry point: boots lager (and its deps) so hashtree code that
%% logs has a backend, runs the property for 29 seconds under a 60s
%% timeout, then tears the applications down and removes the model ETS
%% tables.
hashtree_test_() ->
    {setup,
     fun() ->
         application:set_env(lager, handlers, [{lager_console_backend, info}]),
         application:ensure_started(syntax_tools),
         application:ensure_started(compiler),
         application:ensure_started(goldrush),
         application:ensure_started(lager)
     end,
     fun(_) ->
         %% Stop in reverse dependency order, then clean up model tables.
         application:stop(lager),
         application:stop(goldrush),
         application:stop(compiler),
         application:stop(syntax_tools),
         application:unload(lager),
         delete_ets()
     end,
     [{timeout, 60,
       fun() ->
           lager:info("Any warnings should be investigated. No lager output expected.\n"),
           ?assert(eqc:quickcheck(?QC_OUT(eqc:testing_time(29,
                                                           hashtree_eqc:prop_correct()))))
       end
      }]}.
%% EQC model state: tracks where each tree (t1/t2) is in the
%% snapshot -> update -> rebuild cycle so command generation and
%% preconditions stay consistent with the real hashtrees.
-record(state,
    {
      started = false,    % Boolean to prevent commands running before initialization step.
      tree_id,            % Tree Id shared by both trees (set by start/4).
      params = undefined, % {Segments, Width, MemLevels}
      snap1 = undefined,  % t1 snapshot phase: undefined, created, updated
      snap2 = undefined,  % t2 snapshot phase: undefined, created, updated
      num_updates = 0     % number of insert/delete operations (for measures)
    }).
%% Stringify an integer as a binary. Local helper predating wide use of
%% the erlang:integer_to_binary/1 BIF; callers invoke it explicitly as
%% ?MODULE:integer_to_binary/1 to avoid auto-import ambiguity.
integer_to_binary(Int) ->
    Digits = integer_to_list(Int),
    list_to_binary(Digits).
%% SHA-1 helper papering over the crypto API rename: newer OTP releases
%% provide crypto:hash/2 and deprecate crypto:sha/1. The new_hash macro
%% is expected to be set by the build configuration per OTP version.
-ifdef(new_hash).
sha(Bin) ->
    crypto:hash(sha, Bin).
-else.
sha(Bin) ->
    crypto:sha(Bin).
-endif.
%% Generator: keys are small integers rendered as binaries.
key() ->
    ?LET(Key, int(), ?MODULE:integer_to_binary(Key)).

%% Generator: a {Key, Hash} pair with a random (per-generation) SHA hash.
object() ->
    {key(), sha(term_to_binary(make_ref()))}.

objects() ->
    non_empty(list(object())).

%% Generator: number of tree levels; production uses 2.
levels() ->
    frequency([{20, 1},
               { 5, 2},   % production
               { 1, 3}]).

mem_levels() -> %% Number of memory levels - strongly favor default setting
    frequency([{20, 0},   %% Default setting to not use memory levels
               { 1, 1},
               { 1, 2},
               { 1, 3},
               { 1, 4}]).

%% Generator: tree width. Heavily weighted towards 16 to produce densely
%% filled segments (more hash collisions per segment), with 1024 also
%% favored as the production setting.
width() ->
    frequency([{  1, 8},
               {100, 16},   % heavily favored: dense segments stress the hashing paths
               {  1, 32},
               {  1, 64},
               {  1, 128},
               {  1, 256},
               {  1, 512},
               { 50, 1024}]). % pick for production
%% Generator: {Segments, Width, MemLevels}. Generated in terms of number
%% of levels; the segment count is derived as Width^Levels so the tree
%% is exactly full at the generated depth.
params() ->
    ?LET({Levels, Width, MemLevels},{levels(), width(), mem_levels()},
         {calculate_num_segments(Width, Levels), Width, MemLevels}).

%% Width^Levels. Both inputs are small powers of two here, so the float
%% math:pow/2 result is exact and trunc/1 is safe.
calculate_num_segments(Width, Levels) ->
    trunc(math:pow(Width, Levels)).

%% Generator: how each tree is initially marked. Shrinks towards
%% mark_empty (the simpler case, skipping the open-and-check path).
initial_open_mode() ->
    %% frequency([{1, mark_empty}, {5, mark_open}]).
    ?SHRINK(oneof([mark_empty, mark_open]), [mark_empty]). % should shrink towards mark_empty
%% Generate tree ids - the first one is used for the test, the others are added
%% as empty hashtrees. This is done to provoke issues where we hit the end of a keyspace
%% but not the end of the leveldb data, which exercises different code paths than only ever
%% having 1 hashtree
%%
%% Make sure the TreeId is unique (the second element), does not matter if there
%% are dupes in the first number (Index).
%% NOTE(review): the SUCHTHAT only guarantees the *head* tree id does not
%% recur in the tail; duplicates among the extra ids are still possible -
%% confirm whether that is intended.
ids() ->
    ?SUCHTHAT(TreeIds,
              non_empty(list({
                  ?LET(X, nat(), 1+X),      %% Partition ID - not critical for test
                  ?LET(X,nat(), min(X,255))} %% Tree ID as integer - must be unique
              )),
              lists:keyfind(element(2, hd(TreeIds)), 2, tl(TreeIds)) == false).
%%
%% Generate the commands, split into two cases to force the start to happen as
%% the first command. Frequencies are tuned so that once a snapshot cycle
%% has begun, the generator strongly prefers to finish it and run compares.
%%
command(_S = #state{started = false}) ->
    {call, ?MODULE, start, [params(), ids(), initial_open_mode(), initial_open_mode()]};
command(_S = #state{started = true, tree_id = TreeId,
                    params = {_Segments, _Width, MemLevels},
                    snap1 = Snap1, snap2 = Snap2}) ->
    %% Calculate weighting values for different groups of commands.
    %% Weights to increase snap frequency once update snapshot has begun.
    SnapshotsDefined = Snap1 /= undefined orelse Snap2 /= undefined,
    SnapshotFrequency = case SnapshotsDefined of true -> 100; _ -> 1 end,
    SnapshotWeight = 10 * SnapshotFrequency,
    MoreAfterSnapshots = 100 * SnapshotFrequency,
    FewerAfterSnapshots = 101 - SnapshotFrequency,
    Infrequently = 1,
    frequency(
      %% Update snapshots/trees. If memory is enabled must test with update_tree.
      %% If not, can use the method used by kv/yz_index_hashtree and separate
      %% the two steps, dumping the result from update_perform.
      %% (The "|| Cond" comprehension guards include each command only when
      %% valid in the current model state.)
      [{SnapshotWeight, {call, ?MODULE, update_tree, [t1, s1]}} || Snap1 == undefined] ++
      [{SnapshotWeight, {call, ?MODULE, update_snapshot, [t1, s1]}} || Snap1 == undefined, MemLevels == 0] ++
      [{SnapshotWeight, {call, ?MODULE, update_perform, [t1]}} || Snap1 == created, MemLevels == 0] ++
      [{SnapshotWeight, {call, ?MODULE, set_next_rebuild, [t1]}} || Snap1 == updated] ++
      [{SnapshotWeight, {call, ?MODULE, update_tree, [t2, s2]}} || Snap2 == undefined] ++
      [{SnapshotWeight, {call, ?MODULE, update_snapshot, [t2, s2]}} || Snap2 == undefined, MemLevels == 0] ++
      [{SnapshotWeight, {call, ?MODULE, update_perform, [t2]}} || Snap2 == created, MemLevels == 0] ++
      [{SnapshotWeight, {call, ?MODULE, set_next_rebuild, [t2]}} || Snap2 == updated] ++
      %% Can only run compares when both snapshots are updated. Boost the frequency
      %% when both are snapshotted (note this is guarded by both snapshot being updatable)
      [{MoreAfterSnapshots, {call, ?MODULE, local_compare, []}} || Snap1 == updated, Snap2 == updated] ++
      [{MoreAfterSnapshots, {call, ?MODULE, local_compare1, []}} || Snap1 == updated, Snap2 == updated] ++
      %% Modify the data in the two tables
      [{FewerAfterSnapshots, {call, ?MODULE, write, [t1, objects()]}}] ++
      [{FewerAfterSnapshots, {call, ?MODULE, write, [t2, objects()]}}] ++
      [{FewerAfterSnapshots, {call, ?MODULE, write_both, [objects()]}}] ++
      [{FewerAfterSnapshots, {call, ?MODULE, delete, [t1, key()]}}] ++
      [{FewerAfterSnapshots, {call, ?MODULE, delete, [t2, key()]}}] ++
      [{FewerAfterSnapshots, {call, ?MODULE, delete_both, [key()]}}] ++
      %% Mess around with reopening, crashing and rehashing.
      [{Infrequently, {call, ?MODULE, reopen_tree, [t1, TreeId]}}] ++
      [{Infrequently, {call, ?MODULE, reopen_tree, [t2, TreeId]}}] ++
      [{Infrequently, {call, ?MODULE, unsafe_close, [t1, TreeId]}}] ++
      [{Infrequently, {call, ?MODULE, unsafe_close, [t2, TreeId]}}] ++
      [{Infrequently, {call, ?MODULE, rehash_tree, [t1]}}] ++
      [{Infrequently, {call, ?MODULE, rehash_tree, [t2]}}]
    ).
%%
%% Start the model up - initialize two trees, either mark them as open and empty
%% or request they are checked. Add additional unused hashtrees with ExtraIds
%% to make sure the iterator code is fully exercised.
%%
%% Store the hashtree records in the process dictionary under keys 't1' and 't2'.
%% Returns the TreeId so next_state can record it.
%%
start(Params, [TreeId | ExtraIds], Tree1OpenOrEmpty, Tree2OpenOrEmpty) ->
    {Segments, Width, MemLevels} = Params,
    %% Return now so we can store symbolic value in procdict in next_state call
    T1A = create_and_open_hashtree(TreeId, Segments, Width, MemLevels,
                                   Tree1OpenOrEmpty, ExtraIds),
    put(t1, T1A),
    T2A = create_and_open_hashtree(TreeId, Segments, Width, MemLevels,
                                   Tree2OpenOrEmpty, ExtraIds),
    put(t2, T2A),
    %% Make sure ETS is pristine
    delete_ets(),
    create_ets(),
    %% Return treeid for future hashtree recreation
    TreeId.
%% Create a new hashtree given TreeId, Segments, Width, and MemLevels.
%% Marks it either open-empty (fresh) or open-and-check (as after a
%% restart). Add some extra trees to stress iteration beyond keyspaces.
create_and_open_hashtree(TreeId, Segments, Width, MemLevels, OpenOrEmpty, ExtraIds) ->
    Tree0 = hashtree:new(TreeId, [{segments, Segments},
                                  {width, Width},
                                  {mem_levels, MemLevels}]),
    Tree = case OpenOrEmpty of
               mark_empty -> hashtree:mark_open_empty(TreeId, Tree0);
               _ -> hashtree:mark_open_and_check(TreeId, Tree0)
           end,
    add_extra_hashtrees(ExtraIds, Tree),
    Tree.

%% Add some extra tree ids and update the metadata to give
%% the iterator code a workout on non-matching ids. The folded-over
%% handles share the same leveldb instance; the returned handles are
%% intentionally discarded by the caller.
add_extra_hashtrees(ExtraIds, T) ->
    lists:foldl(fun(ExtraId, Tacc) ->
                    Tacc2 = hashtree:new(ExtraId, Tacc),
                    Tacc3 = hashtree:mark_open_empty(ExtraId, Tacc2),
                    Tacc4 = hashtree:insert(<<"keyfromextratree">>,
                                            <<"valuefromextratree">>, Tacc3),
                    hashtree:flush_buffer(Tacc4)
                end, T, ExtraIds).
%% Wrap the hashtree:update_tree call (snapshot + update in one step).
%% This works with memory levels enabled. Copy the model tree to a
%% snapshot table so compares check against the snapshotted data.
update_tree(T, S) ->
    %% Snapshot the hashtree and store both states
    HT = hashtree:update_tree(get(T)),
    put(T, HT),
    %% Copy the current ets table to the snapshot table.
    copy_tree(T, S),
    ok.

%% Wrap the hashtree:update_snapshot call and set the next rebuild type to full
%% to match the behavior needed by the *_index_hashtree modules that consume
%% hashtree. Otherwise if the state is treated as incremental on a safe reopen
%% then the tree does not rebuild correctly.
%%
%% Store the snapshot state in the process dictionary under {snapstate, t1} or
%% {snapstate, t2} for use by update_perform.
%%
%% N.B. This does not work with memory levels enabled as update_perform uses the
%% snapshot state which is dumped.
update_snapshot(T, S) ->
    %% Snapshot the hashtree and store both states
    {SS, HT} = hashtree:update_snapshot(get(T)),
    %% Mark as a full rebuild until the update perform step happens.
    HT2 = hashtree:set_next_rebuild(HT, full),
    put(T, HT2),
    put({snapstate, T}, SS),
    %% Copy the current ets table to the snapshot table.
    copy_tree(T, S),
    ok.
%%
%% Wrap the hashtree:update_perform call and erase the snapshot hashtree state.
%% Should only happen if a snapshot state exists (guarded by precondition).
%%
update_perform(T) ->
    _ = hashtree:update_perform(get({snapstate, T})),
    erase({snapstate, T}),
    ok.

%% Set the next rebuild state back to incremental. Should only happen
%% once update_perform has completed (guarded by precondition).
%%
set_next_rebuild(T) ->
    put(T, hashtree:set_next_rebuild(get(T), incremental)),
    ok.
%% Wrap hashtree:insert to (over)write key with a new hash to a single
%% tree and mirror the write into the model ETS table (set semantics,
%% so re-inserting a key overwrites, matching the hashtree).
%%
write(T, Objects) ->
    lists:foreach(fun({Key, Hash}) ->
                      put(T, hashtree:insert(Key, Hash, get(T))),
                      ets:insert(T, {Key, Hash})
                  end, Objects),
    ok.

%% Call the other wrapper to write to both trees.
%%
write_both(Objects) ->
    write(t1, Objects),
    write(t2, Objects),
    ok.

%% Wrap hashtree:delete to remove a key from a tree (and remove
%% from the model tree).
%%
%% Keys do not need to be present to remove them from the AAE tree.
delete(T, Key) ->
    put(T, hashtree:delete(Key, get(T))),
    ets:delete(T, Key),
    ok.

%% Call the other wrapper to remove the key from both trees.
delete_both(Key) ->
    delete(t1, Key),
    delete(t2, Key),
    ok.
%% Trigger a rehash of the whole interior tree. There is a potential
%% race condition with update_snapshot that is avoided by this model,
%% however if called during update_perform executing it will silently
%% break multi_select_segment.
rehash_tree(T) ->
    put(T, hashtree:rehash_tree(get(T))),
    ok.

%% Flush, update tree, mark clean close, close and reopen the AAE tree,
%% recreating it with identical parameters against the same leveldb path.
reopen_tree(T, TreeId) ->
    HT = hashtree:flush_buffer(get(T)),
    {Segments, Width, MemLevels} = {hashtree:segments(HT), hashtree:width(HT),
                                    hashtree:mem_levels(HT)},
    Path = hashtree:path(HT),
    UpdatedHT = hashtree:update_tree(HT),
    CleanClosedHT = hashtree:mark_clean_close(TreeId, UpdatedHT),
    hashtree:close(CleanClosedHT),
    T1 = hashtree:new(TreeId, [{segments, Segments},
                               {width, Width},
                               {mem_levels, MemLevels},
                               {segment_path, Path}]),
    %% Reopen with the clean-close marker present -> open_and_check path.
    put(T, hashtree:mark_open_and_check(TreeId, T1)),
    ok.
%% Simulate an unsafe close (crash without mark_clean_close). This flushes
%% the write buffer first so that the model has a chance of knowing what
%% the correct repairs should be.
unsafe_close(T, TreeId) ->
    HT = get(T),
    {Segments, Width, MemLevels} = {hashtree:segments(HT), hashtree:width(HT),
                                    hashtree:mem_levels(HT)},
    Path = hashtree:path(HT),
    %% Although this is an unsafe close, it's unsafe in metadata/building
    %% buckets. Rather than model the queue behavior, flush those and just
    %% check the buckets are correctly recomputed next compare.
    hashtree:flush_buffer(HT),
    hashtree:fake_close(HT),
    T0 = hashtree:new(TreeId, [{segments, Segments},
                               {width, Width},
                               {mem_levels, MemLevels},
                               {segment_path, Path}]),
    put(T, hashtree:mark_open_and_check(TreeId, T0)),
    ok.
%% Use the internal eunit local comparison helpers to compute the
%% differences between the two trees (two variants of the same check).
local_compare() ->
    hashtree:local_compare(get(t1), get(t2)).

local_compare1() ->
    hashtree:local_compare1(get(t1), get(t2)).
%% Preconditions to guard against impossible command sequences produced
%% during shrinking (frequency guards in command/1 are not preserved when
%% a failing sequence is shrunk, so the same constraints are restated here).
precondition(#state{started = false}, {call, _, F, _A}) ->
    F == start;
%% Make sure update_tree can only be called with no memory levels
precondition(#state{params = {_, _, MemLevels}, snap1 = Snap1}, {call, _, update_tree, [t1, _]}) ->
    Snap1 == undefined andalso MemLevels == 0;
precondition(#state{params = {_, _, MemLevels}, snap2 = Snap2}, {call, _, update_tree, [t2, _]}) ->
    Snap2 == undefined andalso MemLevels == 0;
%% Make sure only one snapshot, tree update or rebuild is happening in sequence
precondition(#state{snap1 = Snap1}, {call, _, update_snapshot, [t1, _]}) ->
    Snap1 == undefined;
precondition(#state{snap1 = Snap1}, {call, _, update_perform, [t1]}) ->
    Snap1 == created;
precondition(#state{snap1 = Snap1}, {call, _, set_next_rebuild, [t1]}) ->
    Snap1 == updated;
precondition(#state{snap2 = Snap2}, {call, _, update_snapshot, [t2, _]}) ->
    Snap2 == undefined;
precondition(#state{snap2 = Snap2}, {call, _, update_perform, [t2]}) ->
    Snap2 == created;
precondition(#state{snap2 = Snap2}, {call, _, set_next_rebuild, [t2]}) ->
    Snap2 == updated;
%% Only compare once the tree has been updated for the snapshot
precondition(#state{snap1 = Snap1, snap2 = Snap2}, {call, _, local_compare, []}) ->
    Snap1 == updated andalso Snap2 == updated;
precondition(_S, _C) ->
    true.
%% Check the post conditions. After the initial create, make sure the
%% next-rebuild markers match how each tree was opened: a tree marked
%% empty needs only an incremental rebuild, anything else a full one.
postcondition(_S,{call,_,start, [_Params, _ExtraIds, T1Mark, T2Mark]},_R) ->
    NextRebuildT1 = hashtree:next_rebuild(get(t1)),
    NextRebuildT2 = hashtree:next_rebuild(get(t2)),
    %% TODO: Convert this to a conjunction
    T1Expect = case T1Mark of
                   mark_empty -> incremental;
                   _ -> full
               end,
    T2Expect = case T2Mark of
                   mark_empty -> incremental;
                   _ -> full
               end,
    eqc_statem:conj([eq({t1, T1Expect}, {t1, NextRebuildT1}),
                     eq({t2, T2Expect}, {t2, NextRebuildT2})]);
%% After a comparison, check the results against the ETS tables holding
%% the *snapshot* copies (compare sees snapshotted data, not live data).
%% When no differences are expected, the top hashes must also agree.
postcondition(_S,{call, _, Function, _}, Result0) when Function == local_compare;
                                                       Function == local_compare1 ->
    Result = lists:sort(Result0),
    T1Top = hashtree:top_hash(get(t1)),
    T2Top = hashtree:top_hash(get(t2)),
    Expect = expect_compare(),
    case Expect of
        [] ->
            eqc_statem:conj([eq({result, Expect}, {result, Result}),
                             eq({top, T1Top}, {top, T2Top})]);
        _ ->
            eq(Expect, Result)
    end;
postcondition(_S,{call,_,_,_},_R) ->
    true.
%% Model state transitions. Snapshot phases advance through
%% undefined -> created -> updated -> undefined; any reopen/crash/rehash
%% resets the corresponding tree's phase to undefined.
next_state(S,R,{call, _, start, [Params,_ExtraIds,_,_]}) ->
    %% Start returns the TreeId used, stick in the state
    %% as no hashtree:tree_id call yet.
    S#state{started = true, tree_id = R, params = Params};
%% update_tree snapshots and updates in one step.
next_state(S,_V,{call, _, update_tree, [t1, _]}) ->
    S#state{snap1 = updated};
next_state(S,_V,{call, _, update_tree, [t2, _]}) ->
    S#state{snap2 = updated};
next_state(S,_V,{call, _, update_snapshot, [t1, _]}) ->
    S#state{snap1 = created};
next_state(S,_V,{call, _, update_snapshot, [t2, _]}) ->
    S#state{snap2 = created};
next_state(S,_V,{call, _, update_perform, [t1]}) ->
    S#state{snap1 = updated};
next_state(S,_V,{call, _, update_perform, [t2]}) ->
    S#state{snap2 = updated};
next_state(S,_V,{call, _, set_next_rebuild, [t1]}) ->
    S#state{snap1 = undefined};
next_state(S,_V,{call, _, set_next_rebuild, [t2]}) ->
    S#state{snap2 = undefined};
%% Data mutations only bump the update counter (used for measures).
next_state(S,_V,{call, _, write, [_T, Objs]}) ->
    S#state{num_updates = S#state.num_updates + length(Objs)};
next_state(S,_R,{call, _, write_both, [Objs]}) ->
    S#state{num_updates = S#state.num_updates + 2*length(Objs)};
next_state(S,_V,{call, _, delete, _}) ->
    S#state{num_updates = S#state.num_updates + 1};
next_state(S,_R,{call, _, delete_both, _}) ->
    S#state{num_updates = S#state.num_updates + 2};
next_state(S,_R,{call, _, reopen_tree, [t1, _]}) ->
    S#state{snap1 = undefined};
next_state(S,_R,{call, _, reopen_tree, [t2, _]}) ->
    S#state{snap2 = undefined};
next_state(S,_R,{call, _, unsafe_close, [t1, _]}) ->
    S#state{snap1 = undefined};
next_state(S,_R,{call, _, unsafe_close, [t2, _]}) ->
    S#state{snap2 = undefined};
next_state(S,_R,{call, _, rehash_tree, [t1]}) ->
    S#state{snap1 = undefined};
next_state(S,_R,{call, _, rehash_tree, [t2]}) ->
    S#state{snap2 = undefined};
next_state(S,_R,{call, _, local_compare, []}) ->
    S;
next_state(S,_R,{call, _, local_compare1, []}) ->
    S.
%% Property to generate a series of commands against the
%% hashtrees and afterwards force them to a comparable state.
%%
prop_correct() ->
    ?FORALL(Cmds,commands(?MODULE, #state{}),
            aggregate(command_names(Cmds),
                      begin
                          %%io:format(user, "Starting in ~p\n", [self()]),
                          %% Reset process-dict/ETS state from any previous run.
                          put(t1, undefined),
                          put(t2, undefined),
                          catch ets:delete(t1),
                          catch ets:delete(t2),
                          {_H,S,Res0} = HSR = run_commands(?MODULE,Cmds),
                          {Segments, Width, MemLevels} =
                              case S#state.params of
                                  undefined ->
                                      %% Possible if Cmds just init
                                      %% set segments to 1 to avoid div by zero
                                      {1, undefined, undefined};
                                  Params ->
                                      Params
                              end,
                          %% If ok after steps, do a final compare to increase
                          %% the number of tests.
                          Res = case (S#state.started andalso Res0 == ok) of
                                    true ->
                                        final_compare(S);
                                    _ ->
                                        Res0
                                end,
                          %% Clean up after the test
                          case Res of
                              ok -> % if all went well, remove leveldb files
                                  catch cleanup_hashtree(get(t1)),
                                  catch cleanup_hashtree(get(t2));
                              _ -> % otherwise, leave them around for inspection
                                  ok
                          end,
                          NumUpdates = S#state.num_updates,
                          pretty_commands(?MODULE, Cmds, HSR,
                                          ?WHENFAIL(
                                             begin
                                                 %% NOTE(review): this re-match against params
                                                 %% badmatches if params is undefined (commands
                                                 %% never got past start); the fallback values
                                                 %% bound above are already in scope - consider
                                                 %% removing this line.
                                                 {Segments, Width, MemLevels} = S#state.params,
                                                 eqc:format("Segments ~p\nWidth ~p\nMemLevels ~p\n",
                                                            [Segments, Width, MemLevels]),
                                                 eqc:format("=== t1 ===\n~p\n\n", [ets:tab2list(t1)]),
                                                 eqc:format("=== s1 ===\n~p\n\n", [safe_tab2list(s1)]),
                                                 eqc:format("=== t2 ===\n~p\n\n", [ets:tab2list(t2)]),
                                                 eqc:format("=== s2 ===\n~p\n\n", [safe_tab2list(s2)]),
                                                 eqc:format("=== ht1 ===\n~w\n~p\n\n", [get(t1), catch dump(get(t1))]),
                                                 eqc:format("=== ht2 ===\n~w\n~p\n\n", [get(t2), catch dump(get(t2))])
                                             end,
                                             measure(num_updates, NumUpdates,
                                                     measure(segment_fill_ratio, NumUpdates / (2 * Segments), % Est of avg fill rate per segment
                                                             collect(with_title(mem_levels), MemLevels,
                                                                     collect(with_title(segments), Segments,
                                                                             collect(with_title(width), Width,
                                                                                     equals(ok, Res))))))))
                      end)).
%% Close and destroy a hashtree's leveldb store, then remove its
%% (now empty) directory.
cleanup_hashtree(HT) ->
    Path = hashtree:path(HT),
    HT2 = hashtree:close(HT),
    hashtree:destroy(HT2),
    ok = file:del_dir(Path).

%% Dump all segment entries of a tree for failure diagnostics. Takes a
%% snapshot first so the iteration sees a consistent view.
dump(Tree) ->
    Fun = fun(Entries) ->
              Entries
          end,
    {SnapTree, _Tree2} = hashtree:update_snapshot(Tree),
    hashtree:multi_select_segment(SnapTree, ['*','*'], Fun).
%% Force a final comparison to make sure we get a comparison test
%% for every case we try, as that is after all the point of the module.
%% Returns ok on success or an {Expected, '/=', Actual} triple.
final_compare(S) ->
    maybe_update_tree(S#state.snap1, t1, s1),
    maybe_update_tree(S#state.snap2, t2, s2),
    Expect = expect_compare(),
    case lists:sort(hashtree:local_compare(get(t1), get(t2))) of
        Expect ->
            ok;
        Result ->
            {Expect, '/=', Result}
    end.

%% Drive a tree to the 'updated' phase from whatever snapshot phase the
%% model left it in, so the final compare sees fresh hashes.
maybe_update_tree(_SnapState = undefined, Tree, Snap) ->
    update_tree(Tree, Snap); % snapshot and calculate
maybe_update_tree(_SnapState = created, Tree, _Snap) ->
    update_perform(Tree);
maybe_update_tree(_SnapState = updated, _Tree, _Snap) ->
    ok. % Do nothing - waiting for setting rebuild status

%% Compute the expected compare result from the snapshot model tables:
%% keys only in s2 are 'missing', only in s1 'remote_missing', and keys
%% present in both with different hashes are 'different'.
expect_compare() ->
    Snap1 = orddict:from_list(ets:tab2list(s1)),
    Snap2 = orddict:from_list(ets:tab2list(s2)),
    SnapDeltas = riak_core_util:orddict_delta(Snap1, Snap2),
    lists:sort(
      [{missing, K} || {K, {'$none', _}} <- SnapDeltas] ++
      [{remote_missing, K} || {K, {_, '$none'}} <- SnapDeltas] ++
      [{different, K} || {K, {V1, V2}} <- SnapDeltas, V1 /= '$none', V2 /= '$none']). %% UNDO SnapDeltas this line
%%
%% Functions for handling the model data stored in ETS tables:
%% t1/t2 mirror the live trees, s1/s2 hold their snapshot copies.
%%
ets_tables() -> [t1, s1, t2, s2].
%% Delete all model tables, ignoring tables that do not exist.
%% Uses a scoped try/catch on error:badarg (the only error ets:delete/1
%% raises for a missing table) instead of old-style `catch`, which would
%% silently swallow every exception class and lose stacktraces.
delete_ets() ->
    lists:foreach(
      fun(Tab) ->
              try
                  ets:delete(Tab)
              catch
                  error:badarg -> ok  % table not created yet
              end
      end, ets_tables()),
    ok.
%% Create all four model tables fresh (caller must have deleted any
%% existing ones first, since the names are fixed).
create_ets() ->
    [ets_new(X) || X <- ets_tables()],
    ok.
%% Create a named, public, set-type ETS table for model bookkeeping;
%% returns the table name.
ets_new(Tab) ->
    ets:new(Tab, [set, public, named_table]).
%% Convert a table to a list, falling back to 'undefined' when the table
%% does not exist - used only when printing failure state, where a
%% snapshot table may never have been created.
safe_tab2list(Id) ->
    try
        ets:tab2list(Id)
    catch
        _:_ ->
            undefined
    end.
%% Copy the model data from the live table T to snapshot table S for
%% update_tree/update_snapshot, recreating S from scratch so it mirrors
%% T exactly at this instant.
copy_tree(T, S) ->
    %% S may not exist on the first snapshot; ignore only that error.
    %% (Scoped try/catch on error:badarg rather than old-style `catch`,
    %% which would swallow every exception class.)
    try
        ets:delete(S)
    catch
        error:badarg -> ok
    end,
    ets_new(S),
    ets:insert(S, ets:tab2list(T)),
    ok.
-endif.
-endif.
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_vm_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
%% Discover all t_* test cases in this module via the emqx_ct helper.
all() -> emqx_ct:all(?MODULE).

%% loads/0 must return the 1/5/15-minute load averages, in that order.
t_load(_Config) ->
    ?assertMatch([{load1, _}, {load5, _}, {load15, _}], emqx_vm:loads()).

%% get_system_info/0 keys must equal system_info_keys/0 (same order);
%% an unknown key yields 'undefined'.
t_systeminfo(_Config) ->
    ?assertEqual(emqx_vm:system_info_keys(),
                 [Key || {Key, _} <- emqx_vm:get_system_info()]),
    ?assertEqual(undefined, emqx_vm:get_system_info(undefined)).

%% mem_info/0 requires os_mon; assert the exact two-element shape.
t_mem_info(_Config) ->
    application:ensure_all_started(os_mon),
    MemInfo = emqx_vm:mem_info(),
    [{total_memory, _}, {used_memory, _}]= MemInfo,
    application:stop(os_mon).

%% Process info keys must line up with process_info_keys/0.
t_process_info(_Config) ->
    ProcessInfo = emqx_vm:get_process_info(),
    ?assertEqual(emqx_vm:process_info_keys(), [K || {K, _V}<- ProcessInfo]).

%% GC info keys must line up with process_gc_info_keys/0.
t_process_gc(_Config) ->
    GcInfo = emqx_vm:get_process_gc_info(),
    ?assertEqual(emqx_vm:process_gc_info_keys(), [K || {K, _V}<- GcInfo]).
%% A freshly created named table must show up in get_ets_list/0.
t_get_ets_list(_Config) ->
    ets:new(test, [named_table]),
    Ets = emqx_vm:get_ets_list(),
    true = lists:member(test, Ets).

%% get_ets_info/1 returns [] for unknown tables, a proplist with at least
%% name and id for known ones; the table must also appear (by id) in the
%% all-tables listing from get_ets_info/0.
t_get_ets_info(_Config) ->
    ets:new(test, [named_table]),
    [] = emqx_vm:get_ets_info(test1),
    EtsInfo = emqx_vm:get_ets_info(test),
    test = proplists:get_value(name, EtsInfo),
    Tid = proplists:get_value(id, EtsInfo),
    EtsInfos = emqx_vm:get_ets_info(),
    ?assertEqual(true, lists:foldl(fun(Info, Acc) ->
                                       case proplists:get_value(id, Info) of
                                           Tid -> true;
                                           _ -> Acc
                                       end
                                   end, false, EtsInfos)).

%% get_ets_object/1 reflects the table's contents (empty, then one row).
t_get_ets_object(_Config) ->
    ets:new(test, [named_table]),
    [] = emqx_vm:get_ets_object(test),
    ets:insert(test, {k, v}),
    [{k, v}] = emqx_vm:get_ets_object(test).
%% Smoke test: get_port_types/0 must not crash.
t_get_port_types(_Config) ->
    emqx_vm:get_port_types().

%% Smoke test: get_port_info/0 with and without an open TCP port.
t_get_port_info(_Config) ->
    emqx_vm:get_port_info(),
    spawn(fun easy_server/0),
    ct:sleep(100),
    {ok, Sock} = gen_tcp:connect("localhost", 5678, [binary, {packet, 0}]),
    emqx_vm:get_port_info(),
    ok = gen_tcp:close(Sock),
    %% NOTE(review): Port is bound but unused (compiler warning) -
    %% this final match only asserts at least one port exists.
    [Port | _] = erlang:ports().

%% transform_port/1 must accept both a real port and its printed
%% "#Port<0.N>" form; N is extracted from the port's external term format.
t_transform_port(_Config) ->
    [Port | _] = erlang:ports(),
    ?assertEqual(Port, emqx_vm:transform_port(Port)),
    <<131, 102, 100, NameLen:2/unit:8, _Name:NameLen/binary, N:4/unit:8, _Vsn:8>> = erlang:term_to_binary(Port),
    ?assertEqual(Port, emqx_vm:transform_port("#Port<0." ++ integer_to_list(N) ++ ">")).
%% The remaining cases are crash-only smoke tests for emqx_vm accessors.
t_scheduler_usage(_Config) ->
    emqx_vm:scheduler_usage(5000).

t_get_memory(_Config) ->
    emqx_vm:get_memory().

t_schedulers(_Config) ->
    emqx_vm:schedulers().

t_get_process_group_leader_info(_Config) ->
    emqx_vm:get_process_group_leader_info(self()).

t_get_process_limit(_Config) ->
    emqx_vm:get_process_limit().

t_cpu_util(_Config) ->
    _Cpu = emqx_vm:cpu_util().
%% Minimal TCP echo-sink on port 5678 used by t_get_port_info: accept a
%% single connection, drain it until the peer closes, then shut down.
easy_server() ->
    {ok, LSock} = gen_tcp:listen(5678, [binary, {packet, 0}, {active, false}]),
    {ok, Sock} = gen_tcp:accept(LSock),
    ok = do_recv(Sock),
    ok = gen_tcp:close(Sock),
    ok = gen_tcp:close(LSock).
%% Receive loop: keep reading from Sock until the peer closes it.
%% Returns ok on a clean close; any other recv error crashes the helper
%% process, which is acceptable in a test.
do_recv(Sock) ->
    recv_loop(gen_tcp:recv(Sock, 0), Sock).

%% Continue draining on successful reads; finish when the socket closes.
recv_loop({ok, _Data}, Sock) ->
    do_recv(Sock);
recv_loop({error, closed}, _Sock) ->
    ok.
-module(aoc2017_day07).
-include_lib("eunit/include/eunit.hrl").
-behavior(aoc_puzzle).
-export([parse/1, solve/1, info/0]).
-include("aoc_puzzle.hrl").
%% Puzzle metadata for the aoc_puzzle framework, including the expected
%% answers for both parts ({Part1Root, Part2CorrectedWeight}).
-spec info() -> aoc_puzzle().
info() ->
    #aoc_puzzle{module = ?MODULE,
                year = 2017,
                day = 7,
                name = "Recursive Circus",
                expected = {xegshds, 299},
                use_one_solver_fun = true,
                has_input_file = true}.
-type input_type() :: digraph:graph().
-type result_type() :: {atom(), integer()}.
%% Parse the puzzle input into a digraph of programs: one vertex per
%% program (labeled later with its weight) and one edge per
%% parent -> child relationship.
-spec parse(Binary :: binary()) -> input_type().
parse(Binary) ->
    Lines = string:tokens(binary_to_list(Binary), "\n\r"),
    make_digraph([parse_line(Line) || Line <- Lines]).

%% Turn one line, e.g. "pbga (66) -> ktlj, cntj", into a
%% {Name, Weight, ChildNames} triple. list_to_atom/1 is acceptable here
%% because the input is a small, trusted puzzle file.
parse_line(Line) ->
    [Node, Weight | Children] = string:tokens(Line, " ()->,"),
    {list_to_atom(Node),
     list_to_integer(Weight),
     [list_to_atom(Child) || Child <- Children]}.
%% Solve both parts: part 1 is the tower's root (the unique vertex with
%% no incoming edge), part 2 the corrected weight of the one unbalanced
%% program. The digraph is deleted afterwards because digraphs are
%% ETS-backed and are not garbage collected.
-spec solve(Input :: input_type()) -> result_type().
solve(Graph) ->
    {yes, Root} = digraph_utils:arborescence_root(Graph),
    {unbalanced, Unbalanced} = find_unbalanced_node(Graph, Root),
    digraph:delete(Graph),
    {Root, Unbalanced}.
%% Find the unbalanced node. Returns the total subtree weight, or
%% {unbalanced, CorrectWeight} if the subtree is unbalanced. The search
%% is depth-first; once an unbalanced node is found, the tagged result
%% short-circuits all the way back up.
-spec find_unbalanced_node(digraph:graph(), Node :: atom()) ->
          TotalWeight :: integer() | {unbalanced, CorrectWeight :: integer()}.
find_unbalanced_node(G, Node) ->
    %% Collect {Child, SubtreeWeight} pairs, or propagate a found result.
    SubTreeWeights =
        lists:foldl(fun (_Child, {unbalanced, _} = Acc) ->
                            Acc;
                        (Child, Acc) ->
                            case find_unbalanced_node(G, Child) of
                                {unbalanced, _} = Result ->
                                    Result;
                                Weight ->
                                    [{Child, Weight} | Acc]
                            end
                    end,
                    [],
                    digraph:out_neighbours(G, Node)),
    %% The node's own weight is stored as the vertex label.
    {_, Weight} = digraph:vertex(G, Node),
    case SubTreeWeights of
        [] ->
            Weight;  % Leaf node
        {unbalanced, _} = Result ->
            Result;  % The final result has been found, pass it on.
        _ ->
            %% Check if the subtrees are balanced.
            {Children, Weights} = lists:unzip(SubTreeWeights),
            case deviant(Weights) of
                undef ->
                    Weight + lists:sum(Weights);
                {Deviant, NonDeviant} ->
                    %% Map the deviating subtree weight back to its child,
                    %% then adjust that child's own weight by the delta.
                    RevMap =
                        maps:from_list(
                          lists:zip(Weights, Children)),
                    {_, DevianWeight} = digraph:vertex(G, maps:get(Deviant, RevMap)),
                    {unbalanced, DevianWeight - (Deviant - NonDeviant)}
            end
    end.
%% Identify the single value that differs from all the others in a list.
%% Returns {Deviant, NonDeviant} when exactly one value occurs once and
%% one other value occurs at least twice; returns 'undef' for every
%% other shape (empty, all equal, two singletons, >2 distinct values).
-spec deviant([integer()]) -> undef | {Deviant :: integer(), NonDeviant :: integer()}.
deviant(Values) ->
    Tally = lists:foldl(
              fun(V, Counts) ->
                      maps:update_with(V, fun(C) -> C + 1 end, 1, Counts)
              end,
              #{},
              Values),
    %% maps:to_list/1 order is unspecified, so match both arrangements.
    case maps:to_list(Tally) of
        [{Odd, 1}, {Common, N}] when N >= 2 ->
            {Odd, Common};
        [{Common, N}, {Odd, 1}] when N >= 2 ->
            {Odd, Common};
        _ ->
            undef
    end.
%% Construct an acyclic digraph from {Node, Weight, Children} triples:
%% first pass creates vertices and parent->child edges, second pass
%% relabels every vertex with its weight (add_vertex/3 on an existing
%% vertex only updates the label).
make_digraph(Specs) ->
    Graph = digraph:new([acyclic]),
    [begin
         Parent = digraph:add_vertex(Graph, Node),
         [digraph:add_edge(Graph, Parent, digraph:add_vertex(Graph, Child))
          || Child <- Children]
     end || {Node, _Weight, Children} <- Specs],
    [digraph:add_vertex(Graph, Node, Weight) || {Node, Weight, _} <- Specs],
    Graph.
%% @author <NAME> <<EMAIL>>
%% @copyright 2010 <NAME>
%% @doc 'format_price' filter, show a price with two digits. Accepts a price in cents.
%% Copyright 2010 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(filter_format_price).
-export([format_price/4, format_price/3, format_price/2]).
%% Recursive digit grouper. Walks the digit string left to right and
%% emits Sep after a digit whenever the number of characters *remaining*
%% (including that digit) is one more than a multiple of three - i.e.
%% the digit finishes a leading group. Stops grouping once <= 3 chars
%% remain. Expects Input to contain only digits; a leading sign would be
%% miscounted as a digit (callers should strip it first).
%% NOTE(review): length/1 is recomputed on every step, making this
%% O(n^2); harmless for price-sized numbers.
insert_thousands_separator(_Sep, Output, []) ->
    Output;
insert_thousands_separator(Sep, Output, Input) when is_list(Input) ->
    case length(Input) > 3 of
        true ->
            case length(Input) rem 3 of
                0 ->
                    % Digit inside a group: copy it through.
                    [Head | Input2] = Input,
                    insert_thousands_separator(Sep, lists:append(Output, [Head]), Input2);
                1 ->
                    % Digit ends a group: copy it and append the separator.
                    [Head | Input2] = Input,
                    Head1 = lists:append([Head], [Sep]),
                    insert_thousands_separator(Sep, lists:append(Output, Head1), Input2);
                2 ->
                    % Digit inside a group: copy it through.
                    [Head | Input2] = Input,
                    insert_thousands_separator(Sep, lists:append(Output, [Head]), Input2)
            end;
        false -> lists:append(Output, Input)
    end.
%% Entry point: render Input's digits with Sep between thousand groups.
%% Strips a leading minus sign first, because the /3 grouper decides
%% separator positions from the remaining character count and would
%% otherwise count '-' as a digit (e.g. -123456 became "-,123,456").
insert_thousands_separator(Sep, Input) when is_integer(Input), Input < 0 ->
    [$- | insert_thousands_separator(Sep, [], integer_to_list(-Input))];
insert_thousands_separator(Sep, Input) when is_integer(Input) ->
    insert_thousands_separator(Sep, [], integer_to_list(Input)).
%% Format a price given in cents as an iolist like "1,234.56".
%% DSep separates units from cents, TSep groups thousands. Accepts
%% integers (cents), floats (units, rounded to cents), funs of arity
%% 0/1 (evaluated, the latter with Context), and textual input starting
%% with an integer. 'undefined' renders as "-"; other unparsable input
%% is returned unchanged.
format_price(Input, DSep, TSep, Context) when is_integer(Input), Input < 0 ->
    %% Negative amounts: format the absolute value and prefix the sign.
    %% Erlang's `rem` keeps the dividend's sign, so the cent math below
    %% would otherwise see negative cents and emit broken characters.
    [$- | format_price(-Input, DSep, TSep, Context)];
format_price(Input, DSep, TSep, _Context) when is_integer(Input) ->
    case Input rem 100 of
        0 ->
            [insert_thousands_separator(TSep, Input div 100), DSep, $0, $0 ];
        Cents when Cents < 10 ->
            %% Zero-pad single-digit cents.
            [insert_thousands_separator(TSep, Input div 100), DSep, $0, Cents + $0 ];
        Cents ->
            [insert_thousands_separator(TSep, Input div 100), DSep, integer_to_list(Cents) ]
    end;
format_price(Input, DSep, TSep, Context) when is_float(Input) ->
    %% Floats are in whole units; round to cents and reuse integer path.
    format_price(round(Input * 100), DSep, TSep, Context);
format_price(Input, DSep, TSep, Context) when is_function(Input, 0) ->
    format_price(Input(), DSep, TSep, Context);
format_price(Input, DSep, TSep, Context) when is_function(Input, 1) ->
    format_price(Input(Context), DSep, TSep, Context);
format_price(Input, DSep, TSep, Context) when is_list(Input) ->
    case string:to_integer(Input) of
        {error, _} -> Input;
        {N, _Rest} -> format_price(N, DSep, TSep, Context)
    end;
format_price(Input, DSep, TSep, Context) when is_binary(Input) ->
    case string:to_integer(binary_to_list(Input)) of
        {error, _} -> Input;
        {N, _Rest} -> format_price(N, DSep, TSep, Context)
    end;
format_price(undefined, _Dsep, _Tsep, _Context) ->
    "-".
%% Template-facing variant: Args holds optional separators as
%% [DSep] or [DSep, TSep]. Pattern match on the list shape instead of
%% length/1, and unwrap the single-argument case - the previous code
%% passed the *whole list* as DSep, which only rendered correctly by
%% accident of iolist flattening. Anything unrecognized (including a
%% non-list argument, which used to crash length/1) falls back to the
%% "." / "," defaults.
format_price(Input, [], Context) ->
    format_price(Input, $., $,, Context);
format_price(Input, [DSep], Context) ->
    format_price(Input, DSep, $,, Context);
format_price(Input, [DSep, TSep], Context) ->
    format_price(Input, DSep, TSep, Context);
format_price(Input, _Args, Context) ->
    format_price(Input, $., $,, Context).
%% Format with the default "." decimal and "," thousands separators.
format_price(Input, Context) ->
    format_price(Input, $., $,, Context).
%%======================================================================
%%
%% LeoProject - Savanna Commons
%%
%% Copyright (c) 2014-2017 Rakuten, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%%======================================================================
-module(svc_metrics_histogram).
-author('<NAME>').
-behaviour(svc_operate_behaviour).
-include("savanna_commons.hrl").
-include_lib("folsom/include/folsom.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([handle_to_get_values/1,
handle_to_get_hist_stats/1,
handle_to_update/3,
trim_and_notify/2]).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% @doc Retrieve the raw sample values held in the histogram record,
%%      dispatching on the sample implementation (slide/uniform/exdec).
handle_to_get_values(Hist) ->
    get_values_1(Hist#histogram.type, Hist#histogram.sample).
%% @doc Retrieve the computed statistics for the histogram's current
%%      sample window.
handle_to_get_hist_stats(Hist) ->
    get_current_statistics(Hist).
%% @doc Insert Value into the histogram sample, dispatching to the
%%      folsom sample module matching the histogram type; returns the
%%      updated sample record.
%%      NOTE(review): the spec pins the sample argument to #uniform{},
%%      but the slide and exdec clauses receive #slide{} / #exdec{}
%%      records - the spec is likely too narrow.
-spec(handle_to_update(?HISTOGRAM_SLIDE |
                       ?HISTOGRAM_UNIFORM |
                       ?HISTOGRAM_EXDEC, #uniform{}, any()) ->
             #slide{} | #uniform{} | #exdec{}).
handle_to_update(?HISTOGRAM_SLIDE, Sample, Value) ->
    folsom_sample_slide:update(Sample, Value);
handle_to_update(?HISTOGRAM_UNIFORM, Sample, Value) ->
    folsom_sample_uniform:update(Sample, Value);
handle_to_update(?HISTOGRAM_EXDEC, Sample, Value) ->
    folsom_sample_exdec:update(Sample, Value).
%% @doc Remove oldest values and notify metric with callback-func.
%% Computes statistics over the current sample, notifies the configured
%% callback module, then resets the sample storage via trim_1/2.
-spec(trim_and_notify(#sv_metric_state{}, #sv_result{}) ->
             ok | {error, any()}).
trim_and_notify(#sv_metric_state{id = ServerId,
                                 type = SampleType,
                                 notify_to = Callback}, #sv_result{} = Result)->
    %% Retrieve the current value, then execute the callback-function
    {MetricGroup, Key} = ?sv_schema_and_key(ServerId),
    #histogram{type = HistType,
               sample = HistSample} = get_value(ServerId),
    Samples = get_values_1(HistType, HistSample),
    Stats = bear:get_statistics(Samples),
    %% Notify a calculated statistics,
    %% then clear oldest data
    case svc_tbl_metric_group:get(MetricGroup) of
        {ok, #sv_metric_group{schema_name = SchemaName}} ->
            %% NOTE(review): the notify call is wrapped in `catch', so a
            %% failing callback is deliberately ignored.
            catch Callback:notify(Result#sv_result{metric_type = ?METRIC_HISTOGRAM,
                                                   schema_name = SchemaName,
                                                   metric_group_name = MetricGroup,
                                                   col_name = Key,
                                                   result = {Samples,Stats}
                                                  }),
            %% A trimming failure is logged but does not fail the call.
            try
                ok = trim_1(SampleType, ServerId)
            catch
                _:Cause ->
                    error_logger:error_msg("~p,~p,~p,~p~n",
                                           [{module, ?MODULE_STRING},
                                            {function, "trim_and_notify/1"},
                                            {line, ?LINE}, {body, Cause}])
            end,
            ok;
        _ ->
            {error, ?ERROR_COULD_NOT_GET_SCHEMA}
    end.
%% @private Reset the sample storage for the given histogram type after
%% the statistics have been reported.
trim_1(?HISTOGRAM_SLIDE, ServerId) ->
    %% Sliding window: just drop all stored readings.
    Hist = get_value(ServerId),
    Sample = Hist#histogram.sample,
    ets:delete_all_objects(Sample#slide.reservoir),
    ok;
trim_1(?HISTOGRAM_UNIFORM, ServerId) ->
    %% Uniform sample: store a re-seeded sample record with its counter
    %% reset to 1, then clear the reservoir table.
    Hist = get_value(ServerId),
    Sample = Hist#histogram.sample,
    true = ets:insert(?HISTOGRAM_TABLE,
                      {ServerId,
                       Hist#histogram{sample = Sample#uniform{n = 1,
                                                              seed = os:timestamp()}}}),
    ets:delete_all_objects(Sample#uniform.reservoir),
    ok;
trim_1(?HISTOGRAM_EXDEC, ServerId) ->
    %% Exponentially-decaying sample: reset start/next/counter and
    %% re-seed, then clear the reservoir table.
    Hist = get_value(ServerId),
    Sample = Hist#histogram.sample,
    true = ets:insert(?HISTOGRAM_TABLE,
                      {ServerId,
                       Hist#histogram{sample = Sample#exdec{start = 0,
                                                            next = 0,
                                                            seed = os:timestamp(),
                                                            n = 1}}}),
    ets:delete_all_objects(Sample#exdec.reservoir),
    ok.
%%--------------------------------------------------------------------
%%% INNER FUNCTIONS
%%--------------------------------------------------------------------
%% @private Look up the #histogram{} stored under ServerId in the
%% histogram ETS table; crashes with badmatch if the id is unknown.
get_value(ServerId) ->
    [{_, Value}] = ets:lookup(?HISTOGRAM_TABLE, ServerId),
    Value.
%% @doc Retrieve values
%% @private Dispatch to the folsom sample module matching the histogram
%% type and return the list of sampled values.
get_values_1(?HISTOGRAM_SLIDE, Sample) ->
    folsom_sample_slide:get_values(Sample);
get_values_1(?HISTOGRAM_UNIFORM, Sample) ->
    folsom_sample_uniform:get_values(Sample);
get_values_1(?HISTOGRAM_EXDEC, Sample) ->
    folsom_sample_exdec:get_values(Sample).
%% @doc Retrieve the current statistics
%% @private Extract the sample values and compute statistics with bear.
%% (Fix: stray dataset metadata was fused onto the final line.)
get_current_statistics(Hist) ->
    Samples = get_values_1(Hist#histogram.type, Hist#histogram.sample),
    bear:get_statistics(Samples).
%%------------------------------------------------------------------------------
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%%-----------------------------------------------------------------------------
%%
%% @author Infoblox Inc <<EMAIL>>
%% @copyright 2013 Infoblox Inc
%% @doc Community detection based on:
%%
%% Ragahavan, Albert, Kumara. Near linear time algorithm to
%% detect community structures in large-scale networks.
%% PHYSICAL REVIEW E 76, 036106 (2007)
%% @end
-module(part_labelprop).
-export([graph/2,
find_communities/1]).
-include("tap_logger.hrl").
% Make a graph from vertex/edge lists suitable for this module to process.
% Return {InternalGraphStructure, CleanupFunction}. The caller must apply
% the cleanup fun to the graph when done (digraphs are ETS-backed and are
% not garbage collected).
graph(Vertices, Edges) ->
    {new_digraph(Vertices, Edges),
     fun(G) -> digraph:delete(G) end}.
% Returns:
% {
%   [{Community, [Node]}],         % maps community to nodes in the community
%   {
%     [Node],                      % vertices (i.e., endpoints) in the graph
%     [{Node1, Node2}]             % interactions (i.e., edges) between Nodes
%   },
%   {
%     [Community],                 % communities
%     [{Community1, Community2}]   % interactions between communities
%   }
% }
% Note: prop_labels/1 relabels the digraph in place (digraphs are
% mutable), so its return value need not be captured here.
find_communities(G)->
    ?DEBUG("Starting NCI Calculation, ~B vertices, ~B edges",
           [digraph:no_vertices(G), digraph:no_edges(G)]),
    ?LOGDURATION(prop_labels(G)),
    {communities(G), tap_graph(G), tap_community_graph(G)}.
%%
%% WARNING: Recursive function
%%
%% prop_labels(G) implements the label propagation algorithm from:
%%
%%    Ragahavan, Albert, Kumara. Near linear time algorithm to
%%    detect community structures in large-scale networks.
%%    PHYSICAL REVIEW E 76, 036106 (2007)
%%
%% Input:  digraph in which every vertex has a unique label.
%% Output: the same digraph, relabelled so each vertex's label denotes
%%         its community membership (the digraph is mutated in place).
%%
%% The recursion stops when no label changed during a full sweep. As in
%% the paper, infinite oscillation is avoided by breaking label ties
%% uniformly at random and by randomising the vertex processing order on
%% every sweep.
%%
prop_labels(G)->
    %% Randomise the vertex processing order for this sweep by pairing
    %% each vertex with a random key and sorting on it.
    %% NOTE(review): the deprecated `random' module is used here, and
    %% random:seed() resets the process to the *default* seed, which
    %% makes the shuffle reproducible rather than random — confirm this
    %% is intended.
    random:seed(),
    V = [Y || {_, Y} <- lists:sort([{random:uniform(), X}
                                    || X <- digraph:vertices(G)])],
    %% Sweep every vertex once; label_vertex/2 returns 'go' if the
    %% vertex was relabelled and 'stop' if it was already stable.
    %% The fold accumulates {StopCount, GoCount, G}; G is threaded
    %% through only for the single-assignment style below.
    RunCond = lists:foldl(fun(Vertex, Acc)->
                                  {StopCount, GoCount, G2} = Acc,
                                  Result = label_vertex(G, Vertex),
                                  case Result of
                                      go ->
                                          {StopCount, GoCount + 1, G2};
                                      stop ->
                                          {StopCount + 1, GoCount, G2}
                                  end
                          end,{0, 0, G}, V),
    {_NewStopCount, NewGoCount, G3} = RunCond,
    ?DEBUG("prop_labels: ~p~n", [RunCond]),
    %% If at least one vertex changed label, run another sweep;
    %% otherwise every vertex agrees with the majority of its
    %% neighbours and we are done.
    case NewGoCount > 0 of
        true ->
            prop_labels(G3);
        false ->
            G3
    end.
%% Recompute the label of a single vertex from its neighbours' labels.
%% Returns 'go' when the label changed (another sweep is needed) and
%% 'stop' when it was already stable. Relabelling mutates the digraph.
label_vertex(G, Vertex)->
    {_Vert, Label} = digraph:vertex(G, Vertex),
    {NewLabel, _Num} = calc_label(G, Vertex),
    case NewLabel =/= Label of
        true ->
            digraph:add_vertex(G, Vertex, NewLabel),
            go;
        false ->
            stop
    end.
%% Pick the label held by the majority of Vertex's out-neighbours,
%% breaking ties uniformly at random. A vertex with no out-neighbours
%% keeps its current label. Returns {Label, NumTiedCandidates}.
calc_label(G, Vertex)->
    N = digraph:out_neighbours(G, Vertex),
    case N =:= [] of
        false ->
            NL = [digraph:vertex(G, V) || V <- N],
            Dict = dict:new(),
            LC = count_labels(Dict, NL),
            LLC = dict:to_list(LC),
            MaxCount = max_count(LLC),
            %% Keep only the labels that reach the maximum count.
            Candidates = lists:filter(fun({_, Count}) ->
                                              Count == MaxCount end, LLC),
            choose_label(Candidates);
        true -> {_, Label} = digraph:vertex(G, Vertex),
                {Label, 0}
    end.
%% Tally how often each label occurs in a list of {Vertex, Label}
%% pairs, accumulating counts into the given dict.
count_labels(Dict, VertexLabels) ->
    lists:foldl(fun ({_, Label}, Counts) ->
                        dict:update_counter(Label, 1, Counts)
                end, Dict, VertexLabels).
%% Largest count in a [{Label, Count}] list; 0 for the empty list.
max_count(LabelCounts) ->
    lists:max([0 | [Count || {_, Count} <- LabelCounts]]).
%% Pick one of the tied candidate labels uniformly at random.
%% Returns {Label, NumberOfCandidates}.
%% (Fix: use the modern `rand' module — the legacy `random' module is
%% deprecated and has been removed from recent OTP releases; `rand' is
%% also自动 seeded per process, so no explicit seeding is needed.)
choose_label(Candidates) ->
    Num = rand:uniform(length(Candidates)),
    {Label, _} = lists:nth(Num, Candidates),
    {Label, length(Candidates)}.
%% Group vertices by their final label. Returns [{Community, [Node]}],
%% where Community is the shared label value.
communities(G) ->
    dict:to_list(lists:foldl(
                   fun(V, D) ->
                           {V, C} = digraph:vertex(G, V),
                           dict_append(C, V, D)
                   end, dict:new(), digraph:vertices(G))).
% Prepend V onto the list stored under K in dict D (starting a new
% single-element list when K is absent). Constant time per insert,
% unlike dict:append/3 which is O(length of the existing list).
dict_append(K, V, D) ->
    Prepend = fun (Existing) -> [V | Existing] end,
    dict:update(K, Prepend, [V], D).
% Create a graph of connected communities: each vertex is a community
% label, and an edge links two communities whenever some original edge
% crosses between them (self-links within a community are dropped).
% Returns {[Community], [{Community1, Community2}]}, deduplicated via
% sets and with each pair normalised by vsort/2.
tap_community_graph(G) ->
    {EndpointsSet, InteractionsSet} = lists:foldl(
        fun(E, {EPs, IAs}) ->
            {_, V1, V2, _} = digraph:edge(G, E),
            {_, C1} = digraph:vertex(G, V1),
            {_, C2} = digraph:vertex(G, V2),
            {
                sets:add_element(C1, sets:add_element(C2, EPs)),
                case C1 == C2 of
                    true -> IAs;
                    false -> sets:add_element(vsort(C1,C2), IAs)
                end
            }
        end, {sets:new(), sets:new()}, digraph:edges(G)),
    {sets:to_list(EndpointsSet), sets:to_list(InteractionsSet)}.
% Create a graph of connected nodes from the digraph's edges: only
% vertices that participate in at least one edge appear, self-loops are
% dropped, and each pair is normalised by vsort/2 and deduplicated.
% Returns {[Node], [{Node1, Node2}]}.
tap_graph(G) ->
    {EndpointsSet, InteractionsSet} = lists:foldl(
        fun(E, {EPs, IAs}) ->
            {_, V1, V2, _} = digraph:edge(G, E),
            {
                sets:add_element(V1, sets:add_element(V2, EPs)),
                case V1 == V2 of
                    true -> IAs;
                    false -> sets:add_element(vsort(V1,V2), IAs)
                end
            }
        end, {sets:new(), sets:new()}, digraph:edges(G)),
    {sets:to_list(EndpointsSet), sets:to_list(InteractionsSet)}.
%% Normalise a pair so the larger term comes first; gives edges a
%% canonical form for set-based deduplication.
vsort(A, B) ->
    case A > B of
        true -> {A, B};
        false -> {B, A}
    end.
%% Build a digraph from a vertex list and an edge list. Each vertex is
%% added with itself as its initial (unique) label, as required by the
%% label-propagation algorithm; edges are {_, V1, V2, _} tuples from
%% which only the two endpoints are used.
%% (Fix: stray dataset metadata was fused onto the final line.)
new_digraph(Vertices, Edges) ->
    G = digraph:new(),
    lists:foreach(fun(V)-> digraph:add_vertex(G,V,V) end, Vertices),
    lists:foreach(fun({_, V1,V2, _}) -> digraph:add_edge(G, V1,V2) end, Edges),
    G.
-module(tql_compare).

%% API exports
-export([ by/1
        , by/2
        , by_prop/1
        , by_prop/2
        , by_prop/3
        , concat/1
        , reverse/1
        ]).

-type comparator(T) :: fun ((T, T) -> boolean()).
-type order() :: ascending | descending.

%% Fix: this was `-export_types', which is not a recognised module
%% attribute — the compiler treats it as a wild attribute and silently
%% ignores it, so the types were never actually exported. The correct
%% attribute is `-export_type'.
-export_type([ comparator/1
             , order/0
             ]).
%%%---------------------------------------------------------------------
%%% API
%%%---------------------------------------------------------------------

%% @equiv by(F, ascending)
-spec by(fun ((A) -> B)) -> comparator(A) when
    A :: term(),
    B :: term().
by(Extractor) ->
  by(Extractor, ascending).

%% @doc Build a comparator (compatible with `lists:sort/2') that orders
%% terms by the key produced by `Extractor', in the given direction.
-spec by(fun ((A) -> B), order()) -> comparator(A) when
    A :: term(),
    B :: term().
by(Extractor, ascending) ->
  fun (Left, Right) -> Extractor(Left) =< Extractor(Right) end;
by(Extractor, descending) ->
  fun (Left, Right) -> Extractor(Right) =< Extractor(Left) end.
%% @doc Flip the ordering produced by a comparator by swapping its
%% arguments.
-spec reverse(comparator(T)) -> comparator(T) when T :: term().
reverse(Comp) ->
  fun (Left, Right) -> Comp(Right, Left) end.
%% @equiv by_prop(Key, ascending)
-spec by_prop(Key :: term()) -> comparator(map()).
by_prop(Key) ->
  by_prop(Key, ascending).

%% @doc Creates a comparator for maps, sorting by a given property, in
%% the given direction. The key must be present in every map compared
%% (maps:get/2 crashes on a missing key); use by_prop/3 for a fallback.
-spec by_prop(Key :: term(), order()) -> comparator(map()).
by_prop(Key, Order) ->
  by(fun (Map) -> maps:get(Key, Map) end, Order).

%% @doc Creates a comparator for maps, sorting by the given property and
%% using `Default' as the fallback value when the key is absent, to sort
%% in the provided direction.
-spec by_prop(Key, Default, order()) -> comparator(map()) when
    Key :: term(),
    Default :: term().
by_prop(Key, Default, Order) ->
  by(fun (Map) -> maps:get(Key, Map, Default) end, Order).
%% @doc Composes multiple comparators together.
%%
%% Fallthrough to the next comparator happens when two items are
%% considered equivalent _according to the current comparator_, i.e.
%% when `Compare(A, B) == Compare(B, A)'. The last comparator in the
%% list always decides.
-spec concat([Comp, ...]) -> Comp when
    Comp :: comparator(T :: term()).
concat(Comparators) ->
  fun (X, Y) ->
      concat_help(X, Y, Comparators)
  end.
%%%---------------------------------------------------------------------
%%% Internal functions
%%%---------------------------------------------------------------------

%% Walk the comparator list: fall through to the next comparator only
%% when the current one gives the same answer in both directions (the
%% items are equivalent under it); otherwise its answer is final. The
%% last comparator always decides.
-spec concat_help(X, Y, [Comp, ...]) -> boolean() when
    X :: T,
    Y :: T,
    Comp :: comparator(T),
    T :: term().
concat_help(X, Y, [Last]) ->
  Last(X, Y);
concat_help(X, Y, [Comp | Remaining]) ->
  case {Comp(X, Y), Comp(Y, X)} of
    {Same, Same} -> concat_help(X, Y, Remaining);
    {Answer, _} -> Answer
  end.
%% Local variables:
%% mode: erlang
%% erlang-indent-level: 2
%% indent-tabs-mode: nil
%% fill-column: 72
%% coding: latin-1
%% End:
-module(carbonara_schema).
-export([
archive_settings_for_metric/1
]).
%% Convert a retention specification to seconds. A specification is
%% either a bare integer (already seconds) or a {Count, Unit} pair,
%% where Unit is one of second | minute | hour | day | year and a year
%% counts as 365 days.
retention_seconds(Seconds) when is_integer(Seconds) -> Seconds;
retention_seconds({Count, second}) -> Count;
retention_seconds({Count, minute}) -> 60 * Count;
retention_seconds({Count, hour}) -> 3600 * Count;
retention_seconds({Count, day}) -> 86400 * Count;
retention_seconds({Count, year}) -> 365 * 86400 * Count.
%% Normalise a storage schema into archive settings.
archive_settings_for_storage_schema(Schema) ->
    % Schema must be a list of tuples of time units
    % Time units may be integer seconds, or 2-tuples of {count, unit}
    % where unit is one of [second, minute, hour, day, year]
    % Returns a proplist of {metric_duration, bucketing}
    % with both in seconds
    [{retention_seconds(Duration), retention_seconds(Bucketing)}
     || {Duration, Bucketing} <- Schema].
%% Read the configured storage schemas from the application
%% environment; crashes (badmatch) if 'storage_schemas' is unset.
retention_policies() ->
    {ok, Schemas} = application:get_env(carbonara, storage_schemas),
    Schemas.
%% Find the archive settings for a metric: walk the configured schemas
%% in order and use the first whose pattern matches the metric name.
%% Returns {ok, [{DurationSecs, BucketSecs}]} or {error, not_found}.
archive_settings_for_metric(MetricName) when is_binary(MetricName) ->
    archive_settings_for_metric(MetricName, retention_policies()).

archive_settings_for_metric(MetricName, []) ->
    % No applicable policy found!
    lager:error("No applicable storage schema for metric ~p", [MetricName]),
    {error, not_found};
archive_settings_for_metric(MetricName, [Schema|Schemas]) ->
    {_SchemaName, SchemaSettings} = Schema,
    %% Both 'pattern' and 'retentions' are required keys; a malformed
    %% schema crashes here by design (badmatch).
    {pattern, Pattern} = proplists:lookup(pattern, SchemaSettings),
    case pattern_matches_metric(MetricName, Pattern) of
        false ->
            archive_settings_for_metric(MetricName, Schemas);
        true ->
            {retentions, Retentions} = proplists:lookup(retentions, SchemaSettings),
            {ok, archive_settings_for_storage_schema(Retentions)}
    end.
%% Check whether a metric name matches a schema pattern: the atom 'any'
%% matches everything, otherwise Pattern is a regular expression
%% applied with re:run/2.
%% (Fix: stray dataset metadata was fused onto the final line.)
pattern_matches_metric(_Metric, any) -> true;
pattern_matches_metric(Metric, Pattern) when is_binary(Pattern) ->
    case re:run(Metric, Pattern) of
        nomatch -> false;
        _ -> true
    end.
%% Copyright (C) 1997, Ericsson Telecom AB
%% File: fs_string.erl
%% Author: <NAME>
%%
%% @doc
%% String handling functions.
%% @end
-module (fs_string).
-vsn('1.0').
-export ([stringize/1,
lowercase/1,
uppercase/1,
integer_to_hex/1,
byte_to_hex/1,
hex_to_integer/1,
hex_to_string/1,
string_to_hex/1,
bash_encode/1,
binary_to_hex/1
]).
%%------------------------------------------------------------------------------
%% @doc Change a string so that integer lists appear as quoted strings.
%% For example, [65,66,67] becomes "ABC".
%% Bug: There is no check that the integers are in the printable ranges.
%% <pre>
%%
%% Types:
%%   RawStr = string()
%%
%% </pre>
%% @spec stringize (RawStr) -> string()
%% @end
%%------------------------------------------------------------------------------
stringize (RawStr) -> ints_to_strings (RawStr, false).

%% Repeatedly rewrite integer lists until a pass finds none (the
%% boolean flag records whether the previous pass made no change).
ints_to_strings (Str, true ) -> Str;
ints_to_strings (Str, false) ->
    case int_to_string (Str) of
        ok     -> ints_to_strings (Str   , true );   % no more integer lists
        NewStr -> ints_to_strings (NewStr, false)    % keep rewriting
    end.
%% Rewrite the first "[n,n,...]" integer list found in Str as a quoted
%% string of the corresponding characters; returns 'ok' when no such
%% list is present.
%% NOTE(review): this uses the `regexp' module, which was removed from
%% OTP long ago (replaced by `re') — confirm the OTP version this
%% project targets still provides it.
int_to_string (Str) ->
    {ok, IntStrRE} = regexp:parse ("\\[[0-9,]+\\]"),
    case regexp:first_match (Str, IntStrRE) of
        {match, Start, Length} ->
            %% Strip the brackets, split on commas, parse each integer
            %% and splice the resulting characters back in as "...".
            IntStrList = string:substr (Str, Start+1, Length-2),
            IntegerStrings = string:tokens (IntStrList, ","),
            String = lists:map (fun(I)-> {ok,[Char],[]} = io_lib:fread("~d",I), Char end, IntegerStrings),
            string_replace (Str, Start, Length, "\"" ++ String ++ "\"");
        _ ->
            ok
    end.
%% Replace the Length-character region of Str beginning at 1-based
%% position Start with Replacement.
string_replace (Str, Start, Length, Replacement) ->
    Prefix = string:substr (Str, 1, Start - 1),
    Suffix = string:substr (Str, Start + Length),
    lists:append ([Prefix, Replacement, Suffix]).
%%------------------------------------------------------------------------------
%% @doc Converts all characters in a string to lowercase.
%% Also accepts a single character code and lowercases it.
%% <pre>
%%
%% Types:
%%   String = string()
%%
%% </pre>
%% @spec lowercase(String) -> string()
%% @end
%%------------------------------------------------------------------------------
%% Fix: the list clause previously returned [lowercase(C)|S], i.e. it
%% only lowercased the FIRST character, contradicting the documented
%% contract above. It now recurses over the whole string.
lowercase ([C|S]) -> [lowercase(C) | lowercase(S)];
lowercase (C) when C>=$A, C=<$Z -> C+32;
lowercase (C) -> C.
%%------------------------------------------------------------------------------
%% @doc Converts all characters in a string to uppercase.
%% Also accepts a single character code and uppercases it.
%% <pre>
%%
%% Types:
%%   String = string()
%%
%% </pre>
%% @spec uppercase(String) -> string()
%% @end
%%------------------------------------------------------------------------------
%% Fix: the list clause previously returned [uppercase(C)|S], i.e. it
%% only uppercased the FIRST character, contradicting the documented
%% contract above. It now recurses over the whole string.
uppercase ([C|S]) -> [uppercase(C) | uppercase(S)];
uppercase (C) when C>=$a, C=<$z -> C-32;
uppercase (C) -> C.
%%------------------------------------------------------------------------------
%% @doc Converts a non-negative integer to a lowercase hex-code
%% character list (string), e.g. 255 -> "ff".
%% <pre>
%%
%% Types:
%%   Integer = integer()
%%
%% </pre>
%% @spec integer_to_hex(Integer) -> string()
%% @end
%%------------------------------------------------------------------------------
integer_to_hex (I) when I >= 16 ->
    integer_to_hex (I div 16) ++ integer_to_hex (I rem 16);
integer_to_hex (I) when I >= 10 ->
    [$a + I - 10];
integer_to_hex (I) ->
    integer_to_list (I).
%%------------------------------------------------------------------------------
%% @doc Converts a byte (0..255) to a two-character hex code, left
%% padding with "0" when the value fits in a single hex digit.
%%
%% <pre>
%%
%% Types:
%%   Byte = integer()
%%   TwoCharacterString = string()
%%
%% </pre>
%%
%% @spec byte_to_hex (Byte) -> TwoCharacterString
%% @end
%%------------------------------------------------------------------------------
byte_to_hex (I) when I >= 0, I =< 255 ->
    case integer_to_hex (I) of
        [Single]       -> [$0, Single];
        [_, _] = Pair  -> Pair
    end.
%%------------------------------------------------------------------------------
%% @doc Converts a hex-code character list (string) to an integer.
%% Accepts both lowercase and uppercase hex digits.
%% <pre>
%%
%% Types:
%%   Hex = string()
%%
%% </pre>
%% @spec hex_to_integer(Hex) -> integer()
%% @end
%%------------------------------------------------------------------------------
hex_to_integer (Hex) ->
    Digit = fun (C) when C >= $a, C =< $f -> C - $a + 10;
                (C) when C >= $A, C =< $F -> C - $A + 10;
                (C) when C >= $0, C =< $9 -> C - $0
            end,
    lists:foldl (fun (C, Acc) -> Acc * 16 + Digit (C) end, 0, Hex).
%%------------------------------------------------------------------------------
%% @doc Converts a character list into a character list that represents
%% the hex codes of each character in the input list. The resulting
%% list always has twice the length of the input list.
%% Example: string_to_hex("0123") -> "30313233"
%% <pre>
%%
%% Types:
%%   String = string()
%%
%% </pre>
%% @spec string_to_hex(String) -> string()
%% @end
%%------------------------------------------------------------------------------
string_to_hex (String) ->
    HexDigit = fun (D) when D > 9 -> $a + D - 10;
                   (D) -> $0 + D
               end,
    lists:flatmap (fun (C) -> [HexDigit (C div 16), HexDigit (C rem 16)] end,
                   String).
%%------------------------------------------------------------------------------
%% @doc Converts an ascii hex code character list into the character
%% list represented by the input; the result is half the length of the
%% input. Example: hex_to_string("30313233") -> "0123"
%% Digits are paired from the right, so for an odd-length input the
%% leading nibble is dropped (preserved behaviour of the original fold).
%% <pre>
%%
%% Types:
%%   String = string()
%%
%% </pre>
%% @spec hex_to_string(String) -> string()
%% @end
%%------------------------------------------------------------------------------
hex_to_string (Hex) ->
    Digit = fun (C) when C >= $a, C =< $f -> C - $a + 10;
                (C) when C >= $A, C =< $F -> C - $A + 10;
                (C) when C >= $0, C =< $9 -> C - $0
            end,
    {Chars, _} =
        lists:foldr (fun (C, {Acc, nolow}) -> {Acc, Digit (C)};
                         (C, {Acc, Low}) ->
                             {[Digit (C) * 16 + Low | Acc], nolow}
                     end, {[], nolow}, Hex),
    Chars.
%%------------------------------------------------------------------------------
%% @doc Encode a string so it can be passed as a bash command-line
%% argument, backslash-escaping the characters { } . and '.
%%
%% <pre>
%%
%% Types:
%%   String = string()
%%
%% </pre>
%% @spec bash_encode(String) -> string()
%% @end
%%------------------------------------------------------------------------------
bash_encode (Str) ->
    lists:flatmap (fun (C) ->
                       case lists:member (C, "{}.'") of
                           true  -> [$\\, C];
                           false -> [C]
                       end
                   end, Str).
%%------------------------------------------------------------------------------
%% @doc Encodes a binary as a lowercase ASCII hex string, two hex
%% characters per byte.
%%
%% @spec binary_to_hex (Binary) -> string()
%% @end
%%------------------------------------------------------------------------------
binary_to_hex (Binary) when is_binary (Binary) ->
    lists:flatmap (fun byte_to_hex/1, binary_to_list (Binary)).
%%------------------------------------------------------------------------------
%% Private Functions
%%------------------------------------------------------------------------------

%% Map one hex digit character (0-9, a-f, A-F) to its value 0..15;
%% crashes (function_clause) on any other character.
%% (Fix: stray dataset metadata was fused onto the final line.)
dehex (H) when H >= $a, H =< $f -> H - $a + 10;
dehex (H) when H >= $A, H =< $F -> H - $A + 10;
dehex (H) when H >= $0, H =< $9 -> H - $0.

%% Map a value 0..15 to its lowercase hex digit character.
hexc (D) when D > 9 -> $a + D - 10;
hexc (D) -> $0 + D.
-module(dijkstra).
-export([dijkstra/3, shortest_dist/2, shortest_path/2, closed_set/1]).
-record(state, {source, graph, closed, frontier, distances, parents, callbackfun}).
%% Return the shortest known distance from the source to Node.
%% Crashes ({badkey, Node}) if Node was never discovered.
shortest_dist(#state{distances = Map}, Node) ->
    maps:get(Node, Map).
%% Reconstruct the shortest path from the source to Node by walking the
%% parent links backwards. Returns the path ordered source-first.
shortest_path(State, Node) ->
    shortest_path(State, Node, [Node]).

%% Accumulate parents onto the front of Path until reaching a node with
%% no recorded parent (the source).
shortest_path(#state{parents = Parents} = State, Node, Path) ->
    case maps:get(Node, Parents, undef) of
        undef ->
            Path;
        Parent ->
            shortest_path(State, Parent, [Parent | Path])
    end.
%% Return the sorted list of nodes that have been fully evaluated.
closed_set(#state{closed = Closed}) ->
    lists:sort(
        maps:keys(Closed)).
%%
%% Apply dijkstra's algorithm to Graph, beginning at Source.
%%
%% @param Graph   The graph to search.
%% @param Source  Start node
%% @param CallbackFun
%%                Function for evaluating nodes. Should return a list
%%                of {Dist, Node} tuples for the node's neighbors, or
%%                'found' if the node passed to it was the end node.
%% @returns {finished, State} if there are no more nodes to
%%          evaluate, or {found, Node, State} if the goal node was
%%          found.
%%
-spec dijkstra(Graph :: term(),
               Source :: term(),
               CallbackFun ::
                   fun((Node :: term(), Graph :: term()) ->
                           [{NbrDist :: integer(), NbrNode :: term()}])) ->
          {finished, Result :: #state{}} | {found, Node :: term(), Result :: #state{}}.
dijkstra(Graph, Source, CallbackFun) ->
    %% Seed the search: the frontier holds {Distance, Node} pairs in a
    %% gb_set, so take_smallest pops the closest unexplored node.
    State =
        #state{source = Source,
               graph = Graph,
               closed = #{},
               frontier = gb_sets:from_list([{0, Source}]),
               distances = #{Source => 0},
               parents = #{},
               callbackfun = CallbackFun},
    dijkstra0(State).
%% Main loop: pop the closest frontier node, ask the callback for its
%% neighbours (or 'found'), relax each neighbour's distance, and recurse
%% until the frontier is exhausted or the goal is reported.
dijkstra0(#state{frontier = Frontier,
                 callbackfun = CallbackFun,
                 graph = Graph} =
              State) ->
    case gb_sets:is_empty(Frontier) orelse take_frontier(State) of
        true ->
            {finished, State};
        {{NodeDist, Node}, State0} ->
            %% Function to apply to each neighbor
            NbrFun =
                fun({NbrDist, NbrNode}, #state{distances = Dists} = Acc) ->
                        case {is_closed(NbrNode, Acc), is_in_frontier(NbrNode, Acc)} of
                            {true, _} ->
                                %% Node already evaluated
                                Acc;
                            {false, false} ->
                                %% Unseen node
                                add_frontier(Node, NbrNode, NodeDist + NbrDist, Acc);
                            {false, true} ->
                                %% Previously seen, but not evaluated. Update
                                %% shortest distance, if necessary.
                                NewNbrDist = NodeDist + NbrDist,
                                OldNbrDist = maps:get(NbrNode, Dists),
                                if NewNbrDist < OldNbrDist ->
                                        update_distance(Node, NbrNode, NewNbrDist, Acc);
                                   true -> Acc
                                end
                        end
                end,
            case CallbackFun(Node, Graph) of
                found ->
                    {found, Node, State0};
                Neighbors ->
                    dijkstra0(lists:foldl(NbrFun, add_to_closed(Node, State0), Neighbors))
            end
    end.
%% True when Node has already been fully evaluated.
is_closed(Node, State) ->
    maps:is_key(Node, State#state.closed).

%% True when Node has a tentative distance recorded, i.e. it has been
%% discovered at least once (closed nodes also keep their entry, so
%% callers check is_closed/2 first).
is_in_frontier(Node, State) ->
    maps:is_key(Node, State#state.distances).
%% Pop the node from the frontier set with smallest distance from
%% origin. Returns {{Dist, Node}, NewState}.
take_frontier(State) ->
    {Elem, F0} = gb_sets:take_smallest(State#state.frontier),
    {Elem, State#state{frontier = F0}}.
%% Add a newly discovered node to the frontier, recording its tentative
%% distance and its parent on the current best path.
add_frontier(Parent, Node, Dist, State) ->
    F0 = gb_sets:add({Dist, Node}, State#state.frontier),
    D0 = maps:put(Node, Dist, State#state.distances),
    P0 = maps:put(Node, Parent, State#state.parents),
    State#state{frontier = F0,
                distances = D0,
                parents = P0}.
%% Update the distance to a node in the frontier set (a shorter path
%% was found) and reparent it accordingly.
%% NOTE(review): the stale {OldDist, Node} entry is left in the
%% frontier gb_set; it will surface later with an outdated distance but
%% be ignored because the node is closed by then — confirm intended.
update_distance(Parent, Node, NewDist, State) ->
    D0 = maps:put(Node, NewDist, State#state.distances),
    P0 = maps:put(Node, Parent, State#state.parents),
    State#state{distances = D0, parents = P0}.
%% Mark Node as fully evaluated.
%% (Fix: stray dataset metadata was fused onto the final line.)
add_to_closed(Node, State) ->
    State#state{closed = maps:put(Node, true, State#state.closed)}.
%% Copyright (c) 2016 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% File : prop_lfe_doc.erl
%% Author : <NAME>
%% Purpose : PropEr tests for the lfe_doc module.
-module(prop_lfe_doc).
-export([prop_define_lambda/0,prop_define_match/0]).
-include_lib("proper/include/proper.hrl").
-include("lfe_doc.hrl").
%%%===================================================================
%%% Properties
%%%===================================================================
%% These only test the formats of the saved data: each property
%% generates a random define form and checks that lfe_doc preserves its
%% name, arity, line and doc string.
prop_define_lambda() -> ?FORALL(Def, define_lambda(), validate(Def)).
prop_define_match() -> ?FORALL(Def, define_match(), validate(Def)).
%% Dispatch on the kind of generated define form; the generated value
%% is a {Form, Line} pair.
validate({['define-function',Name,_Doc,Def],_}=Func) ->
    validate_function(Name, function_arity(Def), Func);
validate({['define-macro',Name,_Doc,_Def],_}=Mac) ->
    validate_macro(Name, Mac).

%% Arity of a plain lambda is its argument count; of a match-lambda,
%% the pattern count of its first clause.
function_arity([lambda,Args|_]) -> length(Args);
function_arity(['match-lambda',[Pat|_]|_]) -> length(Pat).
%% Run the generated function form through lfe_doc:make_doc_info/2
%% (with an export-all module form prepended) and check that the single
%% resulting fdoc carries the same doc string, name, arity and line.
validate_function(Name, Arity, {[_Define,_Name,Meta,_Def],Line}=Func) ->
    Info = [export_all_funcs(),Func],            %Add function export
    case lfe_doc:make_doc_info(Info, []) of
        {ok,#lfe_docs_v1{fdocs=[Fdoc],mdocs=[]}} ->
            %% Must collect multiple doc strings.
            MetaDocs = lfe_doc:collect_docs(Meta, []),
            Fdocs = lfe_doc:function_doc(Fdoc),
            (doc_string(MetaDocs) =:= doc_string(Fdocs))
                and (Name =:= lfe_doc:function_name(Fdoc))
                and (Arity =:= lfe_doc:function_arity(Fdoc))
                and (Line =:= lfe_doc:function_line(Fdoc));
        _ -> false
    end.
%% Same as validate_function/3 but for macros: prepend an export-macro
%% form and check the single resulting mdoc's doc string, name and line
%% (macros have no arity).
validate_macro(Name, {[_Define,_Name,Meta,_Lambda],Line}=Mac) ->
    Info = [export_macro(Name),Mac],             %Add macro export
    case lfe_doc:make_doc_info(Info, []) of
        {ok,#lfe_docs_v1{fdocs=[],mdocs=[Mdoc]}} ->
            %% Must collect multiple doc strings.
            MetaDocs = lfe_doc:collect_docs(Meta, []),
            Mdocs = lfe_doc:macro_doc(Mdoc),
            (doc_string(MetaDocs) =:= doc_string(Mdocs))
                and (Name =:= lfe_doc:macro_name(Mdoc))
                and (Line =:= lfe_doc:macro_line(Mdoc));
        _ -> false
    end.
%% Minimal module forms (at line 1) that export all functions / the
%% given macro, so the generated definitions become visible to lfe_doc.
export_all_funcs() -> {['extend-module',[],[[export,all]]],1}.
export_macro(Mac) -> {['extend-module',[],[['export-macro',Mac]]],1}.
%% Flatten a list of binary doc strings into one string. The original
%% fold prepends each converted binary, so later binaries end up in
%% front; that ordering is preserved here by reversing first.
doc_string(Docs) ->
    lists:append([binary_to_list(Bin) || Bin <- lists:reverse(Docs)]).
%%%===================================================================
%%% Definition shapes
%%%===================================================================

%% A define-function whose body is a plain lambda.
define_lambda() ->
    {['define-function',atom1(),meta_with_doc(),lambda()],line()}.

%% A define-function or define-macro whose body is a match-lambda.
define_match() ->
    ?LET(D, define(), {[D,atom1(),meta_with_doc(),'match-lambda'(D)],line()}).
%%%===================================================================
%%% Custom types
%%%===================================================================
%%% Definitions — PropEr generators for random LFE forms.
define() -> oneof(['define-function','define-macro']).

lambda() -> [lambda,arglist_simple()|body()].

%% Macro match-lambdas get an extra clause structure (see
%% arglist_patterns/2, which appends '$ENV' for macros).
'match-lambda'('define-function') ->
    ['match-lambda'|non_empty(list(function_pattern_clause()))];
'match-lambda'('define-macro') ->
    ['match-lambda'|non_empty(list(macro_pattern_clause()))].

arglist_simple() -> list(atom1()).

atom1() -> oneof([a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z,'']).

body() -> non_empty(list(form())).

form() -> union([form_elem(),[atom1()|list(form_elem())]]).

form_elem() -> union([non_string_term(),printable_string(),atom1()]).

%% Metadata list carrying a [doc, String] entry.
meta_with_doc() -> [[doc,docstring()]].

docstring() -> printable_string().

line() -> pos_integer().

%%% Patterns
pattern() -> union([non_string_term(),printable_string(),pattern_form()]).

pattern_form() ->
    [oneof(['=','++*',[],
            backquote,quote,
            binary,cons,list,map,tuple,
            match_fun()])
     | body()].

match_fun() -> 'match-record'.

macro_pattern_clause() -> pattern_clause(rand_arity(), true).

function_pattern_clause() -> pattern_clause(rand_arity(), false).

%% A clause is [Patterns, GuardOrForm | Body].
pattern_clause(Arity, Macro) ->
    [arglist_patterns(Arity, Macro)|[oneof([guard(),form()])|body()]].

%% Macro clauses carry the implicit '$ENV' argument after the patterns.
arglist_patterns(Arity, false) -> vector(Arity, pattern());
arglist_patterns(Arity, true) -> [vector(Arity, pattern()),'$ENV'].

guard() -> ['when'|non_empty(list(union([logical_clause(),comparison()])))].

%%% Logical clauses
logical_clause() ->
    X = union([atom1(),comparison()]),
    [logical_operator(),X|non_empty(list(X))].

logical_operator() -> oneof(['and','andalso','or','orelse']).

%%% Comparisons
comparison() -> [comparison_operator(),atom1()|list(atom1())].

comparison_operator() -> oneof(['==','=:=','=/=','<','>','=<','>=']).

%%% Strings and non-strings
non_string_term() ->
    union([atom1(),number(),[],bitstring(),binary(),boolean(),tuple()]).

printable_char() -> union([integer(32, 126),integer(160, 255)]).

printable_string() -> list(printable_char()).
-ifdef(NEW_RAND).
rand_arity() -> rand:uniform(10).
-else.
rand_arity() -> random:uniform(10).
-endif. | test/prop_lfe_doc.erl | 0.502197 | 0.405743 | prop_lfe_doc.erl | starcoder |
%% Minicache. Feel free to rename this module and include it in other projects.
%%-----------------------------------------------------------------------------
%% Copyright 2014 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc A minimalistic time triggered dict based cache data structure.
%%
%% The cache keeps track of when each key was last used. Elements are evicted
%% using manual calls to evict_older_than/2. Most of the functions return a new
%% updated cache object which should be used in subsequent calls.
%%
%% A cache can be initialized to 'empty' which represents the empty cache.
%%
%% Properties:
%%
%% <ul>
%% <li>Embeddable in a gen_server or other process</li>
%% <li>Small overhead when unused (the empty cache is a single atom)</li>
%% <li>Evicting K elements is O(N + K * log N) which means low overhead when
%% nothing or few elements are evicted</li>
%% </ul>
%% @private
-module(mysql_cache).
-export_type([cache/2]).
-export([evict_older_than/2, lookup/2, new/0, size/1, store/3]).
-type cache(K, V) ::
{cache, erlang:timestamp(), dict:dict(K, {V, non_neg_integer()})} | empty.
%% @doc Deletes the entries that have not been used for `MaxAge' milliseconds
%% and returns them along with the new state.
-spec evict_older_than(Cache :: cache(K, V), MaxAge :: non_neg_integer()) ->
    {Evicted :: [{K, V}], NewCache :: cache(K, V)}.
evict_older_than({cache, StartTs, Dict}, MaxAge) ->
    %% Entry timestamps are milliseconds relative to StartTs, so anything
    %% below MinTime has been unused for more than MaxAge ms.
    MinTime = timer:now_diff(os:timestamp(), StartTs) div 1000 - MaxAge,
    {Evicted, Dict1} = dict:fold(
        fun (Key, {Value, Time}, {EvictedAcc, DictAcc}) ->
            if
                Time < MinTime ->
                    {[{Key, Value} | EvictedAcc], dict:erase(Key, DictAcc)};
                Time >= MinTime ->
                    {EvictedAcc, DictAcc}
            end
        end,
        {[], Dict},
        Dict),
    %% Collapse to the 'empty' representation when nothing is left so an
    %% unused cache costs a single atom.
    Cache1 = case dict:size(Dict1) of
        0 -> empty;
        _ -> {cache, StartTs, Dict1}
    end,
    {Evicted, Cache1};
evict_older_than(empty, _) ->
    {[], empty}.
%% @doc Looks up a key in a cache. If found, returns the value along with a
%% new cache in which the key's 'last used' timestamp has been refreshed.
-spec lookup(Key :: K, Cache :: cache(K, V)) ->
    {found, Value :: V, UpdatedCache :: cache(K, V)} | not_found.
lookup(_Key, empty) ->
    not_found;
lookup(Key, {cache, StartTs, Dict}) ->
    case dict:find(Key, Dict) of
        error ->
            not_found;
        {ok, {Value, _StaleTime}} ->
            %% Refresh the entry's timestamp (ms elapsed since StartTs).
            Millis = timer:now_diff(os:timestamp(), StartTs) div 1000,
            Refreshed = dict:store(Key, {Value, Millis}, Dict),
            {found, Value, {cache, StartTs, Refreshed}}
    end.
%% @doc Returns the atom `empty' which represents an empty cache.
-spec new() -> cache(K :: term(), V :: term()).
new() ->
    empty.
%% @doc Returns the number of elements in the cache.
-spec size(cache(K :: term(), V :: term())) -> non_neg_integer().
size({cache, _, Dict}) ->
    dict:size(Dict);
%% The empty representation holds no entries by construction.
size(empty) ->
    0.
%% @doc Stores a key-value pair in the cache. If the key already exists, the
%% associated value is replaced by `Value'. The entry's 'last used' time is
%% set to the current time in either case.
-spec store(Key :: K, Value :: V, Cache :: cache(K, V)) -> cache(K, V)
    when K :: term(), V :: term().
store(Key, Value, empty) ->
    %% First entry: the moment of creation becomes the cache epoch, so the
    %% relative timestamp of this entry is 0.
    {cache, os:timestamp(), dict:store(Key, {Value, 0}, dict:new())};
store(Key, Value, {cache, Epoch, Entries}) ->
    ElapsedMs = timer:now_diff(os:timestamp(), Epoch) div 1000,
    {cache, Epoch, dict:store(Key, {Value, ElapsedMs}, Entries)}.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
empty_test() ->
?assertEqual(empty, ?MODULE:new()),
?assertEqual(0, ?MODULE:size(empty)),
?assertEqual(not_found, ?MODULE:lookup(foo, empty)),
?assertMatch({[], empty}, ?MODULE:evict_older_than(empty, 10)).
nonempty_test() ->
Cache = ?MODULE:store(foo, bar, empty),
?assertMatch({found, bar, _}, ?MODULE:lookup(foo, Cache)),
?assertMatch(not_found, ?MODULE:lookup(baz, Cache)),
?assertMatch({[], _}, ?MODULE:evict_older_than(Cache, 50)),
?assertMatch({cache, _, _}, Cache),
?assertEqual(1, ?MODULE:size(Cache)),
receive after 51 -> ok end, %% expire cache
?assertEqual({[{foo, bar}], empty}, ?MODULE:evict_older_than(Cache, 50)),
%% lookup un-expires cache
{found, bar, NewCache} = ?MODULE:lookup(foo, Cache),
?assertMatch({[], {cache, _, _}}, ?MODULE:evict_older_than(NewCache, 50)),
%% store also un-expires
NewCache2 = ?MODULE:store(foo, baz, Cache),
?assertMatch({[], {cache, _, _}}, ?MODULE:evict_older_than(NewCache2, 50)).
-endif. | deps/mysql/src/mysql_cache.erl | 0.714329 | 0.509032 | mysql_cache.erl | starcoder |
%
% reia: Interface between Erlang and Reia environments
% Copyright (C)2008-10 <NAME>
%
% Redistribution is permitted under the MIT license. See LICENSE for details.
%
-module(reia).
-export([
init/0,
load/1,
parse/1,
eval/1, eval/2,
apply/3, apply/4,
inst/2, inst/3,
invoke/3, invoke/4,
throw/2, throw/3,
list_to_string/1,
binary_to_string/1
]).
-include("reia_types.hrl").
% Initialize the Reia environment: start the code server, then load the
% builtins/core modules and the standard library. Returns ok.
init() ->
    % Launch the Reia CodeServer
    'CodeServer':start(),
    % Load Reia builtins and other core modules
    reia_internal:load_core(),
    % Load the Reia standard library
    reia_internal:load_stdlib(),
    ok.
% Load the given Reia source code file. The file is looked up on the
% CodeServer's configured load paths plus the current working directory.
load(Filename) ->
    LoadPaths = 'CodeServer':call({paths}, nil),
    reia_internal:load(LoadPaths ++ [filename:absname("")], Filename).
% Parse the given string of Reia source code into an AST
parse(String) ->
    reia_compiler:parse(String).
% Evaluate the given string of Reia source code; eval/1 uses an empty
% variable binding, eval/2 takes an explicit one.
eval(String) ->
    eval(String, []).
eval(String, Binding) ->
    reia_eval:exprs(parse(String), Binding).
% Call a function within a Reia module. Arguments may be given as a tuple or
% as a list (which is converted to a tuple); Block defaults to nil.
apply(Module, Function, Arguments) -> apply(Module, Function, Arguments, nil).
apply(Module, Function, Arguments, Block) ->
    Module:Function(reia_args_tuple(Arguments), Block).

% Normalize an argument container to the tuple form Reia functions expect;
% throws {error, ...} for anything that is neither a tuple nor a list.
reia_args_tuple(Arguments) when is_tuple(Arguments) -> Arguments;
reia_args_tuple(Arguments) when is_list(Arguments) -> list_to_tuple(Arguments);
reia_args_tuple(_Other) -> throw({error, "invalid type for arguments"}).
% Create a new instance of the given class by building a bare object and
% dispatching its initialize method (Block defaults to nil).
inst(Class, Arguments) -> inst(Class, Arguments, nil).
inst(Class, Arguments, Block) ->
    % FIXME: initial object construction should be factored into class objects
    case Class of
        % Special hax for creating new UUIDs since they have no literal syntax
        'UUID' ->
            erlang:make_ref();
        _ ->
            % Fresh object with no instance variables; the class's initialize
            % is expected to populate the ivars and return the finished object
            Object = #reia_object{class=Class, ivars=dict:new()},
            Class:call({Object, initialize, Arguments}, Block)
    end.
% Invoke a method on the given object. Arguments may be given as a tuple or
% as a list (which is converted to a tuple); Block defaults to nil.
invoke(Receiver, Method, Arguments) -> invoke(Receiver, Method, Arguments, nil).
invoke(Receiver, Method, Arguments, Block) ->
    NormalizedArgs =
        case is_tuple(Arguments) of
            true ->
                Arguments;
            false ->
                case is_list(Arguments) of
                    true -> list_to_tuple(Arguments);
                    false -> throw({error, "invalid type for arguments"})
                end
        end,
    % Dispatch through the receiver's class module
    Class = Receiver#reia_object.class,
    Class:call({Receiver, Method, NormalizedArgs}, Block).
% Throw a Reia exception of the given class, constructed via inst/2 with a
% {Line, Message} argument tuple (Line defaults to nil)
throw(Class, Message) ->
    throw(Class, nil, Message).
throw(Class, Line, Message) ->
    erlang:throw(inst(Class, {Line, Message})).
% Convert an Erlang list to a Reia string (wrap it in the string record)
list_to_string(List) ->
    #reia_string{elements=List}.
% Convert an Erlang binary to a Reia string
binary_to_string(Bin) ->
#reia_string{elements=Bin}. | src/core/reia.erl | 0.555315 | 0.47384 | reia.erl | starcoder |
%%
%% @doc One of the implementations of
%% [https://groups.google.com/g/erlang-programming/c/ZUHZpH0wsOA
%% coinductive data types].
%%
%% @see lazy2
%%
-module(lazy).
-author("<NAME> <<EMAIL>>").
-export([gen/2, filter/2, foldl/3, map/2, take/2]).
-export([natural_numbers/0]).
-dialyzer(no_improper_lists).
-type lazy_seq() :: fun(() -> [term() | lazy_seq()]).
-type integers() :: fun(() -> [pos_integer() | integers()]).
%%
%% @doc Builds the lazy (infinite) sequence Seed, Step(Seed), Step(Step(Seed)), ...
%% Each cell is a thunk returning [Element | NextThunk]; nothing is computed
%% until the thunk is forced.
%%
-spec gen(E0, F) -> lazy_seq() when
    F :: fun((ECurrent) -> ENext),
    E0 :: term(),
    ECurrent :: term(),
    ENext :: term().
gen(Seed, Step) ->
    fun() ->
        Next = Step(Seed),
        [Seed | gen(Next, Step)]
    end.
%%
%% @doc Generates the lazy sequence of integers K, K+1, K+2, ...
%%
-spec integers_from(pos_integer()) -> integers().
integers_from(K) -> gen(K, fun(N) -> N + 1 end).
%%
%% @doc Generates the lazy sequence of natural numbers 1, 2, 3, ...
%%
-spec natural_numbers() -> integers().
natural_numbers() -> integers_from(1).
%%
%% @doc Keeps only the elements of the lazy sequence `CE' for which the
%% predicate `P' returns true. The result is itself lazy: elements are
%% tested one at a time as the returned thunk is forced.
%%
-spec filter(P :: Predicate, CE :: lazy_seq()) -> fun(() -> lazy_seq()) when
    Predicate :: fun((term()) -> boolean()).
filter(Pred, Seq) ->
    fun() ->
        case Seq() of
            [] ->
                [];
            [Head | Tail] ->
                Rest = filter(Pred, Tail),
                case Pred(Head) of
                    %% Head survives: emit it, keep the rest lazy.
                    true -> [Head | Rest];
                    %% Head dropped: keep forcing until a match or the end.
                    false -> Rest()
                end
        end
    end.
%%
%% @doc Left folds the lazy sequence `CE' using function `F' and initial
%% accumulator `A'. NOTE: unlike lists:foldl/3, `F' here takes the
%% accumulator FIRST and the element second, i.e. F(AccIn, Elem) — the
%% previous spec documented the opposite order and disagreed with the
%% implementation. Only terminates if the sequence is finite (ends in []).
%%
-spec foldl(F, Acc0 :: Acc, lazy_seq()) -> Acc1 :: Acc when
    F :: fun((AccIn :: Acc, Elem :: term()) -> AccOut :: Acc),
    Acc :: term().
foldl(F, A, CE) ->
    case CE() of
        [] -> A;
        %% F(A, X): accumulator first, element second (see @doc above).
        [X | Xs] -> foldl(F, F(A, X), Xs)
    end.
%%
%% @doc Lazily maps `F' over the lazy sequence `CE'. `F' is applied to each
%% element only when the corresponding thunk is forced.
%%
-spec map(F, lazy_seq()) -> lazy_seq() when
    F :: fun((term()) -> term()).
map(Fun, Seq) ->
    fun() -> map_cell(Fun, Seq()) end.

%% Transform one forced cell of the sequence; the tail stays lazy.
map_cell(_Fun, []) -> [];
map_cell(Fun, [Head | Tail]) -> [Fun(Head) | map(Fun, Tail)].
%%
%% @doc Returns the first `N' elements of the given lazy sequence as a
%% regular (eager) list. Crashes (badmatch) if the sequence ends before `N'
%% elements have been produced, exactly like the accumulator-based version.
%%
-spec take(non_neg_integer(), lazy_seq()) -> [term()].
take(0, _LazySeq) ->
    [];
take(N, LazySeq) ->
    [Head | Tail] = LazySeq(),
    [Head | take(N - 1, Tail)].
%% =============================================================================
%% Unit tests
%% =============================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
filter_first(N, P, CE) -> take(N, filter(P, CE)).
foldl_first(0, _, A, _) -> A;
foldl_first(N, F, A, CE) ->
case CE() of
[] -> A;
[X | Xs] -> foldl_first(N - 1, F, F(A, X), Xs)
end.
map_first(N, F, CE) -> take(N, map(F, CE)).
first_natural_numbers(N) -> take(N, natural_numbers()).
first_even_numbers(N) -> filter_first(N, fun(X) -> X rem 2 =:= 0 end, natural_numbers()).
first_squares(N) -> map_first(N, fun(X) -> X * X end, natural_numbers()).
first_sum(N) -> foldl_first(N, fun(X, Sum) -> X + Sum end, 0, natural_numbers()).
filter_test() ->
F = filter(fun(X) -> 10 < X end, natural_numbers()),
[X | _] = F(),
?assertEqual(11, X).
map_test() ->
[X | _] = (map(fun(X) -> X * 2 end, natural_numbers()))(),
?assertEqual(2, X).
first_natural_numbers_test() ->
?assertEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], first_natural_numbers(10)).
first_even_numbers_test_() -> [
?_assertEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20], first_even_numbers(10)),
?_assertEqual([2, 4, 6, 8, 10, 12, 14, 16, 18, 20], take(10, gen(2, fun(N) -> N + 2 end)))
].
first_squares_test() ->
?assertEqual([1, 4, 9, 16, 25, 36, 49, 64, 81, 100], first_squares(10)).
powers_of_two_test() ->
?assertEqual([1, 2, 4, 8, 16, 32, 64, 128, 256, 512], take(10, gen(1, fun(N) -> 2 * N end))).
first_sum_test() ->
?assertEqual(55, first_sum(10)).
-endif. | lib/ndpar/src/lazy.erl | 0.649356 | 0.666008 | lazy.erl | starcoder |
%%
%% Inpired from 'https://github.com/erlang/otp/blob/master/lib/edoc/src/edoc_layout.erl'
%%
-module(hover_doc_layout).
-export([module/2]).
-include_lib("xmerl/include/xmerl.hrl").
%% @doc Render module documentation (edoc xmerl output) to a single string:
%% only the function descriptions are kept, joined with markdown-style
%% line breaks.
module(Element, Options) ->
    Functions = layout_module(Element, init_opts(Element,Options)),
    %return only description
    Ret = [FDesc || {_FName, FDesc} <- Functions],
    lists:flatten(join_strings(Ret, " \n")).
%% Options are passed through unchanged for now.
init_opts(_Element, Options) ->
    Options.
%% Extract {Name, Description} pairs for the functions of a module XML
%% element, optionally restricted to a single function via the options.
layout_module({_, Xml}, Opts) ->
    layout_module(Xml,Opts);
layout_module(#xmlElement{name = module, content = Es}, Opts) ->
    % Filter is like this : {filter, [{function, load_xy, 1}]}
    FnFilter = case proplists:get_value(filter, Opts) of
        %% No filter option: accept every function element.
        undefined -> fun (_) -> true end;
        Array ->
            case proplists:get_value(function, Array) of
                {FName, Arity} -> fun (X) -> are_function_equal(FName, Arity, X) end;
                _ -> fun (_) -> true end
            end
    end,
    % filter the functions according to given options parameters
    Functions = [{function_name(E, Opts), function_description(E, Opts)} ||
                 E <- get_content(functions, Es), FnFilter(E)],
    Functions.
%% True when XML element E describes the function FName/Arity. FName is an
%% atom while the XML attributes are strings, hence the conversions; the
%% length(A) > 0 check guards list_to_integer against a missing attribute.
are_function_equal(FName, Arity, E) ->
    N = list_to_atom(get_attrval(name, E)),
    A = get_attrval(arity, E),
    Result = N == FName andalso length(A) >0 andalso Arity == list_to_integer(A),
    Result.
%% Pull the full description text of one function element, as a flat string
%% with markdown hard line breaks inserted before every newline.
function_description(E, _Opts) ->
    Content = E#xmlElement.content,
    Desc = get_content(description, Content),
    %io:format("description : ~p~n", [Desc]),
    FullDesc = get_text(fullDescription, Desc),
    %replace all '\n' by '  \n' (two spaces) for markdown rendering
    lists:flatten(add_spaces(FullDesc)).
%% Insert markdown hard line breaks: every '\n' in Str becomes "  \n"
%% (two spaces followed by the newline); all other characters pass through.
add_spaces(Str) ->
    lists:flatmap(fun($\n) -> "  \n";
                     (Ch) -> [Ch]
                  end, Str).
%% Render "name(arg1,arg2)" for one function XML element.
function_name(E, _Opts) ->
    Children = E#xmlElement.content,
    Name = get_attrval(name, E),
    lists:flatten(Name ++ "(" ++ function_args(get_content(args, Children)) ++ ")").
%% Join the argument names of a function element with commas.
function_args(Es) ->
    Args = [get_text(argName, Arg#xmlElement.content) || Arg <- get_elem(arg, Es)],
    join_strings(Args, ",").
%% Return, in document order, the child elements of Es whose tag is Name.
%% Non-element nodes (e.g. #xmlText{}) and other tags are skipped.
get_elem(Name, Es) ->
    [E || #xmlElement{name = N} = E <- Es, N =:= Name].
%% Return the content (child nodes) of the first child element of Es named
%% Name, or [] if there is none.
get_content(Name, Es) ->
    case get_elem(Name, Es) of
        [#xmlElement{content = Es1}] ->
            Es1;
        [] -> []
    end.
%% Return the text of the child element named Name as a string. Handles
%% plain text content as well as text wrapped in <p> elements; any other
%% shape yields "" after logging a warning.
get_text(Name, Es) ->
    case get_content(Name, Es) of
        [#xmlText{value = Text}] ->
            Text;
        [] -> "";
        %% Paragraph-wrapped text: descend into the <p> and any siblings.
        [#xmlElement{name=p, content= Es1}|_OtherXmlText] ->
            lists:flatten([T || T <- get_text_value(Es1 ++ _OtherXmlText)]);
        _Other ->
            error_logger:warning_msg("~p:get_text unknown xml content : ~p~n ", [?MODULE, _Other]),
            ""
    end.
%% Concatenate every piece of text found in a list of XML nodes, descending
%% into <p> elements and ignoring any other node kind.
get_text_value([#xmlText{value = Text}|T]) ->
    Text ++ get_text_value(T);
get_text_value([]) ->
    "";
get_text_value([#xmlElement{name=p, content=Es}|T]) ->
    get_text_value(Es) ++ get_text_value(T);
get_text_value([_H|T]) ->
    %%ignore _H, it's not an XmlText or 'p' element
    get_text_value(T).
%% Return the attributes of As named Name (usually zero or one).
get_attr(Name, [#xmlAttribute{name = Name} = A | As]) ->
    [A | get_attr(Name, As)];
get_attr(Name, [_ | As]) ->
    get_attr(Name, As);
get_attr(_, []) ->
    [].
%% Return the value of attribute Name of element E, or "" when absent.
get_attrval(Name, #xmlElement{attributes = As}) ->
    case get_attr(Name, As) of
        [#xmlAttribute{value = V}] ->
            V;
        [] -> ""
    end.
join_strings([], _) ->
[];
join_strings([String], _) ->
String;
join_strings([String|Rest], Joiner) ->
String ++ Joiner ++ join_strings(Rest, Joiner). | apps/erlangbridge/src/hover_doc_layout.erl | 0.551211 | 0.509825 | hover_doc_layout.erl | starcoder |
-module(openapi_bans_api).
-export([get_blocked_server_hashes/1, get_blocked_server_hashes/2]).
-define(BASE_URL, "").
%% @doc A list of SHA1 hashes of banned servers
%% Returns a list of SHA1 hashes used to check server addresses against when the client tries to connect. Clients check the lowercase name, using the ISO-8859-1 charset, against this list. They will also attempt to check subdomains, replacing each level with a *. Specifically, it splits based off of the . in the domain, goes through each section removing one at a time. For instance, for mc.example.com, it would try mc.example.com, *.example.com, and *.com. With IP addresses (verified by having 4 split sections, with each section being a valid integer between 0 and 255, inclusive) substitution starts from the end, so for 192.168.0.1, it would try 192.168.0.1, 192.168.0.*, 192.168.*, and 192.*. This check is done by the bootstrap class in netty. The default netty class is overridden by one in the com.mojang:netty dependency loaded by the launcher. This allows it to affect any version that used netty (1.7+)
%% Convenience wrapper: same as get_blocked_server_hashes/2 with an empty
%% optional-parameter map.
-spec get_blocked_server_hashes(ctx:ctx()) -> {ok, binary(), openapi_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), openapi_utils:response_info()}.
get_blocked_server_hashes(Ctx) ->
    get_blocked_server_hashes(Ctx, #{}).
-spec get_blocked_server_hashes(ctx:ctx(), maps:map()) -> {ok, binary(), openapi_utils:response_info()} | {ok, hackney:client_ref()} | {error, term(), openapi_utils:response_info()}.
get_blocked_server_hashes(Ctx, Optional) ->
_OptionalParams = maps:get(params, Optional, #{}),
Cfg = maps:get(cfg, Optional, application:get_env(kuberl, config, #{})),
Method = get,
Path = ["/blockedservers"],
QS = [],
Headers = [],
Body1 = [],
ContentTypeHeader = openapi_utils:select_header_content_type([]),
Opts = maps:get(hackney_opts, Optional, []),
openapi_utils:request(Ctx, Method, [?BASE_URL, Path], QS, ContentTypeHeader++Headers, Body1, Opts, Cfg). | generated-sources/erlang-client/mojang-sessions/src/openapi_bans_api.erl | 0.627951 | 0.400398 | openapi_bans_api.erl | starcoder |
%%%-------------------------------------------------------------------
%%% Copyright (c) 2017, sFractal Consulting, LLC
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%% http://www.apache.org/licenses/LICENSE-2.0
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%-------------------------------------------------------------------
%%%-------------------------------------------------------------------
%% @doc helper routines for test
%% @end
%%%-------------------------------------------------------------------
-module(helper_json).
-author("<NAME>").
-copyright("2017, sFractal Consulting, LLC").
-license(apache2).
%% for test export all functions
-export( [ post_oc2/3
, create_openc2_atoms/0
, command_atoms/0
, action_atoms/0
, actuator_atoms/0
, target_atoms/0
, modifier_atom/0
] ).
%% required for common_test to work
-include_lib("common_test/include/ct.hrl").
%%%%%%%%%%%%%%%%%%%% Utilities
%% utilities to save putting this in each test
%% Prepend the common_test data_dir (from Config) to Filename.
full_data_file_name(Filename, Config) ->
    filename:join( ?config( data_dir, Config ), Filename ).
%% Read a JSON file located in the common_test data_dir; returns the raw
%% JSON binary (validated by read_json_file/1).
read_json_file(Filename, Config) ->
    FullFilename = full_data_file_name(Filename, Config),
    read_json_file(FullFilename).
%% Read a file, assert its content is valid JSON, and return the raw JSON
%% binary. The previous version called jsx:is_json/1 but discarded the
%% result, so the "validation" was a no-op; matching on true makes a bad
%% fixture fail immediately (same assertive style as post_oc2/3).
read_json_file(Filename) ->
    Json = read_file(Filename),
    true = jsx:is_json(Json),
    Json.
%% Read the whole file as a binary; crashes (badmatch) if it cannot be read.
read_file(Filename) ->
    %% read text from a file
    {ok, Txt} = file:read_file(Filename),
    Txt.
%% Post an OpenC2 command (JSON read from JsonFileName in the test data_dir)
%% to the /openc2 endpoint and verify the response against the expectations
%% stored in ResultsFileName: HTTP status, required JSON keys, and exact
%% key/value pairs. Crashes on the first mismatch; returns ok on success.
post_oc2(JsonFileName, ResultsFileName, Config) ->
    %% read and validate json to send
    JsonTxt = read_json_file(JsonFileName, Config),
    %% read and validate expected results
    ExpectedResultsTxt = read_json_file(ResultsFileName, Config),
    %% convert json to erlang terms
    ExpectedResults = jsx:decode(ExpectedResultsTxt, [return_maps]),
    %% open connection to send post
    Conn = make_conn(),
    %% send post
    {ok, Response} = shotgun:post( Conn
                                 , "/openc2" % Url
                                 , [ { <<"content-type">> % ReqHeaders
                                     , <<"application/json">>
                                     }
                                   ]
                                 , JsonTxt % ReqBody
                                 , #{} % Options
                                 ),
    %% compare expected status to response status
    check_status(ExpectedResults, Response),
    %% check headers
    check_headers(Response),
    %% check has body and is json
    #{ body := RespBody } = Response,
    true = jsx:is_json(RespBody),
    %% decode json into erlang map
    JsonMap = jsx:decode( RespBody, [return_maps] ),
    %% check all components are in response json
    check_keys(JsonMap, ExpectedResults),
    %% check correct return values in response
    check_json_values(JsonMap, ExpectedResults),
    ok.
%% Open a shotgun connection to the local server; the port comes from the
%% 'ocas' application environment and defaults to 8080.
make_conn() ->
    MyPort = application:get_env(ocas, port, 8080),
    {ok, Conn} = shotgun:open("localhost", MyPort),
    Conn.
%% Assert that the HTTP status code of Response equals the
%% <<"ExpectedStatus">> recorded in ExpectedResults; crashes (badmatch) on
%% mismatch, returns ok otherwise.
check_status(ExpectedResults, Response) ->
    Expected = maps:get(<<"ExpectedStatus">>, ExpectedResults),
    Expected = maps:get(status_code, Response),
    ok.
%% Verify the response headers name the expected server and carry a date.
check_headers(Response) ->
    %% get the headers out of the response
    #{ headers := RespHeaders } = Response,
    %% verify headers
    { <<"server">>, <<"Cowboy">>} = lists:keyfind( <<"server">>
                                                 , 1
                                                 , RespHeaders
                                                 ),
    { <<"date">>, _Date } = lists:keyfind(<<"date">>, 1, RespHeaders),
    ok.
%% Check that every key listed under <<"ExpectedJsonKeys">> in
%% ExpectedResults is present in the decoded response JsonMap.
check_keys(JsonMap, ExpectedResults) ->
    %% get expected keys
    ExpectedKeys = maps:get(<<"ExpectedJsonKeys">>, ExpectedResults),
    %% get response keys
    ResponseKeys = maps:keys(JsonMap),
    lager:info("ResponseKeys: ~p", [ResponseKeys]),
    %% check expected are in response
    check_key(ResponseKeys, ExpectedKeys),
    JsonMap.
%% Recursively assert each expected key is a member of ResultKeys;
%% crashes (badmatch) on the first missing key.
check_key(_ResultKeys, [] ) ->
    %% done since list empty
    ok;
check_key(ResultKeys, [Key | RestOfKeys] ) ->
    %% check key is in Results
    lager:info("check_key: ~p", [Key]),
    true = lists:member(Key, ResultKeys),
    %% recurse thru rest of list
    check_key(ResultKeys, RestOfKeys).
%% Check the response contains every key/value pair listed under
%% <<"ExpectedJsonPairs">> in ExpectedResults.
check_json_values(JsonMap, ExpectedResults) ->
    %% get expected key/value map
    ExpectedJsonPairMap = maps:get(<<"ExpectedJsonPairs">>, ExpectedResults),
    ExpectedJsonPairs = maps:to_list(ExpectedJsonPairMap),
    lager:info("ResponseMap: ~p", [JsonMap]),
    %% recurse thru {key,value} in ExpectedJsonPairs looking for match
    check_json_pair( ExpectedJsonPairs, JsonMap).
%% Assert each {Key, Value} pair is present in JsonMap; crashes on the
%% first mismatch (badmatch) or missing key (badkey).
check_json_pair( [], _JsonMap) ->
    % done
    ok;
check_json_pair( [ {Key, Value} | RestOfExepectedPairs ], JsonMap) ->
    %% check in key/value in json map
    lager:info("key/value: ~p/~p", [Key, Value]),
    lager:info("maps.get(Key,JsonMap): ~p", [maps:get(Key, JsonMap)]),
    Value = maps:get(Key, JsonMap),
    %% recurse on to next pair
    check_json_pair( RestOfExepectedPairs, JsonMap).
%% Make sure all OpenC2 atoms pre-exist (for JSON decoding). Merely listing
%% the atoms below creates them when this module is loaded, so later
%% binary-to-existing-atom conversions cannot fail.
create_openc2_atoms() ->
    command_atoms(),
    action_atoms(),
    actuator_atoms(),
    target_atoms(),
    modifier_atom(),
    ok.
%% Top-level keys of an OpenC2 command.
command_atoms() ->
    [ action
    , target
    , actuator
    , modifier
    , specifiers
    ].
%% Supported action verbs.
action_atoms() ->
    [ allow
    , deny
    ].
%% No actuator atoms defined yet.
actuator_atoms() ->
    [].
%% No target atoms defined yet.
target_atoms() ->
    [].
modifier_atom() ->
[]. | test/helper_json.erl | 0.575946 | 0.41182 | helper_json.erl | starcoder |
-module(graph_creation).
-export([create_graph/2, insert_parent_data/1]).
-type vertex() :: {nonempty_string(), integer()}.
-type order() :: [vertex()].
-type connections() :: [vertex()].
-type graph_without_n_parents() :: #{vertex() => connections() | list()}.
-type vertex_data_parents() :: {connections(), integer()} | connections() | list().
-type graph_with_n_parents() :: #{vertex() => vertex_data_parents()}.
%------------------------------------------------%
% API %
%------------------------------------------------%
%Function creates graph of dependency from Word using dependencies written in Dependent.
%Returns the minimized (transitively reduced) graph and the vertex order.
-spec create_graph(Word :: trace_theory:word(), Dependent :: trace_theory:dependent()) ->
    {graph_without_n_parents(), order()}.
create_graph(Word, Dependent) ->
    {AdjacencyMap, Order} = create_vertices(Word),
    {minimalize_graph(add_edges(AdjacencyMap, Order, Dependent), Order), Order}.

%Function adds extra information about number of parents to every vertex,
%i.e. converts each connections() value into a {connections(), NParents}
%tuple, where NParents counts incoming edges.
-spec insert_parent_data(Graph :: graph_with_n_parents()) -> graph_with_n_parents().
insert_parent_data(Graph) ->
    List = maps:to_list(Graph),
    lists:foldl( fun({_, Connections}, GraphMap) ->
                     lists:foldl(fun insert_to_map/2, GraphMap, Connections)
                 end, convert_graph(Graph, List), List).
%-------------------------------------------------%
% insert_parent_data helper functions %
%-------------------------------------------------%
%Function adds 0 as the initial value of n_parents to every vertex.
-spec convert_graph(graph_without_n_parents(), [{vertex(), connections()}]) -> graph_with_n_parents().
convert_graph(Graph, List) ->
    lists:foldl(fun({Vertex, Connections}, GraphMap) ->
                    maps:put(Vertex, {Connections, 0}, GraphMap)
                end, Graph, List).
%Increment the parent counter of the given vertex (it has just gained one
%incoming edge). Crashes with badkey if the vertex is absent.
-spec insert_to_map(Key :: vertex(), Graph :: graph_with_n_parents()) -> graph_with_n_parents().
insert_to_map(Key, Graph) ->
    {Connections, NParents} = maps:get(Key, Graph),
    Graph#{Key := {Connections, NParents + 1}}.
%-------------------------------------------------%
% create_graph helper functions %
%-------------------------------------------------%
%Function adds vertices to graph based on Word. Every occurrence of a letter
%becomes a distinct vertex {Letter, OccurrenceIndex}; the returned order
%lists the vertices in the order they appear in Word.
-spec create_vertices(Word :: trace_theory:word()) -> {graph_without_n_parents(), order()}.
create_vertices(Word) ->
    {Vertices, RevOrder} = create_vertices_iterate(Word, {#{}, []}, #{}),
    %The accumulator is built in reverse (prepending is O(1)) and flipped
    %once here, instead of appending with ++ on every step, which was O(n^2).
    {Vertices, lists:reverse(RevOrder)}.

%Function iterates through the Word adding a new vertex for every production.
%The order accumulator is kept reversed; LettersCounter tracks how many
%occurrences of each letter have been seen so far.
-spec create_vertices_iterate(trace_theory:word(), {graph_without_n_parents(), order()}, #{char() => integer()}) ->
    {graph_without_n_parents(), order()}.
create_vertices_iterate([], Vertices, _) ->
    Vertices;
create_vertices_iterate([Head | Tail], {Vertices, RevOrder}, LettersCounter) ->
    %maps:get/3 with default 0 collapses the previous seen/unseen branches.
    Index = maps:get(Head, LettersCounter, 0),
    Vertex = {Head, Index},
    create_vertices_iterate(Tail,
                            {maps:put(Vertex, [], Vertices), [Vertex | RevOrder]},
                            maps:put(Head, Index + 1, LettersCounter)).
%Function adds edges to the graph which represent the dependency relation
%between productions. For each vertex, edges are only considered towards
%LATER vertices in the order (the tail of the order list).
-spec add_edges(graph_without_n_parents(), order(), trace_theory:dependent()) -> graph_without_n_parents().
add_edges(Graph, [], _) ->
    Graph;
add_edges(Graph, [Head | Tail], Dependent) ->
    add_edges(add_connections(Graph, Head, Tail, Dependent), Tail, Dependent).
%For the vertex Element, add an edge to every later vertex whose letter pair
%appears in Dependent.
-spec add_connections(graph_without_n_parents(), vertex(), order(), trace_theory:dependent()) ->
    graph_without_n_parents().
add_connections(Graph, _, [], _) ->
    Graph;
add_connections(Graph, {ElementIdentifier, _} = Element, [{HeadIdentifier, _} = Head | Tail], Dependent) ->
    %lists:member/2 replaces the old lists:filter/2 + case: it is simpler
    %and, unlike matching the filter result against a one-element list, it
    %does not crash with case_clause when Dependent contains duplicate pairs.
    case lists:member({ElementIdentifier, HeadIdentifier}, Dependent) of
        true ->
            Connections = maps:get(Element, Graph),
            add_connections(maps:put(Element, Connections ++ [Head], Graph),
                            Element, Tail, Dependent);
        false ->
            add_connections(Graph, Element, Tail, Dependent)
    end.
%Function minimizes the graph, deleting any edge that is implied by a longer
%path (a transitive reduction, vertex by vertex).
-spec minimalize_graph(graph_without_n_parents(), order()) -> graph_without_n_parents().
minimalize_graph(GraphMap, []) ->
    GraphMap;
minimalize_graph(GraphMap, [Head | Tail]) ->
    minimalize_graph(delete_extra_edges(GraphMap, Head, maps:get(Head, GraphMap)), Tail).

%For one vertex, keep only the neighbours that are NOT reachable through an
%alternative path; the others are redundant and dropped.
-spec delete_extra_edges(graph_without_n_parents(), vertex(), connections()) -> graph_without_n_parents().
delete_extra_edges(GraphMap, _, []) ->
    GraphMap;
delete_extra_edges(GraphMap, Head, Connections) ->
    maps:put(Head, lists:filter(fun(Element) ->
                                    check_path(GraphMap, Connections, Head, Element, Head)
                                end, Connections), GraphMap).

%Function checks that there is NO alternative path from Head to Element
%(other than the direct edge): true means the direct edge is necessary,
%false means Element is reachable some other way. Arguments: the graph, the
%connections currently being explored, the start vertex, the target
%neighbour, and the vertex those connections belong to.
-spec check_path(graph_without_n_parents(), connections(), vertex(), vertex(), vertex()) -> boolean().
check_path(_, _, _, Element, Element) ->
    false;
check_path(_, [], _, _, _) ->
    true;
check_path(GraphMap, Connections, Head, Element, Head) ->
    %At the start vertex the direct edge itself must not count as an
    %alternative path, hence the special 'true' for an exact match.
    check_results(lists:map(fun(X) ->
                                case X of
                                    Element -> true;
                                    _ -> check_path(GraphMap, maps:get(X, GraphMap), Head, Element, X)
                                end
                            end, Connections));
check_path(GraphMap, Connections, Head, Element, _) ->
    check_results(lists:map(fun(X) -> check_path(GraphMap, maps:get(X, GraphMap), Head, Element, X)
                            end, Connections)).
%Function used to determine if there is false in a table, in that case, the function returns false.
-spec check_results([boolean()]) -> boolean().
check_results([]) ->
true;
check_results([false | _]) ->
false;
check_results([_|Tail]) ->
check_results(Tail). | src/graph_creation.erl | 0.570451 | 0.609292 | graph_creation.erl | starcoder |
-module(heaps).
%% (c) 2011-2013 <NAME> <<EMAIL>>. All rights reserved.
%% Released under the BSD 2-clause license - see this for details:
%% http://github.com/herenowcoder/erl-heaps/blob/master/LICENSE
-export([ add/3, add/4, contains_value/2, delete_by_value/2, delete/3,
is_empty/1, mapping/2, new/0, take_min/1 ]).
%% Priority may be any Erlang term; ordering uses the standard term order.
-type pri() :: any().
%% Tree keys pair the priority with a unique ref so that elements with
%% equal priorities do not collide in the gb_tree.
-type heap_tree_key() :: {pri(), reference()}.
%% A heap is a {key->value gb_tree, value->{tree key|none, aux} dict} pair;
%% the atom 'empty' is also accepted where documented.
-type heap() :: {gb_tree:gb_tree(), dict:dict()} | empty.
-export_type([heap/0]).
%% @doc Create a new, empty heap.
-spec new() -> heap().
new() ->
    {gb_trees:empty(), dict:new()}.
%% @doc True when the heap contains no elements.
-spec is_empty(heap()) -> boolean().
is_empty(_Heap={T, _}) ->
    %% Use the purpose-built predicate instead of comparing size/1 == 0;
    %% clearer and returns an exact boolean.
    gb_trees:is_empty(T).
%% @doc Add Val with priority Pri and no auxiliary data.
-spec add(pri(), any(), heap()) -> heap().
add(Pri, Val, Heap) ->
    add(Pri, Val, none, Heap).
%% @doc Add Val with priority Pri and auxiliary data Aux. Values must be
%% unique within the heap: the assertive match below crashes (badmatch)
%% if Val is already present.
-spec add(pri(), any(), any(), heap()) -> heap().
add(Pri, Val, Aux, Heap={T0,R0}) ->
    false = contains_value(Val, Heap),
    %% A fresh ref makes the tree key unique even for duplicate priorities.
    TreeKey = {Pri, make_ref()},
    {gb_trees:insert(TreeKey, Val, T0),
     dict:store(Val, {TreeKey, Aux}, R0)}.
%% @doc Remove and return the element with the smallest priority, together
%% with the updated heap. Crashes if the heap holds no elements.
-spec take_min(heap()) -> {pri(), any(), heap()}.
take_min(_Heap={T0,R0}) ->
    {{Pri,_}, Val, T1} = gb_trees:take_smallest(T0),
    {Pri, Val, {T1, r_delete(Val, R0)}}.
%% @doc Return the {TreeKey, Aux} mapping for Val; crashes if Val was never
%% added. TreeKey is 'none' if the value has since been removed.
-spec mapping(any(), heap()) -> {heap_tree_key(), any()}.
mapping(Val, _Heap={_T,R}) ->
    {_TreeKey, _Aux} = dict:fetch(Val, R).
-compile({inline, mapping/2}).
%% @doc Delete the element Val, looking its tree key up first.
-spec delete_by_value(any(), heap()) -> heap().
delete_by_value(Val, Heap) ->
    {TreeKey,_} = mapping(Val, Heap),
    delete(TreeKey, Val, Heap).
%% @doc Delete the element with the given tree key and value.
-spec delete(heap_tree_key(), any(), heap()) -> heap().
delete(TreeKey, Val, _Heap={T0,R0}) ->
    {gb_trees:delete(TreeKey, T0), r_delete(Val, R0)}.
-compile({inline, delete/3}).
%% Mark Val as no longer present in the reverse map. Note: the entry is NOT
%% erased; its tree key is replaced by 'none' so the auxiliary data stays
%% around while contains_value/2 reports the value as absent.
-spec r_delete(any(), dict:dict()) -> dict:dict().
r_delete(Val, R0) ->
    dict:update(Val, fun({_,Aux})-> {none,Aux} end, R0).
-compile({inline, r_delete/2}).
%% @doc True when Val is currently present in the heap, i.e. it has a live
%% tree key. Entries whose key was reset to 'none' by r_delete/2 count as
%% absent, as do values never added at all.
-spec contains_value(any(), heap()) -> boolean().
contains_value(Val, _Heap={_,R}) ->
    case dict:find(Val, R) of
        {ok, {none, _Aux}} -> false;
        {ok, {_TreeKey, _Aux}} -> true;
        error -> false
    end.
-compile({inline, contains_value/2}).
-include_lib("eunit/include/eunit.hrl").
-define(IS(X), ?_assert(X)).
-define(EQ(X,Y), ?_assertEqual(X,Y)).
-define(ERR(T,X), ?_assertError(T,X)).
simple_test_() ->
H0 = new(),
H1 = add(1, a, H0),
H2 = add(2, b, H1),
{P1, Va, H3} = take_min(H2),
H4 = delete_by_value(b, H3),
[
?IS( not contains_value(any, H0) ),
?ERR( badarg, delete_by_value(any, H0) ),
?ERR( function_clause, take_min(H0) ),
?IS( contains_value(a, H1) ),
?ERR( {badmatch,true}, add(anypri, a, H1) ),
?IS( contains_value(a, H2) ),
?IS( contains_value(b, H2) ),
?IS( not contains_value(foo, H2) ),
?EQ( P1, 1 ), ?EQ( Va, a ),
?IS( not contains_value(a, H3) ),
%%?ERR( badarg, delete_by_value(a, H3) ),
?IS( contains_value(b, H3) ),
?IS( not contains_value(b, H4) ),
%%?ERR( badarg, delete_by_value(a, H4) ),
[]]. | src/heaps.erl | 0.691602 | 0.474875 | heaps.erl | starcoder |
%|==========================================================================================|
%| Copyright (c) 2016 <NAME> |
%| |
%| Permission is hereby granted, free of charge, to any person obtaining a copy |
%| of this software and associated documentation files (the "Software"), to deal |
%| in the Software without restriction, including without limitation the rights |
%| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
%| copies of the Software, and to permit persons to whom the Software is |
%| furnished to do so, subject to the following conditions: |
%| |
%| The above copyright notice and this permission notice shall be included in |
%| all copies or substantial portions of the Software. |
%| |
%| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
%| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
%| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
%| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
%| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
%| FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER |
%| DEALINGS IN THE SOFTWARE. |
%|==========================================================================================|
-module(cputest).
-author("<NAME>").
-export([
get_optimal_processes_number/0, generate_test_data/1, multicore_sort/2, is_sorted/1
]).
%%===========================================================================================
%% Data
%%===========================================================================================
-define(TEST_DATA_SIZE, 500000).
%%===========================================================================================
%% Optimal processes number
%%===========================================================================================
%%-------------------------------------------------------------------------------------------
%% @doc
%% Calculates the best number of processes to use with the current machine.
%% @spec
%% get_optimal_processes_number() -> pos_integer()
%% @end
%%-------------------------------------------------------------------------------------------
-spec get_optimal_processes_number() -> pos_integer().
get_optimal_processes_number() ->
    TestData = generate_test_data(?TEST_DATA_SIZE),
    BaselineTime = measure_sort_time(TestData, 1),
    get_optimal_processes_number(BaselineTime, 1, TestData, 2).
%%-------------------------------------------------------------------------------------------
%% @private
%% @doc
%% Doubles the process count until a run becomes slower than the best run seen so
%% far, then returns the process count that produced that best time.
%% <br/><b>BestTime:</b> The current best time.
%% <br/><b>BestProcs:</b> The number of processes used in the best time.
%% <br/><b>TestData:</b> The generated data to do the tests.
%% <br/><b>NumProcs:</b> The current number of processes to do the test.
%% @spec
%% get_optimal_processes_number(BestTime::integer(),
%% BestProcs::pos_integer(), TestData::[pos_integer()],
%% NumProcs::pos_integer()) -> pos_integer()
%% @end
%%-------------------------------------------------------------------------------------------
-spec get_optimal_processes_number(BestTime::integer(),BestProcs::pos_integer(),
    TestData::[pos_integer()],NumProcs::pos_integer()) -> pos_integer().
get_optimal_processes_number(BestTime, BestProcs, TestData, NumProcs) ->
    case measure_sort_time(TestData, NumProcs) of
        Slower when Slower > BestTime ->
            %% More processes stopped paying off: keep the previous winner.
            BestProcs;
        Faster ->
            get_optimal_processes_number(Faster, NumProcs, TestData, NumProcs * 2)
    end.
%%===========================================================================================
%% Multicore sort
%%===========================================================================================
%%-------------------------------------------------------------------------------------------
%% @doc
%% Sorts a list of numbers, using 1 or more processes.
%% <br/><b>Data:</b> The list to sort.
%% <br/><b>MaxProcs:</b> The maximum number of processes allowed.
%% @spec
%% multicore_sort(Data::[any()], MaxProcs::number()) -> maybe_improper_list()
%% @end
%%-------------------------------------------------------------------------------------------
-spec multicore_sort(Data::[any()],MaxProcs::number()) -> maybe_improper_list().
multicore_sort(Data, MaxProcs) when is_list(Data), is_number(MaxProcs), MaxProcs > 0 ->
    multicore_sort(Data, 1, MaxProcs).
%%-------------------------------------------------------------------------------------------
%% @private
%% @doc
%% Parallel quicksort: partitions around the head element and, while the process
%% budget allows it, sorts each partition in a freshly spawned linked process.
%% <br/><b>Data:</b> The list to sort.
%% <br/><b>CurProcs:</b> The current number of processes.
%% <br/><b>MaxProcs:</b> The maximum number of processes allowed.
%% @spec
%% multicore_sort(Data::[any()], CurProcs::pos_integer(),
%% MaxProcs::number()) -> maybe_improper_list()
%% @end
%%-------------------------------------------------------------------------------------------
-spec multicore_sort(Data::[any()],CurProcs::pos_integer(),
    MaxProcs::number()) -> maybe_improper_list().
multicore_sort([], _CurProcs, _MaxProcs) ->
    [];
multicore_sort([Pivot | Rest], CurProcs, MaxProcs) ->
    %% Elements strictly below the pivot go left, the rest (>= Pivot) go right.
    {Smaller, GreaterEq} = lists:partition(fun(X) -> X < Pivot end, Rest),
    case CurProcs < MaxProcs of
        true ->
            %% Still under the process budget: sort both partitions concurrently
            %% in two linked children, then collect each result keyed by PID.
            Parent = self(),
            Spawn = fun(Part) ->
                spawn_link(fun() ->
                    Parent ! {self(), multicore_sort(Part, CurProcs * 2, MaxProcs)}
                end)
            end,
            LeftPid = Spawn(Smaller),
            RightPid = Spawn(GreaterEq),
            Collect = fun(Pid) -> receive {Pid, Sorted} -> Sorted end end,
            Collect(LeftPid) ++ [Pivot] ++ Collect(RightPid);
        false ->
            %% Budget exhausted: finish the remaining work sequentially here.
            multicore_sort(Smaller, CurProcs, MaxProcs)
                ++ [Pivot]
                ++ multicore_sort(GreaterEq, CurProcs, MaxProcs)
    end.
%%===========================================================================================
%% Is sorted
%%===========================================================================================
%%-------------------------------------------------------------------------------------------
%% @doc
%% Checks if a list of elements is sorted or not.
%% <br/><b>List:</b> The list to check.
%% @spec
%% is_sorted(List::maybe_improper_list()) -> boolean()
%% @end
%%-------------------------------------------------------------------------------------------
-spec is_sorted(List::maybe_improper_list()) -> boolean().
%% Any adjacent descending pair proves the list is unsorted; otherwise walk on.
is_sorted([First, Second | _]) when First > Second ->
    false;
is_sorted([_ | Rest]) ->
    is_sorted(Rest);
is_sorted([]) ->
    true.
%%===========================================================================================
%% Utility functions
%%===========================================================================================
%%-------------------------------------------------------------------------------------------
%% @doc
%% Generates a list of random numbers, each in the range 1..Size.
%% <br/><b>Size:</b> The size of the list to generate.
%% @spec
%% generate_test_data(Size::pos_integer()) -> [pos_integer()]
%% @end
%%-------------------------------------------------------------------------------------------
-spec generate_test_data(Size::pos_integer()) -> [pos_integer()].
generate_test_data(Size) when is_number(Size), Size > 0 ->
    %% `rand` replaces the deprecated `random` module (removed in OTP 26);
    %% it also self-seeds per process, so no explicit seeding is required.
    [rand:uniform(Size) || _ <- lists:seq(1, Size)].
%%-------------------------------------------------------------------------------------------
%% @private
%% @doc
%% Measures the time (in microseconds, as reported by timer:tc/3) spent sorting
%% a list of numbers with multicore_sort/2.
%% <br/><b>Data:</b> The list to sort.
%% <br/><b>MaxProcs:</b> The maximum number of processes allowed.
%% @spec
%% measure_sort_time(Data::[pos_integer()], MaxProcs::pos_integer()) -> integer()
%% @end
%%-------------------------------------------------------------------------------------------
-spec measure_sort_time(Data::[pos_integer()],MaxProcs::pos_integer()) -> integer().
measure_sort_time(Data, MaxProcs) when is_list(Data), is_number(MaxProcs), MaxProcs > 0 ->
    %% The sorted result is discarded; only the elapsed time matters here.
    {Elapsed, _Sorted} = timer:tc(?MODULE, multicore_sort, [Data, MaxProcs]),
    Elapsed.