code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
%% Copyright (c) 2016 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(tansu_ps).
-export([create_table/0]).
-export([term/1]).
-export([term/2]).
-export([id/0]).
-export([increment/2]).
-export([voted_for/1]).
-export([voted_for/2]).
-record(?MODULE, {id, term, voted_for}).
create_table() ->
Attributes = [{attributes, record_info(fields, ?MODULE)},
{type, ordered_set}],
Definition = case tansu_config:db_schema() of
ram ->
Attributes;
_ ->
[{disc_copies, [node()]} | Attributes]
end,
case mnesia:create_table(?MODULE, Definition) of
{atomic, ok} ->
new(),
true;
{aborted, {already_exists, _}} ->
true;
{aborted, Reason} ->
error(Reason)
end.
id() ->
activity(
fun
() ->
mnesia:first(?MODULE)
end).
term(Id) ->
activity(
fun() ->
case mnesia:read(?MODULE, Id) of
[#?MODULE{term = CurrentTerm}] ->
CurrentTerm;
[] ->
error(badarg, [Id])
end
end).
term(Id, New) ->
activity(
fun() ->
case mnesia:read(?MODULE, Id) of
[#?MODULE{term = Current} = PS] when New > Current ->
ok = mnesia:write(PS#?MODULE{term = New}),
New;
[#?MODULE{term = Current}] when New < Current ->
error(badarg, [Id]);
[#?MODULE{term = Current}] ->
Current;
[] ->
error(badarg, [Id])
end
end).
increment(Id, CurrentTerm) ->
activity(
fun() ->
case mnesia:read(?MODULE, Id) of
[#?MODULE{term = CurrentTerm} = PS] ->
ok = mnesia:write(PS#?MODULE{term = CurrentTerm + 1}),
CurrentTerm+1;
[#?MODULE{term = _}] ->
error(badarg, [Id, CurrentTerm]);
[] ->
error(badarg, [Id, CurrentTerm])
end
end).
voted_for(Id) ->
activity(
fun() ->
case mnesia:read(?MODULE, Id) of
[#?MODULE{voted_for = VotedFor}] ->
VotedFor;
[] ->
error(badarg, [Id])
end
end).
voted_for(Id, VotedFor) ->
activity(
fun() ->
case mnesia:read(?MODULE, Id) of
[#?MODULE{} = PS] ->
ok = mnesia:write(
PS#?MODULE{voted_for = VotedFor}),
VotedFor;
[] ->
error(badarg, [Id, VotedFor])
end
end).
new() ->
activity(
fun() ->
mnesia:write(#?MODULE{id = tansu_uuid:new(), term = 0})
end).
activity(F) ->
mnesia:activity(transaction, F). | src/tansu_ps.erl | 0.556038 | 0.412234 | tansu_ps.erl | starcoder |
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2007-2020 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(file_handle_cache).
%% A File Handle Cache
%%
%% This extends a subset of the functionality of the Erlang file
%% module. In the below, we use "file handle" to specifically refer to
%% file handles, and "file descriptor" to refer to descriptors which
%% are not file handles, e.g. sockets.
%%
%% Some constraints
%% 1) This supports one writer, multiple readers per file. Nothing
%% else.
%% 2) Do not open the same file from different processes. Bad things
%% may happen, especially for writes.
%% 3) Writes are all appends. You cannot write to the middle of a
%% file, although you can truncate and then append if you want.
%% 4) There are read and write buffers. Feel free to use the read_ahead
%% mode, but beware of the interaction between that buffer and the write
%% buffer.
%%
%% Some benefits
%% 1) You do not have to remember to call sync before close
%% 2) Buffering is much more flexible than with the plain file module,
%% and you can control when the buffer gets flushed out. This means
%% that you can rely on reads-after-writes working, without having to
%% call the expensive sync.
%% 3) Unnecessary calls to position and sync get optimised out.
%% 4) You can find out what your 'real' offset is, and what your
%% 'virtual' offset is (i.e. where the hdl really is, and where it
%% would be after the write buffer is written out).
%%
%% There is also a server component which serves to limit the number
%% of open file descriptors. This is a hard limit: the server
%% component will ensure that clients do not have more file
%% descriptors open than it's configured to allow.
%%
%% On open, the client requests permission from the server to open the
%% required number of file handles. The server may ask the client to
%% close other file handles that it has open, or it may queue the
%% request and ask other clients to close file handles they have open
%% in order to satisfy the request. Requests are always satisfied in
%% the order they arrive, even if a latter request (for a small number
%% of file handles) can be satisfied before an earlier request (for a
%% larger number of file handles). On close, the client sends a
%% message to the server. These messages allow the server to keep
%% track of the number of open handles. The client also keeps a
%% gb_tree which is updated on every use of a file handle, mapping the
%% time at which the file handle was last used (timestamp) to the
%% handle. Thus the smallest key in this tree maps to the file handle
%% that has not been used for the longest amount of time. This
%% smallest key is included in the messages to the server. As such,
%% the server keeps track of when the least recently used file handle
%% was used *at the point of the most recent open or close* by each
%% client.
%%
%% Note that this data can go very out of date, by the client using
%% the least recently used handle.
%%
%% When the limit is exceeded (i.e. the number of open file handles is
%% at the limit and there are pending 'open' requests), the server
%% calculates the average age of the last reported least recently used
%% file handle of all the clients. It then tells all the clients to
%% close any handles not used for longer than this average, by
%% invoking the callback the client registered. The client should
%% receive this message and pass it into
%% set_maximum_since_use/1. However, it is highly possible this age
%% will be greater than the ages of all the handles the client knows
%% of because the client has used its file handles in the mean
%% time. Thus at this point the client reports to the server the
%% current timestamp at which its least recently used file handle was
%% last used. The server will check two seconds later that either it
%% is back under the limit, in which case all is well again, or if
%% not, it will calculate a new average age. Its data will be much
%% more recent now, and so it is very likely that when this is
%% communicated to the clients, the clients will close file handles.
%% (In extreme cases, where it's very likely that all clients have
%% used their open handles since they last sent in an update, which
%% would mean that the average will never cause any file handles to
%% be closed, the server can send out an average age of 0, resulting
%% in all available clients closing all their file handles.)
%%
%% Care is taken to ensure that (a) processes which are blocked
%% waiting for file descriptors to become available are not sent
%% requests to close file handles; and (b) given it is known how many
%% file handles a process has open, when the average age is forced to
%% 0, close messages are only sent to enough processes to release the
%% correct number of file handles and the list of processes is
%% randomly shuffled. This ensures we don't cause processes to
%% needlessly close file handles, and ensures that we don't always
%% make such requests of the same processes.
%%
%% The advantage of this scheme is that there is only communication
%% from the client to the server on open, close, and when in the
%% process of trying to reduce file handle usage. There is no
%% communication from the client to the server on normal file handle
%% operations. This scheme forms a feed-back loop - the server does
%% not care which file handles are closed, just that some are, and it
%% checks this repeatedly when over the limit.
%%
%% Handles which are closed as a result of the server are put into a
%% "soft-closed" state in which the handle is closed (data flushed out
%% and sync'd first) but the state is maintained. The handle will be
%% fully reopened again as soon as needed, thus users of this library
%% do not need to worry about their handles being closed by the server
%% - reopening them when necessary is handled transparently.
%%
%% The server also supports obtain, release and transfer. obtain/{0,1}
%% blocks until a file descriptor is available, at which point the
%% requesting process is considered to 'own' more descriptor(s).
%% release/{0,1} is the inverse operation and releases previously obtained
%% descriptor(s). transfer/{1,2} transfers ownership of file descriptor(s)
%% between processes. It is non-blocking. Obtain has a
%% lower limit, set by the ?OBTAIN_LIMIT/1 macro. File handles can use
%% the entire limit, but will be evicted by obtain calls up to the
%% point at which no more obtain calls can be satisfied by the obtains
%% limit. Thus there will always be some capacity available for file
%% handles. Processes that use obtain are never asked to return them,
%% and they are not managed in any way by the server. It is simply a
%% mechanism to ensure that processes that need file descriptors such
%% as sockets can do so in such a way that the overall number of open
%% file descriptors is managed.
%%
%% The callers of register_callback/3, obtain, and the argument of
%% transfer are monitored, reducing the count of handles in use
%% appropriately when the processes terminate.
-behaviour(gen_server2).
-export([register_callback/3]).
-export([open/3, close/1, read/2, append/2, needs_sync/1, sync/1, position/2,
truncate/1, current_virtual_offset/1, current_raw_offset/1, flush/1,
copy/3, set_maximum_since_use/1, delete/1, clear/1,
open_with_absolute_path/3]).
-export([obtain/0, obtain/1, release/0, release/1, transfer/1, transfer/2,
set_limit/1, get_limit/0, info_keys/0, with_handle/1, with_handle/2,
info/0, info/1, clear_read_cache/0, clear_process_read_cache/0]).
-export([set_reservation/0, set_reservation/1, release_reservation/0]).
-export([ulimit/0]).
-export([start_link/0, start_link/2, init/1, handle_call/3, handle_cast/2,
handle_info/2, terminate/2, code_change/3, prioritise_cast/3]).
-define(SERVER, ?MODULE).
%% Reserve 3 handles for ra usage: wal, segment writer and a dets table
-define(RESERVED_FOR_OTHERS, 100 + 3).
-define(FILE_HANDLES_LIMIT_OTHER, 1024).
-define(FILE_HANDLES_CHECK_INTERVAL, 2000).
-define(OBTAIN_LIMIT(LIMIT), trunc((LIMIT * 0.9) - 2)).
-define(CLIENT_ETS_TABLE, file_handle_cache_client).
-define(ELDERS_ETS_TABLE, file_handle_cache_elders).
%%----------------------------------------------------------------------------
-record(file,
{ reader_count,
has_writer
}).
-record(handle,
{ hdl,
ref,
offset,
is_dirty,
write_buffer_size,
write_buffer_size_limit,
write_buffer,
read_buffer,
read_buffer_pos,
read_buffer_rem, %% Num of bytes from pos to end
read_buffer_size, %% Next size of read buffer to use
read_buffer_size_limit, %% Max size of read buffer to use
read_buffer_usage, %% Bytes we have read from it, for tuning
at_eof,
path,
mode,
options,
is_write,
is_read,
last_used_at
}).
-record(fhc_state,
{ elders,
limit,
open_count,
open_pending,
obtain_limit, %%socket
obtain_count_socket,
obtain_count_file,
obtain_pending_socket,
obtain_pending_file,
clients,
timer_ref,
alarm_set,
alarm_clear,
reserve_count_socket,
reserve_count_file
}).
-record(cstate,
{ pid,
callback,
opened,
obtained_socket,
obtained_file,
blocked,
pending_closes,
reserved_socket,
reserved_file
}).
-record(pending,
{ kind,
pid,
requested,
from
}).
%%----------------------------------------------------------------------------
%% Specs
%%----------------------------------------------------------------------------
-type ref() :: any().
-type ok_or_error() :: 'ok' | {'error', any()}.
-type val_or_error(T) :: {'ok', T} | {'error', any()}.
-type position() :: ('bof' | 'eof' | non_neg_integer() |
{('bof' |'eof'), non_neg_integer()} |
{'cur', integer()}).
-type offset() :: non_neg_integer().
-spec register_callback(atom(), atom(), [any()]) -> 'ok'.
-spec open
(file:filename(), [any()],
[{'write_buffer', (non_neg_integer() | 'infinity' | 'unbuffered')} |
{'read_buffer', (non_neg_integer() | 'unbuffered')}]) ->
val_or_error(ref()).
-spec open_with_absolute_path
(file:filename(), [any()],
[{'write_buffer', (non_neg_integer() | 'infinity' | 'unbuffered')} |
{'read_buffer', (non_neg_integer() | 'unbuffered')}]) ->
val_or_error(ref()).
-spec close(ref()) -> ok_or_error().
-spec read
(ref(), non_neg_integer()) -> val_or_error([char()] | binary()) | 'eof'.
-spec append(ref(), iodata()) -> ok_or_error().
-spec sync(ref()) -> ok_or_error().
-spec position(ref(), position()) -> val_or_error(offset()).
-spec truncate(ref()) -> ok_or_error().
-spec current_virtual_offset(ref()) -> val_or_error(offset()).
-spec current_raw_offset(ref()) -> val_or_error(offset()).
-spec flush(ref()) -> ok_or_error().
-spec copy(ref(), ref(), non_neg_integer()) -> val_or_error(non_neg_integer()).
-spec delete(ref()) -> ok_or_error().
-spec clear(ref()) -> ok_or_error().
-spec set_maximum_since_use(non_neg_integer()) -> 'ok'.
-spec obtain() -> 'ok'.
-spec obtain(non_neg_integer()) -> 'ok'.
-spec release() -> 'ok'.
-spec release(non_neg_integer()) -> 'ok'.
-spec transfer(pid()) -> 'ok'.
-spec transfer(pid(), non_neg_integer()) -> 'ok'.
-spec with_handle(fun(() -> A)) -> A.
-spec with_handle(non_neg_integer(), fun(() -> A)) -> A.
-spec set_limit(non_neg_integer()) -> 'ok'.
-spec get_limit() -> non_neg_integer().
-spec info_keys() -> rabbit_types:info_keys().
-spec info() -> rabbit_types:infos().
-spec info([atom()]) -> rabbit_types:infos().
-spec ulimit() -> 'unknown' | non_neg_integer().
%%----------------------------------------------------------------------------
-define(INFO_KEYS, [total_limit, total_used, sockets_limit, sockets_used]).
%%----------------------------------------------------------------------------
%% Public API
%%----------------------------------------------------------------------------
start_link() ->
start_link(fun alarm_handler:set_alarm/1, fun alarm_handler:clear_alarm/1).
start_link(AlarmSet, AlarmClear) ->
gen_server2:start_link({local, ?SERVER}, ?MODULE, [AlarmSet, AlarmClear],
[{timeout, infinity}]).
register_callback(M, F, A)
when is_atom(M) andalso is_atom(F) andalso is_list(A) ->
gen_server2:cast(?SERVER, {register_callback, self(), {M, F, A}}).
open(Path, Mode, Options) ->
open_with_absolute_path(filename:absname(Path), Mode, Options).
open_with_absolute_path(Path, Mode, Options) ->
File1 = #file { reader_count = RCount, has_writer = HasWriter } =
case get({Path, fhc_file}) of
File = #file {} -> File;
undefined -> #file { reader_count = 0,
has_writer = false }
end,
Mode1 = append_to_write(Mode),
IsWriter = is_writer(Mode1),
case IsWriter andalso HasWriter of
true -> {error, writer_exists};
false -> {ok, Ref} = new_closed_handle(Path, Mode1, Options),
case get_or_reopen_timed([{Ref, new}]) of
{ok, [_Handle1]} ->
RCount1 = case is_reader(Mode1) of
true -> RCount + 1;
false -> RCount
end,
HasWriter1 = HasWriter orelse IsWriter,
put({Path, fhc_file},
File1 #file { reader_count = RCount1,
has_writer = HasWriter1 }),
{ok, Ref};
Error ->
erase({Ref, fhc_handle}),
Error
end
end.
close(Ref) ->
case erase({Ref, fhc_handle}) of
undefined -> ok;
Handle -> case hard_close(Handle) of
ok -> ok;
{Error, Handle1} -> put_handle(Ref, Handle1),
Error
end
end.
read(Ref, Count) ->
with_flushed_handles(
[Ref], keep,
fun ([#handle { is_read = false }]) ->
{error, not_open_for_reading};
([#handle{read_buffer_size_limit = 0,
hdl = Hdl, offset = Offset} = Handle]) ->
%% The read buffer is disabled. This is just an
%% optimization: the clauses below can handle this case.
case prim_file_read(Hdl, Count) of
{ok, Data} -> {{ok, Data},
[Handle#handle{offset = Offset+size(Data)}]};
eof -> {eof, [Handle #handle { at_eof = true }]};
Error -> {Error, Handle}
end;
([Handle = #handle{read_buffer = Buf,
read_buffer_pos = BufPos,
read_buffer_rem = BufRem,
read_buffer_usage = BufUsg,
offset = Offset}])
when BufRem >= Count ->
<<_:BufPos/binary, Res:Count/binary, _/binary>> = Buf,
{{ok, Res}, [Handle#handle{offset = Offset + Count,
read_buffer_pos = BufPos + Count,
read_buffer_rem = BufRem - Count,
read_buffer_usage = BufUsg + Count }]};
([Handle0]) ->
maybe_reduce_read_cache([Ref]),
Handle = #handle{read_buffer = Buf,
read_buffer_pos = BufPos,
read_buffer_rem = BufRem,
read_buffer_size = BufSz,
hdl = Hdl,
offset = Offset}
= tune_read_buffer_limit(Handle0, Count),
WantedCount = Count - BufRem,
case prim_file_read(Hdl, max(BufSz, WantedCount)) of
{ok, Data} ->
<<_:BufPos/binary, BufTl/binary>> = Buf,
ReadCount = size(Data),
case ReadCount < WantedCount of
true ->
OffSet1 = Offset + BufRem + ReadCount,
{{ok, <<BufTl/binary, Data/binary>>},
[reset_read_buffer(
Handle#handle{offset = OffSet1})]};
false ->
<<Hd:WantedCount/binary, _/binary>> = Data,
OffSet1 = Offset + BufRem + WantedCount,
BufRem1 = ReadCount - WantedCount,
{{ok, <<BufTl/binary, Hd/binary>>},
[Handle#handle{offset = OffSet1,
read_buffer = Data,
read_buffer_pos = WantedCount,
read_buffer_rem = BufRem1,
read_buffer_usage = WantedCount}]}
end;
eof ->
{eof, [Handle #handle { at_eof = true }]};
Error ->
{Error, [reset_read_buffer(Handle)]}
end
end).
append(Ref, Data) ->
with_handles(
[Ref],
fun ([#handle { is_write = false }]) ->
{error, not_open_for_writing};
([Handle]) ->
case maybe_seek(eof, Handle) of
{{ok, _Offset}, #handle { hdl = Hdl, offset = Offset,
write_buffer_size_limit = 0,
at_eof = true } = Handle1} ->
Offset1 = Offset + iolist_size(Data),
{prim_file_write(Hdl, Data),
[Handle1 #handle { is_dirty = true, offset = Offset1 }]};
{{ok, _Offset}, #handle { write_buffer = WriteBuffer,
write_buffer_size = Size,
write_buffer_size_limit = Limit,
at_eof = true } = Handle1} ->
WriteBuffer1 = [Data | WriteBuffer],
Size1 = Size + iolist_size(Data),
Handle2 = Handle1 #handle { write_buffer = WriteBuffer1,
write_buffer_size = Size1 },
case Limit =/= infinity andalso Size1 > Limit of
true -> {Result, Handle3} = write_buffer(Handle2),
{Result, [Handle3]};
false -> {ok, [Handle2]}
end;
{{error, _} = Error, Handle1} ->
{Error, [Handle1]}
end
end).
sync(Ref) ->
with_flushed_handles(
[Ref], keep,
fun ([#handle { is_dirty = false, write_buffer = [] }]) ->
ok;
([Handle = #handle { hdl = Hdl,
is_dirty = true, write_buffer = [] }]) ->
case prim_file_sync(Hdl) of
ok -> {ok, [Handle #handle { is_dirty = false }]};
Error -> {Error, [Handle]}
end
end).
needs_sync(Ref) ->
%% This must *not* use with_handles/2; see bug 25052
case get({Ref, fhc_handle}) of
#handle { is_dirty = false, write_buffer = [] } -> false;
#handle {} -> true
end.
position(Ref, NewOffset) ->
with_flushed_handles(
[Ref], keep,
fun ([Handle]) -> {Result, Handle1} = maybe_seek(NewOffset, Handle),
{Result, [Handle1]}
end).
truncate(Ref) ->
with_flushed_handles(
[Ref],
fun ([Handle1 = #handle { hdl = Hdl }]) ->
case prim_file:truncate(Hdl) of
ok -> {ok, [Handle1 #handle { at_eof = true }]};
Error -> {Error, [Handle1]}
end
end).
current_virtual_offset(Ref) ->
with_handles([Ref], fun ([#handle { at_eof = true, is_write = true,
offset = Offset,
write_buffer_size = Size }]) ->
{ok, Offset + Size};
([#handle { offset = Offset }]) ->
{ok, Offset}
end).
current_raw_offset(Ref) ->
with_handles([Ref], fun ([Handle]) -> {ok, Handle #handle.offset} end).
flush(Ref) ->
with_flushed_handles([Ref], fun ([Handle]) -> {ok, [Handle]} end).
copy(Src, Dest, Count) ->
with_flushed_handles(
[Src, Dest],
fun ([SHandle = #handle { is_read = true, hdl = SHdl, offset = SOffset },
DHandle = #handle { is_write = true, hdl = DHdl, offset = DOffset }]
) ->
case prim_file:copy(SHdl, DHdl, Count) of
{ok, Count1} = Result1 ->
{Result1,
[SHandle #handle { offset = SOffset + Count1 },
DHandle #handle { offset = DOffset + Count1,
is_dirty = true }]};
Error ->
{Error, [SHandle, DHandle]}
end;
(_Handles) ->
{error, incorrect_handle_modes}
end).
delete(Ref) ->
case erase({Ref, fhc_handle}) of
undefined ->
ok;
Handle = #handle { path = Path } ->
case hard_close(Handle #handle { is_dirty = false,
write_buffer = [] }) of
ok -> prim_file:delete(Path);
{Error, Handle1} -> put_handle(Ref, Handle1),
Error
end
end.
clear(Ref) ->
with_handles(
[Ref],
fun ([#handle { at_eof = true, write_buffer_size = 0, offset = 0 }]) ->
ok;
([Handle]) ->
case maybe_seek(bof, Handle#handle{write_buffer = [],
write_buffer_size = 0}) of
{{ok, 0}, Handle1 = #handle { hdl = Hdl }} ->
case prim_file:truncate(Hdl) of
ok -> {ok, [Handle1 #handle { at_eof = true }]};
Error -> {Error, [Handle1]}
end;
{{error, _} = Error, Handle1} ->
{Error, [Handle1]}
end
end).
set_maximum_since_use(MaximumAge) ->
Now = erlang:monotonic_time(),
case lists:foldl(
fun ({{Ref, fhc_handle},
Handle = #handle { hdl = Hdl, last_used_at = Then }}, Rep) ->
case Hdl =/= closed andalso
erlang:convert_time_unit(Now - Then,
native,
micro_seconds)
>= MaximumAge of
true -> soft_close(Ref, Handle) orelse Rep;
false -> Rep
end;
(_KeyValuePair, Rep) ->
Rep
end, false, get()) of
false -> age_tree_change(), ok;
true -> ok
end.
obtain() -> obtain(1).
set_reservation() -> set_reservation(1).
release() -> release(1).
release_reservation() -> release_reservation(file).
transfer(Pid) -> transfer(Pid, 1).
obtain(Count) -> obtain(Count, socket).
set_reservation(Count) -> set_reservation(Count, file).
release(Count) -> release(Count, socket).
with_handle(Fun) ->
with_handle(1, Fun).
with_handle(N, Fun) ->
ok = obtain(N, file),
try Fun()
after ok = release(N, file)
end.
obtain(Count, Type) when Count > 0 ->
%% If the FHC isn't running, obtains succeed immediately.
case whereis(?SERVER) of
undefined -> ok;
_ -> gen_server2:call(
?SERVER, {obtain, Count, Type, self()}, infinity)
end.
set_reservation(Count, Type) when Count > 0 ->
%% If the FHC isn't running, reserve succeed immediately.
case whereis(?SERVER) of
undefined -> ok;
_ -> gen_server2:cast(?SERVER, {set_reservation, Count, Type, self()})
end.
release(Count, Type) when Count > 0 ->
gen_server2:cast(?SERVER, {release, Count, Type, self()}).
release_reservation(Type) ->
gen_server2:cast(?SERVER, {release_reservation, Type, self()}).
transfer(Pid, Count) when Count > 0 ->
gen_server2:cast(?SERVER, {transfer, Count, self(), Pid}).
set_limit(Limit) ->
gen_server2:call(?SERVER, {set_limit, Limit}, infinity).
get_limit() ->
gen_server2:call(?SERVER, get_limit, infinity).
info_keys() -> ?INFO_KEYS.
info() -> info(?INFO_KEYS).
info(Items) -> gen_server2:call(?SERVER, {info, Items}, infinity).
clear_read_cache() ->
gen_server2:cast(?SERVER, clear_read_cache).
clear_process_read_cache() ->
[
begin
Handle1 = reset_read_buffer(Handle),
put({Ref, fhc_handle}, Handle1)
end ||
{{Ref, fhc_handle}, Handle} <- get(),
size(Handle#handle.read_buffer) > 0
].
%%----------------------------------------------------------------------------
%% Internal functions
%%----------------------------------------------------------------------------
prim_file_read(Hdl, Size) ->
file_handle_cache_stats:update(
io_read, Size, fun() -> prim_file:read(Hdl, Size) end).
prim_file_write(Hdl, Bytes) ->
file_handle_cache_stats:update(
io_write, iolist_size(Bytes), fun() -> prim_file:write(Hdl, Bytes) end).
prim_file_sync(Hdl) ->
file_handle_cache_stats:update(io_sync, fun() -> prim_file:sync(Hdl) end).
prim_file_position(Hdl, NewOffset) ->
file_handle_cache_stats:update(
io_seek, fun() -> prim_file:position(Hdl, NewOffset) end).
is_reader(Mode) -> lists:member(read, Mode).
is_writer(Mode) -> lists:member(write, Mode).
append_to_write(Mode) ->
case lists:member(append, Mode) of
true -> [write | Mode -- [append, write]];
false -> Mode
end.
with_handles(Refs, Fun) ->
with_handles(Refs, reset, Fun).
with_handles(Refs, ReadBuffer, Fun) ->
case get_or_reopen_timed([{Ref, reopen} || Ref <- Refs]) of
{ok, Handles0} ->
Handles = case ReadBuffer of
reset -> [reset_read_buffer(H) || H <- Handles0];
keep -> Handles0
end,
case Fun(Handles) of
{Result, Handles1} when is_list(Handles1) ->
_ = lists:zipwith(fun put_handle/2, Refs, Handles1),
Result;
Result ->
Result
end;
Error ->
Error
end.
with_flushed_handles(Refs, Fun) ->
with_flushed_handles(Refs, reset, Fun).
with_flushed_handles(Refs, ReadBuffer, Fun) ->
with_handles(
Refs, ReadBuffer,
fun (Handles) ->
case lists:foldl(
fun (Handle, {ok, HandlesAcc}) ->
{Res, Handle1} = write_buffer(Handle),
{Res, [Handle1 | HandlesAcc]};
(Handle, {Error, HandlesAcc}) ->
{Error, [Handle | HandlesAcc]}
end, {ok, []}, Handles) of
{ok, Handles1} ->
Fun(lists:reverse(Handles1));
{Error, Handles1} ->
{Error, lists:reverse(Handles1)}
end
end).
get_or_reopen_timed(RefNewOrReopens) ->
file_handle_cache_stats:update(
io_file_handle_open_attempt, fun() -> get_or_reopen(RefNewOrReopens) end).
get_or_reopen(RefNewOrReopens) ->
case partition_handles(RefNewOrReopens) of
{OpenHdls, []} ->
{ok, [Handle || {_Ref, Handle} <- OpenHdls]};
{OpenHdls, ClosedHdls} ->
Oldest = oldest(get_age_tree(),
fun () -> erlang:monotonic_time() end),
case gen_server2:call(?SERVER, {open, self(), length(ClosedHdls),
Oldest}, infinity) of
ok ->
case reopen(ClosedHdls) of
{ok, RefHdls} -> sort_handles(RefNewOrReopens,
OpenHdls, RefHdls, []);
Error -> Error
end;
close ->
[soft_close(Ref, Handle) ||
{{Ref, fhc_handle}, Handle = #handle { hdl = Hdl }} <-
get(),
Hdl =/= closed],
get_or_reopen(RefNewOrReopens)
end
end.
reopen(ClosedHdls) -> reopen(ClosedHdls, get_age_tree(), []).
reopen([], Tree, RefHdls) ->
put_age_tree(Tree),
{ok, lists:reverse(RefHdls)};
reopen([{Ref, NewOrReopen, Handle = #handle { hdl = closed,
path = Path,
mode = Mode0,
offset = Offset,
last_used_at = undefined }} |
RefNewOrReopenHdls] = ToOpen, Tree, RefHdls) ->
Mode = case NewOrReopen of
new -> Mode0;
reopen -> file_handle_cache_stats:update(io_reopen),
[read | Mode0]
end,
case prim_file:open(Path, Mode) of
{ok, Hdl} ->
Now = erlang:monotonic_time(),
{{ok, _Offset}, Handle1} =
maybe_seek(Offset, reset_read_buffer(
Handle#handle{hdl = Hdl,
offset = 0,
last_used_at = Now})),
put({Ref, fhc_handle}, Handle1),
reopen(RefNewOrReopenHdls, gb_trees:insert({Now, Ref}, true, Tree),
[{Ref, Handle1} | RefHdls]);
Error ->
%% NB: none of the handles in ToOpen are in the age tree
Oldest = oldest(Tree, fun () -> undefined end),
[gen_server2:cast(?SERVER, {close, self(), Oldest}) || _ <- ToOpen],
put_age_tree(Tree),
Error
end.
partition_handles(RefNewOrReopens) ->
lists:foldr(
fun ({Ref, NewOrReopen}, {Open, Closed}) ->
case get({Ref, fhc_handle}) of
#handle { hdl = closed } = Handle ->
{Open, [{Ref, NewOrReopen, Handle} | Closed]};
#handle {} = Handle ->
{[{Ref, Handle} | Open], Closed}
end
end, {[], []}, RefNewOrReopens).
sort_handles([], [], [], Acc) ->
{ok, lists:reverse(Acc)};
sort_handles([{Ref, _} | RefHdls], [{Ref, Handle} | RefHdlsA], RefHdlsB, Acc) ->
sort_handles(RefHdls, RefHdlsA, RefHdlsB, [Handle | Acc]);
sort_handles([{Ref, _} | RefHdls], RefHdlsA, [{Ref, Handle} | RefHdlsB], Acc) ->
sort_handles(RefHdls, RefHdlsA, RefHdlsB, [Handle | Acc]).
put_handle(Ref, Handle = #handle { last_used_at = Then }) ->
Now = erlang:monotonic_time(),
age_tree_update(Then, Now, Ref),
put({Ref, fhc_handle}, Handle #handle { last_used_at = Now }).
with_age_tree(Fun) -> put_age_tree(Fun(get_age_tree())).
get_age_tree() ->
case get(fhc_age_tree) of
undefined -> gb_trees:empty();
AgeTree -> AgeTree
end.
put_age_tree(Tree) -> put(fhc_age_tree, Tree).
age_tree_update(Then, Now, Ref) ->
with_age_tree(
fun (Tree) ->
gb_trees:insert({Now, Ref}, true,
gb_trees:delete_any({Then, Ref}, Tree))
end).
age_tree_delete(Then, Ref) ->
with_age_tree(
fun (Tree) ->
Tree1 = gb_trees:delete_any({Then, Ref}, Tree),
Oldest = oldest(Tree1, fun () -> undefined end),
gen_server2:cast(?SERVER, {close, self(), Oldest}),
Tree1
end).
age_tree_change() ->
with_age_tree(
fun (Tree) ->
case gb_trees:is_empty(Tree) of
true -> Tree;
false -> {{Oldest, _Ref}, _} = gb_trees:smallest(Tree),
gen_server2:cast(?SERVER, {update, self(), Oldest}),
Tree
end
end).
oldest(Tree, DefaultFun) ->
case gb_trees:is_empty(Tree) of
true -> DefaultFun();
false -> {{Oldest, _Ref}, _} = gb_trees:smallest(Tree),
Oldest
end.
new_closed_handle(Path, Mode, Options) ->
WriteBufferSize =
case application:get_env(rabbit, fhc_write_buffering) of
{ok, false} -> 0;
{ok, true} ->
case proplists:get_value(write_buffer, Options, unbuffered) of
unbuffered -> 0;
infinity -> infinity;
N when is_integer(N) -> N
end
end,
ReadBufferSize =
case application:get_env(rabbit, fhc_read_buffering) of
{ok, false} -> 0;
{ok, true} ->
case proplists:get_value(read_buffer, Options, unbuffered) of
unbuffered -> 0;
N2 when is_integer(N2) -> N2
end
end,
Ref = make_ref(),
put({Ref, fhc_handle}, #handle { hdl = closed,
ref = Ref,
offset = 0,
is_dirty = false,
write_buffer_size = 0,
write_buffer_size_limit = WriteBufferSize,
write_buffer = [],
read_buffer = <<>>,
read_buffer_pos = 0,
read_buffer_rem = 0,
read_buffer_size = ReadBufferSize,
read_buffer_size_limit = ReadBufferSize,
read_buffer_usage = 0,
at_eof = false,
path = Path,
mode = Mode,
options = Options,
is_write = is_writer(Mode),
is_read = is_reader(Mode),
last_used_at = undefined }),
{ok, Ref}.
soft_close(Ref, Handle) ->
{Res, Handle1} = soft_close(Handle),
case Res of
ok -> put({Ref, fhc_handle}, Handle1),
true;
_ -> put_handle(Ref, Handle1),
false
end.
soft_close(Handle = #handle { hdl = closed }) ->
{ok, Handle};
soft_close(Handle) ->
case write_buffer(Handle) of
{ok, #handle { hdl = Hdl,
ref = Ref,
is_dirty = IsDirty,
last_used_at = Then } = Handle1 } ->
ok = case IsDirty of
true -> prim_file_sync(Hdl);
false -> ok
end,
ok = prim_file:close(Hdl),
age_tree_delete(Then, Ref),
{ok, Handle1 #handle { hdl = closed,
is_dirty = false,
last_used_at = undefined }};
{_Error, _Handle} = Result ->
Result
end.
hard_close(Handle) ->
case soft_close(Handle) of
{ok, #handle { path = Path,
is_read = IsReader, is_write = IsWriter }} ->
#file { reader_count = RCount, has_writer = HasWriter } = File =
get({Path, fhc_file}),
RCount1 = case IsReader of
true -> RCount - 1;
false -> RCount
end,
HasWriter1 = HasWriter andalso not IsWriter,
case RCount1 =:= 0 andalso not HasWriter1 of
true -> erase({Path, fhc_file});
false -> put({Path, fhc_file},
File #file { reader_count = RCount1,
has_writer = HasWriter1 })
end,
ok;
{_Error, _Handle} = Result ->
Result
end.
maybe_seek(New, Handle = #handle{hdl = Hdl,
offset = Old,
read_buffer_pos = BufPos,
read_buffer_rem = BufRem,
at_eof = AtEoF}) ->
{AtEoF1, NeedsSeek} = needs_seek(AtEoF, Old, New),
case NeedsSeek of
true when is_number(New) andalso
((New >= Old andalso New =< BufRem + Old)
orelse (New < Old andalso Old - New =< BufPos)) ->
Diff = New - Old,
{{ok, New}, Handle#handle{offset = New,
at_eof = AtEoF1,
read_buffer_pos = BufPos + Diff,
read_buffer_rem = BufRem - Diff}};
true ->
case prim_file_position(Hdl, New) of
{ok, Offset1} = Result ->
{Result, reset_read_buffer(Handle#handle{offset = Offset1,
at_eof = AtEoF1})};
{error, _} = Error ->
{Error, Handle}
end;
false ->
{{ok, Old}, Handle}
end.
needs_seek( AtEoF, _CurOffset, cur ) -> {AtEoF, false};
needs_seek( AtEoF, _CurOffset, {cur, 0}) -> {AtEoF, false};
needs_seek( true, _CurOffset, eof ) -> {true , false};
needs_seek( true, _CurOffset, {eof, 0}) -> {true , false};
needs_seek( false, _CurOffset, eof ) -> {true , true };
needs_seek( false, _CurOffset, {eof, 0}) -> {true , true };
needs_seek( AtEoF, 0, bof ) -> {AtEoF, false};
needs_seek( AtEoF, 0, {bof, 0}) -> {AtEoF, false};
needs_seek( AtEoF, CurOffset, CurOffset) -> {AtEoF, false};
needs_seek( true, CurOffset, {bof, DesiredOffset})
when DesiredOffset >= CurOffset ->
{true, true};
needs_seek( true, _CurOffset, {cur, DesiredOffset})
when DesiredOffset > 0 ->
{true, true};
needs_seek( true, CurOffset, DesiredOffset) %% same as {bof, DO}
when is_integer(DesiredOffset) andalso DesiredOffset >= CurOffset ->
{true, true};
%% because we can't really track size, we could well end up at EoF and not know
needs_seek(_AtEoF, _CurOffset, _DesiredOffset) ->
{false, true}.
write_buffer(Handle = #handle { write_buffer = [] }) ->
{ok, Handle};
write_buffer(Handle = #handle { hdl = Hdl, offset = Offset,
write_buffer = WriteBuffer,
write_buffer_size = DataSize,
at_eof = true }) ->
case prim_file_write(Hdl, lists:reverse(WriteBuffer)) of
ok ->
Offset1 = Offset + DataSize,
{ok, Handle #handle { offset = Offset1, is_dirty = true,
write_buffer = [], write_buffer_size = 0 }};
{error, _} = Error ->
{Error, Handle}
end.
reset_read_buffer(Handle) ->
Handle#handle{read_buffer = <<>>,
read_buffer_pos = 0,
read_buffer_rem = 0}.
%% We come into this function whenever there's been a miss while
%% reading from the buffer - but note that when we first start with a
%% new handle the usage will be 0. Therefore in that case don't take
%% it as meaning the buffer was useless, we just haven't done anything
%% yet!
tune_read_buffer_limit(Handle = #handle{read_buffer_usage = 0}, _Count) ->
Handle;
%% In this head we have been using the buffer but now tried to read
%% outside it. So how did we do? If we used less than the size of the
%% buffer, make the new buffer the size of what we used before, but
%% add one byte (so that next time we can distinguish between getting
%% the buffer size exactly right and actually wanting more). If we
%% read 100% of what we had, then double it for next time, up to the
%% limit that was set when we were created.
tune_read_buffer_limit(Handle = #handle{read_buffer = Buf,
read_buffer_usage = Usg,
read_buffer_size = Sz,
read_buffer_size_limit = Lim}, Count) ->
%% If the buffer is <<>> then we are in the first read after a
%% reset, the read_buffer_usage is the total usage from before the
%% reset. But otherwise we are in a read which read off the end of
%% the buffer, so really the size of this read should be included
%% in the usage.
TotalUsg = case Buf of
<<>> -> Usg;
_ -> Usg + Count
end,
Handle#handle{read_buffer_usage = 0,
read_buffer_size = erlang:min(case TotalUsg < Sz of
true -> Usg + 1;
false -> Usg * 2
end, Lim)}.
maybe_reduce_read_cache(SparedRefs) ->
case vm_memory_monitor:get_memory_use(bytes) of
{_, infinity} -> ok;
{MemUse, MemLimit} when MemUse < MemLimit -> ok;
{MemUse, MemLimit} -> reduce_read_cache(
(MemUse - MemLimit) * 2,
SparedRefs)
end.
reduce_read_cache(MemToFree, SparedRefs) ->
Handles = lists:sort(
fun({_, H1}, {_, H2}) -> H1 < H2 end,
[{R, H} || {{R, fhc_handle}, H} <- get(),
not lists:member(R, SparedRefs)
andalso size(H#handle.read_buffer) > 0]),
FreedMem = lists:foldl(
fun
(_, Freed) when Freed >= MemToFree ->
Freed;
({Ref, #handle{read_buffer = Buf} = Handle}, Freed) ->
Handle1 = reset_read_buffer(Handle),
put({Ref, fhc_handle}, Handle1),
Freed + size(Buf)
end, 0, Handles),
if
FreedMem < MemToFree andalso SparedRefs =/= [] ->
reduce_read_cache(MemToFree - FreedMem, []);
true ->
ok
end.
infos(Items, State) -> [{Item, i(Item, State)} || Item <- Items].
i(total_limit, #fhc_state{limit = Limit}) -> Limit;
i(total_used, State) -> used(State);
i(sockets_limit, #fhc_state{obtain_limit = Limit}) -> Limit;
i(sockets_used, #fhc_state{obtain_count_socket = Count,
reserve_count_socket = RCount}) -> Count + RCount;
i(files_reserved, #fhc_state{reserve_count_file = RCount}) -> RCount;
i(Item, _) -> throw({bad_argument, Item}).
used(#fhc_state{open_count = C1,
obtain_count_socket = C2,
obtain_count_file = C3,
reserve_count_socket = C4,
reserve_count_file = C5}) -> C1 + C2 + C3 + C4 + C5.
%%----------------------------------------------------------------------------
%% gen_server2 callbacks
%%----------------------------------------------------------------------------
init([AlarmSet, AlarmClear]) ->
_ = file_handle_cache_stats:init(),
Limit = case application:get_env(file_handles_high_watermark) of
{ok, Watermark} when (is_integer(Watermark) andalso
Watermark > 0) ->
Watermark;
_ ->
case ulimit() of
unknown -> ?FILE_HANDLES_LIMIT_OTHER;
Lim -> lists:max([2, Lim - ?RESERVED_FOR_OTHERS])
end
end,
ObtainLimit = obtain_limit(Limit),
error_logger:info_msg("Limiting to approx ~p file handles (~p sockets)~n",
[Limit, ObtainLimit]),
Clients = ets:new(?CLIENT_ETS_TABLE, [set, private, {keypos, #cstate.pid}]),
Elders = ets:new(?ELDERS_ETS_TABLE, [set, private]),
{ok, #fhc_state { elders = Elders,
limit = Limit,
open_count = 0,
open_pending = pending_new(),
obtain_limit = ObtainLimit,
obtain_count_file = 0,
obtain_pending_file = pending_new(),
obtain_count_socket = 0,
obtain_pending_socket = pending_new(),
clients = Clients,
timer_ref = undefined,
alarm_set = AlarmSet,
alarm_clear = AlarmClear,
reserve_count_file = 0,
reserve_count_socket = 0 }}.
prioritise_cast(Msg, _Len, _State) ->
case Msg of
{release, _, _, _} -> 5;
{release_reservation, _, _, _} -> 5;
_ -> 0
end.
handle_call({open, Pid, Requested, EldestUnusedSince}, From,
State = #fhc_state { open_count = Count,
open_pending = Pending,
elders = Elders,
clients = Clients })
when EldestUnusedSince =/= undefined ->
true = ets:insert(Elders, {Pid, EldestUnusedSince}),
Item = #pending { kind = open,
pid = Pid,
requested = Requested,
from = From },
ok = track_client(Pid, Clients),
case needs_reduce(State #fhc_state { open_count = Count + Requested }) of
true -> case ets:lookup(Clients, Pid) of
[#cstate { opened = 0 }] ->
true = ets:update_element(
Clients, Pid, {#cstate.blocked, true}),
{noreply,
reduce(State #fhc_state {
open_pending = pending_in(Item, Pending) })};
[#cstate { opened = Opened }] ->
true = ets:update_element(
Clients, Pid,
{#cstate.pending_closes, Opened}),
{reply, close, State}
end;
false -> {noreply, run_pending_item(Item, State)}
end;
handle_call({obtain, N, Type, Pid}, From,
State = #fhc_state { clients = Clients }) ->
Count = obtain_state(Type, count, State),
Pending = obtain_state(Type, pending, State),
ok = track_client(Pid, Clients),
Item = #pending { kind = {obtain, Type}, pid = Pid,
requested = N, from = From },
Enqueue = fun () ->
true = ets:update_element(Clients, Pid,
{#cstate.blocked, true}),
set_obtain_state(Type, pending,
pending_in(Item, Pending), State)
end,
{noreply,
case obtain_limit_reached(Type, State) of
true -> Enqueue();
false -> case needs_reduce(
set_obtain_state(Type, count, Count + 1, State)) of
true -> reduce(Enqueue());
false -> adjust_alarm(
State, run_pending_item(Item, State))
end
end};
handle_call({set_limit, Limit}, _From, State) ->
{reply, ok, adjust_alarm(
State, maybe_reduce(
process_pending(
State #fhc_state {
limit = Limit,
obtain_limit = obtain_limit(Limit) })))};
handle_call(get_limit, _From, State = #fhc_state { limit = Limit }) ->
{reply, Limit, State};
handle_call({info, Items}, _From, State) ->
{reply, infos(Items, State), State}.
handle_cast({register_callback, Pid, MFA},
State = #fhc_state { clients = Clients }) ->
ok = track_client(Pid, Clients),
true = ets:update_element(Clients, Pid, {#cstate.callback, MFA}),
{noreply, State};
handle_cast({update, Pid, EldestUnusedSince},
State = #fhc_state { elders = Elders })
when EldestUnusedSince =/= undefined ->
true = ets:insert(Elders, {Pid, EldestUnusedSince}),
%% don't call maybe_reduce from here otherwise we can create a
%% storm of messages
{noreply, State};
handle_cast({release, N, Type, Pid}, State) ->
State1 = process_pending(update_counts({obtain, Type}, Pid, -N, State)),
{noreply, adjust_alarm(State, State1)};
handle_cast({close, Pid, EldestUnusedSince},
State = #fhc_state { elders = Elders, clients = Clients }) ->
true = case EldestUnusedSince of
undefined -> ets:delete(Elders, Pid);
_ -> ets:insert(Elders, {Pid, EldestUnusedSince})
end,
ets:update_counter(Clients, Pid, {#cstate.pending_closes, -1, 0, 0}),
{noreply, adjust_alarm(State, process_pending(
update_counts(open, Pid, -1, State)))};
handle_cast({transfer, N, FromPid, ToPid}, State) ->
ok = track_client(ToPid, State#fhc_state.clients),
{noreply, process_pending(
update_counts({obtain, socket}, ToPid, +N,
update_counts({obtain, socket}, FromPid, -N,
State)))};
handle_cast(clear_read_cache, State) ->
_ = clear_process_read_cache(),
{noreply, State};
handle_cast({release_reservation, Type, Pid}, State) ->
State1 = process_pending(update_counts({reserve, Type}, Pid, 0, State)),
{noreply, adjust_alarm(State, State1)};
handle_cast({set_reservation, N, Type, Pid},
State = #fhc_state { clients = Clients }) ->
ok = track_client(Pid, Clients),
NewState = process_pending(update_counts({reserve, Type}, Pid, N, State)),
{noreply, case needs_reduce(NewState) of
true -> reduce(NewState);
false -> adjust_alarm(State, NewState)
end}.
handle_info(check_counts, State) ->
{noreply, maybe_reduce(State #fhc_state { timer_ref = undefined })};
handle_info({'DOWN', _MRef, process, Pid, _Reason},
State = #fhc_state { elders = Elders,
open_count = OpenCount,
open_pending = OpenPending,
obtain_count_file = ObtainCountF,
obtain_count_socket = ObtainCountS,
obtain_pending_file = ObtainPendingF,
obtain_pending_socket = ObtainPendingS,
reserve_count_file = ReserveCountF,
reserve_count_socket = ReserveCountS,
clients = Clients }) ->
[#cstate { opened = Opened,
obtained_file = ObtainedFile,
obtained_socket = ObtainedSocket,
reserved_file = ReservedFile,
reserved_socket = ReservedSocket }] =
ets:lookup(Clients, Pid),
true = ets:delete(Clients, Pid),
true = ets:delete(Elders, Pid),
Fun = fun (#pending { pid = Pid1 }) -> Pid1 =/= Pid end,
State1 = process_pending(
State #fhc_state {
open_count = OpenCount - Opened,
open_pending = filter_pending(Fun, OpenPending),
obtain_count_file = ObtainCountF - ObtainedFile,
obtain_count_socket = ObtainCountS - ObtainedSocket,
obtain_pending_file = filter_pending(Fun, ObtainPendingF),
obtain_pending_socket = filter_pending(Fun, ObtainPendingS),
reserve_count_file = ReserveCountF - ReservedFile,
reserve_count_socket = ReserveCountS - ReservedSocket}),
{noreply, adjust_alarm(State, State1)}.
terminate(_Reason, State = #fhc_state { clients = Clients,
elders = Elders }) ->
ets:delete(Clients),
ets:delete(Elders),
State.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%----------------------------------------------------------------------------
%% pending queue abstraction helpers
%%----------------------------------------------------------------------------
queue_fold(Fun, Init, Q) ->
case queue:out(Q) of
{empty, _Q} -> Init;
{{value, V}, Q1} -> queue_fold(Fun, Fun(V, Init), Q1)
end.
filter_pending(Fun, {Count, Queue}) ->
{Delta, Queue1} =
queue_fold(
fun (Item = #pending { requested = Requested }, {DeltaN, QueueN}) ->
case Fun(Item) of
true -> {DeltaN, queue:in(Item, QueueN)};
false -> {DeltaN - Requested, QueueN}
end
end, {0, queue:new()}, Queue),
{Count + Delta, Queue1}.
pending_new() ->
{0, queue:new()}.
pending_in(Item = #pending { requested = Requested }, {Count, Queue}) ->
{Count + Requested, queue:in(Item, Queue)}.
pending_out({0, _Queue} = Pending) ->
{empty, Pending};
pending_out({N, Queue}) ->
{{value, #pending { requested = Requested }} = Result, Queue1} =
queue:out(Queue),
{Result, {N - Requested, Queue1}}.
pending_count({Count, _Queue}) ->
Count.
%%----------------------------------------------------------------------------
%% server helpers
%%----------------------------------------------------------------------------
obtain_limit(infinity) -> infinity;
obtain_limit(Limit) -> case ?OBTAIN_LIMIT(Limit) of
OLimit when OLimit < 0 -> 0;
OLimit -> OLimit
end.
obtain_limit_reached(socket, State) -> obtain_limit_reached(State);
obtain_limit_reached(file, State) -> needs_reduce(State).
obtain_limit_reached(#fhc_state{obtain_limit = Limit,
obtain_count_socket = Count,
reserve_count_socket = RCount}) ->
Limit =/= infinity andalso (RCount + Count) >= Limit.
obtain_state(file, count, #fhc_state{obtain_count_file = N}) -> N;
obtain_state(socket, count, #fhc_state{obtain_count_socket = N}) -> N;
obtain_state(file, pending, #fhc_state{obtain_pending_file = N}) -> N;
obtain_state(socket, pending, #fhc_state{obtain_pending_socket = N}) -> N.
set_obtain_state(file, count, N, S) -> S#fhc_state{obtain_count_file = N};
set_obtain_state(socket, count, N, S) -> S#fhc_state{obtain_count_socket = N};
set_obtain_state(file, pending, N, S) -> S#fhc_state{obtain_pending_file = N};
set_obtain_state(socket, pending, N, S) -> S#fhc_state{obtain_pending_socket = N}.
adjust_alarm(OldState = #fhc_state { alarm_set = AlarmSet,
alarm_clear = AlarmClear }, NewState) ->
case {obtain_limit_reached(OldState), obtain_limit_reached(NewState)} of
{false, true} -> AlarmSet({file_descriptor_limit, []});
{true, false} -> AlarmClear(file_descriptor_limit);
_ -> ok
end,
NewState.
process_pending(State = #fhc_state { limit = infinity }) ->
State;
process_pending(State) ->
process_open(process_obtain(socket, process_obtain(file, State))).
process_open(State = #fhc_state { limit = Limit,
open_pending = Pending}) ->
{Pending1, State1} = process_pending(Pending, Limit - used(State), State),
State1 #fhc_state { open_pending = Pending1 }.
process_obtain(socket, State = #fhc_state { limit = Limit,
obtain_limit = ObtainLimit,
open_count = OpenCount,
obtain_count_socket = ObtainCount,
obtain_pending_socket = Pending,
obtain_count_file = ObtainCountF,
reserve_count_file = ReserveCountF,
reserve_count_socket = ReserveCount}) ->
Quota = min(ObtainLimit - ObtainCount,
Limit - (OpenCount + ObtainCount + ObtainCountF + ReserveCount + ReserveCountF)),
{Pending1, State1} = process_pending(Pending, Quota, State),
State1#fhc_state{obtain_pending_socket = Pending1};
process_obtain(file, State = #fhc_state { limit = Limit,
open_count = OpenCount,
obtain_count_socket = ObtainCountS,
obtain_count_file = ObtainCountF,
obtain_pending_file = Pending,
reserve_count_file = ReserveCountF,
reserve_count_socket = ReserveCountS}) ->
Quota = Limit - (OpenCount + ObtainCountS + ObtainCountF + ReserveCountF + ReserveCountS),
{Pending1, State1} = process_pending(Pending, Quota, State),
State1#fhc_state{obtain_pending_file = Pending1}.
process_pending(Pending, Quota, State) when Quota =< 0 ->
{Pending, State};
process_pending(Pending, Quota, State) ->
case pending_out(Pending) of
{empty, _Pending} ->
{Pending, State};
{{value, #pending { requested = Requested }}, _Pending1}
when Requested > Quota ->
{Pending, State};
{{value, #pending { requested = Requested } = Item}, Pending1} ->
process_pending(Pending1, Quota - Requested,
run_pending_item(Item, State))
end.
run_pending_item(#pending { kind = Kind,
pid = Pid,
requested = Requested,
from = From },
State = #fhc_state { clients = Clients }) ->
gen_server2:reply(From, ok),
true = ets:update_element(Clients, Pid, {#cstate.blocked, false}),
update_counts(Kind, Pid, Requested, State).
update_counts(open, Pid, Delta,
State = #fhc_state { open_count = OpenCount,
clients = Clients }) ->
ets:update_counter(Clients, Pid, {#cstate.opened, Delta}),
State #fhc_state { open_count = OpenCount + Delta};
update_counts({obtain, file}, Pid, Delta,
State = #fhc_state {obtain_count_file = ObtainCountF,
clients = Clients }) ->
ets:update_counter(Clients, Pid, {#cstate.obtained_file, Delta}),
State #fhc_state { obtain_count_file = ObtainCountF + Delta};
update_counts({obtain, socket}, Pid, Delta,
State = #fhc_state {obtain_count_socket = ObtainCountS,
clients = Clients }) ->
ets:update_counter(Clients, Pid, {#cstate.obtained_socket, Delta}),
State #fhc_state { obtain_count_socket = ObtainCountS + Delta};
update_counts({reserve, file}, Pid, NewReservation,
State = #fhc_state {reserve_count_file = ReserveCountF,
clients = Clients }) ->
[#cstate{reserved_file = R}] = ets:lookup(Clients, Pid),
Delta = NewReservation - R,
ets:update_counter(Clients, Pid, {#cstate.reserved_file, Delta}),
State #fhc_state { reserve_count_file = ReserveCountF + Delta};
update_counts({reserve, socket}, Pid, NewReservation,
State = #fhc_state {reserve_count_socket = ReserveCountS,
clients = Clients }) ->
[#cstate{reserved_file = R}] = ets:lookup(Clients, Pid),
Delta = NewReservation - R,
ets:update_counter(Clients, Pid, {#cstate.reserved_socket, Delta}),
State #fhc_state { reserve_count_socket = ReserveCountS + Delta}.
maybe_reduce(State) ->
case needs_reduce(State) of
true -> reduce(State);
false -> State
end.
needs_reduce(#fhc_state { limit = Limit,
open_count = OpenCount,
open_pending = {OpenPending, _},
obtain_limit = ObtainLimit,
obtain_count_socket = ObtainCountS,
obtain_count_file = ObtainCountF,
obtain_pending_file = {ObtainPendingF, _},
obtain_pending_socket = {ObtainPendingS, _},
reserve_count_socket = ReserveCountS,
reserve_count_file = ReserveCountF}) ->
Limit =/= infinity
andalso (((OpenCount + ObtainCountS + ObtainCountF + ReserveCountS + ReserveCountF) > Limit)
orelse (OpenPending =/= 0)
orelse (ObtainPendingF =/= 0)
orelse (ObtainCountS < ObtainLimit
andalso (ObtainPendingS =/= 0))).
reduce(State = #fhc_state { open_pending = OpenPending,
obtain_pending_file = ObtainPendingFile,
obtain_pending_socket = ObtainPendingSocket,
elders = Elders,
clients = Clients,
timer_ref = TRef }) ->
Now = erlang:monotonic_time(),
{CStates, Sum, ClientCount} =
ets:foldl(fun ({Pid, Eldest}, {CStatesAcc, SumAcc, CountAcc} = Accs) ->
[#cstate { pending_closes = PendingCloses,
opened = Opened,
blocked = Blocked } = CState] =
ets:lookup(Clients, Pid),
TimeDiff = erlang:convert_time_unit(
Now - Eldest, native, micro_seconds),
case Blocked orelse PendingCloses =:= Opened of
true -> Accs;
false -> {[CState | CStatesAcc],
SumAcc + TimeDiff,
CountAcc + 1}
end
end, {[], 0, 0}, Elders),
case CStates of
[] -> ok;
_ -> case (Sum / ClientCount) -
(1000 * ?FILE_HANDLES_CHECK_INTERVAL) of
AverageAge when AverageAge > 0 ->
notify_age(CStates, AverageAge);
_ ->
notify_age0(Clients, CStates,
pending_count(OpenPending) +
pending_count(ObtainPendingFile) +
pending_count(ObtainPendingSocket))
end
end,
case TRef of
undefined -> TRef1 = erlang:send_after(
?FILE_HANDLES_CHECK_INTERVAL, ?SERVER,
check_counts),
State #fhc_state { timer_ref = TRef1 };
_ -> State
end.
notify_age(CStates, AverageAge) ->
lists:foreach(
fun (#cstate { callback = undefined }) -> ok;
(#cstate { callback = {M, F, A} }) -> apply(M, F, A ++ [AverageAge])
end, CStates).
notify_age0(Clients, CStates, Required) ->
case [CState || CState <- CStates, CState#cstate.callback =/= undefined] of
[] -> ok;
Notifications -> S = rand:uniform(length(Notifications)),
{L1, L2} = lists:split(S, Notifications),
notify(Clients, Required, L2 ++ L1)
end.
notify(_Clients, _Required, []) ->
ok;
notify(_Clients, Required, _Notifications) when Required =< 0 ->
ok;
notify(Clients, Required, [#cstate{ pid = Pid,
callback = {M, F, A},
opened = Opened } | Notifications]) ->
apply(M, F, A ++ [0]),
ets:update_element(Clients, Pid, {#cstate.pending_closes, Opened}),
notify(Clients, Required - Opened, Notifications).
track_client(Pid, Clients) ->
case ets:insert_new(Clients, #cstate { pid = Pid,
callback = undefined,
opened = 0,
obtained_file = 0,
obtained_socket = 0,
blocked = false,
pending_closes = 0,
reserved_file = 0,
reserved_socket = 0 }) of
true -> _MRef = erlang:monitor(process, Pid),
ok;
false -> ok
end.
%% To increase the number of file descriptors: on Windows set ERL_MAX_PORTS
%% environment variable, on Linux set `ulimit -n`.
ulimit() ->
IOStats = case erlang:system_info(check_io) of
[Val | _] when is_list(Val) -> Val;
Val when is_list(Val) -> Val;
_Other -> []
end,
case proplists:get_value(max_fds, IOStats) of
MaxFds when is_integer(MaxFds) andalso MaxFds > 1 ->
case os:type() of
{win32, _OsName} ->
%% On Windows max_fds is twice the number of open files:
%% https://github.com/yrashk/erlang/blob/e1282325ed75e52a98d5/erts/emulator/sys/win32/sys.c#L2459-2466
MaxFds div 2;
_Any ->
%% For other operating systems trust Erlang.
MaxFds
end;
_ ->
unknown
end. | erlang_server/_build/default/lib/rabbit_common/src/file_handle_cache.erl | 0.66072 | 0.528229 | file_handle_cache.erl | starcoder |
%%--------------------------------------------------------------------
%% Copyright (c) 2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(esockd_udp_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
all() -> esockd_ct:all(?MODULE).
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(_TestCase, Config) ->
Config.
%%--------------------------------------------------------------------
%% Test cases for UDP Server
%%--------------------------------------------------------------------
t_udp_server(_) ->
with_udp_server(
fun(_Srv, Port) ->
{ok, Sock} = gen_udp:open(0, [binary, {active, false}]),
ok = udp_send_and_recv(Sock, Port, <<"hello">>),
ok = udp_send_and_recv(Sock, Port, <<"world">>)
end).
t_count_peers(_) ->
with_udp_server(
fun(Srv, Port) ->
{ok, Sock1} = gen_udp:open(0, [binary, {active, false}]),
ok = udp_send_and_recv(Sock1, Port, <<"hello">>),
{ok, Sock2} = gen_udp:open(0, [binary, {active, false}]),
ok = udp_send_and_recv(Sock2, Port, <<"world">>),
?assertEqual(2, esockd_udp:count_peers(Srv))
end).
t_peer_down(_) ->
with_udp_server(
fun(Srv, Port) ->
{ok, Sock} = gen_udp:open(0, [binary, {active, false}]),
ok = udp_send_and_recv(Sock, Port, <<"hello">>),
?assertEqual(1, esockd_udp:count_peers(Srv)),
ok = gen_udp:send(Sock, {127,0,0,1}, Port, <<"stop">>),
timer:sleep(100),
?assertEqual(0, esockd_udp:count_peers(Srv))
end).
udp_send_and_recv(Sock, Port, Data) ->
ok = gen_udp:send(Sock, {127,0,0,1}, Port, Data),
{ok, {_Addr, Port, Data}} = gen_udp:recv(Sock, 0),
ok.
with_udp_server(TestFun) ->
MFA = {?MODULE, udp_echo_init, []},
{ok, Srv} = esockd_udp:server(test, {{127,0,0,1}, 6000}, [], MFA),
TestFun(Srv, 6000),
ok = esockd_udp:stop(Srv).
udp_echo_init(Transport, Peer) ->
{ok, spawn(fun() -> udp_echo_loop(Transport, Peer) end)}.
udp_echo_loop(Transport, Peer) ->
receive
{datagram, From, <<"stop">>} ->
exit(normal);
{datagram, From, Packet} ->
From ! {datagram, Peer, Packet},
udp_echo_loop(Transport, Peer)
end.
t_handle_call(_) ->
{reply, ignore, state} = esockd_udp:handle_call(req, from, state).
t_handle_cast(_) ->
{noreply, state} = esockd_udp:handle_cast(msg, state).
t_handle_info(_) ->
{noreply, state} = esockd_udp:handle_info(info, state).
t_code_change(_) ->
{ok, state} = esockd_udp:code_change(oldvsn, state, extra). | test/esockd_udp_SUITE.erl | 0.511229 | 0.432723 | esockd_udp_SUITE.erl | starcoder |
-module(intcode_io).
-export([
push/2, poll/1, poll_or_notify/2, as_list/1
]).
-export_type([
intcode_io/0
]).
-type intcode_io() :: tuple().
-callback poll(Reference) -> {intcode:value(), Reference}
when Reference :: intcode_io().
-callback push(Reference, intcode:value()) -> Reference
when Reference :: intcode_io().
-callback poll_or_notify(Reference, fun(() -> any())) -> {intcode:value(), Reference} | nil
when Reference :: intcode_io().
-callback as_list(Reference) -> list(intcode:value())
when Reference :: intcode_io().
get_module(Reference) ->
element(1, Reference).
%% @doc Pushes `Value' onto the queue described by `Descriptor'.
%%
%% Calls the `push/2' function on the module that is defined by the first
%% element of the tuple of the first parameter with the same parameters that
%% are passed to this function.
-spec push(Reference, Value :: intcode:value()) -> Reference when Reference :: intcode_io().
push(Reference, Value) ->
Module = get_module(Reference),
Module:push(Reference, Value).
%% @doc Polls a value from the queue described by `Descriptor'.
%%
%% Calls the `poll/1' function on the module that is defined by the first
%% element of the tuple of the first parameter with the same parameter that
%% is passed to this function.
-spec poll(Reference) -> nil | {intcode:value(), Reference} when Reference :: intcode_io().
poll(Reference) ->
Module = get_module(Reference),
Module:poll(Reference).
%% @doc Attempts to poll a value from the queue described by `Descriptor', but
%% if that fails, calls the `Callback' function once a value becomes available.
%%
%% Calls the `async_poll/2' function on the module that is defined by the first
%% element of the tuple of the first parameter with the same parameters that
%% are passed to this function.
-spec poll_or_notify(Reference, Callback :: fun(() -> any())) -> {intcode:value() | wait, Reference} when Reference :: intcode_io().
poll_or_notify(Reference, Callback) ->
Module = get_module(Reference),
Module:poll_or_notify(Reference, Callback).
%% @doc Converts the queue described by `Descriptor' into a list.
%%
%% Calls the `as_list/1' function on the module that is defined by the first
%% element of the tuple of the first parameter with the same parameter that
%% is passed to this function.
-spec as_list(Reference :: intcode_io()) -> list(intcode:value()).
as_list(Reference) ->
Module = get_module(Reference),
Module:as_list(Reference). | src/intcode/intcode_io.erl | 0.562177 | 0.474022 | intcode_io.erl | starcoder |
%% vim: set ai et sw=4 sts=4:
%% See LICENSE for licensing information.
-module(solarized_binary_diff).
-export([ diff/2
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
% A diff implementation based on:
% - https://neil.fraser.name/writing/diff/
%
% More complete implementations already exist:
% - https://github.com/mmzeeman/diffy
% - https://github.com/tomas-abrahamsson/tdiff/
%
% This is a new implementation because:
% - the output needs to be matched to solarized_diff's needs,
% - we can give up early and still get good results for solarized_eunit,
% - we want binary and list implementation that can have different trade offs.
%=======================================================================
% Compare two binaries (Left, Right)
% return pair of {Left, Right} difference lists
% where the lists are in the form
% DiffList = [Same0, Diff1, Same1, Diff2, Same2, ...]
% and
% SameX alternates with DiffX
% and
% SameX & DiffX are sub-binaries of the initial binaries
diff(Same, Same) ->
{[Same], [Same]};
diff(Left, Right) ->
common_prefix(Left, Right).
%=======================================================================
% used in recursive calls to diff()
% ASSUMES:
% - no common prefix or suffix
% - head of Lt & Rt is a *same* binary
% - returns difference lists with a *diff* head
diff(L, R, Lt, Rt) ->
single_edit(L, R, Lt, Rt).
%=======================================================================
common_prefix(Left, Right) ->
case binary:longest_common_prefix([Left, Right]) of
0 ->
common_prefix_next(Left, Right, <<>>);
N ->
<<Prefix:N/binary, L/binary>> = Left,
<<_:N/binary, R/binary>> = Right,
common_prefix_next(L, R, Prefix)
end.
%-----------------------------------------------------------------------
common_prefix_next(L, R, Prefix) ->
{Lt, Rt} = common_suffix(L, R),
{[Prefix | Lt], [Prefix | Rt]}.
%=======================================================================
common_suffix(Left, Right) ->
case binary:longest_common_suffix([Left, Right]) of
0 ->
single_edit(Left, Right, [], []);
N ->
Ln = size(Left) - N,
<<Lm:Ln/binary, Suffix:N/binary>> = Left,
Rn = size(Right) - N,
<<Rm:Rn/binary, _/binary>> = Right,
single_edit(Lm, Rm, [Suffix], [Suffix])
end.
%=======================================================================
single_edit(L, R, Lt, Rt) when L =:= <<>> orelse R =:= <<>> ->
{[L | Lt], [R | Rt]};
single_edit(L, R, Lt, Rt) ->
case size(L) < size(R) of
true ->
two_edits(L, R, Lt, Rt);
false ->
{Rs, Ls} = two_edits(R, L, Rt, Lt),
{Ls, Rs}
end.
%=======================================================================
% L is smaller than R
two_edits(L, R, Lt, Rt) ->
case binary:match(R, L) of
{Start, Length} ->
<<Before:Start/binary, _:Length/binary, After/binary>> = R,
{[<<>>, L, <<>> | Lt], [Before, L, After | Rt]};
nomatch when size(L) =:= 1 ->
{[L | Lt], [R | Rt]};
nomatch ->
half_match(L, R, Lt, Rt)
end.
%=======================================================================
% L is smaller than R
half_match(L, R, Lt, Rt)
when size(R) < 10 orelse
size(R) > (size(L) * 2) ->
% not worth it
bisect(L, R, Lt, Rt);
half_match(L, R, Lt, Rt) ->
Rn = size(R),
A = half_match_seed(L, R, ceil(Rn / 4), floor(Rn / 4)),
B = half_match_seed(L, R, ceil(Rn / 2), floor(Rn / 4)),
case half_match_best(A, B) of
nomatch ->
bisect(L, R, Lt, Rt);
{Mn, Li, Ri} ->
<<Lpre:Li/binary, M:Mn/binary, Lpost/binary>> = L,
<<Rpre:Ri/binary, _:Mn/binary, Rpost/binary>> = R,
{La, Ra} = diff(Lpost, Rpost, Lt, Rt),
diff(Lpre, Rpre, [M | La], [M | Ra])
end.
%-----------------------------------------------------------------------
half_match_seed(L, R, Start, Length) when Length > 0 ->
<<_:Start/binary, Seed:Length/binary, _/binary>> = R,
half_match_slide(L, R, Seed, Start, Length, 0, {0, 0, 0}).
%-----------------------------------------------------------------------
half_match_slide(L, R, S, Si, Sn, Li, B = {Bn, _, _}) ->
case binary:match(L, S, [{scope, {Li, size(L) - Li}}]) of
nomatch ->
case Bn >= size(R) div 2 of
true ->
B;
false ->
{0, 0, 0}
end;
{Mi, _} ->
<<Lpre:Mi/binary, _:Sn/binary, Lpost/binary>> = L,
<<Rpre:Si/binary, _:Sn/binary, Rpost/binary>> = R,
Npre = binary:longest_common_suffix([Lpre, Rpre]),
Npost = binary:longest_common_prefix([Lpost, Rpost]),
Mn = Npre + Sn + Npost,
case Mn > Bn of
true ->
Best = {Mn, Mi - Npre, Si - Npre},
half_match_slide(L, R, S, Si, Sn, Mi + 1, Best);
false ->
half_match_slide(L, R, S, Si, Sn, Mi + 1, B)
end
end.
%-----------------------------------------------------------------------
half_match_best({0, _, _}, {0, _, _}) ->
nomatch;
half_match_best({0, _, _}, Best) ->
Best;
half_match_best(Best, {0, _, _}) ->
Best;
half_match_best(Best = {B, _, _}, {A, _, _}) when B > A ->
Best;
half_match_best(_, Best) ->
Best.
%=======================================================================
bisect(L, R, Lt, Rt) ->
case bisect(L, R) of
give_up ->
{[L | Lt], [R | Rt]};
{M, Lpre, Lpost, Rpre, Rpost} ->
{La, Ra} = diff(Lpost, Rpost, Lt, Rt),
diff(Lpre, Rpre, [M | La], [M | Ra])
end.
%=======================================================================
-record(bisect, {
x :: binary(),
y :: binary(),
xn :: pos_integer(),
yn :: pos_integer(),
delta :: integer()
}).
% after N steps:
% - each end has created V lengths of: 1 + 2 + 3 + ... + N/2
% - total sum = (N*N + 2N) / 4
% - total sum for (N = 1000) = 250,500
-define(MAX_BISECT, 1000).
bisect(X, Y) ->
B = bisect_init(X, Y),
Delta = B#bisect.delta,
Stop = min(B#bisect.xn + B#bisect.yn - 1, ?MAX_BISECT),
case Delta rem 2 of
0 ->
% delta is even
% do back first
Back = {Delta, [B#bisect.xn], zig, Delta - 1},
% back will produce a zag compatible V so start fore with a zag
Fore = {0, [0], zag, 1},
bisect_back(B, Stop, Fore, Back);
_ ->
% delta is odd
Fore = {0, [0], zig, -1},
Back = {Delta, [B#bisect.xn], zig, Delta - 1},
bisect_fore(B, Stop, Fore, Back)
end.
%-----------------------------------------------------------------------
bisect_init(X, Y) ->
Xn = size(X),
Yn = size(Y),
Delta = Xn - Yn,
#bisect{x = X, y = Y, xn = Xn, yn = Yn, delta = Delta}.
%-----------------------------------------------------------------------
bisect_answer(#bisect{x = X, y = Y}, {match, Mn, Xi, Yi}) ->
<<Xpre:Xi/binary, M:Mn/binary, Xpost/binary>> = X,
<<Ypre:Yi/binary, _:Mn/binary, Ypost/binary>> = Y,
{M, Xpre, Xpost, Ypre, Ypost}.
%-----------------------------------------------------------------------
bisect_fore(_, 0, _, _) ->
give_up;
bisect_fore(B, Stop, {_, ForeV, ForeZig, ForeK}, Back = {BackK, BackV, _, _}) ->
Reverse = {BackK, BackV},
case bisect_scan(B, fore, ForeZig, ForeK, ForeV, Reverse) of
Match = {match, _, _, _} ->
bisect_answer(B, Match);
NextFore ->
bisect_back(B, Stop - 1, NextFore, Back)
end.
%-----------------------------------------------------------------------
bisect_back(_, 0, _, _) ->
give_up;
bisect_back(B, Stop, NextFore, {_, BackV, BackZig, BackK}) ->
case bisect_scan(B, back, BackZig, BackK, BackV, no_match) of
Match = {match, _, _, _} ->
% should not happen since we sent no_match
throw({bisect_back, B, Match});
NextBack ->
bisect_fore(B, Stop - 1, NextFore, NextBack)
end.
%-----------------------------------------------------------------------
bisect_scan(B, Dir, Zig, K, V, Reverse) ->
bisect_scan(B, Dir, Zig, K, [out | V], [], Reverse).
%-----------------------------------------------------------------------
bisect_scan(_, _, zig, K, [], Acc, _) ->
% we stopped with K two (2) beyond the last accumulated item
% next zag will start with K one (1) wider
{K - 2, Acc, zag, K - 1};
bisect_scan(_, _, zag, K, [], Acc, _) ->
% we stopped with K two (2) beyond the last accumulated item
% next zag will start with K one (1) wider
{K + 2, Acc, zig, K + 1};
bisect_scan(B, Dir, Zig, K, V, Acc, Reverse) ->
%X = bisect_move(Zig, Dir, V),
Moves = bisect_move_choices(Zig, V),
Move = bisect_move_choose(Dir, Moves),
Moved = bisect_move_by(B, K, Move),
Followed = bisect_follow_k(B, Dir, Moved, K),
case bisect_match(Zig, Followed, K, Reverse) of
Match = {match, _, _, _} ->
Match;
NextReverse ->
NextK = case Zig of
zig -> K + 2;
zag -> K - 2
end,
NextV = tl(V),
NextAcc = case Followed of
%out when Acc =:= [] -> [];
_ -> [Followed | Acc]
end,
bisect_scan(B, Dir, Zig, NextK, NextV, NextAcc, NextReverse)
end.
%-----------------------------------------------------------------------
% original:
% if k == -D || ( k /= D && V[k - 1] < V[K + 1] )
% then x = V[k + 1]
% else x = V[k - 1] + 1
% invert from x-world to y-world
% if k == -D || ( k /= D && V[k - 1] < V[K + 1] )
% then x = V[k - 1]
% else x = V[k + 1] - 1
bisect_move_choices(zig, [Neg]) ->
{Neg, out};
bisect_move_choices(zig, [Neg, Pos | _]) ->
{Neg, Pos};
bisect_move_choices(zag, [Pos]) ->
{out, Pos};
bisect_move_choices(zag, [Pos, Neg | _]) ->
{Neg, Pos}.
%-----------------------------------------------------------------------
bisect_move_choose(fore, {out, Pos}) ->
{down, Pos};
bisect_move_choose(fore, {Neg, out}) ->
{right, Neg};
bisect_move_choose(fore, {Neg, Pos}) when Neg < Pos ->
{down, Pos};
bisect_move_choose(fore, {Neg, _}) ->
{right, Neg};
bisect_move_choose(back, {Neg, out}) ->
{up, Neg};
bisect_move_choose(back, {out, Pos}) ->
{left, Pos};
bisect_move_choose(back, {Neg, Pos}) when Neg < Pos ->
{up, Neg};
bisect_move_choose(back, {_, Pos}) ->
{left, Pos}.
%-----------------------------------------------------------------------
bisect_move_by(_, _, {_, out}) ->
out;
bisect_move_by(B, _, {right, X}) when X < B#bisect.xn ->
X + 1;
bisect_move_by(B, K, {down, X}) when X - K =< B#bisect.yn ->
X;
bisect_move_by(_, _, {left, X}) when X > 0 ->
X - 1;
bisect_move_by(_, K, {up, X}) when X - K >= 0 ->
X;
bisect_move_by(_, _, _) ->
out.
-ifdef(TEST).
bisect_move_by_test_() ->
B = #bisect{xn = 10, yn = 10},
% note: k = x - y || x = k + y || y = x - k
[ ?_assertEqual(10, bisect_move_by(B, +5, {right, 9}))
, ?_assertEqual(out, bisect_move_by(B, +5, {right, 10}))
% DOWN: y = 10, k = -5, x = -5 + 10 = 5
, ?_assertEqual(5, bisect_move_by(B, -5, {down, 5}))
% FAIL DOWN: y = 11, k = -5, x = -5 + 11 = 6
, ?_assertEqual(out, bisect_move_by(B, -5, {down, 6}))
, ?_assertEqual(0, bisect_move_by(B, -5, {left, 1}))
, ?_assertEqual(out, bisect_move_by(B, -5, {left, 0}))
% OK UP: y = 0, k = +5, x = +5 + 0 = 5
, ?_assertEqual(5, bisect_move_by(B, +5, {up, 5}))
% FAIL UP: y = -1, k = +5, x = +5 + -1 = 4
, ?_assertEqual(out, bisect_move_by(B, +5, {up, 4}))
].
-endif.
%-----------------------------------------------------------------------
bisect_follow_k(_, _, out, _) ->
out;
bisect_follow_k(B, Dir, Xi, K) ->
bisect_follow_xy(B, Dir, Xi, Xi - K).
%-----------------------------------------------------------------------
bisect_follow_xy(B, fore, Xi, Yi)
when Xi < B#bisect.xn andalso Yi < B#bisect.yn ->
case binary:at(B#bisect.x, Xi) =:= binary:at(B#bisect.y, Yi) of
true ->
bisect_follow_xy(B, fore, Xi + 1, Yi + 1);
false ->
Xi
end;
bisect_follow_xy(B, back, Xi, Yi)
when Xi > 0 andalso Yi > 0 ->
case binary:at(B#bisect.x, Xi - 1) =:= binary:at(B#bisect.y, Yi - 1) of
true ->
bisect_follow_xy(B, back, Xi - 1, Yi - 1);
false ->
Xi
end;
bisect_follow_xy(_, _, Xi, _) ->
Xi.
%-----------------------------------------------------------------------
% ASSUME: Only called in the fore direction
bisect_match(_, _, _, Reverse = no_match) ->
Reverse;
bisect_match(_, _, _, Reverse = {_, []}) ->
Reverse;
bisect_match(zig, _, K, Reverse = {RevK, _}) when K < RevK ->
Reverse;
bisect_match(zag, _, K, Reverse = {RevK, _}) when K > RevK ->
Reverse;
bisect_match(Zig, ForeX, K, {K, [BackX | RevV]}) when ForeX < BackX ->
case Zig of
zig -> {K + 2, RevV};
zag -> {K - 2, RevV}
end;
bisect_match(_, ForeX, K, {K, [BackX | _]}) ->
{match, ForeX - BackX, BackX, BackX - K};
bisect_match(Zig, Followed, K, {RevK, [_ | RevV]}) ->
case Zig of
zig ->
bisect_match(Zig, Followed, K, {RevK + 2, RevV});
zag ->
bisect_match(Zig, Followed, K, {RevK + 2, RevV})
end.
%=======================================================================
-ifdef(TEST).
fraser_1_1_test() ->
Old = <<"Equality">>,
New = Old,
Expect = {[Old], [New]},
?assertEqual(Expect, solarized_binary_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_2_test() ->
Old = <<"The cat in the hat.">>,
New = <<"The dog in the hat.">>,
Expect =
{ [<<"The ">>, <<"cat">>, <<" in the hat.">>]
, [<<"The ">>, <<"dog">>, <<" in the hat.">>]
},
?assertEqual(Expect, solarized_binary_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_3_a_test() ->
Old = <<"The cat in the hat.">>,
New = <<"The furry cat in the hat.">>,
Expect =
{ [<<"The ">>, <<>>, <<"cat in the hat.">>]
, [<<"The ">>, <<"furry ">>, <<"cat in the hat.">>]
},
?assertEqual(Expect, solarized_binary_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_3_b_test() ->
Old = <<"The cat in the hat.">>,
New = <<"The cat.">>,
Expect =
{ [<<"The cat">>, <<" in the hat">>, <<".">>]
, [<<"The cat">>, <<>>, <<".">>]
},
?assertEqual(Expect, solarized_binary_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_4_a_test_() ->
Old = <<"The cat in the hat.">>,
New = <<"The happy cat in the black hat.">>,
Expect =
{ [<<"The ">>, <<>>, <<"cat in the">>, <<>>, <<" hat.">>]
, [<<"The ">>, <<"happy ">>, <<"cat in the">>, <<" black">>, <<" hat.">>]
},
Reverse = { element(2, Expect), element(1, Expect) },
[ ?_assertEqual(Expect, solarized_binary_diff:diff(Old, New))
, ?_assertEqual(Reverse, solarized_binary_diff:diff(New, Old))
].
%-----------------------------------------------------------------------
fraser_1_4_b_test_() ->
Old = <<"The cat in the hat.">>,
New = <<"The ox in the box.">>,
Expect =
{ [<<"The ">>, <<"cat">>, <<" in the ">>, <<"hat">>, <<".">>]
, [<<"The ">>, <<"ox">>, <<" in the ">>, <<"box">>, <<".">>]
},
Reverse = { element(2, Expect), element(1, Expect) },
[ ?_assertEqual(Expect, solarized_binary_diff:diff(Old, New))
, ?_assertEqual(Reverse, solarized_binary_diff:diff(New, Old))
].
%-----------------------------------------------------------------------
fraser_2_1_test_() ->
Old = <<"The cat in the hat.">>,
New = <<"The bird in the hand.">>,
Expect =
{ [<<"The ">>, <<"cat">>, <<" in the ha">>, <<"t">>, <<".">>]
, [<<"The ">>, <<"bird">>, <<" in the ha">>, <<"nd">>, <<".">>]
},
Reverse = { element(2, Expect), element(1, Expect) },
[ ?_assertEqual(Expect, solarized_binary_diff:diff(Old, New))
, ?_assertEqual(Reverse, solarized_binary_diff:diff(New, Old))
].
%-----------------------------------------------------------------------
fraser_2_2_test_() ->
Old = <<"The black cat in the hat?">>,
New = <<"The cat in the black hat!">>,
Expect =
{ [<<"The ">>, <<"black ">>
, <<"cat in the ">>, <<>>
, <<>>, <<>>
, <<"hat">>, <<"?">>
]
, [<<"The ">>, <<>>
, <<"cat in the ">>, <<"blac">>
, <<>>, <<"k ">>
, <<"hat">>, <<"!">>
]
},
Reverse = { element(2, Expect), element(1, Expect) },
[ ?_assertEqual(Expect, solarized_binary_diff:diff(Old, New))
, ?_assertEqual(Reverse, solarized_binary_diff:diff(New, Old))
].
%-----------------------------------------------------------------------
-define(ZIG(L), lists:reverse(L)).
-define(ZAG(L), L).
bisect_scan_test() ->
X = <<"abcabba">>,
Y = <<"cbabac">>,
B = bisect_init(X, Y),
Delta = B#bisect.delta,
Fore =
[ {-0, ?ZAG([0]), zig, -1}
, {+1, ?ZIG([0, 1]), zag, +2}
, {-2, ?ZAG([2, 2, 3]), zig, -3}
, {+3, ?ZIG([3, 4, 5, 5]), zag, +4}
, {-4, ?ZAG([out, 4, 5, 7, 7]), zig, -5}
],
bisect_scan_test(B, fore, Fore),
Back =
[ {Delta-0, ?ZAG([7]), zig, Delta-1}
, {Delta+1, ?ZIG([6, 5]), zag, Delta+2}
, {Delta-2, ?ZAG([5, 3, 4]), zig, Delta-3}
, {Delta+3, ?ZIG([4, 1, 2, 4]), zag, Delta+4}
, {Delta-4, ?ZAG([2, 0, 1, out, out]), zig, Delta-5}
],
bisect_scan_test(B, back, Back).
bisect_scan_test(_, _, [_]) ->
ok;
bisect_scan_test(B, Dir, [{_, V, Zig, K} | Rest = [Expect | _]]) ->
?assertEqual(Expect, bisect_scan(B, Dir, Zig, K, V, no_match)),
bisect_scan_test(B, Dir, Rest).
-undef(ZIG).
-undef(ZAG).
%-----------------------------------------------------------------------
bisect_scan_matchd_test() ->
X = <<"abcabba">>,
Y = <<"cbabac">>,
B = bisect_init(X, Y),
Delta = B#bisect.delta,
% NOTE: Delta is odd!
% after the back zag for K in {Delta-2, ..., Delta+2}
BackK = Delta - 2,
BackV = [5, 3, 4],
Reverse = {BackK, BackV},
% after the fore zag for K in {-2, ..., +2}
ForeK = -2,
ForeV = [2, 2, 3],
% so the next
NextK = ForeK - 1,
Expect = {match, 2, 3, 2},
?assertEqual(Expect, bisect_scan(B, fore, zig, NextK, ForeV, Reverse)).
%-----------------------------------------------------------------------
bisect_test() ->
X = <<"abcabba">>,
Y = <<"cbabac">>,
Expect = {<<"ab">>, <<"abc">>, <<"ba">>, <<"cb">>, <<"ac">>},
?assertEqual(Expect, bisect(X, Y)).
%-----------------------------------------------------------------------
bisect_hat_test() ->
X = <<"black hat!">>,
Y = <<"hat?">>,
Expect = {<<>>, <<"blac">>, <<"k hat!">>, <<>>, <<"hat?">>},
?assertEqual(Expect, bisect(X, Y)).
%-----------------------------------------------------------------------
-endif. | src/solarized_binary_diff.erl | 0.650023 | 0.499023 | solarized_binary_diff.erl | starcoder |
%% @copyright 2013-2016 <NAME> <<EMAIL>>
%%
%% @doc An object which represents a node on a consistent hash ring
%% @end
-module(hash_ring_node).
%%----------------------------------------------------------------------------------------------------------------------
%% Exported API
%%----------------------------------------------------------------------------------------------------------------------
-export([make/1, make/2, make/3]).
-export([is_node/1]).
-export([get_key/1, get_data/1, get_weight/1]).
-export_type([ring_node/0]).
-export_type([key/0, data/0, weight/0]).
-export_type([option/0, options/0]).
%%----------------------------------------------------------------------------------------------------------------------
%% Macros & Records & Types
%%----------------------------------------------------------------------------------------------------------------------
-define(NODE, ?MODULE).
-record(?NODE,
{
key :: key(),
data :: data(),
weight :: weight()
}).
-opaque ring_node() :: #?NODE{}.
%% A node on a ring.
-type key() :: term().
%% The key of a `ring_node()'.
%%
%% It is used to decide location of the node on a ring.
-type data() :: term().
%% The data of a `ring_node()'.
%%
%% It holds arbitrary user data.
-type weight() :: number().
%% The non negative weight of a `ring_node()'.
%%
%% The more weight node occupies, the more space in a ring.
-type options() :: [option()].
-type option() :: {weight, weight()}.
%% weight:
%% <ul>
%% <li>A coefficient which is used to determine the virtual node count of the node.</li>
%% <li>The higher the value, the number of virtual nodes increases, likely to be more selected.</li>
%% <li>The default value is `1'.</li>
%% </ul>
%%----------------------------------------------------------------------------------------------------------------------
%% Exported Functions
%%----------------------------------------------------------------------------------------------------------------------
%% @equiv make(Key, Key)
-spec make(key()) -> ring_node().
make(Key) ->
make(Key, Key).
%% @equiv make(Key, Data, [])
-spec make(key(), data()) -> ring_node().
make(Key, Data) ->
make(Key, Data, []).
%% @doc Creates a new `ring_node()' object
-spec make(key(), data(), options()) -> ring_node().
make(Key, Data, Options) ->
Weight = proplists:get_value(weight, Options, 1),
_ = (is_number(Weight) andalso Weight >= 0) orelse error(badarg, [Key, Data, Options]),
#?NODE{
key = Key,
data = Data,
weight = Weight
}.
%% @doc Returns `true' if `X' is a `ring_node()', otherwise `false'
-spec is_node(X :: (ring_node() | term())) -> boolean().
is_node(X) -> is_record(X, ?NODE).
%% @doc Gets the key of `Node'
-spec get_key(Node :: ring_node()) -> key().
get_key(#?NODE{key = Key}) -> Key.
%% @doc Gets the data of `Node'
-spec get_data(Node :: ring_node()) -> data().
get_data(#?NODE{data = Data}) -> Data.
%% @doc Gets the weight of `Node'
-spec get_weight(Node :: ring_node()) -> weight().
get_weight(#?NODE{weight = Weight}) -> Weight. | src/hash_ring_node.erl | 0.663996 | 0.442877 | hash_ring_node.erl | starcoder |
%==============================================================================
%% @copyright 2019-2020 Erlang Solutions Ltd.
%% Licensed under the Apache License, Version 2.0 (see LICENSE file)
%% @end
%%
%% @doc
%% In this scenario, users are creating PEP nodes and publishing items.
%% Users are publishing items to their PEP nodes and receiving items from other
%% users' nodes. Each node has a number of subscribers limited by the
%% `n_of_subscribers' variable. Publishing can start depending on the
%% `node_activation_policy' variable, either after `all_nodes' or after `n_nodes'
%% are subscribed to. Interactions between users and pubsub PEP nodes are managed
%% by the `amoc_coordinator'. Additional subscription and publication delay can
%% be introduced with use of the `coordinator_delay' variable. This can help to
%% moderate the load when users are being added.
%%
%% == User steps: ==
%%
%% 1. Connect to the XMPP host given by the `mim_host' variable.
%%
%% 2. Create a PEP node and send presence `available', in order to receive
%% messages from the PEP nodes. The rate of node creation is limited by the
%% `node_creation_rate' per minute. Node creation results in a timeout when
%% `iq_timeout' is exceeded.
%%
%% 3. Add user to the `amoc_coordinator' and pass client data.
%%
%% 4. Wait for the following messages in a loop:
%%
%% - {stanza, MessageStanza} - process message stanza, check if it contains the
%% user's own jid. If it does, schedule a `publish_item' message. The rate of
%% these messages is handled by `amoc_throttle' and depends on the
%% `publication_rate' variable.
%%
%% - {stanza, IqStanza} - process an `iq' stanza and update corresponding metrics.
%%
%% - {stanza, PresenceStanza} - respond to the `subscribe' presence stanzas.
%%
%% - publish_item - message from `amoc_throttle' that was scheduled after a
%% message stanza was received. Send a message, which size is defined by the
%% `publication_size' variable, to the PEP node. The rate of these `publish_item_node'
%% messages is handled by `amoc_throttle' and depends on the `node_publication_rate'
%% variable.
%%
%% 5. Continue execution of the `user_loop'.
%%
%% == Metrics exposed by this scenario: ==
%%
%% === Counters: ===
%% ==== node_creation ====
%% - node_creation has following counter metrics exposed: `request', `response',
%% `timeout', `response result', `response error', `timeout result',
%% `timeout error'
%% ==== publication ====
%% - publication has following counter metrics exposed: `request', `response',
%% `timeout', `response result', `response error', `timeout result',
%% `timeout error'
%% ==== message ====
%% - message - incremented with every received message stanza.
%%
%% === Times: ===
%% - node_creation - time for the pubsub node to be created
%%
%% - publication - time to publish pubsub item
%%
%% - message_tdd - message time to delivery
%%
%% @end
%%==============================================================================
-module(pubsub_pep).
-behaviour(amoc_scenario).
-include_lib("exml/include/exml.hrl").
-include_lib("escalus/include/escalus.hrl").
-include_lib("escalus/include/escalus_xmlns.hrl").
-include_lib("kernel/include/logger.hrl").
-define(V(X), fun amoc_config_validation:X/1).
-required_variable([
#{name => iq_timeout, default_value => 10000, verification => ?V(positive_integer),
description => "IQ timeout (milliseconds, def: 10000ms)"},
#{name => coordinator_delay, default_value => 0, verification => ?V(nonnegative_integer),
description => "Delay after N subscriptions (milliseconds, def: 0ms)"},
#{name => node_creation_rate, default_value => 600, verification => ?V(positive_integer),
description => "Rate of node creations (per minute, def:600)"},
#{name => publication_size, default_value => 300, verification => ?V(nonnegative_integer),
description => "Size of additional payload (bytes, def:300)"},
#{name => publication_rate, default_value => 1500, verification => ?V(positive_integer),
description => "Rate of publications (per minute, def:1500)"},
#{name => n_of_subscribers, default_value => 50, verification => ?V(nonnegative_integer),
description => "Number of subscriptions for each node (def: 50)"},
#{name => activation_policy, default_value => all_nodes, verification => [all_nodes, n_nodes],
description => "Publish after subscribtion of (def: all_nodes | n_nodes)"},
#{name => mim_host, default_value => <<"localhost">>, verification => ?V(binary),
description => "The virtual host served by the server (def: <<\"localhost\">>)"}
]).
-define(PEP_NODE_NS, <<"just_some_random_namespace">>).
-define(CAPS_HASH, <<"erNmVoMSwRBR4brUU/inYQ5NFr0=">>). %% mod_caps:make_disco_hash(feature_elems(), sha1).
-define(NODE, {pep, ?PEP_NODE_NS}).
-define(GROUP_NAME, <<"pubsub_simple_coordinator">>).
-define(NODE_CREATION_THROTTLING, node_creation).
-define(PUBLICATION_THROTTLING, publication).
-define(COORDINATOR_TIMEOUT, 100).
-export([init/0, start/1]).
-spec init() -> ok.
init() ->
init_metrics(),
{ok, PublicationRate} = amoc_config:get(publication_rate),
{ok, NodeCreationRate} = amoc_config:get(node_creation_rate),
amoc_throttle:start(?NODE_CREATION_THROTTLING, NodeCreationRate),
amoc_throttle:start(?PUBLICATION_THROTTLING, PublicationRate),
start_coordinator(),
ok.
-spec start(amoc_scenario:user_id()) -> any().
start(Id) ->
Client = connect_amoc_user(Id),
start_user(Client).
init_metrics() ->
iq_metrics:start([node_creation, publication]),
Counters = [message],
Times = [message_ttd],
[amoc_metrics:init(counters, Metric) || Metric <- Counters],
[amoc_metrics:init(times, Metric) || Metric <- Times].
%%------------------------------------------------------------------------------------------------
%% Coordinator
%%------------------------------------------------------------------------------------------------
start_coordinator() ->
amoc_coordinator:start(?MODULE, get_coordination_plan(), ?COORDINATOR_TIMEOUT).
get_coordination_plan() ->
N = get_no_of_node_subscribers(),
[{N, [fun make_clients_friends/3,
users_activation(n_nodes),
coordination_delay()]},
{all, users_activation(all_nodes)}].
coordination_delay() ->
Delay = amoc_config:get(coordinator_delay),
fun({coordinate, _}) -> timer:sleep(Delay);
(_) -> ok
end.
make_clients_friends(_, _, undefined) -> ok;
make_clients_friends(_, {_, C1}, {_, C2}) ->
send_presence(C1, <<"subscribe">>, C2),
send_presence(C2, <<"subscribe">>, C1).
users_activation(ActivationPolicy) ->
case amoc_config:get(activation_policy) of
ActivationPolicy ->
fun(_, CoordinationData) ->
[schedule_publishing(Pid) || {Pid, _} <- CoordinationData]
end;
_ -> fun(_) -> ok end
end.
%%------------------------------------------------------------------------------------------------
%% User
%%------------------------------------------------------------------------------------------------
start_user(Client) ->
?LOG_DEBUG("user process ~p", [self()]),
create_new_node(Client),
erlang:monitor(process, Client#client.rcv_pid),
escalus_tcp:set_active(Client#client.rcv_pid, true),
send_presence_with_caps(Client),
user_loop(Client).
create_new_node(Client) ->
amoc_throttle:send_and_wait(?NODE_CREATION_THROTTLING, create_node),
create_pubsub_node(Client),
amoc_coordinator:add(?MODULE, Client).
user_loop(Client) ->
receive
{stanza, _, #xmlel{name = <<"message">>} = Stanza, #{recv_timestamp := TimeStamp}} ->
process_msg(Stanza, TimeStamp),
user_loop(Client);
{stanza, _, #xmlel{name = <<"iq">>} = Stanza, _} ->
process_iq(Client, Stanza),
user_loop(Client);
{stanza, _, #xmlel{name = <<"presence">>} = Stanza, _} ->
process_presence(Client, Stanza),
user_loop(Client);
publish_item ->
IqTimeout = amoc_config:get(iq_timeout),
Id = publish_pubsub_item(Client),
iq_metrics:request(publication, Id, IqTimeout),
user_loop(Client);
{'DOWN', _, process, Pid, Info} when Pid =:= Client#client.rcv_pid ->
?LOG_ERROR("TCP connection process ~p down: ~p", [Pid, Info]);
Msg ->
?LOG_ERROR("unexpected message ~p", [Msg])
end.
schedule_publishing(Pid) ->
amoc_throttle:send(?PUBLICATION_THROTTLING, Pid, publish_item).
%%------------------------------------------------------------------------------------------------
%% User connection
%%------------------------------------------------------------------------------------------------
connect_amoc_user(Id) ->
ExtraProps = amoc_xmpp:pick_server([[{host, "127.0.0.1"}]]) ++
[{server, amoc_config:get(mim_host)},
{socket_opts, socket_opts()}],
{ok, Client, _} = amoc_xmpp:connect_or_exit(Id, ExtraProps),
erlang:put(jid, Client#client.jid),
Client.
socket_opts() ->
[binary,
{reuseaddr, false},
{nodelay, true}].
%%------------------------------------------------------------------------------------------------
%% Node creation
%%------------------------------------------------------------------------------------------------
create_pubsub_node(Client) ->
ReqId = iq_id(create, Client),
iq_metrics:request(node_creation, ReqId),
Request = publish_pubsub_stanza(Client, ReqId, #xmlel{name = <<"nothing">>}),
%Request = escalus_pubsub_stanza:create_node(Client, ReqId, ?NODE),
escalus:send(Client, Request),
CreateNodeResult = (catch escalus:wait_for_stanza(Client, amoc_config:get(iq_timeout))),
case {escalus_pred:is_iq_result(Request, CreateNodeResult), CreateNodeResult} of
{true, _} ->
?LOG_DEBUG("node creation ~p (~p)", [?NODE, self()]),
iq_metrics:response(ReqId, result);
{false, {'EXIT', {timeout_when_waiting_for_stanza, _}}} ->
iq_metrics:timeout(ReqId, delete),
?LOG_ERROR("Timeout creating node: ~p", [CreateNodeResult]),
exit(node_creation_timeout);
{false, _} ->
iq_metrics:response(ReqId, error),
?LOG_ERROR("Error creating node: ~p", [CreateNodeResult]),
exit(node_creation_failed)
end.
%%------------------------------------------------------------------------------------------------
%% User presence & caps
%%------------------------------------------------------------------------------------------------
send_presence(From, Type, To = #client{}) ->
ToJid = escalus_client:short_jid(To),
send_presence(From, Type, ToJid);
send_presence(From, Type, To) ->
Presence = escalus_stanza:presence_direct(To, Type),
escalus_client:send(From, Presence).
send_presence_with_caps(Client) ->
Presence = escalus_stanza:presence(<<"available">>, [caps()]),
escalus:send(Client, Presence).
caps() ->
#xmlel{name = <<"c">>,
attrs = [{<<"xmlns">>, <<"http://jabber.org/protocol/caps">>},
{<<"hash">>, <<"sha-1">>},
{<<"node">>, <<"http://www.chatopus.<EMAIL>">>},
{<<"ver">>, ?CAPS_HASH}]}.
%%------------------------------------------------------------------------------------------------
%% Item publishing
%%------------------------------------------------------------------------------------------------
publish_pubsub_item(Client) ->
Id = iq_id(publish, Client),
PayloadSize = amoc_config:get(publication_size),
Content = item_content(PayloadSize),
Request = publish_pubsub_stanza(Client, Id, Content),
escalus:send(Client, Request),
Id.
publish_pubsub_stanza(Client, Id, Content) ->
ItemId = <<"current">>,
escalus_pubsub_stanza:publish(Client, ItemId, Content, Id, ?NODE).
item_content(PayloadSize) ->
Payload = #xmlcdata{content = <<<<"A">> || _ <- lists:seq(1, PayloadSize)>>},
#xmlel{
name = <<"entry">>,
attrs = [{<<"timestamp">>, integer_to_binary(os:system_time(microsecond))},
{<<"jid">>, erlang:get(jid)}],
children = [Payload]}.
%%------------------------------------------------------------------------------------------------
%% Item processing
%%------------------------------------------------------------------------------------------------
process_msg(#xmlel{name = <<"message">>} = Stanza, TS) ->
escalus:assert(is_message, Stanza),
Entry = exml_query:path(Stanza, [{element, <<"event">>}, {element, <<"items">>},
{element, <<"item">>}, {element, <<"entry">>}]),
case Entry of
undefined -> ok;
_ ->
case {exml_query:attr(Entry, <<"jid">>), erlang:get(jid)} of
{JID, JID} -> schedule_publishing(self());
_ -> ok
end,
TimeStampBin = exml_query:attr(Entry, <<"timestamp">>),
TimeStamp = binary_to_integer(TimeStampBin),
TTD = TS - TimeStamp,
%% ?LOG_DEBUG("time to delivery ~p", [TTD]),
amoc_metrics:update_counter(message),
amoc_metrics:update_time(message_ttd, TTD)
end.
process_presence(Client, Stanza) ->
case exml_query:attr(Stanza, <<"type">>) of
<<"subscribe">> ->
From = exml_query:attr(Stanza, <<"from">>),
send_presence(Client, <<"subscribed">>, From);
_ ->
ok %%it's ok to just ignore other presence notifications
end.
process_iq(Client, #xmlel{name = <<"iq">>} = Stanza) ->
Id = exml_query:attr(Stanza, <<"id">>),
Type = exml_query:attr(Stanza, <<"type">>),
NS = exml_query:path(Stanza, [{element, <<"query">>}, {attr, <<"xmlns">>}]),
case {Type, NS, Id} of
{<<"get">>, ?NS_DISCO_INFO, _} ->
handle_disco_query(Client, Stanza);
{<<"set">>, ?NS_ROSTER, _} ->
ok; %%it's ok to just ignore roster pushes
{_, undefined, <<"publish", _/binary>>} ->
handle_publish_resp(Stanza, Id);
_ ->
?LOG_WARNING("unexpected iq ~p", [Stanza])
end.
handle_publish_resp(PublishResult, Id) ->
case escalus_pred:is_iq_result(PublishResult) of
true ->
%% ?LOG_DEBUG("publish time ~p", [PublishTime]),
iq_metrics:response(Id, result);
_ ->
iq_metrics:response(Id, error),
?LOG_ERROR("Error publishing failed: ~p", [PublishResult]),
exit(publication_failed)
end.
handle_disco_query(Client, DiscoRequest) ->
?LOG_DEBUG("handle_disco_query ~p", [self()]),
QueryEl = escalus_stanza:query_el(<<"http://jabber.org/protocol/disco#info">>,
feature_elems()),
DiscoResult = escalus_stanza:iq_result(DiscoRequest, [QueryEl]),
escalus:send(Client, DiscoResult).
feature_elems() ->
NodeNs = ?PEP_NODE_NS,
[#xmlel{name = <<"identity">>,
attrs = [{<<"category">>, <<"client">>},
{<<"name">>, <<"Psi">>},
{<<"type">>, <<"pc">>}]},
#xmlel{name = <<"feature">>,
attrs = [{<<"var">>, <<"http://jabber.org/protocol/disco#info">>}]},
#xmlel{name = <<"feature">>,
attrs = [{<<"var">>, NodeNs}]},
#xmlel{name = <<"feature">>,
attrs = [{<<"var">>, <<NodeNs/bitstring, "+notify">>}]}].
%%------------------------------------------------------------------------------------------------
%% Stanza helpers
%%------------------------------------------------------------------------------------------------
iq_id(Type, Client) ->
UserName = escalus_utils:get_username(Client),
Suffix = random_suffix(),
list_to_binary(io_lib:format("~s-~s-~p",
[Type, UserName, Suffix])).
random_suffix() ->
Suffix = base64:encode(crypto:strong_rand_bytes(5)),
re:replace(Suffix, "/", "_", [global, {return, binary}]).
%%------------------------------------------------------------------------------------------------
%% Config helpers
%%------------------------------------------------------------------------------------------------
get_no_of_node_subscribers() ->
%instead of constant No of subscriptions we can use min/max values.
amoc_config:get(n_of_subscribers). | src/scenarios/pubsub_pep.erl | 0.636353 | 0.57827 | pubsub_pep.erl | starcoder |
-module(shapes).
-export([perimeter/1, area/1, enclose/1]).
-include_lib("eunit/include/eunit.hrl").
% <NAME>
% 2017-03-12
%
% Valid shapes:
% {circle,{X,Y},R}
% {rectangle,{X,Y},H,W}
% {triangle,{X,Y},A,B,Theta} (side lengths A, B, & angle b/w of Theta)
%
% Assumes: "smallest enclosing rectangle" means rectangle w/ smallest area.
%
% Min Area Rectangle will have one side co-incident with one side of the
% triangle thus, we have 3 cases to compute a bounding rectangle for, then
% find the one with minimum area.
%
% To find each, it converts the triangle to a list of points, then for each
% side, rotates the points such that the side is aligned to the +ve x-axis,
% and computes the min bounding rectangle for the points in this orientation.
% With these 3 rectangles, we choose the one with the minimum area.
%
% To run unit tests:
% > c(shapes).
% > shapes:test().
% Calculate perimeter of various shapes
perimeter({circle,{_X,_Y},R}) ->
2 * math:pi() * R;
perimeter({rectangle,{_X,_Y},H,W}) ->
2 * (H + W);
perimeter({triangle,{_X,_Y},A,B,Theta}) ->
case Theta > math:pi() of
true -> throw('arg Theta must be <= PI');
false -> math:sqrt(A*A + B*B - 2*A*B*math:cos(Theta)) + A + B
end.
% Calculate area of various shapes
area({circle,{_X,_Y},R}) ->
math:pi() * R * R;
area({rectangle,{_X,_Y},H,W}) ->
H * W;
area({triangle,{_X,_Y},A,B,Theta}) ->
case Theta > math:pi() of
true -> throw('arg Theta must be <= PI');
false -> A * B * math:sin(Theta) / 2
end.
% Calculate min bounding rectangle of various shapes
enclose({circle,{X,Y},R}) ->
{rectangle,{X,Y},2*R,2*R};
enclose({rectangle,{X,Y},H,W}) ->
{rectangle,{X,Y},H,W};
enclose({triangle,{X,Y},A,B,Theta}) ->
case Theta > math:pi() of
true -> throw('arg Theta must be <= PI');
false -> minItem(boundingRects({triangle,{X,Y},A,B,Theta}),
fun area/1)
end.
% --- helpers ----------------------------
% Finds all bounding rectangles for a triangle, where each is coincident with
% a different side of the provided triangle.
boundingRects({triangle,{X,Y},A,B,Theta}) ->
{points,{_Ex,_Wy},Coords} = toPoints({triangle,{X,Y},A,B,Theta}),
lists:map(fun(N) ->
boundingRect(alignToXAxis({points,{X,Y},rotateList(Coords,N)}))
end, lists:seq(0,2)).
% Find the bounding rectangle that encompases a set of points.
boundingRect({points,{X,Y},Coords}) ->
{rectangle,{X,Y},
abs(maxY(Coords)-minY(Coords)),abs(maxX(Coords)-minX(Coords))}.
% Converts a triangle its set of points (centered about 0,0).
% Returns {points,{X,Y},[{X1,Y1},...]}
toPoints({triangle,{X,Y},A,B,Theta}) ->
% (assume for the moment that point of intersection is 0,0)
Coords = [{0,0},{A,0},{B*math:cos(Theta),B*math:sin(Theta)}],
% find centroid and center points around it
{Cx,Cy} = centroid(Coords),
translate2D({points,{Cx+X,Cy+Y},Coords}, -Cx, -Cy).
% Rotates all points such that the side formed by the first two points is
% aligned with the +ve x-axis.
% Assumes Coords are situated about origin, so no translations necessary.
alignToXAxis({points,{X,Y},Coords}) ->
[{X1,Y1}|[{X2,Y2}|_]] = Coords,
Angle = math:atan((Y2-Y1)/(X2-X1)),
rotate2D({points,{X,Y},Coords}, -Angle).
% Finds the centroid of a list of coordinates
centroid(Coords) ->
N = length(Coords),
SumX = lists:foldl(fun({X,_Y},Acc) -> Acc + X end, 0, Coords),
SumY = lists:foldl(fun({_X,Y},Acc) -> Acc + Y end, 0, Coords),
{SumX/N,SumY/N}.
% 2D Translation
translate2D({points,{X,Y},Coords}, Tx, Ty) ->
{points,{X+Tx,Y+Ty},lists:map(fun({Ex,Wy}) -> {Ex+Tx,Wy+Ty} end, Coords)}.
% 2D Rotation (about origin)
rotate2D({points,{X,Y},Coords}, Angle) ->
{points,{X,Y},lists:map(fun({Ex,Wy}) ->
{Ex*math:cos(Angle) - Wy*math:sin(Angle),
Wy*math:cos(Angle) + Ex*math:sin(Angle)} end, Coords)}.
% Finds the min/max X/Y ordinate of a list of coordinates
minX(Coords) -> lists:min(lists:map(fun({X,_Y}) -> X end, Coords)).
maxX(Coords) -> lists:max(lists:map(fun({X,_Y}) -> X end, Coords)).
minY(Coords) -> lists:min(lists:map(fun({_X,Y}) -> Y end, Coords)).
maxY(Coords) -> lists:max(lists:map(fun({_X,Y}) -> Y end, Coords)).
% Rotates a list N items to the left
rotateList([], _N) -> [];
rotateList([H1|[]], _N) -> [H1];
rotateList([H1|[H2|T]], 0) -> [H1|[H2|T]];
rotateList([H1|[H2|T]], N) -> rotateList([H2|T] ++ [H1], N-1).
% Finds item in List for which ToVal function returns a minimum.
% List must be non-empty.
minItem(List,ToVal) ->
lists:foldl(fun(Item,Acc) -> case ToVal(Item) < ToVal(Acc) of
true -> Item;
false -> Acc
end end, hd(List), List).
% --- unit tests ---------------------------
approxEq(A, B, Precision) ->
(B >= A - Precision) and (B =< A + Precision).
minItem_test() ->
List = [{2}, {3}, {1}, {4}, {5}],
ToVal = fun({V}) -> V end,
Min = minItem(List, ToVal),
?assert(Min == {1}).
rotateList1_test() -> ?assert(rotateList([], 0) == []).
rotateList2_test() -> ?assert(rotateList([], 1) == []).
rotateList3_test() -> ?assert(rotateList([], 2) == []).
rotateList4_test() -> ?assert(rotateList([1], 0) == [1]).
rotateList5_test() -> ?assert(rotateList([1], 1) == [1]).
rotateList6_test() -> ?assert(rotateList([1], 2) == [1]).
rotateList7_test() -> ?assert(rotateList([1,2], 0) == [1,2]).
rotateList8_test() -> ?assert(rotateList([1,2], 1) == [2,1]).
rotateList9_test() -> ?assert(rotateList([1,2], 2) == [1,2]).
rotateList10_test() -> ?assert(rotateList([1,2,3], 0) == [1,2,3]).
rotateList11_test() -> ?assert(rotateList([1,2,3], 1) == [2,3,1]).
rotateList12_test() -> ?assert(rotateList([1,2,3], 2) == [3,1,2]).
rotateList13_test() -> ?assert(rotateList([1,2,3], 3) == [1,2,3]).
minX_test() -> ?assert(minX([{1,2},{3,4},{-2,8}]) == -2).
minY_test() -> ?assert(minY([{1,2},{3,4},{-2,8}]) == 2).
maxX_test() -> ?assert(maxX([{1,2},{3,4},{-2,8}]) == 3).
maxY_test() -> ?assert(maxY([{1,2},{3,4},{-2,8}]) == 8).
rotate2D_test() ->
{points,{1,2},[{X1,Y1}|[{X2,Y2}|[{X3,Y3}|[{X4,Y4}]]]]} =
rotate2D({points,{1,2},[{1,0},{0,2},{-3,0},{0,-4}]}, math:pi()/2),
?assert(approxEq(X1, 0, 0.0001)),
?assert(approxEq(Y1, 1, 0.0001)),
?assert(approxEq(X2, -2, 0.0001)),
?assert(approxEq(Y2, 0, 0.0001)),
?assert(approxEq(X3, 0, 0.0001)),
?assert(approxEq(Y3, -3, 0.0001)),
?assert(approxEq(X4, 4, 0.0001)),
?assert(approxEq(Y4, 0, 0.0001)).
translate2D_test() ->
{points,{X,Y},[{X1,Y1}]} =
translate2D({points,{-10,10},[{-1,3}]}, 10, -5),
?assert(approxEq(X, 0, 0.0001)),
?assert(approxEq(Y, 5, 0.0001)),
?assert(approxEq(X1, 9, 0.0001)),
?assert(approxEq(Y1, -2, 0.0001)).
centroid_test() ->
{X,Y} = centroid([{0,0}, {2,1}, {5,2}]),
?assert(approxEq(X, 2.333333, 0.0001)),
?assert(approxEq(Y, 1, 0.0001)).
alignToXAxis_test() ->
{points,{X,Y},[{X1,Y1},{X2,Y2},{X3,Y3}]} =
alignToXAxis({points,{10,5},[{1,1},{3,2},{-5,-3}]}),
?assert(approxEq(X, 10, 0.0001)),
?assert(approxEq(Y, 5, 0.0001)),
?assert(approxEq(X1, 1.341640, 0.0001)),
?assert(approxEq(Y1, 0.447214, 0.0001)),
?assert(approxEq(X2, 3.577708, 0.0001)),
?assert(approxEq(Y2, 0.447214, 0.0001)),
?assert(approxEq(X3, -5.813776, 0.0001)),
?assert(approxEq(Y3, -0.447214, 0.0001)).
toPoints_test() ->
{points,{X,Y},[{X1,Y1},{X2,Y2},{X3,Y3}]} =
toPoints({triangle,{10,5},5,20,0.175}),
?assert(approxEq(X, 10, 0.0001)),
?assert(approxEq(Y, 5, 0.0001)),
?assert(approxEq(X1, -8.231510, 0.0001)),
?assert(approxEq(Y1, -1.160721, 0.0001)),
?assert(approxEq(X2, -3.231510, 0.0001)),
?assert(approxEq(Y2, -1.160721, 0.0001)),
?assert(approxEq(X3, 11.463021, 0.0001)),
?assert(approxEq(Y3, 2.321442, 0.0001)).
boundingRect_test() ->
{rectangle,{X,Y},H,W} =
boundingRect({points,{5,10},[{-3,1},{1,-4},{0,2}]}),
?assert(approxEq(X, 5, 0.0001)),
?assert(approxEq(Y, 10, 0.0001)),
?assert(approxEq(H, 6, 0.0001)),
?assert(approxEq(W, 4, 0.0001)).
boundingRects_test() ->
Rects = boundingRects({triangle,{1,2},2,3,3*math:pi()/4}),
?assert(length(Rects) == 3).
perimeter_circle1_test() ->
?assert(approxEq(perimeter({circle,{0,0},1}), 6.283185, 0.0001)).
perimeter_circle2_test() ->
?assert(approxEq(perimeter({circle,{0,0},0.5}), 3.141592, 0.0001)).
perimeter_rectangle1_test() ->
?assert(approxEq(perimeter({rectangle,{0,0},1,2}), 6, 0.0001)).
perimeter_rectangle2_test() ->
?assert(approxEq(perimeter({rectangle,{0,0},10,20}), 60, 0.0001)).
perimeter_triangle1_test() ->
?assert(approxEq(perimeter({triangle,{0,0},3,4,1.570796}), 12, 0.0001)).
perimeter_triangle2_test() ->
?assert(approxEq(perimeter({triangle,{0,0},3,4,2.356194}), 13.478469,
0.0001)).
area_circle1_test() ->
?assert(approxEq(area({circle,{0,0},1}), 3.141592, 0.0001)).
area_circle2_test() ->
?assert(approxEq(area({circle,{0,0},2}), 12.566370, 0.0001)).
area_rectangle1_test() ->
?assert(approxEq(area({rectangle,{0,0},1,2}), 2, 0.0001)).
area_rectangle2_test() ->
?assert(approxEq(area({rectangle,{0,0},3.5,2.8}), 9.8, 0.0001)).
area_triangle1_test() ->
?assert(approxEq(area({triangle,{0,0},3,4,1.570796}), 6, 0.0001)).
area_triangle2_test() ->
?assert(approxEq(area({triangle,{0,0},3,4,2.356194}), 4.242640, 0.001)).
enclose_circle1_test() ->
?assert(enclose({circle,{0,0},1}) == {rectangle,{0,0},2,2}).
enclose_circle2_test() ->
?assert(enclose({circle,{0,0},0.5}) == {rectangle,{0,0},1,1}).
enclose_rectangle1_test() ->
?assert(enclose({rectangle,{0,0},1,2}) == {rectangle,{0,0},1,2}).
enclose_rectangle2_test() ->
?assert(enclose({rectangle,{2,6},2,4}) == {rectangle,{2,6},2,4}).
enclose1_triangle_test() ->
{rectangle,{X,Y},H,W} = enclose({triangle,{0,0},3,2,3*math:pi()/4}),
?assert(approxEq(X, 0, 0.0001)),
?assert(approxEq(Y, 0, 0.0001)),
?assert(approxEq(H, 0.915304, 0.0001)),
?assert(approxEq(W, 4.635221, 0.0001)). | 1_24/shapes.erl | 0.656108 | 0.594698 | shapes.erl | starcoder |
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_ctl_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("common_test/include/ct.hrl").
all() -> emqx_ct:all(?MODULE).
init_per_suite(Config) ->
ok = emqx_logger:set_log_level(emergency),
Config.
end_per_suite(_Config) ->
ok.
%%--------------------------------------------------------------------
%% Test cases
%%--------------------------------------------------------------------
t_reg_unreg_command(_) ->
with_ctl_server(
fun(_CtlSrv) ->
emqx_ctl:register_command(cmd1, {?MODULE, cmd1_fun}),
emqx_ctl:register_command(cmd2, {?MODULE, cmd2_fun}),
?assertEqual([{?MODULE, cmd1_fun}], emqx_ctl:lookup_command(cmd1)),
?assertEqual([{?MODULE, cmd2_fun}], emqx_ctl:lookup_command(cmd2)),
?assertEqual([{cmd1, ?MODULE, cmd1_fun}, {cmd2, ?MODULE, cmd2_fun}],
emqx_ctl:get_commands()),
emqx_ctl:unregister_command(cmd1),
emqx_ctl:unregister_command(cmd2),
ct:sleep(100),
?assertEqual([], emqx_ctl:lookup_command(cmd1)),
?assertEqual([], emqx_ctl:lookup_command(cmd2)),
?assertEqual([], emqx_ctl:get_commands())
end).
t_run_commands(_) ->
with_ctl_server(
fun(_CtlSrv) ->
?assertEqual({error, cmd_not_found}, emqx_ctl:run_command(["cmd", "arg"])),
emqx_ctl:register_command(cmd1, {?MODULE, cmd1_fun}),
emqx_ctl:register_command(cmd2, {?MODULE, cmd2_fun}),
ok = emqx_ctl:run_command(["cmd1", "arg"]),
{error, badarg} = emqx_ctl:run_command(["cmd1", "badarg"]),
ok = emqx_ctl:run_command(["cmd2", "arg1", "arg2"]),
{error, badarg} = emqx_ctl:run_command(["cmd2", "arg1", "badarg"])
end).
t_print(_) ->
ok = emqx_ctl:print("help"),
ok = emqx_ctl:print("~s", [help]),
% - check the output of the usage
mock_print(),
?assertEqual("help", emqx_ctl:print("help")),
?assertEqual("help", emqx_ctl:print("~s", [help])),
unmock_print().
t_usage(_) ->
CmdParams1 = "emqx_cmd_1 param1 param2",
CmdDescr1 = "emqx_cmd_1 is a test command means nothing",
Output1 = "emqx_cmd_1 param1 param2 # emqx_cmd_1 is a test command means nothing\n",
% - usage/1,2 should return ok
ok = emqx_ctl:usage([{CmdParams1, CmdDescr1}, {CmdParams1, CmdDescr1}]),
ok = emqx_ctl:usage(CmdParams1, CmdDescr1),
% - check the output of the usage
mock_print(),
?assertEqual(Output1, emqx_ctl:usage(CmdParams1, CmdDescr1)),
?assertEqual([Output1, Output1], emqx_ctl:usage([{CmdParams1, CmdDescr1}, {CmdParams1, CmdDescr1}])),
% - for the commands or descriptions have multi-lines
CmdParams2 = "emqx_cmd_2 param1 param2",
CmdDescr2 = "emqx_cmd_2 is a test command\nmeans nothing",
Output2 = "emqx_cmd_2 param1 param2 # emqx_cmd_2 is a test command\n"
" ""# means nothing\n",
?assertEqual(Output2, emqx_ctl:usage(CmdParams2, CmdDescr2)),
?assertEqual([Output2, Output2], emqx_ctl:usage([{CmdParams2, CmdDescr2}, {CmdParams2, CmdDescr2}])),
unmock_print().
t_unexpected(_) ->
with_ctl_server(
fun(CtlSrv) ->
ignored = gen_server:call(CtlSrv, unexpected_call),
ok = gen_server:cast(CtlSrv, unexpected_cast),
CtlSrv ! unexpected_info,
?assert(is_process_alive(CtlSrv))
end).
%%--------------------------------------------------------------------
%% Cmds for test
%%--------------------------------------------------------------------
cmd1_fun(["arg"]) -> ok;
cmd1_fun(["badarg"]) -> error(badarg).
cmd2_fun(["arg1", "arg2"]) -> ok;
cmd2_fun(["arg1", "badarg"]) -> error(badarg).
with_ctl_server(Fun) ->
{ok, Pid} = emqx_ctl:start_link(),
_ = Fun(Pid),
ok = emqx_ctl:stop().
mock_print() ->
%% proxy usage/1,2 and print/1,2 to format_xx/1,2 funcs
meck:new(emqx_ctl, [non_strict, passthrough]),
meck:expect(emqx_ctl, print, fun(Arg) -> emqx_ctl:format(Arg) end),
meck:expect(emqx_ctl, print, fun(Msg, Arg) -> emqx_ctl:format(Msg, Arg) end),
meck:expect(emqx_ctl, usage, fun(Usages) -> emqx_ctl:format_usage(Usages) end),
meck:expect(emqx_ctl, usage, fun(CmdParams, CmdDescr) -> emqx_ctl:format_usage(CmdParams, CmdDescr) end).
unmock_print() ->
meck:unload(emqx_ctl). | test/emqx_ctl_SUITE.erl | 0.516595 | 0.424412 | emqx_ctl_SUITE.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @copyright 2009 <NAME>
%% Date: 2009-10-02
%% @doc OAuth.
%% Copyright 2009 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(atom_convert).
-author("<NAME> <<EMAIL>>").
-export([resource_to_atom/2,
atom_to_resource/1
]).
-include_lib("xmerl/include/xmerl.hrl").
-include_lib("zotonic.hrl").
-define(ATOM_NS, 'http://www.w3.org/2005/Atom').
%% @doc Export a resource to Atom XML.
%% @spec resource_to_atom(rsc_export(), #context{}) -> string()
resource_to_atom(RscExport, Context) ->
Rsc = proplists:get_value(rsc, RscExport),
Content0 = [
{id, [binary_to_list(proplists:get_value(uri, RscExport))]},
{title, [{type, "text"}], [binary_to_list(z_trans:trans(proplists:get_value(title, Rsc), Context))]},
{published, [z_convert:to_isotime(proplists:get_value(publication_start, Rsc))]},
{updated, [z_convert:to_isotime(proplists:get_value(modified, Rsc))]}
],
Content1 = case empty(Body = proplists:get_value(body, Rsc)) of
true ->
Content0;
false ->
Content0 ++ [{content, [{type, "html"}], [binary_to_list(z_trans:trans(Body, Context))]}]
end,
Content2 = case empty(Summary = proplists:get_value(summary, Rsc)) of
true ->
Content1;
false ->
Content1 ++ [{summary, [{type, "text"}], [binary_to_list(z_trans:trans(Summary, Context))]}]
end,
Content3 = Content2 ++ author_element(RscExport),
RootElem = #xmlElement{name=entry,
namespace=#xmlNamespace{default=?ATOM_NS},
attributes=[#xmlAttribute{name=xmlns, value=?ATOM_NS}],
content=Content3},
lists:flatten(xmerl:export_simple([RootElem], xmerl_xml)).
%% @doc Construct the Atom author element.
%% @spec author_element(rsc_export()) -> [#xmlElement{}]
author_element(Export) ->
case proplists:get_value(edges, Export) of
X when X =:= undefined orelse X =:= [] ->
[];
Edges ->
[#xmlElement{name=author, content=[
#xmlElement{name=name, content=[#xmlText{value=proplists:get_value(object_title, E)}]},
#xmlElement{name=uri, content=[#xmlText{value=proplists:get_value(object_uri, E)}]}]}
|| E <- filter_edges(Edges, <<"author">>)]
end.
%% @doc Given a list of edges, filter out the ones which have the given predicate name.
%% @spec filter_edges([edge()], atom()) -> [edge()]
filter_edges(Edges, PredicateName) ->
lists:filter(fun(X) -> proplists:get_value(predicate_name, X) == PredicateName end, Edges).
%% @doc Export a resource to Atom XML.
%% @spec atom_to_resource(string()) -> rsc_export()
atom_to_resource(Xml) when is_binary(Xml) ->
atom_to_resource(binary_to_list(Xml));
atom_to_resource(Xml) ->
{RootElem,_} = xmerl_scan:string(Xml),
%% Atom required elements
RscUri = case xmerl_xpath:string("/entry/id", RootElem) of
[] -> undefined;
[#xmlElement{content=Uri}] ->
list_to_binary(collapse_xmltext(Uri))
end,
RscProps1 = case xmerl_xpath:string("/entry/title", RootElem) of
[] -> [{title, <<>>}];
[Title] ->
[{title, get_xmltext(Title, true)}]
end,
RscProps2 = case xmerl_xpath:string("/entry/updated", RootElem) of
[] -> RscProps1;
[#xmlElement{content=Updated}] ->
RscProps1 ++ [{modified, z_convert:to_datetime(collapse_xmltext(Updated))}]
end,
RscProps3 = case xmerl_xpath:string("/entry/published", RootElem) of
[] -> RscProps2;
[#xmlElement{content=Published}] ->
RscProps2 ++ [{publication_start, z_convert:to_datetime(collapse_xmltext(Published))}]
end,
RscProps4 = case xmerl_xpath:string("/entry/summary", RootElem) of
[] -> RscProps3;
[Summary] ->
RscProps3 ++ [{summary, get_xmltext(Summary, true)}]
end,
RscProps5 = case xmerl_xpath:string("/entry/content", RootElem) of
[] -> RscProps4;
[Body] ->
RscProps4 ++ [{body, get_xmltext(Body, false)}]
end,
%% Edges
Edges = [],
Edges1 = Edges ++ find_author(RootElem),
Edges2 = Edges1 ++ find_depiction(RootElem),
%% Medium
Medium = case xmerl_xpath:string("/entry/link[@rel=\"enclosure\"]", RootElem) of
[] -> undefined;
[Enc] ->
[{mime, xml_attrib(type, Enc)},
{url, xml_attrib(href, Enc)}]
end,
%% Combine all into rsc_export() structure
Import = [{uri, RscUri},
{rsc, RscProps5},
{medium, Medium},
{edges, Edges2}
],
lists:filter(fun({_,L}) -> not(L == []) end, Import).
%% @doc Given an Atom entry, get a list of all the authors formatted as author edges.
%% @spec find_author(#xmlElement{}) -> [edge()]
find_author(Elem) ->
case xmerl_xpath:string("/entry/author", Elem) of
[] -> []; % no author found
Authors ->
lists:map(fun(A) ->
Name = case xmerl_xpath:string("/author/name", A) of
[] -> <<>>;
[#xmlElement{content=[#xmlText{value=N}]}] ->
list_to_binary(N)
end,
Uri = case xmerl_xpath:string("/author/uri", A) of
[] -> <<>>;
[#xmlElement{content=[#xmlText{value=U}]}] ->
list_to_binary(U)
end,
[{predicate_name, <<"author">>},
{object_uri, Uri},
{object_title, Name}]
end, Authors)
end.
%% @doc Find the enclosure and store as depiction edge.
%% @spec find_depiction(#xmlElement{}) -> [edge()]
find_depiction(Elem) ->
case xmerl_xpath:string("/entry/link[@rel=\"enclosure\"]", Elem) of
[] -> []; % no depiction found
[Enclosure|_] ->
[ [{predicate_name, <<"depiction">>},
{object_uri, xml_attrib(href, Enclosure)},
{object_title, xml_attrib(title, Enclosure)}
] ]
end.
%% @doc Given an XML element, get the value of an attribute.
%% @spec xml_attrib(atom(), #xmlElement{}) -> binary() | undefined
xml_attrib(Name, #xmlElement{attributes=Attrs}) ->
case lists:filter(fun(#xmlAttribute{name=Nm}) -> Nm =:= Name end, Attrs) of
[] -> undefined;
[#xmlAttribute{value=Value}|_] ->
list_to_binary(Value)
end.
%% @doc Given a list of XML test, implode it into one list.
%% @spec collapse_xmltext([#xmlText{}]) -> string()
collapse_xmltext(Content) ->
lists:flatten([X#xmlText.value || X <- Content]).
%% @doc Given an element, get its XML text. If "strip" attribute is
%% set, text is stripped of (x)html constructs if type attribute is
%% html or xhtml.
get_xmltext(Element=#xmlElement{content=Content}, Strip) ->
Text = collapse_xmltext(Content),
Text2 = case Strip of
false -> Text;
true ->
case xml_attrib(type, Element) of
B when B =:= <<"html">> orelse B =:= <<"xhtml">> ->
%% Strip tags
z_html:strip(Text);
B2 when B2 =:= undefined orelse B2 =:= <<"text">> ->
%% Do not strip.
Text
end
end,
z_convert:to_binary(Text2).
empty(<<>>) -> true;
empty([]) -> true;
empty(undefined) -> true;
empty(_) -> false. | modules/mod_atom/atom_convert.erl | 0.525856 | 0.410077 | atom_convert.erl | starcoder |
%% @doc Module containing available functions used as aggregates, filtering or any predifined function.
-module(functions).
-export([max/1, min/1, mean/1, median/1, send_trigger/2, pred_filter_above/1, pred_filter_under/1, pred_filter_above_under/2]).
%%% AGGREGATES
%% @doc Gives the maximum of items contained the list 'Values'.
%% Items in the list 'Values' are tuples {Value, Timestamp}.
%% @spec max(Values::list({float(), float()})) -> Result::float()
max(Values) ->
Only_values_list = only_values(Values),
lists:max(Only_values_list).
%% @doc Gives the minimum of items contained the list 'Values'.
%% Items in the list 'Values' are tuples {Value, Timestamp}.
%% @spec min(Values::list({float(), float()})) -> Result::float()
min(Values) ->
Only_values_list = only_values(Values),
lists:min(Only_values_list).
%% @doc Gives the mean of items contained the list 'Values'.
%% Items in the list 'Values' are tuples {Value, Timestamp}.
%% @spec mean(Values::list({float(), float()})) -> Result::float()
mean(Values) ->
Only_values_list = only_values(Values),
Item = lists:nth(1, Only_values_list),
case Item of
[_|_] ->
mean_aux(Only_values_list);
_ ->
Sum = lists:sum(Only_values_list),
Length = length(Only_values_list),
Sum/Length
end.
mean_aux([]) ->
0;
mean_aux(Only_values_list) ->
mean_aux(Only_values_list, [0,0,0], 0).
mean_aux([], AccList, Acc) ->
Div = division(Acc),
lists:map(Div, AccList);
mean_aux([Head|Rest], [Acc1,Acc2,Acc3], Acc) ->
[A,B,C] = Head,
mean_aux(Rest, [Acc1+A, Acc2+B, Acc3+C], Acc+1).
division(Divisor) ->
fun(Elem) ->
Elem / Divisor
end.
%% @doc Gives the median value of the first 'Amount' items of the list 'Values'.
%% Items in the list 'Values' are tuples {Value, Timestamp}.
%% @spec median(Values::list({float(), float()})) -> Result::float()
median(Values) ->
Sorted_list = order(Values),
Length = length(Sorted_list),
if
Length rem 2 == 0 ->
N = ceil(Length/2),
{{N1,_}, {N2,_}} = {lists:nth(N, Sorted_list), lists:nth(N+1, Sorted_list)},
(N1+N2)/2;
true ->
N = ceil(Length/2),
{Median,_} = lists:nth(N, Sorted_list),
Median
end.
%%% TRIGGER
%% @doc Returns a function that sends trigger message to process with pid 'Destination_pid'
%% @spec send_trigger(Sender_pid::integer(), Destination_pid::integer()) -> Fun
send_trigger(Sender_pid, Destination_pid) ->
fun() ->
Destination_pid ! {trigger, Sender_pid},
ok
end.
%%% PREDICATES
%% @doc Predicate used to filter values above a certain threshold ('Upperbound').
%% @spec pred_filter_above(Upperbound::float()) -> Function::fun((Value) -> boolean())
pred_filter_above(Upperbound) ->
fun(Value) ->
case Value of
{V1, V2, V3} -> B1 = Upperbound >= V1,
B2 = Upperbound >= V2,
B3 = Upperbound >= V3,
B1 and B2 and B3;
_ -> Upperbound >= Value
end
end.
%% @doc Predicate used to filter values under a certain threshold ('Lowerbound').
%% @spec pred_filter_under(Lowerbound::float()) -> Function::fun((Value) -> boolean())
pred_filter_under(Lowerbound) ->
fun(Value) ->
case Value of
{V1, V2, V3} -> B1 = Lowerbound =< V1,
B2 = Lowerbound =< V2,
B3 = Lowerbound =< V3,
B1 and B2 and B3;
_ -> Lowerbound =< Value
end
end.
%% @doc Predicate used to filter values outside a certain range (outside ['Lowerbound', 'Upperbound']).
%% @spec pred_filter_above_under(Lowerbound::float(), Upperbound::float()) -> Function::fun((Value) -> boolean())
pred_filter_above_under(Lowerbound, Upperbound) ->
fun(Value) ->
case Value of
{V1, V2, V3} -> B1_U = Upperbound >= V1,
B1_L = Lowerbound =< V1,
B2_U = Upperbound >= V2,
B2_L = Lowerbound =< V2,
B3_U = Upperbound >= V3,
B3_L = Lowerbound =< V3,
B1_U and B1_L and B2_U and B2_L and B3_U and B3_L;
_ -> Upper = Upperbound >= Value,
Lower = Lowerbound =< Value,
Upper and Lower
end
end.
%%% AUXILIARY FUNCTIONS
% Orders List of items of the form {Value, Timestamp}
order(List) ->
Compare = fun({A,_}, {B,_}) -> A =< B end,
lists:sort(Compare, List).
% Takes of items {Value, Timestamp} and returns list of Value
only_values(List) ->
Extract = fun({Value, _Timestamp}) -> Value end,
lists:map(Extract, List). | src/functions.erl | 0.525369 | 0.684205 | functions.erl | starcoder |
-module(utils).
-export([
datetime_to_timestamp/1,
string_format/2,
get_legacy_rsa_ciphers/0,
formap/2,
map_to_sequential_list/1,
sequential_list_to_map/1,
float_to_string/1,
float_to_string/2,
int_to_string/1,
string_to_float/1,
emptyloop/1,
reinit_mnesia_cluster/0
]).
-export([bin_to_hex/1]).
%% ASCII table NUM and CHAR start (in decimal).
%% In the case of the CHAR start, we assume that it will be CHAR_START + 10 at least. (so in this case the start would be 97).
-define(CHAR_START, 87).
-define(NUM_START, 48).
%% Converts a binary() to a hex string (lowercase).
bin_to_hex(Binary) when is_binary(Binary) ->
to_hex(binary_to_list(Binary)).
to_hex(List) ->
HexList = lists:map(fun decimal_to_hex/1, List),
lists:flatten(HexList).
decimal_to_hex(DecimalChar) ->
<<MostSignificantNibble:4, LeastSignificantNibble:4>> =
<<DecimalChar>>,
[
to_hex_char(MostSignificantNibble),
to_hex_char(LeastSignificantNibble)
].
to_hex_char(Nibble) when Nibble =< 9 ->
Nibble + (?NUM_START);
to_hex_char(Nibble) ->
Nibble + (?CHAR_START).
%% Reference: https://stackoverflow.com/questions/18116628/how-to-convert-gregorian-date-in-seconds-to-unix-timestamp-in-php
datetime_to_timestamp(DateTime) ->
calendar:datetime_to_gregorian_seconds(DateTime) -
62167219200.
%% Like io:format except it returns the evaluated string rather than write it to standard output. Returns a string
%% Example: string_format("2 + 2 = ~p", [2+2])
string_format(Pattern, Values) ->
lists:flatten(io_lib:format(Pattern, Values)).
int_to_string(Value) ->
binary_to_list(integer_to_binary(Value)).
float_to_string(Value) ->
float_to_string(Value, 8).
float_to_string(Value, Decimals) ->
binary_to_list(float_to_binary(Value, [{decimals, Decimals}, compact])).
string_to_float(Value) when is_float(Value) ->
Value;
string_to_float(Value) when is_binary(Value) ->
case Value of
<<"">> -> 0.0;
_ -> binary_to_float(Value)
end;
string_to_float(Value) ->
case Value of
"" -> 0.0;
_ -> binary_to_float(list_to_binary(Value))
end.
%% For security reasons RSA key exchange cipher suites are no longer supported by default.
%% Reference: https://erlang.org/doc/apps/ssl/standards_compliance.html
get_legacy_rsa_ciphers() ->
RSAKex = ssl:filter_cipher_suites(ssl:cipher_suites(all, 'tlsv1.2'), [
{key_exchange, fun
(rsa) -> true;
(_) -> false
end}
]),
Default = ssl:cipher_suites(default, 'tlsv1.2'),
ssl:append_cipher_suites(RSAKex, Default).
%% converts the map to a sequential list ["key", "value"]
map_to_sequential_list(Map) when is_map(Map) ->
ListOfTuples = maps:to_list(Map),
ListOfLists = lists:map(fun({K, V}) -> [K, V] end, ListOfTuples),
lists:append(ListOfLists).
%% converts a list ["key1","value1", "key2", "value2"] to a map #{"key1"=>"value1"}
sequential_list_to_map(List) when is_list(List) andalso List == [] ->
#{};
sequential_list_to_map(List) when is_list(List) ->
Len = length(List),
generate_map(Len, List, #{}).
generate_map(Counter, List, Map) ->
Init = Counter - 1,
Sublist = lists:sublist(List, Init, Counter),
Result = #{lists:nth(1, Sublist) => lists:nth(2, Sublist)},
AccMap = maps:merge(Result, Map),
TempCounter = Counter - 2,
if
TempCounter > 0 -> generate_map(TempCounter, lists:sublist(List, TempCounter), AccMap);
true -> AccMap
end.
%% foreach style to maps
formap(Func, Iterator) ->
formapnext(Func, maps:next(Iterator)).
formapnext(_, NextResult) when NextResult == none ->
ok;
formapnext(Func, NextResult) ->
{K, V, NextIterator} = NextResult,
Func(K, V),
formap(Func, NextIterator).
% A empty blocking loop until the rule passes
emptyloop(RuleFunc) ->
emptyloop(RuleFunc, false).
emptyloop(_, Result) when Result == true ->
ok;
emptyloop(RuleFunc, _) ->
Result = RuleFunc(),
emptyloop(RuleFunc, Result).
%% @doc
%% Erases all mnesia replicas and start it anew.
%% Use it only after the network partition has been resolved and all nodes are reachable.
%% @end
reinit_mnesia_cluster() ->
rpc:multicall(mnesia, stop, []),
AllNodes = [node() | nodes()],
mnesia:delete_schema(AllNodes),
mnesia:create_schema(AllNodes),
rpc:multicall(mnesia, start, []). | src/utils.erl | 0.511717 | 0.610918 | utils.erl | starcoder |
%% Copyright (c) 2012, <NAME> <<EMAIL>>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
%% @doc Event filter implementation.
%%
%% An event query is constructed using the built in operators exported from
%% this module. The filtering operators are used to specify which events
%% should be included in the output of the query. The default output action
%% is to copy all events matching the input filters associated with a query
%% to the output. This makes it possible to construct and compose multiple
%% queries at runtime.
%%
%% === Examples of built in filters ===
%% ```
%% %% Select all events where 'a' exists and is greater than 0.
%% gr_lc:gt(a, 0).
%% %% Select all events where 'a' exists and is equal to 0.
%% gr_lc:eq(a, 0).
%% %% Select all events where 'a' exists and is less than 0.
%% gr_lc:lt(a, 0).
%% %% Select all events where 'a' exists and is anything.
%% gr_lc:wc(a).
%%
%% %% Select no input events. Used as black hole query.
%% gr_lc:null(false).
%% %% Select all input events. Used as passthrough query.
%% gr_lc:null(true).
%% '''
%%
%% === Examples of combining filters ===
%% ```
%% %% Select all events where both 'a' and 'b' exists and are greater than 0.
%% gr_lc:all([gr_lc:gt(a, 0), gr_lc:gt(b, 0)]).
%% %% Select all events where 'a' or 'b' exists and are greater than 0.
%% gr_lc:any([gr_lc:gt(a, 0), gr_lc:gt(b, 0)]).
%% '''
%%
%% === Handling output events ===
%%
%% Once a query has been composed it is possible to override the output action
%% with an erlang function. The function will be applied to each output event
%% from the query. The return value from the function will be ignored.
%%
%% ```
%% %% Write all input events as info reports to the error logger.
%% gr_lc:with(gr_lc:null(true), fun(E) ->
%% error_logger:info_report(gr_e:pairs(E)) end).
%% '''
%%
-module(gr_lc).
-export([
compile/2,
compile/3,
compile/4,
handle/2,
get/2,
delete/1,
reset_counters/1,
reset_counters/2
]).
-export([
lt/2, lte/2,
eq/2,
gt/2, gte/2,
wc/1,
nf/1
]).
-export([
all/1,
any/1,
null/1,
with/2
]).
-export([
input/1,
output/1,
filter/1,
union/1
]).
-record(module, {
'query' :: term(),
tables :: [{atom(), atom()}],
qtree :: term(),
store :: term()
}).
-spec lt(atom(), term()) -> gr_lc_ops:op().
lt(Key, Term) ->
gr_lc_ops:lt(Key, Term).
-spec lte(atom(), term()) -> gr_lc_ops:op().
lte(Key, Term) ->
gr_lc_ops:lte(Key, Term).
-spec eq(atom(), term()) -> gr_lc_ops:op().
eq(Key, Term) ->
gr_lc_ops:eq(Key, Term).
-spec gt(atom(), term()) -> gr_lc_ops:op().
gt(Key, Term) ->
gr_lc_ops:gt(Key, Term).
-spec gte(atom(), term()) -> gr_lc_ops:op().
gte(Key, Term) ->
gr_lc_ops:gte(Key, Term).
-spec wc(atom()) -> gr_lc_ops:op().
wc(Key) ->
gr_lc_ops:wc(Key).
-spec nf(atom()) -> gr_lc_ops:op().
nf(Key) ->
gr_lc_ops:nf(Key).
%% @doc Filter the input using multiple filters.
%%
%% For an input to be considered valid output the all filters specified
%% in the list must hold for the input event. The list is expected to
%% be a non-empty list. If the list of filters is an empty list a `badarg'
%% error will be thrown.
-spec all([gr_lc_ops:op()]) -> gr_lc_ops:op().
all(Filters) ->
gr_lc_ops:all(Filters).
%% @doc Filter the input using one of multiple filters.
%%
%% For an input to be considered valid output on of the filters specified
%% in the list must hold for the input event. The list is expected to be
%% a non-empty list. If the list of filters is an empty list a `badarg'
%% error will be thrown.
-spec any([gr_lc_ops:op()]) -> gr_lc_ops:op().
any(Filters) ->
gr_lc_ops:any(Filters).
%% @doc Always return `true' or `false'.
-spec null(boolean()) -> gr_lc_ops:op().
null(Result) ->
gr_lc_ops:null(Result).
%% @doc Apply a function to each output of a query.
%%
%% Updating the output action of a query finalizes it. Attempting
%% to use a finalized query to construct a new query will result
%% in a `badarg' error.
-spec with(gr_lc_ops:op(), fun((gr_e:event()) -> term())) -> gr_lc_ops:op().
with(Query, Action) ->
gr_lc_ops:with(Query, Action).
%% @doc Return a union of multiple queries.
%%
%% The union of multiple queries is the equivalent of executing multiple
%% queries separately on the same input event. The advantage is that filter
%% conditions that are common to all or some of the queries only need to
%% be tested once.
%%
%% All queries are expected to be valid and have an output action other
%% than the default which is `output'. If these expectations don't hold
%% a `badarg' error will be thrown.
-spec union([gr_lc_ops:op()]) -> gr_lc_ops:op().
union(Queries) ->
gr_lc_ops:union(Queries).
%% @doc Compile a query to a module.
%%
%% On success the module representing the query is returned. The module and
%% data associated with the query must be released using the {@link delete/1}
%% function. The name of the query module is expected to be unique.
%% The counters are reset by default, unless Reset is set to false
-spec compile(atom(), gr_lc_ops:op() | [gr_lc_ops:op()]) -> {ok, atom()}.
compile(Module, Query) ->
compile(Module, Query, undefined, true).
-spec compile(atom(), gr_lc_ops:op() | [gr_lc_ops:op()], boolean()) -> {ok, atom()}.
compile(Module, Query, Reset) when is_boolean(Reset) ->
compile(Module, Query, undefined, Reset);
compile(Module, Query, undefined) ->
compile(Module, Query, undefined, true);
compile(Module, Query, Store) when is_list(Store) ->
compile(Module, Query, Store, true).
compile(Module, Query, Store, Reset) ->
{ok, ModuleData} = module_data(Module, Query, Store),
case gr_lc_code:compile(Module, ModuleData) of
{ok, Module} when Reset ->
reset_counters(Module),
{ok, Module};
{ok, Module} ->
{ok, Module}
end.
%% @doc Handle an event using a compiled query.
%%
%% The input event is expected to have been returned from {@link gr_e:make/2}.
-spec handle(atom(), list({atom(), term()}) | gr_e:event()) -> ok.
handle(Module, Event) when is_list(Event) ->
Module:handle(gr_e:make(Event, [list]));
handle(Module, Event) ->
Module:handle(Event).
get(Module, Key) ->
Module:get(Key).
%% @doc The number of input events for this query module.
-spec input(atom()) -> non_neg_integer().
input(Module) ->
Module:info(input).
%% @doc The number of output events for this query module.
-spec output(atom()) -> non_neg_integer().
output(Module) ->
Module:info(output).
%% @doc The number of filtered events for this query module.
-spec filter(atom()) -> non_neg_integer().
filter(Module) ->
Module:info(filter).
%% @doc Release a compiled query.
%%
%% This releases all resources allocated by a compiled query. The query name
%% is expected to be associated with an existing query module. Calling this
%% function will shutdown all relevant processes and purge/delete the module.
-spec delete(atom()) -> ok.
delete(Module) ->
Params = params_name(Module),
Counts = counts_name(Module),
ManageParams = manage_params_name(Module),
ManageCounts = manage_counts_name(Module),
_ = [ begin
ok = supervisor:terminate_child(Sup, Name),
ok = supervisor:delete_child(Sup, Name)
end || {Sup, Name} <-
[{gr_manager_sup, ManageParams}, {gr_manager_sup, ManageCounts},
{gr_param_sup, Params}, {gr_counter_sup, Counts}]
],
code:soft_purge(Module),
code:delete(Module),
ok.
%% @doc Reset all counters
%%
%% This resets all the counters associated with a module
-spec reset_counters(atom()) -> ok.
reset_counters(Module) ->
Module:reset_counters(all).
%% @doc Reset a specific counter
%%
%% This resets a specific counter associated with a module
-spec reset_counters(atom(), atom()) -> ok.
reset_counters(Module, Counter) ->
Module:reset_counters(Counter).
%% @private Map a query to a module data term.
-spec module_data(atom(), term(), term()) -> {ok, #module{}}.
module_data(Module, Query, Store) ->
%% terms in the query which are not valid arguments to the
%% erl_syntax:abstract/1 functions are stored in ETS.
%% the terms are only looked up once they are necessary to
%% continue evaluation of the query.
%% query counters are stored in a shared ETS table. this should
%% be an optional feature. enabled by defaults to simplify tests.
%% the abstract_tables/1 function expects a list of name-atom pairs.
%% tables are referred to by name in the generated code. the table/1
%% function maps names to registered processes response for those tables.
Tables = module_tables(Module),
Query2 = gr_lc_lib:reduce(Query),
{ok, #module{'query'=Query, tables=Tables, qtree=Query2, store=Store}}.
%% @private Create a data managed supervised process for params, counter tables
module_tables(Module) ->
Params = params_name(Module),
Counts = counts_name(Module),
ManageParams = manage_params_name(Module),
ManageCounts = manage_counts_name(Module),
Counters = [{input,0}, {filter,0}, {output,0}],
_ = supervisor:start_child(gr_param_sup,
{Params, {gr_param, start_link, [Params]},
transient, brutal_kill, worker, [Params]}),
_ = supervisor:start_child(gr_counter_sup,
{Counts, {gr_counter, start_link, [Counts]},
transient, brutal_kill, worker, [Counts]}),
_ = supervisor:start_child(gr_manager_sup,
{ManageParams, {gr_manager, start_link, [ManageParams, Params, []]},
transient, brutal_kill, worker, [ManageParams]}),
_ = supervisor:start_child(gr_manager_sup, {ManageCounts,
{gr_manager, start_link, [ManageCounts, Counts, Counters]},
transient, brutal_kill, worker, [ManageCounts]}),
[{params,Params}, {counters, Counts}].
reg_name(Module, Name) ->
list_to_atom("gr_" ++ atom_to_list(Module) ++ Name).
params_name(Module) -> reg_name(Module, "_params").
counts_name(Module) -> reg_name(Module, "_counters").
manage_params_name(Module) -> reg_name(Module, "_params_mgr").
manage_counts_name(Module) -> reg_name(Module, "_counters_mgr").
%% @todo Move comment.
%% @private Map a query to a simplified query tree term.
%%
%% The simplified query tree is used to combine multiple queries into one
%% query module. The goal of this is to reduce the filtering and dispatch
%% overhead when multiple concurrent queries are executed.
%%
%% A fixed selection condition may be used to specify a property that an event
%% must have in order to be considered part of the input stream for a query.
%%
%% For the sake of simplicity it is only possible to define selection
%% conditions using the fields present in the context and identifiers
%% of an event. The fields in the context are bound to the reserved
%% names:
%%
%% - '$n': node name
%% - '$a': application name
%% - '$p': process identifier
%% - '$t': timestamp
%%
%%
%% If an event must be selected based on the runtime state of an event handler
%% this must be done in the body of the handler.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
setup_query(Module, Query) ->
setup_query(Module, Query, undefined).
setup_query(Module, Query, Store) ->
?assertNot(erlang:module_loaded(Module)),
?assertEqual({ok, Module}, case (catch compile(Module, Query, Store)) of
{'EXIT',_}=Error -> ?debugFmt("~p", [Error]), Error; Else -> Else end),
?assert(erlang:function_exported(Module, table, 1)),
?assert(erlang:function_exported(Module, handle, 1)),
{compiled, Module}.
events_test_() ->
{foreach,
fun() ->
error_logger:tty(false),
application:start(syntax_tools),
application:start(compiler),
application:start(goldrush)
end,
fun(_) ->
application:stop(goldrush),
application:stop(compiler),
application:stop(syntax_tools),
error_logger:tty(true)
end,
[
{"null query compiles",
fun() ->
{compiled, Mod} = setup_query(testmod1, gr_lc:null(false)),
?assertError(badarg, Mod:table(noexists))
end
},
{"params table exists",
fun() ->
{compiled, Mod} = setup_query(testmod2, gr_lc:null(false)),
?assert(is_atom(Mod:table(params))),
?assertMatch([_|_], gr_param:info(Mod:table(params)))
end
},
{"null query exists",
fun() ->
{compiled, Mod} = setup_query(testmod3, gr_lc:null(false)),
?assert(erlang:function_exported(Mod, info, 1)),
?assertError(badarg, Mod:info(invalid)),
?assertEqual({null, false}, Mod:info('query'))
end
},
{"init counters test",
fun() ->
{compiled, Mod} = setup_query(testmod4, gr_lc:null(false)),
?assertEqual(0, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(0, Mod:info(output))
end
},
{"filtered events test",
fun() ->
%% If no selection condition is specified no inputs can match.
{compiled, Mod} = setup_query(testmod5, gr_lc:null(false)),
gr_lc:handle(Mod, gr_e:make([], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(0, Mod:info(output))
end
},
{"nomatch event test",
fun() ->
%% If a selection condition but no body is specified the event
%% is expected to count as filtered out if the condition does
%% not hold.
{compiled, Mod} = setup_query(testmod6, gr_lc:eq('$n', 'noexists@nohost')),
gr_lc:handle(Mod, gr_e:make([{'$n', 'noexists2@nohost'}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(0, Mod:info(output))
end
},
{"opfilter equal test",
fun() ->
%% If a selection condition but no body is specified the event
%% counts as input to the query, but not as filtered out.
{compiled, Mod} = setup_query(testmod7, gr_lc:eq('$n', 'noexists@nohost')),
gr_lc:handle(Mod, gr_e:make([{'$n', 'noexists@nohost'}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"opfilter wildcard test",
fun() ->
{compiled, Mod} = setup_query(testmod8, gr_lc:wc(a)),
gr_lc:handle(Mod, gr_e:make([{b, 2}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(0, Mod:info(output)),
gr_lc:handle(Mod, gr_e:make([{a, 2}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"opfilter notfound test",
fun() ->
{compiled, Mod} = setup_query(testmod9, gr_lc:nf(a)),
gr_lc:handle(Mod, gr_e:make([{a, 2}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(0, Mod:info(output)),
gr_lc:handle(Mod, gr_e:make([{b, 2}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"opfilter greater than test",
fun() ->
{compiled, Mod} = setup_query(testmod10a, gr_lc:gt(a, 1)),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 0}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"opfilter greater than or equal to test",
fun() ->
{compiled, Mod} = setup_query(testmod10b, gr_lc:gte(a, 1)),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 0}], [list])),
?assertEqual(3, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(2, Mod:info(output))
end
},
{"opfilter less than test",
fun() ->
{compiled, Mod} = setup_query(testmod11a, gr_lc:lt(a, 1)),
gr_lc:handle(Mod, gr_e:make([{'a', 0}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(1, Mod:info(output)),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"opfilter less than or equal to test",
fun() ->
{compiled, Mod} = setup_query(testmod11b, gr_lc:lte(a, 1)),
gr_lc:handle(Mod, gr_e:make([{'a', 0}], [list])),
?assertEqual(1, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(1, Mod:info(output)),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(2, Mod:info(output)),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
?assertEqual(3, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(2, Mod:info(output))
end
},
{"allholds op test",
fun() ->
{compiled, Mod} = setup_query(testmod12,
gr_lc:all([gr_lc:eq(a, 1), gr_lc:eq(b, 2)])),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'b', 1}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 2}], [list])),
?assertEqual(4, Mod:info(input)),
?assertEqual(4, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 1},{'b', 2}], [list])),
?assertEqual(5, Mod:info(input)),
?assertEqual(4, Mod:info(filter)),
?assertEqual(1, Mod:info(output))
end
},
{"anyholds op test",
fun() ->
{compiled, Mod} = setup_query(testmod13,
gr_lc:any([gr_lc:eq(a, 1), gr_lc:eq(b, 2)])),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 2}], [list])),
?assertEqual(4, Mod:info(input)),
?assertEqual(2, Mod:info(filter))
end
},
{"with function test",
fun() ->
Self = self(),
{compiled, Mod} = setup_query(testmod14,
gr_lc:with(gr_lc:eq(a, 1), fun(Event) -> Self ! gr_e:fetch(a, Event) end)),
gr_lc:handle(Mod, gr_e:make([{a,1}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(1, receive Msg -> Msg after 0 -> notcalled end)
end
},
{"with function storage test",
fun() ->
Self = self(),
Store = [{stored, value}],
{compiled, Mod} = setup_query(testmod15,
gr_lc:with(gr_lc:eq(a, 1), fun(Event, EStore) ->
Self ! {gr_e:fetch(a, Event), EStore} end),
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(1, receive {Msg, Store} -> Msg after 0 -> notcalled end)
end
},
{"delete test",
fun() ->
{compiled, Mod} = setup_query(testmod16, gr_lc:null(false)),
?assert(is_atom(Mod:table(params))),
?assertMatch([_|_], gr_param:info(Mod:table(params))),
?assert(is_list(code:which(Mod))),
?assert(is_pid(whereis(params_name(Mod)))),
?assert(is_pid(whereis(counts_name(Mod)))),
?assert(is_pid(whereis(manage_params_name(Mod)))),
?assert(is_pid(whereis(manage_counts_name(Mod)))),
gr_lc:delete(Mod),
?assertEqual(non_existing, code:which(Mod)),
?assertEqual(undefined, whereis(params_name(Mod))),
?assertEqual(undefined, whereis(counts_name(Mod))),
?assertEqual(undefined, whereis(manage_params_name(Mod))),
?assertEqual(undefined, whereis(manage_counts_name(Mod)))
end
},
{"reset counters test",
fun() ->
{compiled, Mod} = setup_query(testmod17,
gr_lc:any([gr_lc:eq(a, 1), gr_lc:eq(b, 2)])),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 2}], [list])),
?assertEqual(4, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
?assertEqual(2, Mod:info(output)),
gr_lc:reset_counters(Mod, input),
?assertEqual(0, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
?assertEqual(2, Mod:info(output)),
gr_lc:reset_counters(Mod, filter),
?assertEqual(0, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(2, Mod:info(output)),
gr_lc:reset_counters(Mod),
?assertEqual(0, Mod:info(input)),
?assertEqual(0, Mod:info(filter)),
?assertEqual(0, Mod:info(output))
end
},
{"ets data recovery test",
fun() ->
Self = self(),
{compiled, Mod} = setup_query(testmod18,
gr_lc:with(gr_lc:eq(a, 1), fun(Event) -> Self ! gr_e:fetch(a, Event) end)),
gr_lc:handle(Mod, gr_e:make([{a,1}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(1, receive Msg -> Msg after 0 -> notcalled end),
?assertEqual(1, length(gr_param:list(Mod:table(params)))),
?assertEqual(3, length(gr_param:list(Mod:table(counters)))),
true = exit(whereis(Mod:table(params)), kill),
true = exit(whereis(Mod:table(counters)), kill),
?assertEqual(1, Mod:info(input)),
gr_lc:handle(Mod, gr_e:make([{'a', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(2, Mod:info(output)),
?assertEqual(1, length(gr_param:list(Mod:table(params)))),
?assertEqual(3, length(gr_counter:list(Mod:table(counters))))
end
},
{"variable storage test",
fun() ->
{compiled, Mod} = setup_query(testmod19,
gr_lc:eq(a, 2), [{stream, time}]),
gr_lc:handle(Mod, gr_e:make([{'a', 2}], [list])),
gr_lc:handle(Mod, gr_e:make([{'b', 1}], [list])),
?assertEqual(2, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{'b', 2}], [list])),
?assertEqual(3, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
?assertEqual({ok, time}, gr_lc:get(Mod, stream)),
?assertEqual({error, undefined}, gr_lc:get(Mod, beam))
end
},
{"with multi function any test",
fun() ->
Self = self(),
Store = [{stored, value}],
G1 = gr_lc:with(gr_lc:eq(a, 1), fun(_Event, EStore) ->
Self ! {a, EStore} end),
G2 = gr_lc:with(gr_lc:eq(b, 2), fun(_Event, EStore) ->
Self ! {b, EStore} end),
{compiled, Mod} = setup_query(testmod20, any([G1, G2]),
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(b, receive {Msg, _Store} -> Msg after 0 -> notcalled end)
end
},
{"with multi function all test",
fun() ->
Self = self(),
Store = [{stored, value}],
G1 = gr_lc:with(gr_lc:eq(a, 1), fun(_Event, EStore) ->
Self ! {a, EStore} end),
G2 = gr_lc:with(gr_lc:eq(b, 2), fun(_Event, EStore) ->
Self ! {b, EStore} end),
G3 = gr_lc:with(gr_lc:eq(c, 3), fun(_Event, EStore) ->
Self ! {c, EStore} end),
{compiled, Mod} = setup_query(testmod21, all([G1, G2, G3]),
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}], [list])),
?assertEqual(0, Mod:info(output)),
?assertEqual(1, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{a,1}, {b, 2}], [list])),
?assertEqual(0, Mod:info(output)),
?assertEqual(2, Mod:info(filter)),
gr_lc:handle(Mod, gr_e:make([{a,1}, {b, 2}, {c, 3}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(b, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(c, receive {Msg, _Store} -> Msg after 0 -> notcalled end)
end
},
{"with multi-function output match test",
fun() ->
Self = self(),
Store = [{stored, value}],
{compiled, Mod} = setup_query(testmod22,
[gr_lc:with(gr_lc:eq(a, 1), fun(Event, _EStore) ->
Self ! {a, gr_e:fetch(a, Event)} end),
gr_lc:with(gr_lc:gt(b, 1), fun(Event, _EStore) ->
Self ! {b, gr_e:fetch(b, Event)} end)],
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}, {b, 1}], [list])),
?assertEqual(1, Mod:info(output)),
?assertEqual(a, receive {a=Msg, _Store} -> Msg after 0 -> notcalled end)
end
},
{"with multi-function output double-match test",
fun() ->
Self = self(),
Store = [{stored, value}],
{compiled, Mod} = setup_query(testmod23,
[gr_lc:with(gr_lc:eq(a, 1), fun(Event, _EStore) ->
Self ! {a, gr_e:fetch(a, Event)} end),
gr_lc:with(gr_lc:eq(b, 1), fun(Event, _EStore) ->
Self ! {b, gr_e:fetch(b, Event)} end)],
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}, {b, 1}], [list])),
?assertEqual(2, Mod:info(output)),
?assertEqual(a, receive {a=Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(b, receive {b=Msg, _Store} -> Msg after 0 -> notcalled end)
end
},
{"with multi function complex match test",
fun() ->
Self = self(),
Store = [{stored, value}],
G1 = gr_lc:with(gr_lc:gt(r, 0.1), fun(_Event, EStore) ->
Self ! {a, EStore} end),
G2 = gr_lc:with(gr_lc:all([gr_lc:eq(a, 1), gr_lc:gt(r, 0.5)]), fun(_Event, EStore) ->
Self ! {b, EStore} end),
G3 = gr_lc:with(gr_lc:all([gr_lc:eq(a, 1), gr_lc:eq(b, 2), gr_lc:gt(r, 0.6)]), fun(_Event, EStore) ->
Self ! {c, EStore} end),
{compiled, Mod} = setup_query(testmod24, [G1, G2, G3],
Store),
gr_lc:handle(Mod, gr_e:make([{a,1}, {r, 0.7}, {b, 3}], [list])),
?assertEqual(2, Mod:info(output)),
?assertEqual(1, Mod:info(input)),
?assertEqual(1, Mod:info(filter)),
?assertEqual(b, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
%
gr_lc:handle(Mod, gr_e:make([{a,1}, {r, 0.6}], [list])),
?assertEqual(4, Mod:info(output)),
?assertEqual(2, Mod:info(input)),
?assertEqual(2, Mod:info(filter)),
?assertEqual(b, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
%
gr_lc:handle(Mod, gr_e:make([{a,2}, {r, 0.7}, {b, 3}], [list])),
?assertEqual(5, Mod:info(output)),
?assertEqual(3, Mod:info(input)),
?assertEqual(4, Mod:info(filter)),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
gr_lc:handle(Mod, gr_e:make([{a,1}, {r, 0.7}, {b, 2}], [list])),
?assertEqual(8, Mod:info(output)),
?assertEqual(4, Mod:info(input)),
?assertEqual(4, Mod:info(filter)),
?assertEqual(c, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(b, receive {Msg, _Store} -> Msg after 0 -> notcalled end),
?assertEqual(a, receive {Msg, _Store} -> Msg after 0 -> notcalled end)
end
}
]
}.
union_error_test() ->
?assertError(badarg, gr_lc:union([gr_lc:eq(a, 1)])),
done.
-endif. | src/gr_lc.erl | 0.724091 | 0.460895 | gr_lc.erl | starcoder |
%%%===================================================================
%% @author <NAME>
%% @copyright 2017 Pundun Labs AB
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
%% implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% -------------------------------------------------------------------
%% @doc
%% Module Description:
%% @end
%%%===================================================================
-module(wikix_bench).
-export([test/1]).
%% Spawned functions
-export([server/3,
client/3,
fire_stop/1]).
-define(SPAWNS, 8).
-define(CHUNK, 5000).
-define(DURATION, 60000). %%1 Min
%%%===================================================================
%%% API functions
%%%===================================================================
test(Options) when is_map(Options) ->
test_load(Options).
-spec test_load(Options :: map()) ->
ok.
test_load(Options) ->
Connection = maps:get(connection, Options, 'pundun97ae64@sitting'),
DataModel = maps:get(data_model, Options, kv),
N = maps:get(clients, Options, 8),
Duration = maps:get(duration, Options, ?DURATION),
io:format("~p:~p(~p).~n", [?MODULE, ?FUNCTION_NAME, Connection]),
{ok, Session} = connect(Connection),
rpc(Session, delete_table, ["kv"]),
rpc(Session, delete_table, ["array"]),
rpc(Session, delete_table, ["map"]),
{ok, Tab} = create_table(Session, DataModel),
ok = disconnect(Session),
Report = #{successful_writes => 0,
unsuccessful_writes => 0,
successful_reads => 0,
unsuccessful_reads => 0},
SPid = spawn(?MODULE, server, [Duration, N, Report]),
_MonitorRef = erlang:monitor(process, SPid),
register(my_server, SPid),
[spawn(?MODULE, client, [SPid, Connection, Tab] ) || _ <- lists:seq(1,N)],
{ok, SPid}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
create_table(Session, ATab) ->
Tab = atom_to_list(ATab),
TableOptions = [{type, rocksdb},
{num_of_shards, 1},
{distributed, false},
{hashing_method, uniform},
{data_model, ATab}],
io:format("[~p:~p] creating with options: ~p~n",[?MODULE, ?LINE, TableOptions]),
ok = rpc(Session, create_table, [Tab, ["title"], TableOptions]),
{ok, Tab}.
connect(Node) when is_atom(Node) ->
{ok, Node};
connect(Pid) when is_pid(Pid) ->
{ok, Pid};
connect({Host, Port, User, Pass}) ->
pbpc:connect(Host, Port, User, Pass).
disconnect(Node) when is_atom(Node)->
ok;
disconnect(Session) when is_pid(Session) ->
pbpc:disconnect(Session).
fire_stop(Pid) ->
Pid ! stop.
server(Duration, N, Report) ->
Start = os:timestamp(),
timer:apply_after(Duration, ?MODULE, fire_stop, [self()]),
io:format("[~p:~p] Starting at: ~p~n",[?MODULE, ?LINE, calendar:now_to_local_time(Start)]),
server(run, Start, N, Report).
server(_, StartTs, 0, Report) ->
report(StartTs, os:timestamp(), Report);
server(Run, StartTs, N, Report) ->
receive
stop ->
server(stop, StartTs, N, Report);
{write_result, ok} ->
SW = maps:get(successful_writes, Report),
server(Run, StartTs, N, Report#{successful_writes => SW + ?CHUNK});
{write_result, {error, _}} ->
UW = maps:get(unsuccessful_writes, Report),
server(Run, StartTs, N, Report#{unsuccessful_writes => UW + ?CHUNK});
{read_result, ok} ->
SR = maps:get(successful_reads, Report),
server(Run, StartTs, N, Report#{successful_reads => SR + ?CHUNK});
{read_result, {error, _}} ->
UR = maps:get(unsuccessful_reads, Report),
server(Run, StartTs, N, Report#{unsuccessful_reads => UR + ?CHUNK});
{client, Pid, register} ->
erlang:monitor(process, Pid),
server(Run, StartTs, N, Report);
{client, Pid, more} ->
Pid ! {server, Run},
server(Run, StartTs, N, Report);
{'DOWN', _MonitorRef, process, _Object, _Info} ->
server(Run, StartTs, N-1, Report)
end.
report(Start, Stop, #{successful_writes := SW,
unsuccessful_writes := UW,
successful_reads := SR,
unsuccessful_reads := UR}) ->
Time = timer:now_diff(Stop, Start)/1000/1000,
io:format("[~p:~p] It took ~.2f seconds.~n",[?MODULE, ?LINE, Time]),
io:format("[~p:~p] Successful Writes: ~p Failed Writes: ~p ",[?MODULE, ?LINE, SW, UW]),
io:format("Successful Reads: ~p Failed Reads: ~p~n",[SR, UR]),
SWR = trunc(SW / Time),
UWR = trunc(UW / Time),
TWR = trunc((SW+UW) / Time),
SRR = trunc(SR / Time),
URR = trunc(UR / Time),
TRR = trunc((SR+UR) / Time),
io:format("[~p:~p] Success write rate: ~p/s Fail write rate: ~p/s ",[?MODULE, ?LINE, SWR, UWR]),
io:format("Success read rate: ~p/s Fail read rate: ~p/s~n",[SRR, URR]),
io:format("[~p:~p] Total write rate: ~p/s Total read rate: ~p/s~n",[?MODULE, ?LINE, TWR, TRR]),
io:format("[~p:~p] Server stopping..~n",[?MODULE, ?LINE]).
client(SPid, Connection, Tab) ->
SPid ! {client, self(), register},
{ok, Session} = connect(Connection),
client_loop(SPid, Session, Tab).
client_loop(SPid, Session, Tab) ->
%%Start = os:timestamp(),
SPid ! {client, self(), more},
receive
{server, run} ->
ok = load(SPid, Session, Tab),
%%Stop = os:timestamp(),
%%Time = timer:now_diff(Stop, Start)/1000/1000,
%io:format("It took ~.2f seconds to load ~p entries.~n", [Time, ?CHUNK]),
client_loop(SPid, Session, Tab);
{server, stop} ->
ok = disconnect(Session)
end.
load(SPid, Session, Tab) ->
Data =
case Tab of
"kv" ->
[{data(integer),{data(binary),data(map),data(string)}}];
_ ->
[{"field1", data(integer)},
{"field2", data(binary)},
{"field3", data(map)},
{"field4", data(string)}]
end,
Terms = [[Tab, [{"title", make_key(N)}], Data] || N <- lists:seq(1, ?CHUNK)],
write_terms(SPid, Session, Terms),
read_terms(SPid, Session, Terms).
write_terms(SPid, Session, Terms) ->
Res = [rpc(Session, write, Args) || Args <- Terms],
SPid ! {write_result, get_res(lists:usort(Res))},
ok.
read_terms(SPid, Session, Terms) ->
Res = [element(1,rpc(Session, read, [T, K])) || [T, K, _] <- Terms],
SPid ! {read_result, get_res(lists:usort(Res))},
ok.
get_res([ok]) ->
ok;
get_res(_) ->
{error, failed}.
rpc(Node, Fun, Args) when is_atom(Node)->
rpc:call(Node, enterdb, Fun, Args);
rpc(Session, Fun, Args) ->
apply(pbpc, Fun, [Session | Args]).
make_key(N) ->
#{key=>erlang:unique_integer([positive,monotonic]) * N}.
data(map)->
#{procedure => {read_range,#{end_key => [#{name => "map",value => #{type => {map,#{values => #{"a" => #{type => {int,1}},"b" => #{type => {int,1}},"c" => #{type => {int,1}}}}}}},#{name => "id",value => #{type => {string,"same"}}}],limit => 2,start_key => [#{name => "map",value => #{type => {map,#{values => #{"a" => #{type => {int,2}},"b" => #{type => {int,2}},"c" => #{type => {int,2}}}}}}},#{name => "id",value => #{type => {string,"same"}}}],table_name => "pundunpy_test_table"}},transaction_id => 7,version => #{major => 0,minor => 1}};
data(binary) ->
<<10,2,16,1,16,7,34,175,1,34,172,1,10,82,10,12,10,2,105,100,18,6,50,4,115,97,109,101,10,36,10,3,109,97,112,18,29,66,27,10,7,10,1,97,18,2,16,2,10,7,10,1,98,18,2,16,2,10,7,10,1,99,18,2,16,2,18,13,10,6,110,117,109,98,101,114,18,3,50,1,50,18,13,10,4,116,101,120,116,18,5,50,3,84,119,111,10,82,10,12,10,2,105,100,18,6,50,4,115,97,109,101,10,36,10,3,109,97,112,18,29,66,27,10,7,10,1,97,18,2,16,1,10,7,10,1,98,18,2,16,1,10,7,10,1,99,18,2,16,1,18,13,10,6,110,117,109,98,101,114,18,3,50,1,49,18,13,10,4,116,101,120,116,18,5,50,3,79,110,101,18,2,8,1>>;
data(integer) ->
99892;
data(string) ->
"The protocol buffer compiler produces Python output when invoked with the --python_out= command-line flag. The parameter to the --python_out= option is the directory where you want the compiler to write your Python output. The compiler creates a .py file for each .proto file input. The names of the output files are computed by taking the name of the .proto file and making two changes". | apps/wikix/src/wikix_bench.erl | 0.565299 | 0.407628 | wikix_bench.erl | starcoder |
% inspired from 'https://github.com/robertoaloi/eunit_terms/blob/master/eunit_terms.erl'
-module(eunit_jsonreport).
-behaviour(eunit_listener).
-include_lib("eunit/include/eunit.hrl").
-export([start/0, start/1]).
-export([init/1,
handle_begin/3,
handle_end/3,
handle_cancel/3,
terminate/2]).
-export_record_info([testsuite, testcase]).
%% ============================================================================
%% TYPES
%% ============================================================================
-type(chars() :: [char() | any()]). % chars()
%% ============================================================================
%% RECORDS
%% ============================================================================
-record(testcase, {
displayname :: chars(),
description :: chars(),
module :: chars(),
function :: char(),
arity :: integer(),
line :: integer(),
result :: ok | {failed, tuple()} | {aborted, tuple()} | {skipped, tuple()},
time :: integer(),
output :: binary()
}).
-record(testsuite, {
name = <<>> :: binary(),
time = 0 :: integer(),
output = <<>> :: binary(),
succeeded = 0 :: integer(),
failed = 0 :: integer(),
aborted = 0 :: integer(),
skipped = 0 :: integer(),
testcases = [] :: [#testcase{}]
}).
-record(state, {
verbose = false,
dir = ".",
testsuite = #testsuite{}
}).
start() ->
start([]).
start(Options) ->
eunit_listener:start(?MODULE, Options).
init(Options) ->
Dir = proplists:get_value(dir, Options, "."),
SuiteName = proplists:get_value(testsuitename, Options, "testsuite"),
State = #state{verbose = proplists:get_bool(verbose, Options),
dir = Dir,
testsuite = #testsuite{name=SuiteName}},
receive
{start, _Reference} ->
State
end.
-define(record_to_tuplelist(Rec, Ref), lists:zip(record_info(fields, Rec),tl(tuple_to_list(Ref)))).
%because lists:join is recently added to erlang (doesn't exists in 6.4), I pick up the code source
lists_join(_Sep, []) -> [];
lists_join(Sep, [H|T]) -> [H|join_prepend(Sep, T)].
join_prepend(_Sep, []) -> [];
join_prepend(Sep, [H|T]) -> [Sep,H|join_prepend(Sep,T)].
tuples_to_json(L) ->
"{" ++
lists_join(",", lists:map(fun(X)-> tuple_to_json(X) end, L)) ++
"}".
tuple_to_json({Name, Value}) ->
"\""++io_lib:print(Name)++"\":" ++ to_json(Value);
tuple_to_json(X) ->
"\"xxx" ++ io_lib:print(X) ++ "xx\"".
stack_to_json({M, F, A, [{file, FileName}, {line, Line}]}) ->
tuples_to_json([{module, M}, {function, F}, {arity, A}, {file, FileName}, {line, Line}]).
record_to_json(R) ->
["{"] ++ lists_join(",", lists:map(fun(X)-> tuple_to_json(X) end, R)) ++ ["}"].
location_to_json(L) ->
NewL = lists:map(
fun(X) ->
case X of
{value, V} -> {value, "\"" ++ format_string(io_lib:print(V)) ++ "\""};
{expected, E} -> {expected, "\"" ++ format_string(io_lib:print(E)) ++"\"" };
Y -> Y
end
end, L),
tuples_to_json(NewL).
to_json(TS) when is_record(TS, testsuite) ->
Json = record_to_json(?record_to_tuplelist(testsuite, TS)),
%io:format(Json),
Json;
to_json([H|_T]) when is_record(H, testcase) ->
"[" ++ to_json(H) ++
"]";
to_json(TC) when is_record(TC, testcase) ->
L = record_to_json(?record_to_tuplelist(testcase, TC)),
L;
to_json({aborted, {error, Err, StackList}}) ->
"{\"aborted\": {\"error\" : \"" ++ io_lib:print(Err) ++ "\"," ++
"\"stacktrace\" : " ++ ["["] ++ lists_join(",", lists:map(fun(X)-> stack_to_json(X) end, StackList)) ++ ["]"]
++"}}";
to_json({error, {AssertName, AssertStack}, StackList}) ->
"{\"assertion\" :\""++ atom_to_list(AssertName) ++"\", \"location\" :" ++ location_to_json(AssertStack) ++ "," ++
"\"stacktrace\" : " ++ ["["] ++ lists_join(",", lists:map(fun(X)-> stack_to_json(X) end, StackList)) ++ ["]"]
++"}";
to_json(V) when is_bitstring(V) ->
io_lib:print(format_string(binary_to_list(V)));
to_json(V) when is_binary(V)->
%"\"binary\"";
binary_to_json(V);
to_json(V) when is_atom(V) ->
io_lib:print(atom_to_list(V));
to_json(V) when is_tuple(V) ->
%"\"tuple\"";
["{"] ++ tuple_to_json(V) ++ ["}"];
to_json(V) ->
case io_lib:printable_list(V) of
true -> io_lib:print(format_string(V));
false ->
case is_binary(V) of
true -> "\"" ++ io_lib:print(binary_to_list(V)) ++ "\"";
%false - to_json(V)
false -> "\"" ++ format_string(io_lib:print(V)) ++ "\""
%false -> "\"printable_binary\""
end
end.
binary_to_json(V) ->
NewV=binary_to_term(V),
%"\"" ++ format_string(io_lib:print(V)) ++ "\"".
to_json(NewV).
format_string(S) ->
S1 = re:replace(S, "\n", "\\\\n", [global, {return, list}]),
re:replace(S1, "\"", "'", [global, {return, list}]).
terminate({ok, _Data}, #state{testsuite = TS} = State) ->
Dir = State#state.dir,
JSon = to_json(TS),
file:write_file(filename:join([Dir, "testsuite_results.json"]), JSon),
ok;
terminate({error, Reason}, _St) ->
io:fwrite("Internal error: ~P.\n", [Reason, 25]),
sync_end(error).
sync_end(Result) ->
receive
{stop, Reference, ReplyTo} ->
ReplyTo ! {result, Reference, Result},
ok
end.
handle_begin(group, Data, State) ->
NewId = proplists:get_value(id, Data),
case NewId of
[] ->
State;
[_GroupId] ->
Desc = proplists:get_value(desc, Data),
TestSuite = State#state.testsuite,
NewTestSuite = TestSuite#testsuite{name = Desc},
State#state{testsuite=NewTestSuite};
%% Surefire format is not hierarchic: Ignore subgroups:
_ ->
State
end;
handle_begin(test, _Data, State) ->
State.
handle_end(group, Data, St) ->
%% Retrieve existing test suite:
case proplists:get_value(id, Data) of
[] ->
St;
[_GroupId|_] ->
TestSuite = St#state.testsuite,
%% Update TestSuite data:
Time = proplists:get_value(time, Data),
Output = proplists:get_value(output, Data),
NewTestSuite = TestSuite#testsuite{ time = Time, output = Output },
St#state{testsuite=NewTestSuite}
end;
handle_end(test, Data, State) ->
%% Retrieve existing test suite:
TestSuite = State#state.testsuite,
%io:format(io_lib:print(Data)),
%% Create test case:
{Module, Function, Arity} =proplists:get_value(source, Data),
Line = proplists:get_value(line, Data),
Name = lists:flatten(io_lib:format("~p:~p/~p(~p)",[Module, Function, Arity, Line])),
Desc = format_desc(proplists:get_value(desc, Data)),
Result = proplists:get_value(status, Data),
Time = proplists:get_value(time, Data),
Output = proplists:get_value(output, Data),
TestCase = #testcase{displayname = Name,
module=Module, function=Function, arity=Arity, line=Line,
description = Desc,
time = Time,output = Output},
NewTestSuite = add_testcase_to_testsuite(Result, TestCase, TestSuite),
State#state{testsuite=NewTestSuite}.
%% Cancel group does not give information on the individual cancelled test case
%% We ignore this event
handle_cancel(group, _Data, State) ->
State;
handle_cancel(test, Data, State) ->
%% Retrieve existing test suite:
TestSuite = State#state.testsuite,
%% Create test case:
Name = format_name(proplists:get_value(source, Data),
proplists:get_value(line, Data)),
Desc = format_desc(proplists:get_value(desc, Data)),
Reason = proplists:get_value(reason, Data),
TestCase = #testcase{
displayname = Name, description = Desc,
result = {skipped, Reason}, time = 0,
output = <<>>},
NewTestSuite = TestSuite#testsuite{
skipped = TestSuite#testsuite.skipped+1,
testcases=[TestCase|TestSuite#testsuite.testcases] },
State#state{testsuite=NewTestSuite}.
format_name({Module, Function, Arity}, Line) ->
{Module, Function, Arity, Line}.
format_desc(undefined) ->
"";
format_desc(Desc) when is_binary(Desc) ->
binary_to_list(Desc);
format_desc(Desc) when is_list(Desc) ->
Desc.
%% Add testcase to testsuite depending on the result of the test.
add_testcase_to_testsuite(ok, TestCaseTmp, TestSuite) ->
TestCase = TestCaseTmp#testcase{ result = ok },
TestSuite#testsuite{
succeeded = TestSuite#testsuite.succeeded+1,
testcases=[TestCase|TestSuite#testsuite.testcases] };
add_testcase_to_testsuite({error, Exception}, TestCaseTmp, TestSuite) ->
case Exception of
{error,{AssertionException,_},_} when
AssertionException == assertion_failed;
AssertionException == assertMatch_failed;
AssertionException == assertEqual_failed;
AssertionException == assertException_failed;
AssertionException == assertCmd_failed;
AssertionException == assertCmdOutput_failed
->
TestCase = TestCaseTmp#testcase{ result = {failed, Exception} },
TestSuite#testsuite{
failed = TestSuite#testsuite.failed+1,
testcases = [TestCase|TestSuite#testsuite.testcases] };
_ ->
TestCase = TestCaseTmp#testcase{ result = {aborted, Exception} },
TestSuite#testsuite{
aborted = TestSuite#testsuite.aborted+1,
testcases = [TestCase|TestSuite#testsuite.testcases] }
end. | apps/erlangbridge/src/eunit_jsonreport.erl | 0.58747 | 0.557544 | eunit_jsonreport.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 SyncFree Consortium. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(vectorclock).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([all_dots_greater/2, all_dots_smaller/2, conc/2,
eq/2, fold/3, from_list/1, ge/2, get/2,
gt/2, le/2, lt/2, map/2, max/1, min/1,
min_clock/2, new/0, set_all/2,
set/3, size/1, to_list/1, update_with/4]).
-type vc_node() :: term().
-type vectorclock() :: #{vc_node() => pos_integer()}.
-export_type([vectorclock/0]).
-spec new() -> vectorclock().
new() -> maps:new().
-spec get(vc_node(), vectorclock()) -> non_neg_integer().
get(Key, VectorClock) ->
maps:get(Key, VectorClock, 0).
-spec set(vc_node(), non_neg_integer(), vectorclock()) -> vectorclock().
set(Key, 0, VectorClock) ->
maps:remove(Key, VectorClock);
set(Key, Value, VectorClock) when Value > 0 ->
VectorClock#{Key => Value}.
-spec set_all(pos_integer(), vectorclock()) -> vectorclock().
set_all(0, _VectorClock) ->
new();
set_all(Value, VectorClock) when Value > 0 ->
Fun = fun(_K, _V) -> Value end,
map(Fun, VectorClock).
-spec from_list([{vc_node(), pos_integer()}]) -> vectorclock().
from_list(List) -> maps:from_list(List).
-spec to_list(vectorclock()) -> [{vc_node(), pos_integer()}].
to_list(VectorClock) -> maps:to_list(VectorClock).
-spec map(fun((vc_node(), pos_integer()) -> pos_integer()), vectorclock()) -> vectorclock().
map(Fun, VectorClock) -> maps:map(Fun, VectorClock).
-spec fold(fun ((vc_node(), pos_integer(), X) -> X), X, vectorclock()) -> X.
fold(Fun, Init, VectorClock) ->
maps:fold(Fun, Init, VectorClock).
-spec update_with(vc_node(), fun((pos_integer()) -> pos_integer()), pos_integer(), vectorclock()) -> vectorclock().
update_with(Key, Fun, Init, VectorClock) ->
maps:update_with(Key, Fun, Init, VectorClock).
-spec min_clock(vectorclock(), [vc_node()]) -> non_neg_integer().
min_clock(_VectorClock, []) ->
0;
min_clock(VectorClock, Nodes) ->
lists:min([get(Node, VectorClock) || Node <- Nodes]).
-spec max([vectorclock()]) -> vectorclock().
max([]) -> new();
max([V]) -> V;
max([V1, V2 | T]) -> max([max2(V1, V2) | T]).
%% component-wise maximum of two clocks
-spec max2(vectorclock(), vectorclock()) -> vectorclock().
max2(V1, V2) ->
FoldFun =
fun(DC, A, Acc) ->
B = get(DC, Acc),
case A > B of
true -> Acc#{DC => A};
false -> Acc
end
end,
maps:fold(FoldFun, V2, V1).
-spec min([vectorclock()]) -> vectorclock().
min([]) -> new();
min([V]) -> V;
min([V1, V2 | T]) ->
min([min2(V1, V2) | T]).
%% component-wise minimum of two clocks
-spec min2(vectorclock(), vectorclock()) -> vectorclock().
min2(V1, V2) ->
FoldFun = fun (DC, A, Acc) ->
B = get(DC, V2),
C = min(A, B),
case C of
0 -> Acc;
_ -> Acc#{DC => C}
end
end,
maps:fold(FoldFun, new(), V1).
-spec size(vectorclock()) -> non_neg_integer().
size(V) -> maps:size(V).
-spec for_all_keys(fun ((pos_integer(), pos_integer()) -> boolean()), vectorclock(), vectorclock()) -> boolean().
for_all_keys(F, V1, V2) ->
AllDCs = maps:keys(maps:merge(V1, V2)),
Func = fun (DC) ->
A = get(DC, V1),
B = get(DC, V2),
F(A, B)
end,
lists:all(Func, AllDCs).
-spec eq(vectorclock(), vectorclock()) -> boolean().
eq(V1, V2) -> le(V1, V2) andalso le(V2, V1).
-spec le(vectorclock(), vectorclock()) -> boolean().
le(V1, V2) ->
try
maps:fold(fun (DC, V, true) ->
case V =< get(DC, V2) of
true -> true;
false -> throw(false)
end
end, true, V1)
catch
false -> false
end .
-spec ge(vectorclock(), vectorclock()) -> boolean().
ge(V1, V2) -> le(V2, V1).
-spec all_dots_smaller(vectorclock(), vectorclock()) -> boolean().
all_dots_smaller(V1, V2) ->
for_all_keys(fun (A, B) -> A < B end, V1, V2).
-spec all_dots_greater(vectorclock(), vectorclock()) -> boolean().
all_dots_greater(V1, V2) ->
for_all_keys(fun (A, B) -> A > B end, V1, V2).
-spec gt(vectorclock(), vectorclock()) -> boolean().
gt(V1, V2) -> lt(V2, V1).
-spec lt(vectorclock(), vectorclock()) -> boolean().
lt(V1, V2) ->
try maps:fold(fun (DC, V, Acc) ->
X = get(DC, V2),
case V =< X of
true -> Acc orelse V < X;
false -> throw(false)
end
end,
false, V1)
orelse
maps:fold(fun (DC, V, _) ->
X = get(DC, V1),
case V > X of
true -> throw(true);
false -> false
end
end,
false, V2)
catch
R -> R
end.
-spec conc(vectorclock(), vectorclock()) -> boolean().
conc(V1, V2) -> not ge(V1, V2) andalso not le(V1, V2).
-ifdef(TEST).
vectorclock_empty_test() ->
V1 = new(),
V2 = from_list([]),
?assertEqual(V1, V2),
?assertEqual((eq(min([]), max([]))), true),
?assertEqual((to_list(V1)), []).
vectorclock_test() ->
V1 = from_list([{1, 5}, {2, 4}, {3, 5}, {4, 6}]),
V2 = from_list([{1, 4}, {2, 3}, {3, 4}, {4, 5}]),
V3 = from_list([{1, 5}, {2, 4}, {3, 4}, {4, 5}]),
V4 = from_list([{1, 6}, {2, 3}, {3, 1}, {4, 7}]),
V5 = from_list([{1, 6}, {2, 7}]),
?assert(all_dots_greater(V1, V2)),
?assert(all_dots_smaller(V2, V1)),
?assertNot(all_dots_greater(V1, V3)),
?assert(gt(V1, V3)),
?assertNot(gt(V1, V1)),
?assertNot(ge(V1, V4)),
?assertNot(le(V1, V4)),
?assertNot(eq(V1, V4)),
?assertNot(ge(V1, V5)).
vectorclock_lt_test() ->
?assertEqual((lt(from_list([{a, 1}]), from_list([{a, 1}, {b, 1}]))), true),
?assertEqual((lt(from_list([{a, 1}]), from_list([{a, 1}]))), false),
?assertEqual((lt(from_list([{a, 2}]), from_list([{a, 1}]))), false).
vectorclock_max_test() ->
V1 = from_list([{1, 5}, {2, 4}]),
V2 = from_list([{1, 6}, {2, 3}]),
V3 = from_list([{1, 3}, {3, 2}]),
Expected12 = from_list([{1, 6}, {2, 4}]),
Expected23 = from_list([{1, 6}, {2, 3}, {3, 2}]),
Expected13 = from_list([{1, 5}, {2, 4}, {3, 2}]),
Expected123 = from_list([{1, 6}, {2, 4}, {3, 2}]),
Unexpected123 = from_list([{1, 5}, {2, 5}, {3, 5}]),
?assertEqual((eq(max([V1, V2]), Expected12)), true),
?assertEqual((eq(max([V2, V3]), Expected23)), true),
?assertEqual((eq(max([V1, V3]), Expected13)), true),
?assertEqual((eq(max([V1, V2, V3]), Expected123)), true),
?assertEqual((eq(max([V1, V2, V3]), Unexpected123)), false).
vectorclock_min_test() ->
V1 = from_list([{1, 5}, {2, 4}]),
V2 = from_list([{1, 6}, {2, 3}]),
V3 = from_list([{1, 3}, {3, 2}]),
Expected12 = from_list([{1, 5}, {2, 3}]),
Expected23 = from_list([{1, 3}]),
Expected13 = from_list([{1, 3}]),
Expected123 = from_list([{1, 3}]),
Unexpected123 = from_list([{1, 3}, {2, 3}, {3, 2}]),
?assert(eq(min([V1, V2]), Expected12)),
?assert(eq(min([V2, V3]), Expected23)),
?assert(eq(min([V1, V3]), Expected13)),
?assert(eq(min([V1, V2, V3]), Expected123)),
?assertNot(eq(min([V1, V2, V3]), Unexpected123)),
?assert(eq(vectorclock:min([V1]), vectorclock:max([V1]))).
vectorclock_conc_test() ->
V1 = from_list([{1, 5}, {2, 4}]),
V2 = from_list([{1, 6}, {2, 3}]),
V3 = from_list([{1, 3}, {3, 2}]),
V4 = from_list([{1, 6}, {3, 3}]),
V5 = from_list([{1, 6}]),
?assert(conc(V1, V2)),
?assert(conc(V2, V3)),
?assertNot(conc(V3, V4)),
?assertNot(conc(V5, V4)).
vectorclock_set_test() ->
V1 = from_list([{1, 1}, {2, 2}]),
V2 = from_list([{1, 1}, {2, 2}, {3, 3}]),
V3 = from_list([{1, 1}, {2, 4}]),
?assertEqual(V2, set(3, 3, V1)),
?assertEqual(V3, set(2, 4, V1)),
?assertEqual(V1, set(3, 0, V2)).
vectorclock_setall_test() ->
V1 = from_list([{1, 5}, {8, 4}, {3, 5}, {9, 6}]),
V2 = from_list([{1, 7}, {8, 7}, {3, 7}, {9, 7}]),
?assertEqual(V2, set_all(7, V1)),
?assertEqual(new(), set_all(0, V1)).
vectorclock_minclock_test() ->
V1 = from_list([{1, 5}, {8, 4}, {3, 5}, {9, 6}]),
?assertEqual(4, min_clock(V1, [1, 8, 3, 9])),
?assertEqual(0, min_clock(V1, [])),
?assertEqual(0, min_clock(V1, [1, 8, 3, 9, 10])).
vectorclock_size_test() ->
V1 = from_list([{1, 5}, {8, 4}, {3, 5}, {9, 6}]),
V2 = new(),
?assertEqual(4, vectorclock:size(V1)),
?assertEqual(0, vectorclock:size(V2)).
vectorclock_update_test() ->
V1 = from_list([{1, 5}, {8, 4}, {3, 5}, {9, 6}]),
V2 = from_list([{1, 5}, {8, 8}, {3, 5}, {9, 6}]),
?assertEqual(V2, update_with(8, fun (X) -> X * 2 end, 0, V1)).
vectorclock_fold_test() ->
V1 = from_list([{1, 5}, {8, 4}, {3, 5}, {9, 6}]),
?assertEqual(20, fold(fun (_Node, X, Acc) -> X + Acc end, 0, V1)).
-endif. | src/vectorclock.erl | 0.592195 | 0.489076 | vectorclock.erl | starcoder |
%%%-------------------------------------------------------------------
%%% Licensed to the Apache Software Foundation (ASF) under one
%%% or more contributor license agreements. See the NOTICE file
%%% distributed with this work for additional information
%%% regarding copyright ownership. The ASF licenses this file
%%% to you under the Apache License, Version 2.0 (the
%%% "License"); you may not use this file except in compliance
%%% with the License. You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing,
%%% software distributed under the License is distributed on an
%%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%%% KIND, either express or implied. See the License for the
%%% specific language governing permissions and limitations
%%% under the License.
%%%
%%% @doc
%%% This module implements a simple way of giving operational visibility
%%% of events in the system. It expects a Key and Data where the
%%% Key is used to increment a counter in a table and also to store the
%%% last received Data for that Key. The Data is preferred to be a list
%%% of `{K,V}' tuples, if it is any other format, it uses `[{data, Data}]'
%%% to store the last information.
%%%
%%% The ETS tables holding the counters/snapshots must be initialized
%%% before the feature can be used. The application using this module should
%%% call `otter_lib_snapshot_count:sup_init()' from a persistent process.
%%% The simplest is to call from the application supervisor init. The call
%%% only creaes the ETS tables that need an owner process to stay around.
%%% @end
%%%-------------------------------------------------------------------
-module(otter_lib_snapshot_count).
-export([
delete_all_counters/0,
delete_counter/1,
get_snap/1,
list_counts/0,
snapshot/2,
sup_init/0
]).
%%--------------------------------------------------------------------
%% @doc Increase a counter for the given Key and store the Data. If the data
%% is a property (key-value) list then it adds the timestamp in
%% `{Year, Month, Day, Hour, Min, Sec, Us}' format to the list with key
%% `snap_timestamp`. If it is not a property list then it stores the
%% data in a property list with key `data' and adds the timestamp.
%% It returns the current counter value for the key.
%% @end
%%--------------------------------------------------------------------
-spec snapshot(Key :: term(), Data :: term()) -> integer().
snapshot(Key, [{_, _} |_ ] = Data) ->
{_, _, Us} = os:timestamp(),
{{Year, Month, Day}, {Hour, Min, Sec}} = calendar:local_time(),
ets:insert(
otter_snapshot_store,
{
Key,
[
{snap_timestamp, {Year, Month, Day, Hour, Min, Sec, Us}}
| Data
]
}
),
case catch ets:update_counter(otter_snapshot_count, Key, 1) of
{'EXIT', {badarg, _}} ->
ets:insert(otter_snapshot_count, {Key, 1});
Cnt ->
Cnt
end;
snapshot(Key, Data) ->
snapshot(Key, [{data, Data}]).
%%--------------------------------------------------------------------
%% @doc List all the counters with their values
%% @end
%%--------------------------------------------------------------------
-spec list_counts() -> [{Key :: term(), Counter :: integer()}].
list_counts() ->
ets:tab2list(otter_snapshot_count).
%%--------------------------------------------------------------------
%% @doc Return the last stored data (snapshot) for a key
%% @end
%%--------------------------------------------------------------------
-spec get_snap(Key :: term()) -> term().
get_snap(Key) ->
ets:lookup(otter_snapshot_store, Key).
%%--------------------------------------------------------------------
%% @doc Delete the counter and data (snapshot) for a key
%% @end
%%--------------------------------------------------------------------
-spec delete_counter(Key :: term()) -> ok.
delete_counter(Key) ->
ets:delete(otter_snapshot_store, Key),
ets:delete(otter_snapshot_count, Key),
ok.
%%--------------------------------------------------------------------
%% @doc Delete all counters and data (snapshot)
%% @end
%%--------------------------------------------------------------------
-spec delete_all_counters() -> ok.
delete_all_counters() ->
ets:delete_all_objects(otter_snapshot_store),
ets:delete_all_objects(otter_snapshot_count),
ok.
%%--------------------------------------------------------------------
%% @doc Initialize the ETS tables to store counters/snapshots. Should be
%% called from a persistent process/
%% @end
%%--------------------------------------------------------------------
-spec sup_init() -> term().
sup_init() ->
[
ets:new(Tab, [named_table, public, {Concurrency, true}]) ||
{Tab, Concurrency} <- [
{otter_snapshot_count, write_concurrency},
{otter_snapshot_store, write_concurrency}
]
]. | src/otter_lib_snapshot_count.erl | 0.648466 | 0.427337 | otter_lib_snapshot_count.erl | starcoder |
%%%----------------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright 2012 University of St Andrews (See LICENCE)
%%% @headerfile "skel.hrl"
%%%
%%% @doc This module contains various functions to help us when profiling and
%%% benchmarking.
%%%
%%% This allows us very simple access to benchmarking.
%%%
%%% === Example ===
%%%
%%% ```sk_profile:benchmark(skel:run([{seq, fun ?MODULE:p/1}, {seq, fun ?MODULE:f/1}], Images), [Input], Ntimes)])'''
%%%
%%% In this example we use the {@link benchmark/3} function to record how
%%% long it takes for the example seen in {@link sk_seq} to execute.
%%%
%%% @end
%%% @todo Include eprof functionality
%%%----------------------------------------------------------------------------
-module(sk_profile).
-export([
benchmark/3
]).
-include("skel.hrl").
-spec benchmark(fun(), list(), pos_integer()) -> list().
%% @doc Produces a list of averages for the time taken by function `Fun' to be
%% evaluated `N' times, given a list of arguments `Args'. Returned times are
%% in microseconds. Returns a list containing the tuples:
%%
%% <ul>
%% <li><tt>N</tt>,</li>
%% <li><tt>min</tt>, the shortest time taken to perform Fun;</li>
%% <li><tt>max</tt>, the longest time taken to perform Fun;</li>
%% <li><tt>med</tt>, the median of all individual results;</li>
%% <li><tt>mean</tt>, the mean of all individual results; and</li>
%% <li><tt>std_dev</tt>, the standard deviation.</li>
%% </ul>
benchmark(Fun, Args, N) when N > 0 ->
Timing = test_loop(Fun, Args, N, []),
Mean = mean(Timing),
[
{n, N},
{min, lists:min(Timing)},
{max, lists:max(Timing)},
{med, median(Timing)},
{mean, Mean},
{std_dev, std_dev(Timing, Mean)}
].
%% @doc Recursively records the length of time it takes for the function `Fun'
%% to be evaluated `N' times.
test_loop(_Fun, _Args, 0, Timings) ->
Timings;
test_loop(Fun, Args, N, Timings) ->
{Timing, _} = timer:tc(Fun, Args),
test_loop(Fun, Args, N-1, [Timing|Timings]).
-spec median([number(),...]) -> number().
%% @doc Returns the median of the times listed.
median(List) ->
lists:nth(round((length(List) / 2)), lists:sort(List)).
-spec mean([number(),...]) -> number().
%% @doc Returns the mean time taken for those listed.
mean(List) ->
lists:foldl(fun(X, Sum) -> X + Sum end, 0, List) / length(List).
-spec std_dev([number(),...], number()) -> number().
%% @doc Returns the standard deviation of all times recorded.
std_dev(List, Mean) ->
math:pow(variance(List, Mean), 0.5).
-spec variance([number(),...], number()) -> number().
%% @doc Calculates the variance of the times listed for use in calculating the
%% standard deviation in {@link std_dev/2}.
variance(List, _Mean) when length(List) == 1 ->
0.0;
variance(List, Mean) ->
lists:foldl(fun(X, Sum) -> math:pow(Mean - X, 2) + Sum end, 0, List) / (length(List) - 1). | src/sk_profile.erl | 0.760028 | 0.754056 | sk_profile.erl | starcoder |
-module(aoc2015_day24).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2015,
day = 24,
name = "It Hangs in the Balance",
expected = {11846773891, 80393059},
has_input_file = false}.
-type input_type() :: [integer()].
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(_Input) ->
[1, 2, 3, 7, 11, 13, 17, 19, 23, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89,
97, 101, 103, 107, 109, 113].
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
start_common(Input, 3).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Input) ->
start_common(Input, 4).
%% To avoid combinatorial explosions when looking for possible
%% combination of packages for the first group, we assume that group A
%% will never be larger than 6. This turns out to be good enough.
-define(MAX_GROUP_A_SIZE, 6).
start_common(Packages, Groups) ->
Sum = lists:sum(Packages) div Groups,
find_first_group_qe(Packages, Sum).
%% Compute the "quantum entanglement" of a group, which is simply the
%% product of all integers in the group.
qe(L) ->
lists:foldl(fun(V, AccIn) -> V * AccIn end, 1, L).
%% Sort group A candidates on length (smallest first), then
%% on Quantum Entanglement.
sort_fun(A, B) when length(A) /= length(B) ->
length(A) =< length(B);
sort_fun(A, B) ->
qe(A) =< qe(B).
find_first_group_qe(Packages, Sum) ->
GroupAs = find_a(Packages, Sum),
[Best | _] = lists:sort(fun sort_fun/2, GroupAs),
%% I assume here that the remaining elements can be divided into 2
%% (3) groups with the correct sum. This assumption turned out to be
%% correct.
qe(Best).
%% Find best choices for first group (A)
find_a(Data, Sum) ->
lists:foldl(fun(N, As) ->
Combos = cnr(N, Data),
As ++ lists:filter(fun(X) -> lists:sum(X) == Sum end, Combos)
end,
[],
lists:seq(1, ?MAX_GROUP_A_SIZE)).
%% Returns all possible combinations of a given length.
%% https://github.com/joergen7/lib_combin/blob/master/src/lib_combin.erl
cnr(N, SrcLst) when N >= 0 ->
Cnr = fun Cnr(0, _, Acc) ->
[Acc];
Cnr(_, [], _) ->
[];
Cnr(M, [H | T], Acc) ->
case T of
[] ->
Cnr(M - 1, [], [H | Acc]);
[_ | _] ->
Cnr(M - 1, T, [H | Acc]) ++ Cnr(M, T, Acc)
end
end,
Cnr(N, SrcLst, []). | src/2015/aoc2015_day24.erl | 0.572842 | 0.656768 | aoc2015_day24.erl | starcoder |
%%%-----------------------------------------------------------------------------
%%%
%%% Copyright (c) 2016-2017 Klarna AB
%%%
%%% This file is provided to you under the Apache License,
%%% Version 2.0 (the "License"); you may not use this file
%%% except in compliance with the License. You may obtain
%%% a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing,
%%% software distributed under the License is distributed on an
%%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%%% KIND, either express or implied. See the License for the
%%% specific language governing permissions and limitations
%%% under the License.
%%%
%%%-----------------------------------------------------------------------------
%% @doc This module is a collection of `eravro' supported decoder hooks
%%
%% Decoder hook is an anonymous function to be evaluated by
%% the JSON or binary decoder to amend either schmea or data (input or output).
%%
%% For example:
%%
%% A hook can be used to fast-skip undesired data fields of records
%% or undesired data of big maps etc.
%% e.g. To dig out only the field named "MyField" in "MyRecord", the
%% JSON decoder hook may probably look like:
%%
%% <pre>
%% fun(Type, SubNameOrIndex, Data, DecodeFun) ->
%% case {avro:get_type_fullname(Type), SubNameOrIndex} of
%% {"com.example.MyRecord", "MyField"} ->
%% DecodeFun(Data);
%% {"com.example.MyRecord", _OtherFields} ->
%% ignored;
%% _OtherType ->
%% DecodeFun(Data)
%% end
%% end.
%% </pre>
%%
%% A hook can be used for debug. For example, below hook should print
%% the decoding stack along the decode function traverses through the bytes.
%%
%% <pre>
%% fun(Type, SubNameOrIndex, Data, DecodeFun) ->
%% SubInfo = case is_integer(SubNameOrIndex) of
%% true -> integer_to_list(SubNameOrIndex);
%% false -> SubNameOrIndex
%% end,
%% io:format("~s.~s\n", [avro:get_type_name(Type), SubInfo]),
%% DecodeFun(Data)
%% end
%% </pre>
%%
%% A hook can also be used as a monkey patch to fix some corrupted data.
%% @end
-module(avro_decoder_hooks).
-export([ tag_unions/0
, pretty_print_hist/0
, print_debug_trace/2
]).
-include("erlavro.hrl").
-include("avro_stacktrace.hrl").
-define(PD_PP_INDENTATION, '$avro_decoder_pp_indentation').
-define(PD_DECODER_HIST, '$avro_decoder_hist').
-define(REASON_TAG, '$hook-raised').
-type count() :: non_neg_integer().
-type trace_hist_entry() :: {push, _, _} | {pop, _} | pop.
%% @doc By default, decoders do not tag union values.
%% This hook function is to tag union values with union type names
%% NOTE: null values are not tagged
%% @end
-spec tag_unions() -> avro:decoder_hook_fun().
tag_unions() -> fun tag_unions/4.
%% @doc This hook is useful when a decoder has failed on decoding,
%% try to decode it again with this hook to inspect the decode history
%% and the avro type stack where the failure happened
%% NOTE: Always call this API to retrieve the hook, never save the hook
%% and re-use for different decode attempts
%% @end.
-spec print_debug_trace(fun((iodata()) -> ok), count()) ->
avro:decoder_hook_fun().
print_debug_trace(PrintFun, MaxHistoryLength) ->
ok = erase_hist(),
fun(T, Sub, Data, DecodeFun) ->
print_trace_on_failure(T, Sub, Data, DecodeFun, PrintFun, MaxHistoryLength)
end.
%% @doc This hook prints the type tree with indentation, and the leaf values
%% to the current group leader.
%% @end
-spec pretty_print_hist() -> avro:decoder_hook_fun().
pretty_print_hist() ->
_ = erase(?PD_PP_INDENTATION),
fun(T, SubInfo, Data, DecodeFun) ->
Name = avro:get_type_fullname(T),
Indentation =
case get(?PD_PP_INDENTATION) of
undefined -> 0;
Indentati -> Indentati
end,
IndentationStr = lists:duplicate(Indentation * 2, $\s),
ToPrint =
[ IndentationStr
, Name
, case SubInfo of
"" -> ": ";
I when is_integer(I) -> [$., integer_to_list(I), "\n"];
B when is_binary(B) -> [$., B, "\n"];
_ -> "\n"
end
],
io:put_chars(user, ToPrint),
_ = put(?PD_PP_INDENTATION, Indentation + 1),
DecodeResult = DecodeFun(Data),
ResultToPrint = get_pretty_print_result(DecodeResult),
_ = pretty_print_result(SubInfo, ResultToPrint, IndentationStr),
_ = put(?PD_PP_INDENTATION, Indentation),
DecodeResult
end.
%%%_* Internal functions =======================================================
%% @private
tag_unions(#avro_union_type{} = T, SubInfo, DecodeIn, DecodeFun) ->
Result = DecodeFun(DecodeIn),
Name = get_union_member_name(T, SubInfo),
case Result of
{Value, Tail} when is_binary(Tail) ->
%% used as binary decoder hook
{maybe_tag(Name, Value), Tail};
Value ->
%% used as JSON decoder hook
maybe_tag(Name, Value)
end;
tag_unions(_T, _SubInfo, DecodeIn, DecodeFun) ->
%% Not a union, pass through
DecodeFun(DecodeIn).
%% @private
get_union_member_name(Type, Id) when is_integer(Id) ->
%% when decoding avro binary, lookup member name by union member index.
{ok, ChildType} = avro_union:lookup_type(Id, Type),
case is_binary(ChildType) of
true -> ChildType;
false -> avro:get_type_fullname(ChildType)
end;
get_union_member_name(_Type, Name) when is_binary(Name) ->
%% when decoding JSON, the value is already tagged with union member name
Name.
%% @private Never tag primitives and unnamed complex types.
maybe_tag(N, Value) when ?IS_AVRO_PRIMITIVE_NAME(N) -> Value;
maybe_tag(?AVRO_ARRAY, Value) -> Value;
maybe_tag(?AVRO_MAP, Value) -> Value;
maybe_tag(Name, Value) -> {Name, Value}.
%% @private
print_trace_on_failure(T, Sub, Data, DecodeFun, PrintFun, HistCount) ->
Name = avro:get_type_fullname(T),
ok = add_hist({push, Name, Sub}),
try
decode_and_add_trace(Sub, Data, DecodeFun)
catch
C : R ?CAPTURE_STACKTRACE
when not (is_tuple(R) andalso element(1, R) =:= ?REASON_TAG) ->
%% catch only the very first error
ok = print_trace(PrintFun, HistCount),
ok = erase_hist(),
erlang:raise(C, {?REASON_TAG, R}, ?GET_STACKTRACE)
end.
%% @private
decode_and_add_trace(Sub, Data, DecodeFun) ->
Result = DecodeFun(Data),
Value =
case Result of
{V, Tail} when is_binary(Tail) ->
%% binary decoder
V;
_ ->
%% JSON decoder
Result
end,
case Sub =:= [] orelse Value =:= [] of
true -> add_hist({pop, Value}); %% add stack hist with decoded value
false -> add_hist(pop)
end,
Result.
%% @private
-spec erase_hist() -> ok.
erase_hist() ->
_ = erlang:erase(?PD_DECODER_HIST),
ok.
%% @private
-spec get_hist() -> [trace_hist_entry()].
get_hist() ->
case erlang:get(?PD_DECODER_HIST) of
undefined -> [];
S -> S
end.
%% @private Use process dictionary to keep the decoder stack trace.
-spec add_hist(trace_hist_entry()) -> ok.
add_hist(NewOp) ->
erlang:put(?PD_DECODER_HIST, [NewOp | get_hist()]),
ok.
%% @private Print decoder trace (stack and history) using the given function.
print_trace(PrintFun, HistCount) ->
Hist = lists:reverse(get_hist()),
{Stack, History} = format_trace(Hist, _Stack = [], _History = [], HistCount),
PrintFun(["avro type stack:\n", Stack, "\n",
"decode history:\n", History]).
%% @private Format the trace hisotry into printable format.
%% Return the type stack and last N decode history entries as iodata().
%% @end
-spec format_trace(TraceHist :: [trace_hist_entry()],
TypeStack :: [{avro:name(), atom() | string() | integer()}],
FormattedTrace :: iodata(),
MaxHistEntryCount :: count()) -> {iodata(), iodata()}.
format_trace([], Stack, Hist, _HistCount) ->
{io_lib:format("~p", [lists:reverse(Stack)]), lists:reverse(Hist)};
format_trace([{push, Name, Sub} | Rest], Stack, Hist, HistCount) ->
Padding = lists:duplicate(length(Stack) * 2, $\s),
Line = bin([Padding, Name,
case Sub of
[] -> "";
none -> "";
I when is_integer(I) -> [".", integer_to_list(I)];
S when is_binary(S) -> [".", S]
end, "\n"]),
NewHist = lists:sublist([Line | Hist], HistCount),
format_trace(Rest, [{Name, Sub} | Stack], NewHist, HistCount);
format_trace([{pop, V} | Rest], Stack, Hist, HistCount) ->
Padding = lists:duplicate(length(Stack) * 2, $\s),
Line = bin([Padding, io_lib:format("~100000p", [V]), "\n"]),
NewHist = lists:sublist([Line | Hist], HistCount),
format_trace(Rest, tl(Stack), NewHist, HistCount);
format_trace([pop | Rest], Stack, Hist, HistCount) ->
format_trace(Rest, tl(Stack), Hist, HistCount).
%% @private
bin(IoData) -> iolist_to_binary(IoData).
%% @private
get_pretty_print_result(JsonResult) when ?IS_AVRO_VALUE(JsonResult) ->
%% JSON value passed to hooks is always wrapped
?AVRO_VALUE_DATA(JsonResult);
get_pretty_print_result({Result, Tail}) when is_binary(Tail) ->
%% binary decode result
Result.
%% @private
pretty_print_result(_Sub = [], Result, _IndentationStr) ->
%% print the value if it's a leaf in the type tree
io:put_chars(user, [io_lib:print(Result)]);
pretty_print_result(_Sub, _Result, _IndentationStr) ->
ok.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
%%% erlang-indent-level: 2
%%% End: | src/avro_decoder_hooks.erl | 0.589716 | 0.409398 | avro_decoder_hooks.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1999-2020. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Constant folding optimisation for Core
%% Propagate atomic values and fold in values of safe calls to
%% constant arguments. Also detect and remove literals which are
%% ignored in a 'seq'. Could handle lets better by chasing down
%% complex 'arg' expressions and finding values.
%%
%% Try to optimise case expressions by removing unmatchable or
%% unreachable clauses. Also change explicit tuple arg into multiple
%% values and extend clause patterns. We must be careful here not to
%% generate cases which we know to be safe but later stages will not
%% recognise as such, e.g. the following is NOT acceptable:
%%
%% case 'b' of
%% <'b'> -> ...
%% end
%%
%% Variable folding is complicated by variable shadowing, for example
%% in:
%% 'foo'/1 =
%% fun (X) ->
%% let <A> = X
%% in let <X> = Y
%% in ... <use A>
%% If we were to simply substitute X for A then we would be using the
%% wrong X. Our solution is to rename variables that are the values
%% of substitutions. We could rename all shadowing variables but do
%% the minimum. We would then get:
%% 'foo'/1 =
%% fun (X) ->
%% let <A> = X
%% in let <X1> = Y
%% in ... <use A>
%% which is optimised to:
%% 'foo'/1 =
%% fun (X) ->
%% let <X1> = Y
%% in ... <use X>
%%
%% This is done by carefully shadowing variables and substituting
%% values. See details when defining functions.
%%
%% It would be possible to extend to replace repeated evaluation of
%% "simple" expressions by the value (variable) of the first call.
%% For example, after a "let Z = X+1" then X+1 would be replaced by Z
%% where X is valid. The Sub uses the full Core expression as key.
%% It would complicate handling of patterns as we would have to remove
%% all values where the key contains pattern variables.
-module(sys_core_fold).
-export([module/2,format_error/1]).
-import(lists, [map/2,foldl/3,foldr/3,mapfoldl/3,all/2,any/2,
reverse/1,reverse/2,member/2,flatten/1,
unzip/1,keyfind/3]).
-import(cerl, [ann_c_cons/3,ann_c_map/3,ann_c_tuple/2]).
-include("core_parse.hrl").
%%-define(DEBUG, 1).
-ifdef(DEBUG).
-define(ASSERT(E),
case E of
true ->
ok;
false ->
io:format("~p, line ~p: assertion failed\n", [?MODULE,?LINE]),
error(assertion_failed)
end).
-else.
-define(ASSERT(E), ignore).
-endif.
%% Variable value info.
-record(sub, {v=[], %Variable substitutions
s=sets:new([{version, 2}]) :: sets:set(), %Variables in scope
t=#{} :: map(), %Types
in_guard=false, %In guard or not.
top=true}). %Not inside a term.
-spec module(cerl:c_module(), [compile:option()]) ->
{'ok', cerl:c_module(), [_]}.
module(#c_module{defs=Ds0}=Mod, Opts) ->
put(no_inline_list_funcs, not member(inline_list_funcs, Opts)),
init_warnings(),
Ds1 = [function_1(D) || D <- Ds0],
erase(new_var_num),
erase(no_inline_list_funcs),
{ok,Mod#c_module{defs=Ds1},get_warnings()}.
function_1({#c_var{name={F,Arity}}=Name,B0}) ->
try
%% Find a suitable starting value for the variable
%% counter. Note that this pass assumes that new_var_name/1
%% returns a variable name distinct from any variable used in
%% the entire body of the function. We use integers as
%% variable names to avoid filling up the atom table when
%% compiling huge functions.
Count = cerl_trees:next_free_variable_name(B0),
put(new_var_num, Count),
B = find_fixpoint(fun(Core) ->
%% This must be a fun!
expr(Core, value, sub_new())
end, B0, 20),
{Name,B}
catch
Class:Error:Stack ->
io:fwrite("Function: ~w/~w\n", [F,Arity]),
erlang:raise(Class, Error, Stack)
end.
find_fixpoint(_OptFun, Core, 0) ->
Core;
find_fixpoint(OptFun, Core0, Max) ->
case OptFun(Core0) of
Core0 -> Core0;
Core -> find_fixpoint(OptFun, Core, Max-1)
end.
%% body(Expr, Sub) -> Expr.
%% body(Expr, Context, Sub) -> Expr.
%% No special handling of anything except values.
body(Body, Sub) ->
body(Body, value, Sub).
body(#c_values{anno=A,es=Es0}, value, Sub) ->
Es1 = expr_list(Es0, value, Sub),
#c_values{anno=A,es=Es1};
body(E, Ctxt, Sub) ->
?ASSERT(verify_scope(E, Sub)),
expr(E, Ctxt, Sub).
%% guard(Expr, Sub) -> Expr.
%% Do guard expression. We optimize it in the same way as
%% expressions in function bodies.
guard(Expr, Sub) ->
?ASSERT(verify_scope(Expr, Sub)),
expr(Expr, value, Sub#sub{in_guard=true}).
%% expr(Expr, Sub) -> Expr.
%% expr(Expr, Context, Sub) -> Expr.
expr(Expr, Sub) ->
expr(Expr, value, Sub).
expr(#c_var{}=V, Ctxt, Sub) ->
%% Return void() in effect context to potentially shorten the life time
%% of the variable and potentially generate better code
%% (for instance, if the variable no longer needs to survive a function
%% call, there will be no need to save it in the stack frame).
case Ctxt of
effect -> void();
value -> sub_get_var(V, Sub)
end;
expr(#c_literal{val=Val}=L, Ctxt, Sub) ->
case Ctxt of
effect ->
case Val of
[] ->
%% Keep as [] - might give slightly better code.
L;
_ when is_atom(Val) ->
%% For cleanliness replace with void().
void();
_ ->
%% Warn and replace with void().
warn_useless_building(L, Sub),
void()
end;
value -> L
end;
expr(#c_cons{anno=Anno,hd=H0,tl=T0}=Cons, Ctxt, Sub) ->
DeeperSub = descend(Cons, Sub),
H1 = expr(H0, Ctxt, DeeperSub),
T1 = expr(T0, Ctxt, DeeperSub),
case Ctxt of
effect ->
warn_useless_building(Cons, Sub),
make_effect_seq([H1,T1], Sub);
value ->
ann_c_cons(Anno, H1, T1)
end;
expr(#c_tuple{anno=Anno,es=Es0}=Tuple, Ctxt, Sub) ->
Es = expr_list(Es0, Ctxt, descend(Tuple, Sub)),
case Ctxt of
effect ->
warn_useless_building(Tuple, Sub),
make_effect_seq(Es, Sub);
value ->
ann_c_tuple(Anno, Es)
end;
expr(#c_map{anno=Anno,arg=V0,es=Es0}=Map, Ctxt, Sub) ->
Es = pair_list(Es0, Ctxt, descend(Map, Sub)),
case Ctxt of
effect ->
warn_useless_building(Map, Sub),
make_effect_seq(Es, Sub);
value ->
V = expr(V0, Ctxt, Sub),
ann_c_map(Anno,V,Es)
end;
expr(#c_binary{segments=Ss}=Bin0, Ctxt, Sub) ->
%% Warn for useless building, but always build the binary
%% anyway to preserve a possible exception.
case Ctxt of
effect -> warn_useless_building(Bin0, Sub);
value -> ok
end,
Bin1 = Bin0#c_binary{segments=bitstr_list(Ss, Sub)},
Bin = bin_un_utf(Bin1),
eval_binary(Bin);
expr(#c_fun{}=Fun, effect, Sub) ->
%% A fun is created, but not used. Warn, and replace with the void value.
warn_useless_building(Fun, Sub),
void();
expr(#c_fun{vars=Vs0,body=B0}=Fun, Ctxt0, Sub0) ->
{Vs1,Sub1} = var_list(Vs0, Sub0),
Ctxt = case Ctxt0 of
{letrec,Ctxt1} -> Ctxt1;
value -> value
end,
B1 = body(B0, Ctxt, Sub1),
Fun#c_fun{vars=Vs1,body=B1};
expr(#c_seq{arg=Arg0,body=B0}=Seq0, Ctxt, Sub) ->
%% Optimise away pure literal arg as its value is ignored.
B1 = body(B0, Ctxt, Sub),
Arg = body(Arg0, effect, Sub),
case will_fail(Arg) of
true ->
Arg;
false ->
%% Arg cannot be "values" here - only a single value
%% make sense here.
case {Ctxt,is_safe_simple(Arg)} of
{effect,true} -> B1;
{effect,false} ->
case is_safe_simple(B1) of
true -> Arg;
false -> Seq0#c_seq{arg=Arg,body=B1}
end;
{value,true} -> B1;
{value,false} -> Seq0#c_seq{arg=Arg,body=B1}
end
end;
expr(#c_let{}=Let0, Ctxt, Sub) ->
Let = opt_case_in_let(Let0),
case simplify_let(Let, Sub) of
impossible ->
%% The argument for the let is "simple", i.e. has no
%% complex structures such as let or seq that can be entered.
?ASSERT(verify_scope(Let, Sub)),
opt_fun_call(opt_simple_let(Let, Ctxt, Sub));
Expr ->
%% The let body was successfully moved into the let argument.
%% Now recursively re-process the new expression.
Expr
end;
expr(#c_letrec{body=#c_var{}}=Letrec, effect, _Sub) ->
%% This is named fun in an 'effect' context. Warn and ignore.
add_warning(Letrec, {ignored,useless_building}),
void();
expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) ->
Fs1 = map(fun ({Name,Fb}) ->
case Ctxt =:= effect andalso is_fun_effect_safe(Name, B0) of
true ->
{Name,expr(Fb, {letrec, effect}, Sub)};
false ->
{Name,expr(Fb, {letrec, value}, Sub)}
end
end, Fs0),
B1 = body(B0, Ctxt, Sub),
Letrec#c_letrec{defs=Fs1,body=B1};
expr(#c_case{}=Case0, Ctxt, Sub) ->
%% Ideally, the compiler should only emit warnings when there is
%% a real mistake in the code being compiled. We use the follow
%% heuristics in an attempt to approach that ideal:
%%
%% * If the guard for a clause always fails, we will emit a
%% warning.
%%
%% * If a case expression is a literal, we will emit no warnings
%% for clauses that will not match or for clauses that are
%% shadowed after a clause that will always match. That means
%% that code such as:
%%
%% case ?DEBUG of
%% false -> ok;
%% true -> ...
%% end
%%
%% (where ?DEBUG expands to either 'true' or 'false') will not
%% produce any warnings.
%%
%% * If the case expression is not literal, warnings will be
%% emitted for every clause that don't match and for all
%% clauses following a clause that will always match.
%%
%% * If no clause will ever match, there will be a warning
%% (in addition to any warnings that may have been emitted
%% according to the rules above).
%%
Case1 = opt_bool_case(Case0, Sub),
#c_case{anno=Anno,arg=Arg0,clauses=Cs0} = Case1,
Arg1 = body(Arg0, value, Sub),
LitExpr = cerl:is_literal(Arg1),
{Arg2,Cs1} = case_opt(Arg1, Cs0, Sub),
Cs2 = clauses(Arg2, Cs1, Ctxt, Sub, LitExpr, Anno),
Case = Case1#c_case{arg=Arg2,clauses=Cs2},
warn_no_clause_match(Case1, Case),
Expr = eval_case(Case, Sub),
move_case_into_arg(Expr, Sub);
expr(#c_apply{anno=Anno,op=Op0,args=As0}=Apply0, _, Sub) ->
Op1 = expr(Op0, value, Sub),
As1 = expr_list(As0, value, Sub),
case cerl:is_data(Op1) andalso not is_literal_fun(Op1) of
false ->
Apply = Apply0#c_apply{op=Op1,args=As1},
fold_apply(Apply, Op1, As1);
true ->
add_warning(Apply0, {failed,bad_call}),
Err = #c_call{anno=Anno,
module=#c_literal{val=erlang},
name=#c_literal{val=error},
args=[#c_tuple{es=[#c_literal{val='badfun'},
Op1]}]},
make_effect_seq(As1++[Err], Sub)
end;
expr(#c_call{module=M0,name=N0}=Call0, Ctxt, Sub) ->
M1 = expr(M0, value, Sub),
N1 = expr(N0, value, Sub),
Call = Call0#c_call{module=M1,name=N1},
case useless_call(Ctxt, Call) of
no -> call(Call, M1, N1, Sub);
{yes,Seq} -> expr(Seq, Ctxt, Sub)
end;
expr(#c_primop{name=#c_literal{val=build_stacktrace}}, effect, _Sub) ->
void();
expr(#c_primop{args=As0}=Prim, _, Sub) ->
As1 = expr_list(As0, value, Sub),
Prim#c_primop{args=As1};
expr(#c_catch{anno=Anno,body=B}, effect, Sub) ->
%% When the return value of the 'catch' is ignored, we can replace it
%% with a try/catch to avoid building a stack trace when an exception
%% occurs.
Var = #c_var{name='catch_value'},
Evs = [#c_var{name='Class'},#c_var{name='Reason'},#c_var{name='Stk'}],
Try = #c_try{anno=Anno,arg=B,vars=[Var],body=Var,
evars=Evs,handler=void()},
expr(Try, effect, Sub);
expr(#c_catch{body=B0}=Catch, _, Sub) ->
%% We can remove catch if the value is simple
B1 = body(B0, value, Sub),
case is_safe_simple(B1) of
true -> B1;
false -> Catch#c_catch{body=B1}
end;
expr(#c_try{arg=E0,vars=[#c_var{name=X}],body=#c_var{name=X},
handler=#c_literal{val=false}=False}=Try, _, Sub) ->
%% Since guard may call expr/2, we must do some optimization of
%% the kind of try's that occur in guards.
E1 = body(E0, value, Sub),
case will_fail(E1) of
false ->
%% We can remove try/catch if the expression is an
%% expression that cannot fail.
case is_safe_bool_expr(E1) orelse is_safe_simple(E1) of
true -> E1;
false -> Try#c_try{arg=E1}
end;
true ->
%% Expression will always fail.
False
end;
expr(#c_try{anno=A,arg=E0,vars=Vs0,body=B0,evars=Evs0,handler=H0}=Try, _, Sub0) ->
%% Here is the general try/catch construct outside of guards.
%% We can remove try if the value is simple and replace it with a let.
E1 = body(E0, value, Sub0),
{Vs1,Sub1} = var_list(Vs0, Sub0),
B1 = body(B0, value, Sub1),
case is_safe_simple(E1) of
true ->
expr(#c_let{anno=A,vars=Vs1,arg=E1,body=B1}, value, Sub0);
false ->
{Evs1,Sub2} = var_list(Evs0, Sub0),
H1 = body(H0, value, Sub2),
Try#c_try{arg=E1,vars=Vs1,body=B1,evars=Evs1,handler=H1}
end.
%% If a fun or its application is used as an argument, then it's unsafe to
%% handle it in effect context as the side-effects may rely on its return
%% value. The following is a minimal example of where it can go wrong:
%%
%% do letrec 'f'/0 = fun () -> ... whatever ...
%% in call 'side':'effect'(apply 'f'/0())
%% 'ok'
%%
%% This function returns 'true' if Body definitely does not rely on a
%% value produced by FVar, or 'false' if Body depends on or might depend on
%% a value produced by FVar.
is_fun_effect_safe(#c_var{}=FVar, Body) ->
ifes_1(FVar, Body, true).
ifes_1(FVar, #c_alias{pat=Pat}, _Safe) ->
ifes_1(FVar, Pat, false);
ifes_1(FVar, #c_apply{op=Op,args=Args}, Safe) ->
%% FVar(...) is safe as long its return value is ignored, but it's never
%% okay to pass FVar as an argument.
ifes_list(FVar, Args, false) andalso ifes_1(FVar, Op, Safe);
ifes_1(FVar, #c_binary{segments=Segments}, _Safe) ->
ifes_list(FVar, Segments, false);
ifes_1(FVar, #c_bitstr{val=Val,size=Size,unit=Unit}, _Safe) ->
ifes_list(FVar, [Val, Size, Unit], false);
ifes_1(FVar, #c_call{args=Args}, _Safe) ->
ifes_list(FVar, Args, false);
ifes_1(FVar, #c_case{arg=Arg,clauses=Clauses}, Safe) ->
ifes_1(FVar, Arg, false) andalso ifes_list(FVar, Clauses, Safe);
ifes_1(FVar, #c_catch{body=Body}, _Safe) ->
ifes_1(FVar, Body, false);
ifes_1(FVar, #c_clause{pats=Pats,guard=Guard,body=Body}, Safe) ->
ifes_list(FVar, Pats, false) andalso
ifes_1(FVar, Guard, false) andalso
ifes_1(FVar, Body, Safe);
ifes_1(FVar, #c_cons{hd=Hd,tl=Tl}, _Safe) ->
ifes_1(FVar, Hd, false) andalso ifes_1(FVar, Tl, false);
ifes_1(FVar, #c_fun{body=Body}, _Safe) ->
ifes_1(FVar, Body, false);
ifes_1(FVar, #c_let{arg=Arg,body=Body}, Safe) ->
ifes_1(FVar, Arg, false) andalso ifes_1(FVar, Body, Safe);
ifes_1(FVar, #c_letrec{defs=Defs,body=Body}, Safe) ->
Funs = [Fun || {_,Fun} <- Defs],
ifes_list(FVar, Funs, false) andalso ifes_1(FVar, Body, Safe);
ifes_1(_FVar, #c_literal{}, _Safe) ->
true;
ifes_1(FVar, #c_map{arg=Arg,es=Elements}, _Safe) ->
ifes_1(FVar, Arg, false) andalso ifes_list(FVar, Elements, false);
ifes_1(FVar, #c_map_pair{key=Key,val=Val}, _Safe) ->
ifes_1(FVar, Key, false) andalso ifes_1(FVar, Val, false);
ifes_1(FVar, #c_primop{args=Args}, _Safe) ->
ifes_list(FVar, Args, false);
ifes_1(FVar, #c_seq{arg=Arg,body=Body}, Safe) ->
%% Arg of a #c_seq{} has no effect so it's okay to use FVar there even if
%% Safe=false.
ifes_1(FVar, Arg, true) andalso ifes_1(FVar, Body, Safe);
ifes_1(FVar, #c_try{arg=Arg,handler=Handler,body=Body}, Safe) ->
ifes_1(FVar, Arg, false) andalso
ifes_1(FVar, Handler, Safe) andalso
ifes_1(FVar, Body, Safe);
ifes_1(FVar, #c_tuple{es=Elements}, _Safe) ->
ifes_list(FVar, Elements, false);
ifes_1(FVar, #c_values{es=Elements}, _Safe) ->
ifes_list(FVar, Elements, false);
ifes_1(#c_var{name=Name}, #c_var{name=Name}, Safe) ->
%% It's safe to return FVar if it's unused.
Safe;
ifes_1(_FVar, #c_var{}, _Safe) ->
true.
ifes_list(FVar, [E|Es], Safe) ->
ifes_1(FVar, E, Safe) andalso ifes_list(FVar, Es, Safe);
ifes_list(_FVar, [], _Safe) ->
true.
expr_list(Es, Ctxt, Sub) ->
[expr(E, Ctxt, Sub) || E <- Es].
pair_list(Es, Ctxt, Sub) ->
[pair(E, Ctxt, Sub) || E <- Es].
pair(#c_map_pair{key=K,val=V}, effect, Sub) ->
make_effect_seq([K,V], Sub);
pair(#c_map_pair{key=K0,val=V0}=Pair, value=Ctxt, Sub) ->
K = expr(K0, Ctxt, Sub),
V = expr(V0, Ctxt, Sub),
Pair#c_map_pair{key=K,val=V}.
bitstr_list(Es, Sub) ->
[bitstr(E, Sub) || E <- Es].
bitstr(#c_bitstr{val=Val,size=Size}=BinSeg, Sub) ->
BinSeg#c_bitstr{val=expr(Val, Sub),size=expr(Size, value, Sub)}.
is_literal_fun(#c_literal{val=F}) -> is_function(F);
is_literal_fun(_) -> false.
%% is_safe_simple(Expr, Sub) -> true | false.
%% A safe simple cannot fail with badarg and is safe to use
%% in a guard.
%%
%% Currently, we don't attempt to check binaries because they
%% are difficult to check.
is_safe_simple(#c_var{}=Var) ->
not cerl:is_c_fname(Var);
is_safe_simple(#c_cons{hd=H,tl=T}) ->
is_safe_simple(H) andalso is_safe_simple(T);
is_safe_simple(#c_tuple{es=Es}) -> is_safe_simple_list(Es);
is_safe_simple(#c_literal{}) -> true;
is_safe_simple(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=Name},
args=Args}) when is_atom(Name) ->
NumArgs = length(Args),
case erl_internal:bool_op(Name, NumArgs) of
true ->
%% Boolean operators are safe if the arguments are boolean.
all(fun is_bool_expr/1, Args);
false ->
%% We need a rather complicated test to ensure that
%% we only allow safe calls that are allowed in a guard.
%% (Note that is_function/2 is a type test, but is not safe.)
erl_bifs:is_safe(erlang, Name, NumArgs) andalso
(erl_internal:comp_op(Name, NumArgs) orelse
erl_internal:new_type_test(Name, NumArgs))
end;
is_safe_simple(_) -> false.
is_safe_simple_list(Es) -> all(fun(E) -> is_safe_simple(E) end, Es).
%% will_fail(Expr) -> true|false.
%% Determine whether the expression will fail with an exception.
%% Return true if the expression always will fail with an exception,
%% i.e. never return normally.
will_fail(#c_let{arg=A,body=B}) ->
will_fail(A) orelse will_fail(B);
will_fail(#c_call{module=#c_literal{val=Mod},name=#c_literal{val=Name},args=Args}) ->
erl_bifs:is_exit_bif(Mod, Name, length(Args));
will_fail(#c_primop{name=#c_literal{val=match_fail},args=[_]}) -> true;
will_fail(_) -> false.
%% bin_un_utf(#c_binary{}) -> #c_binary{}
%% Convert any literal UTF-8/16/32 literals to byte-sized
%% integer fields.
bin_un_utf(#c_binary{anno=Anno,segments=Ss}=Bin) ->
Bin#c_binary{segments=bin_un_utf_1(Ss, Anno)}.
bin_un_utf_1([#c_bitstr{val=#c_literal{},type=#c_literal{val=utf8}}=H|T],
Anno) ->
bin_un_utf_eval(H, Anno) ++ bin_un_utf_1(T, Anno);
bin_un_utf_1([#c_bitstr{val=#c_literal{},type=#c_literal{val=utf16}}=H|T],
Anno) ->
bin_un_utf_eval(H, Anno) ++ bin_un_utf_1(T, Anno);
bin_un_utf_1([#c_bitstr{val=#c_literal{},type=#c_literal{val=utf32}}=H|T],
Anno) ->
bin_un_utf_eval(H, Anno) ++ bin_un_utf_1(T, Anno);
bin_un_utf_1([H|T], Anno) ->
[H|bin_un_utf_1(T, Anno)];
bin_un_utf_1([], _) -> [].
bin_un_utf_eval(Bitstr, Anno) ->
Segments = [Bitstr],
case eval_binary(#c_binary{anno=Anno,segments=Segments}) of
#c_literal{anno=Anno,val=Bytes} when is_binary(Bytes) ->
[#c_bitstr{anno=Anno,
val=#c_literal{anno=Anno,val=B},
size=#c_literal{anno=Anno,val=8},
unit=#c_literal{anno=Anno,val=1},
type=#c_literal{anno=Anno,val=integer},
flags=#c_literal{anno=Anno,val=[unsigned,big]}} ||
B <- binary_to_list(Bytes)];
_ ->
Segments
end.
%% eval_binary(#c_binary{}) -> #c_binary{} | #c_literal{}
%% Evaluate a binary at compile time if possible to create
%% a binary literal.
eval_binary(#c_binary{anno=Anno,segments=Ss}=Bin) ->
try
#c_literal{anno=Anno,val=eval_binary_1(Ss, <<>>)}
catch
throw:impossible ->
Bin;
throw:{badarg,Warning} ->
add_warning(Bin, {failed,Warning}),
Bin
end.
eval_binary_1([#c_bitstr{val=#c_literal{val=Val},size=#c_literal{val=Sz},
unit=#c_literal{val=Unit},type=#c_literal{val=Type},
flags=#c_literal{val=Flags}}|Ss], Acc0) ->
Endian = bs_endian(Flags),
%% Make sure that the size is reasonable.
case Type of
binary when is_bitstring(Val) ->
if
Sz =:= all ->
ok;
Sz*Unit =< bit_size(Val) ->
ok;
true ->
%% Field size is greater than the actual binary - will fail.
throw({badarg,embedded_binary_size})
end;
integer when is_integer(Val) ->
%% Estimate the number of bits needed to to hold the integer
%% literal. Check whether the field size is reasonable in
%% proportion to the number of bits needed.
if
Sz*Unit =< 256 ->
%% Don't be cheap - always accept fields up to this size.
ok;
true ->
case count_bits(Val) of
BitsNeeded when 2*BitsNeeded >= Sz*Unit ->
ok;
_ ->
%% More than about half of the field size will be
%% filled out with zeroes - not acceptable.
throw(impossible)
end
end;
float when is_float(Val) ->
%% Bad float size.
try Sz*Unit of
16 -> ok;
32 -> ok;
64 -> ok;
_ ->
throw({badarg,bad_float_size})
catch
error:_ ->
throw({badarg,bad_float_size})
end;
utf8 -> ok;
utf16 -> ok;
utf32 -> ok;
_ ->
throw(impossible)
end,
case Endian =:= native andalso Type =/= binary of
true -> throw(impossible);
false -> ok
end,
%% Evaluate the field.
try eval_binary_2(Acc0, Val, Sz, Unit, Type, Endian) of
Acc -> eval_binary_1(Ss, Acc)
catch
error:_ ->
throw(impossible)
end;
eval_binary_1([], Acc) -> Acc;
eval_binary_1(_, _) -> throw(impossible).
eval_binary_2(Acc, Val, Size, Unit, integer, little) ->
<<Acc/bitstring,Val:(Size*Unit)/little>>;
eval_binary_2(Acc, Val, Size, Unit, integer, big) ->
<<Acc/bitstring,Val:(Size*Unit)/big>>;
eval_binary_2(Acc, Val, _Size, _Unit, utf8, _) ->
try
<<Acc/bitstring,Val/utf8>>
catch
error:_ ->
throw({badarg,bad_unicode})
end;
eval_binary_2(Acc, Val, _Size, _Unit, utf16, big) ->
try
<<Acc/bitstring,Val/big-utf16>>
catch
error:_ ->
throw({badarg,bad_unicode})
end;
eval_binary_2(Acc, Val, _Size, _Unit, utf16, little) ->
try
<<Acc/bitstring,Val/little-utf16>>
catch
error:_ ->
throw({badarg,bad_unicode})
end;
eval_binary_2(Acc, Val, _Size, _Unit, utf32, big) ->
try
<<Acc/bitstring,Val/big-utf32>>
catch
error:_ ->
throw({badarg,bad_unicode})
end;
eval_binary_2(Acc, Val, _Size, _Unit, utf32, little) ->
try
<<Acc/bitstring,Val/little-utf32>>
catch
error:_ ->
throw({badarg,bad_unicode})
end;
eval_binary_2(Acc, Val, Size, Unit, float, little) ->
<<Acc/bitstring,Val:(Size*Unit)/little-float>>;
eval_binary_2(Acc, Val, Size, Unit, float, big) ->
<<Acc/bitstring,Val:(Size*Unit)/big-float>>;
eval_binary_2(Acc, Val, all, Unit, binary, _) ->
case bit_size(Val) of
Size when Size rem Unit =:= 0 ->
<<Acc/bitstring,Val:Size/bitstring>>;
Size ->
throw({badarg,{embedded_unit,Unit,Size}})
end;
eval_binary_2(Acc, Val, Size, Unit, binary, _) ->
<<Acc/bitstring,Val:(Size*Unit)/bitstring>>.
bs_endian([big=E|_]) -> E;
bs_endian([little=E|_]) -> E;
bs_endian([native=E|_]) -> E;
bs_endian([_|Fs]) -> bs_endian(Fs).
%% Count the number of bits approximately needed to store Int.
%% (We don't need an exact result for this purpose.)
count_bits(Int) ->
count_bits_1(abs(Int), 64).
count_bits_1(0, Bits) -> Bits;
count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
%% useless_call(Context, #c_call{}) -> no | {yes,Expr}
%% Check whether the function is called only for effect,
%% and if the function either has no effect whatsoever or
%% the only effect is an exception. Generate appropriate
%% warnings. If the call is "useless" (has no effect),
%% a rewritten expression consisting of a sequence of
%% the arguments only is returned.
useless_call(effect, #c_call{module=#c_literal{val=Mod},
name=#c_literal{val=Name},
args=Args}=Call) ->
A = length(Args),
case erl_bifs:is_safe(Mod, Name, A) of
false ->
case erl_bifs:is_pure(Mod, Name, A) of
true ->
Classified = classify_call(Call),
add_warning(Call, {ignored,{result,Classified}});
false ->
ok
end,
no;
true ->
add_warning(Call, {ignored,{no_effect,{Mod,Name,A}}}),
{yes,make_effect_seq(Args, sub_new())}
end;
useless_call(_, _) -> no.
%% make_effect_seq([Expr], Sub) -> #c_seq{}|void()
%% Convert a list of expressions evaluated in effect context to a chain of
%% #c_seq{}. The body in the innermost #c_seq{} will be void().
%% Anything that will not have any effect will be thrown away.
make_effect_seq([H|T], Sub) ->
case is_safe_simple(H) of
true -> make_effect_seq(T, Sub);
false -> #c_seq{arg=H,body=make_effect_seq(T, Sub)}
end;
make_effect_seq([], _) -> void().
%% fold_apply(Apply, LiteraFun, Args) -> Apply.
%% Replace an apply of a literal external fun with a call.
fold_apply(Apply, #c_literal{val=Fun}, Args) when is_function(Fun) ->
{module,Mod} = erlang:fun_info(Fun, module),
{name,Name} = erlang:fun_info(Fun, name),
{arity,Arity} = erlang:fun_info(Fun, arity),
if
Arity =:= length(Args) ->
#c_call{anno=Apply#c_apply.anno,
module=#c_literal{val=Mod},
name=#c_literal{val=Name},
args=Args};
true ->
Apply
end;
fold_apply(Apply, _, _) -> Apply.
%% Handling remote calls. The module/name fields have been processed.
call(#c_call{args=As0}=Call0, #c_literal{val=M}=M0, #c_literal{val=N}=N0, Sub) ->
As1 = expr_list(As0, value, Sub),
case simplify_call(Call0, M, N, As1) of
#c_literal{}=Lit ->
Lit;
#c_call{args=As}=Call ->
case get(no_inline_list_funcs) of
true ->
fold_call(Call, M0, N0, As, Sub);
false ->
case sys_core_fold_lists:call(Call, M, N, As) of
none -> fold_call(Call, M0, N0, As, Sub);
Core -> expr(Core, Sub)
end
end
end;
call(#c_call{args=As0}=Call, M, N, Sub) ->
As = expr_list(As0, value, Sub),
fold_call(Call#c_call{args=As}, M, N, As, Sub).
%% Rewrite certain known functions to BIFs, improving performance
%% slightly at the cost of making tracing and stack traces incorrect.
simplify_call(Call, maps, get, [Key, Map]) ->
rewrite_call(Call, erlang, map_get, [Key, Map]);
simplify_call(Call, maps, is_key, [Key, Map]) ->
rewrite_call(Call, erlang, is_map_key, [Key, Map]);
simplify_call(_Call, maps, new, []) ->
#c_literal{val=#{}};
simplify_call(Call, maps, size, [Map]) ->
rewrite_call(Call, erlang, map_size, [Map]);
simplify_call(Call, _, _, Args) ->
Call#c_call{args=Args}.
%% rewrite_call(Call0, Mod, Func, Args, Sub) -> Call
%% Rewrites a call to the given MFA.
rewrite_call(Call, Mod, Func, Args) ->
ModLit = #c_literal{val=Mod},
FuncLit = #c_literal{val=Func},
Call#c_call{module=ModLit,name=FuncLit,args=Args}.
%% fold_call(Call, Mod, Name, Args, Sub) -> Expr.
%% Try to safely evaluate the call. Just try to evaluate arguments,
%% do the call and convert return values to literals. If this
%% succeeds then use the new value, otherwise just fail and use
%% original call. Do this at every level.
%%
%% We attempt to evaluate calls to certain BIFs even if the
%% arguments are not literals. For instance, we evaluate length/1
%% if the shape of the list is known, and element/2 and setelement/3
%% if the position is constant and the shape of the tuple is known.
%%
fold_call(Call, #c_literal{val=M}, #c_literal{val=F}, Args, Sub) ->
fold_call_1(Call, M, F, Args, Sub);
fold_call(Call, _M, _N, _Args, _Sub) -> Call.
fold_call_1(Call, erlang, apply, [Fun,Args], _) ->
simplify_fun_apply(Call, Fun, Args);
fold_call_1(Call, erlang, apply, [Mod,Func,Args], _) ->
simplify_apply(Call, Mod, Func, Args);
fold_call_1(Call, Mod, Name, Args, Sub) ->
NumArgs = length(Args),
case erl_bifs:is_pure(Mod, Name, NumArgs) of
false -> Call; %Not pure - keep call.
true -> fold_call_2(Call, Mod, Name, Args, Sub)
end.
fold_call_2(Call, Module, Name, Args, Sub) ->
case all(fun cerl:is_literal/1, Args) of
true ->
%% All arguments are literals.
fold_lit_args(Call, Module, Name, Args);
false ->
%% At least one non-literal argument.
fold_non_lit_args(Call, Module, Name, Args, Sub)
end.
fold_lit_args(Call, Module, Name, Args0) ->
Args = [cerl:concrete(A) || A <- Args0],
try apply(Module, Name, Args) of
Val ->
case cerl:is_literal_term(Val) of
true ->
cerl:ann_abstract(cerl:get_ann(Call), Val);
false ->
%% Successful evaluation, but it was not possible
%% to express the computed value as a literal.
Call
end
catch
error:Reason ->
%% Evaluation of the function failed. Warn but keep
%% the call to ensure that extended error information
%% will be available at runtime.
eval_failure(Call, Reason)
end.
%% fold_non_lit_args(Call, Module, Name, Args, Sub) -> Expr.
%% Attempt to evaluate some pure BIF calls with one or more
%% non-literals arguments.
%%
fold_non_lit_args(Call, erlang, length, [Arg], _) ->
eval_length(Call, Arg);
fold_non_lit_args(Call, erlang, '++', [Arg1,Arg2], _) ->
eval_append(Call, Arg1, Arg2);
fold_non_lit_args(Call, lists, append, [Arg1,Arg2], _) ->
eval_append(Call, Arg1, Arg2);
fold_non_lit_args(Call, _, _, _, _) -> Call.
%% eval_length(Call, List) -> Val.
%% Evaluates the length for the prefix of List which has a known
%% shape.
%%
eval_length(Call, Core) -> eval_length(Call, Core, 0).
eval_length(Call, #c_literal{val=Val}, Len0) ->
try
Len = Len0 + length(Val),
#c_literal{anno=Call#c_call.anno,val=Len}
catch
_:_ ->
eval_failure(Call, badarg)
end;
eval_length(Call, #c_cons{tl=T}, Len) ->
eval_length(Call, T, Len+1);
eval_length(Call, _List, 0) ->
Call; %Could do nothing
eval_length(Call, List, Len) ->
A = Call#c_call.anno,
#c_call{anno=A,
module=#c_literal{anno=A,val=erlang},
name=#c_literal{anno=A,val='+'},
args=[#c_literal{anno=A,val=Len},Call#c_call{args=[List]}]}.
%% eval_append(Call, FirstList, SecondList) -> Val.
%% Evaluates the constant part of '++' expression.
%%
eval_append(Call, #c_literal{val=Cs1}=S1, #c_literal{val=Cs2}) ->
try
S1#c_literal{val=Cs1 ++ Cs2}
catch error:badarg ->
eval_failure(Call, badarg)
end;
eval_append(Call, #c_literal{val=Cs}, List) when length(Cs) =< 4 ->
Anno = Call#c_call.anno,
foldr(fun (C, L) ->
ann_c_cons(Anno, #c_literal{val=C}, L)
end, List, Cs);
eval_append(Call, #c_cons{anno=Anno,hd=H,tl=T}, List) ->
ann_c_cons(Anno, H, eval_append(Call, T, List));
eval_append(Call, X, Y) ->
Call#c_call{args=[X,Y]}. %Rebuild call arguments.
%% eval_failure(Call, Reason) -> Core.
%% Warn for a call that will fail but keep the call.
%%
eval_failure(Call, Reason) ->
Classified = classify_call(Call),
add_warning(Call, {failed,{eval_failure,Classified,Reason}}),
Call.
%% simplify_apply(Call0, Mod, Func, Args) -> Call
%% Simplify an apply/3 to a call if the number of arguments
%% are known at compile time.
simplify_apply(Call, Mod, Func, Args0) ->
case is_atom_or_var(Mod) andalso is_atom_or_var(Func) of
true ->
case get_fixed_args(Args0, []) of
error ->
Call;
{ok,Args} ->
Call#c_call{module=Mod,name=Func,args=Args}
end;
false ->
Call
end.
is_atom_or_var(#c_literal{val=Atom}) when is_atom(Atom) -> true;
is_atom_or_var(#c_var{}) -> true;
is_atom_or_var(_) -> false.
simplify_fun_apply(#c_call{anno=Anno}=Call, Fun, Args0) ->
case get_fixed_args(Args0, []) of
error ->
Call;
{ok,Args} ->
#c_apply{anno=Anno,op=Fun,args=Args}
end.
get_fixed_args(#c_literal{val=MoreArgs0}, Args)
when length(MoreArgs0) >= 0 ->
MoreArgs = [#c_literal{val=Arg} || Arg <- MoreArgs0],
{ok,reverse(Args, MoreArgs)};
get_fixed_args(#c_cons{hd=Arg,tl=T}, Args) ->
get_fixed_args(T, [Arg|Args]);
get_fixed_args(_, _) -> error.
%% clause(Clause, Cepxr, Context, Sub) -> Clause.
clause(#c_clause{pats=Ps0}=Cl, Cexpr, Ctxt, Sub0) ->
try pattern_list(Ps0, Sub0) of
{Ps1,Sub1} ->
clause_1(Cl, Ps1, Cexpr, Ctxt, Sub1)
catch
nomatch ->
Cl#c_clause{anno=[compiler_generated],
guard=#c_literal{val=false}}
end.
clause_1(#c_clause{guard=G0,body=B0}=Cl, Ps1, Cexpr, Ctxt, Sub1) ->
GSub = case {Cexpr,Ps1,G0} of
{_,_,#c_literal{}} ->
%% No need for substitution tricks when the guard
%% does not contain any variables.
Sub1;
{#c_var{},[#c_var{}=Var],_} ->
%% The idea here is to optimize expressions such as
%%
%% case A of A -> ...
%%
%% to get rid of the extra guard test that the compiler
%% added when converting to the Core Erlang representation:
%%
%% case A of NewVar when A =:= NewVar -> ...
%%
%% By replacing NewVar with A everywhere in the guard
%% expression, we get
%%
%% case A of NewVar when A =:= A -> ...
%%
%% which by constant-expression evaluation is reduced to
%%
%% case A of NewVar when true -> ...
%%
case cerl:is_c_fname(Cexpr) of
false ->
sub_set_var(Var, Cexpr, Sub1);
true ->
%% We must not copy funs, and especially not into guards.
Sub1
end;
_ ->
Sub1
end,
G1 = guard(G0, GSub),
B1 = body(B0, Ctxt, Sub1),
Cl#c_clause{pats=Ps1,guard=G1,body=B1}.
%% let_substs(LetVars, LetArg, Sub) -> {[Var],[Val],Sub}.
%% Add suitable substitutions to Sub of variables in LetVars. First
%% remove variables in LetVars from Sub, then fix subs. N.B. must
%% work out new subs in parallel and then apply them to subs. Return
%% the unsubstituted variables and values.
let_substs(Vs0, As0, Sub0) ->
{Vs1,Sub1} = var_list(Vs0, Sub0),
{Vs2,As1,Ss} = let_substs_1(Vs1, As0, Sub1),
Sub2 = sub_add_scope([V || #c_var{name=V} <- Vs2], Sub1),
{Vs2,As1,
foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}.
let_substs_1(Vs, #c_values{es=As}, Sub) ->
let_subst_list(Vs, As, Sub);
let_substs_1([V], A, Sub) -> let_subst_list([V], [A], Sub);
let_substs_1(Vs, A, _) -> {Vs,A,[]}.
let_subst_list([V|Vs0], [A|As0], Sub) ->
{Vs1,As1,Ss} = let_subst_list(Vs0, As0, Sub),
case is_subst(A) of
true ->
{Vs1,As1,sub_subst_var(V, A, Sub) ++ Ss};
false ->
{[V|Vs1],[A|As1],Ss}
end;
let_subst_list([], [], _) -> {[],[],[]}.
%% pattern(Pattern, InSub) -> {Pattern,OutSub}.
%% pattern(Pattern, InSub, OutSub) -> {Pattern,OutSub}.
%% Variables occurring in Pattern will shadow so they must be removed
%% from Sub. If they occur as a value in Sub then we create a new
%% variable and then add a substitution for that.
%%
%% Patterns are complicated by sizes in binaries. These are pure
%% input variables which create no bindings. We, therefore, need to
%% carry around the original substitutions to get the correct
%% handling.
%%pattern(Pat, Sub) -> pattern(Pat, Sub, Sub).
pattern(#c_var{}=Pat, Isub, Osub) ->
case sub_is_in_scope(Pat, Isub) of
true ->
%% This variable either has a substitution or is used in
%% the variable list of an enclosing `let`. In either
%% case, it must be renamed to an unused name to avoid
%% name capture problems.
V1 = make_var_name(),
Pat1 = #c_var{name=V1},
{Pat1,sub_set_var(Pat, Pat1, sub_add_scope([V1], Osub))};
false ->
%% This variable has never been used. Add it to the scope.
{Pat,sub_add_scope([Pat#c_var.name], Osub)}
end;
pattern(#c_literal{}=Pat, _, Osub) -> {Pat,Osub};
pattern(#c_cons{anno=Anno,hd=H0,tl=T0}, Isub, Osub0) ->
{H1,Osub1} = pattern(H0, Isub, Osub0),
{T1,Osub2} = pattern(T0, Isub, Osub1),
{ann_c_cons(Anno, H1, T1),Osub2};
pattern(#c_tuple{anno=Anno,es=Es0}, Isub, Osub0) ->
{Es1,Osub1} = pattern_list(Es0, Isub, Osub0),
{ann_c_tuple(Anno, Es1),Osub1};
pattern(#c_map{anno=Anno,es=Es0}=Map, Isub, Osub0) ->
{Es1,Osub1} = map_pair_pattern_list(Es0, Isub, Osub0),
{Map#c_map{anno=Anno,es=Es1},Osub1};
pattern(#c_binary{segments=V0}=Pat, Isub, Osub0) ->
{V1,Osub1} = bin_pattern_list(V0, Isub, Osub0),
{Pat#c_binary{segments=V1},Osub1};
pattern(#c_alias{var=V0,pat=P0}=Pat, Isub, Osub0) ->
{V1,Osub1} = pattern(V0, Isub, Osub0),
{P1,Osub} = pattern(P0, Isub, Osub1),
{Pat#c_alias{var=V1,pat=P1},Osub}.
map_pair_pattern_list(Ps0, Isub, Osub0) ->
{Ps,{_,Osub}} = mapfoldl(fun map_pair_pattern/2, {Isub,Osub0}, Ps0),
{Ps,Osub}.
map_pair_pattern(#c_map_pair{op=#c_literal{val=exact},key=K0,val=V0}=Pair,{Isub,Osub0}) ->
K = expr(K0, Isub),
{V,Osub} = pattern(V0,Isub,Osub0),
{Pair#c_map_pair{key=K,val=V},{Isub,Osub}}.
bin_pattern_list(Ps, Isub, Osub0) ->
mapfoldl(fun(P, Osub) ->
bin_pattern(P, Isub, Osub)
end, Osub0, Ps).
bin_pattern(#c_bitstr{val=E0,size=Size0}=Pat0, Isub, Osub0) ->
Size2 = case {Size0,expr(Size0, Isub)} of
{#c_var{},#c_literal{val=all}} ->
%% The size `all` is used for the size of the final binary
%% segment in a pattern. Using `all` explicitly is not allowed,
%% so we convert it to an obvious invalid size. We also need
%% to add an annotation to get the correct wording of the warning
%% that will soon be issued.
#c_literal{anno=[size_was_all],val=bad_size};
{_,Size1} ->
Size1
end,
{E1,Osub} = pattern(E0, Isub, Osub0),
Pat = Pat0#c_bitstr{val=E1,size=Size2},
bin_pat_warn(Pat),
{Pat,Osub}.
pattern_list(Ps, Sub) -> pattern_list(Ps, Sub, Sub).
pattern_list(Ps0, Isub, Osub0) ->
mapfoldl(fun (P, Osub) -> pattern(P, Isub, Osub) end, Osub0, Ps0).
%% var_list([Var], InSub) -> {Pattern,OutSub}.
%% Works like pattern_list/2 but only accept variables and is
%% guaranteed not to throw an exception.
var_list(Vs, Sub0) ->
mapfoldl(fun (#c_var{}=V, Sub) ->
pattern(V, Sub, Sub)
end, Sub0, Vs).
%%%
%%% Generate warnings for binary patterns that will not match.
%%%
bin_pat_warn(#c_bitstr{type=#c_literal{val=Type},
val=Val0,
size=#c_literal{anno=SizeAnno,val=Sz},
unit=#c_literal{val=Unit},
flags=Fl}=Pat) ->
case {Type,Sz} of
{_,_} when is_integer(Sz), Sz >= 0 -> ok;
{binary,all} -> ok;
{utf8,undefined} -> ok;
{utf16,undefined} -> ok;
{utf32,undefined} -> ok;
{_,_} ->
case member(size_was_all, SizeAnno) of
true ->
add_warning(Pat, {nomatch,{bit_syntax_size,all}});
false ->
add_warning(Pat, {nomatch,{bit_syntax_size,Sz}})
end,
throw(nomatch)
end,
case {Type,Val0} of
{integer,#c_literal{val=Val}} when is_integer(Val) ->
Signedness = signedness(Fl),
TotalSz = Sz * Unit,
bit_pat_warn_int(Val, TotalSz, Signedness, Pat);
{float,#c_literal{val=Val}} when is_float(Val) ->
ok;
{utf8,#c_literal{val=Val}} when is_integer(Val) ->
bit_pat_warn_unicode(Val, Pat);
{utf16,#c_literal{val=Val}} when is_integer(Val) ->
bit_pat_warn_unicode(Val, Pat);
{utf32,#c_literal{val=Val}} when is_integer(Val) ->
bit_pat_warn_unicode(Val, Pat);
{_,#c_literal{val=Val}} ->
add_warning(Pat, {nomatch,{bit_syntax_type,Val,Type}}),
throw(nomatch);
{_,_} ->
ok
end;
bin_pat_warn(#c_bitstr{type=#c_literal{val=Type},val=Val0,flags=Fl}=Pat) ->
%% Size is variable. Not much that we can check.
case {Type,Val0} of
{integer,#c_literal{val=Val}} when is_integer(Val) ->
case signedness(Fl) of
unsigned when Val < 0 ->
add_warning(Pat, {nomatch,{bit_syntax_unsigned,Val}}),
throw(nomatch);
_ ->
ok
end;
{float,#c_literal{val=Val}} when is_float(Val) ->
ok;
{_,#c_literal{val=Val}} ->
add_warning(Pat, {nomatch,{bit_syntax_type,Val,Type}}),
throw(nomatch);
{_,_} ->
ok
end.
bit_pat_warn_int(Val, 0, signed, Pat) ->
if
Val =:= 0 ->
ok;
true ->
add_warning(Pat, {nomatch,{bit_syntax_truncated,signed,Val,0}}),
throw(nomatch)
end;
bit_pat_warn_int(Val, Sz, signed, Pat) ->
if
Val < 0, Val bsr (Sz - 1) =/= -1 ->
add_warning(Pat, {nomatch,{bit_syntax_truncated,signed,Val,Sz}}),
throw(nomatch);
Val > 0, Val bsr (Sz - 1) =/= 0 ->
add_warning(Pat, {nomatch,{bit_syntax_truncated,signed,Val,Sz}}),
throw(nomatch);
true ->
ok
end;
bit_pat_warn_int(Val, _Sz, unsigned, Pat) when Val < 0 ->
add_warning(Pat, {nomatch,{bit_syntax_unsigned,Val}}),
throw(nomatch);
bit_pat_warn_int(Val, Sz, unsigned, Pat) ->
if
Val bsr Sz =:= 0 ->
ok;
true ->
add_warning(Pat, {nomatch,{bit_syntax_truncated,unsigned,Val,Sz}}),
throw(nomatch)
end.
bit_pat_warn_unicode(U, _Pat) when 0 =< U, U =< 16#10FFFF ->
ok;
bit_pat_warn_unicode(U, Pat) ->
add_warning(Pat, {nomatch,{bit_syntax_unicode,U}}),
throw(nomatch).
signedness(#c_literal{val=Flags}) ->
[S] = [F || F <- Flags, F =:= signed orelse F =:= unsigned],
S.
%% is_subst(Expr) -> true | false.
%% Test whether an expression is a suitable substitution.
is_subst(#c_var{name={_,_}}) ->
%% Funs must not be duplicated (which will happen if the variable
%% is used more than once), because the funs will not be equal
%% (their "index" fields will be different).
false;
is_subst(#c_var{}) -> true;
is_subst(#c_literal{}) -> true;
is_subst(_) -> false.
%% sub_new() -> #sub{}.
%% sub_get_var(Var, #sub{}) -> Value.
%% sub_set_var(Var, Value, #sub{}) -> #sub{}.
%% sub_set_name(Name, Value, #sub{}) -> #sub{}.
%% sub_del_var(Var, #sub{}) -> #sub{}.
%% sub_subst_var(Var, Value, #sub{}) -> [{Name,Value}].
%% sub_is_in_scope(Var, #sub{}) -> boolean().
%% sub_add_scope([Var], #sub{}) -> #sub{}
%% sub_subst_scope(#sub{}) -> #sub{}
%%
%% We use the variable name as key so as not have problems with
%% annotations. When adding a new substitute we fold substitute
%% chains so we never have to search more than once. Use orddict so
%% we know the format.
%%
%% In addition to the list of substitutions, we also keep track of
%% all variable currently live (the scope).
%%
%% sub_add_scope/2 adds variables to the scope. sub_subst_scope/1
%% adds dummy substitutions for all variables in the scope in order
%% to force renaming if variables in the scope occurs as pattern
%% variables.
sub_new() -> #sub{v=orddict:new(),s=sets:new([{version, 2}]),t=#{}}.
sub_new(#sub{}=Sub) ->
Sub#sub{v=orddict:new(),t=#{}}.
sub_get_var(#c_var{name=V}=Var, #sub{v=S}) ->
case orddict:find(V, S) of
{ok,Val} ->
propagate_compiler_generated(Var, Val);
error ->
Var
end.
sub_set_var(#c_var{name=V}, Val, Sub) ->
sub_set_name(V, Val, Sub).
sub_set_name(V, Val, #sub{v=S,s=Scope,t=Tdb0}=Sub) ->
Tdb1 = kill_types(V, Tdb0),
Tdb = copy_type(V, Val, Tdb1),
Sub#sub{v=orddict:store(V, Val, S),s=sets:add_element(V, Scope),t=Tdb}.
sub_subst_var(#c_var{name=V}=Var, Val0, #sub{v=S0}) ->
Val = propagate_compiler_generated(Var, Val0),
%% Fold chained substitutions.
[{V,Val}] ++ [{K,Val} || {K,#c_var{name=V1}} <- S0, V1 =:= V].
sub_add_scope(Vs, #sub{s=Scope0}=Sub) ->
Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
sets:add_element(V, S)
end, Scope0, Vs),
Sub#sub{s=Scope}.
sub_subst_scope(#sub{v=S0,s=Scope}=Sub) ->
Initial = case S0 of
[{NegInt,_}|_] when is_integer(NegInt), NegInt < 0 ->
NegInt - 1;
_ ->
-1
end,
S = sub_subst_scope_1(sets:to_list(Scope), Initial, S0),
Sub#sub{v=orddict:from_list(S)}.
%% The keys in an orddict must be unique. Make them so!
sub_subst_scope_1([H|T], Key, Acc) ->
sub_subst_scope_1(T, Key-1, [{Key,#c_var{name=H}}|Acc]);
sub_subst_scope_1([], _, Acc) -> Acc.
sub_is_in_scope(#c_var{name=V}, #sub{s=Scope}) ->
sets:is_element(V, Scope).
%% Propagate the 'compiler_generated' annotation (if any)
%% from From to To.
propagate_compiler_generated(From, To) ->
case is_compiler_generated(From) andalso
not is_compiler_generated(To) of
true ->
Ann = [compiler_generated|cerl:get_ann(To)],
cerl:set_ann(To, Ann);
false ->
To
end.
%% warn_no_clause_match(CaseOrig, CaseOpt) -> ok
%% Generate a warning if none of the user-specified clauses
%% will match.
warn_no_clause_match(CaseOrig, CaseOpt) ->
OrigCs = cerl:case_clauses(CaseOrig),
OptCs = cerl:case_clauses(CaseOpt),
case any(fun(C) -> not is_compiler_generated(C) end, OrigCs) andalso
all(fun is_compiler_generated/1, OptCs) of
true ->
%% The original list of clauses did contain at least one
%% user-specified clause, but none of them will match.
%% That is probably a mistake.
add_warning(CaseOrig, {nomatch,no_clause});
false ->
%% Either there were user-specified clauses left in
%% the transformed clauses, or else none of the original
%% clauses were user-specified to begin with (as in 'andalso').
ok
end.
%% clauses(E, [Clause], TopLevel, Context, Sub, Anno) -> [Clause].
%% Trim the clauses by removing all clauses AFTER the first one which
%% is guaranteed to match. Also remove all trivially false clauses.
clauses(E, [C0|Cs], Ctxt, Sub, LitExpr, Anno) ->
#c_clause{pats=Ps,guard=G} = C1 = clause(C0, E, Ctxt, Sub),
%%ok = io:fwrite("~w: ~p~n", [?LINE,{E,Ps}]),
case {will_match(E, Ps),will_succeed(G)} of
{yes,yes} ->
case LitExpr of
false ->
Line = get_line(cerl:get_ann(C1)),
shadow_warning(Cs, Line, Anno);
true ->
%% If the case expression is a literal,
%% it is probably OK that some clauses don't match.
%% It is a probably some sort of debug macro.
ok
end,
[C1]; %Skip the rest
{_Mat,no} -> %Guard fails.
add_warning(C1, {nomatch,guard}),
clauses(E, Cs, Ctxt, Sub, LitExpr, Anno); %Skip this clause
{_Mat,_Suc} ->
[C1|clauses(E, Cs, Ctxt, Sub, LitExpr, Anno)]
end;
clauses(_, [], _, _, _, _) -> [].
shadow_warning([C|Cs], none, Anno) ->
add_warning(C, {nomatch,shadow}),
shadow_warning(Cs, none, Anno);
shadow_warning([C|Cs], Line, Anno) ->
case keyfind(function, 1, Anno) of
{function, {Name, Arity}} ->
add_warning(C, {nomatch,{shadow,Line,{Name,Arity}}});
_ ->
add_warning(C, {nomatch,{shadow,Line}})
end,
shadow_warning(Cs, Line, Anno);
shadow_warning([], _, _) -> ok.
%% will_succeed(Guard) -> yes | maybe | no.
%% Test if we know whether a guard will succeed/fail or just don't
%% know. Be VERY conservative!
will_succeed(#c_literal{val=true}) -> yes;
will_succeed(#c_literal{val=false}) -> no;
will_succeed(_Guard) -> maybe.
%% will_match(Expr, [Pattern]) -> yes | maybe.
%% We KNOW that this function is only used after optimizations
%% in case_opt/4. Therefore clauses that can definitely not match
%% have already been pruned.
will_match(#c_values{es=Es}, Ps) ->
will_match_1(cerl_clauses:match_list(Ps, Es));
will_match(E, [P]) ->
will_match_1(cerl_clauses:match(P, E)).
will_match_1({false,_}) -> maybe;
will_match_1({true,_}) -> yes.
%% opt_bool_case(CoreExpr, Sub) - CoreExpr'.
%%
%% In bodies, do various optimizations to case statements that have
%% boolean case expressions. We don't do the optimizations in guards,
%% because they would thwart the optimization in beam_ssa_bool.
%%
%% We start with some simple optimizations and normalizations
%% to facilitate later optimizations.
%%
%% If the case expression can only return a boolean we can remove any
%% clause that cannot possibly match 'true' or 'false'. Also, any
%% clause following both 'true' and 'false' clause can be removed. If
%% successful, we will end up like this:
%%
%% case BoolExpr of case BoolExpr of
%% true -> false ->
%% ...; ...;
%% false -> OR true ->
%% ... ...
%% end. end.
%%
%% We give up if there are clauses with guards, or if there
%% is a variable clause that matches anything.
opt_bool_case(#c_case{}=Case, #sub{in_guard=true}) ->
%% v3_kernel does a better job without "help".
Case;
opt_bool_case(#c_case{arg=Arg}=Case0, #sub{in_guard=false}) ->
case is_bool_expr(Arg) of
false ->
Case0;
true ->
try opt_bool_clauses(Case0) of
Case ->
opt_bool_not(Case)
catch
impossible ->
Case0
end
end.
opt_bool_clauses(#c_case{clauses=Cs}=Case) ->
Case#c_case{clauses=opt_bool_clauses(Cs, false, false)}.
opt_bool_clauses(Cs, true, true) ->
%% We have now seen clauses that match both true and false.
%% Any remaining clauses cannot possibly match.
case Cs of
[_|_] ->
shadow_warning(Cs, none, []),
[];
[] ->
[]
end;
opt_bool_clauses([#c_clause{pats=[#c_literal{val=Lit}],
guard=#c_literal{val=true}}=C|Cs], SeenT, SeenF) ->
case is_boolean(Lit) of
false ->
%% Not a boolean - this clause can't match.
add_warning(C, {nomatch,clause_type}),
opt_bool_clauses(Cs, SeenT, SeenF);
true ->
%% This clause will match.
case {Lit,SeenT,SeenF} of
{false,_,false} ->
[C|opt_bool_clauses(Cs, SeenT, true)];
{true,false,_} ->
[C|opt_bool_clauses(Cs, true, SeenF)];
_ ->
add_warning(C, {nomatch,shadow}),
opt_bool_clauses(Cs, SeenT, SeenF)
end
end;
opt_bool_clauses([#c_clause{pats=Ps,guard=#c_literal{val=true}}=C|Cs], SeenT, SeenF) ->
case Ps of
[#c_var{}] ->
%% Will match a boolean.
throw(impossible);
[#c_alias{}] ->
%% Might match a boolean.
throw(impossible);
_ ->
%% The clause cannot possible match a boolean.
%% We can remove it.
add_warning(C, {nomatch,clause_type}),
opt_bool_clauses(Cs, SeenT, SeenF)
end;
opt_bool_clauses([_|_], _, _) ->
%% A clause with a guard. Give up.
throw(impossible).
%% We intentionally do not have a clause that match an empty
%% list. An empty list would indicate that the clauses do not
%% match all possible values for the case expression, which
%% means that the Core Erlang program is illegal. We prefer to
%% crash on such illegal input, rather than producing code that will
%% fail mysteriously at run time.
%% opt_bool_not(Case) -> CoreExpr.
%% Try to eliminate one or more calls to 'not' at the top level
%% of the case expression.
%%
%% We KNOW that the case expression is guaranteed to return
%% a boolean and that there are exactly two clauses: one that
%% matches 'true' and one that matches 'false'.
%%
%% case not Expr of case Expr of
%% true -> false ->
%% ...; ...;
%% false -> ==> true ->
%% ... ...;
%% end. NewVar ->
%% erlang:error(badarg)
%% end.
opt_bool_not(#c_case{arg=Arg,clauses=Cs0}=Case0) ->
case Arg of
#c_call{anno=Anno,module=#c_literal{val=erlang},
name=#c_literal{val='not'},
args=[Expr]} ->
Cs = [opt_bool_not_invert(C) || C <- Cs0] ++
[#c_clause{anno=[compiler_generated],
pats=[#c_var{name=cor_variable}],
guard=#c_literal{val=true},
body=#c_call{anno=Anno,
module=#c_literal{val=erlang},
name=#c_literal{val=error},
args=[#c_literal{val=badarg}]}}],
Case = Case0#c_case{arg=Expr,clauses=Cs},
opt_bool_not(Case);
_ ->
Case0
end.
opt_bool_not_invert(#c_clause{pats=[#c_literal{val=Bool}]}=C) ->
C#c_clause{pats=[#c_literal{val=not Bool}]}.
%% eval_case(Case) -> #c_case{} | #c_let{}.
%% If possible, evaluate a case at compile time. We know that the
%% last clause is guaranteed to match so if there is only one clause
%% with a pattern containing only variables then rewrite to a let.
eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
guard=#c_literal{val=true},
body=B}]}=Case, Sub) ->
Es = case cerl:is_c_values(E) of
true -> cerl:values_es(E);
false -> [E]
end,
%% Consider:
%%
%% case SomeSideEffect() of
%% X=Y -> ...
%% end
%%
%% We must not rewrite it to:
%%
%% let <X,Y> = <SomeSideEffect(),SomeSideEffect()> in ...
%%
%% because SomeSideEffect() would be evaluated twice.
%%
%% Instead we must evaluate the case expression in an outer let
%% like this:
%%
%% let NewVar = SomeSideEffect() in
%% let <X,Y> = <NewVar,NewVar> in ...
%%
Vs = make_vars([], length(Es)),
case cerl_clauses:match_list(Ps0, Vs) of
{false,_} ->
%% This can only happen if the Core Erlang code is
%% handwritten or generated by another code generator
%% than v3_core. Assuming that the Core Erlang program
%% is correct, the clause will always match at run-time.
Case;
{true,Bs} ->
eval_case_warn(B),
{Ps,As} = unzip(Bs),
InnerLet = cerl:c_let(Ps, core_lib:make_values(As), B),
Let = cerl:c_let(Vs, E, InnerLet),
expr(Let, sub_new(Sub))
end;
eval_case(Case, _) -> Case.
eval_case_warn(#c_primop{anno=Anno,
name=#c_literal{val=match_fail},
args=[_]}=Core) ->
case keyfind(eval_failure, 1, Anno) of
false ->
ok;
{eval_failure,badmap} ->
%% Example: M = not_map, M#{k:=v}
add_warning(Core, {failed,bad_map_update})
end;
eval_case_warn(_) -> ok.
%% case_opt(CaseArg, [Clause]) -> {CaseArg,[Clause]}.
%% Try and optimise a case by avoid building tuples or lists
%% in the case expression. Instead combine the variable parts
%% of the case expression to multiple "values". If a clause
%% refers to the constructed term in the case expression (which
%% was not built), introduce a let into the guard and/or body to
%% build the term.
%%
%% case {ok,[Expr1,Expr2]} of case <Expr1,Expr2> of
%% {ok,[P1,P2]} -> ... <P1,P2> -> ...
%% . ==> .
%% . .
%% . .
%% Var -> <Var1,Var2> ->
%% ... Var ... let <Var> = {ok,[Var1,Var2]}
%% in ... Var ...
%% . .
%% . .
%% . .
%% end. end.
%%
case_opt(Arg, Cs0, Sub) ->
Cs1 = [{cerl:clause_pats(C),C,[],[]} || C <- Cs0],
Args0 = case cerl:is_c_values(Arg) of
false -> [Arg];
true -> cerl:values_es(Arg)
end,
LitExpr = cerl:is_literal(Arg),
{Args,Cs2} = case_opt_args(Args0, Cs1, Sub, LitExpr, []),
Cs = [cerl:update_c_clause(C,
reverse(Ps),
letify(Bs, cerl:clause_guard(C)),
letify(Bs, cerl:clause_body(C))) ||
{[],C,Ps,Bs} <- Cs2],
{core_lib:make_values(Args),Cs}.
case_opt_args([A0|As0], Cs0, Sub, LitExpr, Acc) ->
case case_opt_arg(A0, Sub, Cs0, LitExpr) of
{error,Cs1} ->
%% Nothing to be done. Move on to the next argument.
Cs = [{Ps,C,[P|PsAcc],Bs} || {[P|Ps],C,PsAcc,Bs} <- Cs1],
case_opt_args(As0, Cs, Sub, LitExpr, [A0|Acc]);
{ok,As1,Cs} ->
%% The argument was either expanded (from tuple/list) or
%% removed (literal).
case_opt_args(As1++As0, Cs, Sub, LitExpr, Acc)
end;
case_opt_args([], Cs, _Sub, _LitExpr, Acc) ->
{reverse(Acc),Cs}.
%% case_opt_arg(Expr, Sub, Clauses0, LitExpr) ->
%% {ok,Args,Clauses} | error
%% Try to expand one argument to several arguments (if tuple/list)
%% or to remove a literal argument.
%%
case_opt_arg(E0, Sub, Cs, LitExpr) ->
case cerl:is_c_var(E0) of
false ->
case_opt_arg_1(E0, Cs, LitExpr);
true ->
case case_will_var_match(Cs) of
true ->
%% All clauses will match a variable in the
%% current position. Don't expand this variable
%% (that can only make the code worse).
{error,Cs};
false ->
%% If possible, expand this variable to a previously
%% constructed tuple
E = case_expand_var(E0, Sub),
case_opt_arg_1(E, Cs, LitExpr)
end
end.
case_opt_arg_1(E0, Cs0, LitExpr) ->
case cerl:is_data(E0) of
false ->
{error,Cs0};
true ->
E = case_opt_compiler_generated(E0),
Cs = case_opt_nomatch(E, Cs0, LitExpr),
case cerl:is_literal(E) of
true ->
case_opt_lit(E, Cs);
false ->
case_opt_data(E, Cs)
end
end.
%% case_will_var_match([Clause]) -> true | false.
%% Return if all clauses will match a variable in the
%% current position.
%%
case_will_var_match(Cs) ->
all(fun({[P|_],_,_,_}) ->
case cerl_clauses:match(P, any) of
{true,_} -> true;
_ -> false
end
end, Cs).
%% case_opt_compiler_generated(Core) -> Core'
%% Mark Core expressions as compiler generated to ensure that
%% no warnings are generated if they turn out to be unused.
%% To pretty-printed Core Erlang easier to read, don't mark
%% constructs that can't cause warnings to be emitted.
%%
case_opt_compiler_generated(Core) ->
F = fun(C) ->
case cerl:type(C) of
alias -> C;
var -> C;
_ -> cerl:set_ann(C, [compiler_generated])
end
end,
cerl_trees:map(F, Core).
%% case_expand_var(Expr0, Sub) -> Expr
%% If Expr0 is a variable that is known to be bound to a
%% constructed tuple, return the tuple instead. Otherwise
%% return Expr0 unchanged.
case_expand_var(E, #sub{t=Tdb}) ->
Key = cerl:var_name(E),
case Tdb of
#{Key:=T} -> T;
_ -> E
end.
%% case_opt_nomatch(E, Clauses, LitExpr) -> Clauses'
%% Remove all clauses that cannot possibly match.
case_opt_nomatch(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
case cerl_clauses:match(P, E) of
none ->
%% The pattern will not match the case expression. Remove
%% the clause. Unless the entire case expression is a
%% literal, also emit a warning.
case LitExpr of
false -> add_warning(C, {nomatch,clause_type});
true -> ok
end,
case_opt_nomatch(E, Cs, LitExpr);
_ ->
[Current|case_opt_nomatch(E, Cs, LitExpr)]
end;
case_opt_nomatch(_, [], _) -> [].
%% case_opt_lit(Literal, Clauses0) -> {ok,[],Clauses} | error
%% The current part of the case expression is a literal. That
%% means that we will know at compile-time whether a clause
%% will match, and we can remove the corresponding pattern from
%% each clause.
%%
%% The only complication is if the literal is a binary or map.
%% In general, it is difficult to know whether a binary or
%% map pattern will match, so we give up in that case.
case_opt_lit(Lit, Cs0) ->
try case_opt_lit_1(Lit, Cs0) of
Cs ->
{ok,[],Cs}
catch
throw:impossible ->
{error,Cs0}
end.
case_opt_lit_1(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
%% Non-matching clauses have already been removed
%% in case_opt_nomatch/3.
case cerl_clauses:match(P, E) of
{true,Bs} ->
%% The pattern matches the literal. Remove the pattern
%% and update the bindings.
[{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_1(E, Cs)];
{false,_} ->
%% Binary literal and pattern. We are not sure whether
%% the pattern will match.
throw(impossible)
end;
case_opt_lit_1(_, []) -> [].
%% case_opt_data(Expr, Clauses0, LitExpr) -> {ok,Exprs,Clauses}
%% The case expression is a non-atomic data constructor (cons
%% or tuple). We can know at compile time whether each clause
%% will match, and we can delay the building of the data to
%% the clauses where it is actually needed.
case_opt_data(E, Cs0) ->
TypeSig = {cerl:data_type(E),cerl:data_arity(E)},
try case_opt_data_1(Cs0, TypeSig) of
Cs ->
Es = cerl:data_es(E),
{ok,Es,Cs}
catch
throw:impossible ->
%% The pattern contained a binary or map.
{error,Cs0}
end.
case_opt_data_1([{[P0|Ps0],C,PsAcc,Bs0}|Cs], TypeSig) ->
P = case_opt_compiler_generated(P0),
{Ps1,Bs} = case_opt_data_2(P, TypeSig, Bs0),
[{Ps1++Ps0,C,PsAcc,Bs}|case_opt_data_1(Cs, TypeSig)];
case_opt_data_1([], _) -> [].
case_opt_data_2(P, TypeSig, Bs0) ->
case case_analyze_pat(P) of
{[],Pat} when Pat =/= none ->
DataEs = cerl:data_es(P),
{DataEs,Bs0};
{[V|Vs],none} ->
{Type,Arity} = TypeSig,
Ann = [compiler_generated],
Vars = make_vars(Ann, Arity),
Data = cerl:ann_make_data(Ann, Type, Vars),
Bs = [{V,Data} | [{Var,V} || Var <- Vs] ++ Bs0],
{Vars,Bs};
{[V|Vs],Pat} when Pat =/= none ->
{Type,_} = TypeSig,
DataEs = cerl:data_es(Pat),
Vars = pat_to_expr_list(DataEs),
Ann = [compiler_generated],
Data = cerl:ann_make_data(Ann, Type, Vars),
Bs = [{V,Data} | [{Var,V} || Var <- Vs] ++ Bs0],
{DataEs,Bs}
end.
case_analyze_pat(P) ->
case_analyze_pat_1(P, [], none).
case_analyze_pat_1(P, Vs, Pat) ->
case cerl:type(P) of
alias ->
V = cerl:alias_var(P),
Apat = cerl:alias_pat(P),
case_analyze_pat_1(Apat, [V|Vs], Pat);
var ->
{[P|Vs],Pat};
_ ->
{Vs,P}
end.
%% pat_to_expr(Pattern) -> Expression.
%% Convert a pattern to an expression if possible. We KNOW that
%% all variables in the pattern will be bound.
%%
%% Throw an 'impossible' exception if a map or (non-literal)
%% binary is encountered. Trying to use a map pattern as an
%% expression is incorrect, while rebuilding a potentially
%% huge binary in an expression would be wasteful.
pat_to_expr(P) ->
case cerl:type(P) of
alias ->
cerl:alias_var(P);
var ->
P;
_ ->
case cerl:is_data(P) of
false ->
%% Map or binary.
throw(impossible);
true ->
Es = pat_to_expr_list(cerl:data_es(P)),
cerl:update_data(P, cerl:data_type(P), Es)
end
end.
pat_to_expr_list(Ps) -> [pat_to_expr(P) || P <- Ps].
make_vars(A, Max) ->
make_vars(A, 1, Max).
make_vars(A, I, Max) when I =< Max ->
[make_var(A)|make_vars(A, I+1, Max)];
make_vars(_, _, _) -> [].
make_var(A) ->
#c_var{anno=A,name=make_var_name()}.
make_var_name() ->
N = get(new_var_num),
put(new_var_num, N+1),
N.
letify(Bs, Body) ->
Ann = cerl:get_ann(Body),
foldr(fun({V,Val}, B) ->
cerl:ann_c_let(Ann, [V], Val, B)
end, Body, Bs).
%% opt_not_in_let(Let) -> Cerl
%% Try to optimize away a 'not' operator in a 'let'.
-spec opt_not_in_let(cerl:c_let()) -> cerl:cerl().
opt_not_in_let(#c_let{vars=[_]=Vs0,arg=Arg0,body=Body0}=Let) ->
case opt_not_in_let_0(Vs0, Arg0, Body0) of
{[],#c_values{es=[]},Body} ->
Body;
{Vs,Arg,Body} ->
Let#c_let{vars=Vs,arg=Arg,body=Body}
end;
opt_not_in_let(Let) -> Let.
opt_not_in_let_0([#c_var{name=V}]=Vs0, Arg0, Body0) ->
case cerl:type(Body0) of
call ->
%% let <V> = Expr in not V ==>
%% let <> = <> in notExpr
case opt_not_in_let_1(V, Body0, Arg0) of
no ->
{Vs0,Arg0,Body0};
{yes,Body} ->
{[],#c_values{es=[]},Body}
end;
'let' ->
%% let <V> = Expr in let <Var> = not V in Body ==>
%% let <Var> = notExpr in Body
%% V must not be used in Body.
LetArg = cerl:let_arg(Body0),
case opt_not_in_let_1(V, LetArg, Arg0) of
no ->
{Vs0,Arg0,Body0};
{yes,Arg} ->
LetBody = cerl:let_body(Body0),
case core_lib:is_var_used(V, LetBody) of
true ->
{Vs0,Arg0,Body0};
false ->
LetVars = cerl:let_vars(Body0),
{LetVars,Arg,LetBody}
end
end;
_ ->
{Vs0,Arg0,Body0}
end.
opt_not_in_let_1(V, Call, Body) ->
case Call of
#c_call{module=#c_literal{val=erlang},
name=#c_literal{val='not'},
args=[#c_var{name=V}]} ->
opt_not_in_let_2(Body, Call);
_ ->
no
end.
opt_not_in_let_2(#c_case{clauses=Cs0}=Case, NotCall) ->
Vars = make_vars([], 1),
Body = NotCall#c_call{args=Vars},
Cs = [begin
Let = #c_let{vars=Vars,arg=B,body=Body},
C#c_clause{body=opt_not_in_let(Let)}
end || #c_clause{body=B}=C <- Cs0],
{yes,Case#c_case{clauses=Cs}};
opt_not_in_let_2(#c_call{}=Call0, _NotCall) ->
invert_call(Call0);
opt_not_in_let_2(_, _) -> no.
invert_call(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=Name0},
args=[_,_]}=Call) ->
case inverse_rel_op(Name0) of
no -> no;
Name -> {yes,Call#c_call{name=#c_literal{val=Name}}}
end;
invert_call(#c_call{}) -> no.
%% inverse_rel_op(Op) -> no | RevOp
inverse_rel_op('=:=') -> '=/=';
inverse_rel_op('=/=') -> '=:=';
inverse_rel_op('==') -> '/=';
inverse_rel_op('/=') -> '==';
inverse_rel_op('>') -> '=<';
inverse_rel_op('<') -> '>=';
inverse_rel_op('>=') -> '<';
inverse_rel_op('=<') -> '>';
inverse_rel_op(_) -> no.
%% opt_bool_case_in_let(LetExpr) -> Core
opt_bool_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) ->
opt_bool_case_in_let_1(Vs, Arg, B, Let, Sub).
opt_bool_case_in_let_1([#c_var{name=V}], Arg,
#c_case{arg=#c_var{name=V}}=Case0, Let, Sub) ->
case is_simple_case_arg(Arg) of
true ->
Case = opt_bool_case(Case0#c_case{arg=Arg}, Sub),
case core_lib:is_var_used(V, Case) of
false -> Case;
true -> Let
end;
false ->
Let
end;
opt_bool_case_in_let_1(_, _, _, Let, _) -> Let.
%% is_simple_case_arg(Expr) -> true|false
%% Determine whether the Expr is simple enough to be worth
%% substituting into a case argument. (Common substitutions
%% of variables and literals are assumed to have been already
%% handled by the caller.)
is_simple_case_arg(#c_cons{}) -> true;
is_simple_case_arg(#c_tuple{}) -> true;
is_simple_case_arg(#c_call{}) -> true;
is_simple_case_arg(#c_apply{}) -> true;
is_simple_case_arg(_) -> false.
%% is_bool_expr(Core) -> true|false
%% Check whether the Core expression is guaranteed to
%% return a boolean.
%%
is_bool_expr(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=Name},args=Args}) ->
NumArgs = length(Args),
erl_internal:comp_op(Name, NumArgs) orelse
erl_internal:new_type_test(Name, NumArgs) orelse
erl_internal:bool_op(Name, NumArgs);
is_bool_expr(#c_try{arg=E,vars=[#c_var{name=X}],body=#c_var{name=X},
handler=#c_literal{val=false}}) ->
is_bool_expr(E);
is_bool_expr(#c_case{clauses=Cs}) ->
is_bool_expr_list(Cs);
is_bool_expr(#c_clause{body=B}) ->
is_bool_expr(B);
is_bool_expr(#c_let{body=B}) ->
is_bool_expr(B);
is_bool_expr(#c_literal{val=Val}) ->
is_boolean(Val);
is_bool_expr(_) -> false.
is_bool_expr_list([C|Cs]) ->
is_bool_expr(C) andalso is_bool_expr_list(Cs);
is_bool_expr_list([]) -> true.
%% is_safe_bool_expr(Core) -> true|false
%% Check whether the Core expression ALWAYS returns a boolean
%% (i.e. it cannot fail).
%%
is_safe_bool_expr(Core) ->
is_safe_bool_expr_1(Core, sets:new([{version, 2}])).
is_safe_bool_expr_1(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=is_function},
args=[A,#c_literal{val=Arity}]},
_BoolVars) when is_integer(Arity), Arity >= 0 ->
is_safe_simple(A);
is_safe_bool_expr_1(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=is_function}},
_BoolVars) ->
false;
is_safe_bool_expr_1(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=Name},args=Args},
BoolVars) ->
NumArgs = length(Args),
case (erl_internal:comp_op(Name, NumArgs) orelse
erl_internal:new_type_test(Name, NumArgs)) andalso
is_safe_simple_list(Args) of
true ->
true;
false ->
%% Boolean operators are safe if all arguments are boolean.
erl_internal:bool_op(Name, NumArgs) andalso
is_safe_bool_expr_list(Args, BoolVars)
end;
is_safe_bool_expr_1(#c_let{vars=Vars,arg=Arg,body=B}, BoolVars) ->
case is_safe_simple(Arg) of
true ->
case {is_safe_bool_expr_1(Arg, BoolVars),Vars} of
{true,[#c_var{name=V}]} ->
is_safe_bool_expr_1(B, sets:add_element(V, BoolVars));
{false,_} ->
is_safe_bool_expr_1(B, BoolVars)
end;
false -> false
end;
is_safe_bool_expr_1(#c_literal{val=Val}, _BoolVars) ->
is_boolean(Val);
is_safe_bool_expr_1(#c_var{name=V}, BoolVars) ->
sets:is_element(V, BoolVars);
is_safe_bool_expr_1(_, _) -> false.
is_safe_bool_expr_list([C|Cs], BoolVars) ->
case is_safe_bool_expr_1(C, BoolVars) of
true -> is_safe_bool_expr_list(Cs, BoolVars);
false -> false
end;
is_safe_bool_expr_list([], _) -> true.
opt_fun_call(#c_let{vars=[#c_var{name=V}],arg=#c_fun{}=FunDef,body=Body}=Let) ->
try do_opt_fun_call(V, FunDef, Body) of
impossible -> Let;
Expr -> Expr
catch
throw:impossible ->
Let
end;
opt_fun_call(Expr) -> Expr.
do_opt_fun_call(V, FunDef, #c_apply{op=#c_var{name=V},args=CallArgs}) ->
Values = core_lib:make_values(CallArgs),
simplify_fun_call(V, Values, FunDef, CallArgs);
do_opt_fun_call(V, FunDef, #c_let{arg=#c_apply{op=#c_var{name=V},args=CallArgs},
body=Rest}=Let) ->
Values = core_lib:make_values([Rest|CallArgs]),
Inlined = simplify_fun_call(V, Values, FunDef, CallArgs),
Let#c_let{arg=Inlined};
do_opt_fun_call(V, FunDef, #c_seq{arg=#c_apply{op=#c_var{name=V},args=CallArgs},
body=Rest}=Seq) ->
Values = core_lib:make_values([Rest|CallArgs]),
Inlined = simplify_fun_call(V, Values, FunDef, CallArgs),
Seq#c_seq{arg=Inlined};
do_opt_fun_call(_, _, _) -> impossible.
simplify_fun_call(V, Values, #c_fun{vars=Vars,body=FunBody}, CallArgs) ->
case not core_lib:is_var_used(V, Values) andalso length(Vars) =:= length(CallArgs) of
true ->
%% Safe to inline.
#c_let{vars=Vars,
arg=core_lib:make_values(CallArgs),
body=FunBody};
false ->
%% The fun is used more than once or there is an arity mismatch.
throw(impossible)
end.
%% simplify_let(Let, Sub) -> Expr | impossible
%% If the argument part of an let contains a complex expression, such
%% as a let or a sequence, move the original let body into the complex
%% expression.
simplify_let(#c_let{arg=Arg}=Let, Sub) ->
move_let_into_expr(Let, Arg, Sub).
move_let_into_expr(#c_let{vars=InnerVs0,body=InnerBody0}=Inner,
#c_let{vars=OuterVs0,arg=Arg0,body=OuterBody0}=Outer, Sub0) ->
%%
%% let <InnerVars> = let <OuterVars> = <Arg>
%% in <OuterBody>
%% in <InnerBody>
%%
%% ==>
%%
%% let <OuterVars> = <Arg>
%% in let <InnerVars> = <OuterBody>
%% in <InnerBody>
%%
Arg = body(Arg0, Sub0),
ScopeSub0 = sub_subst_scope(Sub0#sub{t=#{}}),
{OuterVs,ScopeSub} = var_list(OuterVs0, ScopeSub0),
OuterBody = body(OuterBody0, ScopeSub),
{InnerVs,Sub} = var_list(InnerVs0, Sub0),
InnerBody = body(InnerBody0, Sub),
Outer#c_let{vars=OuterVs,arg=Arg,
body=Inner#c_let{vars=InnerVs,arg=OuterBody,body=InnerBody}};
move_let_into_expr(#c_let{vars=Lvs0,body=Lbody0}=Let,
#c_case{arg=Cexpr0,clauses=[Ca0|Cs0]}=Case, Sub0) ->
case not is_failing_clause(Ca0) andalso
are_all_failing_clauses(Cs0) of
true ->
%% let <Lvars> = case <Case-expr> of
%% <Cpats> -> <Clause-body>;
%% <OtherCpats> -> erlang:error(...)
%% end
%% in <Let-body>
%%
%% ==>
%%
%% case <Case-expr> of
%% <Cpats> ->
%% let <Lvars> = <Clause-body>
%% in <Let-body>;
%% <OtherCpats> -> erlang:error(...)
%% end
Cexpr = body(Cexpr0, Sub0),
CaPats0 = Ca0#c_clause.pats,
G0 = Ca0#c_clause.guard,
B0 = Ca0#c_clause.body,
ScopeSub0 = sub_subst_scope(Sub0#sub{t=#{}}),
try pattern_list(CaPats0, ScopeSub0) of
{CaPats,ScopeSub} ->
G = guard(G0, ScopeSub),
B1 = body(B0, ScopeSub),
{Lvs,B2,Sub1} = let_substs(Lvs0, B1, Sub0),
Sub2 = Sub1#sub{s=sets:union(ScopeSub#sub.s,
Sub1#sub.s)},
Lbody = body(Lbody0, Sub2),
B = Let#c_let{vars=Lvs,
arg=core_lib:make_values(B2),
body=Lbody},
Ca = Ca0#c_clause{pats=CaPats,guard=G,body=B},
Cs = [clause(C, Cexpr, value, Sub0) || C <- Cs0],
Case#c_case{arg=Cexpr,clauses=[Ca|Cs]}
catch
nomatch ->
%% This is not a defeat. The code will eventually
%% be optimized to erlang:error(...) by the other
%% optimizations done in this module.
impossible
end;
false -> impossible
end;
move_let_into_expr(#c_let{vars=Lvs0,body=Lbody0}=Let,
#c_seq{arg=Sarg0,body=Sbody0}=Seq, Sub0) ->
%%
%% let <Lvars> = do <Seq-arg>
%% <Seq-body>
%% in <Let-body>
%%
%% ==>
%%
%% do <Seq-arg>
%% let <Lvars> = <Seq-body>
%% in <Let-body>
%%
Sarg = body(Sarg0, Sub0),
Sbody1 = body(Sbody0, Sub0),
{Lvs,Sbody,Sub} = let_substs(Lvs0, Sbody1, Sub0),
Lbody = body(Lbody0, Sub),
Seq#c_seq{arg=Sarg,body=Let#c_let{vars=Lvs,arg=core_lib:make_values(Sbody),
body=Lbody}};
move_let_into_expr(_Let, _Expr, _Sub) -> impossible.
are_all_failing_clauses(Cs) ->
all(fun is_failing_clause/1, Cs).
is_failing_clause(#c_clause{body=B}) ->
will_fail(B).
%% opt_build_stacktrace(Let) -> Core.
%% If the stacktrace is *only* used in a call to erlang:raise/3,
%% there is no need to build a cooked stackframe using build_stacktrace/1.
opt_build_stacktrace(#c_let{vars=[#c_var{name=Cooked}],
arg=#c_primop{name=#c_literal{val=build_stacktrace},
args=[RawStk]},
body=Body}=Let) ->
case Body of
#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=raise},
args=[Class,Exp,#c_var{name=Cooked}]} ->
case core_lib:is_var_used(Cooked, #c_cons{hd=Class,tl=Exp}) of
true ->
%% Not safe. The stacktrace is used in the class or
%% reason.
Let;
false ->
%% The stacktrace is only used in the last
%% argument for erlang:raise/3. There is no need
%% to build the stacktrace. Replace the call to
%% erlang:raise/3 with the the raw_raise/3
%% instruction, which will use a raw stacktrace.
#c_primop{name=#c_literal{val=raw_raise},
args=[Class,Exp,RawStk]}
end;
#c_let{vars=[#c_var{name=V}],arg=Arg,body=B0} when V =/= Cooked ->
case core_lib:is_var_used(Cooked, Arg) of
false ->
%% The built stacktrace is not used in the argument,
%% so we can sink the building of the stacktrace into
%% the body of the let.
B = opt_build_stacktrace(Let#c_let{body=B0}),
Body#c_let{body=B};
true ->
Let
end;
#c_seq{arg=Arg,body=B0} ->
case core_lib:is_var_used(Cooked, Arg) of
false ->
%% The built stacktrace is not used in the argument,
%% so we can sink the building of the stacktrace into
%% the body of the sequence.
B = opt_build_stacktrace(Let#c_let{body=B0}),
Body#c_seq{body=B};
true ->
Let
end;
#c_case{clauses=Cs0} ->
NilBody = #c_literal{val=[]},
Cs1 = [C#c_clause{body=NilBody} || C <- Cs0],
Case = Body#c_case{clauses=Cs1},
case core_lib:is_var_used(Cooked, Case) of
false ->
%% The built stacktrace is not used in the case
%% argument or in the head of any clause. Thus
%% it is safe sink the building of the stacktrace
%% into each arm of the case.
Cs = [begin
B = opt_build_stacktrace(Let#c_let{body=B0}),
C#c_clause{body=B}
end || #c_clause{body=B0}=C <- Cs0],
Body#c_case{clauses=Cs};
true ->
Let
end;
_ ->
Let
end;
opt_build_stacktrace(Expr) ->
Expr.
%% opt_case_in_let(Let) -> Let'
%% Try to avoid building tuples that are immediately matched.
%% A common pattern is:
%%
%% {V1,V2,...} = case E of P -> ... {Val1,Val2,...}; ... end
%%
%% In Core Erlang the pattern would look like this:
%%
%% let <V> = case E of
%% ... -> ... {Val1,Val2}
%% ...
%% end,
%% in case V of
%% {A,B} -> ... <use A and B> ...
%% end
%%
%% Rewrite this to:
%%
%% let <V1,V2> = case E of
%% ... -> ... <Val1,Val2>
%% ...
%% end,
%% in
%% let <V> = {V1,V2}
%% in case V of
%% {A,B} -> ... <use A and B> ...
%% end
%%
%% Note that the second 'case' is unchanged. The other optimizations
%% in this module will eliminate the building of the tuple and
%% rewrite the second case to:
%%
%% case <V1,V2> of
%% <A,B> -> ... <use A and B> ...
%% end
%%
opt_case_in_let(#c_let{vars=Vs,arg=Arg0,body=B}=Let0) ->
case matches_data(Vs, B) of
{yes,TypeSig} ->
case delay_build(Arg0, TypeSig) of
no ->
Let0;
{yes,Vars,Arg,Data} ->
InnerLet = Let0#c_let{arg=Data},
Let0#c_let{vars=Vars,arg=Arg,body=InnerLet}
end;
no ->
Let0
end.
matches_data([#c_var{name=V}], #c_case{arg=#c_var{name=V},
clauses=[#c_clause{pats=[P]}|_]}) ->
case cerl:is_data(P) of
false ->
no;
true ->
case cerl:data_type(P) of
{atomic,_} ->
no;
Type ->
{yes,{Type,cerl:data_arity(P)}}
end
end;
matches_data(_, _) -> no.
delay_build(Core, TypeSig) ->
case cerl:is_data(Core) of
true -> no;
false -> delay_build_1(Core, TypeSig)
end.
delay_build_1(Core0, TypeSig) ->
try delay_build_expr(Core0, TypeSig) of
Core ->
{Type,Arity} = TypeSig,
Ann = [compiler_generated],
Vars = make_vars(Ann, Arity),
Data = cerl:ann_make_data(Ann, Type, Vars),
{yes,Vars,Core,Data}
catch
throw:impossible ->
no
end.
delay_build_cs([#c_clause{body=B0}=C0|Cs], TypeSig) ->
B = delay_build_expr(B0, TypeSig),
C = C0#c_clause{body=B},
[C|delay_build_cs(Cs, TypeSig)];
delay_build_cs([], _) -> [].
delay_build_expr(Core, {Type,Arity}=TypeSig) ->
case cerl:is_data(Core) of
false ->
delay_build_expr_1(Core, TypeSig);
true ->
case {cerl:data_type(Core),cerl:data_arity(Core)} of
{Type,Arity} ->
core_lib:make_values(cerl:data_es(Core));
{_,_} ->
throw(impossible)
end
end.
delay_build_expr_1(#c_case{clauses=Cs0}=Case, TypeSig) ->
Cs = delay_build_cs(Cs0, TypeSig),
Case#c_case{clauses=Cs};
delay_build_expr_1(#c_let{body=B0}=Let, TypeSig) ->
B = delay_build_expr(B0, TypeSig),
Let#c_let{body=B};
delay_build_expr_1(#c_seq{body=B0}=Seq, TypeSig) ->
B = delay_build_expr(B0, TypeSig),
Seq#c_seq{body=B};
delay_build_expr_1(Core, _TypeSig) ->
case will_fail(Core) of
true -> Core;
false -> throw(impossible)
end.
%% opt_simple_let(#c_let{}, Context, Sub) -> CoreTerm
%% Optimize a let construct that does not contain any lets in
%% in its argument.
opt_simple_let(Let0, Ctxt, Sub) ->
case opt_not_in_let(Let0) of
#c_let{}=Let ->
opt_simple_let_0(Let, Ctxt, Sub);
Expr ->
expr(Expr, Ctxt, Sub)
end.
opt_simple_let_0(#c_let{arg=Arg0}=Let, Ctxt, Sub) ->
Arg = body(Arg0, value, Sub), %This is a body
case will_fail(Arg) of
true -> Arg;
false -> opt_simple_let_1(Let, Arg, Ctxt, Sub)
end.
opt_simple_let_1(#c_let{vars=Vs0,body=B0}=Let, Arg0, Ctxt, Sub0) ->
%% Optimise let and add new substitutions.
{Vs,Args,Sub1} = let_substs(Vs0, Arg0, Sub0),
BodySub = update_let_types(Vs, Args, Sub1),
Sub = Sub1#sub{v=[],s=sets:new([{version, 2}])},
B = body(B0, Ctxt, BodySub),
Arg = core_lib:make_values(Args),
opt_simple_let_2(Let, Vs, Arg, B, B0, Sub).
%% opt_simple_let_2(Let0, Vs0, Arg0, Body, PrevBody, Ctxt, Sub) -> Core.
%% Do final simplifications of the let.
%%
%% Note that the substitutions and scope in Sub have been cleared
%% and should not be used.
opt_simple_let_2(Let0, Vs0, Arg0, Body, PrevBody, Sub) ->
case {Vs0,Arg0,Body} of
{[#c_var{name=V}],Arg1,#c_var{name=V}} ->
%% let <Var> = Arg in <Var> ==> Arg
Arg1;
{[],#c_values{es=[]},_} ->
%% No variables left.
Body;
{[#c_var{name=V}=Var]=Vars0,Arg1,Body} ->
case core_lib:is_var_used(V, Body) of
false ->
%% If the variable is not used in the body, we can
%% rewrite the let to a sequence:
%% let <Var> = Arg in BodyWithoutVar ==>
%% seq Arg BodyWithoutVar
Arg = maybe_suppress_warnings(Arg1, Var, PrevBody),
#c_seq{arg=Arg,body=Body};
true ->
Let1 = Let0#c_let{vars=Vars0,arg=Arg1,body=Body},
post_opt_let(Let1, Sub)
end;
{_,_,_} ->
%% The argument for a sequence must be a single value (not
%% #c_values{}). Therefore, we must keep the let.
Let1 = Let0#c_let{vars=Vs0,arg=Arg0,body=Body},
post_opt_let(Let1, Sub)
end.
%% post_opt_let(Let, Sub)
%% Final optimizations of the let.
%%
%% Note that the substitutions and scope in Sub have been cleared
%% and should not be used.
post_opt_let(Let0, Sub) ->
Let1 = opt_bool_case_in_let(Let0, Sub),
opt_build_stacktrace(Let1).
%% maybe_suppress_warnings(Arg, #c_var{}, PreviousBody) -> Arg'
%% Try to suppress false warnings when a variable is not used.
%% For instance, we don't expect a warning for useless building in:
%%
%% R = #r{}, %No warning expected.
%% R#r.f %Optimization would remove the reference to R.
%%
%% To avoid false warnings, we will check whether the variables were
%% referenced in the original unoptimized code. If they were, we will
%% consider the warning false and suppress it.
maybe_suppress_warnings(Arg, #c_var{name=V}, PrevBody) ->
case should_suppress_warning(Arg) of
true ->
Arg; %Already suppressed.
false ->
case core_lib:is_var_used(V, PrevBody) of
true ->
suppress_warning([Arg]);
false ->
Arg
end
end.
%% Suppress warnings for a Core Erlang expression whose value will
%% be ignored.
suppress_warning([H|T]) ->
case cerl:is_literal(H) of
true ->
suppress_warning(T);
false ->
case cerl:is_data(H) of
true ->
suppress_warning(cerl:data_es(H) ++ T);
false ->
%% Some other thing, such as a function call.
%% This cannot be the compiler's fault, so the
%% warning should not be suppressed. We must
%% be careful not to destroy tail-recursion.
case T of
[] ->
H;
[_|_] ->
cerl:c_seq(H, suppress_warning(T))
end
end
end;
suppress_warning([]) -> void().
move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg,
body=InnerArg0}=Outer,
clauses=InnerClauses}=Inner, Sub) ->
%%
%% case let <OuterVars> = <OuterArg> in <InnerArg> of
%% <InnerClauses>
%% end
%%
%% ==>
%%
%% let <OuterVars> = <OuterArg>
%% in case <InnerArg> of <InnerClauses> end
%%
ScopeSub0 = sub_subst_scope(Sub#sub{t=#{}}),
{OuterVars,ScopeSub} = var_list(OuterVars0, ScopeSub0),
InnerArg = body(InnerArg0, ScopeSub),
Outer#c_let{vars=OuterVars,arg=OuterArg,
body=Inner#c_case{arg=InnerArg,clauses=InnerClauses}};
move_case_into_arg(#c_case{arg=#c_case{arg=OuterArg,
clauses=[OuterCa0,OuterCb]}=Outer,
clauses=InnerClauses}=Inner0, Sub) ->
case is_failing_clause(OuterCb) of
true ->
#c_clause{pats=OuterPats0,guard=OuterGuard0,
body=InnerArg0} = OuterCa0,
%%
%% case case <OuterArg> of
%% <OuterPats> when <OuterGuard> -> <InnerArg>
%% <OuterCb>
%% ...
%% end of
%% <InnerClauses>
%% end
%%
%% ==>
%%
%% case <OuterArg> of
%% <OuterPats> when <OuterGuard> ->
%% case <InnerArg> of <InnerClauses> end
%% <OuterCb>
%% end
%%
ScopeSub0 = sub_subst_scope(Sub#sub{t=#{}}),
%% We KNOW that pattern_list/2 has already been called for OuterPats0;
%% therefore, it cannot throw an exception.
{OuterPats,ScopeSub} = pattern_list(OuterPats0, ScopeSub0),
OuterGuard = guard(OuterGuard0, ScopeSub),
InnerArg = body(InnerArg0, ScopeSub),
Inner = Inner0#c_case{arg=InnerArg,clauses=InnerClauses},
OuterCa = OuterCa0#c_clause{pats=OuterPats,
guard=OuterGuard,
body=Inner},
Outer#c_case{arg=OuterArg,
clauses=[OuterCa,OuterCb]};
false ->
Inner0
end;
move_case_into_arg(#c_case{arg=#c_seq{arg=OuterArg,body=InnerArg}=Outer,
clauses=InnerClauses}=Inner, _Sub) ->
%%
%% case do <OuterArg> <InnerArg> of
%% <InnerClauses>
%% end
%%
%% ==>
%%
%% do <OuterArg>
%% case <InnerArg> of <InerClauses> end
%%
Outer#c_seq{arg=OuterArg,
body=Inner#c_case{arg=InnerArg,clauses=InnerClauses}};
move_case_into_arg(Expr, _) ->
Expr.
%%%
%%% Update type information.
%%%
update_let_types(Vs, Args, Sub) when is_list(Args) ->
update_let_types_1(Vs, Args, Sub);
update_let_types(_Vs, _Arg, Sub) ->
%% The argument is a complex expression (such as a 'case')
%% that returns multiple values.
Sub.
update_let_types_1([#c_var{name=V}|Vs], [A|As], Sub0) ->
Sub = update_types(V, A, Sub0),
update_let_types_1(Vs, As, Sub);
update_let_types_1([], [], Sub) -> Sub.
update_types(V, #c_tuple{}=P, #sub{t=Tdb}=Sub) ->
Sub#sub{t=Tdb#{V=>P}};
update_types(_, _, Sub) -> Sub.
%% kill_types(V, Tdb) -> Tdb'
%% Kill any entries that references the variable,
%% either in the key or in the value.
kill_types(V, Tdb) ->
maps:from_list(kill_types2(V,maps:to_list(Tdb))).
kill_types2(V, [{V,_}|Tdb]) ->
kill_types2(V, Tdb);
kill_types2(V, [{_,#c_tuple{}=Tuple}=Entry|Tdb]) ->
case core_lib:is_var_used(V, Tuple) of
false -> [Entry|kill_types2(V, Tdb)];
true -> kill_types2(V, Tdb)
end;
kill_types2(_, []) -> [].
%% copy_type(DestVar, SrcVar, Tdb) -> Tdb'
%% If the SrcVar has a type, assign it to DestVar.
%%
copy_type(V, #c_var{name=Src}, Tdb) ->
case Tdb of
#{Src:=Type} -> Tdb#{V=>Type};
_ -> Tdb
end;
copy_type(_, _, Tdb) -> Tdb.
%% The atom `ok', is widely used in Erlang for "void" values.
void() -> #c_literal{val=ok}.
%%%
%%% Handling of the `useless_building` warning (building a term that
%%% is never used).
%%%
%%% Consider this code fragment:
%%%
%%% [ {ok,Term} ],
%%% ok
%%%
%%% The list that is ignored contains a tuple that is also ignored.
%%% While optimizing this code fragment, two warnings for useless
%%% building will be generated: one for the list and one for the tuple
%%% inside. Before the introduction of column numbers, those two warnings
%%% would be coalesced to one becuase they had the same line number.
%%%
%%% With column numbers, we will need a more sophisticated solution to
%%% avoid emitting annoying duplicate warnings.
%%%
%%% Note that if two separate terms are being built on the same line, we
%%% do expect to get two warnings:
%%%
%%% [ {ok,Term} ], [ {error,BadTerm} ], ok
%%% ^ ^
%%%
%%% (The carets mark the expected columns for the warnings.)
%%%
%%% To handle those requirements, we will use the #sub{} record to keep
%%% track of whether we are at the top level or have descended into
%%% a sub expression.
%%%
%% Note in the Sub record that we have are no longer at the top level.
descend(_Core, #sub{top=false}=Sub) ->
Sub;
descend(Core, #sub{top=true}=Sub) ->
case should_suppress_warning(Core) of
true ->
%% In a list comprehension being ignored such as:
%%
%% [{error,Z} || Z <- List], ok
%%
%% the warning for ignoring the cons cell should be
%% suppressed, but there should still be a warning for
%% ignoring the {error,Z} tuple. Therefore, pretend that
%% we are still at the top level.
Sub;
false ->
%% No longer at top level. Warnings for useless building
%% should now be suppressed.
Sub#sub{top=false}
end.
warn_useless_building(Core, #sub{top=Top}) ->
case Top of
true -> add_warning(Core, {ignored,useless_building});
false -> ok
end.
%%%
%%% Handling of warnings.
%%%
init_warnings() ->
put({?MODULE,warnings}, []).
add_warning(Core, Term) ->
case should_suppress_warning(Core) of
true ->
ok;
false ->
Anno = cerl:get_ann(Core),
Location = get_location(Anno),
File = get_file(Anno),
Key = {?MODULE,warnings},
case get(Key) of
[{File,[{Location,?MODULE,Term}]}|_] ->
ok; %We already have
%an identical warning.
Ws ->
put(Key, [{File,[{Location,?MODULE,Term}]}|Ws])
end
end.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([{Line, _Column} | _T]) when is_integer(Line) -> Line;
get_line([_|T]) -> get_line(T);
get_line([]) -> none.
get_location([Line|_]) when is_integer(Line) ->
Line;
get_location([{Line, Column} | _T]) when is_integer(Line), is_integer(Column) ->
{Line,Column};
get_location([_|T]) ->
get_location(T);
get_location([]) ->
none.
get_file([{file,File}|_]) -> File;
get_file([_|T]) -> get_file(T);
get_file([]) -> "no_file". % should not happen
should_suppress_warning(Core) ->
is_compiler_generated(Core) orelse
is_result_unwanted(Core).
is_compiler_generated(Core) ->
Ann = cerl:get_ann(Core),
member(compiler_generated, Ann).
is_result_unwanted(Core) ->
Ann = cerl:get_ann(Core),
member(result_not_wanted, Ann).
get_warnings() ->
ordsets:from_list((erase({?MODULE,warnings}))).
classify_call(Call) ->
Mod = cerl:concrete(cerl:call_module(Call)),
Name = cerl:concrete(cerl:call_name(Call)),
Arity = cerl:call_arity(Call),
{Mod, Name, Arity}.
-type error() :: {'failed' | 'nomatch' | 'ignored', term()}.
-spec format_error(error()) -> nonempty_string().
format_error({failed,{eval_failure,Call,Reason}}) ->
flatten(io_lib:format("~ts will fail with a '~p' exception",
[format_call(Call, false),Reason]));
format_error({failed,embedded_binary_size}) ->
"binary construction will fail with a 'badarg' exception "
"(field size for binary/bitstring greater than actual size)";
format_error({failed,{embedded_unit,Unit,Size}}) ->
M = io_lib:format("binary construction will fail with a 'badarg' exception "
"(size ~p cannot be evenly divided by unit ~p)", [Size,Unit]),
flatten(M);
format_error({failed,bad_unicode}) ->
"binary construction will fail with a 'badarg' exception "
"(invalid Unicode code point in a utf8/utf16/utf32 segment)";
format_error({failed,bad_float_size}) ->
"binary construction will fail with a 'badarg' exception "
"(invalid size for a float segment)";
format_error({failed,bad_map_update}) ->
"map update will fail with a 'badmap' exception";
format_error({failed,bad_call}) ->
"invalid function call";
format_error({nomatch,{shadow,Line,{Name, Arity}}}) ->
M = io_lib:format("this clause for ~ts/~B cannot match because a previous "
"clause at line ~p always matches", [Name, Arity, Line]),
flatten(M);
format_error({nomatch,{shadow,Line}}) ->
M = io_lib:format("this clause cannot match because a previous clause at line ~p "
"always matches", [Line]),
flatten(M);
format_error({nomatch,shadow}) ->
"this clause cannot match because a previous clause always matches";
format_error({nomatch,guard}) ->
"this clause cannot match because its guard evaluates to 'false'";
format_error({nomatch,{bit_syntax_truncated,Signess,Val,Sz}}) ->
S = case Signess of
signed -> "a 'signed'";
unsigned -> "an 'unsigned'"
end,
F = "this clause cannot match because the value ~P"
" will not fit in ~s binary segment of size ~p",
flatten(io_lib:format(F, [Val,10,S,Sz]));
format_error({nomatch,{bit_syntax_unsigned,Val}}) ->
F = "this clause cannot match because the negative value ~P"
" will never match the value of an 'unsigned' binary segment",
flatten(io_lib:format(F, [Val,10]));
format_error({nomatch,{bit_syntax_size,Sz}}) ->
F = "this clause cannot match because '~P' is not a valid size for a binary segment",
flatten(io_lib:format(F, [Sz,10]));
format_error({nomatch,{bit_syntax_type,Val,Type}}) ->
F = "this clause cannot match because '~P' is not of the"
" expected type '~p'",
flatten(io_lib:format(F, [Val,10,Type]));
format_error({nomatch,{bit_syntax_unicode,Val}}) ->
F = "this clause cannot match because the value ~p"
" is not a valid Unicode code point",
flatten(io_lib:format(F, [Val]));
format_error({nomatch,no_clause}) ->
"no clause will ever match";
format_error({nomatch,clause_type}) ->
"this clause cannot match because of different types/sizes";
format_error({ignored,{no_effect,{erlang,F,A}}}) ->
{Fmt,Args} = case erl_internal:comp_op(F, A) of
true ->
{"use of operator ~p has no effect",[F]};
false ->
case erl_internal:bif(F, A) of
false ->
{"the call to erlang:~p/~p has no effect",[F,A]};
true ->
{"the call to ~p/~p has no effect",[F,A]}
end
end,
flatten(io_lib:format(Fmt, Args));
format_error({ignored,{result,Call}}) ->
Fmt = "the result of ~ts is ignored "
"(suppress the warning by assigning the expression to the _ variable)",
flatten(io_lib:format(Fmt, [format_call(Call, true)]));
format_error({ignored,useless_building}) ->
"a term is constructed, but never used".
format_call({erlang,make_fun,3}, _) ->
"fun construction";
format_call({Mod, Name, Arity}, UseProgressiveForm) ->
case is_operator(Mod, Name, Arity) of
true ->
Str = case UseProgressiveForm of
true -> "evaluating";
false -> "evaluation of"
end,
[Str, io_lib:format(" operator ~p/~p", [Name,Arity])];
false ->
Str = case UseProgressiveForm of
true -> "calling";
false -> "the call to"
end,
case is_auto_imported(Mod, Name, Arity) of
true ->
[Str, io_lib:format(" ~p/~p", [Name,Arity])];
false ->
[Str, io_lib:format(" ~p:~p/~p", [Mod,Name,Arity])]
end
end.
is_operator(erlang, Name, Arity) ->
try
_ = erl_internal:op_type(Name, Arity),
true
catch
error:_ ->
false
end;
is_operator(_, _, _) -> false.
is_auto_imported(erlang, Name, Arity) ->
erl_internal:bif(Name, Arity);
is_auto_imported(_, _, _) -> false.
-ifdef(DEBUG).
%% In order for simplify_let/2 to work correctly, the list of
%% in-scope variables must always be a superset of the free variables
%% in the current expression (otherwise we might fail to rename a variable
%% when needed and get a name capture bug).
verify_scope(E, #sub{s=Scope}) ->
Free0 = cerl_trees:free_variables(E),
Free = [V || V <- Free0, not is_tuple(V)], %Ignore function names.
case is_subset_of_scope(Free, Scope) of
true ->
true;
false ->
io:format("~p\n", [E]),
io:format("~p\n", [Free]),
io:format("~p\n", [ordsets:from_list(sets:to_list(Scope))]),
false
end.
is_subset_of_scope([V|Vs], Scope) ->
sets:is_element(V, Scope) andalso is_subset_of_scope(Vs, Scope);
is_subset_of_scope([], _) -> true.
-endif. | lib/compiler/src/sys_core_fold.erl | 0.563258 | 0.435541 | sys_core_fold.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(xref_scanner).
-include("xref.hrl").
-export([scan/1]).
scan(Chars) ->
case erl_scan:string(Chars) of
{ok, Tokens, _Line} ->
{ok, lex(a1(Tokens))};
{error, {Line,Module,Info}, _EndLine} ->
{error, Module:format_error(Info), Line}
end.
a1([{'-',N},{integer,N,1} | L]) ->
[{integer,N,-1} | a1(L)];
a1([T | L]) ->
[T | a1(L)];
a1([]) ->
[].
-define(MFA(M,F,A,N), {atom,N,M}, {':',_}, {atom,_,F}, {'/',_}, {integer,_,A}).
-define(MFA2(M,F,A,N),
{'{',N},{atom,_,M},{',',_},{atom,_,F},{',',_},{integer,_,A},{'}',_}).
-define(DECL(N1,N2,T), {':',N1},{var,N2,T}).
lex([{atom,N,V1},{'->',_},{atom,_,V2} | L]) ->
Constant = {constant, unknown, edge, {V1,V2}},
[{edge,N,Constant} | lex(L)];
lex([{'{',N},{atom,_,V1},{',',_},{atom,_,V2},{'}',_} | L]) ->
Constant = {constant, unknown, edge, {V1,V2}},
[{edge,N,Constant} | lex(L)];
lex([?MFA(M,F,A,N),{'->',_},?MFA(M2,F2,A2,_) | L]) ->
Constant = {constant, 'Fun', edge, {{M,F,A},{M2,F2,A2}}},
[{edge,N,Constant} | lex(L)];
lex([?MFA(M,F,A,N) | L]) ->
Constant = {constant, 'Fun', vertex, {M,F,A}},
[{vertex,N,Constant} | lex(L)];
lex([{'{',N},?MFA2(M,F,A,_),{',',_},?MFA2(M2,F2,A2,_),{'}',_} | L]) ->
Constant = {constant, 'Fun', edge, {{M,F,A},{M2,F2,A2}}},
[{edge,N,Constant} | lex(L)];
lex([?MFA2(M,F,A,N) | L]) ->
Constant = {constant, 'Fun', vertex, {M,F,A}},
[{vertex,N,Constant} | lex(L)];
lex([?DECL(N1,N2,Decl) | L]) ->
case is_type(Decl) of
false -> [?DECL(N1, N2, Decl) | lex(L)];
true -> [{decl,N1,Decl} | lex(L)]
end;
lex([{':',N},{'=',_} | L]) ->
[{':=',N} | lex(L)];
lex([{'||',N},{'|',_} | L]) ->
[{'|||',N} | lex(L)];
lex([V={var,N,Var} | L]) ->
T = case is_type(Var) of
false -> V;
true -> {cast,N,Var}
end,
[T | lex(L)];
lex([T | Ts]) ->
[T | lex(Ts)];
lex([]) ->
[{'$end', erl_anno:new(?XREF_END_LINE)}].
is_type('Rel') -> true;
is_type('App') -> true;
is_type('Mod') -> true;
is_type('Fun') -> true;
is_type('Lin') -> true;
is_type('LLin') -> true;
is_type('XLin') -> true;
is_type('ELin') -> true;
is_type('XXL') -> true;
is_type(_) -> false. | lib/tools/src/xref_scanner.erl | 0.556882 | 0.410993 | xref_scanner.erl | starcoder |
%%%=============================================================================
%% Copyright 2012- Klarna AB
%% Copyright 2015- AUTHORS
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc Json schema validation module.
%%
%% This module is the core of jesse, it implements the validation functionality
%% according to the standard.
%% @end
%%%=============================================================================
-module(jesse_schema_validator).
%% API
-export([ validate/3
, validate_with_state/3
]).
%% Includes
-include("jesse_schema_validator.hrl").
-include_lib("eunit/include/eunit.hrl").
%%% API
%% @doc Validates json `Data' against `JsonSchema' with `Options'.
%% If the given json is valid, then it is returned to the caller as is,
%% otherwise an exception will be thrown.
-spec validate( JsonSchema :: jesse:schema()
, Value :: jesse:json_term()
, Options :: jesse:options()
) -> {ok, jesse:json_term()}
| no_return().
validate(JsonSchema, Value, Options0) ->
Options = [{with_value, Value} | proplists:delete(with_value, Options0)],
State = jesse_state:new(JsonSchema, Options),
NewState = validate_with_state(JsonSchema, Value, State),
{result(NewState), jesse_state:get_current_value(NewState)}.
%% @doc Validates json `Data' against `JsonSchema' with `State'.
%% If the given json is valid, then the latest state is returned to the caller,
%% otherwise an exception will be thrown.
-spec validate_with_state( JsonSchema :: jesse:json_term()
, Data :: jesse:json_term()
, State :: jesse_state:state()
) -> jesse_state:state()
| no_return().
validate_with_state(JsonSchema, Value, State) ->
SchemaVer = get_schema_ver(JsonSchema, State),
select_and_run_validator(SchemaVer, JsonSchema, Value, State).
%%% Internal functions
%% @doc Returns "$schema" property from `JsonSchema' if it is present,
%% otherwise the default schema version from `State' is returned.
%% @private
get_schema_ver(JsonSchema, State) ->
case jesse_json_path:value(?SCHEMA, JsonSchema, ?not_found) of
?not_found -> jesse_state:get_default_schema_ver(State);
SchemaVer -> SchemaVer
end.
%% @doc Returns a result depending on `State'.
%% @private
result(State) ->
ErrorList = jesse_state:get_error_list(State),
case ErrorList of
[] -> ok;
_ -> throw(ErrorList)
end.
%% @doc Runs appropriate validator depending on schema version
%% it is called with.
%% @private
select_and_run_validator(?json_schema_draft3, JsonSchema, Value, State) ->
jesse_validator_draft3:check_value( Value
, jesse_json_path:unwrap_value(JsonSchema)
, State
);
select_and_run_validator(?json_schema_draft4, JsonSchema, Value, State) ->
jesse_validator_draft4:check_value( Value
, jesse_json_path:unwrap_value(JsonSchema)
, State
);
select_and_run_validator(SchemaURI, _JsonSchema, _Value, State) ->
jesse_error:handle_schema_invalid({?schema_unsupported, SchemaURI}, State). | src/jesse_schema_validator.erl | 0.73307 | 0.426799 | jesse_schema_validator.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% jam_math: Simple date/time math functions against Erlang's
%% date/time tuples
%%
%% Copyright (c) 2016 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(jam_math).
-export([add_time/2, add_date/2, wrap/3]).
-ifdef(TEST).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
-endif.
add_time({Hour, Minute, Second}, {AddH}) ->
add_time({Hour, Minute, Second}, {AddH, 0, 0});
add_time({Hour, Minute, Second}, {AddH, AddM}) ->
add_time({Hour, Minute, Second}, {AddH, AddM, 0});
add_time(Time, Diff) ->
to_tuple(
wrap_diff(tuple_to_list(Time), tuple_to_list(Diff),
[24, 60, 60])).
add_date(Date, NumDays) ->
calendar:gregorian_days_to_date(
calendar:date_to_gregorian_days(Date)+NumDays).
to_tuple([H|T]) ->
{H, list_to_tuple(T)}.
wrap_diff(Time, Diff, Wraps) ->
wrap_diff_reversed(lists:reverse(Time),
lists:reverse(Diff),
lists:reverse(Wraps), 0, []).
wrap_diff_reversed([], [], [], Carry, Acc) ->
[Carry|Acc];
wrap_diff_reversed([Hv|Tv], [Hd|Td], [Hw|Tw], Carry, Acc) ->
{NewCarry, NewVal} = wrap(Hv + Hd + Carry, Hw, 0),
wrap_diff_reversed(Tv, Td, Tw, NewCarry, [NewVal|Acc]).
%% Max is exclusive, Min is inclusive. Return value is a tuple:
%% `{Carry, Sum}'
wrap(Sum, Max, Min) ->
wrap(Sum, Max-Min, Max-1, Min).
%% Internal only. Max is now inclusive. Algorithm modified from
%% http://stackoverflow.com/a/707426
wrap(Sum, Range, Max, Min) when Sum < Min ->
NewSum = Sum + (Range * ((Min - Sum) div Range + 1)),
{_, Sum2} = wrap(NewSum, Range, Max, Min),
{(Sum+1) div Range - 1, Sum2};
wrap(Sum, Range, _Max, Min) ->
{Sum div Range, Min + ((Sum - Min) rem Range)}.
-ifdef(TEST).
negative_wrap_test() ->
?assertEqual({-1, 0}, wrap(-60, 60, 0)).
negative_wrap_nonzero_test() ->
?assertEqual({-1, 11}, wrap(-1, 13, 1)).
zero_wrap_nonzero_test() ->
?assertEqual({-1, 12}, wrap(0, 13, 1)).
add_time_test_() ->
Times = [
{
0, {20, 15, 45}, {1}, {21, 15, 45}
},
{
1, {20, 15, 45}, {4}, {0, 15, 45}
},
{
2, {20, 15, 45}, {28}, {0, 15, 45}
},
{
2, {20, 15, 45}, {28, 40}, {0, 55, 45}
},
{
2, {20, 15, 45}, {28, 40, 5}, {0, 55, 50}
},
{
2, {20, 15, 45}, {27, 100, 5}, {0, 55, 50}
},
{
-1, {2, 15, 45}, {-4, -15}, {22, 0, 45}
},
{
-2, {2, 15, 45}, {-28, -15}, {22, 0, 45}
},
{
-2, {2, 15, 45}, {-27, -75}, {22, 0, 45}
}
],
lists:map(fun({Adj, Old, Add, New}) ->
?_assertEqual({Adj, New}, add_time(Old, Add))
end, Times).
-endif. | src/jam_math.erl | 0.592195 | 0.525125 | jam_math.erl | starcoder |
%% Copyright 2015 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(fn_error).
-export([to_string/2, normalize/1, normalize_warning/1]).
to_string(Module, Errors) when is_list(Errors) ->
lists:map(fun (Error) -> to_string(Module, Error) end, Errors);
to_string(Module, {Type, Line, Details}) ->
TypeStr = type_to_string(Type),
DetailsStr = details_to_string(Details),
io_lib:format("~p:~p:~p: ~s at line ~p: ~s~n", [Module, Line, Type, TypeStr, Line, DetailsStr]).
type_to_string(invalid_fn_ref) -> <<"Invalid Function Reference">>;
type_to_string(invalid_bin_type_specifier_field) -> <<"Invalid Type Specifier Field">>;
type_to_string(invalid_bin_type_specifier_value) -> <<"Invalid Type Specifier Value">>;
type_to_string(unknown_compiler_info) -> <<"Unknown Compiler Info Name">>;
type_to_string(case_mismatch) -> <<"Case Mismatch">>;
type_to_string(bad_record_field_init) -> <<"Bad Record Field Initialization">>;
type_to_string(bad_record_field_decl) -> <<"Bad Record Field Declaration">>;
type_to_string(invalid_export) -> <<"Invalid Export">>;
type_to_string(invalid_expression) -> <<"Invalid Expression">>;
type_to_string(invalid_top_level_expression) -> <<"Invalid Top Level Expression">>;
type_to_string(invalid_type_declaration) -> <<"Invalid Type Declaration">>;
type_to_string(invalid_type_value) -> <<"Invalid Type Value">>;
type_to_string(invalid_type_argument) -> <<"Invalid Type Argument">>;
type_to_string(invalid_catch) -> <<"Invalid Catch">>;
type_to_string(duplicated_function_spec) -> <<"Duplicated Function Spec">>;
type_to_string(Other) -> atom_to_list(Other).
format_maybe_ast({ast, Ast}) -> fn_pp:print(Ast);
format_maybe_ast(Other) -> io_lib:format("~p", [Other]).
details_to_string({expected, Expected, got, Got}) when is_list(Expected) ->
io_lib:format("Expected ~s got ~s", [Expected, format_maybe_ast(Got)]);
details_to_string({expected, Expected, got, Got}) ->
io_lib:format("Expected ~p got ~s", [Expected, format_maybe_ast(Got)]);
details_to_string(Other) -> format_maybe_ast(Other).
to_bs(V) when is_list(V) -> list_to_binary(V);
to_bs(V) when is_binary(V) -> V.
normalize({error, {Line, fn_parser, Reason}}) ->
BReason = to_bs(Reason),
case BReason of
<<"syntax error before: '\\n'">> ->
io_lib:format("~p: syntax error before end of line", [Line]);
_ ->
io_lib:format("~p: parse error: '~s'", [Line, Reason])
end;
normalize({error, {Line, fn_lexer, {illegal, Reason}}}) ->
io_lib:format("~p: illegal char ~p", [Line, Reason]);
normalize({error, {Line, fn_lexer, {eof, _}}}) ->
io_lib:format("~p: end of file", [Line]);
normalize({error, {Line, fn_lexer, Reason}}) when is_list(Reason) ->
io_lib:format("~p: ~s", [Line, Reason]);
normalize({error, {efene, _Module, Reason}}) ->
io_lib:format("~s", [Reason]);
normalize({error, Other}) ->
io_lib:format("~p", [Other]);
normalize({Line, erl_lint, Reason}) ->
io_lib:format("line ~p: ~s", [Line, erl_lint:format_error(Reason)]);
normalize({Line, sys_core_fold, Reason}) ->
io_lib:format("line ~p: ~s", [Line, sys_core_fold:format_error(Reason)]);
normalize({Line, v3_kernel, Reason}) ->
io_lib:format("line ~p: ~s", [Line, v3_kernel:format_error(Reason)]);
normalize({_Path, Errors}) when is_list(Errors) ->
ErrorsStrs = [normalize(Error) || Error <- Errors],
[string:join(ErrorsStrs, "\n"), "\n"];
normalize(Other) ->
io_lib:format("~p", [Other]).
normalize_warning({implicit_override, Line,
#{fn := {FnName, Arity},
prev_fn := #{module_path := PrevPath, line := PrevLine}}}) ->
io_lib:format("~p: Implicit override of function ~p/~p at line ~p (previously defined at ~p line ~p, hint: add @override attribute to supress warning)", [Line, FnName, Arity, Line, PrevPath, PrevLine]);
normalize_warning(Other) ->
io_lib:format("~p", [Other]). | src/fn_error.erl | 0.600774 | 0.47591 | fn_error.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% Additional iterator utilities that are not replicas of `lists'
%%% module functionality. These functions are kept separate to avoid
%%% any future name clashes with additions to the stdlib.
%%%
%%% Unlike the functions in `llists', these utility functions do not
%%% follow the same strict transformation rules. Instead, inputs and
%%% outputs generally follow evaluation needs with eagerly evaluated
%%% values passed as lists and lazily evaluated ones passed as
%%% iterators.
%%% @end
%%%-------------------------------------------------------------------
-module(llists_utils).
-record(zipper, {heads, tail}).
-type permutation_options() :: proplists:proplist().
%% API
-export([
choice/1,
combinations/2,
combinations/3,
cycle/1,
enumerate/1,
group/2,
groupwith/2,
permutations/2,
permutations/3,
random/0,
random/1,
unique/1,
unique/2
]).
-export_type([permutation_options/0]).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc
%% Create an infinite iterator that returns random elements from the
%% given list of `Choices'. Each iterator returns a unique sequence
%% and returns the same unique sequence each time it is evaluated.
%% @end
-spec choice(Choices) -> Iterator when
Choices :: [Elem, ...],
Iterator :: llists:iterator(Elem).
choice(Choices) when length(Choices) > 0 ->
Length = length(Choices),
Enumerated = lists:zip(lists:seq(1, Length), Choices),
Lookup = maps:from_list(Enumerated),
llists:map(
fun(I) -> maps:get(I, Lookup) end,
random(Length)
).
%% @see combinations/3
-spec combinations(N, Choices) -> Iterator when
N :: non_neg_integer(),
Choices :: [Elem],
Iterator :: llists:iterator([Elem]).
combinations(N, Choices) when N >= 0, is_list(Choices) ->
llists:unfold(
fun
(none) ->
none;
(CurrentChoices) ->
NextChoice = next_choice(CurrentChoices),
NextChoices = next_combination(CurrentChoices),
{NextChoice, NextChoices}
end,
unique_choices(N, Choices)
).
%% @doc
%% Create an iterator that returns all combinations of elements from
%% `Choices' that are `N' elements long. If the `repetitions' property
%% is passed in `Options', combinations with repeated elements of
%% `Choices' are included.
%%
%% Examples:
%% ```
%% > llists:to_list(
%% llists_utils:combinations(2, [1, 2, 3]).
%% [[1,2],[1,3],[2,3]]
%% > llists:to_list(
%% llists_utils:combinations(2, [1, 2, 3], [repetitions]).
%% [[1,1],[1,2],[1,3],[2,2],[2,3],[3,3]]
%% '''
%%
%% If the elements of `Choices' are sorted, the order of the resulting
%% combinations will also be sorted.
%% @end
-spec combinations(N, Choices, Options) -> Iterator when
N :: non_neg_integer(),
Choices :: [Elem],
Options :: permutation_options(),
Iterator :: llists:iterator([Elem]).
combinations(N, Choices, Options) when is_list(Options) ->
Repetitions = proplists:get_bool(repetitions, Options),
case Repetitions of
true ->
combinations_with_repetitions(N, Choices);
false ->
combinations(N, Choices)
end.
%% @doc
%% Create an infinite iterator that repeatedly returns the sequence of
%% elements in the given iterator.
%% @end
-spec cycle(Iterator1) -> Iterator2 when
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator(Elem).
cycle(Iterator) ->
true = llists:is_iterator(Iterator),
llists:append(llists:duplicate(infinity, Iterator)).
%% @doc
%% Given an existing `Iterator1' creates a new `Iterator2' which
%% returns each element of the original iterator as a tuple of the
%% number of elements returned and the element itself.
%%
%% Example:
%% ```
%% > llists:to_list(
%% llists_utils:enumerate(
%% llits:from_list([one, two, three]))).
%% [{1,one},{2,two},{3,three}]
%% '''
%% @end
-spec enumerate(Iterator1) -> Iterator2 when
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator({Index, Elem}),
Index :: pos_integer().
enumerate(Iterator) ->
true = llists:is_iterator(Iterator),
llists:unfold(
fun({I, FoldIterator}) ->
case llists:next(FoldIterator) of
[] ->
none;
[Elem | Next] ->
{{I, Elem}, {I + 1, Next}}
end
end,
{1, Iterator}
).
%% @doc
%% Create an iterator that returns groups of elements from `Iterator1'
%% as a list of at least `Length' elements.
%%
%% Example:
%% ```
%% > llists:to_list(
%% llists_utils:group(
%% 2,
%% llists:from_list([1, 2, 3, 4, 5]))).
%% [[1,2],[3,4],[5]]
%% '''
%%
%% It is not an error if there are not enough elements to fill out the
%% final group, instead a smaller group is returned.
%% @end
-spec group(Length, Iterator1) -> Iterator2 when
Length :: pos_integer(),
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator([Elem]).
group(Length, Iterator) when Length > 0 ->
true = llists:is_iterator(Iterator),
llists:unfold(
fun(FoldIterator) ->
group_loop(Length, [], FoldIterator)
end,
Iterator
).
%% @doc
%% Create an iterator that returns groups of elements from `Iterator1'
%% based on the return value of `Pred(Elem)'. If the predicate
%% function returns `true' it signals the end of a group which will be
%% returned as a list. If the predicate returns `false', the element
%% will be included in the next group returned. Even if the predicate
%% function returns `false' for the last element, the final group will
%% still be returned.
%%
%% Example:
%% ```
%% > llists:to_list(
%% llists_utils:groupwith(
%% fun (Elem) -> Elem rem 2 == 0 end,
%% llists:from_list([1, 2, 3, 4, 5]))).
%% [[1,2],[3,4],[5]]
%% '''
%%
%% If `Pred(Elem)' returns false for every element in an infinite
%% iterator, the first evaluation of `Iterator2' will never return.
%% @end
-spec groupwith(Pred, Iterator1) -> Iterator2 when
Pred :: llists:predicate(Elem),
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator([Elem]).
groupwith(Pred, Iterator) when is_function(Pred, 1) ->
true = llists:is_iterator(Iterator),
llists:unfold(
fun(FoldIterator) ->
groupwith_loop(Pred, [], FoldIterator)
end,
Iterator
).
%% @see permutations/3
-spec permutations(N, Choices) -> Iterator when
N :: non_neg_integer(),
Choices :: [Elem],
Iterator :: llists:iterator([Elem]).
permutations(N, Choices) when N >= 0, is_list(Choices) ->
llists:unfold(
fun
(none) ->
none;
(Zippers) ->
NextChoice = zipper_choice(Zippers),
NextZippers = next_permutation(Zippers),
{NextChoice, NextZippers}
end,
zipper_choices(N, Choices)
).
%% @doc
%% Create an iterator that returns all permutations of elements from
%% `Choices' that are `N' elements long. If the `repetitions' property
%% is passed in `Options', permutations with repeated elements of
%% `Choices' are included.
%%
%% Examples:
%% ```
%% > llists:to_list(
%% llists_utils:permutations(2, [1, 2, 3]).
%% [[1,2],[1,3],[2,1],[2,3],[3,1],[3,2]]
%% > llists:to_list(
%% llists_utils:permutations(2, [1, 2, 3], [repetitions]).
%% [[1,1],[1,2],[1,3],[2,1],[2,2],[2,3],[3,1],[3,2],[3,3]]
%% '''
%%
%% If the elements of `Choices' are sorted, the order of the resulting
%% permutations will also be sorted.
%% @end
-spec permutations(N, Choices, Options) -> Iterator when
N :: non_neg_integer(),
Choices :: [Elem],
Options :: permutation_options(),
Iterator :: llists:iterator([Elem]).
permutations(N, Choices, Options) when is_list(Options) ->
Repetitions = proplists:get_bool(repetitions, Options),
case Repetitions of
true ->
permutations_with_repetitions(N, Choices);
false ->
permutations(N, Choices)
end.
%% @doc
%% Create an infinite iterator that returns random floats in the range
%% `[0.0, 1.0)'. Each iterator returns a unique sequence and returns
%% the same unique sequence each time it is evaluated.
%% @end
%% @see rand:uniform/0
-spec random() -> Iterator when Iterator :: llists:iterator(float()).
random() ->
llists:unfold(
fun(Seed) -> rand:uniform_s(Seed) end,
rand:seed_s(exrop)
).
%% @doc
%% Create an infinite iterator that returns random integers in the range
%% `[1, N)'. Each iterator returns a unique sequence and returns
%% the same unique sequence each time it is evaluated.
%% @end
%% @see rand:uniform/1
-spec random(N) -> Iterator when
N :: pos_integer(),
Iterator :: llists:iterator(float()).
random(N) when N >= 1 ->
llists:unfold(
fun(Seed) -> rand:uniform_s(N, Seed) end,
rand:seed_s(exrop)
).
%% @doc
%% As `unique/2', but with `==' as a equality function.
%% @end
%% @see unique/2
-spec unique(Iterator1) -> Iterator2 when
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator(Elem).
unique(Iterator) ->
true = llists:is_iterator(Iterator),
unique(fun erlang:'=='/2, Iterator).
%% @doc
%% Discards repeated values in a sorted iterator according to a
%% provided equality function `Fun(A, B)' which should return `true'
%% when `A' and `B' are equal and `false' otherwise. All values that
%% compares equal to the previously returned value are skipped until a
%% non-equal value is found.
%%
%% Example:
%% ```
%% > llists:to_list(
%% llists_utils:unique(
%% llists:from_list([1, 1, 2, 2, 1, 1]))).
%% [1,2,1]
%% '''
%%
%% Infinite iterators of equal values will cause the first evaluation
%% of `Iterator2' to never return.
%% @end
-spec unique(Fun, Iterator1) -> Iterator2 when
Fun :: llists:compare(A, B),
Iterator1 :: llists:iterator(Elem),
Iterator2 :: llists:iterator(Elem),
A :: Elem,
B :: Elem.
unique(Fun, Iterator) when is_function(Fun, 2) ->
true = llists:is_iterator(Iterator),
llists:unfold(
fun
Next({_Prev, []}) ->
none;
Next({Prev, FoldIterator}) ->
case {Prev, llists:next(FoldIterator)} of
{_Prev, []} ->
none;
{first, [Elem | NextIterator]} ->
{Elem, {{previous, Elem}, NextIterator}};
{{previous, PrevElem} = Prev, [Elem | NextIterator]} ->
case Fun(Elem, PrevElem) of
true ->
Next({Prev, NextIterator});
false ->
{Elem, {{previous, Elem}, NextIterator}}
end
end
end,
{first, Iterator}
).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
repeated_choices(N, Choices) when N >= 0 ->
lists:duplicate(N, Choices).
unique_choices(N, Choices) ->
unique_choices(N, Choices, []).
unique_choices(0, _Choices, Acc) ->
Acc;
unique_choices(_N, [], _Acc) ->
none;
unique_choices(N, [_ | Tail] = Choices, Acc) when N > 0 ->
unique_choices(N - 1, Tail, [Choices | Acc]).
next_choice(Choices) ->
lists:foldl(
fun([Head | _], Acc) -> [Head | Acc] end,
[],
Choices
).
next_combination(Choices) ->
next_combination(1, Choices).
next_combination(_N, []) ->
none;
next_combination(N, [Current | Choices]) when N >= length(Current) ->
next_combination(N + 1, Choices);
next_combination(N, [[_ | Tail] | Choices]) ->
unique_choices(N, Tail) ++ Choices.
next_rep_combination(Choices) ->
next_rep_combination(1, Choices).
next_rep_combination(_N, []) ->
none;
next_rep_combination(N, [[_] | Choices]) ->
next_rep_combination(N + 1, Choices);
next_rep_combination(N, [[_ | Tail] | Choices]) ->
repeated_choices(N, Tail) ++ Choices.
combinations_with_repetitions(N, Choices) when N >= 0, is_list(Choices) ->
llists:unfold(
fun
(none) ->
none;
(CurrentChoices) ->
NextChoice = next_choice(CurrentChoices),
NextChoices = next_rep_combination(CurrentChoices),
{NextChoice, NextChoices}
end,
repeated_choices(N, Choices)
).
zipper_choices(N, Choices) ->
zipper_choices(N, Choices, []).
zipper_choices(0, _Choices, Acc) ->
Acc;
zipper_choices(_N, [], _Acc) ->
none;
zipper_choices(N, [Head | Tail], Acc) ->
zipper_choices(N - 1, Tail, [#zipper{heads = [Head], tail = Tail} | Acc]).
zipper_choice(Zippers) ->
lists:foldl(
fun(#zipper{heads = [Head | _]}, Acc) -> [Head | Acc] end,
[],
Zippers
).
next_permutation(Zippers) ->
next_permutation(1, Zippers).
next_permutation(_N, []) ->
none;
next_permutation(N, [#zipper{tail = []} | Zippers]) ->
next_permutation(N + 1, Zippers);
next_permutation(N, [#zipper{heads = Heads, tail = [Head | Tail]} | Zippers]) ->
zipper_choices(N - 1, lists:reverse(Heads) ++ Tail) ++
[#zipper{heads = [Head | Heads], tail = Tail}] ++
Zippers.
next_rep_permutation(Original, Choices) ->
next_rep_permutation(1, Original, Choices).
next_rep_permutation(_N, _Original, []) ->
none;
next_rep_permutation(N, Original, [[_] | Choices]) ->
next_rep_permutation(N + 1, Original, Choices);
next_rep_permutation(N, Original, [[_ | Tail] | Choices]) ->
repeated_choices(N - 1, Original) ++ [Tail] ++ Choices.
permutations_with_repetitions(N, Choices) when N >= 0, is_list(Choices) ->
llists:unfold(
fun
(none) ->
none;
(CurrentChoices) ->
NextChoice = next_choice(CurrentChoices),
NextChoices = next_rep_permutation(Choices, CurrentChoices),
{NextChoice, NextChoices}
end,
repeated_choices(N, Choices)
).
group_loop(_N, _Acc, none) ->
none;
group_loop(0, Acc, Iterator) ->
{lists:reverse(Acc), Iterator};
group_loop(N, Acc, Iterator) when N > 0 ->
case {Acc, llists:next(Iterator)} of
{[], []} ->
none;
{_, []} ->
{lists:reverse(Acc), none};
{_, [Elem | NextIterator]} ->
group_loop(N - 1, [Elem | Acc], NextIterator)
end.
groupwith_loop(_Pred, _Acc, none) ->
none;
groupwith_loop(Pred, Acc, Iterator) ->
case {Acc, llists:next(Iterator)} of
{[], []} ->
none;
{_, []} ->
{lists:reverse(Acc), none};
{_, [Elem | NextIterator]} ->
case Pred(Elem) of
true ->
{lists:reverse([Elem | Acc]), NextIterator};
false ->
groupwith_loop(Pred, [Elem | Acc], NextIterator)
end
end. | src/llists_utils.erl | 0.699562 | 0.707533 | llists_utils.erl | starcoder |
% @doc API for the
% <a href="https://reference.digilentinc.com/reference/pmod/pmodhygro/start">
% PmodHYGRO
% </a>.
%
% Start the driver with
% ```
% 1> grisp:add_device(i2c, pmod_hygro).
% '''
% @end
-module(pmod_hygro).
-behaviour(gen_server).
% API
-export([start_link/2]).
-export([temp/0]).
-export([humid/0]).
-export([measurements/0]).
% Callbacks
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([code_change/3]).
-export([terminate/2]).
-define(DEVICE_ADR, 16#40).
-define(REG_TEMPERATURE, 16#00).
-define(REG_MANUFACTURER_ID, 16#FE).
-define(REG_DEVICE_ID, 16#FF).
-define(DELAY_TIME, 15).
-define(MANUFACTURER_ID, 16#5449).
-define(DEVICE_ID, 16#1050).
%--- Records -------------------------------------------------------------------
%
-record(state, {bus}).
%--- API -----------------------------------------------------------------------
% @private
start_link(Slot, _Opts) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, Slot, []).
% @doc Measure the temperature in °C.
%
% === Example ===
% ```
% 2> pmod_hygro:temp().
% [{temp,24.6746826171875}]
% '''
-spec temp() -> [{temp, float()}].
temp() ->
gen_server:call(?MODULE, temp).
% @doc Measure the humidity in %.
%
% === Example ===
% ```
% 2> pmod_hygro:humid().
% [{humid,50.225830078125}]
% '''
-spec humid() -> [{humid, float()}].
humid() ->
gen_server:call(?MODULE, humid).
% @doc Measure the temperature and humidity.
%
% === Example ===
% ```
% 2> pmod_hygro:measurements().
% [{temp,24.52362060546875},{humid,50.823974609375}]
% '''
-spec measurements() -> [{temp, float()}|{humid, float()}].
measurements() ->
gen_server:call(?MODULE, measurements).
%--- Callbacks -----------------------------------------------------------------
% @private
init(i2c = Slot) ->
Bus = grisp_i2c:open(i2c1),
verify_device(Bus),
grisp_devices:register(Slot, ?MODULE),
{ok, #state{bus = Bus}}.
% @private
handle_call(temp, _From, #state{bus = Bus} = State) ->
{ok, <<T:14/unsigned-big, _:2>>} = device_request(Bus, ?REG_TEMPERATURE, ?DELAY_TIME, 2),
Temp = evaluate_temp(T),
{reply, [{temp, Temp}], State};
handle_call(humid, _From, #state{bus = Bus} = State) ->
{ok, <<_:14, _:2, H:14/unsigned-big, _:2>>} = device_request(Bus, ?REG_TEMPERATURE, ?DELAY_TIME, 4),
Humid = evaluate_humid(H),
{reply, [{humid, Humid}], State};
handle_call(measurements, _From, #state{bus = Bus} = State) ->
{ok, <<T:14/unsigned-big, _:2, H:14/unsigned-big, _:2>>} = device_request(Bus, ?REG_TEMPERATURE, ?DELAY_TIME, 4),
Temp = evaluate_temp(T),
Humid = evaluate_humid(H),
{reply, [{temp, Temp}, {humid, Humid}], State}.
% @private
handle_cast(Request, _State) -> error({unknown_cast, Request}).
% @private
handle_info(Info, _State) -> error({unknown_info, Info}).
% @private
code_change(_OldVsn, State, _Extra) -> {ok, State}.
% @private
terminate(_Reason, _State) -> ok.
%--- Internal ------------------------------------------------------------------
verify_device(Bus) ->
{ok, <<ManufacturerID:16>>} = device_request(Bus, ?REG_MANUFACTURER_ID, 0, 2),
{ok, <<DeviceID:16>>} = device_request(Bus, ?REG_DEVICE_ID, 0, 2),
case {ManufacturerID, DeviceID} of
{?MANUFACTURER_ID, ?DEVICE_ID} -> ok;
Other -> error({device_mismatch, Other})
end.
device_request(Bus, Register, Delay, BytesToRead) ->
[ok] = grisp_i2c:transfer(Bus, [{write, ?DEVICE_ADR, 0, <<Register:8>>}]),
timer:sleep(Delay),
[Response] = grisp_i2c:transfer(Bus, [{read, ?DEVICE_ADR, 0, BytesToRead}]),
{ok, Response}.
evaluate_temp(T) ->
(T / 16384) * 165 - 40.
evaluate_humid(H) ->
(H / 16384) * 100. | src/pmod_hygro.erl | 0.533641 | 0.425187 | pmod_hygro.erl | starcoder |
%% The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at http://www.mozilla.org/MPL/
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is GoPivotal, Inc.
%% Copyright (c) 2007-2017 Pivotal Software, Inc. All rights reserved.
%%
-module(mn_climber_version).
-export([
recorded/0, matches/2, desired/0,
record_desired/0, upgrades_required/0
]).
%% -------------------------------------------------------------------
-export_type([step/0]).
-type step() :: {atom(), atom()}.
-type version() :: [atom()].
-define(VERSION_FILENAME, "schema_version").
%% -------------------------------------------------------------------
-spec recorded() -> rabbit_types:ok_or_error2(version(), any()).
recorded() ->
case mn_climber_file:read_term_file(schema_filename()) of
{ok, [V]} -> {ok, V};
{error, _} = Err -> Err
end.
record(V) ->
ok = mn_climber_file:write_term_file(schema_filename(), [V]).
recorded_for_upgrades() ->
case recorded() of
{error, _} = Err ->
Err;
{ok, Version} ->
{ok, get_upgrades(Version)}
end.
%% -------------------------------------------------------------------
-spec matches([A], [A]) -> boolean().
matches(VerA, VerB) ->
lists:usort(VerA) =:= lists:usort(VerB).
%% -------------------------------------------------------------------
-spec desired() -> version().
desired() -> with_upgrade_graph(fun heads/1).
-spec record_desired() -> 'ok'.
record_desired() -> record(desired()).
-spec upgrades_required() -> rabbit_types:ok_or_error2([step()], any()).
upgrades_required() ->
case recorded_for_upgrades() of
{error, enoent} ->
{error, starting_from_scratch};
{ok, CurrentHeads} ->
with_upgrade_graph(
fun(G) ->
case unknown_heads(CurrentHeads, G) of
[] -> {ok, upgrades_to_apply(CurrentHeads, G)};
Unknown -> {error, {future_upgrades_found, Unknown}}
end
end)
end.
%% -------------------------------------------------------------------
with_upgrade_graph(Fun) ->
case mn_climber_misc:build_acyclic_graph(
fun({_App, Module, Steps}) -> vertices(Module, Steps) end,
fun({_App, Module, Steps}) -> edges(Module, Steps) end,
mn_climber_misc:all_module_attributes(mn_climber_upgrade)) of
{ok, G} ->
try
Fun(G)
after
true = digraph:delete(G)
end;
{error, {vertex, duplicate, StepName}} ->
throw({error, {duplicate_upgrade_step, StepName}});
{error, {edge, {bad_vertex, StepName}, _From, _To}} ->
throw({error, {dependency_on_unknown_upgrade_step, StepName}});
{error, {edge, {bad_edge, StepNames}, _From, _To}} ->
throw({error, {cycle_in_upgrade_steps, StepNames}})
end.
vertices(Module, Steps) ->
[{StepName, {Module, StepName}} || {StepName, _Reqs} <- Steps].
edges(_Module, Steps) ->
[{Require, StepName} || {StepName, Requires} <- Steps,
Require <- Requires].
unknown_heads(Heads, G) ->
[H || H <- Heads, digraph:vertex(G, H) =:= false].
upgrades_to_apply(Heads, G) ->
%% Take all the vertices which can reach the known heads. That's
%% everything we've already applied. Subtract that from all
%% vertices: that's what we have to apply.
Unsorted = sets:to_list(
sets:subtract(
sets:from_list(digraph:vertices(G)),
sets:from_list(digraph_utils:reaching(Heads, G)))),
%% Form a subgraph from that list and find a topological ordering
%% so we can invoke them in order.
[element(2, digraph:vertex(G, StepName)) ||
StepName <- digraph_utils:topsort(digraph_utils:subgraph(G, Unsorted))].
heads(G) ->
lists:sort([V || V <- digraph:vertices(G), digraph:out_degree(G, V) =:= 0]).
%% -------------------------------------------------------------------
get_upgrades(Version) when is_list(Version) ->
[Name || {_App, _Module, Attributes} <-
mn_climber_misc:all_module_attributes(mn_climber_upgrade),
{Name, _Requires} <- Attributes,
lists:member(Name, Version)].
dir() -> mn_climber_mnesia:dir().
schema_filename() -> filename:join(dir(), ?VERSION_FILENAME). | src/mn_climber_version.erl | 0.622115 | 0.475118 | mn_climber_version.erl | starcoder |
%%
%% @doc Restoring RSA private key from its parts.
%%
%% RSA private key consists of four integer values `{p, q, t, d}'.
%% The knowledge of any one of these values is sufficient to
%% compute all the other three.
%%
%% Easy case: Attacker knows `p' (or `q'). Then
%% ```
%% q = n / p
%% t = (p - 1)(q - 1) / gcd(p - 1, q - 1)
%% d = e^-1 (mod t)'''
%%
%% To play with the functions in this module, the following commands
%% may be useful if you want to generate the real private keys:
%% ```
%% $ openssl genrsa -out private.pem 2048
%% $ openssl asn1parse -i -in private.pem
%% '''
%% @reference [FSK1] Chapter 12.4.3. The Private Key.
%%
-module(rsa_private_key).
-author("<NAME>").
-export([factorize_from_d/3, factorize_from_t/2]).
%%
%% @doc If an attacker knows `d', she can find `{p, q}' using this method.
%%
-spec factorize_from_d(N :: pos_integer(), E :: pos_integer(), D :: pos_integer()) ->
{P :: pos_integer(), Q :: pos_integer()} | error.
factorize_from_d(N, E, D) ->
factorize_from_d(N, E, D, 1).
factorize_from_d(N, E, D, Factor) ->
case factorize_from_t(N, (E * D - 1) div Factor) of
error -> factorize_from_d(N, E, D, Factor + 1);
Result -> Result
end.
%%
%% @doc If an attacker knows `t', she can find `{p, q}' using this method.
%%
-spec factorize_from_t(N :: pos_integer(), T :: pos_integer()) ->
{P :: pos_integer(), Q :: pos_integer()} | error.
factorize_from_t(N, T) ->
case N div T of
0 -> error;
GCD -> factorize_from_t(N, T, GCD)
end.
factorize_from_t(N, T, GCD) ->
Phi = T * GCD,
S = N - Phi + 1,
factorize_from_s(N, S).
factorize_from_s(_N, S) when S rem 2 =:= 1 -> error;
factorize_from_s(N, S) ->
S2 = S div 2,
Desc = maths:isqrt(S2 * S2 - N),
{P, Q} = {S2 - Desc, S2 + Desc},
case P * Q of
N -> {P, Q};
_ -> error
end.
%% =============================================================================
%% Unit tests
%% =============================================================================
-include_lib("eunit/include/eunit.hrl").
factorize_from_d_test() ->
?assertEqual({11, 13}, factorize_from_d(143, 7, 43)).
factorize_from_t_test_() -> [
?_assertEqual(error, factorize_from_t(143, 300)),
?_assertEqual(error, factorize_from_t(143, 150)),
?_assertEqual(error, factorize_from_t(143, 100)),
?_assertEqual(error, factorize_from_t(143, 75)),
?_assertEqual({11, 13}, factorize_from_t(143, 60))]. | lib/ndpar/src/rsa_private_key.erl | 0.699152 | 0.700612 | rsa_private_key.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_core: Core Riak Application
%%
%% Copyright (c) 2007-2015 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc The default functions used for claiming partition ownership. Generally,
%% a wants_claim function should return either {yes, Integer} or 'no' where
%% Integer is the number of additional partitions wanted by this node. A
%% choose_claim function should return a riak_core_ring with more
%% partitions claimed by this node than in the input ring.
%% The usual intention for partition ownership assumes relative heterogeneity of
%% capacity and connectivity. Accordingly, the standard claim functions attempt
%% to maximize "spread" -- expected distance between partitions claimed by each
%% given node. This is in order to produce the expectation that for any
%% reasonably short span of consecutive partitions, there will be a minimal
%% number of partitions owned by the same node.
%% The exact amount that is considered tolerable is determined by the
%% application env variable "target_n_val". The functions in riak_core_claim
%% will ensure that all sequences up to target_n_val long contain no repeats if
%% at all possible. The effect of this is that when the number of nodes in the
%% system is smaller than target_n_val, a potentially large number of partitions
%% must be moved in order to safely add a new node. After the cluster has grown
%% beyond that size, a minimal number of partitions (1/NumNodes) will generally
%% be moved.
%% If the number of nodes does not divide evenly into the number of partitions,
%% it may not be possible to perfectly achieve the maximum spread constraint.
%% In that case, Riak will minimize the cases where the constraint is violated
%% and they will all exist near the origin point of the ring.
%% A good way to decide on the setting of target_n_val for your application is
%% to set it to the largest value you expect to use for any bucket's n_val. The
%% default is 4.
-module(riak_core_claim).
-export([claim/1,
claim/3,
claim_until_balanced/2,
claim_until_balanced/4]).
-export([default_wants_claim/1,
default_wants_claim/2,
default_choose_claim/1,
default_choose_claim/2,
default_choose_claim/3,
never_wants_claim/1,
never_wants_claim/2,
random_choose_claim/1,
random_choose_claim/2,
random_choose_claim/3]).
-export([wants_claim_v2/1,
wants_claim_v2/2,
choose_claim_v2/1,
choose_claim_v2/2,
choose_claim_v2/3,
claim_rebalance_n/2,
claim_diversify/3,
claim_diagonal/3,
wants/1,
wants_owns_diff/2,
meets_target_n/2,
diagonal_stripe/2]).
-define(DEF_TARGET_N, 4).
claim(Ring) ->
claim(Ring, want, choose).
claim(Ring, _, _) ->
Members = riak_core_ring:claiming_members(Ring),
lists:foldl(fun(Node, Ring0) ->
claim_until_balanced(Ring0, Node, want, choose)
end, Ring, Members).
claim_until_balanced(Ring, Node) ->
claim_until_balanced(Ring, Node, want, choose).
claim_until_balanced(Ring, Node, want, choose) ->
NeedsIndexes = wants_claim_v2(Ring, Node),
case NeedsIndexes of
no -> Ring;
{yes, _NumToClaim} ->
NewRing = choose_claim_v2(Ring, Node),
claim_until_balanced(NewRing, Node, want, choose)
end.
%% ===================================================================
%% Claim Function Implementations
%% ===================================================================
%% @spec default_choose_claim(riak_core_ring()) -> riak_core_ring()
%% @doc Choose a partition at random.
default_choose_claim(Ring) ->
default_choose_claim(Ring, node()).
default_choose_claim(Ring, Node) ->
choose_claim_v2(Ring, Node).
default_choose_claim(Ring, Node, Params) ->
choose_claim_v2(Ring, Node, Params).
%% @spec default_wants_claim(riak_core_ring()) -> {yes, integer()} | no
%% @doc Want a partition if we currently have less than floor(ringsize/nodes).
default_wants_claim(Ring) ->
default_wants_claim(Ring, node()).
default_wants_claim(Ring, Node) ->
wants_claim_v2(Ring, Node).
wants_claim_v2(Ring) ->
wants_claim_v2(Ring, node()).
wants_claim_v2(Ring, Node) ->
Active = riak_core_ring:claiming_members(Ring),
Owners = riak_core_ring:all_owners(Ring),
Counts = get_counts(Active, Owners),
NodeCount = erlang:length(Active),
RingSize = riak_core_ring:num_partitions(Ring),
Avg = RingSize div NodeCount,
Count = proplists:get_value(Node, Counts, 0),
case Count < Avg of
false -> no;
true -> {yes, Avg - Count}
end.
%% Provide default choose parameters if none given
default_choose_params() ->
default_choose_params([]).
default_choose_params(Params) ->
case proplists:get_value(target_n_val, Params) of
undefined ->
TN = application:get_env(riak_core, target_n_val, ?DEF_TARGET_N),
[{target_n_val, TN} | Params];
_->
Params
end.
choose_claim_v2(Ring) ->
choose_claim_v2(Ring, node()).
choose_claim_v2(Ring, Node) ->
Params = default_choose_params(),
choose_claim_v2(Ring, Node, Params).
choose_claim_v2(Ring, Node, Params0) ->
Params = default_choose_params(Params0),
%% Active::[node()]
Active = riak_core_ring:claiming_members(Ring),
%% Owners::[{index(), node()}]
Owners = riak_core_ring:all_owners(Ring),
%% Counts::[node(), non_neg_integer()]
Counts = get_counts(Active, Owners),
RingSize = riak_core_ring:num_partitions(Ring),
NodeCount = erlang:length(Active),
%% Deltas::[node(), integer()]
Deltas = get_deltas(RingSize, NodeCount, Owners, Counts),
{_, Want} = lists:keyfind(Node, 1, Deltas),
TargetN = proplists:get_value(target_n_val, Params),
AllIndices = lists:zip(lists:seq(0, length(Owners)-1),
[Idx || {Idx, _} <- Owners]),
EnoughNodes =
(NodeCount > TargetN)
or ((NodeCount == TargetN) and (RingSize rem TargetN =:= 0)),
case EnoughNodes of
true ->
%% If we have enough nodes to meet target_n, then we prefer to
%% claim indices that are currently causing violations, and then
%% fallback to indices in linear order. The filtering steps below
%% will ensure no new violations are introduced.
Violated = lists:flatten(find_violations(Ring, TargetN)),
Violated2 = [lists:keyfind(Idx, 2, AllIndices) || Idx <- Violated],
Indices = Violated2 ++ (AllIndices -- Violated2);
false ->
%% If we do not have enough nodes to meet target_n, then we prefer
%% claiming the same indices that would occur during a
%% re-diagonalization of the ring with target_n nodes, falling
%% back to linear offsets off these preferred indices when the
%% number of indices desired is less than the computed set.
Padding = lists:duplicate(TargetN, undefined),
Expanded = lists:sublist(Active ++ Padding, TargetN),
PreferredClaim = riak_core_claim:diagonal_stripe(Ring, Expanded),
PreferredNth = [begin
{Nth, Idx} = lists:keyfind(Idx, 2, AllIndices),
Nth
end || {Idx, Owner} <- PreferredClaim,
Owner =:= Node],
Offsets = lists:seq(0, RingSize div length(PreferredNth)),
AllNth = lists:sublist([(X+Y) rem RingSize || Y <- Offsets,
X <- PreferredNth],
RingSize),
Indices = [lists:keyfind(Nth, 1, AllIndices) || Nth <- AllNth]
end,
%% Filter out indices that conflict with the node's existing ownership
Indices2 = prefilter_violations(Ring, Node, AllIndices, Indices,
TargetN, RingSize),
%% Claim indices from the remaining candidate set
Claim = select_indices(Owners, Deltas, Indices2, TargetN, RingSize),
Claim2 = lists:sublist(Claim, Want),
NewRing = lists:foldl(fun(Idx, Ring0) ->
riak_core_ring:transfer_node(Idx, Node, Ring0)
end, Ring, Claim2),
RingChanged = ([] /= Claim2),
RingMeetsTargetN = meets_target_n(NewRing, TargetN),
case {RingChanged, EnoughNodes, RingMeetsTargetN} of
{false, _, _} ->
%% Unable to claim, fallback to re-diagonalization
sequential_claim(Ring, Node, TargetN);
{_, true, false} ->
%% Failed to meet target_n, fallback to re-diagonalization
sequential_claim(Ring, Node, TargetN);
_ ->
NewRing
end.
%% @private for each node in owners return a tuple of owner and delta
%% where delta is an integer that expresses how many nodes the owner
%% needs it's ownership to change by. A positive means the owner needs
%% that many more partitions, a negative means the owner can lose that
%% many paritions.
-spec get_deltas(RingSize::pos_integer(),
NodeCount::pos_integer(),
Owners::[{Index::non_neg_integer(), node()}],
Counts::[{node(), non_neg_integer()}]) ->
Deltas::[{node(), integer()}].
get_deltas(RingSize, NodeCount, Owners, Counts) ->
Avg = RingSize / NodeCount,
%% the most any node should own
Max = ceiling(RingSize / NodeCount),
ActiveDeltas = [{Member, Count, normalise_delta(Avg - Count)}
|| {Member, Count} <- Counts],
BalancedDeltas = rebalance_deltas(ActiveDeltas, Max, RingSize),
add_default_deltas(Owners, BalancedDeltas, 0).
%% @private a node can only claim whole partitions, but if RingSize
%% rem NodeCount /= 0, a delta will be a float. This function decides
%% if that float should be floored or ceilinged
-spec normalise_delta(float()) -> integer().
normalise_delta(Delta) when Delta < 0 ->
%% if the node has too many (a negative delta) give up the most
%% you can (will be rebalanced)
ceiling(abs(Delta)) * -1;
normalise_delta(Delta) ->
%% if the node wants partitions, ask for the fewest for least
%% movement
trunc(Delta).
%% @private so that we don't end up with an imbalanced ring where one
%% node has more vnodes than it should (e.g. [{n1, 6}, {n2, 6}, {n3,
%% 6}, {n4, 8}, {n5,6} we rebalance the deltas so that select_indices
%% doesn't leave some node not giving up enough partitions
-spec rebalance_deltas([{node(), integer()}],
pos_integer(),
pos_integer())
-> [{node(), integer()}].
rebalance_deltas(NodeDeltas, Max, RingSize) ->
AppliedDeltas = [Own + Delta || {_, Own, Delta} <- NodeDeltas],
case lists:sum(AppliedDeltas) - RingSize of
0 ->
[{Node, Delta} || {Node, _Cnt, Delta} <- NodeDeltas];
N when N < 0 ->
increase_keeps(NodeDeltas, N, Max, [])
end.
%% @private increases the delta for (some) nodes giving away
%% partitions to the max they can keep
-spec increase_keeps(Deltas::[{node(), integer()}],
WantsError::integer(),
Max::pos_integer(),
Acc::[{node(), integer()}]) ->
Rebalanced::[{node(), integer()}].
increase_keeps(Rest, 0, _Max, Acc) ->
[{Node, Delta} || {Node, _Own, Delta} <- lists:usort(lists:append(Rest, Acc))];
increase_keeps([], N, Max, Acc) when N < 0 ->
increase_takes(lists:reverse(Acc), N, Max, []);
increase_keeps([{Node, Own, Delta} | Rest], N, Max, Acc) when Delta < 0 ->
WouldOwn = Own + Delta,
Additive = case WouldOwn +1 =< Max of
true -> 1;
false -> 0
end,
increase_keeps(Rest, N+Additive, Max, [{Node, Own+Delta+Additive} | Acc]);
increase_keeps([NodeDelta | Rest], N, Max, Acc) ->
increase_keeps(Rest, N, Max, [NodeDelta | Acc]).
%% @private increases the delta for (some) nodes taking partitions to the max
%% they can ask for
-spec increase_takes(Deltas::[{node(), integer()}],
WantsError::integer(),
Max::pos_integer(),
Acc::[{node(), integer()}]) ->
Rebalanced::[{node(), integer()}].
increase_takes(Rest, 0, _Max, Acc) ->
[{Node, Delta} || {Node, _Own, Delta} <- lists:usort(lists:append(Rest, Acc))];
increase_takes([], N, _Max, Acc) when N < 0 ->
[{Node, Delta} || {Node, _Own, Delta} <- lists:usort(Acc)];
increase_takes([{Node, Own, Delta} | Rest], N, Max, Acc) when Delta > 0 ->
WouldOwn = Own + Delta,
Additive = case WouldOwn +1 =< Max of
true -> 1;
false -> 0
end,
increase_takes(Rest, N+Additive, Max, [{Node, Own, Delta+Additive} | Acc]);
increase_takes([NodeDelta | Rest], N, Max, Acc) ->
increase_takes(Rest, N, Max, [NodeDelta | Acc]).
meets_target_n(Ring, TargetN) ->
Owners = lists:keysort(1, riak_core_ring:all_owners(Ring)),
meets_target_n(Owners, TargetN, 0, [], []).
meets_target_n([{Part, Node}|Rest], TargetN, Index, First, Last) ->
case lists:keytake(Node, 1, Last) of
{value, {Node, LastIndex, _}, NewLast} ->
if Index-LastIndex >= TargetN ->
%% node repeat respects TargetN
meets_target_n(Rest, TargetN, Index+1, First,
[{Node, Index, Part}|NewLast]);
true ->
%% violation of TargetN
false
end;
false ->
%% haven't seen this node yet
meets_target_n(Rest, TargetN, Index+1,
[{Node, Index}|First], [{Node, Index, Part}|Last])
end;
meets_target_n([], TargetN, Index, First, Last) ->
%% start through end guarantees TargetN
%% compute violations at wrap around, but don't fail
%% because of them: handle during reclaim
Violations =
lists:filter(fun({Node, L, _}) ->
{Node, F} = proplists:lookup(Node, First),
(Index-L)+F < TargetN
end,
Last),
{true, [ Part || {_, _, Part} <- Violations ]}.
%% Claim diversify tries to build a perfectly diverse ownership list that meets
%% target N. It uses wants to work out which nodes want partitions, but does
%% not honor the counts currently. The algorithm incrementally builds the ownership
%% list, updating the adjacency matrix needed to compute the diversity score as each
%% node is added and uses it to drive the selection of the next nodes.
claim_diversify(Wants, Owners, Params) ->
TN = proplists:get_value(target_n_val, Params, ?DEF_TARGET_N),
Q = length(Owners),
Claiming = [N || {N, W} <- Wants, W > 0],
{ok, NewOwners, _AM} = riak_core_claim_util:construct(
riak_core_claim_util:gen_complete_len(Q), Claiming, TN),
{NewOwners, [diversified]}.
%% Claim nodes in seq a,b,c,a,b,c trying to handle the wraparound
%% case to meet target N
claim_diagonal(Wants, Owners, Params) ->
TN = proplists:get_value(target_n_val, Params, ?DEF_TARGET_N),
Claiming = lists:sort([N || {N, W} <- Wants, W > 0]),
S = length(Claiming),
Q = length(Owners),
Reps = Q div S,
%% Handle the ring wrapround case. If possible try to pick nodes
%% that are not within the first TN of Claiming, if enough nodes
%% are available.
Tail = Q - Reps * S,
Last = case S >= TN + Tail of
true -> % If number wanted can be filled excluding first TN nodes
lists:sublist(lists:nthtail(TN - Tail, Claiming), Tail);
_ ->
lists:sublist(Claiming, Tail)
end,
{lists:flatten([lists:duplicate(Reps, Claiming), Last]), [diagonalized]}.
%% @private fall back to diagonal striping vnodes across nodes in a
%% sequential round robin (eg n1 | n2 | n3 | n4 | n5 | n1 | n2 | n3
%% etc) However, different to `claim_rebalance_n', this function
%% attempts to eliminate tail violations (for example a ring that
%% starts/ends n1 | n2 | ...| n3 | n4 | n1)
-spec sequential_claim(riak_core_ring:riak_core_ring(),
node(),
integer()) ->
riak_core_ring:riak_core_ring().
sequential_claim(Ring, Node, TargetN) ->
Nodes = lists:usort([Node|riak_core_ring:claiming_members(Ring)]),
NodeCount = length(Nodes),
RingSize = riak_core_ring:num_partitions(Ring),
Overhang = RingSize rem NodeCount,
HasTailViolation = (Overhang > 0 andalso Overhang < TargetN),
Shortfall = TargetN - Overhang,
CompleteSequences = RingSize div NodeCount,
MaxFetchesPerSeq = NodeCount - TargetN,
MinFetchesPerSeq = ceiling(Shortfall / CompleteSequences),
CanSolveViolation = ((CompleteSequences * MaxFetchesPerSeq) >= Shortfall),
Zipped = case (HasTailViolation andalso CanSolveViolation) of
true->
Partitions = lists:sort([ I || {I, _} <- riak_core_ring:all_owners(Ring) ]),
Nodelist = solve_tail_violations(RingSize, Nodes, Shortfall, MinFetchesPerSeq),
lists:zip(Partitions, lists:flatten(Nodelist));
false ->
diagonal_stripe(Ring, Nodes)
end,
lists:foldl(fun({P, N}, Acc) ->
riak_core_ring:transfer_node(P, N, Acc)
end,
Ring,
Zipped).
%% @private every module has a ceiling function
-spec ceiling(float()) -> integer().
ceiling(F) ->
T = trunc(F),
case F - T == 0 of
true ->
T;
false ->
T + 1
end.
%% @private rem_fill increase the tail so that there is no wrap around
%% preflist violation, by taking a `Shortfall' number nodes from
%% earlier in the preflist
-spec solve_tail_violations(integer(), [node()], integer(), integer()) -> [node()].
solve_tail_violations(RingSize, Nodes, Shortfall, MinFetchesPerSeq) ->
StartingNode = (RingSize rem length(Nodes)) + 1,
build_nodelist(RingSize, Nodes, Shortfall, StartingNode, MinFetchesPerSeq, []).
%% @private build the node list by building tail to satisfy TargetN, then removing
%% the added nodes from earlier segments
-spec build_nodelist(integer(), [node()], integer(), integer(), integer(), [node()]) -> [node()].
build_nodelist(RingSize, Nodes, _Shortfall=0, _NodeCounter, _MinFetchesPerSeq, Acc) ->
%% Finished shuffling, backfill if required
ShuffledRing = lists:flatten(Acc),
backfill_ring(RingSize, Nodes,
(RingSize-length(ShuffledRing)) div (length(Nodes)), Acc);
build_nodelist(RingSize, Nodes, Shortfall, NodeCounter, MinFetchesPerSeq, _Acc=[]) ->
%% Build the tail with sufficient nodes to satisfy TargetN
NodeCount = length(Nodes),
LastSegLength = (RingSize rem NodeCount) + Shortfall,
NewSeq = lists:sublist(Nodes, 1, LastSegLength),
build_nodelist(RingSize, Nodes, Shortfall, NodeCounter, MinFetchesPerSeq, NewSeq);
build_nodelist(RingSize, Nodes, Shortfall, NodeCounter, MinFetchesPerSeq, Acc) ->
%% Build rest of list, subtracting minimum of MinFetchesPerSeq, Shortfall
%% or (NodeCount - NodeCounter) each time
NodeCount = length(Nodes),
NodesToRemove = min(min(MinFetchesPerSeq, Shortfall), NodeCount - NodeCounter),
RemovalList = lists:sublist(Nodes, NodeCounter, NodesToRemove),
NewSeq = lists:subtract(Nodes, RemovalList),
NewNodeCounter = NodeCounter + NodesToRemove,
build_nodelist(RingSize, Nodes, Shortfall - NodesToRemove, NewNodeCounter,
MinFetchesPerSeq, [ NewSeq | Acc]).
%% @private Backfill the ring with full sequences
-spec backfill_ring(integer(), [node()], integer(), [node()]) -> [node()].
backfill_ring(_RingSize, _Nodes, _Remaining=0, Acc) ->
Acc;
backfill_ring(RingSize, Nodes, Remaining, Acc) ->
backfill_ring(RingSize, Nodes, Remaining - 1, [Nodes | Acc]).
claim_rebalance_n(Ring, Node) ->
Nodes = lists:usort([Node|riak_core_ring:claiming_members(Ring)]),
Zipped = diagonal_stripe(Ring, Nodes),
lists:foldl(fun({P, N}, Acc) ->
riak_core_ring:transfer_node(P, N, Acc)
end,
Ring,
Zipped).
diagonal_stripe(Ring, Nodes) ->
%% diagonal stripes guarantee most disperse data
Partitions = lists:sort([ I || {I, _} <- riak_core_ring:all_owners(Ring) ]),
Zipped = lists:zip(Partitions,
lists:sublist(
lists:flatten(
lists:duplicate(
1+(length(Partitions) div length(Nodes)),
Nodes)),
1, length(Partitions))),
Zipped.
random_choose_claim(Ring) ->
random_choose_claim(Ring, node()).
random_choose_claim(Ring, Node) ->
random_choose_claim(Ring, Node, []).
random_choose_claim(Ring, Node, _Params) ->
riak_core_ring:transfer_node(riak_core_ring:random_other_index(Ring),
Node, Ring).
%% @spec never_wants_claim(riak_core_ring()) -> no
%% @doc For use by nodes that should not claim any partitions.
never_wants_claim(_) -> no.
never_wants_claim(_, _) -> no.
%% ===================================================================
%% Private
%% ===================================================================
%% @private
%%
%% @doc Determines indices that violate the given target_n spacing
%% property.
find_violations(Ring, TargetN) ->
Owners = riak_core_ring:all_owners(Ring),
Suffix = lists:sublist(Owners, TargetN-1),
Owners2 = Owners ++ Suffix,
%% Use a sliding window to determine violations
{Bad, _} = lists:foldl(fun(P={Idx, Owner}, {Out, Window}) ->
Window2 = lists:sublist([P|Window], TargetN-1),
case lists:keyfind(Owner, 2, Window) of
{PrevIdx, Owner} ->
{[[PrevIdx, Idx] | Out], Window2};
false ->
{Out, Window2}
end
end, {[], []}, Owners2),
lists:reverse(Bad).
%% @private
%%
%% @doc Counts up the number of partitions owned by each node.
-spec get_counts([node()], [{integer(), _}]) ->
[{node(), non_neg_integer()}].
get_counts(Nodes, Ring) ->
Empty = [{Node, 0} || Node <- Nodes],
Counts = lists:foldl(fun({_Idx, Node}, Counts) ->
case lists:member(Node, Nodes) of
true ->
dict:update_counter(Node, 1, Counts);
false ->
Counts
end
end, dict:from_list(Empty), Ring),
dict:to_list(Counts).
%% @private
add_default_deltas(IdxOwners, Deltas, Default) ->
{_, Owners} = lists:unzip(IdxOwners),
Owners2 = lists:usort(Owners),
Defaults = [{Member, Default} || Member <- Owners2],
lists:ukeysort(1, Deltas ++ Defaults).
%% @private
%%
%% @doc Filter out candidate indices that would violate target_n given
%% a node's current partition ownership.
prefilter_violations(Ring, Node, AllIndices, Indices, TargetN, RingSize) ->
CurrentIndices = riak_core_ring:indices(Ring, Node),
CurrentNth = [lists:keyfind(Idx, 2, AllIndices) || Idx <- CurrentIndices],
[{Nth, Idx} || {Nth, Idx} <- Indices,
lists:all(fun({CNth, _}) ->
spaced_by_n(CNth, Nth, TargetN, RingSize)
end, CurrentNth)].
%% @private
%%
%% @doc Select indices from a given candidate set, according to two
%% goals.
%%
%% 1. Ensure greedy/local target_n spacing between indices. Note that this
%% goal intentionally does not reject overall target_n violations.
%%
%% 2. Select indices based on the delta between current ownership and
%% expected ownership. In other words, if A owns 5 partitions and
%% the desired ownership is 3, then we try to claim at most 2 partitions
%% from A.
select_indices(_Owners, _Deltas, [], _TargetN, _RingSize) ->
[];
select_indices(Owners, Deltas, Indices, TargetN, RingSize) ->
OwnerDT = dict:from_list(Owners),
{FirstNth, _} = hd(Indices),
%% The `First' symbol indicates whether or not this is the first
%% partition to be claimed by this node. This assumes that the
%% node doesn't already own any partitions. In that case it is
%% _always_ safe to claim the first partition that another owner
%% is willing to part with. It's the subsequent partitions
%% claimed by this node that must not break the target_n invariant.
{Claim, _, _, _} =
lists:foldl(fun({Nth, Idx}, {Out, LastNth, DeltaDT, First}) ->
Owner = dict:fetch(Idx, OwnerDT),
Delta = dict:fetch(Owner, DeltaDT),
MeetsTN = spaced_by_n(LastNth, Nth, TargetN,
RingSize),
case (Delta < 0) and (First or MeetsTN) of
true ->
NextDeltaDT =
dict:update_counter(Owner, 1, DeltaDT),
{[Idx|Out], Nth, NextDeltaDT, false};
false ->
{Out, LastNth, DeltaDT, First}
end
end,
{[], FirstNth, dict:from_list(Deltas), true},
Indices),
lists:reverse(Claim).
%% @private
%%
%% @doc Determine if two positions in the ring meet target_n spacing.
spaced_by_n(NthA, NthB, TargetN, RingSize) ->
case NthA > NthB of
true ->
NFwd = NthA - NthB,
NBack = NthB - NthA + RingSize;
false ->
NFwd = NthA - NthB + RingSize,
NBack = NthB - NthA
end,
(NFwd >= TargetN) and (NBack >= TargetN).
%% For each node in wants, work out how many more partition each node wants (positive) or is
%% overloaded by (negative) compared to what it owns.
wants_owns_diff(Wants, Owns) ->
[ case lists:keyfind(N, 1, Owns) of
{N, O} ->
{N, W - O};
false ->
{N, W}
end || {N, W} <- Wants ].
%% Given a ring, work out how many partition each wants to be
%% considered balanced
wants(Ring) ->
Active = lists:sort(riak_core_ring:claiming_members(Ring)),
Inactive = riak_core_ring:all_members(Ring) -- Active,
Q = riak_core_ring:num_partitions(Ring),
ActiveWants = lists:zip(Active, wants_counts(length(Active), Q)),
InactiveWants = [ {N, 0} || N <- Inactive ],
lists:sort(ActiveWants ++ InactiveWants).
%% @private
%% Given a number of nodes and ring size, return a list of
%% desired ownership, S long that add up to Q
wants_counts(S, Q) ->
Max = roundup(Q / S),
case S * Max - Q of
0 ->
lists:duplicate(S, Max);
X ->
lists:duplicate(X, Max - 1) ++ lists:duplicate(S - X, Max)
end.
%% Round up to next whole integer - ceil
roundup(I) when I >= 0 ->
T = erlang:trunc(I),
case (I - T) of
Neg when Neg < 0 -> T;
Pos when Pos > 0 -> T + 1;
_ -> T
end.
%% ===================================================================
%% Unit tests
%% ===================================================================
-ifdef(TEST).
-compile(export_all).
-include_lib("eunit/include/eunit.hrl").
wants_claim_test() ->
riak_core_ring_manager:setup_ets(test),
riak_core_test_util:setup_mockring1(),
{ok, Ring} = riak_core_ring_manager:get_my_ring(),
?assertEqual({yes, 1}, default_wants_claim(Ring)),
riak_core_ring_manager:cleanup_ets(test),
riak_core_ring_manager:stop().
%% @private console helper function to return node lists for claiming
%% partitions
-spec gen_diag(pos_integer(), pos_integer()) -> [Node::atom()].
gen_diag(RingSize, NodeCount) ->
Nodes = [list_to_atom(lists:concat(["n_", N])) || N <- lists:seq(1, NodeCount)],
{HeadNode, RestNodes} = {hd(Nodes), tl(Nodes)},
R0 = riak_core_ring:fresh(RingSize, HeadNode),
RAdded = lists:foldl(fun(Node, Racc) ->
riak_core_ring:add_member(HeadNode, Racc, Node)
end,
R0, RestNodes),
Diag = diagonal_stripe(RAdded, Nodes),
{_P, N} = lists:unzip(Diag),
N.
%% @private call with result of gen_diag/1 only, does the list have
%% tail violations, returns true if so, false otherwise.
-spec has_violations([Node::atom()]) -> boolean().
has_violations(Diag) ->
RS = length(Diag),
NC = length(lists:usort(Diag)),
Overhang = RS rem NC,
(Overhang > 0 andalso Overhang < 4). %% hardcoded target n of 4
-ifdef(EQC).
-export([prop_claim_ensures_unique_nodes/1, prop_wants/0, prop_wants_counts/0, eqc_check/2]).
-include_lib("eqc/include/eqc.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) -> io:format(user, Str, Args) end, P)).
-define(POW_2(N), trunc(math:pow(2, N))).
eqc_check(File, Prop) ->
{ok, Bytes} = file:read_file(File),
CE = binary_to_term(Bytes),
eqc:check(Prop, CE).
test_nodes(Count) ->
[node() | [list_to_atom(lists:concat(["n_", N])) || N <- lists:seq(1, Count-1)]].
test_nodes(Count, StartNode) ->
[list_to_atom(lists:concat(["n_", N])) || N <- lists:seq(StartNode, StartNode + Count)].
property_claim_ensures_unique_nodes_v2_test_() ->
Prop = eqc:testing_time(30, ?QC_OUT(prop_claim_ensures_unique_nodes(choose_claim_v2))),
{timeout, 120, fun() -> ?assert(eqc:quickcheck(Prop)) end}.
property_claim_ensures_unique_nodes_adding_groups_v2_test_() ->
Prop = eqc:testing_time(30, ?QC_OUT(
prop_claim_ensures_unique_nodes_adding_groups(choose_claim_v2))),
{timeout, 120, fun() -> ?assert(eqc:quickcheck(Prop)) end}.
property_claim_ensures_unique_nodes_adding_singly_v2_test_() ->
Prop = eqc:testing_time(30, ?QC_OUT(
prop_claim_ensures_unique_nodes_adding_singly(choose_claim_v2))),
{timeout, 120, fun() -> ?assert(eqc:quickcheck(Prop)) end}.
prop_claim_ensures_unique_nodes(ChooseFun) ->
%% NOTE: We know that this doesn't work for the case of {_, 3}.
%% NOTE2: uses undocumented "double_shrink", is expensive, but should get
%% around those case where we shrink to a non-minimal case because
%% some intermediate combinations of ring_size/node have no violations
?FORALL({PartsPow, NodeCount}, eqc_gen:double_shrink({choose(4, 9), choose(4, 15)}),
begin
Nval = 3,
TNval = Nval + 1,
_Params = [{target_n_val, TNval}],
Partitions = ?POW_2(PartsPow),
[Node0 | RestNodes] = test_nodes(NodeCount),
R0 = riak_core_ring:fresh(Partitions, Node0),
RAdded = lists:foldl(fun(Node, Racc) ->
riak_core_ring:add_member(Node0, Racc, Node)
end, R0, RestNodes),
Rfinal = claim(RAdded, {?MODULE, wants_claim_v2}, {?MODULE, ChooseFun}),
Preflists = riak_core_ring:all_preflists(Rfinal, Nval),
ImperfectPLs = orddict:to_list(
lists:foldl(fun(PL, Acc) ->
PLNodes = lists:usort([N || {_, N} <- PL]),
case length(PLNodes) of
Nval ->
Acc;
_ ->
ordsets:add_element(PL, Acc)
end
end, [], Preflists)),
?WHENFAIL(
begin
io:format(user, "{Partitions, Nodes} {~p, ~p}~n",
[Partitions, NodeCount]),
io:format(user, "Owners: ~p~n",
[riak_core_ring:all_owners(Rfinal)])
end,
conjunction([{meets_target_n,
equals({true, []},
meets_target_n(Rfinal, TNval))},
{perfect_preflists, equals([], ImperfectPLs)},
{balanced_ring, balanced_ring(Partitions, NodeCount, Rfinal)}]))
end).
prop_claim_ensures_unique_nodes_adding_groups(ChooseFun) ->
%% NOTE: We know that this doesn't work for the case of {_, 3}.
%% NOTE2: uses undocumented "double_shrink", is expensive, but should get
%% around those case where we shrink to a non-minimal case because
%% some intermediate combinations of ring_size/node have no violations
?FORALL({PartsPow, BaseNodes, AddedNodes},
eqc_gen:double_shrink({choose(4, 9), choose(2, 10), choose(2, 5)}),
begin
Nval = 3,
TNval = Nval + 1,
_Params = [{target_n_val, TNval}],
Partitions = ?POW_2(PartsPow),
[Node0 | RestNodes] = test_nodes(BaseNodes),
AddNodes = test_nodes(AddedNodes-1, BaseNodes),
NodeCount = BaseNodes + AddedNodes,
%% io:format("Base: ~p~n",[[Node0 | RestNodes]]),
%% io:format("Added: ~p~n",[AddNodes]),
R0 = riak_core_ring:fresh(Partitions, Node0),
RBase = lists:foldl(fun(Node, Racc) ->
riak_core_ring:add_member(Node0, Racc, Node)
end, R0, RestNodes),
Rinterim = claim(RBase, {?MODULE, wants_claim_v2}, {?MODULE, ChooseFun}),
RAdded = lists:foldl(fun(Node, Racc) ->
riak_core_ring:add_member(Node0, Racc, Node)
end, Rinterim, AddNodes),
Rfinal = claim(RAdded, {?MODULE, wants_claim_v2}, {?MODULE, ChooseFun}),
Preflists = riak_core_ring:all_preflists(Rfinal, Nval),
ImperfectPLs = orddict:to_list(
lists:foldl(fun(PL, Acc) ->
PLNodes = lists:usort([N || {_, N} <- PL]),
case length(PLNodes) of
Nval ->
Acc;
_ ->
ordsets:add_element(PL, Acc)
end
end, [], Preflists)),
?WHENFAIL(
begin
io:format(user, "{Partitions, Nodes} {~p, ~p}~n",
[Partitions, NodeCount]),
io:format(user, "Owners: ~p~n",
[riak_core_ring:all_owners(Rfinal)])
end,
conjunction([{meets_target_n,
equals({true, []},
meets_target_n(Rfinal, TNval))},
{perfect_preflists, equals([], ImperfectPLs)},
{balanced_ring, balanced_ring(Partitions, NodeCount, Rfinal)}]))
end).
prop_claim_ensures_unique_nodes_adding_singly(ChooseFun) ->
%% NOTE: We know that this doesn't work for the case of {_, 3}.
%% NOTE2: uses undocumented "double_shrink", is expensive, but should get
%% around those case where we shrink to a non-minimal case because
%% some intermediate combinations of ring_size/node have no violations
?FORALL({PartsPow, NodeCount}, eqc_gen:double_shrink({choose(4, 9), choose(4, 15)}),
begin
Nval = 3,
TNval = Nval + 1,
Params = [{target_n_val, TNval}],
Partitions = ?POW_2(PartsPow),
[Node0 | RestNodes] = test_nodes(NodeCount),
R0 = riak_core_ring:fresh(Partitions, Node0),
Rfinal = lists:foldl(fun(Node, Racc) ->
Racc0 = riak_core_ring:add_member(Node0, Racc, Node),
%% TODO which is it? Claim or ChooseFun??
%%claim(Racc0, {?MODULE, wants_claim_v2},
%% {?MODULE, ChooseFun})
?MODULE:ChooseFun(Racc0, Node, Params)
end, R0, RestNodes),
Preflists = riak_core_ring:all_preflists(Rfinal, Nval),
ImperfectPLs = orddict:to_list(
lists:foldl(fun(PL, Acc) ->
PLNodes = lists:usort([N || {_, N} <- PL]),
case length(PLNodes) of
Nval ->
Acc;
_ ->
ordsets:add_element(PL, Acc)
end
end, [], Preflists)),
?WHENFAIL(
begin
io:format(user, "{Partitions, Nodes} {~p, ~p}~n",
[Partitions, NodeCount]),
io:format(user, "Owners: ~p~n",
[riak_core_ring:all_owners(Rfinal)])
end,
conjunction([{meets_target_n,
equals({true, []},
meets_target_n(Rfinal, TNval))},
{perfect_preflists, equals([], ImperfectPLs)},
{balanced_ring, balanced_ring(Partitions, NodeCount, Rfinal)}]))
end).
%% @private check that no node claims more than it should
-spec balanced_ring(RingSize::integer(), NodeCount::integer(),
riak_core_ring:riak_core_ring()) ->
boolean().
balanced_ring(RingSize, NodeCount, Ring) ->
TargetClaim = ceiling(RingSize / NodeCount),
MinClaim = RingSize div NodeCount,
AllOwners0 = riak_core_ring:all_owners(Ring),
AllOwners = lists:keysort(2, AllOwners0),
{BalancedMax, AccFinal} = lists:foldl(fun({_Part, Node}, {_Balanced, [{Node, Cnt} | Acc]})
when Cnt >= TargetClaim ->
{false, [{Node, Cnt+1} | Acc]};
({_Part, Node}, {Balanced, [{Node, Cnt} | Acc]}) ->
{Balanced, [{Node, Cnt+1} | Acc]};
({_Part, NewNode}, {Balanced, Acc}) ->
{Balanced, [{NewNode, 1} | Acc]}
end,
{true, []},
AllOwners),
BalancedMin = lists:all(fun({_Node, Cnt}) -> Cnt >= MinClaim end, AccFinal),
case BalancedMax andalso BalancedMin of
true ->
true;
false ->
{TargetClaim, MinClaim, lists:sort(AccFinal)}
end.
wants_counts_test() ->
?assert(eqc:quickcheck(?QC_OUT((prop_wants_counts())))).
prop_wants_counts() ->
?FORALL({S, Q}, {large_pos(100), large_pos(100000)},
begin
Wants = wants_counts(S, Q),
conjunction([{len, equals(S, length(Wants))},
{sum, equals(Q, lists:sum(Wants))}])
end).
wants_test() ->
?assert(eqc:quickcheck(?QC_OUT((prop_wants())))).
prop_wants() ->
?FORALL({NodeStatus, Q},
{?SUCHTHAT(L, non_empty(list(elements([leaving, joining]))),
lists:member(joining, L)),
?LET(X, choose(1, 16), trunc(math:pow(2, X)))},
begin
R0 = riak_core_ring:fresh(Q, tnode(1)),
{_, R2, Active} =
lists:foldl(
fun(S, {I, R1, A1}) ->
N = tnode(I),
case S of
joining ->
{I+1, riak_core_ring:add_member(N, R1, N), [N|A1]};
_ ->
{I+1, riak_core_ring:leave_member(N, R1, N), A1}
end
end, {1, R0, []}, NodeStatus),
Wants = wants(R2),
%% Check any non-claiming nodes are set to 0
%% Check all nodes are present
{ActiveWants, InactiveWants} =
lists:partition(fun({N, _W}) -> lists:member(N, Active) end, Wants),
ActiveSum = lists:sum([W || {_, W} <- ActiveWants]),
InactiveSum = lists:sum([W || {_, W} <- InactiveWants]),
?WHENFAIL(
begin
io:format(user, "NodeStatus: ~p\n", [NodeStatus]),
io:format(user, "Active: ~p\n", [Active]),
io:format(user, "Q: ~p\n", [Q]),
io:format(user, "Wants: ~p\n", [Wants]),
io:format(user, "ActiveWants: ~p\n", [ActiveWants]),
io:format(user, "InactiveWants: ~p\n", [InactiveWants])
end,
conjunction([{wants, equals(length(Wants), length(NodeStatus))},
{active, equals(Q, ActiveSum)},
{inactive, equals(0, InactiveSum)}]))
end).
%% Large positive integer between 1 and Max
large_pos(Max) ->
?LET(X, largeint(), 1 + (abs(X) rem Max)).
take_idxs_test() ->
?assert(eqc:quickcheck(?QC_OUT((prop_take_idxs())))).
prop_take_idxs() ->
?FORALL({OwnersSeed, CIdxsSeed, ExchangesSeed, TNSeed},
{non_empty(list(largeint())), % [OwnerSeed]
non_empty(list(largeint())), % [CIdxSeed]
non_empty(list({int(), int()})), % {GiveSeed, TakeSeed}
int()}, % TNSeed
begin
%% Generate Nis - duplicate owners seed to make sure Q > S
S = length(ExchangesSeed),
Dup = roundup(S / length(OwnersSeed)),
Owners = lists:flatten(
lists:duplicate(Dup,
[tnode(abs(OwnerSeed) rem S) ||
OwnerSeed <- OwnersSeed])),
Q = length(Owners),
TN = 1+abs(TNSeed),
Ownership0 = orddict:from_list([{tnode(I), []} || I <- lists:seq(0, S -1)]),
Ownership = lists:foldl(fun({I, O}, A) ->
orddict:append_list(O, [I], A)
end,
Ownership0,
lists:zip(lists:seq(0, Q-1), Owners)),
NIs = [{Node, undefined, Owned} || {Node, Owned} <- Ownership],
%% Generate claimable indices
CIdxs = ordsets:from_list([abs(Idx) rem Q || Idx <- CIdxsSeed]),
%% io:format(user, "ExchangesSeed (~p): ~p\n", [length(ExchangesSeed),
%% ExchangesSeed]),
%% io:format(user, "NIs (~p): ~p\n", [length(NIs), NIs]),
%% Generate exchanges
Exchanges = [{Node, % node name
abs(GiveSeed) rem (length(OIdxs) + 1), % maximum indices to give
abs(TakeSeed) rem (Q+1), % maximum indices to take
CIdxs} || % indices that can be claimed by node
{{Node, _Want, OIdxs}, {GiveSeed, TakeSeed}} <-
lists:zip(NIs, ExchangesSeed)],
%% Fire the test
NIs2 = take_idxs(Exchanges, NIs, Q, TN),
%% Check All nodes are still in NIs
%% Check that no node lost more than it wanted to give
?WHENFAIL(
begin
io:format(user, "Exchanges:\n~p\n", [Exchanges]),
io:format(user, "NIs:\n~p\n", [NIs]),
io:format(user, "NIs2:\n~p\n", [NIs2]),
io:format(user, "Q: ~p\nTN: ~p\n", [Q, TN])
end,
check_deltas(Exchanges, NIs, NIs2, Q, TN))
%% conjunction([{len, equals(length(NIs), length(NIs2))},
%% {delta, check_deltas(Exchanges, NIs, NIs2, Q, TN)}]))
end).
tnode(I) ->
list_to_atom("n" ++ integer_to_list(I)).
%% Check that no node gained more than it wanted to take
%% Check that none of the nodes took more partitions than allowed
%% Check that no nodes violate target N
check_deltas(Exchanges, Before, After, Q, TN) ->
conjunction(
lists:flatten(
[begin
Gave = length(OIdxs1 -- OIdxs2), % in original and not new
Took = length(OIdxs2 -- OIdxs1),
V1 = count_violations(OIdxs1, Q, TN),
V2 = count_violations(OIdxs2, Q, TN),
[{{give, Node, Gave, Give}, Gave =< Give},
{{take, Node, Took, Take}, Took =< Take},
{{valid, Node, V1, V2},
V2 == 0 orelse
V1 > 0 orelse % check no violations if there were not before
OIdxs1 == []}] % or the node held no indices so violation was impossible
end || {{Node, Give, Take, _CIdxs}, {Node, _Want1, OIdxs1}, {Node, _Want2, OIdxs2}} <-
lists:zip3(lists:sort(Exchanges), lists:sort(Before), lists:sort(After))])).
count_violations([], _Q, _TN) ->
0;
count_violations(Idxs, Q, TN) ->
SOIdxs = lists:sort(Idxs),
{_, Violations} = lists:foldl(
fun(This, {Last, Vs}) ->
case Last - This >= TN of
true ->
{This, Vs};
_ ->
{This, Vs + 1}
end
end, {Q + hd(SOIdxs), 0}, lists:reverse(SOIdxs)),
Violations.
-endif. % EQC
-endif. % TEST | src/riak_core_claim.erl | 0.634996 | 0.425367 | riak_core_claim.erl | starcoder |
%% ``The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with your Erlang distribution. If not, it can be
%% retrieved via the world wide web at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% The Initial Developer of the Original Code is Corelatus AB.
%% Portions created by Corelatus are Copyright 2003, Corelatus
%% AB. All Rights Reserved.''
%%
%% @doc Module to print out terms for logging. Limits by length rather than depth.
%%
%% The resulting string may be slightly larger than the limit; the intention
%% is to provide predictable CPU and memory consumption for formatting
%% terms, not produce precise string lengths.
%%
%% Typical use:
%%
%% trunc_io:print(Term, 500).
%%
%% Source license: Erlang Public License.
%% Original author: <NAME>, <tt><EMAIL></tt>
%%
%% Various changes to this module, most notably the format/3 implementation
%% were added by <NAME> `<<EMAIL>>'. The module has been renamed
%% to avoid conflicts with the vanilla module.
-module(lager_trunc_io).
-author('<EMAIL>').
%% And thanks to Chris Newcombe for a bug fix
-export([format/3, format/4, print/2, print/3, fprint/2, fprint/3, safe/2]). % interface functions
-version("$Id: trunc_io.erl,v 1.11 2009-02-23 12:01:06 matthias Exp $").
-ifdef(TEST).
-export([perf/0, perf/3, perf1/0, test/0, test/2]). % testing functions
-include_lib("eunit/include/eunit.hrl").
-endif.
-type option() :: {'depth', integer()}
| {'lists_as_strings', boolean()}
| {'force_strings', boolean()}.
-type options() :: [option()].
-record(print_options, {
%% negative depth means no depth limiting
depth = -1 :: integer(),
%% whether to print lists as strings, if possible
lists_as_strings = true :: boolean(),
%% force strings, or binaries to be printed as a string,
%% even if they're not printable
force_strings = false :: boolean()
}).
format(Fmt, Args, Max) ->
format(Fmt, Args, Max, []).
format(Fmt, Args, Max, Options) ->
try lager_format:format(Fmt, Args, Max, Options)
catch
_What:_Why ->
erlang:error(badarg, [Fmt, Args])
end.
%% @doc Returns an flattened list containing the ASCII representation of the given
%% term.
-spec fprint(term(), pos_integer()) -> string().
fprint(Term, Max) ->
fprint(Term, Max, []).
%% @doc Returns an flattened list containing the ASCII representation of the given
%% term.
-spec fprint(term(), pos_integer(), options()) -> string().
fprint(T, Max, Options) ->
{L, _} = print(T, Max, prepare_options(Options, #print_options{})),
lists:flatten(L).
%% @doc Same as print, but never crashes.
%%
%% This is a tradeoff. Print might conceivably crash if it's asked to
%% print something it doesn't understand, for example some new data
%% type in a future version of Erlang. If print crashes, we fall back
%% to io_lib to format the term, but then the formatting is
%% depth-limited instead of length limited, so you might run out
%% memory printing it. Out of the frying pan and into the fire.
%%
-spec safe(term(), pos_integer()) -> {string(), pos_integer()} | {string()}.
safe(What, Len) ->
case catch print(What, Len) of
{L, Used} when is_list(L) -> {L, Used};
_ -> {"unable to print" ++ io_lib:write(What, 99)}
end.
%% @doc Returns {List, Length}
-spec print(term(), pos_integer()) -> {iolist(), pos_integer()}.
print(Term, Max) ->
print(Term, Max, []).
%% @doc Returns {List, Length}
-spec print(term(), pos_integer(), options() | #print_options{}) -> {iolist(), pos_integer()}.
print(Term, Max, Options) when is_list(Options) ->
%% need to convert the proplist to a record
print(Term, Max, prepare_options(Options, #print_options{}));
print(Term, _Max, #print_options{force_strings=true}) when not is_list(Term), not is_binary(Term), not is_atom(Term) ->
erlang:error(badarg);
print(_, Max, _Options) when Max < 0 -> {"...", 3};
print(_, _, #print_options{depth=0}) -> {"...", 3};
print(Tuple, Max, Options) when is_tuple(Tuple) ->
{TC, Len} = tuple_contents(Tuple, Max-2, Options),
{[${, TC, $}], Len + 2};
%% @doc We assume atoms, floats, funs, integers, PIDs, ports and refs never need
%% to be truncated. This isn't strictly true, someone could make an
%% arbitrarily long bignum. Let's assume that won't happen unless someone
%% is being malicious.
%%
print(Atom, _Max, #print_options{force_strings=NoQuote}) when is_atom(Atom) ->
L = atom_to_list(Atom),
R = case atom_needs_quoting_start(L) andalso not NoQuote of
true -> lists:flatten([$', L, $']);
false -> L
end,
{R, length(R)};
print(<<>>, _Max, _Options) ->
{"<<>>", 4};
print(Binary, 0, _Options) when is_bitstring(Binary) ->
{"<<..>>", 6};
print(Binary, Max, Options) when is_binary(Binary) ->
B = binary_to_list(Binary, 1, lists:min([Max, byte_size(Binary)])),
{L, Len} = case Options#print_options.lists_as_strings orelse
Options#print_options.force_strings of
true ->
alist_start(B, Max-4, Options);
_ ->
list_body(B, Max-4, Options, false)
end,
{Res, Length} = case L of
[91, X, 93] ->
{X, Len - 2};
X ->
{X, Len}
end,
case Options#print_options.force_strings of
true ->
{Res, Length};
_ ->
{["<<", Res, ">>"], Length+4}
end;
%% bitstrings are binary's evil brother who doesn't end on an 8 bit boundary.
%% This makes printing them extremely annoying, so list_body/list_bodyc has
%% some magic for dealing with the output of bitstring_to_list, which returns
%% a list of integers (as expected) but with a trailing binary that represents
%% the remaining bits.
print(BitString, Max, Options) when is_bitstring(BitString) ->
case byte_size(BitString) > Max of
true ->
BL = binary_to_list(BitString, 1, Max);
_ ->
BL = erlang:bitstring_to_list(BitString)
end,
{X, Len0} = list_body(BL, Max - 4, Options, false),
{["<<", X, ">>"], Len0 + 4};
print(Float, _Max, _Options) when is_float(Float) ->
%% use the same function io_lib:format uses to print floats
%% float_to_list is way too verbose.
L = io_lib_format:fwrite_g(Float),
{L, length(L)};
print(Fun, Max, _Options) when is_function(Fun) ->
L = erlang:fun_to_list(Fun),
case length(L) > Max of
true ->
S = erlang:max(5, Max),
Res = string:substr(L, 1, S) ++ "..>",
{Res, length(Res)};
_ ->
{L, length(L)}
end;
print(Integer, _Max, _Options) when is_integer(Integer) ->
L = integer_to_list(Integer),
{L, length(L)};
print(Pid, _Max, _Options) when is_pid(Pid) ->
L = pid_to_list(Pid),
{L, length(L)};
print(Ref, _Max, _Options) when is_reference(Ref) ->
L = erlang:ref_to_list(Ref),
{L, length(L)};
print(Port, _Max, _Options) when is_port(Port) ->
L = erlang:port_to_list(Port),
{L, length(L)};
print(List, Max, Options) when is_list(List) ->
case Options#print_options.lists_as_strings orelse
Options#print_options.force_strings of
true ->
alist_start(List, Max, dec_depth(Options));
_ ->
{R, Len} = list_body(List, Max - 2, dec_depth(Options), false),
{[$[, R, $]], Len + 2}
end.
%% Returns {List, Length}
tuple_contents(Tuple, Max, Options) ->
L = tuple_to_list(Tuple),
list_body(L, Max, dec_depth(Options), true).
%% Format the inside of a list, i.e. do not add a leading [ or trailing ].
%% Returns {List, Length}
list_body([], _Max, _Options, _Tuple) -> {[], 0};
list_body(_, Max, _Options, _Tuple) when Max < 4 -> {"...", 3};
list_body(_, _Max, #print_options{depth=0}, _Tuple) -> {"...", 3};
list_body([B], _Max, _Options, _Tuple) when is_bitstring(B), not is_binary(B) ->
Size = bit_size(B),
<<Value:Size>> = B,
ValueStr = integer_to_list(Value),
SizeStr = integer_to_list(Size),
{[ValueStr, $:, SizeStr], length(ValueStr) + length(SizeStr) +1};
list_body([H|T], Max, Options, Tuple) ->
{List, Len} = print(H, Max, Options),
{Final, FLen} = list_bodyc(T, Max - Len, Options, Tuple),
{[List|Final], FLen + Len};
list_body(X, Max, Options, _Tuple) -> %% improper list
{List, Len} = print(X, Max - 1, Options),
{[$|,List], Len + 1}.
list_bodyc([], _Max, _Options, _Tuple) -> {[], 0};
list_bodyc(_, Max, _Options, _Tuple) when Max < 5 -> {",...", 4};
list_bodyc([B], _Max, _Options, _Tuple) when is_bitstring(B), not is_binary(B) ->
Size = bit_size(B),
<<Value:Size>> = B,
ValueStr = integer_to_list(Value),
SizeStr = integer_to_list(Size),
{[$, , ValueStr, $:, SizeStr], length(ValueStr) + length(SizeStr) +2};
list_bodyc([H|T], Max, #print_options{depth=Depth} = Options, Tuple) ->
{List, Len} = print(H, Max, dec_depth(Options)),
{Final, FLen} = list_bodyc(T, Max - Len - 1, Options, Tuple),
Sep = case Depth == 1 andalso not Tuple of
true -> $|;
_ -> $,
end,
{[Sep, List|Final], FLen + Len + 1};
list_bodyc(X, Max, Options, _Tuple) -> %% improper list
{List, Len} = print(X, Max - 1, Options),
{[$|,List], Len + 1}.
%% The head of a list we hope is ascii. Examples:
%%
%% [65,66,67] -> "ABC"
%% [65,0,67] -> "A"[0,67]
%% [0,65,66] -> [0,65,66]
%% [65,b,66] -> "A"[b,66]
%%
alist_start([], _Max, #print_options{force_strings=true}) -> {"", 0};
alist_start([], _Max, _Options) -> {"[]", 2};
alist_start(_, Max, _Options) when Max < 4 -> {"...", 3};
alist_start(_, _Max, #print_options{depth=0}) -> {"[...]", 5};
alist_start(L, Max, #print_options{force_strings=true} = Options) ->
alist(L, Max, Options);
alist_start([H|T], Max, Options) when is_integer(H), H >= 16#20, H =< 16#7e -> % definitely printable
try alist([H|T], Max -1, Options) of
{L, Len} ->
{[$"|L], Len + 1}
catch
throw:unprintable ->
{R, Len} = list_body([H|T], Max-2, Options, false),
{[$[, R, $]], Len + 2}
end;
alist_start([H|T], Max, Options) when H =:= $\t; H =:= $\n; H =:= $\r; H =:= $\v; H =:= $\e; H=:= $\f; H=:= $\b ->
try alist([H|T], Max -1, Options) of
{L, Len} ->
{[$"|L], Len + 1}
catch
throw:unprintable ->
{R, Len} = list_body([H|T], Max-2, Options, false),
{[$[, R, $]], Len + 2}
end;
alist_start(L, Max, Options) ->
{R, Len} = list_body(L, Max-2, Options, false),
{[$[, R, $]], Len + 2}.
alist([], _Max, #print_options{force_strings=true}) -> {"", 0};
alist([], _Max, _Options) -> {"\"", 1};
alist(_, Max, #print_options{force_strings=true}) when Max < 4 -> {"...", 3};
alist(_, Max, #print_options{force_strings=false}) when Max < 5 -> {"...\"", 4};
alist([H|T], Max, Options = #print_options{force_strings=false,lists_as_strings=true}) when H =:= $"; H =:= $\\ ->
%% preserve escaping around quotes
{L, Len} = alist(T, Max-1, Options),
{[$\\,H|L], Len + 2};
alist([H|T], Max, Options) when is_integer(H), H >= 16#20, H =< 16#7e -> % definitely printable
{L, Len} = alist(T, Max-1, Options),
{[H|L], Len + 1};
alist([H|T], Max, Options) when H =:= $\t; H =:= $\n; H =:= $\r; H =:= $\v; H =:= $\e; H=:= $\f; H=:= $\b ->
{L, Len} = alist(T, Max-1, Options),
case Options#print_options.force_strings of
true ->
{[H|L], Len + 1};
_ ->
{[escape(H)|L], Len + 1}
end;
alist([H|T], Max, #print_options{force_strings=true} = Options) when is_integer(H) ->
{L, Len} = alist(T, Max-1, Options),
{[H|L], Len + 1};
alist(_, _, #print_options{force_strings=true}) ->
erlang:error(badarg);
alist(_L, _Max, _Options) ->
throw(unprintable).
%% is the first character in the atom alphabetic & lowercase?
atom_needs_quoting_start([H|T]) when H >= $a, H =< $z ->
atom_needs_quoting(T);
atom_needs_quoting_start(_) ->
true.
atom_needs_quoting([]) ->
false;
atom_needs_quoting([H|T]) when (H >= $a andalso H =< $z);
(H >= $A andalso H =< $Z);
(H >= $0 andalso H =< $9);
H == $@; H == $_ ->
atom_needs_quoting(T);
atom_needs_quoting(_) ->
true.
-spec prepare_options(options(), #print_options{}) -> #print_options{}.
prepare_options([], Options) ->
Options;
prepare_options([{depth, Depth}|T], Options) when is_integer(Depth) ->
prepare_options(T, Options#print_options{depth=Depth});
prepare_options([{lists_as_strings, Bool}|T], Options) when is_boolean(Bool) ->
prepare_options(T, Options#print_options{lists_as_strings = Bool});
prepare_options([{force_strings, Bool}|T], Options) when is_boolean(Bool) ->
prepare_options(T, Options#print_options{force_strings = Bool}).
dec_depth(#print_options{depth=Depth} = Options) when Depth > 0 ->
Options#print_options{depth=Depth-1};
dec_depth(Options) ->
Options.
escape($\t) -> "\\t";
escape($\n) -> "\\n";
escape($\r) -> "\\r";
escape($\e) -> "\\e";
escape($\f) -> "\\f";
escape($\b) -> "\\b";
escape($\v) -> "\\v".
-ifdef(TEST).
%%--------------------
%% The start of a test suite. So far, it only checks for not crashing.
-spec test() -> ok.
test() ->
test(trunc_io, print).
-spec test(atom(), atom()) -> ok.
test(Mod, Func) ->
Simple_items = [atom, 1234, 1234.0, {tuple}, [], [list], "string", self(),
<<1,2,3>>, make_ref(), fun() -> ok end],
F = fun(A) ->
Mod:Func(A, 100),
Mod:Func(A, 2),
Mod:Func(A, 20)
end,
G = fun(A) ->
case catch F(A) of
{'EXIT', _} -> exit({failed, A});
_ -> ok
end
end,
lists:foreach(G, Simple_items),
Tuples = [ {1,2,3,a,b,c}, {"abc", def, 1234},
{{{{a},b,c,{d},e}},f}],
Lists = [ [1,2,3,4,5,6,7], lists:seq(1,1000),
[{a}, {a,b}, {a, [b,c]}, "def"], [a|b], [$a|$b] ],
lists:foreach(G, Tuples),
lists:foreach(G, Lists).
-spec perf() -> ok.
perf() ->
{New, _} = timer:tc(trunc_io, perf, [trunc_io, print, 1000]),
{Old, _} = timer:tc(trunc_io, perf, [io_lib, write, 1000]),
io:fwrite("New code took ~p us, old code ~p\n", [New, Old]).
-spec perf(atom(), atom(), integer()) -> done.
perf(M, F, Reps) when Reps > 0 ->
test(M,F),
perf(M,F,Reps-1);
perf(_,_,_) ->
done.
%% Performance test. Needs a particularly large term I saved as a binary...
-spec perf1() -> {non_neg_integer(), non_neg_integer()}.
perf1() ->
{ok, Bin} = file:read_file("bin"),
A = binary_to_term(Bin),
{N, _} = timer:tc(trunc_io, print, [A, 1500]),
{M, _} = timer:tc(io_lib, write, [A]),
{N, M}.
format_test() ->
%% simple format strings
?assertEqual("foobar", lists:flatten(format("~s", [["foo", $b, $a, $r]], 50))),
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~p", [["foo", $b, $a, $r]], 50))),
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~P", [["foo", $b, $a, $r], 10], 50))),
?assertEqual("[[102,111,111],98,97,114]", lists:flatten(format("~w", [["foo", $b, $a, $r]], 50))),
%% complex ones
?assertEqual(" foobar", lists:flatten(format("~10s", [["foo", $b, $a, $r]], 50))),
?assertEqual("f", lists:flatten(format("~1s", [["foo", $b, $a, $r]], 50))),
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~22p", [["foo", $b, $a, $r]], 50))),
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~22P", [["foo", $b, $a, $r], 10], 50))),
?assertEqual("**********", lists:flatten(format("~10W", [["foo", $b, $a, $r], 10], 50))),
?assertEqual("[[102,111,111],98,97,114]", lists:flatten(format("~25W", [["foo", $b, $a, $r], 10], 50))),
% Note these next two diverge from io_lib:format; the field width is
% ignored, when it should be used as max line length.
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~10p", [["foo", $b, $a, $r]], 50))),
?assertEqual("[\"foo\",98,97,114]", lists:flatten(format("~10P", [["foo", $b, $a, $r], 10], 50))),
ok.
atom_quoting_test() ->
?assertEqual("hello", lists:flatten(format("~p", [hello], 50))),
?assertEqual("'hello world'", lists:flatten(format("~p", ['hello world'], 50))),
?assertEqual("'Hello world'", lists:flatten(format("~p", ['Hello world'], 50))),
?assertEqual("hello_world", lists:flatten(format("~p", ['hello_world'], 50))),
?assertEqual("'node@127.0.0.1'", lists:flatten(format("~p", ['node@127.0.0.1'], 50))),
?assertEqual("node@nohost", lists:flatten(format("~p", [node@nohost], 50))),
?assertEqual("abc123", lists:flatten(format("~p", [abc123], 50))),
ok.
sane_float_printing_test() ->
?assertEqual("1.0", lists:flatten(format("~p", [1.0], 50))),
?assertEqual("1.23456789", lists:flatten(format("~p", [1.23456789], 50))),
?assertEqual("1.23456789", lists:flatten(format("~p", [1.234567890], 50))),
?assertEqual("0.3333333333333333", lists:flatten(format("~p", [1/3], 50))),
?assertEqual("0.1234567", lists:flatten(format("~p", [0.1234567], 50))),
ok.
float_inside_list_test() ->
?assertEqual("[97,38.233913133184835,99]", lists:flatten(format("~p", [[$a, 38.233913133184835, $c]], 50))),
?assertError(badarg, lists:flatten(format("~s", [[$a, 38.233913133184835, $c]], 50))),
ok.
quote_strip_test() ->
?assertEqual("\"hello\"", lists:flatten(format("~p", ["hello"], 50))),
?assertEqual("hello", lists:flatten(format("~s", ["hello"], 50))),
?assertEqual("hello", lists:flatten(format("~s", [hello], 50))),
?assertEqual("hello", lists:flatten(format("~p", [hello], 50))),
?assertEqual("'hello world'", lists:flatten(format("~p", ['hello world'], 50))),
?assertEqual("hello world", lists:flatten(format("~s", ['hello world'], 50))),
ok.
binary_printing_test() ->
?assertEqual("<<>>", lists:flatten(format("~p", [<<>>], 50))),
?assertEqual("<<..>>", lists:flatten(format("~p", [<<"hi">>], 0))),
?assertEqual("<<...>>", lists:flatten(format("~p", [<<"hi">>], 1))),
?assertEqual("<<\"hello\">>", lists:flatten(format("~p", [<<$h, $e, $l, $l, $o>>], 50))),
?assertEqual("<<\"hello\">>", lists:flatten(format("~p", [<<"hello">>], 50))),
?assertEqual("<<104,101,108,108,111>>", lists:flatten(format("~w", [<<"hello">>], 50))),
?assertEqual("<<1,2,3,4>>", lists:flatten(format("~p", [<<1, 2, 3, 4>>], 50))),
?assertEqual([1,2,3,4], lists:flatten(format("~s", [<<1, 2, 3, 4>>], 50))),
?assertEqual("hello", lists:flatten(format("~s", [<<"hello">>], 50))),
?assertEqual("hello\nworld", lists:flatten(format("~s", [<<"hello\nworld">>], 50))),
?assertEqual("<<\"hello\\nworld\">>", lists:flatten(format("~p", [<<"hello\nworld">>], 50))),
?assertEqual("<<\"\\\"hello world\\\"\">>", lists:flatten(format("~p", [<<"\"hello world\"">>], 50))),
?assertEqual("<<\"hello\\\\world\">>", lists:flatten(format("~p", [<<"hello\\world">>], 50))),
?assertEqual("<<\"hello\\\\\world\">>", lists:flatten(format("~p", [<<"hello\\\world">>], 50))),
?assertEqual("<<\"hello\\\\\\\\world\">>", lists:flatten(format("~p", [<<"hello\\\\world">>], 50))),
?assertEqual("<<\"hello\\bworld\">>", lists:flatten(format("~p", [<<"hello\bworld">>], 50))),
?assertEqual("<<\"hello\\tworld\">>", lists:flatten(format("~p", [<<"hello\tworld">>], 50))),
?assertEqual("<<\"hello\\nworld\">>", lists:flatten(format("~p", [<<"hello\nworld">>], 50))),
?assertEqual("<<\"hello\\rworld\">>", lists:flatten(format("~p", [<<"hello\rworld">>], 50))),
?assertEqual("<<\"hello\\eworld\">>", lists:flatten(format("~p", [<<"hello\eworld">>], 50))),
?assertEqual("<<\"hello\\fworld\">>", lists:flatten(format("~p", [<<"hello\fworld">>], 50))),
?assertEqual("<<\"hello\\vworld\">>", lists:flatten(format("~p", [<<"hello\vworld">>], 50))),
?assertEqual(" hello", lists:flatten(format("~10s", [<<"hello">>], 50))),
ok.
bitstring_printing_test() ->
?assertEqual("<<1,2,3,1:7>>", lists:flatten(format("~p",
[<<1, 2, 3, 1:7>>], 100))),
?assertEqual("<<1:7>>", lists:flatten(format("~p",
[<<1:7>>], 100))),
?assertEqual("<<1,2,3,...>>", lists:flatten(format("~p",
[<<1, 2, 3, 1:7>>], 12))),
?assertEqual("<<1,2,3,...>>", lists:flatten(format("~p",
[<<1, 2, 3, 1:7>>], 13))),
?assertEqual("<<1,2,3,1:7>>", lists:flatten(format("~p",
[<<1, 2, 3, 1:7>>], 14))),
?assertEqual("<<..>>", lists:flatten(format("~p", [<<1:7>>], 0))),
?assertEqual("<<...>>", lists:flatten(format("~p", [<<1:7>>], 1))),
?assertEqual("[<<1>>,<<2>>]", lists:flatten(format("~p", [[<<1>>, <<2>>]],
100))),
ok.
list_printing_test() ->
?assertEqual("[]", lists:flatten(format("~p", [[]], 50))),
?assertEqual("[]", lists:flatten(format("~w", [[]], 50))),
?assertEqual("", lists:flatten(format("~s", [[]], 50))),
?assertEqual("...", lists:flatten(format("~s", [[]], -1))),
?assertEqual("[[]]", lists:flatten(format("~p", [[[]]], 50))),
?assertEqual("[13,11,10,8,5,4]", lists:flatten(format("~p", [[13,11,10,8,5,4]], 50))),
?assertEqual("\"\\rabc\"", lists:flatten(format("~p", [[13,$a, $b, $c]], 50))),
?assertEqual("[1,2,3|4]", lists:flatten(format("~p", [[1, 2, 3|4]], 50))),
?assertEqual("[...]", lists:flatten(format("~p", [[1, 2, 3,4]], 4))),
?assertEqual("[1,...]", lists:flatten(format("~p", [[1, 2, 3, 4]], 6))),
?assertEqual("[1,...]", lists:flatten(format("~p", [[1, 2, 3, 4]], 7))),
?assertEqual("[1,2,...]", lists:flatten(format("~p", [[1, 2, 3, 4]], 8))),
?assertEqual("[1|4]", lists:flatten(format("~p", [[1|4]], 50))),
?assertEqual("[1]", lists:flatten(format("~p", [[1]], 50))),
?assertError(badarg, lists:flatten(format("~s", [[1|4]], 50))),
?assertEqual("\"hello...\"", lists:flatten(format("~p", ["hello world"], 10))),
?assertEqual("hello w...", lists:flatten(format("~s", ["hello world"], 10))),
?assertEqual("hello world\r\n", lists:flatten(format("~s", ["hello world\r\n"], 50))),
?assertEqual("\rhello world\r\n", lists:flatten(format("~s", ["\rhello world\r\n"], 50))),
?assertEqual("\"\\rhello world\\r\\n\"", lists:flatten(format("~p", ["\rhello world\r\n"], 50))),
?assertEqual("[13,104,101,108,108,111,32,119,111,114,108,100,13,10]", lists:flatten(format("~w", ["\rhello world\r\n"], 60))),
?assertEqual("...", lists:flatten(format("~s", ["\rhello world\r\n"], 3))),
?assertEqual("[22835963083295358096932575511191922182123945984,...]",
lists:flatten(format("~p", [
[22835963083295358096932575511191922182123945984,
22835963083295358096932575511191922182123945984]], 9))),
?assertEqual("[22835963083295358096932575511191922182123945984,...]",
lists:flatten(format("~p", [
[22835963083295358096932575511191922182123945984,
22835963083295358096932575511191922182123945984]], 53))),
ok.
tuple_printing_test() ->
?assertEqual("{}", lists:flatten(format("~p", [{}], 50))),
?assertEqual("{}", lists:flatten(format("~w", [{}], 50))),
?assertError(badarg, lists:flatten(format("~s", [{}], 50))),
?assertEqual("{...}", lists:flatten(format("~p", [{foo}], 1))),
?assertEqual("{...}", lists:flatten(format("~p", [{foo}], 2))),
?assertEqual("{...}", lists:flatten(format("~p", [{foo}], 3))),
?assertEqual("{...}", lists:flatten(format("~p", [{foo}], 4))),
?assertEqual("{...}", lists:flatten(format("~p", [{foo}], 5))),
?assertEqual("{foo,...}", lists:flatten(format("~p", [{foo,bar}], 6))),
?assertEqual("{foo,...}", lists:flatten(format("~p", [{foo,bar}], 7))),
?assertEqual("{foo,...}", lists:flatten(format("~p", [{foo,bar}], 9))),
?assertEqual("{foo,bar}", lists:flatten(format("~p", [{foo,bar}], 10))),
?assertEqual("{22835963083295358096932575511191922182123945984,...}",
lists:flatten(format("~w", [
{22835963083295358096932575511191922182123945984,
22835963083295358096932575511191922182123945984}], 10))),
?assertEqual("{22835963083295358096932575511191922182123945984,...}",
lists:flatten(format("~w", [
{22835963083295358096932575511191922182123945984,
bar}], 10))),
?assertEqual("{22835963083295358096932575511191922182123945984,...}",
lists:flatten(format("~w", [
{22835963083295358096932575511191922182123945984,
22835963083295358096932575511191922182123945984}], 53))),
ok.
unicode_test() ->
?assertEqual([231,167,129], lists:flatten(format("~s", [<<231,167,129>>], 50))),
?assertEqual([31169], lists:flatten(format("~ts", [<<231,167,129>>], 50))),
ok.
depth_limit_test() ->
?assertEqual("{...}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 1], 50))),
?assertEqual("{a,...}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 2], 50))),
?assertEqual("{a,[...]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 3], 50))),
?assertEqual("{a,[b|...]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 4], 50))),
?assertEqual("{a,[b,[...]]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 5], 50))),
?assertEqual("{a,[b,[c|...]]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 6], 50))),
?assertEqual("{a,[b,[c,[...]]]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 7], 50))),
?assertEqual("{a,[b,[c,[d]]]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 8], 50))),
?assertEqual("{a,[b,[c,[d]]]}", lists:flatten(format("~P", [{a, [b, [c, [d]]]}, 9], 50))),
?assertEqual("{a,{...}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 3], 50))),
?assertEqual("{a,{b,...}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 4], 50))),
?assertEqual("{a,{b,{...}}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 5], 50))),
?assertEqual("{a,{b,{c,...}}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 6], 50))),
?assertEqual("{a,{b,{c,{...}}}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 7], 50))),
?assertEqual("{a,{b,{c,{d}}}}", lists:flatten(format("~P", [{a, {b, {c, {d}}}}, 8], 50))),
?assertEqual("{\"a\",[...]}", lists:flatten(format("~P", [{"a", ["b", ["c", ["d"]]]}, 3], 50))),
?assertEqual("{\"a\",[\"b\",[[...]|...]]}", lists:flatten(format("~P", [{"a", ["b", ["c", ["d"]]]}, 6], 50))),
?assertEqual("{\"a\",[\"b\",[\"c\",[\"d\"]]]}", lists:flatten(format("~P", [{"a", ["b", ["c", ["d"]]]}, 9], 50))),
ok.
-endif. | src/lager_trunc_io.erl | 0.510985 | 0.403508 | lager_trunc_io.erl | starcoder |
%% Copyright 2018 Erlio GmbH Basel Switzerland (http://erl.io)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_schema).
-export([translate_listeners/1,
string_to_secs/1]).
translate_listeners(Conf) ->
%% cuttlefish messes up with the tree-like configuration style if
%% it cannot find either configured values or defaults in the
%% more specific leafs of the tree. That's why we always provide
%% a default value and take care of them by ourselfs.
InfIntVal = fun(Name, Val1, Def) ->
case Val1 of
infinity -> infinity;
undefined -> Def;
-1 -> Def;
Int when is_integer(Int) -> Int;
_ -> cuttlefish:invalid(Name ++ " should be an integer")
end
end,
MPVal = fun(Name, Val2, Def) -> case Val2 of
"off" -> "";
"" -> Def;
S when is_list(S) -> S;
_ -> cuttlefish:invalid(Name ++ "should be a string")
end
end,
StrVal = fun(_, "", Def) -> Def;
(_, S, _) when is_list(S) -> S;
(_, undefined, Def) -> Def end,
BoolVal = fun(_, B, _) when is_boolean(B) -> B;
(_, undefined, Def) -> Def end,
AtomVal = fun(_, A, _) when is_atom(A) -> A;
(_, undefined, Def) -> Def end,
IntVal = fun(_, I, _) when is_integer(I) -> I;
(_, undefined, Def) -> Def end,
%% A value looking like "[3,4]" or "[3, 4]" or "3,4"
StringIntegerListVal =
fun(_, undefined, Def) -> Def;
(_, Val, _) ->
{ok, T, _}
= case re:run(Val, "\\[.*\\]", []) of
nomatch ->
erl_scan:string("[" ++ Val ++ "].");
{match, _} ->
erl_scan:string(Val ++ ".")
end,
{ok, Term} = erl_parse:parse_term(T),
Term
end,
MZip = fun([H|_] = ListOfLists) ->
Size = length(H), %% get default size
ListOfLists = [L || L <- ListOfLists, length(L) == Size],
[
lists:reverse(
lists:foldl(
fun(L, Acc) ->
[lists:nth(I, L)|Acc]
end, [], ListOfLists))
|| I <- lists:seq(1, Size)]
end,
{TCPIPs, TCPMaxConns} = lists:unzip(extract("listener.tcp", "max_connections", InfIntVal, Conf)),
{SSLIPs, SSLMaxConns} = lists:unzip(extract("listener.ssl", "max_connections", InfIntVal, Conf)),
{WSIPs, WSMaxConns} = lists:unzip(extract("listener.ws", "max_connections", InfIntVal, Conf)),
{WS_SSLIPs, WS_SSLMaxConns} = lists:unzip(extract("listener.wss", "max_connections", InfIntVal, Conf)),
{VMQIPs, VMQMaxConns} = lists:unzip(extract("listener.vmq", "max_connections", InfIntVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLMaxConns} = lists:unzip(extract("listener.vmqs", "max_connections", InfIntVal, Conf)),
{HTTPIPs, HTTPMaxConns} = lists:unzip(extract("listener.http", "max_connections", InfIntVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLMaxConns} = lists:unzip(extract("listener.https", "max_connections", InfIntVal, Conf)),
{TCPIPs, TCPNrOfAcceptors} = lists:unzip(extract("listener.tcp", "nr_of_acceptors", InfIntVal, Conf)),
{SSLIPs, SSLNrOfAcceptors} = lists:unzip(extract("listener.ssl", "nr_of_acceptors", InfIntVal, Conf)),
{WSIPs, WSNrOfAcceptors} = lists:unzip(extract("listener.ws", "nr_of_acceptors", InfIntVal, Conf)),
{WS_SSLIPs, WS_SSLNrOfAcceptors} = lists:unzip(extract("listener.wss", "nr_of_acceptors", InfIntVal, Conf)),
{VMQIPs, VMQNrOfAcceptors} = lists:unzip(extract("listener.vmq", "nr_of_acceptors", InfIntVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLNrOfAcceptors} = lists:unzip(extract("listener.vmqs", "nr_of_acceptors", InfIntVal, Conf)),
{HTTPIPs, HTTPNrOfAcceptors} = lists:unzip(extract("listener.http", "nr_of_acceptors", InfIntVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLNrOfAcceptors} = lists:unzip(extract("listener.https", "nr_of_acceptors", InfIntVal, Conf)),
{TCPIPs, TCPMountPoint} = lists:unzip(extract("listener.tcp", "mountpoint", MPVal, Conf)),
{SSLIPs, SSLMountPoint} = lists:unzip(extract("listener.ssl", "mountpoint", MPVal, Conf)),
{WSIPs, WSMountPoint} = lists:unzip(extract("listener.ws", "mountpoint", MPVal, Conf)),
{WS_SSLIPs, WS_SSLMountPoint} = lists:unzip(extract("listener.wss", "mountpoint", MPVal, Conf)),
{VMQIPs, VMQMountPoint} = lists:unzip(extract("listener.vmq", "mountpoint", MPVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLMountPoint} = lists:unzip(extract("listener.vmqs", "mountpoint", MPVal, Conf)),
{TCPIPs, TCPProxyProto} = lists:unzip(extract("listener.tcp", "proxy_protocol", BoolVal, Conf)),
{WSIPs, WSProxyProto} = lists:unzip(extract("listener.ws", "proxy_protocol", BoolVal, Conf)),
{HTTPIPs, HTTPProxyProto} = lists:unzip(extract("listener.http", "proxy_protocol", BoolVal, Conf)),
{TCPIPs, TCPAllowedProto} = lists:unzip(extract("listener.tcp", "allowed_protocol_versions", StringIntegerListVal, Conf)),
{SSLIPs, SSLAllowedProto} = lists:unzip(extract("listener.ssl", "allowed_protocol_versions", StringIntegerListVal, Conf)),
{WSIPs, WSAllowedProto} = lists:unzip(extract("listener.ws", "allowed_protocol_versions", StringIntegerListVal, Conf)),
{WS_SSLIPs, WS_SSLAllowedProto} = lists:unzip(extract("listener.wss", "allowed_protocol_versions", StringIntegerListVal, Conf)),
{HTTPIPs, HTTPConfigMod} = lists:unzip(extract("listener.http", "config_mod", AtomVal, Conf)),
{HTTPIPs, HTTPConfigFun} = lists:unzip(extract("listener.http", "config_fun", AtomVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLConfigMod} = lists:unzip(extract("listener.https", "config_mod", AtomVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLConfigFun} = lists:unzip(extract("listener.https", "config_fun", AtomVal, Conf)),
% SSL
{SSLIPs, SSLCAFiles} = lists:unzip(extract("listener.ssl", "cafile", StrVal, Conf)),
{SSLIPs, SSLDepths} = lists:unzip(extract("listener.ssl", "depth", IntVal, Conf)),
{SSLIPs, SSLCertFiles} = lists:unzip(extract("listener.ssl", "certfile", StrVal, Conf)),
{SSLIPs, SSLCiphers} = lists:unzip(extract("listener.ssl", "ciphers", StrVal, Conf)),
{SSLIPs, SSLCrlFiles} = lists:unzip(extract("listener.ssl", "crlfile", StrVal, Conf)),
{SSLIPs, SSLKeyFiles} = lists:unzip(extract("listener.ssl", "keyfile", StrVal, Conf)),
{SSLIPs, SSLRequireCerts} = lists:unzip(extract("listener.ssl", "require_certificate", BoolVal, Conf)),
{SSLIPs, SSLVersions} = lists:unzip(extract("listener.ssl", "tls_version", AtomVal, Conf)),
{SSLIPs, SSLUseIdents} = lists:unzip(extract("listener.ssl", "use_identity_as_username", BoolVal, Conf)),
% WSS
{WS_SSLIPs, WS_SSLCAFiles} = lists:unzip(extract("listener.wss", "cafile", StrVal, Conf)),
{WS_SSLIPs, WS_SSLDepths} = lists:unzip(extract("listener.wss", "depth", IntVal, Conf)),
{WS_SSLIPs, WS_SSLCertFiles} = lists:unzip(extract("listener.wss", "certfile", StrVal, Conf)),
{WS_SSLIPs, WS_SSLCiphers} = lists:unzip(extract("listener.wss", "ciphers", StrVal, Conf)),
{WS_SSLIPs, WS_SSLCrlFiles} = lists:unzip(extract("listener.wss", "crlfile", StrVal, Conf)),
{WS_SSLIPs, WS_SSLKeyFiles} = lists:unzip(extract("listener.wss", "keyfile", StrVal, Conf)),
{WS_SSLIPs, WS_SSLRequireCerts} = lists:unzip(extract("listener.wss", "require_certificate", BoolVal, Conf)),
{WS_SSLIPs, WS_SSLVersions} = lists:unzip(extract("listener.wss", "tls_version", AtomVal, Conf)),
{WS_SSLIPs, WS_SSLUseIdents} = lists:unzip(extract("listener.wss", "use_identity_as_username", BoolVal, Conf)),
% VMQS
{VMQ_SSLIPs, VMQ_SSLCAFiles} = lists:unzip(extract("listener.vmqs", "cafile", StrVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLDepths} = lists:unzip(extract("listener.vmqs", "depth", IntVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLCertFiles} = lists:unzip(extract("listener.vmqs", "certfile", StrVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLCiphers} = lists:unzip(extract("listener.vmqs", "ciphers", StrVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLCrlFiles} = lists:unzip(extract("listener.vmqs", "crlfile", StrVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLKeyFiles} = lists:unzip(extract("listener.vmqs", "keyfile", StrVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLRequireCerts} = lists:unzip(extract("listener.vmqs", "require_certificate", BoolVal, Conf)),
{VMQ_SSLIPs, VMQ_SSLVersions} = lists:unzip(extract("listener.vmqs", "tls_version", AtomVal, Conf)),
% HTTPS
{HTTP_SSLIPs, HTTP_SSLCAFiles} = lists:unzip(extract("listener.https", "cafile", StrVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLDepths} = lists:unzip(extract("listener.https", "depth", IntVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLCertFiles} = lists:unzip(extract("listener.https", "certfile", StrVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLCiphers} = lists:unzip(extract("listener.https", "ciphers", StrVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLCrlFiles} = lists:unzip(extract("listener.https", "crlfile", StrVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLKeyFiles} = lists:unzip(extract("listener.https", "keyfile", StrVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLRequireCerts} = lists:unzip(extract("listener.https", "require_certificate", BoolVal, Conf)),
{HTTP_SSLIPs, HTTP_SSLVersions} = lists:unzip(extract("listener.https", "tls_version", AtomVal, Conf)),
TCP = lists:zip(TCPIPs, MZip([TCPMaxConns,
TCPNrOfAcceptors,
TCPMountPoint,
TCPProxyProto,
TCPAllowedProto])),
WS = lists:zip(WSIPs, MZip([WSMaxConns,
WSNrOfAcceptors,
WSMountPoint,
WSProxyProto,
WSAllowedProto])),
VMQ = lists:zip(VMQIPs, MZip([VMQMaxConns,
VMQNrOfAcceptors,
VMQMountPoint])),
HTTP = lists:zip(HTTPIPs, MZip([HTTPMaxConns,
HTTPNrOfAcceptors,
HTTPConfigMod,
HTTPConfigFun,
HTTPProxyProto])),
SSL = lists:zip(SSLIPs, MZip([SSLMaxConns,
SSLNrOfAcceptors,
SSLMountPoint,
SSLCAFiles,
SSLDepths,
SSLCertFiles,
SSLCiphers,
SSLCrlFiles,
SSLKeyFiles,
SSLRequireCerts,
SSLVersions,
SSLUseIdents,
SSLAllowedProto])),
WSS = lists:zip(WS_SSLIPs, MZip([WS_SSLMaxConns,
WS_SSLNrOfAcceptors,
WS_SSLMountPoint,
WS_SSLCAFiles,
WS_SSLDepths,
WS_SSLCertFiles,
WS_SSLCiphers,
WS_SSLCrlFiles,
WS_SSLKeyFiles,
WS_SSLRequireCerts,
WS_SSLVersions,
WS_SSLUseIdents,
WS_SSLAllowedProto])),
VMQS = lists:zip(VMQ_SSLIPs, MZip([VMQ_SSLMaxConns,
VMQ_SSLNrOfAcceptors,
VMQ_SSLMountPoint,
VMQ_SSLCAFiles,
VMQ_SSLDepths,
VMQ_SSLCertFiles,
VMQ_SSLCiphers,
VMQ_SSLCrlFiles,
VMQ_SSLKeyFiles,
VMQ_SSLRequireCerts,
VMQ_SSLVersions])),
HTTPS = lists:zip(HTTP_SSLIPs, MZip([HTTP_SSLMaxConns,
HTTP_SSLNrOfAcceptors,
HTTP_SSLCAFiles,
HTTP_SSLDepths,
HTTP_SSLCertFiles,
HTTP_SSLCiphers,
HTTP_SSLCrlFiles,
HTTP_SSLKeyFiles,
HTTP_SSLRequireCerts,
HTTP_SSLVersions,
HTTP_SSLConfigMod,
HTTP_SSLConfigFun])),
DropUndef = fun(L) ->
[{K, [I || {_, V} = I <- SubL, V /= undefined]} || {K, SubL} <- L]
end,
[{mqtt, DropUndef(TCP)},
{mqtts, DropUndef(SSL)},
{mqttws, DropUndef(WS)},
{mqttwss, DropUndef(WSS)},
{vmq, DropUndef(VMQ)},
{vmqs, DropUndef(VMQS)},
{http, DropUndef(HTTP)},
{https, DropUndef(HTTPS)}
].
extract(Prefix, Suffix, Val, Conf) ->
Mappings = ["max_connections", "nr_of_acceptors", "mountpoint"],
ExcludeRootSuffixes
= [%% ssl listener specific
"cafile", "depth", "certfile", "ciphers", "crlfile",
"keyfile", "require_certificate", "tls_version",
"use_identity_as_username",
%% http listener specific
"config_mod", "config_fun",
%% mqtt listener specific
"allowed_protocol_versions",
%% other
"proxy_protocol"
],
%% get default from root of the tree for listeners
RootDefault =
case lists:member(Suffix, ExcludeRootSuffixes) of
true ->
undefined;
false ->
cuttlefish:conf_get(lists:flatten(["listener.", Suffix]), Conf)
end,
Default = cuttlefish:conf_get(lists:flatten([Prefix, ".", Suffix]), Conf, RootDefault),
%% get the name value pairs
NameSubPrefix = lists:flatten([Prefix, ".$name"]),
[begin
{ok, Addr} = inet:parse_address(StrAddr),
Prefix4 = lists:flatten([Prefix, ".", Name, ".", Suffix]),
V1 = Val(Name, RootDefault, undefined),
V2 = Val(Name, RootDefault,V1),
V3 = Val(Name, cuttlefish:conf_get(Prefix4, Conf, Default),V2),
AddrPort = {Addr, Port},
{AddrPort, {list_to_atom(Suffix), V3}}
end
|| {[_, _, Name], {StrAddr, Port}} <- lists:filter(
fun({K, _V}) ->
cuttlefish_variable:is_fuzzy_match(K, string:tokens(NameSubPrefix, "."))
end, Conf), not lists:member(Name, Mappings ++ ExcludeRootSuffixes)].
string_to_secs(S) ->
[Entity|T] = lists:reverse(S),
case {Entity, list_to_integer(lists:reverse(T))} of
{$s, D} -> D;
{$h, D} -> D * 60 * 60;
{$d, D} -> D * 24 * 60 * 60;
{$w, D} -> D * 7 * 24 * 60 * 60;
{$m, D} -> D * 4 * 7 * 24 * 60 * 60;
{$y, D} -> D * 12 * 4 * 7 * 24 * 60 * 60;
_ -> error
end. | apps/vmq_server/src/vmq_schema.erl | 0.63114 | 0.40489 | vmq_schema.erl | starcoder |
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% @copyright Ericsson AB 2006-2009. All Rights Reserved.
%% @doc URI-parsing library from OTP_R13B04-274-g3a68c36.
%% <p>A simple steal of an internal HTTP parsing library to
%% etorrent. It should probably be updated regurlarly.</p>
%% @end
-module(etorrent_http_uri).
-export([parse/1]).
%%%=========================================================================
%% @doc Parse an URL string() into its components
%% <p>The URL is parsed into `{Scheme, UserInfo, Host, Port, Path,
%% Query}', or into `{error, Reason}'.</p>
%% @end
-spec parse(string()) -> {error, term()}
| {http | https | udp, string(), string(), integer(), string(), string()}.
parse(AbsURI) ->
case parse_scheme(AbsURI) of
{error, Reason} ->
{error, Reason};
{Scheme, Rest} ->
case (catch parse_uri_rest(Scheme, Rest)) of
{UserInfo, Host, Port, Path, Query} ->
{Scheme, UserInfo, Host, Port, Path, Query};
_ ->
{error, {malformed_url, AbsURI}}
end
end.
%%%========================================================================
%%% Internal functions
%%%========================================================================
parse_scheme(AbsURI) ->
case split_uri(AbsURI, ":", {error, no_scheme}, 1, 1) of
{error, no_scheme} ->
{error, no_scheme};
{StrScheme, Rest} ->
case list_to_atom(http_util:to_lower(StrScheme)) of
Scheme when Scheme == http; Scheme == https; Scheme == udp ->
{Scheme, Rest};
Scheme ->
{error, {not_supported_scheme, Scheme}}
end
end.
parse_uri_rest(Scheme, "//" ++ URIPart) ->
{Authority, PathQuery} =
case split_uri(URIPart, "/", URIPart, 1, 0) of
Split = {_, _} ->
Split;
URIPart ->
case split_uri(URIPart, "\\?", URIPart, 1, 0) of
Split = {_, _} ->
Split;
URIPart ->
{URIPart,""}
end
end,
{UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}, 1, 1),
{Host, Port} = parse_host_port(Scheme, HostPort),
{Path, Query} = parse_path_query(PathQuery),
{UserInfo, Host, Port, Path, Query}.
parse_path_query(PathQuery) ->
{Path, Query} = split_uri(PathQuery, "\\?", {PathQuery, ""}, 1, 0),
{path(Path), Query}.
parse_host_port(Scheme,"[" ++ HostPort) -> %ipv6
DefaultPort = default_port(Scheme),
{Host, ColonPort} = split_uri(HostPort, "\\]", {HostPort, ""}, 1, 1),
{_, Port} = split_uri(ColonPort, ":", {"", DefaultPort}, 0, 1),
{Host, int_port(Port)};
parse_host_port(Scheme, HostPort) ->
DefaultPort = default_port(Scheme),
{Host, Port} = split_uri(HostPort, ":", {HostPort, DefaultPort}, 1, 1),
{Host, int_port(Port)}.
split_uri(UriPart, SplitChar, NoMatchResult, SkipLeft, SkipRight) ->
case inets_regexp:first_match(UriPart, SplitChar) of
{match, Match, _} ->
{string:substr(UriPart, 1, Match - SkipLeft),
string:substr(UriPart, Match + SkipRight, length(UriPart))};
nomatch ->
NoMatchResult
end.
default_port(udp) ->
80; % Hack
default_port(http) ->
80;
default_port(https) ->
443.
int_port(Port) when is_integer(Port) ->
Port;
int_port(Port) when is_list(Port) ->
list_to_integer(Port).
path("") ->
"/";
path(Path) ->
Path. | apps/etorrent/src/etorrent_http_uri.erl | 0.582372 | 0.406509 | etorrent_http_uri.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @copyright (C) 2016, AdRoll
%%% @doc
%%%
%%% Kinesis record aggregator.
%%%
%%% Follows the KPL aggregated record format:
%%% https://github.com/awslabs/amazon-kinesis-producer/blob/master/aggregation-format.md
%%%
%%% This is an Erlang port of the aggregation functionality from:
%%% https://pypi.python.org/pypi/aws_kinesis_agg/1.0.0
%%%
%%% Creating a new aggregator:
%%%
%%% Agg = kpl_agg:new()
%%%
%%% Adding user records to an aggregator (the aggregator will emit an
%%% aggregated record when it is full):
%%%
%%% case kpl_agg:add(Agg, Record) of
%%% {undefined, NewAgg} -> ...
%%% {FullAggRecord, NewAgg} -> ...
%%% end
%%%
%%% You can also use kpl:add_all to add multiple records at once. A
%%% <pre>Record</pre> is a {PartitionKey, Data} tuple or a {PartitionKey, Data,
%%% ExplicitHashKey} tuple.
%%%
%%% Getting the current aggregated record (e.g. to get the last aggregated
%%% record when you have no more user records to add):
%%%
%%% case kpl_agg:finish(Agg) of
%%% {undefined, Agg} -> ...
%%% {AggRecord, NewAgg} -> ...
%%% end
%%%
%%% The result currently uses a non-standard magic prefix to prevent the KCL from
%%% deaggregating the record automatically. To use compression, instantiate the
%%% aggregator using kpl_agg:new(true), which uses another
%%% non-standard magic prefix.
%%%
%%% @end
%%% Created: 12 Dec 2016 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(kpl_agg).
%% API
-export([new/0, new/1, count/1, size_bytes/1, finish/1, add/2, add_all/2]).
-define(MD5_DIGEST_BYTES, 16).
%% From http://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecord.html:
-define(KINESIS_MAX_BYTES_PER_RECORD, 1 bsl 20).
-include("erlmld.hrl").
-include("kpl_agg_pb.hrl").
%% A set of keys, mapping each key to a unique index.
-record(keyset,
{rev_keys = [] :: [binary()], %% list of known keys in reverse order
rev_keys_length = 0 :: non_neg_integer(), %% length of the rev_keys list
key_to_index = maps:new() :: map()}). %% maps each known key to a 0-based index
%% Internal state of a record aggregator. It stores an aggregated record that
%% is "in progress", i.e. it is possible to add more user records to it.
-record(state,
{num_user_records = 0 :: non_neg_integer(),
agg_size_bytes = 0 :: non_neg_integer(),
%% The aggregated record's PartitionKey and ExplicitHashKey are the
%% PartitionKey and ExplicitHashKey of the first user record added.
agg_partition_key = undefined :: undefined | binary(),
agg_explicit_hash_key = undefined :: undefined | binary(),
%% Keys seen in the user records added so far.
partition_keyset = #keyset{} :: #keyset{},
explicit_hash_keyset = #keyset{} :: #keyset{},
%% List if user records added so far, in reverse order.
rev_records = [] :: [#'Record'{}],
should_deflate = false}).
%%%===================================================================
%%% API
%%%===================================================================
new() ->
new(false).
new(ShouldDeflate) ->
#state{should_deflate = ShouldDeflate}.
count(#state{num_user_records = Num} = _State) ->
Num.
size_bytes(#state{agg_size_bytes = Size, agg_partition_key = PK} = _State) ->
PKSize =
case PK of
undefined ->
0;
_ ->
byte_size(PK)
end,
byte_size(?KPL_AGG_MAGIC)
+ Size
+ ?MD5_DIGEST_BYTES
+ PKSize
+ byte_size(kpl_agg_pb:encode_msg(#'AggregatedRecord'{})).
finish(#state{num_user_records = 0} = State) ->
{undefined, State};
finish(#state{agg_partition_key = AggPK,
agg_explicit_hash_key = AggEHK,
should_deflate = ShouldDeflate} =
State) ->
AggRecord = {AggPK, serialize_data(State, ShouldDeflate), AggEHK},
{AggRecord, new(ShouldDeflate)}.
add(State, {PartitionKey, Data} = _Record) ->
add(State, {PartitionKey, Data, undefined});
add(State, {PartitionKey, Data, ExplicitHashKey} = _Record) ->
case {calc_record_size(State, PartitionKey, Data, ExplicitHashKey), size_bytes(State)} of
{RecSize, _} when RecSize > ?KINESIS_MAX_BYTES_PER_RECORD ->
error("input record too large to fit in a single Kinesis record");
{RecSize, CurSize} when RecSize + CurSize > ?KINESIS_MAX_BYTES_PER_RECORD ->
{FullRecord, State1} = finish(State),
State2 = add_record(State1, PartitionKey, Data, ExplicitHashKey, RecSize),
{FullRecord, State2};
{RecSize, _} ->
State1 = add_record(State, PartitionKey, Data, ExplicitHashKey, RecSize),
%% fixme; make size calculations more accurate
case size_bytes(State1) > ?KINESIS_MAX_BYTES_PER_RECORD - 64 of
true ->
%% size estimate is almost the limit, finish & retry:
{FullRecord, State2} = finish(State),
State3 = add_record(State2, PartitionKey, Data, ExplicitHashKey, RecSize),
{FullRecord, State3};
false ->
{undefined, State1}
end
end.
add_all(State, Records) ->
{RevAggRecords, NState} =
lists:foldl(fun(Record, {RevAggRecords, Agg}) ->
case add(Agg, Record) of
{undefined, NewAgg} ->
{RevAggRecords, NewAgg};
{AggRecord, NewAgg} ->
{[AggRecord | RevAggRecords], NewAgg}
end
end,
{[], State},
Records),
{lists:reverse(RevAggRecords), NState}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Calculate how many extra bytes the given user record would take, when added
%% to the current aggregated record. This calculation has to know about KPL and
%% Protobuf internals.
calc_record_size(#state{partition_keyset = PartitionKeySet,
explicit_hash_keyset = ExplicitHashKeySet} =
_State,
PartitionKey,
Data,
ExplicitHashKey) ->
%% How much space we need for the PK:
PKLength = byte_size(PartitionKey),
PKSize =
case is_key(PartitionKey, PartitionKeySet) of
true ->
0;
false ->
1 + varint_size(PKLength) + PKLength
end,
%% How much space we need for the EHK:
EHKSize =
case ExplicitHashKey of
undefined ->
0;
_ ->
EHKLength = byte_size(ExplicitHashKey),
case is_key(ExplicitHashKey, ExplicitHashKeySet) of
true ->
0;
false ->
1 + varint_size(EHKLength) + EHKLength
end
end,
%% How much space we need for the inner record:
PKIndexSize = 1 + varint_size(potential_index(PartitionKey, PartitionKeySet)),
EHKIndexSize =
case ExplicitHashKey of
undefined ->
0;
_ ->
1 + varint_size(potential_index(ExplicitHashKey, ExplicitHashKeySet))
end,
DataLength = byte_size(Data),
DataSize = 1 + varint_size(DataLength) + DataLength,
InnerSize = PKIndexSize + EHKIndexSize + DataSize,
%% How much space we need for the entire record:
PKSize + EHKSize + 1 + varint_size(InnerSize) + InnerSize.
%% Calculate how many bytes are needed to represent the given integer in a
%% Protobuf message.
varint_size(Integer) when Integer >= 0 ->
NumBits = max(num_bits(Integer, 0), 1),
(NumBits + 6) div 7.
%% Recursively compute the number of bits needed to represent an integer.
num_bits(0, Acc) ->
Acc;
num_bits(Integer, Acc) when Integer >= 0 ->
num_bits(Integer bsr 1, Acc + 1).
%% Helper for add; do not use directly.
add_record(#state{partition_keyset = PKSet,
explicit_hash_keyset = EHKSet,
rev_records = RevRecords,
num_user_records = NumUserRecords,
agg_size_bytes = AggSize,
agg_partition_key = AggPK,
agg_explicit_hash_key = AggEHK} =
State,
PartitionKey,
Data,
ExplicitHashKey,
NewRecordSize) ->
{PKIndex, NewPKSet} = get_or_add_key(PartitionKey, PKSet),
{EHKIndex, NewEHKSet} = get_or_add_key(ExplicitHashKey, EHKSet),
NewRecord =
#'Record'{partition_key_index = PKIndex,
explicit_hash_key_index = EHKIndex,
data = Data},
State#state{partition_keyset = NewPKSet,
explicit_hash_keyset = NewEHKSet,
rev_records = [NewRecord | RevRecords],
num_user_records = 1 + NumUserRecords,
agg_size_bytes = NewRecordSize + AggSize,
agg_partition_key = first_defined(AggPK, PartitionKey),
agg_explicit_hash_key = first_defined(AggEHK, ExplicitHashKey)}.
first_defined(undefined, Second) ->
Second;
first_defined(First, _) ->
First.
serialize_data(#state{partition_keyset = PKSet,
explicit_hash_keyset = EHKSet,
rev_records = RevRecords} =
_State,
ShouldDeflate) ->
ProtobufMessage =
#'AggregatedRecord'{partition_key_table = key_list(PKSet),
explicit_hash_key_table = key_list(EHKSet),
records = lists:reverse(RevRecords)},
SerializedData = kpl_agg_pb:encode_msg(ProtobufMessage),
Checksum = crypto:hash(md5, SerializedData),
case ShouldDeflate of
true ->
<<?KPL_AGG_MAGIC_DEFLATED/binary,
(zlib:compress(<<SerializedData/binary, Checksum/binary>>))/binary>>;
false ->
<<?KPL_AGG_MAGIC/binary, SerializedData/binary, Checksum/binary>>
end.
%%%===================================================================
%%% Internal functions for keysets
%%%===================================================================
is_key(Key, #keyset{key_to_index = KeyToIndex} = _KeySet) ->
maps:is_key(Key, KeyToIndex).
get_or_add_key(undefined, KeySet) ->
{undefined, KeySet};
get_or_add_key(Key,
#keyset{rev_keys = RevKeys,
rev_keys_length = Length,
key_to_index = KeyToIndex} =
KeySet) ->
case maps:get(Key, KeyToIndex, not_found) of
not_found ->
NewKeySet =
KeySet#keyset{rev_keys = [Key | RevKeys],
rev_keys_length = Length + 1,
key_to_index = maps:put(Key, Length, KeyToIndex)},
{Length, NewKeySet};
Index ->
{Index, KeySet}
end.
potential_index(Key,
#keyset{rev_keys_length = Length, key_to_index = KeyToIndex} = _KeySet) ->
case maps:get(Key, KeyToIndex, not_found) of
not_found ->
Length;
Index ->
Index
end.
key_list(#keyset{rev_keys = RevKeys} = _KeySet) ->
lists:reverse(RevKeys).
%%%===================================================================
%%% TESTS
%%%===================================================================
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
varint_size_test() ->
%% Reference values obtained using
%% aws_kinesis_agg.aggregator._calculate_varint_size().
?assertEqual(1, varint_size(0)),
?assertEqual(1, varint_size(1)),
?assertEqual(1, varint_size(127)),
?assertEqual(2, varint_size(128)),
?assertEqual(4, varint_size(9999999)),
?assertEqual(6, varint_size(999999999999)),
ok.
keyset_test() ->
KeySet0 = #keyset{},
?assertEqual([], key_list(KeySet0)),
?assertEqual(false, is_key(<<"foo">>, KeySet0)),
?assertEqual(0, potential_index(<<"foo">>, KeySet0)),
{0, KeySet1} = get_or_add_key(<<"foo">>, KeySet0),
?assertEqual([<<"foo">>], key_list(KeySet1)),
?assertEqual(true, is_key(<<"foo">>, KeySet1)),
{0, KeySet1} = get_or_add_key(<<"foo">>, KeySet1),
?assertEqual(1, potential_index(<<"bar">>, KeySet1)),
{1, KeySet2} = get_or_add_key(<<"bar">>, KeySet1),
?assertEqual([<<"foo">>, <<"bar">>], key_list(KeySet2)),
?assertEqual(true, is_key(<<"foo">>, KeySet2)),
?assertEqual(true, is_key(<<"bar">>, KeySet2)),
{0, KeySet2} = get_or_add_key(<<"foo">>, KeySet2),
{1, KeySet2} = get_or_add_key(<<"bar">>, KeySet2),
?assertEqual(2, potential_index(<<"boom">>, KeySet2)),
ok.
empty_aggregator_test() ->
Agg = new(),
?assertEqual(0, count(Agg)),
?assertEqual(4 + 16, size_bytes(Agg)), % magic and md5
{undefined, Agg} = finish(Agg),
ok.
simple_aggregation_test() ->
Agg0 = new(),
{undefined, Agg1} = add(Agg0, {<<"pk1">>, <<"data1">>, <<"ehk1">>}),
{undefined, Agg2} = add(Agg1, {<<"pk2">>, <<"data2">>, <<"ehk2">>}),
{AggRecord, Agg3} = finish(Agg2),
?assertEqual(0, count(Agg3)),
%% Reference values obtained using priv/kpl_agg_tests_helper.py.
RefPK = <<"pk1">>,
RefEHK = <<"ehk1">>,
RefData =
<<?KPL_AGG_MAGIC/binary, 10, 3, 112, 107, 49, 10, 3, 112, 107, 50, 18, 4, 101, 104, 107,
49, 18, 4, 101, 104, 107, 50, 26, 11, 8, 0, 16, 0, 26, 5, 100, 97, 116, 97, 49, 26, 11, 8,
1, 16, 1, 26, 5, 100, 97, 116, 97, 50, 244, 41, 93, 155, 173, 190, 58, 30, 240, 223, 216,
8, 26, 205, 86, 4>>,
?assertEqual({RefPK, RefData, RefEHK}, AggRecord),
ok.
aggregate_many(Records) ->
{AggRecords, Agg} = add_all(new(), Records),
case finish(Agg) of
{undefined, _} ->
AggRecords;
{LastAggRecord, _} ->
AggRecords ++ [LastAggRecord]
end.
shared_keys_test() ->
[AggRecord] =
aggregate_many([{<<"alpha">>, <<"data1">>, <<"zulu">>},
{<<"beta">>, <<"data2">>, <<"yankee">>},
{<<"alpha">>, <<"data3">>, <<"xray">>},
{<<"charlie">>, <<"data4">>, <<"yankee">>},
{<<"beta">>, <<"data5">>, <<"zulu">>}]),
%% Reference values obtained using priv/kpl_agg_tests_helper.py.
RefPK = <<"alpha">>,
RefEHK = <<"zulu">>,
RefData =
<<?KPL_AGG_MAGIC/binary, 10, 5, 97, 108, 112, 104, 97, 10, 4, 98, 101, 116, 97, 10, 7, 99,
104, 97, 114, 108, 105, 101, 18, 4, 122, 117, 108, 117, 18, 6, 121, 97, 110, 107, 101,
101, 18, 4, 120, 114, 97, 121, 26, 11, 8, 0, 16, 0, 26, 5, 100, 97, 116, 97, 49, 26, 11,
8, 1, 16, 1, 26, 5, 100, 97, 116, 97, 50, 26, 11, 8, 0, 16, 2, 26, 5, 100, 97, 116, 97,
51, 26, 11, 8, 2, 16, 1, 26, 5, 100, 97, 116, 97, 52, 26, 11, 8, 1, 16, 0, 26, 5, 100, 97,
116, 97, 53, 78, 67, 160, 206, 22, 1, 33, 154, 3, 6, 110, 235, 9, 229, 53, 100>>,
?assertEqual({RefPK, RefData, RefEHK}, AggRecord),
ok.
record_fullness_test() ->
Data1 = list_to_binary(["X" || _ <- lists:seq(1, 500000)]),
Data2 = list_to_binary(["Y" || _ <- lists:seq(1, 600000)]),
Data3 = list_to_binary(["Z" || _ <- lists:seq(1, 200000)]),
Agg0 = new(),
{undefined, Agg1} = add(Agg0, {<<"pk1">>, Data1, <<"ehk1">>}),
{{AggPK1, _AggData1, AggEHK1}, Agg2} = add(Agg1, {<<"pk2">>, Data2, <<"ehk2">>}),
{undefined, Agg3} = add(Agg2, {<<"pk3">>, Data3, <<"ehk3">>}),
{{AggPK2, _AggData2, AggEHK2}, _} = finish(Agg3),
%% Reference values obtained using priv/kpl_agg_tests_helper.py.
% fixme; these comparisons will fail as long as we're using the wrong kpl magic.
%RefChecksum1 = <<198,6,88,216,8,244,159,59,223,14,247,208,138,137,64,118>>,
%RefChecksum2 = <<89,148,130,126,150,23,148,18,38,230,176,182,93,186,150,69>>,
?assertEqual(<<"pk1">>, AggPK1),
?assertEqual(<<"ehk1">>, AggEHK1),
%?assertEqual(RefChecksum1, crypto:hash(md5, AggData1)),
?assertEqual(<<"pk2">>, AggPK2),
?assertEqual(<<"ehk2">>, AggEHK2),
%?assertEqual(RefChecksum2, crypto:hash(md5, AggData2)),
ok.
full_record_test() ->
Fill =
fun F(Acc) ->
PK = integer_to_binary(rand:uniform(1000)),
Data =
<< <<(integer_to_binary(rand:uniform(128)))/binary>>
|| _ <- lists:seq(1, 1 + rand:uniform(1000)) >>,
case add(Acc, {PK, Data}) of
{undefined, NAcc} ->
F(NAcc);
{Full, _} ->
Full
end
end,
{PK, Data, _} = Fill(new()),
Total = byte_size(PK) + byte_size(Data),
?assert(Total =< ?KINESIS_MAX_BYTES_PER_RECORD),
?assert(Total >= ?KINESIS_MAX_BYTES_PER_RECORD - 2048).
deflate_test() ->
Agg0 = new(true),
{undefined, Agg1} = add(Agg0, {<<"pk1">>, <<"data1">>, <<"ehk1">>}),
{{_, Data, _}, _} = finish(Agg1),
<<Magic:4/binary, Deflated/binary>> = Data,
?assertEqual(?KPL_AGG_MAGIC_DEFLATED, Magic),
Inflated = zlib:uncompress(Deflated),
ProtoMsg = binary:part(Inflated, 0, size(Inflated) - 16),
Checksum = binary:part(Inflated, size(Inflated), -16),
?assertEqual(Checksum, crypto:hash(md5, ProtoMsg)),
#'AggregatedRecord'{records = [R1]} = kpl_agg_pb:decode_msg(ProtoMsg, 'AggregatedRecord'),
#'Record'{data = RecordData} = R1,
?assertEqual(<<"data1">>, RecordData).
-endif. | src/kpl_agg.erl | 0.59843 | 0.481881 | kpl_agg.erl | starcoder |
-module(dsdc_accounts_trees).
%% API - similar to OTP `gb_trees` module
-export([empty/0,
empty_with_backend/0,
get/2,
lookup/2,
enter/2]).
%% API - Merkle tree
-export([root_hash/1,
commit_to_db/1
]).
%% API - Proof of inclusion
-export([ add_poi/3
, verify_poi/3
]).
%% API - misc
-export([get_all_accounts_balances/1]).
-export_type([tree/0]).
-type key() :: dsdc_keys:pubkey().
-type value() :: dsdc_accounts:deterministic_account_binary_with_pubkey().
-opaque tree() :: dsdu_mtrees:mtree(key(), value()).
%%%===================================================================
%%% API - similar to OTP `gb_trees` module
%%%===================================================================
-spec empty() -> tree().
empty() ->
dsdu_mtrees:empty().
-spec empty_with_backend() -> tree().
empty_with_backend() ->
dsdu_mtrees:empty_with_backend(dsdc_db_backends:accounts_backend()).
-spec get(dsdc_keys:pubkey(), tree()) -> dsdc_accounts:account().
get(Pubkey, Tree) ->
Account = dsdc_accounts:deserialize(dsdu_mtrees:get(Pubkey, Tree)),
Pubkey = dsdc_accounts:pubkey(Account), %% Hardcoded expectation.
Account.
-spec lookup(dsdc_keys:pubkey(), tree()) -> none | {value, dsdc_accounts:account()}.
lookup(Pubkey, Tree) ->
case dsdu_mtrees:lookup(Pubkey, Tree) of
none ->
none;
{value, SerializedAccount} ->
Account = dsdc_accounts:deserialize(SerializedAccount),
Pubkey = dsdc_accounts:pubkey(Account), %% Hardcoded expectation.
{value, Account}
end.
-spec enter(dsdc_accounts:account(), tree()) -> tree().
enter(Account, Tree) ->
dsdu_mtrees:enter(key(Account), value(Account), Tree).
%%%===================================================================
%%% API - Merkle tree
%%%===================================================================
-spec root_hash(tree()) -> {ok, dsdu_mtrees:root_hash()} | {error, empty}.
root_hash(Tree) ->
dsdu_mtrees:root_hash(Tree).
-spec add_poi(dsdc_keys:pubkey(), tree(), dsdc_poi:poi()) ->
{'ok', binary(), dsdc_poi:poi()}
| {'error', 'not_present' | 'wrong_root_hash'}.
add_poi(Pubkey, Tree, Poi) ->
dsdc_poi:add_poi(Pubkey, Tree, Poi).
-spec verify_poi(dsdc_keys:pubkey(), binary(), dsdc_poi:poi()) ->
'ok' | {'error', term()}.
verify_poi(AccountKey, SerializedAccount, Poi) ->
dsdc_poi:verify(AccountKey, SerializedAccount, Poi).
-spec commit_to_db(tree()) -> tree().
commit_to_db(Tree) ->
dsdu_mtrees:commit_to_db(Tree).
%%%===================================================================
%%% API - misc
%%%===================================================================
-spec get_all_accounts_balances(tree()) -> [{dsdc_keys:pubkey(), non_neg_integer()}].
get_all_accounts_balances(AccountsTree) ->
AccountsDump = dsdu_mtrees:to_list(AccountsTree),
lists:foldl(
fun({Pubkey, SerializedAccount}, Acc) ->
Account = dsdc_accounts:deserialize(SerializedAccount),
[{Pubkey, dsdc_accounts:balance(Account)} | Acc]
end, [], AccountsDump).
%%%===================================================================
%%% Internal functions
%%%===================================================================
key(A) ->
dsdc_accounts:pubkey(A).
value(A) ->
dsdc_accounts:serialize(A). | apps/dsdcore/src/dsdc_accounts_trees.erl | 0.64131 | 0.419113 | dsdc_accounts_trees.erl | starcoder |
% @doc
% <a href="https://reference.digilentinc.com/reference/pmod/pmodgps/reference-manual">
% PmodGPS</a>
% module.
%
% The PmodGPS sends the GPS data over UART.
%
% Start the driver with
% ```
% 1> grisp:add_device(uart, pmod_gps).
% '''
% @end
-module(pmod_gps).
-behaviour(gen_server).
% API
-export([start_link/2]).
-export([get/1]).
% Callbacks
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([code_change/3]).
-export([terminate/2]).
-include("grisp.hrl").
%--- Records -------------------------------------------------------------------
-record(state, {port, last_gga, last_gsa, last_gsv, last_rmc, last_vtg}).
%--- API -----------------------------------------------------------------------
% @private
start_link(Slot, _Opts) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, Slot, []).
% @doc Get the GPS data.
%
% The input parameter specifies which sentence to get. For a description of
% the sentences see the
% <a href="https://reference.digilentinc.com/_media/reference/pmod/pmodgps/pmodgps_rm.pdf">
% PmodGPS Reference Manual
% </a>.
%
% === Example ===
% ```
% 2> pmod_gps:get(gga).
% <<"$GPGGA,145832.000,5207.3597,N,01135.6957,E,1,5,2.50,61.9,M,46.7,M,,*6F\n">>
% 3> pmod_gps:get(gsa).
% <<"$GPGSA,A,3,17,06,19,02,24,,,,,,,,2.69,2.51,0.97*0B\n">>
% 4> pmod_gps:get(gsv).
% <<"$GPGSV,3,3,12,14,22,317,17,17,10,040,35,29,09,203,,22,02,351,*7F\n">>
% 5> pmod_gps:get(rmc).
% <<"$GPRMC,150007.000,A,5207.3592,N,01135.6895,E,0.46,255.74,120220,,,A*64\n">>
% 6> pmod_gps:get(vtg).
% <<"$GPVTG,297.56,T,,M,0.65,N,1.21,K,A*33\n">>
% '''
-spec get('gga' | 'gsa' | 'gsv' | 'rmc' | 'vtg') -> binary().
get(Sentence) ->
call({get, Sentence}).
%--- Callbacks -----------------------------------------------------------------
% @private
init(Slot = uart) ->
Port = open_port({spawn_driver, "grisp_termios_drv"}, [binary]),
grisp_devices:register(Slot, ?MODULE),
{ok, #state{port = Port}}.
% @private
handle_call(Call, _From, State) ->
try execute_call(Call, State)
catch throw:Reason -> {reply, {error, Reason}, State}
end.
% @private
handle_cast(Request, _State) -> error({unknown_cast, Request}).
% @private
handle_info({Port, {data, Data}}, #state{port = Port} = State) ->
case Data of
% "$GPGGA...\n"
<<$$,$G,$P,$G,$G,$A,_/binary>> ->
{noreply, State#state{last_gga = Data}};
% "$GPGSA...\n"
<<$$,$G,$P,$G,$S,$A,_/binary>> ->
{noreply, State#state{last_gsa = Data}};
% "$GPGSV...\n"
<<$$,$G,$P,$G,$S,$V,_/binary>> ->
{noreply, State#state{last_gsv = Data}};
% "$GPRMC...\n"
<<$$,$G,$P,$R,$M,$C,_/binary>> ->
{noreply, State#state{last_rmc = Data}};
% "$GPVTG...\n"
<<$$,$G,$P,$V,$T,$G,_/binary>> ->
{noreply, State#state{last_vtg = Data}};
<<$\n>> ->
{noreply, State};
_ ->
{noreply, State}
end.
% @private
code_change(_OldVsn, State, _Extra) -> {ok, State}.
% @private
terminate(_Reason, _State) -> ok.
%--- Internal -----------------------------------------------------------------
call(Call) ->
Dev = grisp_devices:default(?MODULE),
case gen_server:call(Dev#device.pid, Call) of
{error, Reason} -> error(Reason);
Result -> Result
end.
execute_call({get, gga}, #state{last_gga = Gga} = State) ->
{reply, Gga, State};
execute_call({get, gsa}, #state{last_gsa = Gsa} = State) ->
{reply, Gsa, State};
execute_call({get, gsv}, #state{last_gsv = Gsv} = State) ->
{reply, Gsv, State};
execute_call({get, rmc}, #state{last_rmc = Rmc} = State) ->
{reply, Rmc, State};
execute_call({get, vtg}, #state{last_vtg = Vtc} = State) ->
{reply, Vtc, State};
execute_call({get, Sentence}, State) ->
error({unknown_sentence, Sentence}, State);
execute_call(Request, State) ->
error({unknown_call, Request}, State). | src/pmod_gps.erl | 0.527073 | 0.511107 | pmod_gps.erl | starcoder |
%% Copyright (c) 2020 Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(erlt_import).
-export([module/1]).
-record(context, {
functions = #{},
types = #{}
}).
-define(IS_STRUCT(S), (S =:= struct orelse S =:= exception orelse S =:= message)).
module(Forms) ->
Context = init(Forms),
erlt_ast:prewalk(Forms, fun(Node, Ctx) -> rewrite(Node, Context, Ctx) end).
init(Forms) -> init(Forms, [], []).
init([{function, _, Name, Arity, _} | Rest], Functions, Types) ->
init(Rest, [{{Name, Arity}, local} | Functions], Types);
init([{unchecked_function, _, Name, Arity, _} | Rest], Functions, Types) ->
init(Rest, [{{Name, Arity}, local} | Functions], Types);
init([{attribute, _, import, {Mod, Imports}} | Rest], Functions0, Types) ->
Functions = [{NA, {imported, Mod}} || NA <- Imports] ++ Functions0,
init(Rest, Functions, Types);
init([{attribute, _, import_type, {Mod, Imports}} | Rest], Functions, Types0) ->
Types = [{Name, {imported, Mod}} || {Name, _Arity} <- Imports] ++ Types0,
init(Rest, Functions, Types);
init([{attribute, _, type, {Name, _, _}} | Rest], Functions, Types) ->
init(Rest, Functions, [{Name, local} | Types]);
init([{attribute, _, opaque, {Name, _, _}} | Rest], Functions, Types) ->
init(Rest, Functions, [{Name, local} | Types]);
init([{attribute, _, unchecked_opaque, {Name, _, _}} | Rest], Functions, Types) ->
init(Rest, Functions, [{Name, local} | Types]);
init([{attribute, _, Struct, {Name, _, _}} | Rest], Functions, Types) when ?IS_STRUCT(Struct) ->
init(Rest, Functions, [{Name, local} | Types]);
init([{attribute, _, enum, {Name, _, _}} | Rest], Functions, Types) ->
init(Rest, Functions, [{Name, local} | Types]);
init([_Other | Rest], Functions, Types) ->
init(Rest, Functions, Types);
init([], Functions, Types) ->
#context{
functions = maps:from_list(Functions),
types = maps:from_list(Types)
}.
rewrite({struct, Line, Name, Fields}, Context, _Ctx) ->
{struct, Line, full_type_name(Name, Context), Fields};
rewrite({struct, Line, Expr, Name, Fields}, Context, _Ctx) ->
{struct, Line, Expr, full_type_name(Name, Context), Fields};
rewrite({struct_field, Line, Expr, Name, Field}, Context, _Ctx) ->
{struct_field, Line, Expr, full_type_name(Name, Context), Field};
rewrite({struct_index, Line, Name, Field}, Context, _Ctx) ->
{struct_index, Line, full_type_name(Name, Context), Field};
rewrite({enum, Line, Name, Constr, Fields}, Context, _Ctx) ->
{enum, Line, full_type_name(Name, Context), Constr, Fields};
rewrite({'fun', Line, {function, Name, Arity}}, Context, _Ctx) ->
case full_function_name({atom, Line, Name}, Arity, Context) of
{atom, _, Name} ->
{'fun', Line, {function, Name, Arity}};
{remote, _, {atom, _, Mod}, {atom, _, Name}} ->
{'fun', Line, {function, Mod, Name, Arity}}
end;
rewrite({call, Line, Name, Args}, Context, _Ctx) ->
{call, Line, full_function_name(Name, length(Args), Context), Args};
rewrite({user_type, Line, Name, Args}, Context, _Ctx) ->
case full_type_name({atom, Line, Name}, Context) of
{atom, _, Name} ->
{user_type, Line, Name, Args};
{remote, _, Mod, NameAtom} ->
{remote_type, Line, [Mod, NameAtom, Args]}
end;
rewrite(Other, _Context, _Ctx) ->
Other.
full_type_name({atom, Line, Name}, Context) ->
case maps:find(Name, Context#context.types) of
{ok, {imported, Mod}} ->
GenLine = erl_anno:set_generated(true, Line),
{remote, Line, {atom, GenLine, Mod}, {atom, GenLine, Name}};
{ok, local} ->
{atom, Line, Name}
end;
full_type_name({remote, _, _, _} = Name, _Context) ->
Name.
full_function_name({atom, Line, Name}, Arity, Context) ->
case maps:find({Name, Arity}, Context#context.functions) of
{ok, {imported, Mod}} ->
GenLine = erl_anno:set_generated(true, Line),
{remote, Line, {atom, GenLine, Mod}, {atom, GenLine, Name}};
{ok, local} ->
{atom, Line, Name};
error ->
case erl_internal:bif(Name, Arity) of
true ->
GenLine = erl_anno:set_generated(true, Line),
{remote, Line, {atom, GenLine, erlang}, {atom, GenLine, Name}};
false ->
%% Call to a generated function like module_info/0
{atom, Line, Name}
end
end;
full_function_name(RemoteOrExpr, _Arity, _Context) ->
RemoteOrExpr. | erltc/src/erlt_import.erl | 0.588298 | 0.408218 | erlt_import.erl | starcoder |
%%
%% e3d_bv.erl --
%%
%% Bounding volume operations.
%% Bounding boxes and quickhull, and eigen-vecs calculation implemented
%%
%% Copyright (c) 2001-2011 <NAME>
%%
%% See the file "license.terms" for information on usage and redistribution
%% of this file, and for a DISCLAIMER OF ALL WARRANTIES.
%%
%% $Id$
%%
-module(e3d_bv).
%% Bounding Box/Sphere
-export([box/0,box/1,box/2,box/3,
union/2, dist/2, intersect/2,
center/1,max_extent/1,
surface_area/1,volume/1,
sphere/1,
inside/2, hit/2, hit/3, inv_sign/1]).
%% Other Stuff
-export([eigen_vecs/1,quickhull/1,covariance_matrix/1]).
-import(e3d_vec, [dot/2,add/2,average/1,average/2,dist_sqr/2,normal/1]).
-import(lists, [foldl/3]).
-include("e3d.hrl").
-compile(inline).
-type vector() :: e3d_vec:vector().
-type point() :: e3d_vec:point().
-define(BB_MIN, {{?E3D_INFINITY,?E3D_INFINITY,?E3D_INFINITY},
{-?E3D_INFINITY,-?E3D_INFINITY,-?E3D_INFINITY}}).
%%--------------------------------------------------------------------
%% @doc Creates a bounding box
%% Infinite if no arguments is given
%% Enclosing the two points or list if given.
%% The box is expanded with Epsilon in all directions if given.
%% box() -> infinite_box;
%% box(point, point |[Epsilon])
%% box([points]|[Epsilon])
%% @end
%%--------------------------------------------------------------------
-spec box() -> e3d_bbox().
box() ->
?BB_MIN.
-spec box([point()]) -> e3d_bbox().
box([{X,Y,Z}|Vs]) ->
bounding_box_1(Vs, X, X, Y, Y, Z, Z).
-spec box(point()|[point()], point()|float()) -> e3d_bbox().
box([{X,Y,Z}|Vs], Expand) ->
{Min, Max} = bounding_box_1(Vs, X, X, Y, Y, Z, Z),
{add(Min, {-Expand,-Expand,-Expand}), add(Max,{Expand,Expand,Expand})};
box({V10,V11,V12}, {V20,V21,V22}) ->
{MinX, MaxX} = if V10 < V20 -> {V10,V20};
true -> {V20,V10}
end,
{MinY, MaxY} = if V11 < V21 -> {V11, V21};
true -> {V21, V11}
end,
{MinZ, MaxZ} =if V12 < V22 -> {V12, V22};
true -> {V22, V12}
end,
{{MinX,MinY,MinZ},{MaxX,MaxY,MaxZ}}.
-spec box(vector(), vector(), float()) -> e3d_bbox().
box({V10,V11,V12}, {V20,V21,V22}, Expand) ->
{MinX, MaxX} = if V10 < V20 -> {V10,V20};
true -> {V20,V10}
end,
{MinY, MaxY} = if V11 < V21 -> {V11, V21};
true -> {V21, V11}
end,
{MinZ, MaxZ} =if V12 < V22 -> {V12, V22};
true -> {V22, V12}
end,
{add({MinX,MinY,MinZ}, {-Expand,-Expand,-Expand}),
add({MaxX,MaxY,MaxZ}, {Expand,Expand,Expand})}.
%%--------------------------------------------------------------------
%% @doc Distance between two BB's or false if they intersect
%% @end
%%--------------------------------------------------------------------
-spec dist(e3d_bbox(), e3d_bbox()) -> false | float().
dist({{Nx1,Ny1,Nz1}, {Fx1,Fy1,Fz1}},
{{Nx2,Ny2,Nz2}, {Fx2,Fy2,Fz2}}) ->
DS0 = 0.0,
DS1 = if Fx2 < Nx1 -> Xt=(Nx1-Fx2), DS0 + Xt*Xt;
Fx1 < Nx2 -> Xt=(Nx2-Fx1), DS0 + Xt*Xt;
true -> DS0
end,
DS2 = if Fy2 < Ny1 -> Yt=(Ny1-Fy2), DS1 + Yt*Yt;
Fy1 < Ny2 -> Yt=(Ny2-Fy1), DS1 + Yt*Yt;
true -> DS1
end,
DS3 = if Fz2 < Nz1 -> Zt=(Nz1-Fz2), DS2 + Zt*Zt;
Fz1 < Nz2 -> Zt=(Nz2-Fz1), DS2 + Zt*Zt;
true -> DS2
end,
DS3 > 0.0 andalso DS3.
%%--------------------------------------------------------------------
%% @doc Checks if two BB's intersect
%% @end
%%--------------------------------------------------------------------
-spec intersect(e3d_bbox(), e3d_bbox()) -> boolean().
intersect({{Nx1,Ny1,Nz1}, {Fx1,Fy1,Fz1}},
{{Nx2,Ny2,Nz2}, {Fx2,Fy2,Fz2}}) ->
if Nx1 > Fx2 orelse Nx2 > Fx1 -> false;
Ny1 > Fy2 orelse Ny2 > Fy1 -> false;
Nz1 > Fz2 orelse Nz2 > Fz1 -> false;
true -> true
end.
%%--------------------------------------------------------------------
%% @doc Creates the union of a bounding box and point| bounding box
%% @end
%%--------------------------------------------------------------------
-spec union(e3d_bbox(), vector() | e3d_bbox()) -> e3d_bbox().
union(BBox1 = {Min1={V10,V11,V12}, Max1={V20,V21,V22}},
BBox2 = {Min2={V30,V31,V32}, Max2={V40,V41,V42}}) ->
%% Avoid tuple construction if unnecessary
%% {{erlang:min(V10,V30), erlang:min(V11,V31), erlang:min(V12,V32)},
%% {erlang:max(V20,V40), erlang:max(V21,V41), erlang:max(V22,V42)}};
%% Bjorn fix the compiler :-)
%% The compiler can not optimize away the tuple construction
%% that's why the code looks like this.
if V10 =< V30 ->
if V11 =< V31 ->
if V12 =< V32 ->
if V20 >= V40 ->
if V21 >= V41 ->
if V22 >= V42 -> BBox1;
true -> {Min1, {V20,V21,V42}}
end;
true -> {Min1, {V20, V41, erlang:max(V22,V42)}}
end;
true ->
{Min1, {V40, erlang:max(V21,V41), erlang:max(V22,V42)}}
end;
true ->
{{V10,V11,V32}, max_point(Max1, Max2)}
end;
true ->
{{V10, V31, erlang:min(V12,V32)}, max_point(Max1, Max2)}
end;
true ->
if V31 =< V11 ->
if V32 =< V12 ->
if V40 >= V20 ->
if V41 >= V21 ->
if V42 >= V22 -> BBox2;
true -> {Min2, {V40,V41,V22}}
end;
true ->
{Min2, {V40, V21, erlang:max(V42,V22)}}
end;
true ->
{Min2, {V20, erlang:max(V41,V21), erlang:max(V42,V22)}}
end;
true ->
{{V30, V31, V12}, max_point(Max1, Max2)}
end;
true ->
{{V30, V11, erlang:min(V12, V32)}, max_point(Max1, Max2)}
end
end;
union(BBox = {Min0={V10,V11,V12}, Max0 = {V20,V21,V22}},
Point = {V30,V31,V32}) ->
if V10 =< V30 ->
if V11 =< V31 ->
if V12 =< V32 ->
if V20 >= V30 ->
if V21 >= V31 ->
if V22 >= V32 -> BBox;
true -> {Min0, {V20,V21,V32}}
end;
true -> {Min0, {V20, V31, erlang:max(V22,V32)}}
end;
true ->
{Min0, {V30, erlang:max(V21,V31), erlang:max(V22,V32)}}
end;
true ->
{{V10,V11,V32}, max_point(Max0, Point)}
end;
true ->
{{V10, V31, erlang:min(V12,V32)}, max_point(Max0, Point)}
end;
true ->
if V31 =< V11 ->
if V32 =< V12 -> {Point, max_point(Max0, Point)};
true -> {{V30,V31,V12}, max_point(Max0, Point)}
end;
true -> {{V30,V11,erlang:min(V12,V32)}, max_point(Max0, Point)}
end
end.
%%--------------------------------------------------------------------
%% @doc Creates a bounding sphere from a bounding box
%% @end
%%--------------------------------------------------------------------
-spec sphere(e3d_bbox()) -> e3d_bsphere().
sphere(BB = {{_,_,_}, Max = {_,_,_}}) ->
Center = center(BB),
{Center,
case inside(Center, BB) of
true -> dist_sqr(Center, Max);
false -> 0.0
end}.
%%--------------------------------------------------------------------
%% @doc Returns the center of the bounding volume
%% @end
%%--------------------------------------------------------------------
-spec center(e3d_bv()) -> point().
center({Min = {_,_,_}, Max = {_,_,_}}) ->
average(Min,Max);
center({Center, DistSqr}) when is_number(DistSqr) ->
Center.
%%--------------------------------------------------------------------
%% @doc Returns the surface area of the bounding volume
%% @end
%%--------------------------------------------------------------------
-spec surface_area(e3d_bv()) -> float().
surface_area({Min = {Minx,_,_}, Max = {MaxX,_,_}}) ->
if Minx > MaxX -> 0;
true ->
{X,Y,Z} = e3d_vec:sub(Max, Min),
X*Y+Y*Z+Z*X*2
end.
%%--------------------------------------------------------------------
%% @doc Returns the volume of the bounding volume
%% @end
%%--------------------------------------------------------------------
-spec volume(e3d_bv()) -> float().
volume({Min = {Minx,_,_}, Max = {MaxX,_,_}}) ->
if Minx > MaxX -> 0;
true ->
{X,Y,Z} = e3d_vec:sub(Max, Min),
X*Y*Z
end.
%%--------------------------------------------------------------------
%% @doc Returns true if point is inside bounding volume
%% @end
%%--------------------------------------------------------------------
-spec inside(point(), e3d_bv()) -> boolean().
inside({V30,V31,V32}, {{V10,V11,V12}, {V20,V21,V22}}) ->
V10 >= V30 andalso V30 >= V20 andalso
V11 >= V31 andalso V31 >= V21 andalso
V12 >= V32 andalso V32 >= V22;
inside(Point, {Center, DistSqr}) when is_number(DistSqr) ->
dist_sqr(Center, Point) =< DistSqr.
%%--------------------------------------------------------------------
%% @doc Returns the largest dimension of the bounding box,
%% 1 = X, 2 = Y, Z = 3 undefined = if zero dimension
%% @end
%%--------------------------------------------------------------------
-spec max_extent(e3d_bbox()) -> undefined | 1 | 2 | 3.
max_extent({Min, Max}) ->
{X,Y,Z} = e3d_vec:sub(Max, Min),
if X > Y, X > Z -> 1;
Y > Z -> 2;
Y =:= Z, Z =< 0.0 -> undefined; %% Zero
true -> 3
end.
%%--------------------------------------------------------------------
%% @doc Tests if #ray{} hits BB
%% @end
%%--------------------------------------------------------------------
-spec hit(e3d_ray(), e3d_bbox()) -> boolean().
hit(#ray{d=Dir}=Ray, BB) ->
Swap = inv_sign(Dir),
hit(Ray, Swap, BB).
-spec hit(e3d_ray(), {vector(), {boolean(), boolean(), boolean()}}, e3d_bbox()) -> boolean().
hit(#ray{o={Ox,Oy,Oz},n=Near,f=Far}, {{Ix,Iy,Iz},{Sx,Sy,Sz}}, {{MIx,MIy,MIz},{MAx,MAy,MAz}}) ->
T0x = (MIx-Ox)*Ix, T1x = (MAx-Ox)*Ix,
{Nx,Fx} = case Sx of
false -> {max(T0x,Near), min(T1x,Far)};
true -> {max(T1x,Near), min(T0x,Far)}
end,
if Nx < Fx ->
T0y = (MIy-Oy)*Iy, T1y = (MAy-Oy)*Iy,
{Ny,Fy} = case Sy of
false -> {max(T0y,Nx), min(T1y,Fx)};
true -> {max(T1y,Nx), min(T0y,Fx)}
end,
if Ny < Fy ->
T0z = (MIz-Oz)*Iz, T1z = (MAz-Oz)*Iz,
{Nz,Fz} = case Sz of
false -> {max(T0z,Ny), min(T1z,Fy)};
true -> {max(T1z,Ny), min(T0z,Fy)}
end,
Nz < Fz;
true ->
false
end;
true ->
false
end.
inv_sign({X,Y,Z}) ->
{{inv(X),inv(Y),inv(Z)}, {X < 0.0, Y < 0.0, Z < 0.0}}.
inv(N) ->
try 1.0/N
catch _:_ ->
?E3D_INFINITY
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Internals
max_point(Max0 = {V20,V21,V22}, Max1 = {V30,V31,V32}) ->
if V20 >= V30 ->
if V21 >= V31 ->
if V22 >= V32 -> Max0;
true -> {V20,V21,V32}
end;
true -> {V20, V31, erlang:max(V22,V32)}
end;
true ->
if V31 >= V21 ->
if V32 >= V22 -> Max1;
true -> {V30,V31,V22}
end;
true -> {V30, V21, erlang:max(V22,V32)}
end
end.
bounding_box_1([{X,_,_}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when X < X0 ->
bounding_box_1(Vs, X, X1, Y0, Y1, Z0, Z1);
bounding_box_1([{X,_,_}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when X > X1 ->
bounding_box_1(Vs, X0, X, Y0, Y1, Z0, Z1);
bounding_box_1([{_,Y,_}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when Y < Y0 ->
bounding_box_1(Vs, X0, X1, Y, Y1, Z0, Z1);
bounding_box_1([{_,Y,_}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when Y > Y1 ->
bounding_box_1(Vs, X0, X1, Y0, Y, Z0, Z1);
bounding_box_1([{_,_,Z}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when Z < Z0 ->
bounding_box_1(Vs, X0, X1, Y0, Y1, Z, Z1);
bounding_box_1([{_,_,Z}|_]=Vs, X0, X1, Y0, Y1, Z0, Z1) when Z > Z1 ->
bounding_box_1(Vs, X0, X1, Y0, Y1, Z0, Z);
bounding_box_1([_|Vs], X0, X1, Y0, Y1, Z0, Z1) ->
bounding_box_1(Vs, X0, X1, Y0, Y1, Z0, Z1);
bounding_box_1([], X0, X1, Y0, Y1, Z0, Z1) ->
{{X0,Y0,Z0},{X1,Y1,Z1}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
eigen_vecs(Vs) ->
Fs = quickhull(Vs),
SymMat = covariance_matrix(Fs),
{Vals,{X1,Y1,Z1,X2,Y2,Z2,X3,Y3,Z3}} = e3d_mat:eigenv3(SymMat),
{Vals,{{X1,Y1,Z1},
{X2,Y2,Z2},
{X3,Y3,Z3}}}.
%%%%%%%%%%%%%%%%%%%%%%%%%%% QHULL %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-record(hull, {f,p,os}).
%% Splits a point soup in a convex-triangle-hull.
quickhull([V1,V2|Vs0]) when is_list(Vs0) ->
%% Init find an initial triangle..
[M1,M2] = if V1 < V2 -> [V1,V2]; true -> [V2,V1] end,
{T1,T2,[T30|Vs1]} = minmax_x(Vs0,M1,M2,[]),
Cen = average([T1,T2]),
Vec = e3d_vec:norm_sub(T2,Cen),
Max = fun(V) ->
{VVec,Vd} = vec_dist(V,Cen),
A = (1-abs(dot(VVec,Vec))),
Vd*A
end,
{T3,Vs2} = max_zy(Vs1,Max,Max(T30),T30,[]),
%% Create the initial hull of two faces
F1 = #hull{p=Plane} = hull([T1,T2,T3]),
F2 = hull([T1,T3,T2]),
%% Split vertices on each plane
{F1L,F2L} = initial_split(Vs2,Plane,0.0,[],0.0,[]),
%% Expand hull
quickhull2([F1#hull{os=F1L},F2#hull{os=F2L}],[]).
quickhull2([This=#hull{os=[]}|Rest], Completed) ->
quickhull2(Rest,[This|Completed]);
quickhull2([#hull{f=Face,os=[New|Os0]}|Rest], Completed) ->
Eds0 = mk_eds(Face,gb_sets:empty()),
{Eds,Os,Unchanged} =
remove_seen_hull(Completed++Rest,Eds0,New,Os0,[]),
NewHulls = create_new_hulls(Eds,Os,New,[]),
quickhull2(NewHulls++Unchanged, []);
quickhull2([],Completed) ->
[Vs|| #hull{f=Vs} <- Completed].
remove_seen_hull([This=#hull{p=Plane,f=F,os=Os}|R],
Eds0,Point,Os0,Ignore) ->
case check_plane(Point,Plane) of
{true,_} ->
remove_seen_hull(R,mk_eds(F,Eds0),Point,Os++Os0,Ignore);
{false,_} ->
remove_seen_hull(R,Eds0,Point,Os0,[This|Ignore])
end;
remove_seen_hull([],Eds,_,Os,Ignored) ->
{gb_sets:to_list(Eds),Os,Ignored}.
create_new_hulls([{V1,V2}|R],Os0,Point,Acc) ->
Hull = #hull{p=Plane} = hull([V1,V2,Point]),
{Os,Inside} = split_outside(Os0,Plane,0.0,[],[]),
create_new_hulls(R,Inside,Point,[Hull#hull{os=Os}|Acc]);
create_new_hulls([],_Inside,_,Acc) ->
Acc.
split_outside([V|R],Plane,Worst,InFront0,Behind) ->
case check_plane(V,Plane) of
{true,D} ->
{WP,InFront} = worst(V,D,Worst,InFront0),
split_outside(R,Plane,WP,InFront,Behind);
{false,_} ->
split_outside(R,Plane,Worst,InFront0,[V|Behind])
end;
split_outside([],_,_,InFront,Behind) ->
{InFront,Behind}.
mk_eds([V1,V2,V3],T0) ->
T1 = add_edge(V1,V2,T0),
T2 = add_edge(V2,V3,T1),
add_edge(V3,V1,T2).
add_edge(V1,V2,T) ->
%% Add only border edges
case gb_sets:is_member({V2,V1}, T) of
true -> gb_sets:delete({V2,V1},T);
false -> gb_sets:add_element({V1,V2},T)
end.
check_plane(V,{PC,PN}) ->
{Vec,D} = vec_dist(V,PC),
A = dot(Vec,PN),
if A > 0 -> % 1.0e-6 ->
{true, D*A};
true ->%%A < 1.0e-6 ->
{false,-D*A}
end.
minmax_x([This|R],Old,Max,Acc) when This < Old ->
minmax_x(R,This,Max,[Old|Acc]);
minmax_x([This|R],Min,Old,Acc) when This > Old ->
minmax_x(R,Min,This,[Old|Acc]);
minmax_x([This|R],Min,Max,Acc) ->
minmax_x(R,Min,Max,[This|Acc]);
minmax_x([],Min,Max,Acc) ->
{Min,Max,Acc}.
max_zy([This|R],Test,Val,BestSoFar,Acc) ->
case Test(This) of
Better when Better > Val ->
max_zy(R,Test,Better,This,[BestSoFar|Acc]);
_Worse ->
max_zy(R,Test,Val,BestSoFar,[This|Acc])
end;
max_zy([],_,_,Best,Acc) -> {Best,Acc}.
initial_split([V|Vs],Plane,WP0,Pos0,WN0,Neg0) ->
case check_plane(V,Plane) of
{true,D} ->
{WP,Pos} = worst(V,D,WP0,Pos0),
initial_split(Vs,Plane,WP,Pos,WN0,Neg0);
{false,D} ->
{WN,Neg} =worst(V,D,WN0,Neg0),
initial_split(Vs,Plane,WP0,Pos0,WN,Neg)
end;
initial_split([],_,_,Pos,_,Neg) ->
{Pos,Neg}.
worst(V,This,D,[W|List]) when This < D ->
{D,[W,V|List]};
worst(V,D,_Worst,List) ->
{D,[V|List]}.
hull(Vs) ->
#hull{f=Vs,p={average(Vs),normal(Vs)}}.
vec_dist({V10,V11,V12}, {V20,V21,V22})
when is_float(V10), is_float(V11), is_float(V12),
is_float(V20), is_float(V21), is_float(V22)->
X = V10-V20,
Y = V11-V21,
Z = V12-V22,
try
D = math:sqrt(X*X+Y*Y+Z*Z),
{{X/D,Y/D,Z/D},D}
catch error:badarith ->
{{0.0,0.0,0.0},0.0}
end.
%%%%%%%%%%%%%%%%%%%%% QHULL %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Creates a Symmetric covariance matrix from a list of triangles.
covariance_matrix(Faces) ->
N = length(Faces),
C0 = foldl(fun(Vs,Acc) -> add(average(Vs),Acc) end,
{0.0,0.0,0.0}, Faces),
{Cx,Cy,Cz} = e3d_vec:mul(C0,1/N),
M0 = foldl(fun([{X00,Y00,Z00},{X10,Y10,Z10},{X20,Y20,Z20}],
{M11,M12,M13,M22,M23,M33}) ->
X0=X00-Cx,X1=X10-Cx,X2=X20-Cx,
Y0=Y00-Cy,Y1=Y10-Cy,Y2=Y20-Cy,
Z0=Z00-Cz,Z1=Z10-Cz,Z2=Z20-Cz,
{X0*X0+X1*X1+X2*X2+M11,
X0*Y0+X1*Y1+X2*Y2+M12,
X0*Z0+X1*Z1+X2*Z2+M13,
Y0*Y0+Y1*Y1+Y2*Y2+M22,
Y0*Z0+Y1*Z1+Y2*Z2+M23,
Z0*Z0+Z1*Z1+Z2*Z2+M33}
end,{0.0,0.0,0.0,0.0,0.0,0.0},Faces),
{M11,M21=M12,M31=M13,M22,M32=M23,M33} = M0,
D = 3*N,
{M11/D, M12/D, M13/D,
M21/D, M22/D, M23/D,
M31/D, M32/D, M33/D}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | e3d/e3d_bv.erl | 0.504883 | 0.538741 | e3d_bv.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% Defines a repository.
%%%
%%% A repository maps to an underlying data store, controlled by the adapter.
%%% For example, CrossDB ships with a `xdb_mnesia_adapter' that stores data into
%%% Mnesia database.
%%%
%%% When used, the repository expects the `otp_app' and `adapter' as options.
%%% The `otp_app' should point to an OTP application that has the repository
%%% configuration, and the `adapter' is a compile-time option that specifies
%%% the adapter itself and should point to an existing and valid module that
%%% implements the `xdb_adapter' behaviour. For example, the repository:
%%%
%%% ```
%%% -module(blog_repo).
%%%
%%% -include_lib("cross_db/include/xdb.hrl").
%%% -repo([{otp_app, blog}, {adapter, xdb_mnesia_adapter}]).
%%% '''
%%%
%%% The configuration for the repo is mandatory, the previous repo
%%% could be configured with:
%%%
%%% ```
%%% [
%%% {cross_db, [
%%% {my_repo, [
%%% {adapter, xdb_mnesia_adapter},
%%% {ram_copies, local},
%%% {schemas, [person]}
%%% ]}
%%% ]}
%%% ].
%%% '''
%%%
%%% Most of the configuration that goes into the `*.config' file is specific
%%% to the adapter, so check the documentation provided for each adapter.
%%% However, some configuration is shared across
%%% all adapters, they are:
%%%
%%% <ul>
%%% <li>
%%% `adapter' - a compile-time option that specifies the adapter itself.
%%% As a compile-time option, it may also be given as an option to
%%% `-repo([..])'.
%%% </li>
%%% </ul>
%%%
%%% <h3>Shared Options</h3>
%%%
%%% The following options are supported almost for all repositories:
%%%
%%% <ul>
%%% <li>
%%% `timeout' - The time in milliseconds to wait for the query call to
%%% finish, `infinity' will wait indefinitely (default: 15000).
%%% </li>
%%% <li>
%%% `pool_timeout' - The time in milliseconds to wait for calls to the pool
%%% to finish, `infinity' will wait indefinitely (default: 5000).
%%% </li>
%%% </ul>
%%%
%%% For extra options, check adapters documentation.
%%%
%%% @reference See
%%% <a href="https://github.com/cabol/cross_db/blob/master/guides/repo-api.md">Repo API</a>
%%% @end
%%% @end
%%%-------------------------------------------------------------------
-module(xdb_repo).
%%%===================================================================
%%% Types
%%%===================================================================
%% Repo type
-type t() :: module().
%% Write command response (delete, insert and update)
-type w_respose() :: {ok, xdb_schema:t()}
| {error, xdb_changeset:t()}
| no_return().
%% Execute the actiopn or raise an error
-type exec_or_raise(R) :: R | no_return().
-export_type([
t/0,
w_respose/0,
exec_or_raise/1
]).
%%%===================================================================
%%% API
%%%===================================================================
-optional_callbacks([init/1, in_transaction/0, rollback/1, transaction/2]).
-callback init(Config) -> Res when
Config :: xdb_lib:keyword(),
Res :: {ok, xdb_lib:keyword()} | ignore.
-callback all(Queryable, Opts) -> Res when
Queryable :: xdb_query:t() | xdb_query:queryable(),
Opts :: xdb_lib:keyword(),
Res :: [xdb_schema:t()] | no_return().
-callback get(Queryable, Id, Opts) -> Res when
Queryable :: xdb_query:queryable(),
Id :: any(),
Opts :: xdb_lib:keyword(),
Res :: xdb_schema:t() | undefined | no_return().
-callback get_or_raise(Queryable, Id, Opts) -> Res when
Queryable :: xdb_query:queryable(),
Id :: any(),
Opts :: xdb_lib:keyword(),
Res :: xdb_schema:t() | no_return().
-callback get_by(Queryable, Clauses, Opts) -> Res when
Queryable :: xdb_query:queryable(),
Clauses :: xdb_lib:keyword(),
Opts :: xdb_lib:keyword(),
Res :: xdb_schema:t() | undefined | no_return().
-callback get_by_or_raise(Queryable, Clauses, Opts) -> Res when
Queryable :: xdb_query:queryable(),
Clauses :: xdb_lib:keyword(),
Opts :: xdb_lib:keyword(),
Res :: xdb_schema:t() | no_return().
-callback insert(Schema, Opts) -> Res when
Schema :: xdb_schema:t(),
Opts :: xdb_lib:keyword(),
Res :: w_respose().
-callback insert_or_raise(Schema, Opts) -> Res when
Schema :: xdb_schema:t(),
Opts :: xdb_lib:keyword(),
Res :: exec_or_raise(xdb_schema:t()).
-callback insert_all(SchemaMod, Entries, Opts) -> Res when
SchemaMod :: module(),
Entries :: [xdb_schema:fields()],
Opts :: xdb_lib:keyword(),
Count :: integer(),
Returning :: [xdb_schema:fields()] | undefined,
Res :: {Count, Returning} | no_return().
-callback delete(Data, Opts) -> Res when
Data :: xdb_schema:t() | xdb_changeset:t(),
Opts :: xdb_lib:keyword(),
Res :: w_respose().
-callback delete_or_raise(Data, Opts) -> Res when
Data :: xdb_schema:t() | xdb_changeset:t(),
Opts :: xdb_lib:keyword(),
Res :: exec_or_raise(xdb_schema:t()).
-callback delete_all(Queryable, Opts) -> Res when
Queryable :: xdb_query:t() | xdb_query:queryable(),
Opts :: xdb_lib:keyword(),
Res :: {integer(), [any()] | undefined} | no_return().
-callback update(Changeset, Opts) -> Res when
Changeset :: xdb_changeset:t(),
Opts :: xdb_lib:keyword(),
Res :: w_respose().
-callback update_or_raise(Changeset, Opts) -> Res when
Changeset :: xdb_changeset:t(),
Opts :: xdb_lib:keyword(),
Res :: exec_or_raise(xdb_schema:t()).
-callback update_all(Queryable, Updates, Opts) -> Res when
Queryable :: xdb_query:t() | xdb_query:queryable(),
Updates :: xdb_lib:keyword(),
Opts :: xdb_lib:keyword(),
Res :: {integer(), [any()] | undefined} | no_return().
-callback in_transaction() -> boolean().
-callback rollback(any()) -> no_return().
-callback transaction(Fun, Opts) -> Res when
Fun :: fun(() -> any()),
Opts :: xdb_lib:keyword(),
Res :: {ok, any()} | {error, any()}. | src/xdb_repo.erl | 0.686895 | 0.51068 | xdb_repo.erl | starcoder |
%%%=============================================================================
%%% @doc Advent of code puzzle solution
%%% @end
%%%=============================================================================
-module(aoc2020_day20).
-behavior(aoc_puzzle).
-export([ parse/1
, solve1/1
, solve2/1
, info/0
]).
-include_lib("stdlib/include/assert.hrl").
-include("aoc_puzzle.hrl").
%%------------------------------------------------------------------------------
%% @doc info/0
%% Returns info about this puzzle.
%% @end
%%------------------------------------------------------------------------------
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{ module = ?MODULE
, year = 2020
, day = 20
, name = "Jurassic Jigsaw"
, expected = {66020135789767, 1537}
, has_input_file = true
}.
%%==============================================================================
%% Types
%%==============================================================================
-type tile_id() :: {TileNum :: integer(),
Symmetry :: atom()}.
-type input_type() :: #{tile_id() => map()}.
-type result1_type() :: integer().
-type result2_type() :: result1_type().
%%------------------------------------------------------------------------------
%% @doc parse/1
%% Parses input file.
%% @end
%%------------------------------------------------------------------------------
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
L = binary:split(Input, <<"Tile ">>, [global]),
lists:foldl(fun(<<>>, Acc) -> Acc;
(TileBin, Acc) ->
maps:merge(Acc, parse_tile(TileBin))
end, #{}, L).
%%------------------------------------------------------------------------------
%% @doc solve1/1
%% Solves part 1. Receives parsed input as returned from parse/1.
%% @end
%%------------------------------------------------------------------------------
-spec solve1(Tiles :: input_type()) -> result1_type().
solve1(Tiles) ->
Size = floor(math:sqrt(maps:size(Tiles) div 8)),
PlacedTiles = place_tiles(Tiles),
lists:foldl(
fun(Coord, Acc) ->
{TileId, _} = maps:get(Coord, PlacedTiles),
Acc * TileId
end, 1, [{0, 0},
{Size - 1, 0},
{0, Size - 1},
{Size - 1, Size - 1}]).
%%------------------------------------------------------------------------------
%% @doc solve2/1
%% Solves part 2. Receives parsed input as returned from parse/1.
%% @end
%%------------------------------------------------------------------------------
-spec solve2(Tiles :: input_type()) -> result2_type().
solve2(Tiles) ->
Size = floor(math:sqrt(maps:size(Tiles) div 8)),
PlacedTiles = place_tiles(Tiles),
FinalGrid = join_tiles(PlacedTiles, Tiles, Size),
SM = sea_monster(),
maps:fold(
fun(_Id, Grid, not_found) ->
AllHashes = sets:from_list(maps:keys(Grid) -- [size]),
NumHashes = sets:size(AllHashes),
RemainingPixels = find_sea_monster(SM, Grid, AllHashes),
case sets:size(RemainingPixels) of
N when N < NumHashes -> N;
_ -> not_found
end;
(_Id, _Grid, Solution) -> Solution
end, not_found, all_symmetries(final, FinalGrid)).
%%==============================================================================
%% Helpers
%%==============================================================================
place_tiles(Tiles) ->
Size = floor(math:sqrt(maps:size(Tiles) div 8)),
[First|_Rest] = [{X, Y} || X <- lists:seq(0, Size - 1),
Y <- lists:seq(0, Size - 1)],
BorderMap = border_map(Tiles),
InvBorderMap = inv_border_map(Tiles),
[StartTile|_] = find_ne_corner_tile(Tiles, BorderMap, InvBorderMap),
%% Place the first tile
PlacedTiles = #{First => StartTile},
RemainingTiles = remove_tile(StartTile, Tiles),
%% Place remaining tiles, row-by-row
place_row(0, Size, StartTile, RemainingTiles,
BorderMap, PlacedTiles).
%% Build coord-map of the sea monster.
sea_monster() ->
Width = 20,
Lines = <<" # ",
"# ## ## ###",
" # # # # # # ">>,
%% Coordinates in the resulting map are relative to the tip of the
%% sea monster's tail
lists:foldl(
fun({Offset, _}, Acc) ->
maps:put({Offset rem Width,
(Offset div Width) - 1}, $#, Acc)
end, #{}, binary:matches(Lines, <<"#">>)).
%% Find sea monsters and remove any matching `#' from `AllHashes'.
find_sea_monster(SM, FinalGrid, AllHashes) ->
maps:fold(
fun(size, _, Acc) -> Acc;
(Coord, _, Acc) ->
is_sea_monster_at(Coord, SM, FinalGrid, Acc)
end, AllHashes, FinalGrid).
%% If there is a sea monster at {X, Y}, remove all hashes from `AllHashes'
%% which matches the sea monster pixels.
is_sea_monster_at({X, Y}, SM, Grid, AllHashes) ->
NumSMPixels = maps:size(SM),
MatchingPixels =
lists:foldl(fun({SMX, SMY}, Acc) ->
Coord = {X + SMX, Y + SMY},
case maps:is_key(Coord, Grid) of
true -> [Coord|Acc];
false -> Acc
end
end, [], maps:keys(SM)),
case length(MatchingPixels) of
N when N == NumSMPixels ->
%% All pixels matched, remove them from `AllHashes'
sets:subtract(AllHashes, sets:from_list(MatchingPixels));
_ ->
AllHashes
end.
%% Join all the placed tiles into a big jigsaw.
join_tiles(PlacedTiles, Tiles, Size) ->
lists:foldl(
fun({X, Y} = Coord, Acc) ->
%% {X, Y} are here the coordinates of the tiles themselves within
%% the puzzle
TileId = maps:get(Coord, PlacedTiles),
TileData = maps:get(TileId, Tiles),
Width = maps:get(size, TileData),
InnerWidth = Width - 2,
MaxN = Width - 1,
Acc0 = maps:put(size, InnerWidth * Size, Acc),
maps:fold(
fun({X0, Y0}, _, InnerAcc) ->
%% {X0, Y0} are the coordinates of each pixel within the
%% tile
if (X0 == 0) orelse (X0 == MaxN) orelse
(Y0 == 0) orelse (Y0 == MaxN) ->
InnerAcc;
true ->
maps:put({(X * InnerWidth) + X0 - 1,
(Y * InnerWidth) + Y0 - 1},
$#, InnerAcc)
end;
(_, _, InnerAcc) ->
InnerAcc
end, Acc0, TileData)
end, #{},
[{X, Y} || X <- lists:seq(0, Size - 1),
Y <- lists:seq(0, Size - 1)]).
remove_tile({Num, _}, Tiles) ->
maps:filter(fun({TileNum, _}, _) when Num =/= TileNum -> true;
(_Id, _) -> false
end, Tiles).
%% place_row: This is the tricky part. Given `LeftTile`, finds next
%% tile to the right, places it, and continues until the row runs
%% out. Then, picks the next tile in the row underneath, and does the
%% same thing.
place_row(Y, Size, LeftTile, RemainingTiles, BorderMap, PlacedTiles) ->
{_,
PlacedTilesOut0,
RemainingTilesOut0} =
lists:foldl(
fun(X, {LeftTileIn, PlacedTilesIn, RemainingTilesIn}) ->
[_N, _S, E, _W] = maps:get(LeftTileIn, BorderMap),
Coord = {X, Y},
{LeftNum, _} = LeftTileIn,
[{RightId, _RightData}] =
maps:to_list(
maps:filter(
fun({Num, _} = TileId, _TD) when Num =/= LeftNum ->
case maps:get(TileId, BorderMap) of
[_, _, _, RightW] when RightW == E -> true;
_ -> false
end;
(_, _) -> false
end, RemainingTilesIn)),
PlacedTilesOut = maps:put(Coord, RightId, PlacedTilesIn),
RemainingTilesOut = remove_tile(RightId, RemainingTilesIn),
LeftTileOut = RightId,
{LeftTileOut, PlacedTilesOut, RemainingTilesOut}
end, {LeftTile, PlacedTiles, RemainingTiles}, lists:seq(1, Size - 1)),
if Y + 1 == Size ->
%% No more rows to place
PlacedTilesOut0;
true ->
NewLeftTile = find_tile_below(LeftTile, RemainingTilesOut0, BorderMap),
Coord0 = {0, Y + 1},
PlacedTilesOut1 = maps:put(Coord0, NewLeftTile, PlacedTilesOut0),
RemainingTilesOut1 = remove_tile(NewLeftTile, RemainingTilesOut0),
place_row(Y + 1, Size, NewLeftTile, RemainingTilesOut1, BorderMap, PlacedTilesOut1)
end.
find_tile_below(Tile, RemainingTiles, BorderMap) ->
[_, S, _, _] = maps:get(Tile, BorderMap),
[{BelowId, _}] =
maps:to_list(
maps:filter(
fun({_Num, _} = TileId, _TD) ->
[BelowN|_] = maps:get(TileId, BorderMap),
BelowN == S
end, RemainingTiles)),
BelowId.
find_ne_corner_tile(Tiles, BorderMap, InvBorderMap) ->
maps:fold(
fun(TileId, _TileData, Acc) ->
Borders = maps:get(TileId, BorderMap),
case lists:map(
fun(BorderId) ->
case is_external_border(BorderId, InvBorderMap) of
true -> external;
false -> BorderId
end
end, Borders) of
%% Start with NW (top left) tile
%% Order is N S E W
[external, _S, _E, external] -> [TileId|Acc];
_Other -> Acc
end
end, [], Tiles).
%% External borders are borders which only belong to one tile.
is_external_border(BorderId, InvBorderMap) ->
length(maps:get(BorderId, InvBorderMap)) == 1.
%% Inverse border map; maps border ids to their tile numbers
inv_border_map(Tiles) ->
maps:fold(
fun({TileNum, _Sym}, Data, Acc) ->
Borders = borders(Data),
lists:foldl(
fun(Border, InnerAcc) ->
maps:update_with(
Border,
fun(Old) -> lists:usort([TileNum|Old]) end,
[TileNum], InnerAcc)
end, Acc, Borders)
end, #{}, Tiles).
%% Return a map of tile ids to their possible border ids
border_map(Tiles) ->
maps:fold(fun(TileId, TileData, Acc) ->
maps:put(TileId, borders(TileData), Acc)
end, #{}, Tiles).
%% ======================================================================
%% Parser
%% ======================================================================
parse_tile(TileBin) ->
[Header, Rows] = binary:split(TileBin, <<"\n">>),
{match, Matches} =
re:run(Header, "(\\d+):", [{capture, all_but_first, list}]),
TileNum = list_to_integer(hd(Matches)),
[{Width, _}|_] = binary:matches(Rows, <<"\n">>),
Offsets = binary:matches(Rows, <<"#">>),
TileData =
lists:foldl(fun({Offset, _}, Acc) ->
maps:put({Offset rem (Width + 1),
Offset div (Width + 1)}, $#, Acc)
end, #{}, Offsets),
all_symmetries(TileNum, maps:put(size, Width, TileData)).
shift_by_coord(Coord, Tile, N) ->
case maps:get(Coord, Tile, undefined) of
$# -> (N bsl 1) bor 1;
_ -> N bsl 1
end.
borders(Tile) ->
Size = maps:get(size, Tile),
L = lists:seq(0, Size - 1),
N = lists:foldl(fun(X, Acc) -> shift_by_coord({X, 0}, Tile, Acc) end, 0, L),
S = lists:foldl(fun(X, Acc) -> shift_by_coord({X, Size - 1}, Tile, Acc) end, 0, L),
E = lists:foldl(fun(Y, Acc) -> shift_by_coord({Size - 1, Y}, Tile, Acc) end, 0, L),
W = lists:foldl(fun(Y, Acc) -> shift_by_coord({0, Y}, Tile, Acc) end, 0, L),
[N, S, E, W].
%% ======================================================================
%% Helpers
%% ======================================================================
all_symmetries(Num, Rows) ->
TileSize = maps:get(size, Rows),
Max = TileSize - 1,
Rotate =
fun(R) ->
maps:fold(fun({X, Y}, Value, Acc) when (X >= 0) andalso (X =< Max) andalso
(Y >= 0) andalso (Y =< Max) ->
maps:put({Max - Y, X}, Value, Acc);
(K, V, Acc) ->
maps:put(K, V, Acc)
end, #{}, R)
end,
Flip =
fun(R) ->
maps:fold(fun({X, Y}, Value, Acc) when (X >= 0) andalso (X =< Max) andalso
(Y >= 0) andalso (Y =< Max) ->
maps:put({Max - X, Y}, Value, Acc);
(K, V, Acc) ->
maps:put(K, V, Acc)
end, #{}, R)
end,
R90 = Rotate(Rows),
R180 = Rotate(R90),
R270 = Rotate(R180),
FlipR0 = Flip(Rows),
FlipR90 = Rotate(FlipR0),
FlipR180 = Rotate(FlipR90),
FlipR270 = Rotate(FlipR180),
%% Self-test
?assertEqual(Rows, Rotate(R270)),
?assertEqual(Rows, Rotate(Rotate(Rotate(Rotate(Rows))))),
#{{Num, 'r0'} => Rows,
{Num, 'r90'} => R90,
{Num, 'r180'} => R180,
{Num, 'r270'} => R270,
{Num, 'f0'} => FlipR0,
{Num, 'f90'} => FlipR90,
{Num, 'f180'} => FlipR180,
{Num, 'f270'} => FlipR270}.
%%%_* Emacs ====================================================================
%%% Local Variables:
%%% allout-layout: t
%%% erlang-indent-level: 2
%%% End: | src/2020/aoc2020_day20.erl | 0.509764 | 0.550426 | aoc2020_day20.erl | starcoder |
-module(aoc2018_day03).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2018,
day = 3,
name = "No Matter How You Slice It",
expected = {105231, 164},
has_input_file = true}.
-type input_type() :: [{integer(), integer(), integer(), integer(), integer()}].
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
lists:map(fun(Line) ->
list_to_tuple(lists:map(fun list_to_integer/1, string:tokens(Line, "#@ ,:x")))
end,
string:tokens(binary_to_list(Input), "\n\r")).
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Areas) ->
ClaimedAreas = count_claims(Areas, #{}),
count_overlaps(ClaimedAreas).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Areas) ->
[{Id, _, _, _, _} | _] = lists:dropwhile(fun(A) -> overlaps(A, Areas) end, Areas),
Id.
%%% Part 1
%% Count the number of square inches which overlap by more than 2.
count_overlaps(Map) ->
count_overlaps0(maps:iterator(Map), 0).
count_overlaps0(It0, N) ->
case maps:next(It0) of
{_, V, It1} when V >= 2 ->
count_overlaps0(It1, N + 1);
{_, _, It1} ->
count_overlaps0(It1, N);
_ ->
N
end.
%% Returns a map from Pos -> NumberOfClaims, one position for each
%% square inch.
count_claims([], Map) ->
Map;
count_claims([{_Id, L, T, W, H} | Areas], Map) ->
NewMap = claim_area(L, T, W, H, Map),
count_claims(Areas, NewMap).
claim_area(L, T, W, H, Map) ->
Coords = [{X, Y} || X <- lists:seq(L, L + W - 1), Y <- lists:seq(T, T + H - 1)],
lists:foldl(fun(K, AccIn) -> maps:update_with(K, fun(V) -> V + 1 end, 1, AccIn) end,
Map,
Coords).
%%% Part 2
%% Does area A overlap any area in Areas?
overlaps(A, Areas) ->
lists:any(fun(A1) -> overlaps0(A, A1) end, Areas).
overlaps0(A, A) ->
false;
overlaps0(A1, A2) ->
{_, L1, T1, W1, H1} = A1,
{_, L2, T2, W2, H2} = A2,
%% Each of these are true if the two areas do not overlap (either
%% side-by-side or over-and-under, or both). If they are all
%% false, then the two areas overlap.
not ((L1 + W1 =< L2) or (L2 + W2 =< L1) or (T1 + H1 =< T2) or (T2 + H2 =< T1)). | src/2018/aoc2018_day03.erl | 0.509032 | 0.57681 | aoc2018_day03.erl | starcoder |
%%
%% Copyright (c) dushin.net
%% All rights reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(sht3x).
%%-----------------------------------------------------------------------------
%% @doc An AtomVM I2C driver for the Sensirion SHT3x series of digital temperature
%% and humidity sensors.
%%
%% The Sensirion SHT3[0|1|5] is a small sensor that can read temperature and humidity.
%% The chipset supports the I2C interfaces, with varying levels of accuracy.
%% This driver uses the AtomVM I2C interface for communicating with the SHT31.
%% This means you can take temperature and humidity readings using two GPIO
%% pins on your ESP32.
%%
%% Developers interact with this driver by starting an instance, specifying pins for
%% the I2C data and clock pins. Starting an instance of the driver yeilds a reference
%% that can be used in subsequent calls.
%%
%% The primary operation in this module is the take_reading/1 function, which takes
%% a reference to a SHT31 driver, and returns a reading expressed as a tuple containing
%% the temperature (in degrees celcius) and relative humidity (as a percentage).
%%
%% Note. The SHT31 sensor is a fairly dynamic sensor and can be used for
%% many different applications (e.g., weather collection, gaming, drones, etc).
%% The primary use-case for this driver is weather collection, which is assumed
%% to be a low frequency operation. Some of the SHT31 applications may require
%% additional support in this driver, which would be relatively straightforward
%% to support in future versions.
%%
%% Further information about the Sensirion SHT3x can be found in the reference
%% documentation:
%% https://www.sensirion.com/fileadmin/user_upload/customers/sensirion/Dokumente/2_Humidity_Sensors/Datasheets/Sensirion_Humidity_Sensors_SHT3x_Datasheet_digital.pdf
%%
%% @end
%%-----------------------------------------------------------------------------
-behaviour(gen_server).
-export([start/1, start/2, start_link/1, start_link/2, stop/1, take_reading/1, soft_reset/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
% -define(TRACE_ENABLED, true).
-include_lib("atomvm_lib/include/trace.hrl").
-type repeatability() :: high | medium | low.
-type clock_stretching() :: enabled | disabled.
-type options() :: #{
repeatability => repeatability(),
clock_stretching => clock_stretching()
}.
-type sht() :: pid().
-type fractional() :: 0..99.
-type temp_reading() :: {integer(), fractional()}.
-type humidity_reading() :: {integer(), fractional()}.
-type reading() :: {temp_reading(), humidity_reading()}.
-define(SHT31_BASE_ADDR, 16#44).
-define(DEFAULT_OPTIONS, #{
repeatability => high,
clock_stretching => disabled
}).
-record(state, {
i2c_bus,
options
}).
%%-----------------------------------------------------------------------------
%% @param SDAPin pin number for I2C SDA channel
%% @param SCLPin pin number for the I2C SCL channel
%% @returns {ok, SHT} on success, or {error, Reason}, on failure
%% @equiv start(SDAPin, SCLPin, [])
%% @doc Start the SHT31 driver.
%% @end
%%-----------------------------------------------------------------------------
-spec start(I2CBus::i2c_bus:i2c_bus()) -> {ok, SHT::sht()} | {error, Reason::term()}.
start(I2CBus) ->
start(I2CBus, maps:new()).
%%-----------------------------------------------------------------------------
%% @param SDAPin pin number for I2C SDA channel
%% @param SCLPin pin number for the I2C SCL channel
%% @param Options additional driver options
%% @returns {ok, SHT} on success, or {error, Reason}, on failure
%% @doc Start the SHT31 driver.
%%
%% This operation will start the SHT driver. Use the returned reference
%% in subsequent operations, such as for taking a reading.
%%
%% The Options parameter may be used to fine-tune behavior of the sensor,
%% but the default values should be sufficient for weather-station based
%% scenarios.
%%
%% @end
%%-----------------------------------------------------------------------------
-spec start(I2CBus::i2c_bus:i2c_bus(), Options::options()) -> {ok, SHT::sht()} | {error, Reason::term()}.
start(I2CBus, Options) ->
gen_server:start(?MODULE, {I2CBus, maps:merge(?DEFAULT_OPTIONS, Options)}, []).
%%-----------------------------------------------------------------------------
%% @param SDAPin pin number for I2C SDA channel
%% @param SCLPin pin number for the I2C SCL channel
%% @returns {ok, SHT} on success, or {error, Reason}, on failure
%% @equiv start(SDAPin, SCLPin, [])
%% @doc Start the SHT31 driver.
%% @end
%%-----------------------------------------------------------------------------
-spec start_link(I2CBus::i2c_bus:i2c_bus()) -> {ok, SHT::sht()} | {error, Reason::term()}.
start_link(I2CBus) ->
start_link(I2CBus, maps:new()).
%%-----------------------------------------------------------------------------
%% @param SDAPin pin number for I2C SDA channel
%% @param SCLPin pin number for the I2C SCL channel
%% @param Options additional driver options
%% @returns {ok, SHT} on success, or {error, Reason}, on failure
%% @doc Start the SHT31 driver.
%%
%% This operation will start the SHT driver. Use the returned reference
%% in subsequent operations, such as for taking a reading.
%%
%% The Options parameter may be used to fine-tune behavior of the sensor,
%% but the default values should be sufficient for weather-station based
%% scenarios.
%%
%% @end
%%-----------------------------------------------------------------------------
-spec start_link(I2CBus::i2c_bus:i2c_bus(), Options::options()) -> {ok, SHT::sht()} | {error, Reason::term()}.
start_link(I2CBus, Options) ->
gen_server:start_link(?MODULE, {I2CBus, maps:merge(?DEFAULT_OPTIONS, Options)}, []).
%%-----------------------------------------------------------------------------
%% @param SHT a reference to the SHT instance created via start
%% @returns ok if successful; {error, Reason}, otherwise
%% @doc Stop the SHT31 driver.
%%
%% Note. This function is not well tested and its use may result in a memory leak.
%% @end
%%-----------------------------------------------------------------------------
-spec stop(SHT::sht()) -> ok | {error, Reason::term()}.
stop(SHT) ->
gen_server:stop(SHT).
%%-----------------------------------------------------------------------------
%% @param SHT a reference to the SHT instance created via start
%% @returns {ok, Reading} if successful; {error, Reason}, otherwise
%% @doc Take a reading from the sensor.
%%
%% This function will take a reading from the attached SHT31 sensor.
%%
%% The return value is a 2-ary tuple containing the temperature
%% and humidty readings from the sensor. Each element of the tuple is a
%% pair, containing the value in integral and fractional parts.
%%
%% Temperature is expressed in degrees celsius,
%% and humidity is expressed as relative humidity.
%% @end
%%-----------------------------------------------------------------------------
-spec take_reading(SHT::sht()) -> {ok, Reading::reading()} | {error, Reason::term()}.
take_reading(SHT) ->
gen_server:call(SHT, take_reading).
%%-----------------------------------------------------------------------------
%% @param SHT a reference to the SHT instance created via start
%% @returns ok
%% @doc Perform a soft reset of the SHT31 sensor.
%%
%% A soft reset will set all of the registers in the device
%% to values in section 5.3 of the reference documentation.
%% @end
%%-----------------------------------------------------------------------------
-spec soft_reset(SHT::sht()) -> ok.
soft_reset(SHT) ->
gen_server:call(SHT, soft_reset).
%%
%% gen_server API
%%
%% @hidden
init({I2CBus, Options}) ->
?TRACE("Initializing sht3x instance ~p with I2CBus ~p and Options ~p", [self(), I2CBus, Options]),
{ok, #state{
i2c_bus = I2CBus,
options = Options
}}.
%% @hidden
handle_call(take_reading, _From, State) ->
?TRACE("Taking reading ...", []),
Reply = do_take_reading(State),
{reply, Reply, State};
handle_call(soft_reset, _From, State) ->
%% TODO fix
%% write_byte(State#state.i2c_bus, ?SHT31_REGISTER_SOFT_RESET, 16#01)
{reply, {error, unimplemented}, State};
handle_call(Request, _From, State) ->
{reply, {error, {unknown_request, Request}}, State}.
%% @hidden
handle_cast(_Msg, State) ->
{noreply, State}.
%% @hidden
handle_info(_Info, State) ->
{noreply, State}.
%% @hidden
terminate(_Reason, _State) ->
ok.
%% @hidden
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%
%% Internal functions
%%
%% @private
do_take_reading(State) ->
#state{
i2c_bus = I2CBus
} = State,
%%
%% Tell the sensor to take a temp and humidity reading
%%
{MSB, LSB} = create_measurement_command(State#state.options),
?TRACE("measurement_command: ~p", [{MSB, LSB}]),
ok = i2c_bus:enqueue(
I2CBus, ?SHT31_BASE_ADDR, [
fun(Port, _Address) ->
?TRACE("writing ~p", [MSB]),
ok = i2c:write_byte(Port, MSB)
end,
fun(Port, _Address) ->
?TRACE("writing ~p", [LSB]),
ok = i2c:write_byte(Port, LSB)
end
]
),
timer:sleep(20),
%%
%% Read the data in memory.
%%
case read_bytes(I2CBus, 6) of
error ->
?TRACE("Bad reading!", []),
{error, bad_reading};
Bytes ->
{ok, to_reading(Bytes)}
end.
to_reading(Bytes) ->
?TRACE("to_reading: ~p", [Bytes]),
<<
TempReading:16, _TempChecksum:8,
HumidityReading:16, _Humidityksum:8
>> = Bytes,
?TRACE("TempReading: ~p", [TempReading]),
?TRACE("HumidityReading: ~p", [HumidityReading]),
%% TODO compute/varify checksum
%%
%% Normalize into {integer, fractional} values.
%%
Reading = {
compute_temp(TempReading),
compute_humidity(HumidityReading)
},
?TRACE("Reading: ~p", [Reading]),
Reading.
%% @private
create_measurement_command(Options) ->
#{
repeatability := Repeatability,
clock_stretching := ClockStretching
} = Options,
{get_msb(ClockStretching), get_lsb(ClockStretching, Repeatability)}.
%% @private
get_msb(enabled) ->
16#2C;
get_msb(disabled) ->
16#24.
%% @private
get_lsb(enabled, high) ->
16#06;
get_lsb(enabled, medium) ->
16#0D;
get_lsb(enabled, low) ->
16#10;
get_lsb(disabled, high) ->
16#0;
get_lsb(disabled, medium) ->
16#0B;
get_lsb(disabled, low) ->
16#16.
%% @private
read_bytes(I2CBus, Len) ->
?TRACE("Reading bytes off I2CBus ~p Len ~p ...", [I2CBus, Len]),
i2c_bus:read_bytes(I2CBus, ?SHT31_BASE_ADDR, Len).
%% @private
compute_temp(TempReading) ->
rational:to_decimal(
rational:add(-45, rational:multiply(175, rational:divide(TempReading, 65535))),
3
).
%% @private
compute_humidity(HumidityReading) ->
rational:to_decimal(
rational:multiply(100, rational:divide(HumidityReading, 65535)),
2
). | src/sht3x.erl | 0.73307 | 0.411939 | sht3x.erl | starcoder |
-module(spiral_memory).
-export([manhattan_distance/1, find_point/1, shell/1, test/0, stress_test/1]).
origin() ->
{point, 0, 0}.
add_point({point, X1, Y1}, {point, X2, Y2}) ->
{point, X1 + X2, Y1 + Y2}.
% Defaults to distance from origin
manhattan_distance({point, _, _} = Point) ->
manhattan_distance(Point, origin()).
manhattan_distance({point, X1, Y1}, {point, X2, Y2}) ->
abs(X1 - X2) + abs(Y1 - Y2).
shell(X) ->
shell(X, 1, 0).
shell(X, Base, Acc) ->
case X =< math:pow(Base, 2) of
true -> Acc;
false -> shell(X, Base + 2, Acc + 1)
end.
find_point(X) when X > 0 ->
find_point(X, 1, origin(), origin()).
find_point(X, X, {point, _, _} = Point, _) ->
Point;
find_point(X, CurrentIndex, {point, _, _} = Point, {point, _, _} = State) ->
{NextPoint, NextState} = next_point(Point, State),
find_point(X, CurrentIndex + 1, NextPoint, NextState).
% Initial position
next_point({point, 0, 0}, _) ->
{{point, 1, 0}, {point, 0, 1}};
% Upper Right Corner
next_point({point, X, X}, _) when X > 0 ->
{{point, X - 1, X}, {point, -1, 0}};
% Lower Left Corner
next_point({point, X, X}, _) ->
{{point, X + 1, X}, {point, 1, 0}};
% Lower Right Corner
next_point({point, X, Y}, _) when X == abs(Y) ->
{{point, X + 1, Y}, {point, 0, 1}};
% Upper Left Corner
next_point({point, X, Y}, _) when Y == abs(X) ->
{{point, X, Y - 1}, {point, 0, -1}};
next_point({point, _, _} = P, {point, _, _} = DP) ->
{add_point(P, DP), DP}.
test() ->
test_find_point(1, {point, 0, 0}),
test_find_point(2, {point, 1, 0}),
test_find_point(3, {point, 1, 1}),
test_find_point(4, {point, 0, 1}),
test_find_point(5, {point, -1, 1}),
test_find_point(6, {point, -1, 0}),
test_find_point(7, {point, -1, -1}),
test_find_point(8, {point, 0, -1}),
test_find_point(9, {point, 1, -1}),
test_find_point(10, {point, 2, -1}),
test_find_point(12, {point, 2, 1}),
test_find_point(23, {point, 0, -2}).
test_find_point(Input, Expected) ->
Result = find_point(Input),
io:format("Input: ~w | Expected: ~w | Got: ~w | Result ~w~n", [Input, Expected, Result, Result =:= Expected]).
% Part 2
sum_surrounding({point, X, Y}, Array) ->
get_value(X + 1, Y, Array) +
get_value(X + 1, Y + 1, Array) +
get_value(X, Y + 1, Array) +
get_value(X - 1, Y + 1, Array) +
get_value(X - 1, Y, Array) +
get_value(X - 1, Y - 1, Array) +
get_value(X, Y - 1, Array) +
get_value(X + 1, Y - 1, Array).
get_value({point, X, Y}, Array) ->
get_value(X, Y, Array).
get_value(X, Y, Array) ->
array:get(X * 1000 + Y, Array).
set_value({point, X, Y}, Value, Array) ->
set_value(X, Y, Value, Array).
set_value(X, Y, Value, Array) ->
array:set(X * 1000 + Y, Value, Array).
stress_test(Target) ->
stress_test(Target, array:new({default, 0}), origin(), none, {point, 500, 500}).
stress_test(Target, Array, {point, _, _} = Point, State, Center) ->
{NextPoint, NextState} = next_point(Point, State),
CorrectedPoint = add_point(Point, Center),
NextValue = case Point of
{point, 0, 0} -> 1;
_ -> sum_surrounding(CorrectedPoint, Array)
end,
case Target < NextValue of
true -> {NextValue, Point};
false -> NewArr = set_value(CorrectedPoint, NextValue, Array),
stress_test(Target, NewArr, NextPoint, NextState, Center)
end. | 2017/03/spiral_memory.erl | 0.512449 | 0.706225 | spiral_memory.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @author wang <<EMAIL>>
%%% @copyright (C) 2015
%%% @doc
%%%
%%% @end
%%% Created : 2015-03-23 14:46
%%%-------------------------------------------------------------------
-module(arrow).
-author("wang").
%% API
-export([now/0,
timestamp/0,
timestamp/1,
format/1,
get/0,
get/1,
diff/2,
compare/2,
in/2,
add_years/2,
add_months/2,
add_days/2,
add_hours/2,
add_minutes/2,
add_seconds/2]).
-type arrow_datetime() :: integer() | nonempty_string() | binary() | calendar:datetime().
-type arrow_range() :: {arrow_datetime(), arrow_datetime()}.
-type arrow_compare() :: -1 | 0 | 1.
-export_type([arrow_datetime/0, arrow_range/0, arrow_compare/0]).
-define(BASE_SECONDS, 62167219200).
%% @doc
%% Get Now with format `{MegaSecs, Secs, MicroSecs}'
%% @end
-spec now() -> Return when
Return :: erlang:timestamp().
now() ->
os:timestamp().
%% @doc
%% Get Now timestamp
%% @end
-spec timestamp() -> UnixTimestamp when
UnixTimestamp :: integer().
timestamp() ->
unix_timestamp(arrow:now()).
%% @doc
%% Get timestamp of given time
%% @end
-spec timestamp(Input) -> UnixTimestamp when
Input :: arrow_datetime(),
UnixTimestamp :: integer().
timestamp(Datetime) ->
unix_timestamp(arrow:get(Datetime)).
%% @doc
%% Format given time to format `YYYY-MM-DD HH:mm:ss'
%% @end
-spec format(Input) -> DateString when
Input :: arrow_datetime(),
DateString :: nonempty_string().
format(Input) ->
{{Year, Month, Day}, {Hour, Minute, Second}} = arrow:get(Input),
Text = io_lib:fwrite("~4..0b-~2..0b-~2..0b ~2..0b:~2..0b:~2..0b",
[Year, Month, Day, Hour, Minute, Second]
),
lists:flatten(Text).
%% @doc
%% Get Current time as format `{{Year, Month, Day}, {Hour, Minute, Second}}'
%% @end
-spec get() -> Datetime when
Datetime :: calendar:datetime().
get() ->
calendar:universal_time().
%% @doc
%% Parse given time to format `{{Year, Month, Day}, {Hour, Minute, Second}}'
%% @end
-spec get(Input) -> Datetime when
Input :: arrow_datetime(),
Datetime :: calendar:datetime().
get(UnixTimestamp) when is_integer(UnixTimestamp) ->
unix_timestamp_to_datetime(UnixTimestamp);
get(DateString) when is_list(DateString) andalso length(DateString) == 19 ->
{ok, [Year, Month, Day, Hour, Minute, Second], []} = io_lib:fread("~d-~d-~d ~d:~d:~d", DateString),
{{Year, Month, Day}, {Hour, Minute, Second}};
get(DateBinary) when is_binary(DateBinary) ->
arrow:get(binary_to_list(DateBinary));
get({{_, _, _}, {_, _, _}} = Datetime) ->
Datetime.
%% @doc
%% Diff seconds of D1 and D2. `D1 - D2'
%% @end
-spec diff(D1, D2) -> DiffSeconds when
D1 :: arrow_datetime(),
D2 :: arrow_datetime(),
DiffSeconds :: integer().
diff(D1, D2) ->
D1Seconds = timestamp(D1),
D2Seconds = timestamp(D2),
D1Seconds - D2Seconds.
%% @doc
%% Compare D1 and D2.
%% @end
-spec compare(D1, D2) -> CompareResult when
D1 :: arrow_datetime(),
D2 :: arrow_datetime(),
CompareResult :: arrow_compare().
compare(D1, D2) ->
case diff(D1, D2) of
N when N == 0 -> 0;
N when N > 0 -> 1;
N when N < 0 -> -1
end.
%% @doc
%% Check whether D2 or D2Range is in D1Range.
%% @end
-spec in(D1Range, D2) -> Result when
D1Range :: arrow_range(),
D2 :: arrow_datetime(),
Result :: boolean().
in({D1Start, D1End}, D2) ->
CompareWithStart = compare(D2, D1Start),
ComareWithEnd = compare(D2, D1End),
CompareWithStart >= 0 andalso ComareWithEnd =< 0.
%% @doc
%% Add Years to Input Datetime
%% @end
-spec add_years(Input, Years) -> Datetime when
Input :: arrow_datetime(),
Years :: integer(),
Datetime :: calendar:datetime().
add_years(Datetime, Years) ->
{{Year, Month, Day}, Time} = arrow:get(Datetime),
{{Year + Years, Month, Day}, Time}.
%% @doc
%% Add Months to Input Datetime
%% @end
-spec add_months(Input, Months) -> Datetime when
Input :: arrow_datetime(),
Months :: integer(),
Datetime :: calendar:datetime().
add_months(Datetime, Months) ->
{{Year, Month, _Day}, _Time} = arrow:get(Datetime),
AddDay = do_add_months(Months, 0, {Year, Month, 0}),
add_days(Datetime, AddDay).
%% @doc
%% Add Days to Input Datetime
%% @end
-spec add_days(Input, Days) -> Datetime when
Input :: arrow_datetime(),
Days :: integer(),
Datetime :: calendar:datetime().
add_days(Datetime, Days) ->
{Date, Time} = arrow:get(Datetime),
TotalDays = calendar:date_to_gregorian_days(Date) + Days,
NewDate = calendar:gregorian_days_to_date(TotalDays),
{NewDate, Time}.
%% @doc
%% Add Hours to Input Datetime
%% @end
-spec add_hours(Input, Hours) -> Datetime when
Input :: arrow_datetime(),
Hours :: integer(),
Datetime :: calendar:datetime().
add_hours(Datetime, Hours) ->
add_seconds(Datetime, Hours*3600).
%% @doc
%% Add Minutes to Input Datetime
%% @end
-spec add_minutes(Input, Minutes) -> Datetime when
Input :: arrow_datetime(),
Minutes :: integer(),
Datetime :: calendar:datetime().
add_minutes(Datetime, Minutes) ->
add_seconds(Datetime, Minutes*60).
%% @doc
%% Add Seconds to Input Datetime
%% @end
-spec add_seconds(Input, Seconds) -> Datetime when
Input :: arrow_datetime(),
Seconds :: integer(),
Datetime :: calendar:datetime().
add_seconds(Datetime, Seconds) ->
DatetimeNormalized = arrow:get(Datetime),
TotalSeconds = calendar:datetime_to_gregorian_seconds(DatetimeNormalized) + Seconds,
calendar:gregorian_seconds_to_datetime(TotalSeconds).
%%% ====================================================
%%% Interal Functions
%%% ====================================================
-spec unix_timestamp(Input) -> UnixTimestamp when
Input :: erlang:timestamp() | calendar:datetime(),
UnixTimestamp :: integer().
unix_timestamp({MegaSecs, Secs, _MicroSecs}) ->
MegaSecs * 1000000 + Secs;
unix_timestamp(DateTime) ->
calendar:datetime_to_gregorian_seconds(DateTime) - ?BASE_SECONDS.
-spec unix_timestamp_to_datetime(UnixTimestamp) -> Datetime when
UnixTimestamp :: integer(),
Datetime :: calendar:datetime().
unix_timestamp_to_datetime(Timestamp) ->
calendar:gregorian_seconds_to_datetime(Timestamp + ?BASE_SECONDS).
-spec do_add_months(AddMonth, Acc, {Year, Month, AddDays}) -> AddDaysResult when
AddMonth :: integer(),
Acc :: integer(),
Year :: integer(),
Month :: 1..12,
AddDays :: integer(),
AddDaysResult :: integer().
do_add_months(AddMonth, Acc, {_, _, AddDays}) when AddMonth == Acc ->
AddDays;
do_add_months(AddMonth, Acc, {Year, Month, AddDays}) when AddMonth < Acc ->
{NewYear, NewMonth} =
case Month - 1 < 1 of
true ->
{Year-1, 12};
false ->
{Year, Month-1}
end,
do_add_months(AddMonth, Acc-1, {NewYear, NewMonth, AddDays-calendar:last_day_of_the_month(Year, Month)});
do_add_months(AddMonth, Acc, {Year, Month, AddDays}) when AddMonth > Acc ->
{NewYear, NewMonth} =
case Month + 1 > 12 of
true ->
{Year+1, 1};
false ->
{Year, Month+1}
end,
do_add_months(AddMonth, Acc+1, {NewYear, NewMonth, AddDays+calendar:last_day_of_the_month(Year, Month)}). | src/arrow.erl | 0.542863 | 0.402627 | arrow.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% This module provides the public API for eflambe. These public functions are
%%% intended to be invoked by the end user to perform profiling of their
%%% application.
%%% @end
%%%-------------------------------------------------------------------
-module(eflambe).
%% Application callbacks
-export([capture/1, capture/2, capture/3,
apply/1, apply/2]).
-type mfa_fun() :: {atom(), atom(), list()} | fun().
-type program() :: hotspot | speedscope.
-type option() :: {output_directory, binary()} | {output_format, binary()} | {open, program()}.
-type options() :: [option()].
-define(FLAGS, [call, return_to, running, procs, garbage_collection, arity,
timestamp, set_on_spawn]).
%%--------------------------------------------------------------------
%% @doc
%% Starts capturing of function call data for any invocation of the specified
%% MFA and of a flamegraph for
%% the current process.
%%
%% @end
%%--------------------------------------------------------------------
-spec capture(MFA :: mfa()) -> ok.
capture(MFA) ->
capture(MFA, 1).
-spec capture(MFA :: mfa(), NumCalls :: integer()) -> ok.
capture(MFA, NumCalls) ->
capture(MFA, NumCalls, []).
-spec capture(MFA :: mfa(), NumCalls :: integer(), Options :: options()) -> ok.
capture({Module, Function, Arity}, NumCalls, Options) ->
ok = meck:new(Module, [unstick, passthrough]),
TraceId = setup_for_trace(),
ShimmedFunction = fun(Args) ->
Trace = start_trace(TraceId, NumCalls, [{meck, Module}|Options]),
% Invoke the original function
Results = meck:passthrough(Args),
stop_trace(Trace),
Results
end,
MockFun = mock_fun(Arity, ShimmedFunction),
% Replace the original function with our new function that wraps the old
% function in profiling code.
meck:expect(Module, Function, MockFun).
%%--------------------------------------------------------------------
%% @doc
%% Traces the execution of the function passed in for generation of a for a
%% flamegraph of the function call.
%%
%% @end
%%--------------------------------------------------------------------
-spec apply(Function :: mfa_fun()) -> any().
apply(Function) ->
?MODULE:apply(Function, []).
-spec apply(Function :: mfa_fun(), Options :: options()) -> any().
apply({Module, Function, Args}, Options) ->
TraceId = setup_for_trace(),
Trace = start_trace(TraceId, 1, Options),
% Invoke the original function
Results = erlang:apply(Module, Function, Args),
stop_trace(Trace),
Results;
apply({Function, Args}, Options) ->
TraceId = setup_for_trace(),
Trace = start_trace(TraceId, 1, Options),
% Invoke the original function
Results = erlang:apply(Function, Args),
stop_trace(Trace),
Results.
%%%===================================================================
%%% Internal functions
%%%===================================================================
-spec start_trace(TraceId :: any(), NumCalls :: integer(), Options :: list()) -> reference().
start_trace(TraceId, NumCalls, Options) ->
case eflambe_server:start_trace(TraceId, NumCalls, Options) of
{ok, TraceId, true, Tracer} ->
MatchSpec = [{'_', [], [{message, {{cp, {caller}}}}]}],
erlang:trace_pattern(on_load, MatchSpec, [local]),
erlang:trace_pattern({'_', '_', '_'}, MatchSpec, [local]),
erlang:trace(self(), true, [{tracer, Tracer} | ?FLAGS]);
{ok, TraceId, false, _Tracer} ->
% Trace is already running or has already finished. Or this could
% be a recursive function call. We do not need to do anything.
ok
end,
TraceId.
-spec stop_trace(any()) -> ok.
stop_trace(Trace) ->
erlang:trace(self(), false, [all]),
{ok, _} = eflambe_server:stop_trace(Trace),
ok.
setup_for_trace() ->
application:ensure_all_started(eflambe),
eflambe_sup:get_or_start_server(),
% All traces must have a unique ref so we can keep track of them
make_ref().
% Total hack
% TODO: Is there a way to programmatically generate a function of a given arity?
% I asked here:
% https://stackoverflow.com/questions/69244814/erlang-generate-anonymous-function-of-an-arbitary-arity
mock_fun(1, Function) ->
fun(A) -> Function([A]) end;
mock_fun(2, Function) ->
fun(A, B) -> Function([A, B]) end;
mock_fun(3, Function) ->
fun(A, B, C) -> Function([A, B, C]) end;
mock_fun(4, Function) ->
fun(A, B, C, D) -> Function([A, B, C, D]) end;
mock_fun(5, Function) ->
fun(A, B, C, D, E) -> Function([A, B, C, D, E]) end;
mock_fun(6, Function) ->
fun(A, B, C, D, E, F) -> Function([A, B, C, D, E, F]) end;
mock_fun(7, Function) ->
fun(A, B, C, D, E, F, G) -> Function([A, B, C, D, E, F, G]) end;
mock_fun(8, Function) ->
fun(A, B, C, D, E, F, G, H) -> Function([A, B, C, D, E, F, G, H]) end;
mock_fun(9, Function) ->
fun(A, B, C, D, E, F, G, H, I) -> Function([A, B, C, D, E, F, G, H, I]) end;
mock_fun(10, Function) ->
fun(A, B, C, D, E, F, G, H, I, J) -> Function([A, B, C, D, E, F, G, H, I, J]) end. | src/eflambe.erl | 0.5794 | 0.538559 | eflambe.erl | starcoder |
%
% Exercise 3-8: Evaluating and Compiling Expressions
%
% Strings are fully parenthesized and can include the following binary operators: +, -, *.
% Additionally, the unary ~ (negative) operator is supported.
%
% Example input strings:
% (4+3)
% ((5-3)*2)
% ~23
% (~25+3)
-module(expression_parser).
-export([parse/1, evaluate/1, pretty_print/1, compile/1, simulate/1, simplify/1, optimize/1]).
%
% parse converts Txt to an Abstract Syntax Tree
%
parse(Txt) ->
[Ast, []] = expression(Txt),
Ast.
% The parse helper methods adhere to the following method signature: [ Ast, UnparsedText ] = f(Text).
% That is, f() takes input text and returns a list where the first element is an
% abstract syntax tree and the second is the unparsed text. If a method fails, the empty
% list is returned.
expression(Txt) ->
[FirstTerm, Txt2] = term(Txt),
case operator(Txt2) of
[Operator, Txt3] ->
[SecondTerm, Txt4] = term(Txt3),
[{Operator, FirstTerm, SecondTerm}, Txt4];
[] ->
[FirstTerm, Txt2]
end.
operator([$+ | Txt]) -> [plus, Txt];
operator([$- | Txt]) -> [minus, Txt];
operator([$* | Txt]) -> [times, Txt];
operator([$/ | Txt]) -> [divide, Txt];
operator(_) -> [].
term(Txt) ->
case digit(Txt) of
[Number, Txt2] ->
[{num, list_to_integer(Number)}, Txt2];
[] ->
case Txt of
[$~ | Txt2] ->
[Expr, Txt3] = expression(Txt2),
[{negative, Expr}, Txt3];
[$( | Txt2] ->
[Expr, Txt3] = expression(Txt2),
[$) | Txt4] = Txt3,
[Expr, Txt4];
[] -> []
end
end.
digit([Digit|Txt]) when Digit >= $0, Digit =< $9 ->
case digit(Txt) of
[DigitRest, UnparsedText] ->
[[Digit | DigitRest], UnparsedText];
[] ->
[[Digit | ""], Txt]
end;
digit(_) -> [].
%
% evaluate: evaluates an abstract syntax tree
%
evaluate(Ast) ->
case Ast of
{negative, SubAst} -> -evaluate(SubAst);
{plus, LeftAst, RightAst} -> evaluate(LeftAst) + evaluate(RightAst);
{minus, LeftAst, RightAst} -> evaluate(LeftAst) - evaluate(RightAst);
{times, LeftAst, RightAst} -> evaluate(LeftAst) * evaluate(RightAst);
{divide, LeftAst, RightAst} -> evaluate(LeftAst) / evaluate(RightAst);
{num, Number} -> Number
end.
%
% pretty_print: turn an abstract syntax tree into a string
%
pretty_print(Ast) ->
case Ast of
{negative, SubAst} -> [$~ | pretty_print(SubAst)];
{plus, LeftAst, RightAst} -> lists:concat(["(", pretty_print(LeftAst), "+", pretty_print(RightAst), ")"]);
{minus, LeftAst, RightAst} -> lists:concat(["(", pretty_print(LeftAst), "-", pretty_print(RightAst), ")"]);
{times, LeftAst, RightAst} -> lists:concat(["(", pretty_print(LeftAst), "*", pretty_print(RightAst), ")"]);
{divide, LeftAst, RightAst} -> lists:concat(["(", pretty_print(LeftAst), "/", pretty_print(RightAst), ")"]);
{num, Number} -> integer_to_list(Number)
end.
%
% compile: transform an abstract syntax tree into a code sequence for a stack machine to evaluate
%
compile(Ast) ->
case Ast of
{negative, SubAst} -> lists:concat([compile(SubAst), [negate]]);
{plus, LeftAst, RightAst} -> lists:concat([compile(RightAst), compile(LeftAst), [add]]);
{minus, LeftAst, RightAst} -> lists:concat([compile(RightAst), compile(LeftAst), [subtract]]);
{times, LeftAst, RightAst} -> lists:concat([compile(RightAst), compile(LeftAst), [multiply]]);
{divide, LeftAst, RightAst} -> lists:concat([compile(RightAst), compile(LeftAst), [divide]]);
{num, Number} -> [{push, Number}]
end.
%
% simulate: execute the code sequence generated by compile. Supports the following codes:
% push Value - push Value on the stack
% add - pops two values, adds them, pushes result
% subtract - pops two values, subtracts them, pushes result
% multiply - pops two values, multiplies them, pushes result
% divide - pops two values, divides them, pushes result
% negate - pops one value, negates it, pushes result
%
simulate(CodeSequence) -> process_code_sequence(CodeSequence, []).
process_code_sequence([Code|CodeSequence], Stack) ->
case Code of
{ push, Value } -> process_code_sequence(CodeSequence, [Value | Stack]);
negate ->
[Value | Stack2] = Stack,
Stack3 = [-Value | Stack2],
process_code_sequence(CodeSequence, Stack3);
add ->
process_code_sequence(CodeSequence, process_binary_code(fun(L,R) -> L + R end, Stack));
subtract ->
process_code_sequence(CodeSequence, process_binary_code(fun(L,R) -> L - R end, Stack));
multiply ->
process_code_sequence(CodeSequence, process_binary_code(fun(L,R) -> L * R end, Stack));
divide ->
process_code_sequence(CodeSequence, process_binary_code(fun(L,R) -> L / R end, Stack))
end;
process_code_sequence([], Stack) -> Stack.
% process a binary operation and return the modified stack
process_binary_code(Operation, Stack) ->
[LeftValue | Stack2] = Stack,
[RightValue | Stack3] = Stack2,
[Operation(LeftValue, RightValue) | Stack3].
%
% simplify: simplify an abstract syntax tree so that 0*e is transformed to 0, 1*e to e, and so on.
%
simplify(Ast) -> pretty_print(optimize(Ast)).
optimize(Ast) ->
if
tuple_size(Ast) == 3 ->
binary_optimizer(Ast);
true -> Ast
end.
binary_optimizer({plus, LeftTerm, RightTerm}) ->
LeftTermResult = evaluate(LeftTerm),
RightTermResult = evaluate(RightTerm),
if
LeftTermResult == 0 -> optimize(RightTerm);
RightTermResult == 0 -> optimize(LeftTerm);
true -> {plus, optimize(LeftTerm), optimize(RightTerm)}
end;
binary_optimizer({times, LeftTerm, RightTerm}) ->
LeftTermResult = evaluate(LeftTerm),
RightTermResult = evaluate(RightTerm),
if
LeftTermResult == 0 -> {num, 0};
RightTermResult == 0 -> {num, 0};
LeftTermResult == 1 -> optimize(RightTerm);
RightTermResult == 1 -> optimize(LeftTerm);
true -> {times, optimize(LeftTerm), optimize(RightTerm)}
end;
binary_optimizer(Ast) ->
{Operator, Left, Right} = Ast,
{Operator, optimize(Left), optimize(Right)}. | Erlang/ErlangProgrammingBook/expression_parser.erl | 0.563618 | 0.651355 | expression_parser.erl | starcoder |
%%----------------------------------------------------------------
%% Copyright (c) 2013-2016 Klarna AB
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%----------------------------------------------------------------
-module(mnesia_leveled_fallback).
-export([run/0]).
-define(m(A,B), fun() -> L = ?LINE,
case {A,B} of
{__X, __X} ->
B;
Other ->
error({badmatch, [Other,
{line, L}]})
end
end()).
run() ->
cleanup(),
mnesia_leveled_tlib:start_mnesia(reset),
mnesia_leveled_tlib:create_table(led),
ok = mnesia:backup("bup0.BUP"),
[mnesia:dirty_write({t,K,V}) || {K,V} <- [{a,1},
{b,2},
{c,3}]],
ok = mnesia:backup("bup1.BUP"),
[mnesia:dirty_write({t,K,V}) || {K,V} <- [{d,4},
{e,5},
{f,6}]],
ok = mnesia:backup("bup2.BUP"),
ct:log("*****************************************~n", []),
load_backup("bup0.BUP"),
?m([], mnesia:dirty_match_object(t, {t,'_','_'})),
?m([], mnesia:dirty_index_read(t,2,v)),
ct:log("*****************************************~n", []),
load_backup("bup1.BUP"),
?m([{t,a,1},{t,b,2},{t,c,3}], mnesia:dirty_match_object(t, {t,'_','_'})),
?m([{t,b,2}], mnesia:dirty_index_read(t,2,v)),
ct:log("*****************************************~n", []),
load_backup("bup2.BUP"),
?m([{t,a,1},{t,b,2},{t,c,3},
{t,d,4},{t,e,5},{t,f,6}], mnesia:dirty_match_object(t, {t,'_','_'})),
?m([{t,b,2}], mnesia:dirty_index_read(t,2,v)),
?m([{t,e,5}], mnesia:dirty_index_read(t,5,v)),
ok.
load_backup(BUP) ->
mnesia_leveled_tlib:trace(
fun() ->
ct:log("loading backup ~s~n", [BUP]),
ok = mnesia:install_fallback(BUP),
ct:log("stopping~n", []),
mnesia:stop(),
timer:sleep(3000),
ct:log("starting~n", []),
mnesia:start(),
WaitRes = mnesia:wait_for_tables([t], 5000),
ct:log("WaitRes = ~p~n", [WaitRes])
end,
mods(0)
).
cleanup() ->
os:cmd("rm *.BUP").
mods(0) ->
[];
mods(1) ->
[
{l, mnesia_leveled},
{g, leveled}
];
mods(2) ->
[
%% {l, mnesia_monitor},
{g, mnesia_leveled},
{l, mnesia_bup},
{g, mnesia_lib},
{g, mnesia_schema},
%% {g, mnesia_loader},
{g, mnesia_index},
{l, mnesia_tm}
]. | test/mnesia_leveled_fallback.erl | 0.509032 | 0.438785 | mnesia_leveled_fallback.erl | starcoder |
%% -------------------------------------------------------------------
%% Copyright (c) 2016 <NAME>, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%%
%% @doc <p>ETS tables, when created with the `ets:new/2' function, have a single
%% owning process. If the owning process exits, then any ETS tables associated
%% with that process are deleted. The purpose of the `riak_core_table_owner'
%% module (which is a gen_server process) is to serve as the owning process for
%% ETS tables that do not otherwise have an obvious owning process. For example,
%% the `riak_core_throttle' module uses an ETS table for maintaining its state,
%% but it is not itself a process and therefore the owning process for it ETS
%% table is not clear. In this case, `riak_core_table_owner' can be used to
%% create and own the ETS table on its behalf.</p>
%%
%% <p>It is important that this process never crashes, as that would lead to
%% loss of data. Therefore, a defensive approach is taken and any calls to
%% external modules are protected with the try/catch mechanism.</p>
%%
%% <p>Note that this first iteration does not provide any API functions for
%% reading or writing data in ETS tables and therefore is appropriate only for
%% named <em>public</em> ETS tables. In order to be more broadly useful, future
%% enhancements to this module should include API functions for efficiently
%% reading and writing ETS data, preferably without going through a gen_server
%% call.</p>
-module(riak_core_table_owner).
-behaviour(gen_server).
%% API
-export([start_link/0,
create_table/2,
maybe_create_table/2]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% Unfortunately the `ets' module does not define a type for options, but we
%% can at least insist on a list.
-type ets_options() :: list().
-type ets_table() :: ets:tab().
-type create_table_result() :: {ok, ets_table()} | {error, Reason::term()}.
%%%===================================================================
%%% API
%%%===================================================================
-spec start_link() -> {ok, pid()} | ignore | {error, Reason::term()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, dict:new(), []).
%% Creates a new ETS table with the given `Name' and `Options'.
%% Since the table will be owned by the `riak_core_table_owner' process, it
%% should be created with the `public' option so that other processes can read
%% and write data in the table.
-spec create_table(Name::atom(), Options::ets_options()) -> create_table_result().
create_table(Name, Options) ->
gen_server:call(?MODULE, {create_table, Name, Options}).
%% Creates a new ETS table with the given `Name' and `Options', if and only if
%% it was not already created previously.
%% Since the table will be owned by the `riak_core_table_owner' process, it
%% should be created with the `public' option so that other processes can read
%% and write data in the table.
-spec maybe_create_table(Name::atom(), Options::ets_options()) -> create_table_result().
maybe_create_table(Name, Options) ->
gen_server:call(?MODULE, {maybe_create_table, Name, Options}).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
init(State) ->
{ok, State}.
handle_call({create_table, Name, Options}, _From, State) ->
do_create_table(Name, Options, State);
handle_call({maybe_create_table, Name, Options}, _From, State) ->
case dict:find(Name, State) of
{ok, Table} ->
{reply, {ok, Table}, State};
error ->
do_create_table(Name, Options, State)
end.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
do_create_table(Name, Options, State) ->
try
Table = ets:new(Name, Options),
{reply, {ok, Table}, dict:store(Name, Table, State)}
catch
Error ->
{reply, {error, Error}, State}
end. | src/riak_core_table_owner.erl | 0.624637 | 0.461805 | riak_core_table_owner.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2012 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc This module contains functionality related to creating
%% coverage information for distributed search queries.
-module(yz_cover).
-compile(export_all).
-behavior(gen_server).
-export([code_change/3,
handle_call/3,
handle_cast/2,
handle_info/2,
init/1,
terminate/2]).
-include("yokozuna.hrl").
-record(state, {
%% The ring used to calculate the current cached plan.
ring_used :: ring()
}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Retrieve the ring used for the current plan. In rare cases the
%% ring cannot be determined and `unknown' will be returned. It is up
%% to the caller how to interpret this.
-spec get_ring_used() -> ring() | unknown.
get_ring_used() ->
try gen_server:call(?MODULE, get_ring_used, 5000) of
undefined -> unknown;
Ring -> Ring
catch
_:_ ->
%% If the call failed then not sure what ring is
%% being used.
unknown
end.
-spec logical_partitions(ring(), ordset(p())) -> ordset(lp()).
logical_partitions(Ring, Partitions) ->
LI = logical_index(Ring),
ordsets:from_list([logical_partition(LI, P) || P <- Partitions]).
%% @doc Get the coverage plan for `Index'.
-spec plan(index_name()) -> {ok, plan()} | {error, term()}.
plan(Index) ->
case mochiglobal:get(?BIN_TO_ATOM(Index), undefined) of
undefined -> calc_plan(Index, yz_misc:get_ring(transformed));
Plan -> Plan
end.
-spec reify_partitions(ring(), ordset(lp())) -> ordset(p()).
reify_partitions(Ring, LPartitions) ->
LI = logical_index(Ring),
ordsets:from_list([partition(LI, LP) || LP <- LPartitions]).
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%%%===================================================================
%%% Callbacks
%%%===================================================================
init([]) ->
schedule_tick(),
{ok, #state{ring_used=undefined}}.
handle_cast(update_all_plans, S) ->
Ring = yz_misc:get_ring(transformed),
ok = update_all_plans(Ring),
{noreply, S#state{ring_used=Ring}}.
handle_info(tick, S) ->
Ring = yz_misc:get_ring(transformed),
ok = update_all_plans(Ring),
schedule_tick(),
{noreply, S#state{ring_used=Ring}};
handle_info(Req, S) ->
lager:warning("Unexpected request ~p", [Req]),
{noreply, S}.
handle_call(get_ring_used, _, S) ->
Ring = S#state.ring_used,
{reply, Ring, S};
handle_call(Req, _, S) ->
lager:warning("Unexpected request ~p", [Req]),
{noreply, S}.
code_change(_, S, _) ->
{ok, S}.
terminate(_, _) ->
ok.
%%%===================================================================
%%% Private
%%%===================================================================
%% @doc Create a covering set using logical partitions and add
%% filtering information to eliminate overlap.
-spec add_filtering(n(), q(), logical_idx(), p_set()) -> logical_cover_set().
add_filtering(N, Q, LPI, PS) ->
CS2 = make_logical(LPI, PS),
CS3 = yz_misc:make_pairs(CS2),
CS4 = make_distance_pairs(Q, CS3),
make_cover_set(N, Q, CS4).
%% @private
%%
%% @doc Calculate a plan for the `Index' and then store an entry in
%% the plan cache.
-spec cache_plan(index_name(), ring()) -> ok.
cache_plan(Index, Ring) ->
case calc_plan(Index, Ring) of
{error, _} ->
mochiglobal:put(?BIN_TO_ATOM(Index), undefined);
{ok, Plan} ->
mochiglobal:put(?BIN_TO_ATOM(Index), {ok, Plan})
end,
ok.
%% @private
%%
%% @doc Calculate a plan for the `Index'.
-spec calc_plan(index_name(), ring()) -> {ok, plan()} | {error, term()}.
calc_plan(Index, Ring) ->
NumPartitions = riak_core_ring:num_partitions(Ring),
NVal = yz_index:get_n_val(yz_index:get_index_info(Index)),
CoveragePlan = create_coverage_plan(NVal),
maybe_filter_plan(CoveragePlan, Ring, NVal, NumPartitions).
%% @private
%%
%% @doc Create a Riak core coverage plan.
-spec create_coverage_plan(n()) -> term().
create_coverage_plan(NVal) ->
ReqId = erlang:phash2(erlang:now()),
NumPrimaries = 1,
Selector=all,
riak_core_coverage_plan:create_plan(Selector,
NVal,
NumPrimaries,
ReqId,
?YZ_SVC_NAME).
%% @doc Get the distance between the logical partition `LPB' and
%% `LPA'.
-spec get_distance(q(), lp_node(), lp_node()) -> dist().
get_distance(Q, {LPA,_}, {LPB,_}) when LPB < LPA ->
%% Wrap around
BottomDiff = LPB - 1,
TopDiff = Q - LPA,
BottomDiff + TopDiff + 1;
get_distance(_Q, {LPA,_}, {LPB,_}) ->
LPB - LPA.
%% @private
%%
-spec get_uniq_nodes(logical_cover_set()) -> [node()].
get_uniq_nodes(CoverSet) ->
{_Partitions, Nodes} = lists:unzip(CoverSet),
lists:usort(Nodes).
%% @doc Create a mapping from logical to actual partition.
-spec logical_index(riak_core_ring:riak_core_ring()) -> logical_idx().
logical_index(Ring) ->
{Partitions, _} = lists:unzip(riak_core_ring:all_owners(Ring)),
Q = riak_core_ring:num_partitions(Ring),
Logical = lists:seq(1, Q),
lists:zip(Logical, lists:sort(Partitions)).
%% @doc Map `Partition' to it's logical partition.
-spec logical_partition(logical_idx(), p()) -> lp().
logical_partition(LogicalIndex, Partition) ->
{Logical, _} = lists:keyfind(Partition, 2, LogicalIndex),
Logical.
%% @doc Generate the sequence of `N' partitions leading up to `EndLP'.
%%
%% NOTE: Logical partition numbers start at 1
-spec lp_seq(n(), q(), lp()) -> [lp()].
lp_seq(N, Q, EndLP) ->
N1 = N - 1,
StartLP = EndLP - N1,
if StartLP =< 0 ->
StartLP2 = Q + StartLP,
lists:seq(StartLP2, Q) ++ lists:seq(1, EndLP);
true ->
lists:seq(StartLP, EndLP)
end.
%% @doc Take a list of `PartitionPairs' and create a list of
%% `{LogicalPartition, Distance}' pairs. The list will contain
%% the second partition in the original pair and it's distance
%% from the partition it was paired with.
-spec make_distance_pairs(q(), [{lp_node(), lp_node()}]) ->
[{lp_node(), dist()}].
make_distance_pairs(Q, PartitionPairs) ->
[{LPB, get_distance(Q, LPA, LPB)} || {LPA, LPB} <- PartitionPairs].
%% @doc Create a `{LogicalPartition, Include}' filter pair for a given
%% `{LogicalPartition, Dist}' pair. `Include' indicates which
%% replicas should be included for the paired `LogicalPartition'.
%% The value `all' means all replicas. If the value if a list of
%% `lp()' then a replica must has one of the LPs as it's first
%% primary partition on the preflist.
-spec make_cover_pair(n(), q(), {lp_node(), dist()}) -> logical_cover_pair().
make_cover_pair(N, _Q, {LPNode, N}) ->
{LPNode, all};
make_cover_pair(N, Q, {{LP, Node}, Dist}) ->
LPSeq = lists:reverse(lp_seq(N, Q, LP)),
Filter = lists:sublist(LPSeq, Dist),
{{LP, Node}, Filter}.
-spec make_cover_set(n(), q(), [{lp_node(), dist()}]) -> logical_cover_set().
make_cover_set(N, Q, Cover) ->
[make_cover_pair(N, Q, DP) || DP <- Cover].
%% @doc Convert the partition set to use logical partitions.
-spec make_logical(logical_idx(), p_set()) -> [lp_node()].
make_logical(LogicalIndex, PSet) ->
[{logical_partition(LogicalIndex, P), Node} || {P, Node} <- PSet].
%% @private
%%
%% @doc This function converts CovertSet into logical partitions and adds filtering information.
-spec make_logical_and_filter(logical_cover_set(), ring(), n(), pos_integer()) -> logical_cover_set().
make_logical_and_filter(CoverSet, Ring, NVal, NumPartitions) ->
LPI = logical_index(Ring),
add_filtering(NVal, NumPartitions, LPI, CoverSet).
%% @private
%%
%% @doc Filter plan or return error.
-spec maybe_filter_plan(term(), ring(), n(), pos_integer()) -> {ok, plan()} | {error, term()}.
maybe_filter_plan({error, Error}, _, _, _) ->
{error, Error};
maybe_filter_plan({CoverSet, _}, Ring, NVal, NumPartitions) ->
LogicalCoverSet = make_logical_and_filter(CoverSet, Ring, NVal, NumPartitions),
UniqNodes = get_uniq_nodes(CoverSet),
Mapping = yz_solr:build_mapping(UniqNodes),
plan_return(length(Mapping) == length(UniqNodes), UniqNodes, LogicalCoverSet, Mapping).
%% @doc Map `LP' to actual partition.
-spec partition(logical_idx(), lp()) -> p().
partition(LogicalIndex, LP) ->
{_, P} = lists:keyfind(LP, 1, LogicalIndex),
P.
%% @private
%%
%% @doc Return the plan only if there exists a Solr host-port mapping for each node in the plan.
-spec plan_return(boolean(), [node()], logical_cover_set(), list()) -> {ok, plan()} | {error, term()}.
plan_return(false, _, _, _) ->
{error, "Failed to determine Solr port for all nodes in search plan"};
plan_return(true, UniqNodes, LogicalCoverSet, Mapping) ->
{ok, {UniqNodes, LogicalCoverSet, Mapping}}.
%% @private
%%
%% @doc Schedule next tick to be sent to this server.
-spec schedule_tick() -> ok.
schedule_tick() ->
erlang:send_after(?YZ_COVER_TICK_INTERVAL, ?MODULE, tick),
ok.
%% @private
%%
%% @doc Iterate through the list of indexes, calculate a new coverage
%% plan, and update the cache entry.
-spec update_all_plans(ring()) -> ok.
update_all_plans(Ring) ->
Indexes = yz_index:get_indexes_from_meta(),
_ = [ok = cache_plan(I, Ring) || I <- Indexes],
ok. | deps/yokozuna/src/yz_cover.erl | 0.617974 | 0.402627 | yz_cover.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_kv_fsm_timing: Common code for timing fsm states
%%
%% Copyright (c) 2007-2010 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc code that would otherwise be duplicated in both fsms
%% functions for gathering and calculating timing information
%% for fsm states.
-module(riak_kv_fsm_timing).
-export([add_timing/2, calc_timing/1]).
-type timing() :: {StageName::atom(), StageStartTime::erlang:timestamp()}.
-type timings() :: [timing()].
-type duration() :: {StageName::atom(), StageDuration::non_neg_integer()}.
-type durations() :: {ResponseUSecs::non_neg_integer(), [duration()]}.
%% @doc add timing information of `{State, erlang:now()}' to the Timings
-spec add_timing(atom(), timings()) -> timings().
add_timing(State, Timings) when is_list(Timings) ->
[{State, os:timestamp()}|Timings].
%% ---------------------------------------------------------------------
%% @doc Calc timing information - stored as `{Stage, StageStart}'
%% in reverse order.
%%
%% ResponseUsecs is calculated as time from reply to start of first stage.
%% If `reply' is in `stages' more than once, the earliest value is used.
%% If `reply' is not in `stages' fails with `badarg'
%% Since a stage's duration is the difference between it's start time
%% and the next stages start time, we don't calculate the duration of
%% the final stage, it is just there as the end time of the
%% penultimate stage
-spec calc_timing(timings()) ->
durations().
calc_timing(Stages0) ->
case proplists:get_value(reply, Stages0) of
undefined ->
erlang:error(badarg);
ReplyTime ->
[{_FinalStage, StageEnd}|Stages] = Stages0,
calc_timing(Stages, StageEnd, ReplyTime, orddict:new())
end.
%% A stages duration is the difference between it's start time
%% and the next stages start time.
-spec calc_timing(timings(), erlang:timestamp(),
erlang:timestamp(),
orddict:orddict()) ->
durations().
calc_timing([], FirstStageStart, ReplyTime, Acc) ->
%% Time from first stage start until reply sent
{timer:now_diff(ReplyTime, FirstStageStart), orddict:to_list(Acc)};
calc_timing([{Stage, StageStart} | Rest], StageEnd, ReplyTime, Acc0) ->
StageDuration = timer:now_diff(StageEnd, StageStart),
%% When the same stage appears more than once in
%% a list of timings() aggregate the times into
%% a total for that stage
Acc = orddict:update_counter(Stage, StageDuration, Acc0),
calc_timing(Rest, StageStart, ReplyTime, Acc). | deps/riak_kv/src/riak_kv_fsm_timing.erl | 0.616474 | 0.49048 | riak_kv_fsm_timing.erl | starcoder |
%%==============================================================================
%% Copyright 2014 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
%%-------------------------------------------------------------------
%% File : jobs.erl
%% @author : <NAME> <<EMAIL>>
%% @doc
%% This is the public API of the JOBS framework.
%%
%% @end
%% Created : 15 Jan 2010 by <NAME> <<EMAIL>>
%%-------------------------------------------------------------------
-module(jobs).
-export([ask/1,
done/1,
job_info/1,
run/2,
enqueue/2,
dequeue/2]).
-export([ask_queue/2]).
%% Configuration API
-export([add_queue/2,
modify_queue/2,
delete_queue/1,
info/1,
queue_info/1,
queue_info/2,
modify_regulator/4,
add_counter/2,
modify_counter/2,
delete_counter/1,
add_group_rate/2,
modify_group_rate/2,
delete_group_rate/1]).
%% @spec ask(Type) -> {ok, Opaque} | {error, Reason}
%% @doc Asks permission to run a job of Type. Returns when permission granted.
%%
%% The simplest way to have jobs regulated is to spawn a request per job.
%% The process should immediately call this function, and when granted
%% permission, execute the job, and then terminate.
%% If for some reason the process needs to remain, to execute more jobs,
%% it should explicitly call `jobs:done(Opaque)'.
%% This is not strictly needed when regulation is rate-based, but as the
%% regulation strategy may change over time, it is the prudent thing to do.
%% @end
%%
ask(Type) ->
jobs_server:ask(Type).
%% @spec done(Opaque) -> ok
%% @doc Signals completion of an executed task.
%%
%% This is used when the current process wants to submit more jobs to load
%% regulation. It is mandatory when performing counter-based regulation
%% (unless the process terminates after completing the task). It has no
%% effect if the job type is purely rate-regulated.
%% @end
%%
done(Opaque) ->
jobs_server:done(Opaque).
%% @spec run(Type, Function::function()) -> Result
%% @doc Executes Function() when permission has been granted by job regulator.
%%
%% This is equivalent to performing the following sequence:
%% <pre>
%% case jobs:ask(Type) of
%% {ok, Opaque} ->
%% try Function()
%% after
%% jobs:done(Opaque)
%% end;
%% {error, Reason} ->
%% erlang:error(Reason)
%% end.
%% </pre>
%% @end
%%
run(Queue, F) when is_function(F, 0); is_function(F, 1) ->
jobs_server:run(Queue, F).
%% @spec enqueue(Queue, Item) -> ok | {error, Reason}
%% @doc Inserts `Item` into a passive queue.
%%
%% Note that this function only works on passive queues. An exception will be
%% raised if the queue doesn't exist, or isn't passive.
%%
%% Returns `ok' if `Item' was successfully entered into the queue,
%% `{error, Reason}' otherwise (e.g. if the queue is full).
%% @end
enqueue(Queue, Item) ->
jobs_server:enqueue(Queue, Item).
%% @spec dequeue(Queue, N) -> [{JobID, Item}]
%% @doc Extracts up to `N' items from a passive queue
%%
%% Note that this function only works on passive queues. An exception will be
%% raised if the queue doesn't exist, or if it isn't passive.
%%
%% This function will block until at least one item can be extracted from the
%% queue (see {@link enqueue/2}). No more than `N' items will be extracted.
%%
%% The items returned are on the form `{JobID, Item}', where `JobID' is in
%% the form of a microsecond timestamp
%% (see {@link jobs_lib:timestamp_to_datetime/1}), and `Item' is whatever was
%% provided in {@link enqueue/2}.
%% @end
dequeue(Queue, N) when N =:= infinity; is_integer(N), N > 0 ->
jobs_server:dequeue(Queue, N).
%% @spec job_info(Opaque) -> undefined | Info
%% @doc Retrieves job-specific information from the `Opaque' data object.
%%
%% The queue could choose to return specific information that is passed to a
%% granted job request. This could be used e.g. for load-balancing strategies.
%% @end
%%
job_info({_, Opaque}) ->
proplists:get_value(info, Opaque).
%% @spec add_queue(Name::any(), Options::[{Key,Value}]) -> ok
%% @doc Installs a new queue in the load regulator on the current node.
%%
%% Valid options are:
%%
%% * `{regulators, Rs}', where `Rs' is a list of rate- or counter-based
%% regulators. Valid regulators listed below. Default: [].
%%
%% * `{type, Type}' - type of queue. Valid types listed below. Default: `fifo'.
%%
%% * `{action, Action}' - automatic action to perform for each request.
%% Valid actions described below. Default: `undefined'.
%%
%% * `{check_interval, I}' - If specified (in ms), this overrides the interval
%% derived from any existing rate regulator. Note that regardless of how often
%% the queue is checked, enough jobs will be dispatched at each interval to
%% maintain the highest allowed rate possible, but the check interval may
%% thus affect how many jobs are dispatched at the same time. Normally, this
%% should not have to be specified.
%%
%% * `{max_time, T}', specifies how long (in ms) a job is allowed to wait
%% in the queue before it is automatically rejected.
%%
%% * `{max_size, S}', indicates how many items can be queued before requests
%% are automatically rejected. Strictly speaking, size is whatever the queue
%% behavior reports as the size; in the default queue behavior, it is the
%% number of elements in the queue.
%%
%% * `{mod, M}', indicates which queue behavior to use. Default is `jobs_queue'.
%%
%% In addition, some 'abbreviated' options are supported:
%%
%% * `{standard_rate, R}' - equivalent to
%% `[{regulators,[{rate,[{limit,R}, {modifiers,[{cpu,10},{memory,10}]}]}]}]'
%%
%% * `{standard_counter, C}' - equivalent to
%% `[{regulators,[{counter,[{limit,C}, {modifiers,[{cpu,10},{memory,10}]}]}]}]'
%%
%% * `{producer, F}' - equivalent to `{type, {producer, F}}'
%%
%% * `passive' - equivalent to `{type, {passive, fifo}}'
%%
%% * `approve | reject' - equivalent to `{action, approve | reject}'
%%
%% <b>Regulators</b>
%%
%% * `{rate, Opts}' - rate regulator. Valid options are
%% <ol>
%% <li>`{limit, Limit}' where `Limit' is the maximum rate (requests/sec)</li>
%% <li>`{modifiers, Mods}', control feedback-based regulation. See below.</li>
%% <li>`{name, Name}', optional. The default name for the regulator is
%% `{rate, QueueName, N}', where `N' is an index indicating which rate regulator
%% in the list is referred. Currently, at most one rate regulator is allowed,
%% so `N' will always be `1'.</li>
%% </ol>
%%
%% * `{counter, Opts}' - counter regulator. Valid options are
%% <ol>
%% <li>`{limit, Limit}', where `Limit' is the number of concurrent jobs
%% allowed.</li>
%% <li>`{increment, Incr}', increment per job. Default is `1'.</li>
%% <li>`{modifiers, Mods}', control feedback-based regulation. See below.</li>
%% </ol>
%%
%% * `{named_counter, Name, Incr}', use an existing counter, incrementing it
%% with `Incr' for each job. `Name' can either refer to a named top-level
%% counter (see {@link add_counter/2}), or a queue-specific counter
%% (these are named `{counter,Qname,N}', where `N' is an index specifying
%% their relative position in the regulators list - e.g. first or second
%% counter).
%%
%% * `{group_rate, R}', refers to a top-level group rate `R'.
%% See {@link add_group_rate/2}.
%%
%% <b>Types</b>
%%
%% * `fifo | lifo' - these are the types supported by the default queue
%% behavior. While lifo may sound like an odd choice, it may have benefits
%% for stochastic traffic with time constraints: there is no point to
%% 'fairness', since requests cannot control their place in the queue, and
%% choosing the 'freshest' job may increase overall goodness critera.
%%
%% * `{producer, F}', the queue is not for incoming requests, but rather
%% generates jobs. Valid options for `F' are
%% (for details, see {@link jobs_prod_simpe}):
%% <ol>
%% <li>A fun of arity 0, indicating a stateless producer</li>
%% <li>A fun of arity 2, indicating a stateful producer</li>
%% <li>`{M, F, A}', indicating a stateless producer</li>
%% <li>`{Mod, Args}' indicating a stateful producer</li>
%% </ol>
%%
%% * `{action, approve | reject}', specifies an automatic response to every
%% request. This can be used to either block a queue (`reject') or set it as
%% a pass-through ('approve').
%%
%% <b>Modifiers</b>
%%
%% Jobs supports feedback-based modification of regulators.
%%
%% The sampler framework sends feedback messages of type
%% `[{Modifier, Local, Remote::[{node(), Level}]}]'.
%%
%% Each regulator can specify a list of modifier instructions:
%%
%% * `{Modifier, Local, Remote}' - `Modifier' can be any label used by the
%% samplers (see {@link jobs_sampler}). `Local' and `Remote' indicate
%% increments in percent by which to reduce the limit of the given regulator.
%% The `Local' increment is used for feedback info pertaining to the local
%% node, and the `Remote' increment is used for remote indicators. `Local'
%% is given as a percentage value (e.g. `10' for `10 %'). The `Remote'
%% increment is either `{avg, Percent}' or `{max, Percent}', indicating whether
%% to respond to the average load of other nodes or to the most loaded node.
%% The correction from `Local' and the correction from `Remote' are summed
%% before applying to the regulator limit.
%%
%% * `{Modifier, Local}' - same as above, but responding only to local
%% indications, ignoring the load on remote nodes.
%%
%% * `{Modifier, F::function((Local, Remote) -> integer())}' - the function
%% `F(Local, Remote)' is applied and expected to return a correction value,
%% in percentage units.
%%
%% * `{Modifier, {Module, Function}}' - `Module:Function(Local Remote)'
%% is applied an expected to return a correction value in percentage units.
%%
%% For example, if a rate regulator has a limit of `100' and has a modifier,
%% `{cpu, 10}', then a feedback message of `{cpu, 2, _Remote}' will reduce
%% the rate limit by `2*10' percent, i.e. down to `80'.
%%
%% Note that modifiers are always applied to the <em>preset</em> limit,
%% not the current limit. Thus, the next round of feedback messages in our
%% example will be applied to the preset limit of `100', not the `80' that
%% resulted from the previous feedback messages. A correction value of `0'
%% will reset the limit to the preset value.
%%
%% If there are more than one modifier with the same name, the last one in the
%% list will be the one used.
%%
%% @end
%%
add_queue(Name, Options) ->
jobs_server:add_queue(Name, Options).
%% @spec modify_queue(Name::any(), Options::[{Key,Value}]) ->
%% ok | {error, Reason}
%% @doc Modifies queue parameters of existing queue.
%%
%% The queue parameters that can be modified are `max_size' and `max_time'.
%% @end
modify_queue(Name, Options) ->
jobs_server:modify_queue(Name, Options).
%% @spec delete_queue(Name) -> boolean()
%% @doc Deletes the named queue from the load regulator on the current node.
%% Returns `true' if there was in fact such a queue; `false' otherwise.
%% @end
%%
delete_queue(Name) ->
jobs_server:delete_queue(Name).
%% @spec ask_queue(QueueName, Request) -> Reply
%% @doc Sends a synchronous request to a specific queue.
%%
%% This function is mainly intended to be used for back-end processes that act
%% as custom extensions to the load regulator itself. It should not be used by
%% regular clients. Sophisticated queue behaviours could export gen_server-like
%% logic allowing them to respond to synchronous calls, either for special
%% inspection, or for influencing the queue state.
%% @end
%%
ask_queue(QueueName, Request) ->
jobs_server:ask_queue(QueueName, Request).
%% @spec add_counter(Name, Options) -> ok
%% @doc Adds a named counter to the load regulator on the current node.
%% Fails if there already is a counter the name `Name'.
%% @end
%%
add_counter(Name, Options) ->
jobs_server:add_counter(Name, Options).
%% @spec delete_counter(Name) -> boolean()
%% @doc Deletes a named counter from the load regulator on the current node.
%% Returns `true' if there was in fact such a counter; `false' otherwise.
%% @end
%%
delete_counter(Name) ->
jobs_server:delete_counter(Name).
%% @spec add_group_rate(Name, Options) -> ok
%% @doc Adds a group rate regulator to the load regulator on the current node.
%% Fails if there is already a group rate regulator of the same name.
%% @end
%%
add_group_rate(Name, Options) ->
jobs_server:add_group_rate(Name, Options).
delete_group_rate(Name) ->
jobs_server:delete_group_rate(Name).
info(Item) ->
jobs_server:info(Item).
queue_info(Name) ->
jobs_server:queue_info(Name).
queue_info(Name, Item) ->
jobs_server:queue_info(Name, Item).
modify_regulator(Type, QName, RegName, Opts) when Type==counter;Type==rate ->
jobs_server:modify_regulator(Type, QName, RegName, Opts).
modify_counter(CName, Opts) ->
jobs_server:modify_counter(CName, Opts).
modify_group_rate(GRName, Opts) ->
jobs_server:modify_group_rate(GRName, Opts). | src/jobs.erl | 0.729809 | 0.427935 | jobs.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% A set of optics specific to orddicts.
%%%
%%% As orddicts are internally represented as a list of pairs, the
%%% type checks used here are not as reliable as those used for other
%%% optics. Please ensure via other means that these optics are only
%%% used with actual orddicts.
%%% @end
%%%-------------------------------------------------------------------
-module(optic_orddict).
%% API
-export([all/0,
all/1,
keys/0,
keys/1,
values/0,
values/1,
associations/0,
associations/1,
key/1,
key/2,
association/1,
association/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% @see values/1
-spec all() -> optic:optic().
all() ->
values().
%% @see values/1
-spec all(Options) -> optic:optic() when
Options :: optic:variations().
all(Options) ->
values(Options).
%% @see keys/1
-spec keys() -> optic:optic().
keys() ->
keys(#{}).
%% @doc
%% Focus on all keys of an orddict.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_orddict:keys()],
%% orddict:from_list([{first, 1}, {second, 2}])).
%% {ok,[first,second]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec keys(Options) -> optic:optic() when
Options :: optic:variations().
keys(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (Key, _Value, InnerAcc) ->
Fun(Key, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{NewKey, NewAcc} = Fun(Key, InnerAcc),
{orddict:store(NewKey, Value, InnerDict), NewAcc}
end,
{orddict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
orddict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see values/1
-spec values() -> optic:optic().
values() ->
values(#{}).
%% @doc
%% Focus on all values of an orddict.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_orddict:values()],
%% orddict:from_list([{first, 1}, {second, 2}])).
%% {ok,[1,2]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec values(Options) -> optic:optic() when
Options :: optic:variations().
values(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (_Key, Value, InnerAcc) ->
Fun(Value, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{NewValue, NewAcc} = Fun(Value, InnerAcc),
{orddict:store(Key, NewValue, InnerDict), NewAcc}
end,
{orddict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
orddict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see associations/1
-spec associations() -> optic:optic().
associations() ->
associations(#{}).
%% @doc
%% Focus on all associations of an orddict. An association is a tuple
%% of the key and value for each entry.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_orddict:associations()],
%% orddict:from_list([{first, 1}, {second, 2}])).
%% {ok,[{first,1},{second,2}]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec associations(Options) -> optic:optic() when
Options :: optic:variations().
associations(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (Key, Value, InnerAcc) ->
Fun({Key, Value}, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
{ok, orddict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{{NewKey, NewValue}, NewAcc} = Fun({Key, Value}, InnerAcc),
{orddict:store(NewKey, NewValue, InnerDict), NewAcc}
end,
{orddict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
orddict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see key/2
-spec key(Key) -> optic:optic() when
Key :: term().
key(Key) ->
key(Key, #{}).
%% @doc
%% Focus on the value of an orddict key.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_orddict:key(first)],
%% orddict:from_list([{first, 1}, {second, 2}])).
%% {ok,[1]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec key(Key, Options) -> optic:optic() when
Key :: term(),
Options :: optic:variations().
key(Key, Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
case orddict:find(Key, Dict) of
{ok, Value} ->
{ok, Fun(Value, Acc)};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
case orddict:find(Key, Dict) of
{ok, Value} ->
{NewValue, NewAcc} = Fun(Value, Acc),
{ok, {orddict:store(Key, NewValue, Dict), NewAcc}};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
New =
fun (Dict, Template) ->
case is_orddict(Dict) of
true ->
orddict:store(Key, Template, Dict);
false ->
orddict:from_list([{Key, Template}])
end
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see association/2
-spec association(Key) -> optic:optic() when
Key :: term().
association(Key) ->
association(Key, #{}).
%% @doc
%% Focus on the association for an orddict key. An association is the
%% tuple of a orddict key and value. If the key is modified, the optic is
%% no longer well behaved.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_orddict:association(first)],
%% orddict:from_list([{first, 1}, {second, 2}])).
%% {ok,[{first,1}]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec association(Key, Options) -> optic:optic() when
Key :: term(),
Options :: optic:variations().
association(Key, Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
case orddict:find(Key, Dict) of
{ok, Value} ->
{ok, Fun({Key, Value}, Acc)};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_orddict(Dict) of
true ->
case orddict:find(Key, Dict) of
{ok, Value} ->
{{NewKey, NewValue}, NewAcc} = Fun({Key, Value}, Acc),
{ok, {orddict:store(NewKey, NewValue, orddict:erase(Key, Dict)), NewAcc}};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
New =
fun (Dict, Template) ->
case is_orddict(Dict) of
true ->
orddict:store(Key, Template, Dict);
false ->
orddict:from_list([{Key, Template}])
end
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
%% @private
%% @doc
%% Check if a container is likely to be an orddict. Is both unable to
%% disambiguate an empty list and an empty orddict, as well as only
%% inspecting the first element of the orddict in order to keep the
%% check constant time.
%% @end
is_orddict([]) ->
true;
is_orddict([{_, _} | _]) ->
true;
is_orddict(_) ->
false. | src/optic_orddict.erl | 0.618665 | 0.416619 | optic_orddict.erl | starcoder |
%% Copyright (c) 2013-2014 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(rationals).
-export([
new/1,
new/2,
ratio/1,
add/2,
subtract/2,
multiply/2,
simplify/1,
reciprocal/1,
divide/2,
numerator/1,
denominator/1,
is_greater_than/2,
is_less_than/2,
is_equal_to/2,
is_greater_or_equal/2,
is_less_or_equal/2,
from_float/1,
to_float/1,
gcd/2
]).
-export_type([
numerator/0,
denominator/0,
fraction/0,
ratio/0
]).
-type numerator() :: integer().
-type denominator() :: pos_integer().
-record(fraction, {
numerator :: numerator(),
denominator :: denominator()
}).
-type ratio() :: {numerator(), denominator()}.
-opaque fraction() :: #fraction{}.
-spec new(numerator()) -> fraction().
new(Numerator) ->
#fraction{numerator = Numerator, denominator = 1}.
-spec new(numerator(), denominator()) -> fraction().
new(Numerator, Denominator) ->
#fraction{numerator = Numerator, denominator = Denominator}.
-spec numerator(fraction()) -> numerator().
numerator(#fraction{numerator = Numerator}) ->
Numerator.
-spec denominator(fraction()) -> denominator().
denominator(#fraction{denominator = Denominator}) ->
Denominator.
-spec ratio(fraction()) -> ratio().
ratio(#fraction{numerator = Numerator, denominator = Denominator}) ->
{Numerator, Denominator}.
-spec add(fraction(), fraction()) -> fraction().
add(#fraction{numerator = N1, denominator = D1},
#fraction{numerator = N2, denominator = D2}) ->
new((N1 * D2) + (D1 * N2), D1 * D2).
-spec subtract(fraction(), fraction()) -> fraction().
subtract(#fraction{numerator = N1, denominator = D1},
#fraction{numerator = N2, denominator = D2}) ->
new((N1 * D2) - (D1 * N2), D1 * D2).
-spec multiply(fraction(), fraction()) -> fraction().
multiply(#fraction{numerator = N1, denominator = D1},
#fraction{numerator = N2, denominator = D2}) ->
new(N1 * N2, D1 * D2).
-spec reciprocal(fraction()) -> fraction().
reciprocal(#fraction{numerator = Numerator, denominator = Denominator}) ->
new(Denominator, Numerator).
-spec divide(fraction(), fraction()) -> fraction().
divide(A, B) ->
multiply(A, reciprocal(B)).
-spec simplify(fraction()) -> fraction().
simplify(#fraction{numerator = Numerator, denominator = Denominator} = F) ->
case gcd(Numerator, Denominator) of
1 ->
F;
GCD ->
new(Numerator div GCD, Denominator div GCD)
end.
comparison(Fn,
#fraction{numerator = N1,
denominator = D1},
#fraction{numerator = N2,
denominator = D2}) ->
Fn(N1 * D2, N2 * D1).
-spec is_greater_than(fraction(), fraction()) -> boolean().
is_greater_than(F1, F2) ->
comparison(fun erlang:'>'/2, F1, F2).
-spec is_less_than(fraction(), fraction()) -> boolean().
is_less_than(F1, F2) ->
comparison(fun erlang:'<'/2, F1, F2).
-spec is_equal_to(fraction(), fraction()) -> boolean().
is_equal_to(F1, F2) ->
comparison(fun erlang:'=='/2, F1, F2).
-spec is_less_or_equal(fraction(), fraction()) -> boolean().
is_less_or_equal(F1, F2) ->
comparison(fun erlang:'=<'/2, F1, F2).
-spec is_greater_or_equal(fraction(), fraction()) -> boolean().
is_greater_or_equal(F1, F2) ->
comparison(fun erlang:'>='/2, F1, F2).
-spec to_float(fraction()) -> float().
to_float(#fraction{numerator = Numerator,
denominator = Denominator}) ->
Numerator / Denominator.
-spec from_float(float()) -> fraction().
from_float(Float) when is_float(Float) ->
from_float(Float, 1).
from_float(Numerator, Denominator) when Numerator == trunc(Numerator) ->
simplify(new(trunc(Numerator), Denominator));
from_float(Numerator, Denominator) ->
from_float(Numerator * 10, Denominator * 10).
gcd(A, 0) ->
A;
gcd(A, B) ->
gcd(B, A rem B). | src/rationals.erl | 0.783823 | 0.472988 | rationals.erl | starcoder |
%% Copyright 2018 Octavo Labs AG Zurich Switzerland (https://octavolabs.com)
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(vmq_swc_dkm).
-include("vmq_swc.hrl").
%% The vmq_swc_dkm implements a high performant and scalable DotKeyMap (DKM).
%% The DKM is the index structure that is used by the incremental Garbage
%% Collection that is kicked off after every Anti Entropy Exchange.
%%
%% The original implementation of the DKM in swc_dotkeymap uses orddicts
%% to store the dots. For performance reasons we use a dotkeymap that is backed
%% by multiple ETS tables. Unlike to the 'official' dotkeymap this implementation
%% keeps track of the active set of dots, which are dots that relate to undeleted
%% objects. Therefore a `prune` operation will only delete a dot that relates to
%% either a deleted object or the dot was superseded by a newer dot.
%%
%% An example:
%%
%% 1: insert(KeyA, (a, 1))
%% 2: insert(KeyA, (a, 2)) ----> incremental delete (a, 1)
%% 3: insert(KeyA, (b, 1))
%% 4: prune(a, 2) ----------> deletes (a, 2)
%% 5: insert(KeyA, (c, 1))
%% 6: mark_for_gc(KeyA)
%% 7: prune(b, 1) ----------> deletes (b, 1)
%% 8: prune(c, 1) ----------> deletes (c, 1) and object with key KeyA
%%
%% Such a DKM that manages all active dots can grow substantially. The size
%% of the active set of dots is at least the size of all the keys in the system.
%% A naive approach would require a full table scan to find all dots that are out
%% of date. To reduce the complexity of incremental GC this dotkeymap requires
%% three different ETS tables, `latest`, `latest_candidates`, and `gc_candidates`.
%% During dot insertion and pruning dots move from one table to the others depending on
%% if they are superseded by a new dot of the same origin.
%%
%% The `latest` and `latest_candidates` tables contain the set of most recent dots.
%% Therefore if a dot gets superseded it creates an incremental delete.
%% In case pruning would select a dot that is currently set as latest dot for a keey
%% it will replace such a dot with a candidate dot from `latest_candidates`.
%% If no suitable candidate exists the dot doesn't get pruned. The GC will do an
%% ets:select on the `latest_candidates` table. The size of the `latest_candidates`
%% table depends on the number of unpruned concurrent writes.
%%
%% By definition the largest table is the `latest`, which is why ets:select is not
%% an option for incremental GC. Instead a different table `gc_candidates` is used
%% to mark 'to-be-deleted' Keys. As all elements in the `latest` set are active they
%% are only subject to GC if marked as a `gc_candidate`.
-export([init/0,
insert/4,
mark_for_gc/2,
prune/3,
prune/4, % testing
prune_for_peer/2,
destroy/1,
test/0,
dump/1,
dkm/1, % used by test
info/2]).
-record(dkm, {latest=ets:new(?MODULE, [public]),
latest_candidates=ets:new(?MODULE, [public]),
gc_candidates=ets:new(?MODULE, [public])}).
-type dkm() :: #dkm{}.
-export_type([dkm/0]).
-spec init() -> dotkeymap().
init() ->
#dkm{}.
info(DKM, object_count) ->
ets:info(DKM#dkm.latest, size);
info(DKM, tombstone_count) ->
ets:info(DKM#dkm.gc_candidates, size);
info(#dkm{latest=LT, latest_candidates=LTC, gc_candidates=GCT}, memory) ->
ets:info(LT, memory) + ets:info(LTC, memory) + ets:info(GCT, memory).
dump(#dkm{latest=LT, gc_candidates=GCT, latest_candidates=LTC}) ->
#{latest => dump(LT),
latest_candidates => dump(LTC),
gc_candidates => dump(GCT)};
dump(T) ->
ets:foldl(fun(Obj, Acc) ->
[Obj | Acc]
end, [], T).
dkm(#dkm{latest=LT, latest_candidates=LTC}) ->
LatestObjects =
ets:foldl(fun({Key, Dot}, Acc) ->
maps:put(Dot, Key, Acc)
end, #{}, LT),
ets:foldl(fun({{_,_} = Dot, Key}, Acc) ->
maps:put(Dot, Key, Acc);
({_Key, _Dots}, Acc) ->
Acc
end, LatestObjects, LTC).
-spec insert(dotkeymap(), peer(), counter(), db_key()) -> [db_op()].
insert(#dkm{latest=LT, gc_candidates=GCT} = DKM, Id, Cnt, Key) ->
Dot = {Id, Cnt},
case ets:insert_new(LT, {Key, Dot}) of
true ->
[{dkm, sext:encode(Dot), Key}];
false ->
ReplacedDots =
case ets:lookup(LT, Key) of
[{_, {Id, CurrentCnt} = CurrentDot}] when Cnt > CurrentCnt ->
% remove possible GC candidate
ets:delete(GCT, Key),
TmpDot = replace_candidate_or_gc(DKM, Dot, CurrentDot, Key),
% dot newer than current dot -> replace
ets:insert(LT, {Key, Dot}),
[TmpDot];
[{_, {Id, _} = CurrentDot}] ->
% new dot is older than current dot -> replace candidate or gc
[replace_candidate_or_gc(DKM, Dot, CurrentDot, Key)];
[{_, CurrentDot}] ->
% remove possible GC candidate
ets:delete(GCT, Key),
% this is a new latest candidate
insert_candidates(DKM, Dot, CurrentDot, Key)
end,
{ShouldInsertDot, DbOps} =
lists:foldl(fun(D, {_Flag, Acc}) when D == Dot ->
{false, Acc};
(D, {Flag, Acc}) ->
{Flag, [{?DB_DKM, sext:encode(D), ?DELETED}|Acc]}
end, {true, []}, ReplacedDots),
case ShouldInsertDot of
true ->
[{?DB_DKM, sext:encode(Dot), Key}|DbOps];
false ->
DbOps
end
end.
replace_candidate_or_gc(#dkm{latest_candidates=LTC}, {Id, NewCnt} = Dot, {Id, OldCnt} = CurrentDot, Key) ->
case ets:lookup(LTC, Key) of
[] ->
% no candidate available, ignore this entry
% GC current dot
CurrentDot;
[{_, Dots}] ->
case maps:get(Id, Dots) of
OldCnt when NewCnt > OldCnt ->
% replace latest candidate
OldDot = {Id, OldCnt},
ets:insert(LTC, [{Key, maps:put(Id, NewCnt, Dots)}, {Dot, Key}]),
ets:delete(LTC, OldDot),
% GC Old Dot
OldDot;
OldCnt ->
% GC Dot
Dot
end
end.
insert_candidates(#dkm{latest_candidates=LTC}, Dot, CurrentDot, Key) ->
case ets:lookup(LTC, Key) of
[] ->
ets:insert(LTC, [{Key, maps:from_list([Dot, CurrentDot])}, {Dot, Key}, {CurrentDot, Key}]),
[];
[{_, Dots}] ->
{Dots0, NewDots0, UnusedDots0} = insert_candidate_dot(Dot, Dots, [], []),
{Dots1, NewDots1, UnusedDots1} = insert_candidate_dot(CurrentDot, Dots0, NewDots0, UnusedDots0),
case NewDots1 of
[] ->
% latest candidates didn't change
ok;
_ ->
ets:insert(LTC, [{Key, Dots1} | [{D, Key} || D <- NewDots1]])
end,
_ = [ets:delete(LTC, D) || D <- UnusedDots1],
UnusedDots1
end.
insert_candidate_dot({Id, Cnt} = Dot, Dots, NewDots, UnusedDots) ->
case maps:get(Id, Dots, undefined) of
undefined ->
{maps:put(Id, Cnt, Dots), [Dot|NewDots], UnusedDots};
Cnt ->
{Dots, NewDots, UnusedDots};
TmpCnt when TmpCnt < Cnt ->
{maps:put(Id, Cnt, Dots), [Dot|NewDots], [{Id, TmpCnt}|UnusedDots]};
_ ->
{Dots, NewDots, [Dot|UnusedDots]}
end.
-spec mark_for_gc(dotkeymap(), db_key()) -> ok.
mark_for_gc(#dkm{latest=LT, gc_candidates=GCT}, Key) ->
case ets:lookup(LT, Key) of
[] -> ok;
_ ->
ets:insert(GCT, {Key})
end,
ok.
-spec prune(dotkeymap(), watermark(), [db_op()]) -> [db_op()].
prune(#dkm{} = DKM, Watermark, DbOps) ->
lists:foldl(
fun(Id, Acc) ->
Min = swc_watermark:min(Watermark, Id),
prune(DKM, Id, Min, Acc)
end, DbOps, swc_watermark:peers(Watermark)).
prune(#dkm{gc_candidates=GCT} = DKM, Id, Min, DbOps) ->
ets:foldl(
fun({Key}, Acc) ->
prune_latest_dot(DKM, Key, Id, Min, Acc, true)
end, prune_candidates(DKM, Id, Min, DbOps), GCT).
prune_candidates(#dkm{latest_candidates=LTC, latest=LT}, Id, Min, DbOps) ->
MatchSpec = [{{{Id, '$1'}, '_'},[{'=<', '$1', Min}], ['$_']}],
case ets:select(LTC, MatchSpec) of % TODO: use continuation limit
[] -> DbOps;
Matches ->
lists:foldl(
fun({Dot, Key}, Acc) ->
[{_, Dots0}] = ets:lookup(LTC, Key),
Dots1 = maps:remove(Id, Dots0),
case maps:to_list(Dots1) of
[LastCandidateDot] ->
% replace latest with last available candidate
ets:insert(LT, {Key, LastCandidateDot}),
ets:delete(LTC, Dot),
ets:delete(LTC, LastCandidateDot),
ets:delete(LTC, Key),
[{?DB_DKM, sext:encode(Dot), ?DELETED} | Acc];
[NextCandidateDot|_] ->
% replace latest with next available candidate
ets:insert(LT, {Key, NextCandidateDot}),
ets:delete(LTC, Dot),
ets:insert(LTC, {Key, Dots1}),
[{?DB_DKM, sext:encode(Dot), ?DELETED} | Acc]
end
end, DbOps, Matches)
end.
prune_latest_dot(#dkm{latest=LT, gc_candidates=GCT, latest_candidates=LTC}, Key, Id, Min, Acc, IsGC) ->
case ets:lookup(LT, Key) of
[{_, {Id, CurrentCnt} = CurrentDot}] when CurrentCnt =< Min ->
% can we replace the latest with a candidate
case ets:lookup(LTC, Key) of
[] when IsGC ->
ets:delete(LT, Key),
ets:delete(GCT, Key),
% no candidate available, this object can be deleted
[{?DB_DKM, sext:encode(CurrentDot), ?DELETED},
{?DB_OBJ, Key, ?DELETED} | Acc];
_ ->
Acc
end;
_ ->
Acc
end.
prune_for_peer(#dkm{latest=LT} = DKM, Id) ->
ets:foldl(
fun
({Key, {I, Cnt}}, Acc0) when I == Id ->
Acc1 = prune_candidates(DKM, Id, Cnt, Acc0),
prune_latest_dot(DKM, Key, Id, Cnt, Acc1, false);
(_, Acc) -> Acc
end, [], LT).
destroy(#dkm{latest=LT, gc_candidates=GCT}) ->
ets:delete(LT),
ets:delete(GCT),
ok.
test() ->
DKM = init(),
Dot1 = sext:encode({a, 1}),
Dot2 = sext:encode({a, 2}),
Dot3 = sext:encode({b, 1}),
insert(DKM, a, 1, <<"hello">>),
insert(DKM, b, 1, <<"hello">>),
[] = prune(DKM, a, 1, []),
insert(DKM, a, 2, <<"hello">>),
[{?DB_DKM, Dot1, ?DELETED}] = prune(DKM, a, 1, []),
mark_for_gc(DKM, <<"hello">>),
[] = prune(DKM, a, 1, []),
[{?DB_DKM, Dot2, ?DELETED}] = prune(DKM, a, 2, []),
[{?DB_DKM, Dot3, ?DELETED},
{?DB_OBJ, hello, ?DELETED}] = prune(DKM, b, 1, []). | apps/vmq_swc/src/vmq_swc_dkm.erl | 0.81571 | 0.690256 | vmq_swc_dkm.erl | starcoder |
%% Copyright (c) 2019-2021, <NAME> <<EMAIL>>. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(uef_lists).
-export([split_list_into_chunks/2]).
-export([lists_to_list_of_tuples/2, lists_to_list_of_tuples/3]).
-export([search/2]).
%%%------------------------------------------------------------------------------
%%% API
%%%------------------------------------------------------------------------------
%% split_list_into_chunks/2
-spec split_list_into_chunks(List :: list(), MaxLen :: pos_integer()) -> List2 :: list().
%% @doc
%% Splits List into list of lists [List1, List2, ..., ListN]
%% where List1, List2, ..., ListN are lists with maximum MaxLen elements.
%% @end
split_list_into_chunks([],_) -> [];
split_list_into_chunks(List,Len) when Len > length(List) ->
[List];
split_list_into_chunks(List,Len) ->
{Head,Tail} = lists:split(Len,List),
[Head | split_list_into_chunks(Tail,Len)].
%% lists_to_list_of_tuples/2
-spec lists_to_list_of_tuples(List1 :: list(), List2 :: list()) -> List3 :: [tuple()].
%% @doc
%% Transforms two lists into one list of two-tuples,
%% where the first element of each tuple is taken from the first list
%% and the second element is taken from the second list one by one.
%% @end
lists_to_list_of_tuples(List1, List2) ->
List = lists:foldl(
fun(Elem1, Acc1) ->
lists:foldl(
fun(Elem2, Acc2) ->
[{Elem1, Elem2} | Acc2]
end,
Acc1, List2
)
end,
[], List1
),
lists:reverse(List).
%% lists_to_list_of_tuples/3
-spec lists_to_list_of_tuples(List1 :: list(), List2 :: list(), List3 :: list()) -> List4 :: [tuple()].
%% @doc
%% Transforms three lists into one list of three-tuples,
%% where the first element of each tuple is taken from the first list,
%% the second element is taken from the second list one by one,
%% and the third element is taken from the third list one by one.
%% @end
lists_to_list_of_tuples(List1, List2, List3) ->
List = lists:foldl(
fun(Elem1, Acc1) ->
lists:foldl(
fun(Elem2, Acc2) ->
lists:foldl(
fun(Elem3, Acc3) ->
[{Elem1, Elem2, Elem3} | Acc3]
end,
Acc2, List3
)
end,
Acc1, List2
)
end,
[], List1
),
lists:reverse(List).
%% search/2
-spec search(Pred, List) -> {value, Value} | false when
Pred :: fun((T) -> boolean()),
List :: [T],
Value :: T.
%% @doc
%% If there is a Value in List such that Pred(Value) returns true, returns {value, Value} for the first such Value, otherwise returns false.
%% Since OTP 21.0 use BIF lists:search/2 instead.
%% @end
search(Pred, [Hd|Tail]) ->
case Pred(Hd) of
true -> {value, Hd};
false -> search(Pred, Tail)
end;
search(Pred, []) when is_function(Pred, 1) ->
false. | src/uef_lists.erl | 0.604165 | 0.458349 | uef_lists.erl | starcoder |
%% The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at http://www.mozilla.org/MPL/
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is GoPivotal, Inc.
%% Copyright (c) 2007-2016 Pivotal Software, Inc. All rights reserved.
%%
-module(qp_log).
-export([debug/1, debug/2, debug/3,
info/1, info/2, info/3,
notice/1, notice/2, notice/3,
warning/1, warning/2, warning/3,
error/1, error/2, error/3,
critical/1, critical/2, critical/3,
alert/1, alert/2, alert/3,
emergency/1, emergency/2, emergency/3,
none/1, none/2, none/3]).
%%----------------------------------------------------------------------------
-type category() :: atom().
-spec debug(string()) -> 'ok'.
-spec debug(string(), [any()]) -> 'ok'.
-spec debug(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec info(string()) -> 'ok'.
-spec info(string(), [any()]) -> 'ok'.
-spec info(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec notice(string()) -> 'ok'.
-spec notice(string(), [any()]) -> 'ok'.
-spec notice(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec warning(string()) -> 'ok'.
-spec warning(string(), [any()]) -> 'ok'.
-spec warning(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec error(string()) -> 'ok'.
-spec error(string(), [any()]) -> 'ok'.
-spec error(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec critical(string()) -> 'ok'.
-spec critical(string(), [any()]) -> 'ok'.
-spec critical(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec alert(string()) -> 'ok'.
-spec alert(string(), [any()]) -> 'ok'.
-spec alert(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec emergency(string()) -> 'ok'.
-spec emergency(string(), [any()]) -> 'ok'.
-spec emergency(pid() | [tuple()], string(), [any()]) -> 'ok'.
-spec none(string()) -> 'ok'.
-spec none(string(), [any()]) -> 'ok'.
-spec none(pid() | [tuple()], string(), [any()]) -> 'ok'.
%%----------------------------------------------------------------------------
debug(Format) -> debug(Format, []).
debug(Format, Args) -> debug(self(), Format, Args).
debug(Metadata, Format, Args) ->
lager:log(debug, Metadata, Format, Args).
info(Format) -> info(Format, []).
info(Format, Args) -> info(self(), Format, Args).
info(Metadata, Format, Args) ->
lager:log(info, Metadata, Format, Args).
notice(Format) -> notice(Format, []).
notice(Format, Args) -> notice(self(), Format, Args).
notice(Metadata, Format, Args) ->
lager:log(notice, Metadata, Format, Args).
warning(Format) -> warning(Format, []).
warning(Format, Args) -> warning(self(), Format, Args).
warning(Metadata, Format, Args) ->
lager:log( warning, Metadata, Format, Args).
error(Format) -> ?MODULE:error(Format, []).
error(Format, Args) -> ?MODULE:error(self(), Format, Args).
error(Metadata, Format, Args) ->
lager:log(error, Metadata, Format, Args).
critical(Format) -> critical(Format, []).
critical(Format, Args) -> critical(self(), Format, Args).
critical(Metadata, Format, Args) ->
lager:log(critical, Metadata, Format, Args).
alert(Format) -> alert(Format, []).
alert(Format, Args) -> alert(self(), Format, Args).
alert(Metadata, Format, Args) ->
lager:log(alert, Metadata, Format, Args).
emergency(Format) -> emergency(Format, []).
emergency(Format, Args) -> emergency(self(), Format, Args).
emergency(Metadata, Format, Args) ->
lager:log(emergency, Metadata, Format, Args).
none(Format) -> none(Format, []).
none(Format, Args) -> none(self(), Format, Args).
none(Metadata, Format, Args) ->
lager:log(none, Metadata, Format, Args). | src/qp_log.erl | 0.53048 | 0.506591 | qp_log.erl | starcoder |
%% @author Couchbase <<EMAIL>>
%% @copyright 2012-2018 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc this module implements state machine that decides which
%% vbucket moves can be started when and when necessary view
%% compactions can be performed.
%%
%% Overall idea is we want to move as many vbuckets as possible in
%% parallel but there are certain limits that we still need to
%% enforce. More below.
%%
%% Input is old and new vbucket map, from which it computes moves as
%% well as 2 parameters that describe concurrency limits.
%%
%% First limit is number of concurrent backfills into/out-of any
%% node. The idea is moving vbucket involves reading entire vbucket
%% from disk and sending it to destination node where entire vbucket
%% needs to be persisted. While this phase of vbucket move occurs
%% between this two nodes it's undesirable to do backfill phase
%% affecting any of those two nodes concurrently. We support limit
%% higher than 1, but in actual product it's 1.
%%
%% Second limit is how many vbucket we move into/out-of any node
%% before pausing moves and forcing views compaction.
%%
%% Current model of actions required as part of vbucket move are:
%%
%% a) build complete replica of vbucket on future master (backfill
%% phase). For this phase as pointed out above we have first limit
%% that affects both old master and future master. Note: we
%% consciously ignore the fact that we can also have incoming
%% backfills into future replicas in this phase. Those backfills
%% currently are currently not affected by or affect any limits.
%%
%% b) ensure that indexes are built for new vbucket on new master and
%% rest of vbucket takeover. That phase notably can happen
%% concurrently for many vbuckets on any node for both incoming and
%% outgoing vbucket moves. We actually try to pack as many of them as
%% possible so that indexer which is currently slowest part of
%% rebalance is always busy.
%%
%% c) (involves multiple vbucket moves at once) do view
%% compaction. This phase _cannot_ happen concurrently with any
%% vbucket moves. I.e. we want views to be as quiescent as possible
%% (i.e. no massive indexing of incoming vbucket moves at least). As
%% noted above we try to do several vbucket moves before pausing for
%% views compactions. Because compacting after every single vbucket
%% move is expensive.
%%
%% See image below (drawn by <NAME>. Many thanks):
%%
%% VBucket Move Scheduling
%% Time
%%
%% | /------------\
%% | | Backfill 0 | Backfills cannot happen
%% | \------------/ concurrently.
%% | | /------------\
%% | +------------+ | Backfill 1 |
%% | | Index File | \------------/
%% | | 0 | |
%% | | | +------------+ However, indexing _can_ happen
%% | | | | Index File | concurrently with backfills and
%% | | | | 1 | other indexing.
%% | | | | |
%% | +------------+ | |
%% | | | |
%% | | +------------+
%% | | |
%% | \---------+---------/
%% | |
%% | /--------------------------------\ Compaction for a set of vbucket moves
%% | | Compact both source and dest. | cannot happen concurrently with other
%% v \--------------------------------/ vbucket moves.
%%
%%
%% In that image you can see that backfills of 2 vbuckets between same
%% pair of nodes cannot happen concurrently, but next phase is
%% concurrent, after which there's view compaction on both nodes that
%% logically affect both moves (and prevent other concurrent moves)
%%
%% vbucket moves are picked w.r.t. this 2 constrains and we also have
%% heuristics to decide which moves to proceed based on the following
%% understanding of goodness:
%%
%% a) we want to start moving active vbuckets sooner. I.e. prioritize
%% moves that change master node and not just replicas. So that
%% balance w.r.t. node's load on GETs and SETs is more quickly
%% equalized.
%%
%% b) given that indexer is our bottleneck we want as much as possible
%% nodes to do some indexing work all or most of the time
-module(vbucket_move_scheduler).
-include("ns_common.hrl").
-export([prepare/7,
is_done/1,
choose_action/1,
get_moves/1,
extract_progress/1,
note_backfill_done/2,
note_move_completed/2,
note_compaction_done/2]).
-type move() :: {VBucket :: vbucket_id(),
ChainBefore :: [node() | undefined],
ChainAfter :: [node() | undefined],
Quirks :: [rebalance_quirks:quirk()]}.
%% all possible types of actions are moves and compactions
-type action() :: {move, move()} |
{compact, node()}.
-record(state, {
backfills_limit :: non_neg_integer(),
moves_before_compaction :: non_neg_integer(),
total_in_flight = 0 :: non_neg_integer(),
moves_left_count_per_node :: dict:dict(), % node() -> non_neg_integer()
moves_left :: [move()],
%% pending moves when current master is undefined For them
%% we don't have any limits and compaction is not needed.
%% And that's first moves that we ever consider doing
moves_from_undefineds :: [move()],
compaction_countdown_per_node :: dict:dict(), % node() -> non_neg_integer()
in_flight_backfills_per_node :: dict:dict(), % node() -> non_neg_integer() (I.e. counts current moves)
in_flight_per_node :: dict:dict(), % node() -> non_neg_integer() (I.e. counts current moves)
in_flight_compactions :: set:set(), % set of nodes
initial_move_counts :: dict:dict(),
left_move_counts :: dict:dict(),
inflight_moves_limit :: non_neg_integer()
}).
%% @doc prepares state (list of moves etc) based on current and target map
prepare(CurrentMap, TargetMap, Quirks,
BackfillsLimit, MovesBeforeCompaction, MaxInflightMoves, InfoLogger) ->
%% Dictionary mapping old node to vbucket and new node
MapTriples = lists:zip3(lists:seq(0, length(CurrentMap) - 1),
CurrentMap,
TargetMap),
{Moves, UndefinedMoves, TrivialMoves} =
lists:foldl(
fun ({V, C1, C2}, {MovesAcc, UndefinedMovesAcc, TrivialMovesAcc}) ->
OldMaster = hd(C1),
case OldMaster of
undefined ->
Move = {V, C1, C2, []},
{MovesAcc, [Move | UndefinedMovesAcc], TrivialMovesAcc};
_ ->
MoveQuirks = rebalance_quirks:get_node_quirks(OldMaster, Quirks),
TrivialMoves = rebalance_quirks:is_enabled(trivial_moves, MoveQuirks),
case C1 =:= C2 andalso not TrivialMoves of
true ->
{MovesAcc, UndefinedMovesAcc, TrivialMovesAcc + 1};
false ->
Move = {V, C1, C2, MoveQuirks},
{[Move | MovesAcc], UndefinedMovesAcc, TrivialMovesAcc}
end
end
end, {[], [], 0}, MapTriples),
MovesPerNode =
lists:foldl(
fun ({_V, [Src|_], [Dst|_], _}, Acc) ->
case Src =:= Dst of
true ->
%% no index changes will be done here
Acc;
_ ->
D = dict:update_counter(Src, 1, Acc),
dict:update_counter(Dst, 1, D)
end
end, dict:new(), Moves),
InitialMoveCounts =
lists:foldl(
fun ({_V, [Src|_], [Dst|_], _}, Acc) ->
D = dict:update_counter(Src, 1, Acc),
dict:update_counter(Dst, 1, D)
end, dict:new(), Moves),
CompactionCountdownPerNode = dict:map(fun (_K, _V) ->
MovesBeforeCompaction
end, InitialMoveCounts),
InFlight = dict:map(fun (_K, _V) -> 0 end, InitialMoveCounts),
BackfillNodes =
lists:foldl(
fun ({_V, OldChain, NewChain, _Quirks}, Acc) ->
MoveNodes = backfill_nodes(OldChain, NewChain),
sets:union(sets:from_list(MoveNodes), Acc)
end, sets:new(), Moves),
Backfills = dict:from_list([{N, 0} || N <- sets:to_list(BackfillNodes)]),
State = #state{backfills_limit = BackfillsLimit,
moves_before_compaction = MovesBeforeCompaction,
inflight_moves_limit = MaxInflightMoves,
total_in_flight = 0,
moves_left_count_per_node = MovesPerNode,
moves_left = Moves,
moves_from_undefineds = UndefinedMoves,
compaction_countdown_per_node = CompactionCountdownPerNode,
in_flight_backfills_per_node = Backfills,
in_flight_per_node = InFlight,
in_flight_compactions = sets:new(),
initial_move_counts = InitialMoveCounts,
left_move_counts = InitialMoveCounts},
InfoLogger("The following count of vbuckets do not need to be moved at all: ~p", [TrivialMoves]),
InfoLogger("The following moves are planned:~n~p", [UndefinedMoves ++ Moves]),
%% InfoLogger("State:~n~p", [State]),
State.
get_moves(#state{moves_left = Moves,
moves_from_undefineds = UndefinedMoves}) ->
{Moves, UndefinedMoves}.
%% @doc true iff we're done. NOTE: is_done is only valid if
%% choose_action returned empty actions list
is_done(#state{moves_left = MovesLeft,
moves_from_undefineds = UndefinedMoves,
total_in_flight = TotalInFlight,
in_flight_compactions = InFlightCompactions} = _State) ->
MovesLeft =:= [] andalso UndefinedMoves =:= []
andalso TotalInFlight =:= 0 andalso sets:new() =:= InFlightCompactions.
updatef(Record, Field, Body) ->
V = erlang:element(Field, Record),
NewV = Body(V),
erlang:setelement(Field, Record, NewV).
consider_starting_compaction(State) ->
dict:fold(
fun (Node, Counter, Acc0) ->
CanDo0 = dict:fetch(Node, State#state.in_flight_per_node) =:= 0,
CanDo1 = CanDo0 andalso not sets:is_element(Node, State#state.in_flight_compactions),
CanDo2 = CanDo1 andalso (Counter =:= 0 orelse (Counter < State#state.moves_before_compaction
andalso dict:fetch(Node, State#state.moves_left_count_per_node) =:= 0)),
case CanDo2 of
true ->
[Node | Acc0];
_ ->
Acc0
end
end, [], State#state.compaction_countdown_per_node).
%% builds list of actions to do now (in passed state) and returns it
%% with new state (assuming actions are started)
-spec choose_action(#state{}) -> {[action()], #state{}}.
choose_action(#state{moves_from_undefineds = [_|_] = Moves,
total_in_flight = TotalInFlight} = State) ->
NewState = State#state{moves_from_undefineds = [],
total_in_flight = TotalInFlight + length(Moves)},
{OtherActions, NewState2} = choose_action(NewState),
{OtherActions ++ [{move, M} || M <- Moves], NewState2};
choose_action(State) ->
Nodes = consider_starting_compaction(State),
NewState = updatef(State, #state.in_flight_compactions,
fun (InFlightCompactions) ->
lists:foldl(fun sets:add_element/2, InFlightCompactions, Nodes)
end),
NewState1 = updatef(NewState, #state.compaction_countdown_per_node,
fun (CompactionCountdownPerNode) ->
lists:foldl(
fun (N, D0) ->
dict:store(N, State#state.moves_before_compaction, D0)
end, CompactionCountdownPerNode, Nodes)
end),
{OtherActions, NewState2} = choose_action_not_compaction(NewState1),
Actions = [{compact, N} || N <- Nodes] ++ OtherActions,
{Actions, NewState2}.
sortby(List, KeyFn, LessEqFn) ->
KeyedList = [{KeyFn(E), E} || E <- List],
KeyedSorted = lists:sort(fun ({KA, _}, {KB, _}) ->
LessEqFn(KA, KB)
end, KeyedList),
[E || {_, E} <- KeyedSorted].
move_is_possible([Src | _] = OldChain,
[Dst | _] = NewChain,
BackfillsLimit, NowBackfills, CompactionCountdown,
InFlightMoves, InFlightMovesLimit) ->
dict:fetch(Src, CompactionCountdown) > 0
andalso dict:fetch(Dst, CompactionCountdown) > 0
andalso dict:fetch(Dst, InFlightMoves) < InFlightMovesLimit
andalso dict:fetch(Src, InFlightMoves) < InFlightMovesLimit
andalso lists:all(fun (N) ->
dict:fetch(N, NowBackfills) < BackfillsLimit
end, backfill_nodes(OldChain, NewChain)).
backfill_nodes([OldMaster | _] = OldChain,
[NewMaster | NewReplicas]) ->
%% The old master is always charged a backfill as long as it exists. The
%% reasons are:
%% - it's almost certain that it'll need to stream a lot of stuff
%% - if the master changes, it will also have to clean up views
[OldMaster || OldMaster =/= undefined] ++
%% The new master is charged a backfill as long as the vbucket is
%% moved from a different node, even if the new master already has a
%% copy. That's because views might need to be built, and that's
%% expensive.
[NewMaster || NewMaster =/= OldMaster] ++
%% All replica nodes are charged a backfill as long as they don't
%% already have the vbucket. This is to ensure that that we don't have
%% lots of "free" replica moves into a node, which can significantly
%% affect clients.
[N || N <- NewReplicas,
N =/= undefined,
not lists:member(N, OldChain)].
increment_counter(Node, Node, Dict) ->
dict:update_counter(Node, 1, Dict);
increment_counter(Src, Dst, Dict) ->
dict:update_counter(Dst, 1, dict:update_counter(Src, 1, Dict)).
decrement_counter_if_real_move(Node, Node, Dict) ->
Dict;
decrement_counter_if_real_move(Src, Dst, Dict) ->
dict:update_counter(Dst, -1, dict:update_counter(Src, -1, Dict)).
choose_action_not_compaction(#state{
backfills_limit = BackfillsLimit,
inflight_moves_limit = MaxInflightMoves,
in_flight_backfills_per_node = NowBackfills,
in_flight_per_node = NowInFlight,
in_flight_compactions = NowCompactions,
moves_left_count_per_node = LeftCount,
moves_left = MovesLeft,
compaction_countdown_per_node = CompactionCountdown} = State) ->
PossibleMoves =
lists:flatmap(fun ({_V, [Src|_] = OldChain, [Dst|_] = NewChain, _} = Move) ->
Can1 = move_is_possible(OldChain, NewChain,
BackfillsLimit, NowBackfills,
CompactionCountdown,
NowInFlight, MaxInflightMoves),
Can2 = Can1 andalso not sets:is_element(Src, NowCompactions),
Can3 = Can2 andalso not sets:is_element(Dst, NowCompactions),
case Can3 of
true ->
%% consider computing goodness here
[Move];
false ->
[]
end
end, MovesLeft),
GoodnessFn =
fun ({Vb, [Src | _], [Dst | _], _}) ->
case Src =:= Dst of
true ->
%% we under-prioritize moves that
%% don't actually move active
%% position. Because a) they don't
%% affect indexes and we want indexes
%% to be build as early and as in
%% parallel as possible and b)
%% because we want to encourage
%% earlier improvement of balance
%% w.r.t active vbuckets to equalize
%% GET/SET load earlier
Vb;
_ ->
%% our goal is to keep indexer on all nodes
%% busy as much as possible at all times. Thus
%% we prefer nodes with least current
%% moves. And destination is more important
%% because on source is it's just cleanup and
%% thus much less work
NoCompactionsG = 10000 - 10 * dict:fetch(Dst, NowInFlight) - dict:fetch(Src, NowInFlight),
%% all else equals we don't want to delay
%% index compactions
G2 = NoCompactionsG * 100 - dict:fetch(Dst, CompactionCountdown) - dict:fetch(Src, CompactionCountdown),
G3 = G2 * 10000 + dict:fetch(Dst, LeftCount) + dict:fetch(Src, LeftCount),
G3 * 10000 + Vb
end
end,
LessEqFn = fun (GoodnessA, GoodnessB) -> GoodnessA >= GoodnessB end,
SortedMoves = sortby(PossibleMoves, GoodnessFn, LessEqFn),
%% case PossibleMoves =/= [] of
%% true ->
%% ?log_debug("PossibleMovesKeyed:~n~p", [begin
%% KeyedList = [{GoodnessFn(E), E} || E <- PossibleMoves],
%% KS = lists:sort(fun ({KA, _}, {KB, _}) ->
%% LessEqFn(KA, KB)
%% end, KeyedList),
%% lists:sublist(KS, 20)
%% end]);
%% _ ->
%% ok
%% end,
%% NOTE: we know that first move is always allowed
{SelectedMoves, NewNowBackfills, NewCompactionCountdown, NewNowInFlight, NewLeftCount} =
misc:letrec(
[SortedMoves, NowBackfills, CompactionCountdown, NowInFlight, LeftCount, []],
fun (Rec, [{_V, [Src|_] = OldChain, [Dst|_] = NewChain, _} = Move | RestMoves],
NowBackfills0, CompactionCountdown0, NowInFlight0, LeftCount0, Acc) ->
case move_is_possible(OldChain, NewChain, BackfillsLimit, NowBackfills0,
CompactionCountdown0, NowInFlight0, MaxInflightMoves) of
true ->
NewNowBackfills =
lists:foldl(
fun (N, Acc0) ->
dict:update_counter(N, 1, Acc0)
end, NowBackfills0, backfill_nodes(OldChain, NewChain)),
Rec(Rec, RestMoves,
NewNowBackfills,
decrement_counter_if_real_move(Src, Dst, CompactionCountdown0),
increment_counter(Src, Dst, NowInFlight0),
decrement_counter_if_real_move(Src, Dst, LeftCount0),
[Move | Acc]);
_ ->
Rec(Rec, RestMoves, NowBackfills0, CompactionCountdown0, NowInFlight0,
LeftCount0, Acc)
end;
(_Rec, [], NowBackfills0, MovesBeforeCompaction0, NowInFlight0, LeftCount0, Acc) ->
{Acc, NowBackfills0, MovesBeforeCompaction0, NowInFlight0, LeftCount0}
end),
NewMovesLeft = MovesLeft -- SelectedMoves,
NewState = State#state{in_flight_backfills_per_node = NewNowBackfills,
in_flight_per_node = NewNowInFlight,
total_in_flight = State#state.total_in_flight + length(SelectedMoves),
moves_left_count_per_node = NewLeftCount,
moves_left = NewMovesLeft,
compaction_countdown_per_node = NewCompactionCountdown},
case SelectedMoves of
[] ->
{newstate, true} = {newstate, State =:= NewState},
{[], State};
_ ->
{MoreMoves, NewState2} = choose_action_not_compaction(NewState),
{MoreMoves ++ [{move, M} || M <- SelectedMoves], NewState2}
end.
extract_progress(#state{initial_move_counts = InitialCounts,
left_move_counts = LeftCounts} = _State) ->
dict:map(fun (Node, ThisInitialCount) ->
ThisLeftCount = dict:fetch(Node, LeftCounts),
1.0 - ThisLeftCount / ThisInitialCount
end, InitialCounts).
%% @doc marks backfill phase of previously started move as done. Users
%% of this code will call it when backfill is done to update state so
%% that next moves can be started.
note_backfill_done(State, {move, {_V, [undefined|_], [_Dst|_], _}}) ->
State;
note_backfill_done(State, {move, {_V, OldChain, NewChain, _}}) ->
updatef(State, #state.in_flight_backfills_per_node,
fun (NowBackfills) ->
lists:foldl(
fun (N, Acc) ->
dict:update_counter(N, -1, Acc)
end, NowBackfills, backfill_nodes(OldChain, NewChain))
end).
%% @doc marks entire move that was previously started done. NOTE: this
%% assumes that backfill phase of this move was previously marked as
%% done. Users of this code will call it when move is done to update
%% state so that next moves and/or compactions can be started.
note_move_completed(State, {move, {_V, [undefined|_], [_Dst|_], _}}) ->
updatef(State, #state.total_in_flight, fun (V) -> V - 1 end);
note_move_completed(State, {move, {_V, [Src|_], [Dst|_], _}}) ->
State1 =
updatef(State, #state.in_flight_per_node,
fun (NowInFlight) ->
NowInFlight1 = dict:update_counter(Src, -1, NowInFlight),
case Src =:= Dst of
true ->
NowInFlight1;
_ ->
dict:update_counter(Dst, -1, NowInFlight1)
end
end),
State2 =
updatef(State1, #state.left_move_counts,
fun (LeftMoveCounts) ->
D = dict:update_counter(Src, -1, LeftMoveCounts),
dict:update_counter(Dst, -1, D)
end),
updatef(State2, #state.total_in_flight, fun (V) -> V - 1 end).
%% @doc marks previously started compaction as done. Users of this
%% code will call it when compaction is done to update state so that
%% next moves and/or compactions can be started.
note_compaction_done(State, {compact, Node}) ->
updatef(State, #state.in_flight_compactions,
fun (InFlightCompactions) ->
sets:del_element(Node, InFlightCompactions)
end). | src/vbucket_move_scheduler.erl | 0.689201 | 0.484685 | vbucket_move_scheduler.erl | starcoder |
-module(teal_behaviours).
-export([
has_callback/3, assert_has_callback/3, assert_has_callback/4,
is_behaviour/1, assert_is_behaviour/1, assert_is_behaviour/2,
implements_behaviour/2, assert_implements_behaviour/2,
assert_implements_behaviour/3]).
%%%===================================================================
%%% API
%%%===================================================================
-spec has_callback(Module :: atom(), Name :: atom(), Arity :: integer()) ->
boolean().
has_callback(Module, Name, Arity) ->
Callbacks = get_callbacks(Module),
lists:any(fun({callback, Details}) ->
[CallbackDetails] = Details,
{{CallbackName, CallbackArity}, _Args} = CallbackDetails,
({CallbackName, CallbackArity} == {Name, Arity})
end, Callbacks).
-spec assert_has_callback(Module :: atom(), Name :: atom(),
Arity :: integer()) -> boolean().
assert_has_callback(Module, Name, Arity) ->
teal:assert(true, has_callback(Module, Name, Arity), no_callback).
-spec assert_has_callback(Module :: atom(), Name :: atom(),
Arity :: integer(), Msg :: atom()) -> boolean().
assert_has_callback(Module, Name, Arity, Msg) ->
teal:assert(true, has_callback(Module, Name, Arity), Msg).
-spec is_behaviour(Module :: atom()) -> boolean().
is_behaviour(Module) ->
case get_callbacks(Module) of
[] ->
false;
_ ->
true
end.
-spec assert_is_behaviour(Module :: atom()) -> boolean().
assert_is_behaviour(Module) ->
teal:assert(true, is_behaviour(Module), not_behaviour).
-spec assert_is_behaviour(Module :: atom(), Msg :: atom()) -> boolean().
assert_is_behaviour(Module, Msg) ->
teal:assert(true, is_behaviour(Module), Msg).
-spec implements_behaviour(Module :: atom(), Behaviour :: atom()) -> boolean().
implements_behaviour(Module, Behaviour) ->
Callbacks = get_callbacks(Behaviour),
Exports = Module:module_info(exports),
CallbackNameArities = callbacks_to_name_arity(Callbacks),
lists:all(fun(Callback) ->
lists:member(Callback, Exports)
end, CallbackNameArities).
-spec assert_implements_behaviour(Module :: atom(), Behaviour :: atom()) ->
boolean().
assert_implements_behaviour(Module, Behaviour) ->
teal:assert(true, implements_behaviour(Module, Behaviour),
behaviour_not_implemented).
-spec assert_implements_behaviour(Module :: atom(), Behaviour :: atom(),
Msg :: atom()) -> boolean().
assert_implements_behaviour(Module, Behaviour, Msg) ->
teal:assert(true, implements_behaviour(Module, Behaviour), Msg).
%%%===================================================================
%%% Private functions
%%%===================================================================
get_callbacks(Module) ->
Attributes = Module:module_info(attributes),
lists:filter(fun({AttrName, _Opts}) ->
case AttrName of
callback ->
true;
_ ->
false
end
end, Attributes).
callbacks_to_name_arity(Callbacks) ->
lists:map(fun({_, [{{Name, Arity}, _}]}) ->
{Name, Arity}
end, Callbacks). | src/teal_behaviours.erl | 0.615666 | 0.499329 | teal_behaviours.erl | starcoder |
-module(day3).
%% API exports
-export([part1/1, part2/1]).
part1(File) ->
Binaries = read_binaries(File),
{Gamma, Epsilon} = gamma_epsilon(Binaries),
GammaDec = bin_to_dec(Gamma),
EpsilonDec = bin_to_dec(Epsilon),
GammaDec * EpsilonDec.
gamma_epsilon(Binaries) ->
Columns = transpose(Binaries),
Gamma = [most_common_bit(Column) || Column <- Columns],
Epsilon = inverse(Gamma),
{Gamma, Epsilon}.
%% stolen from https://stackoverflow.com/a/7855826
transpose([[]|_]) -> [];
transpose(M) ->
[lists:map(fun hd/1, M) | transpose(lists:map(fun tl/1, M))].
bin_to_dec(Bin) ->
Indices = lists:seq(0, length(Bin) - 1),
IndexBits = lists:zip(Indices, lists:reverse(Bin)),
lists:sum([Bit * round(math:pow(2, Index)) || {Index, Bit} <- IndexBits]).
inverse(Bin) when is_list(Bin) -> [inverse(Element) || Element <- Bin];
inverse(0) -> 1;
inverse(1) -> 0.
part2(File) ->
Binaries = read_binaries(File),
Oxy = oxy(Binaries),
Scrub = scrub(Binaries),
Oxy * Scrub.
oxy(Binaries) -> filter(oxy, Binaries).
scrub(Binaries) -> filter(scrub, Binaries).
filter(oxy, Binaries) -> filter(Binaries, fun most_common_bit/1);
filter(scrub, Binaries) -> filter(Binaries, fun least_common_bit/1);
filter(Binaries, ModeFun) -> filter(1, Binaries, ModeFun).
filter(_Pos, [TheOne], _ModeFun) -> bin_to_dec(TheOne);
filter(Pos, Binaries, ModeFun) ->
ValuesAtPos = [lists:nth(Pos, Binary) || Binary <- Binaries],
Mode = ModeFun(ValuesAtPos),
Filtered = [Binary || Binary <- Binaries, lists:nth(Pos, Binary) =:= Mode],
filter(Pos + 1, Filtered, ModeFun).
most_common_bit(Binary) ->
Sum = lists:sum(Binary),
Length = length(Binary),
most_common_bit(Sum, Length).
most_common_bit(Sum, Length) when Sum >= Length/2 -> 1;
most_common_bit(_, _) -> 0.
least_common_bit(Binary) -> inverse(most_common_bit(Binary)).
%%====================================================================
%% Santa's little parsers
%%====================================================================
read_binaries(Filename) ->
{ok, FileContent} = file:read_file(Filename),
Lines = string:lexemes(FileContent, "\n"),
[to_integers_list(Line) || Line <- Lines].
to_integers_list(BinInteger) -> [Char - 48 || <<Char>> <= BinInteger]. | src/day3.erl | 0.522202 | 0.639455 | day3.erl | starcoder |
%% @doc
%% A collector for a set of metrics.
%%
%% Normal users should use {@link prometheus_gauge},
%% {@link prometheus_counter}, {@link prometheus_summary}
%% and {@link prometheus_histogram}.
%%
%% Implementing `:prometheus_collector' behaviour is for advanced uses
%% such as proxying metrics from another monitoring system.
%% It is it the responsibility of the implementer to ensure produced metrics
%% are valid.
%%
%% You will be working with Prometheus
%% data model directly (see {@link prometheus_model_helpers}).
%%
%% Callbacks:
%% - `collect_mf(Registry, Callback)' - called by exporters and formats.
%% Should call `Callback' for each `MetricFamily' of this collector;
%% - `collect_metrics(Name, Data)' - called by `MetricFamily' constructor.
%% Should return Metric list for each MetricFamily identified by `Name'.
%% `Data' is a term associated with MetricFamily by collect_mf.
%% - `deregister_cleanup(Registry)' - called when collector unregistered by
%% `Registry'. If collector is stateful you can put cleanup code here.
%%
%% Example (simplified `prometheus_vm_memory_collector'):
%% <pre lang="erlang">
%% -module(prometheus_vm_memory_collector).
%%
%% -export([deregister_cleanup/1,
%% collect_mf/2,
%% collect_metrics/2]).
%%
%% -behaviour(prometheus_collector).
%%
%% %%====================================================================
%% %% Collector API
%% %%====================================================================
%%
%% deregister_cleanup(_) -> ok.
%%
%% collect_mf(_Registry, Callback) ->
%% Memory = erlang:memory(),
%% Callback(create_gauge(erlang_vm_bytes_total,
%% "The total amount of memory currently allocated. "
%% "This is the same as the sum of the memory size "
%% "for processes and system.",
%% Memory)),
%% ok.
%%
%% collect_metrics(erlang_vm_bytes_total, Memory) ->
%% prometheus_model_helpers:gauge_metrics(
%% [
%% {[{kind, system}], proplists:get_value(system, Memory)},
%% {[{kind, processes}], proplists:get_value(processes, Memory)}
%% ]).
%%
%% %%====================================================================
%% %% Private Parts
%% %%====================================================================
%%
%% create_gauge(Name, Help, Data) ->
%% prometheus_model_helpers:create_mf(Name, Help, gauge, ?MODULE, Data).
%% </pre>
%% @end
-module(prometheus_collector).
-export([enabled_collectors/0,
collect_mf/3]).
-ifdef(TEST).
-export([collect_mf_to_list/1]).
-endif.
-export_type([collector/0,
data/0,
collect_mf_callback/0]).
-compile({no_auto_import, [register/2]}).
-define(DEFAULT_COLLECTORS,
[prometheus_boolean,
prometheus_counter,
prometheus_gauge,
prometheus_histogram,
prometheus_mnesia_collector,
prometheus_quantile_summary,
prometheus_summary,
prometheus_vm_dist_collector,
prometheus_vm_memory_collector,
prometheus_vm_msacc_collector,
prometheus_vm_statistics_collector,
prometheus_vm_system_info_collector]).
-include("prometheus.hrl").
-include("prometheus_model.hrl").
%%====================================================================
%% Types
%%====================================================================
-type collector() :: atom().
-type data() :: any().
-type collect_mf_callback() ::
fun((prometheus_model:'MetricFamily'()) -> any()).
%%====================================================================
%% Callbacks
%%====================================================================
-callback collect_mf(Registry, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Callback :: collect_mf_callback().
%% %% TODO: either add mandatory Type argument here or track Type
%% %% automatically and don't ask collector implementers to care
%% -callback collect_metrics(Name, Data) -> Metrics when
%% Name :: prometheus_metric:name(),
%% Data :: data(),
%% Metrics :: prometheus_model:'Metric'() | [prometheus_model:'Metric'()].
-callback deregister_cleanup(Registry) -> ok when
Registry :: prometheus_registry:registry().
%%====================================================================
%% Public API
%%====================================================================
%% @private
-spec enabled_collectors() -> [collector()].
enabled_collectors() ->
lists:usort(
case application:get_env(prometheus, collectors) of
undefined -> all_known_collectors();
{ok, Collectors} -> catch_default_collectors(Collectors)
end).
%% @doc Calls `Callback' for each MetricFamily of this collector.
-spec collect_mf(Registry, Collector, Callback) -> ok when
Registry :: prometheus_registry:registry(),
Collector :: collector(),
Callback :: collect_mf_callback().
collect_mf(Registry, Collector, Callback0) ->
Callback = case application:get_env(prometheus, global_labels) of
undefined ->
Callback0;
{ok, Labels0} ->
Labels = prometheus_model_helpers:label_pairs(Labels0),
fun (MF=#'MetricFamily'{metric=Metrics0}) ->
Metrics = [M#'Metric'{label=Labels ++ ML}
|| M=#'Metric'{label=ML} <- Metrics0],
Callback0(MF#'MetricFamily'{metric=Metrics})
end
end,
ok = Collector:collect_mf(Registry, Callback).
%%====================================================================
%% Test only
%%====================================================================
-ifdef(TEST).
%% @private
collect_mf_to_list(Collector) ->
collect_mf_to_list(default, Collector).
collect_mf_to_list(Registry, Collector) ->
try
Callback = fun (MF) ->
put(Collector, [MF|get_list(Collector)])
end,
prometheus_collector:collect_mf(Registry, Collector, Callback),
get_list(Collector)
after
erase(Collector)
end.
get_list(Key) ->
case get(Key) of
undefined ->
[];
Value ->
Value
end.
-endif.
%%====================================================================
%% Private Parts
%%====================================================================
all_known_collectors() ->
lists:umerge(
prometheus_misc:behaviour_modules(prometheus_collector),
?DEFAULT_COLLECTORS).
catch_default_collectors(Collectors) ->
maybe_replace_default(Collectors, []).
maybe_replace_default([default|Rest], Acc) ->
maybe_replace_default(Rest, ?DEFAULT_COLLECTORS ++ Acc);
maybe_replace_default([], Acc) ->
Acc;
maybe_replace_default([H|R], Acc) ->
maybe_replace_default(R, [H|Acc]). | src/prometheus_collector.erl | 0.596668 | 0.443721 | prometheus_collector.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_spatial_index).
-export([get/2]).
-export([init/2, open/2, close/1, reset/1, delete/1]).
-export([start_update/3, purge/4, process_doc/3, finish_update/1, commit/1]).
-export([compact/3, swap_compacted/2]).
-include("couch_spatial.hrl").
get(Property, State) ->
case Property of
db_name ->
State#spatial_state.db_name;
idx_name ->
State#spatial_state.idx_name;
signature ->
State#spatial_state.sig;
update_seq ->
State#spatial_state.update_seq;
purge_seq ->
State#spatial_state.purge_seq;
update_options ->
%Opts = State#spatual_state.design_options,
% NOTE vmx 2012-10-19: Not supported at the moment
%IncDesign = couch_util:get_value(<<"include_design">>, Opts, false),
%LocalSeq = couch_util:get_value(<<"local_seq">>, Opts, false),
%if IncDesign -> [include_design]; true -> [] end
% ++ if LocalSeq -> [local_seq]; true -> [] end;
[];
info ->
#spatial_state{
fd = Fd,
sig = Sig,
language = Lang,
update_seq = UpdateSeq,
purge_seq = PurgeSeq
} = State,
{ok, Size} = couch_file:bytes(Fd),
{ok, [
{signature, list_to_binary(couch_index_util:hexsig(Sig))},
{language, Lang},
{disk_size, Size},
{update_seq, UpdateSeq},
{purge_seq, PurgeSeq}
]};
Other ->
throw({unknown_index_property, Other})
end.
init(Db, DDoc) ->
couch_spatial_util:ddoc_to_spatial_state(couch_db:name(Db), DDoc).
open(Db, State) ->
#spatial_state{
db_name=DbName,
sig=Sig
} = State,
IndexFName = couch_spatial_util:index_file(DbName, Sig),
case couch_spatial_util:open_file(IndexFName) of
{ok, Fd} ->
NewState = case (catch couch_file:read_header(Fd)) of
{ok, {Sig, Header}} ->
% Matching view signatures.
couch_spatial_util:init_state(Db, Fd, State, Header);
_ ->
couch_spatial_util:reset_index(Db, Fd, State)
end,
{ok, RefCounter} = couch_ref_counter:start([Fd]),
{ok, NewState#spatial_state{ref_counter=RefCounter}};
Error ->
(catch couch_spatial_util:delete_files(DbName, Sig)),
Error
end.
close(State) ->
couch_file:close(State#spatial_state.fd).
delete(State) ->
#spatial_state{
fd=Fd,
db_name=DbName,
sig=Sig
} = State,
couch_file:close(Fd),
catch couch_spatial_util:delete_files(DbName, Sig).
reset(State) ->
#spatial_state{
fd=Fd,
db_name=DbName
} = State,
couch_util:with_db(DbName, fun(Db) ->
NewState = couch_spatial_util:reset_index(Db, Fd, State),
{ok, NewState}
end).
start_update(PartialDest, State, NumChanges) ->
couch_spatial_updater:start_update(PartialDest, State, NumChanges).
purge(_Db, _PurgeSeq, _PurgedIdRevs, _State) ->
throw("purge on spatial views isn't supported").
process_doc(Doc, Seq, State) ->
couch_spatial_updater:process_doc(Doc, Seq, State).
finish_update(State) ->
couch_spatial_updater:finish_update(State).
commit(State) ->
#spatial_state{
sig=Sig,
fd=Fd
} = State,
Header = {Sig, couch_spatial_util:make_header(State)},
couch_file:write_header(Fd, Header).
compact(Db, State, Opts) ->
couch_spatial_compactor:compact(Db, State, Opts).
swap_compacted(OldState, NewState) ->
couch_spatial_compactor:swap_compacted(OldState, NewState). | gc-couchdb/src/couch_spatial_index.erl | 0.696991 | 0.431405 | couch_spatial_index.erl | starcoder |
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% Random error injection suite.
%%
%% Tests that use error injection should go here, to avoid polluting
%% the logs and scaring people
-module(mria_fault_tolerance_suite).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-compile(nowarn_underscore_match).
all() -> mria_ct:all(?MODULE).
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(_TestCase, Config) ->
Config.
end_per_testcase(TestCase, Config) ->
mria_ct:cleanup(TestCase),
snabbkaffe:stop(),
Config.
t_agent_restart(_) ->
Cluster = mria_ct:cluster([core, core, replicant], mria_mnesia_test_util:common_env()),
CounterKey = counter,
?check_trace(
#{timetrap => 60000},
try
Nodes = [N1, _N2, N3] = mria_ct:start_cluster(mria, Cluster),
mria_mnesia_test_util:wait_tables(Nodes),
mria_mnesia_test_util:stabilize(1000),
%% Everything in mria agent will crash
CrashRef = ?inject_crash( #{?snk_meta := #{domain := [mria, rlog, agent|_]}}
, snabbkaffe_nemesis:random_crash(0.4)
),
ok = rpc:call(N1, mria_transaction_gen, counter, [CounterKey, 100, 100]),
complete_test(CrashRef, Cluster, Nodes),
N3
after
mria_ct:teardown_cluster(Cluster)
end,
fun(N3, Trace) ->
?assert(mria_rlog_props:replicant_bootstrap_stages(N3, Trace)),
mria_rlog_props:counter_import_check(CounterKey, N3, Trace),
?assert(length(?of_kind(snabbkaffe_crash, Trace)) > 1)
end).
t_rand_error_injection(_) ->
Cluster = mria_ct:cluster([core, core, replicant], mria_mnesia_test_util:common_env()),
CounterKey = counter,
?check_trace(
#{timetrap => 60000},
try
Nodes = [N1, _N2, N3] = mria_ct:start_cluster(mria, Cluster),
mria_mnesia_test_util:wait_tables(Nodes),
mria_mnesia_test_util:stabilize(1000),
%% Everything in mria RLOG will crash
CrashRef = ?inject_crash( #{?snk_meta := #{domain := [mria, rlog|_]}}
, snabbkaffe_nemesis:random_crash(0.01)
),
ok = rpc:call(N1, mria_transaction_gen, counter, [CounterKey, 300, 100]),
complete_test(CrashRef, Cluster, Nodes),
N3
after
mria_ct:teardown_cluster(Cluster)
end,
fun(N3, Trace) ->
?assert(mria_rlog_props:replicant_bootstrap_stages(N3, Trace)),
?assert(mria_rlog_props:counter_import_check(CounterKey, N3, Trace) > 0)
end).
%% This testcase verifies verifies various modes of mria:ro_transaction
t_sum_verify(_) ->
Cluster = mria_ct:cluster([core, replicant], mria_mnesia_test_util:common_env()),
NTrans = 100,
?check_trace(
#{timetrap => 60000},
try
Nodes = mria_ct:start_cluster(mria, Cluster),
mria_mnesia_test_util:wait_tables(Nodes),
%% Everything in mria RLOG will crash
?inject_crash( #{?snk_meta := #{domain := [mria, rlog|_]}}
, snabbkaffe_nemesis:random_crash(0.1)
),
[rpc:async_call(N, mria_transaction_gen, verify_trans_sum, [NTrans, 100])
|| N <- lists:reverse(Nodes)],
[?block_until(#{?snk_kind := verify_trans_sum, node := N})
|| N <- Nodes]
after
mria_ct:teardown_cluster(Cluster)
end,
fun(Trace) ->
?assertMatch( [ok, ok]
, ?projection(result, ?of_kind(verify_trans_sum, Trace))
)
end).
%% Remove the injected errors and check table consistency
complete_test(CrashRef, Cluster, Nodes) ->
mria_mnesia_test_util:stabilize(5100),
snabbkaffe_nemesis:fix_crash(CrashRef),
mria_mnesia_test_util:wait_full_replication(Cluster),
mria_mnesia_test_util:compare_table_contents(test_tab, Nodes). | test/mria_fault_tolerance_suite.erl | 0.518546 | 0.592224 | mria_fault_tolerance_suite.erl | starcoder |
-module(model_langton3d).
-behaviour(model).
-include("parallant.hrl").
-export([initial_population/3,
move/3,
get_agent_char/2]).
-type cell_state() :: dead | alive.
-type cell() :: {cell_state()}.
-type direction() :: pos_x | neg_x | pos_y | neg_y | pos_z | neg_z.
-type langton_rule() :: [direction()].
-type direction3d() :: {langton_rule(), direction(), langton_rule()}.
-type langton_agent_state() :: {direction3d(), cell()}.
-type agent_state() :: parallant:agent_state(langton_agent_state()).
-type move() :: {0, 1, 0} | {1, 0, 0} | {0, -1, 0} |
{-1, 0, 0} | {0, 0, 1} | {0, 0, -1}.
%% model specific functions
-spec initial_population(PopulationSize :: pos_integer(),
World :: world(),
Config :: config()) ->
[{position(), agent_state()}].
initial_population(PopulationSize, World, Config) ->
#world{w = Width, h = Height, d = Depth} = World,
AllPositions = [{I, J, K} || I <- lists:seq(1, Width),
J <- lists:seq(1, Height),
K <- lists:seq(1, Depth)],
ShuffledPositions = algorithm:shuffle(AllPositions),
AgentPositions = lists:sublist(ShuffledPositions, 1, PopulationSize),
CellPositions = AllPositions,
All = [{Pos, [agent]} || Pos <- AgentPositions]
++ [{Pos, [cell]} || Pos <- CellPositions],
[populate_cell(Pos, Members, Config)
|| {Pos, Members} <- agents_lists:group_by(All)].
populate_cell(Pos, Members, _Config) ->
AgentState = case lists:member(agent, Members) of
true ->
random_agent_state();
_ ->
empty
end,
{Pos, {AgentState, initial_cell_state()}}.
-spec ant_rule() -> langton_rule().
ant_rule() ->
[pos_x, pos_y, pos_z, neg_x, neg_y, neg_z].
-spec initial_cell_state() -> cell().
initial_cell_state() ->
{dead}.
-spec random_agent_state() -> direction3d().
random_agent_state() ->
[H | T] = ant_rule(),
{lists:reverse(ant_rule()), H, T}.
%% {random_direction(), random_plane()}.
-spec move(position(), environment(), config()) -> environment().
move(Position, E, Config) ->
%% based on agent state and its neighbourhood
%% compute the new agent state and neighbourhood
%% langton's ant
A = #agent{pos = Position, state = agents:get_agent(Position, E, Config)},
{Old, New} = get_move(A, E, Config),
#agent{pos = OPos, state = {ODir, OCell}} = Old,
#agent{pos = NPos, state = {NDir, _}} = New,
case {ODir, agents:get_agent(New#agent.pos, E, Config)} of
{empty, _} ->
E;
{_, {empty, CellState}} ->
E1 = agents:update_agent(NPos, {NDir, CellState}, E, Config),
OldState = {empty, update_cell(OCell)},
agents:update_agent(OPos, OldState, E1, Config);
{_, empty} ->
io:format("no agent on pos: ~p~n", [Position]),
E;
{_, _} ->
E
end.
-spec get_move(agent(), environment(), config()) -> {agent(), agent()}.
get_move(A, E, Config) ->
New = move_agent(A, E, Config),
{A#agent{state = New#agent.state}, New}.
-spec update_cell(cell()) -> cell().
update_cell({dead}) -> {alive};
update_cell({alive}) -> {dead}.
-spec move_agent(agent(), environment(), config()) -> agent().
move_agent(Agent = #agent{state = {empty, _Cell}}, #env{}, _C) ->
Agent;
move_agent(#agent{pos = Pos, state = {Dir, {Cell}}}, #env{world = World}, _C) ->
New = turn(Dir, Cell),
{_, NewDir, _} = New,
NewPos = forward(Pos, NewDir, World),
#agent{pos = NewPos, state = {New, {Cell}}}.
-spec forward(position(), direction(), world()) -> position().
forward({X, Y, Z}, Dir, #world{w = W, h = H, d = D}) ->
{DX, DY, DZ} = direction_to_heading(Dir),
NewX = torus_bounds(X + DX, W),
NewY = torus_bounds(Y + DY, H),
NewZ = torus_bounds(Z + DZ, D),
{NewX, NewY, NewZ}.
torus_bounds(Val, Max) when Val < 1 -> Max + Val;
torus_bounds(Val, Max) when Val > Max -> Val - Max;
torus_bounds(Val, _Max) -> Val.
-spec turn(direction3d(), cell_state()) -> direction3d().
turn(Dir, dead) -> turn_left(Dir);
turn(Dir, alive) -> turn_right(Dir).
-spec turn_right(direction3d()) -> direction3d().
turn_right({_B1, _Dir, []}) ->
[H | T] = ant_rule(),
{lists:reverse(ant_rule()), H, T};
turn_right({B1, Dir, [H | T]}) -> {[Dir | B1], H, T}.
-spec turn_left(direction3d()) -> direction3d().
turn_left({[], _Dir, _B2}) ->
[H | T] = lists:reverse(ant_rule()),
{T, H, ant_rule()};
turn_left({[H | T], Dir, B2}) -> {T, H, [Dir | B2]}.
-spec direction_to_heading(direction()) -> move().
direction_to_heading(pos_x) -> {1, 0, 0};
direction_to_heading(neg_x) -> {-1, 0, 0};
direction_to_heading(pos_y) -> {0, 1, 0};
direction_to_heading(neg_y) -> {0, -1, 0};
direction_to_heading(pos_z) -> {0, 0, 1};
direction_to_heading(neg_z) -> {0, 0, -1}.
%% displaying agents
-spec get_agent_char(agent_state(), config()) -> char().
get_agent_char(empty, _Config) ->
$$;
get_agent_char({empty, {CellState}}, _Config) ->
cell_char(CellState);
get_agent_char({{_Buffer1, Dir, _Buffer2}, _}, _Config) ->
agent_char(Dir).
-spec agent_char(direction()) -> char().
agent_char(neg_x) -> $<;
agent_char(pos_x) -> $>;
agent_char(pos_y) -> $^;
agent_char(neg_y) -> $v;
agent_char(pos_z) -> $x;
agent_char(neg_z) -> $*.
-spec cell_char(cell_state()) -> char().
cell_char(dead) -> $.;
cell_char(alive) -> $o. | src/model_langton3d.erl | 0.525125 | 0.437283 | model_langton3d.erl | starcoder |
%% @doc A module to read variable-length codes from a byte stream.
-module(code_reader).
-export([read_code/2]).
%% @doc Reads an N-bit code from the given list of bytes, where the requested
%% code may span multiple bytes.
%%
%% A single code may be smaller than one byte. A code may also span more than
%% one byte, by taking a few bits from one byte and the remaining bits from the
%% subsequent bytes.
%%
%% For example, given the bytes:
%%
%% 0110 0100 0010 1010
%%
%% When a 5-bit code is read the code would be the least significant
%% bits of the first byte, i.e. `00100` = `4`. This leaves the pattern:
%%
%% 011 0010 1010
%%
%% The next 5-bit code is `10 011`, where `011` comes from the first byte and
%% `10` comes from the second.
%%
%% @param Bytes the byte stream from which to read. Each entry in this list
%% is a bitstream representing a single byte in the stream, with the exception
%% of the first entry. The first entry is _at most_ a byte, but may be fewer
%% than 8 bits, if some of the bits in that byte were read already via a
%% previous call to `read_code/2`.
%%
%% @param N the number of bits to read from the byte stream. It is an error to
%% request 0 or fewer bits, or to request more bits than are contained in the
%% byte stream.
%%
%% @returns `error` when an invalid number of bits are requested
%% @returns a tuple consisting of:
%% - The code that was read. Returned as an integer.
%% - The remaining byte stream after the requested code has been consumed.
%% This byte stream is suitable for passing back into `read_code/2`.
read_code( _, N) when N =< 0 -> error;
read_code(Bytes, N) -> read_code(Bytes, N, 0, 0).
%% @doc Reads an N-bit code from the given list of bytes, where the requested
%% code may span multiple bytes. A helper function for the main `read_code/2`
%% function, with the following additional arguments:
%%
%% @param SoFar the part of the code that has been read so far. This portion
%% represents the least significant part of the code, coming from earlier bytes
%% in the byte stream.
%%
%% @param BitsReadSoFar the number of bits that have already been read. This
%% number cannot be inferred from the value that has been read so far, because
%% the value read so far may have leading zeros that are not apparent in the
%% numerical representation of the partial code.
%%
%% An invariant of this function is that `N > 0`.
%%
%% The return value of this function can be directly returned by `read_code/2`.
%% Case: running out of bytes to read is an error. Because of the invariant
%% that `N > 0`, we can be sure something data is being requested from the
%% empty byte stream, hence the error.
read_code([], _, _, _) -> error;
%% Case: the requested number of bits fits into the first entry in the byte
%% stream, with more bits to spare.
read_code([First|Rest], N, SoFar, BitsReadSoFar) when bit_size(First) > N ->
% In that case, read the `N` least significant bits, leaving the remaining
% bits. These remaining bits form the new first byte of the byte stream.
RestOfFirstLength = bit_size(First) - N,
<<RestOfFirst:RestOfFirstLength, Code:N>> = First,
{
composed_code(Code, SoFar, BitsReadSoFar),
[<<RestOfFirst:RestOfFirstLength>>|Rest]
};
%% Case: the requested number of bits is exactly the number of bits in the
%% first entry in the byte stream, with no bits to spare.
read_code([First|Rest], N, SoFar, BitsReadSoFar) when bit_size(First) == N ->
% In that case, just read the entire first entry as a number. The new byte
% stream is the remaining entries in the input byte stream.
L = bit_size(First),
<<Code:L/integer>> = First, % read as number
% In particular, don't recurse further. This is important to enforce the
% invariant that `N > 0` in all calls.
{composed_code(Code, SoFar, BitsReadSoFar), Rest};
%% Case: even after reading the first entry in the byte stream, more bits have
%% to be read.
read_code([First|Rest], N, SoFar, BitsReadSoFar) ->
% Start by reading the first entry in the byte stream as a number.
L = bit_size(First),
<<Code:L/integer>> = First, % read as number
% Compose that number with whatevre part of the code has been read so far.
NewSoFar = composed_code(Code, SoFar, BitsReadSoFar),
% Finally, read the remaining of the bits from the remaining entries in the
% byte stream, accounting for the fact that a certain number of additional
% bits has been read.
read_code(Rest, N - L, NewSoFar, BitsReadSoFar + L).
%% @doc Compose a partially read code with an additional part of the code that
%% was just read.
%%
%% @param NewPart the part of the code that was most recently read. This
%% portion represents the most significant part of the code (up to this point,
%% as parts of the code that are yet to be read will represent more significant
%% parts).
%%
%% @param SoFar the part of the code that was previously read. This portion
%% represents the least significant part of the code, coming from earlier bytes
%% in the byte stream.
%%
%% @param BitsReadSoFar the number of bits that have already been read. This
%% number cannot be inferred from the value that has been read so far, because
%% the value read so far may have leading zeros that are not apparent in the
%% numerical representation of the partial code.
%%
%% @returns the new partial code, consisting of both parts of the code given
%% to this function.
composed_code(NewPart, SoFar, BitsReadSoFar) ->
(NewPart bsl BitsReadSoFar) bor SoFar. | src/code_reader.erl | 0.786541 | 0.820073 | code_reader.erl | starcoder |
%% @copyright <NAME>
%% @author <NAME> <<EMAIL>>
%% @version {@vsn}, {@date} {@time}
%% @doc ETS Counter Table Garbage Collector
%% @todo Optimize the implementation when markthreshold=0 (just
%% delete_object({Key, 0}) from main table.
%% @end
-module(ectr_gc).
-export([new/2
,init_table/1
,delete_table/1
,mark/2
,unmark/2
,sweep/1
,sweep/2
]).
-export([ets_tab/1]).
-include_lib("stdlib/include/ms_transform.hrl").
-record(gc, {name :: atom(),
mark_threshold :: pos_integer(),
tab :: ets:tab()}).
-opaque gc() :: #gc{}.
-export_type([gc/0]).
-spec new(Name::atom(), MarkThreshold::pos_integer()) -> gc().
new(Name, MarkThreshold)
when is_integer(MarkThreshold), MarkThreshold > 0 ->
#gc{name = Name,
mark_threshold = MarkThreshold}.
ets_tab(#gc{tab = Tab}) -> Tab.
-spec init_table(gc()) -> gc().
%% @private
%% @doc
%% Initializes the GC table for a counter table.
%% @end
init_table(GC = #gc{name = Name}) when is_atom(Name) ->
GC#gc{tab = ets:new(Name, [set, public])}.
delete_table(#gc{tab = Tab}) ->
ets:delete(Tab).
-spec mark(Key::term(), #gc{}) -> any().
%% @doc
%% Marks a key for future deletion. (Typically used when a counter has
%% a 0 value during a reporting pass).
%% @end
mark(Key, #gc{tab = Tab}) ->
ectr:incr(Tab, Key, 1).
-spec unmark(Key::term(), #gc{}) -> any().
%% @doc
%% Removes any marks on a key. (Typically used when a counter has a
%% non-0 value during a reporting pass)
%% @end
unmark(Key, #gc{tab = Tab}) ->
ets:delete(Tab, Key).
-spec sweep(#gc{}) -> [ Key::term() ].
%% @doc
%% Returns all keys that have at least MarkThreshold marks on them,
%% deleting their marks afterwards.
%% @end
sweep(#gc{tab = Tab, mark_threshold = MarkThreshold}) ->
Keys = ets:select(Tab,
ets:fun2ms(fun ({K, Ctr})
when Ctr >= MarkThreshold ->
K
end)),
[ ets:delete(Tab, Key)
|| Key <- Keys ],
Keys.
-spec sweep(Tab::ets:tab(), gc()) -> any().
sweep(MainTable, GC = #gc{}) ->
[ ets:delete_object(MainTable, {Key, 0})
|| Key <- sweep(GC) ]. | src/ectr_gc.erl | 0.593374 | 0.432902 | ectr_gc.erl | starcoder |
%% @author <NAME> <<EMAIL>> [http://yarivsblog.com]
%% @copyright <NAME> 2006-2007
%% @doc ErlyDB: The Erlang Twist on Database Abstraction.
%%
%% == Contents ==
%%
%% {@section Introduction}<br/>
%% {@section Primary and Foreign Key Conventions}
%%
%% == Introduction ==
%% ErlyDB is a database abstraction layer generator for Erlang. ErlyDB
%% combines database metadata and user-provided metadata to generate
%% functions that let you perform common data access operations in
%% an intuitive manner. It also provides a single API for working with
%% different database engines (although currently, only MySQL is supported),
%% letting you write portable data access code.
%%
%% ErlyDB is designed to work with relational schemas, supporting both
%% one-to-many and many-to-many relations. For more details on how to
%% define relations between modules, see {@link erlydb_base:relations/0}.
%%
%% By using {@link erlsql} under the hood for SQL statement generation, ErlyDB
%% provides a simple and effective mechanism for protection against
%% SQL injection attacks. (It's possible to use ErlyDB in 'unsafe' mode,
%% which lets you write SQL statement snippets as strings, but this isn't
%% recommended.) Many of the functions that ErlyDB generates let you extend
%% the automatically generated queries by passing WHERE
%% conditions and/or extras (e.g. LIMIT, ORDER BY) clauses, expressed as
%% ErlSQL snippets, as parameters.
%%
%% ErlyDB uses the module erlydb_base as a generic template for database
%% access modules. During code generation, ErlyDB calls
%% smerl:extend(erlydb_base, Module), and then performs different
%% manipulations on the functions in the resulting module in order to
%% specialize them for the specific model.
%%
%% To learn about the functions that ErlyDB generates and how to implement
%% functions that provide ErlyDB extra database metadata prior to code
%% generation, refer to the documentation for erlydb_base.
%%
%% You can find sample code illustrating how to use many of ErlyDB's features
%% in the test/erlydb directory.
%%
%% == Primary and Foreign Key Conventions ==
%%
%% Prior to ErlyWeb 0.4, ErlyDB assumed that all tables have an identity
%% primary key field named 'id'. From ErlyWeb 0.4, ErlyDB lets users define
%% arbitrary primary key fields for their tables. ErlyDB
%% figures out which fields are the primary key fields automatically by
%% querying the database' metadata.
%%
%% ErlyDB currently relies on a naming convention to map primary key field
%% names to foreign key field names in related tables. Foreign key field
%% names are constructed as follows: [TableName]_[FieldName]. For example,
%% if the 'person' table had
%% primary key fields named 'name' and 'age', then related tables would have
%% the foreign key fields 'person_name' and 'person_age', referencing the
%% 'name' and 'age' fields of the 'person' table.
%%
%% Important: Starting from ErlyWeb 0.4, when a module defines a different
%% table name (by overriding the {@link erlydb_base:table/0} function),
%% the table name is used in foreign key field names, not the module name.
%%
%% In one-to-many/many-to-one relations, the foreign key fields for the 'one'
%% table exist in the 'many' table. In many_to_many relations, all
%% foreign key fields for both modules exist in a separate table named
%% [Table1]_[Table2], where Table1 < Table2 by alphabetical ordering.
%%
%% Starting from v0.4, ErlyDB has special logic to handle the case where a
%% module has a
%% many-to-many relation to itself. In such a case, the relation table
%% would be called [TableName]_[TableName], and its fields would be the
%% table's primary key corresponding foreign key fields,
%% first with the postfix "1", and
%% then with the postfix "2". For example, if the 'person' module defined
%% the relation `{many_to_many, [person]}' (and the table name were 'person',
%% i.e., the default), then there should exist a
%% 'person_person' relation table with the following fields: person_name1,
%% person_name2, person_age1, and person_age2.
%%
%% (In addition to using a different foreign key naming convention, ErlyDB uses
%% different query construction rules when working with self-referencing
%% many-to-many relations.)
%%
%% In a future version, ErlyDB may allow users to customize the foreign
%% key field names as well as many_to_many relation table names.
%% For license information see LICENSE.txt
-module(erlydb).
-author("<NAME> (<EMAIL>) (http://yarivsblog.com)").
-export(
[start/1,
start/2,
code_gen/2,
code_gen/3,
code_gen/4,
code_gen/5]).
-define(Debug(Msg, Params), log(?MODULE, ?LINE, debug, Msg, Params)).
-define(Info(Msg, Params), log(?MODULE, ?LINE, info, Msg, Params)).
-define(Error(Msg, Params), log(?MODULE, ?LINE, error, Msg, Params)).
%% useful for debugging
-define(L(Obj), io:format("LOG ~w ~p\n", [?LINE, Obj])).
-define(S(Obj), io:format("LOG ~w ~s\n", [?LINE, Obj])).
%% @hidden
log(Module, Line, Level, Msg, Params) ->
io:format("~p:~p:~p: " ++ Msg, [Level, Module, Line] ++ Params),
io:format("~n").
%% You can add more aggregate function names here and they will be generated
%% automatically for your modules.
aggregate_functions() ->
[count, avg, min, max, sum, stddev].
%% @doc Start an ErlyDB session for the driver using the driver's default
%% options.
%% This only works for some drivers. For more details, refer to the driver's
%% documentation.
%%
%% @spec start(Driver::atom()) -> ok | {error, Err}
start(Driver) ->
start(Driver, []).
%% @doc Start an ErlyDB sessions for the driver using the list of
%% user-defined options. For information on which options are available for
%% a driver, refer to the driver's documentation.
%%
%% @spec start(Driver::atom(), Options::proplist()) -> ok | {error, Err}
start(mysql, Options) ->
erlydb_mysql:start(Options);
start(mnesia, Options) ->
erlydb_mnesia:start(Options);
start(_Driver, _Options) ->
{error, driver_not_supported}.
driver_mod(mysql) -> erlydb_mysql;
driver_mod(psql) -> erlydb_psql;
driver_mod(mnesia) -> erlydb_mnesia;
driver_mod(odbc) -> erlydb_odbc.
%% @equiv code_gen(Modules, Drivers, [])
code_gen(Modules, Drivers) ->
code_gen(Modules, Drivers, []).
%% @equiv code_gen(Modules, Drivers, Options, [])
code_gen(Modules, Drivers, Options) ->
code_gen(Modules, Drivers, Options, []).
%% @equiv code_gen(Modules, Drivers, Options, IncludePaths, [])
code_gen(Modules, Drivers, Options, IncludePaths) ->
code_gen(Modules, Drivers, Options, IncludePaths, []).
%% @doc Generate code for the list of modules using the provided drivers.
%%
%% If you're using ErlyWeb, you shouldn't need to call this function directly.
%% Instead, refer to {@link erlyweb:compile/2}.
%%
%% === Usage ===
%%
%% In ErlyWeb 0.7, the signature for this function has changed.
%% ErlyDB used to support only a single driver with a single connection
%% pool in a session. As of ErlyWeb 0.7, ErlyDB supports multiple
%% drivers in a session, and multiple connection pools for each
%% driver.
%%
%% ==== Modules ====
%% The 'Modules' parameter is a list of files or modules for which to
%% generate ErlyDB code. If a list item is an atom, ErlyDB assumes it's
%% a module that has been loaded into the VM or that resides in the VM's
%% code path. In either case, the module's source code should be discoverable
%% either through Erlang's path conventions or because the module
%% was compiled with debug_info.
%%
%% If a list item is a string, ErlyDB treats it as a file name (relative
%% or absolute) and attempts to read it from disk.
%%
%% ==== Drivers ====
%% The 'Drivers' parameter is either a single element or a list of
%% elements of the form
%% `Driver::atom()',
%% `{Driver::atom(), DriverOptions::proplist()}', or
%% `{Driver::atom(), DriverOptions::proplist(), Pools::pool()}'.
%%
%% The first element in the Drivers list is
%% the default driver that ErlyDB will use for all modules that don't
%% override the driver option.
%%
%% 'Driver' can be `mysql', `psql' or `mnesia'. 'Options' is a list of
%% driver-specific options. For a list of available options, refer to
%% the driver's documentation.
%%
%% 'DriverOptions' is a property list that contains driver-specific options
%% (e.g. '{allow_unsafe_statements, Bool}').
%% For more information refer to the driver's documentation.
%%
%% 'Pools' is a list of available connection pools for the driver.
%% Note that the driver must be started and the pools must be connected
%% before code_gen/2 is called. Each item in 'Pools' is an atom indicating
%% the pool id, or a tuple of the form `{PoolId, default}', which indicates
%% that this pool will be used as the default pool for the driver.
%% If you don't provide a `{PoolId, default}' pool option, ErlyDB will use
%% the driver-defined default pool id if it exists (you can obtain it by
%% calling Mod:get_default_pool_id(), where 'Mod' is the driver's
%% module, e.g. 'erlydb_mysql').
%%
%% ==== Options ====
%%
%% 'Options' is a list of options that are used for all modules. This may
%% include global driver options as well as options that are passed to
%% compile:file/2. For more information, refer to this function's documentation
%% in the OTP documentation.
%%
%% ==== IncludePaths ====
%%
%% Additional include paths that will be used to search for header files
%% when compiling the modules.
%%
%% ==== Macros ====
%%
%% Macro definitions that will be used for conditional compilation. These are
%% represented in the same way as 'PredefMacros' in epp:parse_file/2.
%%
%% === Examples ===
%%
%% Generate code for "musician.erl" using the MySQL driver. Only the default
%% pool is enabled.
%%
%% ```
%% code_gen(["musician.erl"], mysql).
%% '''
%%
%% Use the previous settings but allow unsafe SQL statements, and compile
%% with debug_info:
%%
%% ```
%% code_gen(["musician.erl"],
%% {mysql, [{allow_unsafe_statements, true}]},
%% [debug_info]).
%% '''
%%
%% Generate code for the modules using the MySQL driver with two additional
%% pools, 'pool1' and 'pool2'. The default pool is remains `erlydb_mysql':
%%
%% ```
%% code_gen(["musician.erl", "instrument.erl"],
%% {mysql, [], [pool1, pool2]}).
%% '''
%%
%% Similar to the previous setting, but allow unsafe statement and use
%% `pool2' as the default pool name:
%%
%% ```
%% code_gen(["src/musician.erl", "src/instrument.erl"],
%% {mysql, [{allow_unsafe_statements, true}],
%% [{pool1, {pool2, default}}]})
%% '''
%%
%% Generate code for the modules using both the MySQL and Postgres driver.
%% The MySQL driver has 2 pools enabled: mysql_pool1 and mysql_pool2, which is
%% the default. The Postgres driver has a single default pool, pg_pool1.
%% The MySQL driver allows unsafe statements:
%%
%% ```
%% code_gen(["src/musician.erl", "src/instrument.erl", "src/song.erl"],
%% [{mysql, [{allow_unsafe_statements, true}],
%% [{mysql_pool1, {mysql_pool2, default}}]},
%% {psql, [], [{pg_pool1, default}]}])
%% '''
%%
%% === Module-Specific Settings ===
%%
%% To specify which connection pool ErlyDB should for a specific module, add
%% the following line to the module's source code:
%%
%% ```
%% -erlydb_options([{driver, Driver}, {pool_id, PoolId}]).
%% '''
%%
%% The 'driver' option tells ErlyDB to use a non-default driver for the
%% module. The 'pool_id' option tells ErlyDB to use a non-default pool id
%% for the module. Neither option is required -- you can specify only
%% a 'driver' option or only a 'pool_id' option.
%%
%% @spec code_gen([Module::atom() | string()], [driver()] | driver(),
%% Options::[term()], [IncludePath::string()], [Macro::{atom(), term()}]) ->
%% ok | {error, Err}
%% @type driver() = Driver::atom() |
%% {Driver::atom(), DriverOptions::proplist()} |
%% {Driver::atom(), DriverOptions::proplist(), [pool()]}
%% @type pool() = PoolId::atom() | {default, PoolId::atom()}
code_gen(_Modules, [], _Options, _IncludePaths, _Macros) ->
exit(no_drivers_specified);
code_gen(Modules, Driver, Options, IncludePaths, Macros) when not is_list(Driver) ->
code_gen(Modules, [Driver], Options, IncludePaths, Macros);
code_gen(Modules, Drivers, Options, IncludePaths, Macros) ->
%% Normalize the driver tuples.
DriverTuples =
lists:map(
fun(Driver) when is_atom(Driver) ->
{Driver, [], []};
({Driver, DriverOptions}) ->
{Driver, DriverOptions, []};
({_Driver, _DriverOptions, _Pools} = Tpl) ->
Tpl;
(Other) ->
exit({invalid_driver_option, Other})
end, Drivers),
DriversData =
lists:foldl(
fun({Driver, DriverOptions, Pools}, Acc) ->
DriverMod = driver_mod(Driver),
%% Add the driver's default pool id to the list
%% if the default wasn't overriden.
Pools1 = case lists:any(
fun({_Id, default}) -> true;
(_) -> false
end, Pools) of
true ->
Pools;
_ ->
[{case catch DriverMod:get_default_pool_id() of
{'EXIT', _} ->
undefined;
DefaultPoolId ->
DefaultPoolId
end,
default} |
Pools]
end,
%% Get the metadata for all pools in a given driver,
%% and figure out which is the default pool.
{PoolsData, DefaultPool} =
lists:foldl(
fun(PoolData, {Acc1, DefaultPool1}) ->
{PoolId, NewDefaultPool} =
case PoolData of
Id when is_atom(Id) ->
{Id, DefaultPool1};
{Id, default} ->
{Id, Id}
end,
Metadata = DriverMod:get_metadata(
[{pool_id, PoolId} | Options]),
{gb_trees:enter(
PoolId, Metadata, Acc1), NewDefaultPool}
end, {gb_trees:empty(), undefined}, Pools1),
gb_trees:enter(
Driver,
{PoolsData, DefaultPool, DriverOptions}, Acc)
end, gb_trees:empty(), DriverTuples),
case proplists:get_value(skip_fk_checks, Options) of
true ->
ok;
_ ->
?Debug("~n~n--- To skip foreign key checks, compile with the {skip_fk_checks, true} option~n~n", [])
end,
%% create the modules
lists:foreach(
fun(Module) ->
DefaultDriverMod = element(1, hd(DriverTuples)),
gen_module_code(Module, DefaultDriverMod, DriversData,
Options, IncludePaths, Macros)
end, Modules).
gen_module_code(ModulePath, DefaultDriverMod,
DriversData, Options, IncludePaths, Macros) ->
case smerl:for_module(ModulePath, IncludePaths, Macros) of
{ok, C1} ->
C2 = preprocess_and_compile(C1),
Module = smerl:get_module(C2),
%% get the ErlyDB settings for the driver, taking the defaults
%% into account
{Driver, PoolsData, PoolId, DriverOptions} =
get_driver_settings(Module, DriversData, DefaultDriverMod),
DriverMod = driver_mod(Driver),
TablesData =
case gb_trees:lookup(PoolId, PoolsData) of
{value, Val} ->
Val;
_ ->
exit({invalid_erlydb_pool_option,
{{module, Module},
{pool_id, PoolId}}})
end,
case gb_trees:lookup(get_table(Module), TablesData) of
{value, Fields} ->
?Debug("Generating code for ~w", [Module]),
Options2 = DriverOptions ++ Options,
MetaMod =
make_module(DriverMod, C2, Fields,
[{pool_id, PoolId} | Options2],
TablesData),
smerl:compile(MetaMod, Options);
none ->
exit(
{no_such_table, {{module, Module},
{table, get_table(Module)}}})
end;
Err ->
Err
end.
preprocess_and_compile(MetaMod) ->
%% extend the base module, erlydb_base
M10 = smerl:extend(erlydb_base, MetaMod),
%% This is an optimization to avoid the remote function call
%% to erlydb_base:set/3 in order to allow the compiler to decide to
%% update the record destructively.
M20 = smerl:remove_func(M10, set, 3),
{ok, M30} = smerl:add_func(M20,
"set(Idx, Rec, Val) -> "
"setelement(Idx, Rec, Val)."),
case smerl:compile(M30) of
ok ->
M30;
Err ->
exit(Err)
end.
get_driver_settings(Module, DriversData, DefaultDriverMod) ->
{DefaultPoolsData, DefaultDriverPoolId, DefaultOptions} =
gb_trees:get(DefaultDriverMod, DriversData),
case proplists:get_value(
erlydb_options,
Module:module_info(attributes)) of
undefined ->
{DefaultDriverMod,
DefaultPoolsData,
DefaultDriverPoolId,
DefaultOptions};
DriverOpts ->
{DriverMod, PoolsData, DefaultPoolId, Options} =
case proplists:get_value(driver, DriverOpts) of
undefined ->
{DefaultDriverMod,
DefaultPoolsData,
DefaultDriverPoolId,
DefaultOptions};
OtherDriver ->
case gb_trees:lookup(OtherDriver, DriversData) of
{value, {PoolsData2, DefaultPoolId2, Options2}} ->
{OtherDriver,
PoolsData2,
DefaultPoolId2,
Options2};
none ->
exit({invalid_erlydb_driver_option,
{{module, Module},
{driver, OtherDriver}}})
end
end,
PoolId =
case proplists:get_value(pool_id, DriverOpts) of
undefined ->
DefaultPoolId;
OtherPoolId ->
OtherPoolId
end,
{DriverMod, PoolsData, PoolId, Options}
end.
get_table(Module) ->
case catch Module:table() of
{'EXIT', _} -> Module;
default -> Module;
Res -> Res
end.
%% Make the abstract forms for the module.
make_module(DriverMod, MetaMod, DbFields, Options, TablesData) ->
Module = smerl:get_module(MetaMod),
{Fields, FieldNames} = get_db_fields(Module, DbFields),
PkFields = filter_pk_fields(Fields),
PkFieldNames =
[erlydb_field:name(Field) || Field <- PkFields],
{ok, M24} = smerl:curry_replace(MetaMod, db_pk_fields, 1, [PkFields]),
M26 = add_pk_fk_field_names(M24, PkFieldNames),
%% inject the fields list into the db_fields/1 function
{ok, M30} = smerl:curry_replace(M26, db_fields, 1, [Fields]),
{ok, M32} = smerl:curry_replace(
M30, db_field_names, 1,
[FieldNames]),
{ok, M34} = smerl:curry_replace(
M32, db_field_names_str, 1,
[[erlydb_field:name_str(Field) || Field <- Fields]]),
{ok, M36} = smerl:curry_replace(
M34, db_field_names_bin, 1,
[[erlydb_field:name_bin(Field) || Field <- Fields]]),
{ok, M42} = smerl:curry_replace(
M36, db_num_fields, 1, [length(Fields)]),
{M60, _Count} = lists:foldl(
fun(Field, {M50, Count}) ->
Idx = Count,
{make_field_forms(M50, Field, Idx),
Count+1}
end, {M42, 3}, FieldNames),
%% create the constructor
M70 = case make_new_func(Module, Fields) of
undefined ->
M60;
NewFunc ->
{ok, Temp} = smerl:add_func(M60, NewFunc),
Temp
end,
%% inject the driver configuration into the driver/1 function
{ok, M80} = smerl:curry_replace(
M70, driver, 1,
[{DriverMod, Options}]),
%% make the relations function forms
M90 = make_rel_funcs(M80, TablesData, Options),
%% make the aggregate function forms
M100 = make_aggregate_forms(M90, aggregate, 5, [Module],
undefined),
%% add extra configurations to the different find functions
M120 = lists:foldl(
fun({FindFunc, Arity}, M110) ->
add_find_configs(M110, FindFunc, Arity)
end, M100, [{find, 3}, {find_first, 3}, {find_max, 4},
{find_range,5}]),
%% embed the generated module's name in
%% place of all corresponding parameters in the base forms
M130 = smerl:embed_all(M120, [{'Module', smerl:get_module(MetaMod)}]),
M130.
%% Return a list of database fields that belong to the module based on the
%% fields/0 and type_field/0 functions as (potentially)
%% implemented by the user as well as the database metadata for the table.
%%
%% Throw an error if any user-defined non-transient fields aren't in the
%% database.
get_db_fields(Module, DbFields) ->
DbFieldNames = [erlydb_field:name(Field) || Field <- DbFields],
DbFields1 =
case Module:fields() of
'*' -> [set_attributes(Field, []) || Field <- DbFields];
DefinedFields ->
DefinedFields1 =
lists:map(fun({_Name, _Atts} = F) -> F;
(Name) -> {Name, []}
end, DefinedFields),
PkFields = [{erlydb_field:name(Field), []} ||
Field <- DbFields,
erlydb_field:key(Field) == primary,
not lists:keymember(
erlydb_field:name(Field),
1, DefinedFields1)],
DefinedFields2 = PkFields ++ DefinedFields1,
InvalidFieldNames =
[Name || {Name, Atts} <- DefinedFields2,
not lists:member(Name, DbFieldNames)
and not lists:member(transient, Atts)],
case InvalidFieldNames of
[] ->
DbFields2 = [add_transient_field(Field, DbFields) ||
Field <- DefinedFields2],
lists:foldr(
fun(Field, Acc) ->
FieldName = erlydb_field:name(Field),
case lists:keysearch(
FieldName, 1, DefinedFields2) of
{value, {_Name, Atts}} ->
Field1 =
set_attributes(Field, Atts),
[Field1 | Acc];
false ->
Acc
end
end, [], DbFields2);
_ -> exit({no_such_fields, {Module, InvalidFieldNames}})
end
end,
DbFieldNames1 = [erlydb_field:name(Field) || Field <- DbFields1],
Res =
case Module:type_field() of
undefined -> {DbFields1, DbFieldNames1};
Name ->
case lists:member(Name, DbFieldNames) of
true ->
{[Field || Field <- DbFields1,
erlydb_field:name(Field) =/= Name],
DbFieldNames1 -- [Name]};
false -> exit({no_such_type_field, {Module, Name}})
end
end,
Res.
add_transient_field({Name, Atts}, DbFields) ->
case lists:member(transient, Atts) of
true ->
erlydb_field:new(Name, {varchar, undefined}, true,
undefined, undefined, undefined);
_ ->
{value, Val} = lists:keysearch(Name, 2, DbFields),
Val
end.
set_attributes(Field, Atts) ->
Atts1 = case erlydb_field:extra(Field) == identity orelse
erlydb_field:type(Field) == timestamp of
true ->
[read_only |
Atts --
[read_only]];
_ ->
Atts
end,
erlydb_field:attributes(Field, Atts1).
add_pk_fk_field_names(MetaMod, PkFieldNames) ->
Module = smerl:get_module(MetaMod),
PkFkFieldNames = pk_fk_fields(get_table(Module), PkFieldNames),
{ok, M2} = smerl:curry_replace(
MetaMod, get_pk_fk_fields, 1, [PkFkFieldNames]),
PkFkFieldNames2 =
[{PkField, append([FkField, '1']), append([FkField, '2'])} ||
{PkField, FkField} <- PkFkFieldNames],
{ok, M5} = smerl:curry_replace(
M2, get_pk_fk_fields2, 1, [PkFkFieldNames2]),
M5.
%% Create the abstract form for the given Module's 'new' function,
%% accepting all field values as parameters, except for fields that
%% are designated as 'identity' primary key fields.
%%
%% For related records with an 'id' primary key, the 'new'
%% function accepts as a parameter
%% either a tuple representing the related record, or the record's
%% id directly.
%%
%% Example:
%% {ok, Erlang} = language:find_id(1),
%% project:new("Yaws", Lang) == project:new("Yaws", language:id(Lang))
make_new_func(Module, Fields) ->
L = 1,
{Params2, Vals2} =
lists:foldl(
fun(Field, {Params, Vals}) ->
ExcludeFromNewParameterList = ((erlydb_field:extra(Field) == identity) or ((erlydb_field:type(Field) == uuid) and (erlydb_field:key(Field) == primary))),
case ExcludeFromNewParameterList of
true ->
case erlydb_field:extra(Field) of
identity -> {Params, [{atom, L, undefined} | Vals]}; % an identity will be 'undefined' when using contructor
_ -> {Params, [{call,L,
{atom,L,list_to_binary},
[{call,L,
{remote,L,{atom,L,uuid},{atom,L,get_uuid}},
[]}]} | Vals]} % get a valid UUID!
end;
false ->
Name = erlydb_field:name(Field),
{Stripped, Name1} = strip_id_chars(Name),
Params1 = [{var,L,Name1} | Params],
Vals1 =
case Stripped of
true ->
[make_new_func_if_expr(Name1) | Vals];
false ->
[{var,L,Name1} | Vals]
end,
{Params1, Vals1}
end
end, {[], []}, Fields),
NumParams = length(Params2),
if NumParams > 0 ->
{function,L,new,length(Params2),
[{clause,L,lists:reverse(Params2),[],
[{tuple,L,
[{atom,L,Module},{atom,L,true} | lists:reverse(Vals2)]}
]}
]};
true ->
undefined
end.
%% Return the following expression:
%%
%% if is_tuple(Param) -> 'Param':id(Param); true -> Param end
%%
%% This allows you to pass into the constructor either a related record
%% or the related record's id directly. If you pass in a related,
%% record, its id is automatically substituted as the parameter's
%% value.
make_new_func_if_expr(Param) ->
L = 1,
{'if',L,
[{clause,L,
[],
[[{call,L,{atom,L,is_tuple},[{var,L,Param}]}]],
[{call,L,
{remote,L,{atom,L,Param},{atom,L,id}},
[{var,L,Param}]}]},
{clause,L,[],[[{atom,L,true}]],[{var,L,Param}]}]}.
%% If Field is an atom such as 'person_id', return the atom 'person'.
%% Otherwise, return the original atom.
strip_id_chars(Field) ->
FieldName = atom_to_list(Field),
FieldLen = length(FieldName),
if
FieldLen < 4 ->
{false, Field};
true ->
case string:substr(FieldName,
FieldLen - 2, 3) of
"_id" ->
{true,
list_to_atom(string:substr(FieldName, 1, FieldLen - 3))};
_ ->
{false, Field}
end
end.
%% Add getters and setters
make_field_forms(MetaMod, Field, Idx) ->
{ok, C1} = smerl:curry_add(MetaMod, get, 2, Idx, Field),
{ok, C2} = smerl:curry_add(C1, set, 3, Idx, Field),
C2.
make_aggregate_forms(MetaMod, BaseFuncName, Arity, CurryParams, PostFix) ->
lists:foldl(
fun(Func, M1) ->
NewName = append([Func, PostFix]),
NewCurryParams = CurryParams ++ [Func],
{ok, M2} = smerl:curry_add(M1, BaseFuncName, Arity,
NewCurryParams,
NewName),
add_find_configs(M2, NewName, Arity - length(NewCurryParams))
end, MetaMod, aggregate_functions()).
%% Generate the forms for functions that enable working with related
%% records.
make_rel_funcs(MetaMod, TablesData, Opts) ->
Module = smerl:get_module(MetaMod),
lists:foldl(
fun({RelType, Modules}, MetaMod1) ->
make_rel_forms(RelType,
Modules, MetaMod1, TablesData, Opts)
end, MetaMod, Module:relations()).
make_rel_forms(RelType, Relations, MetaMod, TablesData, Opts) ->
Fun =
case RelType of
many_to_one ->
fun make_many_to_one_forms/4;
one_to_many ->
fun make_one_to_many_forms/4;
many_to_many ->
fun make_many_to_many_forms/4
end,
%% currying would be nice :)
Fun1 = fun(Relation, MetaMod1) ->
Fun(Relation, MetaMod1, TablesData, Opts)
end,
lists:foldl(Fun1, MetaMod, Relations).
make_many_to_one_forms(Relation, MetaMod, TablesData, Opts) ->
{OtherModule, Alias, PkFks} =
get_rel_options(smerl:get_module(MetaMod),
Relation, TablesData, true, Opts),
{ok, M1} = smerl:curry_add(
MetaMod, find_related_one_to_many, 3,
[OtherModule, PkFks], Alias),
{ok, M2} = smerl:curry_add(M1, set_related_one_to_many, 3,
[PkFks], Alias),
M2.
make_one_to_many_forms(Relation, MetaMod, TablesData, Opts) ->
{OtherModule, Alias, PkFks} =
get_rel_options(smerl:get_module(MetaMod),
Relation, TablesData, false, Opts),
make_some_to_many_forms(
MetaMod, OtherModule, Alias, [PkFks],
find_related_many_to_one, 5,
aggregate_related_many_to_one, 7).
make_many_to_many_forms(Relation, MetaMod, TablesData, _Opts) ->
Module = smerl:get_module(MetaMod),
{OtherModule, RelationTable, Alias} =
case Relation of
Mod when is_atom(Mod) ->
{Mod, undefined, Mod};
{Mod, Opts} ->
RelationTable1 = proplists:get_value(relation_table, Opts),
if RelationTable1 =/= undefined ->
case gb_trees:lookup(RelationTable1, TablesData) of
none ->
exit({relation_table_not_found,
{{module, Module},
{relatedModule, Mod},
{relation_table, RelationTable1}}});
_ ->
ok
end;
true ->
ok
end,
Alias1 = case proplists:get_value(alias, Opts) of
undefined ->
Mod;
Alias2 ->
Alias2
end,
{Mod, RelationTable1, Alias1}
end,
%% The name of the join table is by default assumed
%% to be the alphabetical ordering of the two
%% tables,
%% separated by an underscore.
%% Good example: person_project
%% Bad example: project_person
RelationTable2 = if RelationTable == undefined ->
[Module1, Module2] =
lists:sort(
fun(Mod1, Mod2) ->
get_table(Mod1) < get_table(Mod2)
end,
[Module, OtherModule]),
append([get_table(Module1), "_",
get_table(Module2)]);
true ->
RelationTable
end,
RemoveAllFuncName = append(["remove_all_", pluralize(Alias)]),
IsRelatedFuncName = append(["is_", Alias, "_related"]),
CurryFuncs =
[{add_related_many_to_many, 3, [],
append(["add_", Alias])},
{remove_related_many_to_many, 3, [],
append(["remove_", Alias])},
{remove_related_many_to_many_all, 5, [get_table(OtherModule)],
RemoveAllFuncName},
{is_related, 3, [], IsRelatedFuncName}],
M3 = lists:foldl(
fun({FuncName, Arity, ExtraParams, NewName}, M1) ->
{ok, M2} = smerl:curry_add(
M1, FuncName, Arity,
[RelationTable2 | ExtraParams],
NewName),
M2
end, MetaMod, CurryFuncs),
M4 = add_find_configs(M3, RemoveAllFuncName, 3),
M6 = case get_table(Module) == get_table(OtherModule) of
true ->
M5 = smerl:remove_func(M4, RemoveAllFuncName, 2),
smerl:remove_func(M5, RemoveAllFuncName, 3);
_ ->
M4
end,
M7 = make_some_to_many_forms(
M6, OtherModule, Alias, [RelationTable2],
find_related_many_to_many, 5,
aggregate_related_many_to_many, 7),
M7.
make_some_to_many_forms(MetaMod, OtherModule, Alias, ExtraCurryParams,
BaseFindFuncName, BaseFindFuncArity,
AggregateFuncName, AggregateFuncArity) ->
FindFuncName = pluralize(Alias),
{ok, M1} = smerl:curry_add(MetaMod, BaseFindFuncName, BaseFindFuncArity,
[OtherModule | ExtraCurryParams], FindFuncName),
M2 = add_find_configs(M1, FindFuncName, BaseFindFuncArity -
(1 + length(ExtraCurryParams))),
AggPostFix = "_of_" ++ atom_to_list(pluralize(Alias)),
M3 = make_aggregate_forms(M2, AggregateFuncName, AggregateFuncArity,
[OtherModule | ExtraCurryParams], AggPostFix),
FindFuncs = [
{find_related_many_first,4},
{find_related_many_max,5},
{find_related_many_range,6}
],
M6 = lists:foldl(
fun({FuncName, Arity}, M4) ->
PostFix = lists:nthtail(length("find_related_many"),
atom_to_list(FuncName)),
NewName = append([FindFuncName, PostFix]),
{ok, M5} = smerl:curry_add(M4, FuncName, Arity,
[FindFuncName], NewName),
add_find_configs(M5, NewName, Arity-1)
end, M3, FindFuncs),
CountFuncName = append(["count", AggPostFix]),
{ok, M7} =
smerl:embed_params(M6, CountFuncName, 2, [{'Field', '*'}]),
M7.
%% Get the relation name and primary/foreign key field mappings for
%% a give one-to-many or many-to-one relation.
get_rel_options(Module, OtherModule, TablesData, ReverseFieldOrder, Opts) ->
Res = {OtherMod, _Alias, PkFks} =
case OtherModule of
OtherMod1 when is_atom(OtherMod1) ->
{OtherMod1, OtherMod1,
if ReverseFieldOrder ->
pk_fk_fields2(OtherModule,
get_table(OtherModule), TablesData);
true ->
pk_fk_fields2(Module,
get_table(Module), TablesData)
end};
{OtherMod2, Opts2} ->
Alias1 =
case proplists:get_value(alias, Opts2) of
undefined ->
OtherMod2;
Other ->
Other
end,
FkBase =
case proplists:get_value(fk_base, Opts2) of
undefined ->
OtherMod2;
RevAlias ->
RevAlias
end,
PkFks1 = case proplists:get_value(foreign_keys, Opts2) of
undefined ->
pk_fk_fields2(OtherMod2,
FkBase, TablesData);
Other1 ->
Other1
end,
{OtherMod2, Alias1, PkFks1}
end,
case proplists:get_value(skip_fk_checks, Opts) of
true ->
Res;
_ ->
?Debug("Checking foreign keys for ~w\t->\t~w", [Module, OtherMod]),
verify_field_mappings(Module, OtherMod,
TablesData, PkFks, ReverseFieldOrder)
end,
Res.
%% Verify all mapped fields are present.
%%
%% TODO We can add additional validations, e.g. test data types compatibility
%% and ensure no entries have duplicates.
verify_field_mappings(Module, OtherModule, TablesData, PkFks,
ReverseFieldOrder) ->
Fields1 = get_fields(Module, TablesData),
FieldNames1 = [erlydb_field:name(F) || F <- Fields1],
Fields2 = get_fields(OtherModule, TablesData),
FieldNames2 = [erlydb_field:name(F) || F <- Fields2],
Errs = lists:foldr(
fun(Pair = {F1, F2}, Acc) ->
{Field1, Field2} =
if ReverseFieldOrder ->
{F2, F1};
true ->
Pair
end,
lists:foldr(
fun({Field, FieldNames, Module1}, Acc1) ->
case lists:member(Field, FieldNames) of
true ->
Acc1;
false ->
[{missing_field,
{{module, Module1},
{table, get_table(Module1)},
{field, Field}}} | Acc1]
end
end, Acc, [{Field1, FieldNames1, Module},
{Field2, FieldNames2, OtherModule}])
end, [], PkFks),
if Errs == [] ->
ok;
true ->
exit({bad_relation_definition,
{{module, Module},
{table, get_table(Module)},
{related_module, OtherModule},
{related_table, get_table(OtherModule)},
{errors, Errs}}})
end.
get_fields(Module, TablesData) ->
case gb_trees:lookup(get_table(Module), TablesData) of
none ->
exit({missing_table_data,
{{module, Module},
{table, get_table(Module)}}});
{value, Fields} ->
Fields
end.
filter_pk_fields(Fields) ->
[Field || Field <- Fields, erlydb_field:key(Field) == primary].
pk_fk_fields(Module, PkFieldNames) ->
[{FieldName, append([Module, '_', FieldName])} ||
FieldName <- PkFieldNames].
pk_fk_fields2(Module, Alias, TablesData) ->
pk_fk_fields(
Alias,
[erlydb_field:name(F) ||
F <- filter_pk_fields(get_fields(get_table(Module), TablesData))]).
add_find_configs(MetaMod, BaseFuncName, Arity) ->
NoWhere = {'Where', undefined},
NoExtras = {'Extras', undefined},
Configs =
[{BaseFuncName, [NoWhere, NoExtras]},
{BaseFuncName, [NoExtras]},
{append([BaseFuncName, "_with"]), [NoWhere]}],
M4 = lists:foldl(
fun({NewName, Replacements}, M2) ->
{ok, M3} =
smerl:embed_params(M2, BaseFuncName, Arity,
Replacements, NewName),
M3
end, MetaMod, Configs),
M4.
%% TODO There are probably a bunch of additional cases, but this is good
%% enough for now :)
pluralize(Module) ->
pluralize(Module, undefined).
pluralize(Module, Postfix) ->
Str = atom_to_list(Module),
[LastChar, CharBeforeLast|Rest] = Rev = lists:reverse(Str),
Suffix = [CharBeforeLast,LastChar],
Irregulars =
[{man, men},{foot,feet},{child,children},{person,people},
{tooth,teeth},{mouse,mice},{sheep,sheep},{deer,deer},{fish,fish}],
PluralForm =
case lists:keysearch(Module,1,Irregulars) of
{value, {_, Plural}} ->
atom_to_list(Plural);
_ ->
if Suffix == "fe" ->
lists:reverse([$s,$e,$v | Rest]);
LastChar == $f ->
lists:reverse([$s,$e,$v,CharBeforeLast | Rest]);
%% These rules only work in some special cases, so we'll
%% comment them out for now and possibly deal with them
%% later
%% Suffix == "is" ->
%% lists:reverse([$s,$e|Rest]);
%% Suffix == "on" ->
%% lists:reverse([$a | Rest]);
%% Suffix == "us" ->
%% lists:reverse([$i | Rest]);
true ->
Cond1 = LastChar == $y andalso
lists:member(CharBeforeLast,
"bcdfghjklmnpqrtvwxyz"),
if
Cond1 ->
lists:reverse([$s,$e,$i,CharBeforeLast|Rest]);
true ->
Cond2 = case Rev of
[$s|_] -> true;
[$h,$c|_] -> true;
[$h,$s|_] -> true;
[$x|_] -> true;
[$o|_] -> true;
_ -> false
end,
if Cond2 ->
Str ++ "es";
true ->
Str ++ "s"
end
end
end
end,
Result =
case Postfix of
undefined ->
PluralForm;
_ ->
PluralForm ++ Postfix
end,
list_to_atom(Result).
%% append a list of strings and atoms and return the result as an atom
append(Terms) ->
list_to_atom(
lists:flatten(
lists:map(
fun(undefined) -> [];
(Atom) when is_atom(Atom) ->
atom_to_list(Atom);
(List) -> List
end, Terms))). | src/erlydb/erlydb.erl | 0.63114 | 0.65994 | erlydb.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2015 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(lasp_type).
-author("<NAME> <<EMAIL>>").
-include("lasp.hrl").
-export([new/1,
update/4,
merge/3,
threshold_met/3,
is_inflation/3,
is_bottom/2,
is_strict_inflation/3,
encode/3,
decode/3,
query/2,
get_type/1,
delta/4]).
types() ->
[
{awset, {state_awset, undefined}},
{awset_ps, {state_awset_ps, undefined}},
{boolean, {state_boolean, undefined}},
{gcounter, {state_gcounter, undefined}},
{gmap, {state_gmap, undefined}},
{gset, {state_gset, undefined}},
{ivar, {state_ivar, undefined}},
{lwwregister, {state_lwwregister, undefined}},
{orset, {state_orset, undefined}},
{pair, {state_pair, undefined}},
{pncounter, {state_pncounter, undefined}},
{twopset, {state_twopset, undefined}}
].
get_mode() ->
lasp_config:get(mode, state_based).
%% @doc Return the internal type.
get_type([]) ->
[];
get_type([H | T]) ->
[get_type(H) | get_type(T)];
get_type({T1, T2}) ->
{get_type(T1), get_type(T2)};
get_type(T) ->
get_type(T, get_mode()).
get_type(T, Mode) ->
case orddict:find(T, types()) of
{ok, {StateType, PureOpType}} ->
case Mode of
delta_based ->
StateType;
state_based ->
StateType;
pure_op_based ->
PureOpType
end;
error ->
T
end.
remove_args({T, _Args}) ->
T;
remove_args(T) ->
T.
encode(Type, Encoding, Value) ->
T = get_type(remove_args(Type)),
T:encode(Encoding, Value).
decode(Type, Encoding, Value) ->
T = get_type(remove_args(Type)),
T:decode(Encoding, Value).
%% @doc Is bottom?
is_bottom(Type, Value) ->
T = get_type(remove_args(Type)),
T:is_bottom(Value).
%% @doc Is strict inflation?
is_strict_inflation(Type, Previous, Current) ->
T = get_type(remove_args(Type)),
T:is_strict_inflation(Previous, Current).
%% @doc Is inflation?
is_inflation(Type, Previous, Current) ->
T = get_type(remove_args(Type)),
T:is_inflation(Previous, Current).
%% @doc Determine if a threshold is met.
threshold_met(Type, Value, {strict, Threshold}) ->
T = get_type(remove_args(Type)),
T:is_strict_inflation(Threshold, Value);
threshold_met(Type, Value, Threshold) ->
T = get_type(remove_args(Type)),
T:is_inflation(Threshold, Value).
%% @doc Initialize a new variable for a given type.
new(Type) ->
T = get_type(remove_args(Type)),
case Type of
{_T0, Args} ->
T:new(get_type(Args));
_T0 ->
T:new()
end.
%% @doc Use the proper type for performing an update.
update(Type, Operation, Actor, Value) ->
Mode = get_mode(),
T = get_type(remove_args(Type), Mode),
RealActor = get_actor(T, Actor),
case Mode of
delta_based ->
T:delta_mutate(Operation, RealActor, Value);
state_based ->
T:mutate(Operation, RealActor, Value);
pure_op_based ->
ok %% @todo
end.
%% @private
get_actor(state_awset_ps, {{StorageId, _TypeId}, Actor}) ->
{StorageId, Actor};
get_actor(_Type, {_Id, Actor}) ->
Actor;
get_actor(_Type, Actor) ->
Actor.
%% @doc Call the correct merge function for a given type.
merge(Type, Value0, Value) ->
T = get_type(remove_args(Type)),
T:merge(Value0, Value).
%% @doc Return the value of a CRDT.
query(Type, Value) ->
T = get_type(remove_args(Type)),
T:query(Value).
%% @doc
delta(Type, Method, Remote, Local) ->
T = get_type(remove_args(Type)),
T:delta(Method, Remote, Local). | src/lasp_type.erl | 0.598547 | 0.461805 | lasp_type.erl | starcoder |
%% @doc: Implementation of HyperLogLog with bias correction as
%% described in the Google paper,
%% http://static.googleusercontent.com/external_content/untrusted_dlcp/
%% research.google.com/en//pubs/archive/40671.pdf
-module(hyper).
%%-compile(native).
-export([new/1, new/2, insert/2, insert_many/2]).
-export([union/1, union/2]).
-export([card/1, intersect_card/2]).
-export([to_json/1, from_json/1, from_json/2, precision/1, bytes/1, is_hyper/1]).
-export([compact/1, reduce_precision/2]).
-export([generate_unique/1]).
-type precision() :: 4..16.
-type registers() :: any().
-record(hyper, {p :: precision(), registers :: {module(), registers()}}).
-type value() :: binary().
-type filter() :: #hyper{}.
-export_type([filter/0, precision/0, registers/0]).
%% Exported for testing
-export([run_of_zeroes/1]).
-define(DEFAULT_BACKEND, hyper_binary).
-define(HLL_ALPHA_INF, 0.721347520444481703680). % constant for 0.5/ln(2)
%%
%% API
%%
-spec new(precision()) -> filter().
new(P) ->
new(P, ?DEFAULT_BACKEND).
-spec new(precision(), module()) -> filter().
new(P, Mod) when 4 =< P andalso P =< 16 andalso is_atom(Mod) ->
#hyper{p = P, registers = {Mod, Mod:new(P)}}.
-spec is_hyper(filter()) -> boolean().
is_hyper(#hyper{}) ->
true;
is_hyper(_) ->
false.
-spec insert(value(), filter()) -> filter().
insert(Value, #hyper{registers = {Mod, Registers}, p = P} = Hyper)
when is_binary(Value) ->
Hash = crypto:hash(sha, Value),
<<Index:P, RegisterValue:(64 - P)/bitstring, _/bitstring>> = Hash,
ZeroCount = run_of_zeroes(RegisterValue) + 1,
%% Registers are only allowed to increase, implement by backend
Hyper#hyper{registers = {Mod, Mod:set(Index, ZeroCount, Registers)}};
insert(_Value, _Hyper) ->
error(badarg).
-spec insert_many([value()], filter()) -> filter().
insert_many(L, Hyper) ->
lists:foldl(fun insert/2, Hyper, L).
-spec union([filter()]) -> filter().
union(Filters) when is_list(Filters) ->
case lists:usort(lists:map(fun (#hyper{p = P, registers = {Mod, _}}) ->
{P, Mod}
end,
Filters))
of
%% same P and backend
[{_P, Mod}] ->
Registers = lists:map(fun (#hyper{registers = {_, R}}) ->
R
end,
Filters),
[First | _] = Filters,
First#hyper{registers = {Mod, Mod:max_merge(Registers)}};
%% mixed P, but still must have same backend
[{MinP, Mod} | _] ->
FoldedFilters = lists:map(fun (#hyper{registers = {M, _}} = F) when M =:= Mod ->
hyper:reduce_precision(MinP, F)
end,
Filters),
union(FoldedFilters)
end.
union(Small, Big) ->
union([Small, Big]).
%% NOTE: use with caution, no guarantees on accuracy.
-spec intersect_card(filter(), filter()) -> float().
intersect_card(Left, Right) when Left#hyper.p =:= Right#hyper.p ->
max(0.0, card(Left) + card(Right) - card(union(Left, Right))).
-spec card(filter()) -> float().
card(#hyper{registers = {Mod, Registers0}, p = P}) ->
M = trunc(pow(2, P)),
Qp1 = 65 - P,
Registers = Mod:compact(Registers0),
RegisterHisto = Mod:register_histogram(Registers),
Z = M * tau(M - maps:get(Qp1, RegisterHisto, 0) / M),
%TODO: drop after Q = 64 - P in histo before folding
Z1 = lists:foldr(
fun({_K, V}, Acc) -> (Acc + V) * 0.5 end,
Z,
lists:keysort(1, maps:to_list(maps:without([0, Qp1], RegisterHisto)))
),
Zf = Z1 + M * sigma(maps:get(0, RegisterHisto, 0) / M),
?HLL_ALPHA_INF * M * M / Zf.
precision(#hyper{p = Precision}) ->
Precision.
bytes(#hyper{registers = {Mod, Registers}}) ->
Mod:bytes(Registers).
compact(#hyper{registers = {Mod, Registers}} = Hyper) ->
Hyper#hyper{registers = {Mod, Mod:compact(Registers)}}.
reduce_precision(P, #hyper{p = OldP, registers = {Mod, Registers}} = Hyper)
when P < OldP ->
Hyper#hyper{p = P, registers = {Mod, Mod:reduce_precision(P, Registers)}};
reduce_precision(P, #hyper{p = P} = Filter) ->
Filter.
%%
%% SERIALIZATION
%%
-spec to_json(filter()) -> any().
to_json(#hyper{p = P, registers = {Mod, Registers}}) ->
Compact = Mod:compact(Registers),
{[{<<"p">>, P},
{<<"registers">>, base64:encode(zlib:gzip(Mod:encode_registers(Compact)))}]}.
-spec from_json(any()) -> filter().
from_json(Struct) ->
from_json(Struct, ?DEFAULT_BACKEND).
-spec from_json(any(), module()) -> filter().
from_json({Struct}, Mod) ->
P = proplists:get_value(<<"p">>, Struct),
Bytes = zlib:gunzip(base64:decode(proplists:get_value(<<"registers">>, Struct))),
Registers = Mod:decode_registers(Bytes, P),
#hyper{p = P, registers = {Mod, Registers}}.
%%
%% HELPERS
%%
generate_unique(N) ->
generate_unique(lists:usort(random_bytes(N)), N).
generate_unique(L, N) ->
case length(L) of
N ->
L;
Less ->
generate_unique(lists:usort(random_bytes(N - Less) ++ L), N)
end.
random_bytes(N) ->
random_bytes([], N).
random_bytes(Acc, 0) ->
Acc;
random_bytes(Acc, N) ->
Int = rand:uniform(100000000000000),
random_bytes([<<Int:64/integer>> | Acc], N - 1).
sigma(1.0) ->
infinity;
sigma(X) ->
sigma_sum(X, first, X, 1.0).
sigma_sum(Z, Z, _X, _Y) ->
Z;
sigma_sum(Z, _Zp, X, Y) ->
X1 = X * X,
Z1 = (X1* Y) + Z,
sigma_sum(Z1, Z, X1, Y + Y).
tau(0.0) ->
0.0;
tau(1.0) ->
0.0;
tau(X) ->
tau_sum((1 - X), first, X, 1.0) / 3.
tau_sum(Z, Z, _X, _Y) ->
Z;
tau_sum(Z, _Zp, X, Y) ->
X1 = math:sqrt(X),
Y1 = Y * 0.5,
Z1 = Z - (math:pow(1 - X1, 2) * Y1),
tau_sum(Z1, Z, X1, Y1).
pow(X, Y) ->
math:pow(X, Y).
run_of_zeroes(B) ->
run_of_zeroes(1, B).
run_of_zeroes(I, B) ->
case B of
<<0:I, _/bitstring>> ->
run_of_zeroes(I + 1, B);
_ ->
I - 1
end. | src/hyper.erl | 0.546254 | 0.4856 | hyper.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2017 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(trcb_base_resender).
-author("<NAME> <<EMAIL>>").
-behaviour(gen_server).
%% API
-export([start_link/0]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% other
-export([add_exactly_once_queue/2]).
-include("trcb_base.hrl").
-record(state, {actor :: node(),
metrics :: atom(),
to_be_ack_queue :: dict:dict()}).
-type state_t() :: #state{}.
%%%===================================================================
%%% callbacks
%%%===================================================================
%% Add a message to a queue for at least once guarantee.
-spec add_exactly_once_queue(term(), term()) -> ok.
add_exactly_once_queue(Dot, Msg) ->
gen_server:cast(?MODULE, {add_exactly_once_queue, Dot, Msg}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Same as start_link([]).
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%%% gen_server callbacks
%%%===================================================================
%% @private
-spec init(list()) -> {ok, state_t()}.
init([]) ->
Actor = trcb_base_util:get_node(),
%% Generate local to be acknowledged messages queue.
ToBeAckQueue = dict:new(),
schedule_resend(),
{ok, #state{actor=Actor,
metrics=trcb_base_config:get(lmetrics),
to_be_ack_queue=ToBeAckQueue}}.
%% @private
-spec handle_call(term(), {pid(), term()}, state_t()) ->
{reply, term(), state_t()}.
handle_call(Msg, _From, State) ->
lager:warning("Unhandled cast messages: ~p", [Msg]),
{reply, ok, State}.
%% @private
-spec handle_cast(term(), state_t()) -> {noreply, state_t()}.
handle_cast({add_exactly_once_queue, Dot, {VV, MessageBody, ToMembers}},
#state{to_be_ack_queue=ToBeAckQueue0}=State) ->
%% Get current time in milliseconds.
CurrentTime = trcb_base_util:get_timestamp(),
%% Add members to the queue of not ack messages and increment the vector clock.
ToBeAckQueue = dict:store(Dot, {VV, MessageBody, ToMembers, CurrentTime}, ToBeAckQueue0),
{noreply, State#state{to_be_ack_queue=ToBeAckQueue}};
handle_cast({tcbcast_ack, Dot, Sender},
#state{to_be_ack_queue=ToBeAckQueue0}=State) ->
ToBeAckQueue = case dict:find(Dot, ToBeAckQueue0) of
%% Get list of waiting ackwnoledgements.
{ok, {_VV, _MessageBody, Members0, _Timestamp}} ->
%% Remove this member as an outstanding member.
Members = lists:delete(Sender, Members0),
case length(Members) of
0 ->
%% None left, remove from ack queue.
dict:erase(Dot, ToBeAckQueue0);
_ ->
%% Still some left, preserve.
dict:update(Dot, fun({A, B, _C, D}) -> {A, B, Members, D} end, ToBeAckQueue0)
end;
error ->
ToBeAckQueue0
end,
{noreply, State#state{to_be_ack_queue=ToBeAckQueue}};
handle_cast(Msg, State) ->
lager:warning("Unhandled cast messages: ~p", [Msg]),
{noreply, State}.
%% @private
-spec handle_info(term(), state_t()) -> {noreply, state_t()}.
handle_info(check_resend, #state{actor=Actor, to_be_ack_queue=ToBeAckQueue0, metrics=Metrics} = State) ->
Now = trcb_base_util:get_timestamp(),
ToBeAckQueue1 = dict:fold(
fun(MessageDot, {MessageVV, MessageBody, MembersList, Timestamp0}, ToBeAckQueue) ->
case (Now - Timestamp0) > ?WAIT_TIME_BEFORE_RESEND of
true ->
Message1 = {tcbcast, MessageVV, MessageBody, Actor},
TaggedMessage1 = {?RESEND_TCBCAST_TAG, Message1},
%% Retransmit to membership.
trcb_base_util:send(TaggedMessage1, MembersList, Metrics, ?TCSB),
dict:update(MessageDot,
fun({A, B, C, _D}) -> {A, B, C, trcb_base_util:get_timestamp()} end,
ToBeAckQueue);
false ->
%% Do nothing.
ToBeAckQueue
end
end,
ToBeAckQueue0,
ToBeAckQueue0
),
schedule_resend(),
{noreply, State#state{to_be_ack_queue=ToBeAckQueue1}};
handle_info(Msg, State) ->
lager:warning("Unhandled info messages: ~p", [Msg]),
{noreply, State}.
%% @private
-spec terminate(term(), state_t()) -> term().
terminate(_Reason, _State) ->
ok.
%% @private
-spec code_change(term() | {down, term()}, state_t(), term()) -> {ok, state_t()}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% @private
schedule_resend() ->
timer:send_after(?WAIT_TIME_BEFORE_CHECK_RESEND, check_resend). | src/trcb_base_resender.erl | 0.539469 | 0.406067 | trcb_base_resender.erl | starcoder |
%% Copyright (c) 2013-2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(emqx_pool_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include("emqx_mqtt.hrl").
-include_lib("eunit/include/eunit.hrl").
all() ->
[
{group, submit_case},
{group, async_submit_case},
t_unexpected
].
groups() ->
[
{submit_case, [sequence], [submit_mfa, submit_fa]},
{async_submit_case, [sequence], [async_submit_mfa, async_submit_crash]}
].
init_per_suite(Config) ->
application:ensure_all_started(gproc),
Config.
end_per_suite(_Config) ->
ok.
init_per_testcase(_, Config) ->
{ok, Sup} = emqx_pool_sup:start_link(),
[{pool_sup, Sup}|Config].
end_per_testcase(_, Config) ->
Sup = proplists:get_value(pool_sup, Config),
exit(Sup, normal).
submit_mfa(_Config) ->
Result = emqx_pool:submit({?MODULE, test_mfa, []}),
?assertEqual(15, Result).
submit_fa(_Config) ->
Fun = fun(X) -> case X rem 2 of 0 -> {true, X div 2}; _ -> false end end,
Result = emqx_pool:submit(Fun, [2]),
?assertEqual({true, 1}, Result).
async_submit_mfa(_Config) ->
emqx_pool:async_submit({?MODULE, test_mfa, []}),
emqx_pool:async_submit(fun ?MODULE:test_mfa/0, []).
async_submit_crash(_) ->
emqx_pool:async_submit(fun() -> A = 1, A = 0 end).
t_unexpected(_) ->
Pid = emqx_pool:worker(),
?assertEqual(ignored, gen_server:call(Pid, bad_request)),
?assertEqual(ok, gen_server:cast(Pid, bad_msg)),
Pid ! bad_info,
ok = gen_server:stop(Pid).
test_mfa() ->
lists:foldl(fun(X, Sum) -> X + Sum end, 0, [1,2,3,4,5]). | test/emqx_pool_SUITE.erl | 0.625896 | 0.414543 | emqx_pool_SUITE.erl | starcoder |
-module(statman_counter).
-export([init/0, counters/0, get/1, get_all/0, reset/2]).
-export([incr/1, incr/2, decr/1, decr/2, set/2]).
-compile([{no_auto_import, [get/1]}]).
-include_lib("eunit/include/eunit.hrl").
-define(TABLE, statman_counters).
%%
%% API
%%
init() ->
ets:new(?TABLE, [named_table, public, set, {write_concurrency, true}]),
ok.
get(Key) ->
case ets:match(?TABLE, {Key, '$1'}) of
[[N]] when is_integer(N) ->
N;
[] ->
error(badarg)
end.
get_all() ->
ets:select(?TABLE, [{ {'$1', '$2'}, [], [{{'$1', '$2'}}]}]).
incr(Key) -> incr(Key, 1).
decr(Key) -> decr(Key, 1).
decr(Key, Incr) -> incr(Key, -Incr).
counters() ->
ets:select(?TABLE, [{ {'$1', '$2'}, [], ['$1'] }]).
reset(Key, Value) ->
decr(Key, Value).
%%
%% INTERNAL HELPERS
%%
set(Key, Value) ->
case catch ets:update_element(?TABLE, Key, Value) of
{'EXIT', {badarg, _}} ->
(catch ets:insert(?TABLE, {Key, Value})),
ok;
_ ->
ok
end.
incr(Key, Incr) when is_integer(Incr) ->
%% If lock contention on the single key becomes a problem, we can
%% use multiple keys and try to snapshot a value across all
%% subkeys. See
%% https://github.com/boundary/high-scale-lib/blob/master/src/main/java/org/cliffc/high_scale_lib/ConcurrentAutoTable.java
ets:update_counter(?TABLE, Key, Incr, {Key, 0}),
ok;
incr(_Key, Float) when is_float(Float) ->
error(badarg).
%%
%% TESTS
%%
counter_test_() ->
{foreach,
fun setup/0, fun teardown/1,
[
?_test(test_operations()),
?_test(find_counters()),
{timeout, 100, ?_test(benchmark())},
?_test(test_reset()),
?_test(floats())
]
}.
setup() ->
init(),
[?TABLE].
teardown(Tables) ->
lists:map(fun ets:delete/1, Tables).
test_operations() ->
?assertError(badarg, get(key)),
?assertEqual(ok, incr(key)),
?assertEqual(1, get(key)),
?assertEqual(ok, decr(key)),
?assertEqual(0, get(key)),
?assertEqual(ok, decr(key)),
?assertEqual(-1, get(key)),
?assertEqual(ok, set(key, 5)),
?assertEqual(5, get(key)),
?assertEqual(ok, decr(key)),
?assertEqual(4, get(key)).
find_counters() ->
?assertEqual([], counters()),
?assertEqual([], get_all()),
?assertEqual(ok, incr(foo)),
?assertEqual(ok, incr(bar)),
?assertEqual(lists:sort([bar, foo]), lists:sort(counters())),
?assertEqual(lists:sort([{bar, 1}, {foo, 1}]), lists:sort(get_all())).
test_reset() ->
?assertEqual([], counters()),
ok = incr(foo, 5),
?assertEqual(5, get(foo)),
[{foo, Count}] = get_all(),
incr(foo, 3),
?assertEqual(8, get(foo)),
ok = reset(foo, Count),
?assertEqual(3, get(foo)).
floats() ->
?assertError(badarg, get(foo)),
?assertError(badarg, incr(foo, 2.5)).
benchmark() ->
do_benchmark(4, 100000),
do_benchmark(8, 100000),
do_benchmark(32, 100000).
do_benchmark(Processes, Writes) ->
Start = erlang:monotonic_time(microsecond),
Parent = self(),
Pids = [spawn(fun() ->
benchmark_incrementer(foo, Writes),
Parent ! {self(), done}
end) || _ <- lists:seq(1, Processes)],
receive_all(Pids, done),
End = erlang:monotonic_time(microsecond),
error_logger:info_msg("~p processes, ~p writes in ~p ms~n",
[Processes, Writes, (End - Start) / 1000]),
ok.
receive_all([], _) ->
ok;
receive_all(Pids, Msg) ->
receive
{Pid, Msg} ->
receive_all(lists:delete(Pid, Pids), Msg)
end.
benchmark_incrementer(_, 0) ->
ok;
benchmark_incrementer(Key, N) ->
incr(Key),
benchmark_incrementer(Key, N-1). | src/statman_counter.erl | 0.525856 | 0.583055 | statman_counter.erl | starcoder |
-module(md_writer).
-export([build_md/2]).
%%==============================================================================
%% API
%%==============================================================================
build_md(Data, Path) ->
{value, {_, Name, Text}} = lists:keysearch(overview, 1, Data),
filelib:ensure_dir(Path),
{ok, Output} = file:open(Path ++ "/" ++ Name ++ ".md", [write]),
io:format(Output, "##Overview~n~s~n~n", [format_sentence(Text)]),
EndpointTable = endpoints_to_table(Name, get_endpoint_list(Data)),
io:format(Output, "##Endpoints~n~n~s~n~n", [EndpointTable]),
Endpoints = [endpoint_to_string(X, Data) || {endpoint, X} <- Data],
io:format(Output, "~s", [string:join(Endpoints, "\n\n\n")]),
ok.
%%==============================================================================
%% Utils
%%==============================================================================
get_endpoint_list(Data) ->
Endpoints = [Description || {endpoint, Description} <- Data],
F = fun(EndpointData) ->
{value, Res} = lists:keysearch(definition, 1, EndpointData),
{definition, Type, Name, _} = Res,
{Type, Name}
end,
[F(E) || E <- Endpoints].
endpoints_to_table(Resource, Endpoints) ->
MethodList = [get, put, post, delete],
GetMethod = fun(MethodAtom) ->
case lists:keysearch(MethodAtom, 1, Endpoints) of
{value, {_, _}} -> {true, method_to_string(MethodAtom)};
_ -> false
end
end,
Methods = lists:filtermap(GetMethod, MethodList),
GetName = fun(MethodAtom) ->
case lists:keysearch(MethodAtom, 1, Endpoints) of
{value, {_, Name}} -> {true, Name};
_ -> false
end
end,
Names = lists:filtermap(GetName, MethodList),
Titles = ["RESOURCE" | Methods],
Values = [Resource | Names],
format_table(Titles, Values).
method_to_string(MethodAtom) -> string:to_upper(atom_to_list(MethodAtom)).
format_table(Row1, Row2) ->
GetMaxLen = fun(A, B) -> max(length(A), length(B)) end,
Lengths = lists:zipwith(GetMaxLen, Row1, Row2),
AddTrailingSpaces = fun(Str, DesiredLen) ->
string:left(Str, DesiredLen, 32)
end,
Line1 = lists:zipwith(AddTrailingSpaces, Row1, Lengths),
Line2 = [string:copies("-", L) || L <- Lengths],
Line3 = lists:zipwith(AddTrailingSpaces, Row2, Lengths),
Lines = [Line1, Line2, Line3],
io_lib:format("~s~n~s~n~s", [string:join(Line, "|") || Line <- Lines]).
endpoint_to_string(EndpointDefinition, Data) ->
{value, {_, Method, Name, Text}} = lists:keysearch(definition, 1,
EndpointDefinition),
Overview = io_lib:format("#### ~s ~s~n~n##### Overview~n~s",
%Overview = io_lib:format("#### ``~s ~s``~n~n##### Overview~n~s",
[method_to_string(Method),
Name,
format_sentence(Text)]),
Parameters = case parameters_to_string(EndpointDefinition) of
void -> "";
PValue -> "\n\n" ++ PValue
end,
Responses = "\n\n" ++ responses_to_string(EndpointDefinition),
Examples = case utils:skip_examples(Data) of
true ->
"";
false ->
case examples_to_string(EndpointDefinition, Data) of
void -> "";
EValue -> "\n\n" ++ EValue
end
end,
Overview ++ Parameters ++ Responses ++ Examples.
parameters_to_string(Data) ->
ReqParams = [{type_to_string((Type)), Name, format_sentence(Text)} ||
{parameter, required, Type, Name, Text} <- Data],
OpParams = [{type_to_string(Type), Name, format_sentence(Text)} ||
{parameter, optional, Type, Name, Text} <- Data],
ReqParamsStr = ["* **" ++ Name ++ "** (" ++ Type ++ "): " ++ Text ||
{Type, Name, Text} <- lists:reverse(ReqParams)],
OpParamsStr = ["* **" ++ Name ++ "** (" ++ Type ++ ", optional): " ++ Text ||
{Type, Name, Text} <- lists:reverse(OpParams)],
case {ReqParamsStr, OpParamsStr} of
{[], []} -> void;
_ -> ParamText = string:join(ReqParamsStr ++ OpParamsStr, "\n"),
"##### Parameters" ++ "\n" ++ ParamText
end.
responses_to_string(Data) ->
Sort = fun({A, _}, {B, _}) -> A =< B end,
Statuses = [{integer_to_list(Status), format_sentence(Text)} ||
{response, Status, Text} <- Data],
Url = "http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html",
{ok, {_, _, HTML}} = httpc:request(Url),
StatusToString = fun(Status) ->
{match, Rest} = re:run(HTML, "h3.*(" ++ Status ++ ".*)h3"),
Results = [lists:sublist(HTML, A + 5, B - 6) ||
{A, B} <- Rest],
lists:nth(2, Results)
end,
ResponseToString = fun({Status, Text}) ->
"* **" ++ Status ++ "** (" ++ StatusToString(Status) ++
"): " ++ Text
end,
StatusesStr = [ResponseToString(S) || S <- lists:sort(Sort, Statuses)],
"##### Responses:\nThe server may return:\n\n" ++
string:join(StatusesStr, "\n").
examples_to_string(EndpointDefinition, Data) ->
Examples = [{Type, Description, Params} ||
{example, Type, Description, Params} <- EndpointDefinition],
F = fun({Type, Description, Params}) ->
handle_example(Type, Description, Params, Data)
end,
ExamplesStr = lists:filtermap(F, Examples),
case ExamplesStr of
[] -> void;
_L -> "##### Samples:\n\n" ++ string:join(ExamplesStr, "\n\n")
end.
handle_example(curl, Description, Params, Data) ->
{ok, Cmd} = build_curl_req(Params, Data, false),
Response = os:cmd(Cmd),
% Separate the response in lines
Lines = [binary_to_list(Bin) || Bin <- re:split(Response, "[\r\n]+")],
F = fun(Line, {Out, In} = Acc) ->
case Line of
[$* | _] -> Acc; % Ignore this
[${ | _] -> Acc; % Ignore this
[$> | _] -> {[Line | Out], In};
[_ | _] -> {Out, [Line | In]};
[] -> Acc
end
end,
{Out, In} = lists:foldl(F, {[], []}, lists:reverse(Lines)),
{ok, ObscuredCmd} = build_curl_req(Params, Data, true),
Str = "* " ++ format_sentence(Description) ++ "\n```bash\n$ " ++ ObscuredCmd
++ "\n" ++ string:join(Out ++ In, "\n") ++ "\n```",
{true, Str};
handle_example(Type, _Description, _Params, _Data) ->
io:format("unhandled example type ~p~n", [Type]),
false.
%% String formatting utils
capitalize([H | T] = _Str) when H >= 97 andalso H =< 122 -> [H - 32 | T];
capitalize(Str) -> Str.
add_period(Str) -> case lists:last(Str) =:= 46 of
true -> Str;
_ -> Str ++ "."
end.
format_sentence(Str) -> add_period(capitalize(Str)).
type_to_string(Type) -> capitalize(parameter_handler:to_string(Type)).
build_curl_req(Params, Data, ObscureAuth) ->
case lists:keyfind(url, 1, Params) of
false -> {error, no_url};
{_, Url} -> R1 = "curl -sSv ",
R2 = case lists:keyfind(auth, 1, Params) of
false -> R1;
{_, Auth} -> add_auth(R1, Auth, Data, ObscureAuth)
end,
R3 = add_method(R2, Params),
R4 = add_type(R3, Params),
R5 = add_body(R4, Params),
{ok, R5 ++ " " ++ Url}
end.
%% Curl request
add_auth(Req, AuthName, Data, ObscureAuth) ->
case lists:keyfind(auth, 1, Data) of
false -> io:format("undefined auth ~p~n", [AuthName]),
io:format("on1 ~p~n", [Data]),
Req;
{_, L} -> case lists:keyfind(AuthName, 1, L) of
false -> io:format("undefined auth ~p~n", [AuthName]),
io:format("on2 ~p~n", [L]),
Req;
{_, {K, S}} -> case ObscureAuth of
false -> Req ++ " -u" ++ K ++ ":" ++ S;
true -> Req ++ " -uKEY:SECRET"
end
end
end.
add_method(Req, Params) ->
case lists:keyfind(method, 1, Params) of
false -> Req;
{_, Method} -> Req ++ " -X " ++ Method
end.
add_type(Req, Params) ->
case lists:keyfind(type, 1, Params) of
false -> Req;
{_, "json"} -> Req ++ " -H\"Content-Type:application/json\"";
Other -> io:format("unsupported content type ~p~n", [Other]),
Req
end.
add_body(Req, Params) ->
case lists:keyfind(body, 1, Params) of
false -> Req;
{_, Body} -> Req ++ " -d'" ++ Body ++ "'"
end. | src/md_writer.erl | 0.52074 | 0.619759 | md_writer.erl | starcoder |
%% @author Couchbase <<EMAIL>>
%% @copyright 2015-2021 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc REST api's for handling ssl certificates
-module(menelaus_web_cert).
-include("ns_common.hrl").
-export([handle_cluster_certificate/1,
handle_regenerate_certificate/1,
handle_upload_cluster_ca/1,
handle_reload_node_certificate/1,
handle_get_node_certificate/2,
handle_client_cert_auth_settings/1,
handle_client_cert_auth_settings_post/1]).
-define(MAX_CLIENT_CERT_PREFIXES, ?get_param(max_prefixes, 10)).
handle_cluster_certificate(Req) ->
menelaus_util:assert_is_enterprise(),
case proplists:get_value("extended", mochiweb_request:parse_qs(Req)) of
"true" ->
handle_cluster_certificate_extended(Req);
_ ->
handle_cluster_certificate_simple(Req)
end.
handle_cluster_certificate_simple(Req) ->
Cert = case ns_server_cert:cluster_ca() of
{GeneratedCert, _} ->
GeneratedCert;
{UploadedCAProps, _, _} ->
proplists:get_value(pem, UploadedCAProps)
end,
menelaus_util:reply_ok(Req, "text/plain", Cert).
format_time(UTCSeconds) ->
LocalTime = calendar:universal_time_to_local_time(
calendar:gregorian_seconds_to_datetime(UTCSeconds)),
menelaus_util:format_server_time(LocalTime, 0).
warning_props({expires_soon, UTCSeconds}) ->
[{message, ns_error_messages:node_certificate_warning(expires_soon)},
{expires, format_time(UTCSeconds)}];
warning_props(Warning) ->
[{message, ns_error_messages:node_certificate_warning(Warning)}].
translate_warning({Node, Warning}) ->
[{node, Node} | warning_props(Warning)];
translate_warning(Warning) ->
warning_props(Warning).
jsonify_cert_props(Props) ->
lists:map(fun ({expires, UTCSeconds}) ->
{expires, format_time(UTCSeconds)};
({K, V}) when is_list(V) ->
{K, list_to_binary(V)};
(Pair) ->
Pair
end, Props).
handle_cluster_certificate_extended(Req) ->
{Cert, WarningsJson} =
case ns_server_cert:cluster_ca() of
{GeneratedCert, _} ->
{[{type, generated},
{pem, GeneratedCert}], [{translate_warning(self_signed)}]};
{UploadedCAProps, _, _} ->
Warnings = ns_server_cert:get_warnings(UploadedCAProps),
{[{type, uploaded} | UploadedCAProps],
[{translate_warning(Pair)} || Pair <- Warnings]}
end,
menelaus_util:reply_json(Req, {[{cert, {jsonify_cert_props(Cert)}},
{warnings, WarningsJson}]}).
handle_regenerate_certificate(Req) ->
menelaus_util:assert_is_enterprise(),
assert_n2n_encryption_is_disabled(),
ns_server_cert:generate_and_set_cert_and_pkey(),
ns_ssl_services_setup:sync(),
?log_info("Completed certificate regeneration"),
ns_audit:regenerate_certificate(Req),
handle_cluster_certificate_simple(Req).
reply_error(Req, Error) ->
menelaus_util:reply_json(
Req, {[{error, ns_error_messages:cert_validation_error_message(Error)}]}, 400).
handle_upload_cluster_ca(Req) ->
menelaus_util:assert_is_enterprise(),
assert_n2n_encryption_is_disabled(),
case mochiweb_request:recv_body(Req) of
undefined ->
reply_error(Req, empty_cert);
PemEncodedCA ->
case ns_server_cert:set_cluster_ca(PemEncodedCA) of
{ok, Props} ->
ns_audit:upload_cluster_ca(Req,
proplists:get_value(subject, Props),
proplists:get_value(expires, Props)),
handle_cluster_certificate_extended(Req);
{error, Error} ->
reply_error(Req, Error)
end
end.
assert_n2n_encryption_is_disabled() ->
case misc:is_cluster_encryption_fully_disabled() of
true -> ok;
false ->
menelaus_util:web_exception(
400, "Operation requires node-to-node encryption to be disabled")
end.
handle_reload_node_certificate(Req) ->
menelaus_util:assert_is_enterprise(),
Nodes = nodes(),
case ns_server_cert:apply_certificate_chain_from_inbox() of
{ok, Props} ->
ns_audit:reload_node_certificate(Req,
proplists:get_value(subject, Props),
proplists:get_value(expires, Props)),
ns_ssl_services_setup:sync(),
case netconfig_updater:ensure_tls_dist_started(Nodes) of
ok ->
menelaus_util:reply(Req, 200);
{error, ErrorMsg} ->
menelaus_util:reply_json(Req, ErrorMsg, 400)
end;
{error, Error} ->
?log_error("Error reloading node certificate: ~p", [Error]),
menelaus_util:reply_json(
Req, ns_error_messages:reload_node_certificate_error(Error), 400)
end.
handle_get_node_certificate(NodeId, Req) ->
menelaus_util:assert_is_enterprise(),
case menelaus_web_node:find_node_hostname(NodeId, Req) of
{ok, Node} ->
case ns_server_cert:get_node_cert_info(Node) of
[] ->
menelaus_util:reply_text(Req, <<"Certificate is not set up on this node">>, 404);
Props ->
menelaus_util:reply_json(Req, {jsonify_cert_props(Props)})
end;
{error, {invalid_node, Reason}} ->
menelaus_util:reply_text(Req, Reason, 400);
{error, not_found} ->
menelaus_util:reply_text(
Req,
<<"Node is not found, make sure the ip address/hostname matches the ip address/hostname used by Couchbase">>,
404)
end.
allowed_values(Key) ->
Values = [{"state", ["enable", "disable", "mandatory"]},
{"path", ["subject.cn", "san.uri", "san.dnsname", "san.email"]},
{"prefix", any},
{"delimiter", any}],
proplists:get_value(Key, Values, none).
handle_client_cert_auth_settings(Req) ->
Cca = ns_ssl_services_setup:client_cert_auth(),
State = list_to_binary(proplists:get_value(state, Cca)),
Prefixes = [begin
{struct, [{list_to_binary(atom_to_list(K)), list_to_binary(V)}
|| {K, V} <- Triple]}
end || Triple <- proplists:get_value(prefixes, Cca, [])],
Out = {struct, [{<<"state">>, State}, {<<"prefixes">>, Prefixes}]},
menelaus_util:reply_json(Req, Out).
validate_client_cert_auth_param(Key, Val) ->
Values = allowed_values(Key),
case Values == any orelse lists:member(Val, Values) of
true ->
{ok, {list_to_atom(Key), Val}};
false ->
{error, io_lib:format("Invalid value '~s' for key '~s'", [Val, Key])}
end.
validate_client_cert_auth_state(StateVal, Prefixes, Cfg, Errors) ->
case validate_client_cert_auth_param("state", StateVal) of
{ok, CfgPair} ->
case StateVal =/= "disable" andalso Prefixes =:= [] of
true ->
E = {error, io_lib:format("'prefixes' cannot be empty when the "
"'state' is '~s'", [StateVal])},
{Cfg, [E | Errors]};
false ->
case StateVal =:= "mandatory" andalso
misc:should_cluster_data_be_encrypted() of
false -> {[CfgPair | Cfg], Errors};
true ->
M = "Cannot set 'state' to 'mandatory' when "
"cluster encryption level has been set to "
"'all'",
E = {error, M},
{Cfg, [E | Errors]}
end
end;
Err ->
{Cfg, [Err | Errors]}
end.
validate_triple(Triple) ->
Triple1 = lists:sort(Triple),
case [K || {K, _V} <- Triple1] =:= ["delimiter", "path", "prefix"] of
true ->
case validate_client_cert_auth_param("path", proplists:get_value("path", Triple1)) of
{ok, _} ->
{[{list_to_atom(K), V} || {K, V} <- Triple1], []};
E ->
{[], [E]}
end;
false ->
E = {error, io_lib:format("Invalid prefixes entry (~p). Must contain "
"'path', 'prefix' & 'delimiter' fields.",
[Triple1])},
{[], [E]}
end.
check_for_duplicate_prefixes(_PrefixCfg, Errors) when Errors =/= [] ->
Errors;
check_for_duplicate_prefixes(PrefixCfg, Errors) ->
{_, NewErrors} =
lists:foldl(
fun(Triple, {Set, EAcc}) ->
Path = proplists:get_value(path, Triple),
Prefix = proplists:get_value(prefix, Triple),
case sets:is_element({Path, Prefix}, Set) of
true ->
E = {error,
io_lib:format("Multiple entries with same path & prefix "
"(~p) are not allowed", [{Path, Prefix}])},
{Set, [E | EAcc]};
false ->
{sets:add_element({Path, Prefix}, Set), EAcc}
end
end, {sets:new(), Errors}, PrefixCfg),
NewErrors.
validate_client_cert_auth_prefixes(Prefixes, Cfg, Errors) ->
%% Prefixes are represented as a list of lists. Each list contains
%% tuples representing the path, prefix and delimiter.
{PCfg, PErrs0} = lists:foldr(
fun({C, E}, {CAcc, EAcc}) ->
{[C | CAcc], E ++ EAcc}
end, {[], []}, [validate_triple(Triple) || Triple <- Prefixes]),
PErrs = check_for_duplicate_prefixes(PCfg, PErrs0),
{Cfg ++ [{prefixes, PCfg}], PErrs ++ Errors}.
handle_client_cert_auth_settings_post(Req) ->
menelaus_util:assert_is_enterprise(),
try
JSON = menelaus_util:parse_json(Req),
do_handle_client_cert_auth_settings_post(Req, JSON)
catch
throw:{error, Msg} ->
menelaus_util:reply_json(Req, Msg, 400);
_:_ ->
menelaus_util:reply_json(Req, <<"Invalid JSON">>, 400)
end.
%% The client_cert_auth settings will be a JSON payload and it'll look like
%% the following:
%%
%% {
%% "state": "enable",
%% "prefixes": [
%% {
%% "path": "san.uri",
%% "prefix": "www.cb-",
%% "delimiter": ".,;"
%% },
%% {
%% "path": "san.email",
%% "prefix": "a",
%% "delimiter": "@"
%% }
%% ]
%% }
do_handle_client_cert_auth_settings_post(Req, JSON) ->
{struct, Data} = JSON,
StateRaw = proplists:get_value(<<"state">>, Data),
PrefixesRaw = proplists:get_value(<<"prefixes">>, Data),
case StateRaw =:= undefined orelse PrefixesRaw =:= undefined of
true ->
throw({error,
<<"Unsupported format: Must contain 'state' and 'prefixes' "
"fields">>});
false ->
case length(proplists:get_keys(Data)) > 2 of
true ->
throw({error, <<"Unsupported fields: Must contain 'state' "
"and 'prefixes' fields only">>});
false -> ok
end
end,
State = binary_to_list(StateRaw),
case length(PrefixesRaw) > ?MAX_CLIENT_CERT_PREFIXES of
true ->
Err = io_lib:format("Maximum number of prefixes supported is ~p",
[?MAX_CLIENT_CERT_PREFIXES]),
menelaus_util:reply_json(Req, list_to_binary(Err), 400);
false ->
Prefixes = [[{binary_to_list(K), binary_to_list(V)} || {K, V} <- Triple]
|| {struct, Triple} <- PrefixesRaw],
{Cfg0, Errors0} = validate_client_cert_auth_state(State, Prefixes, [], []),
{Cfg, Errors} = validate_client_cert_auth_prefixes(Prefixes, Cfg0, Errors0),
case Errors of
[] ->
case ns_ssl_services_setup:client_cert_auth() of
Cfg ->
menelaus_util:reply(Req, 200);
_ ->
ns_config:set(client_cert_auth, Cfg),
ns_audit:client_cert_auth(Req, Cfg),
menelaus_util:reply(Req, 202)
end;
_ ->
Out = [list_to_binary(Msg) || {error, Msg} <- Errors],
menelaus_util:reply_json(Req, Out, 400)
end
end. | src/menelaus_web_cert.erl | 0.571647 | 0.432183 | menelaus_web_cert.erl | starcoder |
%% @author Couchbase <<EMAIL>>
%% @copyright 2017 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc implementation of samples REST API's
-module(menelaus_web_samples).
-include("ns_common.hrl").
-export([handle_get/1,
handle_post/1]).
-import(menelaus_util,
[reply_json/2,
reply_json/3]).
-define(SAMPLES_LOADING_TIMEOUT, 120000).
-define(SAMPLE_BUCKET_QUOTA_MB, 100).
-define(SAMPLE_BUCKET_QUOTA, 1024 * 1024 * ?SAMPLE_BUCKET_QUOTA_MB).
handle_get(Req) ->
Buckets = [Bucket || {Bucket, _} <- ns_bucket:get_buckets(ns_config:get())],
Map = [ begin
Name = filename:basename(Path, ".zip"),
Installed = lists:member(Name, Buckets),
{struct, [{name, list_to_binary(Name)},
{installed, Installed},
{quotaNeeded, ?SAMPLE_BUCKET_QUOTA}]}
end || Path <- list_sample_files() ],
reply_json(Req, Map).
handle_post(Req) ->
menelaus_web_rbac:assert_no_users_upgrade(),
Samples = mochijson2:decode(mochiweb_request:recv_body(Req)),
Errors = case validate_post_sample_buckets(Samples) of
ok ->
start_loading_samples(Req, Samples);
X1 ->
X1
end,
case Errors of
ok ->
reply_json(Req, [], 202);
X2 ->
reply_json(Req, [Msg || {error, Msg} <- X2], 400)
end.
start_loading_samples(Req, Samples) ->
Errors = [start_loading_sample(Req, binary_to_list(Sample))
|| Sample <- Samples],
case [X || X <- Errors, X =/= ok] of
[] ->
ok;
X ->
lists:flatten(X)
end.
start_loading_sample(Req, Name) ->
Params = [{"threadsNumber", "3"},
{"replicaIndex", "0"},
{"replicaNumber", "1"},
{"saslPassword", ""},
{"authType", "sasl"},
{"ramQuotaMB", integer_to_list(?SAMPLE_BUCKET_QUOTA_MB) },
{"bucketType", "membase"},
{"name", Name}],
case menelaus_web_buckets:create_bucket(Req, Name, Params) of
ok ->
start_loading_sample_task(Req, Name);
{_, Code} when Code < 300 ->
start_loading_sample_task(Req, Name);
{{struct, [{errors, {struct, Errors}}, _]}, _} ->
?log_debug("Failed to create sample bucket: ~p", [Errors]),
[{error, <<"Failed to create bucket!">>} | [{error, Msg} || {_, Msg} <- Errors]];
{{struct, [{'_', Error}]}, _} ->
?log_debug("Failed to create sample bucket: ~p", [Error]),
[{error, Error}];
X ->
?log_debug("Failed to create sample bucket: ~p", [X]),
X
end.
start_loading_sample_task(Req, Name) ->
case samples_loader_tasks:start_loading_sample(Name, ?SAMPLE_BUCKET_QUOTA_MB) of
ok ->
ns_audit:start_loading_sample(Req, Name);
already_started ->
ok
end,
ok.
list_sample_files() ->
BinDir = path_config:component_path(bin),
filelib:wildcard(filename:join([BinDir, "..", "samples", "*.zip"])).
sample_exists(Name) ->
BinDir = path_config:component_path(bin),
filelib:is_file(filename:join([BinDir, "..", "samples", binary_to_list(Name) ++ ".zip"])).
validate_post_sample_buckets(Samples) ->
case check_valid_samples(Samples) of
ok ->
check_quota(Samples);
X ->
X
end.
check_quota(Samples) ->
NodesCount = length(ns_cluster_membership:service_active_nodes(kv)),
StorageInfo = ns_storage_conf:cluster_storage_info(),
RamQuotas = proplists:get_value(ram, StorageInfo),
QuotaUsed = proplists:get_value(quotaUsed, RamQuotas),
QuotaTotal = proplists:get_value(quotaTotal, RamQuotas),
Required = ?SAMPLE_BUCKET_QUOTA * erlang:length(Samples),
case (QuotaTotal - QuotaUsed) < (Required * NodesCount) of
true ->
Err = ["Not enough Quota, you need to allocate ", format_MB(Required),
" to install sample buckets"],
[{error, list_to_binary(Err)}];
false ->
ok
end.
check_valid_samples(Samples) ->
Errors = [begin
case ns_bucket:name_conflict(binary_to_list(Name)) of
true ->
Err1 = ["Sample bucket ", Name, " is already loaded."],
{error, list_to_binary(Err1)};
_ ->
case sample_exists(Name) of
false ->
Err2 = ["Sample ", Name, " is not a valid sample."],
{error, list_to_binary(Err2)};
_ -> ok
end
end
end || Name <- Samples],
case [X || X <- Errors, X =/= ok] of
[] ->
ok;
X ->
X
end.
format_MB(X) ->
integer_to_list(misc:ceiling(X / 1024 / 1024)) ++ "MB". | src/menelaus_web_samples.erl | 0.604983 | 0.474449 | menelaus_web_samples.erl | starcoder |
% @doc OTPCL pipes up the wazoo. OTPCL treats any command that starts with a
% pipe character (`|') as a command terminator - that is, it'll treat all the
% words before it as one command, then resume command parsing. This is a bit
% tricky to describe in English, so it's easier to just show you that this:
%
% ```
% foo | bar | baz
% '''
%
% ...is equivalent to this:
%
% ```
% foo
% | bar
% | baz
% '''
%
% In this case, the `|' happens to take the return value of the
% previously-executed command (i.e. the `$RETVAL' variable) and pass it as the
% first argument to the command named in its own first argument. That is, both
% of the above are equivalent to this:
%
% ```
% baz [bar [foo]]
% '''
%
% Of course, the command doesn't necessarily <em>have</em> to do this sort of
% chaining (or even pay attention to `$RETVAL' at all), but OTPCL's pipe support
% exists specifically to facilitate this sort of pattern, as exemplified by the
% various commands defined in this here module.
-module(otpcl_pipes).
-include("otpcl.hrl").
-export(['CMD_|!'/2, 'CMD_|&'/2, 'CMD_||'/2, 'CMD_|*'/2, 'CMD_|#'/2,
'CMD_|#*'/2]).
% @doc "Send" operator. Send the previous command's return value to the
% specified process.
'CMD_|!'([Pid], State) ->
{RetVal, State} = otpcl_meta:get(<<"RETVAL">>, State),
Pid ! RetVal,
{ok, State}.
% @doc "And Also" operator. If the previous command returned a "truthy" value,
% run the arguments as a command.
'CMD_|&'(Args, State) ->
alsoelse(Args, State, true, false).
% @doc "Or Else" operator. If the previous command returned a non-"truthy"
% value, run the arguments as a command.
'CMD_||'(Args, State) ->
alsoelse(Args, State, false, true).
alsoelse([Cmd|Args], State, Also, Else) ->
{RetVal, State} = otpcl_meta:get(<<"RETVAL">>, State),
case otpcl_control:truthy(RetVal) of
Also ->
otpcl_meta:apply(Cmd, Args, State);
Else ->
{RetVal, State}
end.
% @doc "Splat" operator. If the previous command returned a list, and there are
% no words after the "splat", then treat the first element as a command name,
% the rest as its arguments, and run it. Else, treat the first argument as the
% command name, pass the list elements as arguments, then pass any other passed
% arguments as additional arguments, then run the resulting command.
'CMD_|*'(Args, State) ->
{RetVal, State} = otpcl_meta:get(<<"RETVAL">>, State),
splat(RetVal, Args, State).
splat([Cmd|Args], [], State) ->
otpcl_meta:apply(Cmd, Args, State);
splat(Args, [NextCmd|NextArgs], State) ->
NewArgs = Args ++ NextArgs,
otpcl_meta:apply(NextCmd, NewArgs, State).
% @doc "Insert" operator. Splits its arguments at the specified position,
% inserts the previous command's return value between them, and invokes the
% resulting list of words as a command. That is:
%
% ```
% foo |# 0 bar baz # -> [foo] bar baz
% foo |# 1 bar baz # -> bar [foo] baz
% foo |# 2 bar baz # -> bar baz [foo]
% '''
%
% Note that, for obvious reasons, trying to insert an argument into a position
% greater than the number of existing arguments will result in an error.
'CMD_|#'([Pos|Args], State) when is_integer(Pos) ->
{RetVal, State} = otpcl_meta:get(<<"RETVAL">>, State),
insert_splat([RetVal], Pos, Args, State). % Gotta stay DRY :)
insert_splat([Cmd|Rest], 0, Args, State) ->
otpcl_meta:apply(Cmd, Rest ++ Args, State);
insert_splat(RetVal, Pos, Args, State) when is_integer(Pos) ->
{[Cmd|Front], Back} = lists:split(Pos, Args),
otpcl_meta:apply(Cmd, Front ++ RetVal ++ Back, State).
% @doc "Insert Splat" operator. Like `|#`, but expands the previous command's
% return value during insertion (instead of just inserting the list as a single
% argument).
'CMD_|#*'([Pos|Args], State) when is_integer(Pos) ->
{RetVal, State} = otpcl_meta:get(<<"RETVAL">>, State),
insert_splat(RetVal, Pos, Args, State). | src/otpcl_pipes.erl | 0.558568 | 0.578002 | otpcl_pipes.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2017-2018. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Optimize bit syntax matching.
-module(sys_core_bsm).
-export([module/2]).
-include("core_parse.hrl").
-spec module(cerl:c_module(), [compile:option()]) -> {'ok', cerl:c_module()}.
module(#c_module{defs=Ds}=Mod, _Opts) ->
{ok,Mod#c_module{defs=function(Ds)}}.
function([{#c_var{name={F,Arity}}=Name,B0}|Fs]) ->
try cerl_trees:map(fun bsm_reorder/1, B0) of
B -> [{Name,B} | function(Fs)]
catch
Class:Error:Stack ->
io:fwrite("Function: ~w/~w\n", [F,Arity]),
erlang:raise(Class, Error, Stack)
end;
function([]) ->
[].
%%% Reorder bit syntax matching to facilitate optimization in further passes.
bsm_reorder(#c_case{arg=#c_var{}=V}=Case) ->
bsm_reorder_1([V], Case);
bsm_reorder(#c_case{arg=#c_values{es=Es}}=Case) ->
bsm_reorder_1(Es, Case);
bsm_reorder(Core) ->
Core.
bsm_reorder_1(Vs0, #c_case{clauses=Cs0}=Case) ->
case bsm_leftmost(Cs0) of
Pos when Pos > 0, Pos =/= none ->
Vs = core_lib:make_values(move_from_col(Pos, Vs0)),
Cs = [C#c_clause{pats=move_from_col(Pos, Ps)}
|| #c_clause{pats=Ps}=C <- Cs0],
Case#c_case{arg=Vs,clauses=Cs};
_ ->
Case
end.
move_from_col(Pos, L) ->
{First,[Col|Rest]} = lists:split(Pos - 1, L),
[Col|First] ++ Rest.
%% bsm_leftmost(Cs) -> none | ArgumentNumber
%% Find the leftmost argument that matches a nonempty binary.
%% Return either 'none' or the argument number (1-N).
bsm_leftmost(Cs) ->
bsm_leftmost_1(Cs, none).
bsm_leftmost_1([_|_], 1) ->
1;
bsm_leftmost_1([#c_clause{pats=Ps}|Cs], Pos) ->
bsm_leftmost_2(Ps, Cs, 1, Pos);
bsm_leftmost_1([], Pos) -> Pos.
bsm_leftmost_2(_, Cs, Pos, Pos) ->
bsm_leftmost_1(Cs, Pos);
bsm_leftmost_2([#c_binary{segments=[_|_]}|_], Cs, N, _) ->
bsm_leftmost_1(Cs, N);
bsm_leftmost_2([_|Ps], Cs, N, Pos) ->
bsm_leftmost_2(Ps, Cs, N+1, Pos);
bsm_leftmost_2([], Cs, _, Pos) ->
bsm_leftmost_1(Cs, Pos). | lib/compiler/src/sys_core_bsm.erl | 0.568296 | 0.419707 | sys_core_bsm.erl | starcoder |
%% vim: set ai et sw=4 sts=4:
%% See LICENSE for licensing information.
-module(solarized_list_diff).
-export([ diff/2
]).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
% A diff implementation based on:
% - https://neil.fraser.name/writing/diff/
%
% More complete implementations already exist:
% - https://github.com/mmzeeman/diffy
% - https://github.com/tomas-abrahamsson/tdiff/
%
% This is a new implementation because:
% - the output needs to be matched to solarized_diff's needs,
% - we can give up early and still get good results for solarized_eunit,
% - we want binary and list implementation that can have different trade offs.
%=======================================================================
% Compare two lists (Left, Right)
% return pair of {Left, Right} difference lists
% where the lists are in the form
% DiffList = [Same0, Diff1, Same1, Diff2, Same2, ...]
% and
% SameX alternates with DiffX
% and
% SameX & DiffX are sub-lists of the initial lists
diff(Same, Same) ->
{ [Same]
, [Same]
};
diff(Left, Right) ->
common_prefix(Left, Right).
%=======================================================================
common_prefix(Ls, Rs) ->
case common_prefix_find(Ls, Rs, 0) of
false ->
common_prefix_next([], Ls, Rs);
{Lt, Rt, N} ->
Prefix = lists:sublist(Ls, N),
common_prefix_next(Prefix, Lt, Rt)
end.
%-----------------------------------------------------------------------
common_prefix_find([A | Ls], [A | Rs], N) ->
common_prefix_find(Ls, Rs, N + 1);
common_prefix_find(_, _, 0) ->
false;
common_prefix_find(Ls, Rs, N) when N > 0 ->
{Ls, Rs, N}.
%-----------------------------------------------------------------------
common_prefix_next(Prefix, Left, Right) ->
{Ls, Rs} = common_suffix(Left, Right),
{[Prefix | Ls], [Prefix | Rs]}.
%=======================================================================
% get length of list
% be aware of improper lists, in that case return
% - length of list less the improper tail
% - a new list without the imporper tail
% - the improper tail
improper_length(L) when is_list(L) ->
improper_length(L, L, 0);
improper_length(Improper) ->
{0, [], Improper}.
improper_length(L, [], N) ->
{N, L, []};
improper_length(L, [_ | Ls], N) ->
improper_length(L, Ls, N + 1);
improper_length(L, Improper, N) ->
{N, lists:sublist(L, N), Improper}.
-ifdef(TEST).
improper_test_() ->
Diff = {[[a], [], [] | improper], [[a], [], []]},
[ ?_assertEqual({2, [a, b], []}, improper_length([a, b]))
, ?_assertEqual({2, [a, b], c}, improper_length([a, b | c]))
, ?_assertEqual({0, [], improper}, improper_length(improper))
, ?_assertEqual(Diff, diff([a | improper], [a]))
].
-endif.
%=======================================================================
common_suffix(Left, Right) ->
common_suffix_balance(improper_length(Left), improper_length(Right)).
%-----------------------------------------------------------------------
% balance tails so we can find a common suffix
common_suffix_balance({Ln, L, Lt}, {Rn, R, Rt}) when Ln > Rn ->
N = Ln - Rn,
Found = common_suffix_find(lists:nthtail(N, L), R),
common_suffix_found(N, 0, L, R, Lt, Rt, Found);
common_suffix_balance({Ln, L, Lt}, {Rn, R, Rt}) when Rn > Ln ->
N = Rn - Ln,
Found = common_suffix_find(L, lists:nthtail(N, R)),
common_suffix_found(0, N, L, R, Lt, Rt, Found);
common_suffix_balance({N, L, Lt}, {N, R, Rt}) ->
Found = common_suffix_find(L, R),
common_suffix_found(0, 0, L, R, Lt, Rt, Found).
%-----------------------------------------------------------------------
common_suffix_find(L, R) ->
common_suffix_guess(L, R, 0).
%-----------------------------------------------------------------------
common_suffix_guess(L, R, N) ->
common_suffix_check(L, R, N, {N, L}).
%-----------------------------------------------------------------------
common_suffix_check([], [], _, Best) ->
Best;
common_suffix_check([A | L], [A | R], N, Best) ->
common_suffix_check(L, R, N + 1, Best);
common_suffix_check([_ | L], [_ | R], N, _) ->
common_suffix_guess(L, R, N + 1).
%-----------------------------------------------------------------------
common_suffix_found(Ln, Rn, L, R, Lt, Rt, {M, Suffix}) ->
case Suffix of
[] ->
common_suffix_add(M + Ln, M + Rn, L, R, Lt, Rt, Suffix);
_ ->
Ls = lists:sublist(L, M + Ln),
Rs = lists:sublist(R, M + Rn),
common_suffix_add(M + Ln, M + Rn, Ls, Rs, Lt, Rt, Suffix)
end.
%-----------------------------------------------------------------------
common_suffix_add(Ln, Rn, Ls, Rs, Lt, Rt, Suffix) ->
common_suffix_next(Ln, Rn, Ls, Rs, [Suffix | Lt], [Suffix | Rt]).
%-----------------------------------------------------------------------
%common_suffix_next(Ln, Rn, Ls, Rs, Lt, Rt) when Ln > 10 orelse Rn > 10 ->
% % avoid "large" lists
% {[Ls | Lt], [Rs | Rt]};
common_suffix_next(Ln, Rn, Ls, Rs, Lt, Rt) when Ln < Rn ->
single_edit(Ln, Rn, Ls, Rs, Lt, Rt);
common_suffix_next(Ln, Rn, Ls, Rs, Lt, Rt) ->
% give single_edit() shortest on left
% swap before and after
{Re, Le} = single_edit(Rn, Ln, Rs, Ls, Rt, Lt),
{Le, Re}.
%=======================================================================
% Ls is shorter or equal to Rs
single_edit(0, _, Ls, Rs, Lt, Rt) ->
{ [Ls | Lt]
, [Rs | Rt]
};
single_edit(Ln, Rn, Ls, Rs, Lt, Rt) ->
two_edits(Ln, Rn, Ls, Rs, Lt, Rt).
%=======================================================================
% Ls is shorter or equal to Rs
two_edits(Ln, Rn, Ls, Rs, Lt, Rt) when Ln + 2 > Rn ->
half_match(Ln, Rn, Ls, Rs, Lt, Rt);
two_edits(Ln, Rn, Ls, Rs, Lt, Rt) ->
case find_inside(Ls, Rs, 0, Rn - Ln - 1) of
nomatch ->
half_match(Ln, Rn, Ls, Rs, Lt, Rt);
{N, Suffix} ->
Prefix = lists:sublist(Rs, N),
{ [[], Ls, [] | Lt]
, [Prefix, Ls, Suffix | Rt]
}
end.
%-----------------------------------------------------------------------
find_inside(Small, Large, 0, Max) ->
find_inside(Small, tl(Large), 1, Max);
find_inside(Small, Large, N, Max) ->
case find_inside_check(Small, Large) of
nomatch when N >= Max ->
nomatch;
nomatch ->
find_inside(Small, tl(Large), N + 1, Max);
Suffix ->
{N, Suffix}
end.
%-----------------------------------------------------------------------
find_inside_check([], Large) ->
Large;
find_inside_check([A | Small], [A | Large]) ->
find_inside_check(Small, Large);
find_inside_check(_, _) ->
nomatch.
%-----------------------------------------------------------------------
-ifdef(TEST).
find_inside_test_() ->
[ ?_assertEqual(" hat", find_inside_check("the", "the hat"))
, ?_assertEqual({3, " hat"}, find_inside("the", "in the hat", 0, 6))
].
-endif.
%=======================================================================
% Ls is shorter or equal to Rs
half_match(1, _, Ls, Rs, Lt, Rt) ->
% not worth it of length(Left) =:= 1
{ [Ls | Lt]
, [Rs | Rt]
};
half_match(_, _, Ls, Rs, Lt, Rt) ->
{ [Ls | Lt]
, [Rs | Rt]
}.
%=======================================================================
-ifdef(TEST).
fraser_1_1_test() ->
Old = "Equality",
New = Old,
Expect = {[Old], [New]},
?assertEqual(Expect, solarized_list_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_2_test() ->
Old = "The cat in the hat.",
New = "The dog in the hat.",
Expect =
{ ["The ", "cat", " in the hat."]
, ["The ", "dog", " in the hat."]
},
?assertEqual(Expect, solarized_list_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_3_a_test() ->
Old = "The cat in the hat.",
New = "The furry cat in the hat.",
Expect =
{ ["The ", "", "cat in the hat."]
, ["The ", "furry ", "cat in the hat."]
},
?assertEqual(Expect, solarized_list_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_3_b_test() ->
Old = "The cat in the hat.",
New = "The cat.",
Expect =
{ ["The cat", " in the hat", "."]
, ["The cat", "", "."]
},
?assertEqual(Expect, solarized_list_diff:diff(Old, New)).
%-----------------------------------------------------------------------
fraser_1_4_a_test_() ->
Old = "The cat in the hat.",
New = "The happy cat in the black hat.",
Expect =
{ ["The ", "", "cat in the", "", " hat."]
, ["The ", "happy ", "cat in the", " black", " hat."]
},
Reverse = { element(2, Expect), element(1, Expect) },
[ ?_assertEqual(Expect, solarized_list_diff:diff(Old, New))
, ?_assertEqual(Reverse, solarized_list_diff:diff(New, Old))
].
%-----------------------------------------------------------------------
%fraser_1_4_b_test_() ->
% Old = <<"The cat in the hat.">>,
% New = <<"The ox in the box.">>,
% Expect =
% { [<<"The ">>, <<"cat">>, <<" in the ">>, <<"hat">>, <<".">>]
% , [<<"The ">>, <<"ok">>, <<" in the ">>, <<"box">>, <<".">>]
% },
% Reverse = { element(2, Expect), element(1, Expect) },
% [ ?_assertEqual(Expect, solarized_list_diff:diff(Old, New))
% , ?_assertEqual(Reverse, solarized_list_diff:diff(New, Old))
% ].
%-----------------------------------------------------------------------
-endif. | src/solarized_list_diff.erl | 0.582254 | 0.55929 | solarized_list_diff.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_kv_pncounter: A convergent, replicated, state based PN counter
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc
%% A PN-Counter CRDT. A PN-Counter is essentially two G-Counters: one for increments and
%% one for decrements. The value of the counter is the difference between the value of the
%% Positive G-Counter and the value of the Negative G-Counter.
%%
%% @see riak_kv_gcounter.erl
%%
%% @reference <NAME>, <NAME>, <NAME>, <NAME> (2011) A comprehensive study of
%% Convergent and Commutative Replicated Data Types. http://hal.upmc.fr/inria-00555588/
%%
%% @end
-module(riak_kv_pncounter).
-export([new/0, new/2, value/1, update/3, merge/2, equal/2, to_binary/1, from_binary/1]).
%% EQC API
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-export([gen_op/0, update_expected/3, eqc_state_value/1]).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export_type([pncounter/0, pncounter_op/0]).
-opaque pncounter() :: {riak_kv_gcounter:gcounter(), riak_kv_gcounter:gcounter()}.
-type pncounter_op() :: riak_kv_gcounter:gcounter_op() | decrement_op().
-type decrement_op() :: decrement | {decrement, pos_integer()}.
%% @doc Create a new, empty `pncounter()'
-spec new() -> pncounter().
new() ->
{riak_kv_gcounter:new(), riak_kv_gcounter:new()}.
%% @doc Create a `pncounter()' with an initial `Value' for `Actor'.
-spec new(term(), integer()) -> pncounter().
new(Actor, Value) when Value > 0 ->
update({increment, Value}, Actor, new());
new(Actor, Value) when Value < 0 ->
update({decrement, Value * -1}, Actor, new());
new(_Actor, _Zero) ->
new().
%% @doc The single, total value of a `pncounter()'
-spec value(pncounter()) -> integer().
value({Incr, Decr}) ->
riak_kv_gcounter:value(Incr) - riak_kv_gcounter:value(Decr).
%% @doc Update a `pncounter()'. The first argument is either the atom
%% `increment' or `decrement' or the two tuples `{increment, pos_integer()}' or
%% `{decrement, pos_integer()}'. In the case of the former, the operation's amount
%% is `1'. Otherwise it is the value provided in the tuple's second element.
%% `Actor' is any term, and the 3rd argument is the `pncounter()' to update.
%%
%% returns the updated `pncounter()'
-spec update(pncounter_op(), term(), pncounter()) -> {ok, pncounter()}.
update(increment, Actor, {Incr, Decr}) ->
{ok, GC} = riak_kv_gcounter:update(increment, Actor, Incr),
{ok, {GC, Decr}};
update({_Op, 0}, _Actor, Cntr) ->
{ok, Cntr};
update({increment, By}, Actor, {Incr, Decr}) when is_integer(By), By > 0 ->
{ok, GC} = riak_kv_gcounter:update({increment, By}, Actor, Incr),
{ok, {GC, Decr}};
update(decrement, Actor, {Incr, Decr}) ->
{ok, GC} = riak_kv_gcounter:update(increment, Actor, Decr),
{ok, {Incr, GC}};
update({decrement, By}, Actor, {Incr, Decr}) when is_integer(By), By > 0 ->
{ok, GC} = riak_kv_gcounter:update({increment, By}, Actor, Decr),
{ok, {Incr, GC}}.
%% @doc Merge two `pncounter()'s to a single `pncounter()'. This is the Least Upper Bound
%% function described in the literature.
-spec merge(pncounter(), pncounter()) -> pncounter().
merge({Incr1, Decr1}, {Incr2, Decr2}) ->
MergedIncr = riak_kv_gcounter:merge(Incr1, Incr2),
MergedDecr = riak_kv_gcounter:merge(Decr1, Decr2),
{MergedIncr, MergedDecr}.
%% @doc Are two `pncounter()'s structurally equal? This is not `value/1' equality.
%% Two counters might represent the total `-42', and not be `equal/2'. Equality here is
%% that both counters represent exactly the same information.
-spec equal(pncounter(), pncounter()) -> boolean().
equal({Incr1, Decr1}, {Incr2, Decr2}) ->
riak_kv_gcounter:equal(Incr1, Incr2) andalso riak_kv_gcounter:equal(Decr1, Decr2).
-define(TAG, 71).
-define(V1_VERS, 1).
%% @doc Encode an effecient binary representation of `pncounter()'
-spec to_binary(pncounter()) -> binary().
to_binary({P, N}) ->
PBin = riak_kv_gcounter:to_binary(P),
NBin = riak_kv_gcounter:to_binary(N),
PBinLen = byte_size(PBin),
NBinLen = byte_size(NBin),
<<?TAG:8/integer, ?V1_VERS:8/integer,
PBinLen:32/integer, PBin:PBinLen/binary,
NBinLen:32/integer, NBin:NBinLen/binary>>.
%% @doc Decode a binary encoded PN-Counter
-spec from_binary(binary()) -> pncounter().
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer,
PBinLen:32/integer, PBin:PBinLen/binary,
NBinLen:32/integer, NBin:NBinLen/binary>>) ->
{riak_kv_gcounter:from_binary(PBin), riak_kv_gcounter:from_binary(NBin)}.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
-ifdef(EQC).
%% EQC generator
gen_op() ->
oneof([increment, {increment, gen_pos()}, decrement, {decrement, gen_pos()} ]).
gen_pos()->
?LET(X, int(), 1+abs(X)).
update_expected(_ID, increment, Prev) ->
Prev+1;
update_expected(_ID, decrement, Prev) ->
Prev-1;
update_expected(_ID, {increment, By}, Prev) ->
Prev+By;
update_expected(_ID, {decrement, By}, Prev) ->
Prev-By;
update_expected(_ID, _Op, Prev) ->
Prev.
eqc_state_value(S) ->
S.
eqc_value_test_() ->
{timeout, 120, [?_assert(crdt_statem_eqc:prop_converge(0, 1000, ?MODULE))]}.
-endif.
new_test() ->
?assertEqual({[], []}, new()).
value_test() ->
PNCnt1 = {[{1, 1}, {2, 13}, {3, 1}], [{2, 10}, {4, 1}]},
PNCnt2 = {[], []},
PNCnt3 = {[{1, 3}, {2, 1}, {3, 1}], [{1, 3}, {2, 1}, {3, 1}]},
?assertEqual(4, value(PNCnt1)),
?assertEqual(0, value(PNCnt2)),
?assertEqual(0, value(PNCnt3)).
update_increment_test() ->
PNCnt0 = new(),
{ok, PNCnt1} = update(increment, 1, PNCnt0),
{ok, PNCnt2} = update(increment, 2, PNCnt1),
{ok, PNCnt3} = update(increment, 1, PNCnt2),
?assertEqual({[{1, 2}, {2, 1}], []}, PNCnt3).
update_increment_by_test() ->
PNCnt0 = new(),
{ok, PNCnt1} = update({increment, 7}, 1, PNCnt0),
?assertEqual({[{1, 7}], []}, PNCnt1).
update_decrement_test() ->
PNCnt0 = new(),
{ok, PNCnt1} = update(increment, 1, PNCnt0),
{ok, PNCnt2} = update(increment, 2, PNCnt1),
{ok, PNCnt3} = update(increment, 1, PNCnt2),
{ok, PNCnt4} = update(decrement, 1, PNCnt3),
?assertEqual({[{1, 2}, {2, 1}], [{1, 1}]}, PNCnt4).
update_decrement_by_test() ->
PNCnt0 = new(),
{ok, PNCnt1} = update({increment, 7}, 1, PNCnt0),
{ok, PNCnt2} = update({decrement, 5}, 1, PNCnt1),
?assertEqual({[{1, 7}], [{1, 5}]}, PNCnt2).
merge_test() ->
PNCnt1 = {[{<<"1">>, 1},
{<<"2">>, 2},
{<<"4">>, 4}], []},
PNCnt2 = {[{<<"3">>, 3},
{<<"4">>, 3}], []},
?assertEqual({[], []}, merge(new(), new())),
?assertEqual({[{<<"1">>,1},{<<"2">>,2},{<<"4">>,4},{<<"3">>,3}], []},
merge(PNCnt1, PNCnt2)).
merge_too_test() ->
PNCnt1 = {[{<<"5">>, 5}], [{<<"7">>, 4}]},
PNCnt2 = {[{<<"6">>, 6}, {<<"7">>, 7}], [{<<"5">>, 2}]},
?assertEqual({[{<<"5">>, 5},{<<"6">>,6}, {<<"7">>, 7}], [{<<"7">>, 4}, {<<"5">>, 2}]},
merge(PNCnt1, PNCnt2)).
equal_test() ->
PNCnt1 = {[{1, 2}, {2, 1}, {4, 1}], [{1, 1}, {3, 1}]},
PNCnt2 = {[{1, 1}, {2, 4}, {3, 1}], []},
PNCnt3 = {[{1, 2}, {2, 1}, {4, 1}], [{3, 1}, {1, 1}]},
PNCnt4 = {[{4, 1}, {1, 2}, {2, 1}], [{1, 1}, {3, 1}]},
?assertNot(equal(PNCnt1, PNCnt2)),
?assert(equal(PNCnt3, PNCnt4)),
?assert(equal(PNCnt1, PNCnt3)).
usage_test() ->
PNCnt1 = new(),
PNCnt2 = new(),
?assert(equal(PNCnt1, PNCnt2)),
{ok, PNCnt1_1} = update({increment, 2}, a1, PNCnt1),
{ok, PNCnt2_1} = update(increment, a2, PNCnt2),
PNCnt3 = merge(PNCnt1_1, PNCnt2_1),
{ok, PNCnt2_2} = update({increment, 3}, a3, PNCnt2_1),
{ok, PNCnt3_1} = update(increment, a4, PNCnt3),
{ok, PNCnt3_2} = update(increment, a1, PNCnt3_1),
{ok, PNCnt3_3} = update({decrement, 2}, a5, PNCnt3_2),
{ok, PNCnt2_3} = update(decrement, a2, PNCnt2_2),
?assertEqual({[{a1, 3}, {a4, 1}, {a2, 1}, {a3, 3}], [{a5, 2}, {a2, 1}]},
merge(PNCnt3_3, PNCnt2_3)).
roundtrip_bin_test() ->
PN = new(),
{ok, PN1} = update({increment, 2}, <<"a1">>, PN),
{ok, PN2} = update({decrement, 1000000000000000000000000}, douglas_Actor, PN1),
{ok, PN3} = update(increment, [{very, ["Complex"], <<"actor">>}, honest], PN2),
{ok, PN4} = update(decrement, "another_acotr", PN3),
Bin = to_binary(PN4),
Decoded = from_binary(Bin),
?assert(equal(PN4, Decoded)).
-endif. | deps/riak_kv/src/riak_kv_pncounter.erl | 0.630912 | 0.484624 | riak_kv_pncounter.erl | starcoder |
-module(tql_lists).
%% API
-export([ all/1
, any/1
, droplast_n/2
, intersperse/2
, shuffle/1
, take/2
, uniq/1
, groups_of/2
]).
%%%---------------------------------------------------------------------
%%% API
%%%---------------------------------------------------------------------
%% @doc Conjunction of a list of booleans. Returns `true' if and only if
%% all the booleans are `true'.
-spec all([boolean()]) -> boolean().
all(Xs) ->
lists:all(fun tql:id/1, Xs).
%% @doc Disjunction of a list of booleans. Returns `true' if at least
%% one of the booleans is `true'.
-spec any([boolean()]) -> boolean().
any(Xs) ->
lists:any(fun tql:id/1, Xs).
%% @doc Drops the last `N' entries from the given list.
-spec droplast_n(N :: non_neg_integer(), [X]) -> [X].
droplast_n(_, L = []) ->
L;
droplast_n(0, L) ->
L;
droplast_n(N, L) ->
droplast_n(N - 1, lists:droplast(L)).
%% @doc Place the given value between all members of the given list.
-spec intersperse(X, [X]) -> [X].
intersperse(_S, L = [_]) ->
L;
intersperse(S, [X | Xs]) ->
[X, S | intersperse(S, Xs)].
%% @doc Shuffle a list.
shuffle(L) ->
[X || {_, X} <- lists:sort([{rand:uniform(), X} || X <- L])].
%% @doc Take the first `N' elements from the given list.
-spec take(N :: non_neg_integer(), [X]) -> [X].
take(0, _) ->
[];
take(_, []) ->
[];
take(N, [X | Xs]) ->
[X | take(N-1, Xs)].
%% @doc Returns only unique elements from the given list, filtering out
%% duplicates.
%%
%% Elements are considered to be different if they do not match (`=:=').
-spec uniq([A]) -> [A].
uniq(L) ->
sets:to_list(sets:from_list(L)).
%% @doc Splits a list of items into sublists of size equal to N
-spec groups_of(pos_integer(), [Data]) -> [[Data]].
groups_of(N, Xs) -> groups_of(N, Xs, []).
groups_of(_, [], Acc) -> lists:reverse(Acc);
groups_of(N, Data, Acc) when length(Data) =< N -> lists:reverse([Data | Acc]);
groups_of(N, Data, Acc) ->
{Group, Rest} = lists:split(N, Data),
groups_of(N, Rest, [Group | Acc]).
%% Local variables:
%% mode: erlang
%% erlang-indent-level: 2
%% indent-tabs-mode: nil
%% fill-column: 72
%% coding: latin-1
%% End: | src/tql_lists.erl | 0.508056 | 0.558869 | tql_lists.erl | starcoder |
%%% @doc Internal representation and logic of a LSL match.
-module(lsl_core).
-author('<EMAIL>').
-type stick_state() :: clean | crossed.
-type row_state() :: [stick_state()].
-type board() :: [row_state()].
-type history() :: [row_state()].
-opaque match() :: #{ board => board()
, history => history()
}.
-export_type([match/0]).
-type row() :: pos_integer().
-type col() :: pos_integer().
-type length() :: pos_integer().
-export_type([row/0, col/0, length/0]).
-type stick_snapshot() :: i | x.
-type row_snapshot() :: [stick_snapshot(), ...].
-type board_snapshot() :: [row_snapshot(), ...].
-export_type([stick_snapshot/0, row_snapshot/0, board_snapshot/0]).
-type cross_result() :: next | won | lost.
-export_type([cross_result/0]).
-export([new/1, rows/1, snapshot/1]).
-export([cross/4, undo/1, last_result/1, turns/1]).
-export([print/1, to_json/1]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% EXPORTED FUNCTIONS
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% @doc Generates a new match.
-spec new(pos_integer()) -> match().
new(Rows) when is_integer(Rows), Rows >= 2 ->
Board = [lists:duplicate(Row, clean) || Row <- lists:seq(1, Rows)],
#{board => Board, history => []};
new(_Rows) -> throw(invalid_board).
%% @doc Returns the number of rows for the match
-spec rows(match()) -> pos_integer().
rows(#{board := Board}) -> length(Board).
%% @doc Returns a snapshot of a match
-spec snapshot(match()) -> board_snapshot().
snapshot(#{board := Board}) -> do_snapshot(Board).
%% @doc Crosses a couple of adjacent sticks
-spec cross(row(), col(), length(), match()) -> {cross_result(), match()}.
cross(Row, Col, Length, Match) ->
validate_bounds(Row, Col, Length, rows(Match)),
#{board := Board, history := History} = Match,
{Up, OldRow, Down} = split_board_at(Row, Board),
{Left, Rest} = lists:split(Col - 1, OldRow),
{ToCross, Right} = lists:split(Length, Rest),
validate_not_crossed(ToCross),
Middle = lists:duplicate(Length, crossed),
NewRow = Left ++ Middle ++ Right,
NewBoard = Up ++ [NewRow|Down],
{ cross_result(NewBoard)
, Match#{board := NewBoard, history := [OldRow|History]}
}.
%% @doc Undoes the last move
%% @throws no_history if there are no moves to undo
-spec undo(match()) -> match().
undo(Match = #{history := [Row|History]}) ->
#{board := Board} = Match,
RowNum = length(Row),
{Up, _, Down} = split_board_at(RowNum, Board),
NewBoard = Up ++ [Row|Down],
Match#{board := NewBoard, history := History}.
%% @doc Returns the last cross result.
%% In other words, the status of the match
-spec last_result(match()) -> cross_result().
last_result(#{board := Board}) ->
cross_result(Board).
%% @doc How many turns have been played.
%% In other words, the length of the history
-spec turns(match()) -> non_neg_integer().
turns(#{history := History}) -> length(History).
%% @doc returns a printable version of the board
-spec print(match()) -> iodata().
print(#{board := Board}) ->
RowWidth = length(Board),
[print(Row, RowWidth) || Row <- Board].
%% @doc returns a json-able version of the board
-spec to_json(match()) -> [[boolean()]].
to_json(#{board := Board}) ->
[[Item == crossed || Item <- Row] || Row <- Board].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% INTERNAL FUNCTIONS
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
validate_bounds(Row, Col, Length, Rows)
when Row > 0
, Row =< Rows
, Col > 0
, Length > 0
, Col =< Row
, Col + Length - 1 =< Row -> ok;
validate_bounds(_Row, _ColStart, _ColEnd, _Rows) -> throw(out_of_bounds).
validate_not_crossed(Sticks) ->
case lists:member(crossed, Sticks) of
true -> throw(already_crossed);
false -> ok
end.
cross_result(Board) ->
case [clean || Row <- Board, clean <- Row] of
[] -> lost;
[clean] -> won;
[clean|_] -> next
end.
print(Row, RowWidth) ->
Padding = lists:duplicate(RowWidth - length(Row), $\s),
[Padding, do_print(Row, <<>>), Padding, $\n].
do_print([clean], Acc) -> <<Acc/binary, "|">>;
do_print([crossed], Acc) -> <<Acc/binary, "+">>;
do_print([crossed, clean], Acc) -> <<Acc/binary, "+ |">>;
do_print([clean | Sticks], Acc) ->
do_print(Sticks, <<Acc/binary, "| ">>);
do_print([crossed, clean | Sticks], Acc) ->
do_print(Sticks, <<Acc/binary, "+ | ">>);
do_print([crossed, crossed | Sticks], Acc) ->
do_print([crossed | Sticks], <<Acc/binary, "+-">>).
do_snapshot(clean) -> i;
do_snapshot(crossed) -> x;
do_snapshot(List) -> [do_snapshot(Elem) || Elem <- List].
split_board_at(RowNum, Board) ->
{Up, [Row|Down]} = lists:split(RowNum - 1, Board),
{Up, Row, Down}. | src/core/lsl_core.erl | 0.513668 | 0.526038 | lsl_core.erl | starcoder |
%%%-------------------------------------------------------------------
%% @copyright <NAME>
%% @author <NAME> <<EMAIL>>
%% @version {@vsn}, {@date} {@time}
%% @doc Library to turn rrd datastructures into rrd command strings.
%% @end
%%%-------------------------------------------------------------------
-module(errd_command).
-include_lib("errd_internal.hrl").
%% API
-export([format/1,
to_list/1,
create/3,
steps/2]).
%%====================================================================
%% API
%%====================================================================
%%--------------------------------------------------------------------
%% @spec (RrdRecord) -> RrdCommand::string()
%% RrdRecord = #rrd_create{} | #rrd_ds{} | #rrd_rra{}
%% @doc Converts the data structure describing the rrd command to
%% a string that can be executed by rrdtool.
%% @end
format(#rrd_create{file=File,start_time=Time,
step=Step,ds_defs=DSs,rra_defs=RRAs}) when is_integer(Step) ->
TimeStr = case Time of
now ->
"now-10s"; % default according to man rrdcreate
_ ->
Time
end,
Dstr = lists:flatten(string:join(lists:map(fun (D) -> format(D) end, DSs), " ")),
RRAstr = lists:flatten(string:join(lists:map(fun (D) -> format(D) end, RRAs), " ")),
lists:flatten(io_lib:format("create ~s -b ~s --step ~p ~s ~s~n", [File, TimeStr, Step, Dstr, RRAstr]));
format(#rrd_ds{name=Name,type=Type,args=Args}) when is_atom(Type) ->
io_lib:format("DS:~s:~s:~s", [Name, to_list(Type), Args]);
format(#rrd_rra{cf=CF,args=Args}) when is_atom(CF) ->
io_lib:format("RRA:~s:~s", [to_list(CF), Args]);
format(#rrd_update{file=File, time=Time, updates=Updates}) when is_list(File) ->
TimeFmt = case Time of
now ->
"N";
_ ->
Time
end,
{Template, Update} = format(Updates),
lists:flatten(io_lib:format("update ~s -t ~s ~s:~s~n", [File, Template, TimeFmt, Update]));
format([#rrd_ds_update{} | _Tail] = List) ->
format_updates(List, [], []).
%% @spec (File::string(), DSName::string(), Type) -> #rrd_create{}
%% Type = guage | counter | derive | absolute
%% @doc Creates the #rrd_create{} command data structure for a data
%% source called DSName, in a file named File. The data source Type
%% determines how rrdtool will treat updates (see
%% http://oss.oetiker.ch/rrdtool/ for more information).
%% @end
create(File,DSName,Type) when Type == gauge; Type == counter;
Type == derive; Type == absolute ->
#rrd_create{file=File,
ds_defs=[#rrd_ds{name=DSName,type=Type,
args="900:0:U"}],
rra_defs=[#rrd_rra{cf=average,
args="0.5:1:288"}, % 1 day of 5min averages
#rrd_rra{cf=average,
args="0.5:12:168"}, % 7 days of 1hr averages
#rrd_rra{cf=average,
args="0.5:288:365"}, % 1 year of daily average
#rrd_rra{cf=min,
args="0.5:1:288"}, % 1 day of 5min averages
#rrd_rra{cf=min,
args="0.5:12:168"}, % 7 days of 1hr averages
#rrd_rra{cf=min,
args="0.5:288:365"}, % 1 year of daily average
#rrd_rra{cf=max,
args="0.5:1:288"}, % 1 day of 5min averages
#rrd_rra{cf=max,
args="0.5:12:168"}, % 7 days of 1hr averages
#rrd_rra{cf=max,
args="0.5:288:365"}, % 1 year of daily average
#rrd_rra{cf=hwpredict,
args="2016:0.1:0.0035:288"} % 1 week of forecast
]}.
%%====================================================================
%% Internal functions
%%====================================================================
%% @spec steps(Unit::atom(), Step::integer()) -> Steps::integer()
%% Unit = day | week
%% @doc Calculates the number seconds per Step given the number of Steps in a time period.
%%
%% @end
steps(day, 1) ->
86400;
steps(day, Step) ->
round(steps(day, 1) / Step);
steps(week, 1) ->
7 * steps(day, 1);
steps(week, Step) ->
round(steps(week, 1) / Step).
one_day_test()->
?assert(steps(day, 300) == 288).
one_week_test() ->
?assert(steps(week, 300) == 2016).
%% @spec to_list(atom()) -> string()
%% @doc Converts the given atom to an upper case string.
%% @end
to_list(S) when is_atom(S) ->
string:to_upper(atom_to_list(S)).
format_updates([], Template, Update) ->
{string:join(lists:reverse(Template), ":"), string:join(lists:reverse(Update), ":")};
format_updates([#rrd_ds_update{name=Name, value=Value} | Tail], Template, Update) ->
format_updates(Tail, [Name | Template], [value_to_list(Value) | Update]).
value_to_list(unknown) ->
"U";
value_to_list(Value) when is_list(Value) ->
Value;
value_to_list(Value) when is_atom(Value) ->
atom_to_list(Value);
value_to_list(Value) when is_integer(Value) ->
integer_to_list(Value);
value_to_list(Value) when is_float(Value) ->
float_to_list(Value);
value_to_list(Value) when is_binary(Value) ->
binary_to_list(Value).
join_test() ->
?assert(string:join(["This", "is", "a", "test."], " ") == "This is a test."),
?assert(string:join(["test."], " ") == "test.").
% vim: set ts=4 sw=4 expandtab: | src/errd_command.erl | 0.646795 | 0.486088 | errd_command.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_kv_crdt: A general purpose bridge between a CRDT and riak_object
%%
%% Copyright (c) 2007-2013 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_kv_crdt).
-export([update/3, merge/1, value/2, new/3,
supported/1, to_mod/1, from_mod/1, from_mod/2, mod_map/1]).
-export([to_binary/2, to_binary/1, from_binary/1]).
-export([log_merge_errors/4, meta/2, merge_value/2]).
%% MR helper funs
-export([value/1, counter_value/1, set_value/1, map_value/1]).
%% Other helper funs
-export([is_crdt/1]).
-include("riak_kv_wm_raw.hrl").
-include("riak_object.hrl").
-include_lib("riak_kv_types.hrl").
-ifdef(TEST).
-ifdef(EQC).
-compile(export_all).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-define(TAG, 69).
-define(V1_VERS, 1).
-define(V2_VERS, 2).
-ifdef(namespaced_types).
-type riak_kv_crdt_dict() :: dict:dict().
-else.
-type riak_kv_crdt_dict() :: dict().
-endif.
-type crdts() :: [{DT_MOD::module(), crdt()}].
-type ro_content() :: {Meta::riak_kv_crdt_dict(), Value::binary()}.
-type ro_contents() :: [ro_content()].
-type precondition_error() :: {error, {precondition, {not_present, term()}}}.
%% @doc applies the given `Operation' to the merged value. first
%% performs a merge, and then applies the update.
%%
%% NOTE: operation needs to be a `?CRDT_OP' in case of siblings of different
%% types
%%
%% @see merge/1
-spec update(riak_object:riak_object(), riak_dt:actor(), riak_dt:operation()) ->
riak_object:riak_object() | precondition_error().
update(RObj, Actor, Operation) ->
{CRDTs0, Siblings} = merge_object(RObj),
case update_crdt(CRDTs0, Actor, Operation) of
{error, _}=E ->
E;
CRDTs ->
update_object(RObj, CRDTs, Siblings)
end.
%% @doc Merge all sibling values that are CRDTs into a single value
%% for that CRDT type. NOTE: handles sibling types. For example if
%% there are 5 siblings, 2 or which are riak_dt_pncounter, and 2 are
%% riak_dt_vvorset, and 1 user supplied opaque value, then the results
%% is a converge counter, a converged set, and the opaque sibling, a
%% total of 3 sibings. Hopefully with bucket types, sibling types will
%% NEVER occur.
-spec merge(riak_object:riak_object()) -> riak_object:riak_object().
merge(RObj) ->
{CRDTs, Siblings} = merge_object(RObj),
update_object(RObj, CRDTs, Siblings).
%% @doc for the given riak_object `RObj' and the provided `Type',
%% which must be a support riak_dt crdt module, returns an update
%% context, and user value. Performs a merge, then gets the CRDT end
%% user value.
%% @see merge/1
-spec value(riak_object:riak_object(), module()) -> {{binary(), riak_dt:value()}, [{atom(), atom(), number()}]}.
value(RObj, Type) ->
{CRDTs, _NonCRDTSiblings} = merge_object(RObj),
DType = orddict:find(Type, CRDTs),
{crdt_value(Type, DType), crdt_stats(Type, DType)}.
%% @doc convenience function for (e.g.) MapReduce. Attempt to get a
%% CRDT value for a given object. Checks the bucket props for the
%% object, if it has a datatype entry, uses that to get value. Returns
%% either a tuple of `{Type, Value}' or `undefined' if not a 2.0 CRDT.
-spec value(riak_object:riak_object()) -> {atom(), term()} | undefined.
value(RObj) ->
Bucket = riak_object:bucket(RObj),
case riak_core_bucket:get_bucket(Bucket) of
BProps when is_list(BProps) ->
Type = proplists:get_value(datatype, BProps),
Mod = riak_kv_crdt:to_mod(Type),
case supported(Mod) of
true ->
{{_Ctx, V}, _Stats} = value(RObj, Mod),
{Type, V};
false ->
undefined
end
end.
%% @doc convenience for (e.g.) MapReduce functions. Pass an object,
%% get a 2.0+ counter type value, or zero if no counter is present.
-spec counter_value(riak_object:riak_object()) -> integer().
counter_value(RObj) ->
{{_Ctx, Count}, _Stats} = value(RObj, ?COUNTER_TYPE),
Count.
%% @doc convenience for (e.g.) MapReduce functions. Pass an object,
%% get a 2.0+ Set type value, or `[]' if no Set is present.
-spec set_value(riak_object:riak_object()) -> list().
set_value(RObj) ->
{{_Ctx, Set}, _Stats} = value(RObj, ?SET_TYPE),
Set.
%% @doc convenience for (e.g.) MapReduce functions. Pass an object,
%% get a 2.0+ Map type value, or `[]' if no Map is present.
-spec map_value(riak_object:riak_object()) -> proplists:proplist().
map_value(RObj) ->
{{_Ctx, Map}, _Stats} = value(RObj, ?MAP_TYPE),
Map.
%% @doc convenience function for (e.g.) Yokozuna. Checks the bucket props for
%% the object, if it has a supported datatype entry, returns true; otherwise
%% false if not a 2.0 CRDT.
-spec is_crdt(riak_object:riak_object()) -> boolean()|{error,_}.
is_crdt(RObj) ->
Bucket = riak_object:bucket(RObj),
case riak_core_bucket:get_bucket(Bucket) of
BProps when is_list(BProps) ->
Type = proplists:get_value(datatype, BProps),
Mod = riak_kv_crdt:to_mod(Type),
supported(Mod);
{error, _}=Err ->
Err
end.
%% @TODO in riak_dt change value to query allow query to take an
%% argument, (so as to query subfields of map, or set membership etc)
-spec crdt_value(module(), error | {ok, {riak_kv_crdt_dict(), crdt()}}) ->
{binary(), riak_dt:value()}.
crdt_value(Type, error) ->
{<<>>, Type:value(Type:new())};
crdt_value(Type, {ok, {_Meta, ?CRDT{mod=Type, value=Value}}}) ->
{get_context(Type, Value), Type:value(Value)}.
crdt_stats(_, error) -> [];
crdt_stats(Type, {ok, {_Meta, ?CRDT{mod=Type, value=Value}}}) ->
case lists:member({stat,2}, Type:module_info(exports)) of
true ->
EnabledStats = app_helper:get_env(riak_kv, datatype_stats, ?DATATYPE_STATS_DEFAULTS),
lists:foldr(fun(S, Acc) ->
case Type:stat(S, Value) of
undefined -> Acc;
Stat -> [{from_mod(Type), S, Stat}|Acc]
end
end, [], EnabledStats);
false -> []
end.
%% @private Merge contents _AND_ meta
-spec merge_object(riak_object:riak_object()) ->
{crdts(), list()}.
merge_object(RObj) ->
Contents = riak_object:get_contents(RObj),
{CRDTs, NonCRDTSiblings, Errors} = merge_contents(Contents),
Bucket = riak_object:bucket(RObj),
Key = riak_object:key(RObj),
log_errors(Bucket, Key, Errors),
maybe_log_sibling_crdts(Bucket, Key, CRDTs),
{CRDTs, NonCRDTSiblings}.
%% @doc log any accumulated merge errors
-spec log_merge_errors(riak_object:bucket(), riak_object:key(), crdts(), list()) -> ok.
log_merge_errors(Bucket, Key, CRDTs, Errors) ->
log_errors(Bucket, Key, Errors),
maybe_log_sibling_crdts(Bucket, Key, CRDTs).
log_errors(_, _, []) ->
ok;
log_errors(Bucket, Key, Errors) ->
lager:error("Error(s) deserializing CRDT at ~p ~p: ~p~n", [Bucket, Key, Errors]).
maybe_log_sibling_crdts(Bucket, Key, CRDTs) when length(CRDTs) > 1 ->
lager:error("Sibling CRDTs at ~p ~p: ~p~n", [Bucket, Key, orddict:fetch_keys(CRDTs)]);
maybe_log_sibling_crdts(_, _, _) ->
ok.
%% @private Only merge the values of CRDTs If there are siblings that
%% are CRDTs BUT NOT THE SAME TYPE (don't do that!!) Merge
%% type-to-type and store a single sibling per-type If a non-CRDT data
%% are present, keep them as sibling values
-spec merge_contents(ro_contents()) ->
{crdts(), ro_contents(), Errors::list()}.
merge_contents(Contents) ->
lists:foldl(fun merge_value/2,
{orddict:new(), [], []},
Contents).
%% @doc if the content is a CRDT, de-binary it, merge it and store the
%% most merged value in the accumulator dictionary.
-spec merge_value(ro_content(), {crdts(), ro_contents(), Errors::list()}) ->
{crdts(), ro_contents(), Errors::list()}.
merge_value({MD, <<?TAG:8/integer, Version:8/integer, CRDTBin/binary>>=Content},
{Dict, NonCRDTSiblings, Errors}) ->
case deserialize_crdt(Version, CRDTBin) of
{ok, CRDT=?CRDT{mod=Mod, value=Val, ctype=CType}} ->
D2 = orddict:update(Mod, fun({Meta, Mergedest=?CRDT{value=Value}}) ->
NewMeta = merge_meta(CType, Meta, MD),
NewVal = Mod:merge(Value, Val),
{NewMeta, Mergedest?CRDT{value = NewVal}}
end,
{MD, CRDT}, Dict),
{D2, NonCRDTSiblings, Errors};
{error, Error} ->
{Dict, [{MD, Content} | NonCRDTSiblings], [Error | Errors]}
end;
merge_value(NonCRDT, {Dict, NonCRDTSiblings, Errors}) ->
{Dict, [NonCRDT | NonCRDTSiblings], Errors}.
deserialize_crdt(?V1_VERS, CounterBin) ->
v1_counter_from_binary(CounterBin);
deserialize_crdt(?V2_VERS, CRDTBin) ->
crdt_from_binary(CRDTBin);
deserialize_crdt(V, _Bin) ->
{error, {invalid_version, V}}.
counter_op(N) when N < 0 ->
{decrement, -N};
counter_op(N) ->
{increment, N}.
%% @private Apply the updates to the CRDT. If there is no context for
%% the operation then apply the operation to the local merged replica,
%% and risk precondition errors and unexpected behaviour.
%%
%% @see split_ops/1
-spec update_crdt(orddict:orddict(), riak_dt:actor(), crdt_op() | non_neg_integer()) ->
orddict:orddict() | precondition_error().
update_crdt(Dict, Actor, Amt) when is_integer(Amt) ->
%% Handle legacy 1.4 counter operation, upgrade to current OP
CounterOp = counter_op(Amt),
Op = ?CRDT_OP{mod=?V1_COUNTER_TYPE, op=CounterOp},
update_crdt(Dict, Actor, Op);
update_crdt(Dict, Actor, ?CRDT_OP{mod=Mod, op=Op, ctx=undefined}) ->
{Meta, Record, Value} = fetch_with_default(Mod, Dict),
case Mod:update(Op, Actor, Value) of
{ok, NewVal} ->
orddict:store(Mod, {Meta, Record?CRDT{value=NewVal}}, Dict);
{error, _}=E -> E
end;
update_crdt(Dict, Actor, ?CRDT_OP{mod=Mod, op=Ops, ctx=OpCtx}) when Mod==?MAP_TYPE;
Mod==?SET_TYPE->
case orddict:find(Mod, Dict) of
error ->
%% No local replica of this CRDT, apply the ops to a new
%% instance
case update_crdt(Mod, Ops, Actor, Mod:new(), OpCtx) of
{ok, InitialVal} ->
orddict:store(Mod, {undefined, to_record(Mod, InitialVal)}, Dict);
E ->
E
end;
{ok, {Meta, LocalCRDT=?CRDT{value=LocalReplica}}} ->
case update_crdt(Mod, Ops, Actor, LocalReplica, OpCtx) of
{error, _}=E -> E;
{ok, NewVal} ->
orddict:store(Mod, {Meta, LocalCRDT?CRDT{value=NewVal}}, Dict)
end
end.
%% @private call update/3 or update/4 depending on context value
-spec update_crdt(module(), term(), riak_dt:actor(), term(),
undefined | riak_dt_vclock:vclock()) ->
term().
update_crdt(Mod, Ops, Actor, CRDT, undefined) ->
Mod:update(Ops, Actor, CRDT);
update_crdt(Mod, Ops, Actor, CRDT, Ctx0) ->
Ctx = get_context(Ctx0),
Mod:update(Ops, Actor, CRDT, Ctx).
-spec get_context(undefined | binary()) -> riak_dt_vclock:vclock().
get_context(undefined) ->
undefined;
get_context(Bin) ->
riak_dt_vclock:from_binary(Bin).
%% @doc get the merged CRDT for type `Mod' from the dictionary. If it
%% is not present generate a default entry
fetch_with_default(Mod, Dict) ->
case orddict:find(Mod, Dict) of
error ->
Value = Mod:new(),
{undefined, to_record(Mod, Value), Value};
{ok, {Meta, Record=?CRDT{value=Value}}} ->
{Meta, Record, Value}
end.
%% This uses an exported but marked INTERNAL
%% function of `riak_object:set_contents' to preserve
%% non-crdt sibling values and Metadata
%% NOTE: if `Meta' is `undefined' then this
%% is a new crdt.
update_object(RObj, CRDTs, SiblingValues) ->
%% keep non-counter siblings, too
CRDTSiblings = [{meta(Meta, CRDT), to_binary(CRDT)} || {_Mod, {Meta, CRDT}} <- orddict:to_list(CRDTs)],
riak_object:set_contents(RObj, CRDTSiblings ++ SiblingValues).
meta(undefined, ?CRDT{ctype=CType}) ->
Now = os:timestamp(),
M = dict:new(),
M2 = dict:store(?MD_LASTMOD, Now, M),
M3 = dict:store(?MD_VTAG, riak_kv_util:make_vtag(Now), M2),
dict:store(?MD_CTYPE, CType, M3);
meta(Meta, _CRDT) ->
Meta.
%% Just a simple take the largest for meta values based on last mod
merge_meta(CType, Meta1, Meta2) ->
Meta = case later(lastmod(Meta1), lastmod(Meta2)) of
true ->
Meta1;
false ->
Meta2
end,
%% Make sure the content type is
%% up-to-date
drop_the_dot(dict:store(?MD_CTYPE, CType, Meta)).
%% @private Never keep a dot for CRDTs, we want all values to survive
%% a riak_obect:merge/2
drop_the_dot(Dict) ->
dict:erase(?DOT, Dict).
lastmod(Meta) ->
dict:fetch(?MD_LASTMOD, Meta).
later(TS1, TS2) ->
case timer:now_diff(TS1, TS2) of
Before when Before < 0 ->
false;
_ ->
true
end.
new(B, K, Mod) ->
CRDT=#crdt{ctype=CType} = to_record(Mod, Mod:new()),
Bin = to_binary(CRDT),
Doc0 = riak_object:new(B, K, Bin, CType),
riak_object:set_vclock(Doc0, vclock:fresh()).
%% @doc turn a `crdt()' record into a binary for storage on disk /
%% passing on the network
-spec to_binary(crdt()) -> binary().
to_binary(CRDT=?CRDT{mod=?V1_COUNTER_TYPE}) ->
to_binary(CRDT, ?V1_VERS);
to_binary(?CRDT{mod=Mod, value=Value}) ->
%% Store the CRDT in the version that is negotiated cluster wide
Version = crdt_version(Mod),
{ok, CRDTBin} = Mod:to_binary(Version, Value),
Type = atom_to_binary(Mod, latin1),
TypeLen = byte_size(Type),
<<?TAG:8/integer, ?V2_VERS:8/integer, TypeLen:32/integer, Type:TypeLen/binary, CRDTBin/binary>>.
%% @doc turn a `crdt()' record into a `Version' binary for storage on
%% disk / passing on the network
-spec to_binary(crdt(), Version::pos_integer()) -> binary().
to_binary(CRDT, ?V2_VERS) ->
to_binary(CRDT);
to_binary(?CRDT{mod=?V1_COUNTER_TYPE, value=Value}, ?V1_VERS) ->
CounterBin = ?V1_COUNTER_TYPE:to_binary(Value),
<<?TAG:8/integer, ?V1_VERS:8/integer, CounterBin/binary>>.
%% @doc deserialize a crdt from it's binary format. The binary must
%% start with the riak_kv_crdt tag and a version If the binary can be
%% deserailised into a `crdt()' returns `{ok, crdt()}', otherwise
%% `{error, term()}'
-spec from_binary(binary()) -> {ok, crdt()} | {error, term()}.
from_binary(<<?TAG:8/integer, ?V1_VERS:8/integer, CounterBin/binary>>) ->
v1_counter_from_binary(CounterBin);
from_binary(<<?TAG:8/integer, ?V2_VERS:8/integer, CRDTBin/binary>>) ->
crdt_from_binary(CRDTBin);
from_binary(Bin) ->
{error, {invalid_binary, Bin}}.
%% @private attempt to deserialize a v1 counter (riak 1.4.x counter)
v1_counter_from_binary(CounterBin) ->
try
to_record(?V1_COUNTER_TYPE, ?V1_COUNTER_TYPE:from_binary(CounterBin)) of
?CRDT{}=Counter ->
{ok, Counter}
catch
Class:Err ->
{error, {Class, Err}}
end.
%% @private attempt to deserialize a v2 CRDT (That is a data type, not a 1.4 counter)
crdt_from_binary(<<TypeLen:32/integer, Type:TypeLen/binary, CRDTBin/binary>>) ->
try
Mod = binary_to_existing_atom(Type, latin1),
%% You don't need a target version, as Mod:from_binary/1 will
%% always give you the highest version you can work with,
%% assuming Mod:to_binary/2 was called before storing.
{ok, Val} = Mod:from_binary(CRDTBin),
to_record(Mod, Val) of
?CRDT{}=CRDT ->
{ok, CRDT}
catch
Class:Err ->
{error, {Class, Err}}
end;
crdt_from_binary(_) ->
{error, {invalid_crdt_binary}}.
to_record(?V1_COUNTER_TYPE, Val) ->
?V1_COUNTER_TYPE(Val);
to_record(?COUNTER_TYPE, Val) ->
?COUNTER_TYPE(Val);
to_record(?MAP_TYPE, Val) ->
?MAP_TYPE(Val);
to_record(?SET_TYPE, Val) ->
?SET_TYPE(Val).
%% @doc Check cluster capability for crdt support
supported(Mod) ->
lists:member(Mod, riak_core_capability:get({riak_kv, crdt}, [])).
%% @private get the binary version for a crdt mod, default to `1' for
%% pre-versioned.
-spec crdt_version(module()) -> pos_integer().
crdt_version(Mod) ->
%% due to the riak-2.0.4 disaster where mixed format maps were
%% written to disk (see riak#667 for more) override the cluster
%% negotiated capability with an env var, this is to ensure that
%% in a multi-cluster environment, Epoch 1 binary format is used until
%% all clusters are Epoch 2 capable.
case app_helper:get_env(riak_kv, mdc_crdt_epoch) of
1 ->
proplists:get_value(Mod, ?E1_DATATYPE_VERSIONS, 1);
_ ->
%% use any term except the integer `1' to unset app env
%% and use capability negotiated CRDT version epoch.
%% Default to 1 for any unknown CRDT version.
NegotiatedCap = riak_core_capability:get({riak_kv, crdt_epoch_versions}, ?E1_DATATYPE_VERSIONS),
proplists:get_value(Mod, NegotiatedCap, 1)
end.
%% @doc turn a string token / atom into a
%% CRDT type
to_mod("sets") ->
?SET_TYPE;
to_mod("counters") ->
?COUNTER_TYPE;
to_mod("maps") ->
?MAP_TYPE;
to_mod(?CRDT{mod=Mod}) ->
Mod;
to_mod(Type) ->
proplists:get_value(Type, ?MOD_MAP).
from_mod(Mod) ->
from_mod(Mod, ?MOD_MAP).
from_mod(Mod, ModMap) ->
case lists:keyfind(Mod, 2, ModMap) of
{Type, Mod} ->
Type;
false ->
undefined
end.
%% @doc mapping of atom/shortname types (map, set, counter etc) to
%% actual modules that implement them. Notice the mod map for maps is
%% different since embedded types are different.
-spec mod_map(atom()) -> [{atom(), atom()}].
mod_map(map) ->
?EMBEDDED_TYPES;
mod_map(_) ->
?MOD_MAP.
%% @doc the update context can be empty for some types.
%% Those that support an precondition_context should supply
%% a smaller than Type:to_binary(Value) binary context.
get_context(Type, Value) ->
case lists:member({precondition_context, 1}, Type:module_info(exports)) of
true -> riak_dt_vclock:to_binary(Type:precondition_context(Value));
false -> <<>>
end.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
is_crdt_test_() ->
{setup,
fun() ->
meck:new(riak_core_bucket),
meck:new(riak_core_capability, []),
meck:expect(riak_core_capability, get,
fun({riak_kv, crdt}, []) ->
[pncounter,riak_dt_pncounter,riak_dt_orswot,
riak_dt_map];
(X, Y) -> meck:passthrough([X, Y]) end),
ok
end,
fun(_) ->
meck:unload(riak_core_capability),
meck:unload(riak_core_bucket)
end,
[
?_test(begin
meck:expect(riak_core_bucket, get_bucket,
fun(_Bucket) -> [{datatype, foo}] end),
Bucket = {<<"counterz">>, <<"crdt">>},
BTProps = riak_core_bucket:get_bucket(Bucket),
?assertEqual(foo, proplists:get_value(datatype, BTProps)),
?assertNot(is_crdt(riak_object:new(Bucket, <<"k1">>, hello)))
end),
?_test(begin
Bucket = {<<"t">>, <<"bucketjumpy">>},
?assertNot(is_crdt(riak_object:new(Bucket, <<"k1">>, hi)))
end),
?_test(begin
meck:expect(riak_core_bucket, get_bucket,
fun({<<"maps">>, _Name}) -> [{datatype, map}];
({<<"sets">>, _Name}) -> [{datatype, set}];
({<<"counters">>, _Name}) -> [{datatype, counter}];
({X, Y}) -> meck:passthrough([X, Y]) end),
Bucket1 = {<<"maps">>, <<"crdt">>},
Bucket2 = {<<"sets">>, <<"crdt">>},
Bucket3 = {<<"counters">>, <<"crdt">>},
BTPropsMap = riak_core_bucket:get_bucket(Bucket1),
BTPropsSet = riak_core_bucket:get_bucket(Bucket2),
BTPropsCounter = riak_core_bucket:get_bucket(Bucket3),
?assertEqual(map, proplists:get_value(datatype, BTPropsMap)),
?assertEqual(set, proplists:get_value(datatype, BTPropsSet)),
?assertEqual(counter,
proplists:get_value(datatype, BTPropsCounter)),
[?assert(is_crdt(riak_object:new(B, K, V)))
|| {B, K, V} <- [{Bucket1, <<"k1">>, hi},
{Bucket2, <<"k2">>, hey},
{Bucket3, <<"k3">>, hey}]]
end)]}.
-ifdef(EQC).
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) ->
io:format(user, Str, Args) end, P)).
-define(TEST_TIME_SECONDS, 10).
-define(TIMED_QC(Prop), eqc:quickcheck(?QC_OUT(eqc:testing_time(?TEST_TIME_SECONDS, Prop)))).
eqc_test_() ->
{timeout,
60,
?_test(?TIMED_QC(prop_binary_roundtrip()))}.
prop_binary_roundtrip() ->
?FORALL({_Type, Mod}, oneof(?MOD_MAP),
begin
{ok, ?CRDT{mod=SMod, value=SValue}} = from_binary(to_binary(?CRDT{mod=Mod, value=Mod:new()})),
conjunction([{module, equals(Mod, SMod)},
{value, Mod:equal(SValue, Mod:new())}])
end).
-endif.
-endif. | deps/riak_kv/src/riak_kv_crdt.erl | 0.606615 | 0.403743 | riak_kv_crdt.erl | starcoder |
%% ----------------------------------------------------------------------------
%%
%% oauth2: Erlang OAuth 2.0 implementation
%%
%% Copyright (c) 2012-2013 KIVRA
%%
%% Permission is hereby granted, free of charge, to any person obtaining a
%% copy of this software and associated documentation files (the "Software"),
%% to deal in the Software without restriction, including without limitation
%% the rights to use, copy, modify, merge, publish, distribute, sublicense,
%% and/or sell copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
%% DEALINGS IN THE SOFTWARE.
%%
%% ----------------------------------------------------------------------------
-module(oauth2_priv_set).
%%% API
-export([new/1]).
-export([union/2]).
-export([is_subset/2]).
-export([is_member/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% Invariant: Children are sorted increasingly by name.
-type priv_tree() :: {node, Name :: binary(), Children :: [priv_tree()]} | '*'.
%% Invariant:
%% The list of trees is sorted increasingly by the name of the root node.
-type priv_set() :: [priv_tree()].
%% @doc Constructs a new priv_set from a single path or a list of paths.
%% A path denotes a single privilege.
%% @end
-spec new(Paths) -> PrivSet when
Paths :: binary() | [binary()],
PrivSet :: priv_set().
new(Paths) when is_list(Paths) ->
lists:foldl(fun union/2, [], [make_forest(Path) || Path <- Paths]);
new(Path) when is_binary(Path) ->
make_forest(Path).
%% @doc Returns the union of Set1 and Set2, i.e., a set such that
%% any path present in either Set1 or Set2 is also present in the result.
%% @end
-spec union(Set1, Set2) -> Union when
Set1 :: priv_set(),
Set2 :: priv_set(),
Union :: priv_set().
union([H1={node, Name1, _}|T1], [H2={node, Name2, _}|T2]) when Name1 < Name2 ->
[H1|union(T1, [H2|T2])];
union([H1={node, Name1, _}|T1], [H2={node, Name2, _}|T2]) when Name1 > Name2 ->
[H2|union([H1|T1], T2)];
union([{node, Name, S1}|T1], [{node, Name, S2}|T2]) ->
[{node, Name, union(S1, S2)}|union(T1, T2)];
union(['*'|_], _) -> %% '*' in union with anything is still '*'.
['*'];
union(_, ['*'|_]) ->
['*'];
union([], Set) ->
Set;
union(Set, []) ->
Set.
%% @doc Return true if Set1 is a subset of Set2, i.e., if
%% every privilege held by Set1 is also held by Set2.
%% @end
-spec is_subset(Set1, Set2) -> Result when
Set1 :: priv_set(),
Set2 :: priv_set(),
Result :: boolean().
is_subset([{node, Name1, _}|_], [{node, Name2, _}|_]) when Name1 < Name2 ->
false; %% This tree isn't present in Set2 as per the invariant.
is_subset(Set1 = [{node, Name1, _}|_], [{node, Name2, _}|T2]) when Name1 > Name2 ->
is_subset(Set1, T2);
is_subset([{node, Name, S1}|T1], [{node, Name, S2}|T2]) ->
case is_subset(S1, S2) of
true ->
is_subset(T1, T2);
false ->
false
end;
is_subset(['*'|_], ['*'|_]) -> %% '*' is only a subset of '*'.
true;
is_subset(_, ['*'|_]) -> %% Everything is a subset of '*'.
true;
is_subset([], _) -> %% The empty set is a subset of every set.
true;
is_subset(_, _) ->
false.
%% @doc Returns true if Path is present in Set, i.e, if
%% the privilege denoted by Path is contained within Set.
%% @end
-spec is_member(Path, Set) -> Result when
Path :: binary(),
Set :: priv_set(),
Result :: boolean().
is_member(Path, Set) ->
is_subset(make_forest(Path), Set).
%%%===================================================================
%%% Internal functions
%%%===================================================================
-spec make_forest(Path) -> Forest when
Path :: binary() | list(),
Forest :: priv_set().
make_forest(Path) when is_binary(Path) ->
make_forest(binary:split(Path, <<".">>, [global]));
make_forest(Path) when is_list(Path) ->
[make_tree(Path)].
-spec make_tree(Path) -> Tree when
Path :: [binary()],
Tree :: priv_tree().
make_tree([<<"*">>|_]) ->
'*';
make_tree([N]) ->
make_node(N, []);
make_tree([H|T]) ->
make_node(H, [make_tree(T)]).
-spec make_node(Name, Children) -> Node when
Name :: binary(),
Children :: [priv_tree()],
Node :: priv_tree().
make_node(Name, Children) ->
{node, Name, Children}. | src/oauth2_priv_set.erl | 0.610686 | 0.403508 | oauth2_priv_set.erl | starcoder |
%% @author <NAME>
%% @copyright 2009 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc estring is a string manipulation library.
-module(estring).
-export([begins_with/2,
contains/2,
edit_distance/2,
edit_distance/3,
ends_with/2,
format/2,
is_integer/1,
random/1,
rot13/1,
similarity/2,
similarity/3,
similarity/4,
squeeze/1,
squeeze/2,
strip/1,
strip_split/2]).
-define(CHARS, "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% @spec begins_with(string(), string()) -> bool()
%% @doc Returns `true' if `String' begins with `SubString',
%% and `false' otherwise.
%% ```
%% > estring:begins_with("fancy pants", "fancy").
%% true
%% '''
-spec begins_with(string(), string()) -> boolean().
begins_with(String, SubString) ->
string:substr(String, 1, length(SubString)) =:= SubString.
%% @spec contains(string(), string()) -> bool()
%% @doc Returns `true' if `String' contains `SubString', and `false' otherwise.
%% ```
%% > estring:contains("los angeles", "angel").
%% true
%% '''
-spec contains(string(), string()) -> boolean().
contains(String, SubString) ->
string:str(String, SubString) > 0.
%% @spec edit_distance(String1::string(), String2::string()) -> integer()
%% @doc Returns the damerau-levenshtein edit distance between `String1' and
%% `String2'. Note the comparison is case sensitive.
%% ```
%% > estring:edit_distance("theater", "theatre").
%% 1
%% '''
-spec edit_distance(string(), string()) -> integer().
edit_distance(Source, Source) -> 0;
edit_distance(Source, []) -> length(Source);
edit_distance([], Source) -> length(Source);
edit_distance(Source, Target) ->
D1 = lists:seq(0, length(Target)),
outer_loop([[]|Source], [[]|Target], {D1, D1}, 1).
outer_loop([S1|[S0|S]], T, {D2, D1}, I) ->
D0 = inner_loop(T, [S1, S0], {[[]|D2], D1, [I]}),
outer_loop([S0|S], T, {D1, D0}, I + 1);
outer_loop([_S|[]], _, {_D1, D0}, _) ->
lists:last(D0).
inner_loop([_T|[]], _, {_D2, _D1, D0}) ->
lists:reverse(D0);
inner_loop([T1|[T0|T]], [S1, S0], {D2, D1, D0}) ->
[S1T1|[S1T0|_]] = D1,
Cost = if T0 =:= S0 -> 0; true -> 1 end,
NewDist1 = lists:min([hd(D0) + 1, S1T0 + 1, S1T1 + Cost]),
NewDist2 =
if T1 =/= [] andalso S1 =/= [] andalso T1 =:= S0 andalso T0 =:= S1 ->
lists:min([NewDist1, hd(D2) + Cost]);
true -> NewDist1
end,
inner_loop([T0|T], [S1, S0], {tl(D2), tl(D1), [NewDist2|D0]}).
%% @spec edit_distance(string(), string(), IgnoreCase::bool()) -> integer()
%% @doc Returns the damerau-levenshtein edit distance between `String1' and
%% `String2'. The comparison is case insensitive if `IgnoreCase' is `true'.
%% ```
%% > estring:edit_distance("receive", "RECIEVE", true).
%% 1
%% > estring:edit_distance("Cats", "cast", false).
%% 2
%% '''
-spec edit_distance(string(), string(), boolean()) -> integer().
edit_distance(String1, String2, true) ->
S1 = string:to_lower(String1),
S2 = string:to_lower(String2),
edit_distance(S1, S2);
edit_distance(String1, String2, false) ->
edit_distance(String1, String2).
%% @spec edit_distance_estimate(list(), list()) -> float()
%% @doc Establishes a very conservate lower bound for edit distance.
%% This is useful only for early exit evaluations.
-spec edit_distance_estimate(list(), list()) -> float().
edit_distance_estimate(L, L) -> 0.0;
edit_distance_estimate(L1, L2) ->
%% Divide the estimate by 2 because replacements will be double counted.
%% The downside of this is that inserts or deletes are undercounted.
edit_distance_estimate(lists:sort(L1), lists:sort(L2), 0.0) / 2.
edit_distance_estimate([], L, D) ->
D + length(L);
edit_distance_estimate(L, [], D) ->
D + length(L);
edit_distance_estimate([H1|L1], [H2|L2], D) ->
if
H1 =:= H2 ->
edit_distance_estimate(L1, L2, D);
H1 < H2 ->
edit_distance_estimate(L1, [H2|L2], D+1);
H1 > H2 ->
edit_distance_estimate([H1|L1], L2, D+1)
end.
%% @spec ends_with(string(), string()) -> bool()
%% @doc Returns `true' if `String' ends with `SubString', and `false' otherwise.
%% ```
%% > estring:ends_with("fancy pants", "pants").
%% true
%% '''
-spec ends_with(string(), string()) -> boolean().
ends_with(String, SubString) ->
begins_with(lists:reverse(String), lists:reverse(SubString)).
%% @spec format(string(), list()) -> string()
%% @doc Shortcut for `lists:flatten(io_lib:format(Format, Data))'.
%% ```
%% > estring:format("~w bottles of ~s on the wall", [99, "beer"]).
%% "99 bottles of beer on the wall"
%% '''
-spec format(string(), list()) -> string().
format(Format, Data) ->
lists:flatten(io_lib:format(Format, Data)).
%% @spec is_integer(string()) -> bool()
%% @doc Returns `true' if `String' is a string representation of an integer,
%% and `false' otherwise.
%% ```
%% > estring:is_integer("35").
%% true
%% > estring:is_integer("35.4").
%% false
%% '''
-spec is_integer(string()) -> boolean().
is_integer([]) ->
false;
is_integer(String) ->
lists:all(fun(C) -> C >= 48 andalso C =< 57 end, String).
%% @spec random(integer()) -> string()
%% @doc Returns a random alphanumeric string of length `N'.
%% ```
%% > estring:random(32).
%% "LzahJub1KOMS0U66mdXHtHyMMXIdxv1t"
%% '''
-spec random(N::integer()) -> string().
random(N) when N > 0->
random:seed(now()),
[random_character() || _ <- lists:seq(1, N)].
random_character() ->
lists:nth(random:uniform(62), ?CHARS).
%% @spec rot13(string()) -> string()
%% @doc Applies the rot13 substitution cipher to `String'.
%% ```
%% > estring:rot13("The Quick Brown Fox Jumps Over The Lazy Dog.").
%% "Gur Dhvpx Oebja Sbk Whzcf Bire Gur Ynml Qbt."
%% '''
-spec rot13(string()) -> string().
rot13(String) ->
[r13(C) || C <- String].
r13(C) when (C >= $A andalso C =< $M) -> C + 13;
r13(C) when (C >= $a andalso C =< $m) -> C + 13;
r13(C) when (C >= $N andalso C =< $Z) -> C - 13;
r13(C) when (C >= $n andalso C =< $z) -> C - 13;
r13(C) -> C.
%% @spec similarity(string(), string()) -> float()
%% @doc Returns a score between 0 and 1, representing how similar `Source' is to
%% `Target' based on the edit distance and normalized by the length of `Target'.
%% Note the order of `Source' and `Target' matters, and the comparison is case
%% sensitive.
%% ```
%% > estring:similarity("yahoo", "boohoo").
%% 0.5
%% > estring:similarity("boohoo", "yahoo").
%% 0.4
%% '''
-spec similarity(string(), string()) -> float().
similarity(Source, Source) -> 1.0;
similarity(Source, Target) ->
Score = (length(Target) - edit_distance(Source, Target)) / length(Target),
case Score > 0 of
true -> Score;
false -> 0.0
end.
%% @spec similarity(string(), string(), IgnoreCase::bool()) -> float()
%% @doc Returns a score between 0 and 1, representing how similar `Source' is to
%% `Target' based on the edit distance and normalized by the length of `Target'.
%% Note the order of `Source' and `Target' matters. The comparison is case
%% insensitive if `IgnoreCase' is `true'.
%% ```
%% > estring:similarity("linux", "Linux", true).
%% 1.0
%% '''
-spec similarity(string(), string(), boolean()) -> float().
similarity(Source, Target, true) ->
S = string:to_lower(Source),
T = string:to_lower(Target),
similarity(S, T);
similarity(Source, Target, false) ->
similarity(Source, Target).
%% @spec similarity(string(), string(), IgnoreCase::bool(), float()) ->
%% {ok, float()} | {error, limit_reached}
%% @doc Returns a score between 0 and 1, representing how similar `Source' is to
%% `Target' based on the edit distance and normalized by the length of `Target'.
%% Note the order of `Source' and `Target' matters. The comparison is case
%% insensitive if `IgnoreCase' is `true'. A simple heuristic is used
%% to estimate the upper bound for similarity between `Source' and `Target'.
%% If the estimate is less than `LowerLimit', then `{error, limit_reached}' is
%% returned immediately. otherwise `{ok, float()}' or `{error, limit_reached}'
%% is returned based on a call to {@link similarity/3. similarity/3}.
%% ```
%% > estring:similarity("linux", "microsoft", false, 0.5).
%% {error,limit_reached}
%% '''
-spec similarity(string(), string(), boolean(), float()) ->
{ok, float()} | {error, limit_reached}.
similarity(Source, Target, CaseInsensitive, LowerLimit) ->
{S, T} = case CaseInsensitive of
true -> {string:to_lower(Source), string:to_lower(Target)};
false -> {Source, Target}
end,
case similarity_estimate(S, T) >= LowerLimit of
true ->
Score = similarity(S, T),
case Score >= LowerLimit of
true -> {ok, Score};
false -> {error, limit_reached}
end;
false -> {error, limit_reached}
end.
%% @spec similarity_estimate(string(), string()) -> float()
%% @doc Establishes a very conservate upper bound for string similarity.
-spec similarity_estimate(string(), string()) -> float().
similarity_estimate(S, S) -> 1.0;
similarity_estimate(S, T) ->
DistanceEstimate = edit_distance_estimate(S, T),
SimilarityEstimate = (length(T) - DistanceEstimate ) / length(T),
case SimilarityEstimate > 0 of
true -> SimilarityEstimate;
false -> 0.0
end.
%% @spec squeeze(string()) -> string()
%% @doc Shortcut for `estring:squeeze(String, " ")'.
%% ```
%% > estring:squeeze("i need a squeeze!").
%% "i need a squeeze!"
%% '''
-spec squeeze(string()) -> string().
squeeze(String) -> squeeze(String, " ").
%% @spec squeeze(string(), char()) -> string()
%% @doc Returns a string where runs of `Char' are replaced with a single `Char'.
%% ```
%% > estring:squeeze("the cow says moooo", $o).
%% "the cow says mo"
%% > estring:squeeze("the cow says moooo", "o").
%% "the cow says mo"
%% '''
-spec squeeze(string(), char()) -> string().
squeeze(String, Char) when erlang:is_integer(Char) ->
squeeze(String, Char, [], []);
squeeze(String, Char) when is_list(Char) ->
squeeze(String, hd(Char), [], []).
squeeze([], _, _, Result) ->
lists:reverse(Result);
squeeze([H|T], H, H, Result) ->
squeeze(T, H, H, Result);
squeeze([H|T], Char, _, Result) ->
squeeze(T, Char, H, [H|Result]).
%% @spec strip(string()) -> string()
%% @doc Returns a string where leading and trailing whitespace (`" ",\n\t\f\r')
%% has been removed. Note that `string:strip/1' only removes spaces.
%% ```
%% > estring:strip("\t clean me \r\n").
%% "clean me"
%% '''
-spec strip(string()) -> string().
strip(String) ->
strip(String, [], []).
strip([], _, Result) ->
lists:reverse(Result);
strip([H|T], [], []) ->
case whitespace(H) of
true -> strip(T, [], []);
false -> strip(T, [], [H])
end;
strip([H|T], WhiteSpace, Result) ->
case whitespace(H) of
true -> strip(T, [H|WhiteSpace], Result);
false -> strip(T, [], [H|WhiteSpace] ++ Result)
end.
whitespace($\t) -> true;
whitespace($\n) -> true;
whitespace($\f) -> true;
whitespace($\r) -> true;
whitespace($\ ) -> true;
whitespace(_) -> false.
%% @spec strip_split(string(), string()) -> list()
%% @doc Shortcut for
%% `re:split(estring:strip(String), SeparatorString, [{return, list}])'. This is
%% intended for parsing input like csv files.
%% ```
%% > estring:strip_split("first>,<second>,<third\r\n", ">,<").
%% ["first","second","third"]
%% '''
-spec strip_split(string(), string()) -> list().
strip_split(String, SeparatorString) ->
re:split(strip(String), SeparatorString, [{return, list}]).
-ifdef(TEST).
begins_with_test_() ->
[?_assertEqual(true, begins_with("foobar", "foo")),
?_assertEqual(false, begins_with("foobar", "bar"))].
contains_test_() ->
[?_assertEqual(true, contains("foobar", "foo")),
?_assertEqual(true, contains("foobar", "bar")),
?_assertEqual(true, contains("foobar", "oba")),
?_assertEqual(false, contains("foobar", "car"))].
edit_distance_test_() ->
[?_assertEqual(0, edit_distance("computer", "computer")),
%% deletion
?_assertEqual(1, edit_distance("computer", "compter")),
%% substitution
?_assertEqual(1, edit_distance("computer", "camputer")),
%% insertion
?_assertEqual(1, edit_distance("computer", "computter")),
%% transposition
?_assertEqual(1, edit_distance("computer", "comupter")),
%% deletion + substitution + insertion
?_assertEqual(3, edit_distance("computer", "camputte")),
%% transposition + insertion + deletion
?_assertEqual(3, edit_distance("computer", "cmoputte")),
%% transposition + insertion + deletion, with source and target swapped
?_assertEqual(3, edit_distance("cmoputte", "computer")),
?_assertEqual(3, edit_distance("cars", "BaTS", false)),
?_assertEqual(3, edit_distance("cars", "BaTS")),
?_assertEqual(2, edit_distance("cars", "BaTS", true))].
edit_distance_estimate_test_() ->
[?_assertEqual(0.0, edit_distance_estimate("abc", "abc")),
?_assertEqual(0.0, edit_distance_estimate("", "")),
?_assertEqual(1.5, edit_distance_estimate("abc", "")),
?_assertEqual(1.5, edit_distance_estimate("", "abc")),
?_assertEqual(0.0, edit_distance_estimate("abc", "cba")),
?_assertEqual(1.0, edit_distance_estimate("abc", "xbc")),
?_assertEqual(0.5, edit_distance_estimate("abc", "abbc")),
?_assertEqual(1.5, edit_distance_estimate("abcd", "abbcx")),
?_assertEqual(2.0, edit_distance_estimate("abcd", "aabbccdd"))].
ends_with_test_() ->
[?_assertEqual(false, ends_with("foobar", "foo")),
?_assertEqual(true, ends_with("foobar", "bar"))].
format_test_() ->
[?_assertEqual("99 bottles of beer on the wall",
format("~w bottles of ~s on the wall", [99, "beer"])),
?_assertEqual("", format("",[]))].
is_integer_test_() ->
[?_assertEqual(true, ?MODULE:is_integer("0123")),
?_assertEqual(true, ?MODULE:is_integer("456789")),
?_assertEqual(true, ?MODULE:is_integer("9")),
?_assertEqual(false, ?MODULE:is_integer("10.3")),
?_assertEqual(false, ?MODULE:is_integer("01 23")),
?_assertEqual(false, ?MODULE:is_integer("1x2")),
?_assertEqual(false, ?MODULE:is_integer("")),
?_assertEqual(false, ?MODULE:is_integer("abc"))].
random_test() ->
?assertEqual(100, length(random(100))).
rot13_test_() ->
S1 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234",
S2 = "NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm1234",
[?_assertEqual(S2, rot13(S1)),
?_assertEqual(S1, rot13(S2))].
similarity2_test_() ->
[?_assertEqual(0.8, similarity("yaho", "yahoo")),
?_assertEqual(0.75, similarity("espn", "epsn")),
?_assertEqual(0.25, similarity("car", "BaTS")),
?_assertEqual(0.0, similarity("cars", "c")),
?_assertEqual(0.25, similarity("c", "cars")),
?_assertEqual(1.0, similarity("", ""))].
similarity3_test_() ->
[?_assertEqual(0.25, similarity("car", "BaTS", false)),
?_assertEqual(0.5, similarity("cars", "BATS", true))].
similarity4_test_() ->
[?_assertEqual({ok, 1.0}, similarity("yahoo", "yahoo", true, 0.8)),
?_assertEqual({ok, 0.8}, similarity("yahoo", "bahoo", true, 0.8)),
?_assertEqual({ok, 0.8}, similarity("yahoo", "Yahoo", false, 0.7)),
?_assertEqual({error, limit_reached},
similarity("yahoo", "Yahoo", false, 0.9)),
?_assertEqual({error, limit_reached},
similarity("yahoo", "bahoo", true, 0.9))].
similarity_estimate_test_() ->
[?_assertEqual(1.0, similarity_estimate("", "")),
?_assertEqual(0.0, similarity_estimate("abc", "def")),
?_assertEqual(1.0, similarity_estimate("abc", "cba")),
?_assertEqual(0.8, similarity_estimate("abcde", "xbcde"))].
squeeze_test_() ->
[?_assertEqual("i need a squeeze!", squeeze("i need a squeeze!")),
?_assertEqual("i need a squeeze!", squeeze("i need a squeeze!", " ")),
?_assertEqual("yelow moon", squeeze("yellow moon", "l")),
?_assertEqual("babon mon", squeeze("baboon moon", "o")),
?_assertEqual("babon mon", squeeze("baboon moon", $o)),
?_assertEqual("the cow says mo", squeeze("the cow says moooo", $o))].
strip_test_() ->
[?_assertEqual("hello world", strip(" hello world ")),
?_assertEqual("hello world", strip(" \t hello world\f\r")),
?_assertEqual("hello world", strip("hello world")),
?_assertEqual("hello \tworld", strip(" hello \tworld ")),
?_assertEqual("hello world", strip("hello world\n\n \t")),
?_assertEqual("", strip(" ")),
?_assertEqual("", strip(""))].
strip_split_test_() ->
[?_assertEqual(["ab", "cd", "ef"], strip_split(" ab<#>cd<#>ef \n", "<#>")),
?_assertEqual(["a", "b", [], "c" ], strip_split("\ta,b,,c\r\f", ","))].
-endif. | src/estring.erl | 0.664214 | 0.463141 | estring.erl | starcoder |
%% Copyright (c) 2020 Facebook, Inc. and its affiliates.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(erlt_pinning).
-export([parse_transform/2]).
parse_transform(Forms, _Options) ->
St = init_state(),
{Forms1, _St1} = erlt_ast:traverse(Forms, St, fun pre/3, fun post/3),
Forms1.
%% This pass eliminates the ^ notation from pattern variables, either
%% leaving the already-bound variable as it is, or replacing it with a
%% fresh variable in those contexts where shadowing is in effect.
%%
%% To ensure that pinned variables can be accessed whether or not they
%% happen to get shadowed in the same pattern, we always introduce fresh
%% names for them, guaranteed not to become shadowed. We also need a fresh
%% variable for the location in the pattern, since if the pattern is
%% shadowing, we cannot use the original name. For example:
%%
%% X = 42,
%% F = fun({foo, ^X, X}) -> {bar, X} end
%%
%% Here, the inner name X refers to the third element of the tuple, if
%% there is a match, while the ^X refers to the outer X. This will be
%% translated to:
%%
%% X = 42,
%% F = begin
%% _pin_o1=X,
%% fun({foo, _pin_i1, X}) when _pin_i1 =:= _pin_o1 -> {bar, X} end
%% end
%%
%% which ultimately is what the compiler will do anyway with already-bound
%% variables in patterns - the renamings do not cause any runtime overhead.
%%
%% To avoid generating many redundant variables in case the same variable
%% is pinned in many parts of the same construct, we try to reuse the
%% generated outer and inner names where this is possible (inner names
%% cannot be reused in comprehensions, since generators shadow each other).
-record(ctx, {
keep = true,
reuse_inner = true
}).
-record(state, {
%% lift pinnings only for shadowing
keep_nonshadowing = true,
%% stack of contexts
ctx = [] :: #ctx{},
%% stack of variable sets
pinned = [],
var_count = 0
}).
init_state() ->
#state{}.
pre({op, _, '^', {var, Line, V}}, #state{ctx = [#ctx{keep = true} | _]} = St, pattern) ->
%% in constructs where we allow use of already bound variables
%% we just drop the ^ and keep the variable as it is
{{var, Line, V}, St};
pre({op, _, '^', {var, Line, V}}, #state{ctx = [#ctx{keep = false} | _]} = St, pattern) ->
%% generate inner & outer names, reusing the outer if already existing,
%% and replace ^V with inner name; in both cases we ensure that the
%% mapping is stored in the current clause pinnings so that guard tests
%% are generated in all clauses where this variable occurs pinned
case find_pin(V, St) of
{ok, {Vo, Vi}} ->
case St#state.ctx of
[#ctx{reuse_inner = false} | _] ->
{Vi1, St1} = mk_var("__pin_i", St),
{{var, Line, Vi1}, add_pin(V, Vo, Vi1, St1)};
_ ->
{{var, Line, Vi}, add_pin(V, Vo, Vi, St)}
end;
error ->
{Vo, Vi, St1} = mk_var_pair(St),
{{var, Line, Vi}, add_pin(V, Vo, Vi, St1)}
end;
pre({match, _L, _P, _E} = Expr, St, expr) ->
%% since matches have no separate clause substructure we need to
%% push two empty sets here in order to make end_construct() work
St1 = push_empty_pinned(St),
{Expr, begin_construct(St#state.keep_nonshadowing, St1)};
pre({'case', _L, _E, _Cs} = Expr, St, expr) ->
{Expr, begin_construct(St#state.keep_nonshadowing, St)};
pre({'receive', _L, _Cs} = Expr, St, expr) ->
{Expr, begin_construct(St#state.keep_nonshadowing, St)};
pre({'receive', _L, _Cs, _T, _A} = Expr, St, expr) ->
{Expr, begin_construct(St#state.keep_nonshadowing, St)};
pre({'try', _L, _Es, _OCs, _CCs, _A} = Expr, St, expr) ->
{Expr, begin_construct(St#state.keep_nonshadowing, St)};
pre({'fun', _L, {clauses, _Cs}} = Expr, St, expr) ->
{Expr, begin_construct(false, St)};
pre({named_fun, _L, _N, _Cs} = Expr, St, expr) ->
{Expr, begin_construct(false, St)};
pre({lc, _L, _E, _C} = Expr, St, expr) ->
{Expr, begin_construct(false, St)};
pre({bc, _L, _E, _C} = Expr, St, expr) ->
{Expr, begin_construct(false, St)};
pre({clause, _L, _Head, _Guard, _Body} = Clause, #state{ctx = [#ctx{keep = false} | _]} = St, _) ->
%% push an empty set of pinnings for the clause
{Clause, push_empty_pinned(St)};
pre({Generate, _L, _Pattern, _Expr} = Generator, St, expr) when
Generate =:= generate; Generate =:= b_generate
->
%% push an empty set of pinnings for the generator and ensure we do not
%% reuse inner variable names because of shadowing between generators
{Generator, push_empty_pinned(disable_reuse(St))};
pre(Other, St, _) ->
{Other, St}.
disable_reuse(#state{ctx = [Ctx | Cs]} = St) ->
St#state{ctx = [Ctx#ctx{reuse_inner = false} | Cs]}.
begin_construct(Keep, St) ->
%% pushes a new pinning set and context
push_empty_pinned(St#state{ctx = [#ctx{keep = Keep} | St#state.ctx]}).
%% Since we need to collect all pinned vars for a whole expression (like a
%% case), but generate guard tests per clause, and we also need to handle
%% nested constructs in clause bodies, we keep a stack of sets of
%% pinnings, like this: [ClausePins, ExprPins, ...], where the topmost is
%% the collection of pinned variables found in the current clause, and the
%% second is the collection of pinned variables found in previous clauses
%% of the expression. When entering a new matching expression or a new
%% clause, an empty set is pushed onto the stack, which get popped when we
%% leave the clause or expression again. We use orddicts for the sets, so
%% we know that [] is an empty set and we can traverse the sets as lists.
%%
%% We check for existing pinnings both for the current clause and for the
%% expression as a whole so as not to generate lots of redundant names for
%% the same variable, in case there are many clauses containing the same
%% pinned variable.
%% look up existing pinning for a variable, either in the current clause
%% set or the current expression set (if in both, should have the same
%% outer variable)
find_pin(V, #state{pinned = [Ps0, Ps1 | _]}) ->
case orddict:find(V, Ps0) of
{ok, Info} ->
{ok, Info};
error ->
case orddict:find(V, Ps1) of
{ok, Info} -> {ok, Info};
error -> error
end
end.
%% adds a pinned variable to the set of the current clause
add_pin(V, Vo, Vi, #state{pinned = [Ps | Pss]} = St) ->
Ps1 = orddict:store(V, {Vo, Vi}, Ps),
St#state{pinned = [Ps1 | Pss]}.
%% pushes an empty pinned set on the stack
push_empty_pinned(#state{pinned = Pss} = St) ->
St#state{pinned = [[] | Pss]}.
%% pops the top pinned set
pop_pinned(#state{pinned = [Ps | Pss]} = St) ->
{Ps, St#state{pinned = Pss}}.
%% merges the two top pinned sets, returning the previously topmost; if the
%% same key exists in both, we assert that they have the same outer name
%% and make a list of corresponding inner variables (mainly for debugging)
merge_pinned(#state{pinned = [Ps0, Ps1 | Pss]} = St) ->
% assert same Vo and merge Vi to a list
MergeFun = fun(_K, {Vo, Vi0}, {Vo, Vi1}) ->
if
is_list(Vi0) -> {Vo, [Vi1 | Vi0]};
true -> {Vo, [Vi1, Vi0]}
end
end,
Merged = orddict:merge(MergeFun, Ps0, Ps1),
{Ps0, St#state{pinned = [Merged | Pss]}}.
%% resets the pinning state
clear_pinned(St) ->
St#state{pinned = []}.
%% We must set up outer bindings for all pattern matching expressions,
%% and tests for equality to clause guards for all inner variables.
%% Note: forgetting to pop the pinning stack will have strange effects.
post(Form, St, form) ->
%% ensure pinning info doesn't propagate between forms
{Form, clear_pinned(St)};
post({match, L, Pat, Expr} = Match, #state{ctx = [#ctx{keep = false} | _]} = St, expr) ->
%% pop the set of pinnings for this match and add them to the total set
%% for the enclosing expression
{Pins, St1} = merge_pinned(St),
%% matches are equivalent to case expressions with a single clause, but
%% we must preserve the 'badmatch' error and not get a 'case_clause',
%% and it is then simplest to put the extra tests in a separate 'if'
%% following the match, raising 'badmatch' in the catch-all; we need an
%% extra variable to carry the matched value past the tests
%% only rewrite matches if they actually contain pinned variables
case Pins of
[] ->
end_construct(Match, St1);
_ ->
Tests = mk_tests(Pins),
{V, St2} = mk_var("__pin_m", St1),
%% note that we want to always allow begin...end blocks to
%% export bindings, otherwise it gets much more complicated to
%% replace a single expression with a code sequence like this
Expr1 =
{block, L, [
{match, L, {var, L, V}, Expr},
{match, L, Pat, {var, L, V}},
{'if', L, [
{clause, L, [], [Tests], [{var, L, V}]},
{clause, L, [], [[{atom, L, true}]], [
{call, L, {remote, L, {atom, L, erlang}, {atom, L, error}}, [
{tuple, L, [
{atom, L, badmatch},
{var, L, V}
]}
]}
]}
]}
]},
end_construct(Expr1, St2)
end;
post({match, _L, _P, _E} = Expr, #state{ctx = [#ctx{keep = true} | _]} = St, expr) ->
end_construct(Expr, St);
post({'case', _L, _E, _Cs} = Expr, St, expr) ->
end_construct(Expr, St);
post({'receive', _L, _Cs} = Expr, St, expr) ->
end_construct(Expr, St);
post({'receive', _L, _Cs, _T, _A} = Expr, St, expr) ->
end_construct(Expr, St);
post({'try', _L, _Es, _OCs, _CCs, _A} = Expr, St, expr) ->
end_construct(Expr, St);
post({'fun', _L, {clauses, _Cs}} = Expr, St, expr) ->
end_construct(Expr, St);
post({named_fun, _L, _N, _Cs} = Expr, St, expr) ->
end_construct(Expr, St);
post({lc, L, E, C}, St, expr) ->
%% flatten the list of generators-and-tests, see the generator case below
Expr1 = {lc, L, E, lists:flatten(C)},
end_construct(Expr1, St);
post({bc, L, E, C}, St, expr) ->
%% flatten the list of generators-and-tests, see the generator case below
Expr = {bc, L, E, lists:flatten(C)},
end_construct(Expr, St);
post({clause, L, Head, Guard, Body}, #state{ctx = [#ctx{keep = false} | _]} = St, _) ->
%% pop the set of pinnings for this clause and add them to the total
%% set for the enclosing expression
{Pins, St1} = merge_pinned(St),
%% add corresponding guard tests to the clause; note that guards are
%% lists of lists of tests, but you can't have empty inner lists
Tests = mk_tests(Pins),
Guard1 =
case Guard of
[] when Tests =/= [] -> [Tests];
[] -> [];
[_ | _] -> [Tests ++ Conj || Conj <- Guard]
end,
{{clause, L, Head, Guard1, Body}, St1};
post({Generate, _L, _Pattern, _Expr}, St, expr) when
Generate =:= generate; Generate =:= b_generate
->
%% pop the set of pinnings for this generator and add them to the total
%% set for the enclosing expression
{Pins, St1} = merge_pinned(St),
%% insert corresponding guard tests right after the generator, but note
%% that afterwards we need to flatten the list of generators-and-tests
%% in the post-handling for the comprehension as a whole
Tests = mk_tests(Pins),
{[{Generate, _L, _Pattern, _Expr} | Tests], St1};
post(Other, St, _) ->
{Other, St}.
%% pops the total set of pinnings for an expression and adds corresponding
%% outer bindings to the expression; also pops the context
end_construct(Expr, #state{ctx = [_ | Ctx]} = St) ->
{Pins, St1} = pop_pinned(St),
{mk_bind(Pins, Expr), St1#state{ctx = Ctx}}.
mk_bind([], Expr) ->
Expr;
mk_bind(Pins, Expr) ->
{block, 0, mk_bind_1(Pins, Expr)}.
mk_bind_1([{V, {Vo, _Vi}} | Pins], Expr) ->
[{match, 0, {var, 0, Vo}, {var, 0, V}} | mk_bind_1(Pins, Expr)];
mk_bind_1([], Expr) ->
[Expr].
mk_tests([{_V, {Vo, Vi}} | Pins]) ->
[{op, 0, '=:=', {var, 0, Vi}, {var, 0, Vo}} | mk_tests(Pins)];
mk_tests([]) ->
[].
mk_var(Prefix, #state{var_count = N} = St) ->
V = list_to_atom(Prefix ++ integer_to_list(N)),
{V, St#state{var_count = N + 1}}.
mk_var_pair(#state{var_count = N} = St) ->
Vo = list_to_atom("__pin_o" ++ integer_to_list(N)),
Vi = list_to_atom("__pin_i" ++ integer_to_list(N)),
{Vo, Vi, St#state{var_count = N + 1}}. | erltc/src/erlt_pinning.erl | 0.622 | 0.446495 | erlt_pinning.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% Machi: a small village of replicated files
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(machi_util).
-export([repair_merge/1]).
-ifdef(TEST).
-ifdef(EQC).
-include_lib("eqc/include/eqc.hrl").
-define(QC_OUT(P),
eqc:on_output(fun(Str, Args) -> io:format(user, Str, Args) end, P)).
-endif.
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
-endif.
%% repair_merge(): Given a list of lists of {Filename, StartOff, EndOff, FLU},
%% merge them into a single list of {Filename, StartOff, EndOff, FLU_list}
%% where the FLUs in FLU_list all have a copy of {Filename, StartOff, EndOff}.
repair_merge(ListOfLists) ->
repair_merge2(lists:sort(lists:append(ListOfLists))).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% repair_merge2() invariant, to be "enforced"/honored by the caller:
%% L __must__ must be sorted.
repair_merge2([]=L) ->
L;
repair_merge2([_]=L) ->
L;
repair_merge2([H1, H2|T]) ->
repair_merge_pair(H1, H2, T).
repair_merge_pair({F1, _, _, _}=H1, {F2, _, _, _}=H2, T)
when F1 /= F2 ->
%% No overlap: different files
[H1|repair_merge2([H2|T])];
repair_merge_pair({_F1, _P1a, P1z, _M1s}=H1, {_F2, P2a, _P2z, _}=H2, T)
when P1z < P2a ->
%% No overlap: same file, H1 is strictly earlier than H2
[H1|repair_merge2([H2|T])];
repair_merge_pair({F1, P1a, P1z, M1s}=_H1, {F2, P2a, P2z, M2s}=_H2, T)
when F1 == F2, P1a == P2a, P1z == P2z ->
%% Exact file & range: merge Ms
NewMs = lists:usort(M1s ++ M2s),
repair_merge2([{F1, P1a, P1z, NewMs}|T]);
repair_merge_pair(F1, F2, T) ->
Split = split_overlapping(F1, F2),
%% If we don't sort *everything* at this step, then we can end up
%% with an invariant violation for repair_merge2(), which is that
%% all items in L __must__ be sorted.
repair_merge2(lists:sort(Split ++ T)).
split_overlapping({F1, _, _, _}=H1, {F2, _, _, _}=H2) when F1 /= F2 ->
%% These are different files, why were we called?
throw({whaaa, H1, H2}),
[H1, H2];
split_overlapping({F, F1a, F1z, M1s}=H1, {F, F2a, F2z, M2s}=H2) when H1 =< H2 ->
if F1a == F2a, F1z == F2z ->
%% These are the same, why were we called?
[{F, F1a, F1z, lists:usort(M1s ++ M2s)}];
F1a == F2a ->
%% 100% overlap, starting at the beginning of H1
[{F, F2a, F1z, lists:usort(M1s ++ M2s)},
{F, F1z + 1, F2z, M2s}];
F1z == F2z ->
%% 100% overlap, ending at the end of H1
[{F, F1a, F2a - 1, M1s},
{F, F2a, F1z, lists:usort(M1s ++ M2s)}];
F2a < F1z, F2z < F1z ->
%% 100% overlap, H2 is in the middle of H1
[{F, F1a, F2a - 1, M1s},
{F, F2a, F2z, lists:usort(M1s ++ M2s)},
{F, F2z + 1, F1z, M1s}];
true ->
%% partial overlap
[{F, F1a, F2a - 1, M1s},
{F, F2a, F1z, lists:usort(M1s ++ M2s)},
{F, F1z + 1, F2z, M2s}]
end;
split_overlapping(H1, H2) ->
split_overlapping(H2, H1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% | prototype/chain-manager/src/machi_util.erl | 0.5 | 0.414366 | machi_util.erl | starcoder |
%% @author <NAME> <<EMAIL>>
%% @copyright 2009-2014 <NAME>
%%
%% @doc Utility functions for datetime handling and representation.
%% Copyright 2009-2014 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(z_datetime).
-author("<NAME> <<EMAIL>").
%% interface functions
-export([
to_local/2,
to_utc/2,
to_datetime/1,
to_datetime/2,
format/3,
timesince/2,
timesince/3,
timesince/4,
timesince/5,
week_start/0,
week_start/2,
days_in_year/1,
prev_year/1,
prev_month/1,
prev_day/1,
prev_hour/1,
prev_minute/1,
prev_second/1,
next_year/1,
next_month/1,
next_day/1,
next_hour/1,
next_minute/1,
next_second/1,
diff/2,
month_boundaries/1,
week_boundaries/1,
week_boundaries/2,
timestamp/0,
timestamp_to_datetime/1,
datetime_to_timestamp/1,
undefined_if_invalid_date/1
]).
-include_lib("zotonic.hrl").
%% @doc Convert a time to the local context time using the current timezone.
-spec to_local(calendar:datetime()|undefined|time_not_exists, string()|binary()|#context{}) -> calendar:datetime() | undefined.
to_local(undefined, _Tz) ->
undefined;
to_local(time_not_exists, _Tz) ->
undefined;
to_local({_Y, _M, _D} = Date, Tz) ->
to_local({Date, {0,0,0}}, Tz);
to_local({{9999, _, _}, _} = DT, _Tz) ->
DT;
to_local(DT, <<"UTC">>) ->
DT;
to_local(DT, <<"GMT">>) ->
DT;
to_local(DT, <<>>) ->
DT;
to_local(DT, #context{} = Context) ->
to_local(DT, z_context:tz(Context));
to_local(DT, Tz) ->
case qdate:to_date(z_convert:to_list(Tz), {DT, "GMT"}) of
{error, unknown_tz} ->
lager:warning("Unknown timezone ~p for to_local of ~p", [Tz, DT]),
DT;
{error, Error} ->
lager:warning("to_utc error ~p for to_utc of ~p", [Error, DT]),
DT;
{ambiguous, _Standard, Daylight} ->
Daylight;
time_not_exists ->
undefined;
NewDT ->
NewDT
end.
%% @doc Convert a time to the local context time using the current timezone.
-spec to_utc(calendar:datetime()|undefined|time_not_exists, string()|binary()|#context{}) -> calendar:datetime() | undefined.
to_utc(undefined, _Tz) ->
undefined;
to_utc(time_not_exists, _Tz) ->
undefined;
to_utc({_Y, _M, _D} = Date, Tz) ->
to_utc({Date, {0,0,0}}, Tz);
to_utc({{9999, _, _}, _} = DT, _Tz) ->
DT;
to_utc(DT, <<"UTC">>) ->
DT;
to_utc(DT, <<"GMT">>) ->
DT;
to_utc(DT, <<>>) ->
DT;
to_utc(DT, #context{} = Context) ->
to_utc(DT, z_context:tz(Context));
to_utc(DT, Tz) ->
case qdate:to_date("GMT", {DT, z_convert:to_list(Tz)}) of
{error, unknown_tz} ->
lager:warning("Unknown timezone ~p for to_utc of ~p", [Tz, DT]),
DT;
{error, Error} ->
lager:warning("to_utc error ~p for to_utc of ~p", [Error, DT]),
DT;
{ambiguous, _Standard, Daylight} ->
Daylight;
time_not_exists ->
undefined;
NewDT ->
NewDT
end.
%% @doc Convert an input to a (universal) datetime, using to_date/1 and
%% to_time/1. When the input is a string, it is expected to be in iso
%% 8601 format, although it can also handle timestamps without time
%% zones. The time component of the datetime is optional.
to_datetime(undefined) ->
undefined;
to_datetime(N) when is_integer(N) ->
z_datetime:timestamp_to_datetime(N);
to_datetime(B) when is_binary(B); is_list(B) ->
case z_utils:only_digits(B) of
true ->
to_datetime(z_convert:to_integer(B));
false ->
to_dt(B, calendar:universal_time())
end;
to_datetime(DT) ->
to_dt(DT, calendar:universal_time()).
to_datetime(DT, Tz) ->
Now = to_local(calendar:universal_time(), Tz),
to_utc(to_dt(DT, Now), Tz).
to_dt({{_,_,_},{_,_,_}} = DT, _Now) -> DT;
to_dt({_,_,_} = D, _Now) -> {D, {0,0,0}};
to_dt(B, Now) when is_binary(B) -> to_dt(binary_to_list(B), Now);
to_dt("now", Now) -> Now;
to_dt("today", Now) -> Now;
to_dt("tomorrow", Now) -> relative_time(1, '+', "day", Now);
to_dt("yesterday", Now) -> relative_time(1, '+', "day", Now);
to_dt("+"++Relative, Now) -> to_relative_time('+', Relative, Now);
to_dt("-"++Relative, Now) -> to_relative_time('-', Relative, Now);
to_dt(DT, _Now) -> z_convert:to_datetime(DT).
to_relative_time(Op, S, Now) ->
Ts = string:tokens(S, " "),
Ts1 = [ T || T <- Ts, T =/= [] ],
relative_time(1, Op, Ts1, Now).
relative_time(_N, Op, [[C|_]=N|Ts], Now) when C >= $0, C =< $9 ->
relative_time(list_to_integer(N), Op, Ts, Now);
relative_time(N, '+', ["minute"|_], Now) -> relative_time_n(N, fun next_minute/1, Now);
relative_time(N, '+', ["hour"|_], Now) -> relative_time_n(N, fun next_hour/1, Now);
relative_time(N, '+', ["day"|_], Now) -> relative_time_n(N, fun next_day/1, Now);
relative_time(N, '+', ["sunday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(7, Now));
relative_time(N, '+', ["monday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(1, Now));
relative_time(N, '+', ["tuesday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(2, Now));
relative_time(N, '+', ["wednesday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(3, Now));
relative_time(N, '+', ["thursday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(4, Now));
relative_time(N, '+', ["friday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(5, Now));
relative_time(N, '+', ["saturday"|_], Now) -> relative_time_n(N*7, fun next_day/1, week_start(6, Now));
relative_time(N, '+', ["week"|_], Now) -> relative_time_n(N*7, fun next_day/1, Now);
relative_time(N, '+', ["month"|_], Now) -> relative_time_n(N, fun next_month/1, Now);
relative_time(N, '+', ["year"|_], Now) -> relative_time_n(N, fun next_year/1, Now);
relative_time(N, '-', ["day"|_], Now) -> relative_time_n(N, fun prev_day/1, Now);
relative_time(N, '-', ["sunday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(7, Now));
relative_time(N, '-', ["monday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(1, Now));
relative_time(N, '-', ["tuesday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(2, Now));
relative_time(N, '-', ["wednesday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(3, Now));
relative_time(N, '-', ["thursday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(4, Now));
relative_time(N, '-', ["friday"|_], Now) -> relative_time_n(N*7, fun prev_day/1, week_start(5, Now));
relative_time(N, '-', ["week"|_], Now) -> relative_time_n(N*7, fun prev_day/1, Now);
relative_time(N, '-', ["month"|_], Now) -> relative_time_n(N, fun prev_month/1, Now);
relative_time(N, '-', ["year"|_], Now) -> relative_time_n(N, fun prev_year/1, Now);
relative_time(_N, _Op, _Unit, _Now) -> undefined.
relative_time_n(N, _F, DT) when N =< 0 ->
DT;
relative_time_n(N, F, DT) ->
relative_time_n(N-1, F, F(DT)).
%% @doc Format a date/time. Convenience function which calls the zotonic erlydtl stub.
format(Date, FormatString, Context) ->
erlydtl_dateformat:format(Date, FormatString, Context).
%% @doc Show a humanized version of a relative datetime. Like "4 months, 3 days ago".
%% @spec timesince(Date, Context) -> string()
timesince(Date, Context) ->
timesince(Date, calendar:universal_time(), Context).
%% @doc Show a humanized version of a period between two dates. Like "4 months, 3 days ago".
%% @spec timesince(Date, BaseDate, Context) -> string()
timesince(Date, Base, Context) ->
timesince(Date, Base, ?__(<<"ago">>, Context), ?__(<<"now">>, Context), ?__(<<"in">>, Context), 2, Context).
timesince(Date, Base, IndicatorStrings, Context) ->
timesince(Date, Base, IndicatorStrings, 2, Context).
%% @spec timesince(Date, BaseDate, IndicatorStrings, Mode, Context) -> string()
%% @doc Show a humanized version of a period between two dates. Like "4 months, 3 days ago".
%% `WhenText' is a string containing a maximum of three tokens. Example "ago, now, in"
timesince(Date, Base, IndicatorStrings, Mode, Context) ->
%% strip the tokens, so the user can specify the text more flexible.
case [string:strip(S, both) || S <- string:tokens(z_convert:to_list(IndicatorStrings), ",")] of
[AgoText, NowText, InText] ->
timesince(Date, Base, AgoText, NowText, InText, Mode, Context);
[AgoText, NowText] ->
timesince(Date, Base, AgoText, NowText, "", Mode, Context);
[AgoText] ->
timesince(Date, Base, AgoText, "", "", Mode, Context);
[] ->
timesince(Date, Base, "", "", "", Mode, Context)
end.
%% @doc Show a humanized version of a period between two dates. Like "4 months, 3 days ago".
%% @spec timesince(Date, BaseDate, AgoText, NowText, InText, Mode, Context) -> string()
timesince(undefined, _, _AgoText, _NowText, _InText, _Mode, _Context) ->
"";
timesince(_, undefined, _AgoText, _NowText, _InText, _Mode, _Context) ->
"";
timesince(Date, Base, _AgoText, NowText, _InText, _Mode, _Context) when Date == Base ->
NowText;
timesince(Date, Base, _AgoText, _NowText, InText, Mode, Context) when Date > Base ->
combine({InText, combine(reldate(Base, Date, Mode, Context))}, " ");
timesince(Date, Base, AgoText, _NowText, _InText, Mode, Context) ->
combine({combine(reldate(Date, Base, Mode, Context)), AgoText}, " ").
combine(Tup) -> combine(Tup, ", ").
combine({"", B}, _Sep) -> B;
combine({A,""}, _Sep) -> A;
combine({<<>>, B}, _Sep) -> B;
combine({A,<<>>}, _Sep) -> A;
combine({A,B}, Sep) -> [A,Sep,B].
%% @doc Return a string describing the relative date difference.
reldate(D1, D2, 1, Context) ->
{A, _} = reldate(D1,D2, 2, Context),
{A,[]};
reldate(D1, D2, 2, Context) ->
case diff(D1,D2) of
{{0,0,0},{0,0,0}} -> {?__(<<"now">>,Context),[]};
{{0,0,0},{0,0,S}} when S < 10 -> {?__(<<"moments">>,Context),[]};
{{0,0,0},{0,0,S}} -> {plural(S, ?__(<<"second">>,Context), ?__(<<"seconds">>,Context)),
[]};
{{0,0,0},{0,I,S}} -> {plural(I, ?__(<<"minute">>,Context), ?__(<<"minutes">>,Context)),
plural(S, ?__(<<"second">>,Context), ?__(<<"seconds">>,Context))};
{{0,0,0},{H,I,_}} -> {plural(H, ?__(<<"hour">>,Context), ?__(<<"hours">>,Context)),
plural(I, ?__(<<"minute">>,Context), ?__(<<"minutes">>,Context))};
{{0,0,D},{H,_,_}} -> {plural(D, ?__(<<"day">>,Context), ?__(<<"days">>,Context)),
plural(H, ?__(<<"hour">>,Context), ?__(<<"hours">>,Context))};
{{0,M,D},{_,_,_}} -> {plural(M, ?__(<<"month">>,Context), ?__(<<"months">>,Context)),
plural(D, ?__(<<"day">>,Context), ?__(<<"days">>,Context))};
{{Y,M,_},{_,_,_}} -> {plural(Y, ?__(<<"year">>,Context), ?__(<<"years">>,Context)),
plural(M, ?__(<<"month">>,Context), ?__(<<"months">>,Context))}
end.
plural(0,_Single,_Plural) ->
"";
plural(1,Single,_Plural) ->
[$1, 32, Single];
plural(N,_Single,Plural) ->
[integer_to_list(N), 32, Plural].
%% @doc Return the date the current week starts (monday)
week_start() ->
week_start(1, calendar:universal_time()).
week_start(StartDayNr, {D,_}) ->
Today = {D,{0,0,0}},
WeekDay = calendar:day_of_the_week(D),
if
WeekDay > StartDayNr -> relative_time_n(WeekDay - StartDayNr, fun prev_day/1, Today);
WeekDay =:= StartDayNr -> Today;
WeekDay < StartDayNr -> relative_time_n(WeekDay - StartDayNr + 7, fun prev_day/1, Today)
end.
%% @doc Return the date one year earlier.
prev_year({{Y,2,29},T}) ->
{{Y-1,3,1}, T};
prev_year({{Y,M,D},T}) ->
{{Y-1,M,D}, T}.
%% @doc Return the date one month earlier.
prev_month({{Y,1,D},T}) -> {{Y-1,12,D},T};
prev_month({{Y,M,D},T}) -> {{Y,M-1,D}, T}.
%% @doc Return the date one day earlier.
prev_day({{_,_,1},_} = Date) ->
{{Y1,M1,_},T1} = prev_month(Date),
{{Y1,M1,calendar:last_day_of_the_month(Y1,M1)}, T1};
prev_day({{Y,M,D},T}) ->
{{Y,M,D-1}, T};
prev_day({_,_,_} = Date) ->
prev_day({Date, {0,0,0}}).
%% @doc Return the date one hour earlier.
prev_hour({_,{0,_,_}} = Date) ->
{YMD,{_,I,S}} = prev_day(Date),
{YMD,{23,I,S}};
prev_hour({YMD,{H,I,S}}) ->
{YMD, {H-1,I,S}}.
%% @doc Return the date one minute earlier.
prev_minute({_,{_,0,_}} = Date) ->
{YMD,{H,_,S}} = prev_hour(Date),
{YMD,{H,59,S}};
prev_minute({YMD,{H,I,S}}) ->
{YMD, {H,I-1,S}}.
%% @doc Return the date one second earlier.
prev_second({_,{_,_,0}} = Date) ->
{YMD,{H,I,_}} = prev_minute(Date),
{YMD,{H,I,59}};
prev_second({YMD,{H,I,S}}) ->
{YMD, {H,I,S-1}}.
%% @doc Return the date one year later.
next_year({{Y,2,29},T}) ->
{{Y+1,3,1}, T};
next_year({{Y,M,D},T}) ->
{{Y+1,M,D}, T}.
%% @doc Return the date one month later.
next_month({{Y,12,D},T}) -> {{Y+1,1,D},T};
next_month({{Y,M,D},T}) -> {{Y,M+1,D}, T}.
%% @doc Return the date one day later.
next_day({{Y,M,D},T} = Date) ->
case calendar:last_day_of_the_month(Y,M) of
D ->
{{Y1,M1,_},T1} = next_month(Date),
{{Y1,M1,1},T1};
_ ->
{{Y,M,D+1},T}
end;
next_day({_,_,_} = Date) ->
next_day({Date, {0,0,0}}).
%% @doc Return the date one hour later.
next_hour({_,{23,_,_}} = Date) ->
{YMD,{_,I,S}} = next_day(Date),
{YMD,{0,I,S}};
next_hour({YMD,{H,I,S}}) ->
{YMD, {H+1,I,S}}.
%% @doc Return the date one minute later.
next_minute({_,{_,59,_}} = Date) ->
{YMD,{H,_,S}} = next_hour(Date),
{YMD,{H,0,S}};
next_minute({YMD,{H,I,S}}) ->
{YMD, {H,I+1,S}}.
%% @doc Return the date one second later.
next_second({_,{_,_,59}} = Date) ->
{YMD,{H,I,_}} = next_minute(Date),
{YMD,{H,I,0}};
next_second({YMD,{H,I,S}}) ->
{YMD, {H,I,S+1}}.
%% @doc Return the number of days in a certain year.
days_in_year(Y) ->
case calendar:is_leap_year(Y) of
true -> 366;
false -> 365
end.
%% @doc Return the absolute difference between two dates. Does not take daylight saving into account.
diff({Y,M,D}, Date2) when is_integer(Y), is_integer(M), is_integer(D) ->
diff({{Y,M,D},{0,0,0}}, Date2);
diff(Date1, {Y,M,D}) when is_integer(Y), is_integer(M), is_integer(D) ->
diff(Date1, {{Y,M,D},{0,0,0}});
diff(Date1, Date2) when Date1 < Date2 ->
diff(Date2,Date1);
diff({YMD1,{H1,I1,S1}}, {_,{_,_,S2}} = Date2) when S2 > S1 ->
NextDate2 = next_minute(Date2),
diff({YMD1,{H1,I1,S1+60}},NextDate2);
diff({YMD1,{H1,I1,S1}}, {_,{_,I2,_}} = Date2) when I2 > I1 ->
NextDate2 = next_hour(Date2),
diff({YMD1,{H1,I1+60,S1}},NextDate2);
diff({YMD1,{H1,I1,S1}}, {_,{H2,_,_}} = Date2) when H2 > H1 ->
NextDate2 = next_day(Date2),
diff({YMD1,{H1+24,I1,S1}},NextDate2);
diff({{Y1,M1,D1},T1}, {{Y2,M2,D2},_} = Date2) when D2 > D1 ->
NextDate2 = next_month(Date2),
diff({{Y1,M1,D1+calendar:last_day_of_the_month(Y2,M2)},T1},NextDate2);
diff({{Y1,M1,D1},T1}, {{_,M2,_},_} = Date2) when M2 > M1 ->
NextDate2 = next_year(Date2),
diff({{Y1,M1+12,D1},T1},NextDate2);
diff({{Y1,M1,D1},{H1,I1,S1}}, {{Y2,M2,D2},{H2,I2,S2}}) ->
{{Y1-Y2, M1-M2, D1-D2}, {H1-H2, I1-I2, S1-S2}}.
%% @doc Return the month-boundaries of a given date
month_boundaries({{Y,M,_}, _}) ->
Start = {{Y,M,1}, {0,0,0}},
{End,_} = prev_day(next_month(Start)),
{Start, {End, {23,59,59}}}.
%% @doc Return the week-boundaries of a given date.
%% WeekStart is optional, and determines on which day a week starts.
week_boundaries(Date) ->
week_boundaries(Date, 1).
week_boundaries({D,_T}=Date, WeekStart) ->
DOW = calendar:day_of_the_week(D),
Start = -weeknorm(DOW - WeekStart),
{S,_} = day_add(Date, Start),
{E,_} = day_add(Date, Start + 6),
{ {S, {0,0,0}}, {E, {23,59,59}} }.
weeknorm(D) when D < 0 ->
weeknorm(D+7);
weeknorm(D) when D > 6 ->
weeknorm(D-7);
weeknorm(D) ->
D.
day_add(Date, 0) ->
Date;
day_add(Date, Num) when Num < 0 ->
day_add(prev_day(Date), Num + 1);
day_add(Date, Num) when Num > 0 ->
day_add(next_day(Date), Num - 1).
% Constant value of calendar:datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
-define(SECS_1970, 62167219200).
%% @doc Calculate the current UNIX timestamp (seconds since Jan 1, 1970)
timestamp() ->
calendar:datetime_to_gregorian_seconds(calendar:universal_time())-?SECS_1970.
%% @doc Translate UNIX timestamp to local datetime.
timestamp_to_datetime(Seconds) ->
calendar:gregorian_seconds_to_datetime(?SECS_1970 + Seconds).
%% @doc Translate a local time date to UNIX timestamp
datetime_to_timestamp(?ST_JUTTEMIS) ->
undefined;
datetime_to_timestamp(undefined) ->
undefined;
datetime_to_timestamp(DT) ->
calendar:datetime_to_gregorian_seconds(DT) - ?SECS_1970.
%% @doc Return 'undefined' if a given date is invalid
undefined_if_invalid_date({{Y,M,D},{H,I,S}} = Date) when
is_integer(Y), is_integer(M), is_integer(D),
is_integer(H), is_integer(I), is_integer(S),
H >= 0, H =< 23, I >= 0, I =< 59, S >= 0, S =< 59,
M >= 1, M =< 12, D >= 1, Y >= -4713, Y =< 9999
->
MaxDays = case M of
1 -> 31;
3 -> 31;
5 -> 31;
7 -> 31;
8 -> 31;
10 -> 31;
12 -> 31;
2 ->
case Y rem 400 of
0 -> 29;
_ ->
case Y rem 100 of
0 -> 28;
_ ->
case Y rem 4 of
0 -> 29;
_ -> 28
end
end
end;
_ ->
30
end,
case D =< MaxDays of
true -> Date;
false -> undefined
end;
undefined_if_invalid_date(_) ->
undefined. | src/support/z_datetime.erl | 0.505127 | 0.401629 | z_datetime.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2010-2020. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(diameter_codegen).
%%
%% This module generates erl/hrl files for encode/decode modules from
%% the orddict parsed from a dictionary file by diameter_dict_util.
%% The generated code is simple (one-liners), and is called from
%% diameter_gen. The orddict itself is returned by dict/0 in the
%% generated module and diameter_dict_util calls this function when
%% importing dictionaries as a consequence of @inherits sections. That
%% is, @inherits introduces a dependency on the beam file of another
%% dictionary.
%%
-export([from_dict/4,
is_printable_ascii/1]). %% used by ?TERM/1 in diameter_forms.hrl
-include("diameter_forms.hrl").
-include("diameter_vsn.hrl").
-define(S, atom_to_list).
-define(A, list_to_atom).
-define(Atom(T), ?ATOM(?A(T))).
%% ===========================================================================
-spec from_dict(File, ParseD, Opts, Mode)
-> ok
| term()
when File :: string(),
ParseD :: orddict:orddict(),
Opts :: list(),
Mode :: parse | forms | erl | hrl.
from_dict(File, ParseD, Opts, Mode) ->
Outdir = proplists:get_value(outdir, Opts, "."),
Return = proplists:get_value(return, Opts, false),
Mod = mod(File, orddict:find(name, ParseD)),
putr(verbose, lists:member(verbose, Opts)),
try
maybe_write(Return, Mode, Outdir, Mod, gen(Mode, ParseD, ?A(Mod)))
after
eraser(verbose)
end.
mod(File, error) ->
filename:rootname(filename:basename(File));
mod(_, {ok, Mod}) ->
Mod.
maybe_write(true, _, _, _, T) ->
T;
maybe_write(_, Mode, Outdir, Mod, T) ->
Path = filename:join(Outdir, Mod), %% minus extension
do_write(Mode, [Path, $., ext(Mode)], T).
ext(parse) ->
"D";
ext(forms) ->
"F";
ext(T) ->
?S(T).
do_write(M, Path, T)
when M == parse;
M == forms ->
write_term(Path, T);
do_write(_, Path, T) ->
write(Path, T).
write(Path, T) ->
write(Path, "~s", T).
write_term(Path, T) ->
write(Path, "~p.~n", T).
write(Path, Fmt, T) ->
{ok, Fd} = file:open(Path, [write]),
io:fwrite(Fd, Fmt, [T]),
ok = file:close(Fd).
%% Optional reports when running verbosely.
report(What, Data) ->
report(getr(verbose), What, Data),
Data.
report(true, Tag, Data) ->
io:format(">>~n>> ~p ~p~n", [Tag, Data]);
report(false, _, _) ->
ok.
putr(Key, Value) ->
put({?MODULE, Key}, Value).
getr(Key) ->
get({?MODULE, Key}).
eraser(Key) ->
erase({?MODULE, Key}).
%% ===========================================================================
%% ===========================================================================
is_printable_ascii(C) ->
16#20 =< C andalso C =< 16#7F.
get_value(Key, Plist) ->
proplists:get_value(Key, Plist, []).
gen(parse, ParseD, _Mod) ->
[?VERSION | ParseD];
gen(forms, ParseD, Mod) ->
preprocess(Mod, erl_forms(Mod, ParseD));
gen(hrl, ParseD, Mod) ->
gen_hrl(Mod, ParseD);
gen(erl, ParseD, Mod) ->
[header(), prettypr(erl_forms(Mod, ParseD)), $\n].
erl_forms(Mod, ParseD) ->
Forms = [[{?attribute, module, Mod},
{?attribute, compile, {parse_transform, diameter_exprecs}},
{?attribute, compile, nowarn_unused_function},
{?attribute, dialyzer, no_return}],
make_hrl_forms(ParseD),
[{?attribute, export, [{name, 0},
{id, 0},
{vendor_id, 0},
{vendor_name, 0},
{decode_avps, 3}, %% in diameter_gen.hrl
{encode_avps, 3}, %%
{grouped_avp, 4}, %%
{msg_name, 2},
{msg_header, 1},
{rec2msg, 1},
{msg2rec, 1},
{name2rec, 1},
{avp_name, 2},
{avp_arity, 1},
{avp_arity, 2},
{avp_header, 1},
{avp, 4},
{enumerated_avp, 3},
{empty_value, 2},
{dict, 0}]},
%% diameter.hrl is included for #diameter_avp
{?attribute, include_lib, "diameter/include/diameter.hrl"},
{?attribute, include_lib, "diameter/include/diameter_gen.hrl"},
f_name(Mod),
f_id(ParseD),
f_vendor_id(ParseD),
f_vendor_name(ParseD),
f_msg_name(ParseD),
f_msg_header(ParseD),
f_rec2msg(ParseD),
f_msg2rec(ParseD),
f_name2rec(ParseD),
f_avp_name(ParseD),
f_avp_arity_1(ParseD),
f_avp_arity_2(ParseD),
f_avp_header(ParseD),
f_avp(ParseD),
f_enumerated_avp(ParseD),
f_empty_value(ParseD),
f_dict(ParseD),
{eof, ?LINE}]],
lists:append(Forms).
make_hrl_forms(ParseD) ->
{_Prefix, MsgRecs, GrpRecs, ImportedGrpRecs}
= make_record_forms(ParseD),
RecordForms = MsgRecs ++ GrpRecs ++ lists:flatmap(fun({_,Fs}) -> Fs end,
ImportedGrpRecs),
RecNames = lists:map(fun({attribute,_,record,{N,_}}) -> N end,
RecordForms),
%% export_records is used by the diameter_exprecs parse transform.
[{?attribute, export_records, RecNames} | RecordForms].
make_record_forms(ParseD) ->
Prefix = prefix(ParseD),
MsgRecs = a_record(Prefix, fun msg_proj/1, get_value(messages, ParseD)),
GrpRecs = a_record(Prefix, fun grp_proj/1, get_value(grouped, ParseD)),
ImportedGrpRecs = [{M, a_record(Prefix, fun grp_proj/1, Gs)}
|| {M,Gs} <- get_value(import_groups, ParseD)],
{to_upper(Prefix), MsgRecs, GrpRecs, ImportedGrpRecs}.
msg_proj({Name, _, _, _, Avps}) ->
{Name, Avps}.
grp_proj({Name, _, _, Avps}) ->
{Name, Avps}.
%% a_record/3
a_record(Prefix, ProjF, L) ->
lists:map(fun(T) -> a_record(ProjF(T), Prefix) end, L).
a_record({Nm, Avps}, Prefix) ->
Name = list_to_atom(Prefix ++ Nm),
Fields = lists:map(fun field/1, Avps),
{?attribute, record, {Name, Fields}}.
field(Avp) ->
{Name, Arity} = avp_info(Avp),
if 1 == Arity ->
{?record_field, ?Atom(Name)};
true ->
{?record_field, ?Atom(Name), ?NIL}
end.
%%% ------------------------------------------------------------------------
%%% # name/0
%%% ------------------------------------------------------------------------
f_name(Name) ->
{?function, name, 0,
[{?clause, [], [], [?ATOM(Name)]}]}.
%%% ------------------------------------------------------------------------
%%% # id/0
%%% ------------------------------------------------------------------------
f_id(ParseD) ->
{?function, id, 0,
[c_id(orddict:find(id, ParseD))]}.
c_id({ok, Id}) ->
{?clause, [], [], [?INTEGER(Id)]};
c_id(error) ->
?BADARG(0).
%%% ------------------------------------------------------------------------
%%% # vendor_id/0
%%% ------------------------------------------------------------------------
f_vendor_id(ParseD) ->
{?function, vendor_id, 0,
[{?clause, [], [], [b_vendor_id(orddict:find(vendor, ParseD))]}]}.
b_vendor_id({ok, {Id, _}}) ->
?INTEGER(Id);
b_vendor_id(error) ->
?APPLY(erlang, error, [?TERM(undefined)]).
%%% ------------------------------------------------------------------------
%%% # vendor_name/0
%%% ------------------------------------------------------------------------
f_vendor_name(ParseD) ->
{?function, vendor_name, 0,
[{?clause, [], [], [b_vendor_name(orddict:find(vendor, ParseD))]}]}.
b_vendor_name({ok, {_, Name}}) ->
?Atom(Name);
b_vendor_name(error) ->
?APPLY(erlang, error, [?TERM(undefined)]).
%%% ------------------------------------------------------------------------
%%% # msg_name/1
%%% ------------------------------------------------------------------------
f_msg_name(ParseD) ->
{?function, msg_name, 2, msg_name(ParseD)}.
%% Return the empty name for any unknown command to which
%% DIAMETER_COMMAND_UNSUPPORTED should be replied.
msg_name(ParseD) ->
lists:flatmap(fun c_msg_name/1, proplists:get_value(command_codes,
ParseD,
[]))
++ [{?clause, [?VAR('_'), ?VAR('_')], [], [?ATOM('')]}].
c_msg_name({Code, Req, Ans}) ->
[{?clause, [?INTEGER(Code), ?ATOM(true)],
[],
[?Atom(Req)]},
{?clause, [?INTEGER(Code), ?ATOM(false)],
[],
[?Atom(Ans)]}].
%%% ------------------------------------------------------------------------
%%% # msg2rec/1
%%% ------------------------------------------------------------------------
f_msg2rec(ParseD) ->
{?function, msg2rec, 1, msg2rec(ParseD)}.
msg2rec(ParseD) ->
Pre = prefix(ParseD),
lists:map(fun(T) -> c_msg2rec(T, Pre) end, get_value(messages, ParseD))
++ [?BADARG(1)].
c_msg2rec({N,_,_,_,_}, Pre) ->
c_name2rec(N, Pre).
%%% ------------------------------------------------------------------------
%%% # rec2msg/1
%%% ------------------------------------------------------------------------
f_rec2msg(ParseD) ->
{?function, rec2msg, 1, rec2msg(ParseD)}.
rec2msg(ParseD) ->
Pre = prefix(ParseD),
lists:map(fun(T) -> c_rec2msg(T, Pre) end, get_value(messages, ParseD))
++ [?BADARG(1)].
c_rec2msg({N,_,_,_,_}, Pre) ->
{?clause, [?Atom(rec_name(N, Pre))], [], [?Atom(N)]}.
%%% ------------------------------------------------------------------------
%%% # name2rec/1
%%% ------------------------------------------------------------------------
f_name2rec(ParseD) ->
{?function, name2rec, 1, name2rec(ParseD)}.
name2rec(ParseD) ->
Pre = prefix(ParseD),
Groups = get_value(grouped, ParseD)
++ lists:flatmap(fun avps/1, get_value(import_groups, ParseD)),
lists:map(fun({N,_,_,_}) -> c_name2rec(N, Pre) end, Groups)
++ [{?clause, [?VAR('T')], [], [?CALL(msg2rec, [?VAR('T')])]}].
c_name2rec(Name, Pre) ->
{?clause, [?Atom(Name)], [], [?Atom(rec_name(Name, Pre))]}.
avps({_Mod, Avps}) ->
Avps.
%%% ------------------------------------------------------------------------
%%% # avp_name/1
%%% ------------------------------------------------------------------------
f_avp_name(ParseD) ->
{?function, avp_name, 2, avp_name(ParseD)}.
%% 3588, 4.1:
%%
%% AVP Code
%% The AVP Code, combined with the Vendor-Id field, identifies the
%% attribute uniquely. AVP numbers 1 through 255 are reserved for
%% backward compatibility with RADIUS, without setting the Vendor-Id
%% field. AVP numbers 256 and above are used for Diameter, which are
%% allocated by IANA (see Section 11.1).
avp_name(ParseD) ->
Avps = get_value(avp_types, ParseD),
Imported = get_value(import_avps, ParseD),
Vid = orddict:find(vendor, ParseD),
Vs = vendor_id_map(ParseD),
lists:map(fun(T) -> c_avp_name(T, Vs, Vid) end, Avps)
++ lists:flatmap(fun(T) -> c_imported_avp_name(T, Vs) end, Imported)
++ [{?clause, [?VAR('_'), ?VAR('_')], [], [?ATOM('AVP')]}].
c_avp_name({Name, Code, Type, Flags}, Vs, Vid) ->
c_avp_name_(?TERM({?A(Name), ?A(Type)}),
?INTEGER(Code),
vid(Name, Flags, Vs, Vid)).
%% Note that an imported AVP's vendor id is determined by
%% avp_vendor_id in the inheriting module and vendor in the inherited
%% module. In particular, avp_vendor_id in the inherited module is
%% ignored so can't just call Mod:avp_header/1 to retrieve the vendor
%% id. A vendor id specified in @grouped is equivalent to one
%% specified as avp_vendor_id.
c_imported_avp_name({Mod, Avps}, Vs) ->
lists:map(fun(A) -> c_avp_name(A, Vs, {module, Mod}) end, Avps).
c_avp_name_(T, Code, undefined = U) ->
{?clause, [Code, ?ATOM(U)],
[],
[T]};
c_avp_name_(T, Code, Vid) ->
{?clause, [Code, ?INTEGER(Vid)],
[],
[T]}.
vendor_id_map(ParseD) ->
lists:flatmap(fun({V,Ns}) -> [{N,V} || N <- Ns] end,
get_value(avp_vendor_id, ParseD))
++ lists:flatmap(fun({_,_,[],_}) -> [];
({N,_,[V],_}) -> [{N,V}]
end,
get_value(grouped, ParseD)).
%%% ------------------------------------------------------------------------
%%% # avp_arity/1
%%% ------------------------------------------------------------------------
f_avp_arity_1(ParseD) ->
{?function, avp_arity, 1, avp_arities(ParseD) ++ [?BADARG(1)]}.
avp_arities(ParseD) ->
Msgs = get_value(messages, ParseD),
Groups = get_value(grouped, ParseD)
++ lists:flatmap(fun avps/1, get_value(import_groups, ParseD)),
lists:map(fun c_avp_arities/1, Msgs ++ Groups).
c_avp_arities({N,_,_,_,As}) ->
c_avp_arities(N,As);
c_avp_arities({N,_,_,As}) ->
c_avp_arities(N,As).
c_avp_arities(Name, Avps) ->
Arities = [{?A(N), A} || T <- Avps, {N,A} <- [avp_info(T)]],
{?clause, [?Atom(Name)], [], [?TERM(Arities)]}.
%%% ------------------------------------------------------------------------
%%% # avp_arity/2
%%% ------------------------------------------------------------------------
f_avp_arity_2(ParseD) ->
{?function, avp_arity, 2, avp_arity(ParseD)}.
avp_arity(ParseD) ->
Msgs = get_value(messages, ParseD),
Groups = get_value(grouped, ParseD)
++ lists:flatmap(fun avps/1, get_value(import_groups, ParseD)),
c_avp_arity(Msgs ++ Groups)
++ [{?clause, [?VAR('_'), ?VAR('_')], [], [?INTEGER(0)]}].
c_avp_arity(L)
when is_list(L) ->
lists:flatmap(fun c_avp_arity/1, L);
c_avp_arity({N,_,_,_,As}) ->
c_avp_arity(N,As);
c_avp_arity({N,_,_,As}) ->
c_avp_arity(N,As).
c_avp_arity(Name, Avps) ->
lists:map(fun(A) -> c_arity(Name, A) end, Avps).
c_arity(Name, Avp) ->
{AvpName, Arity} = avp_info(Avp),
{?clause, [?Atom(Name), ?Atom(AvpName)], [], [?TERM(Arity)]}.
%%% ------------------------------------------------------------------------
%%% # avp/3
%%% ------------------------------------------------------------------------
f_avp(ParseD) ->
{?function, avp, 4, avp(ParseD) ++ [?BADARG(4)]}.
avp(ParseD) ->
Native = get_value(avp_types, ParseD),
CustomMods = get_value(custom_types, ParseD),
TypeMods = get_value(codecs, ParseD),
Imported = get_value(import_avps, ParseD),
Enums = get_value(enum, ParseD),
Custom = lists:map(fun({M,As}) -> {M, custom_types, As} end,
CustomMods)
++ lists:map(fun({M,As}) -> {M, codecs, As} end,
TypeMods),
avp(types(Native), Imported, Custom, Enums).
types(Avps) ->
lists:map(fun({N,_,T,_}) -> {N,T} end, Avps).
avp(Native, Imported, Custom, Enums) ->
report(native, Native),
report(imported, Imported),
report(custom, Custom),
TypeDict = lists:foldl(fun({N,_,T,_}, D) -> orddict:store(N,T,D) end,
orddict:from_list(Native),
lists:flatmap(fun avps/1, Imported)),
CustomNames = lists:flatmap(fun({_,_,Ns}) -> Ns end, Custom),
lists:map(fun c_base_avp/1,
lists:filter(fun({N,_}) -> not_in(CustomNames, N) end,
Native))
++ lists:flatmap(fun(I) -> cs_imported_avp(I, Enums, CustomNames) end,
Imported)
++ lists:flatmap(fun(C) -> cs_custom_avp(C, TypeDict) end, Custom).
not_in(List, X) ->
not lists:member(X, List).
c_base_avp({AvpName, "Enumerated"}) ->
{?clause, [?VAR('T'), ?VAR('Data'), ?Atom(AvpName), ?VAR('_')],
[],
[?CALL(enumerated_avp, [?VAR('T'), ?Atom(AvpName), ?VAR('Data')])]};
c_base_avp({AvpName, "Grouped"}) ->
{?clause, [?VAR('T'), ?VAR('Data'), ?Atom(AvpName), ?VAR('Opts')],
[],
[?CALL(grouped_avp, [?VAR('T'),
?Atom(AvpName),
?VAR('Data'),
?VAR('Opts')])]};
c_base_avp({AvpName, Type}) ->
{?clause, [?VAR('T'), ?VAR('Data'), ?Atom(AvpName), ?VAR('Opts')],
[],
[?APPLY(diameter_types, ?A(Type), [?VAR('T'),
?VAR('Data'),
?VAR('Opts')])]}.
cs_imported_avp({Mod, Avps}, Enums, CustomNames) ->
lists:map(fun(A) -> imported_avp(Mod, A, Enums) end,
lists:filter(fun({N,_,_,_}) -> not_in(CustomNames, N) end,
Avps)).
imported_avp(_Mod, {AvpName, _, "Grouped" = T, _}, _) ->
c_base_avp({AvpName, T});
imported_avp(Mod, {AvpName, _, "Enumerated" = T, _}, Enums) ->
case lists:keymember(AvpName, 1, Enums) of
true ->
c_base_avp({AvpName, T});
false ->
c_imported_avp(Mod, AvpName)
end;
imported_avp(Mod, {AvpName, _, _, _}, _) ->
c_imported_avp(Mod, AvpName).
c_imported_avp(Mod, AvpName) ->
{?clause, [?VAR('T'), ?VAR('Data'), ?Atom(AvpName), ?VAR('Opts')],
[],
[?CALL(avp, [?VAR('T'),
?VAR('Data'),
?Atom(AvpName),
?VAR('Opts'),
?ATOM(Mod)])]}.
cs_custom_avp({Mod, Key, Avps}, Dict) ->
lists:map(fun(N) -> c_custom_avp(Mod, Key, N, orddict:fetch(N, Dict)) end,
Avps).
c_custom_avp(Mod, Key, AvpName, Type) ->
{F,A} = custom(Key, AvpName, Type),
{?clause, [?VAR('T'), ?VAR('Data'), ?Atom(AvpName), ?VAR('Opts')],
[],
[?APPLY(?A(Mod), ?A(F), [?VAR('T'),
?Atom(A),
?VAR('Data'),
?VAR('Opts')])]}.
custom(custom_types, AvpName, Type) ->
{AvpName, Type};
custom(codecs, AvpName, Type) ->
{Type, AvpName}.
%%% ------------------------------------------------------------------------
%%% # enumerated_avp/3
%%% ------------------------------------------------------------------------
f_enumerated_avp(ParseD) ->
{?function, enumerated_avp, 3, enumerated_avp(ParseD) ++ [?BADARG(3)]}.
enumerated_avp(ParseD) ->
Enums = get_value(enum, ParseD),
lists:flatmap(fun cs_enumerated_avp/1, Enums)
++ lists:flatmap(fun({M,Es}) -> enumerated_avp(M, Es, Enums) end,
get_value(import_enums, ParseD)).
enumerated_avp(Mod, Es, Enums) ->
lists:flatmap(fun({N,_}) ->
cs_enumerated_avp(lists:keymember(N, 1, Enums),
Mod,
N)
end,
Es).
cs_enumerated_avp(true, Mod, Name) ->
[{?clause, [?VAR('T'), ?Atom(Name), ?VAR('Data')],
[],
[?APPLY(Mod, enumerated_avp, [?VAR('T'),
?Atom(Name),
?VAR('Data')])]}];
cs_enumerated_avp(false, _, _) ->
[].
cs_enumerated_avp({AvpName, Values}) ->
lists:flatmap(fun(V) -> c_enumerated_avp(AvpName, V) end, Values).
c_enumerated_avp(AvpName, {_,I}) ->
[{?clause, [?ATOM(decode), ?Atom(AvpName), ?TERM(<<I:32>>)],
[],
[?TERM(I)]},
{?clause, [?ATOM(encode), ?Atom(AvpName), ?INTEGER(I)],
[],
[?TERM(<<I:32>>)]}].
%%% ------------------------------------------------------------------------
%%% msg_header/1
%%% ------------------------------------------------------------------------
f_msg_header(ParseD) ->
{?function, msg_header, 1, msg_header(ParseD) ++ [?BADARG(1)]}.
msg_header(ParseD) ->
msg_header(get_value(messages, ParseD), ParseD).
msg_header([], _) ->
[];
msg_header(Msgs, ParseD) ->
ApplId = orddict:fetch(id, ParseD),
lists:map(fun({M,C,F,_,_}) -> c_msg_header(M, C, F, ApplId) end, Msgs).
%% Note that any application id in the message header spec is ignored.
c_msg_header(Name, Code, Flags, ApplId) ->
{?clause, [?Atom(Name)],
[],
[?TERM({Code, encode_msg_flags(Flags), ApplId})]}.
encode_msg_flags(Flags) ->
lists:foldl(fun emf/2, 0, Flags).
emf('REQ', N) -> N bor 2#10000000;
emf('PXY', N) -> N bor 2#01000000;
emf('ERR', N) -> N bor 2#00100000.
%%% ------------------------------------------------------------------------
%%% # avp_header/1
%%% ------------------------------------------------------------------------
f_avp_header(ParseD) ->
{?function, avp_header, 1, avp_header(ParseD) ++ [?BADARG(1)]}.
avp_header(ParseD) ->
Native = get_value(avp_types, ParseD),
Imported = get_value(import_avps, ParseD),
Vid = orddict:find(vendor, ParseD),
Vs = vendor_id_map(ParseD),
lists:flatmap(fun(A) -> c_avp_header(A, Vs, Vid) end,
Native ++ Imported).
c_avp_header({Name, Code, _Type, Flags}, Vs, Vid) ->
[{?clause, [?Atom(Name)],
[],
[?TERM({Code, encode_avp_flags(Flags), vid(Name, Flags, Vs, Vid)})]}];
c_avp_header({Mod, Avps}, Vs, _Vid) ->
lists:map(fun(A) -> c_imported_avp_header(A, Mod, Vs) end, Avps).
%% Note that avp_vendor_id in the inherited dictionary is ignored. The
%% value must be changed in the inheriting dictionary. This is
%% consistent with the semantics of avp_name/2.
c_imported_avp_header({Name, _Code, _Type, _Flags}, Mod, Vs) ->
Apply = ?APPLY(Mod, avp_header, [?Atom(Name)]),
{?clause, [?Atom(Name)],
[],
[case proplists:get_value(Name, Vs) of
undefined ->
Apply;
Vid ->
?CALL(setelement, [?INTEGER(3), Apply, ?INTEGER(Vid)])
end]}.
encode_avp_flags(Fs) ->
lists:foldl(fun eaf/2, 0, Fs).
eaf($V, F) -> 2#10000000 bor F;
eaf($M, F) -> 2#01000000 bor F;
eaf($P, F) -> 2#00100000 bor F.
vid(Name, Flags, Vs, Vid) ->
v(lists:member($V, Flags), Name, Vs, Vid).
v(true = T, Name, Vs, {module, Mod}) ->
v(T, Name, Vs, {ok, {Mod:vendor_id(), Mod:vendor_name()}});
v(true, Name, Vs, Vid) ->
case proplists:get_value(Name, Vs) of
undefined ->
{ok, {Id, _}} = Vid,
Id;
Id ->
Id
end;
v(false, _, _, _) ->
undefined.
%%% ------------------------------------------------------------------------
%%% # empty_value/0
%%% ------------------------------------------------------------------------
f_empty_value(ParseD) ->
{?function, empty_value, 2, empty_value(ParseD)}.
empty_value(ParseD) ->
Imported = lists:flatmap(fun avps/1, get_value(import_enums, ParseD)),
Groups = get_value(grouped, ParseD)
++ lists:flatmap(fun avps/1, get_value(import_groups, ParseD)),
Enums = [T || {N,_} = T <- get_value(enum, ParseD),
not lists:keymember(N, 1, Imported)]
++ Imported,
lists:map(fun c_empty_value/1, Groups ++ Enums)
++ [{?clause, [?VAR('Name'), ?VAR('Opts')],
[],
[?CALL(empty, [?VAR('Name'), ?VAR('Opts')])]}].
c_empty_value({Name, _, _, _}) ->
{?clause, [?Atom(Name), ?VAR('Opts')],
[],
[?CALL(empty_group, [?Atom(Name), ?VAR('Opts')])]};
c_empty_value({Name, _}) ->
{?clause, [?Atom(Name), ?VAR('_')],
[],
[?TERM(<<0:32>>)]}.
%%% ------------------------------------------------------------------------
%%% # dict/0
%%% ------------------------------------------------------------------------
f_dict(ParseD) ->
{?function, dict, 0,
[{?clause, [], [], [?TERM([?VERSION | ParseD])]}]}.
%%% ------------------------------------------------------------------------
%%% # gen_hrl/2
%%% ------------------------------------------------------------------------
gen_hrl(Mod, ParseD) ->
{Prefix, MsgRecs, GrpRecs, ImportedGrpRecs}
= make_record_forms(ParseD),
[hrl_header(Mod),
forms("Message records", MsgRecs),
forms("Grouped AVP records", GrpRecs),
lists:map(fun({M,Fs}) ->
forms("Grouped AVP records from " ++ atom_to_list(M),
Fs)
end,
ImportedGrpRecs),
format("ENUM Macros", m_enums(Prefix, false, get_value(enum, ParseD))),
format("DEFINE Macros", m_enums(Prefix, false, get_value(define, ParseD))),
lists:map(fun({M,Es}) ->
format("ENUM Macros from " ++ atom_to_list(M),
m_enums(Prefix, true, Es))
end,
get_value(import_enums, ParseD))].
forms(_, [] = No) ->
No;
forms(Banner, Forms) ->
format(Banner, prettypr(Forms)).
format(_, [] = No) ->
No;
format(Banner, Str) ->
[banner(Banner), Str, $\n].
prettypr(Forms) ->
erl_prettypr:format(erl_syntax:form_list(Forms)).
banner(Heading) ->
["\n\n"
"%%% -------------------------------------------------------\n"
"%%% ", Heading, ":\n"
"%%% -------------------------------------------------------\n\n"].
z(S) ->
string:join(string:tokens(S, "\s\t"), "\s").
m_enums(Prefix, Wrap, Enums) ->
lists:map(fun(T) -> m_enum(Prefix, Wrap, T) end, Enums).
m_enum(Prefix, B, {Name, Values}) ->
P = Prefix ++ to_upper(Name) ++ "_",
lists:map(fun({A,I}) ->
N = ["'", P, to_upper(z(A)), "'"],
wrap(B,
N,
["-define(", N, ", ", integer_to_list(I), ").\n"])
end,
Values).
wrap(true, Name, Def) ->
["-ifndef(", Name, ").\n", Def, "-endif.\n"];
wrap(false, _, Def) ->
Def.
to_upper(A) when is_atom(A) ->
to_upper(atom_to_list(A));
to_upper(S) ->
lists:map(fun tu/1, S).
tu(C) when C >= $a, C =< $z ->
C + $A - $a;
tu(C) ->
C.
header() ->
("%% -------------------------------------------------------------------\n"
"%% This is a generated file.\n"
"%% -------------------------------------------------------------------\n"
"\n").
hrl_header(Name) ->
header() ++ "-hrl_name('" ++ ?S(Name) ++ ".hrl').\n".
%% avp_info/1
avp_info(Entry) -> %% {Name, Arity}
case Entry of
{{A}} -> {A, 1};
{A} -> {A, 1};
[A] -> {A, {0,1}};
{Q,T} ->
{A,_} = avp_info(T),
{A, arity(T,Q)}
end.
%% Normalize arity to 1 or {N,X} where N is an integer. A record field
%% for an AVP is list-valued iff the normalized arity is not 1.
arity({{_}}, '*' = Inf) -> {0, Inf};
arity([_], '*' = Inf) -> {0, Inf};
arity({_}, '*' = Inf) -> {1, Inf};
arity(_, {_,_} = Q) -> Q.
prefix(ParseD) ->
case orddict:find(prefix, ParseD) of
{ok, P} ->
P ++ "_";
error ->
""
end.
rec_name(Name, Prefix) ->
Prefix ++ Name.
%% ===========================================================================
%% preprocess/2
%%
%% Preprocess forms as generated by 'forms' option. In particular,
%% replace the include_lib attributes in generated forms by the
%% corresponding forms, extracting the latter from an existing
%% dictionary (diameter_gen_relay). The resulting forms can be
%% compiled to beam using compile:forms/2 (which does no preprocessing
%% of it's own; DiY currently appears to be the only way to preprocess
%% a forms list).
preprocess(Mod, Forms) ->
{_, Beam, _} = code:get_object_code(diameter_gen_relay),
pp(Forms, remod(Mod, abstract_code(Beam))).
pp(Forms, {ok, Code}) ->
Files = files(Code, []),
lists:flatmap(fun(T) -> include(T, Files) end, Forms);
pp(Forms, {error, Reason}) ->
erlang:error({forms, Reason, Forms}).
%% Replace literal diameter_gen_relay atoms in the extracted forms.
%% ?MODULE for example.
remod(Mod, L)
when is_list(L) ->
[remod(Mod, T) || T <- L];
remod(Mod, {atom, _, diameter_gen_relay} = T) ->
setelement(3, T, Mod);
remod(Mod, T)
when is_tuple(T) ->
list_to_tuple(remod(Mod, tuple_to_list(T)));
remod(_, T) ->
T.
%% Replace include_lib by the corresponding forms.
include({attribute, _, include_lib, Path}, Files) ->
Inc = filename:basename(Path),
[{Inc, Forms}] = [T || {F, _} = T <- Files, F == Inc], %% expect one
lists:flatmap(fun filter/1, Forms);
include(T, _) ->
[T].
%% Extract abstract code.
abstract_code(Beam) ->
case beam_lib:chunks(Beam, [abstract_code]) of
{ok, {_Mod, [{abstract_code, {_Vsn, Code}}]}} ->
{ok, Code};
{ok, {_Mod, [{abstract_code, no_abstract_code = No}]}} ->
{error, No};
{error = E, beam_lib, Reason} ->
{E, Reason}
end.
%% Extract filename/forms pairs for included forms.
files([{attribute, _, file, {Path, _}} | T], Acc) ->
{Body, Rest} = lists:splitwith(fun({attribute, _, file, _}) -> false;
(_) -> true
end,
T),
files(Rest, [{filename:basename(Path), Body} | Acc]);
files([], Acc) ->
Acc.
%% Only retain record diameter_avp and functions not generated by
%% diameter_exprecs.
filter({attribute, _, record, {diameter_avp, _}} = T) ->
[T];
filter({function, _, Name, _, _} = T) ->
case ?S(Name) of
[$#|_] -> %% generated by diameter_exprecs
[];
_ ->
[T]
end;
filter(_) ->
[]. | lib/diameter/src/compiler/diameter_codegen.erl | 0.603932 | 0.491761 | diameter_codegen.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%% @doc EDoc interface to Erlang specifications and types.
-module(edoc_specs).
-export([type/2, spec/2, dummy_spec/1, docs/2]).
-export([add_data/4, tag/1, is_tag/1]).
-include("edoc.hrl").
-include("edoc_types.hrl").
-type syntaxTree() :: erl_syntax:syntaxTree().
-define(TOP_TYPE, term).
%%
%% Exported functions
%%
-spec type(Form::syntaxTree(), TypeDocs::dict:dict()) -> #tag{}.
%% @doc Convert an Erlang type to EDoc representation.
%% TypeDocs is a dict of {Name, Doc}.
%% Note: #t_typedef.name is set to {record, R} for record types.
type(Form, TypeDocs) ->
{Name, Data0} = erl_syntax_lib:analyze_wild_attribute(Form),
type = tag(Name),
{TypeName, Type, Args, Doc} =
case Data0 of
{{record, R}, Fs, []} ->
L = erl_syntax:get_pos(Form),
{{record, R}, {type, L, record, [{atom,L,R} | Fs]}, [], ""};
{N,T,As} ->
Doc0 =
case dict:find({N, length(As)}, TypeDocs) of
{ok, Doc1} ->
Doc1;
error ->
""
end,
{#t_name{name = N}, T, As, Doc0}
end,
#tag{name = type, line = get_line(element(2, Type)),
origin = code,
data = {#t_typedef{name = TypeName,
args = d2e(Args),
type = d2e(opaque2abstr(Name, Type))},
Doc}}.
-spec spec(Form::syntaxTree(), ClauseN::pos_integer()) -> #tag{}.
%% @doc Convert an Erlang spec to EDoc representation.
spec(Form, Clause) ->
{Name, _Arity, TypeSpecs} = get_spec(Form),
TypeSpec = lists:nth(Clause, TypeSpecs),
#tag{name = spec, line = get_line(element(2, TypeSpec)),
origin = code,
data = aspec(d2e(TypeSpec), Name)}.
-spec dummy_spec(Form::syntaxTree()) -> #tag{}.
%% @doc Create a #tag{} record where data is a string with the name of
%% the given Erlang spec and an empty list of arguments.
dummy_spec(Form) ->
{#t_name{name = Name}, Arity, TypeSpecs} = get_spec(Form),
As = string:join(lists:duplicate(Arity, "_X"), ","),
S = lists:flatten(io_lib:format("~p(~s) -> true\n", [Name, As])),
#tag{name = spec, line = get_line(element(2, hd(TypeSpecs))),
origin = code, data = S}.
-spec docs(Forms::[syntaxTree()],
CommentFun :: fun( ([syntaxTree()], Line :: term()) -> #tag{} ))
-> dict:dict().
%% @doc Find comments after -type/-opaque declarations.
%% Postcomments "inside" the type are skipped.
docs(Forms, CommentFun) ->
find_type_docs(Forms, [], CommentFun).
-type entry() :: #entry{}.
-type module_info() :: #module{}.
-type entries() :: [entry()].
-spec add_data(Entries::entries(), Options::proplists:proplist(),
File::file:filename(), Module::module_info()) -> entries().
%% @doc Create tags a la EDoc for Erlang specifications and types.
%% Exported types and types used (indirectly) by Erlang specs are
%% added to the entries.
add_data(Entries, Opts, File, Module) ->
TypeDefs0 = espec_types(Entries),
TypeTable = ets:new(etypes, [ordered_set]),
Es1 = expand_records(Entries, TypeDefs0, TypeTable, Opts, File, Module),
Es = [use_tags(E, TypeTable) || E <- Es1],
true = ets:delete(TypeTable),
Es.
%%
%% Local functions
%%
aspec(#t_spec{}=Spec, Name) ->
Spec#t_spec{name = Name};
aspec(Type, Name) ->
#t_spec{name = Name, type = Type}.
get_spec(Form) ->
{spec, Data0} = erl_syntax_lib:analyze_wild_attribute(Form),
case Data0 of
{{F,A}, D} ->
{#t_name{name = F}, A, D};
{{M,F,A}, D} ->
{#t_name{module = M, name = F}, A, D}
end.
find_type_docs([], Cs, _Fun) ->
dict:from_list(Cs);
find_type_docs([F | Fs], Cs, Fun) ->
try get_name_and_last_line(F) of
{Name, LastTypeLine} ->
C0 = erl_syntax:comment(["% @type f(). "]),
C1 = erl_syntax:set_pos(C0, LastTypeLine),
%% Postcomments before the dot after the typespec are ignored.
C2 = [C1 | [C ||
C <- erl_syntax:get_postcomments(F),
erl_syntax:get_pos(C) >= LastTypeLine]],
C3 = collect_comments(Fs, LastTypeLine),
#tag{data = Doc0} = Fun(lists:reverse(C2 ++ C3), LastTypeLine),
case strip(Doc0) of % Strip away "f(). \n"
"" ->
find_type_docs(Fs, Cs, Fun);
Doc ->
W = edoc_wiki:parse_xml(Doc, LastTypeLine),
find_type_docs(Fs, [{Name, W}|Cs], Fun)
end
catch _:_ ->
find_type_docs(Fs, Cs, Fun)
end.
collect_comments([], _Line) ->
[];
collect_comments([F | Fs], Line) ->
L1 = erl_syntax:get_pos(F),
if
L1 =:= Line + 1;
L1 =:= Line -> % a separate postcomment
case is_comment(F) of
true ->
[F | collect_comments(Fs, L1)];
false ->
[]
end;
true ->
[]
end.
%% Note: there is a creepy bug concerning an include file terminated
%% by a -type attribute and the include statement is followed by a
%% comment (which is not meant to be documentation of the type).
is_comment(F) ->
erl_syntax_lib:analyze_form(F) =:= comment.
strip("") ->
"";
strip([$\n | S]) ->
S;
strip([_ | S]) ->
strip(S).
%% Find the type name and the greatest line number of a type spec.
%% Should use syntax_tools but this has to do for now.
get_name_and_last_line(F) ->
{Name, Data} = erl_syntax_lib:analyze_wild_attribute(F),
type = edoc_specs:tag(Name),
Attr = {attribute, erl_syntax:get_pos(F), Name, Data},
Fun = fun(A) ->
Line = get_line(A),
case get('$max_line') of
Max when Max < Line ->
_ = put('$max_line', Line);
_ ->
ok
end
end,
undefined = put('$max_line', 0),
_ = erl_parse:map_anno(Fun, Attr),
Line = erase('$max_line'),
TypeName = case Data of
{N, _T, As} when is_atom(N) -> % skip records
{N, length(As)}
end,
{TypeName, Line}.
get_line(Anno) ->
erl_anno:line(Anno).
%% Collect all Erlang types. Types in comments (@type) shadow Erlang
%% types (-spec/-opaque).
espec_types(Entries) ->
Tags = get_all_tags(Entries),
CommTs = [type_name(T) ||
#tag{name = type, origin = comment}=T <- Tags],
CT = sets:from_list(CommTs),
[T || #tag{name = Name, origin = code}=T <- Tags,
tag(Name) =:= type,
not sets:is_element(type_name(T), CT)].
get_all_tags(Es) ->
lists:flatmap(fun (#entry{data = Ts}) -> Ts end, Es).
%% Turns an opaque type into an abstract datatype.
%% Note: top level annotation is ignored.
opaque2abstr(opaque, _T) -> undefined;
opaque2abstr(type, T) -> T.
%% Replaces the parameters extracted from the source (by
%% edoc_extract:parameters/1) by annotations and variable names, using
%% the source parameters as default values
%% Selects seen types (exported types, types used by specs),
%% skips records and unused types.
use_tags(#entry{data = Ts}=E, TypeTable) ->
use_tags(Ts, E, TypeTable, []).
use_tags([], E, _TypeTable, NTs) ->
E#entry{data = lists:reverse(NTs)};
use_tags([#tag{origin = code}=T | Ts], E, TypeTable, NTs) ->
case tag(T#tag.name) of
spec ->
Args = params(T, E#entry.args),
use_tags(Ts, E#entry{args = Args}, TypeTable, [T | NTs]);
type ->
TypeName = type_name(T),
case ets:lookup(TypeTable, TypeName) of
[{{{record,_},_},_,_}] ->
use_tags(Ts, E, TypeTable, NTs);
[{_,_,not_seen}] ->
use_tags(Ts, E, TypeTable, NTs);
[] ->
use_tags(Ts, E, TypeTable, NTs);
[{TypeName, Tag, seen}] ->
use_tags(Ts, E, TypeTable, [Tag | NTs])
end
end;
use_tags([T | Ts], E, TypeTable, NTs) ->
use_tags(Ts, E, TypeTable, [T | NTs]).
params(#tag{name = spec, data=#t_spec{type = #t_fun{args = As}}}, Default) ->
parms(As, Default).
parms([], []) ->
[];
parms([A | As], [D | Ds]) ->
[param(A, D) | parms(As, Ds)].
param(#t_list{type = Type}, Default) ->
param(Type, Default);
param(#t_paren{type = Type}, Default) ->
param(Type, Default);
param(#t_nonempty_list{type = Type}, Default) ->
param(Type, Default);
param(#t_record{name = #t_atom{val = Name}}, _Default) ->
list_to_atom(capitalize(atom_to_list(Name)));
param(T, Default) ->
arg_name(?t_ann(T), Default).
capitalize([C | Cs]) when C >= $a, C =< $z -> [C - 32 | Cs];
capitalize(Cs) -> Cs.
%% Like edoc_types:arg_name/1
arg_name([], Default) ->
Default;
arg_name([A | As], Default) ->
case is_name(A) of
true -> A;
false -> arg_name(As, Default)
end.
is_name(A) ->
is_atom(A).
d2e(T) ->
d2e(T, 0).
d2e({ann_type,_,[V, T0]}, Prec) ->
%% Note: the -spec/-type syntax allows annotations everywhere, but
%% EDoc does not. The fact that the annotation is added to the
%% type here does not necessarily mean that it will be used by the
%% layout module.
{_L,P,R} = erl_parse:type_inop_prec('::'),
T1 = d2e(T0, R),
T = ?add_t_ann(T1, element(3, V)),
maybe_paren(P, Prec, T); % the only necessary call to maybe_paren()
d2e({remote_type,_,[{atom,_,M},{atom,_,F},Ts0]}, _Prec) ->
Ts = d2e(Ts0),
typevar_anno(#t_type{name = #t_name{module = M, name = F}, args = Ts}, Ts);
d2e({type,_,'fun',[{type,_,product,As0},Ran0]}, _Prec) ->
Ts = [Ran|As] = d2e([Ran0|As0]),
%% Assume that the linter has checked type variables.
typevar_anno(#t_fun{args = As, range = Ran}, Ts);
d2e({type,_,'fun',[A0={type,_,any},Ran0]}, _Prec) ->
Ts = [A, Ran] = d2e([A0, Ran0]),
typevar_anno(#t_fun{args = [A], range = Ran}, Ts);
d2e({type,_,'fun',[]}, _Prec) ->
#t_type{name = #t_name{name = function}, args = []};
d2e({type,_,any}, _Prec) ->
#t_var{name = '...'}; % Kludge... not a type variable!
d2e({type,_,nil,[]}, _Prec) ->
#t_nil{};
d2e({paren_type,_,[T]}, Prec) ->
d2e(T, Prec);
d2e({type,_,list,[T0]}, _Prec) ->
T = d2e(T0),
typevar_anno(#t_list{type = T}, [T]);
d2e({type,_,nonempty_list,[T0]}, _Prec) ->
T = d2e(T0),
typevar_anno(#t_nonempty_list{type = T}, [T]);
d2e({type,_,bounded_fun,[T,Gs]}, _Prec) ->
[F0|Defs] = d2e([T|Gs]),
F = ?set_t_ann(F0, lists:keydelete(type_variables, 1, ?t_ann(F0))),
%% Assume that the linter has checked type variables.
#t_spec{type = typevar_anno(F, [F0]), defs = Defs};
d2e({type,_,range,[V1,V2]}, Prec) ->
{_L,P,_R} = erl_parse:type_inop_prec('..'),
{integer,_,I1} = erl_eval:partial_eval(V1),
{integer,_,I2} = erl_eval:partial_eval(V2),
T0 = #t_integer_range{from = I1, to = I2},
maybe_paren(P, Prec, T0);
d2e({type,_,constraint,[Sub,Ts0]}, _Prec) ->
case {Sub,Ts0} of
{{atom,_,is_subtype},[{var,_,N},T0]} ->
Ts = [T] = d2e([T0]),
#t_def{name = #t_var{name = N}, type = typevar_anno(T, Ts)};
{{atom,_,is_subtype},[ST0,T0]} ->
%% Should not happen.
Ts = [ST,T] = d2e([ST0,T0]),
#t_def{name = ST, type = typevar_anno(T, Ts)};
_ ->
throw_error(get_line(element(2, Sub)), "cannot handle guard", [])
end;
d2e({type,_,union,Ts0}, Prec) ->
{_L,P,R} = erl_parse:type_inop_prec('|'),
Ts = d2e(Ts0, R),
T = maybe_paren(P, Prec, #t_union{types = Ts}),
typevar_anno(T, Ts);
d2e({type,_,tuple,any}, _Prec) ->
#t_type{name = #t_name{name = tuple}, args = []};
d2e({type,_,binary,[Base,Unit]}, _Prec) ->
{integer,_,B} = erl_eval:partial_eval(Base),
{integer,_,U} = erl_eval:partial_eval(Unit),
#t_binary{base_size = B, unit_size = U};
d2e({type,_,map,any}, _Prec) ->
#t_map{types = []};
d2e({type,_,map,Es}, _Prec) ->
#t_map{types = d2e(Es) };
d2e({type,_,map_field_assoc,[K,V]}, Prec) ->
T = #t_map_field{k_type = d2e(K), v_type=d2e(V) },
{P,_R} = erl_parse:type_preop_prec('#'),
maybe_paren(P, Prec, T);
d2e({type,_,map_field_exact,K,V}, Prec) ->
T = #t_map_field{k_type = d2e(K), v_type=d2e(V) },
{P,_R} = erl_parse:type_preop_prec('#'),
maybe_paren(P, Prec, T);
d2e({type,_,tuple,Ts0}, _Prec) ->
Ts = d2e(Ts0),
typevar_anno(#t_tuple{types = Ts}, Ts);
d2e({type,_,record,[Name|Fs0]}, Prec) ->
Atom = #t_atom{val = element(3, Name)},
Fs = d2e(Fs0),
{P,_R} = erl_parse:type_preop_prec('#'),
T = maybe_paren(P, Prec, #t_record{name = Atom, fields = Fs}),
typevar_anno(T, Fs);
d2e({type,_,field_type,[Name,Type0]}, Prec) ->
{_L,P,R} = erl_parse:type_inop_prec('::'),
Type = maybe_paren(P, Prec, d2e(Type0, R)),
T = #t_field{name = #t_atom{val = element(3, Name)}, type = Type},
typevar_anno(T, [Type]);
d2e({typed_record_field,{record_field,L,Name},Type}, Prec) ->
d2e({type,L,field_type,[Name,Type]}, Prec);
d2e({typed_record_field,{record_field,L,Name,_E},Type}, Prec) ->
d2e({type,L,field_type,[Name,Type]}, Prec);
d2e({record_field,L,_Name,_E}=F, Prec) ->
d2e({typed_record_field,F,{type,L,any,[]}}, Prec); % Maybe skip...
d2e({record_field,L,_Name}=F, Prec) ->
d2e({typed_record_field,F,{type,L,any,[]}}, Prec); % Maybe skip...
d2e({type,_,Name,Types0}, _Prec) ->
Types = d2e(Types0),
typevar_anno(#t_type{name = #t_name{name = Name}, args = Types}, Types);
d2e({user_type,_,Name,Types0}, _Prec) ->
Types = d2e(Types0),
typevar_anno(#t_type{name = #t_name{name = Name}, args = Types}, Types);
d2e({var,_,'_'}, _Prec) ->
#t_type{name = #t_name{name = ?TOP_TYPE}};
d2e({var,_,TypeName}, _Prec) ->
TypeVar = ordsets:from_list([TypeName]),
T = #t_var{name = TypeName},
%% Annotate type variables with the name of the variable.
%% Doing so will stop edoc_layout (and possibly other layout modules)
%% from using the argument name from the source or to invent a new name.
T1 = ?add_t_ann(T, {type_variables, TypeVar}),
?add_t_ann(T1, TypeName);
d2e(L, Prec) when is_list(L) ->
[d2e(T, Prec) || T <- L];
d2e({atom,_,A}, _Prec) ->
#t_atom{val = A};
d2e(undefined = U, _Prec) -> % opaque
U;
d2e(Expr, _Prec) ->
{integer,_,I} = erl_eval:partial_eval(Expr),
#t_integer{val = I}.
%% A type annotation (a tuple; neither an atom nor a list).
typevar_anno(Type, Ts) ->
Vs = typevars(Ts),
case ordsets:to_list(Vs) of
[] -> Type;
_ -> ?add_t_ann(Type, {type_variables, Vs})
end.
typevars(Ts) ->
ordsets:union(get_typevars(Ts)).
get_typevars(Ts) ->
[Vs || T <- Ts, T =/= undefined, {type_variables, Vs} <- ?t_ann(T)].
maybe_paren(P, Prec, T) when P < Prec ->
#t_paren{type = T};
maybe_paren(_P, _Prec, T) ->
T.
-record(parms, {tab, warn, file, line}).
%% Expands record references. Explicitly given record fields are kept,
%% but otherwise the fields from the record definition are substituted
%% for the reference. The reason is that there are no record types.
%% It is recommended to introduce types like "r() :: r{}" and then use
%% r() everywhere. The right hand side, r{}, is expanded in order to
%% show all fields.
%% Returns updated types in the ETS table DT.
expand_records(Entries, TypeDefs, DT, Opts, File, Module) ->
TypeList = [{type_name(T), T, not_seen} || T <- TypeDefs],
true = ets:insert(DT, TypeList),
Warn = proplists:get_value(report_missing_types, Opts,
?REPORT_MISSING_TYPES) =:= true,
P = #parms{tab = DT, warn = Warn, file = File, line = 0},
ExportedTypes = [Name ||
{export_type,Ts} <- Module#module.attributes,
is_list(Ts),
{N,I} <- Ts,
ets:member(DT, Name = {#t_name{name = N}, I})],
_ = lists:foreach(fun({N,A}) -> true = seen_type(N, A, P)
end, ExportedTypes),
entries(Entries, P, Opts).
entries([E0 | Es], P, Opts) ->
E = case edoc_data:hidden_filter([E0], Opts) of
[] ->
E0;
[_] ->
E0#entry{data = specs(E0#entry.data, P)}
end,
[E | entries(Es, P, Opts)];
entries([], _P, _Opts) ->
[].
specs([#tag{line = L, name = spec, origin = code, data = Spec}=Tag0 | Tags],
P0) ->
#t_spec{type = Type0, defs = Defs0} = Spec,
P = P0#parms{line = L},
Type = xrecs(Type0, P),
Defs = xrecs(Defs0, P),
Tag = Tag0#tag{data = Spec#t_spec{type = Type, defs = Defs}},
[Tag | specs(Tags, P)];
specs([Tag | Tags], P) ->
[Tag | specs(Tags, P)];
specs([], _P) ->
[].
xrecs(#t_def{type = Type0}=T, P) ->
Type = xrecs(Type0, P),
T#t_def{type = Type};
xrecs(#t_type{name = Name, args = Args0}=T, P) ->
Args = xrecs(Args0, P),
NArgs = length(Args),
true = seen_type(Name, NArgs, P),
T#t_type{args = Args};
xrecs(#t_var{}=T, _P) ->
T;
xrecs(#t_fun{args = Args0, range = Range0}=T, P) ->
Args = xrecs(Args0, P),
Range = xrecs(Range0, P),
T#t_fun{args = Args, range = Range};
xrecs(#t_map{types = Ts0 }=T,P) ->
Ts = xrecs(Ts0, P),
T#t_map{types = Ts };
xrecs(#t_map_field{k_type=Kt, v_type=Vt}=T, P) ->
T#t_map_field{k_type=xrecs(Kt,P), v_type=xrecs(Vt,P)};
xrecs(#t_tuple{types = Types0}=T, P) ->
Types = xrecs(Types0, P),
T#t_tuple{types = Types};
xrecs(#t_list{type = Type0}=T, P) ->
Type = xrecs(Type0, P),
T#t_list{type = Type};
xrecs(#t_nil{}=T, _P) ->
T;
xrecs(#t_paren{type = Type0}=T, P) ->
Type = xrecs(Type0, P),
T#t_paren{type = Type};
xrecs(#t_nonempty_list{type = Type0}=T, P) ->
Type = xrecs(Type0, P),
T#t_nonempty_list{type = Type};
xrecs(#t_atom{}=T, _P) ->
T;
xrecs(#t_integer{}=T, _P) ->
T;
xrecs(#t_integer_range{}=T, _P) ->
T;
xrecs(#t_binary{}=T, _P) ->
T;
xrecs(#t_float{}=T, _P) ->
T;
xrecs(#t_union{types = Types0}=T, P) ->
Types = xrecs(Types0, P),
T#t_union{types = Types};
xrecs(#t_record{fields = Fields0}=T, P) ->
Fields1 = xrecs(Fields0, P),
#t_record{name = #t_atom{val = Name}} = T,
RName = {record, Name},
true = seen_type(RName, 0, P),
Fields = select_fields(Fields1, RName, P#parms.tab),
T#t_record{fields = Fields};
xrecs(#t_field{type = Type0}=T, P) ->
Type = xrecs(Type0, P),
T#t_field{type = Type};
xrecs(undefined=T, _P) -> % opaque
T;
xrecs([]=T, _P) ->
T;
xrecs([E0 | Es0], P) ->
[xrecs(E0, P) | xrecs(Es0, P)].
seen_type(N, NArgs, P) ->
TypeName = {N, NArgs},
#parms{tab = DT} = P,
case {ets:lookup(DT, TypeName), N} of
{[{TypeName, _, seen}], _} ->
true;
{[{TypeName, TagType, not_seen}], _} when N#t_name.module =:= [] ->
expand_datatype(TagType, proper_type, DT, P);
{[{TypeName, TagType, not_seen}], {record, _}} ->
expand_datatype(TagType, record_type, DT, P);
{[], {record, R}} ->
#parms{warn = W, line = L, file = File} = P,
[edoc_report:warning(L, File, "reference to untyped record ~w",
[R]) || W],
ets:insert(DT, {TypeName, fake, seen});
{[], _} -> % External type or missing type.
true
end.
expand_datatype(Tag0, Kind, DT, P0) ->
#tag{line = L, data = {T0, Doc}} = Tag0,
#t_typedef{type = Type0, defs = []} = T0,
TypeName = type_name(Tag0),
true = ets:update_element(DT, TypeName, {3, seen}),
P = P0#parms{line = L},
Type = case Kind of
record_type ->
#t_record{fields = Fields0} = Type0,
Fields = xrecs(Fields0, P),
Type0#t_record{fields = Fields};
proper_type ->
xrecs(Type0, P)
end,
Tag = Tag0#tag{data={T0#t_typedef{type=Type}, Doc}},
ets:insert(DT, {TypeName, Tag, seen}).
select_fields(Fields, Name, DT) ->
RecordName = {Name, 0},
case ets:lookup(DT, RecordName) of
[{RecordName, fake, seen}] ->
Fields;
[{RecordName, #tag{data = {T, _Doc}}, seen}] ->
#t_typedef{args = [], type = #t_record{fields = Fs}, defs = []}=T,
[find_field(F, Fields) || F <- Fs]
end.
find_field(F, Fs) ->
case lists:keyfind(F#t_field.name, #t_field.name, Fs) of
false -> F;
NF -> NF
end.
type_name(#tag{name = type,
data = {#t_typedef{name = Name, args = As},_}}) ->
{Name, length(As)}.
%% @doc Return `true' if `Tag' is one of the specification and type
%% attribute tags recognized by the Erlang compiler.
-spec is_tag(Tag::atom()) -> boolean().
is_tag(opaque) -> true;
is_tag(spec) -> true;
is_tag(type) -> true;
is_tag(_) -> false.
%% @doc Return the kind of the attribute tag.
-type tag_kind() :: 'type' | 'spec' | 'unknown'.
-spec tag(Tag::atom()) -> tag_kind().
tag(opaque) -> type;
tag(spec) -> spec;
tag(type) -> type;
tag(_) -> unknown.
throw_error(Line, S, A) ->
edoc_report:error(Line, "", io_lib:format(S, A)),
throw(error). | lib/edoc/src/edoc_specs.erl | 0.510008 | 0.400456 | edoc_specs.erl | starcoder |
-module(tpTc).
-compile(inline).
-compile({inline_size, 128}).
-export([
tc/1
, tc/2
, tc/3
, ts/4
, tm/5
, cvrTimeUnit/3
, test/1
]).
%% Measure the execution time (in nanoseconds) for Fun().
-spec tc(Fun :: function()) -> {Time :: integer(), Value :: term()}.
tc(F) ->
T1 = erlang:monotonic_time(),
Val = F(),
T2 = erlang:monotonic_time(),
Time = cvrTimeUnit(T2 - T1, native, nanosecond),
{Time, Val}.
%% Measure the execution time (in nanoseconds) for Fun(Args).
-spec tc(Fun :: function(), Arguments :: [term()]) -> {Time :: integer(), Value :: term()}.
tc(F, A) ->
T1 = erlang:monotonic_time(),
Val = apply(F, A),
T2 = erlang:monotonic_time(),
Time = cvrTimeUnit(T2 - T1, native, nanosecond),
{Time, Val}.
%% Measure the execution time (in nanoseconds) for an MFA.
-spec tc(Module :: module(), Function :: atom(), Arguments :: [term()]) -> {Time :: integer(), Value :: term()}.
tc(M, F, A) ->
T1 = erlang:monotonic_time(),
Val = apply(M, F, A),
T2 = erlang:monotonic_time(),
Time = cvrTimeUnit(T2 - T1, native, nanosecond),
{Time, Val}.
-spec cvrTimeUnit(Time :: integer(), FromUnit :: erlang:time_unit(), ToUnit :: erlang:time_unit()) -> ConvertedTime :: integer().
cvrTimeUnit(Time, FromUnit, ToUnit) ->
try
FU =
case FromUnit of
native -> erts_internal:time_unit();
perf_counter -> erts_internal:perf_counter_unit();
nanosecond -> 1000 * 1000 * 1000;
microsecond -> 1000 * 1000;
millisecond -> 1000;
second -> 1
end,
TU =
case ToUnit of
native -> erts_internal:time_unit();
perf_counter -> erts_internal:perf_counter_unit();
nanosecond -> 1000 * 1000 * 1000;
microsecond -> 1000 * 1000;
millisecond -> 1000;
second -> 1
end,
case Time < 0 of
true -> (TU * Time - (FU - 1)) div FU;
_ -> TU * Time div FU
end
catch
_ : _ ->
erlang:error(badarg, [Time, FromUnit, ToUnit])
end.
%% 单进程循环测试:LoopTimes是循环次数
%% utTc:ts(LoopTimes, Module, Function, ArgsList).
%% 多进程并发测试:SpawnProcessesCount是并发的进程数 LoopTimes是循环次数
%% utTc:tm(ProcessesCount, LoopTimes, Module, Function, ArgsList).
doTc(M, F, A) ->
T1 = erlang:monotonic_time(),
apply(M, F, A),
T2 = erlang:monotonic_time(),
cvrTimeUnit(T2 - T1, native, nanosecond).
distribution(List, Aver) ->
distribution(List, Aver, 0, 0).
distribution([H | T], Aver, Greater, Less) ->
case H > Aver of
true ->
distribution(T, Aver, Greater + 1, Less);
false ->
distribution(T, Aver, Greater, Less + 1)
end;
distribution([], _Aver, Greater, Less) ->
{Greater, Less}.
%% ===================================================================
%% test: one process test N times
%% ===================================================================
ts(LoopTime, M, F, A) ->
{Max, Min, Sum, Aver, Greater, Less} = loopTs(LoopTime, M, F, A, LoopTime, 0, 0, 0, []),
io:format("=====================~n"),
io:format("execute Args:~p~n", [A]),
io:format("execute Fun :~p~n", [F]),
io:format("execute Mod :~p~n", [M]),
io:format("execute LoopTime:~p~n", [LoopTime]),
io:format("MaxTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Max), float_to_binary(Max / 1000000000, [{decimals, 6}, compact])]),
io:format("MinTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Min), float_to_binary(Min / 1000000000, [{decimals, 6}, compact])]),
io:format("SumTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Sum), float_to_binary(Sum / 1000000000, [{decimals, 6}, compact])]),
io:format("AvgTime: ~10s(ns) ~10s(s)~n", [float_to_binary(Aver, [{decimals, 6}, compact]), float_to_binary(Aver / 1000000000, [{decimals, 6}, compact])]),
io:format("Grar : ~10s(cn) ~10s(~s)~n", [integer_to_binary(Greater), float_to_binary(Greater / LoopTime, [{decimals, 2}]), <<"%">>]),
io:format("Less : ~10s(cn) ~10s(~s)~n", [integer_to_binary(Less), float_to_binary(Less / LoopTime, [{decimals, 2}]), <<"%">>]),
io:format("=====================~n").
loopTs(0, _M, _F, _A, LoopTime, Max, Min, Sum, List) ->
Aver = Sum / LoopTime,
{Greater, Less} = distribution(List, Aver),
{Max, Min, Sum, Aver, Greater, Less};
loopTs(Index, M, F, A, LoopTime, Max, Min, Sum, List) ->
Nanosecond = doTc(M, F, A),
NewSum = Sum + Nanosecond,
if
Max == 0 ->
NewMax = NewMin = Nanosecond;
Max < Nanosecond ->
NewMax = Nanosecond,
NewMin = Min;
Min > Nanosecond ->
NewMax = Max,
NewMin = Nanosecond;
true ->
NewMax = Max,
NewMin = Min
end,
loopTs(Index - 1, M, F, A, LoopTime, NewMax, NewMin, NewSum, [Nanosecond | List]).
%% ===================================================================
%% Concurrency test: N processes each test one time
%% ===================================================================
tm(ProcCnt, LoopTime, M, F, A) ->
loopSpawn(ProcCnt, M, F, A, self(), LoopTime),
{Max, Min, Sum, Aver, Greater, Less} = collector(ProcCnt, 0, 0, 0, ProcCnt, []),
io:format("=====================~n"),
io:format("execute Args:~p~n", [A]),
io:format("execute Fun :~p~n", [F]),
io:format("execute Mod :~p~n", [M]),
io:format("execute LoopTime:~p~n", [LoopTime]),
io:format("execute ProcCnts:~p~n", [ProcCnt]),
io:format("MaxTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Max), float_to_binary(Max / 1000000000, [{decimals, 6}, compact])]),
io:format("MinTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Min), float_to_binary(Min / 1000000000, [{decimals, 6}, compact])]),
io:format("SumTime: ~10s(ns) ~10s(s)~n", [integer_to_binary(Sum), float_to_binary(Sum / 1000000000, [{decimals, 6}, compact])]),
io:format("AvgTime: ~10s(ns) ~10s(s)~n", [float_to_binary(Aver, [{decimals, 6}, compact]), float_to_binary(Aver / 1000000000, [{decimals, 6}, compact])]),
io:format("Grar : ~10s(cn) ~10s(~s)~n", [integer_to_binary(Greater), float_to_binary(Greater / LoopTime, [{decimals, 2}]), <<"%">>]),
io:format("Less : ~10s(cn) ~10s(~s)~n", [integer_to_binary(Less), float_to_binary(Less / LoopTime, [{decimals, 2}]), <<"%">>]),
io:format("=====================~n").
loopSpawn(0, _, _, _, _, _) ->
ok;
loopSpawn(ProcCnt, M, F, A, CollectorPid, LoopTime) ->
spawn_link(fun() -> worker(LoopTime, M, F, A, CollectorPid) end),
loopSpawn(ProcCnt - 1, M, F, A, CollectorPid, LoopTime).
collector(0, Max, Min, Sum, ProcCnt, List) ->
Aver = Sum / ProcCnt,
{Greater, Less} = distribution(List, Aver),
{Max, Min, Sum, Aver, Greater, Less};
collector(Index, Max, Min, Sum, ProcCnt, List) ->
receive
{result, Nanosecond} ->
NewSum = Sum + Nanosecond,
if
Max == 0 ->
NewMax = NewMin = Nanosecond;
Max < Nanosecond ->
NewMax = Nanosecond,
NewMin = Min;
Min > Nanosecond ->
NewMax = Max,
NewMin = Nanosecond;
true ->
NewMax = Max,
NewMin = Min
end,
collector(Index - 1, NewMax, NewMin, NewSum, ProcCnt, [Nanosecond | List])
after 1800000 ->
io:format("execute time out~n"),
ok
end.
worker(LoopTime, M, F, A, CollectorPid) ->
SumTime = loopTm(LoopTime, M, F, A, 0),
CollectorPid ! {result, SumTime}.
loopTm(0, _, _, _, SumTime) ->
SumTime;
loopTm(LoopTime, M, F, A, SumTime) ->
Microsecond = doTc(M, F, A),
loopTm(LoopTime - 1, M, F, A, SumTime + Microsecond).
test(N) ->
M1 = erlang:monotonic_time(),
timer:sleep(N),
M2 = erlang:monotonic_time(),
Time = cvrTimeUnit(M2 - M1, native, nanosecond),
io:format("IMY******************111 ~p~n", [Time]),
S1 = erlang:system_time(nanosecond),
timer:sleep(N),
S2 = erlang:system_time(nanosecond),
io:format("IMY******************222 ~p~n", [S2 - S1]). | src/tc/tpTc.erl | 0.589362 | 0.42185 | tpTc.erl | starcoder |
-module(teal_types).
-export([not_of_type/2,
not_record/1, assert_not_record/1, assert_not_record/2,
could_be_record/1, assert_could_be_record/1, assert_could_be_record/2
]).
%%%===================================================================
%%% API
%%%===================================================================
-spec not_of_type(Term :: atom(), Type :: atom()) -> atom().
not_of_type(Term, Type) ->
%% Check for special cases
case Type of
builtin ->
not_implemented;
record ->
not_implemented;
_ ->
FunName = list_to_atom("is_" ++ atom_to_list(Type)),
invert_boolean(apply(erlang, FunName, [Term]))
end.
-spec not_record(Term :: any()) -> boolean().
not_record(Term) ->
case is_tuple(Term) of
true ->
case is_atom(element(1, Term)) of
true ->
false;
false ->
true
end;
false ->
true
end.
-spec assert_not_record(Term :: any()) -> boolean().
assert_not_record(Term) ->
teal:assert(true, not_record(Term),
is_a_record).
-spec assert_not_record(Term :: any(), Msg :: any()) -> boolean().
assert_not_record(Term, Msg) ->
teal:assert(true, not_record(Term), Msg).
-spec could_be_record(Record :: any()) -> boolean().
could_be_record(Record) ->
% Check if term is a tuple with an atom as the first item
case is_tuple(Record) of
true ->
% Check if the first item is an atom
First = erlang:element(1, Record),
is_atom(First);
false ->
false
end.
-spec assert_could_be_record(Record :: any()) -> boolean().
assert_could_be_record(Record) ->
teal:assert(true, could_be_record(Record), not_record).
-spec assert_could_be_record(Record :: any(), Msg :: any()) -> boolean().
assert_could_be_record(Record, Msg) ->
teal:assert(true, could_be_record(Record), Msg).
%%%===================================================================
%%% Private functions
%%%===================================================================
-spec invert_boolean(Boolean :: boolean()) -> boolean().
invert_boolean(Boolean) ->
case Boolean of
true ->
false;
false ->
true
end. | src/teal_types.erl | 0.569972 | 0.462837 | teal_types.erl | starcoder |
%% @doc This module is used to access a sharded Redis cluster.
%%
%% An important thing to note about sharding is that if you change the ring,
%% keys will be hashed on different shards. This requires a data migration. Right now
%% it is recommend to do pre-sharding, which means, create a large number of shards up front,
%% and as your data set grows, move each small shard to its own dedicated box. This means that
%% you will not have to re-hash keys for a very long time.
%%
%% See this link for more information: http://antirez.com/post/redis-presharding.html
%%
%% `erldis_shard:start_link([{"redis01", [{{{"127.0.0.1", 6379}, 5}, master}, {{{"127.0.0.1", 6379}, 5}, slave}, {{{"127.0.0.1", 6380}, 5}, slave}]}, {"redis02", [{{{"127.0.0.1", 6379}, 5}, master}]}]),'
%%
%% `erldis:get(erldis_shard:client("mykey", master), "mykey").'
%%
%%
%% @type shard_spec() = [shard()]. A shard_spec() contains a list of shard().
%% @type shard() = {string(), [{pool_conn_spec(), atom()}]}. Information about a shard.
%%
%%
-module(erldis_shard).
-export([
start_link/1,
client/2,
get_slot/1,
get_ring/0
]).
%% This value specifies how many times one item will appear on the ring.
-define(DEFAULT_NUM_REPLICAS, 128).
%% @doc Initializes a Redis sharded cluster with the given ShardList.
%%
%% ShardSpec contains a mapping of slot -> hosts:
%%
%% `[
%% {"redis01", [
%% {{{"127.0.0.1", 6379}, 5}, master},
%% {{{"127.0.0.1", 6380}, 5}, slave}
%% ]},
%% {"redis02", [
%% {{{"127.0.0.1", 6380}, 1}, master}
%% }
%% ...
%% ]'
%%
%% @spec start_link(shard_spec()) -> {ok, pid()}
%%
start_link(ShardSpec) ->
catch ets:new(?MODULE, [public, named_table, bag]),
ets:delete_all_objects(?MODULE),
% Create a ring that contains all of the slots
NumReplicas = case application:get_env(erldis, hash_num_replicas) of
{ok, Val} -> Val;
_ -> ?DEFAULT_NUM_REPLICAS
end,
SlotNames = lists:map(fun({Name, _}) -> Name end, ShardSpec),
Ring = hash_ring:create_ring(SlotNames, NumReplicas),
ets:insert(?MODULE, {ring, Ring}),
% Store the (slot, type) -> hosts mapping in ETS
lists:foreach(fun({SlotName, Hosts}) ->
lists:foreach(fun({{HostInfo, _PoolSize}, Type}) ->
ets:insert(?MODULE, {{SlotName, Type}, HostInfo})
end, Hosts)
end, ShardSpec),
% Extract all the hosts from the ShardList
ConnList = lists:flatten(lists:map(fun({_SlotName, Hosts}) ->
lists:map(fun({ConnSpec, _}) -> ConnSpec end, Hosts)
end, ShardSpec)),
% Start the pool supervisor to manage all the connections
{ok, Pid} = erldis_pool_sup:start_link(ConnList),
{ok, Pid}.
%%
%% @doc Returns the current Ring
%%
get_ring() ->
[{ring, Ring}] = ets:lookup(?MODULE, ring),
Ring.
%%
%% @doc Returns the slot in the ring for the given Key.
%%
%% @spec get_slot(string() | binary()) -> string()
get_slot(Key) ->
Ring = get_ring(),
% Find the slot that maps to the Key
hash_ring:get_item(Key, Ring).
%%
%% @doc Returns a client for the shard that contains Key for the given Type.
%%
%% Type is specified in ShardSpec when creating your shards.
%% @see shard_spec()
%%
%% @spec client(any(), atom()) -> undefined | pid()
client(Key, Type) ->
Slot = get_slot(Key),
% Find a list of hosts for the given slot and type
Hosts = lists:map(fun({_K, V}) -> V end, ets:lookup(?MODULE, {Slot, Type})),
case Hosts of
[] -> undefined;
_ ->
% Get a random host of the given type and get a client from the pool
RandomHost = lists:nth(erlang:phash(os:timestamp(), length(Hosts)), Hosts),
erldis_pool_sup:get_random_pid(RandomHost)
end. | src/erldis_shard.erl | 0.513425 | 0.567457 | erldis_shard.erl | starcoder |
% Copyright 2008 Konrad-Zuse-Zentrum für Informationstechnik Berlin
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%%%-------------------------------------------------------------------
%%% File : util_SUITE.erl
%%% Author : <NAME> <<EMAIL>>
%%% Description : Unit tests for src/util.erl
%%%
%%% Created : 22 Feb 2008 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(util_SUITE).
-author('<EMAIL>').
-vsn('$Id$ ').
-compile(export_all).
-include("unittest.hrl").
all() ->
[is_between, is_between_closed, trunc, min_max].
suite() ->
[
{timetrap, {seconds, 20}}
].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
is_between(_Config) ->
?assert(util:is_between("1", "2", "3")),
?assert(not util:is_between("1", "4", "3")),
?assert(util:is_between("3", "4", "1")),
?assert(not util:is_between("3", "2", "1")),
?assert(util:is_between("1", "2", "2")),
?assert(not util:is_between("1", "1", "2")),
?assert(util:is_between("2", "1", "1")),
?assert(not util:is_between("2", "2", "1")),
ok.
is_between_closed(_Config) ->
?assert(util:is_between_closed("1", "2", "3")),
?assert(not util:is_between_closed("1", "4", "3")),
?assert(util:is_between_closed("3", "4", "1")),
?assert(not util:is_between_closed("3", "2", "1")),
?assert(not util:is_between_closed("1", "2", "2")),
?assert(not util:is_between_closed("1", "1", "2")),
?assert(not util:is_between_closed("2", "1", "1")),
?assert(not util:is_between_closed("2", "2", "1")),
ok.
trunc(_Config) ->
?assert(util:trunc([1, 2, 3], 1) == [1]),
?assert(util:trunc([1, 2, 3], 2) == [1, 2]),
?assert(util:trunc([1, 2, 3], 3) == [1, 2, 3]),
?assert(util:trunc([1, 2, 3], 4) == [1, 2, 3]),
ok.
min_max(_Config) ->
?assert(util:min(1, 2) == 1),
?assert(util:min(2, 1) == 1),
?assert(util:min(1, 1) == 1),
?assert(util:max(1, 2) == 2),
?assert(util:max(2, 1) == 2),
?assert(util:max(1, 1) == 1),
ok. | test/util_SUITE.erl | 0.675122 | 0.492188 | util_SUITE.erl | starcoder |
%% Copyright (c) 2019-2021, <NAME> <<EMAIL>>. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(uef_num).
-export([round_price/1, round_number/2]).
-export([popcount/1, msb_pos/1, lsb_pos/1, ctz/1]).
%%%------------------------------------------------------------------------------
%%% API
%%%------------------------------------------------------------------------------
%% round_price/1
-spec round_price(Number :: number()) -> float().
%% @doc
%% Rounds the number to the precision of 2. The same as uef_num:round_number(Number, 2).
%% @end
round_price(Price) -> round_number(Price, 2).
%% round_number/2
-spec round_number(Number :: number(), Precision :: integer()) -> float().
%% @doc
%% Rounds the number to the specified precision.
%% @end
round_number(Number, Precision) ->
P = math:pow(10, Precision),
erlang:round(Number * P) / P.
%% popcount/1
-spec popcount(Integer:: non_neg_integer()) -> OneBits :: non_neg_integer().
%% @doc
%% Returns the number of 1's (ones or one-bits) in the binary representation of a non-negative integer.
%% Also known as population count, pop count, popcount, sideways sum, bit summation,
%% or Hamming weight.
%% The call fails with a {badarg,Integer} exception if Integer is not a non-negative integer.
%% @end
popcount(N) when is_integer(N) andalso (N > -1) ->
popcount(N, 0);
popcount(N) ->
erlang:error({badarg, N}, [N]).
%% lsb_pos/1
-spec lsb_pos(Integer:: pos_integer()) -> Pos :: pos_integer().
%% @doc
%% Returns the position of the least significant bit in the binary representation of a positive integer.
%% The call fails with a {badarg,Integer} exception if Integer is not a positive integer.
%% @end
lsb_pos(N) when is_integer(N) andalso (N > 0) ->
lsb_pos(N, 1);
lsb_pos(N) ->
erlang:error({badarg, N}, [N]).
%% msb_pos/1
-spec msb_pos(Integer:: pos_integer()) -> Pos :: pos_integer().
%% @doc
%% Returns the position of the most significant bit in the binary representation of a positive integer.
%% The call fails with a {badarg,Integer} exception if Integer is not a positive integer.
%% @end
msb_pos(N) when is_integer(N) andalso (N > 0) ->
msb_pos(N, 0);
msb_pos(N) ->
erlang:error({badarg, N}, [N]).
%% ctz/1
-spec ctz(Integer:: pos_integer()) -> TrailingZeros :: non_neg_integer().
%% @doc
%% Counts trailing zeros in the binary representation of a positive integer.
%% Returns the number of zero bits following the least significant one bit.
%% The call fails with a {badarg,Integer} exception if Integer is not a positive integer.
%% @end
ctz(N) ->
lsb_pos(N) - 1.
%%%------------------------------------------------------------------------------
%%% Internal functions
%%%------------------------------------------------------------------------------
%% popcount/2
-spec popcount(non_neg_integer(), non_neg_integer()) -> non_neg_integer().
popcount(0, Cnt) -> Cnt;
popcount(N, Cnt) -> popcount(N band (N - 1), Cnt + 1).
%% lsb_pos/2
-spec lsb_pos(pos_integer(), pos_integer()) -> pos_integer().
lsb_pos(N, Cnt) when ((N band 1) =:= 1) -> Cnt;
lsb_pos(N, Cnt) -> lsb_pos(N bsr 1, Cnt + 1).
%% msb_pos/2
-spec msb_pos(non_neg_integer(), non_neg_integer()) -> pos_integer().
msb_pos(0, Cnt) -> Cnt;
msb_pos(N, Cnt) -> msb_pos(N bsr 1, Cnt + 1). | src/uef_num.erl | 0.766862 | 0.602822 | uef_num.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% xqerl - XQuery processor
%%
%% Copyright (c) 2019-2020 <NAME> All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Implementation of the "http://expath.org/ns/binary" namespace.
-module(xqerl_mod_expath_binary).
-include("xqerl.hrl").
-define(NS, <<"http://expath.org/ns/binary">>).
-define(PX, <<"bin">>).
%% 5 Defining 'constants' and conversions
-export([
hex/2,
bin/2,
octal/2,
to_octets/2,
from_octets/2
]).
%% 6 Basic operations
-export([
length/2,
part/3, part/4,
join/2,
insert_before/4,
pad_left/3, pad_left/4,
pad_right/3, pad_right/4,
find/4
]).
%% 7 Text decoding and encoding
-export([
decode_string/2, decode_string/3, decode_string/4, decode_string/5,
encode_string/2, encode_string/3
]).
%% 8 Packing and unpacking of encoded numeric values
-export([
pack_double/2, pack_double/3,
pack_float/2, pack_float/3,
pack_integer/3, pack_integer/4,
unpack_double/3, unpack_double/4,
unpack_float/3, unpack_float/4,
unpack_integer/4, unpack_integer/5,
unpack_unsigned_integer/4, unpack_unsigned_integer/5
]).
%% 9 Bitwise operations
-export([
or_/3,
xor_/3,
and_/3,
not_/2,
shift/3
]).
-'module-namespace'({?NS, ?PX}).
-namespaces([]).
-variables([]).
-functions([
%% 5 Defining 'constants' and conversions
{{qname, ?NS, ?PX, <<"hex">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {hex, 2}, 1, [
{seqType, 'xs:string', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"bin">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {bin, 2}, 1, [
{seqType, 'xs:string', zero_or_one}
]},
{
{qname, ?NS, ?PX, <<"octal">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{octal, 2},
1,
[
{seqType, 'xs:string', zero_or_one}
]
},
{
{qname, ?NS, ?PX, <<"to-octets">>},
{seqType, 'xs:integer', zero_or_many},
[],
{to_octets, 2},
1,
[{seqType, 'xs:base64Binary', one}]
},
{
{qname, ?NS, ?PX, <<"from-octets">>},
{seqType, 'xs:base64Binary', one},
[],
{from_octets, 2},
1,
[{seqType, 'xs:integer', zero_or_many}]
},
%% 6 Basic operations
{{qname, ?NS, ?PX, <<"length">>}, {seqType, 'xs:integer', one}, [], {length, 2}, 1, [
{seqType, 'xs:base64Binary', one}
]},
{{qname, ?NS, ?PX, <<"part">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {part, 3}, 2, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one}
]},
{{qname, ?NS, ?PX, <<"part">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {part, 4}, 3, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]},
{{qname, ?NS, ?PX, <<"join">>}, {seqType, 'xs:base64Binary', one}, [], {join, 2}, 1, [
{seqType, 'xs:base64Binary', zero_or_many}
]},
{
{qname, ?NS, ?PX, <<"insert-before">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{insert_before, 4},
3,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one},
{seqType, 'xs:base64Binary', zero_or_one}
]
},
{
{qname, ?NS, ?PX, <<"pad-left">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{pad_left, 3},
2,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"pad-left">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{pad_left, 4},
3,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"pad-right">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{pad_right, 3},
2,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"pad-right">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{pad_right, 4},
3,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{{qname, ?NS, ?PX, <<"find">>}, {seqType, 'xs:integer', zero_or_one}, [], {find, 4}, 3, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one},
{seqType, 'xs:base64Binary', one}
]},
%% 7 Text decoding and encoding
{
{qname, ?NS, ?PX, <<"decode-string">>},
{seqType, 'xs:string', zero_or_one},
[],
{decode_string, 2},
1,
[{seqType, 'xs:base64Binary', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"decode-string">>},
{seqType, 'xs:string', zero_or_one},
[],
{decode_string, 3},
2,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:string', one}
]
},
{
{qname, ?NS, ?PX, <<"decode-string">>},
{seqType, 'xs:string', zero_or_one},
[],
{decode_string, 4},
3,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:string', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"decode-string">>},
{seqType, 'xs:string', zero_or_one},
[],
{decode_string, 5},
4,
[
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:string', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"encode-string">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{encode_string, 2},
1,
[{seqType, 'xs:string', zero_or_one}]
},
{
{qname, ?NS, ?PX, <<"encode-string">>},
{seqType, 'xs:base64Binary', zero_or_one},
[],
{encode_string, 3},
2,
[
{seqType, 'xs:string', zero_or_one},
{seqType, 'xs:string', one}
]
},
%% 8 Packing and unpacking of encoded numeric values
{
{qname, ?NS, ?PX, <<"pack-double">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_double, 2},
1,
[{seqType, 'xs:double', one}]
},
{
{qname, ?NS, ?PX, <<"pack-double">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_double, 3},
2,
[
{seqType, 'xs:double', one},
{seqType, 'xs:string', one}
]
},
{
{qname, ?NS, ?PX, <<"pack-float">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_float, 2},
1,
[{seqType, 'xs:float', one}]
},
{
{qname, ?NS, ?PX, <<"pack-float">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_float, 3},
2,
[
{seqType, 'xs:float', one},
{seqType, 'xs:string', one}
]
},
{
{qname, ?NS, ?PX, <<"pack-integer">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_integer, 3},
2,
[
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"pack-integer">>},
{seqType, 'xs:base64Binary', one},
[],
{pack_integer, 4},
3,
[
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:string', one}
]
},
{
{qname, ?NS, ?PX, <<"unpack-double">>},
{seqType, 'xs:double', zero_or_one},
[],
{unpack_double, 3},
2,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"unpack-double">>},
{seqType, 'xs:double', zero_or_one},
[],
{unpack_double, 4},
3,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:string', one}
]
},
{{qname, ?NS, ?PX, <<"unpack-float">>}, {seqType, 'xs:float', one}, [], {unpack_float, 3}, 2, [
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one}
]},
{{qname, ?NS, ?PX, <<"unpack-float">>}, {seqType, 'xs:float', one}, [], {unpack_float, 4}, 3, [
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:string', one}
]},
{
{qname, ?NS, ?PX, <<"unpack-integer">>},
{seqType, 'xs:integer', one},
[],
{unpack_integer, 4},
3,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"unpack-integer">>},
{seqType, 'xs:integer', one},
[],
{unpack_integer, 5},
4,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:string', one}
]
},
{
{qname, ?NS, ?PX, <<"unpack-unsigned-integer">>},
{seqType, 'xs:integer', one},
[],
{unpack_unsigned_integer, 4},
3,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one}
]
},
{
{qname, ?NS, ?PX, <<"unpack-unsigned-integer">>},
{seqType, 'xs:integer', one},
[],
{unpack_unsigned_integer, 5},
4,
[
{seqType, 'xs:base64Binary', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:integer', one},
{seqType, 'xs:string', one}
]
},
%% 9 Bitwise operations
{{qname, ?NS, ?PX, <<"or">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {or_, 3}, 2, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:base64Binary', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"xor">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {xor_, 3}, 2, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:base64Binary', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"and">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {and_, 3}, 2, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:base64Binary', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"not">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {not_, 2}, 1, [
{seqType, 'xs:base64Binary', zero_or_one}
]},
{{qname, ?NS, ?PX, <<"shift">>}, {seqType, 'xs:base64Binary', zero_or_one}, [], {shift, 3}, 2, [
{seqType, 'xs:base64Binary', zero_or_one},
{seqType, 'xs:integer', one}
]}
]).
-define(BIN(D), #xqAtomicValue{type = 'xs:base64Binary', value = D}).
-define(FLT(D), #xqAtomicValue{type = 'xs:float', value = D}).
%% 5 Defining 'constants' and conversions
%% Users of the package may need to define binary 'constants' within their code
%% or examine the basic octets. The following functions support these:
%%
%% 5.1 bin:hex
%% Summary
%% Returns the binary form of the set of octets written as a sequence of
%% (ASCII) hex digits ([0-9A-Fa-f]).
%% Signature
%% bin:hex($in as xs:string?) as xs:base64Binary?
%% Rules
%% $in will be effectively zero-padded from the left to generate an integral
%% number of octets, i.e. an even number of hexadecimal digits. If $in is an
%% empty string, then the result will be a xs:base64Binary with no embedded
%% data.
%% Byte order in the result follows (per-octet) character order in the string.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:non-numeric-character] is raised if $in cannot be parsed as a
%% hexadecimal number.
%% Notes
%% When the input string has an even number of characters, this function
%% behaves similarly to the double cast xs:base64Binary(xs:hexBinary($string)).
%% Examples
%% bin:hex('11223F4E') => "ESI/Tg=="
%% bin:hex('1223F4E') => "ASI/Tg=="
hex(_, []) ->
[];
hex(_, String) when is_binary(String) ->
Pad = binary:copy(<<"0">>, byte_size(String) rem 2),
String1 = <<Pad/binary, String/binary>>,
try
<<
<<(list_to_integer([B1], 16)):4>>
|| <<B1>> <= String1
>>
of
Bin ->
?BIN(Bin)
catch
_:_ ->
do_throw('non-numeric-character')
end;
hex(C, S) ->
hex(C, xqerl_types:cast_as(S, 'xs:string')).
%% 5.2 bin:bin
%% Summary
%% Returns the binary form of the set of octets written as a sequence of
%% (8-wise) (ASCII) binary digits ([01]).
%% Signature
%% bin:bin($in as xs:string?) as xs:base64Binary?
%% Rules
%% $in will be effectively zero-padded from the left to generate an integral
%% number of octets. If $in is an empty string, then the result will be a
%% xs:base64Binary with no embedded data.
%% Byte order in the result follows (per-octet) character order in the string.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:non-numeric-character] is raised if $in cannot be parsed as a binary
%% number.
%% Examples
%% bin:bin('1101000111010101') => "0dU="
%% bin:bin('1000111010101') => "EdU="
bin(_, []) ->
[];
bin(_, String) when is_binary(String) ->
Rem =
case byte_size(String) rem 8 of
0 -> 0;
V -> 8 - V
end,
Pad = binary:copy(<<"0">>, Rem),
String1 = <<Pad/binary, String/binary>>,
try
<<
<<(list_to_integer([B1, B2, B3, B4, B5, B6, B7, B8], 2)):8>>
|| %<< <<(list_to_integer([B1,B2,B3,B4,B5,B6,B7,B8], 2))>>
<<B1, B2, B3, B4, B5, B6, B7, B8>> <= String1
>>
of
Bin ->
?BIN(Bin)
catch
_:_ ->
do_throw('non-numeric-character')
end;
bin(C, S) ->
bin(C, xqerl_types:cast_as(S, 'xs:string')).
%% 5.3 bin:octal
%% Summary
%% Returns the binary form of the set of octets written as a sequence of
%% (ASCII) octal digits ([0-7]).
%% Signature
%% bin:octal($in as xs:string?) as xs:base64Binary?
%% Rules
%% $in will be effectively zero-padded from the left to generate an integral
%% number of octets. If $in is an empty string, then the result will be a
%% xs:base64Binary with no embedded data.
%% Byte order in the result follows (per-octet) character order in the string.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:non-numeric-character] is raised if $in cannot be parsed as an octal
%% number.
%% Examples
%% bin:octal('11223047') => "JSYn"
octal(_, []) ->
[];
octal(_, String) when is_binary(String) ->
Bitstring = <<(octal_bits(C)) || <<C>> <= String>>,
case bit_size(Bitstring) rem 8 of
0 ->
?BIN(Bitstring);
P ->
Pad = 8 - P,
?BIN(<<0:Pad, Bitstring/bitstring>>)
end;
octal(C, S) ->
octal(C, xqerl_types:cast_as(S, 'xs:string')).
octal_bits(C) when C >= 48, C =< 55 ->
D = C - $0,
<<D:3>>;
octal_bits(_) ->
do_throw('non-numeric-character').
%% 5.4 bin:to-octets
%% Summary
%% Returns binary data as a sequence of octets.
%% Signature
%% bin:to-octets($in as xs:base64Binary) as xs:integer*
%% Rules
%% If $in is a zero length binary data then the empty sequence is returned.
%% Octets are returned as integers from 0 to 255.
to_octets(_, ?BIN(Str)) ->
[C || <<C>> <= Str];
to_octets(C, S) ->
to_octets(C, xqerl_types:cast_as(S, 'xs:base64Binary')).
%% 5.5 bin:from-octets
%% Summary
%% Converts a sequence of octets into binary data.
%% Signature
%% bin:from-octets($in as xs:integer*) as xs:base64Binary
%% Rules
%% Octets are integers from 0 to 255.
%% If the value of $in is the empty sequence, the function returns
%% zero-sized binary data.
%% Error Conditions
%% [bin:octet-out-of-range] is raised if one of the octets lies outside
%% the range 0 – 255.
from_octets(_, []) ->
?BIN(<<>>);
from_octets(_, List) when is_list(List) ->
Check = fun
(I) when is_integer(I), I >= 0, I =< 255 ->
I;
(I) when is_integer(I) ->
do_throw('octet-out-of-range');
(O) ->
case xqerl_types:cast_as(O, 'xs:integer') of
I when is_integer(I), I >= 0, I =< 255 ->
I;
_ ->
do_throw('octet-out-of-range')
end
end,
List1 = lists:map(Check, List),
?BIN(list_to_binary(List1));
from_octets(C, S) ->
from_octets(C, [S]).
%% 6 Basic operations
%% 6.1 bin:length
%% Summary
%% The bin:length function returns the size of binary data in octets.
%% Signature
%% bin:length($in as xs:base64Binary) as xs:integer
%% Rules
%% Returns the size of binary data in octets.
length(_, ?BIN(Str)) ->
erlang:byte_size(Str);
length(C, S) ->
length(C, xqerl_types:cast_as(S, 'xs:base64Binary')).
%% 6.2 bin:part
%% Summary
%% The bin:part function returns a specified part of binary data.
%% Signatures
%% bin:part($in as xs:base64Binary?,
%% $offset as xs:integer) as xs:base64Binary?
%% bin:part($in as xs:base64Binary?,
%% $offset as xs:integer,
%% $size as xs:integer) as xs:base64Binary?
%% Rules
%% Returns a section of binary data starting at the $offset octet. If $size
%% is defined, the size of the returned binary data is $size octets. If
%% $size is absent, all remaining data from $offset is returned.
%% The $offset is zero based.
%% The values of $offset and $size must be non-negative integers.
%% It is a dynamic error if $offset + $size is larger than the size of the
%% binary data in $in.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset +
%% $size is larger than the size of the binary data of $in.
%% [bin:negative-size] is raised if $size is negative.
%% Notes
%% Note that fn:subsequence() and fn:substring() both use xs:double for
%% offset and size – this is a legacy from XPath 1.0.
%% Examples
%% Testing whether $data variable starts with binary content consistent
%% with a PDF file:
%% bin:part($data, 0, 4) eq bin:hex("25504446")
%% 25504446 is the magic number for PDF files: it is the US-ASCII encoded
%% hexadecimal value for %PDF. 7.2 bin:encode-string can be used to convert
%% a string to its binary representation.
part(_, [], _) ->
[];
part(_, ?BIN(Bin), Off) when is_integer(Off), Off >= 0 ->
case Bin of
<<_:Off/binary, Part/binary>> ->
?BIN(Part);
<<_:Off/binary>> ->
?BIN(<<>>);
_ ->
do_throw('index-out-of-range')
end;
part(_, ?BIN(_), Off) when is_integer(Off) ->
do_throw('index-out-of-range');
part(C, S, I) ->
part(
C,
xqerl_types:cast_as(S, 'xs:base64Binary'),
xqerl_types:cast_as(I, 'xs:integer')
).
part(_, ?BIN(_), _, Size) when is_integer(Size), Size < 0 ->
do_throw('negative-size');
part(_, ?BIN(_), Off, _) when is_integer(Off), Off < 0 ->
do_throw('index-out-of-range');
part(_, ?BIN(_), _, 0) ->
?BIN(<<>>);
part(_, [], _, _) ->
[];
part(_, ?BIN(Bin), Off, Size) when is_integer(Off), is_integer(Size) ->
case Bin of
<<_:Off/binary, Part:Size/binary, _/binary>> ->
?BIN(Part);
<<_:Off/binary, Part:Size/binary>> ->
?BIN(Part);
_ ->
do_throw('index-out-of-range')
end;
part(C, B, O, S) ->
part(
C,
xqerl_types:cast_as(B, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:integer')
).
%% 6.3 bin:join
%% Summary
%% Returns the binary data created by concatenating the binary data items in
%% a sequence.
%% Signature
%% bin:join($in as xs:base64Binary*) as xs:base64Binary
%% Rules
%% The function returns an xs:base64Binary created by concatenating
%% the items in the sequence $in, in order.
%% If the value of $in is the empty sequence, the function returns a binary
%% item containing no data bytes.
join(_, []) ->
?BIN(<<>>);
join(_, List) when is_list(List) ->
F = fun
(?BIN(I)) ->
I;
(O) ->
?BIN(I) = xqerl_types:cast_as(O, 'xs:base64Binary'),
I
end,
?BIN(iolist_to_binary(lists:map(F, List)));
join(C, L) ->
join(C, [L]).
%% 6.4 bin:insert-before
%% Summary
%% The bin:insert-before function inserts additional binary data at a given
%% point in other binary data.
%% Signature
%% bin:insert-before($in as xs:base64Binary?,
%% $offset as xs:integer,
%% $extra as xs:base64Binary?) as xs:base64Binary?
%% Rules
%% Returns binary data consisting sequentially of the data from $in upto
%% and including the $offset - 1 octet, followed by all the data from
%% $extra, and then the remaining data from $in.
%% The $offset is zero based.
%% The value of $offset must be a non-negative integer.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% If the value of $extra is the empty sequence, the function returns $in.
%% If $offset eq 0 the result is the binary concatenation of $extra and $in,
%% i.e. equivalent to bin:join(($extra,$in)).
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset is
%% larger than the size of the binary data of $in.
%% Notes
%% Note that when $offset gt 0 and $offset lt bin:size($in) the function is
%% equivalent to:
%% bin:join((bin:part($in,0,$offset - 1),$extra,bin:part($in,$offset)))
insert_before(_, [], _, _) ->
[];
insert_before(_, _, Off, _) when is_integer(Off), Off < 0 ->
do_throw('index-out-of-range');
insert_before(_, ?BIN(I), Off, _) when is_integer(Off), Off > byte_size(I) ->
do_throw('index-out-of-range');
insert_before(_, ?BIN(_) = In, _, []) ->
In;
insert_before(_, ?BIN(I), Off, ?BIN(E)) when Off == byte_size(I) ->
?BIN(<<I/binary, E/binary>>);
insert_before(_, ?BIN(I), 0, ?BIN(E)) ->
?BIN(<<E/binary, I/binary>>);
insert_before(_, ?BIN(I), O, ?BIN(E)) when is_integer(O) ->
<<P:O/binary, Rest/binary>> = I,
?BIN(<<P/binary, E/binary, Rest/binary>>);
insert_before(C, I, O, E) ->
insert_before(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(E, 'xs:base64Binary')
).
%% 6.5 bin:pad-left
%% Summary
%% Returns the binary data created by padding $in with $size octets from the
%% left. The padding octet values are $octet or zero if omitted.
%% Signatures
%% bin:pad-left($in as xs:base64Binary?,
%% $size as xs:integer) as xs:base64Binary?
%% bin:pad-left($in as xs:base64Binary?,
%% $size as xs:integer,
%% $octet as xs:integer) as xs:base64Binary?
%% Rules
%% The function returns an xs:base64Binary created by padding the input
%% with $size octets in front of the input. If $octet is specified, the
%% padding octets each have that value, otherwise they are initialized to 0.
%% $size must be a non-negative integer.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:negative-size] is raised if $size is negative.
%% [bin:octet-out-of-range] is raised if $octet lies outside the range 0–255.
%% Notes
%% Padding with a non-zero octet value can also be accomplished by the
%% XPath expressions:
%% bin:join((bin:from-octets((1 to $pad-length) ! $pad-octet), $in)) [XPath 3.0]
%% bin:join((bin:from-octets(for $ i in (1 to $pad-length) return $pad-octet), $in)) [XPath 2.0]
pad_left(C, I, S) ->
pad_left(C, I, S, 0).
pad_left(_, [], _, _) ->
[];
pad_left(_, _, S, _) when is_integer(S), S < 0 ->
do_throw('negative-size');
pad_left(_, _, _, O) when is_integer(O) andalso O < 0; is_integer(O) andalso O > 255 ->
do_throw('octet-out-of-range');
pad_left(_, ?BIN(I), S, O) when is_integer(S), is_integer(O) ->
Pad = binary:copy(<<O>>, S),
?BIN(<<Pad/binary, I/binary>>);
pad_left(C, I, S, O) ->
pad_left(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(S, 'xs:integer'),
xqerl_types:cast_as(O, 'xs:integer')
).
%% 6.6 bin:pad-right
%% Summary
%% Returns the binary data created by padding $in with $size blank octets
%% from the right. The padding octet values are $octet or zero if omitted.
%% Signatures
%% bin:pad-right($in as xs:base64Binary?,
%% $size as xs:integer) as xs:base64Binary?
%% bin:pad-right($in as xs:base64Binary?,
%% $size as xs:integer,
%% $octet as xs:integer) as xs:base64Binary?
%% Rules
%% The function returns an xs:base64Binary created by padding the input
%% with $size blank octets after the input. If $octet is specified, the
%% padding octets each have that value, otherwise they are initialized to 0.
%% $size must be a non-negative integer.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:negative-size] is raised if $size is negative.
%% [bin:octet-out-of-range] is raised if $octet lies outside the range 0–255.
%% Notes
%% Padding with a non-zero octet value can also be accomplished by the
%% XPath expressions:
%% bin:join(($in,bin:from-octets((1 to $pad-length) ! $pad-octet))) [XPath 3.0]
%% bin:join(($in,bin:from-octets(for $ i in (1 to $pad-length) return $pad-octet))) [XPath 2.0]
pad_right(C, I, S) ->
pad_right(C, I, S, 0).
pad_right(_, [], _, _) ->
[];
pad_right(_, _, S, _) when is_integer(S), S < 0 ->
do_throw('negative-size');
pad_right(_, _, _, O) when is_integer(O) andalso O < 0; is_integer(O) andalso O > 255 ->
do_throw('octet-out-of-range');
pad_right(_, ?BIN(I), S, O) when is_integer(S), is_integer(O) ->
Pad = binary:copy(<<O>>, S),
?BIN(<<I/binary, Pad/binary>>);
pad_right(C, I, S, O) ->
pad_right(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(S, 'xs:integer'),
xqerl_types:cast_as(O, 'xs:integer')
).
%% 6.7 bin:find
%% Summary
%% Returns the first location in $in of $search, starting at the $offset
%% octet.
%% Signature
%% bin:find($in as xs:base64Binary?,
%% $offset as xs:integer,
%% $search as xs:base64Binary) as xs:integer?
%% Rules
%% The function returns the first location of the binary search sequence in
%% the input, or if not found, the empty sequence.
%% If $search is empty $offset is returned.
%% The value of $offset must be a non-negative integer.
%% The $offset is zero based.
%% The returned location is zero based.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset is
%% larger than the size of the binary data of $in.
find(_, [], _, _) ->
[];
find(_, ?BIN(I), O, ?BIN(<<>>)) when is_integer(O), O =< byte_size(I), O >= 0 ->
O;
find(_, ?BIN(I), O, ?BIN(S)) when is_integer(O), O =< byte_size(I), O >= 0 ->
Opts =
if
O == 0 -> [];
true -> [{scope, {O, byte_size(I) - O}}]
end,
case binary:match(I, S, Opts) of
nomatch ->
[];
{Pos, _} ->
Pos
end;
find(_, ?BIN(_), O, ?BIN(_)) when is_integer(O) ->
do_throw('index-out-of-range');
find(C, I, O, S) ->
find(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:base64Binary')
).
%% 7 Text decoding and encoding
%% 7.1 bin:decode-string
%% Summary
%% Decodes binary data as a string in a given encoding.
%% Signatures
%% bin:decode-string($in as xs:base64Binary?) as xs:string?
%% bin:decode-string($in as xs:base64Binary?,
%% $encoding as xs:string) as xs:string?
%% bin:decode-string($in as xs:base64Binary?,
%% $encoding as xs:string,
%% $offset as xs:integer) as xs:string?
%% bin:decode-string($in as xs:base64Binary?,
%% $encoding as xs:string,
%% $offset as xs:integer,
%% $size as xs:integer) as xs:string?
%% Rules
%% If $offset and $size are provided, the $size octets from $offset are
%% decoded. If $offset alone is provided, octets from $offset to the end
%% are decoded, otherwise the entire octet sequence is used.
%% The $encoding argument is the name of an encoding. The values for this
%% attribute follow the same rules as for the encoding attribute in an XML
%% declaration. The only values which every implementation is required to
%% recognize are utf-8 and utf-16.
%% If $encoding is ommitted, utf-8 encoding is assumed.
%% The values of $offset and $size must be non-negative integers.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% $offset is zero based.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset +
%% $size is larger than the size of the binary data of $in.
%% [bin:negative-size] is raised if $size is negative.
%% [bin:unknown-encoding] is raised if $encoding is invalid or not
%% supported by the implementation.
%% [bin:conversion-error] is raised if there is an error or malformed input
%% during decoding the string. Additional information about the error may
%% be passed through suitable error reporting mechanisms – this is
%% implementation-dependant.
%% Examples
%% Testing whether $data variable starts with binary content consistent with
%% a PDF file:
%% bin:decode-string($data, 'UTF-8', 0, 4) eq '%PDF'
%% The first four characters of a PDF file are '%PDF'.
decode_string(_, []) -> [];
decode_string(C, I) -> decode_string(C, I, <<"UTF-8">>).
decode_string(_, [], _) -> [];
decode_string(C, I, E) -> decode_string(C, I, E, 0).
decode_string(_, [], _, _) ->
[];
decode_string(C, ?BIN(B) = I, E, O) when is_integer(O) ->
decode_string(C, I, E, O, byte_size(B) - O);
decode_string(C, I, E, O) ->
decode_string(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
E,
xqerl_types:cast_as(O, 'xs:integer')
).
decode_string(_, [], _, _, _) ->
[];
decode_string(_, _, _, O, _) when is_integer(O), O < 0 ->
do_throw('index-out-of-range');
decode_string(_, ?BIN(I), _, O, S) when is_integer(O), is_integer(S), (O + S) > byte_size(I) ->
do_throw('index-out-of-range');
decode_string(_, _, _, _, S) when is_integer(S), S < 0 ->
do_throw('negative-size');
decode_string(_, ?BIN(I), E, O, S) when is_binary(E), is_integer(O), is_integer(S) ->
Enc = check_encoding(E),
<<_:O/binary, Part:S/binary, _/binary>> = I,
% strip BOM that could be hiding in the binary
{Enc1, Part1} =
case unicode:bom_to_encoding(Part) of
{_, 0} ->
{Enc, Part};
{{utf16, _} = BomEnc, Len} when Enc == utf16 ->
<<_:Len/binary, Bin1/binary>> = Part,
{BomEnc, Bin1};
{_, Len} ->
<<_:Len/binary, Bin1/binary>> = Part,
{Enc, Bin1}
end,
case unicode:characters_to_binary(Part1, Enc1, utf8) of
{error, _, _} ->
do_throw('conversion-error');
{incomplete, _, _} ->
do_throw('conversion-error');
Bin ->
Bin
end;
decode_string(C, I, E, O, S) ->
decode_string(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(E, 'xs:string'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:integer')
).
%% 7.2 bin:encode-string
%% Summary
%% Encodes a string into binary data using a given encoding.
%% Signatures
%% bin:encode-string($in as xs:string?) as xs:base64Binary?
%% bin:encode-string($in as xs:string?,
%% $encoding as xs:string) as xs:base64Binary?
%% Rules
%% The $encoding argument is the name of an encoding. The values for this
%% attribute follow the same rules as for the encoding attribute in an XML
%% declaration. The only values which every implementation is required to
%% recognize are utf-8 and utf-16.
%% If $encoding is ommitted, utf-8 encoding is assumed.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Error Conditions
%% [bin:unknown-encoding] is raised if $encoding is invalid or not
%% supported by the implementation.
%% [bin:conversion-error] is raised if there is an error or malformed input
%% during encoding the string. Additional information about the error may
%% be passed through suitable error reporting mechanisms – this is
%% implementation-dependant.
encode_string(C, I) ->
encode_string(C, I, <<"UTF-8">>).
encode_string(_, [], _) ->
[];
encode_string(_, I, E) when is_binary(I), is_binary(E) ->
Enc = check_encoding(E),
case unicode:characters_to_binary(I, utf8, Enc) of
{error, _, _} ->
do_throw('conversion-error');
{incomplete, _, _} ->
do_throw('conversion-error');
Bin when Enc == utf16 ->
BOM = unicode:encoding_to_bom(utf16),
?BIN(<<BOM/binary, Bin/binary>>);
% here it is ascii so just check
Bin when Enc == latin1 ->
_ = [do_throw('conversion-error') || <<C>> <= Bin, C > 127],
?BIN(Bin);
Bin ->
?BIN(Bin)
end;
encode_string(C, I, E) ->
encode_string(
C,
xqerl_types:cast_as(I, 'xs:string'),
xqerl_types:cast_as(E, 'xs:string')
).
%% 8 Packing and unpacking of encoded numeric values
%% 8.1 Number 'endianness'
%% Packing and unpacking numeric values can be performed in
%% 'most-significant-first' ('big-endian') or 'least-significant-first'
%% ('little-endian') octet order. The default is 'most-significant-first'. The
%% functions have an optional parameter $octet-order whose string value
%% controls the order. Least-significant-first order is indicated by any of
%% the values least-significant-first, little-endian or LE.
%% Most-significant-first order is indicated by any of the values
%% most-significant-first, big-endian or BE.
%%
%% 8.2 Integer representation
%% Integers within binary data are represented, or assumed to be represented,
%% as an integral number of octets. Integers where $length is greater than 8
%% octets (and thus not representable as a long) might be expected in some
%% situations, e.g. encryption. Whether the range of integers is limited to
%% ±2^63 may be implementation-dependant.
%%
%% 8.3 Representation of floating point numbers
%% Care should be taken with the packing and unpacking of floating point
%% numbers (xs:float and xs:double). The binary representations are expected to
%% correspond with those of the IEEE single/double-precision 32/64-bit
%% floating point types [IEEE 754-1985]. Consequently they will occupy 4 or 8
%% octets when packed.
%%
%% Positive and negative infinities are supported. INF maps to 0x7f80 0000
%% (float), 0x7ff0 0000 0000 0000 (double). -INF maps to 0xff80 0000 (float),
%% 0xfff0 0000 0000 0000 (double).
%%
%% Negative zero (0x8000 0000 0000 0000 double, 0x8000 0000 float) encountered
%% during unpacking will yield negative zero forms (e.g. -xs:double(0.0)) and
%% negative zeros will be written as a result of packing.
%%
%% [XML Schema 1.1 Part 2] provides only one form of NaN which corresponds to a
%% 'quiet' NaN with zero payload of [IEEE 754-1985] with forms 0x7fc0 0000
%% (float), 0x7ff8 0000 0000 0000 (double). These are the bit forms that will
%% be packed. 'Signalling' NaN values (0x7f80 0001 -> 0x7fbf ffff or
%% 0xff80 0001 -> 0xffbf ffff,
%% 0x7ff0 0000 0000 0001 -> 0x7ff7 ffff ffff ffff or
%% 0xfff0 0000 0000 0001 -> 0xfff7 ffff ffff ffff) encountered during unpacking
%% will be replaced by 'quiet' NaN. Any low-order payload in an unpacked quiet
%% NaN is also zeroed.
%% 8.4 bin:pack-double
%% Summary
%% Returns the 8-octet binary representation of a double value.
%% Signatures
%% bin:pack-double($in as xs:double) as xs:base64Binary
%% bin:pack-double($in as xs:double,
%% $octet-order as xs:string) as xs:base64Binary
%% Rules
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The binary representation will correspond with that of the IEEE
%% double-precision 64-bit floating point type [IEEE 754-1985]. For more
%% details see 8.3 Representation of floating point numbers.
%% Error Conditions
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
pack_double(C, I) ->
pack_double(C, I, <<"BE">>).
pack_double(_, nan, O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<127, 248, 0, 0, 0, 0, 0, 0>>);
little -> ?BIN(<<0, 0, 0, 0, 0, 0, 248, 127>>)
end;
pack_double(_, neg_zero, O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<128, 0, 0, 0, 0, 0, 0, 0>>);
little -> ?BIN(<<0, 0, 0, 0, 0, 0, 0, 128>>)
end;
pack_double(_, neg_infinity, O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<255, 240, 0, 0, 0, 0, 0, 0>>);
little -> ?BIN(<<0, 0, 0, 0, 0, 0, 240, 255>>)
end;
pack_double(_, infinity, O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<127, 240, 0, 0, 0, 0, 0, 0>>);
little -> ?BIN(<<0, 0, 0, 0, 0, 0, 240, 127>>)
end;
pack_double(_, I, O) when is_float(I), is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<I:64/big-float>>);
little -> ?BIN(<<I:64/little-float>>)
end;
pack_double(C, I, O) ->
pack_double(
C,
xqerl_types:cast_as(I, 'xs:double'),
xqerl_types:cast_as(O, 'xs:string')
).
%% 8.5 bin:pack-float
%% Summary
%% Returns the 4-octet binary representation of a float value.
%% Signatures
%% bin:pack-float($in as xs:float) as xs:base64Binary
%% bin:pack-float($in as xs:float,
%% $octet-order as xs:string) as xs:base64Binary
%% Rules
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The binary representation will correspond with that of the IEEE
%% single-precision 32-bit floating point type [IEEE 754-1985]. For more
%% details see 8.3 Representation of floating point numbers.
%% Error Conditions
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
pack_float(C, I) ->
pack_float(C, I, <<"BE">>).
pack_float(_, ?FLT(nan), O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<127, 192, 0, 0>>);
little -> ?BIN(<<0, 0, 192, 127>>)
end;
pack_float(_, ?FLT(neg_zero), O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<128, 0, 0, 0>>);
little -> ?BIN(<<0, 0, 0, 128>>)
end;
pack_float(_, ?FLT(neg_infinity), O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<255, 128, 0, 0>>);
little -> ?BIN(<<0, 0, 128, 255>>)
end;
pack_float(_, ?FLT(infinity), O) when is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<127, 128, 0, 0>>);
little -> ?BIN(<<0, 0, 128, 127>>)
end;
pack_float(_, ?FLT(I), O) when is_float(I), is_binary(O) ->
case check_endianness(O) of
big -> ?BIN(<<I:32/big-float>>);
little -> ?BIN(<<I:32/little-float>>)
end;
pack_float(C, I, O) ->
pack_float(
C,
xqerl_types:cast_as(I, 'xs:float'),
xqerl_types:cast_as(O, 'xs:string')
).
%% 8.6 bin:pack-integer
%% Summary
%% Returns the twos-complement binary representation of an integer value
%% treated as $size octets long. Any 'excess' high-order bits are discarded.
%% Signatures
%% bin:pack-integer($in as xs:integer,
%% $size as xs:integer) as xs:base64Binary
%% bin:pack-integer($in as xs:integer,
%% $size as xs:integer,
%% $octet-order as xs:string) as xs:base64Binary
%% Rules
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% Specifying a $size of zero yields an empty binary data.
%% Error Conditions
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
%% [bin:negative-size] is raised if $size is negative.
%% Notes
%% If the integer being packed has a maximum precision of $size octets, then
%% signed/unsigned versions are not necessary. If the data is considered
%% unsigned, then the most significant bit of the bottom $size octets has a
%% normal positive (2^(8 *$size - 1)) meaning. If it is considered to be a
%% signed value, then the MSB and all the higher order, discarded bits will
%% be '1' for a negative value and '0' for a positive or zero. If this
%% function were to check the 'sizing' of the supplied integer against the
%% packing size, then any values of MSB and the discarded higher order bits
%% other than 'all 1' or 'all 0' would constitute an error. This function
%% does not perfom such checking.
pack_integer(C, I, S) ->
pack_integer(C, I, S, <<"BE">>).
pack_integer(_, _, S, _) when is_integer(S), S < 0 ->
do_throw('negative-size');
pack_integer(_, _, 0, _) ->
?BIN(<<>>);
pack_integer(_, I, S, O) when is_integer(I), is_integer(S), is_binary(O) ->
Bits = S * 8,
case check_endianness(O) of
big -> ?BIN(<<I:Bits/big-integer>>);
little -> ?BIN(<<I:Bits/little-integer>>)
end;
pack_integer(C, I, S, O) ->
pack_integer(
C,
xqerl_types:cast_as(I, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:integer'),
xqerl_types:cast_as(O, 'xs:string')
).
%% 8.7 bin:unpack-double
%% Summary
%% Extract double value stored at the particular offset in binary data.
%% Signatures
%% bin:unpack-double($in as xs:base64Binary,
%% $offset as xs:integer) as xs:double
%% bin:unpack-double($in as xs:base64Binary,
%% $offset as xs:integer,
%% $octet-order as xs:string) as xs:double
%% Rules
%% Extract the double value stored in the 8 successive octets from the
%% $offset octet of the binary data of $in.
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The value of $offset must be a non-negative integer.
%% The $offset is zero based.
%% The binary representation is expected to correspond with that of the
%% IEEE double-precision 64-bit floating point type [IEEE 754-1985]. For
%% more details see 8.3 Representation of floating point numbers.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset + 8
%% (octet-length of xs:double) is larger than the size of the binary
%% data of $in.
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
unpack_double(C, I, O) ->
unpack_double(C, I, O, <<"BE">>).
unpack_double(_, ?BIN(I), O, _) when
is_integer(O) andalso O < 0;
is_integer(O) andalso
(O + 8) > byte_size(I)
->
do_throw('index-out-of-range');
unpack_double(_, ?BIN(I), O, E) when is_integer(O), is_binary(E) ->
<<_:O/binary, Part:8/binary, _/binary>> = I,
case check_endianness(E) of
big -> unpack_double_big(Part);
little -> unpack_double_little(Part)
end;
unpack_double(C, I, O, E) ->
unpack_double(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(E, 'xs:string')
).
unpack_double_big(<<128, 0, 0, 0, 0, 0, 0, 0>>) -> neg_zero;
unpack_double_big(<<255, 240, 0, 0, 0, 0, 0, 0>>) -> neg_infinity;
unpack_double_big(<<127, 240, 0, 0, 0, 0, 0, 0>>) -> infinity;
%% ["11111111 1111 0000 000000000000000000000000000000000000000000000001",
%% "11111111 1111 0111 111111111111111111111111111111111111111111111111",
%% "01111111 1111 1000 000000000000000000000000000000000000000000000001"]
unpack_double_big(<<127, 15:4, _:4, _, _, _, _, _, _>>) -> nan;
unpack_double_big(<<255, 15:4, _:4, _, _, _, _, _, _>>) -> nan;
unpack_double_big(<<F:64/big-float>>) -> F.
unpack_double_little(<<0, 0, 0, 0, 0, 0, 0, 128>>) -> neg_zero;
unpack_double_little(<<0, 0, 0, 0, 0, 0, 240, 255>>) -> neg_infinity;
unpack_double_little(<<0, 0, 0, 0, 0, 0, 240, 127>>) -> infinity;
unpack_double_little(<<_, _, _, _, _, _, 15:4, _:4, 127>>) -> nan;
unpack_double_little(<<_, _, _, _, _, _, 15:4, _:4, 255>>) -> nan;
unpack_double_little(<<F:64/little-float>>) -> F.
%% 8.8 bin:unpack-float
%% Summary
%% Extract float value stored at the particular offset in binary data.
%%
%% Signatures
%% bin:unpack-float($in as xs:base64Binary,
%% $offset as xs:integer) as xs:float
%% bin:unpack-float($in as xs:base64Binary,
%% $offset as xs:integer,
%% $octet-order as xs:string) as xs:float
%% Rules
%% Extract the float value stored in the 4 successive octets from the
%% $offset octet of the binary data of $in.
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The value of $offset must be a non-negative integer.
%% The $offset is zero based.
%% The binary representation is expected to correspond with that of the
%% IEEE single-precision 32-bit floating point type [IEEE 754-1985]. For
%% more details see 8.3 Representation of floating point numbers.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset + 4
%% (octet-length of xs:float) is larger than the size of the binary data
%% of $in.
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
unpack_float(C, I, O) ->
unpack_float(C, I, O, <<"BE">>).
unpack_float(_, ?BIN(I), O, _) when
is_integer(O) andalso O < 0;
is_integer(O) andalso
(O + 4) > byte_size(I)
->
do_throw('index-out-of-range');
unpack_float(_, ?BIN(I), O, E) when is_integer(O), is_binary(E) ->
<<_:O/binary, Part:4/binary, _/binary>> = I,
case check_endianness(E) of
big -> ?FLT(unpack_float_big(Part));
little -> ?FLT(unpack_float_little(Part))
end;
unpack_float(C, I, O, E) ->
unpack_float(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(E, 'xs:string')
).
unpack_float_big(<<128, 0, 0, 0>>) -> neg_zero;
unpack_float_big(<<255, 128, 0, 0>>) -> neg_infinity;
unpack_float_big(<<127, 128, 0, 0>>) -> infinity;
unpack_float_big(<<127, 1:1, _/bitstring>>) -> nan;
unpack_float_big(<<255, 1:1, _/bitstring>>) -> nan;
unpack_float_big(<<F:32/big-float>>) -> F.
unpack_float_little(<<0, 0, 0, 128>>) -> neg_zero;
unpack_float_little(<<0, 0, 128, 255>>) -> neg_infinity;
unpack_float_little(<<0, 0, 128, 127>>) -> infinity;
unpack_float_little(<<_:23, 1:1, 127>>) -> nan;
unpack_float_little(<<_:23, 1:1, 255>>) -> nan;
unpack_float_little(<<F:32/little-float>>) -> F.
%% 8.9 bin:unpack-integer
%% Summary
%% Returns a signed integer value represented by the $size octets starting
%% from $offset in the input binary representation. Necessary sign extension
%% is performed (i.e. the result is negative if the high order bit is '1').
%% Signatures
%% bin:unpack-integer($in as xs:base64Binary,
%% $offset as xs:integer,
%% $size as xs:integer) as xs:integer
%% bin:unpack-integer($in as xs:base64Binary,
%% $offset as xs:integer,
%% $size as xs:integer,
%% $octet-order as xs:string) as xs:integer
%% Rules
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The values of $offset and $size must be non-negative integers.
%% $offset is zero based.
%% Specifying a $size of zero yields the integer 0.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset +
%% $size is larger than the size of the binary data of $in.
%% [bin:negative-size] is raised if $size is negative.
%% [bin:unknown-significance-order] is raised if the value $octet-order is
%% unrecognized.
%% Notes
%% For discussion on integer range see 8.2 Integer representation.
unpack_integer(C, I, O, S) ->
unpack_integer(C, I, O, S, <<"BE">>).
unpack_integer(_, ?BIN(I), O, S, _) when
is_integer(O) andalso O < 0;
is_integer(O) andalso
is_integer(S) andalso
(O + S) > byte_size(I)
->
do_throw('index-out-of-range');
unpack_integer(_, _, _, S, _) when is_integer(S), S < 0 ->
do_throw('negative-size');
unpack_integer(_, ?BIN(I), O, S, E) when is_integer(O), is_integer(S), is_binary(E) ->
Bits = S * 8,
case check_endianness(E) of
big ->
<<_:O/binary, Int:Bits/big-signed-integer, _/binary>> = I,
Int;
little ->
<<_:O/binary, Int:Bits/little-signed-integer, _/binary>> = I,
Int
end;
unpack_integer(C, I, O, S, E) ->
unpack_integer(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:integer'),
xqerl_types:cast_as(E, 'xs:string')
).
%% 8.10 bin:unpack-unsigned-integer
%% Summary
%% Returns an unsigned integer value represented by the $size octets
%% starting from $offset in the input binary representation.
%% Signatures
%% bin:unpack-unsigned-integer($in as xs:base64Binary,
%% $offset as xs:integer,
%% $size as xs:integer) as xs:integer
%% bin:unpack-unsigned-integer($in as xs:base64Binary,
%% $offset as xs:integer,
%% $size as xs:integer,
%% $octet-order as xs:string) as xs:integer
%% Rules
%% Most-significant-octet-first number representation is assumed unless the
%% $octet-order parameter is specified. Acceptable values for $octet-order
%% are described in 8.1 Number 'endianness'.
%% The values of $offset and $size must be non-negative integers.
%% The $offset is zero based.
%% Specifying a $size of zero yields the integer 0.
%% Error Conditions
%% [bin:index-out-of-range] is raised if $offset is negative or $offset +
%% $size is larger than the size of the binary data of $in.
%% [bin:negative-size] is raised if $size is negative.
%% [bin:unknown-significance-order] is raised if the value $octet-order
%% is unrecognized.
%% Notes
%% For discussion on integer range see 8.2 Integer representation.
unpack_unsigned_integer(C, I, O, S) ->
unpack_unsigned_integer(C, I, O, S, <<"BE">>).
unpack_unsigned_integer(_, ?BIN(I), O, S, _) when
is_integer(O) andalso O < 0;
is_integer(O) andalso
is_integer(S) andalso
(O + S) > byte_size(I)
->
do_throw('index-out-of-range');
unpack_unsigned_integer(_, _, _, S, _) when is_integer(S), S < 0 ->
do_throw('negative-size');
unpack_unsigned_integer(_, ?BIN(I), O, S, E) when is_integer(O), is_integer(S), is_binary(E) ->
Bits = S * 8,
case check_endianness(E) of
big ->
<<_:O/binary, Int:Bits/big-unsigned-integer, _/binary>> = I,
Int;
little ->
<<_:O/binary, Int:Bits/little-unsigned-integer, _/binary>> = I,
Int
end;
unpack_unsigned_integer(C, I, O, S, E) ->
unpack_unsigned_integer(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(O, 'xs:integer'),
xqerl_types:cast_as(S, 'xs:integer'),
xqerl_types:cast_as(E, 'xs:string')
).
%% 9 Bitwise operations
%% 9.1 bin:or
%% Summary
%% Returns the "bitwise or" of two binary arguments.
%% Signature
%% bin:or($a as xs:base64Binary?,
%% $b as xs:base64Binary?) as xs:base64Binary?
%% Rules
%% Returns "bitwise or" applied between $a and $b.
%% If either argument is the empty sequence, an empty sequence is returned.
%% Error Conditions
%% [bin:differing-length-arguments] is raised if the input arguments are of
%% differing length.
or_(_, [], _) ->
[];
or_(_, _, []) ->
[];
or_(_, ?BIN(A), ?BIN(B)) ->
?BIN(do_bytewise(fun erlang:'bor'/2, A, B));
or_(C, A, B) ->
or_(
C,
xqerl_types:cast_as(A, 'xs:base64Binary'),
xqerl_types:cast_as(B, 'xs:base64Binary')
).
%% 9.2 bin:xor
%% Summary
%% Returns the "bitwise xor" of two binary arguments.
%% Signature
%% bin:xor($a as xs:base64Binary?,
%% $b as xs:base64Binary?) as xs:base64Binary?
%% Rules
%% Returns "bitwise exclusive or" applied between $a and $b.
%% If either argument is the empty sequence, an empty sequence is returned.
%% Error Conditions
%% [bin:differing-length-arguments] is raised if the input arguments are of
%% differing length.
xor_(_, [], _) ->
[];
xor_(_, _, []) ->
[];
xor_(_, ?BIN(A), ?BIN(B)) ->
?BIN(do_bytewise(fun erlang:'bxor'/2, A, B));
xor_(C, A, B) ->
xor_(
C,
xqerl_types:cast_as(A, 'xs:base64Binary'),
xqerl_types:cast_as(B, 'xs:base64Binary')
).
%% 9.3 bin:and
%% Summary
%% Returns the "bitwise and" of two binary arguments.
%% Signature
%% bin:and($a as xs:base64Binary?,
%% $b as xs:base64Binary?) as xs:base64Binary?
%% Rules
%% Returns "bitwise and" applied between $a and $b.
%% If either argument is the empty sequence, an empty sequence is returned.
%% Error Conditions
%% [bin:differing-length-arguments] is raised if the input arguments are of
%% differing length.
and_(_, [], _) ->
[];
and_(_, _, []) ->
[];
and_(_, ?BIN(A), ?BIN(B)) ->
?BIN(do_bytewise(fun erlang:'band'/2, A, B));
and_(C, A, B) ->
and_(
C,
xqerl_types:cast_as(A, 'xs:base64Binary'),
xqerl_types:cast_as(B, 'xs:base64Binary')
).
%% 9.4 bin:not
%% Summary
%% Returns the "bitwise not" of a binary argument.
%% Signature
%% bin:not($in as xs:base64Binary?) as xs:base64Binary?
%% Rules
%% Returns "bitwise not" applied to $in.
%% If the argument is the empty sequence, an empty sequence is returned.
not_(_, []) ->
[];
not_(_, ?BIN(I)) ->
?BIN(<<<<(bnot C)>> || <<C>> <= I>>);
not_(C, I) ->
not_(
C,
xqerl_types:cast_as(I, 'xs:base64Binary')
).
%% 9.5 bin:shift
%% Summary
%% Shift bits in binary data.
%% Signature
%% bin:shift($in as xs:base64Binary?,
%% $by as xs:integer) as xs:base64Binary?
%% Rules
%% If $by is positive then bits are shifted $by times to the left.
%% If $by is negative then bits are shifted -$by times to the right.
%% If $by is zero, the result is identical to $in.
%% If |$by| is greater than the bit-length of $in then an all-zeros result,
%% of the same length as $in, is returned.
%% |$by| can be greater than 8, implying multi-byte shifts.
%% The result always has the same size as $in.
%% The shifting is logical: zeros are placed into discarded bits.
%% If the value of $in is the empty sequence, the function returns an empty
%% sequence.
%% Notes
%% Bit shifting across byte boundaries implies 'big-endian' treatment, i.e.
%% the leftmost (high-order) bit when shifted left becomes the low-order
%% bit of the preceding byte.
%% Examples
%% bin:shift(bin:hex("000001"), 17) -> bin:hex("020000")
shift(_, [], _) ->
[];
shift(_, ?BIN(_) = I, 0) ->
I;
shift(_, ?BIN(I), B) when is_integer(B) ->
L = byte_size(I),
P = abs(B),
Pad = <<0:P>>,
case B > 0 of
% shift left
true ->
<<_:P/bitstring, C:L/binary>> = <<I/binary, Pad/bitstring>>,
?BIN(C);
% shift right
false ->
<<C:L/binary, _/bitstring>> = <<Pad/bitstring, I/binary>>,
?BIN(C)
end;
shift(C, I, B) ->
shift(
C,
xqerl_types:cast_as(I, 'xs:base64Binary'),
xqerl_types:cast_as(B, 'xs:integer')
).
-define(Q(V), #xqAtomicValue{
type = 'xs:QName',
value = #qname{
namespace = ?NS,
prefix = ?PX,
local_name = V
}
}).
do_throw('differing-length-arguments') ->
E = #xqError{
description = <<"The arguments to a bitwise operation are of differing length.">>,
name = ?Q(<<"differing-length-arguments">>)
},
throw(E);
do_throw('index-out-of-range') ->
E = #xqError{
description =
<<"Attempting to retrieve data outside the meaningful range of a binary data type.">>,
name = ?Q(<<"index-out-of-range">>)
},
throw(E);
do_throw('negative-size') ->
E = #xqError{
description = <<"Size of binary portion, required numeric size or padding is negative.">>,
name = ?Q(<<"negative-size">>)
},
throw(E);
do_throw('octet-out-of-range') ->
E = #xqError{
description = <<"Attempting to pack binary value with octet outside range.">>,
name = ?Q(<<"octet-out-of-range">>)
},
throw(E);
do_throw('non-numeric-character') ->
E = #xqError{
description = <<"Wrong character in binary 'numeric constructor' string.">>,
name = ?Q(<<"non-numeric-character">>)
},
throw(E);
do_throw('unknown-encoding') ->
E = #xqError{
description = <<"The specified encoding is not supported.">>,
name = ?Q(<<"unknown-encoding">>)
},
throw(E);
do_throw('conversion-error') ->
E = #xqError{
description = <<"Error in converting to/from a string.">>,
name = ?Q(<<"conversion-error">>)
},
throw(E);
do_throw('unknown-significance-order') ->
E = #xqError{
description = <<"Unknown octet-order value.">>,
name = ?Q(<<"unknown-significance-order">>)
},
throw(E).
check_encoding(E) ->
case string:uppercase(E) of
<<"UTF-8">> ->
utf8;
<<"UTF-16">> ->
utf16;
<<"US-ASCII">> ->
latin1;
<<>> ->
utf8;
_ ->
do_throw('unknown-encoding')
end.
check_endianness(<<"least-significant-first">>) -> little;
check_endianness(<<"little-endian">>) -> little;
check_endianness(<<"LE">>) -> little;
check_endianness(<<"most-significant-first">>) -> big;
check_endianness(<<"big-endian">>) -> big;
check_endianness(<<"BE">>) -> big;
check_endianness(_) -> do_throw('unknown-significance-order').
do_bytewise(F, A, B) ->
case byte_size(A) == byte_size(B) of
true ->
do_bytewise(F, A, B, <<>>);
false ->
do_throw('differing-length-arguments')
end.
do_bytewise(F, <<A, RestA/binary>>, <<B, RestB/binary>>, Acc) ->
C = F(A, B),
do_bytewise(F, RestA, RestB, <<Acc/binary, C>>);
do_bytewise(_, <<>>, <<>>, Acc) ->
Acc. | src/xqerl_mod_expath_binary.erl | 0.576184 | 0.427277 | xqerl_mod_expath_binary.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_key_tree).
-export([merge/2, find_missing/2, get_key_leafs/2, get_full_key_paths/2, get/2]).
-export([map/2, get_all_leafs/1, count_leafs/1, remove_leafs/2,
get_all_leafs_full/1,stem/2,map_leafs/2]).
% a key tree looks like this:
% Tree -> [] or [{Key, Value, ChildTree} | SiblingTree]
% ChildTree -> Tree
% SiblingTree -> [] or [{SiblingKey, Value, Tree} | Tree]
% And each Key < SiblingKey
% partial trees arranged by how much they are cut off.
merge(A, B) ->
{Merged, HasConflicts} =
lists:foldl(
fun(InsertTree, {AccTrees, AccConflicts}) ->
{ok, Merged, Conflicts} = merge_one(AccTrees, InsertTree, [], false),
{Merged, Conflicts or AccConflicts}
end,
{A, false}, B),
if HasConflicts or
((length(Merged) /= length(A)) and (length(Merged) /= length(B))) ->
Conflicts = conflicts;
true ->
Conflicts = no_conflicts
end,
{lists:sort(Merged), Conflicts}.
merge_one([], Insert, OutAcc, ConflictsAcc) ->
{ok, [Insert | OutAcc], ConflictsAcc};
merge_one([{Start, Tree}|Rest], {StartInsert, TreeInsert}, OutAcc, ConflictsAcc) ->
if Start =< StartInsert ->
StartA = Start,
StartB = StartInsert,
TreeA = Tree,
TreeB = TreeInsert;
true ->
StartB = Start,
StartA = StartInsert,
TreeB = Tree,
TreeA = TreeInsert
end,
case merge_at([TreeA], StartB - StartA, TreeB) of
{ok, [CombinedTrees], Conflicts} ->
merge_one(Rest, {StartA, CombinedTrees}, OutAcc, Conflicts or ConflictsAcc);
no ->
merge_one(Rest, {StartB, TreeB}, [{StartA, TreeA} | OutAcc], ConflictsAcc)
end.
merge_at([], _Place, _Insert) ->
no;
merge_at([{Key, Value, SubTree}|Sibs], 0, {InsertKey, InsertValue, InsertSubTree}) ->
if Key == InsertKey ->
{Merge, Conflicts} = merge_simple(SubTree, InsertSubTree),
{ok, [{Key, Value, Merge} | Sibs], Conflicts};
true ->
case merge_at(Sibs, 0, {InsertKey, InsertValue, InsertSubTree}) of
{ok, Merged, Conflicts} ->
{ok, [{Key, Value, SubTree} | Merged], Conflicts};
no ->
no
end
end;
merge_at([{Key, Value, SubTree}|Sibs], Place, Insert) ->
case merge_at(SubTree, Place - 1,Insert) of
{ok, Merged, Conflicts} ->
{ok, [{Key, Value, Merged} | Sibs], Conflicts};
no ->
case merge_at(Sibs, Place, Insert) of
{ok, Merged, Conflicts} ->
{ok, [{Key, Value, SubTree} | Merged], Conflicts};
no ->
no
end
end.
% key tree functions
merge_simple([], B) ->
{B, false};
merge_simple(A, []) ->
{A, false};
merge_simple([ATree | ANextTree], [BTree | BNextTree]) ->
{AKey, AValue, ASubTree} = ATree,
{BKey, _BValue, BSubTree} = BTree,
if
AKey == BKey ->
%same key
{MergedSubTree, Conflict1} = merge_simple(ASubTree, BSubTree),
{MergedNextTree, Conflict2} = merge_simple(ANextTree, BNextTree),
{[{AKey, AValue, MergedSubTree} | MergedNextTree], Conflict1 or Conflict2};
AKey < BKey ->
{MTree, _} = merge_simple(ANextTree, [BTree | BNextTree]),
{[ATree | MTree], true};
true ->
{MTree, _} = merge_simple([ATree | ANextTree], BNextTree),
{[BTree | MTree], true}
end.
find_missing(_Tree, []) ->
[];
find_missing([], SeachKeys) ->
SeachKeys;
find_missing([{Start, {Key, Value, SubTree}} | RestTree], SeachKeys) ->
PossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SeachKeys, KeyPos >= Start],
ImpossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SeachKeys, KeyPos < Start],
Missing = find_missing_simple(Start, [{Key, Value, SubTree}], PossibleKeys),
find_missing(RestTree, ImpossibleKeys ++ Missing).
find_missing_simple(_Pos, _Tree, []) ->
[];
find_missing_simple(_Pos, [], SeachKeys) ->
SeachKeys;
find_missing_simple(Pos, [{Key, _, SubTree} | RestTree], SeachKeys) ->
PossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SeachKeys, KeyPos >= Pos],
ImpossibleKeys = [{KeyPos, KeyValue} || {KeyPos, KeyValue} <- SeachKeys, KeyPos < Pos],
SrcKeys2 = PossibleKeys -- [{Pos, Key}],
SrcKeys3 = find_missing_simple(Pos + 1, SubTree, SrcKeys2),
ImpossibleKeys ++ find_missing_simple(Pos, RestTree, SrcKeys3).
filter_leafs([], _Keys, FilteredAcc, RemovedKeysAcc) ->
{FilteredAcc, RemovedKeysAcc};
filter_leafs([{Pos, [{LeafKey, _}|_]} = Path |Rest], Keys, FilteredAcc, RemovedKeysAcc) ->
FilteredKeys = lists:delete({Pos, LeafKey}, Keys),
if FilteredKeys == Keys ->
% this leaf is not a key we are looking to remove
filter_leafs(Rest, Keys, [Path | FilteredAcc], RemovedKeysAcc);
true ->
% this did match a key, remove both the node and the input key
filter_leafs(Rest, FilteredKeys, FilteredAcc, [{Pos, LeafKey} | RemovedKeysAcc])
end.
% Removes any branches from the tree whose leaf node(s) are in the Keys
remove_leafs(Trees, Keys) ->
% flatten each branch in a tree into a tree path
Paths = get_all_leafs_full(Trees),
% filter out any that are in the keys list.
{FilteredPaths, RemovedKeys} = filter_leafs(Paths, Keys, [], []),
% convert paths back to trees
NewTree = lists:foldl(
fun({PathPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
{NewTrees, _} = merge(TreeAcc, [{PathPos + 1 - length(Path), SingleTree}]),
NewTrees
end, [], FilteredPaths),
{NewTree, RemovedKeys}.
% get the leafs in the tree matching the keys. The matching key nodes can be
% leafs or an inner nodes. If an inner node, then the leafs for that node
% are returned.
get_key_leafs(Tree, Keys) ->
get_key_leafs(Tree, Keys, []).
get_key_leafs(_, [], Acc) ->
{Acc, []};
get_key_leafs([], Keys, Acc) ->
{Acc, Keys};
get_key_leafs([{Pos, Tree}|Rest], Keys, Acc) ->
{Gotten, RemainingKeys} = get_key_leafs_simple(Pos, [Tree], Keys, []),
get_key_leafs(Rest, RemainingKeys, Gotten ++ Acc).
get_key_leafs_simple(_Pos, _Tree, [], _KeyPathAcc) ->
{[], []};
get_key_leafs_simple(_Pos, [], KeysToGet, _KeyPathAcc) ->
{[], KeysToGet};
get_key_leafs_simple(Pos, [{Key, _Value, SubTree}=Tree | RestTree], KeysToGet, KeyPathAcc) ->
case lists:delete({Pos, Key}, KeysToGet) of
KeysToGet -> % same list, key not found
{LeafsFound, KeysToGet2} = get_key_leafs_simple(Pos + 1, SubTree, KeysToGet, [Key | KeyPathAcc]),
{RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet2, KeyPathAcc),
{LeafsFound ++ RestLeafsFound, KeysRemaining};
KeysToGet2 ->
LeafsFound = get_all_leafs_simple(Pos, [Tree], KeyPathAcc),
LeafKeysFound = [LeafKeyFound || {LeafKeyFound, _} <- LeafsFound],
KeysToGet2 = KeysToGet2 -- LeafKeysFound,
{RestLeafsFound, KeysRemaining} = get_key_leafs_simple(Pos, RestTree, KeysToGet2, KeyPathAcc),
{LeafsFound ++ RestLeafsFound, KeysRemaining}
end.
get(Tree, KeysToGet) ->
{KeyPaths, KeysNotFound} = get_full_key_paths(Tree, KeysToGet),
FixedResults = [ {Value, {Pos, [Key0 || {Key0, _} <- Path]}} || {Pos, [{_Key, Value}|_]=Path} <- KeyPaths],
{FixedResults, KeysNotFound}.
get_full_key_paths(Tree, Keys) ->
get_full_key_paths(Tree, Keys, []).
get_full_key_paths(_, [], Acc) ->
{Acc, []};
get_full_key_paths([], Keys, Acc) ->
{Acc, Keys};
get_full_key_paths([{Pos, Tree}|Rest], Keys, Acc) ->
{Gotten, RemainingKeys} = get_full_key_paths(Pos, [Tree], Keys, []),
get_full_key_paths(Rest, RemainingKeys, Gotten ++ Acc).
get_full_key_paths(_Pos, _Tree, [], _KeyPathAcc) ->
{[], []};
get_full_key_paths(_Pos, [], KeysToGet, _KeyPathAcc) ->
{[], KeysToGet};
get_full_key_paths(Pos, [{KeyId, Value, SubTree} | RestTree], KeysToGet, KeyPathAcc) ->
KeysToGet2 = KeysToGet -- [{Pos, KeyId}],
CurrentNodeResult =
case length(KeysToGet2) == length(KeysToGet) of
true -> % not in the key list.
[];
false -> % this node is the key list. return it
[{Pos, [{KeyId, Value} | KeyPathAcc]}]
end,
{KeysGotten, KeysRemaining} = get_full_key_paths(Pos + 1, SubTree, KeysToGet2, [{KeyId, Value} | KeyPathAcc]),
{KeysGotten2, KeysRemaining2} = get_full_key_paths(Pos, RestTree, KeysRemaining, KeyPathAcc),
{CurrentNodeResult ++ KeysGotten ++ KeysGotten2, KeysRemaining2}.
get_all_leafs_full(Tree) ->
get_all_leafs_full(Tree, []).
get_all_leafs_full([], Acc) ->
Acc;
get_all_leafs_full([{Pos, Tree} | Rest], Acc) ->
get_all_leafs_full(Rest, get_all_leafs_full_simple(Pos, [Tree], []) ++ Acc).
get_all_leafs_full_simple(_Pos, [], _KeyPathAcc) ->
[];
get_all_leafs_full_simple(Pos, [{KeyId, Value, []} | RestTree], KeyPathAcc) ->
[{Pos, [{KeyId, Value} | KeyPathAcc]} | get_all_leafs_full_simple(Pos, RestTree, KeyPathAcc)];
get_all_leafs_full_simple(Pos, [{KeyId, Value, SubTree} | RestTree], KeyPathAcc) ->
get_all_leafs_full_simple(Pos + 1, SubTree, [{KeyId, Value} | KeyPathAcc]) ++ get_all_leafs_full_simple(Pos, RestTree, KeyPathAcc).
get_all_leafs(Trees) ->
get_all_leafs(Trees, []).
get_all_leafs([], Acc) ->
Acc;
get_all_leafs([{Pos, Tree}|Rest], Acc) ->
get_all_leafs(Rest, get_all_leafs_simple(Pos, [Tree], []) ++ Acc).
get_all_leafs_simple(_Pos, [], _KeyPathAcc) ->
[];
get_all_leafs_simple(Pos, [{KeyId, Value, []} | RestTree], KeyPathAcc) ->
[{Value, {Pos, [KeyId | KeyPathAcc]}} | get_all_leafs_simple(Pos, RestTree, KeyPathAcc)];
get_all_leafs_simple(Pos, [{KeyId, _Value, SubTree} | RestTree], KeyPathAcc) ->
get_all_leafs_simple(Pos + 1, SubTree, [KeyId | KeyPathAcc]) ++ get_all_leafs_simple(Pos, RestTree, KeyPathAcc).
count_leafs([]) ->
0;
count_leafs([{_Pos,Tree}|Rest]) ->
count_leafs_simple([Tree]) + count_leafs(Rest).
count_leafs_simple([]) ->
0;
count_leafs_simple([{_Key, _Value, []} | RestTree]) ->
1 + count_leafs_simple(RestTree);
count_leafs_simple([{_Key, _Value, SubTree} | RestTree]) ->
count_leafs_simple(SubTree) + count_leafs_simple(RestTree).
map(_Fun, []) ->
[];
map(Fun, [{Pos, Tree}|Rest]) ->
[NewTree] = map_simple(Fun, Pos, [Tree]),
[{Pos, NewTree} | map(Fun, Rest)].
map_simple(_Fun, _Pos, []) ->
[];
map_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
Value2 = Fun({Pos, Key}, Value),
[{Key, Value2, map_simple(Fun, Pos + 1, SubTree)} | map_simple(Fun, Pos, RestTree)].
map_leafs(_Fun, []) ->
[];
map_leafs(Fun, [{Pos, Tree}|Rest]) ->
[NewTree] = map_leafs_simple(Fun, Pos, [Tree]),
[{Pos, NewTree} | map_leafs(Fun, Rest)].
map_leafs_simple(_Fun, _Pos, []) ->
[];
map_leafs_simple(Fun, Pos, [{Key, Value, []} | RestTree]) ->
Value2 = Fun({Pos, Key}, Value),
[{Key, Value2, []} | map_leafs_simple(Fun, Pos, RestTree)];
map_leafs_simple(Fun, Pos, [{Key, Value, SubTree} | RestTree]) ->
[{Key, Value, map_leafs_simple(Fun, Pos + 1, SubTree)} | map_leafs_simple(Fun, Pos, RestTree)].
stem(Trees, Limit) ->
% flatten each branch in a tree into a tree path
Paths = get_all_leafs_full(Trees),
Paths2 = [{Pos, lists:sublist(Path, Limit)} || {Pos, Path} <- Paths],
% convert paths back to trees
lists:foldl(
fun({PathPos, Path},TreeAcc) ->
[SingleTree] = lists:foldl(
fun({K,V},NewTreeAcc) -> [{K,V,NewTreeAcc}] end, [], Path),
{NewTrees, _} = merge(TreeAcc, [{PathPos + 1 - length(Path), SingleTree}]),
NewTrees
end, [], Paths2).
% Tests moved to test/etap/06?-*.t | src/couchdb/couch_key_tree.erl | 0.670932 | 0.500183 | couch_key_tree.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(bigcouch_spatial_chttpd).
-export([handle_spatial_req/3, handle_spatial_list_req/3]).
-include("couch_spatial.hrl").
-include_lib("couch/include/couch_db.hrl").
-import(chttpd,
[send_json/2,send_json/3,send_json/4,send_method_not_allowed/2,send_chunk/2,
start_json_response/2, start_json_response/3, end_json_response/1,
send_chunked_error/2]).
-record(lacc, {
req,
resp = nil,
qserver,
lname,
db,
etag
}).
%% _spatial/_list handler (for compatibility with geocouch)
handle_spatial_req(#httpd{method='GET', path_parts=[_, _,
_, _, <<"_list">>, ListName, SpatialName]}=Req,
Db, DDoc) ->
handle_spatial_list(Req, ListName, SpatialName, Db, DDoc);
%% _spatial handler
handle_spatial_req(#httpd{method='GET',
path_parts=[_, _, _, _, SpatialName]}=Req, Db, DDoc) ->
QueryArgs = parse_spatial_params(Req),
Etag = couch_uuids:new(),
chttpd:etag_respond(Req, Etag, fun() ->
{ok, Resp} = start_json_response(Req, 200, [{"Etag",Etag}]),
{Acc0, CB} = case QueryArgs#spatial_query_args.count of
true -> {0, fun spatial_count_cb/2};
_ -> {nil, fun spatial_cb/2}
end,
bigcouch_spatial:spatial(Db, DDoc, SpatialName, CB, {Acc0, Resp}, QueryArgs),
chttpd:end_json_response(Resp)
end);
handle_spatial_req(Req, _Db, _DDoc) ->
send_method_not_allowed(Req, "GET,HEAD").
%% _spatial_list handler
handle_spatial_list_req(#httpd{method='GET', path_parts=[_, _,
_, _, ListName, SpatialName]}=Req, Db, DDoc) ->
handle_spatial_list(Req, ListName, SpatialName, Db, DDoc);
handle_spatial_list_req(Req, _Db, _DDoc) ->
send_method_not_allowed(Req, "GET,HEAD").
handle_spatial_list(Req, ListName, SpatialName, Db, DDoc) ->
QueryArgs = parse_spatial_params(Req),
Etag = couch_uuids:new(),
CB = fun chttpd_show:list_callback/2,
chttpd:etag_respond(Req, Etag, fun() ->
couch_query_servers:with_ddoc_proc(DDoc, fun(QServer) ->
Acc0 = #lacc{
lname = ListName,
req = Req,
qserver = QServer,
db = Db,
etag = Etag
},
bigcouch_spatial:spatial(Db, DDoc, SpatialName, CB, Acc0, QueryArgs)
end)
end).
spatial_count_cb({row, _Row}, {Count, Resp}) ->
{ok, {Count + 1, Resp}};
spatial_count_cb(complete, {Count, Resp}) ->
send_chunk(Resp, ?JSON_ENCODE({[{count, Count}]}));
spatial_count_cb({error, Reason}, {_, Resp}) ->
{Code, ErrorStr, ReasonStr} = chttpd:error_info(Reason),
Json = {[{code,Code}, {error,ErrorStr}, {reason,ReasonStr}]},
send_chunk(Resp, [$\n, ?JSON_ENCODE(Json), $\n]).
spatial_cb({total, Total}, {nil, Resp}) ->
Chunk = "{\"total_rows\":~p,\"rows\":[\r\n",
send_chunk(Resp, io_lib:format(Chunk, [Total])),
{ok, {"", Resp}};
spatial_cb({total, _}, Acc) ->
% a sorted=false view where the message came in late. Ignore.
{ok, Acc};
spatial_cb({row, Row}, {nil, Resp}) ->
% first row
send_chunk(Resp, ["{\"rows\":[\r\n", ?JSON_ENCODE(Row)]),
{ok, {",\r\n", Resp}};
spatial_cb({row, Row}, {Prepend, Resp}) ->
send_chunk(Resp, [Prepend, ?JSON_ENCODE(Row)]),
{ok, {",\r\n", Resp}};
spatial_cb(complete, {nil, Resp}) ->
send_chunk(Resp, "{\"rows\":[]}");
spatial_cb(complete, {_, Resp}) ->
send_chunk(Resp, "\r\n]}");
spatial_cb({error, Reason}, {_, Resp}) ->
{Code, ErrorStr, ReasonStr} = chttpd:error_info(Reason),
Json = {[{code,Code}, {error,ErrorStr}, {reason,ReasonStr}]},
send_chunk(Resp, [$\n, ?JSON_ENCODE(Json), $\n]).
parse_spatial_params(Req) ->
QueryList = couch_httpd:qs(Req),
QueryParams = lists:foldl(fun({K, V}, Acc) ->
parse_spatial_param(K, V) ++ Acc
end, [], QueryList),
QueryArgs = lists:foldl(fun({K, V}, Args2) ->
validate_spatial_query(K, V, Args2)
end, #spatial_query_args{}, lists:reverse(QueryParams)),
#spatial_query_args{
bbox = Bbox,
bounds = Bounds
} = QueryArgs,
case {Bbox, Bounds} of
% Coordinates of the bounding box are flipped and no bounds for the
% cartesian plane were set
{{W, S, E, N}, nil} when E < W; N < S ->
Msg = <<"Coordinates of the bounding box are flipped, but no bounds "
"for the cartesian plane were specified "
"(use the `plane_bounds` parameter)">>,
throw({query_parse_error, Msg});
_ ->
QueryArgs
end.
parse_spatial_param("bbox", Bbox) ->
[{bbox, list_to_tuple(?JSON_DECODE("[" ++ Bbox ++ "]"))}];
parse_spatial_param("stale", "ok") ->
[{stale, ok}];
parse_spatial_param("stale", "update_after") ->
[{stale, update_after}];
parse_spatial_param("stale", _Value) ->
throw({query_parse_error,
<<"stale only available as stale=ok or as stale=update_after">>});
parse_spatial_param("count", "true") ->
[{count, true}];
parse_spatial_param("count", _Value) ->
throw({query_parse_error, <<"count only available as count=true">>});
parse_spatial_param("plane_bounds", Bounds) ->
[{bounds, list_to_tuple(?JSON_DECODE("[" ++ Bounds ++ "]"))}];
parse_spatial_param("limit", Limit) ->
[{limit, parse_positive_int_param(Limit)}];
parse_spatial_param("include_docs", Value) ->
[{include_docs, parse_bool_param(Value)}];
parse_spatial_param(Key, Value) ->
[{extra, {Key, Value}}].
validate_spatial_query(bbox, Value, Args) ->
Args#spatial_query_args{bbox=Value};
validate_spatial_query(stale, ok, Args) ->
Args#spatial_query_args{stale=ok};
validate_spatial_query(stale, update_after, Args) ->
Args#spatial_query_args{stale=update_after};
validate_spatial_query(stale, _, Args) ->
Args;
validate_spatial_query(count, true, Args) ->
Args#spatial_query_args{count=true};
validate_spatial_query(bounds, Value, Args) ->
Args#spatial_query_args{bounds=Value};
validate_spatial_query(limit, Value, Args) ->
Args#spatial_query_args{limit=Value};
validate_spatial_query(include_docs, true, Args) ->
Args#spatial_query_args{include_docs=true};
validate_spatial_query(extra, _Value, Args) ->
Args.
parse_bool_param(Val) ->
case string:to_lower(Val) of
"true" -> true;
"false" -> false;
_ ->
Msg = io_lib:format("Invalid boolean parameter: ~p", [Val]),
throw({query_parse_error, ?l2b(Msg)})
end.
parse_int_param(Val) ->
case (catch list_to_integer(Val)) of
IntVal when is_integer(IntVal) ->
IntVal;
_ ->
Msg = io_lib:format("Invalid value for integer parameter: ~p", [Val]),
throw({query_parse_error, ?l2b(Msg)})
end.
parse_positive_int_param(Val) ->
case parse_int_param(Val) of
IntVal when IntVal >= 0 ->
IntVal;
_ ->
Fmt = "Invalid value for positive integer parameter: ~p",
Msg = io_lib:format(Fmt, [Val]),
throw({query_parse_error, ?l2b(Msg)})
end. | src/bigcouch_spatial_chttpd.erl | 0.647464 | 0.40928 | bigcouch_spatial_chttpd.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% A set of optics specific to dicts.
%%% @end
%%%-------------------------------------------------------------------
-module(optic_dict).
%% API
-export([all/0,
all/1,
keys/0,
keys/1,
values/0,
values/1,
associations/0,
associations/1,
key/1,
key/2,
association/1,
association/2]).
%%%===================================================================
%%% API
%%%===================================================================
%% @see values/1
-spec all() -> optic:optic().
all() ->
values().
%% @see values/1
-spec all(Options) -> optic:optic() when
Options :: optic:variations().
all(Options) ->
values(Options).
%% @see keys/1
-spec keys() -> optic:optic().
keys() ->
keys(#{}).
%% @doc
%% Focus on all keys of a dict.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_dict:keys()],
%% dict:from_list([{first, 1}, {second, 2}])).
%% {ok,[first,second]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec keys(Options) -> optic:optic() when
Options :: optic:variations().
keys(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (Key, _Value, InnerAcc) ->
Fun(Key, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{NewKey, NewAcc} = Fun(Key, InnerAcc),
{dict:store(NewKey, Value, InnerDict), NewAcc}
end,
{dict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
dict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see values/1
-spec values() -> optic:optic().
values() ->
values(#{}).
%% @doc
%% Focus on all values of a dict.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_dict:values()],
%% dict:from_list([{first, 1}, {second, 2}])).
%% {ok,[1,2]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec values(Options) -> optic:optic() when
Options :: optic:variations().
values(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (_Key, Value, InnerAcc) ->
Fun(Value, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{NewValue, NewAcc} = Fun(Value, InnerAcc),
{dict:store(Key, NewValue, InnerDict), NewAcc}
end,
{dict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
dict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see associations/1
-spec associations() -> optic:optic().
associations() ->
associations(#{}).
%% @doc
%% Focus on all associations of a dict. An association is a tuple of
%% the key and value for each entry.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_dict:associations()],
%% dict:from_list([{first, 1}, {second, 2}])).
%% {ok,[{first,1},{second,2}]}
%% '''
%% @end
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec associations(Options) -> optic:optic() when
Options :: optic:variations().
associations(Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (Key, Value, InnerAcc) ->
Fun({Key, Value}, InnerAcc)
end,
Acc,
Dict)};
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
{ok, dict:fold(fun (Key, Value, {InnerDict, InnerAcc}) ->
{{NewKey, NewValue}, NewAcc} = Fun({Key, Value}, InnerAcc),
{dict:store(NewKey, NewValue, InnerDict), NewAcc}
end,
{dict:new(), Acc},
Dict)};
false ->
{error, undefined}
end
end,
New =
fun (_Data, _Template) ->
dict:new()
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see key/2
-spec key(Key) -> optic:optic() when
Key :: term().
key(Key) ->
key(Key, #{}).
%% @doc
%% Focus on the value of a dict key.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_dict:key(first)],
%% dict:from_list([{first, 1}, {second, 2}])).
%% {ok,[1]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec key(Key, Options) -> optic:optic() when
Key :: term(),
Options :: optic:variations().
key(Key, Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
case dict:find(Key, Dict) of
{ok, Value} ->
{ok, Fun(Value, Acc)};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
case dict:find(Key, Dict) of
{ok, Value} ->
{NewValue, NewAcc} = Fun(Value, Acc),
{ok, {dict:store(Key, NewValue, Dict), NewAcc}};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
New =
fun (Dict, Template) ->
case is_dict(Dict) of
true ->
dict:store(Key, Template, Dict);
false ->
dict:from_list([{Key, Template}])
end
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%% @see association/2
-spec association(Key) -> optic:optic() when
Key :: term().
association(Key) ->
association(Key, #{}).
%% @doc
%% Focus on the association for a dict key. An association is the
%% tuple of a dict key and value. If the key is modified, the optic is
%% no longer well behaved.
%%
%% Example:
%%
%% ```
%% > optic:get([optic_dict:association(first)],
%% dict:from_list([{first, 1}, {second, 2}])).
%% {ok,[{first,1}]}
%% '''
%% @end
%% @param Key The key to focus on.
%% @param Options Common optic options.
%% @returns An opaque optic record.
-spec association(Key, Options) -> optic:optic() when
Key :: term(),
Options :: optic:variations().
association(Key, Options) ->
Fold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
case dict:find(Key, Dict) of
{ok, Value} ->
{ok, Fun({Key, Value}, Acc)};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
MapFold =
fun (Fun, Acc, Dict) ->
case is_dict(Dict) of
true ->
case dict:find(Key, Dict) of
{ok, Value} ->
{{NewKey, NewValue}, NewAcc} = Fun({Key, Value}, Acc),
{ok, {dict:store(NewKey, NewValue, dict:erase(Key, Dict)), NewAcc}};
error ->
{error, undefined}
end;
false ->
{error, undefined}
end
end,
New =
fun (Dict, Template) ->
case is_dict(Dict) of
true ->
dict:store(Key, Template, Dict);
false ->
dict:from_list([{Key, Template}])
end
end,
Optic = optic:new(MapFold, Fold),
optic:variations(Optic, Options, New).
%%%===================================================================
%%% Internal Functions
%%%===================================================================
is_dict(Unknown) ->
try dict:size(Unknown) of
_ ->
true
catch
error:function_clause ->
false
end. | src/optic_dict.erl | 0.602412 | 0.497742 | optic_dict.erl | starcoder |
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
-module(random).
%% Reasonable random number generator.
%% The method is attributed to <NAME> and <NAME>
%% See "An efficient and portable pseudo-random number generator",
%% Journal of Applied Statistics. AS183. 1982. Also Byte March 1987.
-export([seed/0, seed/1, seed/3, uniform/0, uniform/1,
uniform_s/1, uniform_s/2, seed0/0]).
%%-----------------------------------------------------------------------
%% The type of the state
-type ran() :: {integer(), integer(), integer()}.
%%-----------------------------------------------------------------------
-spec seed0() -> ran().
seed0() ->
{3172, 9814, 20125}.
%% seed()
%% Seed random number generation with default values
-spec seed() -> ran().
seed() ->
reseed(seed0()).
%% seed({A1, A2, A3})
%% Seed random number generation
-spec seed({integer(), integer(), integer()}) -> 'undefined' | ran().
seed({A1, A2, A3}) ->
seed(A1, A2, A3).
%% seed(A1, A2, A3)
%% Seed random number generation
-spec seed(integer(), integer(), integer()) -> 'undefined' | ran().
seed(A1, A2, A3) ->
put(random_seed,
{abs(A1) rem 30269, abs(A2) rem 30307, abs(A3) rem 30323}).
-spec reseed(ran()) -> ran().
reseed({A1, A2, A3}) ->
case seed(A1, A2, A3) of
undefined -> seed0();
{_,_,_} = Tuple -> Tuple
end.
%% uniform()
%% Returns a random float between 0 and 1.
-spec uniform() -> float().
uniform() ->
{A1, A2, A3} = case get(random_seed) of
undefined -> seed0();
Tuple -> Tuple
end,
B1 = (A1*171) rem 30269,
B2 = (A2*172) rem 30307,
B3 = (A3*170) rem 30323,
put(random_seed, {B1,B2,B3}),
R = A1/30269 + A2/30307 + A3/30323,
R - trunc(R).
%% uniform(N) -> I
%% Given an integer N >= 1, uniform(N) returns a random integer
%% between 1 and N.
-spec uniform(pos_integer()) -> pos_integer().
uniform(N) when is_integer(N), N >= 1 ->
trunc(uniform() * N) + 1.
%%% Functional versions
%% uniform_s(State) -> {F, NewState}
%% Returns a random float between 0 and 1.
-spec uniform_s(ran()) -> {float(), ran()}.
uniform_s({A1, A2, A3}) ->
B1 = (A1*171) rem 30269,
B2 = (A2*172) rem 30307,
B3 = (A3*170) rem 30323,
R = A1/30269 + A2/30307 + A3/30323,
{R - trunc(R), {B1,B2,B3}}.
%% uniform_s(N, State) -> {I, NewState}
%% Given an integer N >= 1, uniform(N) returns a random integer
%% between 1 and N.
-spec uniform_s(pos_integer(), ran()) -> {integer(), ran()}.
uniform_s(N, State0) when is_integer(N), N >= 1 ->
{F, State1} = uniform_s(State0),
{trunc(F * N) + 1, State1}. | source/otp_src_R14B02/lib/stdlib/src/random.erl | 0.625896 | 0.457318 | random.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% This is the main module, which contains all Shards/ETS API
%%% functions, BUT works locally.
%%%
%%% <b>Shards</b> is compatible with ETS API, most of the functions
%%% preserves the same ETS semantics, with some exception which you
%%% will find on each function doc.
%%%
%%% Shards gives a top level view of a single logical ETS table,
%%% but inside, that logical table is split in multiple physical
%%% ETS tables called <b>shards</b>, where `Shards = [0 .. N-1]',
%%% and `N' is the number of shards into which you want to split
%%% the table.
%%%
%%% The K/V pairs are distributed across these shards, therefore,
%%% some of the functions does not follows the same semantics as
%%% the original ones ETS.
%%%
%%% A good example of that are the query-based functions, which
%%% returns multiple results, and in case of `ordered_set', with
%%% a particular order. E.g.:
%%% <ul>
%%% <li>`select/2', `select/3', `select/1'</li>
%%% <li>`select_reverse/2', `select_reverse/3', `select_reverse/1'</li>
%%% <li>`match/2', `match/3', `match/1'</li>
%%% <li>`match_object/2', `match_object/3', `match_object/1'</li>
%%% <li>etc...</li>
%%% </ul>
%%% For those cases, the order what results are returned is not
%%% guaranteed to be the same as the original ETS functions.
%%%
%%% Additionally to the ETS functions, `shards_local' module allows
%%% to pass an extra argument, the `State'. When `shards' is
%%% called without the `State', it must fetch the `state' first,
%%% and it is recovered doing an extra call to an ETS control table
%%% owned by `shards_owner_sup'. If any microsecond matters, you can
%%% skip it call by calling `shards_local' directly and passing
%%% the `State'. E.g.:
%%%
%%% ```
%%% % create a table
%%% tab_name = shards:new(tab_name, [{n_shards, 4}]).
%%%
%%% % you can get the state at any time by calling:
%%% State = shards_state:get(tab_name).
%%%
%%% % normal way
%%% shards:lookup(table, key1).
%%%
%%% % calling shards_local directly
%%% shards_local:lookup(table, key1, State).
%%% '''
%%%
%%% Pools of shards can be added/removed dynamically. For example,
%%% using `shards:new/2' you can add more pools, and `shards:delete/1'
%%% to remove the pool you wish.
%%% @end
%%%-------------------------------------------------------------------
-module(shards_local).
%% ETS API
-export([
all/0,
delete/1, delete/2, delete/3,
delete_all_objects/1, delete_all_objects/2,
delete_object/2, delete_object/3,
file2tab/1, file2tab/2,
first/1, first/2,
foldl/3, foldl/4,
foldr/3, foldr/4,
give_away/3, give_away/4,
i/0,
info/1, info/2, info/3,
info_shard/2, info_shard/3,
insert/2, insert/3,
insert_new/2, insert_new/3,
is_compiled_ms/1,
last/1, last/2,
lookup/2, lookup/3,
lookup_element/3, lookup_element/4,
match/2, match/3, match/4, match/1,
match_delete/2, match_delete/3,
match_object/2, match_object/3, match_object/4, match_object/1,
match_spec_compile/1,
match_spec_run/2,
member/2, member/3,
new/2,
next/2, next/3,
prev/2, prev/3,
rename/2, rename/3,
safe_fixtable/2, safe_fixtable/3,
select/2, select/3, select/4, select/1,
select_count/2, select_count/3,
select_delete/2, select_delete/3,
select_reverse/2, select_reverse/3, select_reverse/4, select_reverse/1,
setopts/2, setopts/3,
tab2file/2, tab2file/3, tab2file/4,
tab2list/1, tab2list/2,
tabfile_info/1,
table/1, table/2, table/3,
test_ms/2,
take/2, take/3,
update_counter/3, update_counter/4, update_counter/5,
update_element/3, update_element/4
]).
%% Extended API
-export([
shard_name/2,
pick/3,
list/2,
get_pid/1
]).
%%%===================================================================
%%% Types & Macros
%%%===================================================================
%% @type tweaks() = {write_concurrency, boolean()}
%% | {read_concurrency, boolean()}
%% | compressed.
%%
%% ETS tweaks option
-type tweaks() :: {write_concurrency, boolean()}
| {read_concurrency, boolean()}
| compressed.
%% @type shards_opt() = {scope, l | g}
%% | {n_shards, pos_integer()}
%% | {pick_shard_fun, shards_state:pick_fun()}
%% | {pick_node_fun, shards_state:pick_fun()}
%% | {restart_strategy, one_for_one | one_for_all}.
%%
%% Shards extended options.
-type shards_opt() :: {scope, l | g}
| {n_shards, pos_integer()}
| {pick_shard_fun, shards_state:pick_fun()}
| {pick_node_fun, shards_state:pick_fun()}
| {restart_strategy, one_for_one | one_for_all}.
%% @type option() = ets:type() | ets:access() | named_table
%% | {keypos, pos_integer()}
%% | {heir, pid(), HeirData :: term()}
%% | {heir, none} | tweaks()
%% | shards_opt().
%%
%% Create table options – used by `new/2'.
-type option() :: ets:type() | ets:access() | named_table
| {keypos, pos_integer()}
| {heir, pid(), HeirData :: term()}
| {heir, none} | tweaks()
| shards_opt().
% ETS Info Tuple
-type info_tuple() :: {compressed, boolean()}
| {heir, pid() | none}
| {keypos, pos_integer()}
| {memory, non_neg_integer()}
| {name, atom()}
| {named_table, boolean()}
| {node, node()}
| {owner, pid()}
| {protection, ets:access()}
| {size, non_neg_integer()}
| {type, ets:type()}
| {write_concurrency, boolean()}
| {read_concurrency, boolean()}.
% ETS Info Item
-type info_item() :: compressed | fixed | heir | keypos | memory
| name | named_table | node | owner | protection
| safe_fixed | size | stats | type
| write_concurrency | read_concurrency.
%% @type continuation() = {
%% Tab :: atom(),
%% MatchSpec :: ets:match_spec(),
%% Limit :: pos_integer(),
%% Shard :: non_neg_integer(),
%% Continuation :: ets:continuation()
%% }.
%%
%% Defines the convention to `ets:select/1,3' continuation:
%% <ul>
%% <li>`Tab': Table name.</li>
%% <li>`MatchSpec': The `ets:match_spec()'.</li>
%% <li>`Limit': Results limit.</li>
%% <li>`Shard': Shards number.</li>
%% <li>`Continuation': The `ets:continuation()'.</li>
%% </ul>
-type continuation() :: {
Tab :: atom(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
Shard :: non_neg_integer(),
Continuation :: ets:continuation()
}.
% Exported Types
-export_type([
option/0,
info_tuple/0,
info_item/0,
continuation/0
]).
%%%===================================================================
%%% ETS API
%%%===================================================================
%% @equiv ets:all()
all() ->
ets:all().
%% @doc
%% This operation behaves like `ets:delete/1'.
%%
%% @see ets:delete/1.
%% @end
-spec delete(Tab :: atom()) -> true.
delete(Tab) ->
SupName = shards_state:sup_name(Tab),
ok = shards_sup:terminate_child(SupName, Tab),
true.
%% @equiv delete(Tab, Key, shards_state:new())
delete(Tab, Key) ->
delete(Tab, Key, shards_state:new()).
%% @doc
%% This operation behaves like `ets:delete/2'.
%%
%% @see ets:delete/2.
%% @end
-spec delete(Tab, Key, State) -> true when
Tab :: atom(),
Key :: term(),
State :: shards_state:state().
delete(Tab, Key, State) ->
_ = mapred(Tab, Key, {fun ets:delete/2, [Key]}, nil, State, d),
true.
%% @equiv delete_all_objects(Tab, shards_state:new())
delete_all_objects(Tab) ->
delete_all_objects(Tab, shards_state:new()).
%% @doc
%% This operation behaves like `ets:delete_all_objects/1'.
%%
%% @see ets:delete_all_objects/1.
%% @end
-spec delete_all_objects(Tab, State) -> true when
Tab :: atom(),
State :: shards_state:state().
delete_all_objects(Tab, State) ->
_ = mapred(Tab, fun ets:delete_all_objects/1, State),
true.
%% @equiv delete_object(Tab, Object, shards_state:new())
delete_object(Tab, Object) ->
delete_object(Tab, Object, shards_state:new()).
%% @doc
%% This operation behaves like `ets:delete_object/2'.
%%
%% @see ets:delete_object/2.
%% @end
-spec delete_object(Tab, Object, State) -> true when
Tab :: atom(),
Object :: tuple(),
State :: shards_state:state().
delete_object(Tab, Object, State) when is_tuple(Object) ->
Key = hd(tuple_to_list(Object)),
_ = mapred(Tab, Key, {fun ets:delete_object/2, [Object]}, nil, State, d),
true.
%% @equiv file2tab(Filenames, [])
file2tab(Filenames) ->
file2tab(Filenames, []).
%% @doc
%% Similar to `shards:file2tab/2'. Moreover, it restores the
%% supervision tree for the `shards' corresponding to the given
%% files, such as if they had been created using `shards:new/2,3'.
%%
%% @see ets:file2tab/2.
%% @end
-spec file2tab(Filenames, Options) -> Response when
Filenames :: [file:name()],
Tab :: atom(),
Options :: [Option],
Option :: {verify, boolean()},
Reason :: term(),
Response :: {ok, Tab} | {error, Reason}.
file2tab(Filenames, Options) ->
try
ShardTabs = [{First, _} | _] = [begin
case tabfile_info(FN) of
{ok, Info} ->
{name, ShardTabName} = lists:keyfind(name, 1, Info),
{ShardTabName, FN};
{error, _} = Error ->
throw(Error)
end
end || FN <- Filenames],
Tab = name_from_shard(First),
Tab = new(Tab, [
{restore, ShardTabs, Options},
{n_shards, length(Filenames)}
]),
{ok, Tab}
catch
_:Error -> Error
end.
%% @equiv first(Tab, shards_state:new())
first(Tab) ->
first(Tab, shards_state:new()).
%% @doc
%% This operation behaves similar to `ets:first/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:first/1.
%% @end
-spec first(Tab, State) -> Key | '$end_of_table' when
Tab :: atom(),
Key :: term(),
State :: shards_state:state().
first(Tab, State) ->
N = shards_state:n_shards(State),
Shard = N - 1,
first(Tab, ets:first(shard_name(Tab, Shard)), Shard).
%% @private
first(Tab, '$end_of_table', Shard) when Shard > 0 ->
NextShard = Shard - 1,
first(Tab, ets:first(shard_name(Tab, NextShard)), NextShard);
first(_, '$end_of_table', _) ->
'$end_of_table';
first(_, Key, _) ->
Key.
%% @equiv foldl(Function, Acc0, Tab, shards_state:new())
foldl(Function, Acc0, Tab) ->
foldl(Function, Acc0, Tab, shards_state:new()).
%% @doc
%% This operation behaves like `ets:foldl/3'.
%%
%% @see ets:foldl/3.
%% @end
-spec foldl(Function, Acc0, Tab, State) -> Acc1 when
Function :: fun((Element :: term(), AccIn) -> AccOut),
Tab :: atom(),
State :: shards_state:state(),
Acc0 :: term(),
Acc1 :: term(),
AccIn :: term(),
AccOut :: term().
foldl(Function, Acc0, Tab, State) ->
N = shards_state:n_shards(State),
fold(Tab, N, foldl, [Function, Acc0]).
%% @equiv foldr(Function, Acc0, Tab, shards_state:new())
foldr(Function, Acc0, Tab) ->
foldr(Function, Acc0, Tab, shards_state:new()).
%% @doc
%% This operation behaves like `ets:foldr/3'.
%%
%% @see ets:foldr/3.
%% @end
-spec foldr(Function, Acc0, Tab, State) -> Acc1 when
Function :: fun((Element :: term(), AccIn) -> AccOut),
Tab :: atom(),
State :: shards_state:state(),
Acc0 :: term(),
Acc1 :: term(),
AccIn :: term(),
AccOut :: term().
foldr(Function, Acc0, Tab, State) ->
N = shards_state:n_shards(State),
fold(Tab, N, foldr, [Function, Acc0]).
%% @equiv give_away(Tab, Pid, GiftData, shards_state:new())
give_away(Tab, Pid, GiftData) ->
give_away(Tab, Pid, GiftData, shards_state:new()).
%% @doc
%% Equivalent to `ets:give_away/3' for each shard table. It returns
%% a `boolean()' instead that just `true'. Returns `true' if the
%% function was applied successfully on each shard, otherwise
%% `false' is returned.
%%
%% <p><font color="red"><b>WARNING: It is not recommended execute
%% this function, since it might cause an unexpected behavior.
%% Once this function is executed, `shards' doesn't control/manage
%% the ETS shards anymore. So from this point, you should use
%% ETS API instead. Also it is recommended to run `shards:delete/1'
%% after run this function.
%% </b></font></p>
%%
%% @see ets:give_away/3.
%% @end
-spec give_away(Tab, Pid, GiftData, State) -> true when
Tab :: atom(),
Pid :: pid(),
GiftData :: term(),
State :: shards_state:state().
give_away(Tab, Pid, GiftData, State) ->
Map = {fun shards_owner:apply_ets_fun/3, [give_away, [Pid, GiftData]]},
Reduce = {fun(_, Acc) -> Acc end, true},
mapred(Tab, Map, Reduce, State).
%% @equiv ets:i()
i() ->
ets:i().
%% @equiv info(Tab, shards_state:new())
info(Tab) ->
info(Tab, shards_state:new()).
%% @doc
%% If 2nd argument is `info_tuple()' this function behaves like
%% `ets:info/2', but if it is the `shards_state:state()',
%% it behaves like `ets:info/1', but instead of return the
%% information about one single table, it returns a list with
%% the information of each shard table.
%%
%% @see ets:info/1.
%% @see ets:info/2.
%% @see shards:info_shard/2.
%% @see shards:info_shard/3.
%% @end
-spec info(Tab, StateOrItem) -> Result when
Tab :: atom(),
StateOrItem :: shards_state:state() | info_item(),
InfoList :: [info_tuple()],
Result1 :: [InfoList] | undefined,
Value :: [term()] | undefined,
Result :: Result1 | Value.
info(Tab, Item) when is_atom(Item) ->
info(Tab, Item, shards_state:new());
info(Tab, State) ->
case whereis(Tab) of
undefined -> undefined;
_ -> mapred(Tab, fun ets:info/1, State)
end.
%% @doc
%% This operation behaves like `ets:info/2', but instead of return
%% the information about one single table, it returns a list with
%% the information of each shard table.
%%
%% @see ets:info/2.
%% @see shards:info_shard/3.
%% @end
-spec info(Tab, Item, State) -> Value when
Tab :: atom(),
State :: shards_state:state(),
Item :: info_item(),
Value :: [term()] | undefined.
info(Tab, Item, State) ->
case whereis(Tab) of
undefined -> undefined;
_ -> mapred(Tab, {fun ets:info/2, [Item]}, State)
end.
%% @doc
%% This operation behaves like `ets:info/1'
%%
%% @see ets:info/1.
%% @end
-spec info_shard(Tab, Shard) -> InfoList | undefined when
Tab :: atom(),
Shard :: non_neg_integer(),
InfoList :: [info_tuple()].
info_shard(Tab, Shard) ->
ShardName = shard_name(Tab, Shard),
ets:info(ShardName).
%% @doc
%% This operation behaves like `ets:info/2'.
%%
%% @see ets:info/2.
%% @end
-spec info_shard(Tab, Shard, Item) -> Value | undefined when
Tab :: atom(),
Shard :: non_neg_integer(),
Item :: info_item(),
Value :: term().
info_shard(Tab, Shard, Item) ->
ShardName = shard_name(Tab, Shard),
ets:info(ShardName, Item).
%% @equiv insert(Tab, ObjOrObjL, shards_state:new())
insert(Tab, ObjOrObjL) ->
insert(Tab, ObjOrObjL, shards_state:new()).
%% @doc
%% This operation behaves similar to `ets:insert/2', with a big
%% difference, <b>it is not atomic</b>. This means if it fails
%% inserting some K/V pair, previous inserted KV pairs are not
%% rolled back.
%%
%% @see ets:insert/2.
%% @end
-spec insert(Tab, ObjOrObjL, State) -> true when
Tab :: atom(),
ObjOrObjL :: tuple() | [tuple()],
State :: shards_state:state().
insert(Tab, ObjOrObjL, State) when is_list(ObjOrObjL) ->
lists:foreach(fun(Object) ->
true = insert(Tab, Object, State)
end, ObjOrObjL), true;
insert(Tab, ObjOrObjL, State) when is_tuple(ObjOrObjL) ->
Key = hd(tuple_to_list(ObjOrObjL)),
N = shards_state:n_shards(State),
PickShardFun = shards_state:pick_shard_fun(State),
ShardName = shard_name(Tab, PickShardFun(Key, N, w)),
ets:insert(ShardName, ObjOrObjL).
%% @equiv insert_new(Tab, ObjOrObjL, shards_state:new())
insert_new(Tab, ObjOrObjL) ->
insert_new(Tab, ObjOrObjL, shards_state:new()).
%% @doc
%% This operation behaves like `ets:insert_new/2' BUT it is not atomic,
%% which means if it fails inserting some K/V pair, only that K/V
%% pair is affected, the rest may be successfully inserted.
%%
%% This function returns a list if the `ObjectOrObjects' is a list.
%%
%% @see ets:insert_new/2.
%% @end
-spec insert_new(Tab, ObjOrObjL, State) -> Result when
Tab :: atom(),
ObjOrObjL :: tuple() | [tuple()],
State :: shards_state:state(),
Result :: boolean() | [boolean()].
insert_new(Tab, ObjOrObjL, State) when is_list(ObjOrObjL) ->
lists:foldr(fun(Object, Acc) ->
[insert_new(Tab, Object, State) | Acc]
end, [], ObjOrObjL);
insert_new(Tab, ObjOrObjL, State) when is_tuple(ObjOrObjL) ->
Key = hd(tuple_to_list(ObjOrObjL)),
N = shards_state:n_shards(State),
PickShardFun = shards_state:pick_shard_fun(State),
case PickShardFun(Key, N, r) of
any ->
Map = {fun ets:lookup/2, [Key]},
Reduce = fun lists:append/2,
case mapred(Tab, Map, Reduce, State) of
[] ->
ShardName = shard_name(Tab, PickShardFun(Key, N, w)),
ets:insert_new(ShardName, ObjOrObjL);
_ ->
false
end;
_ ->
ShardName = shard_name(Tab, PickShardFun(Key, N, w)),
ets:insert_new(ShardName, ObjOrObjL)
end.
%% @equiv ets:is_compiled_ms(Term)
is_compiled_ms(Term) ->
ets:is_compiled_ms(Term).
%% @equiv last(Tab, shards_state:new())
last(Tab) ->
last(Tab, shards_state:new()).
%% @doc
%% This operation behaves similar to `ets:last/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:last/1.
%% @end
-spec last(Tab, State) -> Key | '$end_of_table' when
Tab :: atom(),
State :: shards_state:state(),
Key :: term().
last(Tab, State) ->
case ets:info(shard_name(Tab, 0), type) of
ordered_set ->
ets:last(shard_name(Tab, 0));
_ ->
first(Tab, State)
end.
%% @equiv lookup(Tab, Key, shards_state:new())
lookup(Tab, Key) ->
lookup(Tab, Key, shards_state:new()).
%% @doc
%% This operation behaves like `ets:lookup/2'.
%%
%% @see ets:lookup/2.
%% @end
-spec lookup(Tab, Key, State) -> Result when
Tab :: atom(),
Key :: term(),
State :: shards_state:state(),
Result :: [tuple()].
lookup(Tab, Key, State) ->
Map = {fun ets:lookup/2, [Key]},
Reduce = fun lists:append/2,
mapred(Tab, Key, Map, Reduce, State, r).
%% @equiv lookup_element(Tab, Key, Pos, shards_state:new())
lookup_element(Tab, Key, Pos) ->
lookup_element(Tab, Key, Pos, shards_state:new()).
%% @doc
%% This operation behaves like `ets:lookup_element/3'.
%%
%% @see ets:lookup_element/3.
%% @end
-spec lookup_element(Tab, Key, Pos, State) -> Elem when
Tab :: atom(),
Key :: term(),
Pos :: pos_integer(),
State :: shards_state:state(),
Elem :: term() | [term()].
lookup_element(Tab, Key, Pos, State) ->
N = shards_state:n_shards(State),
PickShardFun = shards_state:pick_shard_fun(State),
case PickShardFun(Key, N, r) of
any ->
LookupElem = fun(Tx, Kx, Px) ->
catch ets:lookup_element(Tx, Kx, Px)
end,
Filter = lists:filter(fun
({'EXIT', _}) -> false;
(_) -> true
end, mapred(Tab, {LookupElem, [Key, Pos]}, State)),
case Filter of
[] -> error(badarg);
_ -> lists:append(Filter)
end;
Shard ->
ShardName = shard_name(Tab, Shard),
ets:lookup_element(ShardName, Key, Pos)
end.
match(Tab, Pattern) ->
match(Tab, Pattern, shards_state:new()).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves
%% like `ets:match/3', but if it is the `shards_state:state()',
%% it behaves like `ets:match/2'.
%%
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match/2.
%% @see ets:match/3.
%% @end
-spec match(Tab, Pattern, StateOrLimit) -> Response when
Tab :: atom(),
Pattern :: ets:match_pattern(),
StateOrLimit :: shards_state:state() | pos_integer(),
Match :: [term()],
Continuation :: continuation(),
ResWithState :: [Match],
ResWithLimit :: {[Match], Continuation} | '$end_of_table',
Response :: ResWithState | ResWithLimit.
match(Tab, Pattern, Limit) when is_integer(Limit), Limit > 0 ->
match(Tab, Pattern, Limit, shards_state:new());
match(Tab, Pattern, State) ->
Map = {fun ets:match/2, [Pattern]},
Reduce = fun lists:append/2,
mapred(Tab, Map, Reduce, State).
%% @doc
%% This operation behaves similar to `ets:match/3'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match/3.
%% @end
-spec match(Tab, Pattern, Limit, State) -> Response when
Tab :: atom(),
Pattern :: ets:match_pattern(),
Limit :: pos_integer(),
State :: shards_state:state(),
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
match(Tab, Pattern, Limit, State) ->
N = shards_state:n_shards(State),
q(match, Tab, Pattern, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% This operation behaves similar to `ets:match/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match/1.
%% @end
-spec match(Continuation) -> Response when
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
match({_, _, Limit, _, _} = Continuation) ->
q(match, Continuation, q_fun(), Limit, []).
%% @equiv match_delete(Tab, Pattern, shards_state:new())
match_delete(Tab, Pattern) ->
match_delete(Tab, Pattern, shards_state:new()).
%% @doc
%% This operation behaves like `ets:match_delete/2'.
%%
%% @see ets:match_delete/2.
%% @end
-spec match_delete(Tab, Pattern, State) -> true when
Tab :: atom(),
Pattern :: ets:match_pattern(),
State :: shards_state:state().
match_delete(Tab, Pattern, State) ->
Map = {fun ets:match_delete/2, [Pattern]},
Reduce = {fun(Res, Acc) -> Acc and Res end, true},
mapred(Tab, Map, Reduce, State).
%% @equiv match_object(Tab, Pattern, shards_state:new())
match_object(Tab, Pattern) ->
match_object(Tab, Pattern, shards_state:new()).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves like
%% `ets:match_object/3', but if it is the `shards_state:state()',
%% it behaves like `ets:match_object/2'.
%%
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match_object/2.
%% @see ets:match_object/3.
%% @end
-spec match_object(Tab, Pattern, StateOrLimit) -> Response when
Tab :: atom(),
Pattern :: ets:match_pattern(),
StateOrLimit :: shards_state:state() | pos_integer(),
Object :: tuple(),
ResWithState :: [Object],
ResWithLimit :: {[term()], Continuation} | '$end_of_table',
Continuation :: continuation(),
Response :: ResWithState | ResWithLimit.
match_object(Tab, Pattern, Limit) when is_integer(Limit), Limit > 0 ->
match_object(Tab, Pattern, Limit, shards_state:new());
match_object(Tab, Pattern, State) ->
Map = {fun ets:match_object/2, [Pattern]},
Reduce = fun lists:append/2,
mapred(Tab, Map, Reduce, State).
%% @doc
%% This operation behaves similar to `ets:match_object/3'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match_object/3.
%% @end
-spec match_object(Tab, Pattern, Limit, State) -> Response when
Tab :: atom(),
Pattern :: ets:match_pattern(),
Limit :: pos_integer(),
State :: shards_state:state(),
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
match_object(Tab, Pattern, Limit, State) ->
N = shards_state:n_shards(State),
q(match_object, Tab, Pattern, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% This operation behaves similar to `ets:match_object/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:match_object/1.
%% @end
-spec match_object(Continuation) -> Response when
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
match_object({_, _, Limit, _, _} = Continuation) ->
q(match_object, Continuation, q_fun(), Limit, []).
%% @equiv ets:match_spec_compile(MatchSpec)
match_spec_compile(MatchSpec) ->
ets:match_spec_compile(MatchSpec).
%% @equiv ets:match_spec_run(List, CompiledMatchSpec)
match_spec_run(List, CompiledMatchSpec) ->
ets:match_spec_run(List, CompiledMatchSpec).
%% @equiv member(Tab, Key, shards_state:new())
member(Tab, Key) ->
member(Tab, Key, shards_state:new()).
%% @doc
%% This operation behaves like `ets:member/2'.
%%
%% @see ets:member/2.
%% @end
-spec member(Tab, Key, State) -> boolean() when
Tab :: atom(),
Key :: term(),
State :: shards_state:state().
member(Tab, Key, State) ->
case mapred(Tab, Key, {fun ets:member/2, [Key]}, nil, State, r) of
R when is_list(R) -> lists:member(true, R);
R -> R
end.
%% @doc
%% This operation is analogous to `ets:new/2', BUT it behaves totally
%% different. When this function is called, instead of create a single
%% table, a new supervision tree is created and added to `shards_sup'.
%%
%% This supervision tree has a main supervisor `shards_sup' which
%% creates a control ETS table and also creates `N' number of
%% `shards_owner' (being `N' the number of shards). Each `shards_owner'
%% creates an ETS table to represent each shard, so this `gen_server'
%% acts as the table owner.
%%
%% Finally, when you create a table, internally `N' physical tables
%% are created (one per shard), but `shards' encapsulates all this
%% and you see only one logical table (similar to how a distributed
%% storage works).
%%
%% <b>IMPORTANT: By default, `NumShards = number of schedulers'.</b>
%%
%% @see ets:new/2.
%% @end
-spec new(Name, Options) -> Name when
Name :: atom(),
Options :: [option()].
new(Name, Options) ->
case lists:keyfind(sup_name, 1, Options) of
{sup_name, SupName} ->
do_new(SupName, Name, Options);
false ->
do_new(shards_sup, Name, Options)
end.
%% @private
do_new(SupName, Name, Options) ->
case shards_sup:start_child(SupName, Name, Options) of
{ok, Pid} ->
true = register(Name, Pid),
Name;
_ ->
error(badarg)
end.
%% @equiv next(Tab, Key1, shards_state:new())
next(Tab, Key1) ->
next(Tab, Key1, shards_state:new()).
%% @doc
%% This operation behaves similar to `ets:next/2'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning. It raises a `bad_pick_fun_ret'
%% exception in case of pick fun returns `any'.
%%
%% @see ets:next/2.
%% @end
-spec next(Tab, Key1, State) -> Key2 | '$end_of_table' when
Tab :: atom(),
Key1 :: term(),
State :: shards_state:state(),
Key2 :: term().
next(Tab, Key1, State) ->
N = shards_state:n_shards(State),
PickShardFun = shards_state:pick_shard_fun(State),
case PickShardFun(Key1, N, r) of
any ->
error(bad_pick_fun_ret);
Shard ->
ShardName = shard_name(Tab, Shard),
next_(Tab, ets:next(ShardName, Key1), Shard)
end.
%% @private
next_(Tab, '$end_of_table', Shard) when Shard > 0 ->
NextShard = Shard - 1,
next_(Tab, ets:first(shard_name(Tab, NextShard)), NextShard);
next_(_, '$end_of_table', _) ->
'$end_of_table';
next_(_, Key2, _) ->
Key2.
%% @equiv prev(Tab, Key1, shards_state:new())
prev(Tab, Key1) ->
prev(Tab, Key1, shards_state:new()).
%% @doc
%% This operation behaves similar to `ets:prev/2'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:prev/2.
%% @end
-spec prev(Tab, Key1, State) -> Key2 | '$end_of_table' when
Tab :: atom(),
Key1 :: term(),
State :: shards_state:state(),
Key2 :: term().
prev(Tab, Key1, State) ->
case ets:info(shard_name(Tab, 0), type) of
ordered_set ->
ets:prev(shard_name(Tab, 0), Key1);
_ ->
next(Tab, Key1, State)
end.
%% @equiv rename(Tab, Name, shards_state:new())
rename(Tab, Name) ->
rename(Tab, Name, shards_state:new()).
%% @doc
%% Equivalent to `ets:rename/2'.
%%
%% Renames the table name and all its associated shard tables.
%% If something unexpected occurs during the process, an exception
%% will be thrown.
%%
%% @see ets:rename/2.
%% @end
-spec rename(Tab, Name, State) -> Name | no_return() when
Tab :: atom(),
Name :: atom(),
State :: shards_state:state().
rename(Tab, Name, State) ->
_ = lists:foreach(fun(Shard) ->
ShardName = shard_name(Tab, Shard),
NewShardName = shard_name(Name, Shard),
NewShardName = do_rename(ShardName, NewShardName)
end, lists:seq(0, shards_state:n_shards(State) - 1)),
do_rename(Tab, Name).
%% @private
do_rename(OldName, NewName) ->
NewName = ets:rename(OldName, NewName),
Pid = get_pid(OldName),
true = unregister(OldName),
true = register(NewName, Pid),
NewName.
%% @equiv safe_fixtable(Tab, Fix, shards_state:new())
safe_fixtable(Tab, Fix) ->
safe_fixtable(Tab, Fix, shards_state:new()).
%% @doc
%% Equivalent to `ets:safe_fixtable/2' for each shard table.
%% It returns a `boolean()' instead that just `true'.
%% Returns `true' if the function was applied successfully
%% on each shard, otherwise `false' is returned.
%%
%% @see ets:safe_fixtable/2.
%% @end
-spec safe_fixtable(Tab, Fix, State) -> boolean() when
Tab :: atom(),
Fix :: boolean(),
State :: shards_state:state().
safe_fixtable(Tab, Fix, State) ->
Map = {fun ets:safe_fixtable/2, [Fix]},
Reduce = {fun(E, Acc) -> Acc and E end, true},
mapred(Tab, Map, Reduce, State).
%% @equiv select(Tab, MatchSpec, shards_state:new())
select(Tab, MatchSpec) ->
select(Tab, MatchSpec, shards_state:new()).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves like
%% `ets:select/3', but if it is the `shards_state:state()',
%% it behaves like `ets:select/2'.
%%
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select/2.
%% @see ets:select/3.
%% @end
-spec select(Tab, MatchSpec, StateOrLimit) -> Response when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
StateOrLimit :: shards_state:state() | pos_integer(),
Match :: term(),
ResWithState :: [Match],
ResWithLimit :: {[Match], Continuation} | '$end_of_table',
Continuation :: continuation(),
Response :: ResWithState | ResWithLimit.
select(Tab, MatchSpec, Limit) when is_integer(Limit), Limit > 0 ->
select(Tab, MatchSpec, Limit, shards_state:new());
select(Tab, MatchSpec, State) ->
Map = {fun ets:select/2, [MatchSpec]},
Reduce = fun lists:append/2,
mapred(Tab, Map, Reduce, State).
%% @doc
%% This operation behaves similar to `ets:select/3'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select/3.
%% @end
-spec select(Tab, MatchSpec, Limit, State) -> Response when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
State :: shards_state:state(),
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
select(Tab, MatchSpec, Limit, State) ->
N = shards_state:n_shards(State),
q(select, Tab, MatchSpec, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% This operation behaves similar to `ets:select/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select/1.
%% @end
-spec select(Continuation) -> Response when
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
select({_, _, Limit, _, _} = Continuation) ->
q(select, Continuation, q_fun(), Limit, []).
%% @equiv select_count(Tab, MatchSpec, shards_state:new())
select_count(Tab, MatchSpec) ->
select_count(Tab, MatchSpec, shards_state:new()).
%% @doc
%% This operation behaves like `ets:select_count/2'.
%%
%% @see ets:select_count/2.
%% @end
-spec select_count(Tab, MatchSpec, State) -> NumMatched when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
State :: shards_state:state(),
NumMatched :: non_neg_integer().
select_count(Tab, MatchSpec, State) ->
Map = {fun ets:select_count/2, [MatchSpec]},
Reduce = {fun(Res, Acc) -> Acc + Res end, 0},
mapred(Tab, Map, Reduce, State).
%% @equiv select_delete(Tab, MatchSpec, shards_state:new())
select_delete(Tab, MatchSpec) ->
select_delete(Tab, MatchSpec, shards_state:new()).
%% @doc
%% This operation behaves like `ets:select_delete/2'.
%%
%% @see ets:select_delete/2.
%% @end
-spec select_delete(Tab, MatchSpec, State) -> NumDeleted when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
State :: shards_state:state(),
NumDeleted :: non_neg_integer().
select_delete(Tab, MatchSpec, State) ->
Map = {fun ets:select_delete/2, [MatchSpec]},
Reduce = {fun(Res, Acc) -> Acc + Res end, 0},
mapred(Tab, Map, Reduce, State).
%% @equiv select_reverse(Tab, MatchSpec, shards_state:new())
select_reverse(Tab, MatchSpec) ->
select_reverse(Tab, MatchSpec, shards_state:new()).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves like
%% `ets:select_reverse/3', but if it is the `shards_state:state()',
%% it behaves like `ets:select_reverse/2'.
%%
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select_reverse/2.
%% @see ets:select_reverse/3.
%% @end
-spec select_reverse(Tab, MatchSpec, StateOrLimit) -> Response when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
StateOrLimit :: shards_state:state() | pos_integer(),
Match :: term(),
ResWithState :: [Match],
ResWithLimit :: {[Match], Continuation} | '$end_of_table',
Continuation :: continuation(),
Response :: ResWithState | ResWithLimit.
select_reverse(Tab, MatchSpec, Limit) when is_integer(Limit), Limit > 0 ->
select_reverse(Tab, MatchSpec, Limit, shards_state:new());
select_reverse(Tab, MatchSpec, State) ->
Map = {fun ets:select_reverse/2, [MatchSpec]},
Reduce = fun lists:append/2,
mapred(Tab, Map, Reduce, State).
%% @doc
%% This operation behaves similar to `ets:select_reverse/3'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select_reverse/3.
%% @end
-spec select_reverse(Tab, MatchSpec, Limit, State) -> Response when
Tab :: atom(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
State :: shards_state:state(),
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
select_reverse(Tab, MatchSpec, Limit, State) ->
N = shards_state:n_shards(State),
q(select_reverse, Tab, MatchSpec, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% This operation behaves similar to `ets:select_reverse/1'.
%% The order in which results are returned, might be not the same
%% as the original ETS function. Remember shards architecture
%% described at the beginning.
%%
%% @see ets:select_reverse/1.
%% @end
-spec select_reverse(Continuation) -> Response when
Match :: term(),
Continuation :: continuation(),
Response :: {[Match], Continuation} | '$end_of_table'.
select_reverse({_, _, Limit, _, _} = Continuation) ->
q(select_reverse, Continuation, q_fun(), Limit, []).
%% @equiv setopts(Tab, Opts, shards_state:new())
setopts(Tab, Opts) ->
setopts(Tab, Opts, shards_state:new()).
%% @doc
%% Equivalent to `ets:setopts/2' for each shard table. It returns
%% a `boolean()' instead that just `true'. Returns `true' if the
%% function was applied successfully on each shard, otherwise
%% `false' is returned.
%%
%% @see ets:setopts/2.
%% @end
-spec setopts(Tab, Opts, State) -> boolean() when
Tab :: atom(),
Opts :: Opt | [Opt],
Opt :: {heir, pid(), HeirData} | {heir, none},
HeirData :: term(),
State :: shards_state:state().
setopts(Tab, Opts, State) ->
Map = {fun shards_owner:apply_ets_fun/3, [setopts, [Opts]]},
Reduce = {fun(E, Acc) -> Acc and E end, true},
mapred(Tab, Map, Reduce, State).
%% @equiv tab2file(Tab, Filenames, shards_state:new())
tab2file(Tab, Filenames) ->
tab2file(Tab, Filenames, shards_state:new()).
%% @equiv tab2file/4
tab2file(Tab, Filenames, Options) when is_list(Options) ->
tab2file(Tab, Filenames, Options, shards_state:new());
tab2file(Tab, Filenames, State) ->
tab2file(Tab, Filenames, [], State).
%% @doc
%% Similar to `ets:tab2file/3', but it returns a list of
%% responses for each shard table instead.
%%
%% @see ets:tab2file/3.
%% @end
-spec tab2file(Tab, Filenames, Options, State) -> Response when
Tab :: atom(),
Filenames :: [file:name()],
Options :: [Option],
Option :: {extended_info, [ExtInfo]} | {sync, boolean()},
ExtInfo :: md5sum | object_count,
State :: shards_state:state(),
ShardTab :: atom(),
ShardRes :: ok | {error, Reason :: term()},
Response :: [{ShardTab, ShardRes}].
tab2file(Tab, Filenames, Options, State) ->
N = shards_state:n_shards(State),
[begin
ets:tab2file(Shard, Filename, Options)
end || {Shard, Filename} <- lists:zip(list(Tab, N), Filenames)].
%% @equiv tab2list(Tab, shards_state:new())
tab2list(Tab) ->
tab2list(Tab, shards_state:new()).
%% @doc
%% This operation behaves like `ets:tab2list/1'.
%%
%% @see ets:tab2list/1.
%% @end
-spec tab2list(Tab, State) -> [Object] when
Tab :: atom(),
State :: shards_state:state(),
Object :: tuple().
tab2list(Tab, State) ->
mapred(Tab, fun ets:tab2list/1, fun lists:append/2, State).
%% @equiv ets:tabfile_info(Filename)
tabfile_info(Filename) ->
ets:tabfile_info(Filename).
%% @equiv table(Tab, shards_state:new())
table(Tab) ->
table(Tab, shards_state:new()).
%% @equiv table/3
table(Tab, Options) when is_list(Options) ->
table(Tab, Options, shards_state:new());
table(Tab, State) ->
table(Tab, [], State).
%% @doc
%% Similar to `ets:table/2', but it returns a list of `ets:table/2'
%% responses, one for each shard table.
%%
%% @see ets:table/2.
%% @end
-spec table(Tab, Options, State) -> [QueryHandle] when
Tab :: atom(),
QueryHandle :: qlc:query_handle(),
Options :: [Option] | Option,
Option :: {n_objects, NObjects} | {traverse, TraverseMethod},
NObjects :: default | pos_integer(),
State :: shards_state:state(),
MatchSpec :: ets:match_spec(),
TraverseMethod :: first_next | last_prev | select | {select, MatchSpec}.
table(Tab, Options, State) ->
mapred(Tab, {fun ets:table/2, [Options]}, State).
%% @equiv ets:test_ms(Tuple, MatchSpec)
test_ms(Tuple, MatchSpec) ->
ets:test_ms(Tuple, MatchSpec).
%% @equiv take(Tab, Key, shards_state:new())
take(Tab, Key) ->
take(Tab, Key, shards_state:new()).
%% @doc
%% This operation behaves like `ets:take/2'.
%%
%% @see ets:take/2.
%% @end
-spec take(Tab, Key, State) -> [Object] when
Tab :: atom(),
Key :: term(),
State :: shards_state:state(),
Object :: tuple().
take(Tab, Key, State) ->
Map = {fun ets:take/2, [Key]},
Reduce = fun lists:append/2,
mapred(Tab, Key, Map, Reduce, State, r).
%% @equiv update_counter(Tab, Key, UpdateOp, shards_state:new())
update_counter(Tab, Key, UpdateOp) ->
update_counter(Tab, Key, UpdateOp, shards_state:new()).
%% @doc
%% This operation behaves like `ets:update_counter/3'.
%%
%% @see ets:update_counter/3.
%% @end
-spec update_counter(Tab, Key, UpdateOp, State) -> Result when
Tab :: atom(),
Key :: term(),
UpdateOp :: term(),
State :: shards_state:state(),
Result :: integer().
update_counter(Tab, Key, UpdateOp, State) ->
Map = {fun ets:update_counter/3, [Key, UpdateOp]},
mapred(Tab, Key, Map, nil, State, w).
%% @doc
%% This operation behaves like `ets:update_counter/4'.
%%
%% @see ets:update_counter/4.
%% @end
-spec update_counter(Tab, Key, UpdateOp, Default, State) -> Result when
Tab :: atom(),
Key :: term(),
UpdateOp :: term(),
Default :: tuple(),
State :: shards_state:state(),
Result :: integer().
update_counter(Tab, Key, UpdateOp, Default, State) ->
Map = {fun ets:update_counter/4, [Key, UpdateOp, Default]},
mapred(Tab, Key, Map, nil, State, w).
%% @equiv update_element(Tab, Key, ElementSpec, shards_state:new())
update_element(Tab, Key, ElementSpec) ->
update_element(Tab, Key, ElementSpec, shards_state:new()).
%% @doc
%% This operation behaves like `ets:update_element/3'.
%%
%% @see ets:update_element/3.
%% @end
-spec update_element(Tab, Key, ElementSpec, State) -> boolean() when
Tab :: atom(),
Key :: term(),
Pos :: pos_integer(),
Value :: term(),
ElementSpec :: {Pos, Value} | [{Pos, Value}],
State :: shards_state:state().
update_element(Tab, Key, ElementSpec, State) ->
Map = {fun ets:update_element/3, [Key, ElementSpec]},
mapred(Tab, Key, Map, nil, State, w).
%%%===================================================================
%%% Extended API
%%%===================================================================
%% @doc
%% Builds a shard name `ShardName'.
%% <ul>
%% <li>`TabName': Table name from which the shard name is generated.</li>
%% <li>`ShardNum': Shard number – from `0' to `(NumShards - 1)'</li>
%% </ul>
%% @end
-spec shard_name(TabName, ShardNum) -> ShardName when
TabName :: atom(),
ShardNum :: non_neg_integer(),
ShardName :: atom().
shard_name(TabName, Shard) ->
shards_owner:shard_name(TabName, Shard).
%% @doc
%% Pick/computes the shard where the `Key' will be handled.
%% <ul>
%% <li>`Key': The key to be hashed to calculate the shard.</li>
%% <li>`Range': Range/set – number of shards/nodes.</li>
%% <li>`Op': Operation type: `r | w | d'.</li>
%% </ul>
%% @end
-spec pick(Key, Range, Op) -> Result when
Key :: shards_state:key(),
Range :: shards_state:range(),
Op :: shards_state:op(),
Result :: non_neg_integer().
pick(Key, NumShards, _) ->
erlang:phash2(Key, NumShards).
%% @doc
%% Returns the list of shard names associated to the given `TabName'.
%% The shard names that were created in the `shards:new/2,3' fun.
%% <ul>
%% <li>`TabName': Table name.</li>
%% <li>`NumShards': Number of shards.</li>
%% </ul>
%% @end
-spec list(TabName, NumShards) -> ShardTabNames when
TabName :: atom(),
NumShards :: pos_integer(),
ShardTabNames :: [atom()].
list(TabName, NumShards) ->
Shards = lists:seq(0, NumShards - 1),
[shard_name(TabName, Shard) || Shard <- Shards].
%% @doc
%% Returns the PID associated to the table `Tab'.
%% <ul>
%% <li>`TabName': Table name.</li>
%% </ul>
%% @end
-spec get_pid(Tab :: atom()) -> pid() | no_return().
get_pid(Tab) ->
case whereis(Tab) of
undefined -> error(badarg);
Pid -> Pid
end.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
mapred(Tab, Map, State) ->
mapred(Tab, Map, nil, State).
%% @private
mapred(Tab, Map, Reduce, State) ->
mapred(Tab, nil, Map, Reduce, State, r).
%% @private
mapred(Tab, Key, Map, nil, State, Op) ->
mapred(Tab, Key, Map, fun(E, Acc) -> [E | Acc] end, State, Op);
mapred(Tab, nil, Map, Reduce, State, _) ->
case shards_state:n_shards(State) of
N when N =< 1 ->
s_mapred(Tab, N, Map, Reduce);
N ->
p_mapred(Tab, N, Map, Reduce)
end;
mapred(Tab, Key, {MapFun, Args} = Map, Reduce, State, Op) ->
N = shards_state:n_shards(State),
PickShardFun = shards_state:pick_shard_fun(State),
case PickShardFun(Key, N, Op) of
any ->
s_mapred(Tab, N, Map, Reduce);
Shard ->
apply(MapFun, [shard_name(Tab, Shard) | Args])
end.
%% @private
s_mapred(Tab, NumShards, {MapFun, Args}, {ReduceFun, AccIn}) ->
lists:foldl(fun(Shard, Acc) ->
MapRes = apply(MapFun, [shard_name(Tab, Shard) | Args]),
ReduceFun(MapRes, Acc)
end, AccIn, lists:seq(0, NumShards - 1));
s_mapred(Tab, NumShards, MapFun, ReduceFun) ->
{Map, Reduce} = mapred_funs(MapFun, ReduceFun),
s_mapred(Tab, NumShards, Map, Reduce).
%% @private
p_mapred(Tab, NumShards, {MapFun, Args}, {ReduceFun, AccIn}) ->
Tasks = lists:foldl(fun(Shard, Acc) ->
AsyncTask = shards_task:async(fun() ->
apply(MapFun, [shard_name(Tab, Shard) | Args])
end), [AsyncTask | Acc]
end, [], lists:seq(0, NumShards - 1)),
lists:foldl(fun(Task, Acc) ->
MapRes = shards_task:await(Task),
ReduceFun(MapRes, Acc)
end, AccIn, Tasks);
p_mapred(Tab, NumShards, MapFun, ReduceFun) ->
{Map, Reduce} = mapred_funs(MapFun, ReduceFun),
p_mapred(Tab, NumShards, Map, Reduce).
%% @private
mapred_funs(MapFun, ReduceFun) ->
Map = case is_function(MapFun) of
true -> {MapFun, []};
_ -> MapFun
end,
Reduce = {ReduceFun, []},
{Map, Reduce}.
%% @private
fold(Tab, NumShards, Fold, [Fun, Acc]) ->
lists:foldl(fun(Shard, FoldAcc) ->
ShardName = shard_name(Tab, Shard),
apply(ets, Fold, [Fun, FoldAcc, ShardName])
end, Acc, lists:seq(0, NumShards - 1)).
%% @private
name_from_shard(ShardTabName) ->
BinShardTabName = atom_to_binary(ShardTabName, utf8),
Tokens = binary:split(BinShardTabName, <<"_">>, [global]),
binary_to_atom(join_bin(lists:droplast(Tokens), <<"_">>), utf8).
%% @private
join_bin(BinL, Separator) when is_list(BinL) ->
lists:foldl(fun
(X, <<"">>) -> <<X/binary>>;
(X, Acc) -> <<Acc/binary, Separator/binary, X/binary>>
end, <<"">>, BinL).
%% @private
q(_, Tab, MatchSpec, Limit, _, 0, Shard, {Acc, Continuation}) ->
{Acc, {Tab, MatchSpec, Limit, Shard, Continuation}};
q(_, _, _, _, _, _, Shard, {[], _}) when Shard < 0 ->
'$end_of_table';
q(_, Tab, MatchSpec, Limit, _, _, Shard, {Acc, _}) when Shard < 0 ->
{Acc, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}};
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, '$end_of_table'}) ->
q(F, Tab, MatchSpec, Limit, QFun, I, Shard - 1, {Acc, nil});
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, _}) ->
case ets:F(shard_name(Tab, Shard), MatchSpec, I) of
{L, Cont} ->
NewAcc = {QFun(L, Acc), Cont},
q(F, Tab, MatchSpec, Limit, QFun, I - length(L), Shard, NewAcc);
'$end_of_table' ->
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, '$end_of_table'})
end.
%% @private
q(_, {Tab, MatchSpec, Limit, Shard, Continuation}, _, 0, Acc) ->
{Acc, {Tab, MatchSpec, Limit, Shard, Continuation}};
q(F, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}, QFun, I, Acc) ->
q(F, Tab, MatchSpec, Limit, QFun, I, Shard - 1, {Acc, nil});
q(F, {Tab, MatchSpec, Limit, Shard, Continuation}, QFun, I, Acc) ->
case ets:F(Continuation) of
{L, Cont} ->
NewAcc = QFun(L, Acc),
q(F, {Tab, MatchSpec, Limit, Shard, Cont}, QFun, I - length(L), NewAcc);
'$end_of_table' ->
q(F, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}, QFun, I, Acc)
end.
%% @private
q_fun() ->
fun(L1, L0) -> L1 ++ L0 end. | src/shards_local.erl | 0.589953 | 0.465813 | shards_local.erl | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.