%% NOTE: removed a non-Erlang dataset-export header row that preceded the
%% module; the file proper begins with -module(quic_error).
-module(quic_error).
-export([decode/1]).
-export([encode/1]).
%
% Taken from Chromium: "net/quic/core/quic_error_codes.h" @ 05124be
%
-type decoded_value() :: (
no_error |
% Connection has reached an invalid state.
internal_error |
% There were data frames after the a fin or reset.
stream_data_after_termination |
% Control frame is malformed.
invalid_packet_header |
% Frame data is malformed.
invalid_frame_data |
% The packet contained no payload.
missing_payload |
% FEC data is malformed.
invalid_fec_data |
% STREAM frame data is malformed.
invalid_stream_data |
% STREAM frame data overlaps with buffered data.
overlapping_stream_data |
% Received STREAM frame data is not encrypted.
unencrypted_stream_data |
% Attempt to send unencrypted STREAM frame.
attempt_to_send_unencrypted_stream_data |
% Received a frame which is likely the result of memory corruption.
maybe_corrupted_memory |
% FEC frame data is not encrypted.
unencrypted_fec_data |
% RST_STREAM frame data is malformed.
invalid_rst_stream_data |
% CONNECTION_CLOSE frame data is malformed.
invalid_connection_close_data |
% GOAWAY frame data is malformed.
invalid_goaway_data |
% WINDOW_UPDATE frame data is malformed.
invalid_window_update_data |
% BLOCKED frame data is malformed.
invalid_blocked_data |
% STOP_WAITING frame data is malformed.
invalid_stop_waiting_data |
% PATH_CLOSE frame data is malformed.
invalid_path_close_data |
% ACK frame data is malformed.
invalid_ack_data |
% Version negotiation packet is malformed.
invalid_version_negotiation_packet |
% Public RST packet is malformed.
invalid_public_rst_packet |
% There was an error decrypting.
decryption_failure |
% There was an error encrypting.
encryption_failure |
% The packet exceeded kMaxPacketSize.
packet_too_large |
% The peer is going away. May be a client or server.
peer_going_away |
% A stream ID was invalid.
invalid_stream_id |
% A priority was invalid.
invalid_priority |
% Too many streams already open.
too_many_open_streams |
% The peer created too many available streams.
too_many_available_streams |
% Received public reset for this connection.
public_reset |
% Invalid protocol version.
invalid_version |
% The Header ID for a stream was too far from the previous.
invalid_header_id |
% Negotiable parameter received during handshake had invalid value.
invalid_negotiated_value |
% There was an error decompressing data.
decompression_failure |
% The connection timed out due to no network activity.
network_idle_timeout |
% The connection timed out waiting for the handshake to complete.
handshake_timeout |
% There was an error encountered migrating addresses.
error_migrating_address |
% There was an error encountered migrating port only.
error_migrating_port |
% There was an error while writing to the socket.
packet_write_error |
% There was an error while reading from the socket.
packet_read_error |
% We received a STREAM_FRAME with no data and no fin flag set.
empty_stream_frame_no_fin |
% We received invalid data on the headers stream.
invalid_headers_stream_data |
% The peer received too much data, violating flow control.
flow_control_received_too_much_data |
% The peer sent too much data, violating flow control.
flow_control_sent_too_much_data |
% The peer received an invalid flow control window.
flow_control_invalid_window |
% The connection has been IP pooled into an existing connection.
connection_ip_pooled |
% The connection has too many outstanding sent packets.
too_many_outstanding_sent_packets |
% The connection has too many outstanding received packets.
too_many_outstanding_received_packets |
% The quic connection has been cancelled.
connection_cancelled |
% Disabled QUIC because of high packet loss rate.
bad_packet_loss_rate |
% Disabled QUIC because of too many PUBLIC_RESETs post handshake.
public_resets_post_handshake |
% Disabled QUIC because of too many timeouts with streams open.
timeouts_with_open_streams |
% Closed because we failed to serialize a packet.
failed_to_serialize_packet |
% QUIC timed out after too many RTOs.
too_many_rtos |
% Crypto errors.
% Handshake failed.
handshake_failed |
% Handshake message contained out of order tags.
crypto_tags_out_of_order |
% Handshake message contained too many entries.
crypto_too_many_entries |
% Handshake message contained an invalid value length.
crypto_invalid_value_length |
% A crypto message was received after the handshake was complete.
crypto_message_after_handshake_complete |
% A crypto message was received with an illegal message tag.
invalid_crypto_message_type |
% A crypto message was received with an illegal parameter.
invalid_crypto_message_parameter |
% An invalid channel id signature was supplied.
invalid_channel_id_signature |
% A crypto message was received with a mandatory parameter missing.
crypto_message_parameter_not_found |
% A crypto message was received with a parameter that has no overlap
% with the local parameter.
crypto_message_parameter_no_overlap |
% A crypto message was received that contained a parameter with too few
% values.
crypto_message_index_not_found |
% A demand for an unsupport proof type was received.
unsupported_proof_demand |
% An internal error occurred in crypto processing.
crypto_internal_error |
% A crypto handshake message specified an unsupported version.
crypto_version_not_supported |
% A crypto handshake message resulted in a stateless reject.
crypto_handshake_stateless_reject |
% There was no intersection between the crypto primitives supported by the
% peer and ourselves.
crypto_no_support |
% The server rejected our client hello messages too many times.
crypto_too_many_rejects |
% The client rejected the server's certificate chain or signature.
proof_invalid |
% A crypto message was received with a duplicate tag.
crypto_duplicate_tag |
% A crypto message was received with the wrong encryption level (i.e. it
% should have been encrypted but was not.)
crypto_encryption_level_incorrect |
% The server config for a server has expired.
crypto_server_config_expired |
% We failed to setup the symmetric keys for a connection.
crypto_symmetric_key_setup_failed |
% A handshake message arrived, but we are still validating the
% previous handshake message.
crypto_message_while_validating_client_hello |
% A server config update arrived before the handshake is complete.
crypto_update_before_handshake_complete |
% CHLO cannot fit in one packet.
crypto_chlo_too_large |
% This connection involved a version negotiation which appears to have been
% tampered with.
version_negotiation_mismatch |
% Multipath errors.
% Multipath is not enabled, but a packet with multipath flag on is received.
bad_multipath_flag |
% A path is supposed to exist but does not.
multipath_path_does_not_exist |
% A path is supposed to be active but is not.
multipath_path_not_active |
% IP address changed causing connection close.
ip_address_changed |
% Connection migration errors.
% Network changed, but connection had no migratable streams.
connection_migration_no_migratable_streams |
% Connection changed networks too many times.
connection_migration_too_many_changes |
% Connection migration was attempted, but there was no new network to
% migrate to.
connection_migration_no_new_network |
% Network changed, but connection had one or more non-migratable streams.
connection_migration_non_migratable_stream |
% Stream frames arrived too discontiguously so that stream sequencer buffer
% maintains too many gaps.
too_many_frame_gaps |
% Sequencer buffer get into weird state where continuing read/write will lead
% to crash.
stream_sequencer_invalid_state |
% Connection closed because of server hits max number of sessions allowed.
too_many_sessions_on_server |
{unknown, non_neg_integer()}).
-export_type([decoded_value/0]).
-type encoded_value() :: 0..96.
-export_type([encoded_value/0]).
%% @doc Map a wire-format QUIC error code to its descriptive atom.
%% The mapping mirrors Chromium's quic_error_codes.h (see module header).
%% Codes not in the table are preserved as {unknown, Code} rather than
%% crashing, so callers can still log/propagate codes added by newer peers.
-spec decode(encoded_value()) -> decoded_value().
decode(0) -> no_error;
decode(1) -> internal_error;
decode(2) -> stream_data_after_termination;
decode(3) -> invalid_packet_header;
decode(4) -> invalid_frame_data;
decode(48) -> missing_payload;
decode(5) -> invalid_fec_data;
decode(46) -> invalid_stream_data;
decode(87) -> overlapping_stream_data;
decode(61) -> unencrypted_stream_data;
decode(88) -> attempt_to_send_unencrypted_stream_data;
decode(89) -> maybe_corrupted_memory;
decode(77) -> unencrypted_fec_data;
decode(6) -> invalid_rst_stream_data;
decode(7) -> invalid_connection_close_data;
decode(8) -> invalid_goaway_data;
decode(57) -> invalid_window_update_data;
decode(58) -> invalid_blocked_data;
decode(60) -> invalid_stop_waiting_data;
decode(78) -> invalid_path_close_data;
decode(9) -> invalid_ack_data;
decode(10) -> invalid_version_negotiation_packet;
decode(11) -> invalid_public_rst_packet;
decode(12) -> decryption_failure;
decode(13) -> encryption_failure;
decode(14) -> packet_too_large;
decode(16) -> peer_going_away;
decode(17) -> invalid_stream_id;
decode(49) -> invalid_priority;
decode(18) -> too_many_open_streams;
decode(76) -> too_many_available_streams;
decode(19) -> public_reset;
decode(20) -> invalid_version;
decode(22) -> invalid_header_id;
decode(23) -> invalid_negotiated_value;
decode(24) -> decompression_failure;
decode(25) -> network_idle_timeout;
decode(67) -> handshake_timeout;
decode(26) -> error_migrating_address;
decode(86) -> error_migrating_port;
decode(27) -> packet_write_error;
decode(51) -> packet_read_error;
decode(50) -> empty_stream_frame_no_fin;
decode(56) -> invalid_headers_stream_data;
decode(59) -> flow_control_received_too_much_data;
decode(63) -> flow_control_sent_too_much_data;
decode(64) -> flow_control_invalid_window;
decode(62) -> connection_ip_pooled;
decode(68) -> too_many_outstanding_sent_packets;
decode(69) -> too_many_outstanding_received_packets;
decode(70) -> connection_cancelled;
decode(71) -> bad_packet_loss_rate;
decode(73) -> public_resets_post_handshake;
decode(74) -> timeouts_with_open_streams;
decode(75) -> failed_to_serialize_packet;
decode(85) -> too_many_rtos;
%% Crypto error codes below.
decode(28) -> handshake_failed;
decode(29) -> crypto_tags_out_of_order;
decode(30) -> crypto_too_many_entries;
decode(31) -> crypto_invalid_value_length;
decode(32) -> crypto_message_after_handshake_complete;
decode(33) -> invalid_crypto_message_type;
decode(34) -> invalid_crypto_message_parameter;
decode(52) -> invalid_channel_id_signature;
decode(35) -> crypto_message_parameter_not_found;
decode(36) -> crypto_message_parameter_no_overlap;
decode(37) -> crypto_message_index_not_found;
decode(94) -> unsupported_proof_demand;
decode(38) -> crypto_internal_error;
decode(39) -> crypto_version_not_supported;
decode(72) -> crypto_handshake_stateless_reject;
decode(40) -> crypto_no_support;
decode(41) -> crypto_too_many_rejects;
decode(42) -> proof_invalid;
decode(43) -> crypto_duplicate_tag;
decode(44) -> crypto_encryption_level_incorrect;
decode(45) -> crypto_server_config_expired;
decode(53) -> crypto_symmetric_key_setup_failed;
decode(54) -> crypto_message_while_validating_client_hello;
decode(65) -> crypto_update_before_handshake_complete;
decode(90) -> crypto_chlo_too_large;
decode(55) -> version_negotiation_mismatch;
%% Multipath and connection-migration codes below.
decode(79) -> bad_multipath_flag;
decode(91) -> multipath_path_does_not_exist;
decode(92) -> multipath_path_not_active;
decode(80) -> ip_address_changed;
decode(81) -> connection_migration_no_migratable_streams;
decode(82) -> connection_migration_too_many_changes;
decode(83) -> connection_migration_no_new_network;
decode(84) -> connection_migration_non_migratable_stream;
decode(93) -> too_many_frame_gaps;
decode(95) -> stream_sequencer_invalid_state;
decode(96) -> too_many_sessions_on_server;
%% Catch-all: keep unrecognised codes instead of crashing.
decode(Unknown) -> {unknown, Unknown}.
%% @doc Map a descriptive error atom back to its QUIC wire-format code;
%% the exact inverse of decode/1 for all known atoms. An atom outside the
%% table fails with function_clause. NOTE(review): the spec accepts
%% decoded_value(), which also includes {unknown, N} tuples that this
%% function does not handle — confirm whether callers ever pass those.
-spec encode(decoded_value()) -> encoded_value().
encode(no_error) -> 0;
encode(internal_error) -> 1;
encode(stream_data_after_termination) -> 2;
encode(invalid_packet_header) -> 3;
encode(invalid_frame_data) -> 4;
encode(missing_payload) -> 48;
encode(invalid_fec_data) -> 5;
encode(invalid_stream_data) -> 46;
encode(overlapping_stream_data) -> 87;
encode(unencrypted_stream_data) -> 61;
encode(attempt_to_send_unencrypted_stream_data) -> 88;
encode(maybe_corrupted_memory) -> 89;
encode(unencrypted_fec_data) -> 77;
encode(invalid_rst_stream_data) -> 6;
encode(invalid_connection_close_data) -> 7;
encode(invalid_goaway_data) -> 8;
encode(invalid_window_update_data) -> 57;
encode(invalid_blocked_data) -> 58;
encode(invalid_stop_waiting_data) -> 60;
encode(invalid_path_close_data) -> 78;
encode(invalid_ack_data) -> 9;
encode(invalid_version_negotiation_packet) -> 10;
encode(invalid_public_rst_packet) -> 11;
encode(decryption_failure) -> 12;
encode(encryption_failure) -> 13;
encode(packet_too_large) -> 14;
encode(peer_going_away) -> 16;
encode(invalid_stream_id) -> 17;
encode(invalid_priority) -> 49;
encode(too_many_open_streams) -> 18;
encode(too_many_available_streams) -> 76;
encode(public_reset) -> 19;
encode(invalid_version) -> 20;
encode(invalid_header_id) -> 22;
encode(invalid_negotiated_value) -> 23;
encode(decompression_failure) -> 24;
encode(network_idle_timeout) -> 25;
encode(handshake_timeout) -> 67;
encode(error_migrating_address) -> 26;
encode(error_migrating_port) -> 86;
encode(packet_write_error) -> 27;
encode(packet_read_error) -> 51;
encode(empty_stream_frame_no_fin) -> 50;
encode(invalid_headers_stream_data) -> 56;
encode(flow_control_received_too_much_data) -> 59;
encode(flow_control_sent_too_much_data) -> 63;
encode(flow_control_invalid_window) -> 64;
encode(connection_ip_pooled) -> 62;
encode(too_many_outstanding_sent_packets) -> 68;
encode(too_many_outstanding_received_packets) -> 69;
encode(connection_cancelled) -> 70;
encode(bad_packet_loss_rate) -> 71;
encode(public_resets_post_handshake) -> 73;
encode(timeouts_with_open_streams) -> 74;
encode(failed_to_serialize_packet) -> 75;
encode(too_many_rtos) -> 85;
%% Crypto error codes below.
encode(handshake_failed) -> 28;
encode(crypto_tags_out_of_order) -> 29;
encode(crypto_too_many_entries) -> 30;
encode(crypto_invalid_value_length) -> 31;
encode(crypto_message_after_handshake_complete) -> 32;
encode(invalid_crypto_message_type) -> 33;
encode(invalid_crypto_message_parameter) -> 34;
encode(invalid_channel_id_signature) -> 52;
encode(crypto_message_parameter_not_found) -> 35;
encode(crypto_message_parameter_no_overlap) -> 36;
encode(crypto_message_index_not_found) -> 37;
encode(unsupported_proof_demand) -> 94;
encode(crypto_internal_error) -> 38;
encode(crypto_version_not_supported) -> 39;
encode(crypto_handshake_stateless_reject) -> 72;
encode(crypto_no_support) -> 40;
encode(crypto_too_many_rejects) -> 41;
encode(proof_invalid) -> 42;
encode(crypto_duplicate_tag) -> 43;
encode(crypto_encryption_level_incorrect) -> 44;
encode(crypto_server_config_expired) -> 45;
encode(crypto_symmetric_key_setup_failed) -> 53;
encode(crypto_message_while_validating_client_hello) -> 54;
encode(crypto_update_before_handshake_complete) -> 65;
encode(crypto_chlo_too_large) -> 90;
encode(version_negotiation_mismatch) -> 55;
%% Multipath and connection-migration codes below.
encode(bad_multipath_flag) -> 79;
encode(multipath_path_does_not_exist) -> 91;
encode(multipath_path_not_active) -> 92;
encode(ip_address_changed) -> 80;
encode(connection_migration_no_migratable_streams) -> 81;
encode(connection_migration_too_many_changes) -> 82;
encode(connection_migration_no_new_network) -> 83;
encode(connection_migration_non_migratable_stream) -> 84;
encode(too_many_frame_gaps) -> 93;
encode(stream_sequencer_invalid_state) -> 95;
encode(too_many_sessions_on_server) -> 96.
%% @doc
%% Collects information about memory dynamically allocated
%% by the Erlang emulator using
%% <a href="http://erlang.org/doc/man/erlang.html#memory-0">
%% erlang:memory/0
%% </a>, also provides basic (D)ETS statistics.
%%
%% ==Exported metrics==
%% <ul>
%% <li>
%% `erlang_vm_memory_atom_bytes_total{usage="free|used"}'<br/>
%% Type: gauge.<br/>
%% The total amount of memory currently allocated for atoms.
%% This memory is part of the memory presented as system memory.
%% </li>
%% <li>
%% `erlang_vm_memory_bytes_total{kind="system|processes"}'<br/>
%% Type: gauge.<br/>
%% The total amount of memory currently allocated.
%% This is the same as the sum of the memory size for processes and system.
%% </li>
%% <li>
%% `erlang_vm_memory_dets_tables'<br/>
%% Type: gauge.<br/>
%% Erlang VM DETS Tables count.
%% </li>
%% <li>
%% `erlang_vm_memory_ets_tables'<br/>
%% Type: gauge.<br/>
%% Erlang VM ETS Tables count.
%% </li>
%% <li>
%% `erlang_vm_memory_processes_bytes_total{usage="free|used"}'<br/>
%% Type: gauge.<br/>
%% The total amount of memory currently allocated for the Erlang processes.
%% </li>
%% <li>
%% `erlang_vm_memory_system_bytes_total{usage="atom|binary|code|ets|other"}'
%% <br/>
%% Type: gauge.<br/>
%% The total amount of memory currently allocated for the emulator
%% that is not directly related to any Erlang process.
%% Memory presented as processes is not included in this memory.
%% </li>
%% </ul>
%%
%% ==Configuration==
%%
%% Metrics exported by this collector can be configured via
%% `vm_memory_collector_metrics' key of `prometheus' app environment.
%%
%% Available options:
%% <ul>
%% <li>
%% `atom_bytes_total' for `erlang_vm_memory_atom_bytes_total'.
%% </li>
%% <li>
%% `bytes_total' for `erlang_vm_memory_bytes_total'.
%% </li>
%% <li>
%% `dets_tables' for `erlang_vm_dets_tables'.
%% </li>
%% <li>
%% `ets_tables' for `erlang_vm_ets_tables'.
%% </li>
%% <li>
%% `processes_bytes_total' for `erlang_vm_memory_processes_bytes_total'.
%% </li>
%% <li>
%% `system_bytes_total' for `erlang_vm_memory_system_bytes_total'.
%% </li>
%% </ul>
%%
%% By default all metrics are enabled.
%% @end
-module(prometheus_vm_memory_collector).
-export([deregister_cleanup/1,
collect_mf/2]).
-import(prometheus_model_helpers, [create_mf/4]).
-import(proplists, [get_value/2]).
-include("prometheus.hrl").
-behaviour(prometheus_collector).
%%====================================================================
%% Macros
%%====================================================================
-define(METRIC_NAME_PREFIX, "erlang_vm_memory_").
%%====================================================================
%% Collector API
%%====================================================================
%% @private
%% Nothing to clean up for this collector; deregistration is a no-op.
deregister_cleanup(_Registry) -> ok.
-spec collect_mf(_Registry, Callback) -> ok when
      _Registry :: prometheus_registry:registry(),
      Callback :: prometheus_collector:callback().
%% @private
%% Walks the metric definitions in order and hands every enabled one to
%% the collector callback; disabled metrics are skipped.
collect_mf(_Registry, Callback) ->
    Metrics = metrics(),
    Enabled = enabled_metrics(),
    lists:foreach(
      fun({Name, _, _, _} = Metric) ->
              case metric_enabled(Name, Enabled) of
                  true  -> add_metric_family(Metric, Callback);
                  false -> ok
              end
      end,
      Metrics),
    ok.
%% Wraps one {Name, Type, Help, Metrics} definition into a metric family
%% and hands it to the collector callback. ?METRIC_NAME presumably combines
%% ?METRIC_NAME_PREFIX ("erlang_vm_memory_") with Name — macro comes from
%% prometheus.hrl, confirm there.
add_metric_family({Name, Type, Help, Metrics}, Callback) ->
    Callback(create_mf(?METRIC_NAME(Name), Help, Type, Metrics)).
%%====================================================================
%% Private Parts
%%====================================================================
%% Reads erlang:memory/0 once and derives every exported gauge from that
%% single snapshot, plus the current DETS/ETS table counts. Each entry is
%% {MetricName, Type, HelpString, Value | [{Labels, Value}]}.
metrics() ->
    Data = erlang:memory(),
    [{atom_bytes_total, gauge,
      "The total amount of memory currently allocated "
      "for atoms. This memory is part of the memory "
      "presented as system memory.",
      [
       {[{usage, used}], get_value(atom_used, Data)},
       {[{usage, free}],
        get_value(atom, Data) - get_value(atom_used, Data)}
      ]},
     {bytes_total, gauge,
      "The total amount of memory currently allocated. "
      "This is the same as the sum of the memory size "
      "for processes and system.",
      [
       {[{kind, system}], get_value(system, Data)},
       {[{kind, processes}], get_value(processes, Data)}
      ]},
     {dets_tables, gauge,
      "Erlang VM DETS Tables count.",
      length(dets:all())},
     {ets_tables, gauge,
      "Erlang VM ETS Tables count.",
      length(ets:all())},
     {processes_bytes_total, gauge,
      "The total amount of memory currently allocated "
      "for the Erlang processes.",
      [
       {[{usage, used}], get_value(processes_used, Data)},
       {[{usage, free}],
        get_value(processes, Data) - get_value(processes_used, Data)}
      ]},
     {system_bytes_total, gauge,
      "The total amount of memory currently allocated "
      "for the emulator that is not directly related "
      "to any Erlang process. Memory presented as processes "
      "is not included in this memory.",
      [
       {[{usage, atom}], get_value(atom, Data)},
       {[{usage, binary}], get_value(binary, Data)},
       {[{usage, code}], get_value(code, Data)},
       {[{usage, ets}], get_value(ets, Data)},
       %% "other" is system minus the categories broken out above.
       {[{usage, other}], memory_other(Data)}
      ]}].
%% Memory accounted to `system' but not broken out into one of the
%% explicitly reported categories (atom/binary/code/ets).
memory_other(Data) ->
    Tracked = [atom, binary, code, ets],
    lists:foldl(fun(Key, Acc) -> Acc - proplists:get_value(Key, Data) end,
                proplists:get_value(system, Data),
                Tracked).
%% Reads the `vm_memory_collector_metrics' key of the `prometheus'
%% application environment; defaults to `all' when unset.
enabled_metrics() ->
    Default = all,
    application:get_env(prometheus, vm_memory_collector_metrics, Default).
%% True when metric Name is enabled: either all metrics are enabled, or
%% Name is a member of the configured list.
%% (Fix: removed trailing dataset-export residue that broke compilation.)
metric_enabled(Name, Metrics) ->
    Metrics =:= all orelse lists:member(Name, Metrics).
-module(herd_datetime).
-export([
now/0, now_micro/0,
datetime_from_db/1,
timestamp_to_datetime/1,
timestamp_to_db_datetime/1,
datetime_to_timestamp/1,
datetime_to_ISO/1,
add_interval/2,
subtract_interval/2
]).
-type(timestamp() :: integer()). % seconds, 1476882197
-type(timestamp_micro() :: float()). % int part in seconds, 1476882197.233323
%% PostgreSQL datetime format: 2014-12-20 17:38:56.475565
-type(db_datetime() :: {calendar:date(), {0..23, 0..59, float()}}).
-type(time_interval() :: {integer(), week | day | hour | minute | second}).
-define(DAY, 24 * 3600).
%%% module API
%% @doc Current time as whole seconds since the Unix epoch
%% (e.g. 1476882197).
-spec now() -> timestamp().
now() ->
    os:system_time(seconds).
%% @doc Current time as seconds since the Unix epoch, with microsecond
%% resolution in the fractional part (e.g. 1476882197.233323).
-spec now_micro() -> timestamp_micro().
now_micro() ->
    os:system_time(micro_seconds) * 1.0e-6.
%% @doc Truncate the fractional seconds of a PostgreSQL-style datetime,
%% yielding a standard calendar:datetime().
-spec datetime_from_db(db_datetime()) -> calendar:datetime().
datetime_from_db({Date, {Hour, Minute, FracSeconds}}) ->
    WholeSeconds = trunc(FracSeconds),
    {Date, {Hour, Minute, WholeSeconds}}.
%% @doc Convert a Unix timestamp (whole seconds) to a UTC datetime.
-spec timestamp_to_datetime(timestamp()) -> calendar:datetime().
timestamp_to_datetime(Timestamp) ->
    %% Split into the {MegaSecs, Secs, MicroSecs} shape expected by
    %% calendar:now_to_universal_time/1.
    MegaSecs = Timestamp div 1000000,
    Secs = Timestamp rem 1000000,
    calendar:now_to_universal_time({MegaSecs, Secs, 0}).
%% @doc Convert a fractional Unix timestamp to a PostgreSQL-style
%% datetime whose seconds field keeps the fractional part as a float.
-spec timestamp_to_db_datetime(timestamp_micro()) -> db_datetime().
timestamp_to_db_datetime(Timestamp) ->
    Whole = trunc(Timestamp),
    Fraction = Timestamp - Whole,
    {Date, {Hour, Minute, Seconds}} =
        calendar:now_to_universal_time({Whole div 1000000, Whole rem 1000000, 0}),
    {Date, {Hour, Minute, Seconds + Fraction}}.
%% @doc Convert a UTC datetime to a Unix timestamp (whole seconds).
-spec datetime_to_timestamp(calendar:datetime()) -> timestamp().
datetime_to_timestamp(DateTime) ->
    %% Offset of the Unix epoch in gregorian seconds; evaluates to the
    %% constant 62167219200 the original hard-coded.
    Epoch = calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}}),
    calendar:datetime_to_gregorian_seconds(DateTime) - Epoch.
%% @doc Render a datetime as an ISO-8601 string ("YYYY-MM-DDTHH:MM:SS").
%% Fractional seconds are truncated.
-spec datetime_to_ISO(calendar:datetime()) -> string().
datetime_to_ISO({{Year, Month, Day}, {Hour, Minute, Second}}) ->
    Format = "~4..0b-~2..0b-~2..0bT~2..0b:~2..0b:~2..0b",
    Args = [Year, Month, Day, Hour, Minute, trunc(Second)],
    lists:flatten(io_lib:format(Format, Args)).
%% @doc Step a datetime forwards by the given interval. Every unit is
%% reduced to seconds and dispatched to the base `second' clause.
-spec add_interval(calendar:datetime(), time_interval()) -> calendar:datetime().
add_interval(Datetime, {Weeks, week}) ->
    add_interval(Datetime, {Weeks * 7 * ?DAY, second});
add_interval(Datetime, {Days, day}) ->
    add_interval(Datetime, {Days * ?DAY, second});
add_interval(Datetime, {Hours, hour}) ->
    add_interval(Datetime, {Hours * 3600, second});
add_interval(Datetime, {Minutes, minute}) ->
    add_interval(Datetime, {Minutes * 60, second});
add_interval(Datetime, {Seconds, second}) ->
    Epoch = datetime_to_timestamp(Datetime),
    timestamp_to_datetime(Epoch + Seconds).
%% @doc Step a datetime backwards by the given interval; defined as
%% add_interval/2 with the amount negated.
%% (Fix: removed trailing dataset-export residue that broke compilation.)
-spec subtract_interval(calendar:datetime(), time_interval()) -> calendar:datetime().
subtract_interval(Datetime, {M, Type}) ->
    add_interval(Datetime, {-M, Type}).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(cbt_ets).
-behaviour(cbt_backend).
-include("cbt.hrl").
%% public API
-export([new/1, delete/1]).
-export([open_btree/2, open_btree/3,
update_btree/3,
delete_btree/2,
bytes/1]).
%% backend API
%%
-export([append_term/2, append_term/3,
pread_term/2,
sync/1,
empty/1]).
%% @doc create new ETS storage
%% Creates the named table and seeds it with the meta record that other
%% operations (e.g. the append position counter) rely on. Returns the
%% table identifier (the table name, per the spec).
%% (Fix: spec said `DbName :: atom', which in type syntax is the singleton
%% type of the atom 'atom', not the predefined atom() type.)
-spec new(DbName :: atom()) -> atom().
new(DbName) when is_atom(DbName) ->
    Tid = ets:new(DbName, [named_table, ordered_set, public, {keypos, 2}]),
    %% make meta
    ets:insert_new(DbName, #ets_btree_meta{key=?ETS_META_KEY}),
    Tid.
%% @doc delete ETS storage
%% Drops the whole table; crashes (badmatch) if the table does not exist.
-spec delete(Tab :: atom()) -> ok.
delete(Table) ->
    true = ets:delete(Table),
    ok.
%% @doc open a btree from the storage, using the default option list.
-spec open_btree(Tab :: atom(), BtName :: any()) ->
    {ok, cbt_btree:cbbtree()} | {error, term()}.
open_btree(Table, TreeName) ->
    open_btree(Table, TreeName, []).
%% Opens (or lazily creates) the named btree inside Tab, forcing this
%% module as the btree backend.
%% NOTE(review): the guard compares the *table name* Tab against
%% ?ETS_META_KEY; it looks like it may have been meant to guard BtName
%% (so a btree cannot shadow the meta record) — confirm against the
%% macro's definition in cbt.hrl.
open_btree(Tab, BtName, Options0) when Tab /= ?ETS_META_KEY ->
    Options = [{backend, cbt_ets}] ++ Options0,
    case ets:lookup(Tab, BtName) of
        [] ->
            %% create a new btree if missing
            cbt_btree:open(nil, Tab, Options);
        [#ets_btree{root=BtState}] ->
            %% reopen the btree
            cbt_btree:open(BtState, Tab, Options)
    end.
%% @doc update the btree state in the storage which allows the new changes to
%% be read by others.
%% Persists the btree's current root so readers that reopen the named
%% btree observe the latest state.
%% (Fix: the spec referenced the misspelled module `cbt_btre'.)
-spec update_btree(Tab :: atom(), BtName :: any(),
                   Btree :: cbt_btree:cbtree()) -> true.
update_btree(Tab, BtName, Btree) ->
    BtState = cbt_btree:get_state(Btree),
    ets:insert(Tab, #ets_btree{name=BtName, root = BtState}).
%% @doc delete the btree reference in the storage
-spec delete_btree(Tab :: atom(), BtName :: any()) -> true.
delete_btree(Table, TreeName) ->
    ets:delete(Table, TreeName).
%% @doc return the size in memory of the storage
%% NOTE(review): ets:info(_, memory) reports the size in machine words,
%% not bytes — the function name may overstate; confirm what callers expect.
-spec bytes(Tab :: atom()) -> integer().
bytes(Table) ->
    ets:info(Table, memory).
%% BACKEND API
%%
%% Append Term using the default (empty) option list; see append_term/3.
append_term(Tab, Term) ->
    append_term(Tab, Term, []).
%% Compresses Term and stores it at a fresh position, returning
%% {ok, Position, CompressedSize}. The position is taken from the meta
%% record's write_loc counter via ets:update_counter/3, which makes the
%% increment atomic across concurrent writers.
append_term(Tab, Term, Options) ->
    % compress term
    Comp = cbt_util:get_value(compression, Options, ?DEFAULT_COMPRESSION),
    Data = cbt_compress:compress(Term, Comp),
    NewPos = ets:update_counter(Tab, ?ETS_META_KEY,
        {#ets_btree_meta.write_loc, 1}),
    ets:insert(Tab, #ets_btree_data{pos=NewPos, data=Data}),
    {ok, NewPos, byte_size(Data)}.
%% Reads and decompresses the term stored at position Pos; returns a
%% tagged miss tuple when nothing is stored there.
pread_term(Table, Pos) ->
    case ets:lookup(Table, Pos) of
        [#ets_btree_data{data = Compressed}] ->
            {ok, cbt_compress:decompress(Compressed)};
        [] ->
            {missing_btree_item, Pos}
    end.
%% Flushing is meaningless for an in-memory ETS backend; report success.
sync(_Table) ->
    ok.
%% Resets the storage: drops every object and re-seeds the meta record.
%% (Fix: the original also called ets:new/2 on the still-existing named
%% table, which always raises badarg — the table survives
%% delete_all_objects/1 and must not be re-created. Also removed trailing
%% dataset-export residue from the final line.)
empty(Tab) ->
    %% delete all objects in the table
    ets:delete_all_objects(Tab),
    %% re-initialize the meta record
    ets:insert_new(Tab, #ets_btree_meta{key=?ETS_META_KEY}),
    ok.
-module(teal).
-export([assert/3,
not_equal/2, assert_not_equal/2, assert_not_equal/3,
raises_exception/1, assert_raises_exception/1,
assert_raises_exception/2,
raises_exception_with_message/2,
assert_raises_exception_with_message/2,
assert_raises_exception_with_message/3,
raises_throw/1, assert_raises_throw/1, assert_raises_throw/2,
raises_throw_with_message/2, assert_raises_throw_with_message/2,
assert_raises_throw_with_message/3,
raises_error/1, assert_raises_error/1, assert_raises_error/2,
raises_error_with_message/2, assert_raises_error_with_message/2,
assert_raises_error_with_message/3,
raises_exit/1, assert_raises_exit/1, assert_raises_exit/2,
raises_exit_with_message/2, assert_raises_exit_with_message/2,
assert_raises_exit_with_message/3
]).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Assert that `Rhs' is exactly equal to `Lhs'; returns `true' on
%% success and raises `Message' as an error otherwise.
-spec assert(Lhs :: any(), Rhs :: any(), Message :: atom()) -> true.
assert(Lhs, Rhs, Message) ->
    %% Matching Rhs against the already-bound Lhs is an =:= comparison.
    case Rhs of
        Lhs -> true;
        _Other -> erlang:error(Message)
    end.
%% equality functions

%% @doc True when the two terms are not exactly equal (=:= semantics,
%% so e.g. 1 and 1.0 count as not equal).
-spec not_equal(Term1 :: any(), Term2 :: any()) -> boolean().
not_equal(Term1, Term2) ->
    Term1 =/= Term2.
%% @doc Assert inequality, reporting `equal' on failure.
-spec assert_not_equal(Term1 :: any(), Term2 :: any()) -> boolean().
assert_not_equal(Term1, Term2) ->
    assert_not_equal(Term1, Term2, equal).
%% @doc Assert inequality, reporting the caller-supplied `Msg' on failure.
-spec assert_not_equal(Term1 :: any(), Term2 :: any(), Msg :: atom()) -> boolean().
assert_not_equal(Term1, Term2, Msg) ->
    Outcome = not_equal(Term1, Term2),
    teal:assert(true, Outcome, Msg).
%% Exception function
%% --- "any exception" predicates and assertions -----------------------
%% raises_* return a boolean; assert_* raise (default or caller-supplied
%% message) via assert/3 when the expectation does not hold.
-spec raises_exception(Fun :: fun()) -> boolean().
raises_exception(Fun) when is_function(Fun) ->
    does_raise_exception(Fun).
-spec assert_raises_exception(Fun :: fun()) -> boolean().
assert_raises_exception(Fun) ->
    assert(true, raises_exception(Fun), no_exception_caught).
-spec assert_raises_exception(Fun :: fun(), Msg :: any()) -> boolean().
assert_raises_exception(Fun, Msg) ->
    assert(true, raises_exception(Fun), Msg).
%% _with_message variants additionally require the exception reason to
%% match ErrMsg exactly; see does_raise_exception_with_message/2.
-spec raises_exception_with_message(Fun :: fun(), ErrMsg :: any()) -> boolean().
raises_exception_with_message(Fun, ErrMsg) when is_function(Fun) ->
    does_raise_exception_with_message(Fun, ErrMsg).
-spec assert_raises_exception_with_message(Fun :: fun(), ErrMsg :: any()) ->
    boolean().
assert_raises_exception_with_message(Fun, ErrMsg) ->
    assert(true, raises_exception_with_message(Fun, ErrMsg),
        no_exception_caught).
-spec assert_raises_exception_with_message(Fun :: fun(), ErrMsg :: any(),
    Msg :: any()) -> boolean().
assert_raises_exception_with_message(Fun, ErrMsg, Msg) ->
    assert(true, raises_exception_with_message(Fun, ErrMsg), Msg).
%% --- throw-class predicates and assertions ---------------------------
%% Same shape as the exception family, restricted to the `throw' class.
-spec raises_throw(Fun :: fun()) -> boolean().
raises_throw(Fun) when is_function(Fun) ->
    does_raise_exception(Fun, throw).
-spec assert_raises_throw(Fun :: fun()) -> boolean().
assert_raises_throw(Fun) ->
    assert(true, raises_throw(Fun), no_throw_caught).
-spec assert_raises_throw(Fun :: fun(), Msg :: any()) -> boolean().
assert_raises_throw(Fun, Msg) ->
    assert(true, raises_throw(Fun), Msg).
-spec raises_throw_with_message(Fun :: fun(), ErrMsg :: any()) -> boolean().
raises_throw_with_message(Fun, ErrMsg) when is_function(Fun) ->
    does_raise_exception(Fun, throw, ErrMsg).
-spec assert_raises_throw_with_message(Fun :: fun(), ErrMsg :: any()) ->
    boolean().
assert_raises_throw_with_message(Fun, ErrMsg) ->
    assert(true, raises_throw_with_message(Fun, ErrMsg), no_throw_caught).
-spec assert_raises_throw_with_message(Fun :: fun(), ErrMsg :: any(),
    Msg :: any()) -> boolean().
assert_raises_throw_with_message(Fun, ErrMsg, Msg) ->
    assert(true, raises_throw_with_message(Fun, ErrMsg), Msg).
%% --- error-class predicates and assertions ---------------------------
%% Same shape as the throw family, restricted to the `error' class.
%% (Unlike the exception/throw families, the assert_* wrappers here also
%% carry an is_function/1 guard.)
-spec raises_error(Fun :: fun()) -> boolean().
raises_error(Fun) when is_function(Fun) ->
    does_raise_exception(Fun, error).
-spec assert_raises_error(Fun :: fun()) -> boolean().
assert_raises_error(Fun) when is_function(Fun) ->
    assert(true, raises_error(Fun), no_error_caught).
-spec assert_raises_error(Fun :: fun(), Msg :: any()) -> boolean().
assert_raises_error(Fun, Msg) when is_function(Fun) ->
    assert(true, raises_error(Fun), Msg).
-spec raises_error_with_message(Fun :: fun(), ErrMsg :: any()) -> boolean().
raises_error_with_message(Fun, ErrMsg) when is_function(Fun) ->
    does_raise_exception(Fun, error, ErrMsg).
-spec assert_raises_error_with_message(Fun :: fun(), ErrMsg :: any()) ->
    boolean().
assert_raises_error_with_message(Fun, ErrMsg) when is_function(Fun) ->
    assert(true, raises_error_with_message(Fun, ErrMsg), no_error_caught).
-spec assert_raises_error_with_message(Fun :: fun(), ErrMsg :: any(),
    Msg :: any()) -> boolean().
assert_raises_error_with_message(Fun, ErrMsg, Msg) when is_function(Fun) ->
    assert(true, raises_error_with_message(Fun, ErrMsg), Msg).
%% --- exit-class predicates and assertions ----------------------------
%% Same shape as the error family, restricted to the `exit' class.
-spec raises_exit(Fun :: fun()) -> boolean().
raises_exit(Fun) when is_function(Fun) ->
    does_raise_exception(Fun, exit).
-spec assert_raises_exit(Fun :: fun()) -> boolean().
assert_raises_exit(Fun) when is_function(Fun) ->
    assert(true, raises_exit(Fun), no_exit_caught).
-spec assert_raises_exit(Fun :: fun(), Msg :: any()) -> boolean().
assert_raises_exit(Fun, Msg) when is_function(Fun) ->
    assert(true, raises_exit(Fun), Msg).
-spec raises_exit_with_message(Fun :: fun(), ErrMsg :: any()) -> boolean().
raises_exit_with_message(Fun, ErrMsg) when is_function(Fun) ->
    does_raise_exception(Fun, exit, ErrMsg).
-spec assert_raises_exit_with_message(Fun :: fun(), ErrMsg :: any()) ->
    boolean().
assert_raises_exit_with_message(Fun, ErrMsg) when is_function(Fun) ->
    assert(true, raises_exit_with_message(Fun, ErrMsg), no_exit_caught).
-spec assert_raises_exit_with_message(Fun :: fun(), ErrMsg :: any(),
    Msg :: any()) -> boolean().
assert_raises_exit_with_message(Fun, ErrMsg, Msg) when is_function(Fun) ->
    assert(true, raises_exit_with_message(Fun, ErrMsg), Msg).
%%%===================================================================
%%% Private functions
%%%===================================================================
%% True if Fun raises any exception of any class.
does_raise_exception(Fun) when is_function(Fun) ->
try Fun() of
_ -> false
catch
_Error:_ErrMsg ->
true
end.
%% True if Fun raises an exception of the given class (throw|error|exit).
%% NOTE: Error is already bound, so the catch pattern Error:_ErrMsg only
%% matches that specific class; exceptions of other classes propagate.
does_raise_exception(Fun, Error) when is_function(Fun) ->
try Fun() of
_ -> false
catch
Error:_ErrMsg ->
true
end.
%% True if Fun raises an exception (any class) whose reason is exactly
%% ErrorMessage (bound variable, pattern-matched in the catch clause).
does_raise_exception_with_message(Fun, ErrorMessage) when is_function(Fun) ->
try Fun() of
_ -> false
catch
_Error:ErrorMessage ->
true
end.
%% True if Fun raises an exception of class Error with reason ErrorMessage;
%% any other class/reason combination propagates out of the try.
does_raise_exception(Fun, Error, ErrorMessage) when is_function(Fun) ->
try Fun() of
_ -> false
catch
Error:ErrorMessage ->
true
end. | src/teal.erl | 0.564339 | 0.631935 | teal.erl | starcoder |
-module(dsdc_block_candidate).
-export([ apply_block_txs/5
, apply_block_txs_strict/5
, calculate_fee/1
, calculate_total_fee/1
, create/1
, create_with_state/4
]).
-export([adjust_target/2]).
%% -- API functions ----------------------------------------------------------
%% @doc Create a block candidate on top of the given previous block, or on
%% top of the block identified by the given header hash (looked up in the
%% chain). Returns the new block plus an opaque block-info term.
-spec create(dsdc_blocks:block() | dsdc_blocks:block_header_hash()) ->
{ok, dsdc_blocks:block(), term()} | {error, term()}.
create(BlockHash) when is_binary(BlockHash) ->
case dsdc_chain:get_block(BlockHash) of
{ok, Block} ->
int_create(BlockHash, Block);
error ->
{error, block_not_found}
end;
create(Block) ->
%% Block given directly: derive its hash, then build on top of it.
{ok, BlockHash} = dsdc_blocks:hash_internal_representation(Block),
int_create(BlockHash, Block).
%% @doc Apply signed transactions on the given state trees (non-strict:
%% transactions that fail to apply are silently dropped). Returns the txs
%% actually applied and the resulting trees.
-spec apply_block_txs(list(dsdtx_sign:signed_tx()), dsdc_keys:pubkey(), dsdc_trees:trees(),
dsdc_blocks:height(), non_neg_integer()) ->
{ok, list(dsdtx_sign:signed_tx()), dsdc_trees:trees()}.
apply_block_txs(Txs, Miner, Trees, Height, Version) ->
{ok, Txs1, Trees1, _} = int_apply_block_txs(Txs, Miner, Trees, Height, Version, false),
{ok, Txs1, Trees1}.
%% @doc Strict variant: any invalid transaction fails the whole call.
-spec apply_block_txs_strict(list(dsdtx_sign:signed_tx()), dsdc_keys:pubkey(),
dsdc_trees:trees(), dsdc_blocks:height(), non_neg_integer()) ->
{ok, list(dsdtx_sign:signed_tx()), dsdc_trees:trees()} | {error, term()}.
apply_block_txs_strict(Txs, Miner, Trees, Height, Version) ->
case int_apply_block_txs(Txs, Miner, Trees, Height, Version, true) of
Err = {error, _} -> Err;
{ok, Txs1, Trees1} -> {ok, Txs1, Trees1}
end.
%% @doc Build a block from explicitly supplied txs and trees (no tx-pool,
%% no target adjustment); returns the block and the post-apply trees.
-spec create_with_state(dsdc_blocks:block(), dsdc_keys:pubkey(),
list(dsdtx_sign:signed_tx()), dsdc_trees:trees()) ->
{dsdc_blocks:block(), dsdc_trees:trees()}.
create_with_state(Block, Miner, Txs, Trees) ->
{ok, BlockHash} = dsdc_blocks:hash_internal_representation(Block),
{ok, NewBlock, #{ trees := NewTrees}} =
int_create_block(BlockHash, Block, Trees, Miner, Txs),
{NewBlock, NewTrees}.
%% @doc Recalculate the mining target from the last N headers; for the
%% first N blocks the pre-defined target is kept as-is.
-spec adjust_target(dsdc_blocks:block(), list(dsdc_headers:header())) ->
{ok, dsdc_blocks:block()} | {error, term()}.
adjust_target(Block, AdjHeaders) ->
Header = dsdc_blocks:to_header(Block),
DeltaHeight = dsdc_governance:blocks_to_check_difficulty_count(),
case dsdc_headers:height(Header) =< DeltaHeight of
true ->
%% For the first DeltaHeight blocks, use pre-defined target
{ok, Block};
false when DeltaHeight == length(AdjHeaders) ->
CalculatedTarget = dsdc_target:recalculate(Header, AdjHeaders),
Block1 = dsdc_blocks:set_target(Block, CalculatedTarget),
{ok, Block1};
false -> %% Wrong number of headers in AdjHeaders...
{error, {wrong_headers_for_target_adjustment, DeltaHeight, length(AdjHeaders)}}
end.
%% @doc Sum of the fees of the given signed transactions.
-spec calculate_fee(list(dsdtx_sign:signed_tx())) -> non_neg_integer().
calculate_fee(SignedTxs) ->
    lists:sum([dsdtx:fee(dsdtx_sign:tx(SignedTx)) || SignedTx <- SignedTxs]).
%% @doc Total miner reward for a block: the coinbase mining reward plus
%% the sum of the fees of the included transactions.
-spec calculate_total_fee(list(dsdtx_sign:signed_tx())) -> non_neg_integer().
calculate_total_fee(SignedTxs) ->
TxsFee = calculate_fee(SignedTxs),
dsdc_governance:block_mine_reward() + TxsFee.
%% -- Internal functions -----------------------------------------------------
%% Resolve the state trees for the previous block, then continue building.
int_create(BlockHash, Block) ->
case dsdc_chain:get_block_state(BlockHash) of
{ok, Trees} ->
int_create(BlockHash, Block, Trees);
error ->
{error, block_state_not_found}
end.
%% Fetch the last N headers needed for target adjustment (skipped while
%% the chain is shorter than N blocks).
int_create(BlockHash, Block, Trees) ->
N = dsdc_governance:blocks_to_check_difficulty_count(),
case dsdc_blocks:height(Block) < N of
true ->
int_create(BlockHash, Block, Trees, []);
false ->
case dsdc_chain:get_n_headers_backwards_from_hash(BlockHash, N) of
{ok, Headers} ->
int_create(BlockHash, Block, Trees, Headers);
error ->
{error, headers_for_target_adjustment_not_found}
end
end.
%% Pick candidate txs from the tx pool and the miner's public key.
int_create(BlockHash, Block, Trees, AdjChain) ->
MaxN = dsdc_governance:max_txs_in_block(),
{ok, Txs} = dsdc_tx_pool:get_candidate(MaxN, BlockHash),
case dsdc_keys:pubkey() of
{ok, Miner} ->
int_create(BlockHash, Block, Trees, Miner, Txs, AdjChain);
{error, _} = Error ->
Error
end.
%% Assemble the block, then adjust its mining target from AdjChain.
int_create(PrevBlockHash, PrevBlock, Trees, Miner, Txs, AdjChain) ->
{ok, Block, BlockInfo} = int_create_block(PrevBlockHash, PrevBlock, Trees, Miner, Txs),
case adjust_target(Block, AdjChain) of
{ok, AdjBlock} -> {ok, AdjBlock, BlockInfo#{ adj_chain => AdjChain }};
{error, _} -> {error, failed_to_adjust_target}
end.
%% Build the new block: apply txs (non-strict), compute the txs root
%% hash and wrap everything in a block record plus a block-info map.
int_create_block(PrevBlockHash, PrevBlock, Trees, Miner, Txs) ->
PrevBlockHeight = dsdc_blocks:height(PrevBlock),
%% Assert correctness of last block protocol version, as minimum
%% sanity check on previous block and state (mainly for potential
%% stale state persisted in DB and for development testing).
ExpectedPrevBlockVersion =
dsdc_hard_forks:protocol_effective_at_height(PrevBlockHeight),
{ExpectedPrevBlockVersion, _} = {dsdc_blocks:version(PrevBlock),
{expected, ExpectedPrevBlockVersion}},
Height = PrevBlockHeight + 1,
Version = dsdc_hard_forks:protocol_effective_at_height(Height),
{ok, Txs1, Trees2, TotFee} =
int_apply_block_txs(Txs, Miner, Trees, Height, Version, false),
TxsTree = dsdc_txs_trees:from_txs(Txs1),
TxsRootHash = dsdc_txs_trees:pad_empty(dsdc_txs_trees:root_hash(TxsTree)),
NewBlock = dsdc_blocks:new(Height, PrevBlockHash, dsdc_trees:hash(Trees2),
TxsRootHash, Txs1, dsdc_blocks:target(PrevBlock),
0, dsdu_time:now_in_msecs(), Version, Miner),
BlockInfo = #{ trees => Trees2, tot_fee => TotFee, txs_tree => TxsTree },
{ok, NewBlock, BlockInfo}.
%% Non-strict
int_apply_block_txs(Txs, Miner, Trees, Height, Version, false) ->
Trees0 = dsdc_trees:perform_pre_transformations(Trees, Height),
{ok, Txs1, Trees1} =
dsdc_trees:apply_txs_on_state_trees(Txs, Trees0, Height, Version),
TotFee = calculate_total_fee(Txs1),
Trees2 = dsdc_trees:grant_fee_to_miner(Miner, Trees1, TotFee),
{ok, Txs1, Trees2, TotFee};
%% strict
int_apply_block_txs(Txs, Miner, Trees, Height, Version, true) ->
Trees0 = dsdc_trees:perform_pre_transformations(Trees, Height),
case dsdc_trees:apply_txs_on_state_trees_strict(Txs, Trees0, Height, Version) of
{ok, Txs1, Trees1} ->
TotFee = calculate_total_fee(Txs1),
Trees2 = dsdc_trees:grant_fee_to_miner(Miner, Trees1, TotFee),
{ok, Txs1, Trees2, TotFee};
Err = {error, _} ->
Err
end. | apps/dsdcore/src/dsdc_block_candidate.erl | 0.527317 | 0.417093 | dsdc_block_candidate.erl | starcoder |
%%%----------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @doc Utility functions to work with binary strings
%%%
%%% @end
%%% @copyright 2011-2012 <NAME>. See LICENSE file.
%%%----------------------------------------------------------------
-module(sip_binary).
%% API
-export([trim_leading/1, trim_trailing/1, trim/1, to_lower/1, to_upper/1]).
-export([integer_to_binary/1, float_to_binary/1]).
-export([binary_to_integer/1, binary_to_float/1]).
-export([hexstr_to_binary/1, binary_to_hexstr/1]).
-export([parse_until/2, parse_while/2]).
%% Include files
-include("../sip_common.hrl").
%%-----------------------------------------------------------------
%% Conversions upper to/from lower
%%-----------------------------------------------------------------
%% @doc Trim leading whitespace (space, tab, CR, LF) from the binary string.
%% @end
-spec trim_leading(binary()) -> binary().
%% The original bound <<C, Rest/binary>> and then re-matched the same bound
%% pattern inside a case — a redundant double match. Guarded clause heads
%% express the same logic directly (and the catch-all covers <<>>).
trim_leading(<<C, Rest/binary>>) when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
    trim_leading(Rest);
trim_leading(Bin) ->
    Bin.
%% @doc Trim trailing whitespace (space, tab, CR, LF) from the binary string.
%% @end
-spec trim_trailing(binary()) -> binary().
trim_trailing(<<>>) -> <<>>;
trim_trailing(Bin) ->
    %% byte_size/1 instead of the generic size/1: explicit about binaries.
    Sz = byte_size(Bin) - 1,
    case Bin of
        %% Split off the last byte; recurse while it is whitespace.
        <<Rest:Sz/binary, C>> when C =:= $\s; C =:= $\t; C =:= $\r; C =:= $\n ->
            trim_trailing(Rest);
        _Other -> Bin
    end.
%% @doc Trim whitespace from both ends of the binary string.
%% @end
-spec trim(binary()) -> binary().
trim(Bin) ->
    %% Order of the two passes does not matter; result is identical.
    trim_leading(trim_trailing(Bin)).
%% @doc Convert binary UTF-8 encoded string to lowercase. Note that only
%% latin1 characters are actually converted.
%% @end
-spec to_lower(binary()) -> binary().
%% Decode codepoint-by-codepoint; string:to_lower/1 on an integer only
%% lowercases latin1, other codepoints pass through unchanged.
to_lower(Bin) ->
<< <<(string:to_lower(Char))/utf8>> || <<Char/utf8>> <= Bin >>.
%% @doc Convert binary UTF-8 encoded string to uppercase. Note that only
%% latin1 characters are actually converted.
%% @end
-spec to_upper(binary()) -> binary().
to_upper(Bin) ->
<< <<(string:to_upper(Char))/utf8>> || <<Char/utf8>> <= Bin >>.
%% Scanning binaries
%% @doc Parse binary while given predicate function evaluates to `true'.
%% Returns {AcceptedPrefix, Rest}.
%% @end
-spec parse_while(binary(), fun((char()) -> boolean())) -> {Result :: binary(), Rest :: binary()}.
parse_while(Bin, Fun) ->
parse_while(Bin, Fun, 0).
%% Pos is the length of the accepted prefix; whole binary accepted when
%% Pos reaches the end.
parse_while(Bin, _Fun, Pos) when Pos =:= size(Bin) ->
{Bin, <<>>};
parse_while(Bin, Fun, Pos) when is_function(Fun) ->
<<Start:Pos/binary, Char, Rest/binary>> = Bin,
case Fun(Char) of
false ->
{Start, <<Char, Rest/binary>>};
_ ->
parse_while(Bin, Fun, Pos + 1)
end.
%% @doc Parse binary until given predicate function evaluates to `true' or until given character is encountered
%% @end
-spec parse_until(binary(), fun((char()) -> boolean()) | char()) -> {Result :: binary(), Rest :: binary()}.
parse_until(Bin, Char) when is_integer(Char) ->
parse_while(Bin, fun (C) -> C =/= Char end, 0);
parse_until(Bin, Fun) when is_function(Fun) ->
parse_while(Bin, fun (C) -> not Fun(C) end, 0).
%% Conversions
%% NOTE(review): these local definitions share names with BIFs that are
%% auto-imported in modern OTP (binary_to_integer/1 etc.) — confirm the
%% module's compiler options if building on a recent release.
%% @doc Convert UTF-8 binary to integer
%% @end
-spec binary_to_integer(binary()) -> integer().
binary_to_integer(Bin) when is_binary(Bin) ->
list_to_integer(binary_to_list(Bin)).
%% @doc Convert integer to ASCII binary.
%% @end
-spec integer_to_binary(integer()) -> binary().
integer_to_binary(Int) when is_integer(Int) ->
list_to_binary(integer_to_list(Int)).
%% @doc Convert UTF-8 binary to floating point number
%% @end
-spec binary_to_float(binary()) -> float().
binary_to_float(Bin) when is_binary(Bin) ->
list_to_float(binary_to_list(Bin)).
%% @doc Convert binary hex string to binary
%%
%% Convert binary hex string to binary. For example, binary hex string
%% `<<"68656c6c6f">>' will be converted to `<<"hello">>'.
%% An odd-length string is handled by decoding the first digit as a
%% stand-alone low nibble (e.g. `<<"a">>' decodes to `<<10>>').
%% @end
-spec hexstr_to_binary(binary()) -> binary().
hexstr_to_binary(<<L, Bin/binary>>) when byte_size(Bin) rem 2 =:= 0 ->
    %% Odd total length: consume the leading digit as one byte.
    Byte = int(L),
    hexstr_to_binary(Bin, <<Byte>>);
hexstr_to_binary(Bin) ->
    hexstr_to_binary(Bin, <<>>).
%% Consume two digits (high/low nibble) per output byte.
hexstr_to_binary(<<>>, Res) -> Res;
hexstr_to_binary(<<H, L, Rest/binary>>, Res) ->
    Byte = int(H) * 16 + int(L),
    hexstr_to_binary(Rest, <<Res/binary, Byte>>).
%% @doc Convert binary to hex string
%%
%% Convert binary to the hex string. For example, binary string
%% `<<"hello">>' will be converted to hex binary string `<<"68656c6c6f">>'.
%% @end
-spec binary_to_hexstr(binary()) -> binary().
binary_to_hexstr(Bin) ->
    binary_to_hexstr(Bin, <<>>).
binary_to_hexstr(<<>>, Res) -> Res;
binary_to_hexstr(<<Byte, Rest/binary>>, Res) ->
    H = hex(Byte div 16),
    L = hex(Byte rem 16),
    binary_to_hexstr(Rest, <<Res/binary, H, L>>).
%% Nibble (0..15) to lowercase hex digit.
hex(N) when N >= 0, N =< 9 -> N + $0;
hex(N) when N >= 10, N =< 15 -> N - 10 + $a.
%% Hex digit to value. Restricted to 0-9/a-f/A-F: the original accepted
%% any a-z/A-Z letter and silently decoded garbage for non-hex input;
%% now an invalid digit crashes with function_clause instead.
int(C) when C >= $0, C =< $9 -> C - $0;
int(C) when C >= $a, C =< $f -> C - $a + 10;
int(C) when C >= $A, C =< $F -> C - $A + 10.
%% @doc Convert floating point number to ASCII binary.
%%
%% <em>Note that at most three digits after floating point are returned</em>
%% @end
-spec float_to_binary(float()) -> binary().
float_to_binary(Float) when is_float(Float) ->
[Res] = io_lib:format("~.3f", [Float]),
Bin = list_to_binary(Res),
Sz1 = size(Bin) - 1,
Sz2 = Sz1 - 1,
% Strip last zeros
%% Formats with exactly three decimals, then strips up to two trailing
%% zeros — always keeping at least one digit after the point.
case Bin of
<<R:Sz2/binary, "00">> -> R;
<<R:Sz1/binary, "0">> -> R;
R -> R
end.
%% Tests
-ifdef(TEST).
%% EUnit test generator covering trimming/case conversion, numeric and
%% hex conversions, and the binary scanning helpers.
-spec binary_test_() -> list().
binary_test_() ->
[% trimming, upper, lower
?_assertEqual(<<>>, trim_leading(<<>>)),
?_assertEqual(<<>>, trim_leading(<<" ">>)),
?_assertEqual(<<"ABC DEF ">>, trim_leading(<<" ABC DEF ">>)),
?_assertEqual(<<>>, trim_trailing(<<>>)),
?_assertEqual(<<>>, trim_trailing(<<" ">>)),
?_assertEqual(<<" ABC DEF">>, trim_trailing(<<" ABC DEF ">>)),
?_assertEqual(<<>>, trim(<<>>)),
?_assertEqual(<<>>, trim(<<" ">>)),
?_assertEqual(<<"ABC DEF">>, trim(<<" ABC DEF ">>)),
?_assertEqual(<<"hello">>, to_lower(<<"HELlo">>)),
?_assertEqual(<<"HELLO">>, to_upper(<<"HELlo">>)),
% conversions to and from binary
?_assertEqual(123, binary_to_integer(<<"123">>)),
?_assertEqual(<<"123">>, integer_to_binary(123)),
?_assertEqual(-123.25, binary_to_float(<<"-123.25">>)),
?_assertEqual(<<"-123.0">>, float_to_binary(-123.0)),
?_assertEqual(<<"-123.2">>, float_to_binary(-123.2)),
?_assertEqual(<<"-123.25">>, float_to_binary(-123.25)),
?_assertEqual(<<"-123.253">>, float_to_binary(-123.253)),
?_assertEqual(<<"hello">>, hexstr_to_binary(<<"68656c6C6F">>)),
?_assertEqual(<<"68656c6c6f">>, binary_to_hexstr(<<"hello">>)),
?_assertEqual(<<"\n">>, hexstr_to_binary(<<"a">>)),
?_assertEqual(<<"0a">>, binary_to_hexstr(<<"\n">>)),
% scanning
?_assertEqual({<<"some">>, <<"! rest ">>}, parse_while(<<"some! rest ">>, fun (C) -> C =/= $! end)),
?_assertEqual({<<"some! rest ">>, <<>>}, parse_while(<<"some! rest ">>, fun (C) -> C =/= $$ end)),
?_assertEqual({<<"some">>, <<" rest ">>}, parse_until(<<"some rest ">>, fun (C) -> C =:= $ end)),
?_assertEqual({<<"somerest">>, <<>>}, parse_until(<<"somerest">>, fun (C) -> C =:= $ end)),
?_assertEqual({<<"some">>, <<"! rest ">>}, parse_until(<<"some! rest ">>, $!)),
?_assertEqual({<<"some! rest ">>, <<>>}, parse_until(<<"some! rest ">>, $$))
].
-endif. | apps/sip/src/syntax/sip_binary.erl | 0.552781 | 0.422147 | sip_binary.erl | starcoder |
%% @author Couchbase <<EMAIL>>
%% @copyright 2015-2020 Couchbase, Inc.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%%
%% Simple KV storage using ETS table as front end and file as the back end
%% for persistence.
%% Initialize using simple_store:start_link([your_store_name]).
%% Current consumer is XDCR checkpoints.
%%
-module(simple_store).
-include("ns_common.hrl").
%% APIs
-export([start_link/1,
get/2, get/3,
set/3,
delete/2,
delete_matching/2,
iterate_matching/2]).
%% Macros
%% Persist the ETS table to file after 10 secs.
%% All updates to the table during that window will automatically get batched
%% and flushed to the file together.
-define(FLUSH_AFTER, 10 * 1000).
%% Max number of unsucessful flush attempts before giving up.
-define(FLUSH_RETRIES, 10).
%% Exported APIs
%% Start the store's worker process (a work_queue) named after the store;
%% init/1 runs inside that process and loads/creates the ETS table.
start_link(StoreName) ->
ProcName = get_proc_name(StoreName),
work_queue:start_link(ProcName, fun () -> init(StoreName) end).
get(StoreName, Key) ->
get(StoreName, Key, false).
%% Read directly from the (protected) ETS table — no process round-trip.
%% NOTE(review): the default of get/2 is `false', which is indistinguishable
%% from a stored value `false' — callers should use get/3 where that matters.
get(StoreName, Key, Default) ->
case ets:lookup(StoreName, Key) of
[{Key, Value}] ->
Value;
[] ->
Default
end.
%% All mutations are serialized through the store's worker process.
set(StoreName, Key, Value) ->
do_work(StoreName, fun update_store/2, [Key, Value]).
delete(StoreName, Key) ->
do_work(StoreName, fun delete_from_store/2, [Key]).
%% Delete keys with matching prefix
delete_matching(StoreName, KeyPattern) ->
do_work(StoreName, fun del_matching/2, [KeyPattern]).
%% Return keys with matching prefix
iterate_matching(StoreName, KeyPattern) ->
ets:foldl(
fun ({Key, Value}, Acc) ->
case misc:is_prefix(KeyPattern, Key) of
true ->
?metakv_debug("Returning Key ~p.", [Key]),
[{Key, Value} | Acc];
false ->
Acc
end
end, [], StoreName).
%% Internal
%% Runs inside the worker process: restore the table from disk if a
%% snapshot exists, otherwise create a fresh named table.
init(StoreName) ->
%% Initialize flush_pending to false.
erlang:put(flush_pending, false),
%% Populate the table from the file if the file exists otherwise create
%% an empty table.
FilePath = path_config:component_path(data, get_file_name(StoreName)),
Read =
case filelib:is_regular(FilePath) of
true ->
?metakv_debug("Reading ~p content from ~s", [StoreName, FilePath]),
case ets:file2tab(FilePath, [{verify, true}]) of
{ok, StoreName} ->
true;
{error, Error} ->
?metakv_debug("Failed to read ~p content from ~s: ~p",
[StoreName, FilePath, Error]),
false
end;
false ->
false
end,
case Read of
true ->
ok;
false ->
?metakv_debug("Creating Table: ~p", [StoreName]),
ets:new(StoreName, [named_table, set, protected]),
ok
end.
%% Run Fun synchronously inside the store's worker process.
do_work(StoreName, Fun, Args) ->
work_queue:submit_sync_work(
get_proc_name(StoreName),
fun () ->
Fun(StoreName, Args)
end).
%% Update the ETS table and schedule a flush to the file.
update_store(StoreName, [Key, Value]) ->
?metakv_debug("Updating data ~p in table ~p.", [[{Key, Value}], StoreName]),
ets:insert(StoreName, [{Key, Value}]),
schedule_flush(StoreName, ?FLUSH_RETRIES).
%% Delete from the ETS table and schedule a flush to the file.
delete_from_store(StoreName, [Key]) ->
?metakv_debug("Deleting key ~p in table ~p.", [Key, StoreName]),
ets:delete(StoreName, Key),
schedule_flush(StoreName, ?FLUSH_RETRIES).
del_matching(StoreName, [KeyPattern]) ->
ets:foldl(
fun ({Key, _}, _) ->
case misc:is_prefix(KeyPattern, Key) of
true ->
?metakv_debug("Deleting Key ~p.", [Key]),
ets:delete(StoreName, Key);
false ->
ok
end
end, undefined, StoreName),
schedule_flush(StoreName, ?FLUSH_RETRIES).
%% Nothing can be done if we failed to flush repeatedly.
schedule_flush(StoreName, 0) ->
?metakv_debug("Tried to flush table ~p ~p times but failed. Giving up.",
[StoreName, ?FLUSH_RETRIES]),
exit(flush_failed);
%% If flush is pending then nothing else to do otherwise schedule a
%% flush to the file for later.
%% flush_pending lives in the worker's process dictionary, so updates
%% within the ?FLUSH_AFTER window are batched into a single write.
schedule_flush(StoreName, NumRetries) ->
case erlang:get(flush_pending) of
true ->
?metakv_debug("Flush is already pending."),
ok;
false ->
erlang:put(flush_pending, true),
{ok, _} = timer:apply_after(?FLUSH_AFTER, work_queue, submit_work,
[self(),
fun () ->
flush_table(StoreName, NumRetries)
end]),
?metakv_debug("Successfully scheduled a flush to the file."),
ok
end.
%% Flush the table to the file.
flush_table(StoreName, NumRetries) ->
%% Reset flush pending.
erlang:put(flush_pending, false),
FilePath = path_config:component_path(data, get_file_name(StoreName)),
?metakv_debug("Persisting Table ~p to file ~p.", [StoreName, FilePath]),
case ets:tab2file(StoreName, FilePath, [{extended_info, [object_count]}]) of
ok ->
ok;
{error, Error} ->
?metakv_debug("Failed to persist table ~p to file ~p with error ~p.",
[StoreName, FilePath, Error]),
%% Reschedule another flush.
schedule_flush(StoreName, NumRetries - 1)
end.
get_proc_name(StoreName) ->
list_to_atom(get_file_name(StoreName)).
get_file_name(StoreName) ->
atom_to_list(?MODULE) ++ "_" ++ atom_to_list(StoreName). | src/simple_store.erl | 0.54577 | 0.412885 | simple_store.erl | starcoder |
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(porkrind_json).
-include("porkrind_internal.hrl").
% These are based on the JSON structures returned by Jiffy. That means:
%
% An object is a single element tuple that contains a list of two-tuples. Each
% of the two-tuples first element is a binary. The second element is
% any valid JSON value.
%
% An array is an Erlang list of valid JSON values
-export([
is_json_object/0,
is_json_array/0,
is_json_equal_to/1,
has_json_key/1,
has_json_path/1,
has_json_matching/2,
has_json_matching_at/2,
has_json_entry/2,
has_json_entry_at/3,
has_json_entries/1,
has_json_entries_at/2,
has_json_value/1
]).
%% @doc Matcher accepting any jiffy-style JSON object: a one-tuple
%% wrapping a proplist.
is_json_object() ->
#'porkrind.matcher'{
name = is_json_object,
args = [],
match = fun(Value) ->
case Value of
{Props} when is_list(Props) ->
ok;
_ ->
?PR_FAIL({not_object, Value})
end
end,
reason = fun({not_object, Value}) ->
io_lib:format("~p is not a JSON object", [Value])
end
}.
%% @doc Matcher accepting any Erlang list (jiffy's JSON array encoding);
%% reuses the generic list matcher with a JSON-specific name and reason.
is_json_array() ->
M = porkrind_types:is_list(),
M#'porkrind.matcher'{
name = is_json_array,
reason = fun({bad_type, Value}) ->
io_lib:format("~p is not a JSON array", [Value])
end
}.
%% @doc Matcher for JSON equivalence with Expect (key order ignored,
%% atoms equal their binary spelling; see json_eq/2).
is_json_equal_to(Expect) ->
#'porkrind.matcher'{
name = is_json_equal,
args = [Expect],
match = fun(Value) ->
case json_eq(Expect, Value) of
true ->
ok;
false ->
?PR_FAIL({not_equal, Value})
end
end,
reason = fun({not_equal, Value}) ->
io_lib:format("~p is not JSON equivalent to ~p", [Value, Expect])
end
}.
%% @doc Matcher: the object has the given key (atom or binary; find_key/2
%% tolerates either spelling).
has_json_key(Key) when is_binary(Key); is_atom(Key) ->
M = #'porkrind.matcher'{
name = has_json_key,
args = [Key],
match = fun({Props}) ->
case find_key(Key, Props) of
{_, _} ->
ok;
not_found ->
?PR_FAIL({not_found, {Props}})
end
end,
reason = fun({not_found, Value}) ->
io_lib:format("~p is missing key ~p", [Value, Key])
end
},
porkrind_logic:all_of([
is_json_object(),
M
]).
%% @doc Matcher: the object contains the given path of nested keys.
has_json_path(Path) when is_list(Path) ->
%% Validate path elements eagerly so bad arguments fail at build time.
lists:foreach(fun(P) ->
if is_binary(P) orelse is_atom(P) -> ok; true ->
erlang:error({badarg, P})
end
end, Path),
M = #'porkrind.matcher'{
name = has_json_path,
args = [Path],
match = fun(Value) ->
case json_path(Value, Path) of
{ok, _} ->
ok;
not_found ->
?PR_FAIL({no_match, Value})
end
end,
reason = fun({no_match, Value}) ->
io_lib:format("~p has no path ~p", [Value, Path])
end
},
porkrind_logic:all_of([
is_json_object(),
M
]).
%% @doc Matcher: the value under Key satisfies the given sub-matcher.
has_json_matching(Key, Matcher0) when is_binary(Key); is_atom(Key) ->
Matcher = porkrind_util:maybe_wrap(Matcher0),
M = #'porkrind.matcher'{
name = has_json_matching,
args = [Key, Matcher0],
match = fun({Props}) ->
case find_key(Key, Props) of
{_, Found} ->
porkrind:match(Found, Matcher);
not_found ->
?PR_FAIL({not_found, {Props}})
end
end,
reason = fun
({not_found, Value}) ->
io_lib:format("~p is missing key ~p", [Value, Key])
end
},
porkrind_logic:all_of([
is_json_object(),
M
]).
%% @doc Matcher: the value at the nested Path satisfies the sub-matcher.
has_json_matching_at(Path, Matcher0) when is_list(Path) ->
lists:foreach(fun(P) ->
if is_binary(P) orelse is_atom(P) -> ok; true ->
erlang:error({badarg, P})
end
end, Path),
Matcher = porkrind_util:maybe_wrap(Matcher0),
M = #'porkrind.matcher'{
name = has_json_matching_at,
args = [Path, Matcher0],
match = fun(Value) ->
case json_path(Value, Path) of
{ok, Found} ->
porkrind:match(Found, Matcher);
not_found ->
?PR_FAIL({not_found, Value})
end
end,
reason = fun
({not_found, Value}) ->
Args = [Value, Path],
io_lib:format("~p does not have a JSON path matching ~p", Args)
end
},
porkrind_logic:all_of([
is_json_object(),
M
]).
%% @doc Matcher: Key maps to a value JSON-equal to Expect.
has_json_entry(Key, Expect) ->
?PR_NAME(has_json_entry, has_json_matching(Key, is_json_equal_to(Expect))).
%% @doc Matcher: Path ++ [Key] resolves to a value JSON-equal to Expect.
has_json_entry_at(Path, Key, Expect) when is_list(Path) ->
M = has_json_matching_at(Path ++ [Key], is_json_equal_to(Expect)),
?PR_NAME(has_json_entry_at, M).
%% @doc Matcher: all {Key, Value} pairs are present as JSON-equal entries.
has_json_entries(Entries) when is_list(Entries), length(Entries) >= 1 ->
Matchers = lists:map(fun
({K, V}) ->
has_json_entry(K, V);
(Else) ->
erlang:error({badarg, Else})
end, Entries),
?PR_NAME(has_json_entries, porkrind_logic:all_of(Matchers)).
%% @doc Matcher: the object at Path contains all the given entries.
has_json_entries_at(Path, Entries) ->
M = has_json_matching_at(Path, has_json_entries(Entries)),
?PR_NAME(has_json_entries, M).
%% @doc Matcher: some value of the object (any key) matches Expect.
has_json_value(Expect) ->
Matcher = porkrind_lists:has_item(Expect),
M = #'porkrind.matcher'{
name = has_json_value,
args = [Expect],
match = fun({Props}) when is_list(Props) ->
Values = [V || {_K, V} <- Props],
porkrind:match(Values, Matcher)
end
},
porkrind_logic:all_of([
is_json_object(),
M
]).
%% JSON structural equality: objects compare by sorted key/value pairs,
%% arrays element-wise, true/false/null only to themselves, and other
%% atoms compare equal to their binary spelling.
%% NOTE(review): lists:sort/1 uses Erlang term order, where atom keys sort
%% before binary keys — an object mixing atom and binary keys may not
%% align pairwise with its counterpart; confirm that mixed-key objects
%% are out of scope.
json_eq({A}, {B}) ->
json_eq_obj(lists:sort(A), lists:sort(B));
json_eq(A, B) when is_list(A), is_list(B) ->
json_eq_array(A, B);
json_eq(A, B) when A == true; B == true ->
A == B;
json_eq(A, B) when A == false; B == false ->
A == B;
json_eq(A, B) when A == null; B == null ->
A == B;
json_eq(A, B) when is_atom(A); is_atom(B) ->
%% Non-literal atoms are treated as strings: compare binary forms.
maybe_to_bin(A) == maybe_to_bin(B);
json_eq(A, B) ->
A == B.
%% Pairwise comparison of two sorted proplists; keys and values both go
%% through json_eq so atom/binary keys can still match one another.
json_eq_obj([], []) ->
true;
json_eq_obj([{KeyA, ValA} | RestA], [{KeyB, ValB} | RestB]) ->
case json_eq(KeyA, KeyB) andalso json_eq(ValA, ValB) of
true ->
json_eq_obj(RestA, RestB);
false ->
false
end;
json_eq_obj(_, _) ->
% One of the two has fewer elements
false.
%% Element-wise comparison; arrays of different length are unequal.
json_eq_array([], []) ->
true;
json_eq_array([A | RestA], [B | RestB]) ->
case json_eq(A, B) of
true ->
json_eq_array(RestA, RestB);
false ->
false
end;
json_eq_array(_, _) ->
% One of the two has fewer elements
false.
%% Normalize atoms to their (latin1) binary spelling; other terms pass
%% through untouched.
maybe_to_bin(Value) when is_atom(Value) ->
    atom_to_binary(Value, latin1);
maybe_to_bin(Value) ->
    Value.
%% Walk a list of keys into nested objects; {ok, Value} at the end of the
%% path, not_found if any step is missing or a non-object is encountered.
json_path(Value, []) ->
{ok, Value};
json_path({Props}, [NextKey | RestKeys]) when is_list(Props) ->
case find_key(NextKey, Props) of
{_, Value} ->
json_path(Value, RestKeys);
not_found ->
not_found
end;
json_path(_Value, _Path) ->
not_found.
%% Find Key in the proplist, trying the atom<->binary counterpart spelling
%% if the literal key is absent. Returns the pair as stored, or not_found.
find_key(Key, Props) when is_list(Props) ->
case lists:keyfind(Key, 1, Props) of
{Key, Value} ->
{Key, Value};
false ->
case lists:keyfind(swap_bin_and_atom(Key), 1, Props) of
{SwappedKey, Value} ->
{SwappedKey, Value};
false ->
not_found
end
end.
%% NOTE(review): list_to_atom/1 on a binary key creates atoms that are
%% never garbage-collected; fine for a test-matcher library on trusted
%% keys, but do not feed untrusted JSON keys through here.
swap_bin_and_atom(Key) when is_binary(Key) ->
list_to_atom(binary_to_list(Key));
swap_bin_and_atom(Key) when is_atom(Key) ->
list_to_binary(atom_to_list(Key)). | src/porkrind_json.erl | 0.690246 | 0.420005 | porkrind_json.erl | starcoder |
-module(k6_bytea).
-vsn("1.1.2").
-export([count/0, new/1, delete/1, size/1, get/3, set/3, from_binary/1, to_binary/1]).
-on_load(init/0).
-opaque bytea() :: bytea_opaque_resource_type.
%% The byte array resource handle.
-export_type([bytea/0]).
%% Test support
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% ===================================================================
%% NIF initialisation and hooks
%% ===================================================================
%% @private @doc Finds the NIF shared object and attempts to load it.
%% Falls back to a relative priv path when code:priv_dir/1 cannot resolve
%% the application (e.g. when running from the build tree).
init() ->
SoName =
case code:priv_dir(?MODULE) of
{error, bad_name} ->
case filelib:is_dir(filename:join(["..", priv])) of
true ->
filename:join(["..", priv, ?MODULE]);
false ->
filename:join([priv, ?MODULE])
end;
Dir ->
filename:join(Dir, ?MODULE)
end,
ok = erlang:load_nif(SoName, 0).
%% The following bodies are stubs: erlang:load_nif/2 replaces them with
%% native implementations, so they only run if the NIF failed to load.
%% @doc Returns the number of currently allocated byte arrays.
-spec count() -> integer().
count() ->
erlang:nif_error(nif_not_loaded).
%% @doc Creates a new byte array and returns it.
-spec new(Size::integer()) -> bytea().
new(_) ->
erlang:nif_error(nif_not_loaded).
%% @doc Frees a byte array immediately. It is no longer valid for use by this
%% module; any attempts will result in `badarg'.
-spec delete(Bytea::bytea()) -> ok.
delete(_) ->
erlang:nif_error(nif_not_loaded).
%% @doc Returns the size of the byte array in bytes.
-spec size(Bytea::bytea()) -> integer().
size(_) ->
erlang:nif_error(nif_not_loaded).
%% @doc Gets `Len' bytes from the byte array, starting at `From'. If the slice
%% specified exceeds any boundaries, `badarg' results.
-spec get(Bytea::bytea(), From::integer(), Len::integer()) -> binary().
get(_, _, _) ->
erlang:nif_error(nif_not_loaded).
%% @doc Replaces the substring of byte array, starting at `From', with `Value'.
%% If the slice so specified exceeds boundaries of the byte array, `badarg'
%% results.
-spec set(Bytea::bytea(), From::integer(), Value::binary()) -> ok.
set(_, _, _) ->
erlang:nif_error(nif_not_loaded).
%% @doc Creates a new byte array from the given binary string.
%% Plain Erlang built on top of the NIF primitives (new + set).
-spec from_binary(Binary::binary()) -> bytea().
from_binary(Binary) ->
Size = erlang:size(Binary),
Bytea = ?MODULE:new(Size),
?MODULE:set(Bytea, 0, Binary),
Bytea.
%% @doc Gets the entire contents of the byte array as a binary string.
-spec to_binary(Bytea::bytea()) -> binary().
to_binary(Bytea) ->
Size = ?MODULE:size(Bytea),
?MODULE:get(Bytea, 0, Size).
%% ===================================================================
%% Unit tests
%% ===================================================================
-ifdef(TEST).
%% Lifecycle test: allocate, inspect, delete, and verify the allocation
%% counter drops back to zero after garbage collection.
allocation_test() ->
Bytea = ?MODULE:new(256),
?assertMatch(<<>>, Bytea),
?assertEqual(1, ?MODULE:count()),
?assertEqual(256, ?MODULE:size(Bytea)),
?assertEqual(ok, ?MODULE:delete(Bytea)),
?assertError(badarg, ?MODULE:size(Bytea)),
?assertError(badarg, ?MODULE:delete(Bytea)),
?MODULE:new(128),
erlang:garbage_collect(),
% This seems prone to error.
?assertEqual(0, ?MODULE:count()).
%% Round-trips get/set and checks that deleted handles raise badarg.
usage_test() ->
Bytea = ?MODULE:new(5),
?assertEqual(<<0, 0, 0, 0, 0>>, ?MODULE:get(Bytea, 0, 5)),
?assertEqual(ok, ?MODULE:set(Bytea, 2, <<$H, $I>>)),
?assertEqual(<<0, $H, $I, 0>>, ?MODULE:get(Bytea, 1, 4)),
?assertEqual(ok, ?MODULE:delete(Bytea)),
?assertError(badarg, ?MODULE:get(Bytea, 0, 1)),
?assertError(badarg, ?MODULE:set(Bytea, 0, <<1>>)).
-endif. | src/k6_bytea.erl | 0.537527 | 0.450178 | k6_bytea.erl | starcoder |
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(hocon_md).
-export([h/2, link/2, local_link/2, th/1, td/1, ul/1, code/1]).
-export([join/1, indent/2]).
%% Render a markdown heading of the given level (1-6).
h(1, Text) -> format("# ~s~n", [Text]);
h(2, Text) -> format("## ~s~n", [Text]);
h(3, Text) -> format("### ~s~n", [Text]);
h(4, Text) -> format("#### ~s~n", [Text]);
h(5, Text) -> format("##### ~s~n", [Text]);
h(6, Text) -> format("###### ~s~n", [Text]).
%% Inline markdown link; local_link targets an in-page anchor derived
%% from the anchor text (see anchor/1).
link(Text, Link) -> format("[~s](~s)", [Text, Link]).
local_link(Text, Anchor) ->
format("[~s](#~s)", [Text, anchor(Anchor)]).
%% Table header row plus the mandatory "----" alignment row.
th(Elements) ->
Alignment = lists:join("|", ["----" || _ <- lists:seq(0, length(Elements) - 1)]),
format("~s~n~s~n", [lists:join("|", Elements), Alignment]).
%% Table data row; cell contents get their '|' characters escaped.
td(Elements) ->
format("~s~n", [lists:join("|", [escape_bar(E) || E <- Elements])]).
%% Bullet list followed by a blank line.
ul(Elements) ->
lists:flatten([format("- ~s~n", [E]) || E <- Elements] ++ "\n").
%% Format and flatten to a plain string.
format(Template, Values) ->
    Chars = io_lib:format(Template, Values),
    lists:flatten(Chars).
%% Escape '|' characters so cell content does not break markdown tables.
%% Fix: the original replaced "|" with "|" (a no-op); markdown requires
%% the pipe to be backslash-escaped inside table cells.
escape_bar(Str) ->
    lists:flatten(string:replace(Str, "|", "\\|", all)).
code(Text) -> ["<code>", Text, "</code>"].
%% Join the given markdown fragments with newline separators.
%% Fix: the original passed [Mds] (a single-element list) to lists:join/2,
%% so no separator was ever inserted — it merely wrapped the input.
join(Mds) ->
    lists:join("\n", Mds).
%% Indent every non-empty line of the given chardata by N spaces.
%% A list input is first joined with newlines and normalized to a
%% UTF-8 binary, then handled by the binary clause.
indent(N, Lines) when is_list(Lines) ->
indent(N, unicode:characters_to_binary(infix(Lines, "\n"), utf8));
indent(N, Lines0) ->
Pad = lists:duplicate(N, $\s),
Lines = binary:split(Lines0, <<"\n">>, [global]),
infix([pad(Pad, Line) || Line <- Lines], "\n").
%% Leave empty lines unpadded so trailing whitespace is not introduced.
pad(_Pad, <<>>) -> <<>>;
pad(Pad, Line) -> [Pad, Line].
%% Interleave In between list elements (like lists:join/2).
infix([], _) -> [];
infix([X], _) -> [X];
infix([H | T], In) -> [H, In | infix(T, In)].
%% ref: https://gist.github.com/asabaylus/3071099
%% GitHub flavored markdown likes ':' being removed
%% VuePress likes ':' being replaced by '-'
%% @doc Normalize a heading into an in-page anchor name: lower-case it,
%% drop dots and single quotes, and turn colons and whitespace into
%% hyphens.
anchor(Raw) ->
    Lowered = string:lowercase(bin(Raw)),
    Rules = [{<<"\\.">>, <<"">>}, %% no dot
             {<<"'">>, <<"">>}, %% no single quotes
             {<<":">>, <<"-">>}, %% vuepress
             {<<"\\s">>, <<"-">>} %% space replaced by hyphen
            ],
    Apply = fun({Pattern, Replacement}, Acc) ->
                    re:replace(Acc, Pattern, Replacement,
                               [{return, list}, global])
            end,
    lists:foldl(Apply, Lowered, Rules).
%% Coerce a string, atom or binary into a UTF-8 binary; binaries pass
%% through unchanged.
bin(S) when is_list(S) -> unicode:characters_to_binary(S, utf8);
bin(A) when is_atom(A) -> atom_to_binary(A, utf8);
bin(B) when is_binary(B) -> B. | src/hocon_md.erl | 0.61231 | 0.432782 | hocon_md.erl | starcoder |
%% The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at http://www.mozilla.org/MPL/
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
%% The Original Code is Erlando.
%%
%% The Initial Developer of the Original Code is VMware, Inc.
%% Copyright (c) 2011-2013 VMware, Inc. All rights reserved.
%%
-module(state_t, [M]).
-behaviour(monad_trans).
-compile({parse_transform, do}).
-compile({parse_transform, pmod_pt}).
-export_type([state_t/3]).
-export(['>>='/2, return/1, fail/1]).
-export([get/0, put/1, eval/2, exec/2, run/3,
modify/1, modify_and_return/1, lift/1]).
-opaque state_t(S, M, A) :: fun( (S) -> monad:monadic(M, {A, S}) ).
-spec '>>='(state_t(S, M, A), fun( (A) -> state_t(S, M, B) ), M) -> state_t(S, M, B).
%% Monadic bind for the state transformer: run X with the incoming state,
%% then feed the produced value to Fun and run the resulting computation
%% with the updated state. `M' is the inner monad bound by the
%% parameterized-module declaration; the list comprehension is rewritten
%% into monadic code by the `do' parse transform.
'>>='(X, Fun) ->
    fun (S) ->
        do([M || {A, S1} <- X(S),
                 (Fun(A))(S1)])
    end.
-spec return(A, M) -> state_t(_S, M, A).
%% Lift a plain value into the state monad; the state passes through
%% untouched.
return(Value) ->
    fun (State) ->
        M:return({Value, State})
    end.
-spec fail(any(), M) -> state_t(_S, M, _A).
%% Abort the computation by delegating to the inner monad's fail/1; the
%% state is discarded.
fail(Reason) ->
    fun (_State) ->
        M:fail(Reason)
    end.
-spec get(M) -> state_t(S, M, S).
%% Produce the current state as the computation's value.
get() ->
    fun (State) ->
        M:return({State, State})
    end.
-spec put(S, M) -> state_t(S, M, ok).
%% Replace the current state, yielding `ok' as the value.
put(NewState) ->
    fun (_Old) ->
        M:return({ok, NewState})
    end.
-spec eval(state_t(S, M, A), S, M) -> monad:monadic(M, A).
%% Run the computation on an initial state and keep only its result,
%% discarding the final state.
eval(SM, S) ->
    do([M || {A, _S1} <- SM(S),
             return(A)]).
-spec exec(state_t(S, M, _A), S, M) -> monad:monadic(M, S).
%% Run the computation on an initial state and keep only the final state,
%% discarding the result.
exec(SM, S) ->
    do([M || {_A, S1} <- SM(S),
             return(S1)]).
%%-spec run(state_t(S, M, A), S, M) -> monad:monadic(M, {A, S}).
%% Run a stateful computation on an initial state, returning the inner
%% monadic value wrapping `{Result, FinalState}'. The trailing module
%% parameter is unused here.
run(StateFun, InitialState, _M) ->
    StateFun(InitialState).
-spec modify(fun( (S) -> S ), M) -> state_t(S, M, ok).
%% Apply `Fun' to the current state, yielding `ok' as the value.
modify(Fun) ->
    fun (State) ->
        M:return({ok, Fun(State)})
    end.
-spec modify_and_return(fun( (S) -> {A, S} ), M) -> state_t(S, M, A).
%% Apply `Fun', which must return both a value and the new state as a
%% `{Value, NewState}' pair.
modify_and_return(Fun) ->
    fun (State) ->
        M:return(Fun(State))
    end.
-spec lift(monad:monadic(M, A), M) -> state_t(_S, M, A).
%% Wrap a computation from the inner monad so it can run inside the state
%% transformer: the state is threaded through unchanged alongside the
%% inner result.
lift(X) ->
    fun (S) ->
        do([M || A <- X,
                 return({A, S})])
    end. | apps/erlando/src/state_t.erl | 0.63477 | 0.468365 | state_t.erl | starcoder |
%%==============================================================================
%% Copyright 2020 Erlang Solutions Ltd.
%% Licensed under the Apache License, Version 2.0 (see LICENSE file)
%%==============================================================================
-module(amoc_coordinator_worker).
-behaviour(gen_server).
%% API
-export([start_link/1, add/2,
stop/1, reset/1,
timeout/1]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2]).
-type event() :: amoc_coordinator:coordination_event().
-type action() :: amoc_coordinator:coordination_action().
-type data() :: amoc_coordinator:coordination_data().
-record(state, {required_n = all :: pos_integer() | all,
n = 0 :: non_neg_integer(),
actions = [] :: [action()],
collect_data = true :: boolean(),
accumulator = [] :: [data()]}).
-type state() :: #state{}.
%%%===================================================================
%%% API
%%%===================================================================
-spec start_link(amoc_coordinator:normalized_coordination_item()) -> {ok, Pid :: pid()}.
%% @doc Spawn a (non-registered) worker handling one coordination item.
start_link(CoordinationItem) ->
    gen_server:start_link(?MODULE, CoordinationItem, []).

-spec reset(pid()) -> ok.
%% @doc Synchronously fire the worker's actions with a `reset' event and
%% clear its accumulated data.
reset(Pid) ->
    gen_server:call(Pid, {reset, reset}).

-spec timeout(pid()) -> ok.
%% @doc Same as reset/1, but the actions see a `timeout' event.
timeout(Pid) ->
    gen_server:call(Pid, {reset, timeout}).

-spec stop(pid()) -> ok.
%% @doc Fire the actions one last time with a `stop' event, then
%% terminate the worker normally.
stop(Pid) ->
    gen_server:call(Pid, {reset, stop}).

-spec add(pid(), data()) -> ok.
%% @doc Asynchronously record one piece of user data.
add(Pid, Data) ->
    gen_server:cast(Pid, {add, Data}).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
-spec init(amoc_coordinator:normalized_coordination_item()) -> {ok, state()}.
%% @doc Initialize worker state. Data accumulation is enabled only when
%% at least one action wants to see the collected data (arity 2 or 3).
init({NoOfUsers, Actions}) ->
    {ok, #state{required_n = NoOfUsers,
                actions = Actions,
                collect_data = is_acc_required(Actions)}}.
-spec handle_call({reset, reset | timeout | stop}, term(), state()) ->
    {reply, ok, state()} | {stop, normal, ok, state()}.
%% @doc Fire the actions with the given event. A `stop' event also
%% terminates the server (normally) after replying `ok'.
handle_call({reset, stop}, _From, State) ->
    {stop, normal, ok, reset_state(stop, State)};
handle_call({reset, Event}, _From, State) ->
    {reply, ok, reset_state(Event, State)}.
-spec handle_cast({add, data()}, state()) -> {noreply, state()}.
%% @doc Accumulate one datum; this may trigger the actions when the
%% required number of users is reached.
handle_cast({add, Data}, State) ->
    {noreply, add_data(Data, State)}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
-spec is_acc_required([action()]) -> boolean().
%% @doc Data must be collected iff some action takes more than the event
%% argument, i.e. is anything other than an arity-1 fun.
is_acc_required(Actions) ->
    lists:any(fun(Action) -> not is_function(Action, 1) end, Actions).
-spec add_data(data(), state()) -> state().
%% @doc Count one more datum and, when the accumulator is in use, keep
%% the datum itself; then fire the actions if the threshold is now met.
add_data(Data, #state{n = N, accumulator = Acc} = State) ->
    NewAcc = case State#state.collect_data of
                 true -> [Data | Acc];
                 false -> Acc
             end,
    maybe_reset_state(State#state{n = N + 1, accumulator = NewAcc}).

-spec maybe_reset_state(state()) -> state().
%% @doc Fire the actions with a `coordinate' event once the configured
%% number of users has been collected (never triggers when required_n is
%% the atom `all', which cannot equal an integer count); otherwise leave
%% the state untouched.
maybe_reset_state(#state{n = Count, required_n = Count} = State) ->
    reset_state(coordinate, State);
maybe_reset_state(State) ->
    State.
-spec reset_state(event(), state()) -> state().
%% @doc Run every configured action with the given event (and, for
%% actions that want it, the collected data), then clear the counter and
%% the accumulator. Uses lists:foreach/2 instead of a list comprehension
%% evaluated purely for its side effects.
reset_state(Event, #state{actions = Actions, accumulator = Acc, n = N} = State) ->
    lists:foreach(fun(Action) -> execute_action(Action, {Event, N}, Acc) end,
                  Actions),
    State#state{accumulator = [], n = 0}.
-spec execute_action(action(), event(), [data()]) -> any().
%% @doc Dispatch on the action's arity: arity 1 gets the event only,
%% arity 2 additionally gets the collected data, and arity 3 is invoked
%% once per distinct pair of collected items.
execute_action(Action, Event, _Acc) when is_function(Action, 1) ->
    safe_executions(Action, [Event]);
execute_action(Action, Event, Acc) when is_function(Action, 2) ->
    safe_executions(Action, [Event, Acc]);
execute_action(Action, Event, Acc) when is_function(Action, 3) ->
    PairFun = fun(A, B) -> safe_executions(Action, [Event, A, B]) end,
    distinct_pairs(PairFun, Acc).
-spec safe_executions(function(), [any()]) -> any().
%% Run a user-supplied coordination action, deliberately swallowing any
%% exception (error, exit or throw): a faulty action must not crash the
%% coordination worker. Failures are silent by design — add logging here
%% if action errors need to be diagnosed.
safe_executions(Fun, Args) ->
    try
        erlang:apply(Fun, Args)
    catch
        _:_ -> ok
    end.
-spec distinct_pairs(fun((data(), data()) -> any()), [data()]) -> any().
%% Invoke Fun on every unordered pair of distinct list elements.
%% Degenerate inputs still produce exactly one call so actions always
%% run: an empty list yields Fun(undefined, undefined) and a singleton
%% yields Fun(Element, undefined).
distinct_pairs(Fun, []) ->
    Fun(undefined, undefined);
distinct_pairs(Fun, [OneElement]) ->
    Fun(OneElement, undefined);
distinct_pairs(Fun, [Element1, Element2]) ->
    Fun(Element1, Element2);
distinct_pairs(Fun, [Element1 | Tail]) -> %% length(Tail) >= 2
    [Fun(Element1, Element2) || Element2 <- Tail],
    distinct_pairs(Fun, Tail). | src/amoc_coordinator/amoc_coordinator_worker.erl | 0.506347 | 0.494446 | amoc_coordinator_worker.erl | starcoder |
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved.
%%
%% @doc Khepri path API.
%%
%% A path is the type used by Khepri to reference nodes in the tree structure.
%% A path describes how to reach a node from the root node.
%%
%% A path, or <em>native path</em>, is a list of components. Components can be
%% Erlang atoms and binaries. Example:
%%
%% ```
%% %% Native path.
%% Path = [stock, wood, <<"oak">>].
%% '''
%%
%% A path may contain conditions to tune how a node is matched or to match
%% multiple nodes at once. This is called a <em>path pattern</em>. A path
%% pattern may contain conditions in addition to regular components (Erlang
%% atoms and binaries). See {@link khepri_condition} to learn more about
%% conditions. Example:
%%
%% ```
%% %% Path pattern with a condition on `wood'.
%% PathPattern = [stock,
%% #if_all{conditions = [wood,
%% #if_node_exists{exists = true}]},
%% oak].
%% '''
%%
%% To be user-friendly, string-based and binary-based <em>Unix-like paths</em>
%% are accepted by most functions. The syntax of these <em>Unix paths</em> is
%% described in the {@link unix_path()} type documentation. Example:
%%
%% ```
%% %% Unix path, equivalent of the first native path example.
%% UnixPath = "/:stock/:wood/oak".
%% '''
-module(khepri_path).
-include_lib("stdlib/include/assert.hrl").
-include("include/khepri.hrl").
-export([compile/1,
from_string/1,
from_binary/1,
to_string/1,
to_binary/1,
combine_with_conditions/2,
targets_specific_node/1,
component_targets_specific_node/1,
is_valid/1,
ensure_is_valid/1,
abspath/2,
realpath/1,
pattern_includes_root_node/1]).
-ifdef(TEST).
-export([component_to_string/1]).
-endif.
-type node_id() :: atom() | binary().
%% A node name.
-type component() :: node_id() | ?ROOT_NODE | ?THIS_NODE | ?PARENT_NODE.
%% Component name in a path to a node.
-type native_path() :: [component()].
%% Native path to a node.
%%
%% A native path is a list of atoms, binaries and special components.
%%
%% It is called <em>native</em> because it requires no further processing
%% (unlike {@link unix_path()}) and is the format used internally by the state
%% machine.
%%
%% Special components are:
%% <ol>
%% <li>`?ROOT_NODE' to explicitly mark the root node. A path is absolute by
%% default. Using `?ROOT_NODE' is only useful when manipulating the root node
%% itself (querying it or storing something in the root node).</li>
%% <li>`?THIS_NODE' to make a relative path (the default being an absolute
%% path). This is mostly useful for {@link khepri_condition:keep_while()} to
%% make it easy to put a condition on the node itself.</li>
%% <li>`?PARENT_NODE' to target the parent of a node, with the same benefits
%% and use cases as `?THIS_NODE'.</li>
%% </ol>
%%
%% Example:
%%
%% ```
%% %% Native path.
%% Path = [stock, wood, <<"oak">>].
%% '''
-type native_pattern() :: [pattern_component()].
%% Path pattern which may match zero, one or more nodes.
%%
%% A native pattern is a list of atoms, binaries, special components and
%% conditions.
%%
%% It is called <em>native</em> because it requires no further processing
%% (unlike {@link unix_pattern()}) and is the format used internally by the
%% state machine.
%%
%% See {@link native_path()} for a description of special components.
%%
%% Conditions are any condition defined by {@link
%% khepri_condition:condition()}.
%%
%% Example:
%%
%% ```
%% %% Path pattern with a condition on `wood'.
%% PathPattern = [stock,
%% #if_all{conditions = [wood,
%% #if_node_exists{exists = true}]},
%% oak].
%% '''
-type unix_path() :: string() | binary().
%% Unix-like path to a node.
%%
%% These <em>Unix paths</em> have the following syntax:
%%
%% <ul>
%% <li>Path components are separated by a forward slash, `/'.</li>
%% <li>Atom-based node IDs are prefixed with a `:' character: `:wood'.</li>
%% <li>Binary-based node IDs are written as-is: `oak'.</li>
%% <li>Atom and binaries can be percent-encoded.</li>
%% <li>An absolute path must start with `/', otherwise it is considered a
%% relative path</li>
%% <li>`.' and `..' represent `?THIS_NODE' and `?PARENT_NODE'
%% respectively</li>
%% <li>Simple glob patterns are accepted:
%% <ul>
%% <li>`abc*def' is the same as `#if_name_matches{regex = "^abc.*def$"}'</li>
%% <li>`*' is the same as `?STAR' or `#if_name_matches{regex = any}'</li>
%% <li>`**' is the same as `?STAR_STAR' or `if_path_matches{regex = any}'</li>
%% </ul></li>
%% </ul>
%%
%% <strong>Warning</strong>: There is no special handling of Unicode in tree
%% node names. To use Unicode, it is recommended to either use a native path or
%% a binary-based Unix-like path. If using a string-based Unix-like path, the
%% behavior is undefined and the call may crash. Matching against node names is
%% also undefined behavior and may crash, regardless of the type of path being
%% used. It will be improved in the future.
%%
%% Example:
%% ```
%% %% Unix path, equivalent of the first native path example.
%% UnixPath = "/:stock/:wood/oak".
%% '''
-type unix_pattern() :: string() | binary().
%% Unix-like path pattern to a node.
%%
%% It accepts the following special characters:
%% <ol>
%% <li>`*' anywhere in a path component behaves like a {@link
%% khepri_condition:if_name_matches()}.</li>
%% <li>`**' as a path component behaves like a {@link
%% khepri_condition:if_path_matches()}.</li>
%% </ol>
%%
%% A Unix-like path pattern can't express all the conditions of a native path
%% pattern currently.
%%
%% Otherwise it works as a {@link unix_path()} and has the same syntax and
%% limitations.
%%
%% Example:
%% ```
%% %% Unix path pattern, matching multiple types of oak.
%% UnixPathPattern = "/:stock/:wood/*oak".
%% '''
-type path() :: native_path() | unix_path().
%% Path to a node.
-type pattern() :: native_pattern() | unix_pattern().
%% Path pattern which may match zero, one or more nodes.
-type pattern_component() :: component() | khepri_condition:condition().
%% Path pattern component which may match zero, one or more nodes.
-export_type([path/0,
native_path/0,
unix_path/0,
pattern/0,
native_pattern/0,
unix_pattern/0,
component/0,
pattern_component/0,
node_id/0]).
-spec compile(PathPattern) -> PathPattern when
      PathPattern :: native_pattern().
%% @private
%% @doc Pre-compile every condition in the pattern (via
%% khepri_condition:compile/1) so the work is not redone on each lookup.
compile(PathPattern) ->
    [khepri_condition:compile(Component) || Component <- PathPattern].
-spec from_string(String) -> PathPattern when
      String :: pattern(),
      PathPattern :: native_pattern().
%% @doc Converts a Unix-like path to a native path.
%%
%% The Unix-like string can be either an Erlang string or an Erlang
%% binary; a binary is converted to a list first. For convenience, a
%% native path is also accepted and returned as-is. Anything else is
%% rejected with a `{invalid_path, ...}' throw.
from_string("/" ++ Rest) ->
    %% A leading slash marks an absolute path.
    from_string(Rest, [?ROOT_NODE]);
from_string(String) when is_list(String) ->
    %% No leading slash: relative Unix path, or an already-native path.
    from_string(String, []);
from_string(Binary) when is_binary(Binary) ->
    from_string(erlang:binary_to_list(Binary));
from_string(NotPath) ->
    throw({invalid_path, #{path => NotPath}}).
-spec from_binary(String) -> PathPattern when
      String :: pattern(),
      PathPattern :: native_pattern().
%% @doc Converts a Unix-like path to a native path.
%%
%% Alias for {@link from_string/1}; accepts strings, binaries and native
%% paths alike.
%%
%% @see from_string/1.
from_binary(MaybeString) -> from_string(MaybeString).
%% Main parsing loop: walk the characters, accumulating parsed components
%% in reverse order. Clause order is significant — the first clauses
%% detect the point at which the input switches from characters to
%% already-native components (atoms, binaries, conditions), in which case
%% the remainder is kept verbatim.
from_string([Component | _] = Rest, ReversedPath)
  when ?IS_NODE_ID(Component) orelse
       ?IS_CONDITION(Component) ->
    finalize_path(Rest, ReversedPath);
from_string([Char, Component | _] = Rest, ReversedPath)
  when ?IS_SPECIAL_PATH_COMPONENT(Char) andalso
       (?IS_NODE_ID(Component) orelse
        ?IS_CONDITION(Component)) ->
    finalize_path(Rest, ReversedPath);
from_string([?PARENT_NODE, $/ | _] = Rest, ReversedPath) ->
    %% If the character used to represent the parent node in a regular path
    %% (`^') appears alone in a path component, it's a regular path. Other
    %% special path components may appear alone in both forms though.
    finalize_path(Rest, ReversedPath);
from_string([Char] = Rest, [] = ReversedPath)
  when ?IS_SPECIAL_PATH_COMPONENT(Char) ->
    finalize_path(Rest, ReversedPath);
%% Character-level parsing: `/' separates components, `:' starts an
%% atom-based component, any other character starts a binary component.
from_string([$/ | Rest], ReversedPath) ->
    from_string(Rest, ReversedPath);
from_string([$: | Rest], ReversedPath) ->
    parse_atom_from_string(Rest, ReversedPath);
from_string([Char | _] = Rest, ReversedPath) when is_integer(Char) ->
    parse_binary_from_string(Rest, ReversedPath);
from_string([], ReversedPath) ->
    finalize_path([], ReversedPath);
%% Anything else (e.g. an improper mixture) is rejected, reporting both
%% the reconstructed path and the offending tail.
from_string(Rest, ReversedPath) ->
    NotPath = lists:reverse(ReversedPath) ++ Rest,
    throw({invalid_path, #{path => NotPath,
                           tail => Rest}}).
%% Accumulate the characters of a `:atom' component until a slash or the
%% end of the input, then hand control back to the main parsing loop.
parse_atom_from_string(Rest, ReversedPath) ->
    parse_atom_from_string(Rest, "", ReversedPath).

parse_atom_from_string([$/ | _] = Rest, RevChars, ReversedPath) ->
    Component = finalize_atom_component(RevChars),
    from_string(Rest, prepend_component(Component, ReversedPath));
parse_atom_from_string([Char | Rest], RevChars, ReversedPath)
  when is_integer(Char) ->
    parse_atom_from_string(Rest, [Char | RevChars], ReversedPath);
parse_atom_from_string([] = Rest, RevChars, ReversedPath) ->
    Component = finalize_atom_component(RevChars),
    from_string(Rest, prepend_component(Component, ReversedPath)).

%% Characters were accumulated in reverse: restore the order, undo
%% percent-encoding, then create the atom.
%% NOTE(review): list_to_atom/1 on caller-provided paths grows the atom
%% table; assumed acceptable because paths are trusted input — confirm.
finalize_atom_component(RevChars) ->
    Chars = lists:reverse(RevChars),
    erlang:list_to_atom(percent_decode_string(Chars)).
%% Accumulate the characters of a binary (or special/glob) component
%% until a slash or the end of the input, then hand control back to the
%% main parsing loop.
parse_binary_from_string(Rest, ReversedPath) ->
    parse_binary_from_string(Rest, "", ReversedPath).

parse_binary_from_string([$/ | _] = Rest, RevChars, ReversedPath) ->
    Component = finalize_binary_componenent(RevChars),
    from_string(Rest, prepend_component(Component, ReversedPath));
parse_binary_from_string([Char | Rest], RevChars, ReversedPath)
  when is_integer(Char) ->
    parse_binary_from_string(Rest, [Char | RevChars], ReversedPath);
parse_binary_from_string([] = Rest, RevChars, ReversedPath) ->
    Component = finalize_binary_componenent(RevChars),
    from_string(Rest, prepend_component(Component, ReversedPath)).
%% Characters were accumulated in reverse: restore the order, then map
%% the component onto its meaning: "."/".." are the relative-path special
%% components, "*"/"**" the generic wildcards, a component containing `*'
%% becomes an anchored #if_name_matches{} regex, and anything else is a
%% plain (percent-decoded) binary node name.
%% (The function name keeps its historical spelling — "componenent" —
%% consistently with both call sites.)
finalize_binary_componenent(Acc) ->
    Acc1 = lists:reverse(Acc),
    case Acc1 of
        "." ->
            ?THIS_NODE;
        ".." ->
            ?PARENT_NODE;
        "*" ->
            ?STAR;
        "**" ->
            ?STAR_STAR;
        _ ->
            Acc2 = percent_decode_string(Acc1),
            %% A `*' inside a name is a simple glob: translate it to an
            %% anchored regex so "abc*def" matches "^abc.*def$".
            case re:run(Acc2, "\\*", [{capture, none}]) of
                match ->
                    ReOpts = [global, {return, list}],
                    Regex = re:replace(Acc2, "\\*", ".*", ReOpts),
                    #if_name_matches{regex = "^" ++ Regex ++ "$"};
                nomatch ->
                    erlang:list_to_binary(Acc2)
            end
    end.
%% A path whose first parsed component is a regular node ID (i.e. no
%% leading slash was seen) is interpreted as relative to the current
%% node.
prepend_component(Component, []) when ?IS_NODE_ID(Component) ->
    [Component, ?THIS_NODE];
prepend_component(Component, ReversedPath) ->
    [Component | ReversedPath].

%% Restore forward order and drop an explicit leading root component:
%% the empty path already denotes the root node.
finalize_path(Rest, []) ->
    Rest;
finalize_path(Rest, ReversedPath) ->
    case lists:reverse(ReversedPath, Rest) of
        [?ROOT_NODE | Path] -> Path;
        Path -> Path
    end.
-spec to_string(NativePath) -> UnixPath when
      NativePath :: native_path(),
      UnixPath :: string().
%% @doc Converts a native path to a string.
%%
%% Relative paths (starting with `?THIS_NODE' or `?PARENT_NODE') are
%% rendered without a leading slash; every other path is absolute.
%% The repeated map+join expression was factored into a helper.
to_string([?ROOT_NODE | Path]) ->
    "/" ++ join_components(Path);
to_string([?THIS_NODE = Component]) ->
    component_to_string(Component);
to_string([?THIS_NODE | Path]) ->
    join_components(Path);
to_string([?PARENT_NODE | _] = Path) ->
    join_components(Path);
to_string(Path) ->
    "/" ++ join_components(Path).

%% @private
%% @doc Render each component and join them with `/' separators.
join_components(Path) ->
    string:join(lists:map(fun component_to_string/1, Path), "/").
-spec to_binary(NativePath) -> UnixPath when
      NativePath :: native_path(),
      UnixPath :: binary().
%% @doc Converts a native path to a binary (via its string rendering).
to_binary(Path) ->
    erlang:list_to_binary(to_string(Path)).
-spec component_to_string(component()) -> string().
%% @private
%% @doc Render one path component in the Unix-like syntax: special
%% components map to "/", "." and "..", atoms get a leading `:' and
%% binaries are emitted bare; both atoms and binaries are
%% percent-encoded. An empty binary has no textual representation.
component_to_string(?ROOT_NODE) ->
    "/";
component_to_string(?THIS_NODE) ->
    ".";
component_to_string(?PARENT_NODE) ->
    "..";
component_to_string(Component) when is_atom(Component) ->
    [$: | percent_encode_string(erlang:atom_to_list(Component))];
component_to_string(Component)
  when is_binary(Component) andalso Component =/= <<>> ->
    percent_encode_string(erlang:binary_to_list(Component));
component_to_string(<<>>) ->
    throw(unsupported).
-define(IS_HEX(Digit), (is_integer(Digit) andalso
                        ((Digit >= $0 andalso Digit =< $9) orelse
                         (Digit >= $A andalso Digit =< $F) orelse
                         (Digit >= $a andalso Digit =< $f)))).

%% @private
%% @doc Decode `%XY' percent-escapes in a string. Sequences that are not
%% valid escapes (e.g. a lone `%') are copied through unchanged.
%% Rewritten to accumulate with cons + reverse instead of appending to
%% the tail of the accumulator, which made decoding O(n²).
percent_decode_string(String) when is_list(String) ->
    percent_decode_string(String, []).

percent_decode_string([$%, Digit1, Digit2 | Rest], Acc)
  when ?IS_HEX(Digit1) andalso ?IS_HEX(Digit2) ->
    Char = erlang:list_to_integer([Digit1, Digit2], 16),
    percent_decode_string(Rest, [Char | Acc]);
percent_decode_string([Char | Rest], Acc) ->
    percent_decode_string(Rest, [Char | Acc]);
percent_decode_string([], Acc) ->
    lists:reverse(Acc).
%% @private
%% @doc Percent-encode a string for use in a Unix-like path component.
%% Unreserved characters (ALPHA / DIGIT / "-" / "_" / "~", plus "." when
%% it is not the first character — a leading "." would clash with the
%% "."/".." special components) pass through; everything else becomes
%% `%XY' with uppercase hex digits. Rewritten to accumulate with cons +
%% reverse instead of appending to the tail, which made encoding O(n²).
percent_encode_string(String) when is_list(String) ->
    percent_encode_string(String, []).

percent_encode_string([Char | Rest], Acc)
  when is_integer(Char) andalso
       ((Char >= $A andalso Char =< $Z) orelse
        (Char >= $a andalso Char =< $z) orelse
        (Char >= $0 andalso Char =< $9) orelse
        (Char =:= $. andalso Acc =/= []) orelse
        Char =:= $- orelse Char =:= $_ orelse Char =:= $~) ->
    percent_encode_string(Rest, [Char | Acc]);
percent_encode_string([Char | Rest], Acc) ->
    Escape = lists:flatten(io_lib:format("%~2.16.0B", [Char])),
    percent_encode_string(Rest, lists:reverse(Escape, Acc));
percent_encode_string([], Acc) ->
    lists:reverse(Acc).
-spec combine_with_conditions(PathPattern, Conditions) -> PathPattern when
      PathPattern :: native_pattern(),
      Conditions :: [khepri_condition:condition()].
%% @doc Merge extra conditions into the last component of a path pattern
%% by wrapping them together in an `#if_all{}'. A path with no extra
%% conditions is returned untouched.
combine_with_conditions(Path, []) ->
    Path;
combine_with_conditions(Path, Conditions) ->
    [Last | RevRest] = lists:reverse(Path),
    Combined = #if_all{conditions = [Last | Conditions]},
    lists:reverse(RevRest, [Combined]).
-spec targets_specific_node(PathPattern) -> Ret when
      PathPattern :: native_pattern(),
      Ret :: {true, Path} | false,
      Path :: native_path().
%% @doc Determine whether a pattern can only ever match a single node;
%% if so, return that node's concrete path.
targets_specific_node(PathPattern) ->
    targets_specific_node(PathPattern, []).

targets_specific_node([Condition | Rest], Acc) ->
    case component_targets_specific_node(Condition) of
        {true, Component} -> targets_specific_node(Rest, [Component | Acc]);
        false -> false
    end;
targets_specific_node([], Acc) ->
    {true, lists:reverse(Acc)}.
-spec component_targets_specific_node(ComponentPattern) -> Ret when
      ComponentPattern :: pattern_component(),
      Ret :: {true, Component} | false,
      Component :: component().
%% @private
%% Decide whether one pattern component can only match a single node
%% name. A plain component trivially does; #if_not{} inherits its inner
%% condition's verdict; #if_all{}/#if_any{} fold over their conditions.
%% In both folds the accumulator is `undefined' until a condition gives
%% a definite answer.
component_targets_specific_node(ChildName)
  when ?IS_PATH_COMPONENT(ChildName) ->
    {true, ChildName};
component_targets_specific_node(#if_not{condition = Cond}) ->
    component_targets_specific_node(Cond);
component_targets_specific_node(#if_all{conditions = []}) ->
    false;
%% For a conjunction: keep the first specific name found; a second,
%% different name makes the result `false' (two different exact names
%% cannot both hold). NOTE(review): a specific condition evaluated after
%% a conflict flips the accumulator back to {true, _}; presumably
%% harmless because such a conjunction matches nothing — confirm.
component_targets_specific_node(#if_all{conditions = Conds}) ->
    lists:foldl(
      fun
          (Cond, {true, _} = True) ->
              case component_targets_specific_node(Cond) of
                  True -> True;
                  {true, _} -> false;
                  false -> True
              end;
          (Cond, false) ->
              case component_targets_specific_node(Cond) of
                  {true, _} = True -> True;
                  false -> false
              end;
          (Cond, undefined) ->
              component_targets_specific_node(Cond)
      end, undefined, Conds);
component_targets_specific_node(#if_any{conditions = []}) ->
    false;
%% For a disjunction: every alternative must target the same specific
%% name; any non-specific alternative or any disagreement yields false.
component_targets_specific_node(#if_any{conditions = Conds}) ->
    lists:foldl(
      fun
          (Cond, {true, _} = True) ->
              case component_targets_specific_node(Cond) of
                  True -> True;
                  {true, _} -> false;
                  false -> false
              end;
          (_, false) ->
              false;
          (Cond, undefined) ->
              component_targets_specific_node(Cond)
      end, undefined, Conds);
component_targets_specific_node(_) ->
    false.
-spec is_valid(PathPattern) -> IsValid when
      PathPattern :: native_pattern(),
      IsValid :: true | {false, ComponentPattern},
      ComponentPattern :: pattern_component().
%% @doc Check every component of the pattern, reporting the first invalid
%% one (if any). A non-list argument is itself reported as invalid.
is_valid(PathPattern) when is_list(PathPattern) ->
    Check = fun
                (_, {false, _} = Invalid) -> Invalid;
                (Component, _) -> khepri_condition:is_valid(Component)
            end,
    lists:foldl(Check, true, PathPattern);
is_valid(NotPathPattern) ->
    {false, NotPathPattern}.
-spec ensure_is_valid(PathPattern) -> ok | no_return() when
      PathPattern :: native_pattern().
%% @doc Like {@link is_valid/1}, but throws `{invalid_path, ...}' naming
%% the offending component instead of returning it.
ensure_is_valid(PathPattern) ->
    case is_valid(PathPattern) of
        true ->
            ok;
        {false, Component} ->
            throw({invalid_path, #{path => PathPattern,
                                   component => Component}})
    end.
-spec abspath(Path, BasePath) -> Path when
Path :: native_pattern(),
BasePath :: native_pattern().
abspath([FirstComponent | _] = AbsolutePath, _)
when FirstComponent =/= ?THIS_NODE andalso FirstComponent =/= ?PARENT_NODE ->
AbsolutePath;
abspath([_ | _] = RelativePath, BasePath) ->
realpath(BasePath ++ RelativePath, []);
abspath([] = PathToRoot, _) ->
PathToRoot.
-spec realpath(Path) -> Path when
      Path :: native_pattern().
%% @doc Normalize a path: resolve `?THIS_NODE'/`?PARENT_NODE' components
%% and restart from the root whenever an explicit `?ROOT_NODE' appears.
realpath(Path) ->
    realpath(Path, []).

%% `..' at the top level is a no-op, like in a Unix filesystem.
realpath([?ROOT_NODE | Rest], _Acc) ->
    realpath(Rest, []);
realpath([?THIS_NODE | Rest], Acc) ->
    realpath(Rest, Acc);
realpath([?PARENT_NODE | Rest], [_ | Acc]) ->
    realpath(Rest, Acc);
realpath([?PARENT_NODE | Rest], [] = Acc) ->
    realpath(Rest, Acc);
realpath([Component | Rest], Acc) ->
    realpath(Rest, [Component | Acc]);
realpath([], Acc) ->
    lists:reverse(Acc).
%% True if, after normalization, the pattern is a single wildcard
%% component that matches any name or any path — such a pattern also
%% matches the root node itself.
pattern_includes_root_node(Path) ->
    pattern_includes_root_node1(realpath(Path)).
pattern_includes_root_node1([#if_name_matches{regex = any}]) -> true;
pattern_includes_root_node1([#if_path_matches{regex = any}]) -> true;
pattern_includes_root_node1(_) -> false. | src/khepri_path.erl | 0.840161 | 0.459804 | khepri_path.erl | starcoder |
-module(elocaltime).
-include("elocaltime.hrl").
-export([
start/0,
start/1,
stop/0,
civil_lookup/2,
absolute_lookup/2,
utc2local_datetime/2,
utc2local_ts/2,
local2utc_ts/3,
local2utc_datetime/3,
is_timezone_valid/1,
format/2,
format/3
]).
-type reason() :: any().
-type datetime() :: calendar:datetime() | timestamp().
-type timezone() :: binary() | ?TIMEZONE_UTC | ?TIMEZONE_LOCAL | ?TIMEZONE_FIXED(integer()).
-type disambiguation() :: ?DS_STANDARD | ?DS_DAYLIGHT | ?DS_BOTH.
-type absolute_lookup() :: #absolute_lookup{}.
-type civil_lookup() :: #civil_lookup{}.
-spec start() ->
ok | {error, reason()}.
start() ->
start(temporary).
-spec start(permanent | transient | temporary) ->
ok | {error, reason()}.
start(Type) ->
case application:ensure_all_started(elocaltime, Type) of
{ok, _} ->
ok;
Other ->
Other
end.
-spec stop() ->
ok.
stop() ->
application:stop(elocaltime).
-spec civil_lookup(datetime(), timezone()) ->
{ok, civil_lookup()} | {error, reason()}.
civil_lookup(Date, Timezone) ->
case elocaltime_timezone:get_tz(Timezone) of
{ok, TzRef} ->
elocaltime_nif:civil_lookup(to_datetime(Date), TzRef);
Error ->
Error
end.
-spec absolute_lookup(datetime(), timezone()) ->
{ok, absolute_lookup()} | {error, reason()}.
absolute_lookup(Date, Timezone) ->
case elocaltime_timezone:get_tz(Timezone) of
{ok, TzRef} ->
elocaltime_nif:absolute_lookup(to_seconds(Date), TzRef);
Error ->
Error
end.
-spec utc2local_datetime(datetime(), timezone()) ->
{ok, calendar:datetime()} | {error, reason()}.
utc2local_datetime(Date, Timezone) ->
case absolute_lookup(Date, Timezone) of
{ok, #absolute_lookup{date = LocalDate}} ->
{ok, LocalDate};
Error ->
Error
end.
-spec utc2local_ts(datetime(), timezone()) ->
{ok, timestamp()} | {error, reason()}.
utc2local_ts(Date, Timezone) ->
case utc2local_datetime(Date, Timezone) of
{ok, DateTime} ->
{ok, elocaltime_utils:datetime2ts(DateTime)};
Error ->
Error
end.
-spec local2utc_ts(datetime(), timezone(), disambiguation()) ->
{ok, timestamp()} | {ok, timestamp(), timestamp()} | {error, reason()}.
local2utc_ts(Date, Timezone, Disambiguation) ->
case civil_lookup(Date, Timezone) of
{ok, CivilLookup} ->
disambiguation(CivilLookup, Disambiguation);
Error ->
Error
end.
-spec local2utc_datetime(datetime(), timezone(), disambiguation()) ->
{ok, calendar:datetime()} | {ok, calendar:datetime(), calendar:datetime()} | {error, reason()}.
local2utc_datetime(Date, Timezone, Disambiguation) ->
case local2utc_ts(Date, Timezone, Disambiguation) of
{ok, Dt} ->
{ok, elocaltime_utils:ts2datetime(Dt)};
{ok, DtPre, DtPost} ->
{ok, elocaltime_utils:ts2datetime(DtPre), elocaltime_utils:ts2datetime(DtPost)};
Error ->
Error
end.
-spec is_timezone_valid(timezone()) ->
boolean().
is_timezone_valid(Timezone) ->
elocaltime_timezone:is_valid(Timezone).
-spec format(binary(), datetime()) ->
{ok, binary()}.
format(Format, DateTime) ->
elocaltime_nif:format(Format, to_seconds(DateTime)).
-spec format(binary(), datetime(), timezone()) ->
{ok, binary()}.
format(Format, DateTime, Timezone) ->
case elocaltime_timezone:get_tz(Timezone) of
{ok, TzRef} ->
elocaltime_nif:format(Format, to_seconds(DateTime), TzRef);
Error ->
Error
end.
%private
-spec disambiguation(civil_lookup(), disambiguation()) ->
{ok, timestamp()} | {ok, timestamp(), timestamp()}.
disambiguation(#civil_lookup{civil_kind = Kind, pre = Pre, trans = _Trans, post = Post}, Disambiguation) ->
case Kind of
?CIVIL_KIND_UNIQUE ->
{ok, Pre};
_ ->
case Disambiguation of
?DS_STANDARD ->
{ok, Pre};
?DS_DAYLIGHT ->
{ok, Post};
?DS_BOTH ->
{ok, Pre, Post}
end
end.
-spec to_seconds(datetime()) ->
timestamp().
to_seconds(V) when is_integer(V) ->
V;
to_seconds(V) ->
elocaltime_utils:datetime2ts(V).
-spec to_datetime(datetime()) ->
calendar:datetime().
to_datetime(V) when is_integer(V) ->
elocaltime_utils:ts2datetime(V);
to_datetime(V) ->
V. | src/elocaltime.erl | 0.544317 | 0.41941 | elocaltime.erl | starcoder |
%%
%% Example erlang client implementation for the Etsy statsd
%%
%% Copyright (c) 2011 <NAME>
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without
%% restriction, including without limitation the rights to use,
%% copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following
%% conditions:
%%
%% The above copyright notice and this permission notice shall be
%% included in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
%% OTHER DEALINGS IN THE SOFTWARE.
%%
%% Implements the following functions:
%% increment(Stat, Value, Sample) -> Increment the counter for Stat.
%% decrement(Stat, Value, Sample) -> Decrement the counter for Stat.
%% timing(Stat, Value, Sample) -> Record timing for Stat.
%%
%% Run tests with:
%% Run statsd with the parameters set in the -defines()
%% % erlc -pa /path/to/eunit/ebin -DTEST -o.statsd.erl
%% % erl -noshell -pa. -eval "eunit:test(statsd, [verbose])" -s init stop
%%
%%
-module(statsd).
% Meta information
-author("<NAME> <<EMAIL>>").
% defines
-define(HOST, "localhost").
-define(PORT, 8125).
% exports
-export([increment/1,increment/2,increment/3,
decrement/1, decrement/2, timing/2,
timing/3]).
%
% API functions
%
% functions for incrementing counters
% Increment the counter(s) for Stat by Delta, optionally sampled.
% increment/2 accepts either a delta (integer) or a sample rate (float).
increment(Stat) ->
    increment(Stat, 1, 1.0).

increment(Stat, Delta) when is_integer(Delta) ->
    increment(Stat, Delta, 1.0);
increment(Stat, Samplerate) when is_float(Samplerate) ->
    increment(Stat, 1, Samplerate).

increment(Stat, Delta, Samplerate) when is_integer(Delta), is_float(Samplerate) ->
    send({counter, Stat, Delta, Samplerate}).
% Decrement the counter(s) for Stat; the delta is always applied as a
% negative value, so decrement(Stat, -1) and decrement(Stat, 1) behave
% identically. decrement/2 accepts a delta (integer) or a sample rate
% (float).
decrement(Stat) ->
    decrement(Stat, 1, 1.0).

decrement(Stat, Delta) when is_integer(Delta) ->
    decrement(Stat, Delta, 1.0);
decrement(Stat, Samplerate) when is_float(Samplerate) ->
    decrement(Stat, 1, Samplerate).

decrement(Stat, Delta, Samplerate) when is_integer(Delta), is_float(Samplerate) ->
    send({counter, Stat, -abs(Delta), Samplerate}).
% functions for timing values
% Record a timing value (milliseconds) for Stat, optionally sampled.
timing(Stat, Time) when is_float(Time); is_integer(Time) ->
    timing(Stat, Time, 1.0).
% Bug fix: the original guard parsed as
%   is_float(Time) ; (is_integer(Time) , is_float(Samplerate))
% because `,' binds tighter than `;', so a float Time bypassed the
% Samplerate check entirely. Both numeric Time types now require a
% float Samplerate.
timing(Stat, Time, Samplerate) when (is_float(Time) orelse is_integer(Time)),
                                    is_float(Samplerate) ->
    send({timer, Stat, Time, Samplerate}).
%
% update functions
%
% The list-headed clauses fan a list of stats out into one update each;
% the single-stat clauses format and transmit the statsd datagram.
send({counter, [H | Stats], Delta, Samplerate}) ->
    send({counter, H, Delta, Samplerate}),
    send({counter, Stats, Delta, Samplerate});
send({counter, [], _, _}) -> {ok, "Success."};
send({timer, [H | Stats], Delta, Samplerate}) ->
    send({timer, H, Delta, Samplerate}),
    send({timer, Stats, Delta, Samplerate});
send({timer, [], _, _}) -> {ok, "Success."};
% Sampled updates are dropped locally with probability 1 - Samplerate
% and tagged "@Samplerate" so the server can scale them back up.
% Uses rand:uniform/0: the old 'random' module is deprecated and has
% been removed from modern OTP releases.
send({counter, Stat, Delta, Samplerate}) when Samplerate < 1.0, is_atom(Stat) ->
    maybe_send(Samplerate,
               io_lib:format("~p:~p|c|@~p", [Stat, Delta, Samplerate]));
send({counter, Stat, Delta, _}) ->
    send_udp_message(?HOST, ?PORT, io_lib:format("~p:~p|c", [Stat, Delta]));
send({timer, Stat, Time, Samplerate}) when Samplerate < 1.0 ->
    maybe_send(Samplerate,
               io_lib:format("~p:~p|ms|@~p", [Stat, Time, Samplerate]));
send({timer, Stat, Time, _}) ->
    send_udp_message(?HOST, ?PORT, io_lib:format("~p:~p|ms", [Stat, Time])).

% Send Msg with probability Samplerate; otherwise report success without
% touching the network.
maybe_send(Samplerate, Msg) ->
    case rand:uniform() =< Samplerate of
        true -> send_udp_message(?HOST, ?PORT, Msg);
        false -> {ok, "Success."}
    end.
%
% Raw UDP send message function
%
% Opens an ephemeral UDP socket, sends Msg, and always closes the socket
% again — even when the send fails — so a failed send no longer leaks
% the socket. A send failure still crashes the caller (badmatch), as
% before.
send_udp_message(Host, Port, Msg) when is_integer(Port),
                                       is_list(Host) ->
    {ok, Socket} = gen_udp:open(0, [binary]),
    try
        ok = gen_udp:send(Socket, Host, Port, Msg),
        {ok, "Success."}
    after
        gen_udp:close(Socket)
    end.
%
% unit tests
%
% EUnit headers
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
% test functions go here
% NOTE: these generators perform real UDP sends to ?HOST:?PORT. UDP is
% connectionless, so they pass whether or not a statsd server is
% listening; they exercise the API clauses and return-value contract.
increment_test_() ->
    [?_assert(increment(stat1) =:= {ok, "Success."}),
     ?_assert(increment(stat1, 1) =:= {ok, "Success."}),
     ?_assert(increment(stat1, 1, 0.5) =:= {ok, "Success."}),
     ?_assert(increment([stat1, stat2]) =:= {ok, "Success."}),
     ?_assert(increment([stat1, stat2], 1) =:= {ok, "Success."}),
     ?_assert(increment([stat1, stat2], 1, 0.5) =:= {ok, "Success."})
    ].
% Covers all decrement arities, including the sign-normalization of the
% delta (both 1 and -1 decrement by one).
decrement_test_() ->
    [?_assert(decrement(stat1) =:= {ok, "Success."}),
     ?_assert(decrement(stat1, 1) =:= {ok, "Success."}),
     ?_assert(decrement(stat1, -1) =:= {ok, "Success."}),
     ?_assert(decrement(stat1, 1, 0.5) =:= {ok, "Success."}),
     ?_assert(decrement([stat1, stat2]) =:= {ok, "Success."}),
     ?_assert(decrement([stat1, stat2], 1) =:= {ok, "Success."}),
     ?_assert(decrement([stat1, stat2], -1) =:= {ok, "Success."}),
     ?_assert(decrement([stat1, stat2], 1, 0.5) =:= {ok, "Success."})
    ].
% Covers timing/2 and timing/3 for single stats and stat lists.
timing_test_() ->
    [
     ?_assert(timing(stat1, 100) =:= {ok, "Success."}),
     ?_assert(timing(stat1, 100, 0.5) =:= {ok, "Success."}),
     ?_assert(timing([stat1, stat2], 100) =:= {ok, "Success."}),
     ?_assert(timing([stat1, stat2], 100, 0.5) =:= {ok, "Success."})
    ].
-endif. | examples/statsd.erl | 0.546254 | 0.401512 | statsd.erl | starcoder |
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%
%% Copyright (c) 2021-2022 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(conditions).
-include_lib("eunit/include/eunit.hrl").
-include("include/khepri.hrl").
-include("src/internal.hrl").
%% khepri:get_root/1 is unexported when compiled without `-DTEST'.
-dialyzer(no_missing_calls).
%% -------------------------------------------------------------------
%% Compilation & optimization.
%% -------------------------------------------------------------------
%% khepri_condition:compile/1 should simplify trivial #if_all{} wrappers:
%% an empty conjunction is kept as-is, a single condition is unwrapped,
%% and multi-condition conjunctions are reordered so that an exact name
%% match is tested first (cheapest check first).
optimize_if_all_test() ->
    ?assertEqual(
       #if_all{conditions = []},
       khepri_condition:compile(#if_all{conditions = []})),
    ?assertEqual(
       foo,
       khepri_condition:compile(#if_all{conditions = [foo]})),
    ?assertEqual(
       #if_child_list_version{version = 3},
       khepri_condition:compile(
         #if_all{conditions = [#if_child_list_version{version = 3}]})),
    ?assertEqual(
       #if_all{conditions = [foo,
                             #if_child_list_version{version = 3}]},
       khepri_condition:compile(
         #if_all{conditions = [foo,
                               #if_child_list_version{version = 3}]})),
    %% The exact match on the component name becomes the first tested
    %% condition.
    ?assertEqual(
       #if_all{conditions = [foo,
                             #if_child_list_version{version = 3}]},
       khepri_condition:compile(
         #if_all{conditions = [#if_child_list_version{version = 3},
                               foo]})).
%% Compiling #if_any{}: same pass-through/unwrap behaviour as
%% #if_all{}, but the condition order is preserved — no reordering of
%% the exact-name condition (contrast with optimize_if_all_test/0).
optimize_if_any_test() ->
    ?assertEqual(
       #if_any{conditions = []},
       khepri_condition:compile(#if_any{conditions = []})),
    ?assertEqual(
       foo,
       khepri_condition:compile(#if_any{conditions = [foo]})),
    ?assertEqual(
       #if_child_list_version{version = 3},
       khepri_condition:compile(
         #if_any{conditions = [#if_child_list_version{version = 3}]})),
    ?assertEqual(
       #if_any{conditions = [foo,
                             #if_child_list_version{version = 3}]},
       khepri_condition:compile(
         #if_any{conditions = [foo,
                               #if_child_list_version{version = 3}]})),
    ?assertEqual(
       #if_any{conditions = [#if_child_list_version{version = 3},
                             foo]},
       khepri_condition:compile(
         #if_any{conditions = [#if_child_list_version{version = 3},
                               foo]})).
%% -------------------------------------------------------------------
%% Using conditions.
%% -------------------------------------------------------------------
%% eval_regex/4: a matching string yields true, a non-matching string
%% yields {false, Condition}, and an invalid regex yields the
%% re:compile/1 error — whether a precompiled pattern is passed or not.
%% NOTE(review): the second {false, ?STAR} assertion below is an exact
%% duplicate of the first one.
eval_regex_test() ->
    ?assert(khepri_condition:eval_regex(?STAR, "a", undefined, atom)),
    ?assert(khepri_condition:eval_regex(?STAR, "b", undefined, <<"bin">>)),
    ?assert(khepri_condition:eval_regex(?STAR, "a", re:compile("a"), atom)),
    ?assertEqual(
       {false, ?STAR},
       khepri_condition:eval_regex(?STAR, "b", undefined, atom)),
    ?assertEqual(
       {false, ?STAR},
       khepri_condition:eval_regex(?STAR, "b", undefined, atom)),
    ?assertEqual(
       {false, {?STAR,
                {error,
                 {"missing terminating ] for character class", 3}}}},
       khepri_condition:eval_regex(?STAR, "[a-", undefined, atom)),
    ?assertEqual(
       {false, {?STAR,
                {error,
                 {"missing terminating ] for character class", 3}}}},
       khepri_condition:eval_regex(?STAR, "[a-", re:compile("[a-"), atom)).
%% compare_numerical_values/2: a bare integer behaves like {eq, N};
%% each of eq/ne/lt/le/gt/ge is exercised on both sides of the
%% boundary.
compare_numerical_values_test() ->
    ?assert(khepri_condition:compare_numerical_values(1, 1)),
    ?assertNot(khepri_condition:compare_numerical_values(1, 2)),
    ?assert(khepri_condition:compare_numerical_values(1, {eq, 1})),
    ?assertNot(khepri_condition:compare_numerical_values(1, {eq, 2})),
    ?assert(khepri_condition:compare_numerical_values(1, {ne, 2})),
    ?assertNot(khepri_condition:compare_numerical_values(1, {ne, 1})),
    ?assert(khepri_condition:compare_numerical_values(1, {lt, 2})),
    ?assertNot(khepri_condition:compare_numerical_values(1, {lt, 1})),
    ?assertNot(khepri_condition:compare_numerical_values(2, {lt, 1})),
    ?assert(khepri_condition:compare_numerical_values(1, {le, 2})),
    ?assert(khepri_condition:compare_numerical_values(1, {le, 1})),
    ?assertNot(khepri_condition:compare_numerical_values(2, {le, 1})),
    ?assert(khepri_condition:compare_numerical_values(2, {gt, 1})),
    ?assertNot(khepri_condition:compare_numerical_values(2, {gt, 2})),
    ?assertNot(khepri_condition:compare_numerical_values(2, {gt, 3})),
    ?assert(khepri_condition:compare_numerical_values(2, {ge, 1})),
    ?assert(khepri_condition:compare_numerical_values(2, {ge, 2})),
    ?assertNot(khepri_condition:compare_numerical_values(2, {ge, 3})).
%% A bare atom condition is an exact match on the component name;
%% failure returns {false, Condition}.
exact_name_matching_test() ->
    ?assert(khepri_condition:is_met(foo, foo, #node{})),
    ?assertEqual({false, foo}, khepri_condition:is_met(foo, bar, #node{})).
%% #if_node_exists{}: since is_met/3 is only reached for existing
%% nodes, `exists = true' always holds and `exists = false' never does.
if_node_exists_matching_test() ->
    %% is_met/3 is called when we are sure the node already exists. If the
    %% node doesn't exist, the condition is verified by
    %% can_continue_update_after_node_not_found/1 in khepri_machine.
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_node_exists{exists = true}),
         foo, #node{})),
    ?assertEqual(
       {false, #if_node_exists{exists = false}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_node_exists{exists = false}),
         foo, #node{})).
%% #if_name_matches{}: `any' matches every name, a regex is applied to
%% the component name; a compiled non-matching condition is returned
%% verbatim in the {false, _} tuple.
if_name_matches_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_name_matches{regex = any}),
         foo, #node{})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_name_matches{regex = "o"}),
         foo, #node{})),
    CompiledCond = khepri_condition:compile(#if_name_matches{regex = "a"}),
    ?assertEqual(
       {false, CompiledCond},
       khepri_condition:is_met(CompiledCond, foo, #node{})).
%% #if_path_matches{}: same matching semantics as #if_name_matches{}
%% when applied to a single component (see also
%% applies_to_grandchildren_test/0 for the difference between them).
if_path_matches_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_path_matches{regex = any}),
         foo, #node{})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_path_matches{regex = "o"}),
         foo, #node{})),
    CompiledCond = khepri_condition:compile(#if_path_matches{regex = "a"}),
    ?assertEqual(
       {false, CompiledCond},
       khepri_condition:is_met(CompiledCond, foo, #node{})).
%% #if_has_data{}: a #node{} without payload has no data; one with a
%% #kpayload_data{} has data. Both polarities are checked on both
%% node shapes.
if_has_data_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_has_data{has_data = false}),
         foo, #node{})),
    ?assertEqual(
       {false, #if_has_data{has_data = true}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_has_data{has_data = true}),
         foo, #node{})),
    ?assertEqual(
       {false, #if_has_data{has_data = false}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_has_data{has_data = false}),
         foo, #node{payload = #kpayload_data{data = foo}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_has_data{has_data = true}),
         foo, #node{payload = #kpayload_data{data = foo}})).
%% #if_data_matches{}: the match-spec pattern is applied to the node's
%% data. Both #node{} records and plain maps (with/without a `data'
%% key) are accepted as the third argument; a node without payload
%% never matches, even against the '_' wildcard.
if_data_matches_matching_test() ->
    CompiledCond1 = khepri_condition:compile(
                      #if_data_matches{pattern = '_'}),
    ?assertEqual(
       {false, CompiledCond1},
       khepri_condition:is_met(
         CompiledCond1, foo, #node{})),
    ?assert(
       khepri_condition:is_met(
         CompiledCond1, foo, #node{payload = #kpayload_data{data = {a, b}}})),
    CompiledCond2 = khepri_condition:compile(
                      #if_data_matches{pattern = {a, '_'}}),
    ?assert(
       khepri_condition:is_met(
         CompiledCond2, foo, #{data => {a, b}})),
    ?assertEqual(
       {false, CompiledCond2},
       khepri_condition:is_met(
         CompiledCond2, foo, #{})),
    ?assertEqual(
       {false, CompiledCond2},
       khepri_condition:is_met(
         CompiledCond2, foo, #{data => other})),
    ?assert(
       khepri_condition:is_met(
         CompiledCond2, foo, #node{payload = #kpayload_data{data = {a, b}}})),
    ?assert(
       khepri_condition:is_met(
         CompiledCond2, foo, #node{payload = #kpayload_data{data = {a, c}}})),
    ?assertEqual(
       {false, CompiledCond2},
       khepri_condition:is_met(
         CompiledCond2, foo, #node{payload = #kpayload_data{data = {b, c}}})),
    ?assertEqual(
       {false, CompiledCond2},
       khepri_condition:is_met(
         CompiledCond2, foo, #node{payload = #kpayload_data{data = other}})).
%% #if_payload_version{}: the version is read either from a plain
%% `payload_version' map or from #node.stat, and can be compared
%% exactly or with the {ge, N}-style operators of
%% compare_numerical_values/2.
if_payload_version_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_payload_version{version = 2}),
         foo, #{payload_version => 2})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_payload_version{version = 2}),
         foo, #node{stat = #{payload_version => 2,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_payload_version{version = 2}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_payload_version{version = 2}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_payload_version{version = {ge, 1}}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_payload_version{version = {ge, 2}}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_payload_version{version = {ge, 2}}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})).
%% #if_child_list_version{}: mirror of the payload-version test above,
%% but reading `child_list_version' from the map / #node.stat.
if_child_list_version_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_version{version = 2}),
         foo, #{child_list_version => 2})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_version{version = 2}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 2}})),
    ?assertEqual(
       {false, #if_child_list_version{version = 2}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_version{version = 2}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_version{version = {ge, 1}}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_child_list_version{version = {ge, 2}}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_version{version = {ge, 2}}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})).
%% #if_child_list_length{}: the count is taken from a
%% `child_list_length' map entry or derived from #node.child_nodes,
%% and compared exactly or with comparison operators.
if_child_list_length_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = 2}),
         foo, #{child_list_length => 2})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = 2}),
         foo, #node{child_nodes = #{a => #node{}, b => #node{}}})),
    ?assertEqual(
       {false, #if_child_list_length{count = 3}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = 3}),
         foo, #node{child_nodes = #{a => #node{}, b => #node{}}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = {ge, 1}}),
         foo, #node{child_nodes = #{a => #node{}, b => #node{}}})),
    ?assertEqual(
       {false, #if_child_list_length{count = {eq, 1}}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = {eq, 1}}),
         foo, #node{child_nodes = #{a => #node{}, b => #node{}}})).
%% #if_not{}: negates its inner condition; on failure, the whole
%% (uncompiled) #if_not{} is returned in the {false, _} tuple.
if_not_matching_test() ->
    ?assertEqual(
       {false, #if_not{condition =
                       #if_child_list_length{count = 2}}},
       khepri_condition:is_met(
         khepri_condition:compile(#if_not{condition =
                                          #if_child_list_length{count = 2}}),
         foo, #{child_list_length => 2})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_not{condition =
                   #if_any{conditions = []}}),
         foo, #node{})),
    ?assertEqual(
       {false, #if_not{condition =
                       #if_any{conditions = [foo,
                                             #if_payload_version{version = 1}]}}},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_not{condition =
                   #if_any{conditions = [foo,
                                         #if_payload_version{version = 1}]}}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})).
%% #if_all{}: an empty list is trivially met; otherwise every inner
%% condition must hold, and the FIRST failing condition is the one
%% reported in the {false, _} tuple.
if_all_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_all{conditions = []}),
         foo, #node{})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_all{conditions = [foo,
                                 #if_payload_version{version = 1}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, bar},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_all{conditions = [bar,
                                 #if_payload_version{version = 1}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_payload_version{version = 2}},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_all{conditions = [foo,
                                 #if_payload_version{version = 2}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, bar},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_all{conditions = [bar,
                                 #if_payload_version{version = 2}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})).
%% #if_any{}: an empty list is NEVER met (contrast with #if_all{});
%% otherwise a single holding condition suffices, and on total failure
%% the whole #if_any{} is reported.
if_any_matching_test() ->
    ?assertEqual(
       {false, #if_any{conditions = []}},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions = []}),
         foo, #node{})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions = [foo,
                                 #if_payload_version{version = 1}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions = [bar,
                                 #if_payload_version{version = 1}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions = [foo,
                                 #if_payload_version{version = 2}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_any{conditions = [bar,
                                     #if_payload_version{version = 2}]}},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions = [bar,
                                 #if_payload_version{version = 2}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})).
%% Nested combinators: #if_any{} containing an #if_all{} branch and a
%% version branch — met when either branch holds, failed (reporting the
%% whole tree) when neither does.
complex_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions =
                   [#if_all{conditions =
                            [foo,
                             #if_child_list_length{count = {lt, 10}}]},
                    #if_payload_version{version = 1000}]}),
         foo, #node{stat = #{payload_version => 1,
                             child_list_version => 1}})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions =
                   [#if_all{conditions =
                            [bar,
                             #if_child_list_length{count = {lt, 10}}]},
                    #if_payload_version{version = 1000}]}),
         foo, #node{stat = #{payload_version => 1000,
                             child_list_version => 1}})),
    ?assertEqual(
       {false, #if_any{conditions =
                       [#if_all{conditions =
                                [bar,
                                 #if_child_list_length{count = {lt, 10}}]},
                        #if_payload_version{version = 1}]}},
       khepri_condition:is_met(
         khepri_condition:compile(
           #if_any{conditions =
                   [#if_all{conditions =
                            [bar,
                             #if_child_list_length{count = {lt, 10}}]},
                    #if_payload_version{version = 1}]}),
         foo, #node{stat = #{payload_version => 1000,
                             child_list_version => 1}})).
%% is_met/3 also accepts a path (list of components) instead of a
%% single component name; name-based conditions then apply to the last
%% component of the path.
path_matching_test() ->
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_child_list_length{count = 0}),
         [], #node{})),
    ?assert(
       khepri_condition:is_met(
         khepri_condition:compile(#if_name_matches{regex = "a"}),
         [foo, bar], #node{})),
    ?assertEqual(
       {false, baz},
       khepri_condition:is_met(
         khepri_condition:compile(baz),
         [foo, bar], #node{})).
%% @doc Only #if_path_matches{} applies to grandchildren — directly, or
%% when nested anywhere inside #if_all{}, #if_any{} or #if_not{};
%% every other condition is limited to direct children.
%% Fix: stray dataset metadata appended after the final `)).' has been
%% removed — it made the function (and the module) unparsable.
applies_to_grandchildren_test() ->
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_name_matches{regex = any})),
    ?assert(
       khepri_condition:applies_to_grandchildren(
         #if_path_matches{regex = any})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_data_matches{pattern = '_'})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_node_exists{exists = true})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_payload_version{version = 1})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_child_list_version{version = 1})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_child_list_length{count = 1})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_all{conditions = []})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_all{conditions = [#if_name_matches{regex = any},
                               #if_data_matches{pattern = '_'}]})),
    ?assert(
       khepri_condition:applies_to_grandchildren(
         #if_all{conditions = [#if_path_matches{regex = any},
                               #if_data_matches{pattern = '_'}]})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_any{conditions = []})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_any{conditions = [#if_name_matches{regex = any},
                               #if_data_matches{pattern = '_'}]})),
    ?assert(
       khepri_condition:applies_to_grandchildren(
         #if_any{conditions = [#if_path_matches{regex = any},
                               #if_data_matches{pattern = '_'}]})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_not{condition = #if_child_list_length{count = 1}})),
    ?assertNot(
       khepri_condition:applies_to_grandchildren(
         #if_not{condition =
                 #if_all{conditions = [#if_name_matches{regex = any},
                                       #if_data_matches{pattern = '_'}]}})),
    ?assert(
       khepri_condition:applies_to_grandchildren(
         #if_not{condition =
                 #if_all{conditions = [#if_path_matches{regex = any},
                                       #if_data_matches{pattern = '_'}]}})).
%% [author] : <NAME> <<EMAIL>>
%% [project] : Luhn Algorithm
-module(luhn).
-author("<NAME>").
-import(lists, [len/1, chr/2, str/2, fac/1, persistent_term/2, droplast/1, foldr/3, last/1]).
-export([check_luhn/1, for/2, start/1, log/1, valid/1, create/1]).
-export_type([digit/0, valid_char/0]).
-compile(export_all).
%% Luhn Algorithm, the modulus 10 or mod 10 algorithm,
%% is a simple checksum formula used to validate a variety
%% of identification numbers, such as credit card numbers,
%% IMEI numbers, Canadian Social Insurance Numbers.
%% Tests
% -include_lib("include/proper.hrl").
%% Types
%% @type digit(). A digit is a single decimal digit.
-type digit() :: 0..9.
%% @type valid_char(). A valid char is one of `"0123456789 "'.
-type valid_char() :: 48..57 | 32.
%% @type parity(). Parity is either `even' or `odd'.
-type parity() :: even | odd.
%% Inspection Card Number
%% @doc Print the card number, then log the length of its string
%% representation. This function only reports; the actual Luhn
%% validation is done by valid/1.
%% Fix: the previous version called len/1, which the module's
%% -import(lists, [len/1, ...]) resolved to the NON-EXISTENT
%% lists:len/1 and therefore crashed with `undef' at runtime. The
%% length/1 BIF is used instead, and the pointless anonymous-function
%% wrapper is removed.
check_luhn(CARD_NUMBER) ->
    io:fwrite("Card Number: ~p~n", [CARD_NUMBER]),
    log(length(CARD_NUMBER)).
%% @doc Given a `String', return `true' if it represents a valid number,
%% per the Luhn formula, otherwise `false'.
%% Non-digit characters are skipped by do_luhn/3; `odd' parity is used
%% because the check digit is the last (rightmost) character.
-spec valid(String::string()) -> Valid::boolean().
valid(S) -> 0 =:= rem10(do_luhn(S, odd)).
%% @doc Given a `String', calculate its check digit
%% and return it appended to `String'.
%% `even' parity is used here because appending the check digit shifts
%% every existing digit's position by one; `10 - Sum' (mod 10) is the
%% digit that makes the total divisible by 10.
-spec create(String::string()) -> Result::string().
create(S) -> S ++ [$0 + rem10(10 - do_luhn(S, even))].
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @doc Equivalent to {@link check_digit/2}`(P, '{@link do_luhn/3}`(S, 0, 0))'.
%% (Doc fix: the referenced function is check_digit/2, not /1.)
-spec do_luhn(String::string(), Parity::parity()) -> CheckDigit::digit().
do_luhn(S, P) -> check_digit(P, do_luhn(S, 0, 0)).
%% @doc Given a `String', `OddAcc' and `EvenAcc', return check digits
%% for both `odd' and `even' parity, as a tuple.
%% @see check_digit/1
%% @see do_luhn/2
-spec do_luhn(String, OddAcc, EvenAcc) -> {OddDigit,EvenDigit} when
      String :: string(),
      OddAcc :: non_neg_integer(),
      EvenAcc :: non_neg_integer(),
      OddDigit :: digit(),
      EvenDigit :: digit().
%% For each digit: multiply by the parity weight (1 or 2) and subtract
%% 9 if the product exceeds 9 (Luhn digit-sum reduction). The two
%% accumulators are SWAPPED on every recursive call, so digit parity
%% alternates with position. Non-digit characters are skipped.
do_luhn([C|Cs], O, E) when $0 =< C, C =< $9 ->
    C0 = C - $0,
    F = fun (P) -> (fun (X) when X > 9 -> X - 9; (X) -> X end)(parity(P)*C0) end,
    do_luhn(Cs, E+F(odd), O+F(even));
do_luhn([_|Cs], O, E) -> do_luhn(Cs, O, E);
do_luhn([], O, E) -> {O,E}.
%% @doc Numeric weight of a {@link parity/0. parity}: `odd' digits are
%% multiplied by 1 and `even' digits by 2, per the Luhn formula.
-spec parity(P::parity()) -> K::1 | 2.
parity(Parity) ->
    case Parity of
        odd -> 1;
        even -> 2
    end.
%% @doc Given a `Parity' and an `{OddAcc,EvenAcc}=OddEvenTuple',
%% return the appropriate `CheckDigit'.
%% Selects the tuple element by the parity's numeric index (1 or 2)
%% and reduces it modulo 10.
%% @see do_luhn/3
-spec check_digit(Parity, OddEvenTuple) -> CheckDigit when
      Parity :: parity(),
      OddEvenTuple :: {non_neg_integer(),non_neg_integer()},
      CheckDigit :: digit().
check_digit(P, OE) -> rem10(element(parity(P), OE)).
%% @doc Return `X rem 10' (the last decimal digit of a non-negative X).
-spec rem10(non_neg_integer()) -> digit().
rem10(X) -> X rem 10.
% for_loop(I, N, _) when I - 1 >= 0 ->
% if persistent_term:get(IS_SECOND) = false ->
% io:write("c").
%% @doc Return a list containing `Term' repeated `N' times
%% (e.g. `for(3, a)' yields `[a, a, a]'; `for(0, _)' yields `[]').
%% Fix: delegates to the standard library's lists:duplicate/2 instead
%% of the previous hand-rolled, non-tail-recursive recursion.
for(0, _) ->
    [];
for(N, Term) when is_integer(N), N >= 1 ->
    lists:duplicate(N, Term).
%% @doc Print `MSG' (intended to be the card-number length) to stdout.
log(MSG) ->
    io:fwrite("The Length Of The Card Number: ~p~n", [MSG]).
%% @doc Entry point: print the module banner, then inspect the given
%% card number via check_luhn/1.
start(CARD) ->
    Banner = fun() -> io:fwrite("Luhn Algorithm \n") end,
    Banner(),
    check_luhn(CARD).
%%% Types
%% @doc A PropEr type generator for a list of {@link digit(). digit}s,
%% such that `length(List) >= 3' and `hd(List) > 0'.
%% NOTE(review): `list/1' comes from PropEr, but the
%% `-include_lib' of proper.hrl near the top of this module is
%% commented out, so this call is unresolved — confirm the include.
%% @see pos_digit/0
-spec digits() -> proper_types:raw_type().
digits() -> [pos_digit(),digit(),digit()|list(digit())].
%% @doc A PropEr type generator for a digit `0..9'.
%% NOTE(review): `integer/2' is a PropEr generator; the proper.hrl
%% include is commented out above, so this is unresolved — confirm.
%% @see digits/0
%% @see pos_digit/0
-spec digit() -> proper_types:type().
digit() -> integer(0, 9).
%% @doc A PropEr type generator for a digit `1..9' (non-zero, suitable
%% as a leading digit).
%% NOTE(review): depends on PropEr's `integer/2'; the proper.hrl
%% include is commented out above — confirm.
%% @see digits/0
%% @see digit/0
-spec pos_digit() -> proper_types:type().
pos_digit() -> integer(1, 9).
%%% Helper functions
%% @doc Given a list of {@link digits/0. digits} `L',
%% if `length(L)' is odd, prepend `0', otherwise return `L'.
%% Padding to an even length lets do_sum/2 consume digits pairwise.
-spec maybe_pad([digit(),...]) -> [digit(),...].
maybe_pad(L) when 0 =:= length(L) rem 2 -> L;
maybe_pad(L) -> [0|L].
%% @doc Given a list of {@link digit/0. digits}, return its string
%% representation, e.g. `"2134" = to_string([2,1,3,4])'.
%% (Doc fix: foldr/3 preserves element order, so — contrary to the
%% previous comment — the result is NOT reversed.)
-spec to_string([digit(),...]) -> [valid_char(),...].
to_string(Ds) -> foldr(fun (D, Acc) -> [$0+D|Acc] end, "", Ds).
%% @doc Return `true' iff {@link luhn:valid/1. luhn:valid/1}
%% returns the correct result for `L'.
%% Compares an independent Luhn sum (sum/1 over the padded digit list)
%% against valid/1 applied to the string form of the same digits.
%% @see prop_create/0
-spec valid1([digit(),...]) -> boolean().
valid1(L) -> (0 =:= rem10(sum(maybe_pad(L)))) =:= valid(to_string(L)).
%% @doc Return the sum, per the Luhn formula, of `Digits'.
%% The last digit (the check digit) is added as-is; the remaining
%% digits are folded pairwise by do_sum/2.
-spec sum(Digits::[digit(),...]) -> non_neg_integer().
sum(L) -> last(L) + do_sum(droplast(L), 0).
%% @doc Implementation of {@link sum/1}.
%% Consumes digits two at a time from the left: the first of each pair
%% is doubled (and reduced via norm/1), the second added unchanged.
-spec do_sum([digit()], non_neg_integer()) -> non_neg_integer().
do_sum([A,B|T], Acc) -> do_sum(T, Acc + norm(2*A) + B);
do_sum([A], Acc) -> Acc + norm(2*A);
do_sum([], Acc) -> Acc.
%% @doc Given a non-negative integer `N', subtract nine if `N >= 10',
%% otherwise return `N'. This is the Luhn digit-sum reduction for a
%% doubled digit (a doubled digit is at most 18, so one subtraction
%% suffices).
%% Fix: stray dataset metadata fused onto the final clause removed —
%% it made the function unparsable.
-spec norm(N::non_neg_integer()) -> N1 :: digit().
norm(N) when N >= 10 -> N - 9;
norm(N) -> N.
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2019 ACK CYFRONET AGH
%%% This software is released under the MIT license
%%% cited in 'LICENSE.txt'.
%%% @end
%%%-------------------------------------------------------------------
%%% @doc
%%% This module provides ets cache for effective values.
%%% It is based on bounded_cache mechanism (see bounded_cache.erl
%%% in cluster_worker). Cache is cleaned automatically when defined size
%%% is exceeded (size is checked periodically).
%%% It allows calculation of value recursively (from file/dir to space)
%%% caching final and intermediate results for better performance.
%%%
%%% The module allows recursive calculation of value basing on single
%%% reference of file or all file's references. Three modes are possible:
%%% - calculation only for single reference (no additional options needed),
%%% - calculation using all references where final value is merged
%%% using values calculated for all references - merge_callback has to
%%% be provided for reference values` aggregation
%%% - calculation using all references where final value is calculated using
%%% values of multiple references but can be different for different
%%% references - merge_callback has to be provided for reference values`
%%% aggregation and differentiate_callback has to be provider to calculate
%%% final value of the reference using value calculated using ancestors of
%%% this reference and value merged from all references by merge_callback.
%%% @end
%%%-------------------------------------------------------------------
-module(effective_value).
-author("<NAME>").
-include("modules/datastore/datastore_models.hrl").
-include_lib("ctool/include/logging.hrl").
%% API
-export([init_cache/2, cache_exists/1, invalidate/1]).
-export([init_group/2]).
-export([get/2, cache/3, get_or_calculate/3, get_or_calculate/4]).
-type cache() :: bounded_cache:cache().
-type init_options() :: bounded_cache:cache_options().
-type group() :: bounded_cache:group().
-type group_options() :: bounded_cache:group_options().
% Function that calculates value returns additional information (CalculationInfo) that can be useful for further work
% (e.g., calculating function can include datastore documents getting and these documents can be used later without
% calling datastore). Such returned value is provided to calculate function when processing child in case of
% recursive value calculation (see bounded_cache in cluster_worker repository)).
-type calculation_info() :: bounded_cache:additional_info().
-type args() :: list().
-type critical_section_level() :: no | direct | parent. % parent = use section starting from parent directory
% Type that defines return value of get_or_calculate functions and helper functions (used to shorten specs)
-type get_or_calculate_return_value() :: {ok, bounded_cache:value(), calculation_info()} | {error, term()}.
-type callback() :: bounded_cache:callback().
% Merge callback is used to merge values calculated using different references of file.
% If it is not present, only reference pointing at file doc passed by get_or_calculate function argument is used.
-type merge_callback() :: fun((RefValue :: bounded_cache:value(), ValueAcc :: bounded_cache:value(),
RefCalculationInfo :: calculation_info(), CalculationInfoAcc :: calculation_info()) -> get_or_calculate_return_value()).
% Callback that allows caching of different values for different references of the same file during single call.
% It uses value calculated for reference and value provided by merge_callback to obtain final value for particular
% reference.
-type differentiate_callback() :: fun((RefValue :: bounded_cache:value(), MergedValue :: bounded_cache:value(),
calculation_info()) -> {ok, bounded_cache:value()} | {error, term()}).
-type get_or_calculate_options() :: #{
timestamp => time:millis(),
critical_section_level => critical_section_level(),
initial_calculation_info => calculation_info(), % Represents initial value provided to function
% when processing first item.
args => args(),
use_referenced_key => boolean(), % use referenced key to find/cache value instead of key of file doc
% passed by get_or_calculate function argument
merge_callback => merge_callback(), % note: use `referenced_key = true` for more optimal caching
% if differentiate_callback is not used
differentiate_callback => differentiate_callback(), % note: do not use together with `referenced_key = true`
force_execution_on_referenced_key => boolean() % force execution of callback on inode even if reference of original file
% is deleted
}.
-export_type([args/0, calculation_info/0]).
-export_type([cache/0, callback/0, get_or_calculate_options/0]).
-define(CRITICAL_SECTION(Cache, Key), {effective_value_insert, Cache, Key}).
%%%===================================================================
%%% API
%%%===================================================================
%% @doc Initializes a named effective-value cache; delegates to
%% bounded_cache (cluster_worker), which owns the backing ets table
%% and periodic size-based cleaning.
-spec init_cache(cache(), init_options()) -> ok | {error, term()}.
init_cache(Cache, CacheOptions) ->
    bounded_cache:init_cache(Cache, CacheOptions).
%% @doc Initializes a bounded_cache group (caches sharing check/clean
%% settings); thin delegation to bounded_cache:init_group/2.
-spec init_group(group(), group_options()) -> ok | {error, term()}.
init_group(Group, Options) ->
    bounded_cache:init_group(Group, Options).
%% @doc Returns true if the named cache has been initialized.
-spec cache_exists(cache()) -> boolean().
cache_exists(Cache) ->
    bounded_cache:cache_exists(Cache).
%% @doc Drops all values cached under the given cache name.
-spec invalidate(cache()) -> ok.
invalidate(Cache) ->
    bounded_cache:invalidate(Cache).
%% @doc Cache lookup only — no calculation on miss (contrast with
%% get_or_calculate/3,4).
-spec get(cache(), term()) -> {ok, term()} | {error, not_found}.
get(Cache, Key) ->
    bounded_cache:get(Cache, Key).
%% @doc Stores Value under Key, stamped with the current
%% bounded_cache timestamp.
-spec cache(cache(), term(), term()) -> ok.
cache(Cache, Key, Value) ->
    bounded_cache:cache(Cache, Key, Value, bounded_cache:get_timestamp()).
%% @doc Same as get_or_calculate/4 with default (empty) options.
-spec get_or_calculate(cache(), file_meta:doc(), callback()) -> get_or_calculate_return_value().
get_or_calculate(Cache, FileDoc, CalculateCallback) ->
    get_or_calculate(Cache, FileDoc, CalculateCallback, #{}).
%%--------------------------------------------------------------------
%% @doc
%% Gets value from cache. If it is not found - uses callback to calculate it.
%% Calculated value is cached. Besides calculated value function returns additional information (CalculationInfo)
%% that is generated by calculate function and can be useful for further work
%% (e.g., calculating function can include datastore documents getting - see bounded_cache.erl in cluster_worker).
%% Calculate function processes single argument that is list [Doc, ParentValue, CalculationInfo | Args] where Doc is
%% file/directory file_meta document while ParentValue and CalculationInfo are results of calling this function on
%% parent. Function is called recursively starting from space document. ParentValue and CalculationInfo are set to
%% undefined and InitialCalculationInfo for space document (it has no parent).
%% Note: it is possible to calculate value using all references of file - see main doc.
%% @end
%%--------------------------------------------------------------------
-spec get_or_calculate(cache(), file_meta:doc(), callback(), get_or_calculate_options()) ->
    get_or_calculate_return_value().
%% Normalizes options (key selection, timestamp) and dispatches on the
%% requested critical-section level. See the @doc above for semantics.
get_or_calculate(Cache, #document{key = DocKey} = FileDoc, CalculateCallback, Options) ->
    % use_referenced_key option should be used only for file for which function is called, set false for ancestors
    {Key, Options2} = case maps:get(use_referenced_key, Options, false) of
        true -> {fslogic_uuid:ensure_referenced_uuid(DocKey), Options#{use_referenced_key => false}};
        false -> {DocKey, Options}
    end,
    % Set timestamp if it is undefined as effective_value cannot base on timestamps managed by bounded_cache
    % (calculation of effective value may include several calls to bounded_cache and all calls have to use
    % timestamp previous to first execution of CalculateCallback function)
    Options3 = case maps:get(timestamp, Options2, undefined) of
        undefined -> Options2#{timestamp => bounded_cache:get_timestamp()};
        _ -> Options2
    end,
    case maps:get(critical_section_level, Options3, no) of
        direct ->
            get_or_calculate_in_critical_section(Cache, Key, FileDoc, CalculateCallback, Options3);
        no ->
            get_or_calculate_internal(Cache, Key, FileDoc, CalculateCallback, Options3);
        % Use critical section for parent directory (changed option will be used in recursive call)
        parent ->
            get_or_calculate_internal(Cache, Key, FileDoc, CalculateCallback, Options3#{critical_section_level => direct})
    end.
%%%===================================================================
%%% get_or_calculate - internal functions
%%%===================================================================
-spec get_or_calculate_in_critical_section(cache(), file_meta:uuid(), file_meta:doc(),
    callback(), get_or_calculate_options()) -> get_or_calculate_return_value().
%% Fast path: try the cache without taking the section. On a miss,
%% serialize per {Cache, Key}; get_or_calculate_internal/5 re-checks
%% the cache inside the section, so concurrent callers calculate once.
get_or_calculate_in_critical_section(Cache, Key, FileDoc, CalculateCallback, Options) ->
    case bounded_cache:get(Cache, Key) of
        {ok, Value} ->
            {ok, Value, maps:get(initial_calculation_info, Options, undefined)};
        {error, not_found} ->
            critical_section:run(?CRITICAL_SECTION(Cache, Key), fun() ->
                get_or_calculate_internal(Cache, Key, FileDoc, CalculateCallback, Options)
            end)
    end.
-spec get_or_calculate_internal(cache(), file_meta:uuid(), file_meta:doc(), callback(),
    get_or_calculate_options()) -> get_or_calculate_return_value().
%% On cache miss, dispatches on the kind of Key:
%% - regular node, single reference -> recurse through the parent;
%% - regular file with merge_callback -> multi-reference calculation;
%% - global root -> error (recursion should stop at the space dir);
%% - space dir -> bootstrap the recursion with ParentValue = undefined.
get_or_calculate_internal(Cache, Key, FileDoc, CalculateCallback, Options) ->
    % Note - argument Key and field key if FileDoc can differ - see use_referenced_key option
    case bounded_cache:get(Cache, Key) of
        {ok, Value} ->
            {ok, Value, maps:get(initial_calculation_info, Options, undefined)};
        {error, not_found} ->
            MergeCallback = maps:get(merge_callback, Options, undefined),
            % only reg files and hardlinks can have multiple references
            ShouldProcessMultipleRefs = (MergeCallback =/= undefined)
                andalso (file_meta:get_effective_type(FileDoc) =:= ?REGULAR_FILE_TYPE),
            case {fslogic_uuid:is_space_dir_uuid(Key), fslogic_uuid:is_root_dir_uuid(Key), ShouldProcessMultipleRefs} of
                {false, false, false} ->
                    get_or_calculate_single_reference(Cache, Key, FileDoc, CalculateCallback, Options);
                {false, false, true} ->
                    get_or_calculate_multiple_references(Cache, Key, FileDoc, CalculateCallback, Options);
                {false, true, _} ->
                    ?critical("Incorrect usage of effective_value cache ~p. "
                    "Calculation has reached the global root directory.", [Cache]),
                    {error, root_dir_reached};
                {true, _, _} ->
                    % root of space - init calculation with parent value undefined
                    Args = maps:get(args, Options, []),
                    Timestamp = maps:get(timestamp, Options),
                    InitialCalculationInfo = maps:get(initial_calculation_info, Options, undefined),
                    bounded_cache:calculate_and_cache(Cache, Key, CalculateCallback,
                        [FileDoc, undefined, InitialCalculationInfo | Args], Timestamp)
            end
    end.
-spec get_or_calculate_single_reference(cache(), file_meta:uuid(), file_meta:doc(),
    callback(), get_or_calculate_options()) -> get_or_calculate_return_value().
%% Recursively obtains the parent's effective value, then feeds it
%% (with the accumulated CalculationInfo and extra args) into the
%% callback for this node, caching the result under Key.
get_or_calculate_single_reference(Cache, Key, FileDoc, CalculateCallback, Options) ->
    case calculate_for_parent(Cache, Key, FileDoc, CalculateCallback, Options) of
        {ok, ParentValue, CalculationInfo} ->
            Args = maps:get(args, Options, []),
            Timestamp = maps:get(timestamp, Options),
            bounded_cache:calculate_and_cache(Cache, Key, CalculateCallback,
                [FileDoc, ParentValue, CalculationInfo | Args], Timestamp);
        {error, _} = Error ->
            Error
    end.
%% @doc Calculate the value for a file that may have multiple references
%% (hardlinks): one value is calculated per reference document, the values are
%% merged via merge_callback, optionally recalculated once more on the
%% referenced (inode) key, and finally differentiated and cached per reference.
-spec get_or_calculate_multiple_references(cache(), file_meta:uuid(), file_meta:doc(),
    callback(), get_or_calculate_options()) -> get_or_calculate_return_value().
get_or_calculate_multiple_references(Cache, Key, #document{key = DocKey} = FileDoc, CalculateCallback, Options) ->
    MergeCallback = maps:get(merge_callback, Options),
    References = get_references(FileDoc),
    % One calculation per reference document (FileDoc's own reference first).
    ReferencesValues = lists:map(fun(ReferenceDoc) ->
        calculate_for_reference(Cache, ReferenceDoc, CalculateCallback, Options)
    end, References),
    case merge_references_values(ReferencesValues, undefined, MergeCallback) of
        {ok, MergedValue, MergeCalculationInfo} = OkAns ->
            {ok, CalculatedValue, CalculationInfo} = case maps:get(force_execution_on_referenced_key, Options, false) of
                true ->
                    INodeKey = fslogic_uuid:ensure_referenced_uuid(Key),
                    % NOTE(review): References holds #document{} records, so only
                    % DocKey can ever match INodeKey in this membership test -
                    % confirm whether reference keys were intended instead.
                    case lists:member(INodeKey, [DocKey | References]) of
                        true ->
                            OkAns;
                        false ->
                            force_execution_on_referenced_key(
                                INodeKey, CalculateCallback, MergeCallback, MergeCalculationInfo, MergedValue, Options)
                    end;
                false ->
                    OkAns
            end,
            differentiate_and_cache_references(Cache, Key, CalculatedValue, CalculationInfo,
                References, ReferencesValues, Options);
        Error ->
            Error
    end.
%%%===================================================================
%%% Helper functions
%%%===================================================================

%% @private
%% Resolve the effective value of the parent of Key/FileDoc by recursing via
%% get_or_calculate/4. A parent document that cannot be fetched (even when
%% including deleted documents) is reported as {error, {file_meta_missing, ParentUuid}}.
-spec calculate_for_parent(cache(), file_meta:uuid(), file_meta:doc(), callback(),
    get_or_calculate_options()) -> get_or_calculate_return_value().
calculate_for_parent(Cache, Key, FileDoc, CalculateCallback, Options) ->
    {ok, ParentUuid} = get_parent(Key, FileDoc),
    case file_meta:get_including_deleted(ParentUuid) of
        {ok, ParentDoc} -> get_or_calculate(Cache, ParentDoc, CalculateCallback, Options);
        _ -> {error, {file_meta_missing, ParentUuid}}
    end.
%%--------------------------------------------------------------------
%% @doc
%% Calculate the value using a single reference document.
%% NOTE: this function always calls CalculateCallback as there is a high
%% probability that the value is not cached. Cache invalidation always deletes
%% all cached values, and a value is always calculated for one or all
%% references. As a cache read for the reference passed in the argument has
%% already been attempted, the value can only be cached when get_or_calculate
%% is being executed by multiple processes in parallel (and the value has not
%% been cached before). Thus, the probability of finding the value in the
%% cache is very low, so the cache is not checked to avoid the extra cost of
%% a call to bounded_cache.
%% @end
%%--------------------------------------------------------------------
-spec calculate_for_reference(cache(), file_meta:doc(), callback(), get_or_calculate_options()) ->
    get_or_calculate_return_value().
calculate_for_reference(Cache, #document{key = Key} = FileDoc, CalculateCallback, Options) ->
    case calculate_for_parent(Cache, Key, FileDoc, CalculateCallback, Options) of
        {ok, ParentValue, ParentCalculationInfo} ->
            Args = maps:get(args, Options, []),
            % Calculate directly (not via the cache) - see NOTE above.
            CalculateCallback([FileDoc, ParentValue, ParentCalculationInfo | Args]);
        {error, _} = Error ->
            Error
    end.
-spec merge_references_values([get_or_calculate_return_value()], undefined | get_or_calculate_return_value(),
merge_callback()) -> get_or_calculate_return_value().
%% Fold the per-reference results into a single answer. The first value seeds
%% the accumulator; any {error, _} (whether already accumulated or freshly
%% encountered) short-circuits the merge.
merge_references_values([], Acc, _MergeCallback) ->
    % All references processed - the accumulator is the answer.
    Acc;
merge_references_values([First | Rest], undefined, MergeCallback) ->
    % No accumulator yet - seed it with the first value.
    merge_references_values(Rest, First, MergeCallback);
merge_references_values(_Remaining, {error, _} = MergeError, _MergeCallback) ->
    MergeError;
merge_references_values([{error, _} = RefError | _], _Acc, _MergeCallback) ->
    RefError;
merge_references_values([{ok, RefValue, RefInfo} | Rest], {ok, AccValue, AccInfo}, MergeCallback) ->
    NewAcc = MergeCallback(RefValue, AccValue, RefInfo, AccInfo),
    merge_references_values(Rest, NewAcc, MergeCallback).
%% @private
%% When a differentiate_callback is configured, map the merged value back to a
%% per-reference value and cache every reference separately; otherwise cache
%% the merged value under Key only.
-spec differentiate_and_cache_references(cache(), file_meta:uuid(), bounded_cache:value(),
    calculation_info(), [file_meta:doc()], [get_or_calculate_return_value()], get_or_calculate_options()) ->
    get_or_calculate_return_value().
differentiate_and_cache_references(Cache, _Key, MergedValue, CalculationInfo,
    References, ReferencesValues, #{timestamp := Timestamp, differentiate_callback := DifferentiateCallback}) ->
    % Apply callback for all references
    FoldlAns = lists:foldl(fun
        ({ok, ReferenceValue, _}, {ok, Acc}) ->
            case DifferentiateCallback(ReferenceValue, MergedValue, CalculationInfo) of
                {ok, NewValue} -> {ok, [NewValue | Acc]};
                Other -> Other
            end;
        (_, Error) ->
            % A reference value was an error, or a previous callback call
            % failed - short-circuit with that error.
            Error
    end, {ok, []}, ReferencesValues),
    % Cache reference if all callback calls succeeded
    case FoldlAns of
        {ok, ReversedMappedReferencesValues} ->
            % Head of list is value calculated for reference for which get_or_calculate function has been called
            [ReturnValue | _] = MappedReferencesValues = lists:reverse(ReversedMappedReferencesValues),
            lists:foreach(fun({#document{key = CacheKey}, ValueToCache}) ->
                bounded_cache:cache(Cache, CacheKey, ValueToCache, Timestamp)
            end, lists:zip(References, MappedReferencesValues)),
            {ok, ReturnValue, CalculationInfo};
        FoldlError ->
            FoldlError
    end;
differentiate_and_cache_references(Cache, Key, MergedValue, CalculationInfo,
    _References, _ReferencesValues, #{timestamp := Timestamp}) ->
    % No differentiate_callback configured - cache the merged value under Key.
    bounded_cache:cache(Cache, Key, MergedValue, Timestamp),
    {ok, MergedValue, CalculationInfo}.
%% @private
%% Run the calculation once more directly on the referenced (inode) key with
%% an undefined parent value, and merge the result into the accumulated value.
-spec force_execution_on_referenced_key(file_meta:uuid(), callback(), merge_callback(),
    calculation_info(), bounded_cache:value(), get_or_calculate_options()) -> get_or_calculate_return_value().
force_execution_on_referenced_key(INodeKey, CalculateCallback, MergeCallback, CalculationInfo, Acc, Options) ->
    case file_meta:get_including_deleted(INodeKey) of
        {ok, FileDoc} ->
            Args = maps:get(args, Options, []),
            InitialCalculationInfo = maps:get(initial_calculation_info, Options, undefined),
            % Parent value is undefined here - the inode key run starts fresh.
            case CalculateCallback([FileDoc, undefined, InitialCalculationInfo | Args]) of
                {ok, Value, NewCalculationInfo} -> MergeCallback(Value, Acc, NewCalculationInfo, CalculationInfo);
                {error, _} = Error -> Error
            end;
        _ ->
            {error, {file_meta_missing, INodeKey}}
    end.
%%--------------------------------------------------------------------
%% @doc
%% Returns the parent uuid for Key. When Key matches the key inside FileDoc,
%% FileDoc itself is used, saving one datastore get operation; otherwise the
%% document for Key is fetched inside get_parent_uuid (see the
%% use_referenced_key option for why the keys may differ).
%% @end
%%--------------------------------------------------------------------
-spec get_parent(file_meta:uuid(), file_meta:doc()) -> {ok, file_meta:uuid()} | {error, term()}.
get_parent(Key, #document{key = DocKey} = FileDoc) ->
    case DocKey of
        Key -> file_meta:get_parent_uuid(FileDoc);
        _ -> file_meta:get_parent_uuid(Key)
    end.
%%--------------------------------------------------------------------
%% @doc
%% Returns #file_meta{} documents for all references.
%% NOTE: Head of the list is always the document passed as this function's
%% argument (it is equal to the document passed to get_or_calculate).
%% References whose file_meta document has disappeared in the meantime are
%% silently skipped.
%% Fix: removed dataset-extraction residue fused onto the final line, which
%% made the file unparsable.
%% @end
%%--------------------------------------------------------------------
-spec get_references(file_meta:doc()) -> [file_meta:doc()].
get_references(#document{key = DocKey} = FileDoc) ->
    %% @TODO VFS-7555 Use Doc for listing references after it is allowed
    {ok, References} = case fslogic_uuid:ensure_referenced_uuid(DocKey) of
        DocKey -> file_meta_hardlinks:list_references(FileDoc);
        ReferencedUuid -> file_meta_hardlinks:list_references(ReferencedUuid)
    end,
    % DocKey is excluded from the fetch below - FileDoc is already known and
    % is prepended as the head of the result.
    [FileDoc | lists:filtermap(fun(Uuid) ->
        case file_meta:get(Uuid) of
            {ok, Doc} -> {true, Doc};
            {error, not_found} -> false
        end
    end, References -- [DocKey])].
%% @doc Functions for working with ranges in lists.
%%
%% ETS is fast only as a key-value store.
%% But some data files contains ranges: From..To.
%% The fastest way is using lists for storing this values.
%%
%% There are two types of these lists:
%% * with booleans: `[{1,3}, 6, {8,9}]'. For example, `is_compat';
%% * with values: `[{{1,3}, value1}, {{4,12}, value2}]'.
%%
%% `in_list' function is for the first type.
%% `search' function is for the second type.
%%
%% @end
-module(ux_opt_ranges).
-export([in_list/1, search/2]).
%% @doc Build a membership-test fun for a boolean ranges list
%% (e.g. `[{1,3}, 6, {8,9}]').
%% The input is sorted by range start and bucketed into a 651-slot tuple
%% keyed by index/1; the returned fun tests membership of a codepoint.
%% Fixes: the sorted list was computed but discarded (the unsorted input was
%% bucketed instead), and unused variables triggered compiler warnings.
in_list([_ | _] = V) ->
    SortedV = in_list_sort(V),
    % One (initially empty) bucket per index slot.
    Buckets = erlang:list_to_tuple(lists:duplicate(651, [])),
    do_in_list(SortedV, Buckets).
%% @doc Build a lookup fun for a valued ranges list
%% (`[{{From,To}, Value} | {Point, Value}]'); the returned fun yields the
%% matching value, or the default Def when nothing matches.
%% Fixes: the original computed a sort and then discarded it. The sort is
%% done inline here (keeping the full {Range, Value} entries intact) so this
%% function does not depend on search_sort/1.
search(Def, V) ->
    StartOf = fun({{From, _To}, _Val}) -> From;
                 ({From, _Val}) -> From
              end,
    SortedV = [Entry || {_, Entry} <- lists:sort([{StartOf(E), E} || E <- V])],
    Buckets = erlang:list_to_tuple(lists:duplicate(651, [])),
    do_search(Def, SortedV, Buckets).
%% @private
%% Bucket every range ({From, To}) or single codepoint into the index slot(s)
%% it spans; once the input is exhausted, reverse each bucket (restoring
%% insertion order) and return a fun testing membership of a codepoint.
do_in_list([{H1,H2}=V|T], R) ->
    I1 = index(H1),
    I2 = index(H2),
    % A range is stored in every bucket between its endpoints' slots.
    R2 = fill_elem(I1, I2, V, R),
    do_in_list(T, R2);
do_in_list([H1|T], R) ->
    % A single codepoint occupies exactly one bucket.
    R1 = set_elem(H1, H1, R),
    do_in_list(T, R1);
do_in_list([], R) ->
    L = erlang:tuple_to_list(R),
    ML = lists:map(fun lists:reverse/1, L),
    MR = erlang:list_to_tuple(ML),
    fun(X) ->
        % Only the small bucket for X's slot needs to be scanned.
        I = index(X),
        MiniList = erlang:element(I, MR),
        ux_ranges:in_list(MiniList, X)
    end.
%% @private
%% Like do_in_list/2, but for {Range, Value} entries. Entries whose value
%% equals the default Def are skipped (the lookup fun returns Def anyway);
%% the rest are bucketed. The returned fun yields the matched value or Def.
do_search(Def, [{_,Def}|T], R) ->
    % Value equals the default - no need to store it.
    do_search(Def, T, R);
do_search(Def, [{{H1,H2},_P}=V|T], R) ->
    I1 = index(H1),
    I2 = index(H2),
    R2 = fill_elem(I1, I2, V, R),
    do_search(Def, T, R2);
do_search(Def, [{H1,_P}=V|T], R) ->
    R1 = set_elem(H1, V, R),
    do_search(Def, T, R1);
do_search(Def, [], R) ->
    L = erlang:tuple_to_list(R),
    ML = lists:map(fun lists:reverse/1, L),
    MR = erlang:list_to_tuple(ML),
    fun(X) ->
        I = index(X),
        MiniList = erlang:element(I, MR),
        case ux_ranges:search(MiniList, X) of
            false -> Def;
            P -> P
        end
    end.
%% @private
%% Prepend V to the bucket that codepoint H maps to (via index/1).
set_elem(H, V, R)
    when is_tuple(R) ->
    I = index(H),
    E = erlang:element(I, R),
    erlang:setelement(I, R, [V|E]).
%% @private
%% Prepend Value to the I-th bucket of the buckets tuple.
set_elem_i(I, Value, Buckets) when is_tuple(Buckets) ->
    Bucket = erlang:element(I, Buckets),
    erlang:setelement(I, Buckets, [Value | Bucket]).
%% @private
%% Store V in every bucket from index I1 to I2, inclusive.
%% NOTE(review): there is no clause for I1 > I2, so callers must pass an
%% ordered index pair - confirm inputs are always ordered.
fill_elem(I, I, V, R) ->
    set_elem_i(I, V, R);
fill_elem(I1, I2, V, R) when I1<I2 ->
    NewR = set_elem_i(I1, V, R),
    NewI1 = I1 + 1,
    fill_elem(NewI1, I2, V, NewR).
%% @private
%% Map a codepoint to its bucket index: one bucket per 100 codepoints, with
%% everything above 65000 collapsed into the final slot (651).
index(Codepoint) ->
    erlang:min(Codepoint div 100 + 1, 651).
%% @private
%% Sort a boolean ranges list by range start, keeping each original entry
%% ({From, To} tuple or bare codepoint) intact.
in_list_sort(Ranges) ->
    StartOf = fun({From, _To}) -> From;
                 (From) -> From
              end,
    [R || {_, R} <- lists:sort([{StartOf(R), R} || R <- Ranges])].
%% @private
%% Sort a valued ranges list (`[{{From,To}, Value} | {Point, Value}]') by
%% range start while keeping each full {Range, Value} entry intact.
%% Fixes: the original returned {From, From} for point entries, silently
%% dropping their Value; it also had dataset-extraction residue fused onto
%% the final line.
search_sort(V) ->
    StartOf = fun({{From, _To}, _Val}) -> From;
                 ({From, _Val}) -> From
              end,
    Tagged = [{StartOf(Entry), Entry} || Entry <- V],
    [Entry || {_, Entry} <- lists:sort(Tagged)].
-module(danilagamma).
% Source: https://erlangforums.com/t/advent-of-code-2021-day-5/738/2
% Author of the code: <NAME>
% Comments by me (<NAME>)
-compile(export_all).
% Comments for main/1:
% If the file name (File) is "input_from_description.txt"
% First it reads the file and puts a long binary it into RawData.
% The it splits that binary into several lines (breaking on newline \n)
% removing whitespace before and after each string (trim):
% 8> binary:split(RawData, <<"\n">>, [global, trim]).
% [<<"0,9 -> 5,9">>,<<"8,0 -> 0,8">>,<<"9,4 -> 3,4">>,
% <<"2,2 -> 2,1">>,<<"7,0 -> 7,4">>,<<"6,4 -> 2,0">>,
% <<"0,9 -> 2,9">>,<<"3,4 -> 1,4">>,<<"0,0 -> 8,8">>,
% <<"5,5 -> 8,2">>]
%
% Them for each Line such as <<"0,9 -> 5,9">>
%
% 9> Line = <<"0,9 -> 5,9">>.
% <<"0,9 -> 5,9">>
%
% First it splits it into two parts separated by " -> "
% 10> [From, To] = binary:split(Line, <<" -> ">>).
% [<<"0,9">>,<<"5,9">>]
% Then, from the resulting list with two binaries (From and To),
% the code gets the binary to the left and the binary to the right of the comma:
% 11> [X1b, Y1b] = binary:split(From, <<",">>).
% [<<"0">>,<<"9">>]
% 12> [X2b, Y2b] = binary:split(To, <<",">>).
% [<<"5">>,<<"9">>]
%
% Finally, it converts each binary to an integer:
% 13> {{binary_to_integer(X1b), binary_to_integer(Y1b)},
% 13> {binary_to_integer(X2b), binary_to_integer(Y2b)}}.
% {{0,9},{5,9}}
%
% And the result is a tuple with two tuples, each tuple containing two integers.
%
% The whole process:
%
% Eshell V12.1 (abort with ^G)
% 1> {ok, RawData} = file:read_file("input_from_description.txt").
% {ok,<<"0,9 -> 5,9\n8,0 -> 0,8\n9,4 -> 3,4\n2,2 -> 2,1\n7,0 -> 7,4\n6,4 -> 2,0\n0,9 -> 2,9\n3,4 -> 1,4\n0,0 -> 8,8\n5,5 -> 8,"...>>}
% 2> Data = [ begin
% 2> [From, To] = binary:split(Line, <<" -> ">>),
% 2> [X1b, Y1b] = binary:split(From, <<",">>),
% 2> [X2b, Y2b] = binary:split(To, <<",">>),
% 2> {{binary_to_integer(X1b), binary_to_integer(Y1b)},
% 2> {binary_to_integer(X2b), binary_to_integer(Y2b)}}
% 2> end || Line <- binary:split(RawData, <<"\n">>, [global, trim]) ].
% [{{0,9},{5,9}},
% {{8,0},{0,8}},
% {{9,4},{3,4}},
% {{2,2},{2,1}},
% {{7,0},{7,4}},
% {{6,4},{2,0}},
% {{0,9},{2,9}},
% {{3,4},{1,4}},
% {{0,0},{8,8}},
% {{5,5},{8,2}}]
% Beautiful!
% I learned that you can use begin and end on list comprehensions.
%% Read the input file, parse every "X1,Y1 -> X2,Y2" line into a pair of
%% points, and print the answers for both puzzle parts.
main(File) ->
    {ok, RawData} = file:read_file(File),
    ParseLine =
        fun(Line) ->
            [From, To] = binary:split(Line, <<" -> ">>),
            [X1, Y1] = [binary_to_integer(B) || B <- binary:split(From, <<",">>)],
            [X2, Y2] = [binary_to_integer(B) || B <- binary:split(To, <<",">>)],
            {{X1, Y1}, {X2, Y2}}
        end,
    Data = lists:map(ParseLine, binary:split(RawData, <<"\n">>, [global, trim])),
    io:format("part 1: ~p~n", [solve1(Data)]),
    io:format("part 2: ~p~n", [solve2(Data)]),
    ok.
% To understand the argument to count_overlap/1,
% I had to try this in erl:
% 30> G = fun(Z)->[Z*2] end.
% #Fun<erl_eval.44.65746770>
% 31> [ X || L1 <- [1,2,3], X <- G(L1)].
% [2,4,6]
% So, for each item (two points) in Data, the list comprehension replaces it
% with a list containing the points of the straight line
% between those two points.
%
%% Part 1: expand every segment into its covered points (horizontal and
%% vertical only) and count points covered by at least two segments.
solve1(Data) ->
    count_overlap(lists:flatmap(fun to_straight_line/1, Data)).
% Suppose Data is
% Data =
% [{{0,9},{5,9}},
% {{8,0},{0,8}},
% {{9,4},{3,4}},
% {{2,2},{2,1}},
% {{7,0},{7,4}},
% {{6,4},{2,0}},
% {{0,9},{2,9}},
% {{3,4},{1,4}},
% {{0,0},{8,8}},
% {{5,5},{8,2}}].
% Then:
% Arg = [ L || Coords <- Data, L <- to_straight_line(Coords) ].
% is
% 7> Arg = [ L || Coords <- Data, L <- danilagamma:to_straight_line(Coords) ].
% [{0,9},
% {1,9},
% {2,9},
% ...
% {1,4}]
% What does count_overlap/1 do?
% > danilagamma:count_overlap(Arg).
% 5
%
% The lists:foldl counts how many times each pair (point) occurs in the list of pairs.
% It starts with an empty map #{}.
% 9> lists:foldl(fun danilagamma:update_counter/2, #{}, Arg).
% #{{0,9} => 2,
% {1,4} => 1,
% {1,9} => 2,
% {2,1} => 1,
% ...
% {8,4} => 1,
% {9,4} => 1}
%
% The second line gets only the values of the map
% and filters those whose are equal or superior to 2.
% I believe an alternative implementation of
% [ X || X <- maps:values(Map), X >= 2 ]
% would be
% lists:filter(fun(X) -> X>=2 end, maps:values(Map)).
% Then it gets the length of that list, that is,
% the number of points that appear twice or more in the original list.
%% Count how many distinct points occur two or more times in Points.
count_overlap(Points) ->
    Counts = lists:foldl(fun update_counter/2, #{}, Points),
    length(lists:filter(fun(N) -> N >= 2 end, maps:values(Counts))).
%% Increment the tally for Coord in the accumulator map: absent keys start
%% at 1, present keys are bumped by one.
update_counter(Coord, Acc) ->
    Acc#{Coord => maps:get(Coord, Acc, 0) + 1}.
% This one is quite easy to understand and the implementation is good:
% 33> danilagamma:to_straight_line({{1,1}, {1,8}}).
% [{1,1},{1,2},{1,3},{1,4},{1,5},{1,6},{1,7},{1,8}]
% I just refactored it to use pattern matching.
%% Expand a horizontal or vertical segment into the points it covers;
%% anything else (e.g. a diagonal) yields [].
to_straight_line({{X, StartY}, {X, EndY}}) ->
    [{X, Y} || Y <- from_to(StartY, EndY)];
to_straight_line({{StartX, Y}, {EndX, Y}}) ->
    [{X, Y} || X <- from_to(StartX, EndX)];
to_straight_line(_) ->
    [].
%% Inclusive integer sequence from A to B, stepping -1 when descending
%% (a single-element list when A equals B).
from_to(A, B) when A =< B ->
    lists:seq(A, B);
from_to(A, B) ->
    lists:seq(A, B, -1).
% Wow! Very good!
% It just added
% ++ to_diagonal_line(Coords)
% and then it was done!
%% Part 2: also expand perfect diagonals before counting overlaps.
solve2(Data) ->
    count_overlap(lists:flatmap(
        fun(Coords) -> to_straight_line(Coords) ++ to_diagonal_line(Coords) end,
        Data)).
% to_diagonal_line does quite a simple job!
% 2> danilagamma:to_diagonal_line({{1,1},{3,3}}).
% [{1,1},{2,2},{3,3}]
% 3> danilagamma:to_diagonal_line({{3,3},{1,1}}).
% [{3,3},{2,2},{1,1}]
% 4> danilagamma:to_diagonal_line({{1,3},{3,1}}).
% [{1,3},{2,2},{3,1}]
% 5> danilagamma:to_diagonal_line({{3,1},{1,3}}).
% [{3,1},{2,2},{1,3}]
% 6> danilagamma:to_diagonal_line({{3,1},{1,4}}).
% []
% But the way lists:zip/2 was used with from_to/2 was great!
%% Expand a 45-degree diagonal segment into the points it covers; any segment
%% that is not a perfect diagonal yields [] (straight lines are handled by
%% to_straight_line/1).
%% Fix: removed dataset-extraction residue fused onto the final line.
to_diagonal_line({{X1, Y1}, {X2, Y2}}) when abs(X1 - X2) =:= abs(Y1 - Y2) ->
    lists:zip(from_to(X1, X2), from_to(Y1, Y2));
to_diagonal_line(_) ->
    [].
%% @doc Simplifies working with and perfoming math on vectors.
%%
%% @copyright 2012 <NAME>
%% Licensed under the MIT license; see the LICENSE file for details.
-module(vector).
% -------------------------------------------------------------------------
% external api
-export([vec_to_list/1, to_vec/1, dot/2, cross/2, multiply/2, divide/2, squared_norm/1, norm/1, length/1]).
-export([unit/1, hpr_to/1, add/2, add/3, subtract/2, is_zero/1]).
-export_type([vec/0]).
% A 3D vector {X, Y, Z}
% Fix: the third component was annotated `X' (a duplicate) instead of `Z',
% contradicting the comment above and the 3D intent of the module.
-type vec() :: {
    X :: float(),
    Y :: float(),
    Z :: float()
}.
% -------------------------------------------------------------------------
-define(NORMALIZED_TOLERANCE, 0.0000001).
%% --------------------------------------------------------------------------------------------------------------------
%% NIF module
%% --------------------------------------------------------------------------------------------------------------------

% Don't enable this unless testing, or until _all_ functions are implemented in C++ too.
% (loading the NIF module actually replaces this module, so we lose the Erlang implementations if we load the C++ ones)
%-on_load(init/0).

%% @doc Try to load the vector NIF from a list of candidate paths.
%% Returns ok on the first successful load; otherwise prints a warning and
%% returns {error, {load_failed, Reason}} so the Erlang fallback stays in use.
%% Fix: removed an unused bound variable (`Error') that triggered a compiler
%% warning, along with dead commented-out debug statements.
init() ->
    NifPaths = [
        "vector",
        "./vector",
        "./ebin/vector",
        "../ebin/vector"
    ],
    TryLoad = fun(NifPath) ->
        case erlang:load_nif(NifPath, 0) of
            {error, {load_failed, _}} ->
                % Loading from this path failed; try the next candidate.
                false;
            ok ->
                true
        end
    end,
    case lists:any(TryLoad, NifPaths) of
        true ->
            ok;
        false ->
            io:format("Couldn't load NIF from any of the defined locations! Falling back to Erlang implementation.~n"),
            {error, {load_failed, "Couldn't load NIF from any of the defined locations!"}}
    end.
%% ------------------------------------------------------------------------
%% External API
%% ------------------------------------------------------------------------
%% @doc Convert a 3D vector tuple to a [X, Y, Z] list.
vec_to_list({_, _, _} = Vec) ->
    erlang:tuple_to_list(Vec).
%% @doc Convert a 3-element list, or an existing vector tuple, to a vector.
to_vec(Vec) when tuple_size(Vec) =:= 3 ->
    Vec;
to_vec([X, Y, Z]) ->
    {X, Y, Z}.
% -------------------------------------------------------------------------
%% @doc Dot (scalar) product of two 3D vectors.
dot({Ax, Ay, Az}, {Bx, By, Bz}) ->
    Ax * Bx + Ay * By + Az * Bz.
% -------------------------------------------------------------------------
%% @doc Cross (vector) product of two 3D vectors.
cross({Ax, Ay, Az}, {Bx, By, Bz}) ->
    {
        Ay * Bz - Az * By,
        Az * Bx - Ax * Bz,
        Ax * By - Ay * Bx
    }.
% -------------------------------------------------------------------------
%% @doc Scale a vector componentwise by a numeric factor.
multiply(Factor, {X, Y, Z}) when is_number(Factor) ->
    erlang:list_to_tuple([Factor * C || C <- [X, Y, Z]]).
%% @doc Scale a vector componentwise by 1/Factor; a zero factor yields an
%% error tuple instead of crashing.
%% NOTE: a guard (not a pattern) is used for the zero check because both 0
%% and 0.0 compare equal under ==, but would not both match a pattern.
divide({_, _, _}, Zero) when Zero == 0 ->
    {error, division_by_zero};
divide({X, Y, Z}, Factor) when is_number(Factor) ->
    erlang:list_to_tuple([C / Factor || C <- [X, Y, Z]]).
% -------------------------------------------------------------------------
%% @doc Squared length of the vector. Useful for optimizations where the
%% sqrt call of norm/1 can be avoided. Always returns a float.
squared_norm({X, Y, Z}) ->
    lists:sum([math:pow(C, 2) || C <- [X, Y, Z]]).
%% @doc Returns the length (Euclidean norm) of the vector.
norm(Vec) ->
    math:sqrt(squared_norm(Vec)).

%% @doc Alias for norm/1: returns the length of the vector.
%% NOTE: this clashes with the auto-imported BIF length/1, so list lengths
%% inside this module must be taken with an explicit erlang:length/1.
length(Vec) ->
    norm(Vec).
% -------------------------------------------------------------------------

% @doc Returns a unit vector in the same direction as Vec.
% Vectors whose squared length is already within ?NORMALIZED_TOLERANCE of 1,
% or too close to zero to normalize safely, are returned unchanged.
unit({_, _, _} = Vec) ->
    VLS = squared_norm(Vec),
    unit(VLS, Vec).

%% @doc hidden
%% Near-zero or already-normalized squared length: return Vec as-is.
unit(VLS, {_, _, _} = Vec) when VLS < ?NORMALIZED_TOLERANCE; abs(VLS - 1) < ?NORMALIZED_TOLERANCE ->
    Vec;
%% @doc hidden
%% General case: divide by the actual length (sqrt of the squared norm).
unit(VLS, {_, _, _} = Vec) ->
    divide(Vec, math:sqrt(VLS)).
%% @doc Gets the Yaw and Pitch (in degrees) required to point in the
%% direction of the given vector; roll is always 0.
hpr_to({X, Y, Z}) ->
    % Normalize first so the trigonometry works on a unit direction.
    {X1, Y1, Z1} = unit({X, Y, Z}),
    Yaw = -math:atan2(X1, Y1),
    Pitch = math:atan2(Z1, math:sqrt(math:pow(X1, 2) + math:pow(Y1, 2))),
    {rad2deg(Yaw), rad2deg(Pitch), 0}.
% -------------------------------------------------------------------------
%% @doc Componentwise sum of two vectors.
add({Ax, Ay, Az}, {Bx, By, Bz}) ->
    {Ax + Bx, Ay + By, Az + Bz}.

%% @doc Componentwise sum of three vectors (left-associated).
add(First, Second, Third) ->
    add(add(First, Second), Third).

%% @doc Componentwise difference: First minus Second.
subtract({Ax, Ay, Az}, {Bx, By, Bz}) ->
    {Ax - Bx, Ay - By, Az - Bz}.
% -------------------------------------------------------------------------
%% @doc Checks whether every component of the vector equals zero.
%% Numeric coercion (==) is used deliberately so both 0 and 0.0 count.
is_zero({X, Y, Z}) ->
    X == 0 andalso Y == 0 andalso Z == 0.
%% ------------------------------------------------------------------------
%% Internal API
%% ------------------------------------------------------------------------

%%% @doc Convert radians to degrees.
%%% Fix: removed dataset-extraction residue fused onto the final line.
rad2deg(Radians) ->
    Radians * (180 / math:pi()).
%% @doc Mandatory background read on language tags: [1].
%%
%% Some quotes from [1]:
%%
%% The golden rule when creating language tags is to keep the tag as short as
%% possible. Avoid region, script or other subtags except where they add useful
%% distinguishing information. For instance, use 'ja' for Japanese and not
%% 'ja-JP', unless there is a particular reason that you need to say that this is
%% Japanese as spoken in Japan, rather than elsewhere.
%%
%% The entries in the registry follow certain conventions with regard to upper
%% and lower letter-casing. For example, language tags are lower case, alphabetic
%% region subtags are upper case, and script tags begin with an initial capital.
%% This is only a convention!
%%
%% Note that we use lower case subtags in subtag identifiers and URLs.
%%
%% Language+extlang combinations are provided to accommodate legacy language tag
%% forms, however, there is a single language subtag available for every
%% language+extlang combination. That language subtag should be used rather than
%% the language+extlang combination, where possible. For example, use 'yue'
%% rather than 'zh-yue' for Cantonese, and 'afb' rather than 'ar-afb' for Gulf
%% Arabic, if you can.
%% Language identifiers can have the following forms:
%% - language;
%% - language-extlang;
%% - language-region;
%% - language-script;
%% It is discouraged to use language-script-region, but it is possible if
%% required.
%% For a list of language, region and script codes, see [2].
%% [1] http://www.w3.org/International/articles/language-tags/
%% [2] http://www.iana.org/assignments/language-subtag-registry/language-subtag-registry
-module(z_language).
-export([
default_language/1,
is_valid/1,
to_language_atom/1,
fallback_language/2,
english_name/1,
is_rtl/1,
properties/1,
sort_properties/2,
all_languages/0,
main_languages/0
]).
-include("zotonic.hrl").
-define(DEFAULT_LANGUAGE, en).
%% @doc Returns the configured default language for this server; if not set
%% (or no context is available), 'en' (English).
%% Fix: the -spec omitted `undefined', although the first clause explicitly
%% handles it.
-spec default_language(#context{} | undefined) -> atom().
default_language(undefined) -> ?DEFAULT_LANGUAGE;
default_language(Context) ->
    z_convert:to_atom(m_config:get_value(i18n, language, ?DEFAULT_LANGUAGE, Context)).
%% @doc Check whether Code occurs in the list of known languages.
-spec is_valid(Code::binary() | any()) -> boolean().
is_valid(Code) when is_binary(Code) ->
    case proplists:get_value(Code, all_languages()) of
        undefined -> false;
        _Props -> true
    end;
is_valid(Code) ->
    % Non-binary codes are normalized to binary first.
    is_valid(z_convert:to_binary(Code)).
%% @doc Translate a language code to an atom; unknown codes yield
%% {error, not_a_language}.
-spec to_language_atom(Code:: list() | binary()) -> {ok, atom()} | {error, not_a_language}.
to_language_atom(Code) when is_binary(Code) ->
    case is_valid(Code) of
        true -> {ok, z_convert:to_atom(Code)};
        false -> {error, not_a_language}
    end;
to_language_atom(Code) ->
    to_language_atom(z_convert:to_binary(Code)).
%% @doc Return the fallback language (the base language) for a language code;
%% when no fallback is found - or the code already is a base language -
%% the configured default language is returned.
-spec fallback_language(Code::binary() | any() | undefined, #context{}) -> atom().
fallback_language(undefined, Context) ->
    default_language(Context);
fallback_language(Code, Context) when is_binary(Code) ->
    case proplists:get_value(Code, all_languages()) of
        undefined ->
            default_language(Context);
        LanguageData ->
            BaseCode = proplists:get_value(language, LanguageData),
            case BaseCode of
                Code ->
                    % Code already is its own base language: no fallback.
                    default_language(Context);
                _ ->
                    binary_to_atom(BaseCode, latin1)
            end
    end;
fallback_language(Code, Context) ->
    fallback_language(z_convert:to_binary(Code), Context).
%% @doc Returns the English language name, or undefined for unknown codes.
-spec english_name(Code::atom()) -> binary() | undefined.
english_name(Code) ->
    get_property(Code, name_en).
%% @doc Check whether the given language is written right-to-left.
-spec is_rtl(Code::binary() | any()) -> boolean().
is_rtl(Code) ->
    <<"RTL">> =:= get_property(Code, direction).
%% @doc Returns the property list of a language looked up in *all* languages.
%% Proplist key: language code - the ISO 639-1 code (or otherwise ISO 639-3),
%% possibly combined with a lowercase region or script extension.
%% Properties:
%% - name: native language name.
%% - name_en: English language name.
%% - language: base language; functions as fallback language if a translation
%%   is not available for the sub-language.
%% - region (only for region variations): Alpha-2 code of country/region
%%   (ISO 3166-2).
%% - script (only for script variations): 4-letter script code (ISO 15924);
%%   if omitted: Latn.
%% - direction: LTR (if omitted) or RTL.
-spec properties(Code::binary() | any()) -> list().
properties(Code) when is_binary(Code) ->
    properties(Code, proplists:get_value(Code, all_languages()));
properties(Code) ->
    properties(z_convert:to_binary(Code)).
%% @private
%% Build the normalized property list for one language entry; sublanguages
%% are normalized recursively and sorted on their English name.
-spec properties(Code::binary() | any(), list()) -> list().
properties(Code, Data) when is_binary(Code) ->
    Subs = [{SubCode, properties(SubCode, SubData)}
            || {SubCode, SubData} <- proplists:get_value(sublanguages, Data, [])],
    [{K, proplists:get_value(K, Data)}
     || K <- [language, direction, name, name_en, region, script]]
    ++ [{sublanguages, sort_properties(Subs, name_en)}];
properties(Code, Data) ->
    properties(z_convert:to_binary(Code), Data).
%% @doc Sorts a [{Code, Props}] list on the lowercased value of SortKey.
-spec sort_properties(List :: list(), SortKey :: atom()) -> list().
sort_properties(List, SortKey) ->
    KeyOf = fun({_Code, Props}) ->
        z_string:to_lower(proplists:get_value(SortKey, Props))
    end,
    lists:sort(fun(A, B) -> KeyOf(A) =< KeyOf(B) end, List).
%% @doc List of language data.
%% Returns a flattened list of property lists; sub-languages are added to the
%% list of main languages. For each language a property list is returned -
%% see properties/1.
%% The computed list is memoized in mochiglobal under this module's name, so
%% only the first caller pays the cost of building it.
-spec all_languages() -> list().
all_languages() ->
    case mochiglobal:get(?MODULE) of
        undefined ->
            Languages = all_languages1(languages()),
            ok = mochiglobal:put(?MODULE, Languages),
            Languages;
        Languages ->
            Languages
    end.
%% @private
%% Flatten the nested language tree: each entry is normalized with
%% properties/2 and its sublanguages (when present) are flattened into the
%% same level recursively.
%% NOTE(review): the foldl prepends, so the result is in reverse order of the
%% languages() source list, and the `Acc ++ ...' append makes this quadratic
%% in the worst case - confirm callers do not rely on source order before
%% restructuring.
all_languages1(List) ->
    lists:foldl(fun({Code, Data}, Acc) ->
        Language1 = {Code, properties(Code, Data)},
        case proplists:get_value(sublanguages, Data) of
            undefined ->
                [Language1|Acc];
            SubLanguages ->
                [Language1|Acc ++ all_languages1(SubLanguages)]
        end
    end, [], List).
%% @doc Flat list of language data of the main (top-level) languages, without
%% sublanguage expansion.
-spec main_languages() -> list().
main_languages() ->
    % Reversed to preserve the accumulation order of the original fold.
    lists:reverse([{Code, properties(Code, Data)} || {Code, Data} <- languages()]).
%% @private
%% Gets a property (by Key) for Code from the flattened list of *all*
%% languages; undefined when the code is unknown.
-spec get_property(Code::binary() | any(), Key:: atom()) -> binary() | undefined.
get_property(Code, Key) ->
    get_property_from_list(Code, Key, all_languages()).
%% @private
%% Gets a property (by Key) for Code from the given [{Code, Props}] list;
%% returns undefined when the code is not present.
-spec get_property_from_list(Code::binary() | any(), Key:: atom(), List:: list()) -> binary() | list() | undefined.
get_property_from_list(Code, Key, List) when is_binary(Code) ->
    case proplists:get_value(Code, List) of
        undefined -> undefined;
        Props -> proplists:get_value(Key, Props)
    end;
get_property_from_list(Code, Key, List) ->
    get_property_from_list(z_convert:to_binary(Code), Key, List).
%% @private
-spec languages() -> list().
languages() -> [
{<<"aa">>, [
{language, <<"aa">>},
{name, <<"Qafaraf"/utf8>>},
{name_en, <<"Afar"/utf8>>}
]},
{<<"ab">>, [
{language, <<"ab">>},
{script, <<"Cyrl">>},
{region, <<"GE">>},
{name, <<"Аҧсуа бызшәа"/utf8>>},
{name_en, <<"Abkhazian"/utf8>>}
]},
{<<"af">>, [
{language, <<"af">>},
{name, <<"Afrikaans"/utf8>>},
{name_en, <<"Afrikaans"/utf8>>}
]},
{<<"am">>, [
{language, <<"am">>},
{script, <<"Ethi">>},
{region, <<"ET">>},
{name, <<"አማርኛ"/utf8>>},
{name_en, <<"Amharic"/utf8>>}
]},
{<<"ar">>, [
{type, <<"macro_language">>},
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"العربية"/utf8>>},
{name_en, <<"Arabic"/utf8>>},
{sublanguages, [
{<<"arb">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"اللغة العربية الفصحى"/utf8>>},
{name_en, <<"Standard Arabic"/utf8>>}
]},
{<<"afb">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"العربية - الخليج"/utf8>>},
{name_en, <<"Arabic - Gulf"/utf8>>}
]},
{<<"ajp">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"العربية - جنوب بلاد الشام"/utf8>>},
{name_en, <<"Arabic - South Levant"/utf8>>}
]},
{<<"apc">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"العربية - شمال بلاد الشام"/utf8>>},
{name_en, <<"Arabic - North Levant"/utf8>>}
]},
{<<"apd">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{script, <<"Arab">>},
{name, <<"العربية - السودان"/utf8>>},
{name_en, <<"Arabic - Sudan"/utf8>>}
]},
{<<"ar-ae">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"AE">>},
{script, <<"Arab">>},
{name, <<"العربية - الإمارات العربية المتحدة."/utf8>>},
{name_en, <<"Arabic - U.A.E."/utf8>>}
]},
{<<"ar-bh">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"BH">>},
{script, <<"Arab">>},
{name, <<"العربية - البحرين"/utf8>>},
{name_en, <<"Arabic - Bahrain"/utf8>>}
]},
{<<"aao">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"DZ">>},
{script, <<"Arab">>},
{name, <<"العربية الصحراء الجزائرية"/utf8>>},
{name_en, <<"Arabic - Algerian Sahara"/utf8>>}
]},
{<<"ary">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"DZ">>},
{script, <<"Arab">>},
{name, <<"العربية - المغرب"/utf8>>},
{name_en, <<"Arabic - Marocco"/utf8>>}
]},
{<<"arz">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"EG">>},
{script, <<"Arab">>},
{name, <<"مصر"/utf8>>},
{name_en, <<"Arabic - Egypt"/utf8>>}
]},
{<<"ar-iq">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"IQ">>},
{script, <<"Arab">>},
{name, <<"العربية - مصر"/utf8>>},
{name_en, <<"Arabic - Iraq"/utf8>>}
]},
{<<"ar-jo">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"JO">>},
{script, <<"Arab">>},
{name, <<"العربية - الأردن"/utf8>>},
{name_en, <<"Arabic - Jordan"/utf8>>}
]},
{<<"ar-kw">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"KW">>},
{script, <<"Arab">>},
{name, <<"العربية - الكويت"/utf8>>},
{name_en, <<"Arabic - Kuwait"/utf8>>}
]},
{<<"ar-lb">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"LB">>},
{script, <<"Arab">>},
{name, <<"العربية - لبنان"/utf8>>},
{name_en, <<"Arabic - Lebanon"/utf8>>}
]},
{<<"ayl">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"LY">>},
{script, <<"Arab">>},
{name, <<"العربية - ليبيا"/utf8>>},
{name_en, <<"Arabic - Libya"/utf8>>}
]},
{<<"ar-ma">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"MA">>},
{script, <<"Arab">>},
{name, <<"العربية - المغرب"/utf8>>},
{name_en, <<"Arabic - Morocco"/utf8>>}
]},
{<<"acx">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"OM">>},
{script, <<"Arab">>},
{name, <<"العربية - عمان"/utf8>>},
{name_en, <<"Arabic - Oman"/utf8>>}
]},
{<<"ar-qa">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"QA">>},
{script, <<"Arab">>},
{name, <<"العربية - قطر"/utf8>>},
{name_en, <<"Arabic - Qatar"/utf8>>}
]},
{<<"ar-sa">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"SA">>},
{script, <<"Arab">>},
{name, <<"العربية - المملكة العربية السعودية"/utf8>>},
{name_en, <<"Arabic - Saudi Arabia"/utf8>>}
]},
{<<"ar-sy">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"SY">>},
{script, <<"Arab">>},
{name, <<"العربية - سوريا"/utf8>>},
{name_en, <<"Arabic - Syria"/utf8>>}
]},
{<<"aeb">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"TN">>},
{script, <<"Arab">>},
{name, <<"العربية - تونس"/utf8>>},
{name_en, <<"Arabic - Tunisia"/utf8>>}
]},
{<<"ar-ye">>, [
{language, <<"ar">>},
{direction, <<"RTL">>},
{region, <<"YE">>},
{script, <<"Arab">>},
{name, <<"العربية - اليمن"/utf8>>},
{name_en, <<"Arabic - Yemen"/utf8>>}
]}
]}
]},
{<<"as">>, [
{language, <<"as">>},
{script, <<"Beng">>},
{name, <<"অসমীয়া"/utf8>>},
{name_en, <<"Assamese"/utf8>>}
]},
{<<"ay">>, [
{language, <<"ay">>},
{name, <<"Aymar aru"/utf8>>},
{name_en, <<"Aymara"/utf8>>}
]},
{<<"az">>, [
{type, <<"macrolanguage">>},
{language, <<"az">>},
{name, <<"azərbaycan dili"/utf8>>},
{name_en, <<"Azerbaijani"/utf8>>}
]},
{<<"ba">>, [
{language, <<"ba">>},
{script, <<"Cyrl">>},
{region, <<"RU">>},
{name, <<"<NAME>"/utf8>>},
{name_en, <<"Bashkir"/utf8>>}
]},
{<<"be">>, [
{language, <<"be">>},
{script, <<"Cyrl">>},
{region, <<"BY">>},
{name, <<"беларуская"/utf8>>},
{name_en, <<"Byelorussian"/utf8>>}
]},
{<<"bg">>, [
{language, <<"bg">>},
{script, <<"Cyrl">>},
{region, <<"BG">>},
{name, <<"български"/utf8>>},
{name_en, <<"Bulgarian"/utf8>>}
]},
%% Omitting "bh", which is "Bihari languages", a collection
{<<"bi">>, [
{language, <<"bi">>},
{region, <<"VU">>},
{name, <<"Bislama"/utf8>>},
{name_en, <<"Bislama"/utf8>>}
]},
{<<"bn">>, [
{language, <<"bn">>},
{script, <<"Beng">>},
{name, <<"বাংলা"/utf8>>},
{name_en, <<"Bengali"/utf8>>}
]},
{<<"bo">>, [
{language, <<"bo">>},
{script, <<"Tibt">>},
{name, <<"བོད་སྐད"/utf8>>},
{name_en, <<"Tibetan"/utf8>>}
]},
{<<"br">>, [
{language, <<"br">>},
{name, <<"brezhoneg"/utf8>>},
{name_en, <<"Breton"/utf8>>}
]},
{<<"bs">>, [
{language, <<"bs">>},
{region, <<"BA">>},
{name, <<"bosanski"/utf8>>},
{name_en, <<"Bosnian"/utf8>>}
]},
{<<"ca">>, [
{language, <<"ca">>},
{region, <<"AD">>},
{name, <<"català"/utf8>>},
{name_en, <<"Catalan"/utf8>>}
]},
{<<"ce">>, [
{language, <<"ce">>},
{script, <<"Cyrl">>},
{region, <<"RU">>},
{name, <<"нохчийн"/utf8>>},
{name_en, <<"Chechen"/utf8>>}
]},
{<<"ch">>, [
{language, <<"ch">>},
{region, <<"GU">>},
{name, <<"<NAME>oru"/utf8>>},
{name_en, <<"Chamorro"/utf8>>}
]},
{<<"co">>, [
{language, <<"co">>},
{name, <<"Corsu"/utf8>>},
{name_en, <<"Corsican"/utf8>>}
]},
{<<"cs">>, [
{language, <<"cs">>},
{region, <<"CZ">>},
{name, <<"čeština"/utf8>>},
{name_en, <<"Czech"/utf8>>}
]},
%% Omitting Church Slavic
{<<"cv">>, [
{language, <<"cv">>},
{region, <<"RU">>},
{script, <<"Cyrl">>},
{name, <<"Чӑвашла"/utf8>>},
{name_en, <<"Chuvash"/utf8>>}
]},
{<<"cy">>, [
{language, <<"cy">>},
{region, <<"GB">>},
{name, <<"Cymraeg"/utf8>>},
{name_en, <<"Welsh"/utf8>>}
]},
{<<"de">>, [
{language, <<"de">>},
{name, <<"Deutsch"/utf8>>},
{name_en, <<"German"/utf8>>},
{sublanguages, [
{<<"de-at">>, [
{language, <<"de">>},
{region, <<"AT">>},
{name, <<"Deutsch - Österreich"/utf8>>},
{name_en, <<"German - Austria"/utf8>>}
]},
{<<"de-ch">>, [
{language, <<"de">>},
{region, <<"CH">>},
{name, <<"Deutsch - Schweiz"/utf8>>},
{name_en, <<"German - Switzerland"/utf8>>}
]},
{<<"de-de">>, [
{language, <<"de">>},
{region, <<"DE">>},
{name, <<"Deutsch - Deutschland"/utf8>>},
{name_en, <<"German - Germany"/utf8>>}
]},
{<<"de-li">>, [
{language, <<"de">>},
{region, <<"LI">>},
{name, <<"Deutsch - Liechtenstein"/utf8>>},
{name_en, <<"German - Liechtenstein"/utf8>>}
]},
{<<"de-lu">>, [
{language, <<"de">>},
{region, <<"LU">>},
{name, <<"Deutsch - Luxemburg"/utf8>>},
{name_en, <<"German - Luxembourg"/utf8>>}
]}
]}
]},
{<<"da">>, [
{language, <<"da">>},
{name, <<"dansk"/utf8>>},
{name_en, <<"Danish"/utf8>>}
]},
{<<"dz">>, [
{language, <<"dz">>},
{script, <<"Tibt">>},
{region, <<"BT">>},
{name, <<"རྫོང་ཁ་"/utf8>>},
{name_en, <<"Dzongkha"/utf8>>}
]},
{<<"el">>, [
{language, <<"el">>},
{script, <<"Grek">>},
{name, <<"Ελληνικά"/utf8>>},
{name_en, <<"Greek"/utf8>>}
]},
{<<"en">>, [
{language, <<"en">>},
{name, <<"English"/utf8>>},
{name_en, <<"English"/utf8>>},
{sublanguages, [
{<<"en-au">>, [
{language, <<"en">>},
{region, <<"AU">>},
{name, <<"English - Australia"/utf8>>},
{name_en, <<"English - Australia"/utf8>>}
]},
{<<"en-bz">>, [
{language, <<"en">>},
{region, <<"BZ">>},
{name, <<"English - Belize"/utf8>>},
{name_en, <<"English - Belize"/utf8>>}
]},
{<<"en-ca">>, [
{language, <<"en">>},
{region, <<"CA">>},
{name, <<"English - Canada"/utf8>>},
{name_en, <<"English - Canada"/utf8>>}
]},
{<<"en-cb">>, [
{language, <<"en">>},
{region, <<"CB">>},
{name, <<"English - Caribbean"/utf8>>},
{name_en, <<"English - Caribbean"/utf8>>}
]},
{<<"en-gb">>, [
{language, <<"en">>},
{region, <<"GB">>},
{name, <<"English - United Kingdom"/utf8>>},
{name_en, <<"English - United Kingdom"/utf8>>}
]},
{<<"en-ie">>, [
{language, <<"en">>},
{region, <<"IE">>},
{name, <<"English - Ireland"/utf8>>},
{name_en, <<"English - Ireland"/utf8>>}
]},
{<<"en-jm">>, [
{language, <<"en">>},
{region, <<"JM">>},
{name, <<"English - Jamaica"/utf8>>},
{name_en, <<"English - Jamaica"/utf8>>}
]},
{<<"en-nz">>, [
{language, <<"en">>},
{region, <<"NZ">>},
{name, <<"English - New Zealand"/utf8>>},
{name_en, <<"English - New Zealand"/utf8>>}
]},
{<<"en-ph">>, [
{language, <<"en">>},
{region, <<"PH">>},
{name, <<"English - Republic of the Philippines"/utf8>>},
{name_en, <<"English - Republic of the Philippines"/utf8>>}
]},
{<<"en-tt">>, [
{language, <<"en">>},
{region, <<"TT">>},
{name, <<"English - Trinidad and Tobago"/utf8>>},
{name_en, <<"English - Trinidad and Tobago"/utf8>>}
]},
{<<"en-us">>, [
{language, <<"en">>},
{region, <<"US">>},
{name, <<"English - United States"/utf8>>},
{name_en, <<"English - United States"/utf8>>}
]},
{<<"en-za">>, [
{language, <<"en">>},
{region, <<"ZA">>},
{name, <<"English - South Africa"/utf8>>},
{name_en, <<"English - South Africa"/utf8>>}
]},
{<<"en-zw">>, [
{language, <<"en">>},
{region, <<"ZW">>},
{name, <<"English - Zimbabwe"/utf8>>},
{name_en, <<"English - Zimbabwe"/utf8>>}
]}
]}
]},
{<<"eo">>, [
{language, <<"eo">>},
{name, <<"Esperanto"/utf8>>},
{name_en, <<"Esperanto"/utf8>>}
]},
{<<"es">>, [
{language, <<"es">>},
{name, <<"español"/utf8>>},
{name_en, <<"Spanish"/utf8>>},
{sublanguages, [
{<<"es-419">>, [
{language, <<"es">>},
{region, <<"419">>},
{name, <<"español latinoamericano"/utf8>>},
{name_en, <<"Spanish - Latin America and the Caribbean"/utf8>>}
]},
{<<"es-ar">>, [
{language, <<"es">>},
{region, <<"AR">>},
{name, <<"español - Argentina"/utf8>>},
{name_en, <<"Spanish - Argentina"/utf8>>}
]},
{<<"es-bo">>, [
{language, <<"es">>},
{region, <<"BO">>},
{name, <<"español - Bolivia"/utf8>>},
{name_en, <<"Spanish - Bolivia"/utf8>>}
]},
{<<"es-cl">>, [
{language, <<"es">>},
{region, <<"CL">>},
{name, <<"español - Chile"/utf8>>},
{name_en, <<"Spanish - Chile"/utf8>>}
]},
{<<"es-co">>, [
{language, <<"es">>},
{region, <<"CO">>},
{name, <<"español - Colombia"/utf8>>},
{name_en, <<"Spanish - Colombia"/utf8>>}
]},
{<<"es-cr">>, [
{language, <<"es">>},
{region, <<"CR">>},
{name, <<"español - Costa Rica"/utf8>>},
{name_en, <<"Spanish - Costa Rica"/utf8>>}
]},
{<<"es-do">>, [
{language, <<"es">>},
{region, <<"DO">>},
{name, <<"español - República Dominicana"/utf8>>},
{name_en, <<"Spanish - Dominican Republic"/utf8>>}
]},
{<<"es-ec">>, [
{language, <<"es">>},
{region, <<"EC">>},
{name, <<"español - Ecuador"/utf8>>},
{name_en, <<"Spanish - Ecuador"/utf8>>}
]},
{<<"es-es">>, [
{language, <<"es">>},
{region, <<"ES">>},
{name, <<"español - España"/utf8>>},
{name_en, <<"Spanish - Spain"/utf8>>}
]},
{<<"es-gt">>, [
{language, <<"es">>},
{region, <<"GT">>},
{name, <<"español - Guatemala"/utf8>>},
{name_en, <<"Spanish - Guatemala"/utf8>>}
]},
{<<"es-hn">>, [
{language, <<"es">>},
{region, <<"HN">>},
{name, <<"español - Honduras"/utf8>>},
{name_en, <<"Spanish - Honduras"/utf8>>}
]},
{<<"es-mx">>, [
{language, <<"es">>},
{region, <<"MX">>},
{name, <<"español - México"/utf8>>},
{name_en, <<"Spanish - Mexico"/utf8>>}
]},
{<<"es-ni">>, [
{language, <<"es">>},
{region, <<"NI">>},
{name, <<"español - Nicaragua"/utf8>>},
{name_en, <<"Spanish - Nicaragua"/utf8>>}
]},
{<<"es-pa">>, [
{language, <<"es">>},
{region, <<"PA">>},
{name, <<"español - Panamá"/utf8>>},
{name_en, <<"Spanish - Panama"/utf8>>}
]},
{<<"es-pe">>, [
{language, <<"es">>},
{region, <<"PE">>},
{name, <<"español - Perú"/utf8>>},
{name_en, <<"Spanish - Peru"/utf8>>}
]},
{<<"es-pr">>, [
{language, <<"es">>},
{region, <<"PR">>},
{name, <<"español - Puerto Rico"/utf8>>},
{name_en, <<"Spanish - Puerto Rico"/utf8>>}
]},
{<<"es-py">>, [
{language, <<"es">>},
{region, <<"PY">>},
{name, <<"español - Paraguay"/utf8>>},
{name_en, <<"Spanish - Paraguay"/utf8>>}
]},
{<<"es-sv">>, [
{language, <<"es">>},
{region, <<"SV">>},
{name, <<"español - El Salvador"/utf8>>},
{name_en, <<"Spanish - El Salvador"/utf8>>}
]},
{<<"es-uy">>, [
{language, <<"es">>},
{region, <<"UY">>},
{name, <<"español - Uruguay"/utf8>>},
{name_en, <<"Spanish - Uruguay"/utf8>>}
]},
{<<"es-ve">>, [
{language, <<"es">>},
{region, <<"VE">>},
{name, <<"español - Venezuela"/utf8>>},
{name_en, <<"Spanish - Venezuela"/utf8>>}
]}
]}
]},
{<<"et">>, [
{language, <<"et">>},
{region, <<"EE">>},
{name, <<"eesti"/utf8>>},
{name_en, <<"Estonian"/utf8>>}
]},
{<<"eu">>, [
{language, <<"eu">>},
{region, <<"ES">>},
{name, <<"Euskara"/utf8>>},
{name_en, <<"Basque"/utf8>>}
]},
{<<"fa">>, [
{language, <<"fa">>},
{script, <<"Arab">>},
{name, <<"فارسی"/utf8>>},
{name_en, <<"Persian"/utf8>>}
]},
{<<"fi">>, [
{language, <<"fi">>},
{name, <<"suomi"/utf8>>},
{name_en, <<"Finnish"/utf8>>}
]},
{<<"fj">>, [
{language, <<"fj">>},
{region, <<"FJ">>},
{name, <<"<NAME>"/utf8>>},
{name_en, <<"Fijian"/utf8>>}
]},
{<<"fr">>, [
{language, <<"fr">>},
{name, <<"français"/utf8>>},
{name_en, <<"French"/utf8>>},
{sublanguages, [
{<<"fr-be">>, [
{language, <<"fr">>},
{region, <<"BE">>},
{name, <<"Français - Belgique"/utf8>>},
{name_en, <<"French - Belgium"/utf8>>}
]},
{<<"fr-ca">>, [
{language, <<"fr">>},
{region, <<"CA">>},
{name, <<"Français - Canada"/utf8>>},
{name_en, <<"French - Canada"/utf8>>}
]},
{<<"fr-ch">>, [
{language, <<"fr">>},
{region, <<"CH">>},
{name, <<"Français - Suisse"/utf8>>},
{name_en, <<"French - Switzerland"/utf8>>}
]},
{<<"fr-fr">>, [
{language, <<"fr">>},
{region, <<"FR">>},
{name, <<"Français - France"/utf8>>},
{name_en, <<"French - France"/utf8>>}
]},
{<<"fr-lu">>, [
{language, <<"fr">>},
{region, <<"LU">>},
{name, <<"Français - Luxembourg"/utf8>>},
{name_en, <<"French - Luxembourg"/utf8>>}
]},
{<<"fr-mc">>, [
{language, <<"fr">>},
{region, <<"MC">>},
{name, <<"Français - Monaco"/utf8>>},
{name_en, <<"French - Monaco"/utf8>>}
]}
]}
]},
{<<"fo">>, [
{language, <<"fo">>},
{region, <<"FO">>},
{name, <<"føroyskt"/utf8>>},
{name_en, <<"Faroese"/utf8>>}
]},
{<<"fy">>, [
{language, <<"fy">>},
{region, <<"NL">>},
{name, <<"West-Frysk"/utf8>>},
{name_en, <<"Frisian"/utf8>>}
]},
{<<"ga">>, [
{language, <<"ga">>},
{name, <<"Gaeilge"/utf8>>},
{name_en, <<"Gaelic"/utf8>>}
]},
{<<"gd">>, [
{language, <<"gd">>},
{region, <<"GB">>},
{name, <<"Gàidhlig"/utf8>>},
{name_en, <<"<NAME>"/utf8>>}
]},
{<<"gl">>, [
{language, <<"gl">>},
{region, <<"ES">>},
{name, <<"galego"/utf8>>},
{name_en, <<"Galician"/utf8>>}
]},
{<<"gn">>, [
{language, <<"gn">>},
{region, <<"PY">>},
{name, <<"avañe'ẽ"/utf8>>},
{name_en, <<"Guarani"/utf8>>}
]},
{<<"gu">>, [
{language, <<"gu">>},
{script, <<"Gujr">>},
{name, <<"ગુજરાતી"/utf8>>},
{name_en, <<"Gujarati"/utf8>>}
]},
{<<"he">>, [
{language, <<"he">>},
{direction, <<"RTL">>},
{script, <<"Hebr">>},
{region, <<"IL">>},
{name, <<"עברית"/utf8>>},
{name_en, <<"Hebrew"/utf8>>}
]},
{<<"hi">>, [
{language, <<"hi">>},
{script, <<"Deva">>},
{name, <<"हिन्दी"/utf8>>},
{name_en, <<"Hindi"/utf8>>}
]},
{<<"hr">>, [
{language, <<"hr">>},
{name, <<"hrvatski"/utf8>>},
{name_en, <<"Croatian"/utf8>>},
{sublanguages, [
{<<"hr-ba">>, [
{language, <<"hr">>},
{region, <<"BA">>},
{name, <<"hrvatski - Bosna i Hercegovina"/utf8>>},
{name_en, <<"Croatian - Bosnia and Herzegovina"/utf8>>}
]},
{<<"hr-hr">>, [
{language, <<"hr">>},
{region, <<"HR">>},
{name, <<"hrvatski - Hrvatska"/utf8>>},
{name_en, <<"Croatian - Croatia"/utf8>>}
]}
]}
]},
{<<"hu">>, [
{language, <<"hu">>},
{name, <<"magyar"/utf8>>},
{name_en, <<"Hungarian"/utf8>>}
]},
{<<"id">>, [
{language, <<"id">>},
{region, <<"ID">>},
{name, <<"Indonesia"/utf8>>},
{name_en, <<"Indonesian"/utf8>>}
]},
{<<"ia">>, [
{language, <<"ia">>},
{name, <<"Interlingua"/utf8>>},
{name_en, <<"Interlingua"/utf8>>}
]},
{<<"is">>, [
{language, <<"is">>},
{region, <<"IS">>},
{name, <<"íslenska"/utf8>>},
{name_en, <<"Islandic"/utf8>>}
]},
{<<"it">>, [
{language, <<"it">>},
{name, <<"italiano"/utf8>>},
{name_en, <<"Italian"/utf8>>},
{sublanguages, [
{<<"it-ch">>, [
{language, <<"it">>},
{region, <<"CH">>},
{name, <<"italiano - Svizzera"/utf8>>},
{name_en, <<"Italian - Switzerland"/utf8>>}
]},
{<<"it-it">>, [
{language, <<"it">>},
{region, <<"IT">>},
{name, <<"italiano - Italia"/utf8>>},
{name_en, <<"Italian - Italy"/utf8>>}
]}
]}
]},
{<<"ja">>, [
{language, <<"ja">>},
{script, <<"Jpan">>}, % alias for Han + Hiragana + Katakana
{name, <<"日本語"/utf8>>},
{name_en, <<"Japanese"/utf8>>}
]},
{<<"jv">>, [
{language, <<"jv">>},
{region, <<"ID">>},
{name, <<"basa jawa"/utf8>>},
{name_en, <<"Javanese"/utf8>>}
]},
{<<"ka">>, [
{language, <<"ka">>},
{script, <<"Geor">>},
{region, <<"GE">>},
{name, <<"ქართული"/utf8>>},
{name_en, <<"Georgian"/utf8>>}
]},
{<<"ko">>, [
{language, <<"ko">>},
{script, <<"Kore">>},
{name, <<"한국어"/utf8>>},
{name_en, <<"Korean"/utf8>>}
]},
{<<"lt">>, [
{language, <<"lt">>},
{name, <<"lietuvių"/utf8>>},
{name_en, <<"Lithuanian"/utf8>>}
]},
{<<"lv">>, [
{language, <<"lv">>},
{region, <<"LV">>},
{name, <<"latviešu"/utf8>>},
{name_en, <<"Latvian"/utf8>>}
]},
{<<"mg">>, [
{language, <<"mg">>},
{region, <<"MG">>},
{name, <<"Malagasy"/utf8>>},
{name_en, <<"Malagasy"/utf8>>}
]},
{<<"mk">>, [
{language, <<"mk">>},
{region, <<"MK">>},
{script, <<"Cyrl">>},
{name, <<"македонски"/utf8>>},
{name_en, <<"Macedonian"/utf8>>}
]},
{<<"mn">>, [
{language, <<"mn">>},
{name, <<"монгол"/utf8>>},
{name_en, <<"Mongolian"/utf8>>}
]},
{<<"mt">>, [
{language, <<"mt">>},
{region, <<"MT">>},
{name, <<"Malti"/utf8>>},
{name_en, <<"Maltese"/utf8>>}
]},
{<<"nl">>, [
{language, <<"nl">>},
{name, <<"Nederlands"/utf8>>},
{name_en, <<"Dutch"/utf8>>},
{sublanguages, [
{<<"nl-be">>, [
{language, <<"nl">>},
{region, <<"BE">>},
{name, <<"Vlaams - België"/utf8>>},
{name_en, <<"Flemish - Belgium"/utf8>>}
]},
{<<"nl-nl">>, [
{language, <<"nl">>},
{region, <<"NL">>},
{name, <<"Nederlands - Nederland"/utf8>>},
{name_en, <<"Dutch - Netherlands"/utf8>>}
]}
]}
]},
{<<"no">>, [
{language, <<"no">>},
{name, <<"norsk"/utf8>>},
{name_en, <<"Norwegian"/utf8>>}
]},
{<<"nn">>, [
{language, <<"nn">>},
{region, <<"NO">>},
{name, <<"nynorsk"/utf8>>},
{name_en, <<"Norwegian Nynorsk"/utf8>>}
]},
{<<"pa">>, [
{language, <<"pa">>},
{script, <<"Arab">>},
{name, <<"ਪੰਜਾਬੀ"/utf8>>},
{name_en, <<"Punjabi"/utf8>>},
{sublanguages, [
{<<"pa-arab">>, [
{language, <<"pa">>},
{script, <<"Arab">>},
{name, <<"ابی"/utf8>>},
{name_en, <<"Punjabi - Arab"/utf8>>}
]},
{<<"pa-guru">>, [
{language, <<"pa">>},
{script, <<"Guru">>},
{name, <<"ਪੰਜਾਬੀ ਦੇ - ਗੁਰਮੁਖੀ"/utf8>>},
{name_en, <<"Punjabi - Arab"/utf8>>}
]}
]}
]},
{<<"pl">>, [
{language, <<"pl">>},
{region, <<"PL">>},
{name, <<"polszczyzna"/utf8>>},
{name_en, <<"Polish"/utf8>>}
]},
{<<"ps">>, [
{language, <<"ps">>},
{script, <<"Arab">>},
{name, <<"تو"/utf8>>},
{name_en, <<"Pashto"/utf8>>}
]},
{<<"pt">>, [
{language, <<"pt">>},
{name, <<"português"/utf8>>},
{name_en, <<"Portuguese"/utf8>>},
{sublanguages, [
{<<"pt-br">>, [
{language, <<"pt">>},
{region, <<"BR">>},
{name, <<"português - Brasil"/utf8>>},
{name_en, <<"Portuguese - Brazil"/utf8>>}
]},
{<<"pt-pt">>, [
{language, <<"pt">>},
{region, <<"PT">>},
{name, <<"português - Portugal"/utf8>>},
{name_en, <<"Portuguese - Portugal"/utf8>>}
]}
]}
]},
{<<"ro">>, [
{language, <<"ro">>},
{name, <<"română"/utf8>>},
{name_en, <<"Romanian"/utf8>>}
]},
{<<"ru">>, [
{language, <<"ru">>},
{script, <<"Cyrl">>},
{name, <<"русский язык"/utf8>>},
{name_en, <<"Russian"/utf8>>}
]},
{<<"sk">>, [
{language, <<"sk">>},
{name, <<"slovenčina"/utf8>>},
{name_en, <<"Slovak"/utf8>>}
]},
{<<"sl">>, [
{language, <<"sl">>},
{name, <<"slovenščina"/utf8>>},
{name_en, <<"Slovenian"/utf8>>}
]},
{<<"sr">>, [
{language, <<"sr">>},
{script, <<"Cyrl">>},
{name, <<"српски"/utf8>>},
{name_en, <<"Serbian"/utf8>>}
]},
{<<"sv">>, [
{language, <<"sv">>},
{name, <<"svenska"/utf8>>},
{name_en, <<"Swedish"/utf8>>}
]},
{<<"sq">>, [
{language, <<"sq">>},
{name, <<"shqip"/utf8>>},
{name_en, <<"Albanian"/utf8>>}
]},
{<<"th">>, [
{language, <<"th">>},
{script, <<"Thai">>},
{name, <<"ไทย"/utf8>>},
{name_en, <<"Thai"/utf8>>}
]},
{<<"tr">>, [
{language, <<"tr">>},
{name, <<"Türkçe"/utf8>>},
{name_en, <<"Turkish"/utf8>>}
]},
{<<"uk">>, [
{language, <<"uk">>},
{script, <<"Cyrl">>},
{name, <<"українська"/utf8>>},
{name_en, <<"Ukrainian"/utf8>>}
]},
{<<"vi">>, [
{language, <<"vi">>},
{region, <<"VN">>},
{name, <<"<NAME>"/utf8>>},
{name_en, <<"Vietnamese"/utf8>>}
]},
{<<"zh">>, [
{type, <<"macro_language">>},
{language, <<"zh">>},
{script, <<"Hans">>},
{name, <<"中文"/utf8>>},
{name_en, <<"Chinese (Simplified)"/utf8>>},
{sublanguages, [
{<<"zh-hans">>, [
{language, <<"zh">>},
{script, <<"Hans">>},
{name, <<"简体中文"/utf8>>},
{name_en, <<"Chinese (Simplified)"/utf8>>}
]},
{<<"zh-hans-cn">>, [
{language, <<"zh-hans">>},
{region, <<"CN">>},
{script, <<"Hans">>},
{name, <<"中国大陆简体脚本"/utf8>>},
{name_en, <<"Chinese - Mainland (Simplified)"/utf8>>}
]},
{<<"zh-hans-sg">>, [
{language, <<"zh-hans">>},
{region, <<"SG">>},
{script, <<"Hans">>},
{name, <<"新加坡中国简体脚本"/utf8>>},
{name_en, <<"Chinese - Singapore (Simplified)"/utf8>>}
]}
]}
]},
{<<"zh-hant">>, [
{language, <<"zh-hant">>},
{script, <<"Hant">>},
{name, <<"中國傳統的腳本"/utf8>>},
{name_en, <<"Chinese (Traditional)"/utf8>>},
{sublanguages, [
{<<"zh-hant-hk">>, [
{language, <<"zh-hant">>},
{region, <<"HK">>},
{script, <<"Hant">>},
{name, <<"香港中國傳統腳本"/utf8>>},
{name_en, <<"Chinese - Hong Kong (Traditional)"/utf8>>}
]},
{<<"zh-hant-mo">>, [
{language, <<"zh-hant">>},
{region, <<"MO">>},
{script, <<"Hant">>},
{name, <<"澳門中國人在傳統的腳本"/utf8>>},
{name_en, <<"Chinese - Macau (Traditional)"/utf8>>}
]},
{<<"zh-hant-tw">>, [
{language, <<"zh-hant">>},
{region, <<"TW">>},
{script, <<"Hant">>},
{name, <<"台灣中國傳統腳本"/utf8>>},
{name_en, <<"Chinese - Taiwan (Traditional)"/utf8>>}
]}
]}
]}
].
%% Other, less used languages:
% gv: Manx
% ha: Hausa
% ho: Hiri Motu
% hy: Armenian
% hz: Herero
% ik: Inupiak
% io: Ido
% iu: Inuktitut
% ki: Kikuyu
% kj: Kuanyama
% kk: Kazakh
% kl: Kalaallisut Greenlandic
% km: Khmer Cambodian
% kn: Kannada
% ks: Kashmiri
% ku: Kurdish
% kv: Komi
% kw: Cornish
% ky: Kirghiz
% lb: Letzeburgesch
% ln: Lingala
% lo: Lao Laotian
% mh: Marshall
% mi: Maori
% ml: Malayalam
% mo: Moldavian
% mr: Marathi
% ms: Malay
% my: Burmese
% na: Nauru
% nb: Norwegian Bokmål
% nd: Ndebele, North
% ne: Nepali
% ng: Ndonga
% nr: Ndebele, South
% nv: Navajo
% ny: Chichewa Nyanja
% oc: Occitan Provençal
% om: (Afan) Oromo
% or: Oriya
% os: Ossetian Ossetic
% pi: Pali
% qu: Quechua
% rm: Rhaeto-Romance
% rn: Rundi Kirundi
% rw: Kinyarwanda
% sa: Sanskrit
% sc: Sardinian
% sd: Sindhi
% se: Northern Sami
% sg: Sango Sangro
% si: Sinhalese
% sm: Samoan
% sn: Shona
% so: Somali
% ss: Swati Siswati
% st: Sesotho Sotho, Southern
% su: Sundanese
% sw: Swahili
% ta: Tamil
% te: Telugu
% tg: Tajik
% ti: Tigrinya
% tk: Turkmen
% tl: Tagalog
% tn: Tswana Setswana
% to: Tonga
% ts: Tsonga
% tt: Tatar
% tw: Twi
% ty: Tahitian
% ug: Uighur
% ur: Urdu
% uz: Uzbek
% vo: Volapuk
% wa: Walloon
% wo: Wolof
% xh: Xhosa
% yo: Yoruba
% za: Zhuang
% zu: Zulu
%%%-------------------------------------------------------------------
%% @author <NAME> <<EMAIL>>
%% @copyright (C) 2017, <NAME>
%% @doc erl_id3_dectree.erl
%% ID3 Algorithm creates a decision tree for classification based on labeled
%% training data. It creates the tree top-down and selects the attribute to split on
%% which gives most information. The measure for information is entropy. The tree
%% is created recursively and at each step we compute the attribute to split on based on
%% entropy measure of the attributes that have not yet split. In each node the attribute with
%% the highest information gain is selected and branches for each value of the attribute is created
%% and nodes for the branches are computed in the same manner recursively.
%% The tree is on the format {node, attribute, Children} where children is a list of child nodes
%% or branches. A branch is {branch, decision, childNode}.
%% The computed decision tree can be printed as a set of IF-THEN rules.
%% Example use-case:
%% erl
%% > c(erl_id3_dectree).
%% > Examples = erl_id3_dectree:examples_play_tennis().
%% > Tree = erl_id3_dectree:learn_tree(Examples).
%% > erl_id3_dectree:pretty_print(Tree).
%% > erl_id3_dectree:generalize(Tree, [{outlook, overcast}, {temperature, hot}, {humidity, high}, {windy, true}]).
%% @end
%%%-------------------------------------------------------------------
-module(erl_id3_dectree).
-author('<NAME> <<EMAIL>>').
%% API
-export([learn_tree/1, generalize/2, examples_play_tennis/0, pretty_print/1]).
%% types
-type dec_tree():: id3_node().
%% An inner node carries the attribute that is split on; a leaf node
%% (empty child list) carries the classification in the same position.
-type id3_node()::{node, Attribute:: attribute() | classification(), Children:: list(id3_node() | branch() | nil)}.
-type branch()::{branch, Decision:: attribute_value(), Child:: id3_node()}.
-type attribute_value_pairs()::list(attribute_value_pair()).
-type attribute_value_pair()::{attribute(), attribute_value()}.
-type attribute():: atom().
-type attributes():: list(attribute()).
-type attribute_value():: atom().
%% Fixed: was self-referentially defined as values() :: values(), which is
%% invalid; a values() term is a list of attribute values.
-type values():: list(attribute_value()).
-type classification():: atom().
-type classes():: list(classification()).
-type example():: {attribute_value_pairs(), classification()}.
-type examples() :: list(example()).
%%====================================================================
%% API functions
%%====================================================================
%% @doc
%% Learns the decision tree given a list of examples.
%%
%% Base cases: if every example carries the same class, return a leaf for
%% that class; if no attributes remain, return a leaf with the most common
%% class. Otherwise split on the attribute with the highest information
%% gain and recurse over each value of that attribute.
-spec learn_tree(Examples:: examples()) -> dec_tree().
learn_tree(Examples)->
    Classes = classes(Examples),
    MostCommonClass = most_common_class(Classes),
    Attributes = attributes(Examples),
    case length(sets:to_list(sets:from_list(Classes))) =:= 1 of
        true ->
            {node, lists:nth(1, Classes), []};
        false ->
            case length(Attributes) of
                0 ->
                    {node, MostCommonClass, []};
                _ ->
                    BestAttribute = best_classifying_attribute(Examples),
                    Children = lists:foldl(fun(Value, Acc) ->
                                                   ExamplesSplit = decide(Examples, BestAttribute, Value),
                                                   case length(ExamplesSplit) of
                                                       0 ->
                                                           %% No examples for this value: fall back to a leaf
                                                           %% holding the most common class. The leaf must be a
                                                           %% proper {node, Class, []} term — the previous
                                                           %% {MostCommonClass, []} tuple could not be matched
                                                           %% by generalize/2 or pretty_print/2.
                                                           [{branch, Value, {node, MostCommonClass, []}}|Acc];
                                                       _ ->
                                                           [{branch, Value, learn_tree(remove_attribute(ExamplesSplit, BestAttribute))}|Acc]
                                                   end
                                           end, [], sets:to_list(sets:from_list(values(Examples, BestAttribute)))),
                    {node, BestAttribute, Children}
            end
    end.
%% @doc
%% Classify an observation (a list of attribute-value pairs) by walking the
%% decision tree: at each inner node follow the branch whose decision value
%% matches the observation's value for the node's attribute, until a leaf is
%% reached. Returns `nil' (after printing a message) when the observation
%% lacks an attribute required by the tree.
-spec generalize(Tree :: dec_tree(), AttributeValues :: attribute_value_pairs()) -> classification().
generalize({node, Classification, []}, _Observation) ->
    Classification;
generalize({node, Attribute, Branches}, Observation) ->
    case lists:keyfind(Attribute, 1, Observation) of
        false ->
            io:format("Attribute in attribute-value list is missing, cannot generalize with the decision tree ~n"),
            nil;
        {Attribute, Value} ->
            {branch, Value, SubTree} = lists:keyfind(Value, 2, Branches),
            generalize(SubTree, Observation)
    end.
%% @doc
%% Print the decision tree as a set of IF-THEN rules, one rule per leaf.
-spec pretty_print(dec_tree()) -> ok.
pretty_print(Tree) ->
    pretty_print(Tree, ""),
    ok.

%% Walk the tree, accumulating the rule text built so far in Prefix.
-spec pretty_print(dec_tree(), string()) -> ok.
pretty_print({node, Attr, Children}, Prefix) ->
    %% Inner node: extend the rule with "if <attribute>" and descend into
    %% every branch.
    lists:foreach(
      fun(Branch) ->
              pretty_print(Branch, io_lib:format("~sif ~p ", [Prefix, Attr]))
      end, Children),
    ok;
pretty_print({branch, Decision, {node, Attr, []}}, Prefix) ->
    %% Branch ending in a leaf: emit the completed rule.
    io:format("~s= ~p then ~p~n", [Prefix, Decision, Attr]);
pretty_print({branch, Decision, Child}, Prefix) ->
    %% Branch into a subtree: record the decision and keep walking.
    pretty_print(Child, io_lib:format("~s= ~p and ", [Prefix, Decision])).
%%====================================================================
%% Internal functions
%%====================================================================
%% @doc
%% Drop every occurrence of Attribute from the attribute-value pairs of
%% each example; classifications are left untouched.
-spec remove_attribute(examples(), attribute()) -> examples().
remove_attribute(Examples, Attribute) ->
    [{[Pair || {Attr, _} = Pair <- Pairs, Attr =/= Attribute], Class}
     || {Pairs, Class} <- Examples].
%% @doc
%% Return the examples consistent with Attribute = Value.
%%
%% An example with no pair for Attribute is kept (the fold's accumulator
%% starts at `true'); if Attribute occurs more than once, the last
%% occurrence decides.
-spec decide(Examples::examples(), Attribute::attribute(), Value::attribute_value()) -> examples().
decide(Examples, Attribute, Value) ->
    Consistent =
        fun({Pairs, _Class}) ->
                lists:foldl(
                  fun({Attr, Val}, Keep) ->
                          case Attr =:= Attribute of
                              true  -> Val =:= Value;
                              false -> Keep
                          end
                  end, true, Pairs)
        end,
    lists:filter(Consistent, Examples).
%% @doc
%% Pick the attribute with the highest information gain over Examples.
%% Scores each distinct attribute and takes the maximum {Gain, Attribute}
%% pair (ties broken by Erlang term order on the attribute name).
-spec best_classifying_attribute(Examples::examples())->attribute().
best_classifying_attribute(Examples) ->
    Candidates = sets:to_list(sets:from_list(attributes(Examples))),
    Scored = [{information_gain(Examples, Attr), Attr} || Attr <- Candidates],
    {_Gain, Best} = lists:max(Scored),
    Best.
%% @doc
%% Shannon entropy of the class distribution of the given examples.
%% An empty example list has entropy 0.
-spec entropy(Examples :: examples()) -> float().
entropy([]) ->
    0;
entropy([_ | _] = Examples) ->
    Classes = classes(Examples),
    Total = length(Classes),
    Distinct = sets:to_list(sets:from_list(Classes)),
    %% Proportion of each distinct class among all classifications.
    Proportions =
        [length([X || X <- Classes, X =:= C]) / Total || C <- Distinct],
    %% sum(p * log2(1/p)) over the class proportions.
    lists:sum([P * math:log2(1 / P) || P <- Proportions]).
%% @doc
%% Information gain of splitting Examples on Attribute: the entropy of the
%% whole set minus the size-weighted entropies of the subsets produced by
%% the split.
-spec information_gain(Examples :: examples(), Attribute :: attribute()) -> float().
information_gain(Examples, Attribute) ->
    BaseEntropy = entropy(Examples),
    Total = length(Examples),
    WeightedSubsetEntropy =
        lists:sum([entropy(Subset) * (length(Subset) / Total)
                   || Subset <- subsets(Examples, Attribute)]),
    BaseEntropy - WeightedSubsetEntropy.
%% @doc
%% Partition Examples by the distinct values of Attribute: one subset per
%% value, each containing the examples consistent with Attribute = Value.
%% The per-value filtering is exactly decide/3. The reversal preserves the
%% subset ordering produced by the original prepend-fold.
-spec subsets(Examples :: examples(), Attribute :: attribute()) -> list(examples()).
subsets(Examples, Attribute) ->
    DistinctValues = sets:to_list(sets:from_list(values(Examples, Attribute))),
    [decide(Examples, Attribute, Value)
     || Value <- lists:reverse(DistinctValues)].
%% @doc
%% Extract the classification of every example, in order.
-spec classes(Examples :: examples()) -> Classes::classes().
classes(Examples) ->
    [Class || {_Pairs, Class} <- Examples].
%% @doc
%% Collect every value the given attribute takes across all examples
%% (duplicates preserved, in example order).
-spec values(Examples :: examples(), Attribute :: attribute()) -> Values::values().
values(Examples, Attribute) ->
    [Val || {Attr, Val} <- attribute_value_pairs(Examples),
            Attr =:= Attribute].
%% @doc
%% Concatenate the attribute-value pair lists of all examples into one
%% flat list, in example order.
-spec attribute_value_pairs(Examples :: examples()) -> AttributeValues::attribute_value_pairs().
attribute_value_pairs(Examples) ->
    lists:append([Pairs || {Pairs, _Class} <- Examples]).
%% @doc
%% Extract every attribute name occurring in the examples (duplicates
%% preserved, in example order).
-spec attributes(Examples :: examples()) -> Attributes::attributes().
attributes(Examples) ->
    [Attr || {Pairs, _Class} <- Examples, {Attr, _Val} <- Pairs].
%% @doc
%% Return the class occurring most often in the list. On a tie, the class
%% encountered first wins (the fold only replaces the leader on a strictly
%% greater count). Returns `nil' for an empty list.
-spec most_common_class(Classes :: classes()) -> classification().
most_common_class(Classes) ->
    %% One {Class, Count} entry per list element (duplicates included),
    %% mirroring the original per-element counting.
    Counted = [{C, length([X || X <- Classes, X =:= C])} || C <- Classes],
    PickLeader =
        fun({Class, Count}, {BestClass, BestCount}) ->
                case Count > BestCount of
                    true  -> {Class, Count};
                    false -> {BestClass, BestCount}
                end
        end,
    {Winner, _Count} = lists:foldl(PickLeader, {nil, 0}, Counted),
    Winner.
%%====================================================================
%% Example Data
%%====================================================================
%% @doc
%% Sample training set: the classic "play tennis" data set (14 examples,
%% four attributes — outlook, temperature, humidity, windy — and the two
%% classes `play' / `not_play'). Cleaned up: extraction residue that had
%% been fused onto the closing bracket line has been removed.
-spec examples_play_tennis() -> examples().
examples_play_tennis()->
    [
     {[
       {outlook,sunny},{temperature,hot},{humidity,high},{windy,false}
      ], not_play
     },
     {[
       {outlook,sunny},{temperature,hot},{humidity,high},{windy,true}
      ], not_play
     },
     {[
       {outlook,overcast},{temperature,hot},{humidity,high},{windy,false}
      ], play
     },
     {[
       {outlook,rain},{temperature,mild},{humidity,high},{windy,false}
      ], play
     },
     {[
       {outlook,rain},{temperature,cool},{humidity,normal},{windy,false}
      ], play
     },
     {[
       {outlook,rain},{temperature,cool},{humidity,normal},{windy,true}
      ], not_play
     },
     {[
       {outlook,overcast},{temperature,cool},{humidity,normal},{windy,true}
      ], play
     },
     {[
       {outlook,sunny},{temperature,mild},{humidity,high},{windy,false}
      ], not_play
     },
     {[
       {outlook,sunny},{temperature,cool},{humidity,normal},{windy,false}
      ], play
     },
     {[
       {outlook,rain},{temperature,mild},{humidity,normal},{windy,false}
      ], play
     },
     {[
       {outlook,sunny},{temperature,mild},{humidity,normal},{windy,true}
      ], play
     },
     {[
       {outlook,overcast},{temperature,mild},{humidity,high},{windy,true}
      ], play
     },
     {[
       {outlook,overcast},{temperature,hot},{humidity,normal},{windy,false}
      ], play
     },
     {[
       {outlook,rain},{temperature,mild},{humidity,high},{windy,true}
      ], not_play
     }
    ].
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%
%% -------------------------------------------------------------------
%%
%% @author <NAME> <<EMAIL>>
%% @author <NAME> <<EMAIL>>
%% @author <NAME> <<EMAIL>>
%%
%% @doc Efficient sliding-window buffer
%%
%% Initial implementation: 29 Sep 2009 by <NAME>
%%
%% This module implements an efficient sliding window, maintaining
%% two lists - a primary and a secondary. Values are paired with a
%% timestamp (millisecond resolution, see `exometer_util:timestamp/0')
%% and prepended to the primary list. When the time span between the oldest
%% and the newest entry in the primary list exceeds the given window size,
%% the primary list is shifted into the secondary list position, and the
%% new entry is added to a new (empty) primary list.
%%
%% The window can be converted to a list using `to_list/1' or folded
%% over using `foldl/3'.
%% @end
-module(exometer_slide).
%% Public API. to_list/1 and foldl/3,4 are exported here but their
%% definitions are not visible in this part of the file.
-export([new/2, new/5,
         reset/1,
         add_element/2,
         add_element/3,
         add_element/4,
         to_list/1,
         foldl/3,
         foldl/4]).
%% Hot-path module: inlining is enabled deliberately.
-compile(inline).
-compile(inline_list_funcs).
%% NOTE(review): -import is generally discouraged, but the function bodies
%% below rely on the unqualified reverse/1 and timestamp/0 calls it provides.
-import(lists, [reverse/1]).
-import(exometer_util, [timestamp/0]).
-type value() :: any().
-type cur_state() :: any().
-type timestamp() :: exometer_util:timestamp().
-type sample_fun() :: fun((timestamp(), value(), cur_state()) ->
                                 cur_state()).
-type transform_fun() :: fun((timestamp(), cur_state()) ->
                                    cur_state()).
-type fold_acc() :: any().
-type fold_fun() :: fun(({timestamp(),value()}, fold_acc()) -> fold_acc()).
%% Fixed size event buffer.
%% buf1 is the primary (current) window list, buf2 the secondary list that
%% buf1 is shifted into when the window span is exceeded (see module doc).
-record(slide, {size = 0 :: integer(), % ms window
                n = 0 :: integer(), % number of elements in buf1
                max_n :: undefined | integer(), % max no of elements
                last = 0 :: integer(), % millisecond timestamp
                buf1 = [] :: list(),
                buf2 = [] :: list()}).
-spec new(integer(), integer(),
          sample_fun(), transform_fun(), list()) -> #slide{}.
%% @doc Callback entry point used by exometer_histogram.
%%
%% Not intended for direct use; the `_Period', `_SampleFun' and
%% `_TransformFun' arguments are ignored and the call is delegated
%% to new/2.
%% @end
new(WindowSize, _Period, _SampleFun, _TransformFun, Options) ->
    new(WindowSize, Options).
-spec new(_Size::integer(), _Options::list()) -> #slide{}.
%% @doc Create a new sliding-window buffer.
%%
%% `Size' is the window span in milliseconds. Values are prepended to a
%% primary list until its oldest entry is `Size' ms older than the newest;
%% the primary list is then swapped into the secondary slot and a fresh
%% primary list is started. Up to roughly twice the window's worth of data
%% may therefore be retained, which keeps updates very cheap.
%%
%% Recognised option: `{max_n, N}' — cap on the number of elements
%% (defaults to `infinity').
%% @end
new(WindowSize, Options) ->
    MaxN = proplists:get_value(max_n, Options, infinity),
    #slide{size = WindowSize,
           max_n = MaxN,
           last = timestamp(),
           buf1 = [],
           buf2 = []}.
-spec reset(#slide{}) -> #slide{}.
%% @doc Empty the buffer, keeping its size and max_n configuration.
%%
reset(Slide) ->
    Slide#slide{n = 0, last = 0, buf1 = [], buf2 = []}.
-spec add_element(value(), #slide{}) -> #slide{}.
%% @doc Add an element to the buffer, tagging it with the current time.
%%
%% The buffer is a sliding window: values are discarded as they fall out
%% of the configured time span.
%% @end
%%
add_element(Evt, Slide) ->
    add_element(timestamp(), Evt, Slide, false).
-spec add_element(timestamp(), value(), #slide{}) -> #slide{}.
%% @doc Add an element to the buffer, tagged with the given timestamp.
%%
%% Identical to {@link add_element/2} except that the caller supplies
%% the timestamp.
%% @end
%%
add_element(TS, Evt, Slide) ->
    add_element(TS, Evt, Slide, false).
-spec add_element(timestamp(), value(), #slide{}, true) ->
          {boolean(), #slide{}};
      (timestamp(), value(), #slide{}, false) ->
          #slide{}.
%% @doc Add an element to the buffer, optionally indicating if a swap occurred.
%%
%% Works like {@link add_element/3}, but when `Wrap == true' the return
%% value is `{SwapOccurred, Slide}': `SwapOccurred == true' means the
%% primary buffer list filled up (or the time window expired) and was
%% swapped into the secondary position, starting over with an empty
%% primary list. With `Wrap == false' this behaves exactly like
%% {@link add_element/3}.
%%
%% One possible use of `Wrap == true' is keeping a sliding window of
%% values that are pushed to an external stats service: the swap
%% indication is a natural trigger point for pushing values so that no
%% entries are lost.
%% @end
%%
add_element(_TS, _Evt, Slide, Wrap) when Slide#slide.size == 0 ->
    %% A zero-size window never stores anything.
    add_ret(Wrap, false, Slide);
add_element(TS, Evt, #slide{last = Last, size = Sz,
                            n = N, max_n = MaxN,
                            buf1 = Buf1} = Slide, Wrap) ->
    NewCount = N + 1,
    %% Note: with MaxN == infinity the comparison is always false
    %% (numbers sort before atoms in Erlang term order).
    case TS - Last > Sz orelse NewCount > MaxN of
        true ->
            %% Window expired or primary list full: demote buf1 to buf2
            %% and restart the primary list with just the new sample.
            Swapped = Slide#slide{last = TS,
                                  n = 1,
                                  buf1 = [{TS, Evt}],
                                  buf2 = Buf1},
            add_ret(Wrap, true, Swapped);
        false ->
            add_ret(Wrap, false, Slide#slide{n = NewCount,
                                             buf1 = [{TS, Evt} | Buf1]})
    end.
%% Shape the add_element/4 return value: the plain slide, or
%% {SwapFlag, Slide} when the caller asked to observe buffer swaps.
add_ret(false, _Flag, Slide) -> Slide;
add_ret(true, Flag, Slide) -> {Flag, Slide}.
-spec to_list(#slide{}) -> [{timestamp(), value()}].
%% @doc Convert the sliding window into a list of timestamped values.
%% @end
to_list(#slide{size = Sz}) when Sz == 0 ->
    [];
to_list(#slide{size = Sz, n = N, max_n = MaxN, buf1 = Buf1, buf2 = Buf2}) ->
    Oldest = timestamp() - Sz,
    Allowed = n_diff(MaxN, N),
    FromBuf1 = take_since(Buf1, Oldest, Allowed, []),
    take_since(Buf2, Oldest, Allowed, reverse(FromBuf1)).
-spec foldl(timestamp(), fold_fun(), fold_acc(), #slide{}) -> fold_acc().
%% @doc Fold over the sliding window, starting from `Timestamp'.
%%
%% The fun should be written as `fun({Timestamp, Value}, Acc) -> NewAcc'.
%% The values are processed in order from oldest to newest.
%% @end
foldl(_Timestamp, _Fun, Acc, #slide{size = Sz}) when Sz == 0 ->
    %% Fix: a zero-size window contributes nothing to the fold, so return
    %% the accumulator unchanged. The original returned [], which both
    %% violates the fold_acc() spec and loses the caller's accumulator.
    Acc;
foldl(Timestamp, Fun, Acc, #slide{size = Sz, n = N, max_n = MaxN,
                                  buf1 = Buf1, buf2 = Buf2}) ->
    Start = Timestamp - Sz,
    %% Trim each buffer to the window; buf2 (the older, swapped-out list)
    %% is folded first so values flow to the fun oldest-to-newest.
    Buf1Values = take_since(Buf1, Start, n_diff(MaxN, N), []),
    Buf2Values = take_since(Buf2, Start, n_diff(MaxN, N), []),
    lists:foldr(Fun, lists:foldl(Fun, Acc, Buf2Values), Buf1Values).
-spec foldl(fold_fun(), fold_acc(), #slide{}) -> fold_acc().
%% @doc Fold over all values currently in the sliding window.
%%
%% The fun should be written as `fun({Timestamp, Value}, Acc) -> NewAcc'.
%% The values are processed in order from oldest to newest.
%% Delegates to {@link foldl/4} using the current time as the reference.
%% @end
foldl(Fun, Acc, Slide) ->
    foldl(timestamp(), Fun, Acc, Slide).
%% Collect entries with timestamps no older than `Start' from a
%% newest-first list, keeping at most N of them. Prepending while walking
%% newest-to-oldest leaves the accumulator oldest-first, which is the
%% order callers want — hence no final reverse.
take_since([{TS, _} = Sample | Rest], Start, N, Acc) when TS >= Start, N > 0 ->
    take_since(Rest, Start, decr(N), [Sample | Acc]);
take_since(_OutOfWindow, _Start, _N, Acc) ->
    Acc.

%% Decrement the remaining-element counter. Only ever sees integers:
%% n_diff/2 converts an `infinity' max into the element count.
decr(N) when is_integer(N) ->
    N - 1.
%% Number of further elements the window may keep: with an integer max_n
%% cap the headroom is MaxN - N; with no cap (infinity/undefined) the
%% element count itself bounds the take.
%% (Fix: stripped trailing extraction junk fused onto the final line,
%% which made the file syntactically invalid.)
n_diff(MaxN, N) when is_integer(MaxN) ->
    MaxN - N;
n_diff(_NoCap, N) ->
    N.
-module(benchmark).
-export([test_fib/0, test_get_channel_history/0, test_send_message/0]).
%% Fibonacci
%% Naive doubly-recursive Fibonacci (1-indexed: fib(0) = fib(1) = 1).
%% Intentionally exponential — it exists purely as CPU-bound benchmark load.
fib(0) -> 1;
fib(1) -> 1;
fib(N) -> fib(N - 2) + fib(N - 1).
%% Benchmark helpers

%% Run `Fun' `Times' times, one freshly spawned process per iteration.
%% Recommendation: run each test at least 30 times to get statistically
%% relevant results.
run_benchmark(Name, Fun, Times) ->
    Self = self(),
    RunOne =
        fun (N) ->
                %% Each run gets its own newly created process so later
                %% runs don't start out with the larger heaps (and fewer
                %% garbage collections) left behind by earlier ones. Also
                %% consider restarting the emulator between tests.
                %% Source: http://erlang.org/doc/efficiency_guide/profiling.html
                spawn_link(fun () ->
                                   run_benchmark_once(Name, Fun, N),
                                   Self ! done
                           end),
                receive done ->
                        ok
                end
        end,
    lists:foreach(RunOne, lists:seq(1, Times)).
%% Execute one benchmark iteration and print CPU time and wall-clock time.
%%
%% Wall clock time (os:timestamp/0, microsecond precision) includes I/O,
%% swapping and other kernel activity, so it varies more. CPU time
%% (statistics(runtime), summed over all threads) excludes kernel time,
%% which gives smaller variation but can be misleading. The granularity
%% of both measurements is coarse, so each run should last at least
%% several seconds (http://erlang.org/doc/efficiency_guide/profiling.html).
run_benchmark_once(Name, Fun, N) ->
    io:format("Running benchmark ~s: ~p~n", [Name, N]),
    statistics(runtime),                  % reset the CPU-time counter
    WallStart = os:timestamp(),
    Fun(),
    {_, CpuMs} = statistics(runtime),     % CPU ms elapsed since the reset
    WallMicros = timer:now_diff(os:timestamp(), WallStart),
    io:format("CPU time = ~p ms~nWall clock time = ~p ms~n",
              [CpuMs, WallMicros / 1000.0]),
    io:format("~s done~n", [Name]).
%% Benchmarks

%% CPU-bound benchmark: naive Fibonacci, 30 repetitions.
test_fib() ->
    run_benchmark("Fibonacci", fun test_fib_benchmark/0, 30).

test_fib_benchmark() ->
    fib(38).
% Creates a server with 10 channels and 5000 users.
% Returns {ServerPid, Channels, Users}.
initialize_server() ->
    % Fix: rand:seed_s/2 only *returns* a seed state without installing
    % it, so the rand:uniform/1 calls below were never actually seeded
    % and runs were not reproducible. rand:seed/2 installs the state in
    % the process dictionary, making every run build the same
    % subscriptions.
    rand:seed(exsplus, {0, 0, 0}),
    NumberOfChannels = 10,
    NumberOfUsers = 5000,
    ChannelNames = lists:seq(1, NumberOfChannels),
    UserNames = lists:seq(1, NumberOfUsers),
    % Every channel starts with a small canned message history.
    Channels = dict:from_list(lists:map(fun (Name) ->
        Messages = [{message, 5, Name, "Hello!", os:system_time()},
                    {message, 6, Name, "Hi!", os:system_time()},
                    {message, 5, Name, "Bye!", os:system_time()}],
        Channel = {channel, Name, Messages},
        {Name, Channel}
    end,
    ChannelNames)),
    % Every user subscribes to three (possibly duplicate) random channels;
    % duplicates collapse in the set.
    Users = dict:from_list(lists:map(fun (Name) ->
        Subscriptions = [rand:uniform(NumberOfChannels),
                         rand:uniform(NumberOfChannels),
                         rand:uniform(NumberOfChannels)],
        User = {user, Name, sets:from_list(Subscriptions)},
        {Name, User}
    end,
    UserNames)),
    ServerPid = server_centralized:initialize_with(Users, dict:new(), Channels),
    {ServerPid, Channels, Users}.
% Creates a server with 10 channels and 5000 users, and logs in 100 users.
% `Fun(I)` is executed on the clients after log in, where I is the client's
% index, which is also its corresponding user's name.
% Returns {ServerPid, Channels, Users, Clients} with Clients being
% [{UserName, ClientPid}].
initialize_server_and_some_clients(Fun) ->
    {ServerPid, Channels, Users} = initialize_server(),
    NumberOfActiveUsers = 100,
    BenchmarkerPid = self(),
    % One linked process per active user: log in, ack to the benchmarker,
    % then hand control to the caller-supplied Fun.
    Clients = lists:map(fun (I) ->
        ClientPid = spawn_link(fun () ->
            server:log_in(ServerPid, I),
            BenchmarkerPid ! {logged_in, I},
            Fun(I)
        end),
        {I, ClientPid}
    end,
    lists:seq(1, NumberOfActiveUsers)),
    % Ensure that all log-ins have finished before proceeding; matching on
    % the bound I drains the acks in order regardless of arrival order.
    lists:foreach(fun (I) ->
        receive {logged_in, I} ->
            ok
        end
    end,
    lists:seq(1, NumberOfActiveUsers)),
    {ServerPid, Channels, Users, Clients}.
% Get the history of 100000 channels (repeated 30 times).
test_get_channel_history() ->
    {ServerPid, Channels, _Users} = initialize_server(),
    NumberOfChannels = dict:size(Channels),
    run_benchmark("get_channel_history",
        fun () ->
            lists:foreach(fun (I) ->
                % Fix: channels are named 1..NumberOfChannels (see
                % initialize_server/0), but `I rem N' alone also yields
                % 0, which names no channel. Shift into the valid range.
                Channel = (I rem NumberOfChannels) + 1,
                server:get_channel_history(ServerPid, Channel)
            end,
            lists:seq(1, 100000))
        end,
        30).
% Send a message for 1000 users, and wait for all of them to be broadcast
% (repeated 30 times).
test_send_message() ->
    run_benchmark("send_message_for_each_user",
        fun () ->
            BenchmarkerPid = self(),
            % Each active client forwards every broadcast it receives back
            % to the benchmarker so completion can be awaited below.
            ClientFun = fun(I) ->
                receive_new_messages(BenchmarkerPid, I)
            end,
            {ServerPid, _Channels, Users, Clients} =
                initialize_server_and_some_clients(ClientFun),
            % Use the first 1000 user names as senders.
            ChosenUserNames = lists:sublist(dict:fetch_keys(Users), 1000),
            send_message_for_users(ServerPid, ChosenUserNames, Users, Clients)
        end,
        30).
% Send one message to channel 1 on behalf of each chosen user, then block
% until every active client subscribed to channel 1 has acked each
% broadcast back to this process.
send_message_for_users(ServerPid, ChosenUserNames, Users, Clients) ->
    % For each of the chosen users, send a message to channel 1.
    lists:foreach(fun (UserName) ->
        server:send_message(ServerPid, UserName, 1, "Test")
    end,
    ChosenUserNames),
    % For each of the active clients, that subscribe to channel 1, wait until
    % messages arrive.
    ClientUserNames = lists:map(fun ({ClName, _ClPid}) -> ClName end, Clients),
    ClientsSubscribedTo1 = lists:filter(fun (UN) ->
        {user, UN, Subscriptions} = dict:fetch(UN, Users),
        sets:is_element(1, Subscriptions)
    end, ClientUserNames),
    % NOTE(review): this wait assumes the server does not echo a message
    % back to its sender — confirm against the server implementation.
    lists:foreach(fun (Sender) ->
        % We expect responses from everyone except the sender.
        ExpectedResponses = lists:delete(Sender, ClientsSubscribedTo1),
        lists:foreach(fun (ClientUserName) ->
            receive {ok, ClientUserName} -> ok end
        end,
        ExpectedResponses)
    end,
    ChosenUserNames).
% Helper function: loops forever receiving new_message notifications and
% notifies the benchmarker with {ok, I} for each received message.
% (Fix: stripped trailing extraction junk fused onto the final line,
% which made the file syntactically invalid.)
receive_new_messages(BenchmarkerPid, I) ->
    receive {_, new_message, _} ->
        BenchmarkerPid ! {ok, I}
    end,
    receive_new_messages(BenchmarkerPid, I).
-module(hash).
% Our own version of Ruby's Hash class
-export([
any/2,
all/2,
delete/2,
dig/2,
empty/1,
has_key/2,
has_value/2,
keys/1,
values/1,
merge/2,
reject/2,
select/2,
shift/1,
size/1,
store/3
]).
% Return true if `Predicate' holds for at least one {Key, Value} pair.
any(Hash, Predicate) ->
    lists:any(Predicate, maps:to_list(Hash)).

% Return true if `Predicate' holds for every {Key, Value} pair.
all(Hash, Predicate) ->
    lists:all(Predicate, maps:to_list(Hash)).
% Delete a pair from the map and return the new map. Deleting an absent
% key leaves the map unchanged (maps:remove/2 semantics).
delete(Hash, Key) ->
    maps:remove(Key, Hash).

% Retrieve a value from a nested map by following `Keys' in order.
dig(Hash, Keys) ->
    do_dig(Hash, Keys).
% Returns whether or not the map is empty.
% Fix: the original compared `maps:size(Hash) > 0', i.e. it returned
% true for NON-empty maps — the inverse of the function's name.
empty(Hash) ->
    maps:size(Hash) =:= 0.
% Returns whether or not `Key' is present as a key in map `Hash'.
% Improvement: maps:is_key/2 is a direct O(log n) lookup instead of
% materialising the whole key list and scanning it.
has_key(Hash, Key) ->
    maps:is_key(Key, Hash).

% Returns whether or not `Value' is present among the values of `Hash'.
% A full scan is unavoidable here, as maps are not indexed by value.
has_value(Hash, Value) ->
    lists:member(Value, maps:values(Hash)).
% All keys present in the map.
keys(Hash) ->
    maps:keys(Hash).

% All values present in the map.
values(Hash) ->
    maps:values(Hash).

% Merge two maps into one; on key collisions the value from `Hash2'
% wins (maps:merge/2 semantics).
merge(Hash1, Hash2) ->
    maps:merge(Hash1, Hash2).
% Keep only the pairs for which `Predicate(Key, Value)' returned exactly
% false. (The `=:= false' comparison, rather than `not', preserves the
% original handling of non-boolean predicate results.)
reject(Hash, Predicate) ->
    maps:filter(fun (K, V) -> Predicate(K, V) =:= false end, Hash).

% Keep only the pairs for which `Predicate(Key, Value)' returned exactly
% true.
select(Hash, Predicate) ->
    maps:filter(fun (K, V) -> Predicate(K, V) =:= true end, Hash).
% Drop the first key (in maps:keys/1 order) from the map and return the
% remaining map. Crashes with a badmatch if the map is empty.
shift(Hash) ->
    [First | _Rest] = maps:keys(Hash),
    maps:remove(First, Hash).

% Number of pairs stored in the map.
size(Hash) ->
    maps:size(Hash).
% Stores `Value' under `Key' and returns the updated map.
% Fix: maps:put/3 takes (Key, Value, Map); the original passed the map
% first, which made every call fail (or insert the map itself as a key).
store(Hash, Key, Value) ->
    maps:put(Key, Value, Hash).
% Private functions

% Walk `Keys' into a nested structure: maps are descended with
% maps:get/2 and anything else is treated as a proplist.
% Fixes: stripped trailing extraction junk fused onto the final line
% (which broke the file's syntax), and made the proplist branch recurse
% on the remaining keys — the original stopped after the first proplist
% level, returning an intermediate container instead of digging on.
do_dig(Value, []) ->
    Value;
do_dig(Map, [Key | Keys]) when is_map(Map) ->
    do_dig(maps:get(Key, Map), Keys);
do_dig(Proplist, [Key | Keys]) ->
    do_dig(proplists:get_value(Key, Proplist), Keys).
%% Copyright (c) 2013-2019 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(emqx_pqueue_SUITE).
-include("emqx_mqtt.hrl").
-include_lib("eunit/include/eunit.hrl").
-compile(export_all).
-compile(nowarn_export_all).
-define(PQ, emqx_pqueue).
%% Common Test entry point: the test cases in this suite.
all() -> [t_priority_queue_plen, t_priority_queue_out2, t_priority_queues].
%% plen/2 must report the element count at a single priority, tracking
%% every in/2,3 and out/1,2 operation.
t_priority_queue_plen(_) ->
    Q = ?PQ:new(),
    0 = ?PQ:plen(0, Q),
    Q0 = ?PQ:in(z, Q),
    1 = ?PQ:plen(0, Q0),
    Q1 = ?PQ:in(x, 1, Q0),
    1 = ?PQ:plen(1, Q1),
    Q2 = ?PQ:in(y, 2, Q1),
    1 = ?PQ:plen(2, Q2),
    Q3 = ?PQ:in(z, 2, Q2),
    2 = ?PQ:plen(2, Q3),
    {_, Q4} = ?PQ:out(1, Q3),
    0 = ?PQ:plen(1, Q4),
    {_, Q5} = ?PQ:out(Q4),
    1 = ?PQ:plen(2, Q5),
    {_, Q6} = ?PQ:out(Q5),
    0 = ?PQ:plen(2, Q6),
    1 = ?PQ:len(Q6),
    {_, Q7} = ?PQ:out(Q6),
    0 = ?PQ:len(Q7).

%% out/1 must dequeue strictly by descending priority (FIFO within one
%% priority); out/2 dequeues from one specific priority.
t_priority_queue_out2(_) ->
    Els = [a, {b, 1}, {c, 1}, {d, 2}, {e, 2}, {f, 2}],
    Q = ?PQ:new(),
    %% Bare elements get default priority 0; {El, P} tuples get P.
    Q0 = lists:foldl(
        fun({El, P}, Acc) ->
            ?PQ:in(El, P, Acc);
        (El, Acc) ->
            ?PQ:in(El, Acc)
        end, Q, Els),
    {Val, Q1} = ?PQ:out(Q0),
    {value, d} = Val,
    {Val1, Q2} = ?PQ:out(2, Q1),
    {value, e} = Val1,
    {Val2, Q3} = ?PQ:out(1, Q2),
    {value, b} = Val2,
    {Val3, Q4} = ?PQ:out(Q3),
    {value, f} = Val3,
    {Val4, Q5} = ?PQ:out(Q4),
    {value, c} = Val4,
    {Val5, Q6} = ?PQ:out(Q5),
    {value, a} = Val5,
    {empty, _Q7} = ?PQ:out(Q6).
%% Exercises is_queue/to_list/from_list/highest/out/out_p/join across
%% plain queues and priority queues, pinning the internal representation.
%% (Fix: stripped trailing extraction junk fused onto the final line,
%% which made the file syntactically invalid.)
t_priority_queues(_) ->
    Q0 = ?PQ:new(),
    Q1 = ?PQ:new(),
    PQueue = {pqueue, [{0, Q0}, {1, Q1}]},
    ?assert(?PQ:is_queue(PQueue)),
    [] = ?PQ:to_list(PQueue),
    PQueue1 = ?PQ:in(a, 0, ?PQ:new()),
    PQueue2 = ?PQ:in(b, 0, PQueue1),
    PQueue3 = ?PQ:in(c, 1, PQueue2),
    PQueue4 = ?PQ:in(d, 1, PQueue3),
    4 = ?PQ:len(PQueue4),
    [{1, c}, {1, d}, {0, a}, {0, b}] = ?PQ:to_list(PQueue4),
    PQueue4 = ?PQ:from_list([{1, c}, {1, d}, {0, a}, {0, b}]),
    empty = ?PQ:highest(?PQ:new()),
    0 = ?PQ:highest(PQueue1),
    1 = ?PQ:highest(PQueue4),
    PQueue5 = ?PQ:in(e, infinity, PQueue4),
    PQueue6 = ?PQ:in(f, 1, PQueue5),
    {{value, e}, PQueue7} = ?PQ:out(PQueue6),
    {empty, _} = ?PQ:out(0, ?PQ:new()),
    {empty, Q0} = ?PQ:out_p(Q0),
    Q2 = ?PQ:in(a, Q0),
    Q3 = ?PQ:in(b, Q2),
    Q4 = ?PQ:in(c, Q3),
    {{value, a, 0}, _Q5} = ?PQ:out_p(Q4),
    {{value,c,1}, PQueue8} = ?PQ:out_p(PQueue7),
    %% Joining with an empty queue must be the identity in both positions.
    Q4 = ?PQ:join(Q4, ?PQ:new()),
    Q4 = ?PQ:join(?PQ:new(), Q4),
    {queue, [a], [a], 2} = ?PQ:join(Q2, Q2),
    {pqueue,[{-1,{queue,[f],[d],2}},
             {0,{queue,[a],[a,b],3}}]} = ?PQ:join(PQueue8, Q2),
    {pqueue,[{-1,{queue,[f],[d],2}},
             {0,{queue,[b],[a,a],3}}]} = ?PQ:join(Q2, PQueue8),
    {pqueue,[{-1,{queue,[f],[d,f,d],4}},
             {0,{queue,[b],[a,b,a],4}}]} = ?PQ:join(PQueue8, PQueue8).
%%%
% This module is used to do proper Geographic queries
% which take into consideration the non-2D nature of the Earth.
%
% We use the underlying R-Tree to represent the Earth's
% surface as 2D in Lat/Lng, which is effiecient as a primarily filter.
% This module provides the secondary filtering and data manipulation to
% translate to reality.
%
%%%
-module(teles_geo_query).
-export([search_around/3, search_nearest/3,
distance/2, latitudinal_width/1, longitudinal_width/1]).
-include_lib("rstar/include/rstar.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
% Earth's radius in meters
-define(RADIUS_METERS, 6378137.0).
% Multiplier to convert degrees to radians
-define(DEGREES_TO_RAD, 0.017453292519943295).
% Constants
-define(PI, 3.141592653589793).
-define(E_SQ, 0.00669437999014).
% Search around a point, using an over-sized 2D bounding-box query as the
% primary (cheap) filter and the true haversine distance as the secondary
% filter. Distance is in meters.
search_around(Tree, SearchPoint, Distance) ->
    Box = search_box(SearchPoint, Distance),
    Candidates = rstar:search_within(Tree, Box),
    [G || G <- Candidates, distance(SearchPoint, G) =< Distance].
% Search for the K nearest neighbours. Over-queries the R-tree with 2*K
% candidates, re-sorts them on true haversine distance, and keeps the
% first K.
search_nearest(Tree, SearchPoint, K) ->
    Candidates = rstar:search_nearest(Tree, SearchPoint, 2 * K),
    ByDistance = lists:sort([{distance(SearchPoint, G), G} || G <- Candidates]),
    [G || {_Dist, G} <- lists:sublist(ByDistance, K)].
% Generates a search box from a point and a distance (meters). The box
% deliberately over-covers the true circle; callers re-filter candidates
% with distance/2.
search_box(SearchPoint, Distance) ->
    % Extract the Lat/Lng
    #geometry{mbr=[{Lat, _}, {Lng, _}]} = SearchPoint,
    % Pad the distance a bit so we over-query
    DistancePad = 1.5 * Distance,
    % Convert the padded distance to degrees of latitude using the width
    % in meters of one latitude degree at this latitude.
    LatWidth = latitudinal_width(Lat),
    LatSpread = DistancePad / LatWidth,
    MinLat = Lat - LatSpread,
    MaxLat = Lat + LatSpread,
    % Determine the widest latitude. The value farthest from
    % the equator has the smallest longitude-degree width, which
    % creates the largest (safest) longitude spread.
    WidestLat = if
        Lat >= 0 -> MaxLat;
        Lat < 0 -> MinLat
    end,
    LngWidth = longitudinal_width(WidestLat),
    LngSpread = DistancePad / LngWidth,
    MinLng = Lng - LngSpread,
    MaxLng = Lng + LngSpread,
    % Create a search box
    #geometry{dimensions=2, mbr=[{MinLat, MaxLat}, {MinLng, MaxLng}]}.
% Estimates the distance in meters between two geometries using the law
% of haversines — a better estimate than the R-tree's planar Euclidean
% distance. From http://en.wikipedia.org/wiki/Law_of_haversines
distance(A, B) ->
    #geometry{mbr = [{LatA, _}, {LngA, _}]} = A,
    #geometry{mbr = [{LatB, _}, {LngB, _}]} = B,
    LatArcRad = (LatA - LatB) * ?DEGREES_TO_RAD,
    LngArcRad = (LngA - LngB) * ?DEGREES_TO_RAD,
    HavLat = math:pow(math:sin(LatArcRad * 0.5), 2),
    HavLng = math:pow(math:sin(LngArcRad * 0.5), 2),
    CosCos = math:cos(LatA * ?DEGREES_TO_RAD) * math:cos(LatB * ?DEGREES_TO_RAD),
    Haversine = HavLat + CosCos * HavLng,
    2.0 * math:asin(math:sqrt(Haversine)) * ?RADIUS_METERS.
% Width in meters of one degree of latitude at latitude `Lat' (degrees).
latitudinal_width(Lat) ->
    Phi = Lat * ?DEGREES_TO_RAD,
    111132.954 - 559.822 * math:cos(2.0 * Phi) + 1.175 * math:cos(4.0 * Phi).

% Width in meters of one degree of longitude at latitude `Lat' (degrees);
% shrinks toward zero at the poles.
longitudinal_width(Lat) ->
    Phi = Lat * ?DEGREES_TO_RAD,
    Numerator = ?PI * ?RADIUS_METERS * math:cos(Phi),
    Denominator = 180 * math:sqrt(1 - ?E_SQ * math:pow(math:sin(Phi), 2)),
    Numerator / Denominator.
-ifdef(TEST).

%% Haversine distance between two well-separated points (~289 km),
%% pinned to millimeter precision via trunc.
distance_test() ->
    A = rstar_geometry:point2d(47.123, 120.567, undefined),
    B = rstar_geometry:point2d(45.876, 123.876, undefined),
    ?assertEqual(289038078, trunc(1000*distance(A, B))).

%% Haversine distance between two nearby points (~17 km).
distance_near_test() ->
    A = rstar_geometry:point2d(47.123, 120.567, undefined),
    B = rstar_geometry:point2d(47.276, 120.576, undefined),
    ?assertEqual(17045.480008358903, distance(A, B)).

%% Latitude-degree width grows slightly from equator to pole.
latitudinal_width_test() ->
    ?assertEqual(110574, round(latitudinal_width(0))),
    ?assertEqual(110649, round(latitudinal_width(15))),
    ?assertEqual(111132, round(latitudinal_width(45))),
    ?assertEqual(111412, round(latitudinal_width(60))),
    ?assertEqual(111694, round(latitudinal_width(90))).

%% Longitude-degree width shrinks to zero at the pole.
longitudinal_width_test() ->
    ?assertEqual(111319, round(longitudinal_width(0))),
    ?assertEqual(107550, round(longitudinal_width(15))),
    ?assertEqual(78847, round(longitudinal_width(45))),
    ?assertEqual(55800, round(longitudinal_width(60))),
    ?assertEqual(0, round(longitudinal_width(90))).

%% At the equator the padded 10km box is symmetric in both axes.
search_box_equator_test() ->
    Point = rstar_geometry:point2d(0, 0, undefined),
    Box = search_box(Point, 10000), % 10km
    #geometry{mbr=[{MinLat, MaxLat}, {MinLng, MaxLng}]} = Box,
    ?assertEqual(-0.13565538330708235, MinLat),
    ?assertEqual(0.13565538330708235, MaxLat),
    ?assertEqual(-0.1347476677660395, MinLng),
    ?assertEqual(0.1347476677660395, MaxLng).

%% Away from the equator the longitude spread exceeds the latitude spread.
search_box_offset_test() ->
    Point = rstar_geometry:point2d(45, -120, undefined),
    Box = search_box(Point, 10000), % 10km
    #geometry{mbr=[{MinLat, MaxLat}, {MinLng, MaxLng}]} = Box,
    assert_close(45.0 - 0.134974625, MinLat),
    assert_close(45.0 + 0.134974625, MaxLat),
    assert_close(-120.0 - 0.19069, MinLng),
    assert_close(-120.0 + 0.19069, MaxLng).
%% Assert two floats agree to four decimal places.
%% (Fix: stripped trailing extraction junk fused onto the `-endif.' line,
%% which made the file syntactically invalid.)
assert_close(A, B) ->
    ?assertEqual(trunc(A*10000), trunc(B*10000)).

-endif.
%% -------------------------------------------------------------------
%%
%% riak_kv_stat_bc: backwards compatible stats module. Maps new folsom stats
%% to legacy riak_kv stats.
%%
%% Copyright (c) 2007-2010 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc riak_kv_stat_bc is a module that maps the new riak_kv_stats metrics
%% to the old set of stats. It exists to maintain backwards compatibility for
%% those using the `/stats` endpoint and `riak-admin status`. This module
%% should be considered soon to be deprecated and temporary.
%%
%% Legacy stats:
%%<dl><dt> vnode_gets
%%</dt><dd> Total number of gets handled by all vnodes on this node
%% in the last minute.
%%</dd>
%%<dt> vnode_puts
%%</dt><dd> Total number of puts handled by all vnodes on this node
%% in the last minute.
%%</dd>
%%<dt> vnode_index_reads
%%</dt><dd> The number of index reads handled by all vnodes on this node.
%% Each query counts as an index read.
%%</dd><
%%<dt> vnode_index_writes
%%</dt><dd> The number of batched writes handled by all vnodes on this node.
%%</dd>
%%<dt> vnode_index_writes_postings
%%</dt><dd> The number of postings written to all vnodes on this node.
%%</dd>
%%<dt> vnode_index_deletes
%%</dt><dd> The number of batched writes handled by all vnodes on this node.
%%</dd><dd> update({vnode_index_delete, PostingsRemoved})
%%
%%</dd><dt> vnode_index_deletes_postings
%%</dt><dd> The number of postings written to all vnodes on this node.
%%</dd><dt> node_gets
%%</dt><dd> Number of gets coordinated by this node in the last
%% minute.
%%</dd><dt> node_get_fsm_siblings
%%</dt><dd> Stats about number of siblings per object in the last minute.
%%</dd><dt> node_get_fsm_objsize
%%</dt><dd> Stats about object size over the last minute. The object
%% size is an estimate calculated by summing the size of the
%% bucket name, key name, and serialized vector clock, plus
%% the value and serialized metadata of each sibling.
%%</dd><dt> node_get_fsm_time_mean
%%</dt><dd> Mean time, in microseconds, between when a riak_kv_get_fsm is
%% started and when it sends a reply to the client, for the
%% last minute.
%%</dd><dt> node_get_fsm_time_median
%%</dt><dd> Median time, in microseconds, between when a riak_kv_get_fsm
%% is started and when it sends a reply to the client, for
%% the last minute.
%%</dd><dt> node_get_fsm_time_95
%%</dt><dd> Response time, in microseconds, met or beaten by 95% of
%% riak_kv_get_fsm executions.
%%</dd><dt> node_get_fsm_time_99
%%</dt><dd> Response time, in microseconds, met or beaten by 99% of
%% riak_kv_get_fsm executions.
%%</dd><dt> node_get_fsm_time_100
%%</dt><dd> Response time, in microseconds, met or beaten by 100% of
%% riak_kv_get_fsm executions.
%%</dd><dt> node_puts
%%</dt><dd> Number of puts coordinated by this node in the last
%% minute.
%%</dd><dt> node_put_fsm_time_mean
%%</dt><dd> Mean time, in microseconds, between when a riak_kv_put_fsm is
%% started and when it sends a reply to the client, for the
%% last minute.
%%</dd><dt> node_put_fsm_time_median
%%</dt><dd> Median time, in microseconds, between when a riak_kv_put_fsm
%% is started and when it sends a reply to the client, for
%% the last minute.
%%</dd><dt> node_put_fsm_time_95
%%</dt><dd> Response time, in microseconds, met or beaten by 95% of
%% riak_kv_put_fsm executions.
%%</dd><dt> node_put_fsm_time_99
%%</dt><dd> Response time, in microseconds, met or beaten by 99% of
%% riak_kv_put_fsm executions.
%%</dd><dt> node_put_fsm_time_100
%%</dt><dd> Response time, in microseconds, met or beaten by 100% of
%% riak_kv_put_fsm executions.
%%</dd><dt> cpu_nprocs
%%</dt><dd> Value returned by {@link cpu_sup:nprocs/0}.
%%
%%</dd><dt> cpu_avg1
%%</dt><dd> Value returned by {@link cpu_sup:avg1/0}.
%%
%%</dd><dt> cpu_avg5
%%</dt><dd> Value returned by {@link cpu_sup:avg5/0}.
%%
%%</dd><dt> cpu_avg15
%%</dt><dd> Value returned by {@link cpu_sup:avg15/0}.
%%
%%</dd><dt> mem_total
%%</dt><dd> The first element of the tuple returned by
%% {@link memsup:get_memory_data/0}.
%%
%%</dd><dt> mem_allocated
%%</dt><dd> The second element of the tuple returned by
%% {@link memsup:get_memory_data/0}.
%%
%%</dd><dt> disk
%%</dt><dd> Value returned by {@link disksup:get_disk_data/0}.
%%
%%</dd><dt> pbc_connects_total
%%</dt><dd> Total number of pb socket connections since start
%%
%%</dd><dt> pbc_active
%%</dt><dd> Number of active pb socket connections
%%
%%</dd><dt> coord_redirs_total
%%</dt><dd> Number of puts forwarded to be coordinated on a node
%% in the preflist.
%%
%%</dd></dl>
%%
%%
-module(riak_kv_stat_bc).
-compile(export_all).
%% @spec produce_stats(state(), integer()) -> proplist()
%% @doc Produce a proplist-formatted view of the current aggregation
%%      of stats: the legacy stat blob plus read-repair, leveldb, disk,
%%      ring, config and application sections.
produce_stats() ->
    Sections = [lists:flatten(legacy_stats()),
                read_repair_stats(),
                level_stats(),
                disk_stats(),
                ring_stats(),
                config_stats(),
                app_stats()],
    lists:append(Sections).
%% Stats in folsom are stored with tuples as keys; the tuples mimic a
%% hierarchical structure. To be free of legacy naming constraints the
%% new names are not simply the old names with commas for underscores.
%% Delegates to riak_kv_status, which renders the new stats under the
%% legacy console names.
legacy_stats() ->
    riak_kv_status:get_stats(console).
%% @doc Legacy stats use multi-field stats for several entries; cache
%% each computed stat in a proplist so the same stat is never calculated
%% twice. Returns {Value, NewCache}; `ValFun' post-processes the value.
get_stat(Name, Type, Cache) ->
    get_stat(Name, Type, Cache, fun(S) -> S end).

get_stat(Name, Type, Cache, ValFun) ->
    case proplists:get_value(Name, Cache) of
        undefined ->
            %% Not cached yet: compute, normalising `unavailable' to [].
            Value =
                case riak_core_stat_q:calc_stat({Name, Type}) of
                    unavailable -> [];
                    Stat -> Stat
                end,
            {ValFun(Value), [{Name, Value} | Cache]};
        Cached ->
            {ValFun(Cached), Cache}
    end.
%% @spec disk_stats() -> proplist()
%% @doc Get stats on the disk, as given by the disksup module
%%      of the os_mon application.
disk_stats() ->
    [{disk, disksup:get_disk_data()}].
%% The running OTP release, as a binary.
otp_release() ->
    list_to_binary(erlang:system_info(otp_release)).

%% The dynamic driver interface version, as a binary.
sys_driver_version() ->
    list_to_binary(erlang:system_info(driver_version)).

%% The full emulator version banner with its trailing newline stripped.
system_version() ->
    Banner = erlang:system_info(system_version),
    list_to_binary(string:strip(Banner, right, $\n)).

%% The architecture the emulator was compiled for, as a binary.
system_architecture() ->
    list_to_binary(erlang:system_info(system_architecture)).
%% Count up all monitors held by local processes. Unfortunately this has
%% to obtain process_info from every process, so it is O(#processes);
%% processes that die mid-scan (process_info returns a non-tuple) are
%% simply skipped.
sys_monitor_count() ->
    AddMonitors =
        fun (Pid, Total) ->
                case erlang:process_info(Pid, monitors) of
                    {monitors, Monitors} ->
                        Total + length(Monitors);
                    _Gone ->
                        Total
                end
        end,
    lists:foldl(AddMonitors, 0, processes()).
%% One {<app>_version, <<"Vsn">>} entry per currently running application.
app_stats() ->
    lists:map(fun ({App, _Desc, Vsn}) ->
                      {list_to_atom(atom_to_list(App) ++ "_version"),
                       list_to_binary(Vsn)}
              end,
              application:which_applications()).
%% Ring membership/ownership stats. Ownership is rendered as the printed
%% form of a [{Node, PartitionCount}] list wrapped in a binary.
%% Improvement: the manual dict:find/dict:store fold is replaced with
%% dict:update_counter/3, which increments in place and initialises
%% absent keys to the increment — identical behavior, less code.
ring_stats() ->
    {ok, R} = riak_core_ring_manager:get_my_ring(),
    %% Tally how many partitions each node owns.
    CountByOwner =
        lists:foldl(fun ({_Partition, Node}, Acc) ->
                            dict:update_counter(Node, 1, Acc)
                    end,
                    dict:new(),
                    riak_core_ring:all_owners(R)),
    Ownership = lists:flatten(io_lib:format("~p", [dict:to_list(CountByOwner)])),
    [{ring_members, riak_core_ring:all_members(R)},
     {ring_num_partitions, riak_core_ring:num_partitions(R)},
     {ring_ownership, list_to_binary(Ownership)}].
%% Static configuration values surfaced alongside the runtime stats.
config_stats() ->
    [{ring_creation_size, app_helper:get_env(riak_core, ring_creation_size)},
     {storage_backend, app_helper:get_env(riak_kv, storage_backend)}].
%% Leveldb stats are a last-minute addition to the blob. The first three
%% segments of each stat name are dropped and the remainder joined with
%% '_' to form the legacy-style key.
level_stats() ->
    Stats = riak_core_stat_q:get_stats([riak_kv, vnode, backend, leveldb, read_block_error]),
    [{join(lists:nthtail(3, Name)), Val} || {Name, Val} <- Stats].
%% Read repair stats are a new addition to the legacy blob, included here
%% because the stat query interface was not ready for the 1.3 release.
%% They are stored as dimensions keyed
%% {Prefix, riak_kv, node, gets, read_repairs, Node, Type, Reason};
%% only aggregations over Type and Reason are wanted, which (with the
%% prefix prepended) are elements 7 and 8 of the key tuple.
read_repair_stats() ->
    Pfx = riak_core_stat:prefix(),
    aggregate(read_repairs, [Pfx, riak_kv, node, gets, read_repairs, '_', '_', '_'], [7,8]).
%% TODO generalise for riak_core_stat_q
%% Aggregates spiral values for the stats matched by `Query', grouped by
%% the key field(s) indexed at `Fields', and produces a flat list of
%% `BaseName_NameOfFieldAtIndex[_count]' entries to fit the existing
%% naming convention of the legacy stat blob.
aggregate(BaseName, Query, Fields) ->
    Stats = exometer:get_values(Query),
    Aggregates = do_aggregate(Stats, Fields),
    FlatStats = flatten_aggregate_stats(BaseName, Aggregates),
    lists:flatten(FlatStats).
%% Sum the spiral {count, one} values of `Stats', grouped by the key
%% derived from the selected name fields. Returns an orddict of
%% Key => [{count, TotalCount}, {one, TotalOne}].
do_aggregate(Stats, Fields) ->
    lists:foldl(
      fun ({Name, [{count, C0}, {one, O0}]}, Acc) ->
              Key = key_from_fields(list_to_tuple(Name), Fields),
              Add = fun ([{count, C}, {one, O}]) ->
                            [{count, C + C0}, {one, O + O0}]
                    end,
              %% update/4: apply Add when the key exists, otherwise seed
              %% the entry with this stat's own values.
              orddict:update(Key, Add, [{count, C0}, {one, O0}], Acc)
      end,
      orddict:new(),
      Stats).
%% Generate a dictionary key for the running aggregation by joining the
%% elements of the `Name' tuple found at the (1-based) indexes listed in
%% `Fields'.
key_from_fields(Name, Fields) ->
    join([element(I, Name) || I <- Fields]).
%% Folds over the (possibly nested) aggregate dictionaries to create a
%% flat list of stats whose names are made by joining each key name onto
%% `BaseName' with '_'. Non-list values are leaves; list values are
%% nested dictionaries and are flattened recursively.
flatten_aggregate_stats(BaseName, Aggregates) ->
    orddict:fold(fun(K, V, Acc) when not is_list(V) ->
                         [{join([BaseName, K]), V}|Acc];
                    (K, V, Acc) ->
                         [flatten_aggregate_stats(join([BaseName, K]), V)|Acc]
                 end,
                 [],
                 Aggregates).
%% Join a list of atoms into a single atom with elements separated by '_',
%% e.g. [a, b, c] -> 'a_b_c'. Accumulates into a binary so no separator
%% precedes the first element.
%% (Fix: stripped trailing extraction junk fused onto the final line,
%% which made the file syntactically invalid.)
join(Parts) ->
    join(Parts, <<>>).

join([], Acc) ->
    binary_to_atom(Acc, latin1);
join([Atom | Rest], <<>>) ->
    join(Rest, atom_to_binary(Atom, latin1));
join([Atom | Rest], Acc) ->
    Next = atom_to_binary(Atom, latin1),
    join(Rest, <<Acc/binary, $_, Next/binary>>).
-module(hoax_invocation).
-export([handle/3]).
-include("hoax_int.hrl").
%% Entry point for a mocked call: look up the expectations registered for
%% {M, F, Arity}, match the actual arguments against them, record the
%% invocation and run the configured action. Raises descriptive errors
%% for unexpected calls, argument mismatches, and call-count overruns.
handle(M, F, Args) ->
    case hoax_tab:lookup_expectations({M, F, length(Args)}) of
        [] ->
            %% No expectation at all for this function/arity.
            erlang:error({unexpected_invocation, hoax_fmt:fmt({M, F, Args})});
        Records ->
            case find_matching_args(Args, Records) of
                false ->
                    unexpected_arguments(M, F, Args, Records);
                %% call_count already equals expected_count: one call too many.
                #expectation{call_count=X,expected_count=X,expected_args=ExpectedArgs} ->
                    erlang:error({too_many_invocations, X+1, hoax_fmt:fmt({M, F, ExpectedArgs})});
                #expectation{action = Action} = Record ->
                    hoax_tab:record_invocation(Record, Args),
                    perform(Action, Args)
            end
    end.
% As the most-common case, handle a single expectation separately to provide better error detail
unexpected_arguments(M, F, Args, [#expectation{expected_args = Expected}]) ->
Arity = length(Args),
FuncRep = flatfmt("~s:~s/~b", [M, F, Arity]),
ArgsNotMatched = lists:foldl(
fun ({_, ExpectedArg, ActualArg}, Acc) when ExpectedArg == ActualArg ->
Acc;
({Seq, ExpectedArg, ActualArg}, Acc) ->
Info = flatfmt("parameter ~b expected ~p but got ~p", [Seq, ExpectedArg, ActualArg]),
[Info | Acc]
end,
[],
lists:zip3(lists:seq(1, Arity), Expected, Args)
),
erlang:error({unexpected_arguments, [FuncRep | lists:reverse(ArgsNotMatched)]});
unexpected_arguments(M, F, Args, Records) when length(Records) > 1 ->
erlang:error({unexpected_arguments, hoax_fmt:fmt({M, F, Args})}).
find_matching_args(Args, Records) ->
keyfind(Args, Records).
keyfind(ActualArgs, [ Expectation = #expectation{expected_args = ExpectedArgs} | Rest ]) ->
case replace_wildcards(ActualArgs, ExpectedArgs) of
ActualArgs -> Expectation;
_ -> keyfind(ActualArgs, Rest)
end;
keyfind(_, []) ->
false.
perform(default, _) -> '$_hoax_default_return_$';
perform(Fun, Args) when is_function(Fun) ->
erlang:apply(Fun, Args);
perform({return, Value}, _) -> Value;
perform({Error, Reason}, _) -> erlang:Error(Reason).
replace_wildcards(ActualArgs, ExpectedArgs) ->
lists:zipwith(fun replace_wildcard/2, ActualArgs, ExpectedArgs).
replace_wildcard(Actual, '_') -> Actual;
replace_wildcard(_, Expected) -> Expected.
flatfmt(Fmt, Args) ->
lists:flatten(io_lib:format(Fmt, Args)). | src/hoax_invocation.erl | 0.637482 | 0.526099 | hoax_invocation.erl | starcoder |
%%%-------------------------------------------------------------------
%%% Author :
%%% Description :
%%%
%%% Created :
%%%-------------------------------------------------------------------
-module(ndarray).
-compile({no_auto_import, [size/1, apply/3]}).
-include_lib("axis.hrl").
-include_lib("kernel/include/logger.hrl").
-include_lib("eunit/include/eunit.hrl").
%% API
-export([new/2, get/2, apply/3, reshape/2, reduce/1]).
-export([ndim/1, shape/1, size/1, data/1]).
-type_export([ndarray/0, shape/0, buffer/0, index/0]).
-type index() :: [integer()] | integer() | ':'.
-type shape() :: [integer()].
-type buffer() :: [number()].
-record(ndarray, {
shape :: shape(),
buffer :: buffer()
}).
-type ndarray() :: #ndarray{}.
-define(REVERS(List), lists:reverse(List)).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc Creates a multidimensional array from a list of lists.
%% @end
%%--------------------------------------------------------------------
-spec new(Shape :: shape(), Buffer :: buffer()) ->
NdArray :: ndarray().
new(Shape, Buffer) ->
#ndarray{
shape = Shape,
buffer = Buffer}.
%%--------------------------------------------------------------------
%% @doc Returns the number of axes (dimensions) of the array.
%% @end
%%--------------------------------------------------------------------
-spec ndim(NdArray :: ndarray()) ->
NumberOfDimensions :: integer().
ndim(NdArray) ->
length(NdArray#ndarray.shape).
%%--------------------------------------------------------------------
%% @doc Returns the ndarray shape.
%% @end
%%--------------------------------------------------------------------
-spec shape(NdArray :: ndarray()) ->
Shape :: shape().
shape(NdArray) ->
NdArray#ndarray.shape.
%%--------------------------------------------------------------------
%% @doc Returns the ndarray size.
%% @end
%%--------------------------------------------------------------------
-spec size(NdArray :: ndarray()) ->
Size :: integer().
size(NdArray) ->
ltools:mult(NdArray#ndarray.shape).
size_test() ->
Array = [[[1,2],
[3,4]],
[[5,6],
[7,8]]],
Shape = [2,2,2],
Buffer = lists:flatten(Array),
NdArray = new(Shape, Buffer),
?assertEqual(8, size(NdArray)).
%%--------------------------------------------------------------------
%% @doc Returns the ndarray buffer.
%% @end
%%--------------------------------------------------------------------
-spec data(NdArray :: ndarray()) ->
Buffer :: buffer().
data(NdArray) ->
NdArray#ndarray.buffer.
%%--------------------------------------------------------------------
%% @doc Reshapes an array.
%% One shape dimension can be -1. In this case, the value is inferred
%% from the length of the array and remaining dimensions.
%% @end
%%--------------------------------------------------------------------
-spec reshape(NdArray :: ndarray(), Shape :: shape()) ->
NdArray :: ndarray().
reshape(NdArray, Shape) ->
NewShape = calc_shape(Shape, size(NdArray), []),
NdArray#ndarray{shape = NewShape}.
%%--------------------------------------------------------------------
%% @doc Gets a sub ndarray indicated by the Indexes. An index can be:
%% An 'integer()' to specify a unique position; A ['integer()'] to
%% specify more than one position in that dimension; The atom ':' to
%% indicate all the positions from that dimension.
%% @todo Use another atom (for ie';') to indicate all but reversed.
%% @end
%%--------------------------------------------------------------------
-spec get(Indexes :: [index()], NdArray :: ndarray()) ->
NdArray :: ndarray().
get(Indexes, NdArray) ->
Shape = shape(NdArray),
BufferIndexes = lists:flatten(buffer_index(Indexes, Shape)),
new(shape_indexing(Indexes, Shape),
ltools:get(BufferIndexes, data(NdArray))).
get_2D_test() ->
Array2D = new([3,2], lists:seq(1,6)),
% Check 1st dimension get
?assertEqual(new([3,1], lists:seq(1,3,1)), get([':',0], Array2D)),
?assertEqual(new([3,1], lists:seq(4,6,1)), get([':',1], Array2D)),
?assertEqual(new([3,1], []), get([':',2], Array2D)),
% Check 2nd dimension get
?assertEqual(new([1,2], lists:seq(1,6,3)), get([0,':'], Array2D)),
?assertEqual(new([1,2], lists:seq(2,6,3)), get([1,':'], Array2D)),
?assertEqual(new([1,2], lists:seq(3,6,3)), get([2,':'], Array2D)).
get_3D_test() ->
Array3D = new([4,3,2], lists:seq(1,24)),
% Array = [[[ 1, 2, 3, 4],[ 5, 6, 7, 8],[ 9,10,11,12]],
% [[13,14,15,16],[17,18,19,20],[21,22,23,24]]],
?assertEqual( Array3D, get([':',':',':'], Array3D)),
?assertEqual(new([1,1,1], [24]), get([3,2,1], Array3D)),
?assertEqual(new([1,1,1], [18]), get([1,1,1], Array3D)),
?assertEqual(new([1,1,1], [ 1]), get([0,0,0], Array3D)),
?assertEqual(new([ 4,1,1], [17,18,19,20]),
get([':',1,1], Array3D)),
?assertEqual(new([ 4, 1, 2], [ 9,10,11,12,21,22,23,24]),
get([':', 2,':'], Array3D)),
?assertEqual(new([ 4, 2, 1], [ 1, 2, 3, 4, 5, 6, 7, 8]),
get([':',[0,1], 0], Array3D)),
?assertEqual(new([ 4, 2, 1], [ 1, 2, 3, 4, 9,10,11,12]),
get([':',[0,2], 0], Array3D)),
?assertEqual(new([ 4, 1, 2], [ 5, 6, 7, 8,17,18,19,20]),
get([':', [1],':'], Array3D)).
%%--------------------------------------------------------------------
%% @doc Applies a function over the specified axis. The function must
%% take as input a list of elements and return a number.
%% - fun(List :: [number()]) -> Result :: number().
%% @end
%%--------------------------------------------------------------------
-spec apply(Fun :: function(), NdArray :: ndarray(),
Axis :: axis()) ->
NdArray :: ndarray().
apply(Fun, NdArray, Axis) ->
Shape = shape(NdArray),
Buffer = data(NdArray),
new(ltools:setnth(Axis+1, Shape, 1),
lists:map(Fun, vect_data(Axis, Shape, Buffer))).
apply_3D_test() ->
Array3D = new([4,3,2], lists:seq(1,24)),
SumAll = fun lists:sum/1,
% Check the values on axis=0 are sum
?assertEqual(new([1,3,2], lists:seq(10,90,16)),
apply(SumAll, Array3D, ?AXIS_0)),
% Check the values on axis=1 are sum
?assertEqual(new([4,1,2], [15,18,21,24,51,54,57,60]),
apply(SumAll, Array3D, ?AXIS_1)),
% Check the values on axis=2 are sum
?assertEqual(new([4,3,1], lists:seq(14,36, 2)),
apply(SumAll, Array3D, ?AXIS_2)).
apply_4D_test() ->
Array4D = new([5,4,3,2], lists:seq(1,5*4*3*2)),
SumAll = fun lists:sum/1,
% Check the values on axis=0 are sum
?assertEqual(new([1,4,3,2], lists:seq(15,590,25)),
apply(SumAll, Array4D, ?AXIS_0)),
% Check the values on axis=1 are sum
?assertEqual(new([5,1,3,2], [34,38,42,46,50,114,118,122,126,130,
194,198,202,206,210,274,278,282,286,
290,354,358,362,366,370,434,438,442,
446,450]),
apply(SumAll, Array4D, ?AXIS_1)),
% Check the values on axis=2 are sum
?assertEqual(new([5,4,1,2], [63,66,69,72,75,78,81,84,87,90,93,96,
99,102,105,108,111,114,117,120,243,246,
249,252,255,258,261,264,267,270,273,
276,279,282,285,288,291,294,297,300]),
apply(SumAll, Array4D, ?AXIS_2)),
% Check the values on axis=3 are sum
?assertEqual(new([5,4,3,1], lists:seq(62,180,2)),
apply(SumAll, Array4D, ?AXIS_3)).
%%--------------------------------------------------------------------
%% @doc Deletes all dimensions with shape length == 1.
%% @end
%%--------------------------------------------------------------------
-spec reduce(NdArray :: ndarray()) ->
NdArray :: ndarray().
reduce(NdArray) ->
NewShape = [X || X <- shape(NdArray), X /= 1],
new(NewShape, data(NdArray)).
reduce_test() ->
Array4D = new([1,2,1,2], lists:seq(1,4)),
?assertEqual(new([2,2], data(Array4D)), reduce(Array4D)).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%%--------------------------------------------------------------------
calc_shape([N | Rest], Size, Acc) when N > 0->
calc_shape(Rest, Size div N, [N | Acc]);
calc_shape([-1 | Rest], Size, Acc) ->
case Size div ltools:mult(Rest) of
N when N > 1 -> calc_shape(Rest, Size div N, [N | Acc]);
_ -> error(badarg)
end;
calc_shape([], 1, Acc) ->
lists:reverse(Acc);
calc_shape(_, _, _) ->
error(badarg).
calc_shape_test() ->
% If shape has all elements (no -1) must return the same shape
Shape1 = [1, 2, 3],
Size = ltools:mult(Shape1),
?assertEqual(Shape1, calc_shape(Shape1, Size, [])),
% If there is a -1, the remaining size must be placed in that dim
Shape2 = [1, -1, 3],
?assertEqual(Shape1, calc_shape(Shape2, Size, [])),
% A shape with more than one (-1) must rise a badarg error
?assertError(badarg, calc_shape([1, -1, -1], Size, [])),
% A shape that does not fit with the size must return an error
?assertError(badarg, calc_shape([2, 1, 1], Size, [])),
?assertError(badarg, calc_shape([2, 3, 3], Size, [])).
%%--------------------------------------------------------------------
shape_indexing([ L|Ix], [_|Shape]) when is_list(L) ->
[length(L)|shape_indexing(Ix, Shape)];
shape_indexing([ I|Ix], [_|Shape]) when is_integer(I) ->
[1|shape_indexing(Ix, Shape)];
shape_indexing([':'|Ix], [S|Shape]) -> [S|shape_indexing(Ix, Shape)];
shape_indexing( [], []) -> [].
shape_indexing_test() ->
Shape = [1,2,3,4,5],
Indx1 = [':',1,':',2,4],
?assertEqual([1,1,3,1,1], shape_indexing(Indx1, Shape)),
Indx2 = [':',':',':',':',4],
?assertEqual([1,2,3,4,1], shape_indexing(Indx2, Shape)),
Indx3 = [1,1,1,':',':'],
?assertEqual([1,1,1,4,5], shape_indexing(Indx3, Shape)).
%%--------------------------------------------------------------------
buffer_index(Ix, Dx) ->
buffer_index(lists:reverse(Ix), lists:reverse(Dx), 0).
buffer_index([':'|Ix], [D|Dx], Acc) ->
I = lists:seq(0,D-1,1),
buffer_index([I|Ix], [D|Dx], Acc);
buffer_index([ I|Ix], [D|Dx], Acc) when is_list(I) ->
[buffer_index(Ix, Dx, Acc*D+X) || X <-I];
buffer_index([ I|Ix], [D|Dx], Acc) ->
[buffer_index(Ix, Dx, Acc*D+I)];
buffer_index( [], [], Acc) ->
Acc + 1.
buffer_index_test() ->
Dim = [4,3,2],
Array = [[[ 1, 2, 3, 4],[ 5, 6, 7, 8],[ 9,10,11,12]],
[[13,14,15,16],[17,18,19,20],[21,22,23,24]]],
?assertEqual(Array, buffer_index([':',':',':'], Dim)),
?assertEqual([[[24]]], buffer_index([ 3, 2, 1], Dim)),
?assertEqual([[[18]]], buffer_index([ 1, 1, 1], Dim)),
?assertEqual([[[ 1]]], buffer_index([ 0, 0, 0], Dim)),
?assertEqual([[[17,18,19,20]]],
buffer_index([':', 1, 1], Dim)),
?assertEqual([[[ 9,10,11,12]],[[21,22,23,24]]],
buffer_index([':', 2, ':'], Dim)),
?assertEqual([[[ 1, 2, 3, 4],[ 5, 6, 7, 8]]],
buffer_index([':',[0,1], 0], Dim)),
?assertEqual([[[ 1, 2, 3, 4],[ 9,10,11,12]]],
buffer_index([':',[0,2], 0], Dim)),
?assertEqual([[[ 5, 6, 7, 8]],[[17,18,19,20]]],
buffer_index([':', [1],':'], Dim)).
%%--------------------------------------------------------------------
vect_data(Axis, Shape, Buffer) ->
{DH, _} = lists:split(Axis+1, [1 | Shape]),
DBunch = ltools:mult(DH),
DAxis = lists:nth(Axis+1, Shape),
fill_bunch(DBunch, DBunch*DAxis, Buffer).
fill_bunch(_DBunch,_Size, []) -> [];
fill_bunch( DBunch, Size, Buffer) ->
Lxx = [[] || _ <- lists:seq(1, DBunch)],
{Vx , Rest} = lists:split(Size, Buffer),
fill_bunch(Vx, {Lxx, []}) ++ fill_bunch(DBunch, Size, Rest).
fill_bunch([V | Vx], {[ Lx |Lxx], Axx }) ->
fill_bunch( Vx , { Lxx , [[V|Lx]|Axx]});
fill_bunch( Vx , { [] , Axx }) ->
fill_bunch( Vx , {?REVERS(Axx), [] });
fill_bunch( [] , { Axx , [] }) ->
Axx.
vect_buffer3D_test() ->
Shape = [4,3,2],
Buffer = lists:seq(1,24),
?assertEqual([[ 4, 3, 2, 1],[ 8, 7, 6, 5],[12,11,10, 9],
[16,15,14,13],[20,19,18,17],[24,23,22,21]],
vect_data(0, Shape, Buffer)),
?assertEqual([[ 9, 5, 1],[10, 6, 2],[11, 7, 3],[12, 8, 4],
[21,17,13],[22,18,14],[23,19,15],[24,20,16]],
vect_data(1, Shape, Buffer)),
?assertEqual([[13, 1],[14, 2],[15, 3],[16, 4],[17, 5],[18, 6],
[19, 7],[20, 8],[21, 9],[22,10],[23,11],[24,12]],
vect_data(2, Shape, Buffer)).
vect_buffer4D_test() ->
Shape = [5,4,3,2],
Buffer = lists:seq(1,ltools:mult(Shape)),
?assertEqual([[X+Y
||X<-lists:seq(60, 0,-60)]
||Y<-lists:seq( 1,60, 1)],
vect_data(3, Shape, Buffer)). | src/ndarray.erl | 0.529507 | 0.518973 | ndarray.erl | starcoder |
%% -------------------------------------------------------------------
%%
%% riak_kv_bucket: bucket validation functions
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc KV Bucket validation functions
-module(riak_kv_bucket).
-export([validate/4]).
-include("riak_kv_types.hrl").
-ifdef(TEST).
-ifdef(EQC).
-compile([export_all]).
-include_lib("eqc/include/eqc.hrl").
-endif.
-include_lib("eunit/include/eunit.hrl").
-endif.
-type prop() :: {PropName::atom(), PropValue::any()}.
-type error_reason() :: atom() | string().
-type error() :: {PropName::atom(), ErrorReason::error_reason()}.
-type props() :: [prop()].
-type errors() :: [error()].
-export_type([props/0]).
%% @doc called by riak_core in a few places to ensure bucket
%% properties are sane. The arguments combinations have the following
%% meanings:-
%%
%% The first argument is the `Phase' of the bucket/bucket type
%% mutation and can be either `create' or `update'.
%%
%% `create' always means that we are creating a new bucket type or
%% updating an inactive bucket type. In the first case `Existing' is
%% the atom `undefined', in the second it is a list of the valid
%% properties returned from the first invocation of `validate/4'. The
%% value of `Bucket' will only ever be a two-tuple of `{binary(),
%% undefined}' for create, as it is only used on bucket types. The
%% final argument `BucketProps' is a list of the properties the user
%% provided for type creation merged with the default properties
%% defined in `riak_core_bucket_type:defaults/0' The job of the
%% function is to validate the given `BucketProps' and return a two
%% tuple `{Good, Bad}' where the first element is the list of valid
%% properties and the second a list of `error()' tuples. Riak_Core
%% will store the `Good' list in metadata iif the `Bad' list is the
%% empty list. It is worth noting that on `create' we must ignore the
%% `Existing' argument altogether.
%%
%% `update' means that we are either updating a bucket type or a
%% bucket. If `Bucket' is a `binary()' or a tuple `{binary(),
%% binary()}' then, a bucket is being updated. If `bucket' is a two
%% tuple of `{binary(), undefined}' then a bucket type is being
%% updated. When `validate/4' is called with `update' as the phase
%% then `Existing' will be the set of properties stored in metadata
%% for this bucket (the set returned as `Good' from the `create'
%% phase) and `BucketProps' will be ONLY the properties that user has
%% supplied as those to update (note: update may mean adding new
%% properties.) The job of `validate/4' in this case is to validate
%% the new properties and return a complete set of bucket properties
%% (ie the new properties merged with the existing propeties) in
%% `Good', riak will then persist these `Good' properties, providing
%% `Bad' is empty.
%%
%% `validate/4' can be used to enforce immutable or co-invariant bucket
%% properties, like "only non-default bucket types can have a
%% `datatype' property", and that "`datatype' buckets must be
%% allow_mult" and "once set, `datatype' cannot be changed".
%%
%% There is no way to _remove_ a property
%%
%% @see validate_dt_props/3
%% @see assert_no_datatype/1
-spec validate(create | update,
               {riak_core_bucket_type:bucket_type(), undefined | binary()} | binary(),
               undefined | props(),
               props()) -> {props(), errors()}.
%% `create' phase: a new (or not-yet-active) bucket type; `Existing'
%% is ignored in this phase (see the module doc above)
validate(create, _Bucket, _Existing, BucketProps) when is_list(BucketProps) ->
    validate_create_bucket_type(BucketProps);
%% `update' phase on a bucket type (the bucket name slot is `undefined')
validate(update, {_TypeName, undefined}, Existing, New) when is_list(Existing),
                                                             is_list(New) ->
    validate_update_bucket_type(Existing, New);
%% `update' phase on a named bucket within a non-default type
validate(update, {Type, Name}, Existing, New) when is_list(Existing),
                                                   is_list(New),
                                                   is_binary(Name),
                                                   Type /= <<"default">> ->
    validate_update_typed_bucket(Existing, New);
%% `update' phase on a default-type (or legacy, untyped) bucket
validate(update, _Bucket, Existing, New) when is_list(Existing),
                                              is_list(New) ->
    validate_default_bucket(Existing, New).
%% @private Validation for the `create' phase of a bucket type.
%% Dispatches on the (possibly absent) `consistent' property, then on
%% `write_once', to pick the specialised first-pass validator before
%% running the generic property checks and the post-merge validation.
-spec validate_create_bucket_type(props()) -> {props(), errors()}.
validate_create_bucket_type(BucketProps) ->
    {Unvalidated, Valid, Errors} =
        case proplists:get_value(consistent, BucketProps) of
            %% type is explicitly or implicitly not intended to be consistent
            Consistent when Consistent =:= false; Consistent =:= undefined ->
                case get_boolean(write_once, BucketProps) of
                    true -> validate_create_w1c_props(BucketProps);
                    _    -> validate_create_dt_props(BucketProps)
                end;
            %% the user supplied some value for `consistent' (it may be invalid)
            Consistent ->
                validate_create_consistent_props(Consistent, BucketProps)
        end,
    {Good, Bad} = validate(Unvalidated, Valid, Errors),
    validate_post_merge(Good, Bad).
%% @private Validation for the `update' phase of a bucket type.  The
%% existing properties are assumed valid (they passed the `create'
%% phase), so only the new properties are checked; the result is the
%% new valid properties merged over the existing ones.
-spec validate_update_bucket_type(props(), props()) -> {props(), errors()}.
validate_update_bucket_type(Existing, New) ->
    {Unvalidated, Valid, Errors} =
        validate_update_type(type(Existing), Existing, New),
    {Good, Bad} = validate(Unvalidated, Valid, Errors),
    validate_post_merge(merge(Good, Existing), Bad).
%% @private Dispatch to the update validator matching the bucket's
%% existing `type' (consistent | write_once | datatype | default).
-spec validate_update_type(Type :: consistent | datatype | write_once | default,
                           Existing :: props(),
                           New :: props()) ->
                                  {Unvalidated :: props(),
                                   Valid :: props(),
                                   Errors :: props()}.
validate_update_type(consistent, Existing, New) ->
    validate_update_consistent_props(Existing, New);
validate_update_type(write_once, _Existing, New) ->
    validate_update_w1c_props(proplists:get_value(write_once, New), New);
validate_update_type(datatype, Existing, New) ->
    validate_update_dt_props(Existing, New);
validate_update_type(default, _Existing, New) ->
    validate_update_default_props(New).
%% @private Determine what `type' an existing bucket is.  NOTE: only
%% call with validated props from existing buckets!!
-spec type(props()) -> consistent | default | datatype | write_once.
type(Props) ->
    Consistent = proplists:get_value(consistent, Props, false),
    WriteOnce  = proplists:get_value(write_once, Props, false),
    Datatype   = proplists:get_value(datatype, Props, false),
    type(Consistent, WriteOnce, Datatype).

%% @private `consistent' takes precedence over `write_once', which
%% takes precedence over `datatype'; with none of the three set the
%% bucket is a plain default (KV) bucket.
-spec type(boolean(), boolean(), atom()) ->
                  consistent | default | datatype | write_once.
type(true, _WriteOnce, _Datatype) ->
    consistent;
type(_Consistent, true, _Datatype) ->
    write_once;
type(false, false, false) ->
    default;
type(_Consistent, _WriteOnce, _Datatype) ->
    datatype.
%% @private just delegates: a bucket of a non-default type is
%% constrained exactly as its bucket type is, so updating it follows
%% the same validation path as updating the type itself.
-spec validate_update_typed_bucket(props(), props()) -> {props(), errors()}.
validate_update_typed_bucket(Existing, New) ->
    {Good, Bad} = validate_update_bucket_type(Existing, New),
    validate_post_merge(Good, Bad).
%% @private as far as datatypes go, default buckets are free to do as
%% they please, the datatypes API only works on typed buckets. Go
%% wild!  Only the generic per-property validation is applied, and the
%% valid result is merged over the existing properties.
-spec validate_default_bucket(props(), props()) -> {props(), errors()}.
validate_default_bucket(Existing, New) ->
    {Good, Bad} = validate(New, [], []),
    validate_post_merge(merge(Good, Existing), Bad).
%% @private merge two property lists: a property in `New' overwrites
%% the same-named property in `Old' (delegates to
%% riak_core_bucket_props:merge/2).
-spec merge(props(), props()) -> props().
merge(New, Old) ->
    riak_core_bucket_props:merge(New, Old).
%% @private general property validation: the final pass over the as
%% yet unvalidated properties.  Well-known properties are coerced and
%% checked; unrecognised properties pass through unchanged.
-spec validate(InProps::props(), ValidProps::props(), Errors::errors()) ->
                      {props(), errors()}.
validate([], ValidProps, Errors) ->
    {ValidProps, Errors};
%% boolean-valued properties: coerce common representations
%% (atom / binary / string / integer) to true | false
validate([{BoolProp, MaybeBool}|T], ValidProps, Errors) when is_atom(BoolProp), BoolProp =:= allow_mult
                                                             orelse BoolProp =:= basic_quorum
                                                             orelse BoolProp =:= last_write_wins
                                                             orelse BoolProp =:= notfound_ok
                                                             orelse BoolProp =:= stat_tracked ->
    case coerce_bool(MaybeBool) of
        error ->
            validate(T, ValidProps, [{BoolProp, not_boolean}|Errors]);
        Bool ->
            validate(T, [{BoolProp, Bool}|ValidProps], Errors)
    end;
%% `write_once' may only ever be (re)stated as false here; becoming
%% write_once is handled by the dedicated create/update paths
validate([{write_once, Value}|T], ValidProps, Errors) ->
    case Value of
        false -> validate(T, [{write_once, false} | ValidProps], Errors);
        _ -> validate(T, ValidProps, [{write_once, "cannot update write_once property"}|Errors])
    end;
%% likewise `consistent' may only be (re)stated as false here
validate([{consistent, Value}|T], ValidProps, Errors) ->
    case Value of
        false -> validate(T, [{consistent, false} | ValidProps], Errors);
        _ -> validate(T, ValidProps, [{consistent, "cannot update consistent property"}|Errors])
    end;
%% strictly positive integer properties
validate([{IntProp, MaybeInt}=Prop | T], ValidProps, Errors) when IntProp =:= big_vclock
                                                                  orelse IntProp =:= n_val
                                                                  orelse IntProp =:= old_vclock
                                                                  orelse IntProp =:= small_vclock ->
    case is_integer(MaybeInt) of
        true when MaybeInt > 0 ->
            validate(T, [Prop | ValidProps], Errors);
        _ ->
            validate(T, ValidProps, [{IntProp, not_integer} | Errors])
    end;
%% quorum properties that must be a positive count or a quorum atom
validate([{QProp, MaybeQ}=Prop | T], ValidProps, Errors) when QProp =:= r
                                                              orelse QProp =:= rw
                                                              orelse QProp =:= w ->
    case is_quorum(MaybeQ) of
        true ->
            validate(T, [Prop | ValidProps], Errors);
        false ->
            validate(T, ValidProps, [{QProp, not_valid_quorum} | Errors])
    end;
%% quorum properties that may additionally be zero
validate([{QProp, MaybeQ}=Prop | T], ValidProps, Errors) when QProp =:= dw
                                                              orelse QProp =:= pw
                                                              orelse QProp =:= pr ->
    case is_opt_quorum(MaybeQ) of
        true ->
            validate(T, [Prop | ValidProps], Errors);
        false ->
            validate(T, ValidProps, [{QProp, not_valid_quorum} | Errors])
    end;
%% everything else is accepted as-is
validate([Prop|T], ValidProps, Errors) ->
    validate(T, [Prop|ValidProps], Errors).
%% @private a valid quorum is a strictly positive integer or one of
%% the quorum keywords (as atom or binary).
-spec is_quorum(term()) -> boolean().
is_quorum(N) when is_integer(N), N > 0 ->
    true;
is_quorum(Q) ->
    lists:member(Q, [quorum, one, all,
                     <<"quorum">>, <<"one">>, <<"all">>]).

%% @private some quorum options can be zero
-spec is_opt_quorum(term()) -> boolean().
is_opt_quorum(0) ->
    true;
is_opt_quorum(Q) ->
    is_quorum(Q).
%% @private best-effort coercion of common boolean representations
%% (atom, binary, string, integer) to true | false; anything
%% unrecognisable yields `error'.  Uses list_to_existing_atom/1 so
%% arbitrary user input cannot grow the atom table.
-spec coerce_bool(any()) -> boolean() | error.
coerce_bool(true) ->
    true;
coerce_bool(false) ->
    false;
coerce_bool(MaybeBool) when is_atom(MaybeBool) ->
    coerce_bool(atom_to_list(MaybeBool));
coerce_bool(MaybeBool) when is_binary(MaybeBool) ->
    coerce_bool(binary_to_list(MaybeBool));
coerce_bool(Int) when is_integer(Int), Int =< 0 ->
    false;
coerce_bool(Int) when is_integer(Int) , Int > 0 ->
    true;
coerce_bool(MaybeBool) when is_list(MaybeBool) ->
    %% try/catch rather than old-style `catch': only the expected
    %% badarg from an unknown atom is swallowed, and genuine errors
    %% keep their class and stacktrace
    try list_to_existing_atom(string:to_lower(MaybeBool)) of
        true -> true;
        false -> false;
        _ -> error
    catch
        error:badarg -> error
    end;
coerce_bool(_) ->
    error.
%% @private riak consistent object support requires a bucket type
%% where `consistent' is defined and not `false' to have `consistent'
%% set to true. this function validates that property.
%%
%% We take the indication of a value other than `false' to mean the user
%% intended to create a consistent type. We validate that the value is actually
%% something Riak can understand -- `true'. Why don't we just convert any other
%% value to true? Well, the user maybe typed "fals" so lets be careful.
-spec validate_create_consistent_props(any(), props()) -> {props(), props(), errors()}.
validate_create_consistent_props(true, New) ->
    %% write_once and consistent can't both be true
    case get_boolean(write_once, New) of
        true ->
            %% message wording fixed ("must be not be" -> "must not
            %% be") to match the parallel message in validate_w1c_props/3
            {lists:keydelete(consistent, 1, New), [], [{consistent, "Write once buckets must not be consistent=true"}]};
        _ ->
            {lists:keydelete(consistent, 1, New), [{consistent, true}], []}
    end;
validate_create_consistent_props(false, New) ->
    {lists:keydelete(consistent, 1, New), [{consistent, false}], []};
%% `consistent' absent: leave it for the generic validator
validate_create_consistent_props(undefined, New) ->
    {New, [], []};
%% anything else (e.g. a typo like "fals") is rejected outright
validate_create_consistent_props(Invalid, New) ->
    Err = lists:flatten(io_lib:format("~p is not a valid value for consistent. Use \"true\" or \"false\"", [Invalid])),
    {lists:keydelete(consistent, 1, New), [], [{consistent, Err}]}.
%% @private riak datatype support requires a bucket type of `datatype'
%% and `allow_mult' set to `true'. These function enforces those
%% properties.
%%
%% We take the presence of a `datatype' property as indication that
%% this bucket type is a special type, somewhere to store CRDTs. I
%% realise this slightly undermines the reason for bucket types (no
%% magic names) but there has to be some way to indicate intent, and
%% that way is the "special" property name `datatype'.
%%
%% Since we don't ever want sibling CRDT types (though we can handle
%% them: see riak_kv_crdt), `datatype' is an immutable property. Once
%% you create a bucket with a certain datatype you can't change
%% it. The `update' bucket type path enforces this. It doesn't
%% validate the correctness of the type, since it assumes that was
%% done at creation, only that it is either the same as existing or
%% not present.
%% @private entry point for datatype validation at create time:
%% dispatch on the (possibly absent) `datatype' property.
-spec validate_create_dt_props(props()) -> {props(), props(), errors()}.
validate_create_dt_props(New) ->
    validate_create_dt_props(proplists:get_value(datatype, New), New).
%% @private validate the datatype, if present
-spec validate_create_dt_props(undefined | atom(), props()) -> {props(), props(), errors()}.
%% no datatype requested: nothing to validate here
validate_create_dt_props(undefined, New) ->
    {New, [], []};
%% a datatype was requested: it must map onto one of the supported
%% top-level CRDT modules; either way, allow_mult is checked next by
%% validate_create_dt_props/3
validate_create_dt_props(DataType, New) ->
    Unvalidated = lists:keydelete(datatype, 1, New),
    Mod = riak_kv_crdt:to_mod(DataType),
    case lists:member(Mod, ?V2_TOP_LEVEL_TYPES) of
        true ->
            validate_create_dt_props(Unvalidated, [{datatype, DataType}], []);
        false ->
            Err = lists:flatten(io_lib:format("~p not supported for bucket datatype property", [DataType])),
            validate_create_dt_props(Unvalidated, [], [{datatype, Err}])
    end.
%% @private validate the allow_mult property: if `datatype' was
%% present, require `allow_mult=true' even if `datatype' was invalid,
%% as we assume the user meant to create a `datatype' bucket.
-spec validate_create_dt_props(props(), props(), errors()) -> {props(), props(), errors()}.
validate_create_dt_props(Unvalidated0, Valid, Invalid) ->
    Unvalidated = lists:keydelete(allow_mult, 1, Unvalidated0),
    case allow_mult(Unvalidated0) of
        true ->
            {Unvalidated, [{allow_mult, true} | Valid], Invalid};
        _ ->
            %% a flat string literal, consistent with the
            %% error_reason() type and every other property error
            %% message (was an unflattened io_lib:format/2 iolist)
            Err = "Data Type buckets must be allow_mult=true",
            {Unvalidated, Valid, [{allow_mult, Err} | Invalid]}
    end.
%% @private Riak write_once support requires a bucket type where
%% write_once is set to true. This function validates that when
%% write_once is set to true, other properties are consistent.
%% See validate_w1c_props/3 for an enumeration of these rules.
-spec validate_create_w1c_props(props()) -> {props(), props(), errors()}.
validate_create_w1c_props(New) ->
    validate_create_w1c_props(proplists:get_value(write_once, New), New).
%% @private validate the write_once, if present.
%% NOTE: intentionally a single `true' clause -- the caller
%% (validate_create_bucket_type/1) only takes this path when
%% get_boolean(write_once, ...) returned true.
-spec validate_create_w1c_props(true, props()) -> {props(), props(), errors()}.
validate_create_w1c_props(true, New) ->
    Unvalidated = lists:keydelete(write_once, 1, New),
    validate_w1c_props(Unvalidated, [{write_once, true}], []).
%% @private checks that a bucket that is not a special immutable type
%% is not attempting to become one.
-spec validate_update_default_props(New :: props()) ->
                                           {Unvalidated :: props(),
                                            Valid :: props(),
                                            Error :: props()}.
validate_update_default_props(New) ->
    %% Only called if not already a consistent, datatype, write_once
    %% bucket. Check that none of those are being set to `true'/valid
    %% datatypes.  (`datatype' is rejected whatever its value;
    %% `consistent'/`write_once' only when they coerce to true.)
    ensure_not_present(New, [], [], [{datatype, "`datatype` must not be defined."},
                                     {consistent, true, "Write once buckets must not be consistent=true"},
                                     {write_once, true, "Cannot set existing bucket type to `write_once`"}]).
%% @private validate that strongly-consistent types and buckets do not
%% have their n_val changed, nor become eventually consistent.
%%
%% Returns {Unvalidated, Valid, Errors}: the remaining properties still
%% to be checked by the generic validator, the properties validated
%% here, and any errors found.
-spec validate_update_consistent_props(props(), props()) -> {props(), props(), errors()}.
validate_update_consistent_props(Existing, New) ->
    Unvalidated = lists:keydelete(n_val, 1, lists:keydelete(consistent, 1, New)),
    OldNVal = proplists:get_value(n_val, Existing),
    NewNVal = proplists:get_value(n_val, New, OldNVal),
    NewConsistent = proplists:get_value(consistent, New),
    CErr = "cannot update consistent property",
    NErr = "n_val cannot be modified for existing consistent type",
    case {NewConsistent, OldNVal, NewNVal} of
        %% neither property supplied
        {undefined, _, undefined} ->
            {Unvalidated, [], []};
        %% n_val supplied but unchanged
        {undefined, _N, _N} ->
            {Unvalidated, [{n_val, NewNVal}], []};
        %% consistent=true restated, n_val unchanged
        {true, _N, _N} ->
            {Unvalidated, [{n_val, NewNVal}, {consistent, true}], []};
        %% attempt to change `consistent' away from true; n_val
        %% unchanged.  BUGFIX: the guard previously used `orelse',
        %% which made it vacuously true for any C -- `andalso' states
        %% the actual intent (behavior is unchanged only because the
        %% undefined/true cases are matched by the clauses above)
        {C, _N, _N} when C =/= undefined andalso
                         C =/= true ->
            {Unvalidated, [{n_val, NewNVal}], [{consistent, CErr}]};
        %% attempt to change n_val only
        {undefined, _OldN, _NewN} ->
            {Unvalidated, [], [{n_val, NErr}]};
        %% consistent=true restated, but n_val changed
        {true, _OldN, _NewN} ->
            {Unvalidated, [{consistent, true}], [{n_val, NErr}]};
        %% both properties changed illegally
        {_, _, _} ->
            {Unvalidated, [], [{n_val, NErr}, {consistent, CErr}]}
    end.
%% @private datatype is immutable once set: an update may omit it or
%% restate the existing value, but never change it.  Somewhat
%% duplicates the create path, but easier to read this way.
-spec validate_update_dt_props(props(), props()) -> {props(), props(), errors()}.
validate_update_dt_props(Existing, New0) ->
    Remaining = lists:keydelete(datatype, 1, New0),
    OldType = proplists:get_value(datatype, Existing),
    case proplists:get_value(datatype, New0) of
        undefined ->
            %% datatype not mentioned in the update: nothing to check
            validate_update_dt_props(Remaining, [], []);
        OldType ->
            %% restating the existing datatype is fine
            validate_update_dt_props(Remaining, [{datatype, OldType}], []);
        _Changed ->
            validate_update_dt_props(Remaining, [],
                                     [{datatype, "Cannot update datatype on existing bucket"}])
    end.
%% @private check that allow_mult, if supplied by the update, remains
%% true: datatype buckets may never turn sibling generation off.
-spec validate_update_dt_props(props(), props(), errors()) -> {props(), props(), errors()}.
validate_update_dt_props(New, Valid, Invalid) ->
    Remaining = lists:keydelete(allow_mult, 1, New),
    case allow_mult(New) of
        %% allow_mult untouched by this update
        undefined ->
            {Remaining, Valid, Invalid};
        true ->
            {Remaining, [{allow_mult, true} | Valid], Invalid};
        _NotTrue ->
            {Remaining, Valid,
             [{allow_mult, "Cannot change datatype bucket from allow_mult=true"} | Invalid]}
    end.
%% @private write_once is immutable once true: an update may omit it
%% or restate `true'; any other value is an error.
%% precondition: Existing contains {write_once, true}
-spec validate_update_w1c_props(boolean() | undefined, props()) ->
                                       {props(), props(), errors()}.
validate_update_w1c_props(NewFP, New) ->
    Remaining = lists:keydelete(write_once, 1, New),
    case NewFP =:= true orelse NewFP =:= undefined of
        true ->
            validate_w1c_props(Remaining, [{write_once, true}], []);
        false ->
            validate_w1c_props(Remaining, [],
                               [{write_once, "Cannot modify write_once property once set to true"}])
    end.
%% @private validate the boolean property, if `write_once' was present.
%% precondition: write_once is not an entry in Unvalidated
%%               write_once is an entry in Valid
%% The following rules apply when write_once is true:
%% - datatype may not be defined
%% - consistent may not be true
%% Delegates to ensure_not_present/4: `consistent' is rejected only when it
%% coerces to `true'; any `datatype' entry at all is rejected.
-spec validate_w1c_props(props(), props(), errors()) -> {props(), props(), errors()}.
validate_w1c_props(Unvalidated, Valid, Errors) ->
    ensure_not_present(Unvalidated, Valid, Errors,
                       [{consistent, true, "Write once buckets must not be consistent=true"},
                        {datatype, "Write once buckets must not have datatype defined"}
                       ]).
%% @private any property in `InvalidPropsSpec' present in
%% `Unvalidated' will be added to `Errors'. Returned is the as yet
%% unvalidated remainder properties from `Unvalidated', `Valid'
%% (unchanged by this function) and the accumulated errors added to
%% `Errors'.
%%
%% Spec entries are either `{Key, NotAllowed, ErrorMessage}' (only the
%% coerced value `NotAllowed' is rejected) or `{Key, ErrorMessage}'
%% (any occurrence of `Key' is rejected).
%%
%% NOTE(review): when a `{Key, NotAllowed, _}' entry is present with an
%% allowed value, it is deliberately left in `Unvalidated' rather than
%% moved to `Valid' — presumably so downstream generic validation still
%% sees it. Confirm with callers before changing.
%%
%% Spec fix: the Errors argument and the third element of the result are
%% errors(), not props(), matching validate_w1c_props/3 which feeds them
%% straight through; the two-tuple spec form carries a string message.
-spec ensure_not_present(props(), props(), errors(), [{atom(), term(), string()} |
                                                      {atom(), string()}]) ->
    {props(), props(), errors()}.
ensure_not_present(Unvalidated, Valid, Errors, InvalidPropsSpec) ->
    lists:foldl(fun({Key, NotAllowed, ErrorMessage}, {U, V, E}) ->
                        case lists:keytake(Key, 1, U) of
                            false ->
                                {U, V, E};
                            {value, {Key, Val}, U2} ->
                                %% coerce so e.g. <<"true">> is rejected like `true'
                                Val2 = coerce_bool(Val),
                                if Val2 == NotAllowed ->
                                        {U2, V, [{Key, ErrorMessage} | E]};
                                   true ->
                                        {U, V, E}
                                end
                        end;
                   ({Key, ErrorMessage}, {U, V, E}) ->
                        case lists:keytake(Key, 1, U) of
                            false -> {U, V, E};
                            {value, {Key, _Val}, U2} ->
                                {U2, V, [{Key, ErrorMessage} | E]}
                        end
                end,
                {Unvalidated, Valid, Errors},
                InvalidPropsSpec).
%% Validate properties after they have all been individually validated, merged,
%% and resolved to their final values. This allows for identifying invalid
%% combinations of properties, such as `last_write_wins=true' and
%% `dvv_enabled=true'. Returns the (possibly reduced) property list and the
%% accumulated errors.
-spec validate_post_merge(props(), errors()) -> {props(), errors()}.
validate_post_merge(Props, Errors) ->
    %% Currently, we only have one validation rule to apply at this stage, so
    %% just call the validation function directly. If more are added in the
    %% future, it would be good to use function composition to compose the
    %% individual validation functions into a single function.
    validate_last_write_wins_implies_not_dvv_enabled({Props, Errors}).
%% If `last_write_wins' is true, `dvv_enabled' must not also be true.
%% On conflict, drops `dvv_enabled' from the props and records an error.
-spec validate_last_write_wins_implies_not_dvv_enabled({props(), errors()}) -> {props(), errors()}.
validate_last_write_wins_implies_not_dvv_enabled({Props, Errors}) ->
    Lww = last_write_wins(Props),
    Dvv = dvv_enabled(Props),
    case Lww =:= true andalso Dvv =:= true of
        true ->
            Err = {dvv_enabled,
                   "If last_write_wins is true, dvv_enabled must be false"},
            {lists:keydelete(dvv_enabled, 1, Props), [Err | Errors]};
        false ->
            {Props, Errors}
    end.
%% @private just grab the allow_mult value if it exists.
%% Returns `undefined' when absent, a boolean when coercible, or `error'
%% for unrecognizable values.
-spec allow_mult(props()) -> boolean() | 'undefined' | 'error'.
allow_mult(Props) ->
    %% Delegates to get_boolean/2, which implements exactly this
    %% undefined-or-coerce logic; keeps behaviour consistent with
    %% last_write_wins/1 and dvv_enabled/1 instead of duplicating it.
    get_boolean(allow_mult, Props).
%% Boolean value of the `last_write_wins' property, or `undefined' if not present.
%% May also return `error' when the stored value cannot be coerced to a boolean.
-spec last_write_wins(props()) -> boolean() | 'undefined' | 'error'.
last_write_wins(Props) ->
    get_boolean(last_write_wins, Props).
%% Boolean value of the `dvv_enabled' property, or `undefined' if not present.
%% May also return `error' when the stored value cannot be coerced to a boolean.
-spec dvv_enabled(props()) -> boolean() | 'undefined' | 'error'.
dvv_enabled(Props) ->
    get_boolean(dvv_enabled, Props).
%% @private coerce the value under key to be a boolean, if defined; undefined, otherwise.
-spec get_boolean(PropName::atom(), props()) -> boolean() | 'undefined' | 'error'.
get_boolean(Key, Props) ->
    maybe_coerce(proplists:get_value(Key, Props)).

%% @private `undefined' passes through untouched; anything else is coerced,
%% yielding `error' for values coerce_bool/1 does not recognize.
maybe_coerce(undefined) -> undefined;
maybe_coerce(Value) -> coerce_bool(Value).
%%
%% EUNIT tests...
%%
-ifdef(TEST).
%% Exercises coerce_bool/1 across booleans, case-insensitive strings,
%% binaries and atoms, integers (non-positive => false), and inputs that
%% cannot be coerced (=> error). Fixed nonstandard spacing in the
%% `-ifdef' attribute and the function head for consistency with the
%% rest of the file.
coerce_bool_test_() ->
    [?_assertEqual(false, coerce_bool(false)),
     ?_assertEqual(true, coerce_bool(true)),
     ?_assertEqual(true, coerce_bool("True")),
     ?_assertEqual(false, coerce_bool("fAlSE")),
     ?_assertEqual(false, coerce_bool(<<"FAlse">>)),
     ?_assertEqual(true, coerce_bool(<<"trUe">>)),
     ?_assertEqual(true, coerce_bool(1)),
     ?_assertEqual(true, coerce_bool(234567)),
     ?_assertEqual(false, coerce_bool(0)),
     ?_assertEqual(false, coerce_bool(-1234)),
     ?_assertEqual(false, coerce_bool('FALSE')),
     ?_assertEqual(true, coerce_bool('TrUe')),
     ?_assertEqual(error, coerce_bool("Purple")),
     ?_assertEqual(error, coerce_bool(<<"frangipan">>)),
     ?_assertEqual(error, coerce_bool(erlang:make_ref()))
    ].
-ifdef(EQC).
%% Route EQC's output through io:format so it is visible in EUnit runs.
-define(QC_OUT(P),
        eqc:on_output(fun(Str, Args) ->
                              io:format(user, Str, Args) end, P)).
%% Wall-clock budget for each property below.
-define(TEST_TIME_SECS, 10).
%% EUnit wrappers: run each EQC property with headroom on the EUnit
%% timeout so the property's own time budget expires first.
immutable_test_() ->
    {timeout, ?TEST_TIME_SECS+5, [?_assert(test_immutable() =:= true)]}.
valid_test_() ->
    {timeout, ?TEST_TIME_SECS+5, [?_assert(test_create() =:= true)]}.
merges_props_test_() ->
    {timeout, ?TEST_TIME_SECS+5, [?_assert(test_merges() =:= true)]}.
%% A deliberately conflicting pair: lww=true together with dvv=true.
-define(LAST_WRITE_WINS, {last_write_wins, true}).
-define(DVV_ENABLED, {dvv_enabled, true}).
-define(LWW_DVV, [?LAST_WRITE_WINS, ?DVV_ENABLED]).
%% Each validate entry point should keep last_write_wins=true and reject
%% dvv_enabled=true with an error (the lww/dvv exclusion rule).
validate_create_bucket_type_test() ->
    {Validated, Errors} = validate_create_bucket_type(?LWW_DVV),
    ?assertEqual([{last_write_wins, true}], Validated),
    ?assertMatch([{dvv_enabled, _Message}], Errors).
validate_update_bucket_type_test() ->
    {Validated, Errors} = validate_update_bucket_type([], ?LWW_DVV),
    ?assertEqual([{last_write_wins, true}], Validated),
    ?assertMatch([{dvv_enabled, _Message}], Errors).
validate_update_typed_bucket_test() ->
    {Validated, Errors} = validate_update_typed_bucket([], ?LWW_DVV),
    ?assertEqual([{last_write_wins, true}], Validated),
    ?assertMatch([{dvv_enabled, _Message}], Errors).
validate_default_bucket_test() ->
    {Validated, Errors} = validate_default_bucket([], ?LWW_DVV),
    ?assertEqual([{last_write_wins, true}], Validated),
    ?assertMatch([{dvv_enabled, _Message}], Errors).
%% The post-merge rule itself, called directly.
validate_last_write_wins_implies_not_dvv_enabled_test() ->
    {Validated, Errors} = validate_last_write_wins_implies_not_dvv_enabled({?LWW_DVV, []}),
    ?assertEqual([{last_write_wins, true}], Validated),
    ?assertMatch([{dvv_enabled, _Message}], Errors).
%% Run each property for the standard time budget; returns true on success.
test_immutable() ->
    test_immutable(?TEST_TIME_SECS).
test_immutable(TestTimeSecs) ->
    eqc:quickcheck(eqc:testing_time(TestTimeSecs, ?QC_OUT(prop_immutable()))).
test_create() ->
    test_create(?TEST_TIME_SECS).
test_create(TestTimeSecs) ->
    eqc:quickcheck(eqc:testing_time(TestTimeSecs, ?QC_OUT(prop_create_valid()))).
test_merges() ->
    test_merges(?TEST_TIME_SECS).
test_merges(TestTimeSecs) ->
    eqc:quickcheck(eqc:testing_time(TestTimeSecs, ?QC_OUT(prop_merges()))).
%% Props
%% When validating:
%% * Once the datatype has been set, it cannot be unset or changed and
%% allow_mult must remain true
%% * the consistent property cannot change and neither can the n_val if
%% the type is consistent
%% * the write_once property cannot change
prop_immutable() ->
    ?FORALL(Args, gen_args(no_default_buckets),
            begin
                %% Args = [Phase, Bucket, Existing, New]; apply validate/4.
                Result = erlang:apply(?MODULE, validate, Args),
                Phase = lists:nth(1, Args),
                Existing = lists:nth(3, Args),
                New = lists:nth(4, Args),
                ?WHENFAIL(
                   begin
                       io:format("Phase: ~p~n", [Phase]),
                       io:format("Bucket ~p~n", [lists:nth(2, Args)]),
                       io:format("Existing ~p~n", [Existing]),
                       io:format("New ~p~n", [New]),
                       io:format("Result ~p~n", [Result]),
                       io:format("{allow_mult, valid_dt, valid_consistent, n_val_changed}~n"),
                       io:format("{~p,~p,~p,~p}~n~n",
                                 [allow_mult(New), valid_datatype(New), valid_consistent(New), n_val_changed(Existing, New)])
                   end,
                   %% collect/2 labels the distribution of generated cases;
                   %% immutable/4 is the actual pass/fail oracle.
                   collect(with_title("{allow_mult, valid_dt, valid_consistent, n_val_changed}"),
                           {allow_mult(New), valid_datatype(New), valid_consistent(New), n_val_changed(Existing, New)},
                           immutable(Phase, New, Existing, Result)))
            end).
%% When creating a bucket type:
%% * for datatypes, the datatype must be
%% valid, and allow mult must be true
%% * for consistent data, the consistent property must be valid
prop_create_valid() ->
    ?FORALL({Bucket, Existing, New}, {gen_bucket(create, bucket_types),
                                      gen_existing(), gen_new(create)},
            begin
                %% Existing is ignored by the create path; only New matters.
                Result = validate(create, Bucket, Existing, New),
                ?WHENFAIL(
                   begin
                       io:format("Bucket ~p~n", [Bucket]),
                       io:format("Existing ~p~n", [Existing]),
                       io:format("New ~p~n", [New]),
                       io:format("Result ~p~n", [Result]),
                       io:format("{has_datatype, valid_datatype, allow_mult, has_w1c, valid_w1c, has_consistent, valid_consistent}~n"),
                       io:format("{~p,~p,~p,~p,~p,~p,~p}~n~n",
                                 [has_datatype(New), valid_datatype(New), allow_mult(New),
                                  has_w1c(New), valid_w1c(New),
                                  has_consistent(New), valid_consistent(New)])
                   end,
                   %% only_create_if_valid/2 is the oracle: a create must be
                   %% rejected (props moved to Bad) iff the combination is invalid.
                   collect(with_title("{has_datatype, valid_datatype, allow_mult, has_w1c, valid_w1c, has_consistent, valid_consistent, lww, dvv_enabled}"),
                           {has_datatype(New), valid_datatype(New), allow_mult(New), has_w1c(New), valid_w1c(New), has_consistent(New), valid_consistent(New), last_write_wins(New), dvv_enabled(New)},
                           only_create_if_valid(Result, New)))
            end).
%% As of 2.* validate/4 must merge the new and existing props, verify
%% that. Not sure if this test isn't just a tautology. Reviewer?
prop_merges() ->
    ?FORALL({Bucket, Existing0, New}, {gen_bucket(update, any),
                                       gen_existing(),
                                       gen_new(update)},
            begin
                %% ensure default buckets are not marked consistent or write_once since that is invalid
                Existing = case default_bucket(Bucket) of
                               true -> lists:keydelete(write_once, 1, lists:keydelete(consistent, 1, Existing0));
                               false -> Existing0
                           end,
                Result={Good, Bad} = validate(update, Bucket, Existing, New),
                %% All we really want to check is that every key in
                %% Good replaces the same key in Existing, right?
                %% Remove `Bad' from the inputs to validate.
                F = fun({Name, _Err}, {Old, Neu}) ->
                            case lists:keytake(Name, 1, Neu) of
                                false ->
                                    {Old, Neu};
                                {value, V, Neu2} ->
                                    %% only want to remove the exact bad value from existing,
                                    %% not the bad key!
                                    {lists:delete(V, Old), Neu2}
                            end
                    end,
                {NoBadExisting, OnlyGoodNew} = lists:foldl(F, {Existing, New}, Bad),
                %% What's left are the good ones from `New'. Replace
                %% their keys in `Existing'. `Expected' is the input
                %% set, minus the `Bad' properties, and plus the
                %% `Good' ones. Compare that to output props `from
                %% validate/4' to verify the merge happens as
                %% expected. (ukeymerge prefers the first list, i.e. New.)
                Expected = lists:ukeymerge(1, lists:ukeysort(1, OnlyGoodNew),
                                           lists:ukeysort(1, NoBadExisting)),
                ?WHENFAIL(
                   begin
                       io:format("Bucket ~p~n", [Bucket]),
                       io:format("Existing ~p~n", [lists:sort(Existing)]),
                       io:format("New ~p~n", [New]),
                       io:format("Result ~p~n", [Result]),
                       io:format("Expected ~p~n", [lists:sort(Expected)]),
                       io:format("Expected - Good ~p~n", [sets:to_list(sets:subtract(sets:from_list(Expected), sets:from_list(Good)))]),
                       io:format("Good - Expected ~p~n", [sets:to_list(sets:subtract(sets:from_list(Good), sets:from_list(Expected)))])
                   end,
                   %% Ignore lww/dvv keys in the comparison: the post-merge
                   %% rule may legitimately drop dvv_enabled from Good.
                   case valid_dvv_lww({Good, Bad}) of
                       true ->
                           lists:sort(maybe_remove_dvv_enabled(Expected)) == lists:sort(maybe_remove_dvv_enabled(Good));
                       _ ->
                           false
                   end
                  )
            end).
%% If the merged Good props ended up with last_write_wins=true, then
%% dvv_enabled must be absent or false in Good, or flagged in Bad.
valid_dvv_lww({Good, Bad}) ->
    case last_write_wins(Good) of
        true ->
            DvvEnabled = dvv_enabled(Good),
            (DvvEnabled =:= undefined) orelse (not DvvEnabled) orelse has_dvv_enabled(Bad);
        _ ->
            true
    end.
%% Strip both conflict-resolution switches from a prop list so the merge
%% comparison ignores keys the lww/dvv rule may legitimately drop.
maybe_remove_dvv_enabled(Props) ->
    WithoutLww = lists:keydelete(last_write_wins, 1, Props),
    lists:keydelete(dvv_enabled, 1, WithoutLww).
%% Generators
%% Argument list for validate/4: [Phase, Bucket, Existing, New].
gen_args(GenDefBucket) ->
    ?LET(Phase, gen_phase(), [Phase, gen_bucket(Phase, GenDefBucket),
                              gen_existing(), gen_new(update)]).
gen_phase() ->
    oneof([create, update]).
%% Bucket identifiers: creates only ever target bucket types; updates may
%% target types, typed buckets, and (unless excluded) default buckets.
gen_bucket(create, _) ->
    gen_bucket_type();
gen_bucket(update, no_default_buckets) ->
    oneof([gen_bucket_type(), gen_typed_bucket()]);
gen_bucket(update, _) ->
    oneof([gen_bucket_type(), gen_typed_bucket(), gen_bucket()]).
gen_bucket_type() ->
    {binary(20), undefined}.
gen_typed_bucket() ->
    {binary(20), binary(20)}.
gen_bucket() ->
    oneof([{<<"default">>, binary(20)}, binary(20)]).
%% Existing props: the type defaults, optionally overlaid with one
%% self-consistent special-case group.
gen_existing() ->
    Defaults = lists:ukeysort(1, riak_core_bucket_type:defaults()),
    ?LET(Special, oneof([gen_valid_mult_dt(), gen_valid_w1c(), gen_valid_consistent(), gen_valid_dvv_lww(), []]),
         lists:ukeymerge(1, lists:ukeysort(1, Special), Defaults)).
gen_maybe_consistent() ->
    oneof([[], gen_valid_consistent()]).
gen_maybe_bad_consistent() ->
    oneof([gen_valid_consistent(), [{consistent, notvalid}]]).
gen_valid_consistent() ->
    ?LET(Consistent, bool(), [{consistent, Consistent}]).
%% allow_mult/datatype pair: a datatype always implies allow_mult=true.
gen_valid_mult_dt() ->
    ?LET(Mult, bool(), gen_valid_mult_dt(Mult)).
gen_valid_mult_dt(false) ->
    ?LET(AllowMult, bool(), [{allow_mult, AllowMult}]);
gen_valid_mult_dt(true) ->
    ?LET(Datatype, gen_datatype(), [{allow_mult, true}, {datatype, Datatype}]).
%% lww/dvv pair that never violates the exclusion rule.
gen_valid_dvv_lww() ->
    ?LET(LastWriteWins, bool(), gen_valid_dvv_lww(LastWriteWins)).
gen_valid_dvv_lww(true) ->
    [{last_write_wins, true}, {dvv_enabled, false}];
gen_valid_dvv_lww(false) ->
    ?LET(DvvEnabled, bool(), [{last_write_wins, false}, {dvv_enabled, DvvEnabled}]).
%% New (requested) props for an update: each property group is optional
%% and may independently be valid or invalid.
gen_new(update) ->
    ?LET(
       {Mult, Datatype, WriteOnce, Consistent, NVal, LastWriteWins, DvvEnabled},
       {
        gen_allow_mult(),
        oneof([[], gen_datatype_property()]),
        oneof([[], gen_valid_w1c()]),
        oneof([[], gen_maybe_bad_consistent()]),
        oneof([[], [{n_val, choose(1, 10)}]]),
        oneof([[], gen_lww()]),
        oneof([[], gen_dvv_enabled()])
       },
       Mult ++ Datatype ++ WriteOnce ++ Consistent ++ NVal ++ LastWriteWins ++ DvvEnabled
      );
%% New props for a create: start from the type defaults, but strip the
%% keys this generator supplies itself so the generated values win.
gen_new(create) ->
    Defaults0 = riak_core_bucket_type:defaults(),
    Defaults1 = lists:keydelete(allow_mult, 1, Defaults0),
    Defaults2 = lists:keydelete(last_write_wins, 1, Defaults1),
    Defaults = lists:keydelete(dvv_enabled, 1, Defaults2),
    ?LET(
       {Mult, DatatypeOrConsistent, WriteOnce, LastWriteWins, DvvEnabled},
       {
        gen_allow_mult(),
        frequency([{5, gen_datatype_property()},
                   {5, gen_maybe_bad_consistent()},
                   {5, []}]),
        gen_w1c(), gen_lww(), gen_dvv_enabled()},
       Defaults ++ Mult ++ DatatypeOrConsistent ++ WriteOnce ++ LastWriteWins ++ DvvEnabled).
%% Mostly-boolean generators; the 1-in-10 binary() arms inject invalid
%% (uncoercible) values on purpose.
gen_allow_mult() ->
    ?LET(Mult, frequency([{9, bool()}, {1, binary()}]), [{allow_mult, Mult}]).
gen_datatype_property() ->
    ?LET(Datattype, oneof([gen_datatype(), notadatatype]), [{datatype, Datattype}]).
gen_lww() ->
    ?LET(LwwWins, bool(), [{last_write_wins, LwwWins}]).
gen_dvv_enabled() ->
    ?LET(DvvEnabled, bool(), [{dvv_enabled, DvvEnabled}]).
gen_datatype() ->
    ?LET(Datamod, oneof(?V2_TOP_LEVEL_TYPES), riak_kv_crdt:from_mod(Datamod)).
%gen_maybe_bad_w1c() ->
%    oneof([gen_valid_w1c(), {write_once, rubbish}]).
gen_w1c() ->
    ?LET(WriteOnce, frequency([{9, bool()}, {1, binary()}]), [{write_once, WriteOnce}]).
gen_valid_w1c() ->
    ?LET(WriteOnce, bool(), [{write_once, WriteOnce}]).
%% helpers
gen_string_bool() ->
    oneof(["true", "false"]).
%% Oracle for prop_immutable: given the phase, the requested props, the
%% existing props and validate/4's result, decide whether immutability was
%% honoured. Creates and non-existent targets have no constraint.
-spec immutable(Phase :: create | update,
                New :: props(),
                Existing :: props(),
                Result :: {props(), errors()}) -> boolean().
immutable(create, _, _, _) ->
    true;
immutable(_, _New, undefined, _) ->
    true;
immutable(update, New, Existing, {_Good, Bad}) ->
    %% Dispatch on the classification of the existing props (type/1 is
    %% defined elsewhere in this module).
    case type(Existing) of
        datatype ->
            NewDT = proplists:get_value(datatype, New),
            NewAM = proplists:get_value(allow_mult, New),
            ExistingDT = proplists:get_value(datatype, Existing),
            immutable_dt(NewDT, NewAM, ExistingDT, Bad);
        write_once ->
            OldFP = proplists:get_value(write_once, Existing),
            NewFP = proplists:get_value(write_once, New),
            immutable_write_once(OldFP, NewFP, New, Bad);
        default ->
            %% doesn't mean valid props, just that there is no
            %% immutability constraint.
            true;
        consistent ->
            %% existing type (or bucket) is consistent
            immutable_consistent(New, Existing, Bad)
    end.
immutable_consistent(New, Existing, Bad) ->
    NewCS = proplists:get_value(consistent, New),
    OldN = proplists:get_value(n_val, Existing),
    NewN = proplists:get_value(n_val, New),
    immutable_consistent(NewCS, OldN, NewN, Bad).
%% Consistent properties must remain consistent and
%% the n_val must not change. This function assumes the
%% existing value for consistent is true.
immutable_consistent(undefined, _N, undefined, _Bad) ->
    %% consistent and n_val not modified
    true;
immutable_consistent(true, _N, undefined, _Bad) ->
    %% consistent still set to true and n_val not modified
    true;
immutable_consistent(Consistent, _N, _N, _Bad) when Consistent =:= undefined orelse
                                                    Consistent =:= true ->
    %% consistent not modified or still set to true and n_val
    %% modified but set to same value
    true;
immutable_consistent(Consistent, _OldN, _NewN, Bad) when Consistent =:= undefined orelse
                                                         Consistent =:= true ->
    %% consistent not modified or still set to true but n_val modified
    has_n_val(Bad);
immutable_consistent(_Consistent, OldN, NewN, Bad) when OldN =:= NewN orelse
                                                        NewN =:= undefined ->
    %% consistent modified but set to invalid value or false, n_val not modified
    %% or set to existing value
    has_consistent(Bad);
immutable_consistent(_Consistent, _OldN, _NewN, Bad) ->
    has_consistent(Bad) andalso has_n_val(Bad).
%% @private only called when the existing bucket type is immutable All
%% that has to be true is that the bucket type is still write_once
%% NOTE(review): there is no clause for Old and New both not
%% true/undefined; presumably unreachable because this is only invoked
%% when type(Existing) classified the bucket as write_once — confirm.
immutable_write_once(true, New, NewProps, Bad) when New == true orelse New == undefined ->
    not has_write_once(Bad) andalso undefined_props([datatype, {consistent, true}], NewProps, Bad);
immutable_write_once(true, _New, NewProps, Bad) ->
    has_write_once(Bad) andalso undefined_props([datatype, {consistent, true}], NewProps, Bad);
immutable_write_once(_Existing, true, _NewProps, Bad) ->
    has_write_once(Bad).
%% @private every prop in Names that is present in Props, must be in
%% Errors. A spec entry is either `{Name, Value}' (only that exact value
%% is disallowed) or a bare `Name' (any occurrence is disallowed).
undefined_props(Names, Props, Errors) ->
    lists:all(fun(Spec) -> absent_or_flagged(Spec, Props, Errors) end, Names).

%% @private true when the disallowed property is not effectively present,
%% or when it was reported in Errors.
absent_or_flagged({Name, Value}, Props, Errors) ->
    Value /= proplists:get_value(Name, Props)
        orelse lists:keymember(Name, 1, Errors);
absent_or_flagged(Name, Props, Errors) ->
    not lists:keymember(Name, 1, Props)
        orelse lists:keymember(Name, 1, Errors).
%% If data type and allow mult and are in New they must match what is in existing
%% or be in Bad. Clause order matters: earlier clauses capture the fully
%% valid combinations, later ones pin down which property must be flagged.
immutable_dt(_NewDT=undefined, _NewAllowMult=undefined, _ExistingDT, _Bad) ->
    %% datatype and allow_mult are not being modified, so its valid
    true;
immutable_dt(_Datatype, undefined, _Datatype, _Bad) ->
    %% data types from new and existing match and allow mult not modified, valid
    true;
immutable_dt(_Datatype, true, _Datatype, _Bad) ->
    %% data type from new and existing match and allow mult still set to true, valid
    true;
immutable_dt(undefined, true, _Datatype, _Bad) ->
    %% data type not modified and allow_mult still set to true, vald
    true;
immutable_dt(_Datatype, undefined, _Datatype2, Bad) ->
    %% data types do not match, allow_mult not modified
    has_datatype(Bad);
immutable_dt(_Datatype, true, _Datatype2, Bad) ->
    %% data types do not match, allow_mult still set to true
    has_datatype(Bad);
immutable_dt(_Datatype, false, undefined, Bad) ->
    %% datatype defined when it wasn't before
    has_datatype(Bad);
immutable_dt(_Datatype, false, _Datatype, Bad) ->
    %% attempt to set allow_mult to false when data type set is invalid, datatype not modified
    has_allow_mult(Bad);
immutable_dt(undefined, false, _Datatype, Bad) ->
    %% data type not modified but exists and allow_mult set to false is invalid
    has_allow_mult(Bad);
immutable_dt(_Datatype, false, _Datatype2, Bad) ->
    %% data type changed and allow mult modified to be false, both are invalid
    has_allow_mult(Bad) andalso has_datatype(Bad);
immutable_dt(undefined, _, _Datatype, Bad) ->
    %% datatype not modified but allow_mult is invalid
    has_allow_mult(Bad);
immutable_dt(_Datatype, _, _Datatype, Bad) ->
    %% allow mult is invalid but data types still match
    has_allow_mult(Bad);
immutable_dt(_, _, _, Bad) ->
    %% allow_mult and data type are invalid
    has_allow_mult(Bad) andalso has_datatype(Bad).
%% @private a create result is acceptable only when it is coherent with
%% the requested props: first enforce the lww/dvv exclusion, then fall
%% through to the datatype/write_once/consistent rules.
only_create_if_valid({Good, Bad}, New) ->
    case {last_write_wins(New), dvv_enabled(New)} of
        {true, true} ->
            %% Both were requested: dvv_enabled must have been rejected
            %% while last_write_wins was kept. `andalso' short-circuits;
            %% both operands are pure predicates, so behaviour is the
            %% same as the non-short-circuiting `and' but idiomatic.
            case has_dvv_enabled(Bad) andalso has_last_write_wins(Good) of
                true ->
                    only_create_if_valid2({Good, Bad}, New);
                _ ->
                    false
            end;
        _ ->
            only_create_if_valid2({Good, Bad}, New)
    end.
%% True when a dvv_enabled entry exists (whatever its value).
has_dvv_enabled(Props) ->
    lists:keymember(dvv_enabled, 1, Props).
%% True when a last_write_wins entry exists (whatever its value).
has_last_write_wins(Props) ->
    lists:keymember(last_write_wins, 1, Props).
%% @private per-combination oracle for creates: checks that each requested
%% datatype/allow_mult/write_once/consistent combination ended up in the
%% right partition (Good vs Bad).
only_create_if_valid2({Good, Bad}, New) ->
    DT = proplists:get_value(datatype, New),
    AM = proplists:get_value(allow_mult, New),
    FP = get_boolean(write_once, New),
    CS = proplists:get_value(consistent, New),
    case {DT, AM, FP, CS} of
        %% write_once true entails data type undefined and consistent false or undefined
        {_DataType, _AllowMult, true, _Consistent} ->
            not has_datatype(Good)
                andalso not is_consistent(Good)
                % NB. (!P v Q) iff P => Q
                andalso (not has_datatype(New) or has_datatype(Bad))
                andalso (not is_consistent(New) or has_consistent(Bad))
                ;
        %% if consistent or datatype properties are not defined then properties should be
        %% valid since no other properties generated can be in valid
        {undefined, _AllowMult, _WriteOnce, Consistent} when Consistent =:= false orelse
                                                             Consistent =:= undefined ->
            true;
        %% if datatype is defined, its not a consistent type and allow_mult=true
        %% then the datatype must be valid
        {Datatype, true, _WriteOnce, Consistent} when Consistent =:= false orelse
                                                      Consistent =:= undefined ->
            case lists:member(riak_kv_crdt:to_mod(Datatype), ?V2_TOP_LEVEL_TYPES) of
                true ->
                    has_datatype(Good) andalso has_allow_mult(Good);
                false ->
                    has_datatype(Bad) andalso has_allow_mult(Good)
            end;
        %% if the datatype is defined, the type is not consistent and allow_mult is false
        %% then allow_mult should be in the Bad list and the datatype may be depending on if it
        %% is valid
        {Datatype, _, _WriteOnce, Consistent} when Consistent =:= false orelse
                                                   Consistent =:= undefined->
            case lists:member(riak_kv_crdt:to_mod(Datatype), ?V2_TOP_LEVEL_TYPES) of
                true ->
                    has_allow_mult(Bad) andalso has_datatype(Good);
                false ->
                    has_datatype(Bad) andalso has_allow_mult(Bad)
            end;
        %% the type is consistent, whether it has a datatype or allow_mult set is irrelevant (for now
        %% at least)
        {_, _, _, true} ->
            has_consistent(Good);
        %% the type was not inconsistent (explicitly or implicitly) but the value is invalid
        {_, _, _, _Consistent} ->
            has_consistent(Bad)
    end.
%% True when a datatype entry with a non-undefined value is present.
has_datatype(Props) ->
    proplists:get_value(datatype, Props) /= undefined.
%% True when an allow_mult entry with a non-undefined value is present.
has_allow_mult(Props) ->
    proplists:get_value(allow_mult, Props) /= undefined.
%% True when the datatype maps to one of the supported CRDT modules.
valid_datatype(Props) ->
    Datatype = proplists:get_value(datatype, Props),
    lists:member(riak_kv_crdt:to_mod(Datatype), ?V2_TOP_LEVEL_TYPES).
%% True when a write_once entry with a non-undefined value is present.
has_w1c(Props) ->
    undefined =/= proplists:get_value(write_once, Props).
%% True only when write_once is exactly a boolean.
valid_w1c(Props) ->
    is_boolean(proplists:get_value(write_once, Props)).
%% True when write_once is exactly `true'.
is_w1c(Props) ->
    true =:= proplists:get_value(write_once, Props).
%% True when a consistent entry with a non-undefined value is present.
has_consistent(Props) ->
    undefined =/= proplists:get_value(consistent, Props).
%% True only when consistent is exactly a boolean.
valid_consistent(Props) ->
    is_boolean(proplists:get_value(consistent, Props)).
%% True when consistent is exactly `true'.
is_consistent(Props) ->
    true =:= proplists:get_value(consistent, Props).
%% True when an n_val entry with a non-undefined value is present.
has_n_val(Props) ->
    undefined =/= proplists:get_value(n_val, Props).
%% True when New carries an n_val that differs from the existing one.
n_val_changed(Existing, New) ->
    case proplists:get_value(n_val, New) of
        undefined -> false;
        NewN -> proplists:get_value(n_val, Existing) =/= NewN
    end.
%% True when the error list carries a write_once entry.
has_write_once(Bad) ->
    undefined =/= proplists:get_value(write_once, Bad).
%% A bucket in the default type: either explicitly typed as <<"default">>
%% or given as a bare binary bucket name.
default_bucket(Bucket) ->
    case Bucket of
        {<<"default">>, _Name} -> true;
        Bin when is_binary(Bin) -> true;
        _Other -> false
    end.
%% Drop allow_mult from the props when its coerced value was `error'.
maybe_bad_mult(CoercedMult, Props) ->
    case CoercedMult of
        error -> lists:keydelete(allow_mult, 1, Props);
        _ -> Props
    end.
-endif.
-endif.
-module(influxdb).
-export([
query/2,
query/3,
query/4,
write/2,
write/3
]).
-export_type([
config/0,
time_unit/0,
query/0,
query_parameters/0,
point/0
]).
-type config() :: influxdb_config:config().
-type time_unit() :: hour | minute | second | millisecond | microsecond | nanosecond.
%% @doc Run `Query' with no bound parameters and default options.
%% Equivalent to query(Config, Query, #{}, #{}).
-spec query(config(), query()) ->
    ok
    | {ok, [result()]}
    | {error, {not_found, string()}}
    | {error, {server_error, string()}}.
query(Config, Query) ->
    query(Config, Query, #{}, #{}).
%% @doc Run `Query' with bound `Parameters' and default options.
-spec query(config(), query(), query_parameters()) ->
    ok
    | {ok, [result()]}
    | {error, {not_found, string()}}
    | {error, {server_error, string()}}.
query(Config, Query, Parameters) ->
    query(Config, Query, Parameters, #{}).
%% @doc Run `Query' against the configured InfluxDB server.
%% `Parameters' are JSON-encoded and sent as bound query parameters;
%% `Options' may carry `timeout', `precision' and `retention_policy'.
-spec query(config(), query(), query_parameters(), query_options()) ->
    ok
    | {ok, [result()]}
    | {error, {not_found, string()}}
    | {error, {server_error, string()}}.
-type query() :: iodata().
-type query_parameters() :: #{atom() => atom() | binary() | number()}.
-type query_options() :: #{
    timeout => timeout(),
    precision => time_unit(),
    retention_policy => iodata()
}.
-type result() :: influxdb_http:result().
query(#{host := Host, port := Port, username := Username, password := Password} = Config, Query, Parameters, Options) when is_map(Parameters), is_map(Options) ->
    %% `infinity' disables the HTTP timeout unless one was requested.
    Timeout = maps:get(timeout, Options, infinity),
    Url = influxdb_uri:encode(#{
        scheme => "http",
        host => Host,
        port => Port,
        path => "/query",
        query => url_query(Config, Options)
    }),
    %% The query text and its parameters travel form-encoded in the body.
    Body = influxdb_uri:encode_query(#{
        q => Query,
        params => jsone:encode(Parameters)
    }),
    influxdb_http:post(query, Url, Username, Password, "application/x-www-form-urlencoded", Body, Timeout).
%% Builds the /query URL parameters: start from the defaults for the
%% configuration, then fold the per-call options on top.
url_query(Config, Options) ->
    Apply = fun(Key, Value, Acc) -> apply_url_option(Key, Value, Acc) end,
    maps:fold(Apply, default_url_query(Config), Options).

%% Translate a recognised option into its URL parameter; ignore the rest.
apply_url_option(precision, Precision, Acc) ->
    Acc#{"epoch" => precision(Precision)};
apply_url_option(retention_policy, Policy, Acc) ->
    Acc#{"rp" => Policy};
apply_url_option(_Other, _Value, Acc) ->
    Acc.

%% Defaults: nanosecond timestamps, plus the database when configured.
default_url_query(#{database := Database}) ->
    #{"db" => Database, "epoch" => precision(nanosecond)};
default_url_query(#{}) ->
    #{"epoch" => precision(nanosecond)}.

%% Map a time_unit() to InfluxDB's precision abbreviation.
precision(hour) -> "h";
precision(minute) -> "m";
precision(second) -> "s";
precision(millisecond) -> "ms";
precision(microsecond) -> "u";
precision(nanosecond) -> "ns".
%% @doc Write `Measurements' (line-protocol points) with default options.
-spec write(config(), [point()]) ->
    ok
    | {error, {not_found, string()}}
    | {error, {server_error, string()}}.
write(Config, Measurements) ->
    write(Config, Measurements, #{}).
%% @doc Write `Measurements' to the configured database.
%% `Options' may carry `timeout', `precision' and `retention_policy'.
-spec write(config(), [point()], write_options()) ->
    ok
    | {error, {not_found, string()}}
    | {error, {server_error, string()}}.
-type point() :: influxdb_line_encoding:point().
-type write_options() :: #{
    timeout => timeout(),
    precision => time_unit(),
    retention_policy => string()
}.
write(#{host := Host, port := Port, username := Username, password := Password, database := Database}, Measurements, Options) ->
    %% `infinity' disables the HTTP timeout unless one was requested.
    Timeout = maps:get(timeout, Options, infinity),
    %% /write parameters: the database plus any recognised options.
    QueryParams = maps:fold(fun
        (precision, Precision, Acc) -> Acc#{"precision" => precision(Precision)};
        (retention_policy, Policy, Acc) -> Acc#{"rp" => Policy};
        (_Other, _Value, Acc) -> Acc
    end, #{"db" => Database}, Options),
    Url = influxdb_uri:encode(#{
        scheme => "http",
        host => Host,
        port => Port,
        path => "/write",
        query => QueryParams
    }),
    %% Points are serialized with the InfluxDB line protocol.
    Payload = influxdb_line_encoding:encode(Measurements),
    influxdb_http:post(write, Url, Username, Password, "application/octet-stream", Payload, Timeout).
% @doc I2C driver API.
%
% This API is based on the
% <a href="https://docs.rtems.org/doxygen/branches/master/group__I2CLinux.html">
% Linux I2C User Space API
% </a>.
% For a description of the I2C protocol see
% [https://www.kernel.org/doc/Documentation/i2c/i2c-protocol].
-module(grisp_i2c).
-behavior(gen_server).
-include("grisp_i2c.hrl").
% API
-export([start_link/1]).
-export([msgs/1]).
% Callbacks
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([code_change/3]).
-export([terminate/2]).
%--- Records -------------------------------------------------------------------
-record(state, {driver}).
%--- API -----------------------------------------------------------------------
% @private Starts the I2C driver server, registered locally as ?MODULE.
% `DriverMod' must export open/0 (called from init/1) and command/2
% (called when executing message batches).
start_link(DriverMod) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, DriverMod, []).
% @doc Communicate with the I2C bus.
%
% The first entry in the list has to be the address as an integer.
% The entry `{sleep, Time}' can be used to add delays between messages.
% For possible flags see the
% <a href="https://docs.rtems.org/doxygen/branches/master/group__I2CLinux.html">
% Linux I2C User Space API
% </a>.
%
% Sending a read message `{read, Length}' the `I2C_M_RD' flag will be set
% automatcally. The `Length' is the number of bytes to be read.
%
% === Example ===
% A read message like
% ```
% 1> grisp_i2c:msgs([16#40, {read, 2}]).
% '''
% will send an I2C message with
% ```
% addr = 16#40
% flags = I2C_M_RD
% len = 2
% '''
% using the notation from
% [https://www.kernel.org/doc/Documentation/i2c/i2c-protocol]
% this message will look like:
% ```
% S 16#40 Rd [A] [Data] A [Data] NA P
% '''
%
% A write message like
% ```
% 2> grisp_i2c:msgs([16#40, {write, <<16#02>>}]).
% '''
% will send an I2C message with
% ```
% addr = 16#40
% flags = 16#0000
% len = 1
% *buf = 16#02
% '''
% using the notation from
% [https://www.kernel.org/doc/Documentation/i2c/i2c-protocol]
% this message will look like:
% ```
% S 16#40 Wr [A] 16#02 P
% '''
-spec msgs([Adr::integer()
            | {sleep, Time::integer()}
            | {write, Data::binary()}
            | {write, Data::binary(), Flags::integer()}
            | {read, Length::integer()}
            | {read, Length::integer(), Flags::integer()}])
      -> LastResponse::any().
msgs([Adr | Msgs]) ->
    %% Encode into one or more driver batches (splitting at {sleep, _}
    %% markers), then execute them synchronously in the server process.
    EncodedMsgs = do_msgs(Adr, Msgs),
    gen_server:call(?MODULE, {msgs, EncodedMsgs}).
%--- Callbacks -----------------------------------------------------------------
% @private Opens the driver and keeps {Module, Handle} in server state.
init(DriverMod) ->
    Ref = DriverMod:open(),
    {ok, #state{driver = {DriverMod, Ref}}}.
% @private Executes each prepared batch/sleep in order; the reply is the
% response of the last element only.
handle_call({msgs, EncodedMsgs}, _From, State) ->
    {DriverMod, Ref} = State#state.driver,
    RespList = [maybe_send_msgs(Msg, DriverMod, Ref) || Msg <- EncodedMsgs],
    LastResp = lists:last(RespList),
    {reply, LastResp, State}.
% @private Casts and stray messages are not part of the protocol: crash.
handle_cast(Request, _State) -> error({unknown_cast, Request}).
% @private
handle_info(Info, _State) -> error({unknown_info, Info}).
% @private
code_change(_OldVsn, State, _Extra) -> {ok, State}.
% @private
terminate(_Reason, _State) -> ok.
%--- Internal ------------------------------------------------------------------
%% Split the message list into encoded batches at {sleep, _} markers.
do_msgs(Adr, Msgs) ->
    do_msgs(Adr, Msgs, [], []).
%% Accumulate messages (in reverse) until a sleep marker or the end of
%% input, then encode the accumulated run as one batch.
%% NOTE(review): a leading {sleep, _} (or two in a row) encodes an empty
%% batch (zero messages) that is still handed to the driver — presumably
%% harmless, but confirm the driver accepts empty batches.
do_msgs(_Adr, [], [], ReversedEncodedMsgs) ->
    lists:reverse(ReversedEncodedMsgs);
do_msgs(Adr, [], ReversedToEncode, ReversedEncodedMsgs) ->
    %% End of input: flush the pending run.
    ToEncode = lists:reverse(ReversedToEncode),
    EncodedMsgs = encode_msgs([Adr | ToEncode]),
    NewReversedEncodedMsgs = [EncodedMsgs | ReversedEncodedMsgs],
    do_msgs(Adr, [], [], NewReversedEncodedMsgs);
do_msgs(Adr, [{sleep, Time} | Rest], ReversedToEncode, ReversedEncodedMsgs) ->
    %% Sleep marker: flush the pending run, then record the sleep so it
    %% executes between the two batches.
    ToEncode = lists:reverse(ReversedToEncode),
    EncodedMsgs = encode_msgs([Adr | ToEncode]),
    NewReversedEncodedMsgs = [EncodedMsgs | ReversedEncodedMsgs],
    NewReversedEncodedMsgsWithSleep = [{sleep, Time} | NewReversedEncodedMsgs],
    do_msgs(Adr, Rest, [], NewReversedEncodedMsgsWithSleep);
do_msgs(Adr, [Msg | Rest], ReversedToEncode, ReversedEncodedMsgs) ->
    NewReversedToEncode = [Msg | ReversedToEncode],
    do_msgs(Adr, Rest, NewReversedToEncode, ReversedEncodedMsgs).
%% Encode a run of read/write messages into the binary the driver expects:
%%   <<DataLen:16, WriteData/binary, MsgCount:16, Descriptors/binary>>
%% where each 8-byte descriptor is <<Adr:16, Flags:16, Len:16, Offset:16>>.
%% Writes append their payload to WriteData and record its Offset; reads
%% carry Offset 0 and have the I2C_M_RD flag OR-ed into their flags.
encode_msgs(Msgs) ->
    encode_msgs(Msgs, undefined, <<>>, <<>>).
encode_msgs([Adr|Rest], _, W, M) when is_integer(Adr) ->
    encode_msgs(Rest, Adr, W, M);
encode_msgs([{Cmd, Data}|Rest], Adr, W, M) ->
    %% Two-element form defaults the flags to 0.
    encode_msgs([{Cmd, Data, 0}|Rest], Adr, W, M);
encode_msgs([{write, Data, Flags}|Rest], Adr, W, M) ->
    Offset = byte_size(W),
    Len = byte_size(Data),
    encode_msgs(Rest, Adr, <<W/binary, Data/binary>>,
                <<M/binary, Adr:16, Flags:16, Len:16, Offset:16>>);
encode_msgs([{read, Len, Flags}|Rest], Adr, W, M) when is_integer(Len) ->
    F = Flags bor ?I2C_M_RD,
    encode_msgs(Rest, Adr, W, <<M/binary, Adr:16, F:16, Len:16, 0:16>>);
encode_msgs([], _Adr, W, M) when byte_size(M) rem 8 =:= 0 ->
    %% Guard double-checks the descriptor area is whole 8-byte records.
    Data_len = byte_size(W),
    Msg_count = byte_size(M) div 8,
    <<Data_len:16, W/binary, Msg_count:16, M/binary>>.
%% Execute one prepared element: a {sleep, Millis} marker pauses the
%% server; anything else is an encoded batch handed to the driver.
maybe_send_msgs({sleep, Millis}, _DriverMod, _Ref) ->
    timer:sleep(Millis);
maybe_send_msgs(Encoded, DriverMod, Ref) ->
    DriverMod:command(Ref, Encoded).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
%% EUnit suite for the couchdb OS-process pool: verifies that a full
%% pool blocks new clients and that crashed clients free their slot.
-module(couchdb_os_proc_pool).

-include("couch_eunit.hrl").
-include_lib("couchdb/couch_db.hrl").

%% Upper bound (ms) for every receive in this suite.
-define(TIMEOUT, 3000).
%% Fixture setup: boot a couchdb server supervisor and cap the OS
%% process pool at 3, so the pool-exhaustion scenarios below are
%% reachable with four clients.
start() ->
    {ok, Pid} = couch_server_sup:start_link(?CONFIG_CHAIN),
    couch_config:set("query_server_config", "os_process_limit", "3", false),
    Pid.
%% Fixture teardown: stop the supervisor started by start/0 and block
%% until it is actually down (or fail the fixture after ?TIMEOUT).
%% Monitoring after the stop call is fine: if the process is already
%% dead the 'DOWN' message is delivered immediately.
stop(Pid) ->
    couch_server_sup:stop(),
    erlang:monitor(process, Pid),
    receive
        {'DOWN', _, _, Pid, _} ->
            ok
    after ?TIMEOUT ->
        throw({timeout, server_stop})
    end.
%% Top-level EUnit generator: one server instance per run, shared by
%% both test cases via the setup/cleanup fixture.
os_proc_pool_test_() ->
    {
        "OS processes pool tests",
        {
            setup,
            fun start/0, fun stop/1,
            [
                should_block_new_proc_on_full_pool(),
                should_free_slot_on_proc_unexpected_exit()
            ]
        }
    }.
%% With os_process_limit = 3, a fourth client must block until one of
%% the first three returns its OS process — and it must then be handed
%% that exact (recycled) process.
should_block_new_proc_on_full_pool() ->
    ?_test(begin
        Client1 = spawn_client(),
        Client2 = spawn_client(),
        Client3 = spawn_client(),

        ?assertEqual(ok, ping_client(Client1)),
        ?assertEqual(ok, ping_client(Client2)),
        ?assertEqual(ok, ping_client(Client3)),

        Proc1 = get_client_proc(Client1, "1"),
        Proc2 = get_client_proc(Client2, "2"),
        Proc3 = get_client_proc(Client3, "3"),

        %% Three clients occupy three distinct pool slots.
        ?assertNotEqual(Proc1, Proc2),
        ?assertNotEqual(Proc2, Proc3),
        ?assertNotEqual(Proc3, Proc1),

        %% Pool is full: the fourth client blocks inside
        %% get_os_process/1 and cannot answer the ping.
        Client4 = spawn_client(),
        ?assertEqual(timeout, ping_client(Client4)),

        %% Freeing one slot unblocks it...
        ?assertEqual(ok, stop_client(Client1)),
        ?assertEqual(ok, ping_client(Client4)),

        %% ...and it receives the very process client 1 returned.
        Proc4 = get_client_proc(Client4, "4"),
        ?assertEqual(Proc1, Proc4),

        lists:map(fun(C) ->
            ?assertEqual(ok, stop_client(C))
        end, [Client2, Client3, Client4])
    end).
%% If a client crashes without returning its OS process, the pool must
%% reclaim the slot; a subsequent client should get a *fresh* process
%% (different from all three originals, including the dead client's).
should_free_slot_on_proc_unexpected_exit() ->
    ?_test(begin
        Client1 = spawn_client(),
        Client2 = spawn_client(),
        Client3 = spawn_client(),

        ?assertEqual(ok, ping_client(Client1)),
        ?assertEqual(ok, ping_client(Client2)),
        ?assertEqual(ok, ping_client(Client3)),

        Proc1 = get_client_proc(Client1, "1"),
        Proc2 = get_client_proc(Client2, "2"),
        Proc3 = get_client_proc(Client3, "3"),

        ?assertNotEqual(Proc1, Proc2),
        ?assertNotEqual(Proc2, Proc3),
        ?assertNotEqual(Proc3, Proc1),

        %% Crash client 1 (it exits without returning its process).
        ?assertEqual(ok, kill_client(Client1)),

        %% The slot is free again: a new client is served promptly,
        %% with a process distinct from every earlier one.
        Client4 = spawn_client(),
        ?assertEqual(ok, ping_client(Client4)),
        Proc4 = get_client_proc(Client4, "4"),
        ?assertNotEqual(Proc4, Proc1),
        ?assertNotEqual(Proc2, Proc4),
        ?assertNotEqual(Proc3, Proc4),

        lists:map(fun(C) ->
            ?assertEqual(ok, stop_client(C))
        end, [Client2, Client3, Client4])
    end).
%% Spawn a helper process that checks an OS process out of the pool and
%% then serves the ping/get_proc/stop/die protocol (see loop/3).  Note
%% that get_os_process/1 blocks inside the helper while the pool is
%% full, which is exactly what the tests rely on.
spawn_client() ->
    Parent = self(),
    Ref = make_ref(),
    Pid = spawn(fun() ->
        Proc = couch_query_servers:get_os_process(<<"javascript">>),
        loop(Parent, Ref, Proc)
    end),
    {Pid, Ref}.
%% Ask a client helper to prove it is alive (i.e. it acquired its OS
%% process and entered its loop).  Returns ok, or timeout after
%% ?TIMEOUT milliseconds.
ping_client(Client) ->
    {Pid, Ref} = Client,
    Pid ! ping,
    receive
        {pong, Ref} -> ok
    after ?TIMEOUT -> timeout
    end.
%% Fetch the OS process a client helper checked out.  Unlike the other
%% helpers this fails the test hard (with module/line metadata) instead
%% of returning a timeout atom, since a missing proc here is always an
%% error in the scenario.
get_client_proc({Pid, Ref}, ClientName) ->
    Pid ! get_proc,
    receive
        {proc, Ref, Proc} -> Proc
    after ?TIMEOUT ->
        erlang:error({assertion_failed,
                      [{module, ?MODULE},
                       {line, ?LINE},
                       {reason, "Timeout getting client "
                                ++ ClientName ++ " proc"}]})
    end.
%% Tell a client helper to return its OS process to the pool and exit
%% normally.  Returns ok on acknowledgement, timeout otherwise.
stop_client(Client) ->
    {Pid, Ref} = Client,
    Pid ! stop,
    receive
        {stop, Ref} -> ok
    after ?TIMEOUT -> timeout
    end.
%% Make a client helper crash *without* returning its OS process,
%% simulating an unexpected exit.  Returns ok once the helper confirms
%% it is about to die, timeout otherwise.
kill_client(Client) ->
    {Pid, Ref} = Client,
    Pid ! die,
    receive
        {die, Ref} -> ok
    after ?TIMEOUT -> timeout
    end.
%% Client helper protocol.  The receive is deliberately selective so
%% unknown messages stay in the mailbox:
%%   ping     -> acknowledge liveness;
%%   get_proc -> report the checked-out OS process;
%%   stop     -> return the process to the pool, exit normally;
%%   die      -> crash without returning the process (tests slot reuse).
loop(Parent, Ref, Proc) ->
    receive
        ping ->
            Parent ! {pong, Ref},
            loop(Parent, Ref, Proc);
        get_proc ->
            Parent ! {proc, Ref, Proc},
            loop(Parent, Ref, Proc);
        stop ->
            couch_query_servers:ret_os_process(Proc),
            Parent ! {stop, Ref};
        die ->
            Parent ! {die, Ref},
            exit(some_error)
    end.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2004-2013. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose: Optimizes booleans in guards.
-module(beam_bool).
-export([module/2]).

-import(lists, [reverse/1,reverse/2,foldl/3,mapfoldl/3,map/2]).

-define(MAXREG, 1024).

%% Pass-wide state threaded through the whole optimization.
-record(st,
    {next,          %Next label number.
     ll             %Live regs at labels.
    }).
%% Compiler pass entry point: optimize each function independently,
%% threading the next free label number.  Numbering starts at
%% 100000000 to avoid clashing with labels already in the module.
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
    %%io:format("~p:\n", [Mod]),
    {Fs,_} = mapfoldl(fun(Fn, Lbl) -> function(Fn, Lbl) end, 100000000, Fs0),
    {ok,{Mod,Exp,Attr,Fs,Lc}}.
%% Optimize one function.  On any crash, print which function failed
%% (to make compiler bug reports actionable) and re-raise with the
%% original class and stacktrace.
function({function,Name,Arity,CLabel,Is0}, Lbl0) ->
    try
        {Is,#st{next=Lbl}} = bool_opt(Is0, Lbl0),
        {{function,Name,Arity,CLabel,Is},Lbl}
    catch
        Class:Error ->
            Stack = erlang:get_stacktrace(),
            io:fwrite("Function: ~w/~w\n", [Name,Arity]),
            erlang:raise(Class, Error, Stack)
    end.
%%
%% Optimize boolean expressions that use guard bifs. Rewrite to
%% use test instructions if possible.
%%
bool_opt(Asm, Lbl) ->
    %% Precompute live-register information per label; it is needed
    %% later to decide whether a rewrite is safe (see is_killed/4 and
    %% is_not_used/4).
    LiveInfo = beam_utils:index_labels(Asm),
    bopt(Asm, [], #st{next=Lbl,ll=LiveInfo}).
%% Main scan over the instruction stream; Acc is the already-processed
%% code in reversed order.
bopt([{block,Bl0}=Block|
      [{jump,{f,Succ}},
       {label,Fail},
       {block,[{set,[Dst],[{atom,false}],move}]},
       {label,Succ}|Is]=Is0], Acc0, St) ->
    %% This is the shape generated for a protected (try/catch) guard
    %% expression: a block, a jump to the success label, and a failure
    %% label that moves 'false' into Dst.  Fold it into one
    %% {protected,...} pseudo-instruction so the rest of the pass can
    %% treat it as a unit (bopt_reverse/2 re-expands it if unused).
    case split_block(Bl0, Dst, Fail, Acc0, true) of
        failed ->
            bopt(Is0, [Block|Acc0], St);
        {Bl,PreBlock} ->
            Acc1 = case PreBlock of
                       [] -> Acc0;
                       _ -> [{block,PreBlock}|Acc0]
                   end,
            Acc = [{protected,[Dst],Bl,{Fail,Succ}}|Acc1],
            bopt(Is, Acc, St)
    end;
bopt([{test,is_eq_exact,{f,Fail},[Reg,{atom,true}]}=I|Is], [{block,_}|_]=Acc0, St0) ->
    %% A "Reg =:= true" test directly after a block that computed Reg:
    %% try to rewrite the boolean expression into direct tests.
    case bopt_block(Reg, Fail, Is, Acc0, St0) of
        failed -> bopt(Is, [I|Acc0], St0);
        {Acc,St} -> bopt(Is, Acc, St)
    end;
bopt([I|Is], Acc, St) ->
    bopt(Is, [I|Acc], St);
bopt([], Acc, St) ->
    {bopt_reverse(Acc, []),St}.
%% Reverse the accumulated instruction list back into program order,
%% re-expanding each unused {protected,...} pseudo-instruction into the
%% original five-instruction sequence it was folded from.
bopt_reverse(Is, Acc0) ->
    lists:foldl(fun expand_protected/2, Acc0, Is).

%% One reversal step: protected pseudo-ops are expanded, everything
%% else is simply prepended unchanged.
expand_protected({protected, [Dst], Block, {Fail, Succ}}, Acc) ->
    [{block, Block},
     {jump, {f, Succ}},
     {label, Fail},
     {block, [{set, [Dst], [{atom, false}], move}]},
     {label, Succ}
     | Acc];
expand_protected(I, Acc) ->
    [I | Acc].
%% bopt_block(Reg, Fail, OldIs, Accumulator, St) -> failed | {NewAcc,St}
%%  Attempt to optimize a block of guard BIFs followed by a test
%%  instruction.  Every known reason the rewrite can be impossible is a
%%  specific thrown atom, caught and mapped to 'failed' below so the
%%  caller keeps the original code.
bopt_block(Reg, Fail, OldIs, [{block,Bl0}|Acc0], St0) ->
    case split_block(Bl0, Reg, Fail, Acc0, false) of
        failed ->
            %% Reason for failure: The block either contained no
            %% guard BIFs with the failure label Fail, or the final
            %% instruction in the block did not assign the Reg register.
            %%io:format("split ~p: ~P\n", [Reg,Bl0,20]),
            failed;
        {Bl1,BlPre} ->
            %% The block has been splitted. Bl1 is a non-empty list
            %% of guard BIF instructions having the failure label Fail.
            %% BlPre is a (possibly empty list) of instructions preceeding
            %% Bl1.
            Acc1 = make_block(BlPre, Acc0),
            {Bl,Acc} = extend_block(Bl1, Fail, Acc1),
            try
                {NewCode,St} = bopt_tree_cg(Bl, Fail, St0),
                ensure_opt_safe(Bl, NewCode, OldIs, Fail, Acc, St),
                {NewCode++Acc,St}
            catch
                %% Not possible to rewrite because a boolean value is
                %% passed to another guard bif, e.g. 'abs(A > B)'
                %% (in this case, obviously nonsense code). Rare in
                %% practice.
                throw:mixed ->
                    failed;

                %% There was a reference to a boolean expression
                %% from inside a protected block (try/catch), to
                %% a boolean expression outside.
                throw:protected_barrier ->
                    failed;

                %% The 'xor' operator was used. We currently don't
                %% find it worthwile to translate 'xor' operators
                %% (the code would be clumsy).
                throw:'xor' ->
                    failed;

                %% The block does not contain a boolean expression,
                %% but only a call to a guard BIF.
                %% For instance: ... when element(1, T) ->
                throw:not_boolean_expr ->
                    failed;

                %% The optimization is not safe. (A register
                %% used by the instructions following the
                %% optimized code is either not assigned a
                %% value at all or assigned a different value.)
                throw:all_registers_not_killed ->
                    failed;
                throw:registers_used ->
                    failed;

                %% A protected block refered to the value
                %% returned by another protected block,
                %% probably because the Core Erlang code
                %% used nested try/catches in the guard.
                %% (v3_core never produces nested try/catches
                %% in guards, so it must have been another
                %% Core Erlang translator.)
                throw:protected_violation ->
                    failed;

                %% Failed to work out the live registers for a GC
                %% BIF. For example, if the number of live registers
                %% needed to be 4 because {x,3} was a source register,
                %% but {x,2} was not known to be initialized, this
                %% exception would be thrown.
                throw:gc_bif_alloc_failure ->
                    failed
            end
    end.
%% ensure_opt_safe(OriginalCode, OptCode, FollowingCode, Fail,
%%                 ReversedPrecedingCode, State) -> ok
%%  Comparing the original code to the optimized code, determine
%%  whether the optimized code is guaranteed to work in the same
%%  way as the original code.
%%
%%  Throws all_registers_not_killed or registers_used if the
%%  optimization is not safe.
%%
ensure_opt_safe(Bl, NewCode, OldIs, Fail, PrecedingCode, St) ->
    %% Here are the conditions that must be true for the
    %% optimization to be safe.
    %%
    %% 1. If a register is INITIALIZED by PrecedingCode,
    %%    then if that register assigned a value in the original
    %%    code, but not in the optimized code, it must be UNUSED or KILLED
    %%    in the code that follows.
    %%
    %% 2. If a register is not known to be INITIALIZED by PreccedingCode,
    %%    then if that register assigned a value in the original
    %%    code, but not in the optimized code, it must be KILLED
    %%    by the code that follows.
    %%
    %% 3. Any register that is assigned a value in the optimized
    %%    code must be UNUSED or KILLED in the following code,
    %%    unless we can be sure that it is always assigned the same
    %%    value.

    InitInPreceding = initialized_regs(PrecedingCode),

    PrevDst = dst_regs(Bl),
    NewDst = dst_regs(NewCode),
    NotSet = ordsets:subtract(PrevDst, NewDst),
    MustBeKilled = ordsets:subtract(NotSet, InitInPreceding),

    case all_killed(MustBeKilled, OldIs, Fail, St) of
        false -> throw(all_registers_not_killed);
        true -> ok
    end,
    MustBeUnused = ordsets:subtract(ordsets:union(NotSet, NewDst),
                                    MustBeKilled),
    case none_used(MustBeUnused, OldIs, Fail, St) of
        false -> throw(registers_used);
        true -> ok
    end,
    ok.
%% Point the failure label of every (gc_)bif in the list at Fail.
%% Only bif/gc_bif 'set' instructions may occur here; anything else is
%% a caller bug and crashes with function_clause (intentionally).
update_fail_label([{set,Ds,As,{bif,N,{f,_}}}|Is], Fail, Acc) ->
    update_fail_label(Is, Fail, [{set,Ds,As,{bif,N,{f,Fail}}}|Acc]);
update_fail_label([{set,Ds,As,{alloc,Regs,{gc_bif,N,{f,_}}}}|Is], Fail, Acc) ->
    update_fail_label(Is, Fail,
                      [{set,Ds,As,{alloc,Regs,{gc_bif,N,{f,Fail}}}}|Acc]);
update_fail_label([], _, Acc) -> reverse(Acc).
%% Wrap a (possibly empty) instruction list into a {block,...}
%% instruction, pushed onto the given accumulator; an empty list
%% produces no block at all.
make_block(Bl) ->
    make_block(Bl, []).

make_block([], Acc) ->
    Acc;
make_block(Is, Acc) ->
    [{block, Is} | Acc].
%% Grow the block backwards into the preceding (reversed) code,
%% absorbing protected pseudo-instructions and, via extend_block_1/3,
%% safe boolean bifs — so the optimizer sees the whole boolean
%% expression.  Returns {ExtendedBlock, RemainingPrecedingCode}.
extend_block(BlAcc, Fail, [{protected,_,_,_}=Prot|OldAcc]) ->
    extend_block([Prot|BlAcc], Fail, OldAcc);
extend_block(BlAcc0, Fail, [{block,Is0}|OldAcc]) ->
    case extend_block_1(reverse(Is0), Fail, BlAcc0) of
        {BlAcc,[]} -> extend_block(BlAcc, Fail, OldAcc);
        {BlAcc,Is} -> {BlAcc,[{block,Is}|OldAcc]}
    end;
extend_block(BlAcc, _, OldAcc) -> {BlAcc,OldAcc}.
%% Scan a reversed block from its end, absorbing instructions while
%% they are bifs branching to Fail, or boolean operators known to be
%% safe; stop at the first instruction that is neither.
extend_block_1([{set,[_],_,{bif,_,{f,Fail}}}=I|Is], Fail, Acc) ->
    extend_block_1(Is, Fail, [I|Acc]);
extend_block_1([{set,[_],As,{bif,Bif,_}}=I|Is]=Is0, Fail, Acc) ->
    case safe_bool_op(Bif, length(As)) of
        false -> {Acc,reverse(Is0)};
        true -> extend_block_1(Is, Fail, [I|Acc])
    end;
extend_block_1([_|_]=Is, _, Acc) -> {Acc,reverse(Is)};
extend_block_1([], _, Acc) -> {Acc,[]}.
%% split_block([Instruction], Destination, FailLabel, [PreInstruction],
%%             ProhibitFailLabelInPreBlock) -> failed | {Block,PreBlock}
%%  Split a sequence of instructions into two blocks - one containing
%%  all guard bif instructions and a pre-block of all instructions
%%  before the guard BIFs.  Fails unless the final instruction assigns
%%  the requested destination register.
split_block(Is0, Dst, Fail, PreIs, ProhibitFailLabel) ->
    case ProhibitFailLabel andalso beam_jump:is_label_used_in(Fail, PreIs) of
        true ->
            %% The failure label was used in one of the instructions (most
            %% probably bit syntax construction) preceeding the block,
            %% the caller might eliminate the label.
            failed;
        false ->
            case reverse(Is0) of
                [{set,[Dst],_,_}|_]=Is ->
                    split_block_1(Is, Fail, ProhibitFailLabel);
                _ -> failed
            end
    end.
%% Second stage of split_block/5: fail if no guard bifs were found, or
%% (when prohibited) if the failure label also occurs in the pre-block.
split_block_1(Is, Fail, ProhibitFailLabel) ->
    case split_block_2(Is, Fail, []) of
        {[],_} -> failed;
        {_,PreBlock}=Res ->
            case ProhibitFailLabel andalso
                split_block_label_used(PreBlock, Fail) of
                true ->
                    %% The failure label was used in the pre-block;
                    %% not allowed, because the label may be removed.
                    failed;
                false ->
                    Res
            end
    end.
%% Collect (from the reversed list) the trailing run of (gc_)bifs whose
%% failure label is Fail; everything before that run becomes the
%% pre-block, restored to original order.
split_block_2([{set,[_],_,{bif,_,{f,Fail}}}=I|Is], Fail, Acc) ->
    split_block_2(Is, Fail, [I|Acc]);
split_block_2([{set,[_],_,{alloc,_,{gc_bif,_,{f,Fail}}}}=I|Is], Fail, Acc) ->
    split_block_2(Is, Fail, [I|Acc]);
split_block_2(Is0, _, Acc) ->
    Is = reverse(Is0),
    {Acc,Is}.
%% True iff any instruction in the list branches to the given failure
%% label via a bif, gc_bif or put_map operation.
split_block_label_used(Is, Fail) ->
    lists:any(fun(I) -> is_split_fail_op(I, Fail) end, Is).

%% Does this single instruction reference {f,Fail}?
is_split_fail_op({set, [_], _, {bif, _, {f, Fail}}}, Fail) -> true;
is_split_fail_op({set, [_], _, {alloc, _, {gc_bif, _, {f, Fail}}}}, Fail) -> true;
is_split_fail_op({set, [_], _, {alloc, _, {put_map, _, {f, Fail}}}}, Fail) -> true;
is_split_fail_op(_, _) -> false.
%% Collect, as an ordset, every register written by a guard (gc_)bif in
%% the instruction list; nested {block,...} instructions are descended
%% into, destinations of other instructions are ignored.
dst_regs(Is) ->
    dst_regs(Is, []).

dst_regs([{block, Bl} | Rest], Acc) ->
    dst_regs(Bl, dst_regs(Rest, Acc));
dst_regs([{set, [Dst], _, {bif, _, {f, _}}} | Rest], Acc) ->
    dst_regs(Rest, [Dst | Acc]);
dst_regs([{set, [Dst], _, {alloc, _, {gc_bif, _, {f, _}}}} | Rest], Acc) ->
    dst_regs(Rest, [Dst | Acc]);
dst_regs([_ | Rest], Acc) ->
    dst_regs(Rest, Acc);
dst_regs([], Acc) ->
    ordsets:from_list(Acc).
%% True iff every register in the list is killed both by the code that
%% follows the optimized block and at the failure label.
all_killed(Rs, OldIs, Fail, St) ->
    lists:all(fun(R) -> is_killed(R, OldIs, Fail, St) end, Rs).
%% True iff none of the registers in the list is used by the code that
%% follows the optimized block nor at the failure label.
none_used(Rs, OldIs, Fail, St) ->
    lists:all(fun(R) -> is_not_used(R, OldIs, Fail, St) end, Rs).
%% Build a boolean expression tree from the block and generate test
%% instructions for it.  Pipeline: free variables -> SSA form ->
%% expression forest -> (single tree expected) -> register renaming ->
%% code generation.  Throws not_boolean_expr when the forest does not
%% reduce to exactly one tree.
bopt_tree_cg(Block0, Fail, St) ->
    Free = free_variables(Block0),
    Block = ssa_block(Block0),
    %% io:format("~p\n", [Block0]),
    %% io:format("~p\n", [Block]),
    %% io:format("~p\n", [gb_trees:to_list(Free)]),
    case bopt_tree(Block, Free, []) of
        {Pre0,[{_,Tree}]} ->
            Pre1 = update_fail_label(Pre0, Fail, []),
            Regs0 = init_regs(gb_trees:keys(Free)),
            %% io:format("~p\n", [dst_regs(Block0)]),
            %% io:format("~p\n", [Pre1]),
            %% io:format("~p\n", [Tree]),
            %% io:nl(),
            {Pre,Regs} = rename_regs(Pre1, Regs0),
            %% io:format("~p\n", [Regs0]),
            %% io:format("~p\n", [Pre]),
            bopt_cg(Tree, Fail, Regs, make_block(Pre), St);
        _Res ->
            throw(not_boolean_expr)
    end.
%% Fold the SSA block into a forest (gb_tree) mapping each destination
%% to a boolean expression tree; instructions that are not boolean
%% operators are kept aside in Pre and their destinations marked 'any'.
bopt_tree([{set,[Dst],As0,{bif,'not',_}}|Is], Forest0, Pre) ->
    {[Arg],Forest1} = bopt_bool_args(As0, Forest0),
    Forest = gb_trees:enter(Dst, {'not',Arg}, Forest1),
    bopt_tree(Is, Forest, Pre);
bopt_tree([{set,[Dst],As0,{bif,'and',_}}|Is], Forest0, Pre) ->
    {As,Forest1} = bopt_bool_args(As0, Forest0),
    Node = make_and_node(As),
    Forest = gb_trees:enter(Dst, Node, Forest1),
    bopt_tree(Is, Forest, Pre);
bopt_tree([{set,[Dst],As0,{bif,'or',_}}|Is], Forest0, Pre) ->
    {As,Forest1} = bopt_bool_args(As0, Forest0),
    Node = make_or_node(As),
    Forest = gb_trees:enter(Dst, Node, Forest1),
    bopt_tree(Is, Forest, Pre);
bopt_tree([{set,_,_,{bif,'xor',_}}|_], _, _) ->
    %% 'xor' is deliberately not translated (see bopt_block/5).
    throw('xor');
bopt_tree([{protected,[Dst],Code,_}|Is], Forest0, Pre) ->
    %% A protected subtree only sees the 'any' values of the outer
    %% forest; referencing an outer boolean tree would be invalid.
    ProtForest0 = gb_trees:from_orddict([P || {_,any}=P <- gb_trees:to_list(Forest0)]),
    case bopt_tree(Code, ProtForest0, []) of
        {ProtPre,[{_,ProtTree}]} ->
            Prot = {prot,ProtPre,ProtTree},
            Forest = gb_trees:enter(Dst, Prot, Forest0),
            bopt_tree(Is, Forest, Pre);
        _Res ->
            throw(not_boolean_expr)
    end;
bopt_tree([{set,[Dst],As,{bif,N,_}}=Bif|Is], Forest0, Pre) ->
    Ar = length(As),
    case safe_bool_op(N, Ar) of
        false ->
            %% Not a boolean operator: keep the bif in Pre, record its
            %% destination as a plain ('any') value.
            bopt_good_args(As, Forest0),
            Forest = gb_trees:enter(Dst, any, Forest0),
            bopt_tree(Is, Forest, [Bif|Pre]);
        true ->
            %% Type test or comparison: becomes a leaf test node.
            bopt_good_args(As, Forest0),
            Test = bif_to_test(Dst, N, As),
            Forest = gb_trees:enter(Dst, Test, Forest0),
            bopt_tree(Is, Forest, Pre)
    end;
bopt_tree([{set,[Dst],As,{alloc,_,{gc_bif,_,_}}}=Bif|Is], Forest0, Pre) ->
    bopt_good_args(As, Forest0),
    Forest = gb_trees:enter(Dst, any, Forest0),
    bopt_tree(Is, Forest, [Bif|Pre]);
bopt_tree([], Forest, Pre) ->
    {reverse(Pre),[R || {_,V}=R <- gb_trees:to_list(Forest), V =/= any]}.
%% A bif is safe to turn into a test instruction if it is a (new-style)
%% type test or a comparison operator.
safe_bool_op(Name, Arity) ->
    case erl_internal:new_type_test(Name, Arity) of
        true -> true;
        false -> erl_internal:comp_op(Name, Arity)
    end.
%% Convert bif arguments into boolean subtrees.  The special clause for
%% two identical arguments must look the argument up only once, because
%% bopt_bool_arg/2 deletes it from the forest on first use.
bopt_bool_args([V0,V0], Forest0) ->
    {V,Forest} = bopt_bool_arg(V0, Forest0),
    {[V,V],Forest};
bopt_bool_args(As, Forest) ->
    mapfoldl(fun bopt_bool_arg/2, Forest, As).
%% Replace a register argument by the boolean tree that produced it,
%% consuming that tree from the forest.  A register holding a plain
%% ('any') value becomes an explicit "=:= true" test; an unknown
%% register means boolean and non-boolean code were mixed.  Literals
%% pass through untouched.
bopt_bool_arg({T,_}=R, Forest) when T =:= x; T =:= y; T =:= tmp ->
    Val = case gb_trees:lookup(R, Forest) of
              {value,any} -> {test,is_eq_exact,fail,[R,{atom,true}]};
              {value,Val0} -> Val0;
              none -> throw(mixed)
          end,
    {Val,gb_trees:delete(R, Forest)};
bopt_bool_arg(Term, Forest) ->
    {Term,Forest}.
%% Validate every argument of a non-boolean bif (see bopt_good_arg/2);
%% throws on the first offending argument, otherwise returns ok.
bopt_good_args(As, Regs) ->
    lists:foreach(fun(A) -> bopt_good_arg(A, Regs) end, As).
%% A non-boolean bif may only consume plain values.  Feeding it the
%% result of a boolean subtree would mix the two worlds ('mixed');
%% a register unknown to the forest must have been assigned inside a
%% protected block ('protected_barrier').  Anything else is fine.
bopt_good_arg({Tag,_}=X, Regs) when Tag =:= x; Tag =:= tmp ->
    case gb_trees:lookup(X, Regs) of
        {value,any} -> ok;
        {value,_} -> throw(mixed);
        none -> throw(protected_barrier)
    end;
bopt_good_arg(_, _) -> ok.
%% Translate a safe boolean bif into its test-instruction form; the
%% real failure label is patched in later (placeholder atom 'fail').
bif_to_test(_, N, As) ->
    beam_utils:bif_to_test(N, As, fail).
%% Build an 'and' tree node, flattening nested conjunctions and
%% constant-folding 'true'/'false' operands; a single remaining operand
%% replaces the node entirely.
make_and_node(Is) ->
    AndList0 = make_and_list(Is),
    case simplify_and_list(AndList0) of
        [] -> {atom,true};
        [Op] -> Op;
        AndList -> {'and',AndList}
    end.
%% Flatten nested 'and' nodes into a single operand list.
make_and_list(Is) ->
    lists:flatmap(fun({'and', As}) -> make_and_list(As);
                     (I) -> [I]
                  end, Is).
%% Boolean simplification of an 'and' operand list: 'true' operands are
%% dropped; a 'false' operand short-circuits the whole conjunction.
simplify_and_list([]) ->
    [];
simplify_and_list([{atom, true} | Rest]) ->
    simplify_and_list(Rest);
simplify_and_list([{atom, false} = False | _]) ->
    [False];
simplify_and_list([Op | Rest]) ->
    [Op | simplify_and_list(Rest)].
%% Build an 'or' tree node, flattening nested disjunctions and
%% constant-folding 'false'/'true' operands; a single remaining operand
%% replaces the node entirely.
make_or_node(Is) ->
    OrList0 = make_or_list(Is),
    case simplify_or_list(OrList0) of
        [] -> {atom,false};
        [Op] -> Op;
        OrList -> {'or',OrList}
    end.
%% Flatten nested 'or' nodes into a single operand list.
make_or_list(Is) ->
    lists:flatmap(fun({'or', As}) -> make_or_list(As);
                     (I) -> [I]
                  end, Is).
%% Boolean simplification of an 'or' operand list: 'false' operands are
%% dropped; a 'true' operand short-circuits the whole disjunction.
simplify_or_list([]) ->
    [];
simplify_or_list([{atom, false} | Rest]) ->
    simplify_or_list(Rest);
simplify_or_list([{atom, true} = True | _]) ->
    [True];
simplify_or_list([Op | Rest]) ->
    [Op | simplify_or_list(Rest)].
%% Code generation for a boolean tree.  Emits (reversed) instructions
%% that fall through on success and branch to Fail otherwise.
bopt_cg({'not',Arg}, Fail, Rs, Acc, St) ->
    %% Push the negation down (De Morgan) and retry.
    I = bopt_cg_not(Arg),
    bopt_cg(I, Fail, Rs, Acc, St);
bopt_cg({'and',As}, Fail, Rs, Acc, St) ->
    bopt_cg_and(As, Fail, Rs, Acc, St);
bopt_cg({'or',As}, Fail, Rs, Acc, St0) ->
    {Succ,St} = new_label(St0),
    bopt_cg_or(As, Succ, Fail, Rs, Acc, St0);
bopt_cg({test,N,fail,As0}, Fail, Rs, Acc, St) ->
    As = rename_sources(As0, Rs),
    Test = {test,N,{f,Fail},As},
    {[Test|Acc],St};
bopt_cg({inverted_test,N,fail,As0}, Fail, Rs, Acc, St0) ->
    %% An inverted test succeeds by branching: the test's own failure
    %% label skips the jump to the (real) failure label.
    As = rename_sources(As0, Rs),
    {Lbl,St} = new_label(St0),
    {[{label,Lbl},{jump,{f,Fail}},{test,N,{f,Lbl},As}|Acc],St};
bopt_cg({prot,Pre0,Tree}, Fail, Rs0, Acc, St0) ->
    Pre1 = update_fail_label(Pre0, Fail, []),
    {Pre,Rs} = rename_regs(Pre1, Rs0),
    bopt_cg(Tree, Fail, Rs, make_block(Pre, Acc), St0);
bopt_cg({atom,true}, _Fail, _Rs, Acc, St) ->
    %% Constant true: nothing to emit.
    {Acc,St};
bopt_cg({atom,false}, Fail, _Rs, Acc, St) ->
    %% Constant false: unconditional branch to the failure label.
    {[{jump,{f,Fail}}|Acc],St};
bopt_cg(_, _, _, _, _) ->
    throw(not_boolean_expr).
%% Push a negation one level down the tree: De Morgan on 'and'/'or',
%% double negation handled via bopt_cg_not_not/1, tests become inverted
%% tests, and boolean constants flip.
bopt_cg_not({'and',As0}) ->
    As = [bopt_cg_not(A) || A <- As0],
    {'or',As};
bopt_cg_not({'or',As0}) ->
    As = [bopt_cg_not(A) || A <- As0],
    {'and',As};
bopt_cg_not({'not',Arg}) ->
    bopt_cg_not_not(Arg);
bopt_cg_not({test,Test,Fail,As}) ->
    {inverted_test,Test,Fail,As};
bopt_cg_not({atom,Bool}) when is_boolean(Bool) ->
    {atom,not Bool};
bopt_cg_not(_) ->
    throw(not_boolean_expr).
%% Eliminate a double negation: descend through 'and'/'or' nodes; an
%% odd nested 'not' flips back into bopt_cg_not/1; leaves are kept.
bopt_cg_not_not({'and',As}) ->
    {'and',[bopt_cg_not_not(A) || A <- As]};
bopt_cg_not_not({'or',As}) ->
    {'or',[bopt_cg_not_not(A) || A <- As]};
bopt_cg_not_not({'not',Arg}) ->
    bopt_cg_not(Arg);
bopt_cg_not_not(Leaf) -> Leaf.
%% Generate code for a conjunction: every operand must succeed and all
%% of them share the same failure label, so they are emitted in order.
bopt_cg_and(Is, Fail, Rs, Acc0, St0) ->
    lists:foldl(fun(I, {Acc, St}) ->
                        bopt_cg(I, Fail, Rs, Acc, St)
                end, {Acc0, St0}, Is).
%% Generate code for a disjunction: each operand except the last jumps
%% to Succ when it succeeds and falls through (via a fresh label) to
%% the next operand when it fails; only the last operand branches to
%% the real failure label.
bopt_cg_or([I], Succ, Fail, Rs, Acc0, St0) ->
    {Acc,St} = bopt_cg(I, Fail, Rs, Acc0, St0),
    {[{label,Succ}|Acc],St};
bopt_cg_or([I|Is], Succ, Fail, Rs, Acc0, St0) ->
    {Lbl,St1} = new_label(St0),
    {Acc,St} = bopt_cg(I, Lbl, Rs, Acc0, St1),
    bopt_cg_or(Is, Succ, Fail, Rs, [{label,Lbl},{jump,{f,Succ}}|Acc], St).
%% Allocate a fresh label number from the pass state.
new_label(#st{next=LabelNum}=St) when is_integer(LabelNum) ->
    {LabelNum,St#st{next=LabelNum+1}}.
%% Compute the registers that must be live on entry to the block (its
%% free variables), returned as a gb_tree mapping each register to the
%% placeholder value 'any'.
free_variables(Is) ->
    E = gb_sets:empty(),
    free_vars_1(Is, E, E, E).
%% Accumulators: F = registers read before being written (free);
%% N = registers already assigned within the block; A = registers a GC
%% bif's 'alloc' count implies must be live.
free_vars_1([{set,Ds,As,{bif,_,_}}|Is], F0, N0, A) ->
    F = gb_sets:union(F0, gb_sets:difference(var_list(As), N0)),
    N = gb_sets:union(N0, var_list(Ds)),
    free_vars_1(Is, F, N, A);
free_vars_1([{set,Ds,As,{alloc,Regs,{gc_bif,_,_}}}|Is], F0, N0, A0) ->
    A = gb_sets:union(A0, gb_sets:from_list(free_vars_regs(Regs))),
    F = gb_sets:union(F0, gb_sets:difference(var_list(As), N0)),
    N = gb_sets:union(N0, var_list(Ds)),
    free_vars_1(Is, F, N, A);
free_vars_1([{protected,_,Pa,_}|Is], F, N, A) ->
    %% Treat the protected code as if inlined at this point.
    free_vars_1(Pa++Is, F, N, A);
free_vars_1([], F0, N, A) ->
    F = case gb_sets:is_empty(A) of
            true ->
                %% No GC BIFs.
                {x,X} = gb_sets:smallest(N),
                P = ordsets:from_list(free_vars_regs(X)),
                ordsets:union(gb_sets:to_list(F0), P);
            false ->
                %% At least one GC BIF.
                gb_sets:to_list(gb_sets:union(F0, gb_sets:difference(A, N)))
        end,
    gb_trees:from_orddict([{K,any} || K <- F]).
%% Collect the set (gb_set) of x and y registers occurring in a list of
%% operands; literals and other terms are ignored.
var_list(Is) ->
    lists:foldl(fun({x, _} = V, Set) -> gb_sets:add(V, Set);
                   ({y, _} = V, Set) -> gb_sets:add(V, Set);
                   (_, Set) -> Set
                end, gb_sets:empty(), Is).
%% The x registers {x,N-1}..{x,0} in descending order; [] for N =:= 0.
free_vars_regs(N) ->
    [{x, I} || I <- lists:seq(N - 1, 0, -1)].
%% Replace SSA names by concrete x registers.  For each allocating
%% (GC bif) instruction the current number of live registers is also
%% computed and recorded, since the GC needs it.
rename_regs(Is, Regs) ->
    rename_regs(Is, Regs, []).

rename_regs([{set,[Dst0],Ss0,{alloc,_,Info}}|Is], Regs0, Acc) ->
    %% Live count must be taken *before* allocating the destination.
    Live = live_regs(Regs0),
    Ss = rename_sources(Ss0, Regs0),
    Regs = put_reg(Dst0, Regs0),
    Dst = fetch_reg(Dst0, Regs),
    rename_regs(Is, Regs, [{set,[Dst],Ss,{alloc,Live,Info}}|Acc]);
rename_regs([{set,[Dst0],Ss0,Info}|Is], Regs0, Acc) ->
    Ss = rename_sources(Ss0, Regs0),
    Regs = put_reg(Dst0, Regs0),
    Dst = fetch_reg(Dst0, Regs),
    rename_regs(Is, Regs, [{set,[Dst],Ss,Info}|Acc]);
rename_regs([], Regs, Acc) -> {reverse(Acc),Regs}.
%% Map each source operand through the register allocation: x and tmp
%% names are looked up, everything else (literals, y regs) is kept.
rename_sources(Ss, Regs) ->
    [rename_source(S, Regs) || S <- Ss].

rename_source({x, _} = R, Regs) -> fetch_reg(R, Regs);
rename_source({tmp, _} = R, Regs) -> fetch_reg(R, Regs);
rename_source(E, _Regs) -> E.
%%%
%%% Keeping track of register assignments.
%%%

%% Seed the allocation list from the (sorted) free variables: each free
%% {x,I} keeps its slot, gaps below the highest free x register are
%% marked 'reserved' (contents unknown — may not be initialized), and
%% y registers need no slots.
init_regs(Free) ->
    init_regs_1(Free, 0).

init_regs_1([{x,I}=V|T], I) ->
    [{I,V}|init_regs_1(T, I+1)];
init_regs_1([{x,X}|_]=T, I) when I < X ->
    [{I,reserved}|init_regs_1(T, I+1)];
init_regs_1([{y,_}|_], _) -> [];
init_regs_1([], _) -> [].
%% Append a new variable at the end of the allocation list; its slot
%% number equals the number of entries already present.
put_reg(V, Rs) ->
    Rs ++ [{length(Rs), V}].
%% Map a variable to the {x,Slot} register it was allocated to; crashes
%% (function_clause) if the variable was never allocated.
fetch_reg(Var, [{Slot, Var} | _]) -> {x, Slot};
fetch_reg(Var, [_ | Rest]) -> fetch_reg(Var, Rest).
%% Number of live x registers implied by the allocation list: the slot
%% number of its last entry plus one.  If any slot is still 'reserved'
%% we cannot prove it is initialized, so the optimization must be
%% aborted (same throw as the original scan-based version).
live_regs(Regs) ->
    case lists:keymember(reserved, 2, Regs) of
        true ->
            throw(gc_bif_alloc_failure);
        false ->
            case Regs of
                [] ->
                    0;
                _ ->
                    {I, _} = lists:last(Regs),
                    I + 1
            end
    end.
%%%
%%% Convert a block to Static Single Assignment (SSA) form.
%%%

%% State for the SSA conversion.
-record(ssa,
    {live=0,                %Variable counter.
     sub=gb_trees:empty(),  %Substitution table.
     prot=gb_sets:empty(),  %Targets assigned by protecteds.
     in_prot=false          %Inside a protected.
    }).
%% Rewrite a block so that every destination is assigned exactly once
%% (re-assignments get fresh {tmp,N} names); required before building
%% the boolean expression forest.
ssa_block(Is0) ->
    {Is,_} = ssa_block_1(Is0, #ssa{}, []),
    Is.
%% The worker for ssa_block/1, threading the #ssa{} state.
ssa_block_1([{protected,[_],Pa0,Pb}|Is], Sub0, Acc) ->
    %% Convert the protected code with in_prot set, record its result
    %% variable in 'prot' (so later references from inside another
    %% protected can be detected), and restore the outer in_prot flag.
    {Pa,Sub1} = ssa_block_1(Pa0, Sub0#ssa{in_prot=true}, []),
    Dst = ssa_last_target(Pa),
    Sub = Sub1#ssa{prot=gb_sets:insert(Dst, Sub1#ssa.prot),
                   in_prot=Sub0#ssa.in_prot},
    ssa_block_1(Is, Sub, [{protected,[Dst],Pa,Pb}|Acc]);
ssa_block_1([{set,[Dst],As,Bif}|Is], Sub0, Acc0) ->
    %% Note: sources are substituted with the *old* table (Sub0), the
    %% destination with the new one — that is what makes this SSA.
    Sub1 = ssa_in_use_list(As, Sub0),
    Sub = ssa_assign(Dst, Sub1),
    Acc = [{set,[ssa_sub(Dst, Sub)],ssa_sub_list(As, Sub0),Bif}|Acc0],
    ssa_block_1(Is, Sub, Acc);
ssa_block_1([], Sub, Acc) -> {reverse(Acc),Sub}.
%% Make sure every source operand is known to the substitution table.
ssa_in_use_list(As, Sub) ->
    foldl(fun ssa_in_use/2, Sub, As).
%% Register an x register that is read before being written: it maps to
%% itself until the block assigns it for the first time.
ssa_in_use({x,_}=R, #ssa{sub=Sub0}=Ssa) ->
    case gb_trees:is_defined(R, Sub0) of
        true -> Ssa;
        false ->
            Sub = gb_trees:insert(R, R, Sub0),
            Ssa#ssa{sub=Sub}
    end;
ssa_in_use(_, Ssa) -> Ssa.
%% Assign a fresh SSA name ({tmp,N}) to a destination register.  The
%% new name also maps to itself so that later reads of it resolve.
ssa_assign({x,_}=R, #ssa{sub=Sub0}=Ssa0) ->
    {NewReg,Ssa} = ssa_new_reg(Ssa0),
    case gb_trees:is_defined(R, Sub0) of
        false ->
            Sub = gb_trees:insert(R, NewReg, Sub0),
            Ssa#ssa{sub=Sub};
        true ->
            Sub1 = gb_trees:update(R, NewReg, Sub0),
            Sub = gb_trees:insert(NewReg, NewReg, Sub1),
            Ssa#ssa{sub=Sub}
    end.
%% Substitute every element of a list through the SSA table.
ssa_sub_list(List, Sub) ->
    [ssa_sub(E, Sub) || E <- List].
%% Substitute one term through the SSA table (unknown terms pass
%% through).  Referencing, from inside a protected, a value produced by
%% another protected is illegal and aborts the optimization.
ssa_sub(R0, #ssa{sub=Sub,prot=Prot,in_prot=InProt}) ->
    case gb_trees:lookup(R0, Sub) of
        none -> R0;
        {value,R} ->
            case InProt andalso gb_sets:is_element(R, Prot) of
                true ->
                    throw(protected_violation);
                false ->
                    R
            end
    end.
%% Allocate the next fresh SSA name.
ssa_new_reg(#ssa{live=Reg}=Ssa) ->
    {{tmp,Reg},Ssa#ssa{live=Reg+1}}.
%% The destination assigned by the final instruction of an SSA block;
%% the last instruction must be a single-destination 'set'.
ssa_last_target([{set, [Dst], _, _}]) ->
    Dst;
ssa_last_target([_ | Rest]) ->
    ssa_last_target(Rest).
%% is_killed(Register, [Instruction], FailLabel, State) -> true|false
%%  Determine whether a register is killed in the instruction sequence.
%%  The precomputed live-at-label index in the state lets us answer the
%%  question across branches, including at the failure label.
is_killed(R, Is, Label, #st{ll=Ll}) ->
    beam_utils:is_killed(R, Is, Ll) andalso
        beam_utils:is_killed_at(R, Label, Ll).
%% is_not_used(Register, [Instruction], FailLabel, State) -> true|false
%%  Determine whether a register is never used in the instruction
%%  sequence (it could still be referenced by an allocate instruction,
%%  meaning that it MUST be initialized).  The state allows answering
%%  across branches, including at the failure label.
is_not_used(R, Is, Label, #st{ll=Ll}) ->
    beam_utils:is_not_used(R, Is, Ll) andalso
        beam_utils:is_not_used_at(R, Label, Ll).
%% initialized_regs([Instruction]) -> [Register]
%%  Given a REVERSED instruction sequence, return an ordset of the
%%  registers that are guaranteed to be initialized (not contain
%%  garbage).  The scan is conservative: it stops at the first
%%  instruction it does not understand.
initialized_regs(Is) ->
    initialized_regs(Is, ordsets:new()).

initialized_regs([{set,Dst,_Src,{alloc,Live,_}}|_], Regs0) ->
    %% An allocating instruction guarantees the first Live x registers
    %% were initialized; no need to scan further back.
    Regs = add_init_regs(free_vars_regs(Live), Regs0),
    add_init_regs(Dst, Regs);
initialized_regs([{set,Dst,Src,_}|Is], Regs) ->
    initialized_regs(Is, add_init_regs(Dst, add_init_regs(Src, Regs)));
initialized_regs([{test,_,_,Src}|Is], Regs) ->
    initialized_regs(Is, add_init_regs(Src, Regs));
initialized_regs([{block,Bl}|Is], Regs) ->
    %% Flatten a block in place, keeping the reversed order.
    initialized_regs(reverse(Bl, Is), Regs);
initialized_regs([{bs_context_to_binary,Src}|Is], Regs) ->
    initialized_regs(Is, add_init_regs([Src], Regs));
initialized_regs([{label,_},{func_info,_,_,Arity}|_], Regs) ->
    %% Reached the function entry: exactly the Arity argument
    %% registers are initialized there.
    InitRegs = free_vars_regs(Arity),
    add_init_regs(InitRegs, Regs);
initialized_regs([_|_], Regs) -> Regs;
initialized_regs([], Regs) ->
    %% Robustness fix: an empty (or fully consumed) preceding sequence
    %% initializes nothing further.  Previously this case crashed with
    %% a function_clause error, which — unlike the throws used for
    %% "optimization not possible" — would escape bopt_block/5 and
    %% abort the whole compilation.
    Regs.
%% Add every x register in the operand list to the ordset of registers
%% known to be initialized; other operands (y regs, literals) are
%% ignored.
add_init_regs(Operands, Regs) ->
    lists:foldl(fun({x, _} = X, Acc) -> ordsets:add_element(X, Acc);
                   (_, Acc) -> Acc
                end, Regs, Operands).
%%
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% Behaviour and generic helpers for state-based CRDTs.
-module(state_type).
-author("<NAME> <<EMAIL>>").

-include("state_type.hrl").

%% Generic operations over any state-based CRDT.
-export([new/1,
         mutate/3,
         is_inflation/2,
         is_strict_inflation/2,
         irreducible_is_strict_inflation/2]).
-export([delta/2]).
-export([extract_args/1]).
-export([crdt_size/1]).

-export_type([state_type/0,
              crdt/0,
              digest/0,
              format/0,
              delta_method/0]).
%% Define some initial types.
%% The module name of every supported CRDT implementation; each one
%% implements the callbacks declared below.
-type state_type() :: state_awmap |
                      state_awset |
                      state_awset_ps |
                      state_bcounter |
                      state_boolean |
                      state_dwflag |
                      state_ewflag |
                      state_gcounter |
                      state_gmap |
                      state_gset |
                      state_ivar |
                      state_lexcounter |
                      state_lwwregister |
                      state_max_int |
                      state_mvregister |
                      state_mvmap |
                      state_orset |
                      state_pair |
                      state_pncounter |
                      state_twopset.
%% A CRDT is its implementing module tagged onto an opaque payload.
-type crdt() :: {state_type(), type:payload()}.
%% A digest is either the full state itself or implementation-specific
%% metadata summarizing it.
-type digest() :: {state, crdt()} |  %% state as digest
                  {mdata, term()}.   %% metadata as digest
-type delta_method() :: state | mdata.
%% Supported serialization formats.
-type format() :: erlang.

%% Perform a delta mutation.
-callback delta_mutate(type:operation(), type:id(), crdt()) ->
    {ok, crdt()} | {error, type:error()}.

%% Merge two replicas.
%% If we merge two CRDTs, the result is a CRDT.
%% If we merge a delta and a CRDT, the result is a CRDT.
%% If we merge two deltas, the result is a delta (delta group).
-callback merge(crdt(), crdt()) -> crdt().

%% Check if a some state is bottom
-callback is_bottom(crdt()) -> boolean().

%% Inflation testing.
-callback is_inflation(crdt(), crdt()) -> boolean().
-callback is_strict_inflation(crdt(), crdt()) -> boolean().

%% Let A be the first argument.
%% Let B be the second argument.
%% A is a join-irreducible state.
%% This functions checks if A will strictly inflate B.
%% B can be a CRDT or a digest of a CRDT.
-callback irreducible_is_strict_inflation(crdt(),
                                          digest()) -> boolean().

%% CRDT digest (which can be the CRDT state itself).
-callback digest(crdt()) -> digest().

%% Join decomposition.
-callback join_decomposition(crdt()) -> [crdt()].

%% Let A be the first argument.
%% Let B be the second argument.
%% This function returns a ∆ from A that inflates B.
%% "The join of all s in join_decomposition(A) such that s strictly
%% inflates B"
-callback delta(crdt(), digest()) -> crdt().

%% @todo These should be moved to type.erl
%% Encode and Decode.
-callback encode(format(), crdt()) -> binary().
-callback decode(format(), binary()) -> crdt().
%% @doc Builds a new (empty) CRDT of the same type as the given CRDT.
-spec new(crdt()) -> any(). %% @todo Fix this any()
new({?GMAP_TYPE, {ValuesType, _Payload}}) ->
    %% GMaps are parameterized by the type of their values.
    ?GMAP_TYPE:new([ValuesType]);
new({?PAIR_TYPE, {Fst, Snd}}) ->
    %% Pairs are rebuilt component-wise (components may themselves be
    %% composite).
    {?PAIR_TYPE, {new(Fst), new(Snd)}};
new({Type, _Payload}) ->
    Type:new().
%% @doc Generic Join composition.
%% Apply Op by asking the type for a delta and merging it back into the
%% full state; an {error, _} from the delta mutation is passed through.
-spec mutate(type:operation(), type:id(), crdt()) ->
    {ok, crdt()} | {error, type:error()}.
mutate(Op, Actor, {Type, _}=CRDT) ->
    case Type:delta_mutate(Op, Actor, CRDT) of
        {ok, {Type, Delta}} ->
            {ok, Type:merge({Type, Delta}, CRDT)};
        Error ->
            Error
    end.
%% @doc Generic check for inflation.
%% CRDT2 is an inflation of CRDT1 iff merging CRDT1 into CRDT2 changes
%% nothing (join semilattice order).
-spec is_inflation(crdt(), crdt()) -> boolean().
is_inflation({Type, _}=CRDT1, {Type, _}=CRDT2) ->
    Type:equal(Type:merge(CRDT1, CRDT2), CRDT2).
%% @doc Generic check for strict inflation.
%% We have a strict inflation if:
%%  - we have an inflation
%%  - we have different CRDTs
-spec is_strict_inflation(crdt(), crdt()) -> boolean().
is_strict_inflation({Type, _}=CRDT1, {Type, _}=CRDT2) ->
    Type:is_inflation(CRDT1, CRDT2) andalso
        not Type:equal(CRDT1, CRDT2).
%% @doc Generic check for irreducible strict inflation.
%% Default implementation for the full-state digest form: the
%% irreducible state strictly inflates the CRDT iff merging it in
%% produces a strictly larger state.
-spec irreducible_is_strict_inflation(crdt(),
                                      digest()) -> boolean().
irreducible_is_strict_inflation({Type, _}=Irreducible,
                                {state, {Type, _}=CRDT}) ->
    Merged = Type:merge(Irreducible, CRDT),
    Type:is_strict_inflation(CRDT, Merged).
%% @doc Generic delta calculation.
%% Join of all irreducible states in the decomposition of A that
%% strictly inflate B (per the type's own irreducibility check),
%% starting from the bottom state of A's type.
-spec delta(crdt(), digest()) -> crdt().
delta({Type, _}=A, B) ->
    lists:foldl(
        fun(Irreducible, Acc) ->
            case Type:irreducible_is_strict_inflation(Irreducible,
                                                      B) of
                true ->
                    Type:merge(Irreducible, Acc);
                false ->
                    Acc
            end
        end,
        new(A),
        Type:join_decomposition(A)
    ).
%% @doc extract arguments from complex (composite) types; a bare type
%% name is normalized to the {Type, Args} form with an empty argument
%% list.
extract_args(TypeSpec) ->
    case TypeSpec of
        {Type, Args} -> {Type, Args};
        Type -> {Type, []}
    end.
%% @doc Term size.
%% Size of a CRDT's payload in machine words; composite types
%% (bcounter, pair, maps) recurse into their components, everything
%% else is measured flat via erts_debug:flat_size/1.
crdt_size({?AWMAP_TYPE, {_CType, CRDT}}) -> crdt_size(CRDT);
crdt_size({?AWSET_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?BCOUNTER_TYPE, {CRDT1, CRDT2}}) ->
    crdt_size(CRDT1) + crdt_size(CRDT2);
crdt_size({?BOOLEAN_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?DWFLAG_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?EWFLAG_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?GCOUNTER_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?GMAP_TYPE, {_CType, CRDT}}) -> crdt_size(CRDT);
crdt_size({?GSET_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?IVAR_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?LEXCOUNTER_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?LWWREGISTER_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?MAX_INT_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?MVMAP_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?MVREGISTER_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?ORSET_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?PAIR_TYPE, {CRDT1, CRDT2}}) ->
    crdt_size(CRDT1) + crdt_size(CRDT2);
crdt_size({?PNCOUNTER_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size({?TWOPSET_TYPE, CRDT}) -> crdt_size(CRDT);
crdt_size(T) ->
    %% Leaf payload (or unknown term): flat size in words.
    erts_debug:flat_size(T).
-module(els_code_action_provider).
-behaviour(els_provider).
-export([
is_enabled/0,
handle_request/2
]).
-include("els_lsp.hrl").
-type state() :: any().
%%==============================================================================
%% els_provider functions
%%==============================================================================
%% @doc Whether this provider is enabled; code actions are always on.
-spec is_enabled() -> boolean().
is_enabled() -> true.
%% @doc Handle a `textDocument/codeAction' request.
%%
%% Extracts the document URI, the requested range, and the context
%% (which carries the published diagnostics) from the request params
%% and returns the list of applicable code actions.
-spec handle_request(any(), state()) -> {response, any()}.
handle_request({document_codeaction, Params}, _State) ->
    #{
        <<"textDocument">> := #{<<"uri">> := Uri},
        <<"range">> := RangeLSP,
        <<"context">> := Context
    } = Params,
    Result = code_actions(Uri, RangeLSP, Context),
    {response, Result}.
%%==============================================================================
%% Internal Functions
%%==============================================================================
%% @doc Result: `(Command | CodeAction)[] | null'
%%
%% Builds one batch of code actions per published diagnostic and
%% flattens the batches into a single list.
-spec code_actions(uri(), range(), code_action_context()) -> [map()].
code_actions(Uri, _Range, #{<<"diagnostics">> := Diagnostics}) ->
    ActionsPerDiagnostic =
        [make_code_actions(Uri, Diagnostic) || Diagnostic <- Diagnostics],
    lists:flatten(ActionsPerDiagnostic).
%% @doc Build the code actions applicable to a single diagnostic.
%%
%% Each known diagnostic message is recognized by a regular expression;
%% its capture groups are handed to the matching action builder in
%% `els_code_actions'.
-spec make_code_actions(uri(), map()) -> [map()].
make_code_actions(
    Uri,
    #{<<"message">> := Message, <<"range">> := Range} = Diagnostic
) ->
    %% Optional extra payload attached by the diagnostics producer.
    Data = maps:get(<<"data">>, Diagnostic, <<>>),
    make_code_actions(
        [
            {"function (.*) is unused", fun els_code_actions:export_function/4},
            {"variable '(.*)' is unused", fun els_code_actions:ignore_variable/4},
            {"variable '(.*)' is unbound", fun els_code_actions:suggest_variable/4},
            {"Module name '(.*)' does not match file name '(.*)'",
                fun els_code_actions:fix_module_name/4},
            {"Unused macro: (.*)", fun els_code_actions:remove_macro/4},
            {"function (.*) undefined", fun els_code_actions:create_function/4},
            {"Unused file: (.*)", fun els_code_actions:remove_unused/4},
            {"Atom typo\\? Did you mean: (.*)", fun els_code_actions:fix_atom_typo/4}
        ],
        Uri,
        Range,
        Data,
        Message
    ).
%% @doc Try each `{Regex, ActionFun}' pair against the diagnostic
%% message, accumulating the actions produced by every pair that
%% matches. Non-matching pairs contribute nothing.
-spec make_code_actions([{string(), Fun}], uri(), range(), binary(), binary()) ->
    [map()]
when
    Fun :: fun((uri(), range(), binary(), [binary()]) -> [map()]).
make_code_actions([], _Uri, _Range, _Data, _Message) ->
    [];
make_code_actions([{RE, Fun} | Rest], Uri, Range, Data, Message) ->
    Actions =
        %% Capture groups (all but the whole match) are passed to the
        %% action builder as binaries.
        case re:run(Message, RE, [{capture, all_but_first, binary}]) of
            {match, Matches} ->
                Fun(Uri, Range, Data, Matches);
            nomatch ->
                []
        end,
Actions ++ make_code_actions(Rest, Uri, Range, Data, Message). | apps/els_lsp/src/els_code_action_provider.erl | 0.519521 | 0.437703 | els_code_action_provider.erl | starcoder |
%%%-------------------------------------------------------------------
%%% @doc
%%% Partitioned/Sharded ETS tables.
%%%
%%% <h2>Features</h2>
%%%
%%% <ul>
%%% <li>
%%% `shards' implements the ETS API to keep compatibility and make the usage
%%% straightforward; exactly the same if you were using `ets'.
%%% </li>
%%% <li>
%%% Sharded or partitioned tables under-the-hood. This feature is managed
%%% entirely by `shards' and it is 100% transparent for the client. Provides
%%% a logical view of a single ETS table, but internally, it is composed by
%%% `N' number of `partitions' and each partition has associated an ETS table.
%%% </li>
%%% <li>
%%% High performance and scalability. `shards' keeps the lock contention under
%%% control enabling ETS tables to scale out and support higher levels of
%%% concurrency without lock issues; specially write-locks, which most of the
%%% cases might cause significant performance degradation.
%%% </li>
%%% </ul>
%%%
%%% <h2>Partitioned Table</h2>
%%%
%%% When a table is created with `shards:new/2', a new supervision tree is
%%% created to represent the partitioned table. There is a main supervisor
%%% `shards_partition_sup' that create an ETS table to store the metadata
%%% and also starts the children which are the partitions to create. Each
%%% partition is owned by `shards_partition' (it is a `gen_server') and it
%%% creates an ETS table for storing data mapped to that partition. The
%%% supervision tree looks like:
%%%
%%% <br></br>
%%% ```
%%% [shards_partition]--><ETS-Table>
%%% /
%%% [shards_partition_sup]--<-[shards_partition]--><ETS-Table>
%%% | \
%%% <Metadata-ETS-Table> [shards_partition]--><ETS-Table>
%%% '''
%%% <br></br>
%%%
%%% The returned value by `shards:new/2' may be an atom if it is a named
%%% table or a reference otherwise, and in the second case the returned
%%% reference is the one of the metadata table, which is the main entry
%%% point and it is owned by the main supervisor. See `shards:new/2' for
%%% more information.
%%%
%%% <h2>Usage</h2>
%%%
%%% ```
%%% > Tab = shards:new(tab, []).
%%% #Ref<0.1541908042.2337144842.31535>
%%% > shards:insert(Tab, [{a, 1}, {b, 2}, {c, 3}]).
%%% true
%%% > shards:lookup(Tab, a).
%%% [{a,1}]
%%% > shards:lookup(Tab, b).
%%% [{b,2}]
%%% > shards:lookup(Tab, c).
%%% [{c,3}]
%%% > shards:lookup_element(Tab, c, 2).
%%% 3
%%% > shards:lookup(Tab, d).
%%% []
%%% > shards:delete(Tab, c).
%%% true
%%% > shards:lookup(Tab, c).
%%% []
%%% '''
%%%
%%% As you can see, the usage is exactly the same if you were using `ets',
%%% you can try the rest of the ETS API but with `shards' module.
%%%
%%% <h2>Important</h2>
%%%
%%% Despite `shards' aims to keep 100% compatibility with current ETS API,
%%% the semantic for some of the functions may be a bit different due to
%%% the nature of sharding; it is not the same having all the entries in
%%% a single table than distributed across multiple ones. For example,
%%% for query-based functions like `select', `match', etc., the returned
%%% entries are the same but not necessary the same order than `ets'.
%%% For `first', `next', and `last' the behavior is similar in the
%%% sense that they traverse the whole partitioned table, so the final
%%% result is the same, but the order in which the entries are traversed may be
%%% different. Therefore, it is highly recommended to read the documentation
%%% of the functions.
%%%
%%% @end
%%%-------------------------------------------------------------------
-module(shards).
%% ETS API
-export([
all/0,
delete/1, delete/2, delete/3,
delete_all_objects/1, delete_all_objects/2,
delete_object/2, delete_object/3,
file2tab/1, file2tab/2,
first/1, first/2,
foldl/3, foldl/4,
foldr/3, foldr/4,
i/0,
info/1, info/2,
insert/2, insert/3,
insert_new/2, insert_new/3,
is_compiled_ms/1,
last/1,
lookup/2, lookup/3,
lookup_element/3, lookup_element/4,
match/1, match/2, match/3, match/4,
match_delete/2, match_delete/3,
match_object/1, match_object/2, match_object/3, match_object/4,
match_spec_compile/1,
match_spec_run/2,
member/2, member/3,
new/2,
next/2, next/3,
prev/2,
rename/2, rename/3,
safe_fixtable/2,
select/1, select/2, select/3, select/4,
select_count/2, select_count/3,
select_delete/2, select_delete/3,
select_replace/2, select_replace/3,
select_reverse/1, select_reverse/2, select_reverse/3, select_reverse/4,
setopts/2, setopts/3,
tab2file/2, tab2file/3,
tab2list/1, tab2list/2,
tabfile_info/1,
table/1, table/2, table/3,
test_ms/2,
take/2, take/3,
update_counter/3, update_counter/4, update_counter/5,
update_element/3, update_element/4
]).
%% Helpers
-export([
table_meta/1,
get_meta/2,
get_meta/3,
put_meta/3,
partition_owners/1
]).
%% Inline-compiled functions
-compile({inline, [
table_meta/1,
get_meta/2,
get_meta/3,
put_meta/3
]}).
%%%===================================================================
%%% Types & Macros
%%%===================================================================
%% @type tab() = atom() | ets:tid().
%%
%% Table parameter
-type tab() :: atom() | ets:tid().
%% ETS Types
-type access() :: public | protected | private.
-type type() :: set | ordered_set | bag | duplicate_bag.
-type ets_continuation() ::
'$end_of_table'
| {tab(), integer(), integer(), ets:comp_match_spec(), list(), integer()}
| {tab(), _, _, integer(), ets:comp_match_spec(), list(), integer(), integer()}.
%% @type tweaks() =
%% {write_concurrency, boolean()}
%% | {read_concurrency, boolean()}
%% | compressed.
%%
%% ETS tweaks option
-type tweaks() ::
{write_concurrency, boolean()}
| {read_concurrency, boolean()}
| compressed.
%% @type shards_opt() =
%% {partitions, pos_integer()}
%% | {keyslot_fun, shards_meta:keyslot_fun()}
%% | {restore, term(), term()}.
%%
%% Shards extended options.
-type shards_opt() ::
{partitions, pos_integer()}
| {keyslot_fun, shards_meta:keyslot_fun()}
| {restore, term(), term()}.
%% @type option() =
%% type() | access() | named_table
%% | {keypos, pos_integer()}
%% | {heir, pid(), HeirData :: term()}
%% | {heir, none} | tweaks()
%% | shards_opt().
%%
%% Create table options – used by `new/2'.
-type option() ::
type()
| access()
| named_table
| {keypos, pos_integer()}
| {heir, pid(), HeirData :: term()}
| {heir, none}
| tweaks()
| shards_opt().
%% ETS Info Tuple
-type info_tuple() ::
{compressed, boolean()}
| {heir, pid() | none}
| {keypos, pos_integer()}
| {memory, non_neg_integer()}
| {name, atom()}
| {named_table, boolean()}
| {node, node()}
| {owner, pid()}
| {protection, access()}
| {size, non_neg_integer()}
| {type, type()}
| {write_concurrency, boolean()}
| {read_concurrency, boolean()}
| {shards, [atom()]}.
%% ETS Info Item
-type info_item() ::
compressed | fixed | heir | keypos | memory
| name | named_table | node | owner | protection
| safe_fixed | size | stats | type
| write_concurrency | read_concurrency
| shards.
%% ETS TabInfo Item
-type tabinfo_item() ::
{name, atom()}
| {type, type()}
| {protection, access()}
| {named_table, boolean()}
| {keypos, non_neg_integer()}
| {size, non_neg_integer()}
| {extended_info, [md5sum | object_count]}
| {version, {Major :: non_neg_integer(), Minor :: non_neg_integer()}}
| {shards, [atom()]}.
%% @type continuation() = {
%% Tab :: tab(),
%% MatchSpec :: ets:match_spec(),
%% Limit :: pos_integer(),
%% Partition :: non_neg_integer(),
%% Continuation :: ets_continuation()
%% }.
%%
%% Defines the convention for the query functions:
%% <ul>
%% <li>`Tab': Table reference.</li>
%% <li>`MatchSpec': The `ets:match_spec()'.</li>
%% <li>`Limit': Results limit.</li>
%% <li>`Partition': Partition index.</li>
%% <li>`Continuation': The `ets:continuation()'.</li>
%% </ul>
-type continuation() :: {
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
Partition :: non_neg_integer(),
Continuation :: ets_continuation()
}.
%% @type filename() = string() | binary() | atom().
-type filename() :: string() | binary() | atom().
% Exported Types
-export_type([
tab/0,
option/0,
info_tuple/0,
info_item/0,
tabinfo_item/0,
continuation/0,
filename/0
]).
%% Macro to check if the given Filename has the right type
-define(is_filename(FN_), is_list(FN_); is_binary(FN_); is_atom(FN_)).
%%%===================================================================
%%% Helpers
%%%===================================================================
%% @doc Returns the metadata associated with the given table `Tab'.
-spec table_meta(Tab :: tab()) -> shards_meta:t().
table_meta(Tab) -> shards_meta:get(Tab).

%% @equiv get_meta(Tab, Key, undefined)
get_meta(Tab, Key) ->
    get_meta(Tab, Key, undefined).

%% @doc Wrapper for `shards_meta:get/3'.
%%
%% Fetches the metadata value stored under `Key' for table `Tab',
%% returning `Def' when the key is absent.
-spec get_meta(Tab, Key, Def) -> Val when
    Tab :: tab(),
    Key :: term(),
    Def :: term(),
    Val :: term().
get_meta(Tab, Key, Def) ->
    shards_meta:get(Tab, Key, Def).

%% @doc Wrapper for `shards_meta:put/3'.
%%
%% Stores `Val' under `Key' in the metadata of table `Tab'.
-spec put_meta(Tab, Key, Val) -> ok when
    Tab :: shards:tab(),
    Key :: term(),
    Val :: term().
put_meta(Tab, Key, Val) ->
    shards_meta:put(Tab, Key, Val).

%% @doc Returns the partition PIDs associated with the given table `TabOrPid'.
%%
%% Given the supervisor pid, lists its children directly; given a table
%% name/reference, resolves the owning supervisor pid first.
-spec partition_owners(TabOrPid) -> [OwnerPid] when
    TabOrPid :: pid() | tab(),
    OwnerPid :: pid().
partition_owners(TabOrPid) when is_pid(TabOrPid) ->
    [Child || {_, Child, _, _} <- supervisor:which_children(TabOrPid)];
partition_owners(TabOrPid) when is_atom(TabOrPid); is_reference(TabOrPid) ->
    partition_owners(shards_meta:tab_pid(TabOrPid)).
%%%===================================================================
%%% ETS API
%%%===================================================================
%% @equiv ets:all()
all() ->
    ets:all().

%% @doc
%% Equivalent to `ets:delete/1'.
%%
%% Stops the whole supervision tree of the partitioned table, which
%% tears down the metadata table and every partition ETS table.
%%
%% @see ets:delete/1.
%% @end
-spec delete(Tab :: tab()) -> true.
delete(Tab) ->
    Meta = shards_meta:get(Tab),
    TabPid = shards_meta:tab_pid(Meta),
    ok = shards_partition_sup:stop(TabPid),
    true.

%% @equiv delete(Tab, Key, shards_meta:get(Tab))
delete(Tab, Key) ->
    delete(Tab, Key, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:delete/2'.
%%
%% Resolves the partition that owns `Key' and deletes the key there.
%%
%% @see ets:delete/2.
%% @end
-spec delete(Tab, Key, Meta) -> true when
    Tab :: tab(),
    Key :: term(),
    Meta :: shards_meta:t().
delete(Tab, Key, Meta) ->
    PartTid = shards_partition:tid(Tab, Key, Meta),
    ets:delete(PartTid, Key).

%% @equiv delete_all_objects(Tab, shards_meta:get(Tab))
delete_all_objects(Tab) ->
    delete_all_objects(Tab, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:delete_all_objects/1'.
%%
%% Applies `ets:delete_all_objects/1' to every partition.
%%
%% @see ets:delete_all_objects/1.
%% @end
-spec delete_all_objects(Tab, Meta) -> true when
    Tab :: tab(),
    Meta :: shards_meta:t().
delete_all_objects(Tab, Meta) ->
    _ = mapred(Tab, fun ets:delete_all_objects/1, Meta),
    true.

%% @equiv delete_object(Tab, Object, shards_meta:get(Tab))
delete_object(Tab, Object) ->
    delete_object(Tab, Object, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:delete_object/2'.
%%
%% The object's key (at the table's keypos) selects the partition the
%% object is deleted from.
%%
%% @see ets:delete_object/2.
%% @end
-spec delete_object(Tab, Object, Meta) -> true when
    Tab :: tab(),
    Object :: tuple(),
    Meta :: shards_meta:t().
delete_object(Tab, Object, Meta) when is_tuple(Object) ->
    Key = shards_lib:object_key(Object, Meta),
    PartTid = shards_partition:tid(Tab, Key, Meta),
    ets:delete_object(PartTid, Object).
%% @equiv file2tab(Filename, [])
file2tab(Filename) ->
    file2tab(Filename, []).

%% @doc
%% Equivalent to `ets:file2tab/2'. Moreover, it restores the
%% supervision tree for the `shards' corresponding to the given
%% file, as if they had been created using `shards:new/2'.
%%
%% @see ets:file2tab/2.
%% @end
-spec file2tab(Filename, Options) -> {ok, Tab} | {error, Reason} when
    Filename :: filename(),
    Tab :: tab(),
    Options :: [Option],
    Option :: {verify, boolean()},
    Reason :: term().
file2tab(Filename, Options) when ?is_filename(Filename) ->
    try
        %% The tabfile header carries the table name, its metadata and
        %% the partition dumps to restore.
        StrFilename = shards_lib:to_string(Filename),
        Header = shards_lib:read_tabfile(StrFilename),

        TabName = maps:get(name, Header),
        Meta = maps:get(metadata, Header),
        Partitions = maps:get(partitions, Header),

        %% Recreate the table with the same partitioning options it was
        %% dumped with; partition contents are restored during startup.
        TabOpts = [
            {partitions, shards_meta:partitions(Meta)},
            {keyslot_fun, shards_meta:keyslot_fun(Meta)}
            | shards_meta:ets_opts(Meta)
        ],

        Return = new(TabName, [{restore, Partitions, Options} | TabOpts]),
        {ok, Return}
    catch
        throw:Error ->
            Error;
        error:{error, _} = Error ->
            Error;
        error:{badarg, Arg} ->
            %% Mimic the ETS error shape for a missing/unreadable file.
            {error, {read_error, {file_error, Arg, enoent}}}
    end.
%% @equiv first(Tab, shards_meta:get(Tab))
first(Tab) ->
    first(Tab, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:first/1'.
%%
%% However, the order in which results are returned might be not the same as
%% the original ETS function, since it is a sharded table.
%%
%% @see ets:first/1.
%% @end
-spec first(Tab, Meta) -> Key | '$end_of_table' when
    Tab :: tab(),
    Key :: term(),
    Meta :: shards_meta:t().
first(Tab, Meta) ->
    N = shards_meta:partitions(Meta),
    Partition = N - 1,
    %% Start at the highest-numbered partition and walk downwards
    %% until a non-empty partition yields a key.
    first(Tab, ets:first(shards_partition:tid(Tab, Partition)), Partition).

%% @private
%% Skip empty partitions until a key is found or all are exhausted.
first(Tab, '$end_of_table', Partition) when Partition > 0 ->
    NextPartition = Partition - 1,
    first(Tab, ets:first(shards_partition:tid(Tab, NextPartition)), NextPartition);
first(_, '$end_of_table', _) ->
    '$end_of_table';
first(_, Key, _) ->
    Key.

%% @equiv foldl(Fun, Acc, Tab, shards_meta:get(Tab))
foldl(Fun, Acc, Tab) ->
    foldl(Fun, Acc, Tab, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:foldl/3'.
%%
%% However, the order in which the entries are traversed may be different
%% since they are distributed across multiple partitions.
%%
%% @see ets:foldl/3.
%% @end
-spec foldl(Fun, Acc0, Tab, Meta) -> Acc1 when
    Fun :: fun((Element :: term(), AccIn) -> AccOut),
    Tab :: tab(),
    Meta :: shards_meta:t(),
    Acc0 :: term(),
    Acc1 :: term(),
    AccIn :: term(),
    AccOut :: term().
foldl(Fun, Acc, Tab, Meta) ->
    fold(foldl, Fun, Acc, Tab, shards_meta:partitions(Meta) - 1).

%% @equiv foldr(Fun, Acc, Tab, shards_meta:get(Tab))
foldr(Fun, Acc, Tab) ->
    foldr(Fun, Acc, Tab, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:foldr/3'.
%%
%% However, the order in which the entries are traversed may be different
%% since they are distributed across multiple partitions.
%%
%% @see ets:foldr/3.
%% @end
-spec foldr(Fun, Acc0, Tab, Meta) -> Acc1 when
    Fun :: fun((Element :: term(), AccIn) -> AccOut),
    Tab :: tab(),
    Meta :: shards_meta:t(),
    Acc0 :: term(),
    Acc1 :: term(),
    AccIn :: term(),
    AccOut :: term().
foldr(Fun, Acc, Tab, Meta) ->
    fold(foldr, Fun, Acc, Tab, shards_meta:partitions(Meta) - 1).

%% @private
%% Apply ets:foldl/foldr partition by partition, threading the
%% accumulator across partitions (highest-numbered first).
fold(Fold, Fun, Acc, Tab, Partition) when Partition >= 0 ->
    NewAcc = ets:Fold(Fun, Acc, shards_partition:tid(Tab, Partition)),
    fold(Fold, Fun, NewAcc, Tab, Partition - 1);
fold(_Fold, _Fun, Acc, _Tab, _Partition) ->
    Acc.

%% @equiv ets:i()
i() -> ets:i().
%% @doc
%% Similar to `ets:info/1' but extra information about the partitioned
%% table is added.
%%
%% <h2>Extra Info:</h2>
%%
%% <ul>
%% <li>
%% `{partitions, pos_integer()}' - Number of partitions.
%% </li>
%% <li>
%% `{keyslot_fun, shards_meta:keyslot_fun()}' - Function used to compute
%% the keyslot.
%% </li>
%% <li>
%% `{parallel, boolean()}' - Whether the parallel mode is enabled or not.
%% </li>
%% </ul>
%%
%% @see ets:info/1.
%% @end
-spec info(Tab) -> InfoList | undefined when
    Tab :: tab(),
    InfoList :: [InfoTuple],
    InfoTuple :: info_tuple().
info(Tab) ->
    with_meta(Tab, fun(Meta) ->
        do_info(Tab, Meta)
    end).

%% @doc
%% Equivalent to `ets:info/2'.
%%
%% See the added items by `shards:info/1'.
%%
%% @see ets:info/2.
%% @end
-spec info(Tab, Item) -> Value | undefined when
    Tab :: tab(),
    Item :: info_item(),
    Value :: term().
info(Tab, Item) when is_atom(Item) ->
    with_meta(Tab, fun(Meta) ->
        shards_lib:keyfind(Item, do_info(Tab, Meta))
    end).

%% @private
%% Collect ets:info/1 from every partition and prepend the
%% shards-specific items; `parts_info' aggregates summable items
%% (e.g. memory) across partitions.
do_info(Tab, Meta) ->
    InfoLists = mapred(Tab, fun ets:info/1, Meta),

    [
        {partitions, shards_meta:partitions(Meta)},
        {keyslot_fun, shards_meta:keyslot_fun(Meta)},
        {parallel, shards_meta:parallel(Meta)}
        | parts_info(Tab, InfoLists, [memory])
    ].
%% @equiv insert(Tab, ObjOrObjs, shards_meta:get(Tab))
insert(Tab, ObjOrObjs) ->
    insert(Tab, ObjOrObjs, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:insert/2'.
%%
%% Although this function behaves exactly the same as `ets:insert/2'
%% and produces the same result, there is a big difference due to the
%% nature of the sharding distribution model, <b>IT IS NOT ATOMIC</b>.
%% Therefore, if it fails by inserting an object at some partition,
%% previous inserts execution on other partitions are not rolled back,
%% but an error is raised instead.
%%
%% @see ets:insert/2.
%% @end
-spec insert(Tab, ObjOrObjs, Meta) -> true | no_return() when
    Tab :: tab(),
    ObjOrObjs :: tuple() | [tuple()],
    Meta :: shards_meta:t().
insert(Tab, ObjOrObjs, Meta) when is_list(ObjOrObjs) ->
    %% Objects are grouped by destination partition; each group goes in
    %% with one ets:insert/2 call. The match `Acc = ets:insert(...)'
    %% both asserts the `true' return and threads it as the accumulator.
    maps:fold(fun(Partition, Group, Acc) ->
        Acc = ets:insert(Partition, Group)
    end, true, group_keys_by_partition(Tab, ObjOrObjs, Meta));
insert(Tab, ObjOrObjs, Meta) when is_tuple(ObjOrObjs) ->
    ets:insert(get_part_tid(Tab, ObjOrObjs, Meta), ObjOrObjs).

%% @equiv insert_new(Tab, ObjOrObjs, shards_meta:get(Tab))
insert_new(Tab, ObjOrObjs) ->
    insert_new(Tab, ObjOrObjs, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:insert_new/2'.
%%
%% Although this function behaves exactly the same as `ets:insert_new/2'
%% and produces the same result, there is a big difference due to the
%% nature of the sharding distribution model, <b>IT IS NOT ATOMIC</b>.
%% Opposite to `shards:insert/2', this function tries to roll-back
%% previous inserts execution on other partitions if it fails by
%% inserting an object at some partition, but there might be race
%% conditions during roll-back execution.
%%
%% <b>Example:</b>
%%
%% ```
%% > shards:insert_new(mytab, {k1, 1}).
%% true
%%
%% > shards:insert_new(mytab, {k1, 1}).
%% false
%%
%% > shards:insert_new(mytab, [{k1, 1}, {k2, 2}]).
%% false
%% '''
%%
%% @see ets:insert_new/2.
%% @end
-spec insert_new(Tab, ObjOrObjs, Meta) -> boolean() when
    Tab :: tab(),
    ObjOrObjs :: tuple() | [tuple()],
    Meta :: shards_meta:t().
insert_new(Tab, ObjOrObjs, Meta) when is_list(ObjOrObjs) ->
    %% Insert group by group; on the first group that fails, undo the
    %% groups already inserted (best-effort, see race caveat above).
    Result =
        shards_enum:reduce_while(fun({Partition, Group}, Acc) ->
            case ets:insert_new(Partition, Group) of
                true ->
                    {cont, Group ++ Acc};
                false ->
                    ok = rollback_insert(Tab, Acc, Meta),
                    {halt, false}
            end
        end, [], group_keys_by_partition(Tab, ObjOrObjs, Meta)),

    case Result of
        false -> false;
        _ -> true
    end;
insert_new(Tab, ObjOrObjs, Meta) when is_tuple(ObjOrObjs) ->
    ets:insert_new(get_part_tid(Tab, ObjOrObjs, Meta), ObjOrObjs).

%% @private
%% Best-effort undo for insert_new/3: delete every already-inserted
%% entry by its key (keypos taken from the table metadata).
rollback_insert(Tab, Entries, Meta) ->
    lists:foreach(fun(Entry) ->
        Key = element(shards_meta:keypos(Meta), Entry),
        ?MODULE:delete(Tab, Key)
    end, Entries).
%% @equiv ets:is_compiled_ms(Term)
is_compiled_ms(Term) ->
    ets:is_compiled_ms(Term).

%% @doc
%% Equivalent to `ets:last/1'.
%%
%% However, the order in which results are returned might be not the same as
%% the original ETS function, since it is a sharded table.
%%
%% @see ets:last/1.
%% @end
-spec last(Tab) -> Key | '$end_of_table' when
    Tab :: tab(),
    Key :: term().
last(Tab) ->
    Partition0 = shards_partition:tid(Tab, 0),
    %% Only ordered_set has a meaningful traversal order; for every
    %% other table type `last' is delegated to `first' (same as ETS,
    %% where last on an unordered table behaves like first).
    case ets:info(Partition0, type) of
        ordered_set -> ets:last(Partition0);
        _TableType -> first(Tab)
    end.

%% @equiv lookup(Tab, Key, shards_meta:get(Tab))
lookup(Tab, Key) ->
    lookup(Tab, Key, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:lookup/2'.
%%
%% The lookup runs only on the partition that owns `Key'.
%%
%% @see ets:lookup/2.
%% @end
-spec lookup(Tab, Key, Meta) -> [Object] when
    Tab :: tab(),
    Key :: term(),
    Meta :: shards_meta:t(),
    Object :: tuple().
lookup(Tab, Key, Meta) ->
    PartTid = shards_partition:tid(Tab, Key, Meta),
    ets:lookup(PartTid, Key).

%% @equiv lookup_element(Tab, Key, Pos, shards_meta:get(Tab))
lookup_element(Tab, Key, Pos) ->
    lookup_element(Tab, Key, Pos, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:lookup_element/3'.
%%
%% @see ets:lookup_element/3.
%% @end
-spec lookup_element(Tab, Key, Pos, Meta) -> Elem when
    Tab :: tab(),
    Key :: term(),
    Pos :: pos_integer(),
    Meta :: shards_meta:t(),
    Elem :: term() | [term()].
lookup_element(Tab, Key, Pos, Meta) ->
    PartTid = shards_partition:tid(Tab, Key, Meta),
    ets:lookup_element(PartTid, Key, Pos).
%% @equiv match(Tab, Pattern, shards_meta:get(Tab))
match(Tab, Pattern) ->
    match(Tab, Pattern, shards_meta:get(Tab)).

%% @doc
%% If 3rd argument is `pos_integer()' this function behaves
%% like `ets:match/3', otherwise, the 3rd argument is
%% assumed as `shards_meta:t()` and it behaves like
%% `ets:match/2'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match/2.
%% @see ets:match/3.
%% @end
-spec match(Tab, Pattern, LimitOrMeta) -> {[Match], Cont} | '$end_of_table' | [Match] when
    Tab :: tab(),
    Pattern :: ets:match_pattern(),
    LimitOrMeta :: pos_integer() | shards_meta:t(),
    Match :: [term()],
    Cont :: continuation().
match(Tab, Pattern, Limit) when is_integer(Limit), Limit > 0 ->
    match(Tab, Pattern, Limit, shards_meta:get(Tab));
match(Tab, Pattern, Meta) ->
    %% Unlimited form: run ets:match/2 on every partition and append
    %% the partial results.
    Map = {fun ets:match/2, [Pattern]},
    Reduce = fun erlang:'++'/2,
    mapred(Tab, Map, Reduce, Meta).

%% @doc
%% Equivalent to `ets:match/3'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match/3.
%% @end
-spec match(Tab, Pattern, Limit, Meta) -> {[Match], Cont} | '$end_of_table' when
    Tab :: tab(),
    Pattern :: ets:match_pattern(),
    Limit :: pos_integer(),
    Meta :: shards_meta:t(),
    Match :: [term()],
    Cont :: continuation().
match(Tab, Pattern, Limit, Meta) when is_integer(Limit), Limit > 0 ->
    %% Limited form: traverse partitions from the highest index down,
    %% carrying a shards continuation across partitions.
    N = shards_meta:partitions(Meta),
    q(match, Tab, Pattern, Limit, q_fun(), Limit, N - 1, {[], nil}).

%% @doc
%% Equivalent to `ets:match/1'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match/1.
%% @end
-spec match(Continuation) -> {[Match], continuation()} | '$end_of_table' when
    Match :: [term()],
    Continuation :: continuation().
match({_, _, Limit, _, _} = Continuation) ->
    q(match, Continuation, q_fun(), Limit, []).

%% @equiv match_delete(Tab, Pattern, shards_meta:get(Tab))
match_delete(Tab, Pattern) ->
    match_delete(Tab, Pattern, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:match_delete/2'.
%%
%% Deletes matching objects on every partition; the per-partition
%% `true' results are AND-ed together.
%%
%% @see ets:match_delete/2.
%% @end
-spec match_delete(Tab, Pattern, Meta) -> true when
    Tab :: tab(),
    Pattern :: ets:match_pattern(),
    Meta :: shards_meta:t().
match_delete(Tab, Pattern, Meta) ->
    Map = {fun ets:match_delete/2, [Pattern]},
    Reduce = {fun erlang:'and'/2, true},
    mapred(Tab, Map, Reduce, Meta).
%% @equiv match_object(Tab, Pattern, shards_meta:get(Tab))
match_object(Tab, Pattern) ->
    match_object(Tab, Pattern, shards_meta:get(Tab)).

%% @doc
%% If 3rd argument is `pos_integer()' this function behaves
%% like `ets:match_object/3', otherwise, the 3rd argument is
%% assumed as `shards_meta:t()` and it behaves like
%% `ets:match_object/2'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match_object/3.
%% @end
-spec match_object(Tab, Pattern, LimitOrMeta) -> {[Object], Cont} | '$end_of_table' | [Object] when
    Tab :: tab(),
    Pattern :: ets:match_pattern(),
    LimitOrMeta :: pos_integer() | shards_meta:t(),
    Object :: tuple(),
    Cont :: continuation().
match_object(Tab, Pattern, Limit) when is_integer(Limit), Limit > 0 ->
    match_object(Tab, Pattern, Limit, shards_meta:get(Tab));
match_object(Tab, Pattern, Meta) ->
    %% Unlimited form: run on every partition and append the results.
    Map = {fun ets:match_object/2, [Pattern]},
    Reduce = fun erlang:'++'/2,
    mapred(Tab, Map, Reduce, Meta).

%% @doc
%% Equivalent to `ets:match_object/3'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match_object/3.
%% @end
-spec match_object(Tab, Pattern, Limit, Meta) -> {[Object], Cont} | '$end_of_table' when
    Tab :: tab(),
    Pattern :: ets:match_pattern(),
    Limit :: pos_integer(),
    Meta :: shards_meta:t(),
    Object :: tuple(),
    Cont :: continuation().
match_object(Tab, Pattern, Limit, Meta) when is_integer(Limit), Limit > 0 ->
    %% Limited form: traverse partitions from the highest index down,
    %% carrying a shards continuation across partitions.
    N = shards_meta:partitions(Meta),
    q(match_object, Tab, Pattern, Limit, q_fun(), Limit, N - 1, {[], nil}).

%% @doc
%% Equivalent to `ets:match_object/1'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:match_object/1.
%% @end
-spec match_object(Cont) -> {[Object], Cont} | '$end_of_table' when
    Object :: tuple(),
    Cont :: continuation().
match_object({_, _, Limit, _, _} = Continuation) ->
    q(match_object, Continuation, q_fun(), Limit, []).
%% @equiv ets:match_spec_compile(MatchSpec)
match_spec_compile(MatchSpec) ->
    ets:match_spec_compile(MatchSpec).

%% @equiv ets:match_spec_run(List, CompiledMatchSpec)
match_spec_run(List, CompiledMatchSpec) ->
    ets:match_spec_run(List, CompiledMatchSpec).

%% @equiv member(Tab, Key, shards_meta:get(Tab))
member(Tab, Key) ->
    member(Tab, Key, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:member/2'.
%%
%% The membership check runs only on the partition that owns `Key'.
%%
%% @see ets:member/2.
%% @end
-spec member(Tab, Key, Meta) -> boolean() when
    Tab :: tab(),
    Key :: term(),
    Meta :: shards_meta:t().
member(Tab, Key, Meta) ->
    PartTid = shards_partition:tid(Tab, Key, Meta),
    ets:member(PartTid, Key).
%% @doc
%% This operation is equivalent to `ets:new/2', but when is called,
%% instead of create a single ETS table, it creates a new supervision
%% tree for the partitioned table.
%%
%% The supervision tree is composed by a main supervisor `shards_partition_sup'
%% and `N' number of workers or partitions handled by `shards_partition'
%% (partition owner). Each worker creates an ETS table to handle the partition.
%% Also, the main supervisor `shards_partition_sup' creates an ETS table to
%% keep the metadata for the partitioned table.
%%
%% Returns an atom if the created table is a named table, otherwise,
%% a reference is returned. In the last case, the returned reference
%% is the one of the metadata table, which is the main entry-point
%% and it is owned by the main supervisor `shards_partition_sup'.
%%
%% <h3>Options:</h3>
%% In addition to the options given by `ets:new/2', this function provides
%% the next options:
%% <ul>
%% <li>
%% `{partitions, N}' - Specifies the number of partitions for the sharded
%% table. By default, `N = erlang:system_info(schedulers_online)'.
%% </li>
%% <li>
%% `{keyslot_fun, F}' - Specifies the function used to compute the partition
%% where the action will be evaluated. Defaults to `erlang:phash2/2'.
%% </li>
%% <li>
%% `{parallel, P}' - Specifies whether `shards' should work in parallel mode
%% or not, for the applicable functions, e.g.: `select', `match', etc. By
%% default is set to `false'.
%% </li>
%% <li>
%% `{parallel_timeout, T}' - When `parallel' is set to `true', it specifies
%% the max timeout for a parallel execution. Defaults to `infinity'.
%% </li>
%% </ul>
%%
%% <h3>Access:</h3>
%% Currently, only `public' access is supported by `shards:new/2'. Since a
%% partitioned table is started with its own supervision tree when created,
%% it is very tricky to provide `private' or `protected' access since there
%% are multiple partitions (or ETS tables) and they are owned by the
%% supervisor's children, and the supervisor along with their children
%% (or partitions) are managed by `shards' under-the-hood; it is completely
%% transparent for the client.
%%
%% <h3>Examples:</h3>
%%
%% ```
%% > Tab = shards:new(tab1, []).
%% #Ref<0.1541908042.2337144842.31535>
%%
%% > shards:new(tab2, [named_table]).
%% tab2
%% '''
%%
%% See also the <b>"Partitioned Table"</b> section at the module documentation
%% for more information.
%%
%% @see ets:new/2.
%% @end
-spec new(Name, Options) -> Tab when
    Name :: atom(),
    Options :: [option()],
    Tab :: tab().
new(Name, Options) ->
    %% Exits are trapped so a failed startup can be converted into a
    %% proper error/1 instead of killing the caller (see do_new/3).
    with_trap_exit(fun() ->
        ParsedOpts = shards_opts:parse(Options),
        StartResult = shards_partition_sup:start_link(Name, ParsedOpts),
        do_new(StartResult, Name, ParsedOpts)
    end).

%% @private
%% On success: optionally register the supervisor under the table name
%% and fetch the table reference from the first partition owner.
%% On failure: drain the pending 'EXIT' message and raise the reason.
do_new({ok, Pid}, Name, Options) ->
    ok = maybe_register(Name, Pid, maps:get(ets_opts, Options)),
    shards_partition:retrieve_tab(shards_lib:get_sup_child(Pid, 0));
do_new({error, {shutdown, {_, _, {restore_error, Error}}}}, _Name, _Options) ->
    ok = wrap_exit(),
    error(Error);
do_new({error, {Reason, _}}, _Name, _Options) ->
    ok = wrap_exit(),
    error(Reason).
%% @private
%% Register the supervisor pid under the table name when the table was
%% requested as a `named_table'; otherwise leave it unregistered.
maybe_register(Name, Pid, Options) ->
    case lists:member(named_table, Options) of
        false ->
            ok;
        true ->
            %% register/2 only returns `true' or raises; the match
            %% makes any unexpected result crash loudly.
            true = register(Name, Pid),
            ok
    end.
%% @private
%% NOTE(review): this blocking receive presumably relies on the caller
%% trapping exits (new/2 runs inside with_trap_exit) — confirm, since
%% otherwise no 'EXIT' message would ever arrive in the mailbox.
wrap_exit() ->
    % We wait for the 'EXIT' signal from the partition supervisor knowing
    % that it will reply at some point, either after roughly timeout or
    % when an 'EXIT' signal response is ready.
    receive
        {'EXIT', _Pid, _Reason} -> ok
    end.
%% @equiv next(Tab, Key1, shards_meta:get(Tab))
next(Tab, Key1) ->
    next(Tab, Key1, shards_meta:get(Tab)).

%% @doc
%% Equivalent to `ets:next/2'.
%%
%% However, the order in which results are returned might be not the same as
%% the original ETS function, since it is a sharded table.
%%
%% @see ets:next/2.
%% @end
-spec next(Tab, Key1, Meta) -> Key2 | '$end_of_table' when
    Tab :: tab(),
    Key1 :: term(),
    Key2 :: term(),
    Meta :: shards_meta:t().
next(Tab, Key1, Meta) ->
    %% Continue inside the partition that owns Key1; when that
    %% partition is exhausted, fall through to lower-numbered ones.
    KeyslotFun = shards_meta:keyslot_fun(Meta),
    Partitions = shards_meta:partitions(Meta),
    Idx = KeyslotFun(Key1, Partitions),
    PartTid = shards_partition:tid(Tab, Idx),
    next_(Tab, ets:next(PartTid, Key1), Idx).

%% @private
%% Skip exhausted/empty partitions (restarting each with ets:first/1)
%% until a key is found or every partition has been visited.
next_(Tab, '$end_of_table', Partition) when Partition > 0 ->
    NextPartition = Partition - 1,
    next_(Tab, ets:first(shards_partition:tid(Tab, NextPartition)), NextPartition);
next_(_, '$end_of_table', _) ->
    '$end_of_table';
next_(_, Key2, _) ->
    Key2.
%% @doc
%% Equivalent to `ets:prev/2'.
%%
%% However, the order in which results are returned might be not the same as
%% the original ETS function, since it is a sharded table.
%%
%% @see ets:prev/2.
%% @end
-spec prev(Tab, Key1) -> Key2 | '$end_of_table' when
Tab :: tab(),
Key1 :: term(),
Key2 :: term().
prev(Tab, Key1) ->
%% Only ordered_set supports real backwards traversal; ordered_set tables
%% are kept in a single partition (index 0), so delegate directly.
Partition0 = shards_partition:tid(Tab, 0),
case ets:info(Partition0, type) of
ordered_set -> ets:prev(Partition0, Key1);
%% NOTE(review): for unordered table types this falls back to next/2,
%% mirroring ETS where prev/2 on unordered tables behaves like next/2.
_TableType -> next(Tab, Key1)
end.
%% @equiv rename(Tab, Name, shards_meta:get(Tab))
rename(Tab, Name) ->
rename(Tab, Name, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:rename/2'.
%%
%% Renames the table name and all its associated shard tables.
%% If something unexpected occurs during the process, an exception
%% will be raised.
%%
%% @see ets:rename/2.
%% @end
-spec rename(Tab, Name, Meta) -> Name when
Tab :: tab(),
Name :: atom(),
Meta :: shards_meta:t().
rename(Tab, Name, Meta) ->
%% Re-point the registered name at the owning process, then rename the
%% metadata; each step asserts success so any failure raises (badmatch).
Pid = shards_meta:tab_pid(Meta),
true = unregister(Tab),
true = register(Name, Pid),
Name = shards_meta:rename(Tab, Name).
%% @equiv safe_fixtable(Tab, Fix, shards_meta:get(Tab))
safe_fixtable(Tab, Fix) ->
safe_fixtable(Tab, Fix, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:safe_fixtable/2'.
%%
%% Returns `true' if the function was applied successfully
%% on each partition, otherwise `false' is returned.
%%
%% @see ets:safe_fixtable/2.
%% @end
-spec safe_fixtable(Tab, Fix, Meta) -> true when
Tab :: tab(),
Fix :: boolean(),
Meta :: shards_meta:t().
safe_fixtable(Tab, Fix, Meta) ->
%% Apply on every partition and AND the per-partition results together.
Map = {fun ets:safe_fixtable/2, [Fix]},
Reduce = {fun erlang:'and'/2, true},
mapred(Tab, Map, Reduce, Meta).
%% @equiv select(Tab, MatchSpec, shards_meta:get(Tab))
select(Tab, MatchSpec) ->
select(Tab, MatchSpec, shards_meta:get(Tab)).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves
%% like `ets:select/3', otherwise, the 3rd argument is
%% assumed as `shards_meta:t()' and it behaves like
%% `ets:select/2'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select/3.
%% @end
-spec select(Tab, MatchSpec, LimitOrMeta) -> {[Match], Cont} | '$end_of_table' | [Match] when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
LimitOrMeta :: pos_integer() | shards_meta:t(),
Match :: term(),
Cont :: continuation().
%% Integer third argument selects the limited/continuation flavour.
select(Tab, MatchSpec, Limit) when is_integer(Limit) ->
select(Tab, MatchSpec, Limit, shards_meta:get(Tab));
%% Otherwise run the select on all partitions and concatenate results.
select(Tab, MatchSpec, Meta) ->
Map = {fun ets:select/2, [MatchSpec]},
Reduce = fun erlang:'++'/2,
mapred(Tab, Map, Reduce, Meta).
%% @doc
%% Equivalent to `ets:select/3'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select/3.
%% @end
-spec select(Tab, MatchSpec, Limit, Meta) -> {[Match], Cont} | '$end_of_table' when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
Meta :: shards_meta:t(),
Match :: term(),
Cont :: continuation().
select(Tab, MatchSpec, Limit, Meta) ->
%% Start the query loop at the highest partition index; see q/8.
N = shards_meta:partitions(Meta),
q(select, Tab, MatchSpec, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% Equivalent to `ets:select/1'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select/1.
%% @end
-spec select(Cont) -> {[Match], Cont} | '$end_of_table' when
Match :: term(),
Cont :: continuation().
%% The continuation tuple carries {Tab, MatchSpec, Limit, Shard, EtsCont}.
select({_, _, Limit, _, _} = Continuation) ->
q(select, Continuation, q_fun(), Limit, []).
%% @equiv select_count(Tab, MatchSpec, shards_meta:get(Tab))
select_count(Tab, MatchSpec) ->
select_count(Tab, MatchSpec, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:select_count/2'.
%%
%% @see ets:select_count/2.
%% @end
-spec select_count(Tab, MatchSpec, Meta) -> NumMatched when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Meta :: shards_meta:t(),
NumMatched :: non_neg_integer().
select_count(Tab, MatchSpec, Meta) ->
%% Sum the per-partition match counts.
Map = {fun ets:select_count/2, [MatchSpec]},
Reduce = {fun erlang:'+'/2, 0},
mapred(Tab, Map, Reduce, Meta).
%% @equiv select_delete(Tab, MatchSpec, shards_meta:get(Tab))
select_delete(Tab, MatchSpec) ->
select_delete(Tab, MatchSpec, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:select_delete/2'.
%%
%% @see ets:select_delete/2.
%% @end
-spec select_delete(Tab, MatchSpec, Meta) -> NumDeleted when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Meta :: shards_meta:t(),
NumDeleted :: non_neg_integer().
select_delete(Tab, MatchSpec, Meta) ->
%% Sum the per-partition deletion counts.
Map = {fun ets:select_delete/2, [MatchSpec]},
Reduce = {fun erlang:'+'/2, 0},
mapred(Tab, Map, Reduce, Meta).
%% @equiv select_replace(Tab, MatchSpec, shards_meta:get(Tab))
select_replace(Tab, MatchSpec) ->
select_replace(Tab, MatchSpec, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:select_replace/2'.
%%
%% @see ets:select_replace/2.
%% @end
-spec select_replace(Tab, MatchSpec, Meta) -> NumReplaced when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Meta :: shards_meta:t(),
NumReplaced :: non_neg_integer().
select_replace(Tab, MatchSpec, Meta) ->
%% Sum the per-partition replacement counts.
Map = {fun ets:select_replace/2, [MatchSpec]},
Reduce = {fun erlang:'+'/2, 0},
mapred(Tab, Map, Reduce, Meta).
%% @equiv select_reverse(Tab, MatchSpec, shards_meta:get(Tab))
select_reverse(Tab, MatchSpec) ->
select_reverse(Tab, MatchSpec, shards_meta:get(Tab)).
%% @doc
%% If 3rd argument is `pos_integer()' this function behaves
%% like `ets:select_reverse/3', otherwise, the 3rd argument is
%% assumed as `shards_meta:t()' and it behaves like
%% `ets:select_reverse/2'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select_reverse/3.
%% @end
-spec select_reverse(Tab, MatchSpec, LimitOrMeta) -> {[Match], Cont} | '$end_of_table' | [Match] when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
LimitOrMeta :: pos_integer() | shards_meta:t(),
Match :: term(),
Cont :: continuation().
%% Integer third argument selects the limited/continuation flavour.
select_reverse(Tab, MatchSpec, Limit) when is_integer(Limit) ->
select_reverse(Tab, MatchSpec, Limit, shards_meta:get(Tab));
select_reverse(Tab, MatchSpec, Meta) ->
Map = {fun ets:select_reverse/2, [MatchSpec]},
Reduce = fun erlang:'++'/2,
mapred(Tab, Map, Reduce, Meta).
%% @doc
%% Equivalent to `ets:select_reverse/3'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select_reverse/3.
%% @end
-spec select_reverse(Tab, MatchSpec, Limit, Meta) -> {[Match], Cont} | '$end_of_table' when
Tab :: tab(),
MatchSpec :: ets:match_spec(),
Limit :: pos_integer(),
Meta :: shards_meta:t(),
Match :: term(),
Cont :: continuation().
select_reverse(Tab, MatchSpec, Limit, Meta) ->
%% Start the query loop at the highest partition index; see q/8.
N = shards_meta:partitions(Meta),
q(select_reverse, Tab, MatchSpec, Limit, q_fun(), Limit, N - 1, {[], nil}).
%% @doc
%% Equivalent to `ets:select_reverse/1'.
%%
%% The order in which results are returned might be not the same
%% as the original ETS function.
%%
%% @see ets:select_reverse/1.
%% @end
-spec select_reverse(Cont) -> {[Match], Cont} | '$end_of_table' when
Cont :: continuation(),
Match :: term().
select_reverse({_, _, Limit, _, _} = Continuation) ->
q(select_reverse, Continuation, q_fun(), Limit, []).
%% @equiv setopts(Tab, Opts, shards_meta:get(Tab))
setopts(Tab, Opts) ->
setopts(Tab, Opts, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:setopts/2'.
%%
%% Returns `true' if the function was applied successfully on each partition,
%% otherwise, `false' is returned.
%%
%% @see ets:setopts/2.
%% @end
-spec setopts(Tab, Opts, Meta) -> true when
Tab :: tab(),
Opts :: Opt | [Opt],
Opt :: {heir, pid(), HeirData} | {heir, none},
Meta :: shards_meta:t(),
HeirData :: term().
setopts(Tab, Opts, Meta) ->
%% Routed through the owning partition process ({pid, Meta}) since
%% setopts must be called by the table owner; results are AND-ed.
Map = {fun shards_partition:apply_ets_fun/3, [setopts, [Opts]]},
Reduce = {fun erlang:'and'/2, true},
mapred(Tab, Map, Reduce, {pid, Meta}).
%% @equiv tab2file(Tab, Filename, [])
tab2file(Tab, Filename) ->
tab2file(Tab, Filename, []).
%% @doc
%% Equivalent to `ets:tab2file/3'.
%%
%% This function generates one file per partition using `ets:tab2file/3',
%% and also generates a master file with the given `Filename' that holds
%% the information of the created partition files so that they can be
%% recovered by calling `ets:file2tab/1,2'.
%%
%% @see ets:tab2file/3.
%% @end
-spec tab2file(Tab, Filename, Options) -> ok | {error, Reason} when
Tab :: tab(),
Filename :: filename(),
Options :: [Option],
Option :: {extended_info, [md5sum | object_count]} | {sync, boolean()},
Reason :: term().
tab2file(Tab, Filename, Options) when ?is_filename(Filename) ->
StrFilename = shards_lib:to_string(Filename),
Metadata = shards_meta:get(Tab),
%% Dump each partition to "<Filename>.<Idx>", stopping at the first
%% failure; on success the accumulator maps partition index -> filename.
PartitionFilenamePairs =
shards_enum:reduce_while(fun({Idx, Partition}, Acc) ->
PartitionFilename = StrFilename ++ "." ++ integer_to_list(Idx),
case ets:tab2file(Partition, PartitionFilename, Options) of
ok ->
{cont, Acc#{Idx => PartitionFilename}};
{error, _} = Error ->
{halt, Error}
end
end, #{}, shards_meta:get_partition_tids(Tab)),
case PartitionFilenamePairs of
{error, _} = Error ->
Error;
_ ->
%% Write the master file: table info plus the partition file map,
%% consumed later by file2tab/tabfile_info.
TabInfo = maps:from_list(shards:info(Tab)),
Header = #{
name => maps:get(name, TabInfo),
type => maps:get(type, TabInfo),
protection => maps:get(protection, TabInfo),
keypos => maps:get(keypos, TabInfo),
size => maps:get(size, TabInfo),
named_table => maps:get(named_table, TabInfo),
extended_info => maps:get(extended_info, TabInfo, []),
metadata => Metadata,
partitions => PartitionFilenamePairs
},
shards_lib:write_tabfile(StrFilename, Header)
end.
%% @equiv tab2list(Tab, shards_meta:get(Tab))
tab2list(Tab) ->
tab2list(Tab, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:tab2list/1'.
%%
%% @see ets:tab2list/1.
%% @end
-spec tab2list(Tab, Meta) -> [Object] when
Tab :: tab(),
Meta :: shards_meta:t(),
Object :: tuple().
tab2list(Tab, Meta) ->
%% Concatenation of every partition's objects; cross-partition order
%% is unspecified.
mapred(Tab, fun ets:tab2list/1, fun erlang:'++'/2, Meta).
%% @doc
%% Equivalent to `ets:tabfile_info/1'.
%%
%% Adds extra information about the partitions.
%%
%% @see ets:tabfile_info/1.
%% @end
-spec tabfile_info(Filename) -> {ok, TableInfo} | {error, Reason} when
Filename :: filename(),
TableInfo :: [tabinfo_item()],
Reason :: term().
tabfile_info(Filename) when ?is_filename(Filename) ->
%% throw/catch is used for early exit: the first partition file that
%% fails ets:tabfile_info/1 aborts the fold and its error is returned.
try
StrFilename = shards_lib:to_string(Filename),
Header = shards_lib:read_tabfile(StrFilename),
TabName = maps:get(name, Header),
Metadata = maps:get(metadata, Header),
NamedTable = lists:member(named_table, shards_meta:ets_opts(Metadata)),
Partitions = maps:get(partitions, Header),
%% Collect ets:tabfile_info/1 for every partition file.
ShardsTabInfo =
maps:fold(fun(_, PartitionFN, Acc) ->
case ets:tabfile_info(PartitionFN) of
{ok, TabInfo} -> [TabInfo | Acc];
Error -> throw(Error)
end
end, [], Partitions),
%% Aggregate partition info, add the partition count, and override
%% named_table with the value recorded in the master file.
PartitionedTabInfo =
lists:keystore(
named_table,
1,
[
{partitions, map_size(Partitions)}
| parts_info(TabName, ShardsTabInfo)
],
{named_table, NamedTable}
),
{ok, PartitionedTabInfo}
catch
throw:Error -> Error
end.
%% @equiv table(Tab, [])
table(Tab) ->
table(Tab, []).
%% @equiv table(Tab, Options, shards_meta:get(Tab))
table(Tab, Options) ->
table(Tab, Options, shards_meta:get(Tab)).
%% @doc
%% Similar to `ets:table/2', but it returns a list of `qlc:query_handle()';
%% one per partition.
%%
%% @see ets:table/2.
%% @end
-spec table(Tab, Options, Meta) -> QueryHandle when
Tab :: tab(),
QueryHandle :: qlc:query_handle(),
Options :: [Option] | Option,
Meta :: shards_meta:t(),
Option :: {n_objects, NObjects} | {traverse, TraverseMethod},
NObjects :: 'default' | pos_integer(),
MatchSpec :: ets:match_spec(),
TraverseMethod :: first_next | last_prev | select | {select, MatchSpec}.
table(Tab, Options, Meta) ->
%% No reduce fun: mapred/3 accumulates one handle per partition.
mapred(Tab, {fun ets:table/2, [Options]}, Meta).
%% @equiv ets:test_ms(Tuple, MatchSpec)
test_ms(Tuple, MatchSpec) ->
ets:test_ms(Tuple, MatchSpec).
%% @equiv take(Tab, Key, shards_meta:get(Tab))
take(Tab, Key) ->
take(Tab, Key, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:take/2'.
%%
%% @see ets:take/2.
%% @end
-spec take(Tab, Key, Meta) -> [Object] when
Tab :: tab(),
Key :: term(),
Meta :: shards_meta:t(),
Object :: tuple().
take(Tab, Key, Meta) ->
%% Single-partition operation: route by key slot.
PartTid = shards_partition:tid(Tab, Key, Meta),
ets:take(PartTid, Key).
%% @equiv update_counter(Tab, Key, UpdateOp, shards_meta:get(Tab))
update_counter(Tab, Key, UpdateOp) ->
update_counter(Tab, Key, UpdateOp, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:update_counter/4'.
%%
%% If the 4th argument is `shards_meta:t()', it behaves like
%% `ets:update_counter/3'.
%%
%% @see ets:update_counter/4.
%% @end
-spec update_counter(Tab, Key, UpdateOp, DefaultOrMeta) -> Result | [Result] when
Tab :: tab(),
Key :: term(),
UpdateOp :: term(),
DefaultOrMeta :: tuple() | shards_meta:t(),
Result :: integer().
update_counter(Tab, Key, UpdateOp, DefaultOrMeta) ->
%% Disambiguate the overloaded 4th argument at runtime: metadata means
%% "no default object", anything else is the default for the /5 variant.
case shards_meta:is_metadata(DefaultOrMeta) of
true ->
PartTid = shards_partition:tid(Tab, Key, DefaultOrMeta),
ets:update_counter(PartTid, Key, UpdateOp);
false ->
update_counter(Tab, Key, UpdateOp, DefaultOrMeta, shards_meta:get(Tab))
end.
%% @doc
%% Equivalent to `ets:update_counter/4'.
%%
%% @see ets:update_counter/4.
%% @end
-spec update_counter(Tab, Key, UpdateOp, Default, Meta) -> Result | [Result] when
Tab :: tab(),
Key :: term(),
UpdateOp :: term(),
Default :: tuple(),
Meta :: shards_meta:t(),
Result :: integer().
update_counter(Tab, Key, UpdateOp, Default, Meta) ->
PartTid = shards_partition:tid(Tab, Key, Meta),
ets:update_counter(PartTid, Key, UpdateOp, Default).
%% @equiv update_element(Tab, Key, ElementSpec, shards_meta:get(Tab))
update_element(Tab, Key, ElementSpec) ->
update_element(Tab, Key, ElementSpec, shards_meta:get(Tab)).
%% @doc
%% Equivalent to `ets:update_element/3'.
%%
%% @see ets:update_element/3.
%% @end
-spec update_element(Tab, Key, ElementSpec, Meta) -> boolean() when
Tab :: tab(),
Key :: term(),
ElementSpec :: {Pos, Value} | [{Pos, Value}],
Meta :: shards_meta:t().
update_element(Tab, Key, ElementSpec, Meta) ->
PartTid = shards_partition:tid(Tab, Key, Meta),
ets:update_element(PartTid, Key, ElementSpec).
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
%% Runs `Fun' with exit trapping enabled, restoring the previous
%% trap_exit flag afterwards (even if `Fun' raises).
with_trap_exit(Fun) ->
Flag = process_flag(trap_exit, true),
try
Fun()
after
process_flag(trap_exit, Flag)
end.
%% @private
%% Applies `Fun' to the table metadata, or returns `undefined' when the
%% table is unknown. Other errors from shards_meta:get/1 propagate.
with_meta(Tab, Fun) ->
try shards_meta:get(Tab) of
Meta -> Fun(Meta)
catch
error:{unknown_table, Tab} -> undefined
end.
%% @private
%% Resolves the partition tid that owns `Object', keyed by its key position.
get_part_tid(Tab, Object, Meta) ->
Key = shards_lib:object_key(Object, Meta),
shards_partition:tid(Tab, Key, Meta).
%% @private
%% Groups `Objects' by their owning partition tid, preserving the original
%% relative order of the objects within each partition's list.
group_keys_by_partition(Tab, Objects, Meta) ->
    lists:foldr(fun(Obj, Groups) ->
        Tid = get_part_tid(Tab, Obj, Meta),
        maps:update_with(Tid, fun(Objs) -> [Obj | Objs] end, [Obj], Groups)
    end, #{}, Objects).
%% @private
parts_info(Tab, InfoLists) ->
parts_info(Tab, InfoLists, []).
%% @private
%% Aggregates a list of per-partition info proplists into a single one:
%% identity keys (id/name/named_table/owner) are taken from the live table,
%% while `size' (plus any ExtraAttrs) are summed across partitions.
parts_info(Tab, [FirstInfo | RestInfoLists], ExtraAttrs) ->
FirstInfo1 =
shards_lib:keyupdate(fun(K, _V) ->
ets:info(Tab, K)
end, [id, name, named_table, owner], FirstInfo),
Keys = [size | ExtraAttrs],
lists:foldl(fun(InfoList, InfoListAcc) ->
shards_lib:keyupdate(fun(K, V) ->
{K, V1} = lists:keyfind(K, 1, InfoList),
V + V1
end, Keys, InfoListAcc)
end, FirstInfo1, RestInfoLists).
%% @private
mapred(Tab, Map, Meta) ->
mapred(Tab, Map, nil, Meta).
%% @private
%% `nil' reduce defaults to list accumulation (one result per partition).
mapred(Tab, Map, nil, Meta) ->
mapred(Tab, Map, fun(E, Acc) -> [E | Acc] end, Meta);
%% A {PartitionFun, Meta} 4th argument overrides how a partition is
%% addressed (e.g. `pid' instead of the default `tid'; see do_mapred/5).
mapred(Tab, Map, Reduce, {PartitionFun, Meta}) ->
do_mapred(Tab, Map, Reduce, PartitionFun, Meta);
mapred(Tab, Map, Reduce, Meta) ->
do_mapred(Tab, Map, Reduce, tid, Meta).
%% @private
%% Dispatches to the parallel or sequential map/reduce implementation.
%% Parallel execution only pays off with more than one partition.
%%
%% Fix: the previous version matched `{Partitions, true} when Partitions > 1'
%% and `{Partitions, false}' only, so a table configured with `parallel'
%% enabled but a single partition crashed with a `case_clause' error. Now a
%% single-partition table always runs the sequential path.
do_mapred(Tab, Map, Reduce, PartFun, Meta) ->
    Partitions = shards_meta:partitions(Meta),
    case shards_meta:parallel(Meta) andalso Partitions > 1 of
        true ->
            ParallelTimeout = shards_meta:parallel_timeout(Meta),
            p_mapred(Tab, Map, Reduce, PartFun, Partitions, ParallelTimeout);
        false ->
            s_mapred(Tab, Map, Reduce, PartFun, Partitions)
    end.
%% @private
%% Sequential map/reduce over all partitions. `PartFun' is the name of the
%% shards_partition function (`tid' or `pid') used to resolve a partition id.
s_mapred(Tab, {MapFun, Args}, {ReduceFun, AccIn}, PartFun, Partitions) ->
shards_enum:reduce(fun(Part, Acc) ->
PartitionId = shards_partition:PartFun(Tab, Part),
MapRes = apply(MapFun, [PartitionId | Args]),
ReduceFun(MapRes, Acc)
end, AccIn, Partitions);
%% Normalize bare funs into the {Fun, Args}/{Fun, Acc} tuple form first.
s_mapred(Tab, MapFun, ReduceFun, PartFun, Partitions) ->
{Map, Reduce} = mapred_funs(MapFun, ReduceFun),
s_mapred(Tab, Map, Reduce, PartFun, Partitions).
%% @private
%% Parallel map (one task per partition, bounded by ParallelTimeout),
%% followed by a sequential fold of the results.
p_mapred(Tab, {MapFun, Args}, {ReduceFun, AccIn}, PartFun, Partitions, ParallelTimeout) ->
MapResults =
shards_enum:pmap(fun(Idx) ->
PartitionId = shards_partition:PartFun(Tab, Idx),
apply(MapFun, [PartitionId | Args])
end, ParallelTimeout, lists:seq(0, Partitions - 1)),
lists:foldl(ReduceFun, AccIn, MapResults);
p_mapred(Tab, MapFun, ReduceFun, PartFun, Partitions, ParallelTimeout) ->
{Map, Reduce} = mapred_funs(MapFun, ReduceFun),
p_mapred(Tab, Map, Reduce, PartFun, Partitions, ParallelTimeout).
%% @private
%% Normalizes map/reduce arguments into tuple form: a bare map fun becomes
%% `{Fun, []}' (no extra args), and the reduce fun is paired with an empty
%% initial accumulator.
mapred_funs(MapFun, ReduceFun) when is_function(MapFun) ->
    {{MapFun, []}, {ReduceFun, []}};
mapred_funs(Map, ReduceFun) ->
    {Map, {ReduceFun, []}}.
%% @private
%% Limited-select loop across partitions, from the highest shard index down
%% to 0. `I' is the remaining budget out of `Limit'; the accumulator pair is
%% {ResultsSoFar, EtsContinuationOrMarker}.
%% Budget exhausted: return results plus a resumable continuation tuple.
q(_, Tab, MatchSpec, Limit, _, I, Shard, {Acc, Continuation}) when I =< 0 ->
{Acc, {Tab, MatchSpec, Limit, Shard, Continuation}};
%% All shards consumed with nothing collected.
q(_, _, _, _, _, _, Shard, {[], _}) when Shard < 0 ->
'$end_of_table';
%% All shards consumed; remaining continuation is exhausted.
q(_, Tab, MatchSpec, Limit, _, _, Shard, {Acc, _}) when Shard < 0 ->
{Acc, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}};
%% Current shard exhausted: move to the next lower shard.
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, '$end_of_table'}) ->
q(F, Tab, MatchSpec, Limit, QFun, I, Shard - 1, {Acc, nil});
%% Pull up to I matches from the current shard via ets:F/3.
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, _}) ->
case ets:F(shards_partition:tid(Tab, Shard), MatchSpec, I) of
{L, Cont} ->
NewAcc = {QFun(L, Acc), Cont},
q(F, Tab, MatchSpec, Limit, QFun, I - length(L), Shard, NewAcc);
'$end_of_table' ->
q(F, Tab, MatchSpec, Limit, QFun, I, Shard, {Acc, '$end_of_table'})
end.
%% @private
%% Resumes a query from a continuation tuple produced by q/8 above.
q(_, {Tab, MatchSpec, Limit, Shard, Continuation}, _, I, Acc) when I =< 0 ->
{Acc, {Tab, MatchSpec, Limit, Shard, Continuation}};
%% Stored continuation exhausted: restart the shard walk one shard lower.
q(F, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}, QFun, _I, Acc) ->
q(F, Tab, MatchSpec, Limit, QFun, Limit, Shard - 1, {Acc, nil});
q(F, {Tab, MatchSpec, Limit, Shard, Continuation}, QFun, I, Acc) ->
case ets:F(Continuation) of
{L, Cont} ->
NewAcc = QFun(L, Acc),
q(F, {Tab, MatchSpec, Limit, Shard, Cont}, QFun, I - length(L), NewAcc);
'$end_of_table' ->
q(F, {Tab, MatchSpec, Limit, Shard, '$end_of_table'}, QFun, I, Acc)
end.
%% @private
%% Accumulator used by the q/8 and q/5 query loops: prepends each
%% partition's result chunk onto the accumulated result list.
%%
%% Fix: removed stray dataset metadata ("| src/shards.erl | ... |") that
%% was fused onto the final line and broke compilation.
q_fun() ->
    fun(Chunk, Acc) -> Chunk ++ Acc end.
%%--------------------------------------------------------------------
%% Copyright (c) 2019-2022 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% a simple decimal module for rate-related calculations
-module(emqx_limiter_decimal).
%% API
-export([
add/2,
sub/2,
mul/2,
put_to_counter/3,
floor_div/2
]).
-export_type([decimal/0, zero_or_float/0]).
-type decimal() :: infinity | number().
-type zero_or_float() :: 0 | float().
%%--------------------------------------------------------------------
%%% API
%%--------------------------------------------------------------------
%% @doc Addition over decimals; `infinity' absorbs any operand.
-spec add(decimal(), decimal()) -> decimal().
add(infinity, _) ->
    infinity;
add(_, infinity) ->
    infinity;
add(X, Y) ->
    X + Y.
%% @doc Subtraction over decimals; `infinity' in either operand yields
%% `infinity' (mirrors add/2 and mul/2).
-spec sub(decimal(), decimal()) -> decimal().
sub(infinity, _) ->
    infinity;
sub(_, infinity) ->
    infinity;
sub(X, Y) ->
    X - Y.
%% @doc Multiplication over decimals; `infinity' absorbs any operand.
-spec mul(decimal(), decimal()) -> decimal().
mul(infinity, _) ->
    infinity;
mul(_, infinity) ->
    infinity;
mul(X, Y) ->
    X * Y.
%% @doc Floored division; an `infinity' dividend stays `infinity'.
%% Uses float division followed by erlang:floor/1, so the result is an
%% integer for finite inputs.
-spec floor_div(decimal(), number()) -> decimal().
floor_div(infinity, _Divisor) ->
    infinity;
floor_div(Dividend, Divisor) ->
    erlang:floor(Dividend / Divisor).
%% @doc Stores a decimal into a `counters' slot. `infinity' is a no-op
%% (counters only hold integers); floats are floored before storing.
%%
%% Fix: removed stray dataset metadata ("| apps/emqx/... | starcoder |")
%% that was fused onto the final line and broke compilation.
-spec put_to_counter(counters:counters_ref(), pos_integer(), decimal()) -> ok.
put_to_counter(_Counter, _Index, infinity) ->
    ok;
put_to_counter(Counter, Index, Val) when is_float(Val) ->
    counters:put(Counter, Index, erlang:floor(Val));
put_to_counter(Counter, Index, Val) ->
    counters:put(Counter, Index, Val).
%% @doc: Implementation of HyperLogLog with bias correction as
%% described in the Google paper,
%% http://static.googleusercontent.com/external_content/untrusted_dlcp/
%% research.google.com/en//pubs/archive/40671.pdf
-module(hyper).
%%-compile(native).
-export([new/1, new/2, insert/2, insert_many/2]).
-export([union/1, union/2]).
-export([card/1, intersect_card/2]).
-export([to_json/1, from_json/1, from_json/2, precision/1, bytes/1, is_hyper/1]).
-export([compact/1, reduce_precision/2]).
-type precision() :: 4..16.
-type registers() :: any().
-record(hyper, {p :: precision(),
registers :: {module(), registers()}}).
-type value() :: binary().
-type filter() :: #hyper{}.
-export_type([filter/0, precision/0, registers/0]).
%% Exported for testing
-export([run_of_zeroes/1, perf_report/0, estimate_report/0]).
-define(DEFAULT_BACKEND, hyper_binary).
%%
%% API
%%
-spec new(precision()) -> filter().
%% Creates a filter with the default register backend (?DEFAULT_BACKEND).
new(P) ->
new(P, ?DEFAULT_BACKEND).
-spec new(precision(), module()) -> filter().
%% Precision is restricted to 4..16 (matches the precision() type); the
%% backend module owns the register representation.
new(P, Mod) when 4 =< P andalso P =< 16 andalso is_atom(Mod) ->
#hyper{p = P, registers = {Mod, Mod:new(P)}}.
-spec is_hyper(filter()) -> boolean().
%% True only for #hyper{} records.
is_hyper(#hyper{}) -> true;
is_hyper(_) -> false.
-spec insert(value(), filter()) -> filter().
%% Hashes the binary value with SHA-1; the first P bits pick the register
%% index and the following P bits feed the run-of-zeroes estimate
%% (zero count + 1 is the register value, per the HLL algorithm).
insert(Value, #hyper{registers = {Mod, Registers}, p = P} = Hyper)
when is_binary(Value) ->
Hash = crypto:hash(sha, Value),
<<Index:P, RegisterValue:P/bitstring, _/bitstring>> = Hash,
ZeroCount = run_of_zeroes(RegisterValue) + 1,
%% Registers are only allowed to increase, implement by backend
Hyper#hyper{registers = {Mod, Mod:set(Index, ZeroCount, Registers)}};
%% Non-binary values are a caller error.
insert(_Value, _Hyper) ->
error(badarg).
-spec insert_many([value()], filter()) -> filter().
%% Folds insert/2 over the list of values.
insert_many(L, Hyper) ->
lists:foldl(fun insert/2, Hyper, L).
-spec union([filter()]) -> filter().
%% Merges filters by taking the per-register maximum. All filters must
%% share the same backend module; mixed precisions are first folded down
%% to the smallest precision present.
union(Filters) when is_list(Filters) ->
case lists:usort(lists:map(fun (#hyper{p = P, registers = {Mod, _}}) ->
{P, Mod}
end, Filters)) of
%% same P and backend
[{_P, Mod}] ->
Registers = lists:map(fun (#hyper{registers = {_, R}}) ->
R
end, Filters),
[First | _] = Filters,
First#hyper{registers = {Mod, Mod:max_merge(Registers)}};
%% mixed P, but still must have same backend
[{MinP, Mod} | _] ->
FoldedFilters = lists:map(fun (#hyper{registers = {M, _}} = F)
when M =:= Mod ->
hyper:reduce_precision(MinP, F)
end, Filters),
union(FoldedFilters)
end.
union(Small, Big) ->
union([Small, Big]).
%% NOTE: use with caution, no guarantees on accuracy.
%% Inclusion–exclusion estimate: |A| + |B| - |A u B|, clamped at 0.0.
-spec intersect_card(filter(), filter()) -> float().
intersect_card(Left, Right) when Left#hyper.p =:= Right#hyper.p ->
max(0.0, (card(Left) + card(Right)) - card(union(Left, Right))).
-spec card(filter()) -> float().
%% Cardinality estimate per the HLL++ paper: raw estimate E from the
%% harmonic mean of registers, bias-corrected for small E, with a
%% linear-counting fallback (H) when empty registers remain and H is
%% below the per-precision threshold.
card(#hyper{registers = {Mod, Registers0}, p = P}) ->
M = trunc(pow(2, P)),
Registers = Mod:compact(Registers0),
RegisterSum = Mod:register_sum(Registers),
E = alpha(M) * pow(M, 2) / RegisterSum,
%% Bias correction only applies in the small-range regime (E =< 5M).
Ep = case E =< 5 * M of
true -> E - estimate_bias(E, P);
false -> E
end,
V = Mod:zero_count(Registers),
%% V > 0: linear counting candidate based on empty registers.
H = case V of
0 ->
Ep;
_ ->
M * math:log(M / V)
end,
case H =< hyper_const:threshold(P) of
true ->
H;
false ->
Ep
end.
precision(#hyper{p = Precision}) ->
Precision.
bytes(#hyper{registers = {Mod, Registers}}) ->
Mod:bytes(Registers).
%% Lets the backend compact its internal representation (e.g. flush
%% buffered writes); semantics are backend-defined.
compact(#hyper{registers = {Mod, Registers}} = Hyper) ->
Hyper#hyper{registers = {Mod, Mod:compact(Registers)}}.
%% Folds a filter down to a smaller precision; no-op when already at P.
reduce_precision(P, #hyper{p = OldP, registers = {Mod, Registers}} = Hyper)
when P < OldP ->
Hyper#hyper{p = P, registers = {Mod, Mod:reduce_precision(P, Registers)}};
reduce_precision(P, #hyper{p = P} = Filter) ->
Filter.
%%
%% SERIALIZATION
%%
-spec to_json(filter()) -> any().
%% Serializes to an EJSON-style structure: precision plus the compacted
%% registers, gzip-compressed and base64-encoded. The backend module is
%% NOT stored; from_json/2 must be told which backend to decode with.
to_json(#hyper{p = P, registers = {Mod, Registers}}) ->
Compact = Mod:compact(Registers),
{[
{<<"p">>, P},
{<<"registers">>, base64:encode(
zlib:gzip(
Mod:encode_registers(Compact)))}
]}.
-spec from_json(any()) -> filter().
from_json(Struct) ->
from_json(Struct, ?DEFAULT_BACKEND).
-spec from_json(any(), module()) -> filter().
%% Inverse of to_json/1; `Mod' must match the backend used for encoding
%% (or at least understand its byte layout).
from_json({Struct}, Mod) ->
P = proplists:get_value(<<"p">>, Struct),
Bytes = zlib:gunzip(
base64:decode(
proplists:get_value(<<"registers">>, Struct))),
Registers = Mod:decode_registers(Bytes, P),
#hyper{p = P, registers = {Mod, Registers}}.
%%
%% HELPERS
%%
%% Bias-correction constant alpha_m from the HyperLogLog paper; exact
%% values for m in {16, 32, 64}, closed-form approximation otherwise.
alpha(16) -> 0.673;
alpha(32) -> 0.697;
alpha(64) -> 0.709;
alpha(M) -> 0.7213 / (1 + 1.079 / M).
%% Thin alias over math:pow/2.
pow(X, Y) ->
math:pow(X, Y).
%% Counts the number of leading zero bits in a bitstring (0 for an empty
%% bitstring or one starting with a set bit). Exported for testing.
run_of_zeroes(Bits) ->
    run_of_zeroes(Bits, 0).

%% Walk the bitstring one bit at a time, accumulating the zero count.
run_of_zeroes(<<0:1, Rest/bitstring>>, Count) ->
    run_of_zeroes(Rest, Count + 1);
run_of_zeroes(Rest, Count) when is_bitstring(Rest) ->
    Count.
%% Bias estimate for raw estimate E at precision P: average of the bias
%% values at the 6 nearest precomputed estimate points (HLL++ k-NN scheme).
estimate_bias(E, P) ->
BiasVector = hyper_const:bias_data(P),
EstimateVector = hyper_const:estimate_data(P),
NearestNeighbours = nearest_neighbours(E, EstimateVector),
lists:sum([element(Index, BiasVector) || Index <- NearestNeighbours])
/ length(NearestNeighbours).
%% 1-based indexes of the 6 tuple entries closest to E by squared distance;
%% keysort/2 is stable, so ties keep ascending index order.
nearest_neighbours(E, Vector) ->
Distances = lists:map(fun (Index) ->
V = element(Index, Vector),
{pow((E - V), 2), Index}
end, lists:seq(1, size(Vector))),
SortedDistances = lists:keysort(1, Distances),
{_, Indexes} = lists:unzip(lists:sublist(SortedDistances, 6)),
Indexes.
%%
%% REPORTS
%%
%% Produces exactly N distinct random 8-byte binaries, retrying until the
%% deduplicated set reaches the requested size.
generate_unique(N) ->
generate_unique(lists:usort(random_bytes(N)), N).
generate_unique(L, N) ->
case length(L) of
N ->
L;
Less ->
generate_unique(lists:usort(random_bytes(N - Less) ++ L), N)
end.
random_bytes(N) ->
random_bytes([], N).
random_bytes(Acc, 0) -> Acc;
%% NOTE(review): uses the deprecated `random' module; the report functions
%% seed it explicitly (random:seed/3), so migrating to `rand' would change
%% those deterministic sequences — confirm before modernizing.
random_bytes(Acc, N) ->
Int = random:uniform(100000000000000),
random_bytes([<<Int:64/integer>> | Acc], N-1).
%% Median of a non-empty list of numbers: middle element for odd length,
%% mean of the two middle elements for even length. Adapted from berk,
%% https://github.com/richcarl/berk/blob/master/berk.erl
median(Values) ->
    Sorted = lists:sort(Values),
    Len = length(Sorted),
    case Len rem 2 of
        1 ->
            lists:nth(Len div 2 + 1, Sorted);
        0 ->
            [Lo, Hi] = lists:sublist(Sorted, Len div 2, 2),
            (Lo + Hi) / 2
    end.
%% Writes "estimates.csv" with relative-error statistics (median, 5th and
%% 95th percentile) per precision/cardinality combination. Benchmark
%% harness: runs real work and performs file I/O.
estimate_report() ->
Ps = lists:seq(11, 16),
Cardinalities = [100, 1000, 10000, 100000, 1000000],
Repetitions = 50,
{ok, F} = file:open("estimates.csv", [write]),
io:format(F, "p,card,median,p05,p95~n", []),
[begin
Stats = [run_report(P, Card, Repetitions) || Card <- Cardinalities],
lists:map(fun ({Card, Median, P05, P95}) ->
io:format(F,
"~p,~p,~p,~p,~p~n",
[P, Card, Median, P05, P95])
end, Stats)
end || P <- Ps],
io:format("~n"),
file:close(F).
%% One precision/cardinality cell: measures |Card - estimate| / Card over
%% `Repetitions' runs in parallel (s2_par, 8 workers), then summarizes via
%% a basho_stats histogram. Each worker reseeds the `random' module.
run_report(P, Card, Repetitions) ->
{ok, Estimations} = s2_par:map(
fun (I) ->
io:format("~p values with p=~p, rep ~p~n",
[Card, P, I]),
random:seed(erlang:phash2([node()]),
erlang:monotonic_time(),
erlang:unique_integer()),
Elements = generate_unique(Card),
Estimate = card(insert_many(Elements, new(P))),
abs(Card - Estimate) / Card
end,
lists:seq(1, Repetitions),
[{workers, 8}]),
Hist = basho_stats_histogram:update_all(
Estimations,
basho_stats_histogram:new(
0,
lists:max(Estimations),
length(Estimations))),
P05 = basho_stats_histogram:quantile(0.05, Hist),
P95 = basho_stats_histogram:quantile(0.95, Hist),
{Card, median(Estimations), P05, P95}.
%% Prints a table comparing the register backends across cardinalities:
%% register fill rate, byte size, and average insert/union/card/to_json
%% timings. Benchmark harness: runs real work and writes to stdout.
%%
%% Fix: removed stray dataset metadata ("| src/hyper.erl | ... |") that was
%% fused onto the final line and broke compilation.
perf_report() ->
    Ps = [15],
    Cards = [1, 100, 500, 1000, 2500, 5000, 10000,
             15000, 25000, 50000, 100000, 1000000],
    Mods = [hyper_gb, hyper_array, hyper_bisect, hyper_binary, hyper_carray],
    Repeats = 10,

    %% Times F(Args) in a fresh linked process, averaged over Repeats runs,
    %% so per-run garbage does not accumulate in the caller's heap.
    Time = fun (F, Args) ->
                   Run = fun () ->
                                 Parent = self(),
                                 Pid = spawn_link(
                                         fun () ->
                                                 {ElapsedUs, _} = timer:tc(F, Args),
                                                 Parent ! {self(), ElapsedUs}
                                         end),
                                 receive {Pid, ElapsedUs} -> ElapsedUs end
                         end,
                   lists:sum([Run() || _ <- lists:seq(1, Repeats)]) / Repeats
           end,

    R = [begin
             io:format("."),
             %% Fixed seed so every Mod/P/Card cell sees the same values.
             random:seed(1, 2, 3),
             M = trunc(math:pow(2, P)),
             InsertUs = Time(fun (Values, H) ->
                                     insert_many(Values, H)
                             end,
                             [generate_unique(Card), new(P, Mod)]),
             ReusableH = compact(insert_many(generate_unique(Card), new(P, Mod))),
             UnionUs = Time(fun (Fs) -> union(Fs) end,
                            [[insert_many(generate_unique(Card div 10), new(P, Mod)),
                              insert_many(generate_unique(Card div 10), new(P, Mod)),
                              insert_many(generate_unique(Card div 10), new(P, Mod)),
                              insert_many(generate_unique(Card div 10), new(P, Mod)),
                              insert_many(generate_unique(Card div 10), new(P, Mod))]]),
             CardUs = Time(fun card/1, [ReusableH]),
             ToJsonUs = Time(fun to_json/1, [ReusableH]),
             %% Fill rate: fraction of non-zero bytes in the encoded registers.
             Filter = insert_many(generate_unique(Card), new(P, Mod)),
             {Mod, Registers} = Filter#hyper.registers,
             Bytes = Mod:encode_registers(Registers),
             Filled = lists:filter(fun (I) -> binary:at(Bytes, I) =/= 0 end,
                                   lists:seq(0, M-1)),
             {Mod, P, Card, length(Filled) / M, bytes(Filter),
              InsertUs / Card, UnionUs, CardUs, ToJsonUs}
         end || Mod <- Mods,
                P <- Ps,
                Card <- Cards],
    io:format("~n"),

    io:format("~s ~s ~s ~s ~s ~s ~s ~s ~s~n",
              [string:left("module"    , 12, $ ),
               string:left("P"         ,  4, $ ),
               string:right("card"     ,  8, $ ),
               string:right("fill"     ,  6, $ ),
               string:right("bytes"    , 10, $ ),
               string:right("insert us", 10, $ ),
               string:right("union ms" , 10, $ ),
               string:right("card ms"  , 10, $ ),
               string:right("json ms"  , 10, $ )
              ]),
    lists:foreach(fun ({Mod, P, Card, Fill, Bytes,
                        AvgInsertUs, AvgUnionUs, AvgCardUs, AvgToJsonUs}) ->
                          Filled = lists:flatten(io_lib:format("~.2f", [Fill])),
                          AvgInsertUsL = lists:flatten(
                                           io_lib:format("~.2f", [AvgInsertUs])),
                          UnionMs = lists:flatten(
                                      io_lib:format("~.2f", [AvgUnionUs / 1000])),
                          CardMs = lists:flatten(
                                     io_lib:format("~.2f", [AvgCardUs / 1000])),
                          ToJsonMs = lists:flatten(
                                       io_lib:format("~.2f", [AvgToJsonUs / 1000])),
                          io:format("~s ~s ~s ~s ~s ~s ~s ~s ~s~n",
                                    [
                                     string:left(atom_to_list(Mod)  , 12, $ ),
                                     string:left(integer_to_list(P) ,  4, $ ),
                                     string:right(integer_to_list(Card) ,  8, $ ),
                                     string:right(Filled            ,  6, $ ),
                                     string:right(integer_to_list(Bytes), 10, $ ),
                                     string:right(AvgInsertUsL      , 10, $ ),
                                     string:right(UnionMs           , 10, $ ),
                                     string:right(CardMs            , 10, $ ),
                                     string:right(ToJsonMs          , 10, $ )
                                    ])
                  end, R).
%%
%% Copyright (c) 2018 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc LWW Map.
-module(state_lwwmap).
-author("<NAME> <<EMAIL>>").
-behaviour(type).
-behaviour(state_type).
-define(TYPE, ?MODULE).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0, new/1]).
-export([mutate/3, delta_mutate/3, merge/2, delta_and_merge/2]).
-export([query/1, query/2, equal/2, is_bottom/1,
is_inflation/2, is_strict_inflation/2,
irreducible_is_strict_inflation/2]).
-export([join_decomposition/1, delta/2, digest/1]).
-export([encode/2, decode/2]).
-export_type([state_lwwmap/0, state_lwwmap_op/0]).
-opaque state_lwwmap() :: {?TYPE, payload()}.
-type payload() :: maps:maps().
-type key() :: term().
-type timestamp() :: non_neg_integer().
-type value() :: term().
-type state_lwwmap_op() :: {set, key(), timestamp(), value()} | [{set, key(), timestamp(), value()}].
%% @doc Create a new, empty `state_lwwmap()'
new() ->
{?TYPE, maps:new()}.
%% @doc Create a new, empty `state_lwwmap()'
%% (the argument list must be empty; kept for the type behaviour API).
new([]) ->
new().
%% @doc Mutate a `state_lwwmap()'.
%% Delegates to the generic mutate: delta-mutate then merge into `CRDT'.
mutate(Op, Actor, {?TYPE, _LWWMap}=CRDT) ->
state_type:mutate(Op, Actor, CRDT).
%% @doc Delta-mutate a `state_lwwmap()'.
%% A single `{set, Key, Timestamp, Value}' op produces a singleton-map
%% delta; the current CRDT state is not consulted, because
%% last-writer-wins conflicts are resolved at merge time via Timestamp.
-spec delta_mutate(state_lwwmap_op(), type:id(), state_lwwmap()) ->
    {ok, state_lwwmap()}.
delta_mutate({set, Key, Timestamp, Value}, _Actor, {?TYPE, _LWWMap}) ->
    Delta = maps:put(Key, {Timestamp, Value}, #{}),
    {ok, {?TYPE, Delta}};
%% A list of ops is folded into a single merged delta.
delta_mutate(OpList, Actor, CRDT) ->
    Result = lists:foldl(
        fun(Op, Acc) ->
            %% NOTE: all operations are done on the original CRDT
            {ok, Delta} = delta_mutate(Op, Actor, CRDT),
            merge(Delta, Acc)
        end,
        new(),
        OpList
    ),
    {ok, Result}.
%% @doc Returns the value of the `state_lwwmap()'.
%% NOTE: the original spec used `maps:map(key(), value())', which is not
%% a type exported by the `maps' module; the function returns a plain map.
-spec query(state_lwwmap()) -> #{key() => value()}.
query({?TYPE, LWWMap}) ->
    %% simply hide timestamps
    maps:map(fun(_, {_, V}) -> V end, LWWMap).
%% @doc Returns the value of the `state_lwwmap()', given a list
%% of extra arguments.
%% `[MoreRecent]' restricts the result to the `MoreRecent' greatest keys.
%% NOTE: the original spec claimed `non_neg_integer()', but the function
%% returns a map (it delegates to query/1).
-spec query(list(term()), state_lwwmap()) -> #{key() => value()}.
query([MoreRecent], {?TYPE, LWWMap}) ->
    Keys = lists:reverse(lists:sort(maps:keys(LWWMap))),
    TopKeys = lists:sublist(Keys, MoreRecent),
    LWWMapTop = maps:with(TopKeys, LWWMap),
    query({?TYPE, LWWMapTop}).
%% @doc Merge two `state_lwwmap()'.
%% For every key present in both maps, the entry with the greater
%% timestamp wins; on a timestamp tie the second argument's entry is kept.
-spec merge(state_lwwmap(), state_lwwmap()) -> state_lwwmap().
merge({?TYPE, MapA}, {?TYPE, MapB}) ->
    PickNewer =
        fun(_Key, {TSA, _} = EntryA, {TSB, _} = EntryB) ->
                if
                    TSA > TSB -> EntryA;
                    true -> EntryB
                end
        end,
    {?TYPE, maps_ext:merge_all(PickNewer, MapA, MapB)}.
%% @doc Merge two LWWMap and return the delta responsible for the inflation.
%% Folds the remote entries into the local map, keeping only those that
%% strictly inflate the local state: a previously unknown key, or a key
%% with a strictly newer timestamp. Returns {Delta, MergedState}.
-spec delta_and_merge(state_lwwmap(), state_lwwmap()) -> {state_lwwmap(), state_lwwmap()}.
delta_and_merge({?TYPE, Remote}, {?TYPE, Local}) ->
    {Delta, CRDT} = maps:fold(
        fun(RKey, {RTS, _}=RValue, {DeltaAcc, CRDTAcc}=Acc) ->
            %% inflation: when key is there with smaller ts
            %% and when it's not
            Inflation = case maps:find(RKey, CRDTAcc) of
                {ok, {LTS, _}} -> RTS > LTS;
                error -> true
            end,
            case Inflation of
                true -> {maps:put(RKey, RValue, DeltaAcc), maps:put(RKey, RValue, CRDTAcc)};
                false -> Acc
            end
        end,
        {#{}, Local},
        Remote
    ),
    {{?TYPE, Delta}, {?TYPE, CRDT}}.
%% @doc Are two `state_lwwmap()' equal?
%% Not implemented: always returns `undefined' (despite the spec).
-spec equal(state_lwwmap(), state_lwwmap()) -> boolean().
equal(_, _) ->
    undefined.
%% @doc Check if a LWWMap is bottom (i.e. holds no entries).
-spec is_bottom(state_lwwmap()) -> boolean().
is_bottom({?TYPE, LWWMap}) ->
    maps:size(LWWMap) == 0.
%% @doc Given two `state_lwwmap()', check if the second is an inflation
%% Not implemented: always returns `undefined' (despite the spec).
-spec is_inflation(state_lwwmap(), state_lwwmap()) -> boolean().
is_inflation(_, _) ->
    undefined.
%% @doc Check for strict inflation.
%% Delegates to the generic implementation in state_type.
-spec is_strict_inflation(state_lwwmap(), state_lwwmap()) -> boolean().
is_strict_inflation({?TYPE, _}=CRDT1, {?TYPE, _}=CRDT2) ->
    state_type:is_strict_inflation(CRDT1, CRDT2).
%% @doc Check for irreducible strict inflation.
%% Not implemented: always returns `undefined' (despite the spec).
-spec irreducible_is_strict_inflation(state_lwwmap(),
                                      state_type:digest()) ->
    boolean().
irreducible_is_strict_inflation(_, _) ->
    undefined.
%% @doc Digest of a LWWMap: the full state itself.
-spec digest(state_lwwmap()) -> state_type:digest().
digest({?TYPE, _}=CRDT) ->
    {state, CRDT}.
%% @doc Join decomposition for `state_lwwmap()'.
%% Not implemented: always returns `undefined'.
-spec join_decomposition(state_lwwmap()) -> [state_lwwmap()].
join_decomposition(_) ->
    undefined.
%% @doc Delta calculation for `state_lwwmap()'.
%% Delegates to the generic implementation in state_type.
-spec delta(state_lwwmap(),
            state_type:digest()) -> state_lwwmap().
delta({?TYPE, _}=A, B) ->
    state_type:delta(A, B).
%% @doc Serialize a LWWMap; only the `erlang' format is supported.
-spec encode(state_type:format(), state_lwwmap()) -> binary().
encode(erlang, {?TYPE, _}=CRDT) ->
    erlang:term_to_binary(CRDT).
%% @doc Deserialize a LWWMap; asserts the decoded term is a ?TYPE tuple.
-spec decode(state_type:format(), binary()) -> state_lwwmap().
decode(erlang, Binary) ->
    {?TYPE, _} = CRDT = erlang:binary_to_term(Binary),
    CRDT.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% new/0 produces an empty payload.
new_test() ->
    ?assertEqual({?TYPE, #{}}, new()).
%% query/1 hides timestamps and returns key => value.
query_test() ->
    Map0 = new(),
    Map1 = {?TYPE, maps:from_list([{1, {10, 1}}, {2, {11, 13}}, {3, {12, 1}}])},
    ?assertEqual(#{}, query(Map0)),
    ?assertEqual(maps:from_list([{1, 1}, {2, 13}, {3, 1}]), query(Map1)).
%% query/2 limits the result to the N greatest keys.
query_args_test() ->
    Map1 = {?TYPE, maps:from_list([{1, {10, 1}}, {2, {11, 13}}, {3, {12, 1}}])},
    ?assertEqual(maps:from_list([{1, 1}, {2, 13}, {3, 1}]), query([10], Map1)),
    ?assertEqual(maps:from_list([{3, 1}]), query([1], Map1)),
    ?assertEqual(maps:from_list([]), query([0], Map1)).
%% Each set produces a singleton delta; merging applies LWW semantics.
delta_set_test() ->
    Map0 = new(),
    {ok, {?TYPE, Delta1}} = delta_mutate({set, a, 10, v1}, 1, Map0),
    Map1 = merge({?TYPE, Delta1}, Map0),
    {ok, {?TYPE, Delta2}} = delta_mutate({set, a, 11, v2}, 2, Map1),
    Map2 = merge({?TYPE, Delta2}, Map1),
    {ok, {?TYPE, Delta3}} = delta_mutate({set, b, 12, v3}, 1, Map2),
    Map3 = merge({?TYPE, Delta3}, Map2),
    ?assertEqual({?TYPE, maps:from_list([{a, {10, v1}}])}, {?TYPE, Delta1}),
    ?assertEqual({?TYPE, maps:from_list([{a, {10, v1}}])}, Map1),
    ?assertEqual({?TYPE, maps:from_list([{a, {11, v2}}])}, {?TYPE, Delta2}),
    ?assertEqual({?TYPE, maps:from_list([{a, {11, v2}}])}, Map2),
    ?assertEqual({?TYPE, maps:from_list([{b, {12, v3}}])}, {?TYPE, Delta3}),
    ?assertEqual({?TYPE, maps:from_list([{a, {11, v2}}, {b, {12, v3}}])}, Map3).
%% A list of ops folds into one merged delta (newest timestamp wins).
delta_multiple_set_test() ->
    OpList = [{set, a, 10, v1}, {set, a, 11, v2}, {set, b, 12, v3}],
    {ok, {?TYPE, Delta}} = delta_mutate(OpList, 1, new()),
    ?assertEqual({?TYPE, maps:from_list([{a, {11, v2}}, {b, {12, v3}}])}, {?TYPE, Delta}).
%% merge/2 is commutative and idempotent.
merge_test() ->
    Map1 = {?TYPE, maps:from_list([{a, {10, v1}}, {b, {11, v2}}])},
    Map2 = {?TYPE, maps:from_list([{a, {12, v3}}, {c, {13, v4}}])},
    Expected = {?TYPE, maps:from_list([{a, {12, v3}}, {b, {11, v2}}, {c, {13, v4}}])},
    Map3 = merge(Map1, Map2),
    Map4 = merge(Map2, Map1),
    Map5 = merge(Map1, Map1),
    ?assertEqual(Expected, Map3),
    ?assertEqual(Expected, Map4),
    ?assertEqual(Map1, Map5).
%% delta_and_merge/2 returns only the strictly-inflating entries.
delta_and_merge_test() ->
    Local1 = {?TYPE, maps:from_list([{a, {10, v1}}, {b, {11, v2}}])},
    Remote1 = {?TYPE, maps:from_list([{a, {12, v3}}, {c, {13, v4}}])},
    Remote2 = {?TYPE, maps:from_list([{a, {10, v1}}, {b, {14, v5}}])},
    {Delta1, Local2} = delta_and_merge(Remote1, Local1),
    %% merging again will return nothing new
    {Bottom, Local2} = delta_and_merge(Remote1, Local2),
    {Delta2, Local3} = delta_and_merge(Remote2, Local2),
    ?assertEqual(Remote1, Delta1),
    ?assert(is_bottom(Bottom)),
    ?assertEqual({?TYPE, maps:from_list([{a, {12, v3}}, {b, {11, v2}}, {c, {13, v4}}])}, Local2),
    ?assertEqual({?TYPE, maps:from_list([{b, {14, v5}}])}, Delta2),
    ?assertEqual({?TYPE, maps:from_list([{a, {12, v3}}, {b, {14, v5}}, {c, {13, v4}}])}, Local3).
%% Only the empty map is bottom.
is_bottom_test() ->
    Map0 = new(),
    Map1 = {?TYPE, maps:from_list([{a, {12, v3}}, {c, {13, v4}}])},
    ?assert(is_bottom(Map0)),
    ?assertNot(is_bottom(Map1)).
%% encode/decode round-trip preserves the CRDT.
encode_decode_test() ->
    Map = {?TYPE, maps:from_list([{a, {12, v3}}, {c, {13, v4}}])},
    Binary = encode(erlang, Map),
    EMap = decode(erlang, Binary),
    ?assertEqual(Map, EMap).
-endif.
-module(zoo_network).
-export([new/1, run/3, mutate/2, clone/1]).
-export_type([zoo_network/0]).
-record(zoo_network, {
weights :: [[number()]],
state :: [number()]
}).
-opaque zoo_network() :: #zoo_network{}.
% @doc returns a new network with random weights and an all-zero state
-spec new(pos_integer()) -> zoo_network().
new(Dimension) ->
    #zoo_network{
        state = blank_state(Dimension),
        weights = generate_weights(Dimension)
    }.
% @doc runs the network with specified input and returns the
% network with updated state and the outputs
% The outputs are the activations of the last OutputNum neurons of the
% updated state.
-spec run([number()], pos_integer(), zoo_network()) -> {zoo_network(), [number()]}.
run(Inputs, OutputNum, Network = #zoo_network{weights = Weights, state = State}) ->
    UpdatedState = update_state(Weights, State, Inputs),
    Output = lists:nthtail(length(UpdatedState) - OutputNum, UpdatedState),
    {Network#zoo_network{state = UpdatedState}, Output}.
% @doc applies `Mutations' successive single-weight mutations, each time
% picking a random neuron row and replacing one of its weights
-spec mutate(zoo_network(), number()) -> zoo_network().
mutate(#zoo_network{weights = Weights0} = Network, Mutations) ->
    Step =
        fun(_N, Ws) ->
                zoo_lists:modify_random(fun mutate_neuron_weights/1, Ws)
        end,
    Network#zoo_network{weights = lists:foldl(Step, Weights0, lists:seq(1, Mutations))}.
% @doc replaces one randomly chosen weight in a single neuron's weight list
-spec mutate_neuron_weights([number()]) -> [number()].
mutate_neuron_weights(Weights) ->
    Regen = fun(_Old) -> generate_weight() end,
    zoo_lists:modify_random(Regen, Weights).
% @doc copies a network, resetting its state to all zeros while keeping
% the weight matrix
-spec clone(zoo_network()) -> zoo_network().
clone(#zoo_network{weights = Ws} = Network) ->
    Network#zoo_network{state = blank_state(length(Ws))}.
% PRIVATE
% @doc returns updated state from network's weights and previous state
-spec update_state([[number()]], [number()], [number()]) -> [number()].
update_state(Weights, State, Inputs) ->
Signals = [neuron_signals(NeuronWeights, State) || NeuronWeights <- Weights],
SignalsWithInputs = add_inputs(Signals, Inputs),
[afn(lists:sum(NeuronSignals)) || NeuronSignals <- SignalsWithInputs].
% @doc element-wise product of a neuron's input weights with the
% previous state (both lists must have the same length)
-spec neuron_signals([number()], [number()]) -> [number()].
neuron_signals(NeuronWeights, State) ->
    lists:zipwith(fun(W, S) -> W * S end, NeuronWeights, State).
% @doc prepends each external input to the signal list of the
% corresponding neuron; neurons beyond the input list keep their
% signals unchanged
-spec add_inputs([[number()]], [number()]) -> [[number()]].
add_inputs(Signals, Inputs) ->
    add_inputs(Signals, Inputs, []).

-spec add_inputs([[number()]], [number()], [[number()]]) -> [[number()]].
add_inputs([Neuron | Signals], [Input | Inputs], Acc) ->
    add_inputs(Signals, Inputs, [[Input | Neuron] | Acc]);
add_inputs([Neuron | Signals], [], Acc) ->
    add_inputs(Signals, [], [Neuron | Acc]);
add_inputs([], [], Acc) ->
    lists:reverse(Acc).
% @doc neuron activation function: hyperbolic tangent, squashing the
% summed signal into (-1, 1)
-spec afn(number()) -> number().
afn(Signal) -> math:tanh(Signal).
% @doc returns a random weight matrix
% (Dimension rows of Dimension weights: every neuron is connected to
% every other neuron)
-spec generate_weights(pos_integer()) -> [[number()]].
generate_weights(Dimension) ->
    [generate_neuron_weights(Dimension) || _ <- lists:seq(1, Dimension)].
% @doc returns a random weight list for a single neuron
-spec generate_neuron_weights(pos_integer()) -> [number()].
generate_neuron_weights(Dimension) ->
    [generate_weight() || _ <- lists:seq(1, Dimension)].
% @doc returns a random weight uniformly drawn from [-pi, pi)
-spec generate_weight() -> number().
generate_weight() ->
    (2 * rand:uniform() - 1) * math:pi().
% @doc returns a blank (all-zero) state vector of the given dimension
% (uses lists:duplicate/2 instead of a comprehension over lists:seq/2;
% also removes dataset residue that was fused onto the final line)
-spec blank_state(pos_integer()) -> [0].
blank_state(Dimension) ->
    lists:duplicate(Dimension, 0).
%%% ==========================================================================
%%% Copyright 2015 Silent Circle
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%% ==========================================================================
%%% ==========================================================================
%%% @author <NAME> <<EMAIL>>
%%% @copyright 2015 Silent Circle
%%% @doc Test suite for the 'sc_util' module.
%%% @end
%%% ==========================================================================
-module(sc_util_SUITE).
-include_lib("common_test/include/ct.hrl").
-compile(export_all).
-define(assertMsg(Cond, Fmt, Args),
case (Cond) of
true ->
ok;
false ->
ct:fail("Assertion failed: ~p~n" ++ Fmt, [??Cond] ++ Args)
end
).
-define(assert(Cond), ?assertMsg((Cond), "", [])).
-define(assertThrow(Expr, Class, Reason),
begin
ok = (fun() ->
try (Expr) of
Res ->
{unexpected_return, Res}
catch
C:R ->
case {C, R} of
{Class, Reason} ->
ok;
_ ->
{unexpected_exception, {C, R}}
end
end
end)()
end
).
%%--------------------------------------------------------------------
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Function: suite() -> Info
%%
%% Info = [tuple()]
%% List of key/value pairs.
%%
%% Description: Returns list of tuples to set default properties
%% for the suite.
%%
%% Note: The suite/0 function is only meant to be used to return
%% default data values, not perform any other operations.
%%--------------------------------------------------------------------
%% Suite-wide defaults: every test case runs under a 30-second timetrap.
suite() ->
    [{timetrap, {seconds, 30}}].
%%--------------------------------------------------------------------
%% Function: init_per_suite(Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for skipping the suite.
%%
%% Description: Initialization before the suite.
%%
%% Note: This function is free to add any key/value pairs to the Config
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
%% No suite-wide setup is required; Config is passed through unchanged.
init_per_suite(Config) ->
    Config.
%%--------------------------------------------------------------------
%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
%%
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%%
%% Description: Cleanup after the suite.
%%--------------------------------------------------------------------
%% No suite-wide cleanup is required.
end_per_suite(_Config) ->
    ok.
%%--------------------------------------------------------------------
%% Function: init_per_group(GroupName, Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% GroupName = atom()
%% Name of the test case group that is about to run.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding configuration data for the group.
%% Reason = term()
%% The reason for skipping all test cases and subgroups in the group.
%%
%% Description: Initialization before each test case group.
%%--------------------------------------------------------------------
%% No per-group setup is required; Config is passed through unchanged.
init_per_group(_GroupName, Config) ->
    Config.
%%--------------------------------------------------------------------
%% Function: end_per_group(GroupName, Config0) ->
%% void() | {save_config,Config1}
%%
%% GroupName = atom()
%% Name of the test case group that is finished.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding configuration data for the group.
%%
%% Description: Cleanup after each test case group.
%%--------------------------------------------------------------------
%% No per-group cleanup is required.
end_per_group(_GroupName, _Config) ->
    ok.
%%--------------------------------------------------------------------
%% Function: init_per_testcase(TestCase, Config0) ->
%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
%%
%% TestCase = atom()
%% Name of the test case that is about to run.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for skipping the test case.
%%
%% Description: Initialization before each test case.
%%
%% Note: This function is free to add any key/value pairs to the Config
%% variable, but should NOT alter/remove any existing entries.
%%--------------------------------------------------------------------
%% No per-testcase setup is required; Config is passed through unchanged.
init_per_testcase(_Case, Config) ->
    Config.
%%--------------------------------------------------------------------
%% Function: end_per_testcase(TestCase, Config0) ->
%% void() | {save_config,Config1} | {fail,Reason}
%%
%% TestCase = atom()
%% Name of the test case that is finished.
%% Config0 = Config1 = [tuple()]
%% A list of key/value pairs, holding the test case configuration.
%% Reason = term()
%% The reason for failing the test case.
%%
%% Description: Cleanup after each test case.
%%--------------------------------------------------------------------
%% No per-testcase cleanup is required.
end_per_testcase(_Case, _Config) ->
    ok.
%%--------------------------------------------------------------------
%% Function: groups() -> [Group]
%%
%% Group = {GroupName,Properties,GroupsAndTestCases}
%% GroupName = atom()
%% The name of the group.
%% Properties = [parallel | sequence | Shuffle | {RepeatType,N}]
%% Group properties that may be combined.
%% GroupsAndTestCases = [Group | {group,GroupName} | TestCase]
%% TestCase = atom()
%% The name of a test case.
%% Shuffle = shuffle | {shuffle,Seed}
%% To get cases executed in random order.
%% Seed = {integer(),integer(),integer()}
%% RepeatType = repeat | repeat_until_all_ok | repeat_until_all_fail |
%% repeat_until_any_ok | repeat_until_any_fail
%% To get execution of cases repeated.
%% N = integer() | forever
%%
%% Description: Returns a list of test case group definitions.
%%--------------------------------------------------------------------
%% All test cases run sequentially in the single `util' group.
groups() ->
    [
     {
      util,
      [],
      [
       bitstring_to_hex_test,
       ensure_module_loaded_test,
       find_first_error_test,
       get_req_props_test,
       hex_to_bitstring_test,
       opt_val_test,
       posix_time_0_test,
       posix_time_1_test,
       make_child_spec_test,
       xdigit_test,
       nybble_test,
       req_binary_or_s_test,
       req_s_test,
       req_val_binary_or_s_test,
       req_val_test,
       req_val_s_test,
       to_atom_test,
       to_bin_test,
       to_list_test,
       exported_types_test
      ]
     }
    ].
%%--------------------------------------------------------------------
%% Function: all() -> GroupsAndTestCases | {skip,Reason}
%%
%% GroupsAndTestCases = [{group,GroupName} | TestCase]
%% GroupName = atom()
%% Name of a test case group.
%% TestCase = atom()
%% Name of a test case.
%% Reason = term()
%% The reason for skipping all groups and test cases.
%%
%% Description: Returns the list of groups and test cases that
%% are to be executed.
%%--------------------------------------------------------------------
%% Run every test case via the `util' group.
all() ->
    [
     {group, util}
    ].
%%--------------------------------------------------------------------
%% TEST CASES
%%--------------------------------------------------------------------
% t_1(doc) -> ["t/1 should return 0 on an empty list"];
% t_1(suite) -> [];
% t_1(Config) when is_list(Config) ->
% ?line 0 = t:foo([]),
% ok.
%%--------------------------------------------------------------------
%% util group
%%--------------------------------------------------------------------
%% NOTE(review): in the (doc) clauses below, adjacent string literals
%% concatenate without an intervening space.
hex_to_bitstring_test() ->
    [].
hex_to_bitstring_test(doc) ->
    [
     "sc_util:hex_to_bitstring/1 should correctly convert a hexadecimal string"
     "to a bitstring"
    ];
hex_to_bitstring_test(suite) ->
    [];
%% Round-trip check against the fixed test vector below.
hex_to_bitstring_test(_Config) ->
    Hex = test_hex_str(),
    Expected = test_hex_bin(),
    Actual = sc_util:hex_to_bitstring(Hex),
    Expected = Actual,
    ok.
bitstring_to_hex_test() ->
    [].
bitstring_to_hex_test(doc) ->
    [
     "sc_util:bitstring_to_hex/1 should correctly convert a bitstring"
     "to a hexadecimal string"
    ];
bitstring_to_hex_test(suite) ->
    [];
%% Inverse direction of hex_to_bitstring_test.
bitstring_to_hex_test(_Config) ->
    Bin = test_hex_bin(),
    Expected = test_hex_str(),
    Actual = sc_util:bitstring_to_hex(Bin),
    Expected = Actual,
    ok.
ensure_module_loaded_test() ->
    [].
ensure_module_loaded_test(doc) ->
    [
     "sc_util:ensure_module_loaded/1 test"
    ];
ensure_module_loaded_test(suite) ->
    [];
%% Loading an existing module succeeds; a missing one throws.
ensure_module_loaded_test(Config) ->
    true = sc_util:ensure_module_loaded(sc_util),
    ?assertThrow(sc_util:ensure_module_loaded(nosuchmodule),
                 throw,
                 {cannot_load_module, {nosuchmodule, nofile}}),
    Config.
find_first_error_test() ->
    [].
find_first_error_test(doc) ->
    [
     "sc_util:find_first_error/1 test"
    ];
find_first_error_test(suite) ->
    [];
%% Returns the list tail starting at the first {error, _} tuple.
find_first_error_test(Config) ->
    [] = sc_util:find_first_error([]),
    [] = sc_util:find_first_error([a,b,c,d,e,{f,g}]),
    [{error, x}|_] = sc_util:find_first_error([a,b,c,error,d,{error,x},{error,y}]),
    Config.
get_req_props_test() ->
    [].
get_req_props_test(doc) ->
    [
     "sc_util:get_req_props/2 test"
    ];
get_req_props_test(suite) ->
    [];
%% Splits required keys into {Found, Missing}.
get_req_props_test(Config) ->
    Props1 = [{rk1, rv1}, {k1, v1}, {rk2, rv2}, {k2, v2}],
    {[], []} = sc_util:get_req_props([], []),
    {[], [rk1]} = sc_util:get_req_props([rk1], []),
    {[], [rk1]} = sc_util:get_req_props([rk1], [rk1]), % Props must be 2-tuples
    {[{rk1, rv1}, {rk2, rv2}], []} = get_sorted_req_props([rk1, rk2], Props1),
    {[{rk1, rv1}, {rk2, rv2}], [rk3]} = get_sorted_req_props([rk1, rk2, rk3], Props1),
    Config.
opt_val_test() ->
    [].
opt_val_test(doc) ->
    [
     "sc_util:opt_val/3 test"
    ];
opt_val_test(suite) ->
    [];
%% Missing keys fall back to the default; bare atoms read as true.
opt_val_test(Config) ->
    Props = [{k1, v1}, {k2, v2}],
    v1 = sc_util:opt_val(k1, Props, foo),
    foo = sc_util:opt_val(k9, Props, foo),
    foo = sc_util:opt_val(k9, [], foo),
    true = sc_util:opt_val(k9, [k9], foo),
    Config.
posix_time_0_test() ->
    [].
posix_time_0_test(doc) ->
    [
     "sc_util:posix_time/0 test"
    ];
posix_time_0_test(suite) ->
    [];
%% posix_time/0 rounds the current timestamp to the nearest second.
posix_time_0_test(Config) ->
    TS = os:timestamp(),
    PTActual = sc_util:posix_time(),
    {M, S, U} = TS,
    PTCalc = (M * 1000000) + S + if U + 500000 >= 1000000 -> 1; true -> 0 end,
    PTCalc = PTActual, % on the basis that two consecutive timestamps should be in the same second
    Config.
posix_time_1_test() ->
    [].
posix_time_1_test(doc) ->
    [
     "sc_util:posix_time test"
    ];
posix_time_1_test(suite) ->
    [];
%% Checks rounding behavior at the half-second boundary.
posix_time_1_test(Config) ->
    TS0 = {1355,672572,499999},
    TS1 = {1355,672572,500001},
    BasePosixTime = 1355672572,
    IncPosixTime = BasePosixTime + 1,
    BasePosixTime = sc_util:posix_time(TS0),
    IncPosixTime = sc_util:posix_time(TS1),
    Config.
make_child_spec_test() ->
    [].
make_child_spec_test(doc) ->
    [
     "sc_util:make_child_spec/2 test"
    ];
%% Added for consistency: every other test case in this suite defines a
%% (suite) info clause; without it the variable (Config) clause would
%% swallow a `suite' atom argument.
make_child_spec_test(suite) ->
    [];
%% Verifies the generated supervisor child spec tuple shape.
make_child_spec_test(Config) ->
    Mod = test_mod,
    Name = test_name,
    SpecCfg = [],
    Opts = [{mod, Mod},
            {name, Name},
            {config, SpecCfg}],
    Timeout = 1234,
    Spec = sc_util:make_child_spec(Opts, Timeout),
    Spec = {Name, {Mod, start_link, [Name, SpecCfg]},
            permanent, Timeout, worker, [Mod]},
    Config.
xdigit_test() ->
    [].
xdigit_test(doc) ->
    [
     "sc_util:xdigit/1 test"
    ];
%% Added for consistency (see make_child_spec_test(suite)).
xdigit_test(suite) ->
    [];
%% 0..15 map to '0'..'f'; anything else throws {invalid_nybble, X}.
xdigit_test(Config) ->
    Hexdigs = lists:zip(lists:seq(0, 15), "0123456789abcdef"),
    _ = [XD = sc_util:xdigit(N) || {N, XD} <- Hexdigs],
    _ = [?assertThrow(sc_util:xdigit(X), throw, {invalid_nybble, X}) ||
         X <- [-1, 16, foo, <<"bar">>]],
    Config.
nybble_test() ->
    [].
nybble_test(doc) ->
    [
     "sc_util:nybble/1 test"
    ];
%% Added for consistency (see make_child_spec_test(suite)).
nybble_test(suite) ->
    [];
%% '0'..'9', 'a'..'f', 'A'..'F' map to 0..15; others throw.
nybble_test(Config) ->
    Digs = lists:seq(0, 9),
    Hexdigs = lists:seq(10, 15),
    HexZip = lists:zip(Digs ++ Hexdigs ++ Hexdigs,
                       "0123456789abcdefABCDEF"),
    _ = [N = sc_util:nybble(XD) || {N, XD} <- HexZip],
    _ = [?assertThrow(sc_util:nybble(X), throw, {invalid_hex_char, X}) ||
         X <- [$g, $G, $a - 1, $A - 1, foo, <<"bar">>]],
    Config.
req_s_test() ->
    [].
req_s_test(doc) ->
    [
     "sc_util:req_s/1 test"
    ];
req_s_test(suite) ->
    [];
%% Shared assertions live in req_s_test_impl/1.
req_s_test(Config) ->
    req_s_test_impl(fun sc_util:req_s/1),
    Config.
req_binary_or_s_test() ->
    [].
req_binary_or_s_test(doc) ->
    [
     "sc_util:req_binary_or_s/1 test"
    ];
req_binary_or_s_test(suite) ->
    [];
%% Must satisfy both the string and the binary contract.
req_binary_or_s_test(Config) ->
    req_s_test_impl(fun sc_util:req_binary_or_s/1),
    req_bin_s_test_impl(fun sc_util:req_binary_or_s/1),
    Config.
req_val_test() ->
    [].
req_val_test(doc) ->
    [
     "sc_util:req_val/2 test"
    ];
req_val_test(suite) ->
    [];
%% Missing keys throw; bare atoms read as true.
req_val_test(Config) ->
    ?assertThrow(sc_util:req_val(k, []), throw, {missing_required_key, k}),
    v = sc_util:req_val(k, [{foo,bar}, {k,v}, 1, 2, 3]),
    true = sc_util:req_val(flag, [{foo,bar}, flag, {k,v}, 1, 2, 3]),
    Config.
req_val_binary_or_s_test() ->
    [].
req_val_binary_or_s_test(doc) ->
    [
     "sc_util:req_val_binary_or_s/2 test"
    ];
req_val_binary_or_s_test(suite) ->
    [];
%% Accepts list or binary values; a bare flag (-> true) is not a string.
req_val_binary_or_s_test(Config) ->
    ?assertThrow(sc_util:req_val_binary_or_s(k, []), throw, {missing_required_key, k}),
    "v" = sc_util:req_val_binary_or_s(k, [{foo,bar}, {k, "v"}, 1, 2, 3]),
    <<"v">> = sc_util:req_val_binary_or_s(k, [{foo,bar}, {k, <<"v">>}, 1, 2, 3]),
    ?assertThrow(sc_util:req_val_binary_or_s(flag, [{foo,bar}, flag, {k,v}, 1, 2, 3]),
                 throw,
                 {not_a_string, true}),
    Config.
req_val_s_test() ->
    [].
req_val_s_test(doc) ->
    [
     "sc_util:req_val_s/2 test"
    ];
req_val_s_test(suite) ->
    [];
%% Value must be a non-empty string after trimming.
req_val_s_test(Config) ->
    ?assertThrow(sc_util:req_val_s(k, []), throw, {missing_required_key, k}),
    ?assertThrow(sc_util:req_val_s(k, [{k, 1}]), throw, {not_a_string, 1}),
    ?assertThrow(sc_util:req_val_s(k, [{k, " "}]), throw,
                 {validation_failed, empty_string_not_allowed}),
    V = "v",
    V = sc_util:req_val_s(k, [{foo,bar}, {k,V}, 1, 2, 3]),
    Config.
to_atom_test() ->
    [].
to_atom_test(doc) ->
    [
     "sc_util:to_atom/1 test"
    ];
to_atom_test(suite) ->
    [];
%% Converts atom/list/binary; anything else is a function_clause error.
to_atom_test(Config) ->
    Res = 'An Atom',
    Res = sc_util:to_atom(Res),
    Res = sc_util:to_atom("An Atom"),
    Res = sc_util:to_atom(<<"An Atom">>),
    ?assertThrow(sc_util:to_atom({}), error, function_clause),
    Config.
to_bin_test() ->
    [].
to_bin_test(doc) ->
    [
     "sc_util:to_bin/1 test"
    ];
to_bin_test(suite) ->
    [];
%% Converts binary/atom/list/integer; anything else errors.
to_bin_test(Config) ->
    Res = <<"123">>,
    Res = sc_util:to_bin(Res),
    Res = sc_util:to_bin('123'),
    Res = sc_util:to_bin("123"),
    Res = sc_util:to_bin(123),
    ?assertThrow(sc_util:to_bin({}), error, function_clause),
    Config.
to_list_test() ->
    [].
to_list_test(doc) ->
    [
     "sc_util:to_list/1 test"
    ];
to_list_test(suite) ->
    [];
%% Converts list/atom/binary/integer; anything else errors.
to_list_test(Config) ->
    Res = "123",
    Res = sc_util:to_list(Res),
    Res = sc_util:to_list('123'),
    Res = sc_util:to_list("123"),
    Res = sc_util:to_list(<<"123">>),
    Res = sc_util:to_list(123),
    ?assertThrow(sc_util:to_list({}), error, function_clause),
    Config.
%% Added for consistency: every other test case in this suite defines an
%% arity-0 info function returning [].
exported_types_test() ->
    [].
exported_types_test(doc) ->
    [
     "sc_util:exported_types/1 test"
    ];
exported_types_test(suite) ->
    [];
%% Compiles a generated module and checks exported-type extraction.
exported_types_test(Config) ->
    %% We're going to generate some fake types with different arities.
    %% Then we'll generate a module and dynamically compile it.
    ArityLo = 0, ArityHi = 5,
    ModName = export_type_test_1,
    %% Test happy path
    Types = generate_types("test_type", ArityLo, ArityHi),
    {ModName, Beam} = generate_types_mod(ModName, Types),
    ct:log("beam_lib:chunks() -> ~p~n", [beam_lib:chunks(Beam, [abstract_code])]),
    {ok, {ModName, ExpTypes}} = sc_util:exported_types(Beam),
    ct:log("ExpTypes: ~p~n", [ExpTypes]),
    SortedTypes = lists:sort(Types), % bind
    SortedTypes = lists:sort(ExpTypes), % assert
    %% Test when no abstract code is generated
    {ModName, NACBeam} = generate_types_mod_no_abstract_code(ModName, Types),
    {ok, {ModName, []}} = sc_util:exported_types(NACBeam),
    %% Test when call fails
    {error, beam_lib, _Reason} = sc_util:exported_types(<<>>),
    Config.
%%====================================================================
%% Internal helper functions
%%====================================================================
%% Fixed 32-byte test vector as a lowercase hex string...
test_hex_str() ->
    "325e0015046b7d25d10888e503d3248be84b9e1de4ecadda9f0a16dbfc3f6b74".
%% ...and the same 32 bytes as a binary.
test_hex_bin() ->
    <<
      50,94,0,21,4,107,125,37,209,8,136,229,3,211,36,139,232,75,
      158,29,228,236,173,218,159,10,22,219,252,63,107,116
    >>.
%% Sorts both lists returned by sc_util:get_req_props/2 so tests can
%% compare them order-independently.
get_sorted_req_props(ReqKeys, Props) ->
    {Found, Missing} = sc_util:get_req_props(ReqKeys, Props),
    {lists:sort(Found), lists:sort(Missing)}.
%% Shared string-contract assertions: empty/blank input throws, non-string
%% input throws, surrounding whitespace is trimmed.
req_s_test_impl(F) when is_function(F, 1) ->
    ?assertThrow(F(" "),
                 throw,
                 {validation_failed, empty_string_not_allowed}),
    ?assertThrow(F(123),
                 throw,
                 {not_a_string, 123}),
    "Good string!" = F(" \t\r\nGood string! \t\r\n ").
%% Same contract for binary input.
req_bin_s_test_impl(F) when is_function(F, 1) ->
    ?assertThrow(F(<<" ">>),
                 throw,
                 {validation_failed, empty_string_not_allowed}),
    <<"Good string!">> = F(<<" \t\r\nGood string! \t\r\n ">>).
%% Builds a list of {TypeName, Arity} pairs, one per arity in the range,
%% where TypeName is BaseTypeName with the arity appended, e.g. t_0, t_1.
-spec generate_types(BaseTypeName, ArityLo, ArityHi) -> Result when
      BaseTypeName :: string(), ArityLo :: non_neg_integer(),
      ArityHi :: non_neg_integer(), Result :: [{atom(), non_neg_integer()}].
generate_types(BaseTypeName, ArityLo, ArityHi) ->
    lists:map(
      fun(Arity) ->
              Name = lists:concat([BaseTypeName, "_", Arity]),
              {list_to_atom(Name), Arity}
      end,
      lists:seq(ArityLo, ArityHi)).
%% Compile the generated module source with explicit compiler options.
%% NOTE(review): ModName is passed as an atom by exported_types_test,
%% despite the spec saying string() — confirm the intended type.
-spec generate_types_mod(ModName, Types, CompilerOptions) -> Result when
      ModName :: string(), Types :: list(), CompilerOptions :: list(),
      Result :: {atom(), binary()}.
generate_types_mod(ModName, Types, CompilerOptions) ->
    S = generate_types_mod_source(ModName, Types),
    dynamic_compile:from_string(S, CompilerOptions).
%% Default compile with debug_info so abstract code is embedded.
-spec generate_types_mod(ModName, Types) -> Result when
      ModName :: string(), Types :: list(), Result :: {atom(), binary()}.
generate_types_mod(ModName, Types) ->
    generate_types_mod(ModName, Types, [debug_info]).
%% Compile without debug_info, so the beam carries no abstract code.
-spec generate_types_mod_no_abstract_code(ModName, Types) -> Result when
      ModName :: string(), Types :: list(), Result :: {atom(), binary()}.
generate_types_mod_no_abstract_code(ModName, Types) ->
    generate_types_mod(ModName, Types, []).
%% Assemble full module source: -module attr, -export_type attr, and one
%% -type declaration per generated type. Logged for test debugging.
-spec generate_types_mod_source(ModName, Types) -> Result when
      ModName :: string(), Types :: list(), Result :: string().
generate_types_mod_source(ModName, Types) ->
    S = generate_module_attr(ModName) ++
        generate_export_type_attr(Types) ++
        string:join(generate_typespecs(Types), ""),
    ct:log("Generated module: ~s~n", [S]),
    S.
%% -> ["A","B","C",...]: one single-capital-letter argument name per
%% position; supports at most 26 arguments.
mkarglist(N) when N =< 26 ->
    [[Letter] || Letter <- lists:seq($A, $A + N - 1)].
%% Renders the "-module(Name).\n" attribute line for the given module atom.
generate_module_attr(Mod) ->
    "-module(" ++ atom_to_list(Mod) ++ ").\n".
%% Renders the -export_type attribute listing every generated Type/Arity.
generate_export_type_attr(Types) ->
    "-export_type([\n" ++ generate_export_types(Types) ++ "\n]).\n".
%% One -type declaration string per {Type, Arity} pair.
generate_typespecs(Types) ->
    [generate_typespec(Type, Arity) || {Type, Arity} <- Types].
%% Renders e.g. "-type t_2(A,B) :: {A,B}.\n"; arity 0 becomes "any()".
generate_typespec(Type, Arity) ->
    ArgList = mkarglist(Arity),
    "-type " ++ atom_to_list(Type) ++
        "(" ++ string:join(ArgList, ",") ++ ")" ++
        " :: " ++
        case Arity of
            0 ->
                "any()";
            _ ->
                "{" ++ string:join(ArgList, ",") ++ "}"
        end ++ ".\n".
%% -> "type0/0,type1/1,type2/2,..."
%% Renders the comma-separated Type/Arity list used inside -export_type
%% (fixed: the final line had dataset residue fused onto it).
generate_export_types(Types) ->
    L = [atom_to_list(Type) ++ "/" ++ integer_to_list(Arity) ||
         {Type, Arity} <- Types],
    string:join(L, ",").
%%==============================================================================
%% Copyright 2016-2021 <NAME> <<EMAIL>>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
%%%-------------------------------------------------------------------
%%% @doc
%% Implements Bloom filters
%% Based on: Scalable Bloom Filters by
%% <NAME>, <NAME>, <NAME>, <NAME>
%%% @end
%%%
%% @author <NAME> <<EMAIL>>
%% @copyright (C) 2016-2021, <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(bloom).
-copyright('<NAME> <<EMAIL>>').
%% Library functions
-export([filter/0, filter/1,
is_filter/1, type/1,
member/2, add/2,
capacity/1
]).
%% Records
-record(filter,
{type = fixed :: fixed | scalable,
size :: integer(),
capacity :: integer(),
error_prob = 0.001 :: float(),
error_ratio :: float(),
growth_ratio = 1 :: integer(),
slice_size :: integer(),
slices :: [array:array(integer())] | [filter()]
}).
%% Types
-type opt() :: fixed | scalable |
{size, integer()} |
{error_prob, float()} |
{error_ratio, float()} |
{growth_ratio, integer()}.
-opaque filter() :: #filter{}.
%% Exported Types
-export_type([filter/0]).
%% Defines
-define(MAX_32, 4294967296).
-define(MAX_16, 65536).
-define(Width, 27).
%% ===================================================================
%% Library functions.
%% ===================================================================
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns a bloom filter
%% Builds a default (fixed) filter; check_size/1 is defined later in
%% this module — presumably it fills in the default size before the
%% filter is constructed (confirm against its definition below).
%% @end
%%--------------------------------------------------------------------
-spec filter() -> filter().
%%--------------------------------------------------------------------
filter() -> filter(check_size(#filter{})).
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns a bloom filter
%% Options are:
%% fixed -> standard partitioned bloom filter, default type
%% scalable -> scalable bloom filter
%% size -> the maximum size for fixed and initial size for scalable filter
%% the defaults are 4000 and 32000 respectively
%% error_prob -> error probability, default 0.001
%% Options relevant to scalable filters only
%% growth_ratio -> log 2 of growth ratio, one of 1, 2, 3 default 1
%% error_ratio -> error probability ratio, default 0.85
%% @end
%%--------------------------------------------------------------------
-spec filter([opt()]) -> filter().
%%--------------------------------------------------------------------
%% Option lists are normalized by parse_opts/1 (defined elsewhere in
%% this module) before dispatching on the filter type.
filter(Opts) when is_list(Opts) -> filter(parse_opts(Opts));
%% Fixed filter: K hash slices, each of Size bits (rounded up to a
%% power of two), sized so N elements fit within error probability E.
filter(F = #filter{type = fixed, size = N, error_prob = E}) ->
    K = 1 + trunc(log2(1 / E)),
    Size =
        1 bsl (1 + trunc(- log2(1 - math:pow(1 - math:pow(E, 1 / K), 1 / N)))),
    Capacity = trunc(math:log(1 - math:pow(E, 1 / K)) / math:log(1 - 1 / Size)),
    Slice = array:new((Size - 1) div ?Width + 1, {default, 0}),
    F#filter{size = 0,
             slice_size = Size,
             capacity = Capacity,
             slices = lists:duplicate(K, Slice)};
%% Scalable filter: starts with one fixed filter whose tighter error
%% probability E * (1 - R) leaves room for later stages.
filter(F = #filter{type = scalable, size = S, error_prob = E, error_ratio=R}) ->
    F#filter{slices = [filter([{size, S}, {error_prob, E * (1 - R)}])]}.
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns true if the argument is a Bloom filter
%% @end
%%--------------------------------------------------------------------
-spec is_filter(_) -> boolean().
%%--------------------------------------------------------------------
%% Succeeds only for the #filter{} record produced by filter/0,1.
is_filter(#filter{}) -> true;
is_filter(_) -> false.
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns the type of the Bloom filter: fixed | scalable.
%% @end
%%--------------------------------------------------------------------
-spec type(_) -> fixed | scalable.
%%--------------------------------------------------------------------
type(#filter{type = Type}) -> Type.
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns if the term is a member of the Bloom filter
%% @end
%%--------------------------------------------------------------------
-spec member(_, filter()) -> boolean().
%%--------------------------------------------------------------------
%% Fixed filter: derive the two base indices from the term and check
%% that the corresponding bit is set in every slice.
member(Term, #filter{type = fixed, slice_size = Size, slices = Slices}) ->
    Mask = Size - 1,
    {I0, I1} = indices(Mask, Term),
    all_set(Mask, I1, I0, Slices);
%% Scalable filter: hash the term once, then probe each fixed sub-filter.
member(Term, #filter{type = scalable, slices = Filters}) ->
    {H0, H1} = hashes(Term),
    member(H0, H1, Filters).

%% Probe the fixed sub-filters with the precomputed hash pair; the term
%% is (probably) present if any sub-filter has all derived bits set.
member(_, _, []) -> false;
member(H0, H1, [#filter{slice_size = Size, slices = Slices} | T]) ->
    Mask = Size - 1,
    {I0, I1} = indices(Mask, H0, H1),
    case all_set(Mask, I1, I0, Slices) of
        true -> true;
        false -> member(H0, H1, T)
    end.
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Adds a term to the Bloom filter
%% @end
%%--------------------------------------------------------------------
-spec add(_, filter())-> filter().
%%--------------------------------------------------------------------
%% Fixed filter: set one bit per slice. The element count only grows
%% when the term was not already (apparently) present.
add(Term, F = #filter{type = fixed, size = S, slice_size=SS, slices=Slices}) ->
    Mask = SS -1,
    {I0, I1} = indices(Mask, Term),
    case all_set(Mask, I1, I0, Slices) of
        true -> F;
        false -> F#filter{size = S + 1,
                          slices = set_bits(Mask, I1, I0, Slices, [])}
    end;
%% Scalable filter: insert into the newest (head) fixed filter while it
%% has spare capacity; otherwise prepend a bigger fixed filter (slice
%% size grown by 2^G, error probability tightened by R) and insert there.
add(Term, F = #filter{type=scalable, slices=Fs,growth_ratio=G,error_ratio=R}) ->
    {H0, H1} = hashes(Term),
    case member(H0, H1, Fs) of
        true -> F;
        false ->
            case Fs of
                [H = #filter{capacity = C, size = S}| T] when S < C ->
                    F#filter{slices = [add(H0, H1, H) | T]};
                [#filter{slice_size = SS, error_prob = E}| _] ->
                    Opts = [{size, SS bsl G}, {error_prob, E * R}],
                    F#filter{slices = [add(H0, H1, filter(Opts)) | Fs]}
            end
    end.

%% Unconditionally set the bits for a precomputed hash pair in a fixed
%% filter and bump its element count.
add(H0, H1, F = #filter{size = S, slice_size=SS, slices=Slices}) ->
    Mask = SS - 1,
    {I0, I1} = indices(Mask, H0, H1),
    F#filter{size = S + 1, slices = set_bits(Mask, I1, I0, Slices, [])}.
%%--------------------------------------------------------------------
%% Function:
%% @doc
%% Returns the capacity of the Bloom filter. A scalable filter grows on
%% demand, so its capacity is the atom `infinity'; a fixed filter
%% reports the element count it was sized for.
%% @end
%%--------------------------------------------------------------------
%% The original spec promised integer() although the scalable clause
%% returns the atom 'infinity'; the spec now matches the implementation.
-spec capacity(filter()) -> integer() | infinity.
%%--------------------------------------------------------------------
capacity(#filter{type = scalable}) -> infinity;
capacity(#filter{type = fixed, capacity = Capacity}) -> Capacity.
%% ===================================================================
%% Internal functions.
%% ===================================================================
%% Base-2 logarithm.
log2(X) -> math:log(X) / math:log(2).

%% Derive two base bit indices for a term. For small slices (< 2^16
%% bits) one 32-bit phash2 is split into its 16-bit halves; for larger
%% slices two independent hashes are taken (the term wrapped in a tuple
%% vs a list) so each index has enough entropy.
indices(Mask, Term) when Mask < ?MAX_16 ->
    H = erlang:phash2({Term},?MAX_32),
    {(H bsr 16) band Mask, H band Mask};
indices(Mask, Term) ->
    {erlang:phash2({Term}, ?MAX_32) band Mask,
     erlang:phash2([Term], ?MAX_32) band Mask}.

%% Same as indices/2 but starting from precomputed hashes (hashes/1).
indices(Mask, H0, _) when Mask<?MAX_16 -> {(H0 bsr 16) band Mask, H0 band Mask};
indices(Mask, H0, H1) -> {H0 band Mask, H1 band Mask}.

%% Two independent 32-bit hashes of a term, reused across sub-filters.
hashes(Term) -> {erlang:phash2({Term},?MAX_32), erlang:phash2([Term],?MAX_32)}.

%% Double hashing: the bit index for slice k is (I0 + k*I1) mod Size.
%% True only if the derived bit is set in every slice.
all_set(_, _, _, []) -> true;
all_set(Mask, I1, I, [H | T]) ->
    case array:get(I div ?Width, H) band (1 bsl (I rem ?Width)) of
        0 -> false;
        _ -> all_set(Mask, I1, (I + I1) band Mask, T)
    end.

%% Set the derived bit in every slice, walking indices as in all_set/4.
set_bits(_, _, _, [], Acc) -> lists:reverse(Acc);
set_bits(Mask, I1, I, [H | T], Acc) ->
    AI = I div ?Width,
    H1 = array:set(AI, array:get(AI, H) bor (1 bsl (I rem ?Width)), H),
    set_bits(Mask, I1, (I + I1) band Mask, T, [H1 | Acc]).
%% Fold the option list into a #filter{} record, then fill in defaults.
parse_opts(Opts) -> check_size(lists:foldl(fun parse_opt/2, #filter{}, Opts)).

%% One clause per recognised option; anything else raises badarg.
parse_opt(fixed, Filter) -> Filter#filter{type = fixed};
parse_opt(scalable, Filter) -> Filter#filter{type = scalable};
parse_opt({size, S}, Filter) when is_integer(S), S > 0 ->
    Filter#filter{size = S};
parse_opt({error_prob, E}, Filter) when is_float(E), E > 0, E < 1 ->
    Filter#filter{error_prob = E};
parse_opt({error_ratio, R}, Filter) when is_float(R), R > 0, R < 1 ->
    Filter#filter{error_ratio = R};
parse_opt({growth_ratio, R}, Filter) when is_integer(R), R > 0, R < 4 ->
    Filter#filter{growth_ratio = R};
parse_opt(_, _) -> erlang:error(badarg).
%% Fill in defaults and enforce the sizing rule of thumb S >= 4/E
%% (due to double hashing). An undersized filter fails with
%% function_clause, which acts as the argument check.
%% (The original final line had dataset metadata fused onto the closing
%% `F.', making it invalid Erlang; it has been restored.)
check_size(F = #filter{type = fixed, size = undefined}) ->
    check_size(F#filter{size = 4000});
check_size(F = #filter{type = fixed, size = S, error_prob = E}) when S >= 4/E ->
    F;
check_size(F = #filter{type=scalable, size = undefined}) ->
    check_size(F#filter{size = 32000});
check_size(F = #filter{type=scalable, growth_ratio=1, error_ratio=undefined}) ->
    check_size(F#filter{error_ratio = 0.85});
check_size(F = #filter{type=scalable, growth_ratio=2, error_ratio=undefined}) ->
    check_size(F#filter{error_ratio = 0.75});
check_size(F = #filter{type=scalable, growth_ratio=3, error_ratio=undefined}) ->
    check_size(F#filter{error_ratio = 0.65});
check_size(F = #filter{type = scalable, size = S, error_prob=E, error_ratio=R})
  when S >= 4/(E * (1 - R)) ->
    F.
-module(day15).
%% API exports
-export([part1/1, part2/1]).
%% @doc Part 1: parse the risk grid, build a weighted digraph, run
%% Dijkstra from {1,1} and read the shortest distance at the
%% bottom-right corner {MaxX, MaxY}.
part1(Filename) ->
    {MaxX, MaxY, Grid} = parse(Filename),
    GetAdjacents = make_adjacent_fun(MaxX, MaxY),
    Graph0 = build_graph(Grid, GetAdjacents),
    Graph = dijkstra(Graph0, _Source = {1, 1}),
    #{dist := Dist} = get_vertex_label(Graph, {MaxX, MaxY}),
    Dist.

%% @doc Part 2 is not implemented yet: it currently only parses input.
part2(Filename) -> parse(Filename).
%% @doc Dijkstra single-source shortest paths,
%% https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
%% Vertex labels hold `#{dist => Distance, prev => Predecessor}' maps.
%% Every vertex starts at distance `infinity' (the atom compares greater
%% than any number in Erlang term order), the source at 0. The digraph
%% is mutated in place and returned.
%% A no-op `get_vertex_label(Graph, Source)' call whose result was
%% discarded has been removed (digraph:vertex/2 has no side effects).
dijkstra(Graph, Source) ->
    Vertices = digraph:vertices(Graph),
    [set_vertex_label(Graph, Vertex, #{dist => infinity, prev => undefined})
     || Vertex <- Vertices],
    set_vertex_label(Graph, Source, #{dist => 0, prev => undefined}),
    loop(Graph, Vertices).
%% @doc One Dijkstra round: extract the unvisited vertex with the
%% smallest tentative distance and relax all of its outgoing edges.
%% If that minimum is still `infinity', every remaining vertex is
%% unreachable from the source; the original code then crashed with
%% badarith on `infinity + Weight', so relaxation is now skipped.
loop(Graph, []) -> Graph;
loop(Graph, Q0) ->
    U = find_min_dist_vertex(Graph, Q0),
    Q = lists:delete(U, Q0),
    case get_dist(Graph, U) of
        infinity ->
            %% Unreachable remainder: nothing to relax, just drain Q.
            loop(Graph, Q);
        DistU ->
            NeighboursU = digraph:out_neighbours(Graph, U),
            [begin
                 Alt = DistU + get_edge_label(Graph, {U, V}),
                 DistV = get_dist(Graph, V),
                 case Alt < DistV of
                     true -> set_vertex_label(Graph, V, #{dist => Alt, prev => U});
                     false -> ok
                 end
             end || V <- NeighboursU],
            loop(Graph, Q)
    end.
%% Label (weight) of the edge from Vertex1 to Vertex2.
%% NOTE(review): the single-element match assumes exactly one such edge;
%% parallel edges between the same pair would make this a badmatch.
get_edge_label(Graph, {Vertex1, Vertex2}) ->
    Edges = [digraph:edge(Graph, Edge) || Edge <- digraph:edges(Graph, Vertex1)],
    [Label] = ([
        Label
        || {_, VertexFrom, VertexTo, Label} <- Edges,
           VertexFrom =:= Vertex1,
           VertexTo =:= Vertex2
    ]),
    Label.

%% digraph:add_vertex/3 on an existing vertex just replaces its label.
set_vertex_label(Graph, Vertex, Value) ->
    digraph:add_vertex(Graph, Vertex, Value).

%% Tentative Dijkstra distance stored in the vertex label map.
get_dist(Graph, Vertex) ->
    #{dist := Dist} = get_vertex_label(Graph, Vertex),
    Dist.

%% Label map of a vertex; crashes (badmatch) if the vertex is unknown.
get_vertex_label(Graph, Vertex) ->
    {Vertex, Label} = digraph:vertex(Graph, Vertex),
    Label.
%% @doc Return the vertex in `Vertices' with the smallest tentative
%% distance. The original sorted the whole list and took the head —
%% O(n log n) work plus building the sorted copy — where lists:min/1
%% finds the same minimal {Dist, Vertex} pair in one O(n) pass
%% (`infinity' sorts after all numbers in Erlang term order either way).
find_min_dist_vertex(Graph, Vertices) ->
    {_MinDist, Vertex} =
        lists:min([{get_dist(Graph, V), V} || V <- Vertices]),
    Vertex.
%% Build a digraph with one vertex per grid cell and, for every pair of
%% adjacent cells, a directed edge labelled with the risk value of the
%% cell being entered (maps:get(Adjacent, Grid)).
build_graph(Grid, GetAdjacents) ->
    Cells = maps:keys(Grid),
    Graph = digraph:new(),
    % add vertices
    [digraph:add_vertex(Graph, Cell) || Cell <- Cells],
    % add edges
    [
        [
            digraph:add_edge(Graph, Cell, Adjacent, maps:get(Adjacent, Grid))
            || Adjacent <- GetAdjacents(Cell)
        ]
        || Cell <- Cells
    ],
    Graph.
%% Build a closure yielding the in-bounds neighbours of a cell.
make_adjacent_fun(MaxX, MaxY) -> fun({X, Y}) -> adjacent({MaxX, MaxY}, {X, Y}) end.

%% The four orthogonal neighbours of {X, Y} inside 1..MaxX x 1..MaxY.
%% The original also filtered out {X, Y} itself, but none of the four
%% candidates (each offset by exactly 1) can ever equal {X, Y}, so that
%% dead condition has been dropped.
adjacent({MaxX, MaxY}, {X, Y}) ->
    [
        {AdjX, AdjY}
        || {AdjX, AdjY} <- [{X + 1, Y}, {X, Y + 1}, {X - 1, Y}, {X, Y - 1}],
           AdjX > 0,
           AdjY > 0,
           AdjX =< MaxX,
           AdjY =< MaxY
    ].
%%% Parse
%% Read the puzzle input into {MaxX, MaxY, Grid}: MaxX is the length of
%% the first line, MaxY the number of lines, Grid a map of cell keys to
%% single-digit integers.
%% NOTE(review): index_values/1 keys cells as {RowIndex, ColIndex},
%% while part1 treats keys as {X, Y} with the first coordinate bounded
%% by the line length — this only agrees for square inputs; confirm if
%% non-square grids ever matter.
parse(Filename) ->
    {ok, FileContent} = file:read_file(Filename),
    Lines = string:lexemes(FileContent, "\n"),
    ListOfIntegerLists = [[binary_to_integer(<<Char>>) || <<Char>> <= L] || L <- Lines],
    Grid = index_values(ListOfIntegerLists),
    MaxX = string:length(lists:nth(1, Lines)),
    MaxY = length(Lines),
    {MaxX, MaxY, Grid}.

%% @doc Produces a {RowIndex, ColIndex} -> Value map from a list of row
%% value lists (both dimensions are 1-based).
index_values(Rows) ->
    EnumeratedRows = enumerate([enumerate(Row) || Row <- Rows]),
    index_values(EnumeratedRows, _Index = #{}).

index_values([], Index) -> Index;
index_values([{RowIndex, Row} | Rows], Index) -> index_values(Rows, index_values_row(RowIndex, Row, Index)).

%% Add one enumerated row's cells to the accumulator map.
index_values_row(_RowIndex, [], Index) ->
    Index;
index_values_row(RowIndex, [{ColIndex, Value} | Cols], Index) ->
    index_values_row(RowIndex, Cols, Index#{{RowIndex, ColIndex} => Value}).
% Helpers
%% @doc Pair each element of `List' with its 1-based position.
%% (The original final line had dataset metadata fused onto it, making
%% it invalid Erlang; restored here.)
enumerate(List) -> lists:zip(lists:seq(1, length(List)), List).
%% -------- Utility Functions ---------
%%
%% Generally helpful funtions within leveled
%%
-module(leveled_util).
-include("include/leveled.hrl").
-include_lib("eunit/include/eunit.hrl").
-export([generate_uuid/0,
integer_now/0,
integer_time/1,
magic_hash/1]).
-spec generate_uuid() -> list().
%% @doc
%% Generate a new globally unique version-4 UUID as a string.
%% The variant/version bits are forced by the `4' in the format string
%% and the `band'/`bor' masking of C and D.
%% Credit to
%% https://github.com/afiskon/erlang-uuid-v4/blob/master/src/uuid.erl
generate_uuid() ->
    <<A:32, B:16, C:16, D:16, E:48>> = leveled_rand:rand_bytes(16),
    L = io_lib:format("~8.16.0b-~4.16.0b-4~3.16.0b-~4.16.0b-~12.16.0b",
                      [A, B, C band 16#0fff, D band 16#3fff bor 16#8000, E]),
    binary_to_list(list_to_binary(L)).

-spec integer_now() -> non_neg_integer().
%% @doc
%% Return the current wall-clock time in Gregorian seconds (UTC).
integer_now() ->
    integer_time(os:timestamp()).
-spec integer_time(erlang:timestamp()) -> non_neg_integer().
%% @doc
%% Convert an `erlang:timestamp()' to UTC Gregorian seconds
%% (seconds since year 0 of the Gregorian calendar).
integer_time(Timestamp) ->
    calendar:datetime_to_gregorian_seconds(
        calendar:now_to_universal_time(Timestamp)).
-spec magic_hash(any()) -> integer().
%% @doc
%% Use DJ Bernstein magic hash function. Note, this is more expensive than
%% phash2 but provides a much more balanced result.
%%
%% Hash function contains mysterious constants, some explanation here as to
%% what they are -
%% http://stackoverflow.com/questions/10696223/reason-for-5381-number-in-djb-hash-function
%% DJB2-style hash (xor variant): start at 5381, fold H := (H*33) xor B
%% over every byte, and truncate the result to 32 bits.
magic_hash({binary, BinaryKey}) ->
    hash1(5381, BinaryKey) band 16#FFFFFFFF;
magic_hash(AnyKey) ->
    %% Any other term is hashed over its external representation.
    magic_hash({binary, term_to_binary(AnyKey)}).

%% Fold the DJB2 step over each byte of the binary.
hash1(Hash, <<>>) ->
    Hash;
hash1(Hash, <<Byte:8/integer, Rest/bytes>>) ->
    hash1((Hash * 33) bxor Byte, Rest).
%%%============================================================================
%%% Test
%%%============================================================================
-ifdef(TEST).
%% Rough micro-benchmark: compare magic_hash/1 against erlang:phash2/1
%% over 1000 object keys. Timings are printed to the user console;
%% nothing about speed is asserted. Note that _HL1 is bound by the
%% first match, so reusing it in the third run also asserts that the
%% hash results are identical between runs (determinism).
magichashperf_test() ->
    KeyFun =
        fun(X) ->
            K = {o, "Bucket", "Key" ++ integer_to_list(X), null},
            {K, X}
        end,
    KL = lists:map(KeyFun, lists:seq(1, 1000)),
    {TimeMH, _HL1} = timer:tc(lists, map, [fun(K) -> magic_hash(K) end, KL]),
    io:format(user, "1000 keys magic hashed in ~w microseconds~n", [TimeMH]),
    {TimePH, _Hl2} = timer:tc(lists, map, [fun(K) -> erlang:phash2(K) end, KL]),
    io:format(user, "1000 keys phash2 hashed in ~w microseconds~n", [TimePH]),
    {TimeMH2, _HL1} = timer:tc(lists, map, [fun(K) -> magic_hash(K) end, KL]),
    io:format(user, "1000 keys magic hashed in ~w microseconds~n", [TimeMH2]).
-endif. | src/leveled_util.erl | 0.607081 | 0.435601 | leveled_util.erl | starcoder |
%%% -*- erlang -*-
%%%
%%% This file is part of metrics released under the BSD license.
%%% See the LICENSE for more information.
%%%
%% @doc metric module for folsom
%%
-module(metrics_folsom).
-export([
new/2,
delete/1,
increment_counter/1,
increment_counter/2,
decrement_counter/1,
decrement_counter/2,
update_histogram/2,
update_gauge/2,
update_meter/2]).
-spec new(atom(), any()) -> ok | {error, term()}.
%% @doc Create a folsom metric of the given type; unknown types are
%% rejected with {error, unsupported_type} rather than silently ignored.
new(counter, Name) ->
    folsom_metrics:new_counter(Name);
new(histogram, Name) ->
    folsom_metrics:new_histogram(Name);
new(gauge, Name) ->
    folsom_metrics:new_gauge(Name);
new(meter, Name) ->
    folsom_metrics:new_meter(Name);
new(_, _) ->
    {error, unsupported_type}.

%% @doc Remove a metric previously created with new/2.
delete(Name) ->
    folsom_metrics:delete_metric(Name).
-spec increment_counter(any()) -> ok | {error, term()}.
%% @doc Increment the counter by 1 (created lazily by notify/3).
increment_counter(Name) ->
    notify(Name, {inc, 1}, counter).

-spec increment_counter(any(), pos_integer()) -> ok | {error, term()}.
%% @doc Increment the counter by Value.
increment_counter(Name, Value) ->
    notify(Name, {inc, Value}, counter).

-spec decrement_counter(any()) -> ok | {error, term()}.
%% @doc Decrement the counter by 1.
decrement_counter(Name) ->
    notify(Name, {dec, 1}, counter).

-spec decrement_counter(any(), pos_integer()) -> ok | {error, term()}.
%% @doc Decrement the counter by Value.
decrement_counter(Name, Value) ->
    notify(Name, {dec, Value}, counter).
-spec update_histogram(any(), number()) -> ok | {error, term()};
                      (any(), function()) -> ok | {error, term()}.
%% @doc With a zero-arity fun: run it, record its wall-clock duration in
%% milliseconds in the histogram, and return the fun's result; a notify
%% failure is thrown rather than returned. With a number: record the
%% value directly.
update_histogram(Name, Fun) when is_function(Fun, 0) ->
    Begin = os:timestamp(),
    Result = Fun(),
    Duration = timer:now_diff(os:timestamp(), Begin) div 1000,
    case notify(Name, Duration, histogram) of
        ok -> Result;
        Error -> throw(Error)
    end;
update_histogram(Name, Value) when is_number(Value) ->
    notify(Name, Value, histogram).

-spec update_gauge(any(), number()) -> ok | {error, term()}.
%% @doc Set the gauge to Value.
update_gauge(Name, Value) ->
    notify(Name, Value, gauge).

-spec update_meter(any(), number()) -> ok | {error, term()}.
%% @doc Mark the meter with Value events.
update_meter(Name, Value) ->
    notify(Name, Value, meter).
-spec notify(any(), any(), atom()) -> ok | {error, term()}.
%% @doc Notify folsom of a metric update, lazily creating the metric on
%% first use: folsom returns `{error, Name, nonexistent_metric}' when
%% the metric has not been declared yet, in which case it is created
%% and the notification retried once. Other errors are passed through.
%% (A leftover debug `io:format' on the error path has been removed; it
%% printed every error to stdout. The original final line also had
%% dataset metadata fused onto it.)
notify(Name, Op, Type) ->
    case folsom_metrics:notify(Name, Op) of
        ok ->
            ok;
        {error, Name, nonexistent_metric} ->
            %% The metric doesn't exist yet: create it, then notify.
            new(Type, Name),
            folsom_metrics:notify(Name, Op);
        Error ->
            Error
    end.
%% @copyright 2015-2016 <NAME> <<EMAIL>>
%%
%% @doc Pairing Heaps
%% @private
%% @end
%%
%% NOTE:
%% v0.0.12との互換性維持用モジュール.
%%
%% コード自体は https://github.com/sile/logi_stdlib/blob/master/src/logi_util_heap.erl からの移植.
-module(logi_builtin_util_heap).
%%----------------------------------------------------------------------------------------------------------------------
%% Exported API
%%----------------------------------------------------------------------------------------------------------------------
-export([new/0, is_empty/1, in/2, out/1, peek/1, merge/2]).
-export_type([heap/1]).
%%----------------------------------------------------------------------------------------------------------------------
%% Types
%%----------------------------------------------------------------------------------------------------------------------
-opaque heap(Item) :: empty | {Item::tuple(), [heap(Item)]}.
%%----------------------------------------------------------------------------------------------------------------------
%% Exported Functions
%%----------------------------------------------------------------------------------------------------------------------
%% @doc Returns an empty heap
-spec new() -> heap(_Item).
new() -> empty.

%% @doc Tests if `Heap' is empty and returns `true' if so and `false' otherwise
-spec is_empty(Heap :: heap(_Item)) -> boolean().
is_empty(empty) -> true;
is_empty(_) -> false.

%% @doc Inserts `Item' into the heap `Heap' by melding a singleton heap
%% with the existing one (O(1) for pairing heaps).
%%
%% Returns the resulting heap
-spec in(Item, heap(Item)) -> heap(Item).
in(Item, Heap) -> merge({Item, []}, Heap).

%% @doc Removes the smallest item from the heap `Heap' by discarding the
%% root and melding its children pairwise (merge_pairs/1).
%%
%% Returns the `Heap2', where `Heap2' is the resulting heap.
%% If `Heap' is empty, the `empty' is returned.
-spec out(Heap :: heap(Item)) -> (Heap2 :: heap(Item)) | empty.
out(empty) -> empty;
out({_, Heap}) -> merge_pairs(Heap).
%% @doc Return the front (smallest) item of `Heap', or the atom `empty'
%% when the heap contains no items.
-spec peek(Heap :: heap(Item)) -> Item | empty.
peek(empty) -> empty;
peek({Front, _Children}) -> Front.
%% @doc Meld two pairing heaps in O(1): the root holding the smaller
%% item wins and adopts the other heap as its newest child.
-spec merge(Heap1 :: heap(Item1), Heap2 :: heap(Item2)) -> heap(Item1|Item2).
merge(Heap, empty) -> Heap;
merge(empty, Heap) -> Heap;
merge({A, AChildren} = HeapA, {B, BChildren} = HeapB) ->
    if
        A < B -> {A, [HeapB | AChildren]};
        true  -> {B, [HeapA | BChildren]}
    end.
%%----------------------------------------------------------------------------------------------------------------------
%% Internal Functions
%%----------------------------------------------------------------------------------------------------------------------
-spec merge_pairs([heap(Item)]) -> heap(Item).
%% Two-pass pairing-heap melding of the child list after a pop.
%% (The original final line had dataset metadata fused onto it, making
%% it invalid Erlang; restored here.)
merge_pairs([]) -> empty;
merge_pairs([H]) -> H;
merge_pairs([H1, H2 | Hs]) -> merge(merge(H1, H2), merge_pairs(Hs)).
%% @author <NAME> <<EMAIL>>
%% @copyright 2021 <NAME>
%% @doc Parse HTML, adds ids to all header elements and returns a toc menu referring to those ids.
%% Copyright 2021 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(filter_toc).
-export([
toc/2,
test/0
]).
-include_lib("zotonic_core/include/zotonic.hrl").
%% Entry point: returns {TocTree, Html} where Html has every header
%% section wrapped in a <div id="toc..."> and TocTree is the nested
%% menu. `undefined' input yields an empty toc.
toc(undefined, _Context) ->
    {[], <<>>};
%% Translatable text: resolve to the binary for the current language.
toc(#trans{} = Tr, Context) ->
    toc(z_trans:lookup_fallback(Tr, Context), Context);
toc(B, _Context) when is_binary(B) ->
    {Toc, Html1} = parse_toc(B, $1, [], [], <<>>),
    {nested(lists:reverse(Toc)), <<"<div>", Html1/binary, "</div>">>};
%% Anything else is coerced to a binary and retried.
toc(V, Context) ->
    toc(V, Context) = toc(z_convert:to_binary(V), Context).
%% Drop N entries from a section-counter path; an exhausted path stays [].
pop(_, []) -> [];
pop(0, Path) -> Path;
pop(N, [_ | Rest]) -> pop(N - 1, Rest).

%% Open N fresh nesting levels, each starting its counter at 1.
push(0, Path) -> Path;
push(N, Path) -> push(N - 1, [1 | Path]).
%% Build the DOM id <<"tocA-B-...">> from a reversed section-counter
%% path, e.g. prefix_id([2, 1]) -> <<"toc1-2">>.
prefix_id(Path) ->
    Counters = [integer_to_binary(N) || N <- lists:reverse(Path)],
    iolist_to_binary([<<"toc">>, lists:join($-, Counters)]).
%% Turn the flat list of {Path, Text} entries (in document order) into
%% a nested {Id, Text, Children} tree.
nested(Toc) ->
    {Nested, _} = nested(Toc, 1, []),
    Nested.

%% Group entries by path length: an entry whose path is exactly Depth
%% long becomes a node at this level; a longer path means a header
%% level was skipped, producing a placeholder {undefined, <<>>, Sub}
%% node; a shorter path closes this level and is handed back up.
nested([], _Depth, Acc) ->
    {lists:reverse(Acc), []};
nested([ {T, Text} | Ts ] = TTs, Depth, Acc) ->
    TLen = length(T),
    if
        TLen =:= Depth ->
            {Sub, Ts1} = nested(Ts, Depth+1, []),
            nested(Ts1, Depth, [ {prefix_id(T), Text, Sub} | Acc ]);
        TLen > Depth ->
            {Sub, Ts1} = nested(TTs, Depth+1, []),
            nested(Ts1, Depth, [ {undefined, <<>>, Sub} | Acc ]);
        TLen < Depth ->
            {lists:reverse(Acc), TTs}
    end.
%% Text of a header: everything up to the closing </h...> tag, with
%% HTML stripped and surrounding whitespace trimmed.
header_text(B) ->
    z_string:trim(z_html:strip(header_text(B, <<>>))).

%% Accumulate bytes until the closing header tag (or end of input).
header_text(<<>>, Acc) ->
    Acc;
header_text(<<"</h", _/binary>>, Acc) ->
    Acc;
header_text(<<C/utf8, Rest/binary>>, Acc) ->
    header_text(Rest, <<Acc/binary, C/utf8>>).
%% Ad-hoc smoke test: run toc/2 over a small document with skipped
%% header levels (the context argument is unused for binary input).
%% (The original final line had dataset metadata fused onto it, making
%% it invalid Erlang; restored here.)
test() ->
    Html = <<"
<h2>1.</h2
<h3>1.1</h3>
<h4>1.1.1</h4>
<h3>1.2</h3>
<h4>1.2.1</h4>
<h4>1.2.2</h4>
<h2>2.</h2>
<h5>2.1.1.1</h5>
">>,
    toc(Html, x).
-module(week1).
-export([perimeter/1, area/1, enclose/1, bitSum/1, bitSumR/1]).
%Shapes
% Perimeter of simple shapes (square, rhombus, rectangle, circle).
% Each shape carries a position {X, Y}, which never affects the result.
perimeter({ square, {_X, _Y}, W }) ->
    W * 4;
perimeter({ rhombus, {_X, _Y}, W} ) ->
    W * 4;
perimeter({ rectangle, {_X, _Y}, W, H }) ->
    W * 2 + H * 2;
perimeter({ circle, {_X, _Y}, R }) ->
    2 * math:pi() * R;
% A triangle is {triangle, [{vertex, {X1, Y1}}, {vertex, {X2, Y2}}, {vertex, {X3, Y3}}]}:
% an atom plus a list of three vertices. Its perimeter is the sum of
% the three edge lengths.
perimeter({ triangle,[{ vertex, {X1, Y1} }, { vertex, {X2, Y2} }, { vertex, {X3, Y3} }] }) ->
    Edge1 = distance(X1, Y1, X2, Y2),
    Edge2 = distance(X1, Y1, X3, Y3),
    Edge3 = distance(X2, Y2, X3, Y3),
    Edge1 + Edge2 + Edge3;
% The triangle structure generalises to any polygon:
% {shape, [{vertex, {X1, Y1}}, ..., {vertex, {XN, YN}}]}
% Note: the vertices are assumed to be ordered contiguously around the
% outline; iterateVertex/2 closes the last edge back to the first vertex.
perimeter({shape, [H|Q]}) ->
    iterateVertex(H, [H|Q]).
% Euclidean distance between the points (X1, Y1) and (X2, Y2).
distance(X1, Y1, X2, Y2) ->
    Dx2 = math:pow(X1 - X2, 2),
    Dy2 = math:pow(Y1 - Y2, 2),
    math:sqrt(Dx2 + Dy2).
% iterateVertex(FirstVertex, [Vertices]): sum the edge lengths walking
% the vertex list; when one vertex remains, close the polygon with the
% edge back to the first vertex (H).
iterateVertex({ vertex, {X1, Y1} } , [{ vertex, {X2, Y2} }]) ->
    distance(X1, Y1, X2, Y2);
iterateVertex(H, L) ->
    [{ vertex, {X1, Y1} }, { vertex, {X2, Y2} } | Q] = L,
    distance(X1, Y1, X2, Y2) + iterateVertex(H, [{ vertex, {X2, Y2} } | Q]).
% Examples of perimeter for different shapes
%ex:perimeter({square,{5,5}, 25 }).
% -> 100
%ex:perimeter({rectangle,{5,5}, 25, 5 }).
% -> 60
%ex:perimeter({rhombus,{5,5}, 25 }).
% -> 100
%ex:perimeter({circle,{5,5}, 1 }).
% -> 6.283185307179586
%ex:perimeter({ triangle,[{ vertex, {0, 0} }, { vertex, {0, 5} }, { vertex, {5, 0} }] }).
% -> 17.071067811865476
%ex:perimeter({ shape,[{ vertex, {0, 0} }, { vertex, {0, 5} }, { vertex, {5, 0} }] }).
% -> 17.071067811865476
%ex:perimeter({ shape,[{ vertex, {0, 0} }, { vertex, {0, 5} }, { vertex, {5, 5} } ,{ vertex, {5, 0} }] }).
% -> 20
% Area of the supported shapes; the position {X, Y} never matters.
% NOTE: the representation has no angle, so a rhombus of side W is
% treated like a square (upper bound on its true area).
area({ square, {_X, _Y}, W }) ->
    W * W;
area({ rhombus, {_X, _Y}, W} ) ->
    W * W;
area({ rectangle, {_X, _Y}, W, H }) ->
    W * H;
area({ circle, {_X, _Y}, R }) ->
    % pi * r^2 — the previous version computed 2*pi*r^2, which is the
    % circumference formula mistakenly applied with an extra R.
    math:pi() * R * R;
area({ triangle,[{ vertex, {X1, Y1} }, { vertex, {X2, Y2} }, { vertex, {X3, Y3} }] }) ->
    % Heron's formula from the three edge lengths.
    Edge1 = distance(X1, Y1, X2, Y2),
    Edge2 = distance(X1, Y1, X3, Y3),
    Edge3 = distance(X2, Y2, X3, Y3),
    % compute the semi-perimeter
    Sp = (Edge1 + Edge2 + Edge3)/ 2,
    math:sqrt(Sp*(Sp - Edge1)*(Sp - Edge2)*(Sp - Edge3)).
% Examples (updated for the corrected circle formula):
% area({square, {5,5}, 5}).        -> 25
% area({rhombus, {5,5}, 5}).       -> 25
% area({rectangle, {5,5}, 5, 3}).  -> 15
% area({circle, {5,5}, 1}).        -> 3.141592653589793
% area({circle, {5,5}, 2}).        -> 12.566370614359172
% area({triangle, [{vertex,{0,0}}, {vertex,{0,5}}, {vertex,{5,0}}]}). -> 12.5
% I could not manage the area function for any shape, because the results depends on sereral parameters.(Is the shape concave or convex ? Is it a regular polygon ? )
% Enclose: smallest axis-aligned bounding rectangle for a shape.
% For simple shapes:
enclose({rectangle, {X, Y}, W, H}) ->
    {rectangle, {X, Y}, W, H};
enclose({square, {X, Y}, W}) ->
    {rectangle, {X, Y}, W, W};
enclose({rhombus, {X, Y}, W}) ->
    % Without an angle a rhombus is treated like a square of side W.
    {rectangle, {X, Y}, W, W};
enclose({circle, {X, Y}, R}) ->
    % A circle of radius R spans 2R in each direction; the previous
    % version returned an R x R rectangle, which is too small (the
    % perimeter clause treats R as the radius: 2*pi*R).
    {rectangle, {X, Y}, 2 * R, 2 * R};
% For a generic vertex-list shape: fold over the vertices tracking the
% bounding box, then centre the resulting rectangle on it.
enclose({_, [{vertex, {X, Y} }|Q]}) ->
    enclose([{vertex, {X, Y} }|Q], X, X, Y, Y).

enclose([ {vertex, {X, Y} }|Q], XMin, XMax, YMin, YMax) ->
    enclose(Q, min(X, XMin), max(X, XMax), min(Y, YMin), max(Y, YMax));
enclose([], XMin, XMax, YMin, YMax) ->
    CenterX = (XMin + XMax)/2,
    CenterY = (YMin + YMax)/2,
    { rectangle, { CenterX, CenterY },
      distance(XMin, 0, XMax, 0), distance(0, YMin, 0, YMax) }.
% Examples (updated for the corrected circle bounding box):
% enclose({shape, [{vertex,{0,0}}, {vertex,{0,5}}, {vertex,{5,5}}, {vertex,{5,0}}]}).
%   -> {rectangle,{2.5,2.5},5.0,5.0}
% enclose({rectangle, {5,5}, 5, 2}). -> {rectangle,{5,5},5,2}
% enclose({square, {5,5}, 5}).       -> {rectangle,{5,5},5,5}
% enclose({circle, {5,5}, 10}).      -> {rectangle,{5,5},20,20}
% Summing the bits
% Population count, accumulator (tail-recursive) version: the running
% total travels in the second argument, so each step is a tail call.
bitSum(N) ->
    bitSum(N, 0).

bitSum(0, Total) -> Total;
bitSum(N, Total) ->
    bitSum(N div 2, Total + N rem 2).
% Population count, direct (body-recursive) version: the addition
% happens after the recursive call returns, so a stack frame is kept
% per remaining bit.
bitSumR(0) -> 0;
bitSumR(N) ->
    N rem 2 + bitSumR(N div 2).
% Which do you think is better? Why?
% The tail-recursive version is more efficient than the direct (body-recursive) one because the accumulator carries the running total, so no intermediate stack frames need to be kept.
%% Licensed under the Apache License, Version 2.0 (the “License”);
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an “AS IS” BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc General balanced binary Merkle trees. Similar to {@link //stdlib/gb_trees}, but with Merkle proofs.
%%
%% Keys and values need to be binaries. Values are stored only in leaf nodes to shorten Merkle proofs.
%%
%% Hashes of leaf nodes are based on concatenation of hashes of key and value. Hashes of inner nodes are based on concatenation of hashes of left and right node.
%%
%% Similarly as in {@link //stdlib/gb_trees}, deletions do not cause trees to rebalance.
%%
%% SHA-256 is used as the default hashing algorithm. You can define the `GB_MERKLE_TREES_HASH_ALGORITHM' macro to use another algorithm. See documentation of {@link //crypto/crypto:hash/2} for available choices.
%%
%% @author <NAME> <<EMAIL>> [http://jurewicz.org.pl]
%%
%% @reference See <a href="http://cglab.ca/~morin/teaching/5408/refs/a99.pdf">Arne Andersson’s “General Balanced Trees” article</a> for insights about the balancing algorithm. The original balance condition has been changed to 2^h(T) ≤ |T|^2.
%% @reference See <a href="https://github.com/tendermint/go-merkle">go-merkle</a> for a similar in purpose library written in Go which uses AVL trees instead of general balanced trees.
%% @see //stdlib/gb_trees
%% @see //crypto/crypto:hash/2
-module(db_merkle_trees).
-export([
balance/1,
delete/2,
empty/0,
enter/3,
foldr/3,
from_list/2,
keys/1,
lookup/2,
merkle_proof/2,
root_hash/1,
size/1,
to_orddict/1,
verify_merkle_proof/4
]).
-ifdef(TEST).
-include_lib("triq/include/triq.hrl").
-include_lib("eunit/include/eunit.hrl").
-endif.
-ifndef(GB_MERKLE_TREES_HASH_ALGORITHM).
-define(GB_MERKLE_TREES_HASH_ALGORITHM, sha256).
-endif.
-define(HASH(X), crypto:hash(?GB_MERKLE_TREES_HASH_ALGORITHM, X)).
%% Trees are balanced using the condition 2^h(T) ≤ |T|^C
-define(C, 2).
-type key() :: binary().
-type value() :: binary().
-type hash() :: binary().
%% We distinguish inner nodes and tree nodes by tuple length instead of using records to save some space.
-type leaf_node() :: {key(), value(), hash()}.
-type inner_node() :: {key(), hash() | to_be_computed,
Left :: inner_node() | leaf_node(),
Right :: inner_node() | leaf_node()}.
-type tree_node() :: leaf_node() | inner_node() | empty.
-type merkle_proof() :: {hash() | merkle_proof(), hash() | merkle_proof()}.
-type dbh() :: fun((atom(),any(),any()) -> any()).
-type db() :: {dbh(), any()}.
-export_type([key/0,
value/0,
hash/0,
merkle_proof/0]).
-spec delete(key(), db()) -> any().
%% @doc Delete `Key' from the tree whose {Size, RootID} pair is stored
%% under the <<"R">> record, writing the updated root back through the
%% DB handler fun. Returns whatever the handler's `put' returns.
delete(Key, {DBH, Acc}) ->
    {{Size,RootID}, Acc1}=DBH(get,<<"R">>,Acc),
    {{S1,NewRootID},Acc2}=delete(Key, {Size, RootID}, {DBH, Acc1}),
    DBH(put,{<<"R">>,{S1,NewRootID}},Acc2).

-spec delete(key(), {integer(), binary()}, db()) -> any().
%% @doc Delete `Key' given the current {Size, RootID} pair; returns the
%% new pair (size decremented) together with the threaded accumulator.
delete(Key, {Size, RootID}, {DBH, Acc}) ->
    {Node,_}=DBH(get,RootID,Acc),
    {NewRootID,Acc1}=delete_1(Key, Node, {DBH, Acc}),
    {{Size - 1, NewRootID}, Acc1}.
-spec delete_1(key(), tree_node(), db()) -> any().
%% @doc Delete `Key' from the subtree rooted at the given node; returns
%% {NewSubtreeRootID | empty, Acc}. When a child disappears, the inner
%% node collapses into its remaining sibling (no rebalancing on delete,
%% in line with the module doc). Hashes along the path are recomputed.
%% NOTE(review): the left branch issues `DBH(del, Key, Acc1)' on the raw
%% key while leaves are stored under <<"L", Key>> — confirm intended.
delete_1(Key, {Key, _, _}, {DBH, Acc}) ->
    %% Leaf with the matching key: remove it; the caller collapses the
    %% parent inner node onto the sibling.
    Acc1=DBH(del,<<"L",Key/binary>>,Acc),
    {empty, Acc1};
delete_1(Key, {InnerKey, _, LeftNodeID, RightNodeID}, {DBH, Acc0}) ->
    %% The inner node on the path is always rewritten (or dropped), so
    %% delete its stored form up front.
    Acc=DBH(del,<<"N",InnerKey/binary>>,Acc0),
    case Key < InnerKey of
        true ->
            {LeftNode,_}=DBH(get,LeftNodeID,Acc),
            case delete_1(Key, LeftNode, {DBH, Acc}) of
                {empty,Acc1} ->
                    %% Left child vanished: the right child replaces
                    %% this inner node entirely.
                    {RightNodeID,Acc1};
                {NewLeftNodeID,Acc1} ->
                    NewNID= <<"N",InnerKey/binary>>,
                    LeftH=nodeid_hash(NewLeftNodeID, {DBH, Acc1}),
                    RightH=nodeid_hash(RightNodeID, {DBH, Acc1}),
                    IH=inner_hash(LeftH,
                                  RightH,
                                  {DBH, Acc1}
                                 ),
                    NewInnerNode= {InnerKey, IH ,
                                   NewLeftNodeID, RightNodeID},
                    Acc2=DBH(put, {NewNID, NewInnerNode}, DBH(del, Key, Acc1)),
                    {NewNID,Acc2}
            end;
        _ ->
            {RightNode,_}=DBH(get,RightNodeID,Acc),
            case delete_1(Key, RightNode, {DBH, Acc}) of
                {empty,Acc1} ->
                    %% Right child vanished: keep only the left child.
                    {LeftNodeID,Acc1};
                {NewRightNodeID, Acc1} ->
                    NewNID= <<"N",InnerKey/binary>>,
                    IH=inner_hash(nodeid_hash(LeftNodeID, {DBH, Acc1}),
                                  nodeid_hash(NewRightNodeID, {DBH, Acc1}),
                                  {DBH, Acc1}
                                 ),
                    NewInnerNode= {InnerKey,
                                   IH,
                                   LeftNodeID, NewRightNodeID},
                    Acc2=DBH(put, {NewNID, NewInnerNode}, Acc1),
                    {NewNID,Acc2}
            end
    end.
-spec empty() -> any().
%% @doc Return an empty tree as a {Size, RootNode} pair.
empty() ->
    {0, empty}.

-spec size(db()) -> non_neg_integer().
%% @doc Return number of elements stored in the tree.
%% NOTE(review): the clause matches the {Size, RootNode} pair produced
%% by empty/0, not the {DBH, Acc} shape the db() spec suggests — the
%% spec and the implementation disagree; confirm which is intended.
size({Size, _}) ->
    Size.
-spec leaf_hash(key(), value()) -> hash().
%% Hash of a leaf: key and value are hashed separately, then the
%% concatenation of the two hashes is hashed (keeps Merkle proofs
%% short, per the module doc).
leaf_hash(Key, Value) ->
    KeyHash = ?HASH(Key),
    ValueHash = ?HASH(Value),
    ?HASH(<<KeyHash/binary, ValueHash/binary>>).

-spec inner_hash(hash(), hash(), any()) -> hash().
%% Hash of an inner node: hash of the concatenated child hashes.
%% The third (DB) argument is accepted but unused — presumably kept so
%% the call shape matches the other node helpers; TODO confirm.
inner_hash(LeftHash, RightHash, _) ->
    ?HASH(<<LeftHash/binary, RightHash/binary>>).
-spec root_hash(any()) -> hash() | undefined.
%% @doc Return the hash of the root node, resolved via the stored
%% <<"R">> root record.
root_hash({DBH, DBAcc}) ->
    {{_Size,RootID},_Acc1}=DBH(get,<<"R">>,DBAcc),
    nodeid_hash(RootID, {DBH,DBAcc}).
-spec merkle_proof(key(), any()) -> merkle_proof().
%% @doc For a given key return a proof that, along with its value, it
%% is contained in the tree: a nested tuple of hashes mirroring the
%% path from the root to the key's leaf. The root hash itself is not
%% included in the proof.
%% (Three lines of commented-out code pasted from enter/3 were removed.)
merkle_proof(Key, {DBH, Acc}) ->
    {{_Size, RootNode},Acc1} = DBH(get,<<"R">>,Acc),
    merkle_proof_node(Key, RootNode, {DBH, Acc1}).
-spec merkle_proof_node(key(), binary(), db()) -> merkle_proof().
%% Walk from a node id down towards `RKey'. Leaf ids carry a <<"L">>
%% prefix, inner node ids a <<"N">> prefix. At the matching leaf the
%% {KeyHash, ValueHash} pair is returned; while descending through an
%% inner node, the untaken side contributes its subtree hash. Reaching
%% a leaf with a different key means the key is absent: throw 'no_key'.
merkle_proof_node(RKey, <<"L",K1/binary>>=Node, {DBH, Acc}) ->
    if(RKey =/= K1) ->
          throw('no_key');
      true ->
          ok
    end,
    {{Key, Value, _},_Acc1} = DBH(get,Node,Acc),
    {?HASH(Key), ?HASH(Value)};
merkle_proof_node(Key, <<"N",_/binary>>=Node, {DBH, Acc}) ->
    {{InnerKey, _, Left, Right},_}=DBH(get,Node,Acc),
    case Key < InnerKey of
        true ->
            {merkle_proof_node(Key, Left, {DBH, Acc}), nodeid_hash(Right, {DBH, Acc})};
        _ ->
            {nodeid_hash(Left, {DBH, Acc}), merkle_proof_node(Key, Right, {DBH, Acc})}
    end.
-spec verify_merkle_proof(key(), value(), Root::hash(), merkle_proof()) ->
ok | {error, Reason} when
Reason :: {key_hash_mismatch, hash()}
| {value_hash_mismatch, hash()}
| {root_hash_mismatch, hash()}.
%% @doc Verify a proof against a leaf and a root node hash.
%% Checks, in order: that the proof's bottom pair carries the hashes
%% of `Key' and `Value', and that folding the proof reproduces
%% `RootHash'. On failure, the mismatching hash found in the proof is
%% returned inside the error reason.
verify_merkle_proof(Key, Value, RootHash, Proof) ->
    {KH, VH} = {?HASH(Key), ?HASH(Value)},
    {PKH, PVH} = bottom_merkle_proof_pair(Proof),
    if
        PKH =/= KH ->
            {error, {key_hash_mismatch, PKH}};
        PVH =/= VH ->
            %% BUG FIX: previously returned the *key* hash (PKH) here;
            %% the mismatching value hash (PVH) is the relevant datum.
            {error, {value_hash_mismatch, PVH}};
        true ->
            %% Leaf hashes check out; now rebuild the root hash from
            %% the proof and compare.
            PRH = merkle_fold(Proof),
            if
                PRH =/= RootHash ->
                    {error, {root_hash_mismatch, PRH}};
                true ->
                    ok
            end
    end.
-spec from_list(list({key(), value()}), dbh()|db()) -> any().
%% @doc Create a tree from a list.
%% This creates a tree by iteratively inserting elements and not necessarily results in a perfect balance, like the one obtained when running {@link from_orddict/1}.
%% When given a bare 3-arity DB handler fun, seeds a map-backed store
%% whose <<"R">> key holds the (initially empty) root record.
from_list(List, DBH) when is_function(DBH,3) ->
from_list(List, {DBH,#{<<"R">>=>empty()}});
from_list([], {_DBH,Acc}) ->
Acc;
%% enter/3 returns the updated DB accumulator, which is re-wrapped
%% with the handler for the next insertion.
from_list([{Key, Value}|Rest], {DBH,Acc}) ->
from_list(Rest, {DBH,enter(Key, Value, {DBH,Acc})}).
-spec to_orddict(db()) -> list({key(), value()}).
%% @doc Convert tree to an orddict.
%% foldr/3 visits keys from highest to lowest, so prepending yields an
%% ascending (orddict-ordered) list.
to_orddict({DBH,Acc}) ->
foldr(
fun({K,V},A) ->
[{K,V}|A]
end,[],{DBH,Acc}).
-spec keys(any()) -> list(key()).
%% @doc Return the keys as an ordered list.
%% Relies on foldr/3 visiting entries from highest to lowest key, so
%% prepending produces an ascending list.
keys({DBH, Acc}) ->
    foldr(fun({Key, _Value}, KeyAcc) -> [Key | KeyAcc] end, [], {DBH, Acc}).
%-spec foldr(fun(({key(), value()}, Acc :: any(), {fun(atom(),any(),any()), any()} ) -> any()), Acc :: any(), tree()) -> Acc :: any().
%% @doc Iterate through keys and values, from those with highest keys to lowest.
%% Reads the root record under <<"R">> and folds over the subtree it
%% points at. The accumulator returned by the `get' is discarded and
%% the original DBAcc reused, as elsewhere in this module.
foldr(Fun, Acc, {DBH, DBAcc}) ->
{{_Size,RootID},_Acc1}=DBH(get,<<"R">>,DBAcc),
foldr_1(Fun, Acc, RootID, {DBH, DBAcc}).
%% Fold right-to-left over the subtree rooted at the given node id.
%% `empty' contributes nothing, a leaf ("L"-prefixed id) applies the
%% fold fun to its {Key, Value} pair, and an inner node ("N"-prefixed
%% id) folds its right child before its left so that entries are
%% visited in descending key order.
foldr_1(_Fun, Acc, empty, _Db) ->
    Acc;
foldr_1(Fun, Acc, <<"L",_/binary>>=LeafID, {DBH, DBAcc}) ->
    {{Key, Value, _Hash}, _} = DBH(get, LeafID, DBAcc),
    Fun({Key, Value}, Acc);
foldr_1(Fun, Acc, <<"N",_/binary>>=NodeID, {DBH, DBAcc}) ->
    {{_Key, _Hash, Left, Right}, _} = DBH(get, NodeID, DBAcc),
    AccAfterRight = foldr_1(Fun, Acc, Right, {DBH, DBAcc}),
    foldr_1(Fun, AccAfterRight, Left, {DBH, DBAcc}).
-spec nodeid_hash(binary() | 'empty', db()) -> hash() | undefined.
%% Fetch the stored hash for a node id: `undefined' for the empty
%% tree, the second element of an inner-node record ("N" prefix) or
%% the third element of a leaf record ("L" prefix).
nodeid_hash(empty, _Db) ->
    undefined;
nodeid_hash(<<"N",_/binary>>=NodeId, {DBH, DbAcc}) ->
    {{_Key, Hash, _Left, _Right}, _} = DBH(get, NodeId, DbAcc),
    Hash;
nodeid_hash(<<"L",_/binary>>=NodeId, {DBH, DbAcc}) ->
    {{_Key, _Value, Hash}, _} = DBH(get, NodeId, DbAcc),
    Hash.
-spec node_hash(tree_node()) -> hash() | undefined.
%% Hash carried by an in-memory node term: `undefined' for the empty
%% tree, third element of a leaf {Key, Value, Hash}, second element of
%% an inner node {Key, Hash, Left, Right}.
node_hash(empty) ->
    undefined;
node_hash({_Key, _Value, LeafHash}) ->
    LeafHash;
node_hash({_Key, InnerHash, _Left, _Right}) ->
    InnerHash.
%% DB key for an in-memory node term: leaf terms get an "L" prefix,
%% inner-node terms an "N" prefix, each followed by the node's key.
node_id({Key, _Value, _Hash}) ->
    <<"L", Key/binary>>;
node_id({Key, _Hash, _Left, _Right}) ->
    <<"N", Key/binary>>.
-spec enter(key(), value(), db()) -> any().
%% @doc Insert or update key and value into tree.
%% Reads the {Size, RootNodeId} record under <<"R">>, descends via
%% enter_1/6, then writes the updated root record back. The match on
%% `undefined, undefined' asserts that no rebalancing is still pending
%% when control returns to the root.
enter(Key, Value, {DBH, Acc}) ->
{{Size, RootNode},Acc1} = DBH(get,<<"R">>,Acc),
{NewRootNode, undefined, undefined, KeyExists, {_,Acc2}} =
enter_1(Key, Value, RootNode, 0, Size, {DBH, Acc1}),
%% Size only grows when a genuinely new key was inserted.
NewSize = case KeyExists of
true -> Size;
_ -> Size + 1
end,
DBH(put,{<<"R">>,{NewSize, NewRootNode}},Acc2).
-spec enter_1(key(), value(), tree_node(), Depth :: non_neg_integer(), TreeSize ::
non_neg_integer(), db()) -> any().
%% Recursive insert helper. Returns
%%   {NodeId, RebalanceCount, Height, KeyExists, {DBH, Acc}}
%% where RebalanceCount/Height are `undefined' when the subtree needs
%% no rebalancing, or the running element count and height of the
%% unbalanced subtree otherwise (see rebalancing_needed/2 and
%% may_be_rebalanced/2).
%% Case 1: empty subtree — create a fresh leaf.
enter_1(Key, Value, empty, _, _, {DBH, Acc}) ->
NewLID= <<"L",Key/binary>>,
NewLeafNode = {Key, Value, leaf_hash(Key, Value)},
Acc1=DBH(put, {NewLID,NewLeafNode}, Acc),
{NewLID, undefined, undefined, false, {DBH, Acc1}};
%% Case 2: descent reached a leaf — either update it in place (same
%% key) or replace it with an inner node holding both leaves.
enter_1(Key, Value, <<"L",ExistingKey/binary>>=ExistingLeafNode, Depth, TreeSize, {DBH, Acc}) ->
NewLID= <<"L",Key/binary>>,
NewLeafNode = {Key, Value, leaf_hash(Key, Value)},
Acc1=DBH(put, {NewLID,NewLeafNode}, Acc),
case Key =:= ExistingKey of
true -> %update leaf
{NewLID, undefined, undefined, true, {DBH, Acc1}};
_ -> %Make node instead of leaf
NewTreeSize = TreeSize + 1,
NewDepth = Depth + 1,
%% The inner key is the larger of the two keys; the smaller
%% key's leaf goes to the left.
{InnerKey, LeftNode, RightNode} =
case Key > ExistingKey of
true ->
{Key, ExistingLeafNode, NewLID};
_ ->
{ExistingKey, NewLID, ExistingLeafNode}
end,
NewNID= <<"N",InnerKey/binary>>,
case rebalancing_needed(NewTreeSize, NewDepth) of
true ->
%% Defer hashing: the subtree will be rebuilt during
%% rebalancing further up; report count 2 / height 1.
NewInnerNode={InnerKey, to_be_computed, LeftNode, RightNode},
Acc2=DBH(put, {NewNID,NewInnerNode}, Acc1),
{NewNID,
2,
1,
false,
{DBH, Acc2}};
_ ->
HashLeft=nodeid_hash(LeftNode, {DBH, Acc1}),
HashRight=nodeid_hash(RightNode, {DBH, Acc1}),
InHash=inner_hash(HashLeft, HashRight, {DBH, Acc1}),
NewInnerNode={InnerKey, InHash, LeftNode, RightNode},
Acc2=DBH(put, {NewNID,NewInnerNode}, Acc1),
{NewNID,
undefined,
undefined,
false,
{DBH, Acc2}}
end
end;
%% Case 3: inner node — recurse into the matching child, then either
%% rehash this node or continue an in-flight rebalancing.
enter_1(Key, Value, <<"N",_/binary>>=InnerNode, Depth, TreeSize, {DBH, Acc}) ->
{{InnerKey, _, LeftNode, RightNode}, Acc1} = DBH(get,InnerNode,Acc),
NodeToFollowSymb =
case Key < InnerKey of
true -> left;
_ -> right
end,
{NodeToFollow, NodeNotChanged} =
case NodeToFollowSymb of
right -> {RightNode, LeftNode};
left -> {LeftNode, RightNode}
end,
{NewNode, RebalancingCount, Height, KeyExists, {_, Acc2}} =
enter_1(Key, Value, NodeToFollow, Depth + 1, TreeSize, {DBH, Acc1}),
{NewLeftNode, NewRightNode} =
case NodeToFollowSymb of
right ->
{LeftNode, NewNode};
_ ->
{NewNode, RightNode}
end,
case RebalancingCount of
undefined ->
%% No rebalancing pending below: refresh this node's hash if
%% a child actually changed.
{NewInnerNode,{_, Acc3}} = update_inner_node(InnerNode, NewLeftNode, NewRightNode,
{DBH, Acc2}),
{NewInnerNode, undefined, undefined, KeyExists, {DBH, Acc3}};
_ ->
%% A deeper node flagged rebalancing: grow the count by the
%% untouched sibling's size and rebalance here once the
%% count/height ratio permits.
NodeSize = node_size(NodeNotChanged, {DBH, Acc2}),
Count = RebalancingCount + NodeSize,
NewHeight = Height + 1,
NewInnerNodeUnbalanced = {InnerKey, to_be_computed, NewLeftNode, NewRightNode},
NewNID= <<"N",InnerKey/binary>>,
Acc4=DBH(put, {NewNID,NewInnerNodeUnbalanced}, DBH(del, InnerNode, Acc2)),
case may_be_rebalanced(Count, NewHeight) of
true ->
{BalancedTopNode, Acc5}=balance_node(NewNID, Count, {DBH, Acc4}),
{node_id(BalancedTopNode),
undefined,
undefined,
KeyExists,
{DBH, Acc5}};
_ ->
{NewNID,
Count,
NewHeight,
KeyExists,
{DBH, Acc4}}
end
end.
-spec rebalancing_needed(TreeSize :: non_neg_integer(), Depth :: non_neg_integer()) -> boolean().
%% True when a node sits deeper than the bound 2^Depth <= TreeSize^?C
%% allows, i.e. the tree has degenerated enough to warrant rebalancing
%% (?C is the weight-balance exponent defined elsewhere in this file).
rebalancing_needed(TreeSize, Depth) ->
math:pow(2, Depth) > math:pow(TreeSize, ?C).
-spec may_be_rebalanced(Count :: non_neg_integer(), Height :: non_neg_integer()) -> boolean().
%% True when a subtree of Count elements and the given Height is the
%% scapegoat: rebuilding it restores the 2^Height <= Count^?C bound.
may_be_rebalanced(Count, Height) ->
math:pow(2, Height) > math:pow(Count, ?C).
-spec node_size(tree_node(), db()) -> non_neg_integer().
%% Number of leaves under a node id: 0 for the empty tree, 1 for a
%% leaf, and the sum of both children for an inner node.
node_size(empty, _Db) ->
    0;
node_size(<<"L",_/binary>>, _Db) ->
    1;
node_size(<<"N",_/binary>>=NodeId, {DBH, Acc}=Db) ->
    {{_Key, _Hash, Left, Right}, _} = DBH(get, NodeId, Acc),
    node_size(Left, Db) + node_size(Right, Db).
-spec balance_orddict(list({key(), value()}), Size :: non_neg_integer(), db()) ->
{tree_node(),any()}.
%% Build a perfectly balanced subtree from an ordered {Key, Value}
%% list of exactly Size elements; the empty remainder is asserted.
balance_orddict(KVOrdDict, Size, {DBH, Acc}) ->
{{Node, []},Acc1} = balance_orddict_1(KVOrdDict, Size, {DBH, Acc}),
{Node,Acc1}.
-spec balance_orddict_1(list({key(), value()}), Size :: non_neg_integer(), db()) ->
{{tree_node(), list({key(), value()})}, any()}.
%% Consume Size entries from the front of OrdDict, returning the
%% built node term and the unconsumed tail. The left half takes the
%% larger share (Size1 >= Size2); the inner key is the first key of
%% the right half, matching the "left subtree strictly smaller"
%% lookup invariant.
balance_orddict_1(OrdDict, Size, {DBH, Acc}) when Size > 1 ->
Size2 = Size div 2,
Size1 = Size - Size2,
BOD1=balance_orddict_1(OrdDict, Size1, {DBH, Acc}),
{{LeftNode, OrdDict1=[{Key, _} | _]}, Acc1} = BOD1,
{{RightNode, OrdDict2}, Acc2} = balance_orddict_1(OrdDict1, Size2, {DBH, Acc1}),
InnerHash=inner_hash(
node_hash(LeftNode),
node_hash(RightNode), {DBH, Acc2}),
InnerNode = {Key, InnerHash, node_id(LeftNode), node_id(RightNode)},
NID = node_id(InnerNode),
%% NOTE(review): only inner nodes are written here; leaf terms built
%% in the Size =:= 1 clause are not `put' — presumably their records
%% already exist in the store from earlier inserts. Confirm.
Acc3=DBH(put, {NID,InnerNode}, Acc2),
{{InnerNode, OrdDict2},Acc3};
balance_orddict_1([{Key, Value} | OrdDict], 1, {_DBH, Acc}) ->
{{{Key, Value, leaf_hash(Key, Value)}, OrdDict}, Acc};
balance_orddict_1(OrdDict, 0, {_DBH, Acc}) ->
{{empty, OrdDict}, Acc}.
%-spec node_to_orddict(tree_node()) -> list({key(), value()}).
%% Collect every {Key, Value} pair under Node into an ascending list.
node_to_orddict(Node, DBH) ->
foldr_1(fun (KV, Acc) -> [KV|Acc] end, [], Node, DBH).
-spec balance_node(binary(), Size :: non_neg_integer(), db()) -> any().
%% Rebuild the subtree rooted at Node (containing Size elements) into
%% a perfectly balanced one; returns {NewTopNodeTerm, NewAcc}.
balance_node(Node, Size, DBH) ->
KVOrdDict = node_to_orddict(Node, DBH),
balance_orddict(KVOrdDict, Size, DBH).
%-spec balance(tree()) -> tree().
%% @doc Perfectly balance a tree.
%% Dumps the whole tree to an ordered list, rebuilds it balanced, and
%% points the root record at the new top node. A size-0 tree is
%% returned unchanged.
balance({DBH, DBAcc}) ->
{{Size, RootNode}, _} = DBH(get,<<"R">>, DBAcc),
if(Size==0) ->
DBAcc;
true ->
{NewRoot,Acc1}=balance_orddict(node_to_orddict(RootNode, {DBH, DBAcc}), Size, {DBH, DBAcc}),
DBH(put,{<<"R">>,{Size,node_id(NewRoot)}},Acc1)
end.
-spec lookup(key(), db()) -> value() | none.
%% @doc Fetch value for key from tree.
%% Operates on in-memory node terms: {Size, RootNode} where nodes are
%% leaf {Key, Value, Hash} or inner {InnerKey, Hash, Left, Right}.
lookup(Key, {_Size, RootNode}) ->
    lookup_1(Key, RootNode).

-spec lookup_1(key(), inner_node() | leaf_node()) -> value() | none.
%% Exact leaf hit.
lookup_1(Key, {Key, Value, _Hash}) ->
    Value;
%% Inner node: keys below InnerKey live in the left subtree.
lookup_1(Key, {InnerKey, _Hash, Left, _Right}) when Key < InnerKey ->
    lookup_1(Key, Left);
lookup_1(Key, {_InnerKey, _Hash, _Left, Right}) when is_tuple(Right); Right =:= empty; is_binary(Right) ->
    lookup_1(Key, Right);
%% Leaf with a different key, or the empty tree.
lookup_1(_Key, _Node) ->
    none.
-spec update_inner_node(binary(), Left::binary(), Right::binary(), db()) -> {binary(),db()}.
%% Refresh an inner node after a child replacement: if the new
%% children hash identically to the old ones the node is returned
%% untouched, otherwise the old record is deleted and a re-hashed one
%% written under the same id.
update_inner_node(<<"N",_/binary>>=Node, NewLeft, NewRight, {DBH, Acc}) ->
{{Key, _, Left, Right}, _} = DBH(get,Node,Acc),
case lists:map(fun (CNode) ->
nodeid_hash(CNode, {DBH,Acc})
end, [Left, Right, NewLeft, NewRight]) of
[LeftHash, RightHash, LeftHash, RightHash] ->
%% Nothing changed, no need to rehash.
{Node,{DBH,Acc}};
[_, _, NewLeftHash, NewRightHash] ->
NewHash=inner_hash(NewLeftHash, NewRightHash, {DBH,Acc}),
NewNID= <<"N",Key/binary>>,
NewInnerNode={Key, NewHash, NewLeft, NewRight},
%% del-then-put under the (identical) "N"++Key id.
Acc3=DBH(put, {NewNID,NewInnerNode}, DBH(del, Node, Acc)),
{NewNID, {DBH,Acc3}}
end.
-spec merkle_fold(merkle_proof()) -> hash().
%% Collapse a proof tree to the root hash it implies: pairs are
%% hashed together bottom-up, bare hashes are returned as-is.
merkle_fold({Left, Right}) ->
LeftHash = merkle_fold(Left),
RightHash = merkle_fold(Right),
?HASH(<<LeftHash/binary, RightHash/binary>>);
merkle_fold(Hash) ->
Hash.
-spec bottom_merkle_proof_pair(merkle_proof()) -> {hash(), hash()}.
%% Descend a proof to its deepest nested pair — the {KeyHash,
%% ValueHash} pair at the proven leaf. At each level exactly one side
%% is a nested pair (the other is a sibling hash binary).
bottom_merkle_proof_pair({Nested, SiblingHash}) when is_tuple(Nested), is_binary(SiblingHash) ->
    bottom_merkle_proof_pair(Nested);
bottom_merkle_proof_pair({_SiblingHash, Nested}) when is_tuple(Nested) ->
    bottom_merkle_proof_pair(Nested);
bottom_merkle_proof_pair(BottomPair) ->
    BottomPair.
-ifdef(TEST).
empty_test_() ->
[?_assertEqual(0, ?MODULE:size(empty()))].
%% Types for Triq.
key() ->
binary().
value() ->
binary().
kv_orddict() ->
?LET(L, list({key(), value()}), orddict:from_list(L)).
tree() ->
%% The validity of data generated by this generator depends on the validity of the `from_list' function.
%% This should not be a problem as long as the `from_list' function itself is tested.
?LET(KVO, list({key(), value()}), from_list(KVO)).
non_empty_tree() ->
?SUCHTHAT(Tree, tree(), element(1, Tree) > 0).
%% Helper functions for Triq.
-spec height(tree()) -> non_neg_integer().
height({_, RootNode}) ->
node_height(RootNode).
-spec node_height(tree_node()) -> non_neg_integer().
node_height(empty) ->
%% Strictly speaking, there is no height for empty tree.
0;
node_height({_, _, _}) ->
0;
node_height({_, _, Left, Right}) ->
1 + max(node_height(Left), node_height(Right)).
-spec shallow_height(tree()) -> non_neg_integer().
shallow_height({_, RootNode}) ->
node_shallow_height(RootNode).
-spec node_shallow_height(tree_node()) -> non_neg_integer().
node_shallow_height(empty) ->
%% Strictly speaking, there is no height for empty tree.
0;
node_shallow_height({_, _, _}) ->
0;
node_shallow_height({_, _, Left, Right}) ->
1 + min(node_shallow_height(Left), node_shallow_height(Right)).
-spec is_perfectly_balanced(tree()) -> boolean().
is_perfectly_balanced(Tree) ->
height(Tree) - shallow_height(Tree) =< 1.
-spec fun_idempotent(F :: fun((X) -> X), X) -> boolean().
%% @doc Return true if F(X) =:= X.
fun_idempotent(F, X) ->
F(X) =:= X.
prop_lookup_does_not_fetch_deleted_key() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
none =:= lookup(Key, delete(Key, enter(Key, Value, Tree)))).
prop_deletion_decreases_size_by_1() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
?MODULE:size(enter(Key, Value, Tree)) - 1 =:= ?MODULE:size(delete(Key, enter(Key, Value, Tree)))).
prop_merkle_proofs_fold_to_root_hash() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
root_hash(enter(Key, Value, Tree)) =:= merkle_fold(merkle_proof(Key, enter(Key, Value, Tree)))).
prop_merkle_proofs_contain_kv_hashes_at_the_bottom() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
bottom_merkle_proof_pair(merkle_proof(Key, enter(Key, Value, Tree))) =:= {?HASH(Key), ?HASH(Value)}).
prop_merkle_proofs_can_be_verified() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
ok =:= verify_merkle_proof(Key, Value, root_hash(enter(Key, Value, Tree)), merkle_proof(Key, enter(Key, Value, Tree)))).
prop_merkle_proofs_verification_reports_mismatch_for_wrong_key() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
case verify_merkle_proof(<<"X", Key/binary>>, Value, root_hash(enter(Key, Value, Tree)), merkle_proof(Key, enter(Key, Value, Tree))) of
{error, {key_hash_mismatch, H}} when is_binary(H) ->
true;
_ ->
false
end).
prop_merkle_proofs_verification_reports_mismatch_for_wrong_value() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
case verify_merkle_proof(Key, <<"X", Value/binary>>, root_hash(enter(Key, Value, Tree)), merkle_proof(Key, enter(Key, Value, Tree))) of
{error, {value_hash_mismatch, H}} when is_binary(H) ->
true;
_ ->
false
end).
prop_merkle_proofs_verification_reports_mismatch_for_wrong_root_hash() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
case verify_merkle_proof(Key, Value, begin RH = root_hash(enter(Key, Value, Tree)), <<"X", RH/binary>> end, merkle_proof(Key, enter(Key, Value, Tree))) of
{error, {root_hash_mismatch, H}} when is_binary(H) ->
true;
_ ->
false
end).
prop_from_list_size() ->
?FORALL(KVList, list({key(), value()}),
length(proplists:get_keys(KVList)) =:= ?MODULE:size(from_list(KVList))).
prop_from_orddict_size() ->
?FORALL(KVO, kv_orddict(),
length(KVO) =:= ?MODULE:size(from_list(KVO))).
prop_orddict_conversion_idempotence() ->
?FORALL(KVO, kv_orddict(), KVO =:= to_orddict(from_orddict(KVO))).
prop_from_orddict_returns_a_perfectly_balanced_tree() ->
?FORALL(KVO, kv_orddict(), is_perfectly_balanced(from_orddict(KVO))).
prop_keys() ->
?FORALL(Tree, tree(), keys(Tree) =:= [Key || {Key, _} <- to_orddict(Tree)]).
from_list_sometimes_doesnt_return_a_perfectly_balanced_tree_test() ->
?assertNotEqual(
true,
triq:counterexample(
?FORALL(
KVList,
list({key(), value()}),
is_perfectly_balanced(from_list(KVList))))).
prop_foldr_iterates_on_proper_ordering_and_contains_no_duplicates() ->
?FORALL(Tree, tree(),
fun_idempotent(
fun lists:usort/1,
foldr(
fun({Key, _}, Acc) -> [Key|Acc] end,
[],
Tree)
)).
prop_enter_is_idempotent() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
fun_idempotent(
fun (Tree_) -> enter(Key, Value, Tree_) end,
enter(Key, Value, Tree))).
prop_entered_value_can_be_retrieved() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
Value =:= lookup(Key, enter(Key, Value, Tree))).
prop_entered_value_can_be_retrieved_after_balancing() ->
?FORALL({Tree, Key, Value},
{tree(), key(), value()},
Value =:= lookup(Key, balance(enter(Key, Value, Tree)))).
prop_height_constrained() ->
?FORALL(Tree, non_empty_tree(), math:pow(2, height(Tree)) =< math:pow(?MODULE:size(Tree), ?C)).
prop_balancing_yields_same_orddict() ->
?FORALL(Tree, tree(), to_orddict(Tree) =:= to_orddict(balance(Tree))).
prop_entering_key_second_time_does_not_increase_size() ->
?FORALL({Tree, Key, Value1, Value2},
{tree(), key(), value(), value()},
?MODULE:size(enter(Key, Value1, Tree)) =:= ?MODULE:size(enter(Key, Value2, enter(Key, Value1, Tree)))).
prop_tree_after_explicit_balancing_is_perfectly_balanced() ->
?FORALL(Tree, tree(), is_perfectly_balanced(balance(Tree))).
-endif.
%% -------------------------------------------------------------------
%%
%% riak_kv_coverage_filter: Construct coverage filter functions.
%%
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc This module is used to construct a property list of VNode
%% indexes and functions to filter results from a coverage
%% operation. This may include filtering based on the particular
%% VNode or filtering on each item in the result list from any
%% VNode.
-module(riak_kv_coverage_filter).
%% API
-export([build_filter/1, build_filter/3]).
-export_type([filter/0]).
-type bucket() :: binary().
-type filter() :: none | fun((any()) -> boolean()) | [{atom(), atom(), [any()]}].
-type index() :: non_neg_integer().
%% ===================================================================
%% Public API
%% ===================================================================
%% @doc Build the list of filter functions for any required VNode indexes.
%%
%% The ItemFilterInput parameter can be the atom `none' to indicate
%% no filtering based on the request items, a function that returns
%% a boolean indicating whether or not the item should be included
%% in the final results, or a list of tuples of the form
%% {Module, Function, Args}. The latter is the form used by
%% MapReduce filters such as those in the {@link riak_kv_mapred_filters}
%% module. The list of tuples is composed into a function that is
%% used to determine if an item should be included in the final
%% result set.
-spec build_filter(filter()) -> filter().
%% Arity-1 variant: item filtering only, no VNode filtering.
build_filter(Filter) ->
build_item_filter(Filter).
-spec build_filter(bucket(), filter(), [index()]) -> filter().
%% Build the combined item/VNode filter for a coverage request. Four
%% cases, depending on which of the two filter inputs is present.
build_filter(Bucket, ItemFilterInput, FilterVNode) ->
    ItemFilter = build_item_filter(ItemFilterInput),
    case {ItemFilter, FilterVNode} of
        {none, undefined} ->
            %% No filtering at all.
            none;
        {_, undefined} ->
            %% Only key (item) filtering required.
            ItemFilter;
        {none, _} ->
            %% Only VNode filtering required.
            {ok, CHBin} = riak_core_ring_manager:get_chash_bin(),
            PrefListFun = build_preflist_fun(Bucket, CHBin),
            compose_filter(FilterVNode, PrefListFun);
        {_, _} ->
            %% Both key and VNode filtering.
            {ok, CHBin} = riak_core_ring_manager:get_chash_bin(),
            PrefListFun = build_preflist_fun(Bucket, CHBin),
            compose_filter(FilterVNode, PrefListFun, ItemFilter)
    end.
%% ====================================================================
%% Internal functions
%% ====================================================================
%% @private
%% Build a VNode-index filter (arity 2), or combine one with an item
%% filter (arity 3): the composed predicate accepts an item only when
%% both the item filter and the VNode filter accept it.
compose_filter(KeySpaceIndexes, PrefListFun) ->
VNodeFilter = build_vnode_filter(KeySpaceIndexes, PrefListFun),
VNodeFilter.
compose_filter(undefined, _, ItemFilter) ->
ItemFilter;
compose_filter(KeySpaceIndexes, PrefListFun, ItemFilter) ->
VNodeFilter = build_vnode_filter(KeySpaceIndexes, PrefListFun),
fun(Item) ->
ItemFilter(Item) andalso VNodeFilter(Item)
end.
%% @private
%% Return a predicate that accepts an item iff the preference-list
%% index computed for it belongs to the covered key-space indexes.
build_vnode_filter(KeySpaceIndexes, PrefListFun) ->
    fun(Item) ->
            lists:member(PrefListFun(Item), KeySpaceIndexes)
    end.
%% @private
%% Normalise the item-filter input: `none' and ready-made funs pass
%% through; a list of {Module, Function, Args} MapReduce filter specs
%% is composed into a single predicate.
build_item_filter(none) ->
none;
build_item_filter(FilterInput) when is_function(FilterInput) ->
FilterInput;
build_item_filter(FilterInput) ->
%% FilterInput is a list of {Module, Fun, Args} tuples
compose(FilterInput).
%% @private
%% Return a fun mapping a coverage result item to the ring index
%% responsible for its key. Three item shapes are handled; the
%% {o, Key, _Value} form is, per the inline note, the $ index
%% return_body shape, and {_Value, Key} presumably comes from 2i
%% results — confirm against the coverage FSM callers.
build_preflist_fun(Bucket, CHBin) ->
fun({o, Key, _Value}) -> %% $ index return_body
ChashKey = riak_core_util:chash_key({Bucket, Key}),
chashbin:responsible_index(ChashKey, CHBin);
({_Value, Key}) ->
ChashKey = riak_core_util:chash_key({Bucket, Key}),
chashbin:responsible_index(ChashKey, CHBin);
(Key) ->
ChashKey = riak_core_util:chash_key({Bucket, Key}),
chashbin:responsible_index(ChashKey, CHBin)
end.
compose([]) ->
none;
compose(Filters) ->
compose(Filters, []).
compose([], RevFilterFuns) ->
FilterFuns = lists:reverse(RevFilterFuns),
fun(Val) ->
true =:= lists:foldl(fun(Fun, Acc) -> Fun(Acc) end,
Val,
FilterFuns)
end;
compose([Filter | RestFilters], FilterFuns) ->
{FilterMod, FilterFun, Args} = Filter,
Fun = FilterMod:FilterFun(Args),
compose(RestFilters, [Fun | FilterFuns]).
%%%------------------------------------------------------------------------
%% Copyright 2019, OpenTelemetry Authors
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% Functional interface for span_ctx and span records.
%% @end
%%%-------------------------------------------------------------------------
-module(otel_span_utils).
-export([start_span/3,
end_span/1]).
-include_lib("opentelemetry_api/include/opentelemetry.hrl").
-include("otel_sampler.hrl").
-include("otel_span.hrl").
%% sampling bit is the first bit in 8-bit trace options
-define(IS_ENABLED(X), (X band 1) =/= 0).
-spec start_span(opentelemetry:span_name(), opentelemetry:span_ctx() | undefined, otel_span:start_opts())
-> {opentelemetry:span_ctx(), opentelemetry:span() | undefined}.
%% @doc Create a new span (and its context) under `Parent'.
%% Recognised options: `attributes' (default []), `links' (default
%% []), `kind' (default ?SPAN_KIND_INTERNAL), `sampler' (required —
%% crashes with badkey if absent) and `start_time' (default: the
%% current timestamp).
start_span(Name, Parent, Opts) ->
    Attributes = maps:get(attributes, Opts, []),
    Links = maps:get(links, Opts, []),
    Kind = maps:get(kind, Opts, ?SPAN_KIND_INTERNAL),
    Sampler = maps:get(sampler, Opts),
    %% Only read the clock when the caller did not supply a start
    %% time: maps:get/3 would evaluate the opentelemetry:timestamp()
    %% default eagerly even when start_time is present.
    StartTime = case maps:find(start_time, Opts) of
                    {ok, Time} -> Time;
                    error -> opentelemetry:timestamp()
                end,
    new_span(Name, Parent, Sampler, StartTime, Kind, Attributes, Links).
%% if parent is undefined create a new trace id
%% A parentless span starts a fresh trace: generate a trace id and
%% recurse with a synthetic, non-sampled parent context.
new_span(Name, undefined, Sampler, StartTime, Kind, Attributes, Links) ->
TraceId = opentelemetry:generate_trace_id(),
Span = #span_ctx{trace_id=TraceId,
trace_flags=0},
new_span(Name, Span, Sampler, StartTime, Kind, Attributes, Links);
%% Build the span under Parent: allocate a span id, consult the
%% sampler (passing `undefined' as parent when the parent context was
%% the synthetic root above), and assemble both the span record and
%% the child context.
new_span(Name, Parent=#span_ctx{trace_id=TraceId,
span_id=ParentSpanId}, Sampler, StartTime, Kind, Attributes, Links) ->
SpanId = opentelemetry:generate_span_id(),
SpanCtx = Parent#span_ctx{span_id=SpanId},
{TraceFlags, IsRecording, SamplerAttributes, TraceState} =
sample(Sampler, TraceId, case Parent of
#span_ctx{span_id=undefined} ->
undefined;
_ ->
Parent
end,
Links, Name, Kind, Attributes),
%% Sampler-provided attributes are appended after caller attributes.
Span = #span{trace_id=TraceId,
span_id=SpanId,
tracestate=TraceState,
start_time=StartTime,
parent_span_id=ParentSpanId,
kind=Kind,
name=Name,
attributes=Attributes++SamplerAttributes,
links=Links,
is_recording=IsRecording},
{SpanCtx#span_ctx{trace_flags=TraceFlags,
is_recording=IsRecording}, Span}.
%%--------------------------------------------------------------------
%% @doc
%% Set the end time for a span if it hasn't been set before.
%% @end
%%--------------------------------------------------------------------
-spec end_span(opentelemetry:span()) -> opentelemetry:span().
%% Only stamps an end time when none is set yet AND the sampled bit of
%% the span's trace_options is on; any other span is returned as-is.
%% NOTE(review): this reads a `trace_options' field while new_span/7
%% above populates `trace_flags' on the context — confirm the #span{}
%% record actually carries trace_options.
end_span(Span=#span{end_time=undefined,
trace_options=TraceOptions}) when ?IS_ENABLED(TraceOptions) ->
EndTime = opentelemetry:timestamp(),
Span#span{end_time=EndTime};
end_span(Span) ->
Span.
%%
sample({Sampler, _Description, Opts}, TraceId, Parent, Links, SpanName, Kind, Attributes) ->
{Decision, NewAttributes, TraceState} = Sampler(TraceId, Parent, Links,
SpanName, Kind, Attributes, Opts),
case Decision of
?NOT_RECORD ->
{0, false, NewAttributes, TraceState};
?RECORD ->
{0, true, NewAttributes, TraceState};
?RECORD_AND_SAMPLED ->
{1, true, NewAttributes, TraceState}
end.
%%--------------------------------------------------------------------
%% Copyright (c) 2012-2016 <NAME> <<EMAIL>>.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc MQTT Topic Trie:
%% [Trie](http://en.wikipedia.org/wiki/Trie)
%% @end
-module(emqttd_trie).
-include("emqttd_trie.hrl").
%% Mnesia Callbacks
-export([mnesia/1]).
-boot_mnesia({mnesia, [boot]}).
-copy_mnesia({mnesia, [copy]}).
%% Trie API
-export([insert/1, match/1, delete/1, lookup/1]).
%%--------------------------------------------------------------------
%% Mnesia Callbacks
%%--------------------------------------------------------------------
%% @doc Create or Replicate trie tables.
-spec(mnesia(boot | copy) -> ok).
mnesia(boot) ->
%% Trie Table
ok = emqttd_mnesia:create_table(trie, [
{ram_copies, [node()]},
{record_name, trie},
{attributes, record_info(fields, trie)}]),
%% Trie Node Table
ok = emqttd_mnesia:create_table(trie_node, [
{ram_copies, [node()]},
{record_name, trie_node},
{attributes, record_info(fields, trie_node)}]);
mnesia(copy) ->
%% Copy Trie Table
ok = emqttd_mnesia:copy_table(trie),
%% Copy Trie Node Table
ok = emqttd_mnesia:copy_table(trie_node).
%%--------------------------------------------------------------------
%% Trie API
%%--------------------------------------------------------------------
%% @doc Insert topic to trie
%% Must run inside an mnesia transaction (uses mnesia:read/1 and
%% mnesia:write/1). Idempotent: re-inserting an existing topic is a
%% no-op; inserting into a pure path node marks it as a topic node;
%% otherwise the full edge path plus a terminal topic node is created.
-spec(insert(Topic :: binary()) -> ok).
insert(Topic) when is_binary(Topic) ->
case mnesia:read(trie_node, Topic) of
[#trie_node{topic=Topic}] ->
ok;
[TrieNode=#trie_node{topic=undefined}] ->
mnesia:write(TrieNode#trie_node{topic=Topic});
[] ->
%add trie path
lists:foreach(fun add_path/1, emqttd_topic:triples(Topic)),
%add last node
mnesia:write(#trie_node{node_id=Topic, topic=Topic})
end.
%% @doc Find trie nodes that match topic
%% Walks the trie from the root over the topic's word list (honouring
%% '+'/'#' wildcard edges) and returns the topics of all matching
%% nodes, skipping pure path nodes (topic =:= undefined).
-spec(match(Topic :: binary()) -> list(MatchedTopic :: binary())).
match(Topic) when is_binary(Topic) ->
    MatchedNodes = match_node(root, emqttd_topic:words(Topic)),
    [Matched || #trie_node{topic = Matched} <- MatchedNodes,
                Matched =/= undefined].
%% @doc Lookup a Trie Node
%% Thin wrapper over mnesia:read/1; must run in a transaction
%% context. Returns [] when the node does not exist.
-spec(lookup(NodeId :: binary()) -> [#trie_node{}]).
lookup(NodeId) ->
mnesia:read(trie_node, NodeId).
%% @doc Delete topic from trie
%% Must run inside an mnesia transaction. A leaf topic node (no
%% outgoing edges) is removed together with its now-unused path; a
%% topic node that still has children is merely demoted to a pure
%% path node; deleting an unknown topic is a no-op.
-spec(delete(Topic :: binary()) -> ok).
delete(Topic) when is_binary(Topic) ->
case mnesia:read(trie_node, Topic) of
[#trie_node{edge_count=0}] ->
mnesia:delete({trie_node, Topic}),
delete_path(lists:reverse(emqttd_topic:triples(Topic)));
[TrieNode] ->
mnesia:write(TrieNode#trie_node{topic = undefined});
[] ->
ok
end.
%%--------------------------------------------------------------------
%% Internal Functions
%%--------------------------------------------------------------------
%% @private
%% @doc Add path to trie tree.
%% Insert one {Node, Word, Child} triple: create the edge Node--Word-->
%% Child if it is missing, bumping the parent's edge_count (creating
%% the parent node record itself when absent). Existing edges are
%% left untouched; mnesia:wread/1 takes a write lock so the count
%% update is race-free within the transaction.
add_path({Node, Word, Child}) ->
Edge = #trie_edge{node_id=Node, word=Word},
case mnesia:read(trie_node, Node) of
[TrieNode = #trie_node{edge_count=Count}] ->
case mnesia:wread({trie, Edge}) of
[] ->
mnesia:write(TrieNode#trie_node{edge_count=Count+1}),
mnesia:write(#trie{edge=Edge, node_id=Child});
[_] ->
ok
end;
[] ->
mnesia:write(#trie_node{node_id=Node, edge_count=1}),
mnesia:write(#trie{edge=Edge, node_id=Child})
end.
%% @private
%% @doc Match node with word or '+'.
%% Topics under "$SYS" are matched from the dedicated "$SYS" subtree
%% so that wildcard subscriptions at the root do not match them.
match_node(root, [<<"$SYS">>|Words]) ->
match_node(<<"$SYS">>, Words, []);
match_node(NodeId, Words) ->
match_node(NodeId, Words, []).
%% All words consumed: the current node (if it exists) matches, plus
%% any '#' child, which matches the empty remainder.
match_node(NodeId, [], ResAcc) ->
mnesia:read(trie_node, NodeId) ++ 'match_#'(NodeId, ResAcc);
%% Try both the literal word edge and the '+' single-level wildcard
%% edge, and fold '#' matches (which absorb the whole remainder) into
%% the accumulator first.
match_node(NodeId, [W|Words], ResAcc) ->
lists:foldl(fun(WArg, Acc) ->
case mnesia:read(trie, #trie_edge{node_id=NodeId, word=WArg}) of
[#trie{node_id=ChildId}] -> match_node(ChildId, Words, Acc);
[] -> Acc
end
end, 'match_#'(NodeId, ResAcc), [W, '+']).
%% @private
%% @doc Match node with '#'.
%% A '#' child of NodeId matches any remaining words; collect its
%% trie_node record if present.
'match_#'(NodeId, ResAcc) ->
case mnesia:read(trie, #trie_edge{node_id=NodeId, word = '#'}) of
[#trie{node_id=ChildId}] ->
mnesia:read(trie_node, ChildId) ++ ResAcc;
[] ->
ResAcc
end.
%% @private
%% @doc Delete paths from trie tree.
delete_path([]) ->
ok;
delete_path([{NodeId, Word, _} | RestPath]) ->
mnesia:delete({trie, #trie_edge{node_id=NodeId, word=Word}}),
case mnesia:read(trie_node, NodeId) of
[#trie_node{edge_count=1, topic=undefined}] ->
mnesia:delete({trie_node, NodeId}),
delete_path(RestPath);
[TrieNode=#trie_node{edge_count=1, topic=_}] ->
mnesia:write(TrieNode#trie_node{edge_count=0});
[TrieNode=#trie_node{edge_count=C}] ->
mnesia:write(TrieNode#trie_node{edge_count=C-1});
[] ->
throw({notfound, NodeId})
end.
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2021, <NAME>
%%% @doc
%%%
%%% Server that manages the state of a single particle.
%%%
%%% The server maintains an internal clock representing the number of
%%% iterations it has evaluated. In addition, it maintains a copy of
%%% the position and value of all its neighboring particles. The list
%%% is updated by casts from the neighboring particles sent whenever
%%% they have completed an iteration.
%%%
%%% @end
%%% Created : 15 May 2021 by <NAME> <<EMAIL>>
-module(particle_server).
-behaviour(gen_server).
-export([init/1, handle_cast/2, handle_call/3, handle_info/2, code_change/3,
terminate/2]).
-export([eval/2, state/1, state/2, set_neighbors/2, start_link/4, stop/1]).
-record(state, {particle :: particle:particle(),
neighbors = [] :: [pid()],
neighbor_clock = #{} :: #{ pid() => non_neg_integer() },
neighbor_state = #{} :: #{ pid() => particle:state()},
iteration = 1 :: pos_integer(),
stop_iteration = 1 :: pos_integer(),
evaluator = idle :: idle | pid(),
pending_requests = #{} :: #{pos_integer() := [{pid(), term()}]}}).
-type state() :: #state{}.
%% @doc
%% Start a particle server for the particle with the given `Position',
%% `Velocity', and `ObjectiveFun'. To specify the minimum and maximum
%% position pass ``[{bounds, {MinPosition, MaxPosition}}]'' in
%% `Options'.
%% @end
-spec start_link(Position :: particle:position(),
Velocity :: particle:velocity(),
ObjectiveFun :: particle:objective(),
Options :: proplists:proplist()) -> {ok, pid()}.
start_link(Position, Velocity, ObjectiveFun, Options) ->
    %% Translate the optional bounds option into the argument list
    %% handed to init/1: five arguments when explicit min/max bounds
    %% are given, three otherwise.
    InitArgs =
        case proplists:get_value(bounds, Options, nil) of
            {MinBound, MaxBound} ->
                [Position, Velocity, ObjectiveFun, MinBound, MaxBound];
            nil ->
                [Position, Velocity, ObjectiveFun]
        end,
    gen_server:start_link(?MODULE, InitArgs, []).
%% @doc
%% Evaluate `Iterations' interations of the PSO algorithm. The
%% iteration number of the final iteration that will be evaluated is
%% returned. (For example, if the server has already evaluated 10
%% iterations, and we request a further 12 iterations, ``{ok, 22}'' is
%% returned.)
%% @end
-spec eval(ParticleServer :: pid(), Iterations :: pos_integer())
-> {ok, FinalIteration :: pos_integer()}
| {already_iterating, FinalIteration :: pos_integer()}.
eval(ParticleServer, Iterations) ->
gen_server:call(ParticleServer, {eval, Iterations}).
%% @doc Get the current state of the particle managed by this server.
-spec state(ParticleServer :: pid()) -> particle:state().
state(ParticleServer) ->
gen_server:call(ParticleServer, get_state).
%% @doc
%% Get the state of the particle after `Iteration' iterations have
%% been completed. The returned state may be from any iteration
%% *after* `Iteration', but not before. If `Iteration' has not been
%% reached this call will block until it has been completed.
%% @end
-spec state(ParticleServer :: pid(), Iteration :: pos_integer())
-> particle:state().
%% Uses an infinite call timeout since the reply is deferred until the
%% server reaches the requested iteration.
state(ParticleServer, Iteration) ->
gen_server:call(ParticleServer, {get_state, Iteration}, infinity).
%% @doc Stop the server.
-spec stop(ParticleServer :: pid()) -> ok.
stop(ParticleServer) ->
gen_server:stop(ParticleServer).
%% @doc Set the particle server's neighbors.
%% Asynchronous: replaces the neighbor set, pruning cached state for
%% neighbors no longer present (see the set_neighbors cast handler).
-spec set_neighbors(ParticleServer :: pid(), Neighbors :: [pid()]) -> ok.
set_neighbors(ParticleServer, Neighbors) ->
gen_server:cast(ParticleServer, {set_neighbors, Neighbors}).
%% @private gen_server init callback: build the particle from the
%% argument list assembled in start_link/4 and wrap it in fresh
%% server state starting at iteration 1.
init(ParticleArgs) ->
{ok,
#state{particle = apply(particle, new, ParticleArgs),
iteration = 1}}.
%% An evaluation run is already in progress: report the iteration at
%% which the current run will stop instead of starting a new one.
handle_call({eval, _}, _From, State = #state{iteration = Iteration,
                                             stop_iteration = StopIteration})
  when Iteration < StopIteration ->
    {reply, {already_iterating, StopIteration}, State};
%% Start evaluating `Iterations' further iterations; reply immediately
%% with the iteration number at which evaluation will stop.
handle_call({eval, Iterations}, _From,
            State = #state{ iteration = Iteration}) ->
    StopIteration = Iteration + Iterations,
    {reply, {ok, StopIteration},
     try_iteration(State#state{stop_iteration = StopIteration})};
%% NOTE: the two {get_state, _} clauses rely on clause order -- the
%% guarded one must come first.
handle_call({get_state, AtIteration}, _From,
            State = #state{ iteration = Iteration,
                            particle = Particle })
  when Iteration >= AtIteration ->
    %% Already at or past the requested iteration, return immediately.
    {reply, particle:state(Particle), State};
%% Requested iteration not reached yet: park the caller; it is replied
%% to from handle_info/2 once that iteration completes.
handle_call({get_state, AtIteration}, From,
            State = #state{ pending_requests = PendingRequests}) ->
    {noreply,
     State#state{
       pending_requests = add_pending_request(AtIteration, From, PendingRequests)}};
%% Immediate snapshot of the particle's current state.
handle_call(get_state, _From, State = #state{particle = Particle}) ->
    {reply, particle:state(Particle), State}.
%% Replace the neighbor set. Clocks for previously unknown particles
%% are initialised to 0; clocks of already-known neighbors are kept.
%% The state of a new neighbor is deliberately left unspecified, so an
%% error is raised if it is read before that neighbor has reported a
%% state (see try_iteration/1 and the report_state clause below).
handle_cast({set_neighbors, Neighbors},
            State = #state{ neighbor_clock = Clocks,
                            neighbor_state = NeighborState }) ->
    FreshClocks = maps:from_list([{N, 0} || N <- Neighbors]),
    %% BUGFIX: maps:merge/2 lets the *second* map win on key clashes,
    %% so the existing Clocks must be the second argument. Previously
    %% maps:merge(Clocks, FreshClocks) reset every known neighbor's
    %% clock to 0 on each set_neighbors call, contradicting the
    %% "only add clocks for previously unknown particles" intent.
    NewState = State#state{
                 neighbors = Neighbors,
                 %% Discard any information about neighbors not in `Neighbors'
                 neighbor_clock = maps:with(Neighbors, maps:merge(FreshClocks, Clocks)),
                 neighbor_state = maps:with(Neighbors, NeighborState)},
    report_state_to_neighbors(NewState),
    {noreply, try_iteration(NewState)};
%% Record a neighbor's reported state, but only if the report is newer
%% than anything previously seen from that neighbor (the per-neighbor
%% clock is monotonically non-decreasing).
handle_cast({report_state, Neighbor, ReportedState, Iteration},
            State = #state{ neighbor_clock = Clocks,
                            neighbor_state = NeighborStates}) ->
    NewClocks = maps:update_with(
                  Neighbor,
                  fun (I) when I < Iteration -> Iteration;
                      (I) when I >= Iteration -> I
                  end,
                  Iteration,
                  Clocks),
    Updated = maps:get(Neighbor, NewClocks),
    NewNeighborStates = maps:update_with(
                          Neighbor,
                          %% Replace the stored state only when the clock
                          %% actually advanced to this report's iteration.
                          fun(_) when Updated =:= Iteration -> ReportedState;
                             (S) -> S
                          end,
                          ReportedState,
                          NeighborStates),
    NewState = State#state{ neighbor_clock = NewClocks,
                            neighbor_state = NewNeighborStates },
    {noreply, try_iteration(NewState)}.
%% Evaluator finished evaluating the next iteration. Increment the
%% current iteration and replace the particle state with the new
%% particle state, then serve any state/2 callers that were waiting
%% for the newly completed iteration.
handle_info({evaluation_result, Evaluator, NewParticle},
            State = #state{ evaluator = Evaluator,
                            iteration = Iteration,
                            pending_requests = PendingRequests }) ->
    NewState0 = State#state{ particle = NewParticle,
                             evaluator = idle,
                             iteration = Iteration + 1 },
    report_state_to_neighbors(NewState0),
    %% BUGFIX: the updated pending-request map (with the served entries
    %% removed) was previously computed and discarded, so completed
    %% entries leaked in the server state forever. Store it.
    NewPendingRequests =
        handle_pending_requests(
          Iteration + 1, particle:state(NewParticle), PendingRequests),
    {noreply,
     try_iteration(NewState0#state{pending_requests = NewPendingRequests})}.
%% @private No state migration is needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% @private Make sure a still-running evaluator process does not
%% outlive the server (it is linked, so we unlink before killing it to
%% avoid an exit signal bouncing back).
terminate(_Reason, #state{evaluator = idle}) -> ok;
terminate(Reason, #state{evaluator = Evaluator}) ->
    %% If there is an evaluator running, unlink from it and kill it
    %% with `Reason'.
    erlang:unlink(Evaluator),
    exit(Evaluator, Reason).
%% @doc Report the `Iteration' and state of `Particle' to `ParticleServer'.
%% Asynchronous (cast), so reporting never blocks the caller.
-spec report_state(ParticleServer :: pid(),
                   Particle :: particle:particle(),
                   Iteration :: pos_integer()) -> ok.
report_state(ParticleServer, Particle, Iteration) ->
    gen_server:cast(
      ParticleServer,
      {report_state, self(), particle:state(Particle), Iteration}).
%% @doc Report the particle's state to all of its neighbors.
%% Each neighbor receives an asynchronous report (see report_state/3).
-spec report_state_to_neighbors(State :: state()) -> ok.
report_state_to_neighbors(#state{ neighbors = Neighbors,
                                  particle = Particle,
                                  iteration = Iteration }) ->
    _ = [report_state(Neighbor, Particle, Iteration) || Neighbor <- Neighbors],
    ok.
%% @doc
%% If the state of all neighbors at the current iteration is
%% known, then spawn a new process to evaluate the next iteration of
%% the PSO algorithm. If an evaluator is already running, or no
%% neighbors have been assigned then no iteration is performed.
%% @end
-spec try_iteration(state()) -> state().
%% An evaluator is already running: nothing to do.
try_iteration(State = #state{ evaluator = Evaluator }) when is_pid(Evaluator) ->
    State;
%% No neighbors assigned yet: nothing to iterate against.
try_iteration(State = #state{ neighbors = [] }) ->
    State;
try_iteration(State = #state{ neighbors = Neighbors,
                              neighbor_clock = Clocks,
                              neighbor_state = NeighborState,
                              iteration = CurrentIteration,
                              stop_iteration = StopIteration,
                              particle = Particle,
                              evaluator = idle })
  when StopIteration > CurrentIteration ->
    %% NOTE(review): if stop_iteration can ever be an atom (e.g. an
    %% unset record default), Erlang term order makes atom > integer
    %% and this guard would succeed -- confirm stop_iteration is
    %% always an integer by the time this clause is reached.
    %% Ready only when every neighbor has reported a state at (or
    %% past) the current iteration; maps:get/2 crashes on a neighbor
    %% with no clock entry.
    Ready = lists:all(
              fun(Neighbor) ->
                      maps:get(Neighbor, Clocks) >= CurrentIteration
              end, Neighbors),
    if Ready ->
            Server = self(),
            %% Off-load the step computation to a linked worker; its
            %% result arrives as an {evaluation_result, Pid, Particle}
            %% message handled in handle_info/2.
            State#state{
              evaluator =
                  spawn_link(
                    fun() ->
                            NewParticle =
                                particle:step(
                                  Particle,
                                  maps:values(maps:with(Neighbors, NeighborState))),
                            Server ! {evaluation_result, self(), NewParticle}
                    end)};
       not(Ready) ->
            State
    end;
%% Reached the requested stop iteration: stay idle. (A stop_iteration
%% below the current iteration is not covered and would crash with
%% function_clause.)
try_iteration(State = #state{ iteration = CurrentIteration,
                              stop_iteration = StopIteration})
  when StopIteration =:= CurrentIteration ->
    State.
%% Register a caller waiting for iteration `At'. Requests are kept in
%% a map keyed by iteration number; several callers may wait for the
%% same iteration, so each value is a list of gen_server `From' tuples.
-spec add_pending_request(At :: pos_integer(),
                          Requestor :: {pid(), Tag},
                          PendingRequests)
                         -> PendingRequests
                              when PendingRequests :: #{ pos_integer() := [{pid(), Tag}]}.
add_pending_request(At, Requestor, PendingRequests) ->
    Waiting = maps:get(At, PendingRequests, []),
    PendingRequests#{At => [Requestor | Waiting]}.
%% Reply with `ParticleState' to every caller waiting for iteration
%% `At' and drop that entry from the map. If nobody is waiting for
%% `At' the map is returned unchanged.
-spec handle_pending_requests(At :: pos_integer(),
                              ParticleState :: particle:state(),
                              PendingRequests) -> PendingRequests
                                when PendingRequests :: #{pos_integer() := [{pid(), term()}]}.
handle_pending_requests(At, ParticleState, PendingRequests) ->
    case PendingRequests of
        #{At := Waiting} ->
            _ = [gen_server:reply(Caller, ParticleState) || Caller <- Waiting],
            maps:remove(At, PendingRequests);
        _ ->
            PendingRequests
    end.
%% @author <NAME> <<EMAIL>>
%% @doc Provides helpers to work with files.
%% Any function in this module also accepts a `string'/`binary' where a `file()' is expected.
%% To allow some sort of caching, it will also return a new `file()' which encapsulates
%% metadata. Future reads of an existing attribute in the returned `file()' will return faster.
%% @end
-module(ea_files).
-export([content/2, filename/2, full_path/2, parse_tree/2]).
-export_type([file/0, filedata/0]).
-type file() :: file:name_all()
| filedata().
%% Represents a file in the project.
-opaque filedata() :: #{ name := file:name_all()
, content => binary()
, tree => ktn_code:tree_node()}.
%% Contains a files metadata.
-define(IS_NAME(Name), (is_binary(Name) orelse is_list(Name))).
%% @doc Parses the files content into a representation that we can analyze.
%% The resulting tree is cached in the returned metadata, so repeated
%% calls with that value are cheap.
%% @param Config the project configuration.
%% @param File the filename or metadata.
%% @returns a tuple with the {@link parse_tree()} as its first and the updated
%% {@link filedata()} as the second element.
-spec parse_tree(
        Config :: ea_config:config(),
        File :: file()
       ) -> {ktn_code:tree_node(), filedata()}.
parse_tree(_Config, File = #{tree := Tree}) ->
    %% Cache hit: the tree was already computed for this file.
    {Tree, File};
parse_tree(Config, File) ->
    %% Cache miss: read (and cache) the content, parse it, and store
    %% the tree in the metadata before answering.
    {Content, FileWithContent} = content(Config, File),
    Tree = ktn_code:parse_tree(Content),
    {Tree, put_field(tree, Tree, FileWithContent)}.
%% @doc Reads the files content from disk.
%% The content is cached in the returned metadata; a cached value is
%% returned without touching the disk again.
%% @param Config the project configuration.
%% @param File the filename or metadata.
%% @returns a tuple with a `binary()' representing the files content as read from disk as its first
%% and the updated {@link filedata()} as its second element.
-spec content(
        Config :: ea_config:config(),
        File :: file()
       ) -> {binary(), file()}.
content(_Config, File = #{content := Content}) ->
    %% Cache hit: no disk access needed.
    {Content, File};
content(Config, File) ->
    %% Cache miss: read the file and remember its bytes. A read
    %% failure crashes via the {ok, _} match (let-it-crash).
    {ok, Bytes} = file:read_file(full_path(Config, File)),
    {Bytes, put_field(content, Bytes, File)}.
%% @doc Returns the full path of the file.
%% Joins the project root from the configuration with the project
%% relative filename.
%% @param Config the project configuration.
%% @param File the filename or metadata.
%% @returns the absolute path to the file on disk.
-spec full_path(
        Config :: ea_config:config(),
        File :: file()
       ) -> file:filename_all().
full_path(#{project_path := Base} = Config, File) ->
    filename:join([Base, filename(Config, File)]).
%% @doc Returns the files name in the project.
%% @param Config the project configuration.
%% @param File the filename or metadata.
%% @returns the filename relative to the project root.
-spec filename(
        Config :: ea_config:config(),
        File :: file()
       ) -> file:name_all().
%% A bare name (string/binary) is returned as-is; a metadata map has
%% its stored name extracted.
filename(_Config, File) when ?IS_NAME(File) -> File;
filename(_Config, #{name := Name}) -> Name.
-spec put_field(Key :: name, Value :: file:name_all(), File :: file()) -> filedata()
             ; (Key :: content, Value :: binary(), File :: file()) -> filedata()
             ; (Key :: tree, Value :: ktn_code:tree_node(), File :: file()) -> filedata().
%% Store `Value' under `Key' in the file's metadata map, first
%% wrapping a bare filename into a fresh metadata map when necessary.
put_field(Key, Value, File) when is_map(File) ->
    File#{Key => Value};
put_field(Key, Value, Name) when ?IS_NAME(Name) ->
    put_field(Key, Value, #{name => Name}).
%%==============================================================================
%% Copyright 2010 Erlang Solutions Ltd.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
-module(escalus).
% Public API
-export([suite/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2,
create_users/1,
create_users/2,
delete_users/1,
delete_users/2,
get_users/1,
override/3,
make_everyone_friends/1,
fresh_story/3,
fresh_story_with_config/3,
story/3,
assert/2,
assert/3,
assert_many/2,
send/2,
send_and_wait/2,
wait_for_stanza/1,
wait_for_stanza/2,
wait_for_stanzas/2,
wait_for_stanzas/3,
send_iq_and_wait_for_result/2,
send_iq_and_wait_for_result/3,
peek_stanzas/1]).
-export_type([client/0,
config/0]).
-include("escalus.hrl").
%%--------------------------------------------------------------------
%% Public Types
%%--------------------------------------------------------------------
-type client() :: #client{}.
-type config() :: escalus_config:config().
%%--------------------------------------------------------------------
%% Public API
%%--------------------------------------------------------------------
%% @doc Common Test suite/0 hook: require the `escalus_users'
%% configuration key before the suite runs.
-spec suite() -> [{atom(), atom()}].
suite() ->
    [{require, escalus_users}].
%% @doc Suite initialisation: start the escalus application (with its
%% dependencies) and the users/fresh helper services, then return the
%% config unchanged.
-spec init_per_suite(config()) -> config().
init_per_suite(Config) ->
    %% NOTE(review): the ensure_all_started result is ignored here; a
    %% start failure would only surface in later calls -- confirm this
    %% is intentional.
    application:ensure_all_started(escalus),
    escalus_users:start(Config),
    escalus_fresh:start(Config),
    Config.
%% @doc Suite teardown: stop the users and fresh helper services.
-spec end_per_suite(config()) -> ok.
end_per_suite(Config) ->
    escalus_users:stop(Config),
    escalus_fresh:stop(Config),
    ok.
%% @doc Per-testcase setup: start the cleaner, then the event manager
%% (tagged with the testcase name). Returns the augmented config.
-spec init_per_testcase(atom(), config()) -> config().
init_per_testcase(CaseName, Config) ->
    Config1 = escalus_cleaner:start(Config),
    escalus_event:start([{tc_name, CaseName}|Config1]).
%% @doc Per-testcase teardown: stop the event manager, then the
%% cleaner (reverse of init_per_testcase/2).
-spec end_per_testcase(atom(), config()) -> ok.
end_per_testcase(_CaseName, Config) ->
    Config1 = escalus_event:stop(Config),
    escalus_cleaner:stop(Config1).
%%--------------------------------------------------------------------
%% Public API - forward functions from other modules
%%--------------------------------------------------------------------
%% User API
%% @doc Create the configured users. Delegates to
%% {@link escalus_users:create_users/1}.
-spec create_users(config()) -> config().
create_users(Config) ->
    escalus_users:create_users(Config).
%% @doc Create only the given named users. Delegates to
%% {@link escalus_users:create_users/2}.
-spec create_users(config(), [escalus_users:named_user()]) -> config().
create_users(Config, Users) ->
    escalus_users:create_users(Config, Users).
%% @doc Delete the configured users. Delegates to
%% {@link escalus_users:delete_users/1}.
-spec delete_users(config()) -> config().
delete_users(Config) ->
    escalus_users:delete_users(Config).
%% @doc Delete only the given named users. Delegates to
%% {@link escalus_users:delete_users/2}.
-spec delete_users(config(), [escalus_users:named_user()]) -> config().
delete_users(Config, Users) ->
    escalus_users:delete_users(Config, Users).
%% @doc Look up user specifications; accepts `all', a list of user
%% names, or `{by_name, Names}'. Delegates to
%% {@link escalus_users:get_users/1}.
-spec get_users(Names) -> Result when
      Names :: all
             | [escalus_users:user_name()]
             | {by_name, [escalus_users:user_name()]},
      Result :: [escalus_users:named_user()].
get_users(Names) ->
    escalus_users:get_users(Names).
%% Story API
%% @doc Delegates to {@link escalus_story:make_everyone_friends/1}.
-spec make_everyone_friends(config()) -> config().
make_everyone_friends(Config) ->
    escalus_story:make_everyone_friends(Config).
%% @doc Run `Story' with fresh resources; delegates to
%% {@link escalus_fresh:story/3}.
-spec fresh_story(config(), [escalus_users:resource_spec()], fun()) -> any().
fresh_story(Config, ResourceCounts, Story) ->
    escalus_fresh:story(Config, ResourceCounts, Story).
%% @doc Delegates to {@link escalus_fresh:story_with_config/3}.
-spec fresh_story_with_config(config(), [escalus_users:resource_spec()], fun()) -> any().
fresh_story_with_config(Config, ResourceCounts, Story) ->
    escalus_fresh:story_with_config(Config, ResourceCounts, Story).
%% @doc Run `Story' with the configured users; delegates to
%% {@link escalus_story:story/3}.
-spec story(config(), [escalus_users:resource_spec()], fun()) -> any().
story(Config, ResourceCounts, Story) ->
    escalus_story:story(Config, ResourceCounts, Story).
%% Assertions
%% Assertion helpers, forwarded unchanged to escalus_new_assert.
assert(PredSpec, Arg) ->
    escalus_new_assert:assert(PredSpec, Arg).
assert(PredSpec, Params, Arg) ->
    escalus_new_assert:assert(PredSpec, Params, Arg).
assert_many(Predicates, Stanzas) ->
    escalus_new_assert:assert_many(Predicates, Stanzas).
%% Client API
%% Client helpers, each forwarded unchanged to escalus_client.
send(Client, Packet) ->
    escalus_client:send(Client, Packet).
send_and_wait(Client, Packet) ->
    escalus_client:send_and_wait(Client, Packet).
wait_for_stanza(Client) ->
    escalus_client:wait_for_stanza(Client).
%% Timeout in milliseconds.
wait_for_stanza(Client, Timeout) ->
    escalus_client:wait_for_stanza(Client, Timeout).
wait_for_stanzas(Client, Count) ->
    escalus_client:wait_for_stanzas(Client, Count).
wait_for_stanzas(Client, Count, Timeout) ->
    escalus_client:wait_for_stanzas(Client, Count, Timeout).
%% Look at queued stanzas without consuming them.
peek_stanzas(Client) ->
    escalus_client:peek_stanzas(Client).
send_iq_and_wait_for_result(Client, Iq) ->
    escalus_client:send_iq_and_wait_for_result(Client, Iq).
send_iq_and_wait_for_result(Client, Iq, Timeout) ->
    escalus_client:send_iq_and_wait_for_result(Client, Iq, Timeout).
%% Other functions
%% Replace an overridable behaviour in the config; forwarded to
%% escalus_overridables:override/3.
override(Config, OverrideName, NewValue) ->
    escalus_overridables:override(Config, OverrideName, NewValue).
%%
%% Copyright 2020 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %ExternalCopyright%
%%
-module(erl_uds_dist).
%%
%% A distributed Erlang system consists of a number of Erlang runtime
%% systems, or Erlang nodes, communicating with each other. The default
%% Erlang distribution protocol (inet_tcp_dist) is using TCP/IP sockets.
%%
%% This is an example of how to plug in an alternative distribution
%% protocol using distribution controller processes. Erlang
%% distribution can use whatever underlying protocols as long as the
%% implementation reliably delivers data chunks to the receiving
%% Erlang node in the order they were sent by the sending node.
%%
%% This example uses stream-oriented Unix Domain Sockets (of the
%% SOCK_STREAM type from the AF_UNIX socket family, also known as
%% AF_LOCAL) as the protocol for exchanging data, allowing Erlang nodes
%% running locally on the same host to communicate with each other.
%%
%% The original uds_dist module is using a port driver written in C,
%% erl_uds_dist is using distribution controllers implemented by Erlang
%% processes instead, so with the code written entirely in Erlang.
%%
%% This implementation is based on the gen_tcp_dist.erl example.
%%
%%
%% To enable this distribution, start erl with the -proto_dist parameter:
%%
%% erl -proto_dist erl_uds -no_epmd -sname node@localhost
%% -pa path/to/erl_uds_dist.beam
%%
%% or
%%
%% erl -proto_dist erl_uds -no_epmd -name node@127.0.0.1
%% -pa path/to/erl_uds_dist.beam
%%
%% Handle the connection setup phase with other Erlang nodes
-export([listen/1, accept/1, accept_connection/5,
setup/5, close/1, select/1, address/0]).
%% Optional functions for alternative distribution protocol
-export([setopts/2, getopts/2]).
%% Internal export
-export([accept_loop/2, accept_supervisor/6, setup_supervisor/5]).
-import(error_logger, [error_msg/2]).
-include_lib("kernel/include/net_address.hrl").
-include_lib("kernel/include/dist.hrl").
-include_lib("kernel/include/dist_util.hrl").
%%
%% If tracing is wanted, uncomment the dist_trace macro in dist_util.hrl
%% to enable all the calls to trace below, or copy the trace macro here.
%%
%% Tracing will freeze the initial boot when a -name or -sname parameter
%% is passed to start directly distributed nodes. To make it work,
%% launch non-distributed nodes first (without -name and -sname) then
%% call net_kernel:start/1 to enable the distribution in a second stage.
%%
%% Uncomment these two lines to disable the trace macro locally:
%% -undef(trace).
%% -define(trace(Fmt, Args), ok).
%%
%% Set the distribution protocol version statically (the different values
%% are listed in epmd.mk). All nodes are expected to use the same version
%% when using this distribution, to avoid the need for epmd.
-undef(ERL_DIST_VER).
-ifdef(ERL_DIST_VER_6).
%% Set it to 6 when supporting 32-bit big Creation numbers
-define(ERL_DIST_VER, 6).
-else.
%% Set it to 5 when supporting Creation numbers in the 1..3 range
-define(ERL_DIST_VER, 5).
-endif.
-spec select(NodeName) -> boolean() when
      NodeName :: node().
%% ---------------------------------------------------------------------
%% Decide whether this protocol can be used to reach NodeName.
%%
%% For Unix Domain Sockets the host name part of a node name does not
%% matter, as such sockets only connect nodes running locally on the
%% same host, so every node name is accepted.
%% ---------------------------------------------------------------------
select(_Node) ->
    true.
-spec listen(NodeNameWithoutHost) ->
          {ok, {ListeningSocket, Address, Creation}} |
          {error, Reason} when
      NodeNameWithoutHost :: node(),
      ListeningSocket :: gen_tcp:socket(),
      Address :: #net_address{},
      Creation :: 1..16#FFFFFFFF,
      Reason :: system_limit | inet:posix().
%% ---------------------------------------------------------------------
%% Listen for incoming connection requests on the specified Unix Domain Socket.
%% It is called only once when the distribution protocol is brought up.
%%
%% NodeNameWithoutHost defines the listening Unix Domain Socket to use, it is
%% the part before the '@' character in a full node name. It is usually a file
%% pathname in the local filesystem (limited in length to 107 bytes on Linux)
%% encoded according to the current system encoding mode, which can be either
%% relative or absolute. Erlang node names have some restrictions; as of this
%% writing, they are limited to the following characters: 0-9 A-Z a-z _ and -
%% (cf. net_kernel:valid_name_head/1) so they can't contain . / or \. As a
%% result, the socket file pathname is relative to the current directory.
%%
%% Return:
%% - ListeningSocket, a handle which is later passed to the accept/1 callback,
%%   i.e. the listening Unix Domain Socket through which this Erlang node
%%   is accessible.
%% - Address, a #net_address{} record with information about the address
%%   for this node.
%% - Creation, an integer 1, 2 or 3 with protocol version 5, or a random
%%   32-bit value with protocol version 6, that has to change for
%%   different instances of Erlang nodes created with the same node name
%%   (see get_creation/1).
%% ---------------------------------------------------------------------
listen(NodeNameWithoutHost) ->
    ?trace("~p~n", [{?MODULE, listen, self()}]),
    SocketPathname = to_string(NodeNameWithoutHost),
    %% Use the gen_tcp module for Unix Domain Sockets of the SOCK_STREAM
    %% socket type.
    %%
    %% The options passed to gen_tcp:listen:
    %% - {ifaddr, {local, Pathname :: binary() | string()} indicates to use a
    %%   Unix Domain Socket and defines which file pathname to listen on.
    %% - binary, to have each packet delivered as a binary.
    %% - {active, false} sets the listening socket in passive mode, meaning
    %%   the packets must be explicitly retrieved by calling recv/2,3.
    %% - {packet, 2} specifies a packet header length of 2 bytes, which
    %%   is expected in every message of the distribution protocol
    %%   until the initial distribution handshake completes.
    %%
    %% As documented, the port number must weirdly be set to 0 when using
    %% gen_tcp API functions for Unix Domain Sockets.
    case gen_tcp:listen(0, [{ifaddr, {local, SocketPathname}},
                            binary, {active, false}, {packet, 2}]) of
        %% Successful setup of the listening socket
        {ok, ListeningSocket} ->
            %% Get the listening socket address in a {local, Pathname} format
            {ok, SocketAddress} = inet:sockname(ListeningSocket),
            {ok, {ListeningSocket,
                  #net_address{address = SocketAddress,
                               %% Simply use 'localhost' as a convention
                               %% as host is not used in net_kernel.
                               host = localhost,
                               %% 'local' is the address family used for
                               %% Unix Domain Sockets.
                               family = local,
                               %% 'stream' is the convention chosen to
                               %% represent the SOCK_STREAM socket type.
                               protocol = stream},
                  %% Get the Creation number for the current Node instance
                  get_creation(SocketPathname)}};
        %% The specified file pathname is already in use or the filesystem
        %% socket object already exists.
        {error, eaddrinuse} ->
            %% Check that another Erlang node instance with the same node name
            %% is not currently running. Try to connect to this file pathname.
            case gen_tcp:connect({local, SocketPathname},
                                 0,
                                 [binary, {active, false}, {packet, 2}]) of
                {ok, SocketWithAnotherNode} ->
                    %% Connect has succeeded, so there is another Erlang node
                    %% already running and listening on this same pathname.
                    gen_tcp:close(SocketWithAnotherNode),
                    ?trace("Another node is already running with the same "
                           "node name: ~p~n", [SocketPathname]),
                    {error, duplicate_name};
                _ ->
                    %% No other Erlang node is listening on this file pathname
                    %% so this is just an existing file or a previous socket
                    %% object left after a crash/abort. Delete the file and
                    %% retry to setup the listening socket.
                    %%
                    %% The raw option is passed to bypass the need for a file
                    %% server, which is not available and registered yet during
                    %% the early boot stage.
                    case file:delete(SocketPathname, [raw]) of
                        ok ->
                            %% The file has been deleted, let's try again
                            listen(NodeNameWithoutHost);
                        {error, enoent} ->
                            %% enoent - No such file or directory to delete
                            %% anymore; unexpected but let's try again.
                            listen(NodeNameWithoutHost);
                        {error, eacces} ->
                            %% eacces - Permission denied
                            ?trace("The file ~p cannot be deleted, "
                                   "permission denied~n",
                                   [SocketPathname]),
                            {error, eacces};
                        _DeleteError ->
                            ?trace("Error returned by file:delete(~p, [raw]): "
                                   "~p~n", [SocketPathname, _DeleteError]),
                            _DeleteError
                    end
            end;
        Error ->
            Error
    end.
-spec address() -> Address :: #net_address{}.
%% ---------------------------------------------------------------------
%% Support the -dist_listen false option, so that a distribution can be
%% started without listening for incoming connections.
%%
%% In that case, address/0 is called in order to get the Address part of
%% the listen/1 function without creating a listening socket. All fields
%% except address have to be set in the returned Address record.
%%
%% This is used to support the dynamic name feature introduced in OTP 23.0,
%% which allows to start a node with an 'undefined' name at first. It will
%% then get its actual name randomly from the first node it connects to.
%% ---------------------------------------------------------------------
address() ->
    #net_address{%% Simply use 'localhost' as a convention
                 %% as host is not used in net_kernel.
                 host = localhost,
                 %% 'local' is the address family used for
                 %% Unix Domain Sockets.
                 family = local,
                 %% 'stream' is the convention chosen to
                 %% represent the SOCK_STREAM socket type.
                 protocol = stream}.
-spec get_creation(SocketPathname) -> Creation :: 1..16#FFFFFFFF when
      SocketPathname :: string().
%% ---------------------------------------------------------------------
%% Return the Creation number for the Erlang node which is accessible
%% through the Unix Domain Socket listening on the file pathname
%% SocketPathname.
%%
%% The Creation number has to change for different instances of Erlang
%% nodes created with the same distribution name. It is stored in every
%% process identifier sent to another node, so that process identifiers
%% from one given node do not remain valid when sent to a new node
%% instance with the same name.
%%
%% Support small Creation numbers in the 1..3 range with distribution
%% protocol version 5 and 32-bit big Creation numbers in the range
%% 4..4294967295 with protocol version 6. The value 0 must be avoided
%% for normal operations as it is used as a wild card for debug purpose.
%%
%% For big Creations numbers, simply create a new random value each time.
%%
%% For small Creation numbers in the 1..3 range, the value is saved on
%% the filesystem in the file pathname SocketPathname with the added
%% ".uds" extension, stored using a 1 byte character. With this convention,
%% an Erlang node can retrieve the previous value from the filesystem
%% on a new invocation to make sure the Creation number is incremented.
%% ---------------------------------------------------------------------
get_creation(SocketPathname) ->
    %% Check the distribution protocol version
    case ?ERL_DIST_VER of
        6 ->
            %% For big Creations numbers, simply create a new random value
            %% each time, while avoiding the 0..3 range.
            3 + rand:uniform((1 bsl 32) - 4);
        _ ->
            %% For small Creation numbers, open the file ending with the
            %% ".uds" extension read/write, in binary mode (so that read
            %% operations return binaries), in raw mode (so that the file
            %% operations bypass the need for a file server, which is not
            %% available and registered yet during the early boot stage).
            case file:open(SocketPathname ++ ".uds",
                           [raw, read, write, binary]) of
                {ok, File} ->
                    %% Read 1 byte from File, normally its full content
                    Creation =
                        case file:read(File, 1) of
                            %% Try to match this 1-byte binary with a
                            %% character in the $1..$2 range.
                            {ok, <<X:8>>} when $0 < X, X < $3 ->
                                %% Increment the previous value if found
                                X + 1;
                            _ ->
                                %% Start or wrap back to $1 otherwise
                                $1
                        end,
                    %% Write the new Creation number in position 0 in File,
                    %% so that it overwrites the previous value.
                    %% NOTE(review): the pwrite/close results are ignored;
                    %% a failed write only means a later instance may reuse
                    %% this Creation number -- confirm that is acceptable.
                    file:pwrite(File, 0, <<Creation:8>>),
                    file:close(File),
                    %% Convert the 1 byte character to its integer value
                    binary_to_integer(<<Creation:8>>);
                {error, _Reason} ->
                    %% The file couldn't be opened, return a random value
                    rand:uniform(3)
            end
    end.
-spec accept(ListeningSocket) -> AcceptPid :: pid() when
      ListeningSocket :: gen_tcp:socket().
%% ---------------------------------------------------------------------
%% Accept new connection attempts from other Erlang nodes.
%%
%% accept/1 spawns an accept_loop process that accepts connections and
%% returns its process identifier.
%%
%% The caller of the accept_loop is a representative for net_kernel and
%% is identified as Kernel below. This may or may not be the process
%% registered as net_kernel.
%%
%% When a new connection is accepted, the accept_loop creates a distribution
%% controller, whose job is to dispatch traffic on the connection, then
%% it informs Kernel about the accepted connection.
%%
%% The ListeningSocket argument will be the same as the ListeningSocket handle
%% of the return value from the listen/1 callback above, i.e. the listening
%% Unix Domain Socket through which this Erlang node is accessible.
%%
%% accept/1 is called only once when the distribution protocol is brought up.
%% ---------------------------------------------------------------------
accept(ListeningSocket) ->
    %% The acceptor is linked so it dies with its spawner.
    spawn_opt(?MODULE, accept_loop, [self(), ListeningSocket],
              %% Spawn on max priority
              [link, {priority, max}]).
%% Accept incoming connections forever; one distribution controller
%% process is spawned per accepted socket.
accept_loop(Kernel, ListeningSocket) ->
    case gen_tcp:accept(ListeningSocket) of
        {ok, Socket} ->
            %% Create a distribution controller process in charge of the
            %% accepted connection, available through Socket.
            DistCtrl = spawn_dist_controller(Socket),
            ?trace("~p~n",[{?MODULE, accept, accepted, Socket,
                            DistCtrl, self()}]),
            %% Make DistCtrl the new controlling process of Socket, i.e.
            %% the process that receives messages from Socket. The
            %% flush calls before and after hand over any messages that
            %% were delivered to this process in the meantime.
            flush_controller(DistCtrl, Socket),
            gen_tcp:controlling_process(Socket, DistCtrl),
            flush_controller(DistCtrl, Socket),
            %% Inform Kernel about the accepted connection. DistCtrl is
            %% passed as the identifier of the distribution controller,
            %% 'local' as the address family for Unix Domain Sockets and
            %% 'stream' as the protocol for the SOCK_STREAM socket type.
            Kernel ! {accept, self(), DistCtrl, local, stream},
            receive
                %% The request was accepted. SupervisorPid is the process
                %% identifier of the connection supervisor process (created
                %% in the accept_connection/5 callback).
                {Kernel, controller, SupervisorPid} ->
                    %% Set SupervisorPid as the supervisor of the
                    %% distribution controller.
                    call_controller(DistCtrl, {supervisor, SupervisorPid}),
                    %% And continue with the handshake.
                    SupervisorPid ! {self(), controller};
                %% The request was rejected, this is a fatal error, the
                %% accept_loop process should terminate.
                {Kernel, unsupported_protocol} ->
                    exit(unsupported_protocol)
            end,
            accept_loop(Kernel, ListeningSocket);
        {error, closed} ->
            ?trace("~p~n",[{?MODULE, accept, ListeningSocket,
                            closed, self()}]),
            exit(closing_connection);
        Error ->
            ?trace("~p~n",[{?MODULE, accept, ListeningSocket,
                            Error, self()}]),
            exit(Error)
    end.
-spec accept_connection(AcceptPid, DistCtrl, MyNode, Allowed, SetupTime) ->
          ConnectionSupervisorPid :: pid() when
      AcceptPid :: pid(),
      DistCtrl :: pid(),
      MyNode :: node(),
      Allowed :: list(),
      SetupTime :: non_neg_integer().
%% ---------------------------------------------------------------------
%% accept_connection/5 spawns an accept_supervisor process that accepts
%% a new connection attempt from another Erlang node and performs the
%% handshake with the other side. Callbacks and other information needed
%% for the handshake are provided in a #hs_data{} record. If the handshake
%% successfully completes, this process will continue to function as the
%% connection supervisor as long as the connection is up.
%%
%% The process identifier of this accept_supervisor is returned.
%%
%% The caller of accept_supervisor is a representative for net_kernel and
%% is identified as Kernel below.
%%
%% AcceptPid is the process identifier created by accept/1.
%%
%% DistCtrl is the identifier of the distribution controller process in
%% charge of the connection, as created by the accept_loop process above.
%%
%% MyNode is the name of this node.
%%
%% The Allowed argument is to be passed during the handshake.
%%
%% SetupTime is used to create a setup timer, to be passed during the
%% handshake.
%% ---------------------------------------------------------------------
accept_connection(AcceptPid, DistCtrl, MyNode, Allowed, SetupTime) ->
    %% Spawn options come from dist_util so the supervisor gets the
    %% standard net-ticker process flags.
    spawn_opt(?MODULE, accept_supervisor,
              [self(), AcceptPid, DistCtrl, MyNode, Allowed, SetupTime],
              dist_util:net_ticker_spawn_options()).
%% Connection supervisor for an accepted connection: waits for the
%% accept_loop to hand over control, then runs the distribution
%% handshake with the other side via dist_util.
%% NOTE(review): this receive has no `after' clause; it relies on
%% accept_loop always sending {AcceptPid, controller} (or this process
%% being taken down) -- confirm that holds on all error paths.
accept_supervisor(Kernel, AcceptPid, DistCtrl, MyNode, Allowed, SetupTime) ->
    ?trace("~p~n", [{?MODULE, accept_connection, self()}]),
    receive
        {AcceptPid, controller} ->
            Timer = dist_util:start_timer(SetupTime),
            HSData0 = hs_data_common(DistCtrl),
            HSData = HSData0#hs_data{
                       kernel_pid = Kernel,
                       this_node = MyNode,
                       socket = DistCtrl,
                       timer = Timer,
                       allowed = Allowed,
                       %% Return the remote address using the #net_address{}
                       %% record format.
                       f_address =
                           fun(_,_) ->
                                   #net_address{
                                      %% Unix Domain Sockets don't have a
                                      %% socket address in a {local, Pathname}
                                      %% format on the 'connect' side, which
                                      %% is unnamed when not bound.
                                      address = [],
                                      %% Simply use 'localhost' as a convention
                                      %% as host is not used in net_kernel.
                                      host = localhost,
                                      %% 'local' is the address family used
                                      %% for Unix Domain Sockets.
                                      family = local,
                                      %% 'stream' is the convention chosen to
                                      %% represent the SOCK_STREAM socket type.
                                      protocol = stream}
                           end
                      },
            ?trace("handshake_other_started received on node (~p)~n",
                   [MyNode]),
            dist_util:handshake_other_started(HSData)
    end.
%% ---------------------------------------------------------------------
%% Define common values of the handshake data record, defined in
%% kernel/include/dist_util.hrl.
%%
%% All callbacks delegate to the distribution controller process
%% (DistCtrl) through call_controller/2. The record is shared by both
%% the accepting (accept_supervisor) and the connecting (setup_supervisor)
%% side, which fill in the remaining fields themselves.
%% ---------------------------------------------------------------------
hs_data_common(DistCtrl) ->
    %% Resolved once up front; both values are stable for the lifetime
    %% of the controller.
    TickHandler = call_controller(DistCtrl, tick_handler),
    Socket = call_controller(DistCtrl, socket),
    #hs_data{%% Flags the node should use. Simply use the default config
             this_flags = 0,
             %% Send Packet to the other side
             f_send =
                 fun(Ctrl, Packet) ->
                         call_controller(Ctrl, {send, Packet})
                 end,
             %% Receive a packet of Length bytes, within Timeout milliseconds.
             %% The received binary is converted to a list for the
             %% handshake code.
             f_recv =
                 fun(Ctrl, Length, Timeout) ->
                         case call_controller(Ctrl, {recv, Length, Timeout}) of
                             {ok, Bin} when is_binary(Bin) ->
                                 {ok, binary_to_list(Bin)};
                             Other -> Other
                         end
                 end,
             %% Set the Socket options before nodeup is delivered to net_kernel
             f_setopts_pre_nodeup =
                 fun(Ctrl) ->
                         call_controller(Ctrl, pre_nodeup)
                 end,
             %% Set the Socket options after nodeup is delivered to net_kernel
             f_setopts_post_nodeup =
                 fun(Ctrl) ->
                         call_controller(Ctrl, post_nodeup)
                 end,
             %% Get the identifier of the low level entity that handles the
             %% connection (often DistCtrl itself).
             f_getll =
                 fun(Ctrl) ->
                         call_controller(Ctrl, getll)
                 end,
             %% The following two functions are used in the tick loop.
             %% The guards ensure these funs are only ever applied to this
             %% connection's controller.
             %% Send a 'tick' request to the tick handler
             mf_tick = fun (Ctrl) when Ctrl == DistCtrl ->
                               TickHandler ! tick
                       end,
             %% Get stats about send, received or pending packets
             mf_getstat =
                 fun(Ctrl) when Ctrl == DistCtrl ->
                         case inet:getstat(Socket,
                                           [recv_cnt, send_cnt, send_pend]) of
                             {ok, Stat} ->
                                 split_stat(Stat, 0, 0, 0);
                             Error ->
                                 Error
                         end
                 end,
             %% New in kernel-5.1 (OTP 19.1):
             %% List of Socket options to set on future connections
             mf_setopts = fun (Ctrl, Options) when Ctrl == DistCtrl ->
                                  setopts(Socket, Options)
                          end,
             %% List of Socket options to read for future connections
             mf_getopts = fun (Ctrl, Options) when Ctrl == DistCtrl ->
                                  getopts(Socket, Options)
                          end,
             %% New in kernel-6.0 (OTP 21.0):
             %% Function called when the handshake has completed and the
             %% distribution connection is up. The distribution controller
             %% can begin dispatching traffic.
             %%
             %% DHandle is a distribution handle identifying the connection and
             %% needed for a few erlang:dist_ctrl_xxx built-in functions.
             f_handshake_complete = fun (Ctrl, Node, DHandle) ->
                                            call_controller(Ctrl,
                                                            {handshake_complete,
                                                             Node, DHandle})
                                    end
             %% Optional fields in the handshake data record:
             %% add_flags      Distribution flags to add to the connection
             %% reject_flags   Distribution flags to reject
             %% require_flags  Distribution flags that are required
             %% New in kernel-7.0 (OTP 23.0):
             %% other_creation Creation number of the other node, passed
             %%                in the new handshake protocol introduced
             %%                in distribution protocol version 6.
             %% this_creation  Used with dynamic node name, that can be
             %%                requested by a connecting node from the
             %%                accepting node it first connects to, as
             %%                part of the handshake. This Creation
             %%                number to set is received at the same time.
            }.
%% ---------------------------------------------------------------------
%% Return the Stat output from inet:getstat in the format expected by
%% the mf_getstat fun as defined in dist_util.hrl, i.e. a
%% {ok, RecvCnt, SendCnt, SendPend} tuple. Counters absent from Stat
%% keep the initial values passed in.
%% ---------------------------------------------------------------------
split_stat(Stat, Recv0, Send0, Pend0) ->
    {Recv, Send, Pend} =
        lists:foldl(
          fun({recv_cnt, R}, {_, S, P}) -> {R, S, P};
             ({send_cnt, S}, {R, _, P}) -> {R, S, P};
             ({send_pend, P}, {R, S, _}) -> {R, S, P}
          end,
          {Recv0, Send0, Pend0},
          Stat),
    {ok, Recv, Send, Pend}.
-spec setopts(ListeningSocket, Options) -> ok | {error, Error} when
      ListeningSocket :: gen_tcp:socket(),
      Options :: [inet:socket_setopt()],
      Error :: inet:posix().
%% ---------------------------------------------------------------------
%% Set the list of options to apply on future connections.
%%
%% ListeningSocket is the handle originally passed from the listen/1 callback.
%%
%% Options is the list of options to apply. The active, deliver and
%% packet options are owned by the distribution implementation and are
%% therefore rejected with {error, {badopts, Forbidden}}.
%% ---------------------------------------------------------------------
setopts(ListeningSocket, Options) ->
    IsForbidden =
        fun({Key, _}) -> lists:member(Key, [active, deliver, packet]);
           (_) -> false
        end,
    case lists:filter(IsForbidden, Options) of
        [] ->
            inet:setopts(ListeningSocket, Options);
        Forbidden ->
            {error, {badopts, Forbidden}}
    end.
-spec getopts(ListeningSocket, Options) -> {ok, OptionValues} |
                                           {error, Error} when
      ListeningSocket :: gen_tcp:socket(),
      Options :: [inet:socket_getopt()],
      OptionValues :: [inet:socket_setopt() | gen_tcp:pktoptions_value()],
      Error :: inet:posix().
%% ---------------------------------------------------------------------
%% Get the list of options to read for future connections.
%%
%% ListeningSocket is the handle originally passed from the listen/1 callback.
%%
%% Options is the list of options. Unlike setopts/2 no option is
%% filtered out; this is a thin delegation to inet:getopts/2.
%% ---------------------------------------------------------------------
getopts(ListeningSocket, Options) ->
    inet:getopts(ListeningSocket, Options).
-spec setup(Node, Type, MyNode, LongOrShortNames, SetupTime) ->
          ConnectionSupervisorPid :: pid() when
      Node :: node(),
      Type :: atom(),
      MyNode :: node(),
      LongOrShortNames :: shortnames | longnames,
      SetupTime :: non_neg_integer().
%% ---------------------------------------------------------------------
%% setup/5 spawns a setup_supervisor process that initiates a new connection
%% attempt with another Erlang node and performs the handshake with the
%% other side. Callbacks and other information needed for the handshake are
%% provided in a #hs_data{} record. If the handshake successfully completes,
%% this process will continue to function as a connection supervisor as long
%% as the connection is up (still 'ticking').
%%
%% The process identifier of this setup_supervisor is returned.
%%
%% The spawned setup_supervisor process creates a separate distribution
%% controller responsible for dispatching traffic on the connection.
%%
%% The caller of setup/5 is a representative for net_kernel and
%% is identified as Kernel below.
%%
%% Node is the name of the other Erlang node to connect to, it defines the
%% listening Unix Domain Socket it listens on. The socket file pathname is
%% the part before the '@' character for a full node name in Name@Host format
%% (whether short or long names are used) or the entire Node name otherwise.
%%
%% Type is the connection type to be passed during the handshake.
%%
%% MyNode is the name of this node.
%%
%% The LongOrShortNames argument is either the 'longnames' atom or the
%% 'shortnames' atom, indicating whether long or short names are used. This
%% distinction is simply ignored as all nodes are running locally on the
%% same host with this alternative Erlang distribution protocol.
%%
%% SetupTime is used to create a setup timer, to be passed during the
%% handshake.
%% ---------------------------------------------------------------------
setup(Node, Type, MyNode, _LongOrShortNames, SetupTime) ->
    %% As in accept_connection/5, the supervisor runs with the spawn
    %% options dist_util recommends for net ticker processes.
    spawn_opt(?MODULE, setup_supervisor,
              [self(), Node, Type, MyNode, SetupTime],
              dist_util:net_ticker_spawn_options()).
setup_supervisor(Kernel, Node, Type, MyNode, SetupTime) ->
    ?trace("~p~n", [{?MODULE, setup, self(), Node}]),
    %% No need for a host name lookup as this alternative Erlang distribution
    %% protocol only supports nodes running locally on the same host.
    %%
    %% Retrieve the socket pathname from Node.
    SocketPathname = get_pathname(Node),
    %% The options passed to connect:
    %% - {local, Pathname :: binary() | string()} indicates to use a
    %%   Unix Domain Socket and defines which file pathname to connect to.
    %% - binary, to have each packet delivered as a binary.
    %% - {active, false} sets the socket in passive mode, meaning
    %%   the packets must be explicitly retrieved by calling recv/2,3.
    %% - {packet, 2} specifies a packet header length of 2 bytes, which
    %%   is expected in every message of the distribution protocol
    %%   until the initial handshake completes.
    %%
    %% As documented, the port number must weirdly be set to 0 when using
    %% gen_tcp API functions for Unix Domain Sockets.
    case gen_tcp:connect({local, SocketPathname},
                         0,
                         [binary, {active, false}, {packet, 2}]) of
        {ok, Socket} ->
            %% Timer bounding the duration of the whole handshake.
            Timer = dist_util:start_timer(SetupTime),
            %% Create a distribution controller process in charge of
            %% dispatching traffic on the connection to the other Erlang node,
            %% available through Socket.
            DistCtrl = spawn_dist_controller(Socket),
            %% Set this process as the supervisor of the distribution controller
            call_controller(DistCtrl, {supervisor, self()}),
            %% Set DistCtrl as the new controlling process of Socket, i.e.
            %% the process that receives messages from Socket. The double
            %% flush around controlling_process/2 avoids losing messages
            %% during the ownership transfer.
            flush_controller(DistCtrl, Socket),
            gen_tcp:controlling_process(Socket, DistCtrl),
            flush_controller(DistCtrl, Socket),
            %% Get the remote socket address in a {local, Pathname} format
            {ok, SocketAddress} = inet:peername(Socket),
            HSData0 = hs_data_common(DistCtrl),
            HSData = HSData0#hs_data{
                kernel_pid = Kernel,
                other_node = Node,
                this_node = MyNode,
                socket = DistCtrl,
                timer = Timer,
                other_version = ?ERL_DIST_VER,
                request_type = Type,
                %% Return the remote address using the #net_address{}
                %% record format.
                f_address =
                    fun(_,_) ->
                            #net_address{
                               address = SocketAddress,
                               %% Simply use 'localhost' as a convention
                               %% as host is not used in net_kernel.
                               host = localhost,
                               %% 'local' is the address family used
                               %% for Unix Domain Sockets.
                               family = local,
                               %% 'stream' is the convention chosen to
                               %% represent the SOCK_STREAM socket type.
                               protocol = stream}
                    end
            },
            %% Start the handshake and check that the connection is up
            %% (still 'ticking').
            ?trace("handshake_we_started with node on socket (~p)~n",
                   [SocketPathname]),
            dist_util:handshake_we_started(HSData);
        _Other ->
            %% Connect failed: terminate the setup through the standard
            %% dist_util shutdown path.
            ?trace("gen_tcp:connect to node (~p) failed (~p).~n",
                   [Node, _Other]),
            ?shutdown(Node)
    end.
-spec close(ListeningSocket) -> ok when
      ListeningSocket :: gen_tcp:socket().
%% ---------------------------------------------------------------------
%% Close the listening Unix Domain Socket through which this Erlang node
%% is accessible, and remove its socket file from the filesystem.
%% ---------------------------------------------------------------------
close(ListeningSocket) ->
    %% The listening socket address has the {local, Pathname} shape for
    %% Unix Domain Sockets.
    {ok, {local, SocketPathname}} = inet:sockname(ListeningSocket),
    %% Remove the socket file. The raw option bypasses the file server,
    %% which may not be available and registered anymore (for instance
    %% during a node shutdown phase). A deletion failure is deliberately
    %% ignored, as in the original implementation.
    _ = file:delete(SocketPathname, [raw]),
    gen_tcp:close(ListeningSocket).
-spec get_pathname(Node) -> Pathname when
      Node :: node(),
      Pathname :: string().
%% ---------------------------------------------------------------------
%% Retrieve the socket pathname from Node.
%%
%% The socket pathname is everything before the first '@' character of
%% the node name (whether short or long names are used), or the entire
%% node name when it contains no '@'.
%% ---------------------------------------------------------------------
get_pathname(Node) ->
    [Pathname | _] = string:split(atom_to_list(Node), "@"),
    Pathname.
%% ---------------------------------------------------------------------
%% Flush all the tcp and tcp_closed received messages and transfer them
%% to the Pid process. This is used when setting Pid as the new controlling
%% process of Socket. This function needs to be called twice: just before
%% and right after calling controlling_process(Socket, Pid), so that no
%% socket message delivered around the ownership switch is lost.
%% ---------------------------------------------------------------------
flush_controller(Pid, Socket) ->
    receive
        {tcp, Socket, Data} ->
            Pid ! {tcp, Socket, Data},
            flush_controller(Pid, Socket);
        {tcp_closed, Socket} ->
            Pid ! {tcp_closed, Socket},
            flush_controller(Pid, Socket)
    %% Return as soon as the mailbox holds no more messages for Socket.
    after 0 ->
            ok
    end.
%% ---------------------------------------------------------------------
%% Distribution controller processes
%%
%% There will be five parties working together when the
%% connection is up:
%%
%% - The gen_tcp socket. It provides a connection to the other
%%   node through a Unix Domain Socket.
%%
%% - The output handler. It will dispatch all outgoing traffic
%%   from this node to the remote node through the socket. This
%%   process is registered as a distribution controller for this
%%   connection.
%%
%% - The input handler. It will dispatch all incoming traffic
%%   from the remote node to this node through the socket. This
%%   process is also the socket controlling process, receiving
%%   incoming traffic in active mode using {active, N}.
%%
%% - The tick handler. It sends asynchronous tick messages to the
%%   socket to check for node liveness. It executes on max priority
%%   since it is important to get ticks through to the other end.
%%
%% - The connection supervisor, provided by dist_util. It monitors
%%   traffic and issues tick requests to the tick handler when
%%   no outgoing traffic is happening. If no incoming traffic is
%%   received, the other node is considered to be down and the
%%   connection is closed. This process also executes on max priority.
%%
%% These parties are linked together so should one of them fail,
%% all of them are terminated and the connection is taken down.
%% ---------------------------------------------------------------------

%% In order to avoid issues with lingering signal binaries,
%% enable off-heap message queue data as well as fullsweep
%% after 0. The fullsweeps will be cheap since there is more
%% or less no live data. These options are shared by every process
%% spawned by this distribution implementation.
-define(DIST_CONTROLLER_COMMON_SPAWN_OPTS,
        [{message_queue_data, off_heap},
         {fullsweep_after, 0}]).
%% ---------------------------------------------------------------------
%% Setup the distribution controller by spawning the tick handler
%% and starting the setup loop.
%% ---------------------------------------------------------------------
spawn_dist_controller(Socket) ->
    %% The controller initially runs on max priority, like the tick
    %% handler and the connection supervisor.
    SpawnOpts = [{priority, max} | ?DIST_CONTROLLER_COMMON_SPAWN_OPTS],
    spawn_opt(fun() -> dist_controller_setup(Socket) end, SpawnOpts).
%% Spawn the tick handler (linked, on max priority) and enter the setup
%% loop. The connection supervisor is not known yet at this point, hence
%% the 'undefined' placeholder.
dist_controller_setup(Socket) ->
    TickHandler = spawn_opt(fun() -> tick_handler(Socket) end,
                            %% Spawn on max priority
                            [link, {priority, max}] ++ ?DIST_CONTROLLER_COMMON_SPAWN_OPTS),
    dist_controller_setup_loop(Socket, TickHandler, undefined).
%% ---------------------------------------------------------------------
%% During the handshake phase, loop in dist_controller_setup_loop(). When the
%% connection is up, spawn an input handler and continue as output handler.
%%
%% Sup is the connection supervisor ('undefined' until the
%% {supervisor, Pid} request has been received).
%%
%% Requests follow the {Ref, From, Request} protocol produced by
%% call_controller/2; each reply is sent back as {Ref, Result}.
%% ---------------------------------------------------------------------
dist_controller_setup_loop(Socket, TickHandler, Sup) ->
    receive
        {tcp_closed, Socket} ->
            exit(connection_closed);
        %% Set Pid as the connection supervisor, link with it and
        %% send the linking result back.
        {Ref, From, {supervisor, Pid}} ->
            Res = link(Pid),
            From ! {Ref, Res},
            dist_controller_setup_loop(Socket, TickHandler, Pid);
        %% Send the tick handler to the From process
        {Ref, From, tick_handler} ->
            From ! {Ref, TickHandler},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Send the socket to the From process
        {Ref, From, socket} ->
            From ! {Ref, Socket},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Send Packet onto the socket and send the result back
        {Ref, From, {send, Packet}} ->
            Res = gen_tcp:send(Socket, Packet),
            From ! {Ref, Res},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Receive a packet of Length bytes, within Timeout milliseconds
        {Ref, From, {recv, Length, Timeout}} ->
            Res = gen_tcp:recv(Socket, Length, Timeout),
            From ! {Ref, Res},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Send the low level distribution controller pid to the From process
        {Ref, From, getll} ->
            From ! {Ref, {ok, self()}},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Set the Socket options just before the connection is established
        %% for normal data traffic and before nodeup is delivered. A nodeup
        %% message is delivered when a new node is connected.
        {Ref, From, pre_nodeup} ->
            %% Switch the distribution protocol to a packet header of
            %% 4 bytes which is used to store the length of each packet
            %% sent over the streamed Unix Domain Sockets.
            Res = inet:setopts(Socket,
                               [{active, false},
                                {packet, 4}]),
            From ! {Ref, Res},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% Set the Socket options just after the connection is established
        %% for normal data traffic and after nodeup is delivered.
        {Ref, From, post_nodeup} ->
            %% Switch the distribution protocol to a packet header of
            %% 4 bytes, as explained above.
            %% The previous pre_nodeup case should normally be enough.
            Res = inet:setopts(Socket,
                               [{active, false},
                                {packet, 4}]),
            From ! {Ref, Res},
            dist_controller_setup_loop(Socket, TickHandler, Sup);
        %% The handshake has completed and the connection is up, the
        %% distribution controller can begin dispatching traffic.
        {Ref, From, {handshake_complete, _Node, DHandle}} ->
            From ! {Ref, ok},
            %% Handshake complete! Begin dispatching traffic
            %% Use a separate process for dispatching input. This
            %% is not necessary, but it enables parallel execution
            %% of independent work loads at the same time as it
            %% simplifies the implementation.
            InputHandler = spawn_opt(
                             fun() -> dist_controller_input_handler(DHandle,
                                                                    Socket,
                                                                    Sup)
                             end,
                             [link] ++ ?DIST_CONTROLLER_COMMON_SPAWN_OPTS),
            %% Set the input handler as the new controlling process of
            %% Socket, i.e. the process that receives messages from Socket.
            %% The double flush avoids losing messages in the transfer.
            flush_controller(InputHandler, Socket),
            gen_tcp:controlling_process(Socket, InputHandler),
            flush_controller(InputHandler, Socket),
            %% Register the input handler process
            erlang:dist_ctrl_input_handler(DHandle, InputHandler),
            %% Inform the input handler that it has been registered
            InputHandler ! DHandle,
            %% From now on, execute on normal priority
            process_flag(priority, normal),
            %% Request notification when outgoing data is available to fetch.
            %% A dist_data message will be sent.
            erlang:dist_ctrl_get_data_notification(DHandle),
            %% And continue as output handler
            dist_controller_output_handler(DHandle, Socket)
    end.
%% ---------------------------------------------------------------------
%% Call the distribution controller with Message and get Result in return.
%%
%% The distribution controller is monitored so the caller is notified if
%% it terminates while the request is in flight, in which case the caller
%% exits with {dist_controller_exit, Reason}.
%% ---------------------------------------------------------------------
call_controller(DistCtrl, Message) ->
    %% The monitor reference doubles as the unique request tag.
    Ref = erlang:monitor(process, DistCtrl),
    DistCtrl ! {Ref, self(), Message},
    receive
        {Ref, Result} ->
            %% flush removes a 'DOWN' message that may already have
            %% been delivered.
            erlang:demonitor(Ref, [flush]),
            Result;
        {'DOWN', Ref, process, DistCtrl, Reason} ->
            exit({dist_controller_exit, Reason})
    end.
%% Use active 10 for good throughput while still maintaining back-pressure
%% if the input controller isn't able to handle all incoming messages.
%% This approach is re-used as-is from the gen_tcp_dist.erl example.
-define(ACTIVE_INPUT, 10).

%% ---------------------------------------------------------------------
%% Input handler
%%
%% Dispatch all traffic from the remote node coming to this node through
%% the socket.
%% ---------------------------------------------------------------------
dist_controller_input_handler(DHandle, Socket, Sup) ->
    %% Tie this process' fate to the connection supervisor.
    link(Sup),
    receive
        %% Wait for the input handler to be registered (signalled by the
        %% setup loop sending us DHandle) before starting to deliver
        %% incoming data.
        DHandle ->
            dist_controller_input_loop(DHandle, Socket, 0)
    end.
%% N counts the remaining {active, N} quota; replenish it back to
%% ?ACTIVE_INPUT once at least half has been consumed.
dist_controller_input_loop(DHandle, Socket, N) when N =< ?ACTIVE_INPUT/2 ->
    %% Set the socket in active mode and define the number of received data
    %% packets that will be delivered as {tcp, Socket, Data} messages.
    inet:setopts(Socket, [{active, ?ACTIVE_INPUT - N}]),
    dist_controller_input_loop(DHandle, Socket, ?ACTIVE_INPUT);
dist_controller_input_loop(DHandle, Socket, N) ->
    receive
        %% In active mode, data packets are delivered as messages
        {tcp, Socket, Data} ->
            %% When data is received from the remote node, deliver it
            %% to the local node. A failure here means the connection
            %% is going down: wait on death row instead of crashing
            %% with an unrelated reason.
            try erlang:dist_ctrl_put_data(DHandle, Data)
            catch _ : _ -> death_row()
            end,
            %% Decrease the counter when looping so that the socket is
            %% set with {active, Count} again to receive more data.
            dist_controller_input_loop(DHandle, Socket, N-1);
        %% Connection to remote node terminated
        {tcp_closed, Socket} ->
            exit(connection_closed);
        %% Ignore all other messages
        _ ->
            dist_controller_input_loop(DHandle, Socket, N)
    end.
%% ---------------------------------------------------------------------
%% Output handler
%%
%% Dispatch all outgoing traffic from this node to the remote node through
%% the socket. Driven by 'dist_data' notifications requested via
%% erlang:dist_ctrl_get_data_notification/1.
%% ---------------------------------------------------------------------
dist_controller_output_handler(DHandle, Socket) ->
    receive
        dist_data ->
            %% Available outgoing data to send from this node. A failure
            %% means the connection is going down: wait on death row.
            try dist_controller_send_data(DHandle, Socket)
            catch _ : _ -> death_row()
            end,
            dist_controller_output_handler(DHandle, Socket);
        _ ->
            %% Ignore all other messages
            dist_controller_output_handler(DHandle, Socket)
    end.
%% Drain and send all outgoing data currently buffered by the local
%% node, then re-arm the dist_data notification once drained.
dist_controller_send_data(DHandle, Socket) ->
    %% Fetch data from the local node to be sent to the remote node
    case erlang:dist_ctrl_get_data(DHandle) of
        none ->
            %% Request notification when more outgoing data is available.
            %% A dist_data message will be sent.
            erlang:dist_ctrl_get_data_notification(DHandle);
        Data ->
            socket_send(Socket, Data),
            %% Loop as long as there is more data available to fetch
            dist_controller_send_data(DHandle, Socket)
    end.
%% ---------------------------------------------------------------------
%% Tick handler
%%
%% The tick handler process writes a tick message to the socket when it
%% receives a 'tick' request from the connection supervisor. The tick
%% payload itself is empty (zero-length).
%% ---------------------------------------------------------------------
tick_handler(Socket) ->
    ?trace("~p~n", [{?MODULE, tick_handler, self()}]),
    receive
        tick ->
            %% May block due to busy port...
            socket_send(Socket, "");
        _ ->
            %% Ignore all other messages
            ok
    end,
    tick_handler(Socket).
%% ---------------------------------------------------------------------
%% Send Data on Socket.
%%
%% Any failure — an {error, Reason} return as well as an exception — is
%% routed through death_row/1 so the teardown exit reason is preserved
%% (see death_row below).
%% ---------------------------------------------------------------------
socket_send(Socket, Data) ->
    try gen_tcp:send(Socket, Data) of
        ok -> ok;
        {error, Reason} -> death_row({send_error, Reason})
    catch
        Type : Reason -> death_row({send_error, {Type, Reason}})
    end.
%% ---------------------------------------------------------------------
%% death_row
%%
%% When the connection is on its way down, operations begin to fail. We
%% catch the failures and call this function waiting for termination. We
%% should be terminated by one of our links to the other involved parties
%% that began bringing the connection down. By waiting for termination we
%% avoid altering the exit reason for the connection teardown. We however
%% limit the wait to 5 seconds and bring down the connection ourselves if
%% not terminated...
%% ---------------------------------------------------------------------
death_row() ->
    death_row(connection_closed).

death_row(normal) ->
    %% We do not want to exit with normal exit reason since it won't
    %% bring down linked processes... Fall back to connection_closed.
    death_row();
death_row(Reason) ->
    %% Wait (up to 5 seconds) to be killed through a link; otherwise
    %% exit with the given reason ourselves.
    receive after 5000 -> exit(Reason) end.
%% ---------------------------------------------------------------------
%% to_string(S) -> string()
%%
%% Convert an atom or a string to a string; a string is returned as-is.
%% ---------------------------------------------------------------------
to_string(S) when is_list(S) -> S;
to_string(S) when is_atom(S) -> atom_to_list(S).
%%
%% Copyright (c) 2012 - 2016, <NAME>
%% All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% @doc
%% scalable bloom filter, based on idea discussed in the paper
%% http://gsd.di.uminho.pt/members/cbm/ps/dbloom.pdf
-module(sbf).

-export([
    new/1,
    new/2,
    new/3,
    new/4,
    add/2,
    has/2
]).

%%
%% scalable bloom filter: a list of plain bloom filters, active
%% (most recent) filter at the head of the list.
-record(sbf, {
    %% Tightening ratio of the error probability, 0 < r < 1 (the
    %% default used by new/2 is 0.85). NOTE: previously declared as
    %% integer(), contradicting the float values actually stored.
    r :: float(),
    %% growth ratio: added to m when a new, larger filter is created
    s :: integer(),
    %% number of elements added to the scalable filter
    size :: integer(),
    %% constituent #bf{} filters
    list :: [_]
}).

%%
%% bloom filter (bf)
%% partition the M bits on k slices of size m = M/k bits,
%% one slice per hash function.
-record(bf, {
    %% target false-positive probability of this filter
    p :: float(),
    %% number of hash functions (= number of slices)
    k :: integer(),
    %% log2 of the slice size in bits (each slice holds 2^m bits)
    m :: integer(),
    %% capacity: element count at which the filter is considered full
    n :: integer(),
    %% number of elements added to this filter
    size :: integer(),
    %% one bit set per slice
    bits :: [_]
}).
%%
%% create new scalable bloom filter
%% C - initial capacity
%% P - false positive probability (default 0.001)
%% R - tightening ratio of error probability (as defined by paper,
%%     default 0.85)
%% S - growth ratio (as defined by paper, default 1)
new(C) ->
    new(C, 0.001).

new(C, P) ->
    new(C, P, 0.85).

new(C, P, R) ->
    new(C, P, R, 1).

new(C, P, R, S) ->
    %% n ≈ -m ln (1 - p)
    %% Error probability assigned to the first constituent filter,
    %% leaving headroom so the compounded error of later, tighter
    %% filters stays within P.
    P0 = P * (1 - R),
    K = bf_k(P0),
    %% per-slice probability: P0 spread over the K slices
    Pk = math:pow(P0, 1 / K),
    %% slice size (as log2) needed to hold C elements at that error rate
    M = 1 + trunc(log2(-C / math:log(1 - Pk))),
    #sbf{r = R, s = S, size = 0, list = [bf_new(M, P0)]}.
%%
%% add element E, returning the updated scalable filter
add(E, #sbf{r = R, s = S, size = Size, list = [H | T]} = State) ->
    case has(E, State) of
        true ->
            %% already (probably) a member: nothing to do
            State;
        false ->
            case bf_add(E, H) of
                %% filter overflow: the active (head) filter reached its
                %% capacity n, so prepend a fresh scaled filter that
                %% becomes the new active one.
                #bf{n = N, size = N} = F ->
                    State#sbf{size = Size + 1, list = [bf_scale(S, R, F), F | T]};
                F ->
                    State#sbf{size = Size + 1, list = [F | T]}
            end
    end.
%%
%% check membership: the element may be present if any of the
%% constituent bloom filters reports a (possible) hit.
has(_E, #sbf{list = []}) ->
    false;
has(E, #sbf{list = [Filter | Rest]} = State) ->
    case bf_has(E, Filter) of
        true -> true;
        false -> has(E, State#sbf{list = Rest})
    end.
%%%------------------------------------------------------------------
%%%
%%% bloom filter
%%%
%%%------------------------------------------------------------------

%% M - slice size exponent (each slice is 2^M bits)
%% P - desired error probability
bf_new(M, P) ->
    K = bf_k(P),
    Pk = math:pow(P, 1 / K),
    %% n: element count at which this filter is declared full
    N = trunc(-(1 bsl M) * math:log(1 - Pk)),
    #bf{
        p = P,
        k = K,
        m = M,
        n = N,
        size = 0,
        %% one independent bit array per hash function (slice)
        bits = [bits_new(1 bsl M) || _ <- lists:seq(1, K)]
    }.
%% optimal number of hash functions for error probability P at a 50%
%% slice fill rate: k = 1 + floor(log2(1 / P))
bf_k(P) ->
    trunc(log2(1 / P)) + 1.
%%
%% insert element to set
%% Sets one bit per slice. The filter is returned unchanged (size not
%% incremented) when every targeted bit was already set, i.e. the element
%% was apparently present.
bf_add(E, #bf{m = M, k = K, size = Size, bits = Bits0} = State) ->
    Mask = 1 bsl M - 1,
    Hash = hashes(E, Mask, K),
    %% Bool holds, per slice, whether that slice's bit was newly set
    {Bool, Bits} = lists:unzip([bits_set(H, B) || {H, B} <- lists:zip(Hash, Bits0)]),
    case lists:all(fun(X) -> not X end, Bool) of
        true ->
            State;
        false ->
            State#bf{size = Size + 1, bits = Bits}
    end.
%%
%% lookup element membership: true only when every slice has the
%% corresponding hashed bit set
bf_has(Element, #bf{m = M, k = K, bits = Slices}) ->
    Mask = (1 bsl M) - 1,
    Pairs = lists:zip(hashes(Element, Mask, K), Slices),
    lists:all(fun({Hash, Slice}) -> bits_get(Hash, Slice) end, Pairs).
%% The SBF starts with one filter with k0 slices and error probability P0.
%% When this filter gets full, a new one is added with k1 slices and
%% P1 = P0 * r error probability, where r is the tightening ratio with 0 < r < 1.
%% k0 = log2 P0 ^ −1
%% ki = log2 Pi ^ −1
%% S is added to the slice exponent, so each new filter's slices are
%% 2^S times larger.
bf_scale(S, R, #bf{p = P, m = M}) ->
    bf_new(M + S, P * R).
%%%------------------------------------------------------------------
%%%
%%% bits set
%%%
%%%------------------------------------------------------------------

%% number of bits packed per array cell; tuned for space / performance
-define(WORD, 128).

%% create a bit array able to hold N bits, all initially clear
bits_new(N) ->
    array:new(1 + N div ?WORD, [{default, 0}]).
%% set bit I; returns {true, NewBits} when the bit was previously clear,
%% {false, Bits} (unchanged) when it was already set
bits_set(I, Bits) ->
    Cell = I div ?WORD,
    Word = array:get(Cell, Bits),
    case (Word band (1 bsl (I rem ?WORD))) of
        0 ->
            {true, array:set(Cell, (Word bor (1 bsl (I rem ?WORD))), Bits)};
        _ ->
            {false, Bits}
    end.
%% test whether bit I is set.
%% The masked word only needs to be compared against 0 - a two-branch
%% case returning true/false is redundant, so the boolean is returned
%% directly.
bits_get(I, Bits) ->
    Cell = I div ?WORD,
    Word = array:get(Cell, Bits),
    Word band (1 bsl (I rem ?WORD)) =/= 0.
%%%------------------------------------------------------------------
%%%
%%% private
%%%
%%%------------------------------------------------------------------
%%
%% base-2 logarithm via change of base (math:log/1 is the natural log)
log2(X) ->
    math:log(X) / math:log(2).
%%
%% calculates K hash values for X, each masked into the slice bit range.
%% Two base hashes are combined to derive the whole sequence - the
%% double hashing technique defined at
%% http://www.eecs.harvard.edu/~kirsch/pubs/bbbf/rsa.pdf
%% (fix: trailing non-Erlang text appended to the final clause by file
%% corruption has been removed)
-define(HASH1(X), erlang:phash2([X], 1 bsl 32)).
-define(HASH2(X), erlang:phash2({X}, 1 bsl 32)).

hashes(X, Mask, K) ->
    hashes(?HASH1(X), ?HASH2(X), Mask, K).

hashes(_, _, _, 0) ->
    [];
hashes(A, B, Mask, K) ->
    X = (A + B) band Mask,
    [X | hashes(A, X, Mask, K - 1)].
%%%-------------------------------------------------------------------
%%% @author <NAME>
%%% @copyright (C) 2017
%%% @doc
%%%
%%% @end
%%% Created : 02. Dec 2017 20.00
%%%-------------------------------------------------------------------
-module(day2).
-author("ngunder").
%%--- Day 2: Corruption Checksum ---
%%
%% As you walk through the door, a glowing humanoid shape yells in your direction. "You there! Your state appears to be
%% idle. Come help us repair the corruption in this spreadsheet - if we take another millisecond, we'll have to display
%% an hourglass cursor!"
%%
%% The spreadsheet consists of rows of apparently-random numbers. To make sure the recovery process is on the right
%% track, they need you to calculate the spreadsheet's checksum. For each row, determine the difference between the
%% largest value and the smallest value; the checksum is the sum of all of these differences.
%%
%% For example, given the following spreadsheet:
%%
%% 5 1 9 5
%% 7 5 3
%% 2 4 6 8
%% The first row's largest and smallest values are 9 and 1, and their difference is 8.
%% The second row's largest and smallest values are 7 and 3, and their difference is 4.
%% The third row's difference is 6.
%% In this example, the spreadsheet's checksum would be 8 + 4 + 6 = 18.
%%
%% What is the checksum for the spreadsheet in your puzzle input?
%%
%% Your puzzle answer was 34925.
%%
%% --- Part Two ---
%%
%% "Great work; looks like we're on the right track after all. Here's a star for your effort." However, the program
%% seems a little worried. Can programs be worried?
%%
%% "Based on what we're seeing, it looks like all the User wanted is some information about the evenly divisible values
%% in the spreadsheet. Unfortunately, none of us are equipped for that kind of calculation - most of us specialize in
%% bitwise operations."
%%
%% It sounds like the goal is to find the only two numbers in each row where one evenly divides the other - that is,
%% where the result of the division operation is a whole number. They would like you to find those numbers on each line,
%% divide them, and add up each line's result.
%%
%% For example, given the following spreadsheet:
%%
%% 5 9 2 8
%% 9 4 7 3
%% 3 8 6 5
%% In the first row, the only two numbers that evenly divide are 8 and 2; the result of this division is 4.
%% In the second row, the two numbers are 9 and 3; the result is 3.
%% In the third row, the result is 2.
%% In this example, the sum of the results would be 4 + 3 + 2 = 9.
%%
%% What is the sum of each row's result in your puzzle input?
%%
%% Your puzzle answer was 221.
%%
%% Both parts of this puzzle are complete! They provide two gold stars: **
%%
%% API
-export([part_a/0,
         part_b/0,
         a/1,
         b/1,
         test/0]).

%% Solve part one against the committed puzzle input file.
part_a() ->
    a("./res/day2.input").

%% Solve part two against the committed puzzle input file.
part_b() ->
    b("./res/day2.input").

%% Part one: sum over rows of (max - min).
a(File) ->
    run(File, fun sub_small_large/1).

%% Part two: sum over rows of the quotient of the evenly-dividing pair.
b(File) ->
    run(File, fun div_small_large/1).
%% Read the spreadsheet file, convert each row to integers and sum the
%% per-row value produced by Fun.
run(File, Fun) ->
    {ok, Bin} = file:read_file(File),
    BreakUp = process_bin(Bin),
    lists:foldl(fun(Row, Acc) ->
                    Fun(convert_row(Row, [])) + Acc
                end, 0, BreakUp).
%% Self-test against the worked examples from the puzzle description;
%% crashes with badmatch when either part disagrees.
test() ->
    18 = a("./res/day2_test_a.input"),
    9 = b("./res/day2_test_b.input"),
    pass.
%% Split the raw file contents into rows of whitespace-separated number
%% tokens. The previous foldl-based version returned rows in reverse file
%% order; a comprehension keeps them in file order. (Callers only sum over
%% rows, so the checksum is unchanged either way.)
process_bin(Bin) ->
    [string:tokens(Row, "\t ") || Row <- string:tokens(binary_to_list(Bin), "\n")].
%% Convert a row of numeric token strings to integers, consing each onto
%% Acc. The result therefore carries the row's values in reverse order,
%% which is irrelevant to the downstream min/max and division searches.
convert_row(Tokens, Acc) ->
    lists:foldl(fun(Token, Converted) ->
                    [list_to_integer(Token) | Converted]
                end, Acc, Tokens).
%% Difference between the largest and smallest value in a row.
%% Seed the scan with the first two elements placed in order, then keep
%% a running minimum and maximum over the remaining values.
sub_small_large([First, Second | Rest]) when First < Second ->
    sub_small_large(Rest, First, Second);
sub_small_large([First, Second | Rest]) ->
    sub_small_large(Rest, Second, First).

sub_small_large([], Smallest, Largest) ->
    Largest - Smallest;
sub_small_large([H | T], Smallest, Largest) ->
    sub_small_large(T, min(H, Smallest), max(H, Largest)).
%% Find the only pair in the row where one value evenly divides the other
%% and return the quotient; returns 0 with a diagnostic when no such pair
%% exists. (Fix: trailing non-Erlang text appended to the final clause by
%% file corruption has been removed.)
div_small_large([]) ->
    io:format("Input Data appears incorrect ~n", []),
    0;
div_small_large([A | Rest]) ->
    check_div(A, Rest, Rest).

%% Try pivot A against every remaining candidate B in both directions;
%% when A pairs with nothing, restart with the next pivot.
check_div(_A, [], Rest) ->
    div_small_large(Rest);
check_div(A, [B | _], _) when A / B == A div B ->
    A div B;
check_div(A, [B | _], _) when B / A == B div A ->
    B div A;
check_div(A, [_ | T], Rest) ->
    check_div(A, T, Rest).
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(tracer_timeit).
-compile(export_all).
%% @doc Dynamically add timing to MFA. There are various types of
%% timing.
%%
%% all - time latency of all calls to MFA
%%
%% {sample, N, Max} - sample every N calls and stop sampling after Max
%%
%% {threshold, Millis, Max} - count # of calls where latency is > Millis
%% and count # of calls total, thus percentage of calls over threshold
%% Install a dbg tracer that times calls to Mod:Fun/Arity.
%% The user-facing Type is normalised to {Mode, Config, Counters}:
%%   {sample, N, Max}         -> counters {SinceLastSample, Sampled, Total}
%%   {threshold, Millis, Max} -> counters {OverThreshold, Total}
%%   {all, Max}               -> counters {Reported, Max}
timeit(Mod, Fun, Arity, Type) ->
    Type2 = case Type of
                {sample, N, Max} -> {sample, {N, Max}, {0, 0, 0}};
                {threshold, Millis, Max} -> {threshold, {Millis, Max}, {0, 0}};
                {all, Max} -> {all, {0, Max}}
            end,
    dbg:tracer(process, {fun trace/2, {orddict:new(), Type2}}),
    dbg:p(all, call),
    %% the {return_trace} match spec makes dbg emit return_from events,
    %% which is what allows call latency to be measured
    dbg:tpl(Mod, Fun, Arity, [{'_', [], [{return_trace}]}]).

%% Remove all trace patterns and stop the tracer.
stop() -> dbg:stop_clear().
%% dbg trace handler. The accumulator is {StartTimes, ModeState}:
%% StartTimes maps {Pid, Mod, Fun} to the timestamp captured on call
%% entry; ModeState carries the per-mode counters set up by timeit/4.
%% Fixes vs the original:
%%   - deprecated erlang:now/0 replaced with os:timestamp/0 (same tuple
%%     format, still valid as input to timer:now_diff/2)
%%   - threshold mode compared Max against the *stale* over-threshold
%%     count (Over), stopping one return event late; it now compares
%%     against the updated count (Over2)

%% call entry: remember when this call started
trace({trace, Pid, call, {Mod, Fun, _}}, {D, {all, {Count, Max}}}) ->
    D2 = orddict:store({Pid, Mod, Fun}, os:timestamp(), D),
    {D2, {all, {Count, Max}}};
trace({trace, Pid, call, {Mod, Fun, _}},
      {D, {sample, {N, Max}, {M, K, Total}}}) ->
    M2 = M + 1,
    Total2 = Total + 1,
    %% only start timing every N-th call
    if N == M2 ->
            D2 = orddict:store({Pid, Mod, Fun}, os:timestamp(), D),
            {D2, {sample, {N, Max}, {0, K, Total2}}};
       true ->
            {D, {sample, {N, Max}, {M2, K, Total2}}}
    end;
trace({trace, Pid, call, {Mod, Fun, _}},
      {D, {threshold, {Millis, Max}, {Over, Total}}}) ->
    D2 = orddict:store({Pid, Mod, Fun}, os:timestamp(), D),
    {D2, {threshold, {Millis, Max}, {Over, Total + 1}}};

%% call return: report the elapsed time, stop after Max reported events
trace({trace, Pid, return_from, {Mod, Fun, _}, _Result},
      Acc = {D, {all, {Count, Max}}}) ->
    Key = {Pid, Mod, Fun},
    case orddict:find(Key, D) of
        {ok, StartTime} ->
            Count2 = Count + 1,
            ElapsedUs = timer:now_diff(os:timestamp(), StartTime),
            ElapsedMs = ElapsedUs / 1000,
            io:format(user, "~p:~p:~p: ~p ms\n", [Pid, Mod, Fun, ElapsedMs]),
            if Count2 == Max -> stop();
               true ->
                    D2 = orddict:erase(Key, D),
                    {D2, {all, {Count2, Max}}}
            end;
        error -> Acc
    end;
trace({trace, Pid, return_from, {Mod, Fun, _}, _Result},
      Acc = {D, {sample, {N, Max}, {M, K, Total}}}) ->
    Key = {Pid, Mod, Fun},
    case orddict:find(Key, D) of
        {ok, StartTime} ->
            K2 = K + 1,
            ElapsedUs = timer:now_diff(os:timestamp(), StartTime),
            ElapsedMs = ElapsedUs / 1000,
            io:format(user, "[sample ~p/~p] ~p:~p:~p: ~p ms\n",
                      [K2, Total, Pid, Mod, Fun, ElapsedMs]),
            if K2 == Max -> stop();
               true ->
                    D2 = orddict:erase(Key, D),
                    {D2, {sample, {N, Max}, {M, K2, Total}}}
            end;
        error -> Acc
    end;
trace({trace, Pid, return_from, {Mod, Fun, _}, _Result},
      Acc = {D, {threshold, {Millis, Max}, {Over, Total}}}) ->
    Key = {Pid, Mod, Fun},
    case orddict:find(Key, D) of
        {ok, StartTime} ->
            ElapsedUs = timer:now_diff(os:timestamp(), StartTime),
            ElapsedMs = ElapsedUs / 1000,
            if ElapsedMs > Millis ->
                    Over2 = Over + 1,
                    io:format(user, "[over threshold ~p, ~p/~p] ~p:~p:~p: ~p ms\n",
                              [Millis, Over2, Total, Pid, Mod, Fun, ElapsedMs]);
               true ->
                    Over2 = Over
            end,
            %% was: Max == Over (stale counter) - compare the updated count
            if Max == Over2 -> stop();
               true ->
                    D2 = orddict:erase(Key, D),
                    {D2, {threshold, {Millis, Max}, {Over2, Total}}}
            end;
        error -> Acc
    end.
%%
%% Copyright (c) 2015-2016 <NAME>. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Max Int CRDT.
%%
%% @reference <NAME>, <NAME>, <NAME> and <NAME>
%% Composition of State-based CRDTs (2015)
%% [http://haslab.uminho.pt/cbm/files/crdtcompositionreport.pdf]
-module(state_max_int).
-author("<NAME> <<EMAIL>>").
-behaviour(type).
-behaviour(state_type).
-define(TYPE, ?MODULE).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-export([new/0, new/1]).
-export([mutate/3, delta_mutate/3, merge/2]).
-export([query/1, equal/2, is_bottom/1,
is_inflation/2, is_strict_inflation/2,
irreducible_is_strict_inflation/2]).
-export([join_decomposition/1, delta/2, digest/1]).
-export([encode/2, decode/2]).
-export_type([state_max_int/0, state_max_int_op/0]).
-opaque state_max_int() :: {?TYPE, payload()}.
-type payload() :: non_neg_integer().
-type state_max_int_op() :: increment.
%% @doc Create a new `state_max_int()'.
%% The bottom value is 0.
-spec new() -> state_max_int().
new() ->
    {?TYPE, 0}.

%% @doc Create a new `state_max_int()' (no construction arguments supported).
-spec new([term()]) -> state_max_int().
new([]) ->
    new().

%% @doc Mutate a `state_max_int()'; delegates to state_type:mutate/3.
-spec mutate(state_max_int_op(), type:id(), state_max_int()) ->
    {ok, state_max_int()}.
mutate(Op, Actor, {?TYPE, _}=CRDT) ->
    state_type:mutate(Op, Actor, CRDT).

%% @doc Delta-mutate a `state_max_int()'.
%% The first argument can only be `increment'.
%% Returns a `state_max_int()' delta which is a new `state_max_int()'
%% with the value incremented by one.
-spec delta_mutate(state_max_int_op(), type:id(), state_max_int()) ->
    {ok, state_max_int()}.
delta_mutate(increment, _Actor, {?TYPE, Value}) ->
    {ok, {?TYPE, Value + 1}}.
%% @doc Returns the value of the `state_max_int()'.
-spec query(state_max_int()) -> non_neg_integer().
query({?TYPE, Value}) ->
    Value.

%% @doc Merge two `state_max_int()'.
%% Join is the max function.
-spec merge(state_max_int(), state_max_int()) -> state_max_int().
merge({?TYPE, Value1}, {?TYPE, Value2}) ->
    {?TYPE, max(Value1, Value2)}.

%% @doc Equality for `state_max_int()': equal when the values are equal.
-spec equal(state_max_int(), state_max_int()) -> boolean().
equal({?TYPE, Value1}, {?TYPE, Value2}) ->
    Value1 == Value2.
%% @doc Check if a Max Int is bottom (i.e. still holds the initial value 0).
-spec is_bottom(state_max_int()) -> boolean().
is_bottom({?TYPE, Value}) ->
    Value == 0.

%% @doc Given two `state_max_int()', check if the second is an inflation
%% of the first.
%% The second is an inflation if its value is greater or equal
%% to the value of the first.
-spec is_inflation(state_max_int(), state_max_int()) -> boolean().
is_inflation({?TYPE, Value1}, {?TYPE, Value2}) ->
    Value1 =< Value2.

%% @doc Check for strict inflation: the second value is strictly greater.
-spec is_strict_inflation(state_max_int(), state_max_int()) -> boolean().
is_strict_inflation({?TYPE, Value1}, {?TYPE, Value2}) ->
    Value1 < Value2.
%% @doc Check for irreducible strict inflation: the irreducible state A
%% strictly inflates the state carried in the digest.
-spec irreducible_is_strict_inflation(state_max_int(),
                                      state_type:digest()) ->
    boolean().
irreducible_is_strict_inflation(A, {state, B}) ->
    is_strict_inflation(B, A).

%% @doc The digest of a max int is simply its full state.
-spec digest(state_max_int()) -> state_type:digest().
digest({?TYPE, _}=CRDT) ->
    {state, CRDT}.

%% @doc Join decomposition for `state_max_int()': the singleton list,
%% since the value itself is already irreducible.
-spec join_decomposition(state_max_int()) -> [state_max_int()].
join_decomposition({?TYPE, _}=MaxInt) ->
    [MaxInt].

%% @doc Delta calculation for `state_max_int()'; delegates to
%% state_type:delta/2.
-spec delta(state_max_int(), state_type:digest()) -> state_max_int().
delta({?TYPE, _}=A, B) ->
    state_type:delta(A, B).

%% @doc Serialise a `state_max_int()'; only the `erlang' format is supported.
-spec encode(state_type:format(), state_max_int()) -> binary().
encode(erlang, {?TYPE, _}=CRDT) ->
    erlang:term_to_binary(CRDT).

%% @doc Deserialise; asserts the decoded term really is a `state_max_int()'.
-spec decode(state_type:format(), binary()) -> state_max_int().
decode(erlang, Binary) ->
    {?TYPE, _} = CRDT = erlang:binary_to_term(Binary),
    CRDT.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).

new_test() ->
    ?assertEqual({?TYPE, 0}, new()).

query_test() ->
    MaxInt0 = new(),
    MaxInt1 = {?TYPE, 17},
    ?assertEqual(0, query(MaxInt0)),
    ?assertEqual(17, query(MaxInt1)).

%% deltas produced by increment advance the value by one once merged
delta_increment_test() ->
    Actor = 1,
    MaxInt0 = new(),
    {ok, {?TYPE, Delta1}} = delta_mutate(increment, Actor, MaxInt0),
    MaxInt1 = merge({?TYPE, Delta1}, MaxInt0),
    {ok, {?TYPE, Delta2}} = delta_mutate(increment, Actor, MaxInt1),
    MaxInt2 = merge({?TYPE, Delta2}, MaxInt1),
    ?assertEqual({?TYPE, 1}, {?TYPE, Delta1}),
    ?assertEqual({?TYPE, 1}, MaxInt1),
    ?assertEqual({?TYPE, 2}, {?TYPE, Delta2}),
    ?assertEqual({?TYPE, 2}, MaxInt2).

increment_test() ->
    Actor = 1,
    MaxInt0 = {?TYPE, 15},
    {ok, MaxInt1} = mutate(increment, Actor, MaxInt0),
    {ok, MaxInt2} = mutate(increment, Actor, MaxInt1),
    ?assertEqual({?TYPE, 16}, MaxInt1),
    ?assertEqual({?TYPE, 17}, MaxInt2).

%% lattice law: merge must be idempotent
merge_idempotent_test() ->
    MaxInt1 = {?TYPE, 1},
    MaxInt2 = {?TYPE, 17},
    MaxInt3 = merge(MaxInt1, MaxInt1),
    MaxInt4 = merge(MaxInt2, MaxInt2),
    ?assertEqual(MaxInt1, MaxInt3),
    ?assertEqual(MaxInt2, MaxInt4).

%% lattice law: merge must be commutative
merge_commutative_test() ->
    MaxInt1 = {?TYPE, 1},
    MaxInt2 = {?TYPE, 17},
    MaxInt3 = merge(MaxInt1, MaxInt2),
    MaxInt4 = merge(MaxInt2, MaxInt1),
    ?assertEqual({?TYPE, 17}, MaxInt3),
    ?assertEqual({?TYPE, 17}, MaxInt4).

%% deltas merge with states and with each other like any other state
merge_deltas_test() ->
    MaxInt1 = {?TYPE, 1},
    Delta1 = {?TYPE, 17},
    Delta2 = {?TYPE, 23},
    MaxInt2 = merge(Delta1, MaxInt1),
    MaxInt3 = merge(MaxInt1, Delta1),
    DeltaGroup = merge(Delta1, Delta2),
    ?assertEqual({?TYPE, 17}, MaxInt2),
    ?assertEqual({?TYPE, 17}, MaxInt3),
    ?assertEqual({?TYPE, 23}, DeltaGroup).

equal_test() ->
    MaxInt1 = {?TYPE, 17},
    MaxInt2 = {?TYPE, 23},
    ?assert(equal(MaxInt1, MaxInt1)),
    ?assertNot(equal(MaxInt1, MaxInt2)).

is_bottom_test() ->
    MaxInt0 = new(),
    MaxInt1 = {?TYPE, 17},
    ?assert(is_bottom(MaxInt0)),
    ?assertNot(is_bottom(MaxInt1)).

is_inflation_test() ->
    MaxInt1 = {?TYPE, 23},
    MaxInt2 = {?TYPE, 42},
    ?assert(is_inflation(MaxInt1, MaxInt1)),
    ?assert(is_inflation(MaxInt1, MaxInt2)),
    ?assertNot(is_inflation(MaxInt2, MaxInt1)),
    %% check inflation with merge
    ?assert(state_type:is_inflation(MaxInt1, MaxInt1)),
    ?assert(state_type:is_inflation(MaxInt1, MaxInt2)),
    ?assertNot(state_type:is_inflation(MaxInt2, MaxInt1)).

is_strict_inflation_test() ->
    MaxInt1 = {?TYPE, 23},
    MaxInt2 = {?TYPE, 42},
    ?assertNot(is_strict_inflation(MaxInt1, MaxInt1)),
    ?assert(is_strict_inflation(MaxInt1, MaxInt2)),
    ?assertNot(is_strict_inflation(MaxInt2, MaxInt1)).

irreducible_is_strict_inflation_test() ->
    MaxInt1 = {?TYPE, 10},
    Digest = digest(MaxInt1),
    Irreducible1 = {?TYPE, 9},
    Irreducible2 = {?TYPE, 10},
    Irreducible3 = {?TYPE, 11},
    ?assertNot(irreducible_is_strict_inflation(Irreducible1, Digest)),
    ?assertNot(irreducible_is_strict_inflation(Irreducible2, Digest)),
    ?assert(irreducible_is_strict_inflation(Irreducible3, Digest)).

join_decomposition_test() ->
    MaxInt1 = {?TYPE, 17},
    Decomp1 = join_decomposition(MaxInt1),
    ?assertEqual([{?TYPE, 17}], Decomp1).
%% round-trip: encode/2 followed by decode/2 must be the identity.
%% (Fix: trailing non-Erlang text appended to -endif. by file corruption
%% has been removed.)
encode_decode_test() ->
    MaxInt = {?TYPE, 17},
    Binary = encode(erlang, MaxInt),
    EMaxInt = decode(erlang, Binary),
    ?assertEqual(MaxInt, EMaxInt).

-endif.
%%==============================================================================
%% @author <NAME> <<EMAIL>>
%% @copyright 2015 AWeber Communications
%% @end
%%==============================================================================
-module(autocluster_util).
%% API
-export([as_atom/1,
as_integer/1,
as_string/1,
node_name/1,
parse_port/1]).
%% @doc Coerce Value (atom, binary or string) to an atom; any other type is
%% logged and returned unchanged.
%% SECURITY NOTE: list_to_atom/1 creates atoms that are never garbage
%% collected - do not feed unbounded untrusted input to this function
%% (atom table exhaustion is a denial of service).
%% Redundant `is_atom(Value) =:= true' guards simplified to `is_atom(Value)'.
as_atom(Value) when is_atom(Value) -> Value;
as_atom(Value) when is_binary(Value) -> list_to_atom(binary_to_list(Value));
as_atom(Value) when is_list(Value) -> list_to_atom(Value);
as_atom(Value) ->
    autocluster_log:error("Unexpected data type for atom value: ~p~n", [Value]),
    Value.
%% @doc Coerce Value to an integer. An empty list yields `undefined';
%% unexpected types are logged and returned unchanged.
%% Redundant `... =:= true' guards simplified.
as_integer([]) -> undefined;
as_integer(Value) when is_list(Value) -> list_to_integer(Value);
as_integer(Value) when is_integer(Value) -> Value;
as_integer(Value) ->
    autocluster_log:error("Unexpected data type for integer value: ~p~n", [Value]),
    Value.
%% @doc Coerce Value (atom, binary, integer or list) to a string.
%% An empty list yields `undefined' - note this also catches the empty
%% string "", which is the same term. Unexpected types are logged and
%% returned unchanged.
%% Redundant `... =:= true' guards simplified.
as_string([]) -> undefined;
as_string(Value) when is_atom(Value) -> atom_to_list(Value);
as_string(Value) when is_binary(Value) -> binary_to_list(Value);
as_string(Value) when is_integer(Value) -> integer_to_list(Value);
as_string(Value) when is_list(Value) -> Value;
as_string(Value) ->
    autocluster_log:error("Unexpected data type for list value: ~p~n", [Value]),
    Value.
%% @doc Build the rabbit@Host node atom for clustering purposes.
%% With long names enabled the host string is used verbatim; with short
%% names everything after the first "." is dropped.
%% (length/1 + lists:nth(1, ...) replaced with a pattern match on the
%% token list - same result, no O(n) length scan.)
node_name(Value) ->
    Host = case autocluster_config:get(longname) of
               true ->
                   as_string(Value);
               false ->
                   case string:tokens(as_string(Value), ".") of
                       [_Single] -> as_string(Value);
                       [First | _] -> as_string(First)
                   end
           end,
    list_to_atom(string:join(["rabbit", Host], "@")).
%% @doc Return the numeric port. Docker-style linking may rewrite a port
%% configuration value into a URI such as "tcp://1.2.3.4:5672", so for
%% string input only the text after the last ":" is parsed.
%% (Fix: trailing non-Erlang text appended to the final clause by file
%% corruption has been removed.)
parse_port(Value) when is_list(Value) -> as_integer(lists:last(string:tokens(Value, ":")));
parse_port(Value) -> as_integer(Value).
%%%-------------------------------------------------------------------
%%% @author <NAME>, <<EMAIL>>
%%% @doc
%%% Command line utility behaviour. Usage example:
%%%
%%% From an escript main/1 function (requires `-mode(compile)'):
%%% ```
%%% cli:run(Args).
%%% '''
%%%
%%% Or, to limit cli behaviour discovery,
%%% ```
%%% cli:run(Args, #{modules => ?MODULE, progname => ?MODULE}).
%%% '''
%%% Other options available for run/2:
%%% <ul><li>`modules':<ul>
%%% <li>`all_loaded' - search all loaded modules (`code:all_loaded()') for `cli' behaviour</li>
%%% <li>`module()' - use this module (must export `cli/0')</li>
%%% <li>[module()] - list of modules (must export `cli/0')</li></ul></li>
%%% <li>`warn': set to `suppress' suppresses warnings logged</li>
%%% <li>`error': defines what action is taken upon parser error. Use `ok' to completely ignore the
%%% error (historical behaviour, useful for testing), `error' to raise an exception,
%%% `halt' to halt the emulator with exit code 1 (default behaviour), and `{halt, non_neg_integer()}'
%%% for a custom exit code halting the emulator</li>
%%% <li>`help': set to false suppresses printing `usage' when parser produces
%%% an error, and disables default --help/-h behaviour</li>
%%% <li>`prefixes': prefixes passed to argparse</li>
%%% <li>`progname': specifies executable name instead of 'erl'</li>
%%% </ul>
%%%
%%% Warnings are printed to OTP logger, unless suppressed.
%%%
%%% cli framework attempts to create a handler for each
%%% command exported, including intermediate (non-leaf)
%%% commands, if it can find function exported with
%%% suitable signature.
%%%
%%% cli examples are <a href="https://github.com/max-au/argparse/tree/master/doc/examples">available on GitHub</a>
%%%
%%% @end
-module(cli).
-author("<EMAIL>").
-export([
run/1,
run/2
]).
%%--------------------------------------------------------------------
%% Behaviour definition
%% Callback returning CLI mappings.
%% Must return a command, that may contain sub-commands.
%% Also returns arguments, and handler.
-callback cli() -> argparse:command().
%%--------------------------------------------------------------------
%% API
-compile(warn_missing_spec).
-spec run(Args :: [string()]) -> term().
%% @doc Run the CLI with default options.
%% @equiv run(Args, #{})
run(Args) ->
    run(Args, #{}).
%% Options map.
%% Allows to choose which modules to consider, and error handling mode.
%% `modules' can be:
%% `all_loaded' - code:all_loaded(), search for `cli' behaviour,
%% module() for a single module (may not have `cli' behaviour),
%% [module()] for a list of modules (may not have `cli' behaviour)
%% `warn' set to `suppress' suppresses warnings logged
%% `help' set to false suppresses printing `usage' when parser produces
%% an error, and disables default --help/-h behaviour
-type run_options() :: #{
modules => all_loaded | module() | [module()],
warn => suppress | warn,
help => boolean(),
error => ok | error | halt | {halt, non_neg_integer()},
prefixes => [integer()],%% prefixes passed to argparse
progname => string() %% specifies executable name instead of 'erl'
}.
%% @doc CLI entry point: parses arguments and executes the selected function.
%% Discovers the candidate modules (per the `modules' option), merges their
%% cli/0 command maps, then matches a command and runs its handler.
%% @param Args arguments used to run CLI, e.g. init:get_plain_arguments().
%% @returns callback result, or 'ok' when a help/error message was printed
%%      and the `error' option is set to `ok' (ignore errors, always return ok)
-spec run([string()], run_options()) -> term().
run(Args, Options) ->
    Modules = modules(maps:get(modules, Options, all_loaded)),
    CmdMap = discover_commands(Modules, Options),
    dispatch(Args, CmdMap, Modules, Options).
%%--------------------------------------------------------------------
%% Internal implementation
-include_lib("kernel/include/logger.hrl").
%% Returns the modules to inspect: either every loaded module that
%% declares the `cli' behaviour, or the explicitly supplied module(s).
modules(all_loaded) ->
    Loaded = [Module || {Module, _File} <- code:all_loaded()],
    lists:filter(
        fun(Module) -> lists:member(?MODULE, behaviours(Module)) end,
        Loaded);
modules(Mod) when is_atom(Mod) ->
    [Mod];
modules(Mods) when is_list(Mods) ->
    Mods.
%% Collect every behaviour a module declares, accepting both the US
%% ("behavior") and UK ("behaviour") attribute spellings.
behaviours(Module) ->
    Attrs = proplists:get_value(attributes, Module:module_info(), []),
    lists:flatten([proplists:get_all_values(Key, Attrs) || Key <- [behavior, behaviour]]).
%% Fold the cli/0 callback of every module into one validated command map:
%% the first non-empty handler wins, help strings are concatenated,
%% arguments are appended and sub-commands merged (duplicates skipped).
%% A module whose cli/0 call or validation crashes contributes nothing
%% (with a warning unless warn =:= suppress).
discover_commands(Modules, Options) ->
    Warn = maps:get(warn, Options, warn),
    ModCount = length(Modules),
    lists:foldl(
        fun (Mod, Cmds) ->
            ModCmd =
                try {_, MCmd} = argparse:validate(Mod:cli(), Options), MCmd
                catch
                    Class:Reason:Stack when Warn =:= warn ->
                        ?LOG_WARNING("Error calling ~s:cli(): ~s:~p~n~p",
                            [Mod, Class, Reason, Stack]), #{};
                    _:_ when Warn =:= suppress ->
                        #{}
                end,
            %% handlers: use first non-empty handler
            Cmds1 = case maps:find(handler, ModCmd) of
                        {ok, Handler} when is_map_key(handler, Cmds) ->
                            %% merge handler - and warn when not suppressed
                            Warn =:= warn andalso
                                ?LOG_WARNING("Multiple handlers defined for top-level command, ~p chosen, ~p ignored",
                                    [maps:get(handler, Cmds), Handler]),
                            Cmds;
                        {ok, Handler} ->
                            Cmds#{handler => Handler};
                        error ->
                            Cmds
                    end,
            %% help: concatenate help lines
            Cmds2 =
                if is_map_key(help, ModCmd) ->
                        Cmds1#{help => maps:get(help, ModCmd) ++ maps:get(help, Cmds1, "")};
                   true -> Cmds1
                end,
            %% merge arguments, and warn if warnings are not suppressed, and there
            %% is more than a single module
            Cmds3 = merge_arguments(maps:get(arguments, ModCmd, []),
                (ModCount > 1 andalso Warn =:= warn), Cmds2),
            %% merge commands
            merge_commands(maps:get(commands, ModCmd, #{}), Mod, Options, Cmds3)
        end, #{}, Modules).
%% Dispatches Args against the merged command map. On a parser error,
%% either prints usage/help (when the error was really a "-h"/"--help"
%% request and help is enabled) or the formatted error, then applies the
%% configured error action.
dispatch(Args, CmdMap, Modules, Options) ->
    HelpEnabled = maps:get(help, Options, true),
    %% attempt to dispatch the command
    try argparse:parse(Args, CmdMap, Options) of
        {ArgMap, PathTo} ->
            %% a specific (sub)command matched - run its handler
            run_handler(CmdMap, ArgMap, PathTo, undefined);
        ArgMap ->
            %% no sub-command: fall back to cli/1 lookup over Modules
            run_handler(CmdMap, ArgMap, {[], CmdMap}, {Modules, Options})
    catch
        error:{argparse, Reason} when HelpEnabled =:= false ->
            io:format("error: ~s", [argparse:format_error(Reason)]),
            dispatch_error(Options, Reason);
        error:{argparse, Reason} ->
            %% see if it was cry for help that triggered error message
            Prefixes = maps:get(prefixes, Options, "-"),
            case help_requested(Reason, Prefixes) of
                false ->
                    Fmt = argparse:format_error(Reason, CmdMap, Options),
                    io:format("error: ~s", [Fmt]);
                CmdPath ->
                    Fmt = argparse:help(CmdMap, Options#{command => tl(CmdPath)}),
                    io:format("~s", [Fmt])
            end,
            dispatch_error(Options, Reason)
    end.
%% Applies the configured error action. The default - and the fallback
%% for any unrecognised setting - is to halt the emulator with exit
%% code 1.
dispatch_error(Options, Reason) ->
    case maps:get(error, Options, halt) of
        ok ->
            ok;
        error ->
            error(Reason);
        {halt, ExitCode} ->
            erlang:halt(ExitCode);
        _Halt ->
            erlang:halt(1)
    end.
%% Executes the handler resolved for the matched command.
%% Supported handler shapes:
%%   {Mod, Fun, Default} / {Fun, Default} - positional call: arguments are
%%       extracted from the arg map in declaration order, Default fills
%%       any missing ones
%%   {Mod, Fun} / fun/1                   - called with the raw arg map
%%   no handler resolved at the top level - fall back to cli/1 discovery
run_handler(CmdMap, ArgMap, {Path, #{handler := {Mod, ModFun, Default}}}, _MO) ->
    ArgList = arg_map_to_arg_list(CmdMap, Path, ArgMap, Default),
    %% if argument count may not match, better error can be produced
    erlang:apply(Mod, ModFun, ArgList);
run_handler(_CmdMap, ArgMap, {_Path, #{handler := {Mod, ModFun}}}, _MO) when is_atom(Mod), is_atom(ModFun) ->
    Mod:ModFun(ArgMap);
run_handler(CmdMap, ArgMap, {Path, #{handler := {Fun, Default}}}, _MO) when is_function(Fun) ->
    ArgList = arg_map_to_arg_list(CmdMap, Path, ArgMap, Default),
    %% if argument count may not match, better error can be produced
    erlang:apply(Fun, ArgList);
run_handler(_CmdMap, ArgMap, {_Path, #{handler := Handler}}, _MO) when is_function(Handler, 1) ->
    Handler(ArgMap);
run_handler(CmdMap, ArgMap, {[], _}, {Modules, Options}) ->
    %% no specific handler matched: look for a module exporting cli/1
    exec_cli(Modules, CmdMap, [ArgMap], Options).
%% finds the first module that exports cli/1 and executes it; when no
%% module does, prints usage instead
exec_cli([], CmdMap, _ArgMap, ArgOpts) ->
    %% command not found, let's print usage
    io:format(argparse:help(CmdMap, ArgOpts));
exec_cli([Mod|Tail], CmdMap, Args, ArgOpts) ->
    case erlang:function_exported(Mod, cli, length(Args)) of
        true ->
            erlang:apply(Mod, cli, Args);
        false ->
            exec_cli(Tail, CmdMap, Args, ArgOpts)
    end.
%% argparse does not allow clashing options, so if cli is ever to support
%% that, logic to un-clash should be here.
%% Appends Args to the accumulated top-level argument list, warning once
%% when several modules contribute global arguments.
merge_arguments([], _Warn, Existing) ->
    Existing;
merge_arguments(Args, Warn, Existing) ->
    Warn andalso
        ?LOG_WARNING("cli: multiple modules may export global attributes: ~p", [Args]),
    ExistingArgs = maps:get(arguments, Existing, []),
    Existing#{arguments => ExistingArgs ++ Args}.
%% argparse accepts a map of commands, which means command names can
%% never clash there. Yet for cli it is possible when multiple modules
%% export a command with the same name. In that case the duplicate is
%% skipped, with a warning unless suppressed.
merge_commands(Cmds, Mod, Options, Existing) ->
    Warn = maps:get(warn, Options, warn),
    MergedCmds = maps:fold(
        fun (Name, Cmd, Acc) ->
            case maps:find(Name, Acc) of
                error ->
                    %% merge command with name Name into Acc-umulator
                    Acc#{Name => create_handlers(Mod, Name, Cmd, maps:find(default, Options))};
                {ok, Another} when Warn =:= warn ->
                    %% do not merge this command, another module already exports it
                    ?LOG_WARNING("cli: duplicate definition for ~s found, skipping ~P",
                        [Name, 8, Another]), Acc;
                {ok, _Another} when Warn =:= suppress ->
                    %% don't merge duplicate, and don't complain about it
                    Acc
            end
        end, maps:get(commands, Existing, #{}), Cmds
    ),
    Existing#{commands => MergedCmds}.
%% Descends into sub-commands creating handlers where applicable: a
%% command without an explicit handler (or with handler `optional') gets
%% a {Mod, CommandNameAtom[, Default]} handler derived from its name.
create_handlers(Mod, CmdName, Cmd0, DefaultTerm) ->
    Handler =
        case maps:find(handler, Cmd0) of
            error ->
                make_handler(CmdName, Mod, DefaultTerm);
            {ok, optional} ->
                make_handler(CmdName, Mod, DefaultTerm);
            {ok, Existing} ->
                Existing
        end,
    %% recurse into sub-commands, if any
    Cmd = Cmd0#{handler => Handler},
    case maps:find(commands, Cmd) of
        error ->
            Cmd;
        {ok, Sub} ->
            NewCmds = maps:map(fun (CN, CV) -> create_handlers(Mod, CN, CV, DefaultTerm) end, Sub),
            Cmd#{commands => NewCmds}
    end.
%% makes handler in required format; list_to_existing_atom/1 is used so a
%% command name can only map onto a function atom that already exists
%% (this also avoids atom-table growth from arbitrary command names).
%% NOTE(review): the {ok, Default} clause does not catch badarg, so a
%% command whose name matches no existing atom raises there instead of
%% producing the invalid_command error - confirm this is intended.
make_handler(CmdName, Mod, error) ->
    try
        {Mod, list_to_existing_atom(CmdName)}
    catch
        error:badarg ->
            error({invalid_command, [CmdName], handler, "handler for command does not exist"})
    end;
make_handler(CmdName, Mod, {ok, Default}) ->
    {Mod, list_to_existing_atom(CmdName), Default}.
%% Detect whether the parse error was really a request for help, i.e. the
%% unknown argument was "-h" or "--help" written with any configured
%% prefix character. Returns the command path on a help request, false
%% otherwise.
help_requested({unknown_argument, CmdPath, [Prefix, $h]}, Prefixes) ->
    is_prefix(Prefix, Prefixes, CmdPath);
help_requested({unknown_argument, CmdPath, [Prefix, Prefix, $h, $e, $l, $p]}, Prefixes) ->
    is_prefix(Prefix, Prefixes, CmdPath);
help_requested(_, _) ->
    false.

%% CmdPath when Prefix is one of the supplied Prefixes, false otherwise
%% (andalso yields false directly when the membership test fails)
is_prefix(Prefix, Prefixes, CmdPath) ->
    lists:member(Prefix, Prefixes) andalso CmdPath.
%% Given a command map, the path to a specific command, and a parsed
%% argument map, returns a list of arguments - used to transform a
%% map-based callback handler into a positional one. Arguments missing
%% from ArgMap get Default.
%% (Fix: trailing non-Erlang text appended to the final clause by file
%% corruption has been removed. The old comment claimed duplicate names
%% are ignored, which the code does not do.)
arg_map_to_arg_list(Command, Path, ArgMap, Default) ->
    AllArgs = collect_arguments(Command, Path, []),
    [maps:get(Arg, ArgMap, Default) || #{name := Arg} <- AllArgs].

%% recursively descend into Path, accumulating argument specs along the way.
%% NOTE(review): the default for a missing 'commands' key is H itself,
%% which would make the outer maps:get fail with badmap; in practice every
%% Path entry should exist in the command map - confirm.
collect_arguments(Command, [], Acc) ->
    Acc ++ maps:get(arguments, Command, []);
collect_arguments(Command, [H|Tail], Acc) ->
    Args = maps:get(arguments, Command, []),
    Next = maps:get(H, maps:get(commands, Command, H)),
    collect_arguments(Next, Tail, Acc ++ Args).
-module(assignment1).
-export([area/1,perimeter/1,enclose/1,bitsR/1,bitsTR/1,tests/0]).
%% Read https://en.wikipedia.org/wiki/Triangle#Computing_the_sides_and_angles for inspiration about triangle representations. Investigations:
%% 1) No need for a base and a height but heavy use of trig funcs, which are very expensive:
%% {triangle,{Xa,Ya,A,Alpha},{Xb,Yb,B,Beta},{Xc,Yc,C,Gamma}}
%%
%% 2) Easier to calculate the area and no need to use trig funcs but has some duplicated data:
%% {triangle,{Xa,Ya,AB},{Xb,Yb,BC},{Xc,Yc,CA}, Base,Height}
%%
%% 3) Cheap enough to compute the area - sqrt is not as expensive as sin - without requiring data duplication. Moreover area re-uses perimeter which is also nice:
%% {triangle,{Xa,Ya},{Xb,Yb},{Xc,Yc},A,B,C}
%% Runs every unit check and prints one boolean result per check.
tests() ->
    io:format("testArea:~w~ntestPerimeter:~w~ntestEnclose:~w~ntestBitsR:~w~ntestBitsTR:~w~n", [testArea(), testPerimeter(), testEnclose(), testBitsR(), testBitsTR()]).
%% Each check below returns true when the function under test produces
%% the expected value for a known input.
testArea() ->
    area({triangle,{1,5},{3,6},{1,3},4,3,5}) == 6.0.
testPerimeter() ->
    perimeter({triangle,{1,1},{3,3},{1,3},2,3,3.6}) == 8.6.
testEnclose() ->
    TestCircle = enclose({circle,{2,2},1}) == {rectangle,{1,1},2,2},
    TestTriangle = enclose({triangle,{1,1},{3,1},{2,2},2,1.4142,1.4142}) == {rectangle,{1,1},1,2},
    TestRectangle = enclose({rectangle,{6,6},3,4}) == {rectangle,{6,6},3,4},
    TestCircle and TestRectangle and TestTriangle.
testBitsR() ->
    (bitsR(1111) == 6) and (bitsR(15) == 4) and (bitsR(0) == 0).
testBitsTR() ->
    (bitsTR(1111) == 6) and (bitsTR(15) == 4) and (bitsTR(0) == 0).
%% Area of a shape. Triangles use Heron's formula
%% (https://en.wikipedia.org/wiki/Heron%27s_formula), built on top of
%% perimeter/1: half the perimeter is the semiperimeter.
area({circle, {_X, _Y}, R}) ->
    math:pi() * R * R;
area({rectangle, {_X, _Y}, H, W}) ->
    H * W;
area({triangle, _, _, _, SideA, SideB, SideC} = T) ->
    S = perimeter(T) / 2,
    math:sqrt(S * (S - SideA) * (S - SideB) * (S - SideC)).
%% Perimeter of a shape (circumference for circles).
perimeter({circle, {_X, _Y}, R}) ->
    2 * math:pi() * R;
perimeter({rectangle, {_X, _Y}, H, W}) ->
    2 * H + 2 * W;
perimeter({triangle, _PA, _PB, _PC, SideA, SideB, SideC}) ->
    SideA + SideB + SideC.
%% Smallest axis-aligned bounding rectangle of a shape, returned as
%% {rectangle, {MinX, MinY}, Height, Width}. A rectangle encloses
%% itself.
enclose({circle, {X, Y}, R}) ->
    D = R * 2,
    {rectangle, {X - R, Y - R}, D, D};
enclose({rectangle, _Origin, _H, _W} = Rect) ->
    Rect;
enclose({triangle, {Xa, Ya}, {Xb, Yb}, {Xc, Yc}, _A, _B, _C}) ->
    Xs = [Xa, Xb, Xc],
    Ys = [Ya, Yb, Yc],
    Left = lists:min(Xs),
    Bottom = lists:min(Ys),
    {rectangle, {Left, Bottom}, lists:max(Ys) - Bottom, lists:max(Xs) - Left}.
%% Population count (number of set bits) via direct body recursion on
%% the binary digits of N.
bitsR(0) ->
    0;
bitsR(N) ->
    bitsR(N div 2) + N rem 2.
%% Population count, tail-recursive version: the running total lives in
%% the accumulator, so nothing remains to do after the recursive call.
bitsTR(N) ->
    bits(N, 0).

bits(0, Acc) ->
    Acc;
bits(N, Acc) ->
    bits(N div 2, N rem 2 + Acc).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(easton_geojson).
-include("easton_geojson.hrl").
-export([
to_wkb/1,
from_wkb/1
]).
%% Encodes a GeoJSON geometry object (EJSON {proplist()} form) as a
%% single WKB binary; the output is always big-endian (see mkiolist/4).
to_wkb({Geom}) ->
    {_Dims, _Count, Data} = convert({Geom}),
    iolist_to_binary(Data).
%% Decodes a WKB binary into a GeoJSON geometry object. The whole input
%% must be consumed; leftover bytes are rejected.
from_wkb(Bin) when is_binary(Bin) ->
    {Json, _Dims, RestBin} = parse(Bin),
    case RestBin of
        <<>> ->
            Json;
        _ ->
            throw({invalid_wkb, trailing_data})
    end.
%% Dispatches on the GeoJSON "type" member of a geometry object; a
%% missing type member is an error.
convert({Props}) ->
    Type = case lists:keyfind(?JSON_TYPE, 1, Props) of
        {_, Type0} -> Type0;
        false -> throw({invalid_geojson, {missing_type, {Props}}})
    end,
    convert(Type, Props).
%% Encodes one GeoJSON geometry, given its type tag and property list,
%% as {Dims, 1, IoList}: the coordinate dimensionality (2..4), the
%% geometry count at this level (always 1), and the WKB bytes.
convert(?JSON_POINT, Props) ->
    Coord = get_coords(Props),
    {Dims, 1, IoList} = coord_to_wkb(Coord),
    {Dims, 1, mkiolist(?WKB_POINT, Dims, 0, IoList)};
convert(?JSON_LINESTRING, Props) ->
    Coords = get_coords(Props),
    {Dims, Count, IoList} = coords_to_wkb(Coords),
    {Dims, 1, mkiolist(?WKB_LINESTRING, Dims, Count, IoList)};
convert(?JSON_POLYGON, Props) ->
    Coords = get_coords(Props),
    {Dims, Count, IoList} = rings_to_wkb(Coords),
    {Dims, 1, mkiolist(?WKB_POLYGON, Dims, Count, IoList)};
convert(?JSON_MULTIPOINT, Props) ->
    Coords = get_coords(Props),
    ToWKB = fun coord_to_wkb/1,
    {Dims, Count, IoList} = json_multi(?WKB_POINT, ToWKB, Coords),
    {Dims, 1, mkiolist(?WKB_MULTIPOINT, Dims, Count, IoList)};
convert(?JSON_MULTILINESTRING, Props) ->
    Coords = get_coords(Props),
    ToWKB = fun coords_to_wkb/1,
    {Dims, Count, IoList} = json_multi(?WKB_LINESTRING, ToWKB, Coords),
    {Dims, 1, mkiolist(?WKB_MULTILINESTRING, Dims, Count, IoList)};
convert(?JSON_MULTIPOLYGON, Props) ->
    Coords = get_coords(Props),
    ToWKB = fun rings_to_wkb/1,
    {Dims, Count, IoList} = json_multi(?WKB_POLYGON, ToWKB, Coords),
    {Dims, 1, mkiolist(?WKB_MULTIPOLYGON, Dims, Count, IoList)};
convert(?JSON_GEOMETRYCOLLECTION, Props) ->
    Geoms = get_geoms(Props),
    {Dims, Count, AccIoLists} = lists:foldl(fun(G, {Dims, Count, IoLists}) ->
        % This case pattern match is a bit subtle. The logic
        % is that we want to assert that all returned values for
        % Dims are identical but we don't want to assert what it
        % is before hand. Thus when we don't match Dims we assert
        % that its because this is the first seen value by checking
        % that Dims is undefined.
        case convert(G) of
            {Dims, 1, NewIoList} ->
                {Dims, Count + 1, [[NewIoList] | IoLists]};
            {NewDims, 1, NewIoList} when Dims == undefined, IoLists == [] ->
                {NewDims, 1, [NewIoList]};
            {_, _, _} ->
                throw({invalid_geojson, mismatched_dimensions})
        end
    end, {undefined, 0, []}, Geoms),
    FinalIoList = lists:reverse(AccIoLists),
    {Dims, 1, mkiolist(?WKB_GEOMETRYCOLLECTION, Dims, Count, FinalIoList)};
%% Unknown GeoJSON type tag.
convert(_, Props) ->
    throw({invalid_geojson, {bad_type, {Props}}}).
%% Entry point for WKB decoding: reads the byte-order marker (0 =
%% big-endian, 1 = little-endian) and the 32-bit geometry type word,
%% then hands off to parse/3.
parse(<<0:8/integer, WKBType:32/big-unsigned-integer, Rest/binary>>) ->
    parse(big, wkb_to_type(WKBType), Rest);
parse(<<1:8/integer, WKBType:32/little-unsigned-integer, Rest/binary>>) ->
    parse(little, wkb_to_type(WKBType), Rest).
%% Decodes one geometry body given the byte order and the {Dims, Type}
%% pair extracted from the WKB header; returns {GeoJsonTerm, Dims,
%% RemainingBinary}.
parse(Endian, {Dims, ?WKB_POINT}, WKB) ->
    {Coord, RestWKB} = wkb_to_coord(Endian, Dims, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_POINT},
        {?JSON_COORDINATES, Coord}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_LINESTRING}, WKB) ->
    {Coords, RestWKB} = wkb_to_coords(Endian, Dims, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_LINESTRING},
        {?JSON_COORDINATES, Coords}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_POLYGON}, WKB) ->
    {Rings, RestWKB} = wkb_to_rings(Endian, Dims, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_POLYGON},
        {?JSON_COORDINATES, Rings}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_MULTIPOINT}, WKB) ->
    FromWKB = fun wkb_to_coord/3,
    {Coords, RestWKB} = wkb_multi(Endian, ?WKB_POINT, Dims, FromWKB, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_MULTIPOINT},
        {?JSON_COORDINATES, Coords}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_MULTILINESTRING}, WKB) ->
    FromWKB = fun wkb_to_coords/3,
    {Coords, RestWKB} = wkb_multi(Endian, ?WKB_LINESTRING, Dims, FromWKB, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_MULTILINESTRING},
        {?JSON_COORDINATES, Coords}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_MULTIPOLYGON}, WKB) ->
    FromWKB = fun wkb_to_rings/3,
    {Coords, RestWKB} = wkb_multi(Endian, ?WKB_POLYGON, Dims, FromWKB, WKB),
    Json = {[
        {?JSON_TYPE, ?JSON_MULTIPOLYGON},
        {?JSON_COORDINATES, Coords}
    ]},
    {Json, Dims, RestWKB};
parse(Endian, {Dims, ?WKB_GEOMETRYCOLLECTION}, WKB) ->
    {Count, RestWKB} = wkb_to_count(Endian, WKB),
    {FinalGeoms, FinalWKB} = lists:foldl(fun(_, {Geoms, WKBTail}) ->
        %% Dims is already bound here, so the first case pattern asserts
        %% that every member geometry shares the collection's
        %% dimensionality.
        case parse(WKBTail) of
            {G, Dims, NewTail} ->
                {[G | Geoms], NewTail};
            {_, _BadDims, _} ->
                throw({invalid_wkb, mismatched_dimensions})
        end
    end, {[], RestWKB}, lists:seq(1, Count)),
    Json = {[
        {?JSON_TYPE, ?JSON_GEOMETRYCOLLECTION},
        {?JSON_GEOMETRIES, lists:reverse(FinalGeoms)}
    ]},
    {Json, Dims, FinalWKB};
%% Unknown WKB type code.
parse(_Endian, {_Dims, Type}, _WKB) ->
    throw({invalid_wkb, {bad_type, Type}}).
%% Fetches the "coordinates" member of a geometry; it must be present
%% and must be a list.
get_coords(Props) ->
    case lists:keyfind(?JSON_COORDINATES, 1, Props) of
        false ->
            throw({invalid_geojson, {missing_coordinates, {Props}}});
        {_Key, Coords} when is_list(Coords) ->
            Coords;
        {_Key, _NotAList} ->
            throw({invalid_geojson, {bad_coordinates, {Props}}})
    end.
%% Fetches the "geometries" member of a geometry collection; it must be
%% present and must be a list.
get_geoms(Props) ->
    case lists:keyfind(?JSON_GEOMETRIES, 1, Props) of
        false ->
            throw({invalid_geojson, {missing_geometries, {Props}}});
        {_Key, Geoms} when is_list(Geoms) ->
            Geoms;
        {_Key, _NotAList} ->
            throw({invalid_geojson, {bad_geometries, {Props}}})
    end.
%% Encodes the member geometries of a Multi* geometry: each member is
%% encoded with ToWKB and wrapped in its own typed WKB header (Type);
%% all members must share the same dimensionality.
json_multi(_Type, _ToWKB, []) ->
    throw({invalid_geojson, empty_multi_geometry});
json_multi(Type, ToWKB, [Coords]) ->
    {Dims, Count, AccIoList} = ToWKB(Coords),
    {Dims, 1, mkiolist(Type, Dims, Count, AccIoList)};
json_multi(Type, ToWKB, [Coords | Rest]) ->
    {Dims, Count, AccIoList} = json_multi(Type, ToWKB, Rest),
    %% Dims is bound above, so the first pattern below asserts that this
    %% member's dimensionality agrees with the rest of the list.
    case json_multi(Type, ToWKB, [Coords]) of
        {Dims, 1, NewIoList} ->
            {Dims, Count + 1, [NewIoList | AccIoList]};
        {_BadDims, _, _} ->
            throw({invalid_geojson, mismatched_dimensions})
    end.
%% Decodes the members of a Multi* WKB geometry: reads the member count,
%% then for each member validates its embedded type/dimension header
%% (wkb_check_type/3) and decodes its payload with FromWKB.
wkb_multi(Endian, Type, Dims, FromWKB, WKB) ->
    {Count, RestWKB} = wkb_to_count(Endian, WKB),
    {FinalCoords, FinalTail} = lists:foldl(fun(_, {CoordAcc, WKBTail}) ->
        %% Each member carries its own byte-order marker.
        {SubEndian, NewTail1} = wkb_check_type(Type, Dims, WKBTail),
        {Coords, NewTail2} = FromWKB(SubEndian, Dims, NewTail1),
        {[Coords | CoordAcc], NewTail2}
    end, {[], RestWKB}, lists:seq(1, Count)),
    {lists:reverse(FinalCoords), FinalTail}.
%% Encodes a polygon's ring list; returns {Dims, RingCount, IoList}.
%% All rings must share the same dimensionality.
rings_to_wkb([]) ->
    throw({invalid_geojson, empty_polygon});
rings_to_wkb([Ring]) ->
    ring_to_wkb(Ring);
rings_to_wkb([Ring | Rest]) ->
    {Dims, Count, AccIoList} = rings_to_wkb(Rest),
    %% Dims is bound above, so the first pattern asserts matching
    %% dimensionality for this ring.
    case ring_to_wkb(Ring) of
        {Dims, 1, NewIoList} ->
            {Dims, Count + 1, [NewIoList | AccIoList]};
        {_BadDims, _, _} ->
            throw({invalid_geojson, mismatched_dimensions})
    end.
%% Encodes one ring: its big-endian 32-bit vertex count followed by the
%% encoded vertices.
ring_to_wkb(Coords) ->
    {Dims, Count, IoList} = coords_to_wkb(Coords),
    {Dims, 1, [<<Count:32/big-unsigned-integer>> | IoList]}.
%% Decodes a polygon's rings: a ring count followed by that many
%% coordinate lists.
wkb_to_rings(Endian, Dims, WKB) ->
    {Count, RestWKB} = wkb_to_count(Endian, WKB),
    {FinalRings, FinalWKB} = lists:foldl(fun(_, {Rings, WKBTail}) ->
        {Ring, NewTail} = wkb_to_coords(Endian, Dims, WKBTail),
        {[Ring | Rings], NewTail}
    end, {[], RestWKB}, lists:seq(1, Count)),
    {lists:reverse(FinalRings), FinalWKB}.
%% Encodes a non-empty list of positions; returns {Dims, Count, IoList}
%% and requires every position to share the same dimensionality.
coords_to_wkb([]) ->
    throw({invalid_geojson, empty_coordinates});
coords_to_wkb([Coord]) ->
    coord_to_wkb(Coord);
coords_to_wkb([Coord | Rest]) ->
    {Dims, Count, AccIoList} = coords_to_wkb(Rest),
    %% Dims is bound above, so the first pattern asserts matching
    %% dimensionality for this position.
    case coord_to_wkb(Coord) of
        {Dims, 1, NewIoList} ->
            {Dims, Count + 1, [NewIoList | AccIoList]};
        {_BadDims, _, _} ->
            throw({invalid_geojson, mismatched_dimensions})
    end.
%% Decodes a count-prefixed list of positions.
wkb_to_coords(Endian, Dims, WKB) ->
    {Count, RestWKB} = wkb_to_count(Endian, WKB),
    {FinalCoords, FinalWKB} = lists:foldl(fun(_, {Coords, WKBTail}) ->
        {C, NewTail} = wkb_to_coord(Endian, Dims, WKBTail),
        {[C | Coords], NewTail}
    end, {[], RestWKB}, lists:seq(1, Count)),
    {lists:reverse(FinalCoords), FinalWKB}.
%% Encodes a single position as consecutive 64-bit big-endian floats.
%% Accepts 2, 3 or 4 numeric components and returns {Dims, 1, IoList};
%% anything else is a bad coordinate.
coord_to_wkb([_, _ | _] = Coord) when length(Coord) =< 4 ->
    case lists:all(fun is_number/1, Coord) of
        true ->
            Bin = << <<C:64/float>> || C <- Coord >>,
            {length(Coord), 1, [Bin]};
        false ->
            throw({invalid_geojson, {bad_coord, Coord}})
    end;
coord_to_wkb(BadCoord) ->
    throw({invalid_geojson, {bad_coord, BadCoord}}).
%% Decodes one position: Dims consecutive 64-bit floats in the given
%% byte order. Returns {Coord, RestOfBinary}.
wkb_to_coord(big, 2, <<X:64/big-float, Y:64/big-float, R/binary>>) ->
    {[X, Y], R};
wkb_to_coord(big, 3, <<X:64/big-float, Y:64/big-float, Z:64/big-float,
        R/binary>>) ->
    {[X, Y, Z], R};
wkb_to_coord(big, 4, <<X:64/big-float, Y:64/big-float, Z:64/big-float,
        M:64/big-float, R/binary>>) ->
    {[X, Y, Z, M], R};
wkb_to_coord(little, 2, <<X:64/little-float, Y:64/little-float, R/binary>>) ->
    {[X, Y], R};
wkb_to_coord(little, 3, <<X:64/little-float, Y:64/little-float,
        Z:64/little-float, R/binary>>) ->
    {[X, Y, Z], R};
wkb_to_coord(little, 4, <<X:64/little-float, Y:64/little-float,
        Z:64/little-float, M:64/little-float, R/binary>>) ->
    {[X, Y, Z, M], R};
%% Truncated input or an unsupported dimensionality.
wkb_to_coord(_Endian, _Dims, WKB) ->
    throw({invalid_wkb, {bad_coord, WKB}}).
%% Wraps a payload with its WKB header. Output is always big-endian
%% (the leading 0 byte). Points carry no count field; every other type
%% is count-prefixed.
mkiolist(?WKB_POINT = Type0, Dims, _Count, IoList) ->
    Type = type_to_wkb(Type0, Dims),
    [<<0:8/integer, Type:32/big-unsigned-integer>> | IoList];
mkiolist(Type0, Dims, Count, IoList) ->
    Type = type_to_wkb(Type0, Dims),
    [<<0:8/integer, Type:32/big-unsigned-integer,
       Count:32/big-unsigned-integer>> | IoList].
%% Adds the dimension flag bits to a base WKB type word.
%% NOTE(review): 3 dimensions always encode as Z, while wkb_to_type/1
%% also maps an M-only flag to 3 dimensions — an XYM geometry would
%% round-trip as XYZ; confirm that is acceptable.
type_to_wkb(Type, Dims) ->
    case Dims of
        2 -> Type;
        3 -> Type bor ?WKB_Z;
        4 -> (Type bor ?WKB_Z) bor ?WKB_M;
        _ -> throw({invalid_geojson, {bad_dimensions, Dims}})
    end.
%% Splits a raw WKB type word into {Dims, BaseType}: the Z and M flag
%% bits each add one dimension beyond the base X/Y pair.
%% Uses short-circuit andalso/orelse consistently (the original mixed
%% non-short-circuit `and' with `orelse').
wkb_to_type(Type) ->
    case Type of
        _ when ((Type band ?WKB_Z) == ?WKB_Z) andalso ((Type band ?WKB_M) == ?WKB_M) ->
            {4, Type band ?WKB_TYPE_FILTER};
        _ when (Type band ?WKB_Z) == ?WKB_Z orelse (Type band ?WKB_M) == ?WKB_M ->
            {3, Type band ?WKB_TYPE_FILTER};
        _ ->
            {2, Type}
    end.
%% Reads a 32-bit element count in the given byte order; zero counts
%% are rejected, and anything that is not a 4-byte-prefixed binary is a
%% bad count.
wkb_to_count(big, <<Count:32/big-unsigned-integer, Rest/binary>>) ->
    case Count of
        0 -> throw({invalid_wkb, bad_count});
        _ -> {Count, Rest}
    end;
wkb_to_count(little, <<Count:32/little-unsigned-integer, Rest/binary>>) ->
    case Count of
        0 -> throw({invalid_wkb, bad_count});
        _ -> {Count, Rest}
    end;
wkb_to_count(_Endian, WKB) ->
    throw({invalid_wkb, {bad_count, WKB}}).
%% Consumes a sub-geometry's header inside a Multi* geometry and checks
%% that its type and dimensionality match the container's; returns the
%% sub-geometry's byte order and the remaining binary.
wkb_check_type(Type, Dims, <<0:8/integer, WKBType:32/big-unsigned-integer,
        Rest/binary>>) ->
    %% Type and Dims are bound, so the first pattern asserts both match.
    case wkb_to_type(WKBType) of
        {Dims, Type} ->
            {big, Rest};
        {Dims, _BadType} ->
            throw({invalid_wkb, multi_type_mismatch});
        {_BadDims, _} ->
            throw({invalid_wkb, multi_dimension_mismatch})
    end;
wkb_check_type(Type, Dims, <<1:8/integer, WKBType:32/little-unsigned-integer,
        Rest/binary>>) ->
    case wkb_to_type(WKBType) of
        {Dims, Type} ->
            {little, Rest};
        {Dims, _BadType} ->
            throw({invalid_wkb, multi_type_mismatch});
        {_BadDims, _} ->
            throw({invalid_wkb, multi_dimension_mismatch})
    end.
-module(slacker_channel).
-include("spec.hrl").
-export([archive/2, create/2, history/3, info/2, invite/3,
join/2, kick/3, leave/2, list/2, mark/3, rename/3,
set_purpose/3, set_topic/3, unarchive/2]).
%% @doc Archives a channel.
-spec archive(Token :: string(), Channel :: string()) -> http_response().
archive(Token, Channel) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.archive", Params).

%% @doc Creates a channel.
-spec create(Token :: string(), Name :: string()) -> http_response().
create(Token, Name) ->
    Params = [{"token", Token}, {"name", Name}],
    slacker_request:send("channels.create", Params).

%% @doc Fetches history of messages and events from a channel.
%%
%% Options can be:
%% latest: end of time range of messages to include in results
%% oldest: start of time range of messages to include in results
%% inclusive: include messages with latest or oldest timestamp in results (default: 0)
%% count: number of messages to return, between 1 and 1000 (default: 100)
%% unreads: include unread_count_display in the output (default: 0)
%%
-spec history(Token :: string(), Channel :: string(), Options :: list()) -> http_response().
history(Token, Channel, Options) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.history", Params, Options).

%% @doc Returns information about a team channel.
-spec info(Token :: string(), Channel :: string()) -> http_response().
info(Token, Channel) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.info", Params).

%% @doc Invites a user to a channel.
-spec invite(Token :: string(), Channel :: string(), User :: string()) -> http_response().
invite(Token, Channel, User) ->
    Params = [{"token", Token}, {"channel", Channel}, {"user", User}],
    slacker_request:send("channels.invite", Params).
%% @doc Join a channel. If the channel does not exist, it is created.
-spec join(Token :: string(), Channel :: string()) -> http_response().
join(Token, Channel) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.join", Params).

%% @doc Removes a user from a channel.
-spec kick(Token :: string(), Channel :: string(), User :: string()) -> http_response().
kick(Token, Channel, User) ->
    Params = [{"token", Token}, {"channel", Channel}, {"user", User}],
    slacker_request:send("channels.kick", Params).

%% @doc Leave a channel.
-spec leave(Token :: string(), Channel :: string()) -> http_response().
leave(Token, Channel) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.leave", Params).

%% @doc List of all channels in the team.
%%
%% Options can be:
%% exclude_archived: do not return archived channels (default: 0)
%%
-spec list(Token :: string(), Options :: list()) -> http_response().
list(Token, Options) ->
    Params = [{"token", Token}],
    slacker_request:send("channels.list", Params, Options).

%% @doc Set read cursor in a channel.
-spec mark(Token :: string(), Channel :: string(), Timestamp :: string()) -> http_response().
mark(Token, Channel, Timestamp) ->
    Params = [{"token", Token}, {"channel", Channel}, {"ts", Timestamp}],
    slacker_request:send("channels.mark", Params).
%% @doc Rename a channel.
-spec rename(Token :: string(), Channel :: string(), Name :: string()) -> http_response().
rename(Token, Channel, Name) ->
    Params = [{"token", Token}, {"channel", Channel}, {"name", Name}],
    slacker_request:send("channels.rename", Params).

%% @doc Sets the purpose for a channel.
-spec set_purpose(Token :: string(), Channel :: string(), Purpose :: string()) -> http_response().
set_purpose(Token, Channel, Purpose) ->
    Params = [{"token", Token}, {"channel", Channel}, {"purpose", Purpose}],
    slacker_request:send("channels.setPurpose", Params).

%% @doc Sets the topic for a channel.
-spec set_topic(Token :: string(), Channel :: string(), Topic :: string()) -> http_response().
set_topic(Token, Channel, Topic) ->
    Params = [{"token", Token}, {"channel", Channel}, {"topic", Topic}],
    slacker_request:send("channels.setTopic", Params).

%% @doc Unarchives a channel.
-spec unarchive(Token :: string(), Channel :: string()) -> http_response().
unarchive(Token, Channel) ->
    Params = [{"token", Token}, {"channel", Channel}],
    slacker_request:send("channels.unarchive", Params).
%% Integer types represented as ranges with infinities
-module(gradualizer_int).
%% Integer types
-export([is_int_type/1,
is_int_subtype/2,
int_type_glb/2,
int_type_diff/2,
negate_int_type/1,
merge_int_types/1]).
%% Ranges <--> integer types
-export([int_type_to_range/1,
int_range_to_type/1,
int_range_to_types/1]).
%% Ranges
-export([int_range_diff/2]).
%% Types
-export_type([int_range/0]).
-type int() :: integer() | neg_inf | pos_inf.
-type int_range() :: {int(), int()}.
-type type() :: gradualizer_type:abstract_type().
%% +----------------------------------------+
%% | Functions operating on integer types |
%% +----------------------------------------+
%% Checks if a type is an integer type. This is a pre-condition for most of
%% the functions in this module.
%%
%% A macro with the same name is defined in typelib.hrl, which can be used in
%% guards.
-spec is_int_type(type()) -> boolean().
is_int_type({type, _, Name, _}) ->
    lists:member(Name, [integer, neg_integer, non_neg_integer,
                        pos_integer, range]);
is_int_type({integer, _, _}) -> true;
is_int_type({char, _, _}) -> true;
is_int_type(_) -> false.
%% Checks if an integer type is a subtype of another integer type. Both
%% arguments must be integer types.
-spec is_int_subtype(type(), type()) -> boolean().
is_int_subtype(Ty1, Ty2) ->
    R1 = int_type_to_range(Ty1),
    R2 = int_type_to_range(Ty2),
    %% Ty1 is a subtype of Ty2 iff Ty2's range contains Ty1's range at
    %% both ends.
    lower_bound_less_or_eq(R2, R1) andalso
        upper_bound_more_or_eq(R2, R1).
%% Greatest lower bound (range intersection) of two integer types.
%% Both arguments must be integer types.
-spec int_type_glb(type(), type()) -> type().
int_type_glb(Ty1, Ty2) ->
    {Lo1, Hi1} = int_type_to_range(Ty1),
    {Lo2, Hi2} = int_type_to_range(Ty2),
    %% Intersection: max of lower bounds, min of upper bounds; an empty
    %% intersection becomes none() via int_range_to_type/1.
    int_range_to_type({int_max(Lo1, Lo2), int_min(Hi1, Hi2)}).
%% Range difference, like set difference. Removing the middle of a
%% range can split it, so the result covers zero, one or two ranges —
%% returned as a single type (none(), a plain type, or a union).
-spec int_type_diff(type(), type()) -> type().
int_type_diff(Ty1, Ty2) ->
    IntRanges = int_range_diff(int_type_to_range(Ty1),
                               int_type_to_range(Ty2)),
    %% Make sure the result is a standard erlang type.
    %% Perhaps we can include generalized ranges such as 10..pos_inf
    ExpandedRanges = lists:map(fun int_range_expand_to_valid/1, IntRanges),
    int_ranges_to_type(ExpandedRanges).
%% Merges integer types by sorting on the lower bound and then merging adjacent
%% ranges. Returns a list of mutually exclusive integer types.
%%
%% This is an adoption of the standard algorithm for merging intervals.
-spec merge_int_types([type()]) -> [type()].
merge_int_types([]) ->
    [];
merge_int_types(IntTypes) ->
    %% Work in range space, merge there, then convert back to types.
    Ranges = lists:map(fun int_type_to_range/1, IntTypes),
    int_ranges_to_types(Ranges).
%% Negates an integer type, e.g. `neg_integer() -> pos_integer()',
%% `1..5 -> -5..-1', etc. Negation both negates and swaps the bounds.
%% (The original comment stated the resulting range as `-1..-5',
%% i.e. with the bounds inverted.)
-spec negate_int_type(type()) -> type().
negate_int_type(RangeTy) ->
    {L, U} = int_type_to_range(RangeTy),
    L2 = int_negate(U),
    U2 = int_negate(L),
    int_range_to_type({L2, U2}).
%% +---------------------------------------+
%% | Conversion between types and ranges |
%% +---------------------------------------+
%% Integer type to range.
-spec int_type_to_range(type()) -> int_range().
int_type_to_range({type, _, integer, []}) -> {neg_inf, pos_inf};
int_type_to_range({type, _, neg_integer, []}) -> {neg_inf, -1};
int_type_to_range({type, _, non_neg_integer, []}) -> {0, pos_inf};
int_type_to_range({type, _, pos_integer, []}) -> {1, pos_inf};
%% Range bounds may be written as integer or char literals in the AST.
int_type_to_range({type, _, range, [{Tag1, _, I1}, {Tag2, _, I2}]})
  when Tag1 =:= integer orelse Tag1 =:= char,
       Tag2 =:= integer orelse Tag2 =:= char -> {I1, I2};
int_type_to_range({char, _, I}) -> {I, I};
int_type_to_range({integer, _, I}) -> {I, I}.
%% Converts a range back to a type: a union when the range needs more
%% than one standard type to express, none() when it is empty.
-spec int_range_to_type(int_range()) -> type().
int_range_to_type(Range) ->
    union(int_range_to_types(Range)).
%% +----------------+
%% | Type helpers |
%% +----------------+
%% Converts a range to a list of types. Creates two types in some cases and
%% zero types if the range is empty (lower bound greater than upper bound).
-spec int_range_to_types(int_range()) -> [type()].
%% Empty/degenerate ranges must be matched FIRST: Erlang term order
%% makes atoms (neg_inf/pos_inf) compare greater than any integer, so
%% e.g. {neg_inf, neg_inf} would otherwise satisfy the `I > 0' guard
%% below and produce an invalid type.
int_range_to_types({pos_inf, _}) -> [];
int_range_to_types({_, neg_inf}) -> [];
int_range_to_types({neg_inf, pos_inf}) ->
    [type(integer)];
int_range_to_types({neg_inf, -1}) ->
    [type(neg_integer)];
int_range_to_types({neg_inf, 0}) ->
    [type(neg_integer), {integer, erl_anno:new(0), 0}];
int_range_to_types({neg_inf, I}) when I > 0 ->
    [type(neg_integer),
     {type, erl_anno:new(0), range, [{integer, erl_anno:new(0), 0}
                                    ,{integer, erl_anno:new(0), I}]}];
int_range_to_types({neg_inf, I}) when I < -1 ->
    %% Non-standard
    [{type, erl_anno:new(0), range, [{integer, erl_anno:new(0), neg_inf}
                                    ,{integer, erl_anno:new(0), I}]}];
int_range_to_types({I, pos_inf}) when I < -1 ->
    [{type, erl_anno:new(0), range, [{integer, erl_anno:new(0), I}
                                    ,{integer, erl_anno:new(0), -1}]},
     type(non_neg_integer)];
int_range_to_types({-1, pos_inf}) ->
    [{integer, erl_anno:new(0), -1}, type(non_neg_integer)];
int_range_to_types({0, pos_inf}) ->
    [type(non_neg_integer)];
int_range_to_types({1, pos_inf}) ->
    [type(pos_integer)];
int_range_to_types({I, pos_inf}) when I > 1 ->
    %% Non-standard
    [{type, erl_anno:new(0), range, [{integer, erl_anno:new(0), I}
                                    ,{integer, erl_anno:new(0), pos_inf}]}];
%% Singleton range: a single integer literal.
int_range_to_types({I, I}) ->
    [{integer, erl_anno:new(0), I}];
int_range_to_types({I, J}) when is_integer(I) andalso
                                is_integer(J) andalso
                                I < J ->
    [{type, erl_anno:new(0), range, [{integer, erl_anno:new(0), I}
                                    ,{integer, erl_anno:new(0), J}]}];
%% Remaining empty integer ranges.
int_range_to_types({I, J}) when I > J ->
    [].
%% Merges ranges and returns a single type (possibly a union).
-spec int_ranges_to_type([int_range()]) -> type().
int_ranges_to_type(Ranges) ->
    union(int_ranges_to_types(Ranges)).
%% Merges overlapping ranges and converts them to types.
-spec int_ranges_to_types([int_range()]) -> [type()].
int_ranges_to_types(Ranges) ->
    MergedRanges = merge_int_ranges(Ranges),
    %% A single range may expand to more than one type, hence flatmap.
    lists:flatmap(fun int_range_to_types/1, MergedRanges).
%% Wraps a list of types: none() for the empty list, the type itself
%% for a singleton, a union otherwise.
-spec union([type()]) -> type().
union([]) -> type(none);
union([T]) -> T;
union(Ts) -> type(union, Ts).
%% Shorthand constructors for abstract type forms with a fresh zero anno.
type(Name) -> type(Name, []).
type(Name, Params) -> {type, erl_anno:new(0), Name, Params}.
%% +---------------------------------+
%% | Functions operating on ranges |
%% +---------------------------------+
%% Merges overlapping ranges and returns a sorted list of disjoint ranges.
-spec merge_int_ranges([int_range()]) -> [int_range()].
merge_int_ranges([]) ->
    [];
merge_int_ranges(Ranges) ->
    %% Sort by lower bound, then sweep with a stack of merged ranges.
    [T | Ts] = lists:sort(fun lower_bound_less_or_eq/2, Ranges),
    merge_int_ranges_help(Ts, [T]).
%% Sweep step of the merge: R is the next range (input is sorted by
%% lower bound) and the stack head is the most recently merged range.
merge_int_ranges_help([{R1, R2} = R | Rs], [{S1, S2} | StackTail] = Stack) ->
    NewStack = if
        %% Semicolons are OR-alternatives; a badarith in `S2 + 1'
        %% (S2 an atom) only fails that alternative.
        %% NOTE(review): S2 = neg_inf would therefore take the
        %% non-merge branch — presumably unreachable; confirm.
        R1 == neg_inf; S2 == pos_inf; R1 =< S2 + 1 ->
            %% Overlapping or adjacent ranges. Merge them.
            [{S1, int_max(R2, S2)} | StackTail];
        true ->
            %% Not mergeable ranges. Push R to stack.
            [R | Stack]
    end,
    merge_int_ranges_help(Rs, NewStack);
merge_int_ranges_help([], Stack) ->
    lists:reverse(Stack).
%% Compares the lower bound of two ranges. Used as callback for sorting ranges.
-spec lower_bound_less_or_eq(int_range(), int_range()) -> boolean().
lower_bound_less_or_eq({neg_inf, _}, {_, _}) ->
    true;
lower_bound_less_or_eq({_, _}, {neg_inf, _}) ->
    false;
%% Neither bound is neg_inf: plain term comparison works, because the
%% atom pos_inf compares greater than every integer in Erlang term
%% order.
lower_bound_less_or_eq({A, _}, {B, _}) ->
    A =< B.
%% Compares the upper bound of two ranges.
-spec upper_bound_more_or_eq(int_range(), int_range()) -> boolean().
upper_bound_more_or_eq({_, pos_inf}, {_, _}) ->
    true;
upper_bound_more_or_eq({_, _}, {_, pos_inf}) ->
    false;
%% Neither bound is pos_inf: plain term comparison works, because the
%% atom neg_inf compares greater than every integer in Erlang term
%% order (and the original code relied on exactly the same ordering).
upper_bound_more_or_eq({_, A}, {_, B}) ->
    A >= B.
%% Computes the difference between two integer intervals and returns the result
%% as a list of zero, one or two intervals.
-spec int_range_diff(int_range(), int_range()) -> [int_range()].
int_range_diff({Lo1, Hi1}, {Lo2, Hi2}) ->
    %% R1:   xxxxxxxxxxxxxxxxxxxxx
    %% R2:         xxxxxxxxxxx
    %% diff: xxxxxx           xxxx
    Lo2x = int_decr(Lo2),
    Hi2x = int_incr(Hi2),
    %% Each part is produced only when non-empty, via the
    %% [X || Boolean] comprehension idiom.
    BeforeR2 = [{Lo1, int_min(Hi1, Lo2x)} || int_less_than(Lo1, Lo2)],
    AfterR2 = [{int_max(Lo1, Hi2x), Hi1} || int_greater_than(Hi1, Hi2)],
    BeforeR2 ++ AfterR2.
%% Makes sure a range can be represented as a syntactically valid Erlang type,
%% by expanding it if necessary (Erlang source syntax has no half-open
%% ranges, so these widen to neg_integer()/pos_integer()).
int_range_expand_to_valid({neg_inf, N}) when is_integer(N),
                                             N < -1 ->
    {neg_inf, -1}; % neg_integer()
int_range_expand_to_valid({N, pos_inf}) when is_integer(N),
                                             N > 1 ->
    {1, pos_inf}; % pos_integer()
int_range_expand_to_valid(Range) ->
    Range.
%% +-----------------------------------------+
%% |  Functions operating on a single int()  |
%% +-----------------------------------------+

%% Minimum of two bounds; neg_inf dominates, pos_inf is the identity.
int_min(A, B) when A == neg_inf; B == neg_inf -> neg_inf;
int_min(pos_inf, B) -> B;
int_min(A, pos_inf) -> A;
int_min(A, B) when is_integer(A), is_integer(B) -> min(A, B).

%% Maximum of two bounds; pos_inf dominates, neg_inf is the identity.
int_max(A, B) when A == pos_inf; B == pos_inf -> pos_inf;
int_max(neg_inf, B) -> B;
int_max(A, neg_inf) -> A;
int_max(A, B) when is_integer(A), is_integer(B) -> max(A, B).

%% Strict comparisons, defined via int_min/int_max so the infinities
%% order correctly.
int_less_than(A, A) -> false;
int_less_than(A, B) -> A =:= int_min(A, B).

int_greater_than(A, A) -> false;
int_greater_than(A, B) -> A =:= int_max(A, B).

%% Successor/predecessor; the infinities absorb both operations.
int_incr(N) when is_integer(N) -> N + 1;
int_incr(Inf) -> Inf.

int_decr(N) when is_integer(N) -> N - 1;
int_decr(Inf) -> Inf.

%% Arithmetic negation; swaps the infinities.
int_negate(pos_inf) ->
    neg_inf;
int_negate(neg_inf) ->
    pos_inf;
int_negate(I) when is_integer(I) ->
    -I.
%%
%% @doc This module contains a few functions useful when working with
%% HTML forms in ErlyWeb.
%%
%% @author <NAME> <<EMAIL>> [http://yarivsblog.com)]
%% @copyright <NAME> 2006-2007
%% For license information see LICENSE.txt
-module(erlyweb_forms).
-export([to_recs/2, validate/3, validate1/3, validate_rec/2]).
%% @doc to_recs/2 helps process POST requests containing fields that
%% belong to multiple records from one or more ErlyDB models.
%%
%% This function is useful when {@link erlydb_base:new_fields_from_strs/3}
%% isn't sufficient because the latter is only designed to map POST
%% parameters to the fields of a single record.
%%
%% This function expects each form field to be mapped to its corresponding
%% record by being named with a unique prefix identifying
%% the record to which the form field belongs.
%%
%% For example, suppose you have to process an HTML form whose fields
%% represent a house and 2 cars. The house's fields have the
%% prefix "house_" and the cars' fields have the prefixes "car1_" and
%% "car2_". The arg's POST parameters are
%% `[{"house_rooms", "3"}, {"car1_year", "2007"}, {"car2_year", "2006"}]'.
%% With such a setup, calling `to_recs(A, [{"house_", house}, {"car1_", car},
%% {"car2_", car}])'
%% returns the list `[House, Car1, Car2]', where `house:rooms(House) == "3"',
%% `car:year(Car1) == "2007"' and `car:year(Car2) == "2006"'. All other
%% fields are `undefined'.
%%
%% @spec to_recs(A::arg() | [{ParamName::string(), ParamVal::term()}],
%% [{Prefix::string(), Model::atom()}]) -> [Record::tuple()]
to_recs(A, ModelDescs) when is_tuple(A), element(1, A) == arg ->
    %% A raw Yaws arg record: extract its POST parameters first.
    to_recs(yaws_api:parse_post(A), ModelDescs);
to_recs(Params, ModelDescs) ->
    %% Start with one fresh record per {Prefix, Model} descriptor.
    Models =
        [{Prefix, Model, Model:new()} || {Prefix, Model} <- ModelDescs],
    Models1 =
        lists:foldl(
          fun({Name, Val}, Acc) ->
                  %% Find the first descriptor whose prefix starts Name;
                  %% parameters matching no descriptor are ignored.
                  case lists:splitwith(
                         fun({Prefix2, _Module2, _Rec2}) ->
                                 not lists:prefix(Prefix2, Name)
                         end, Acc) of
                      {_, []} ->
                          Acc;
                      {First, [{Prefix1, Model1, Rec} | Rest]} ->
                          %% Strip the prefix to get the field name, then
                          %% set the (undefined -> "") normalized value on
                          %% that descriptor's record.
                          {_, FieldName} = lists:split(length(Prefix1), Name),
                          Field = erlydb_field:name(Model1:db_field(FieldName)),
                          Val1 = case Val of
                                     undefined -> "";
                                     _ -> Val
                                 end,
                          First ++ [{Prefix1, Model1,
                                     Model1:Field(Rec, Val1)} | Rest]
                  end
          end, Models, Params),
    %% Return just the records, preserving descriptor order.
    [element(3, Model3) || Model3 <- Models1].
%% @doc validate/3 helps validate the inputs of arbitary forms.
%% It accepts a Yaws arg
%% (or the arg's POST data in the form of a name-value property list), a
%% list of parameter names to validate, and a validation function, and returns
%% a tuple of the form {Values, Errors}.
%% 'Values' contains the list of values for the checked parameters
%% and 'Errors' is a list of errors returned from the validation function.
%% If no validation errors occured, this list is empty.
%%
%% If the name of a field is missing from the arg's POST data, this function
%% calls exit({missing_param, Name}).
%%
%% The validation function takes two parameters: the parameter name and
%% its value, and it may return one of the following values:
%%
%% - `ok' means the parameter's value is valid
%%
%% - `{ok, Val}' means the parameter's value is valid, and it also lets you
%% set the value inserted into 'Values' for this parameter.
%%
%% - `{error, Err}' indicates the parameter didn't validate. Err is inserted
%% into 'Errors'.
%%
%% - `{error, Err, Val}' indicates the parameter didn't validate. Err is
%% inserted into 'Errors' and Val is inserted into 'Values' instead of
%% the parameter's original value.
%%
%% For forms that modify or create ErlyDB records, it's generally more
%% convenient to use {@link to_recs/2}.
%%
%% @spec validate(A::arg() | proplist(), Fields::[string()],
%% Fun::function()) -> {Values::[term()], Errors::[term()]} |
%% exit({missing_param, Field})
validate(A, Fields, Fun) when is_tuple(A), element(1, A) == arg ->
    %% A raw Yaws arg record: extract its POST parameters first.
    validate(yaws_api:parse_post(A), Fields, Fun);
validate(Params, Fields, Fun) ->
    %% foldr so the accumulated values come out in Fields order.
    lists:foldr(
      fun(Field, Acc) ->
              case proplists:lookup(Field, Params) of
                  none -> exit({missing_param, Field});
                  {_, Val} ->
                      check_val(Field, Val, Fun, Acc)
              end
      end, {[], []}, Fields).
%% @doc validate1/3 is similar to validate/3, but it expects the parameter
%% list to match the field list both in the number of elements and in their
%% order. validate1/3 is more efficient and is also stricter than validate/3.
%% @see validate/3
%%
%% @spec validate1(Params::proplist() | arg(), Fields::[string()],
%% Fun::function()) -> {Vals, Errs} | exit({missing_params, [string()]}) |
%% exit({unexpected_params, proplist()}) | exit({unexpected_param, string()})
validate1(A, Fields, Fun) when is_tuple(A), element(1, A) == arg ->
    %% A raw Yaws arg record: extract its POST parameters first.
    validate1(yaws_api:parse_post(A), Fields, Fun);
validate1(Params, Fields, Fun) ->
    validate1_1(Params, Fields, Fun, {[], []}).
%% Walks Params and Fields in lock-step; any mismatch in length or
%% order exits.
validate1_1([], [], _Fun, {Vals, Errs}) ->
    %% Both exhausted: restore original order.
    {lists:reverse(Vals), lists:reverse(Errs)};
validate1_1([], Fields, _Fun, _Acc) -> exit({missing_params, Fields});
validate1_1(Params, [], _Fun, _Acc) -> exit({unexpected_params, Params});
%% The repeated variable Field asserts the parameter matches the field.
validate1_1([{Field, Val} | Params], [Field | Fields], Fun, Acc) ->
    Acc1 = check_val(Field, Val, Fun, Acc),
    validate1_1(Params, Fields, Fun, Acc1);
%% NOTE(review): this exit carries a 3-tuple while the @doc above
%% advertises exit({unexpected_param, string()}) — confirm which is
%% intended.
validate1_1([{Param, _} | _Params], [Field | _], _Fun, _Acc) ->
    exit({unexpected_param, Field, Param}).
%% Applies the validation Fun to one field and folds the outcome into
%% the {Values, Errors} accumulator. An undefined value is normalized
%% to "" before validation. The Fun may override the stored value
%% ({ok, V} / {error, E, V}) and/or contribute an error.
check_val(Field, undefined, Fun, Acc) ->
    check_val(Field, "", Fun, Acc);
check_val(Field, Val, Fun, {Vals, Errs}) ->
    case Fun(Field, Val) of
        ok ->
            {[Val | Vals], Errs};
        {ok, NewVal} ->
            {[NewVal | Vals], Errs};
        {error, Err} ->
            {[Val | Vals], [Err | Errs]};
        {error, Err, NewVal} ->
            {[NewVal | Vals], [Err | Errs]}
    end.
%% @doc When a form has fields that correspond to the fields of an ErlyDB
%% record, validate_rec/2 helps validate the values of the record's fields.
%%
%% validate_rec/2 accepts an ErlyDB record and a validation function.
%% It folds over all the fields of the record (obtained by calling
%% {@link erlydb_base:db_field_names/0}), calling the validation function
%% with each field's existing value. The validation function's
%% return value indicates if the field's value is valid,
%% and it may also define the record field's final value.
%%
%% The result of validate_rec/2 is a tuple of the form `{Rec1, Errs}', where
%% the first element is the modified record and the second element is
%% a list of errors accumulated by the calls to the validation function.
%%
%% The validation function takes 3 parameters: the field name (an atom),
%% the current value (this can be any term, but it's usually a string,
%% especially if the record came from {@link to_recs/2}), and the record
%% after folding over all the previous fields. It returns
%% `ok', `{ok, NewVal}', `{error, Err}', or `{error, Err, NewVal}'.
%%
%% validate_rec/2 is especially useful in conjunction with {@link to_recs/2}.
%% A common pattern is to create the records for the submitted form using
%% to_recs/2 and then validate their fields using validate_rec/2.
%%
%% @spec validate_rec(Rec::erlydb_record(), Fun::function()) ->
%% {Rec1::erlydb_record(), Errs::[term()]}
validate_rec(Rec, Fun) ->
    %% An ErlyDB record's first element is its model module.
    Module = element(1, Rec),
    {Rec1, Errs} =
        lists:foldl(
          %% Rec1/Errs1 inside the fun are the running accumulator:
          %% the record after the previous fields and the errors so far.
          fun(Field, {Rec1, Errs1} = Acc) ->
                  case Fun(Field,
                           Module:Field(Rec1), Rec1) of
                      ok ->
                          Acc;
                      {ok, NewVal} ->
                          {Module:Field(Rec1, NewVal),
                           Errs1};
                      {error, Err} ->
                          {Rec1, [Err | Errs1]};
                      {error, Err, NewVal} ->
                          {Module:Field(Rec1, NewVal),
                           [Err | Errs1]}
                  end
          end, {Rec, []}, Module:db_field_names()),
    %% Errors were accumulated in reverse.
    {Rec1, lists:reverse(Errs)}.
-module(puzzle).
-export([new/1, foreach_solution/2, to_puzzle_string/1]).
-include("puzzle.hrl").
-include("unknown.hrl").
%% Returns a new Puzzle with empty Cells.
%%
new() ->
#puzzle{
placed = [],
unknown = [unknown:new(N) || N <- lists:seq(0, 80)]
}.
%% Returns a Puzzle initialized from Setup, a string of 81 digits or
%% dashes: a digit fixes the corresponding cell, a dash leaves it open.
%%
new(Setup) ->
    %% lists:zip/2 also asserts that Setup describes exactly 81 cells.
    Cells = lists:zip(to_digits(Setup), lists:seq(0, 80)),
    lists:foldl(
      fun ({undefined, _Number}, Puzzle) ->
              Puzzle;
          ({Digit, Number}, Puzzle) ->
              place(Puzzle, Number, Digit)
      end,
      new(),
      Cells).
%% Maps a Setup string to one entry per cell: the digit's numeric
%% value, or undefined for a dash.
%%
to_digits(Setup) ->
    lists:map(
      fun ($-) -> undefined;
          (Char) -> Char - $0
      end,
      Setup).
%% Solve this Puzzle and call Yield with each solved Puzzle.
%% XXX: The solver sends solved puzzles asynchronously and can send
%% them faster than Yield can process them, which uses unbounded
%% memory until the process is killed.
%%
foreach_solution(This, Yield) when ?is_puzzle(This), is_function(Yield) ->
    %% This process acts as the collector that all spawned solvers
    %% report back to.
    Collector = self(),
    spawn_solver(This, Collector),
    collector:collect_and_yield_results(
      fun (Result) ->
              case Result of
                  {solved, Puzzle} ->
                      stats:solved(),
                      Yield(Puzzle);
                  failed ->
                      stats:failed()
              end
      end).
%% Spawns (via the collector module) a process that tries to solve
%% Puzzle and reports its result back to Collector.
spawn_solver(Puzzle, Collector) when ?is_puzzle(Puzzle), is_pid(Collector) ->
    stats:spawned(),
    collector:spawn_solver(Collector, fun () -> solve(Puzzle, Collector) end).
%% Try to solve this Puzzle then report back solved or failed to the
%% Collector, and possibly spawn further processes that also report
%% back to the Collector.
%%
solve(This, Collector) when ?is_puzzle(This), is_pid(Collector) ->
    %% We get here either because we're done, we've failed, or we have
    %% to guess and recurse. We can distinguish by examining the
    %% unplaced cell with the fewest possibilities remaining.
    case This#puzzle.unknown of
        [] ->
            %% Solved. Return This as a solution.
            collector:yield(Collector, {solved, This});
        Unknowns ->
            MinUnknown = unknown:min_by_num_possible(Unknowns),
            Possible = unknown:possible(MinUnknown),
            case Possible of
                [] ->
                    %% A cell with no remaining possibilities:
                    %% failed. Return no solutions.
                    collector:yield(Collector, failed);
                _ ->
                    %% Found an unplaced cell with one or more
                    %% possibilities. Guess each possibility and
                    %% either spawn a solver or (for the last
                    %% possibility) recurse.
                    do_guesses(This, Collector,
                               unknown:cell_number(MinUnknown),
                               Possible)
            end
    end.
%% For each Digit in the list, use it as a guess for cell Number
%% and try to solve the resulting Puzzle.
%%
do_guesses(This, Collector, Number, [Digit|Rest]) ->
    Guess = place(This, Number, Digit),
    %% If this is the last guess then just solve in this process. If
    %% there are more guesses to make then spawn a solver for this one
    %% and recurse to process the rest.
    case Rest of
        [] ->
            solve(Guess, Collector);
        _ ->
            spawn_solver(Guess, Collector),
            do_guesses(This, Collector, Number, Rest)
    end.
%% Returns a new Puzzle with Digit placed in the cell identified by
%% CellNumber (or by an Unknown). The possibility sets of all
%% remaining Unknowns are narrowed to account for the new placement.
%%
place(This, CellNumber, Digit)
  when ?is_puzzle(This), is_number(CellNumber), is_number(Digit) ->
    place(This, unknown:new(CellNumber), Digit);
place(This, Unknown, Digit)
  when ?is_puzzle(This), ?is_unknown(Unknown), is_number(Digit) ->
    CellNumber = unknown:cell_number(Unknown),
    %% Drop the cell being placed; narrow every other Unknown.
    Remaining =
        [unknown:place(U, Unknown, Digit)
         || U <- This#puzzle.unknown,
            unknown:cell_number(U) /= CellNumber],
    This#puzzle{
      placed = [{CellNumber, Digit} | This#puzzle.placed],
      unknown = Remaining}.
%% Returns the puzzle as a raw string of 81 digits and dashes, in the
%% same format new/1 accepts.
%%
to_string(This) when ?is_puzzle(This) ->
    Pairs =
        [{Number, $0 + Digit} || {Number, Digit} <- This#puzzle.placed] ++
        [{unknown:cell_number(U), $-} || U <- This#puzzle.unknown],
    [Char || {_, Char} <- lists:sort(Pairs)].
%% Returns a string that prints out as a 9x9 grid of digits: blank
%% lines separate each band of three rows, and spaces separate each
%% stack of three columns.
%%
to_puzzle_string(This) when ?is_puzzle(This) ->
    String = to_string(This),
    %% Split the 81 characters into bands of 27, each band into rows
    %% of 9, and each row into groups of 3.
    string:join(
      lists:map(
        fun (Rows) ->
                string:join(
                  lists:map(
                    fun (Row) ->
                            string:join(spud:slices(Row, 3), " ")
                    end,
                    spud:slices(Rows, 9)),
                  "\n")
        end,
        spud:slices(String, 27)),
      "\n\n"). | src/puzzle.erl | 0.505127 | 0.664064 | puzzle.erl | starcoder
%% -------------------------------------------------------------------
%%
%% mi_utils: Utilities used across all merge_index modules.
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(mi_utils).
-author("<NAME> <<EMAIL>>").
-include("merge_index.hrl").
-export([
ets_keys/1,
longest_prefix/2,
edit_signature/2,
hash_signature/1,
fuzz/2
]).
%% Returns all keys of the ets table Table, in ets:first/ets:next
%% traversal order.
ets_keys(Table) ->
    ets_keys_1(Table, ets:first(Table), []).

%% Tail-recursive traversal; the accumulator is reversed at the end so
%% the result keeps the table's iteration order.
ets_keys_1(_Table, '$end_of_table', Acc) ->
    lists:reverse(Acc);
ets_keys_1(Table, Key, Acc) ->
    ets_keys_1(Table, ets:next(Table, Key), [Key | Acc]).
%% longest_prefix/2 - Given two lists or two binaries, return the
%% longest common prefix of the two terms.  Note that the result is
%% always a list of elements/bytes, even for binary inputs; any other
%% combination of argument types yields <<>>.
longest_prefix(A, B) when is_list(A) andalso is_list(B) ->
    longest_prefix_list(A, B);
longest_prefix(A, B) when is_binary(A) andalso is_binary(B) ->
    longest_prefix_binary(A, B);
longest_prefix(_, _) ->
    <<>>.

longest_prefix_list([C | RestA], [C | RestB]) ->
    [C | longest_prefix_list(RestA, RestB)];
longest_prefix_list(_, _) ->
    [].

longest_prefix_binary(<<C, RestA/binary>>, <<C, RestB/binary>>) ->
    [C | longest_prefix_binary(RestA, RestB)];
longest_prefix_binary(_, _) ->
    [].
%% edit_signature/2 - Given an A term and a B term, build a bitstring
%% with a 0 bit for every position where the two agree and a 1 bit
%% where they differ.  Integers are compared via their 32-bit
%% big-endian encoding.  When A is longer than B, A's trailing bytes
%% contribute no bits (deliberately asymmetric).
edit_signature(A, B) when is_binary(A) andalso is_binary(B) ->
    edit_signature_binary(A, B);
edit_signature(A, B) when is_integer(A) ->
    edit_signature(<<A:32/integer>>, B);
edit_signature(A, B) when is_integer(B) ->
    edit_signature(A, <<B:32/integer>>);
edit_signature(_, _) ->
    <<>>.

edit_signature_binary(<<C, A/binary>>, <<C, B/binary>>) ->
    <<0:1, (edit_signature_binary(A, B))/bitstring>>;
edit_signature_binary(<<_, A/binary>>, <<_, B/binary>>) ->
    <<1:1, (edit_signature_binary(A, B))/bitstring>>;
edit_signature_binary(<<>>, <<_, B/binary>>) ->
    <<1:1, (edit_signature_binary(<<>>, B))/bitstring>>;
edit_signature_binary(_, <<>>) ->
    <<>>.
%% hash_signature/1 - Fold the bytes of a binary (or the 64-bit
%% big-endian encoding of an integer) into a single byte by repeatedly
%% shifting the accumulator left within 8 bits and xor-ing in each
%% byte.  Terms that are neither binaries nor integers hash to <<>>.
hash_signature(Term) when is_binary(Term) ->
    hash_signature_binary(Term, 0);
hash_signature(Term) when is_integer(Term) ->
    hash_signature(<<Term:64/integer>>);
hash_signature(_Term) ->
    <<>>.

hash_signature_binary(<<Byte, Rest/binary>>, Acc) ->
    %% Shift left one bit, carry the old low bit into the new low bit
    %% (equivalent to the original's +1-when-odd case), stay within a
    %% byte, then mix in the next input byte.
    Rotated = ((Acc bsl 1) bor (Acc band 1)) band 255,
    hash_signature_binary(Rest, Rotated bxor Byte);
hash_signature_binary(<<>>, Acc) ->
    Acc.
%% Add some random variation to Value so that, e.g., buffers don't all
%% roll over at the same time.  The scale factor lies roughly within
%% plus or minus FuzzPercent of 1 -- the random draw is 1..100, so the
%% exact range is [1 - 0.98*FuzzPercent, 1 + FuzzPercent].
fuzz(Value, FuzzPercent) ->
    Scale = 1 + (((rand:uniform(100) - 50)/100) * FuzzPercent * 2),
    Value * Scale.
%% -------------------------------------------------------------------
%% @doc Utility functions for saving and loading the game state to/from disk.<br/>
%% Savegame files are created with {@link autosave/1} or {@link save_state/2}.
%% To load a savegame, the file should be read with {@link file:consult/1},
%% contents validated with {@link validate/3} and applied with {@link apply/2}.
%% @end
%% -------------------------------------------------------------------
-module(savegames).
-include("dj_data.hrl").
-export([
autosave/1,
save_state/2,
validate/4,
apply/2
]).
%% @doc Creates a new savegame file with an auto-generated name.<br/>
%% The files are created in the path specified by the env value 'autosave_dir'.
-spec autosave(#state{}) -> {ok, file:filename()} | {error, file:posix()} | disabled.
%% Build the autosave path from the current problem/round, then write
%% the state there; the path is only reported on a successful write.
autosave(#state{problem_idx = ProblemIndex, round = Round} = Data) ->
    case make_autosave_path(ProblemIndex, Round) of
        {ok, FilePath} = Ok ->
            case save_state(FilePath, Data) of
                ok -> Ok;
                WriteError -> WriteError
            end;
        disabled ->
            disabled;
        {error, _} = PathError ->
            PathError
    end.
%% @doc Creates a new savegame file at the specified location.
-spec save_state(file:filename(), #state{}) -> ok | {error, file:posix()}.
save_state(FilePath, #state{workers = Workers,
                            config = Config,
                            problem_idx = ProblemIndex,
                            round = RoundNumber,
                            problem_state = ProblemState,
                            worker_input = WorkerInput}) ->
    #config{problems = Problems, score_mode = ScoreMode} = Config,
    %% The on-disk format is this single tuple plus a CRC; see
    %% serialize/1 and validate/4.
    SaveTuple = {dict:to_list(Workers),
                 ScoreMode,
                 Problems,
                 ProblemIndex,
                 RoundNumber,
                 ProblemState,
                 WorkerInput},
    file:write_file(FilePath, serialize(SaveTuple)).
%% @doc Validates the contents of a savegame file against the current configuration.
-spec validate(SaveGame :: term(), list(problem()), score_mode(), list(worker_id())) ->
boolean().
validate([GameData = {WorkerList,
                      ScoreMode,
                      Problems,
                      ProblemIndex,
                      RoundNumber,
                      ProblemState,
                      WorkerInput},
          CRC],
         OldProblems,
         OldScoreMode,
         WorkerIDs) ->
    %% if the problem list has changed, ProblemIndex, RoundNumber, ProblemState
    %% and WorkerInput are irrelevant
    SameProblems = Problems == OldProblems,
    %% Run every check and collect its result ('ok' or {error, Msg});
    %% errors are reported together at the end.
    ResultList =
        [
         %% The saved CRC must match the CRC of the game-data term.
         case erlang:crc32(term_to_binary(GameData)) of
             CRC ->
                 ok;
             _ ->
                 {error, "CRC check failed"}
         end,
         %% The score mode must be valid and unchanged.
         case {config:valid_score_mode(ScoreMode), ScoreMode} of
             {true, OldScoreMode} ->
                 ok;
             {true, _} ->
                 {error, "score mode from savegame and current mode differ"};
             {false, _} ->
                 {error, "invalid score mode"}
         end,
         %% The worker list must be a list of well-formed, uniquely-keyed
         %% workers whose IDs match the currently configured workers.
         case is_list(WorkerList) of
             true ->
                 case [ W || W <- WorkerList, not valid_saved_worker(W) ] of
                     [] ->
                         case valid_unique_worker_ids(WorkerList) of
                             true ->
                                 %% if the loaded workers do not correspond to the saved workers, everything is irrelevant too
                                 SortedWorkerIDs = lists:sort(WorkerIDs),
                                 case lists:sort(dict:fetch_keys(dict:from_list(WorkerList))) of
                                     SortedWorkerIDs ->
                                         ok;
                                     Other ->
                                         {error, io_lib:format("Worker configuration doesn't fit the one in the savegame: ~p vs. ~p", [Other, WorkerIDs])}
                                 end;
                             false ->
                                 {error, "worker ids not unique"}
                         end;
                     ProblemList ->
                         {error, io_lib:format("illegal saved workers: ~p", [ProblemList])}
                 end;
             false ->
                 {error, "saved workerList is not a list"}
         end,
         %% Every saved problem must be well-formed.
         case is_list(Problems) of
             true ->
                 case [ P || P <- Problems, not valid_saved_problem(P) ] of
                     [] ->
                         ok;
                     ProblemList ->
                         {error, io_lib:format("illegal saved problems: ~p", [ProblemList])}
                 end;
             false ->
                 {error, "saved problem list is not a list"}
         end
        ]
        ++
        %% Position/round/state checks only matter when the problem list
        %% is unchanged.
        case SameProblems of
            true ->
                [
                 case is_integer(ProblemIndex) of
                     true ->
                         case (ProblemIndex >= 0) and (ProblemIndex < length(Problems)) of
                             true ->
                                 ok;
                             false ->
                                 {error, "Problem index out of range"}
                         end;
                     false ->
                         {error, "saved problem index is not an integer"}
                 end,
                 case is_integer(RoundNumber) of
                     true ->
                         case RoundNumber >= 0 of
                             true ->
                                 ok;
                             false ->
                                 {error, "round number out of range"}
                         end;
                     false ->
                         {error, "round number is not an integer"}
                 end,
                 case is_list(ProblemState) of
                     true ->
                         ok;
                     false ->
                         %% if it isn't a list, it can't be a string ;-)
                         {error, "Problem state is not a string"}
                 end,
                 case is_list(WorkerInput) of
                     true ->
                         ok;
                     false ->
                         %% if it isn't a list, it can't be a string ;-)
                         {error, "worker input is not a string"}
                 end
                ];
            false ->
                [ok]
        end,
    ErrorList = [ String || {error, String} <- ResultList ],
    case ErrorList of
        [] ->
            true;
        _ ->
            %% generate nice error message from error list
            ok = lager:error(utils:intersperse("\n - ", [ "Errors in save game:" | ErrorList ])),
            false
    end;
%% Anything that isn't a [GameData, CRC] term from file:consult/1 is invalid.
validate(_, _, _, _) ->
    false.
%% @doc Applies a savegame to the current game state.
-spec apply(term(), #state{}) -> #state{}.
apply([{WorkerList,
        ScoreMode,
        Problems,
        ProblemIndex,
        RoundNumber,
        ProblemState,
        WorkerInput},
       _CRC],
      #state{config = #config{problems = OldProblems,
                              score_mode = OldScoreMode}} = CurrentState) ->
    Workers = dict:from_list(WorkerList),
    %% Problem position/round/state only carry over when both the
    %% problem list and the score mode are unchanged; otherwise only
    %% the workers (and their scores) are restored.
    case (Problems == OldProblems) andalso (ScoreMode == OldScoreMode) of
        true ->
            CurrentState#state{workers = Workers,
                               problem_idx = ProblemIndex,
                               round = RoundNumber,
                               problem_state = ProblemState,
                               worker_input = WorkerInput};
        false ->
            CurrentState#state{workers = Workers}
    end.
%% ===================================================================
%% private functions
%% ===================================================================
%% @doc Serializes a game-data term to chardata: the term followed by
%% its CRC32, each as a dot-terminated Erlang term so the file can be
%% read back with file:consult/1.
serialize(GameData) ->
    Checksum = erlang:crc32(term_to_binary(GameData)),
    io_lib:format("~p.\n~p.\n", [GameData, Checksum]).
%% @doc Returns true if the term is a well-formed saved problem:
%% a {Description, Spec, AnswerTime, StartState} tuple with list-typed
%% fields and a positive integer answer time.
-spec valid_saved_problem(_) -> boolean().
valid_saved_problem({Description, Spec, AnswerTime, StartState})
  when is_list(Description),
       is_list(Spec),
       is_integer(AnswerTime),
       AnswerTime > 0,
       is_list(StartState) ->
    true;
valid_saved_problem(_Other) ->
    false.
%% @doc Returns true if no two workers in the list share an ID.
-spec valid_unique_worker_ids([any()]) -> boolean().
valid_unique_worker_ids(Workers) ->
    %% usort/2 drops later entries whose ID compares equal to an
    %% earlier one, so any shrinkage means a duplicate ID.
    Deduped = lists:usort(fun({IdA, _}, {IdB, _}) ->
                                  IdA =< IdB
                          end,
                          Workers),
    length(Deduped) == length(Workers).
%% @doc Returns true if the term is a well-formed {WorkerID, Worker}
%% entry as written by save_state/2.
-spec valid_saved_worker(_) -> boolean().
valid_saved_worker({WorkerID, {worker,
                               Name,
                               LastProp,
                               LastPropScore,
                               ProcessedScore,
                               Caption,
                               ProblemScore,
                               Working,
                               RankingGroup,
                               Blocked}})
  when is_atom(WorkerID),
       is_list(Name),
       is_list(LastProp) or (LastProp == none),
       is_integer(LastPropScore),
       is_integer(ProcessedScore),
       is_list(Caption),
       is_integer(ProblemScore),
       is_boolean(Working),
       is_list(RankingGroup) ->
    %% The blocked field is either 'no' or {idx, N} with N >= 0.
    case Blocked of
        no -> true;
        {idx, Idx} -> is_integer(Idx) andalso Idx >= 0;
        _ -> false
    end;
valid_saved_worker(_Other) ->
    false.
%% @doc Generates a filename (including path) for a new autosave.<br/>
%% Reads path from environment var autosave_dir.
%%
%% NOTE(review): if autosave_dir is unset, application:get_env/1
%% returns 'undefined', which has no matching clause below, so the
%% caller crashes with case_clause -- confirm the env var is always
%% configured.
-spec make_autosave_path(non_neg_integer(), non_neg_integer())
      -> {ok, file:filename()} | {error, file:posix()} | disabled.
make_autosave_path(ProblemIdx, Round) ->
    case application:get_env(autosave_dir) of
        {ok, disabled} ->
            disabled;
        {ok, AutoSaveDir} ->
            Dir = filename:absname(AutoSaveDir),
            %% Create the directory if needed; an already-existing
            %% directory is fine.
            DirStatus = case file:make_dir(Dir) of
                            ok ->
                                ok;
                            {error, eexist} ->
                                ok;
                            Error ->
                                Error
                        end,
            case DirStatus of
                ok ->
                    {{Year, Month, Day}, {Hour, Minute, Second}} = erlang:localtime(),
                    {ok, Problems} = application:get_env(problems),
                    {Description, _Spec, _Time, _State} = lists:nth(ProblemIdx+1, Problems),
                    %% Strip characters that are unsafe in filenames.
                    ProblemDesc = re:replace(Description, "[^a-zA-Z0-9.,-=()]", "", [global]),
                    %% Name format: YYYYMMDD_HHMMSS_p<Idx>_r<Round>_<Desc>.sav
                    DateString = io_lib:format("~w~2..0w~2..0w", [Year, Month, Day]),
                    TimeString = io_lib:format("~2..0w~2..0w~2..0w", [Hour, Minute, Second]),
                    ProbString = io_lib:format("p~w_r~w_~s", [ProblemIdx, Round, ProblemDesc]),
                    FileName = DateString ++ "_" ++ TimeString ++ "_" ++ ProbString ++ ".sav",
                    {ok, filename:join([Dir, FileName])};
                _ ->
                    DirStatus
            end
    end.
%
% This file is part of AtomVM.
%
% Copyright 2021 <NAME> <<EMAIL>>
%
% Licensed under the Apache License, Version 2.0 (the "License");
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%
% SPDX-License-Identifier: Apache-2.0 OR LGPL-2.1-or-later
%
%%
%% Copyright (c) 2021 dushin.net
%% All rights reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%%-----------------------------------------------------------------------------
%% @doc A <em>naive</em> implementation of the Erlang/OTP `maps' interface.
%%
%% The `maps' module provides several convenience operations for interfacing
%% with the Erlang map type, which associates (unique) keys with values.
%%
%% Note that the ordering of entries in a map is implementation-defined. While
%% many operations in this module present entries in lexical order, users should
%% in general make no assumptions about the ordering of entries in a map.
%%
%% This module implements a susbset of the Erlang/OTP `maps' interface.
%% Some OTP functions are not implemented, and the approach favors
%% correctness and readability over speed and performance.
%% @end
%%-----------------------------------------------------------------------------
-module(maps).
-export([
get/2, get/3,
is_key/2,
put/3,
iterator/1,
next/1,
new/0,
keys/1,
values/1,
to_list/1,
from_list/1,
size/1,
find/2,
filter/2,
fold/3,
map/2,
merge/2,
remove/2,
update/3
]).
-type key() :: term().
-type value() :: term().
-type iterator() ::
{key(), value(), iterator()} | none | nonempty_improper_list(non_neg_integer(), map()).
-type map_or_iterator() :: map() | iterator().
%%-----------------------------------------------------------------------------
%% @param Key the key to get
%% @param Map the map from which to get the value
%% @returns the value in `Map' associated with `Key', if it exists.
%% @throws {badkey, Key} | {badmap, Map}
%% @doc Get the value in `Map' associated with `Key', if it exists.
%%
%% This function throws a `{badkey, Key}' exception if 'Key' does not occur in `Map' or
%% a `{badmap, Map}' if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec get(Key :: key(), Map :: map()) -> Value :: value().
get(Key, Map) ->
    %% Delegates to the BIF, which raises {badkey, Key} / {badmap, Map}.
    erlang:map_get(Key, Map).
%%-----------------------------------------------------------------------------
%% @param Key the key
%% @param Map the map
%% @param Default default value
%% @throws {badmap, Map}
%% @returns the value in `Map' associated with `Key', or `Default', if
%% the key is not associated with a value in `Map'.
%% @doc Get the value in `Map' associated with `Key', or `Default', if
%% the key is not associated with a value in `Map'.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec get(Key :: key(), Map :: map(), Default :: term()) -> Value :: value().
get(Key, Map, Default) ->
    %% A missing key yields Default; a non-map still raises {badmap, Map}.
    try ?MODULE:get(Key, Map) of
        Value -> Value
    catch
        _:{badkey, _} -> Default
    end.
%%-----------------------------------------------------------------------------
%% @param Key the key
%% @param Map the map
%% @returns `true' if `Key' is associated with a value in `Map'; `false', otherwise.
%% @throws {badmap, Map}
%% @doc Return `true' if `Key' is associated with a value in `Map'; `false', otherwise.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec is_key(Key :: key(), Map :: map()) -> boolean().
is_key(Key, Map) ->
    %% Delegates to the BIF; raises {badmap, Map} on a non-map.
    erlang:is_map_key(Key, Map).
%%-----------------------------------------------------------------------------
%% @param Key the key
%% @param Value the value
%% @param Map the map
%% @returns A copy of `Map' containing the `{Key, Value}' association.
%% @throws {badmap, Map}
%% @doc Return the map containing the `{Key, Value}' association.
%%
%% If `Key' occurs in `Map' then it will be over-written. Otherwise, the
%% returned map will contain the new association.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec put(Key :: key(), Value :: value(), Map :: map()) -> map().
put(Key, Value, Map) when is_map(Map) ->
    %% => inserts or overwrites the association.
    Map#{Key => Value};
put(_Key, _Value, NotAMap) ->
    throw({badmap, NotAMap}).
%%-----------------------------------------------------------------------------
%% @param Map the map
%% @returns an iterator structure that can be used to iterate over associations
%% in a map.
%% @throws {badmap, Map}
%% @see next/1
%% @doc Return an iterator structure that can be used to iterate over associations
%% in a map.
%%
%% In general, users shouuld make no assumptions about the order in which entries
%% appear in an iterator. The order of entries in a map is implementation-defined.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec iterator(Map :: map()) -> iterator().
iterator(Map) when is_map(Map) ->
    %% Position 0 paired with the map itself marks a fresh iterator.
    [0 | Map];
iterator(NotAMap) ->
    throw({badmap, NotAMap}).
%%-----------------------------------------------------------------------------
%% @param Iterator a map iterator
%% @returns the key and value, along with the next iterator in the map, or the
%% atom `none' if there are no more items over which to iterate.
%% @throws badarg
%% @doc Returns the next key and value in the map, along with
%% a new iterator that can be used to iterate over the remainder of the map.
%%
%% This function throws a `badarg' exception if the supplied iterator is not
%% of the expected type. Only use iterators that are returned from functions
%% in this module.
%% @end
%%-----------------------------------------------------------------------------
-spec next(Iterator :: iterator()) ->
    {Key :: key(), Value :: value(), NextIterator :: iterator()} | none.
next([_Pos | _Map] = _Iterator) ->
    %% Stub: this function is replaced by a NIF at runtime; reaching
    %% this body means the NIF was not loaded.
    throw(nif_error).
%%-----------------------------------------------------------------------------
%% @returns a new map
%% @doc Return a new (empty) map.
%% @end
%%-----------------------------------------------------------------------------
%% new/0 builds an empty map, so the return type is map(), not
%% iterator() as the spec previously (incorrectly) claimed.
-spec new() -> map().
new() ->
    #{}.
%%-----------------------------------------------------------------------------
%% @param Map the map
%% @returns the list of keys that occur in this map.
%% @throws {badmap, Map}
%% @doc Returns the list of keys that occur in this map.
%%
%% No guarantees are provided about the order of keys returned from this function.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec keys(Map :: map()) -> [key()].
keys(Map) when is_map(Map) ->
    %% Walk a fresh iterator, collecting keys in iteration order
    %% (which is implementation-defined).
    iterate_keys(maps:next(maps:iterator(Map)), []);
keys(Map) ->
    throw({badmap, Map}).
%%-----------------------------------------------------------------------------
%% @param Map the map
%% @returns the list of values that occur in this map.
%% @throws {badmap, Map}
%% @doc Returns the list of values that occur in this map.
%%
%% No guarantees are provided about the order of values returned from this function.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec values(Map :: map()) -> [key()].
values(Map) when is_map(Map) ->
    %% Walk a fresh iterator, collecting values in iteration order
    %% (which is implementation-defined).
    iterate_values(maps:next(maps:iterator(Map)), []);
values(Map) ->
    throw({badmap, Map}).
%%-----------------------------------------------------------------------------
%% @param Map
%% @returns a list of `[{Key, Value}]' tuples
%% @doc Return the list of entries, expressed as `{Key, Value}' pairs, in the supplied map.
%%
%% No guarantees are provided about the order of entries returned from this function.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec to_list(Map :: map()) -> [key()].
to_list(Map) when is_map(Map) ->
    %% Walk a fresh iterator, collecting {Key, Value} pairs in
    %% iteration order (which is implementation-defined).
    iterate_entries(maps:next(maps:iterator(Map)), []);
to_list(Map) ->
    throw({badmap, Map}).
%%-----------------------------------------------------------------------------
%% @param List a list of `[{Key, Value}]' pairs
%% @returns the map containing the entries from the list of supplied key-value pairs.
%% @throws badarg
%% @doc This function constructs a map from the supplied list of key-value pairs.
%%
%% If the input list contains duplicate keys, the returned map will contain the
%% right-most entry.
%%
%% This function will raise a `badarg' exception if the input is not a proper
%% list or contains an element that is not a key-value pair.
%% @end
%%-----------------------------------------------------------------------------
-spec from_list(List :: [{Key :: key(), Value :: value()}]) -> map().
from_list(List) when is_list(List) ->
    %% iterate_from_list/2 (defined later in this module) folds the
    %% pairs into an empty map; per the @doc above, the right-most
    %% entry wins for duplicate keys.
    iterate_from_list(List, ?MODULE:new());
from_list(_List) ->
    throw(badarg).
%%-----------------------------------------------------------------------------
%% @param Map the map
%% @returns the size of the map
%% @throws {badmap, Map}
%% @doc Returns the size of (i.e., the number of entries in) the map
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec size(Map :: map()) -> non_neg_integer().
size(Map) when is_map(Map) ->
    %% Delegates to the constant-time BIF.
    erlang:map_size(Map);
size(Map) ->
    throw({badmap, Map}).
%%-----------------------------------------------------------------------------
%% @param Key the key to find
%% @param Map the map in which to search
%% @returns `{ok, Value}' if `Key' is in `Map'; `error', otherwise.
%% @throws {badmap, Map}
%% @doc Returns `{ok, Value}' if `Key' is in `Map'; `error', otherwise.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec find(Key :: key(), Map :: map()) -> {ok, Value :: value()} | error.
find(Key, Map) ->
    %% A missing key maps to 'error'; {badmap, Map} propagates.
    try ?MODULE:get(Key, Map) of
        Value -> {ok, Value}
    catch
        _:{badkey, _} -> error
    end.
%%-----------------------------------------------------------------------------
%% @param Pred a function used to filter entries from the map
%% @param MapOrIterator the map or map iterator to filter
%% @returns a map containing all elements in `MapOrIterator' that satisfy `Pred'
%% @throws {badmap, Map} | badarg
%% @doc Return a map who's entries are filtered by the supplied predicate.
%%
%% This function returns a new map containing all elements from the input
%% `MapOrIterator' that satisfy the input `Pred'.
%%
%% The supplied predicate is a function from key-value inputs to a boolean value.
%%
%% This function throws a `{badmap, Map}' exception if `Map' is not a map or map iterator,
%% and a `badarg' exception if the input predicate is not a function.
%% @end
%%-----------------------------------------------------------------------------
-spec filter(
    Pred :: fun((Key :: key(), Value :: value()) -> boolean()),
    MapOrIterator :: map_or_iterator()
) -> map().
filter(Pred, Map) when is_function(Pred, 2) andalso is_map(Map) ->
    %% Fresh map: filter over a new iterator into an empty result map.
    iterate_filter(Pred, maps:next(maps:iterator(Map)), ?MODULE:new());
filter(Pred, [Pos | Map] = Iterator) when
    is_function(Pred, 2) andalso is_integer(Pos) andalso is_map(Map)
->
    %% Already an iterator ([Pos | Map]): resume filtering from it.
    iterate_filter(Pred, maps:next(Iterator), ?MODULE:new());
filter(_Pred, Map) when not is_map(Map) ->
    throw({badmap, Map});
filter(_Pred, _Map) ->
    %% Map is a map but Pred is not a 2-ary function.
    throw(badarg).
%%-----------------------------------------------------------------------------
%% @param Fun function over which to fold values
%% @param Init the initial value of the fold accumulator
%% @param MapOrIterator the map or map iterator over which to fold
%% @returns the result of folding over all elements of the supplied map.
%% @throws {badmap, Map} | badarg
%% @doc Fold over the entries in a map.
%%
%% This function takes a function used to fold over all entries in a map
%% and an initial accumulator value to use as the value supplied to the
%% first entry in the map.
%%
%% This function throws a `badmap' exception if `Map' is not a map or map iterator,
%% and a `badarg' exception if the input function is not a function.
%% @end
%%-----------------------------------------------------------------------------
-spec fold(
    Fun :: fun((Key :: key(), Value :: value(), Accum :: term()) -> term()),
    Init :: term(),
    MapOrIterator :: map_or_iterator()
) -> term().
fold(Fun, Init, Map) when is_function(Fun, 3) andalso is_map(Map) ->
    %% Fresh map: fold over a new iterator.
    iterate_fold(Fun, maps:next(maps:iterator(Map)), Init);
fold(Fun, Init, [Pos | Map] = Iterator) when
    is_function(Fun, 3) andalso is_integer(Pos) andalso is_map(Map)
->
    %% Already an iterator ([Pos | Map]): resume folding from it.
    iterate_fold(Fun, maps:next(Iterator), Init);
fold(_Fun, _Init, Map) when not is_map(Map) ->
    throw({badmap, Map});
fold(_Fun, _Init, _Map) ->
    %% Map is a map but Fun is not a 3-ary function.
    throw(badarg).
%%-----------------------------------------------------------------------------
%% @param Fun the function to apply to every entry in the map
%% @param Map the map to which to apply the map function
%% @returns the result of applying `Fun' to every entry in `Map'
%% @throws {badmap, Map} | badarg
%% @doc Returns the result of applying a function to every element of a map.
%%
%% This function throws a `badmap' exception if `Map' is not a map or map iterator,
%% and a `badarg' exception if the input function is not a function.
%% @end
%%-----------------------------------------------------------------------------
-spec map(Fun :: fun((Key :: key(), Value :: value()) -> value()), Map :: map_or_iterator()) ->
    map().
map(Fun, Map) when is_function(Fun, 2) andalso is_map(Map) ->
    %% Fresh map: transform over a new iterator into an empty result map.
    iterate_map(Fun, maps:next(maps:iterator(Map)), ?MODULE:new());
map(Fun, [Pos | Map] = Iterator) when
    is_function(Fun, 2) andalso is_integer(Pos) andalso is_map(Map)
->
    %% Already an iterator ([Pos | Map]): resume mapping from it.
    iterate_map(Fun, maps:next(Iterator), ?MODULE:new());
map(_Fun, Map) when not is_map(Map) ->
    throw({badmap, Map});
map(_Fun, _Map) ->
    %% Map is a map but Fun is not a 2-ary function.
    throw(badarg).
%%-----------------------------------------------------------------------------
%% @param Map1 a map
%% @param Map2 a mpa
%% @returns the result of merging entries from `Map1' and `Map2'.
%% @throws {badmap, Map}
%% @doc Merge two maps to yield a new map.
%%
%% If `Map1' and `Map2' contain the same key, then the value from `Map2' will be used.
%%
%% This function throws a `badmap' exception if neither `Map1' nor `Map2' is a map.
%% @end
%%-----------------------------------------------------------------------------
-spec merge(Map1 :: map(), Map2 :: map()) -> map().
merge(Map1, Map2) when is_map(Map1) andalso is_map(Map2) ->
    %% Fold Map2's entries into Map1, so Map2 wins on key conflicts.
    iterate_merge(maps:next(maps:iterator(Map2)), Map1);
merge(Map1, _Map2) when not is_map(Map1) ->
    throw({badmap, Map1});
merge(_Map1, Map2) when not is_map(Map2) ->
    throw({badmap, Map2}).
%%-----------------------------------------------------------------------------
%% @param Key the key to remove
%% @param MapOrIterator the map or map iterator from which to remove the key
%% @returns a new map without `Key' as an entry.
%% @throws {badmap, Map}
%% @doc Remove an entry from a map using a key.
%%
%% If `Key' does not occur in `Map', then the returned Map has the same
%% entries as the input map or map iterator.
%%
%% Note. This function extends the functionality of the OTP `remove/2' function,
%% since the OTP interface only takes a map as input.
%%
%% This function throws a `badmap' exception if `Map' is not a map or map iterator.
%% @end
%%-----------------------------------------------------------------------------
-spec remove(Key :: key(), MapOrIterator :: map_or_iterator()) -> map().
remove(Key, Map) when is_map(Map) ->
    %% Only rebuild the map when the key is actually present; otherwise
    %% the input map is returned as-is, avoiding a full copy.
    case ?MODULE:is_key(Key, Map) of
        true ->
            iterate_remove(Key, maps:next(maps:iterator(Map)), ?MODULE:new());
        _ ->
            Map
    end;
remove(Key, [Pos | Map] = Iterator) when is_integer(Pos) andalso is_map(Map) ->
    %% Map iterator ([Pos | Map]): always rebuilds, even when Key is
    %% absent, since membership cannot be checked cheaply mid-iteration.
    iterate_remove(Key, maps:next(Iterator), ?MODULE:new());
remove(_Key, Map) when not is_map(Map) ->
    throw({badmap, Map}).
%%-----------------------------------------------------------------------------
%% @param Key the key to update
%% @param Value the value to update
%% @param Map the map to update
%% @returns a new map, with `Key' updated with `Value'
%% @throws {badmap, Map}
%% @doc Returns a new map with an updated key-value association.
%%
%% This function throws a `badmap' exception if `Map' is not a map.
%% @end
%%-----------------------------------------------------------------------------
-spec update(Key :: key(), Value :: value(), Map :: map()) -> map().
%% Associate Key with Value in Map, replacing any previous association.
%% A non-map argument raises a thrown {badmap, _} error.
update(Key, Value, Map) when is_map(Map) ->
    Map#{Key => Value};
update(_Key, _Value, NotAMap) ->
    %% First clause consumed every map, so this argument is not a map.
    throw({badmap, NotAMap}).
%%
%% Internal functions
%%
%% @private
%% Collect every key from a map iterator, preserving iteration order.
iterate_keys(none, Collected) ->
    lists:reverse(Collected);
iterate_keys({K, _V, Iter}, Collected) ->
    iterate_keys(maps:next(Iter), [K | Collected]).
%% @private
%% Collect every value from a map iterator, preserving iteration order.
iterate_values(none, Collected) ->
    lists:reverse(Collected);
iterate_values({_K, V, Iter}, Collected) ->
    iterate_values(maps:next(Iter), [V | Collected]).
%% @private
%% Collect every {Key, Value} pair from a map iterator, in order.
iterate_entries(none, Collected) ->
    lists:reverse(Collected);
iterate_entries({K, V, Iter}, Collected) ->
    iterate_entries(maps:next(Iter), [{K, V} | Collected]).
%% @private
%% Fold a map iterator into a map keeping only the entries for which
%% Pred(Key, Value) returns exactly 'true'.
iterate_filter(_Pred, none, Acc) ->
    Acc;
iterate_filter(Pred, {K, V, Iter}, Acc) ->
    Next = maps:next(Iter),
    case Pred(K, V) of
        true -> iterate_filter(Pred, Next, Acc#{K => V});
        _ -> iterate_filter(Pred, Next, Acc)
    end.
%% @private
%% Left fold over a map iterator: Fun(Key, Value, AccIn) -> AccOut.
iterate_fold(_Fun, none, Acc) ->
    Acc;
iterate_fold(Fun, {K, V, Iter}, Acc) ->
    iterate_fold(Fun, maps:next(Iter), Fun(K, V, Acc)).
%% @private
%% Build a new map whose value for each key K is Fun(K, V), where V is
%% the value in the iterated map.
iterate_map(_Fun, none, Acc) ->
    Acc;
iterate_map(Fun, {K, V, Iter}, Acc) ->
    iterate_map(Fun, maps:next(Iter), Acc#{K => Fun(K, V)}).
%% @private
%% Pour every entry of a map iterator into Acc, overwriting entries
%% that already exist under the same key.
iterate_merge(none, Acc) ->
    Acc;
iterate_merge({K, V, Iter}, Acc) ->
    iterate_merge(maps:next(Iter), Acc#{K => V}).
%% @private
%% Rebuild a map from an iterator, dropping every entry whose key
%% matches Key exactly.
iterate_remove(_Key, none, Acc) ->
    Acc;
iterate_remove(Key, {K, V, Iter}, Acc) ->
    Next = maps:next(Iter),
    case K of
        Key -> iterate_remove(Key, Next, Acc);
        _ -> iterate_remove(Key, Next, Acc#{K => V})
    end.
%% @private
%% Build a map from a proper list of {Key, Value} pairs; throws badarg
%% for any other shape (non-pair element or improper list tail).
iterate_from_list([], Acc) ->
    Acc;
iterate_from_list([{K, V} | Rest], Acc) ->
    iterate_from_list(Rest, Acc#{K => V});
iterate_from_list(_Other, _Acc) ->
    throw(badarg).
% Copyright 2012-2014 Cloudant
%
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(wkb_writer).
-include("wkb.hrl").
-export([geojson_to_wkb/1]).
%% @doc Convert a GeoJSON geometry to its WKB (well-known binary) form.
%% Accepts an already-decoded jiffy EJSON tuple, a string/charlist of
%% JSON (converted to a binary first), or a JSON binary which is
%% decoded with jiffy. Returns {ok, WkbBinary}.
geojson_to_wkb(Data) when is_tuple(Data)->
    {ok, _Dims, Wkb} = parse_geom(Data),
    {ok, Wkb};
geojson_to_wkb(Data) when is_list(Data) ->
    geojson_to_wkb(list_to_binary(Data));
geojson_to_wkb(Json) ->
    JsonData = jiffy:decode(Json),
    {ok, _Dims, Wkb} = parse_geom(JsonData),
    {ok, Wkb}.
% private api
%
% The default signedness is unsigned.
% The default endianness is big.
%
%% Dispatch on the decoded GeoJSON object: plain geometries carry a
%% "coordinates" member, while a GeometryCollection carries a
%% "geometries" member. Anything else is a parse error.
parse_geom({JsonData}) ->
    % geometry collection is a special case
    case lists:keyfind(<<"coordinates">>, 1, JsonData) of
        {_, Coords} ->
            {_, Type} = lists:keyfind(<<"type">>, 1, JsonData),
            parse_geom(Type, Coords);
        _ ->
            case lists:keyfind(<<"geometries">>, 1, JsonData) of
                {_, Geometries} ->
                    {_, Type} = lists:keyfind(<<"type">>, 1, JsonData),
                    parse_geom(Type, Geometries);
                _ ->
                    throw({error, "error parsing geometries"})
            end
    end.
%% parse_geom(GeoJsonType, Coords) -> {ok, Dims, WkbBinary}
%%
%% Encode one geometry. Every encoding starts with a one-byte byte
%% order marker (0 = big endian) and the 32-bit geometry type code;
%% all multi-element geometries additionally carry a 32-bit element
%% count. Dims is the coordinate dimensionality (2, 3 or 4).
parse_geom(?POINT, Coords) ->
    %% A point is a single coordinate list; it has no count field.
    {ok, Dims, _Cnt, NewAcc} = make_pts([Coords], 0, <<>>),
    Type = create_ewb_type(?wkbPoint, length(Coords)),
    {ok, Dims, <<0:8, Type:32, NewAcc/binary>>};
parse_geom(?LINESTRING, Coords) ->
    {ok, Dims, Num, NewAcc} = make_pts(Coords, 0, <<>>),
    Type = create_ewb_type(?wkbLineString, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(?POLYGON, Coords) ->
    {ok, Dims, Num, NewAcc} = make_linear_ring(Coords, 0, <<>>),
    Type = create_ewb_type(?wkbPolygon, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(?MULTIPOINT, Coords) ->
    {ok, Dims, Num, NewAcc} = parse_nested_geoms(Coords, ?POINT, 0, <<>>),
    Type = create_ewb_type(?wkbMultiPoint, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(?MULTILINESTRING, Coords) ->
    {ok, Dims, Num, NewAcc} = parse_nested_geoms(Coords, ?LINESTRING, 0, <<>>),
    Type = create_ewb_type(?wkbMultiLineString, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(?MULTIPOLYGON, Coords) ->
    {ok, Dims, Num, NewAcc} = parse_nested_geoms(Coords, ?POLYGON, 0, <<>>),
    Type = create_ewb_type(?wkbMultiPolygon, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(?GEOMETRYCOLLECTION, Geometries) ->
    %% Each member is a full GeoJSON geometry object; the Dims kept is
    %% the one reported by the last member encoded.
    {Dims, Num, NewAcc} = lists:foldl(fun(C, {_, Cntr, Acc1}) ->
        {ok, Dims, Acc2} = parse_geom(C),
        {Dims, Cntr + 1, <<Acc1/binary, Acc2/binary>>}
    end, {0, 0, <<>>}, Geometries),
    Type = create_ewb_type(?wkbGeometryCollection, Dims),
    {ok, Dims, <<0:8, Type:32, Num:32, NewAcc/binary>>};
parse_geom(false, _Data) ->
    %% lists:keyfind returned false: the "type" member was missing.
    throw({error, "error parsing geojson, geometry type not defined."}).
%% Encode each member of a multi-* geometry with the given inner
%% GeoJSON type, concatenating the per-member encodings and counting
%% the members. Returns {ok, Dims, Count, Binary} where Dims comes
%% from the last member encoded (0 for an empty list).
parse_nested_geoms(GeomList, Type, Cntr, Acc) ->
    parse_nested_geoms(GeomList, Type, 0, Cntr, Acc).
parse_nested_geoms([], _Type, Dims, Cntr, Acc) ->
    {ok, Dims, Cntr, Acc};
parse_nested_geoms([Coord | Rem], Type, _, Cntr, Acc) ->
    {ok, Dims, Acc1} = parse_geom(Type, Coord),
    parse_nested_geoms(Rem, Type, Dims, Cntr + 1, <<Acc/binary, Acc1/binary>>).
%% Encode a list of linear rings (each ring is a list of points). Each
%% ring is emitted as its 32-bit point count followed by the packed
%% points; returns {ok, Dims, RingCount, Binary} with Dims taken from
%% the last ring processed (0 for an empty list).
make_linear_ring(CoordList, Cntr, Acc) ->
    make_linear_ring(CoordList, 0, Cntr, Acc).
make_linear_ring([], Dims, Cntr, Acc) ->
    {ok, Dims, Cntr, Acc};
make_linear_ring([Coords | Rem], _, Cntr, Acc) ->
    {ok, Dims, Num, NewAcc} = make_pts(Coords, 0, <<>>),
    make_linear_ring(Rem, Dims, Cntr + 1, <<Acc/binary, Num:32, NewAcc/binary>>).
%% Pack a list of points into big-endian 64-bit floats, appended to
%% Bin. The dimensionality is derived from the first point's length.
%% Returns {ok, Dims, PointCount, Binary} (Dims is 0 for no points).
make_pts(Points, Count, Bin) ->
    make_pts(Points, 0, Count, Bin).
make_pts([], Dims, Count, Bin) ->
    {ok, Dims, Count, Bin};
make_pts([First | _] = Points, 0, Count, Bin) ->
    %% First pass: fix the coordinate dimensionality.
    make_pts(Points, length(First), Count, Bin);
make_pts([Point | Rest], Dims, Count, Bin) ->
    Encoded = << <<Ordinate:64/float>> || Ordinate <- Point >>,
    make_pts(Rest, Dims, Count + 1, <<Bin/binary, Encoded/binary>>).
%% Derive the (E)WKB geometry type code for the given dimensionality:
%% 2 -> plain type, 3 -> type with the Z flag, 4 -> Z and M flags.
%% Any other dimensionality is an error.
create_ewb_type(Type, 2) ->
    Type;
create_ewb_type(Type, 3) ->
    Type bor ?wkbZ;
create_ewb_type(Type, 4) ->
    Type bor ?wkbZ bor ?wkbM;
create_ewb_type(_Type, _Dims) ->
    throw({error, "error setting WKB type"}).
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
-module(fieldmask).
-export([mask/2, parse/1, apply_mask/2]).
-type value() :: null
| number()
| boolean()
| binary()
| [value()]
| #{binary() => value()}.
%% type of a JSON value represented in jsone's map object format.
-type mask_expr() :: string() | binary().
%% type of mask expression. You may use a string or a binary as mask expression.
-type mask() :: '*'
| {'*', mask()}
| [submask()].
-type submask() :: binary()
| {binary(), mask()}.
%% @doc select the parts specified in the `Mask' expression of a `Value'
-spec mask(mask_expr(), value()) -> value().
mask(Mask, Value) ->
    %% Crashes with a badmatch if the mask expression does not parse;
    %% use parse/1 directly to handle parse errors gracefully.
    {ok, M} = parse(Mask),
    apply_mask(M, Value).
%% @doc parse the `Mask' expression to internal representation
-spec parse(mask_expr()) -> Result
      when Result :: {ok, mask()} | Error,
           Error :: {error, {Line, module(), Reason}},
           Line :: pos_integer(),
           Reason :: term().
parse(Mask) ->
    %% Tokenize first, then hand the tokens to the generated parser;
    %% lexer failures are re-wrapped so both stages report {error, _}.
    case fieldmask_lexer:string(unicode:characters_to_list(Mask)) of
        {ok, Tokens, _} ->
            fieldmask_parser:parse(Tokens);
        {error, Error, _} ->
            {error, Error}
    end.
%% @doc apply `Mask' in internal representation to a `Value'
-spec apply_mask(mask(), value()) -> value().
%% Recursively select the parts of a value named by the parsed mask.
%% Lists of values are masked element-wise; '*' keeps a value whole;
%% {'*', Sub} masks every map value with Sub; a list of submasks
%% projects an object onto the named fields; a bare binary field name
%% looks the field up (crashing with badkey if it is absent).
apply_mask(Mask, Values) when is_list(Values) ->
    %% A list value: apply the same mask to every element.
    lists:map(fun(Elem) -> apply_mask(Mask, Elem) end, Values);
apply_mask('*', Value) ->
    Value;
apply_mask({'*', Inner}, Object) ->
    maps:map(fun(_K, V) -> apply_mask(Inner, V) end, Object);
apply_mask({Field, Inner}, Object) when is_binary(Field) ->
    %% Select the field, then mask its value with the inner mask.
    apply_mask(Inner, apply_mask(Field, Object));
apply_mask(Submasks, Object) when is_list(Submasks) ->
    Pairs = lists:map(
        fun({Field, _} = Sub) -> {Field, apply_mask(Sub, Object)};
           (Field) -> {Field, apply_mask(Field, Object)}
        end,
        Submasks),
    maps:from_list(Pairs);
apply_mask(Field, Object) when is_binary(Field) ->
    maps:get(Field, Object).
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2002-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Purpose : Using dsetelement to make multiple-field record updates
%% faster.
%% The expansion of record field updates, when more than one field is
%% updated, but not a majority of the fields, will create a sequence of
%% calls to 'erlang:setelement(Index, Value, Tuple)' where Tuple in the
%% first call is the original record tuple, and in the subsequent calls
%% Tuple is the result of the previous call. Furthermore, all Index
%% values are constant positive integers, and the first call to
%% 'setelement' will have the greatest index. Thus all the following
%% calls do not actually need to test at run-time whether Tuple has type
%% tuple, nor that the index is within the tuple bounds.
%%
%% Since this introduces destructive updates in the Core Erlang code, it
%% must be done as a last stage before going to lower-level code.
%%
%% NOTE: Because there are currently no write barriers in the system,
%% this kind of optimization can only be done when we are sure that
%% garbage collection will not be triggered between the creation of the
%% tuple and the destructive updates - otherwise we might insert
%% pointers from an older generation to a newer.
%%
%% The rewriting is done as follows:
%%
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in call 'erlang':'setelement(3, X1, Value2)
%% =>
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in do primop dsetelement(3, X1, Value2)
%% X1
%% and
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in let X2 = call 'erlang':'setelement(3, X1, Value2)
%% in ...
%% =>
%% let X2 = call 'erlang':'setelement(5, Tuple, Value1)
%% in do primop 'dsetelement(3, X2, Value2)
%% ...
%% if X1 is used exactly once.
%% Thus, we need to track variable usage.
%%
-module(sys_core_dsetel).
-export([module/2]).
-include("core_parse.hrl").
-spec module(cerl:c_module(), [compile:option()]) -> {'ok', cerl:c_module()}.
module(M0, _Options) ->
    %% Compiler-pass entry point; the compile options are unused.
    M = visit_module(M0),
    {ok,M}.
%% Visit every function definition in the module, each starting from an
%% empty variable-usage environment (a map: VarName => UseCount).
visit_module(#c_module{defs=Ds0}=R) ->
    Env = #{},
    Ds = visit_module_1(Ds0, Env, []),
    R#c_module{defs=Ds}.
%% Process each function definition in turn. On a crash, report which
%% function was being compiled and re-raise with the original class,
%% reason and stacktrace.
visit_module_1([{Name,F0}|Fs], Env, Acc) ->
    try visit(Env, F0) of
        {F,_} ->
            visit_module_1(Fs, Env, [{Name,F}|Acc])
    catch
        %% Capture the stacktrace in the catch pattern (OTP 21+):
        %% erlang:get_stacktrace/0 was deprecated in OTP 21 and removed
        %% in OTP 24, so the old call no longer compiles.
        Class:Error:Stack ->
            #c_var{name={Func,Arity}} = Name,
            io:fwrite("Function: ~w/~w\n", [Func,Arity]),
            erlang:raise(Class, Error, Stack)
    end;
visit_module_1([], _, Acc) ->
    lists:reverse(Acc).
%% visit(Env, CoreExpr) -> {CoreExpr', Env'}
%%
%% Walk a Core Erlang expression, counting each variable use in Env
%% (VarName => UseCount) and rewriting eligible nested setelement/3
%% calls into destructive updates (see rewrite/3, reached from the
%% c_let clause). Scopes are handled with bind_vars/restore_vars.
visit(Env, #c_var{name={_,_}}=R) ->
    %% Ignore local function name.
    {R, Env};
visit(Env0, #c_var{name=X}=R) ->
    %% There should not be any free variables. If there are,
    %% the case will fail with an exception.
    case Env0 of
        #{X:=N} ->
            {R, Env0#{X:=N+1}}
    end;
visit(Env, #c_literal{}=R) ->
    {R, Env};
visit(Env0, #c_tuple{es=Es0}=R) ->
    {Es1,Env1} = visit_list(Env0, Es0),
    {R#c_tuple{es=Es1}, Env1};
visit(Env0, #c_map{es=Es0}=R) ->
    {Es1,Env1} = visit_list(Env0, Es0),
    {R#c_map{es=Es1}, Env1};
visit(Env0, #c_map_pair{key=K0,val=V0}=R) ->
    {K,Env1} = visit(Env0, K0),
    {V,Env2} = visit(Env1, V0),
    {R#c_map_pair{key=K,val=V}, Env2};
visit(Env0, #c_cons{hd=H0,tl=T0}=R) ->
    {H1,Env1} = visit(Env0, H0),
    {T1,Env2} = visit(Env1, T0),
    {R#c_cons{hd=H1,tl=T1}, Env2};
visit(Env0, #c_binary{segments=Segs}=R) ->
    %% Binary segments are only counted, never rewritten.
    Env = visit_bin_segs(Env0, Segs),
    {R, Env};
visit(Env0, #c_values{es=Es0}=R) ->
    {Es1,Env1} = visit_list(Env0, Es0),
    {R#c_values{es=Es1}, Env1};
visit(Env0, #c_fun{vars=Vs, body=B0}=R) ->
    {Xs, Env1} = bind_vars(Vs, Env0),
    {B1,Env2} = visit(Env1, B0),
    {R#c_fun{body=B1}, restore_vars(Xs, Env0, Env2)};
visit(Env0, #c_let{vars=Vs, arg=A0, body=B0}=R) ->
    %% The only rewrite site: a let whose arg/body are setelement
    %% calls may become a destructive dsetelement (see rewrite/3).
    {A1,Env1} = visit(Env0, A0),
    {Xs,Env2} = bind_vars(Vs, Env1),
    {B1,Env3} = visit(Env2, B0),
    rewrite(R#c_let{arg=A1,body=B1}, Env3, restore_vars(Xs, Env1, Env3));
visit(Env0, #c_seq{arg=A0, body=B0}=R) ->
    {A1,Env1} = visit(Env0, A0),
    {B1,Env2} = visit(Env1, B0),
    {R#c_seq{arg=A1,body=B1}, Env2};
visit(Env0, #c_case{arg=A0,clauses=Cs0}=R) ->
    {A1,Env1} = visit(Env0, A0),
    {Cs1,Env2} = visit_list(Env1, Cs0),
    {R#c_case{arg=A1,clauses=Cs1}, Env2};
visit(Env0, #c_clause{pats=Ps,guard=G0,body=B0}=R) ->
    {Vs, Env1} = visit_pats(Ps, Env0),
    {G1,Env2} = visit(Env1, G0),
    {B1,Env3} = visit(Env2, B0),
    {R#c_clause{guard=G1,body=B1}, restore_vars(Vs, Env0, Env3)};
visit(Env0, #c_receive{clauses=Cs0,timeout=T0,action=A0}=R) ->
    {T1,Env1} = visit(Env0, T0),
    {Cs1,Env2} = visit_list(Env1, Cs0),
    {A1,Env3} = visit(Env2, A0),
    {R#c_receive{clauses=Cs1,timeout=T1,action=A1}, Env3};
visit(Env0, #c_apply{op=Op0, args=As0}=R) ->
    {Op1,Env1} = visit(Env0, Op0),
    {As1,Env2} = visit_list(Env1, As0),
    {R#c_apply{op=Op1,args=As1}, Env2};
visit(Env0, #c_call{module=M0,name=N0,args=As0}=R) ->
    {M1,Env1} = visit(Env0, M0),
    {N1,Env2} = visit(Env1, N0),
    {As1,Env3} = visit_list(Env2, As0),
    {R#c_call{module=M1,name=N1,args=As1}, Env3};
visit(Env0, #c_primop{name=N0, args=As0}=R) ->
    {N1,Env1} = visit(Env0, N0),
    {As1,Env2} = visit_list(Env1, As0),
    {R#c_primop{name=N1,args=As1}, Env2};
visit(Env0, #c_try{arg=E0, vars=Vs, body=B0, evars=Evs, handler=H0}=R) ->
    %% Two scopes: Vs for the success body, Evs for the handler.
    {E1,Env1} = visit(Env0, E0),
    {Xs, Env2} = bind_vars(Vs, Env1),
    {B1,Env3} = visit(Env2, B0),
    Env4 = restore_vars(Xs, Env1, Env3),
    {Ys, Env5} = bind_vars(Evs, Env4),
    {H1,Env6} = visit(Env5, H0),
    {R#c_try{arg=E1,body=B1,handler=H1}, restore_vars(Ys, Env4, Env6)};
visit(Env0, #c_catch{body=B0}=R) ->
    {B1,Env1} = visit(Env0, B0),
    {R#c_catch{body=B1}, Env1};
visit(Env0, #c_letrec{defs=Ds0,body=B0}=R) ->
    {Xs, Env1} = bind_vars([V || {V,_} <- Ds0], Env0),
    {Ds1,Env2} = visit_def_list(Env1, Ds0),
    {B1,Env3} = visit(Env2, B0),
    {R#c_letrec{defs=Ds1,body=B1}, restore_vars(Xs, Env0, Env3)}.
%% The following general code for handling modules is slow if a module
%% contains very many functions. There is special code in visit_module/1
%% which is much faster.
%% visit(Env0, #c_module{defs=D0}=R) ->
%% {R1,Env1} = visit(Env0, #c_letrec{defs=D0,body=#c_nil{}}),
%% {R#c_module{defs=R1#c_letrec.defs}, Env1};
%% Visit each element left-to-right, threading the usage environment.
visit_list(Env, L) ->
    lists:mapfoldl(fun (E, A) -> visit(A, E) end, Env, L).
%% Visit each {Name, Fun} definition of a letrec, threading the
%% environment and keeping the names untouched.
visit_def_list(Env, L) ->
    lists:mapfoldl(fun ({Name,V0}, E0) ->
                           {V1,E1} = visit(E0, V0),
                           {{Name,V1}, E1}
                   end, Env, L).
%% Count variable uses inside binary segment values and sizes; the
%% segments themselves are never rewritten, so only the environment
%% is threaded through and returned.
visit_bin_segs(Env, Segs) ->
    lists:foldl(fun (#c_bitstr{val=Val,size=Sz}, E0) ->
                        {_, E1} = visit(E0, Val),
                        {_, E2} = visit(E1, Sz),
                        E2
                end, Env, Segs).
%% Enter a new scope: give each bound variable a fresh use count of 0
%% and return the variable names so the caller can later restore the
%% outer counts with restore_vars/3.
bind_vars(Vs, Env) ->
    bind_vars(Vs, Env, []).
bind_vars([#c_var{name=X}|Vs], Env0, Xs)->
    bind_vars(Vs, Env0#{X=>0}, [X|Xs]);
bind_vars([], Env,Xs) ->
    {Xs, Env}.
%% Visit a list of patterns, collecting every variable they bind (each
%% enters the environment with a use count of 0).
visit_pats(Ps, Env) ->
    visit_pats(Ps, Env, []).
visit_pats([P|Ps], Env0, Vs0) ->
    {Vs1, Env1} = visit_pat(Env0, P, Vs0),
    visit_pats(Ps, Env1, Vs1);
visit_pats([], Env, Vs) ->
    {Vs, Env}.
%% visit_pat(Env, Pattern, Vars) -> {Vars', Env'}
%%
%% Bind every variable occurring in a pattern (use count 0) and count
%% uses of already-bound variables appearing as bit-syntax sizes.
visit_pat(Env0, #c_var{name=V}, Vs) ->
    {[V|Vs], Env0#{V=>0}};
visit_pat(Env0, #c_tuple{es=Es}, Vs) ->
    visit_pats(Es, Env0, Vs);
visit_pat(Env0, #c_map{es=Es}, Vs) ->
    visit_pats(Es, Env0, Vs);
visit_pat(Env0, #c_map_pair{op=#c_literal{val=exact},key=V,val=K}, Vs0) ->
    {Vs1, Env1} = visit_pat(Env0, V, Vs0),
    visit_pat(Env1, K, Vs1);
visit_pat(Env0, #c_cons{hd=H,tl=T}, Vs0) ->
    {Vs1, Env1} = visit_pat(Env0, H, Vs0),
    visit_pat(Env1, T, Vs1);
visit_pat(Env0, #c_binary{segments=Segs}, Vs) ->
    visit_pats(Segs, Env0, Vs);
visit_pat(Env0, #c_bitstr{val=Val,size=Sz}, Vs0) ->
    {Vs1, Env1} =
        case Sz of
            #c_var{name=V} ->
                %% A variable size is a *use* of an outer binding.
                %% We don't tolerate free variables.
                case Env0 of
                    #{V:=N} ->
                        {Vs0, Env0#{V:=N+1}}
                end;
            _ ->
                visit_pat(Env0, Sz, Vs0)
        end,
    visit_pat(Env1, Val, Vs1);
visit_pat(Env0, #c_alias{pat=P,var=#c_var{name=V}}, Vs) ->
    visit_pat(Env0#{V=>0}, P, [V|Vs]);
visit_pat(Env, #c_literal{}, Vs) ->
    {Vs, Env}.
%% Leave a scope: for each listed variable, restore its use count from
%% the outer environment Outer into Inner; a variable that did not
%% exist outside the scope is removed entirely.
restore_vars([], _Outer, Inner) ->
    Inner;
restore_vars([Var | Rest], Outer, Inner) ->
    Updated =
        case maps:find(Var, Outer) of
            {ok, Count} -> Inner#{Var => Count};
            error -> maps:remove(Var, Inner)
        end,
    restore_vars(Rest, Outer, Updated).
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in call 'erlang':'setelement(3, X1, Value2)
%% =>
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in do primop dsetelement(3, X1, Value2)
%% X1
%% rewrite(Let, BodyEnv, FinalEnv) -> {CoreExpr, Env}
%%
%% Try to turn the inner of two nested setelement/3 calls into a
%% destructive dsetelement primop (see the module comment). BodyEnv
%% carries the use counts from inside the let body; FinalEnv is the
%% environment returned to the caller in all cases. The guard
%% Index1 > Index2 guarantees the inner index is within the tuple the
%% outer setelement just created, so no run-time bounds/type check is
%% needed, and is_safe/1 ensures the written value cannot trigger a GC.
rewrite(#c_let{vars=[#c_var{name=X}=V]=Vs,
               arg=#c_call{module=#c_literal{val='erlang'},
                           name=#c_literal{val='setelement'},
                           args=[#c_literal{val=Index1}, _Tuple, _Val1]
                          }=A,
               body=#c_call{anno=Banno,module=#c_literal{val='erlang'},
                            name=#c_literal{val='setelement'},
                            args=[#c_literal{val=Index2},
                                  #c_var{name=X},
                                  Val2]
                           }
              }=R,
        _BodyEnv, FinalEnv)
  when is_integer(Index1), is_integer(Index2), Index2 > 0, Index1 > Index2 ->
    case is_safe(Val2) of
        true ->
            %% The let body becomes: destructively update X1, then
            %% return X1 itself.
            {R#c_let{vars=Vs,
                     arg=A,
                     body=#c_seq{arg=#c_primop{
                                       anno=Banno,
                                       name=#c_literal{val='dsetelement'},
                                       args=[#c_literal{val=Index2},
                                             V,
                                             Val2]},
                                 body=V}
                    },
             FinalEnv};
        false ->
            {R, FinalEnv}
    end;
%% let X1 = call 'erlang':'setelement(5, Tuple, Value1)
%% in let X2 = 'erlang':'setelement(3, X1, Value2)
%% in ...
%% =>
%% let X2 = call 'erlang':'setelement(5, Tuple, Value1)
%% in do primop dsetelement(3, X2, Value2)
%% ...
%% if X1 is used exactly once.
rewrite(#c_let{vars=[#c_var{name=X1}],
               arg=#c_call{module=#c_literal{val='erlang'},
                           name=#c_literal{val='setelement'},
                           args=[#c_literal{val=Index1}, _Tuple, _Val1]
                          }=A,
               body=#c_let{vars=[#c_var{}=V]=Vs,
                           arg=#c_call{anno=Banno,
                                       module=#c_literal{val='erlang'},
                                       name=#c_literal{val='setelement'},
                                       args=[#c_literal{val=Index2},
                                             #c_var{name=X1},
                                             Val2]},
                           body=B}
              }=R,
        BodyEnv, FinalEnv)
  when is_integer(Index1), is_integer(Index2), Index2 > 0, Index1 > Index2 ->
    %% X1 may only be eliminated when its sole use is the inner call.
    case is_single_use(X1, BodyEnv) andalso is_safe(Val2) of
        true ->
            {R#c_let{vars=Vs,
                     arg=A,
                     body=#c_seq{arg=#c_primop{
                                       anno=Banno,
                                       name=#c_literal{val='dsetelement'},
                                       args=[#c_literal{val=Index2},
                                             V,
                                             Val2]},
                                 body=B}
                    },
             FinalEnv};
        false ->
            {R, FinalEnv}
    end;
rewrite(R, _, FinalEnv) ->
    {R, FinalEnv}.
%% is_safe(CoreExpr) -> true|false
%% Determines whether the Core expression can cause a GC collection at run-time.
%% Note: Assumes that the constant pool is turned on.
%% Only variables and literals are safe: evaluating either never
%% allocates, so no garbage collection can run between the tuple's
%% creation and the destructive update.
is_safe(#c_var{}) -> true;
is_safe(#c_literal{}) -> true;
is_safe(_) -> false.
%% Was the variable used exactly once in its scope, according to the
%% use-count environment?
is_single_use(Var, Env) ->
    case Env of
        #{Var := Count} -> Count =:= 1;
        _ -> false
    end.
%%% @doc Type checking of GraphQL query documents
%%%
%%% The type checker carries out three tasks:
%%%
%%% Make sure that types check. That is, the user supplies a well
%%% typed query document.
%%%
%%% Make sure that types are properly inferred. Some times, the types
%%% the user supply needs an inference pass in order to figure out
%%% what the user supplied. The type-checker also infers the possible
%%% types and makes sure the query document is well typed.
%%%
%%% Handle coercion for the constant fragment of a query document.
%%% Whenever a coercible constant value is encountered in a query
%%% document, or a coercible parameter occurs in a parameter, the type
%%% checker runs "input coercion" which is part canonicalization, part
%%% input validation on input data. The coerced value is expanded into
%%% the query document or parameter string, so the execution engine
%%% always works with coerced data. This choice is somewhat peculiar,
%%% but it serves an optimization purpose since we only have to carry
%%% out a coercion once for a query with constant values.
%%%
%%% Polarity:
%%%
%%% This type checker mentions polarity of types. There are 3 kinds of
%%% polarity: positive, negative and non-polar. The flows of these are
%%% that Client -> Server is positive and Server -> Client is
%%% negative. Non-polar types flows both ways. Since the server engine
%%% doesn't trust the client, type checking follows some polarity
%%% rules. If we check a positive polarity context, we don't trust the
%%% client and we use the schema data to verify that everything is
%%% covered by the client in a valid way. If we check in negative
%%% polarity context, we are the server and can trust things are
%%% correct. So we fold over the query document when considering if
%%% types are correct. Non-polar values fall naturally in both
%%% contexts.
%%%
%%% Algorithm:
%%%
%%% We use a bidirectional type checker. In general we handle two kinds of
%%% typing constructs: G |- e => t (inference) and G |- e <= t,e' (checking)
%%% The first of these gets G,e as inputs and derives a t. The second form
%%% gets G, e, and t as inputs and derives e' which is an e annotated with
%%% more information.
%%%
%%% By having these two forms, the type checking algorithm can switch between
%%% elaboration and lookup of data and checking that the types are correct.
%%% The type checker can thus handle a query in one checking pass over the
%%% structure rather than having to rely on two.
%%% @end
-module(graphql_check).
-include_lib("graphql/include/graphql.hrl").
-include("graphql_internal.hrl").
-include("graphql_schema.hrl").
-export([check/1, check_params/3]).
-export([funenv/1]).
-record(ctx,
{
path = [] :: [any()],
vars = #{} :: #{ binary() => #vardef{} },
frags = #{} :: #{ binary() => #frag{} }
}).
-type ctx() :: #ctx{}.
-type polarity() :: '+' | '-' | '*'.
-type ty() :: schema_type() | schema_object().
-type ty_name() :: binary().
%% -type clause() :: op() | frag_spread() | frag().
%% This is a bidirectional type checker. It proceeds by running three
%% kinds of functions: synth(Gamma, E) -> {ok, T} | {error, Reason}
%% which synthesizes a given type out of its constituent parts.
%% check(Gamma, E, T) -> ok | {error, Reason} which checks that a
%% given term E has type T and sub(S, T) which forms a relation S <: T
%% of subsumption between types.
%% -- INFERENCE ------------------------------------------------------------
%%
%%
%% Elaborate a type and also determine its polarity. This is used for
%% input and output types
%% Context-carrying wrapper around infer_type/1: elaborates a type
%% reference into {Polarity, SchemaType}, turning a lookup failure
%% into a positioned error via err/2 (defined later in this module).
-spec infer_type(ctx(), ty_name() | ty()) -> {ok, {polarity(), ty()}}.
infer_type(Ctx, Tau) ->
    case infer_type(Tau) of
        {error, Reason} ->
            err(Ctx, Reason);
        {Polarity, TauPrime} ->
            {ok, {Polarity, TauPrime}}
    end.
%% Elaborate a type (or type name) and determine its polarity:
%% '+' = input-only (client -> server), '-' = output-only
%% (server -> client), '*' = non-polar (valid in both directions).
%% Wrappers (non_null/list) inherit the polarity of the wrapped type.
-spec infer_type(ty_name() | ty()) -> {polarity(), ty()} | {error, Reason :: term()}.
infer_type({non_null, Ty}) ->
    case infer_type(Ty) of
        {error, Reason} -> {error, Reason};
        {Polarity, V} -> {Polarity, {non_null, V}}
    end;
infer_type({list, Ty}) ->
    case infer_type(Ty) of
        {error, Reason} -> {error, Reason};
        {Polarity, V} -> {Polarity, {list, V}}
    end;
infer_type({scalar, Name}) ->
    %% Look up and assert the name denotes a scalar; the match on the
    %% recursive call's result is the clause's return value.
    #scalar_type{} = Ty = graphql_schema:get(Name),
    {_polarity, Ty} = infer_type(Ty);
%% NonPolar
infer_type(#scalar_type{} = Ty) -> {'*', Ty};
infer_type({enum, _} = E) -> {'*', E};
infer_type(#enum_type{} = Ty) -> {'*', Ty};
%% Positive
infer_type(#input_object_type{} = Ty) -> {'+', Ty};
%% Negative
infer_type(#object_type{} = Ty) -> {'-', Ty};
infer_type(#interface_type{} = Ty) -> {'-', Ty};
infer_type(#union_type{} = Ty) -> {'-', Ty};
%% Lookup
infer_type({name, _, N}) -> infer_type(N);
infer_type(N) when is_binary(N) ->
    case graphql_schema:lookup(N) of
        not_found -> {error, {not_found, N}};
        %% Non-polar types
        #enum_type{} = Enum -> {'*', Enum};
        #scalar_type{} = Scalar -> {'*', Scalar};
        %% Positive types
        #input_object_type{} = IOType -> {'+', IOType};
        %% Negative types
        #object_type{} = OT -> {'-', OT};
        #interface_type{} = IFace -> {'-', IFace};
        #union_type{} = Union -> {'-', Union}
    end.
%% Infer a type and assert it is valid in input context
infer_input_type(Ctx, Ty) ->
    %% Non-polar ('*') and positive ('+') types may appear in input
    %% position; output-only ('-') types are rejected.
    case infer_type(Ctx, Ty) of
        {ok, {'*', Tau}} -> {ok, Tau};
        {ok, {'+', Tau}} -> {ok, Tau};
        {ok, {'-', _}} -> err(Ctx, {invalid_input_type, Ty})
    end.
%% Infer a type and assert it is valid in output context
infer_output_type(Ctx, Ty) ->
    %% Non-polar ('*') and negative ('-') types may appear in output
    %% position; input-only ('+') types are rejected.
    case infer_type(Ctx, Ty) of
        {ok, {'*', Tau}} -> {ok, Tau};
        {ok, {'-', Tau}} -> {ok, Tau};
        {ok, {'+', _}} -> err(Ctx, {invalid_output_type, Ty})
    end.
%% Main inference judgement
%%
%% Given a context and some graphql expression, we derive
%% a valid type for that expression. This is mostly handled by
%% a lookup into the environment.
%% infer(Ctx, Expr) -> {ok, Type}
%%
%% Synthesize a type for a query-document construct by lookup:
%% directives against the two built-ins, operations against the root
%% schema, fragments against their declared type, fragment spreads and
%% variables against the environments carried in Ctx.
infer(Ctx, #directive { id = ID }) ->
    %% Only the two spec-defined directives are supported.
    case graphql_ast:name(ID) of
        <<"include">> -> {ok, graphql_directives:include()};
        <<"skip">> -> {ok, graphql_directives:skip()};
        Name -> err(Ctx, {unknown_directive, Name})
    end;
infer(Ctx, #op { ty = Ty } = Op) ->
    %% Resolve the operation kind (query/mutation/subscription) via
    %% the schema's ROOT entry to its root object type.
    CtxP = add_path(Ctx, Op),
    case graphql_schema:lookup('ROOT') of
        not_found ->
            err(Ctx, no_root_schema);
        Schema ->
            Root = graphql_schema:resolve_root_type(Ty, Schema),
            case graphql_schema:lookup(Root) of
                not_found ->
                    err(CtxP, {type_not_found, Root});
                #object_type{} = Tau ->
                    {ok, Tau}
            end
    end;
infer(Ctx, #frag { ty = Ty } = F) ->
    infer_output_type(add_path(Ctx, F), Ty);
infer(#ctx { frags = FragEnv } = Ctx, #frag_spread { id = ID }) ->
    Name = graphql_ast:name(ID),
    case maps:get(Name, FragEnv, not_found) of
        not_found ->
            CtxP = add_path(Ctx, Name),
            err(CtxP, unknown_fragment);
        #frag{} = Frag ->
            {ok, Frag}
    end;
infer(#ctx { vars = Vars } = Ctx, {var, ID}) ->
    Var = graphql_ast:name(ID),
    case maps:get(Var, Vars, not_found) of
        not_found ->
            err(Ctx, {unbound_variable, Var});
        #vardef {} = VDef ->
            {ok, VDef}
    end;
infer(Ctx, X) ->
    exit({not_implemented, Ctx, X}).
%% Look up the schema type of a selected field in the enclosing type's
%% field map; `__typename' is special-cased as introspection.
infer_field(Ctx, #field { id = ID } = F, FieldTypes) ->
    CtxP = add_path(Ctx, F),
    Name = graphql_ast:name(ID),
    case maps:get(Name, FieldTypes, not_found) of
        not_found when Name == <<"__typename">> ->
            {ok, {introspection, typename}};
        not_found ->
            err(CtxP, unknown_field);
        #schema_field{} = Ty ->
            {ok, Ty}
    end.
%% -- TYPE CHECKING --------------------------------------------------------
%%
%%
%% Check arguments. Follows the general scheme of checking for uniqueness and
%% then check each argument.
%% check_args(Ctx, Args, SchemaArgTypes) -> {ok, CheckedArgs}
%%
%% Reject duplicate argument names up front, then check the supplied
%% arguments against the schema's argument specification.
check_args(Ctx, Args, Ty) ->
    %% Check uniqueness
    NamedArgs = [{graphql_ast:name(K), V} || {K, V} <- Args],
    case graphql_ast:uniq(NamedArgs) of
        ok ->
            ArgTys = maps:to_list(Ty),
            check_args_(Ctx, NamedArgs, ArgTys, []);
        {not_unique, X} ->
            err(Ctx, {not_unique, X})
    end.
%% Meat of the argument checker:
%%
%% Since arguments have positive polarity, they are checked according
%% to the schema arguments. We don't trust the client here as it can
%% omit args and it can put in the wrong values for args as well
%%
%% The meat of the argument checker. Walk over each schema arg and
%% verify it type checks according to the type checking rules.
%% Walk the schema's argument list (positive polarity: the schema
%% drives), pulling each argument out of the supplied list with
%% take_arg/3 (defined later in this module — presumably it also
%% supplies defaults for omitted arguments; confirm there).
check_args_(_Ctx, [], [], Acc) ->
    {ok, Acc};
check_args_(Ctx, [_|_] = Args, [], _Acc) ->
    %% Supplied arguments left over that the schema does not declare.
    err(Ctx, {excess_args, Args});
check_args_(Ctx, Args, [{N, #schema_arg { ty = TyName }} = SArg | Next], Acc) ->
    CtxP = add_path(Ctx, N),
    {ok, Sigma} = infer_input_type(Ctx, TyName),
    {ok, {_, #{ type := ArgTy, value := Val}}, NextArgs} =
        take_arg(CtxP, SArg, Args),
    {ok, Tau} = infer_input_type(Ctx, ArgTy),
    %% Verify type compatibility
    ok = sub_input(CtxP, Tau, Sigma),
    Res = case check_value(CtxP, Val, Tau) of
              {ok, RVal} -> {N, #{ type => Tau, value => RVal}}
          end,
    check_args_(Ctx, NextArgs, Next, [Res|Acc]).
%% Check a single directive against its schema type: the surrounding
%% context must be one of the directive's valid locations and its
%% arguments must type-check.
check_directive(Ctx, Context, #directive{ args = Args, id = ID} = D,
                #directive_type { args = SArgs, locations = Locations } = Ty) ->
    CtxP = add_path(Ctx, D),
    case lists:member(Context, Locations) of
        true ->
            {ok, CArgs} = check_args(CtxP, Args, SArgs),
            {ok, D#directive { args = CArgs, schema = Ty }};
        false ->
            Name = graphql_ast:name(ID),
            err(Ctx, {invalid_directive_location, Name, Context})
    end.
%% @todo this might be an inference
%% Check a list of directives for uniqueness of names, then infer the
%% schema type of each and check it for the given operation context.
check_directives(Ctx, OpType, Dirs) ->
    NamedDirectives = [{graphql_ast:name(ID), D}
                       || #directive { id = ID } = D <- Dirs],
    case graphql_ast:uniq(NamedDirectives) of
        ok ->
            {ok, [begin
                      {ok, Ty} = infer(Ctx, D),
                      {ok, CDir} = check_directive(Ctx, OpType, D, Ty),
                      CDir
                  end || D <- Dirs]};
        {not_unique, X} ->
            err(Ctx, {directives_not_unique, X})
    end.
%% Check values against a type:
%%
%% Judge a type and a value. Used to verify a type judgement of the
%% form 'G |- v <= T,e'' for a value 'v' and a type 'T'. Analysis has shown that
%% it is most efficient to make the case analysis follow 'v' over 'T'.
%% check_value(Ctx, Value, Sigma) -> {ok, CoercedValue}
%%
%% Check a query-document value against the expected schema type Sigma
%% and run input coercion on it. Failures are reported through err/2
%% as {type_mismatch, #{ document => ..., schema => ... }} terms.
check_value(Ctx, {name, _, N}, Sigma) ->
    check_value(Ctx, N, Sigma);
check_value(Ctx, {var, ID}, Sigma) ->
    %% Variable usage: the variable's declared type must be a subtype
    %% of the type expected at this position.
    CtxP = add_path(Ctx, {var, ID}),
    {ok, #vardef { ty = Tau}} = infer(Ctx, {var, ID}),
    ok = sub_input(CtxP, Tau, Sigma),
    {ok, {var, ID, Tau}};
check_value(Ctx, null, {non_null, _} = Sigma) ->
    err(Ctx, {type_mismatch,
              #{ document => null,
                 schema => Sigma }});
check_value(Ctx, Val, {non_null, Sigma}) ->
    check_value(Ctx, Val, Sigma);
check_value(_Ctx, null, _Sigma) ->
    %% Null values are accepted in every other context
    {ok, null};
check_value(Ctx, Vals, {list, Sigma}) when is_list(Vals) ->
    {ok, [begin
              %% TODO: Fold and keep an iterator
              {ok, R} = check_value(Ctx, V, Sigma),
              R
          end || V <- Vals]};
check_value(Ctx, Val, {list, Sigma}) ->
    %% The Jun2018 specification says that a singleton value
    %% should be treated as if it were wrapped in a singleton type
    %% if we are in list-context
    check_value(Ctx, [Val], {list, Sigma});
check_value(Ctx, {enum, N}, #enum_type { id = ID } = Sigma) ->
    case graphql_schema:validate_enum(ID, N) of
        not_found ->
            err(Ctx, {unknown_enum, N});
        ok ->
            coerce(Ctx, N, Sigma);
        {other_enums, Others} ->
            err(Ctx, {type_mismatch,
                      #{ document => Others,
                         schema => Sigma }})
    end;
check_value(Ctx, {input_object, _} = InputObj, Sigma) ->
    case Sigma of
        #input_object_type{} ->
            check_input_obj(Ctx, InputObj, Sigma);
        _OtherType ->
            err(Ctx, {type_mismatch,
                      #{ document => InputObj,
                         schema => Sigma }})
    end;
check_value(Ctx, Val, #scalar_type{} = Sigma) ->
    coerce(Ctx, Val, Sigma);
check_value(Ctx, String, #enum_type{}) when is_binary(String) ->
    %% The spec (Jun2018, section 3.9 - Input Coercion) says that this
    %% is not allowed, unless given as a parameter. In this case, it
    %% is not given as a parameter, but is expanded in as a string in
    %% a query document. Reject.
    err(Ctx, enum_string_literal);
check_value(Ctx, Val, #enum_type{} = Sigma) ->
    coerce(Ctx, Val, Sigma);
check_value(Ctx, Val, Sigma) ->
    %% Catch-all type error. The map key here was previously misspelled
    %% `schmema'; fixed to `schema' for consistency with every other
    %% type_mismatch error term produced above.
    err(Ctx, {type_mismatch,
              #{ document => Val,
                 schema => Sigma }}).
%% Check an input object literal: normalize and deduplicate the field
%% names, then check the fields against the schema (positive polarity:
%% the schema's field list drives the check).
check_input_obj(Ctx, {input_object, Obj},
                #input_object_type{ fields = Fields }) ->
    AssocList = [{coerce_name(K), V} || {K, V} <- Obj],
    case graphql_ast:uniq(AssocList) of
        {not_unique, Key} ->
            err(Ctx, {input_object_not_unique, Key});
        ok ->
            {ok,
             check_input_obj_(Ctx, maps:from_list(AssocList),
                              maps:to_list(Fields), #{})}
    end.
%% Input objects are in positive polarity, so the schema's fields are used
%% to verify that every field is present, and that there are no excess fields
%% As we process fields in the object, we remove them so we can check that
%% there are no more fields in the end.
check_input_obj_(Ctx, Obj, [], Acc) ->
    %% All schema fields handled; anything still left in Obj was not
    %% declared by the schema and is therefore an error.
    case maps:size(Obj) of
        0 -> Acc;
        K when K > 0 -> err(Ctx, {excess_fields_in_object, Obj})
    end;
%% @todo: Clearly this has to change because Ty isn't known at this
check_input_obj_(Ctx, Obj, [{Name, #schema_arg { ty = Ty,
                                                 default = Default }} | Next],
                 Acc) ->
    Result = case maps:get(Name, Obj, not_found) of
                 not_found ->
                     %% Field not supplied: a non-null field without a
                     %% default is an error; otherwise coerce the
                     %% schema default into place.
                     case Ty of
                         {non_null, _} when Default == null ->
                             err(add_path(Ctx, Name), missing_non_null_param);
                         _ ->
                             {ok, R} = coerce_default_param(Ctx, Default, Ty),
                             R
                     end;
                 V ->
                     %% Field supplied: elaborate its type, then check it.
                     CtxP = add_path(Ctx, Name),
                     {ok, Tau} = infer_input_type(CtxP, Ty),
                     {ok, R} = check_param(CtxP, V, Tau),
                     R
             end,
    check_input_obj_(Ctx,
                     maps:remove(Name, Obj),
                     Next,
                     Acc#{ Name => Result }).
%% Check a selection set against the scope type. Object and interface
%% scopes require a non-empty selection set, while selections are
%% forbidden on leaf (scalar/enum) scopes.
check_sset(Ctx, [], #object_type{}) ->
    err(Ctx, fieldless_object);
check_sset(Ctx, [], #interface_type{}) ->
    err(Ctx, fieldless_interface);
check_sset(_Ctx, [], _Leaf) ->
    {ok, []};
check_sset(Ctx, [_|_], #scalar_type{}) ->
    err(Ctx, selection_on_scalar);
check_sset(Ctx, [_|_], #enum_type{}) ->
    err(Ctx, selection_on_enum);
check_sset(Ctx, SSet, Ty) ->
    check_sset_(Ctx, SSet, Ty).
check_sset_(_Ctx, [], _Ty) ->
    {ok, []};
check_sset_(Ctx, [#frag { id = '...', ty = undefined } = Frag | Fs], Sigma) ->
    %% Inline fragment without a type condition: check directly against
    %% the surrounding scope type Sigma.
    {ok, Rest} = check_sset_(Ctx, Fs, Sigma),
    {ok, CFrag} = check(Ctx, Frag, Sigma),
    {ok, [CFrag | Rest]};
check_sset_(Ctx, [#frag { id = '...' } = Frag | Fs], Sigma) ->
    %% Inline fragment with a type condition: infer its own type, check
    %% against that, then verify the spread is valid in this scope.
    {ok, Rest} = check_sset_(Ctx, Fs, Sigma),
    {ok, Tau} = infer(Ctx, Frag),
    {ok, CFrag} = check(Ctx, Frag, Tau),
    ok = sub_output(Ctx, Tau, Sigma),
    {ok, [CFrag | Rest]};
check_sset_(Ctx, [#frag_spread { directives = Dirs } = FragSpread | Fs], Sigma) ->
    %% Named fragment spread: its schema type comes from the fragment
    %% environment (via infer) and must be spreadable in Sigma.
    {ok, Rest} = check_sset_(Ctx, Fs, Sigma),
    CtxP = add_path(Ctx, FragSpread),
    {ok, #frag { schema = Tau }} = infer(Ctx, FragSpread),
    ok = sub_output(CtxP, Tau, Sigma),
    {ok, CDirectives} = check_directives(CtxP, fragment_spread, Dirs),
    {ok, [FragSpread#frag_spread { directives = CDirectives } | Rest]};
check_sset_(Ctx, [#field{} = F|Fs], {non_null, Ty}) ->
    %% Fields ignore the non-null and list wrappers of the scope type.
    check_sset_(Ctx, [F|Fs], Ty);
check_sset_(Ctx, [#field{} = F|Fs], {list, Ty}) ->
    check_sset_(Ctx, [F|Fs], Ty);
check_sset_(Ctx, [#field{ args = Args, directives = Dirs,
                          selection_set = SSet } = F | Fs], Sigma) ->
    {ok, Rest} = check_sset_(Ctx, Fs, Sigma),
    CtxP = add_path(Ctx, F),
    {ok, CDirectives} = check_directives(CtxP, field, Dirs),
    {ok, FieldTypes} = fields(CtxP, Sigma),
    case infer_field(Ctx, F, FieldTypes) of
        {ok, {introspection, typename} = Ty} ->
            %% __typename needs no argument or sub-selection checking.
            {ok, [F#field { schema = Ty,
                            directives = CDirectives }
                  |Rest]};
        {ok, #schema_field { ty = Ty, args = TArgs } = SF} ->
            {ok, Tau} = infer_output_type(Ctx, Ty),
            {ok, CSSet} = check_sset(CtxP, SSet, Tau),
            {ok, CArgs} = check_args(CtxP, Args, TArgs),
            {ok, [F#field {
                    args = CArgs,
                    schema = SF#schema_field { ty = Tau },
                    directives = CDirectives,
                    selection_set = CSSet }
                  | Rest]}
    end.
%% Main check relation:
%%
%% Given a Context of environments
%% An expression to check
%% A type to check it against
%%
%% We derive an expression in which we have annotated types into
%% the AST. This helps the later execution stage.
check(Ctx, #frag { ty = undefined } = Frag, Sigma) ->
    %% The specification has a rule in which if you omit the
    %% type of a fragment, it "picks up" the type of the context
    %% because this can be used in the case where you want to include
    %% or slip a block of information
    check(Ctx, Frag#frag { ty = Sigma }, Sigma);
check(Ctx, #frag { directives = Dirs,
                   selection_set = SSet } = F, Sigma) ->
    %% Fragment with a type condition: infer its own type, verify it may
    %% be spread in scope Sigma, then check directives and selections
    %% against the inferred type.
    CtxP = add_path(Ctx, F),
    {ok, Tau} = infer(Ctx, F),
    ok = sub_output(CtxP, Tau, Sigma),
    {ok, CDirectives} = check_directives(CtxP, inline_fragment, Dirs),
    {ok, CSSet} = check_sset(CtxP, SSet, Tau),
    {ok, F#frag { schema = Tau,
                  directives = CDirectives,
                  selection_set = CSSet }};
check(Ctx, #op { vardefs = VDefs, directives = Dirs, selection_set = SSet } = Op,
      #object_type {} = Sigma) ->
    %% Operation: elaborate variable definitions first, because the
    %% selection set is checked under the resulting variable environment.
    CtxP = add_path(Ctx, Op),
    OperationType = operation_context(Op),
    {ok, VarDefs} = var_defs(CtxP, VDefs),
    {ok, CDirectives} = check_directives(CtxP, OperationType, Dirs),
    {ok, CSSet} = check_sset(CtxP#ctx { vars = VarDefs }, SSet, Sigma),
    {ok, Op#op {
           schema = Sigma,
           directives = CDirectives,
           selection_set = CSSet,
           vardefs = VarDefs}}.
%% To check a document, establish a default context and check the
%% document. Any {error, Path, Msg} thrown from deeper in the checker
%% is converted into an abort report here.
check(#document{} = Doc) ->
    try
        check_(Doc)
    catch
        throw:{error, Path, Msg} ->
            graphql_err:abort(Path, type_check, Msg)
    end.
check_(#document{ definitions = Defs } = Doc) ->
    %% Collect all named fragments up front so operations may reference
    %% fragments defined later in the document.
    Fragments = [F || #frag{} = F <- Defs],
    FragEnv = fragenv(Fragments),
    CtxP = add_path(#ctx{}, document),
    Ctx = CtxP#ctx { frags = FragEnv },
    Checked = lists:map(
                fun(Def) ->
                        {ok, Ty} = infer(Ctx, Def),
                        {ok, CDef} = check(Ctx, Def, Ty),
                        CDef
                end, Defs),
    {ok, #{
       ast => Doc#document { definitions = Checked },
       fun_env => funenv(Checked) }}.
%% GraphQL queries are really given in two stages. One stage is the
%% query document containing (static) queries which take parameters.
%% These queries can be seen as functions (stored procedures) you can
%% call.
%%
%% If called, we get a function name, and a set of parameters for that
%% function. So we have to go through the concrete parameters and type
%% check them against the function environment type schema. If the
%% input parameters can not be coerced into the parameters expected by
%% the function scheme, an error occurs.
%% This is the entry-point when checking parameters for an already parsed,
%% type checked and internalized query. It serves to verify that a requested
%% operation and its parameters matches the types in the operation referenced
check_params(FunEnv, OpName, Params) ->
    try
        case operation(FunEnv, OpName, Params) of
            %% No operation to run and no parameters expected.
            undefined -> #{};
            not_found ->
                err(#ctx{}, {operation_not_found, OpName});
            VarEnv ->
                Ctx = #ctx { vars = VarEnv,
                             path = [OpName] },
                check_params_(Ctx, Params)
        end
    catch throw:{error, Path, Msg} ->
            graphql_err:abort(Path, type_check, Msg)
    end.
%% Parameter checking has positive polarity, so we fold over
%% the type var environment from the schema and verify that each
%% type is valid.
check_params_(#ctx { vars = VarEnv } = Ctx, OrigParams) ->
    Check =
        fun(Key, Tau, Acc) ->
                Supplied = maps:get(Key, Acc, not_found),
                {ok, Coerced} = check_param(add_path(Ctx, Key), Supplied, Tau),
                Acc#{ Key => Coerced }
        end,
    maps:fold(Check, OrigParams, VarEnv).
%% When checking parameters, we must consider the case of default values.
%% If a given parameter is not given, and there is a default, we can supply
%% the default value in some cases. The spec requires special handling of
%% null values, which are handled here.
check_param(Ctx, not_found, Tau) ->
    case Tau of
        #vardef { ty = {non_null, _}, default = null } ->
            %% Required parameter with no default: hard error.
            err(Ctx, missing_non_null_param);
        #vardef { default = Default, ty = Ty } ->
            coerce_default_param(Ctx, Default, Ty)
    end;
check_param(Ctx, Val, #vardef { ty = Tau }) ->
    %% Unwrap the vardef before checking the supplied value.
    check_param_(Ctx, Val, Tau);
check_param(Ctx, Val, Tau) ->
    check_param_(Ctx, Val, Tau).
%% Lift types up if needed
check_param_(Ctx, Val, Ty) when is_binary(Ty) ->
    {ok, Tau} = infer_input_type(Ctx, Ty),
    check_param_(Ctx, Val, Tau);
check_param_(Ctx, {var, ID}, Sigma) ->
    %% Variables: check input subsumption between the variable's
    %% declared type and the expected type Sigma.
    CtxP = add_path(Ctx, {var, ID}),
    {ok, #vardef { ty = Tau}} = infer(Ctx, {var, ID}),
    ok = sub_input(CtxP, Tau, Sigma),
    {ok, {var, ID, Tau}};
check_param_(Ctx, null, {non_null, _}) ->
    %% Fix: this clause previously matched {not_null, _}, a tag nothing in
    %% the system constructs (the wrapper is 'non_null' everywhere else in
    %% this module), so a null supplied for a non-null parameter slipped
    %% through to the null-accepting clause below instead of erroring here.
    err(Ctx, non_null);
check_param_(Ctx, Val, {non_null, Tau}) ->
    %% Here, the value cannot be null due to the preceding clause
    check_param_(Ctx, Val, Tau);
check_param_(_Ctx, null, _Tau) ->
    %% Null is valid for any other (nullable) type.
    {ok, null};
check_param_(Ctx, Lst, {list, Tau}) when is_list(Lst) ->
    %% Build a dummy structure to match the recursor. Unwrap this
    %% structure before replacing the list parameter.
    %%
    %% @todo: Track the index here
    {ok, [begin
              {ok, V} = check_param_(Ctx, X, Tau),
              V
          end || X <- Lst]};
check_param_(Ctx, Val, #scalar_type{} = Tau) ->
    coerce(Ctx, Val, Tau);
check_param_(Ctx, {enum, Val}, #enum_type{} = Tau) when is_binary(Val) ->
    check_param_(Ctx, Val, Tau);
check_param_(Ctx, Val, #enum_type { id = Ty } = Tau) when is_binary(Val) ->
    %% Determine the type of any enum term, and then coerce it
    case graphql_schema:validate_enum(Ty, Val) of
        ok ->
            coerce(Ctx, Val, Tau);
        not_found ->
            err(Ctx, {enum_not_found, Ty, Val});
        {other_enums, OtherTys} ->
            err(Ctx, {param_mismatch, {enum, Ty, OtherTys}})
    end;
check_param_(Ctx, Obj, #input_object_type{} = Tau) when is_map(Obj) ->
    %% When an object comes in through JSON for example, then the input object
    %% will be a map which is already unique in its fields. To handle this, turn
    %% the object into the same form as the one we use on query documents and pass
    %% it on. Note that the code will create a map later on once the input has been
    %% uniqueness-checked.
    check_param_(Ctx, {input_object, maps:to_list(Obj)}, Tau);
check_param_(Ctx, {input_object, KVPairs}, #input_object_type{} = Tau) ->
    check_input_obj(Ctx, {input_object, KVPairs}, Tau);
%% Everything else are errors
check_param_(Ctx, Val, Tau) ->
    err(Ctx, {param_mismatch, Val, Tau}).
%% -- SUBTYPE/SUBSUMPTION ------------------------------------------------------
%%
%%
%% Subsumption relation over input types:
%%
%% Decide if an input type is a valid subsumption of another type. We assume
%% that the first parameter is the 'Tau' type and the second parameter
%% is the 'Sigma' type.
%%
%% Some of the cases are reflexivity. Some of the cases are congruences.
%% And some are special handling explicitly.
%%
sub_input(Ctx, Tau, Sigma) ->
    Verdict = sub_input_(Tau, Sigma),
    case Verdict of
        yes ->
            ok;
        no ->
            err(Ctx, {type_mismatch,
                      #{ document => Tau,
                         schema => Sigma }})
    end.
%% Reflexivity on ground types: same kind with the same id.
sub_input_(#scalar_type { id = ID }, #scalar_type { id = ID }) -> yes;
sub_input_(#enum_type { id = ID }, #enum_type { id = ID }) -> yes;
sub_input_(#input_object_type { id = ID }, #input_object_type { id = ID }) -> yes;
%% NOTE: clause order matters below — non_null handling must come
%% before the list congruences.
sub_input_({non_null, Tau}, {non_null, Sigma}) ->
    sub_input_(Tau, Sigma);
sub_input_({non_null, Tau}, Sigma) ->
    %% A more strict document type of non-null is always allowed since
    %% it can't be null in the schema then
    sub_input_(Tau, Sigma);
sub_input_(_Tau, {non_null, _Sigma}) ->
    %% If the schema requires a non-null type but the document doesn't
    %% supply that, it is an error
    no;
sub_input_({list, Tau}, {list, Sigma}) ->
    %% Lists are decided by means of a congruence
    sub_input_(Tau, Sigma);
sub_input_(Tau, {list, Sigma}) ->
    %% A singleton type is allowed to be embedded in a list according to the
    %% specification (Oct 2016)
    sub_input_(Tau, Sigma);
sub_input_(_Tau, _Sigma) ->
    %% Any other type combination are invalid
    no.
%% Subsumption relation over output (fragment) types
%% Decide if a fragment can be embedded in a given scope
%% We proceed by computing the valid set of the Scope and also the
%% Valid set of the fragment. The intersection type between these two,
%% Scope and Spread, must not be the empty set. Otherwise it is a failure.
%%
%% The implementation here works by case splitting the different possible
%% output types one at a time and then handling them systematically rather
%% than running an intersection computation. This trades off computation
%% for code size when you have a match that can be optimized in any way.
%%
%% First a series of congruence checks. We essentially ignore
%% The list and non-null modifiers and check if the given fragment
%% can be expanded in the given scope by recursing.
%%
%% Fragments doesn't care if they sit inside lists or if the scope
%% type is non-null:
sub_output(Ctx, Tau, {list, Sigma}) ->
    sub_output(Ctx, Tau, Sigma);
sub_output(Ctx, Tau, {non_null, Sigma}) ->
    sub_output(Ctx, Tau, Sigma);
sub_output(Ctx, {non_null, Tau}, Sigma) ->
    sub_output(Ctx, Tau, Sigma);
%% Reflexivity:
sub_output(_Ctx, #object_type { id = Ty },
           #object_type { id = Ty }) ->
    %% Object spread in Object scope requires a perfect match
    ok;
sub_output(Ctx, #object_type { id = Tau },
           #object_type { id = Sigma }) ->
    %% If not a perfect match, this is an error:
    err(Ctx, {fragment_spread, Tau, Sigma});
%% An object subsumes a union scope if the object is member of
%% Said union type
sub_output(Ctx, #object_type { id = ID },
           #union_type { id = UID,
                         types = Sigmas }) ->
    case lists:member(ID, Sigmas) of
        true -> ok;
        false -> err(Ctx, {not_union_member, ID, UID})
    end;
%% Likewise an object is subsumed by an interface if the object
%% is member of said interface:
sub_output(Ctx, #object_type { id = ID,
                               interfaces = IFaces },
           #interface_type { id = IID }) ->
    case lists:member(IID, IFaces) of
        true -> ok;
        false -> err(Ctx, {not_interface_member, ID, IID})
    end;
%% An interface spread in an object scope is valid exactly when the
%% object implements that interface:
sub_output(Ctx, #interface_type { id = IID },
           #object_type { id = OID, interfaces = IFaces }) ->
    case lists:member(IID, IFaces) of
        true -> ok;
        false -> err(Ctx, {not_interface_embedder, IID, OID})
    end;
%% Interface Tau subsumes interface Sigma if they have concrete
%% objects in common. This means there is at least one valid expansion,
%% so this should be allowed.
sub_output(Ctx, #interface_type { id = SpreadID },
           #interface_type { id = ScopeID }) ->
    Taus = graphql_schema:lookup_interface_implementors(SpreadID),
    Sigmas = graphql_schema:lookup_interface_implementors(ScopeID),
    case ordsets:intersection(
           ordsets:from_list(Taus),
           ordsets:from_list(Sigmas)) of
        [_|_] ->
            ok;
        [] ->
            err(Ctx, {no_common_object, SpreadID, ScopeID})
    end;
%% Interfaces subsume unions, if the union has at least one member
%% who implements the interface.
sub_output(Ctx, #interface_type { id = SpreadID },
           #union_type{ id = ScopeID, types = ScopeMembers }) ->
    Taus = graphql_schema:lookup_interface_implementors(SpreadID),
    case ordsets:intersection(
           ordsets:from_list(Taus),
           ordsets:from_list(ScopeMembers)) of
        [_|_] ->
            ok;
        [] ->
            err(Ctx, {no_common_object, SpreadID, ScopeID})
    end;
%% Unions subsume objects if they are members
sub_output(Ctx, #union_type { id = UID, types = UMembers },
           #object_type { id = OID }) ->
    case lists:member(OID, UMembers) of
        true -> ok;
        false -> err(Ctx, {not_union_embedder, UID, OID})
    end;
%% Unions subsume interfaces iff there is an intersection between
%% what members the union has and what the implementors of the interface
%% are.
sub_output(Ctx, #union_type { id = SpreadID, types = SpreadMembers },
           #interface_type { id = ScopeID }) ->
    Sigmas = graphql_schema:lookup_interface_implementors(ScopeID),
    case ordsets:intersection(
           ordsets:from_list(SpreadMembers),
           ordsets:from_list(Sigmas)) of
        [_|_] ->
            ok;
        [] ->
            err(Ctx, {no_common_object, SpreadID, ScopeID})
    end;
%% Unions subsume if there are common members
sub_output(Ctx, #union_type { id = SpreadID, types = SpreadMembers },
           #union_type { id = ScopeID, types = ScopeMembers }) ->
    case ordsets:intersection(
           ordsets:from_list(SpreadMembers),
           ordsets:from_list(ScopeMembers)) of
        [_|_] ->
            ok;
        [] ->
            err(Ctx, {no_common_object, SpreadID, ScopeID})
    end.
%% -- COERCION OF INPUTS ---------------------------------------------------
%%
%%
%% Normalize a key/name to a binary; non-binary names go through the AST
%% name extraction.
coerce_name(B) when is_binary(B) -> B;
coerce_name(Name) -> graphql_ast:name(Name).
%% This is a function which must go as soon as we have proper
%% type checking on the default values in the schema type checker.
%% There is absolutely no reason to do something like this then since
%% it can never fail like this.
coerce_default_param(#ctx { path = Path } = Ctx, Default, Ty) ->
    try check_param(Ctx, Default, Ty) of
        Result -> Result
    catch
        Class:Err ->
            error_logger:error_report(
              [{path, graphql_err:path(lists:reverse(Path))},
               {default_value, Default},
               {type, graphql_err:format_ty(Ty)},
               {default_coercer_error, Class, Err}]),
            %% Fix: err/2 pattern-matches on a #ctx{} record to extract the
            %% path, but it was being handed the bare Path list, which would
            %% crash with function_clause rather than throw the intended
            %% non_coercible_default error.
            err(Ctx, non_coercible_default)
    end.
%% Coerce an input value through the type's resolver module, if any.
coerce(_Ctx, Val, #enum_type { resolve_module = undefined }) ->
    %% No resolver registered for this enum: pass the value through.
    {ok, Val};
coerce(Ctx, Val, #enum_type { id = ID, resolve_module = Mod }) ->
    resolve_input(Ctx, ID, Val, Mod);
coerce(Ctx, Val, #scalar_type { id = ID, resolve_module = Mod }) ->
    %% Scalars always carry a resolver module; assert this invariant.
    true = Mod /= undefined,
    resolve_input(Ctx, ID, Val, Mod).
%% Run a resolver module's input/2 coercion over a value. Only the call
%% itself is protected by the try; its {ok,_}/{error,_} result is handled
%% in the unprotected 'of' section, while any exception raised by the
%% resolver is reported and converted into an input_coerce_abort error.
resolve_input(Ctx, ID, Val, Mod) ->
    try Mod:input(ID, Val) of
        {ok, NewVal} -> {ok, NewVal};
        {error, Reason} ->
            err(Ctx, {input_coercion, ID, Val, Reason})
    catch
        Cl:Err ->
            err_report({input_coercer, ID, Val}, Cl, Err),
            err(Ctx, {input_coerce_abort, {Cl, Err}})
    end.
%% -- INTERNAL FUNCTIONS ------------------------------------------------------
%% Handle a list of vardefs by elaboration of their types
var_defs(Ctx, Input) ->
    %% Elaborate each vardef's type; only an {ok, _} result from
    %% infer_input_type is accepted here.
    VDefs =
        [case infer_input_type(Ctx, V#vardef.ty) of
             {ok, Tau} -> V#vardef { ty = Tau }
         end || V <- Input],
    %% Variable names must be unique within one operation.
    NamedVars = [{graphql_ast:name(K), V}
                 || #vardef { id = K } = V <- VDefs],
    case graphql_ast:uniq(NamedVars) of
        ok ->
            {ok, varenv(VDefs)};
        {not_unique, Var} ->
            err(add_path(Ctx, Var), {param_not_unique, Var})
    end.
%% Extract fields from a type, where the type has fields.
%% Unions expose no direct fields, hence the empty map.
fields(_Ctx, #object_type { fields = Fields }) -> {ok, Fields};
fields(_Ctx, #interface_type { fields = Fields }) -> {ok, Fields};
fields(_Ctx, #union_type {}) -> {ok, #{}}.
%% Build a varenv
%% Map each vardef's name to its full definition for quick lookup.
varenv(VarList) ->
    maps:from_list(
      [{graphql_ast:name(Var), Def} || #vardef { id = Var } = Def <- VarList]).
%% Build a funenv
%% Each named operation maps to its elaborated variable environment;
%% fragments carry no variables and are skipped.
funenv(Ops) ->
    maps:from_list(
      [{graphql_ast:name(ID), VarEnv}
       || #op { id = ID, vardefs = VDefs } <- Ops,
          {ok, VarEnv} <- [var_defs(#ctx{}, maps:values(VDefs))]]).
%% Elaborate a fragment's type condition into its resolved schema type so
%% the fragment environment carries fully-annotated fragments.
annotate_frag(#frag { ty = Ty } = Frag) ->
    {ok, Tau} = infer_output_type(#ctx{}, Ty),
    Frag#frag { schema = Tau }.
%% Build a fragenv
%% Fragments are keyed by name; each one is annotated with its schema type.
fragenv(Frags) ->
    maps:from_list(
      [{graphql_ast:name(ID), annotate_frag(Frg)} || #frag { id = ID } = Frg <- Frags]).
%% Figure out what kind of operation context we have.
%% An absent type defaults to a query per the specification.
operation_context(#op { ty = undefined })         -> query;
operation_context(#op { ty = {query, _} })        -> query;
operation_context(#op { ty = {mutation, _} })     -> mutation;
operation_context(#op { ty = {subscription, _} }) -> subscription.
%% Pull out a value from a list of arguments. This is used to check
%% we eventually cover all arguments properly since we can check if there
%% are excess arguments in the end.
take_arg(Ctx, {Key, #schema_arg { ty = Tau,
                                  default = Default }}, Args) ->
    case lists:keytake(Key, 1, Args) of
        {value, {_, null}, _NextArgs} ->
            %% You are currently not allowed to input null values
            err(Ctx, {null_input, Key});
        {value, {_, Val}, NextArgs} ->
            %% Argument found, use it
            {ok, {Key, #{ type => Tau, value => Val}}, NextArgs};
        false ->
            %% Argument was not given. Resolve default value if any
            case {Tau, Default} of
                {{non_null, _}, null} ->
                    %% Required argument with no default: hard error.
                    err(Ctx, missing_non_null_param);
                _ ->
                    {ok, {Key, #{ type => Tau, value => Default}}, Args}
            end
    end.
%% Determine the operation which the call wants to run
operation(FunEnv, <<>>, Params) ->
    %% Supplying an empty string is the same as not supplying anything at all
    %% This should solve problems where we have empty requests
    operation(FunEnv, undefined, Params);
operation(FunEnv, undefined, Params) ->
    case maps:to_list(FunEnv) of
        [] when Params == #{} ->
            undefined;
        [] when Params /= #{} ->
            %% Fix: err/2 pattern-matches on a #ctx{} record; it was being
            %% called with a bare list, which would crash with
            %% function_clause instead of throwing the intended error.
            %% Use an empty context, as check_params/3 already does for
            %% its operation_not_found error.
            err(#ctx{}, unnamed_operation_params);
        [{_, VarEnv}] ->
            %% Exactly one operation defined: run it by default.
            VarEnv;
        _ ->
            %% The error here should happen in the execute phase
            undefined
    end;
operation(FunEnv, OpName, _Params) ->
    maps:get(OpName, FunEnv, not_found).
%% Tell the error logger that something is off.
%% Term identifies the failing operation; Cl/Err is the caught class/reason.
err_report(Term, Cl, Err) ->
    error_logger:error_report(
      [
       Term,
       {error, Cl, Err}
      ]).
%% Add a path component to the context.
%% Components accumulate newest-first; consumers reverse before reporting.
-spec add_path(ctx(), Component :: term()) -> ctx().
add_path(#ctx { path = P } = Ctx, C) ->
    Ctx#ctx { path = [C|P] }.
%% Report an error relative to a context.
%% Thrown errors are caught at the entry points (check/1, check_params/3)
%% and converted into graphql_err:abort/3 reports.
-spec err(ctx(), Term :: term()) -> no_return().
err(#ctx{ path = Path }, Msg) ->
    throw({error, Path, Msg}).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% Maintain cluster stability information. A cluster is considered stable if there
% were no changes to during a given period of time.
%
% To be notified of cluster stability / instability the owner module must
% implement the mem3_cluster behavior. When cluster membership changes,
% cluster_unstable behavior callback will be called. After that, if there are
% no more changes to the cluster, then the cluster_stable callback will be called.
%
% The period is passed in as start argument but it can also be set dynamically
% via the set_period/2 API call.
%
% In some cases it might be useful to have a shorter period during startup.
% That can be configured via the StartPeriod argument. If the time since start
% is less than a full period, then the StartPeriod is used as the period.
-module(mem3_cluster).
-behaviour(gen_server).
-export([
start_link/4,
set_period/2
]).
-export([
init/1,
terminate/2,
handle_call/3,
handle_cast/2,
handle_info/2,
code_change/3
]).
%% Owner callbacks: cluster_unstable fires on a membership change,
%% cluster_stable fires once the quiet period elapses without changes.
%% Both thread an opaque context value through.
-callback cluster_stable(Context :: term()) -> NewContext :: term().
-callback cluster_unstable(Context :: term()) -> NewContext :: term().
-record(state, {
    mod :: atom(),                     % owner callback module
    ctx :: term(),                     % opaque owner context passed to callbacks
    start_time :: erlang:timestamp(),  % when this server booted
    last_change :: erlang:timestamp(), % time of the last nodeup/nodedown event
    period :: integer(),               % steady-state quiet period (seconds)
    start_period :: integer(),         % shorter quiet period used right after boot (seconds)
    timer :: reference()               % pending stability_check timer
}).
-spec start_link(module(), term(), integer(), integer()) ->
    {ok, pid()} | ignore | {error, term()}.
%% Start the stability tracker. Module must implement the callbacks above;
%% Context is threaded through them. StartPeriod is the quiet period
%% (seconds) used shortly after boot, Period the steady-state one.
start_link(Module, Context, StartPeriod, Period)
  when is_atom(Module), is_integer(StartPeriod), is_integer(Period) ->
    gen_server:start_link(?MODULE, [Module, Context, StartPeriod, Period], []).
-spec set_period(pid(), integer()) -> ok.
%% Dynamically change the steady-state stability period (seconds).
set_period(Server, Period) when is_pid(Server), is_integer(Period) ->
    gen_server:cast(Server, {set_period, Period}).
% gen_server callbacks
init([Module, Context, StartPeriod, Period]) ->
    %% Subscribe to nodeup/nodedown messages and schedule the first
    %% stability check using the (typically shorter) start period.
    net_kernel:monitor_nodes(true),
    {ok, #state{
        mod = Module,
        ctx = Context,
        start_time = os:timestamp(),
        last_change = os:timestamp(),
        period = Period,
        start_period = StartPeriod,
        timer = new_timer(StartPeriod)
    }}.
%% Nothing to clean up explicitly on shutdown.
terminate(_Reason, _State) ->
    ok.
%% No synchronous API is defined; unknown calls are acknowledged and ignored.
handle_call(_Msg, _From, State) ->
    {reply, ignored, State}.
%% Update the steady-state period; takes effect at the next interval check.
handle_cast({set_period, Period}, State) ->
    {noreply, State#state{period = Period}}.
%% Any membership change marks the cluster unstable.
handle_info({nodeup, _Node}, State) ->
    {noreply, cluster_changed(State)};
handle_info({nodedown, _Node}, State) ->
    {noreply, cluster_changed(State)};
handle_info(stability_check, #state{mod = Mod, ctx = Ctx} = State) ->
    %% If the quiet period has elapsed since the last change, notify the
    %% owner of stability and stop rescheduling (the next membership
    %% change will start a fresh timer); otherwise check again later.
    erlang:cancel_timer(State#state.timer),
    case now_diff_sec(State#state.last_change) > interval(State) of
        true ->
            {noreply, State#state{ctx = Mod:cluster_stable(Ctx)}};
        false ->
            Timer = new_timer(interval(State)),
            {noreply, State#state{timer = Timer}}
    end.
%% No state migration is required across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions

%% Record a membership change: reset the quiet-period clock, schedule a
%% new stability check, and notify the owner that the cluster is unstable.
-spec cluster_changed(#state{}) -> #state{}.
cluster_changed(#state{mod = Mod, ctx = Ctx} = State) ->
    State#state{
        last_change = os:timestamp(),
        timer = new_timer(interval(State)),
        ctx = Mod:cluster_unstable(Ctx)
    }.
%% Schedule a stability_check message after IntervalSec seconds.
-spec new_timer(non_neg_integer()) -> reference().
new_timer(IntervalSec) ->
    erlang:send_after(IntervalSec * 1000, self(), stability_check).
% For the first Period seconds after node boot we check cluster stability every
% StartPeriod seconds. Once the initial Period seconds have passed we continue
% to monitor once every Period seconds
-spec interval(#state{}) -> non_neg_integer().
interval(#state{period = Period, start_period = StartPeriod,
                start_time = T0}) ->
    case now_diff_sec(T0) > Period of
        true ->
            % Normal operation
            Period;
        false ->
            % During startup
            StartPeriod
    end.
%% Seconds elapsed since Time; timestamps in the future yield 0.
%%
%% Fix: the spec claimed non_neg_integer(), but '/' performs float
%% division, so every positive difference is returned as a float. The
%% spec now matches the actual return type; callers only compare the
%% result numerically, so behavior is unchanged.
-spec now_diff_sec(erlang:timestamp()) -> number().
now_diff_sec(Time) ->
    case timer:now_diff(os:timestamp(), Time) of
        USec when USec < 0 ->
            0;
        USec when USec >= 0 ->
            USec / 1000000
    end.
%% Copyright (c) 2019-2021, <NAME> <<EMAIL>>. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(uef_bin).
-export([binary_join/2, split/2, split/3]).
-export([repeat/2]).
-export([reverse/1, reverse_utf8/1]).
-export([replace/3, replace_chars/3]).
-export([random_latin_binary/2, random_binary_from_chars/2]).
-export([numeric_prefix/1]).
-export([strip_left/2, strip_right/2, strip_both/2]).
-export([chomp/1]).
-type split_option() :: undefined | trim_all.
%%%------------------------------------------------------------------------------
%%% API
%%%------------------------------------------------------------------------------
%% binary_join/2
-spec binary_join(ListOfBinaries :: [binary()], Separator :: binary()) -> binary().
%% @doc
%% Joins a list of binaries with separator into a single binary. Returns binary.
%% An empty list yields the empty binary; a singleton is returned as-is.
%% @end
binary_join([], _Sep) -> <<>>;
binary_join([Single], _Sep) -> Single;
binary_join([First | Rest], Sep) ->
    Append = fun(Part, Acc) -> <<Acc/binary, Sep/binary, Part/binary>> end,
    lists:foldl(Append, First, Rest).
%% split/2
-spec split(Binary :: binary(), Splitter :: binary()) -> ListOfBinaries :: [binary()].
%% @doc
%% Splits binary Binary with splitter Splitter into a list of binaries.
%% Works as binary:split/2 but is more performant in simple cases.
%% Splits on every occurrence of Splitter and keeps empty chunks;
%% use split/3 with 'trim_all' to drop them.
%% @end
split(B, Splitter) ->
    split(B, Splitter, undefined).
%% split/3
-spec split(Binary :: binary(), Splitter :: binary(), SplitOption:: split_option()) -> ListOfBinaries :: [binary()].
%% @doc
%% Splits binary Binary with splitter Splitter into a list of binaries.
%% Works as uef_bin:split/2 but, when given the 'trim_all' option,
%% removes all empty (<<>>) chunks from the result.
%% @end
split(<<>>, _Splitter, _Option) ->
    [];
split(Bin, <<>>, _Option) ->
    [Bin];
split(Bin, Splitter, trim_all) ->
    SplitterBitSize = erlang:bit_size(Splitter),
    [Chunk || Chunk <- do_split(Bin, Splitter, SplitterBitSize, []), Chunk =/= <<>>];
split(Bin, Splitter, _Option) ->
    SplitterBitSize = erlang:bit_size(Splitter),
    do_split(Bin, Splitter, SplitterBitSize, []).
%% repeat/2
-spec repeat(Binary1 :: binary(), N :: pos_integer()) -> Binary2 :: binary().
%% @doc
%% Returns binary Binary2 consisting of Binary1 repeated N times.
%% Delegates to the accumulating worker repeat/3 with an empty accumulator.
%% @end
repeat(Bin, N) ->
    repeat(Bin, N, <<>>).
%% reverse/1
-spec reverse(binary()) -> binary().
%% @doc
%% Returns a binary in reverse byte order.
%% @end
reverse(Bin) ->
    list_to_binary(lists:reverse(binary_to_list(Bin))).
%% reverse_utf8/1
-spec reverse_utf8(UTF8_Binary1 :: binary()) -> UTF8_Binary2 :: binary().
%% @doc
%% Returns a binary in reverse character order. Intended to work with UTF-8 binary strings.
%% Reversal is per Unicode codepoint (the worker matches /utf8 segments),
%% not per byte.
%% @end
reverse_utf8(Bin) ->
    reverse_utf8(Bin, <<>>).
%% replace/3
-spec replace(Binary1 :: binary(), Chars :: binary(), OtherChars :: binary()) -> Binary2 :: binary().
%% @doc
%% Replaces chars Chars with other chars OtherChars in binary Binary1 and returns another binary Binary2.
%% Works as binary:replace/3 but more performant and can be used in simple cases.
%% @end
replace(<<>>, _, _) -> <<>>;
replace(B, <<>>, _) -> B;
replace(B, C1, C2) ->
    %% Pre-compute the pattern's bit size once for the scanning worker.
    C1BitSize = erlang:bit_size(C1),
    replace(B, C1, C1BitSize, C2, <<>>).
%% replace_chars/3
-spec replace_chars(Binary1 :: binary(), ListOfCharsToReplace :: [binary()], OtherChars :: binary()) -> Binary2 :: binary().
%% @doc
%% Replaces chars included in list ListOfCharsToReplace with other chars OtherChars in binary Binary1 and returns another binary Binary2.
%% @end
replace_chars(Bin, [], _Replacement) ->
    Bin;
replace_chars(Bin, [Char | Rest], Replacement) ->
    %% Apply the substitutions left-to-right, one pattern at a time.
    replace_chars(replace(Bin, Char, Replacement), Rest, Replacement).
%% random_latin_binary/2
-spec random_latin_binary(Length :: pos_integer(), CaseFlag :: lower | upper | any) -> binary().
%% @doc
%% Returns a random binary of size Length consisting of latins [a-zA-Z] and digits [0-9].
%% The second argument CaseFlag corresponds to a letter case, an atom 'lower', 'upper' or 'any'.
%% @end
random_latin_binary(Length, CaseFlag) ->
    Alphabet =
        case CaseFlag of
            lower -> <<"abcdefghijklmnopqrstuvwxyz0123456789">>;
            upper -> <<"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789">>;
            any   -> <<"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789">>
        end,
    random_binary_from_chars(Length, Alphabet).
%% random_binary_from_chars/2
-spec random_binary_from_chars(Length :: pos_integer(), Chars :: binary()) -> binary().
%% @doc
%% Generates and returns a binary of size Length which consists of the given characters Chars.
%% Each output byte is drawn uniformly (via rand:uniform/1) from Chars.
%% @end
random_binary_from_chars(Length, Chars) ->
    Top = erlang:byte_size(Chars),
    << <<(binary:at(Chars, rand:uniform(Top) - 1))>> || _ <- lists:seq(1, Length) >>.
%% numeric_prefix/1
-spec numeric_prefix(Binary :: binary()) -> DigitsOnlyOrEmptyBinary :: binary().
%% @doc
%% Returns new binary DigitsOnlyBinary which consists of digits [0-9] which are at the beginning in the given binary Binary.
%% If Binary does not begin with digit, this function returns empty binary `(<<>>)'.
%% @end
numeric_prefix(B) -> numeric_prefix(B, <<>>).
%% strip_left/2
-spec strip_left(Bin :: binary(), Chars :: binary() | integer()) -> binary().
%% @doc
%% Removes leading Chars from binary Bin and returns new binary.
%% Chars may be a binary chunk or a single character code.
%% @end
strip_left(Bin, <<>>) when is_binary(Bin) ->
    Bin;
strip_left(Bin, Chars) when is_binary(Bin), is_binary(Chars) ->
    do_strip_left(Bin, Chars, erlang:byte_size(Chars));
strip_left(Bin, Chars) when is_binary(Bin), is_integer(Chars) ->
    %% Wrap the character code into a binary and retry.
    strip_left(Bin, << Chars >>).
%% strip_right/2
-spec strip_right(Bin :: binary(), Chars :: binary() | integer()) -> binary().
%% @doc
%% Removes trailing Chars from binary Bin and returns new binary.
%% Chars may be a binary chunk or a single character code.
%% @end
strip_right(Bin, <<>>) when is_binary(Bin) ->
    Bin;
strip_right(Bin, Chars) when is_binary(Bin), is_binary(Chars) ->
    do_strip_right(Bin, Chars, erlang:byte_size(Chars));
strip_right(Bin, Chars) when is_binary(Bin), is_integer(Chars) ->
    %% Wrap the character code into a binary and retry.
    strip_right(Bin, << Chars >>).
%% strip_both/2
-spec strip_both(Bin :: binary(), Chars :: binary() | integer()) -> binary().
%% @doc
%% Removes leading and trailing Chars from binary Bin and returns new binary.
%% @end
strip_both(Bin, Chars) ->
    LeftStripped = strip_left(Bin, Chars),
    strip_right(LeftStripped, Chars).
%% chomp/1
-spec chomp(binary()) -> binary().
%% @doc
%% Removes all trailing \n and \r characters from binary.
%% @end
chomp(<<>>) ->
    <<>>;
chomp(Bin) ->
    Size = erlang:byte_size(Bin),
    case binary:at(Bin, Size - 1) of
        C when C =:= $\n; C =:= $\r ->
            %% Drop the trailing newline byte and keep chomping.
            chomp(binary:part(Bin, 0, Size - 1));
        _ ->
            Bin
    end.
%%%------------------------------------------------------------------------------
%%% Internal functions
%%%------------------------------------------------------------------------------
%% repeat/3
-spec repeat(binary(), pos_integer(), binary()) -> binary().
repeat(_, N, Acc) when N < 1 ->
Acc;
repeat(Bin, N, Acc) ->
repeat(Bin, N-1, <<Acc/bits, Bin/bits>>).
%% replace/5
%% Scanning worker for replace/3: walks the subject one byte at a time,
%% substituting New whenever the (pre-sized) pattern matches at the front.
-spec replace(binary(), binary(), pos_integer(), binary(), binary()) -> binary().
replace(<<>>, _Pat, _PatBitSize, _New, Acc) ->
    Acc;
replace(Bin, Pat, PatBitSize, New, Acc) ->
    case Bin of
        <<Pat:PatBitSize/bits, Tail/bits>> ->
            %% Pattern matched: emit the replacement and skip it.
            replace(Tail, Pat, PatBitSize, New, <<Acc/bits, New/bits>>);
        <<Byte, Tail/bits>> ->
            replace(Tail, Pat, PatBitSize, New, <<Acc/bits, Byte>>)
    end.
%% reverse_utf8/2
%% Tail-recursive worker: peels one UTF-8 codepoint off the front and
%% prepends it to the accumulator. Input that is not valid UTF-8 fails
%% to match and crashes with function_clause.
-spec reverse_utf8(binary(), binary()) -> binary().
reverse_utf8(<<>>, Acc) -> Acc;
reverse_utf8(<<U/utf8, Rest/bits>>, Acc) ->
    reverse_utf8(Rest, <<U/utf8, Acc/bits>>).
%% do_split/3
-spec do_split(binary(), binary(), pos_integer(), [binary()]) -> [binary()].
%% Split B on every occurrence of Splitter (size in bits), building
%% the resulting pieces in reverse order in List and flipping once at
%% the end. Adjacent/leading splitters produce empty binary pieces.
do_split(B, Splitter, SplitterBitSize, List) ->
case B of
<<>> ->
lists:reverse(List);
<<Splitter:SplitterBitSize/bits, Rest/bits>> ->
case List of
[_|_] -> do_split(Rest, Splitter, SplitterBitSize, [<<>> | List]);
%% A splitter at the very start yields an empty piece on both sides.
[] -> do_split(Rest, Splitter, SplitterBitSize, [<<>>, <<>> | List])
end;
<<C, Rest/bits>> ->
%% Append the byte to the piece currently under construction.
List2 = case List of
[H|T] -> [<<H/bits, C>> | T];
[] -> [<< C >>]
end,
do_split(Rest, Splitter, SplitterBitSize, List2)
end.
%% numeric_prefix/2
-spec numeric_prefix(binary(), binary()) -> binary().
%% Accumulate the longest run of ASCII digits at the head of the
%% input; stop at the first non-digit byte (or end of input) and
%% return the digits gathered so far.
numeric_prefix(<<Digit, Rest/bits>>, Acc) when Digit >= $0, Digit =< $9 ->
    numeric_prefix(Rest, <<Acc/bits, Digit>>);
numeric_prefix(_Other, Acc) ->
    Acc.
%% do_strip_left/3
-spec do_strip_left(binary(), binary(), pos_integer()) -> binary().
%% Repeatedly drop the Chars prefix (Size bytes) from the front of
%% Subject until it no longer matches.
do_strip_left(<<>>, _Chars, _Size) ->
    <<>>;
do_strip_left(Subject, Chars, Size) ->
    case Subject of
        <<Chars:Size/bytes, Tail/bits>> ->
            do_strip_left(Tail, Chars, Size);
        _NoPrefix ->
            Subject
    end.
%% do_strip_right/3
-spec do_strip_right(binary(), binary(), pos_integer()) -> binary().
%% Repeatedly drop the Chars suffix (CharsByteSize bytes) from the end
%% of Bin until it no longer matches.
%% (Fixed: stray dataset-extraction residue that was fused onto the
%% closing "end." line has been removed.)
do_strip_right(<<>>, _, _) ->
    <<>>;
do_strip_right(Bin, Chars, CharsByteSize) ->
    HeadByteSize = erlang:byte_size(Bin) - CharsByteSize,
    case Bin of
        %% A negative HeadByteSize simply fails the match, falling
        %% through to the catch-all clause.
        << Head:HeadByteSize/bytes, Chars/bits >> ->
            do_strip_right(Head, Chars, CharsByteSize);
        _ -> Bin
    end.
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2003-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% Description : Implements XSLT like transformations in Erlang
%% @doc
% Erlang has similarities to XSLT since both languages
% have a functional programming approach. Using <code>xmerl_xpath</code>
% it is possible to write XSLT like transforms in Erlang.
%
% <p>XSLT stylesheets are often used when transforming XML
% documents, to other XML documents or (X)HTML for presentation.
% XSLT contains quite many
% functions and learning them all may take some effort.
% This document assumes a basic level of
% understanding of XSLT.
% </p>
% <p>Since XSLT is based on a functional programming approach
% with pattern matching and recursion it is possible to write
% similar style sheets in Erlang. At least for basic
% transforms. This
% document describes how to use the XPath implementation together
% with Erlangs pattern matching and a couple of functions to write
% XSLT like transforms.</p>
% <p>This approach is probably easier for an Erlanger but
% if you need to use real XSLT stylesheets in order to "comply to
% the standard" there is an adapter available to the Sablotron
% XSLT package which is written i C++.
% See also the <a href="xmerl_xs_examples.html">Tutorial</a>.
% </p>
-module(xmerl_xs).
-export([xslapply/2, value_of/1, select/2, built_in_rules/2 ]).
-include("xmerl.hrl").
%% @spec xslapply(Function, EList::list()) -> List
%% Function = () -> list()
%% @doc xslapply is a wrapper to make things look similar to
%% xsl:apply-templates.
%%
%% <p>Example, original XSLT:</p><br/><pre>
%% <xsl:template match="doc/title">
%% <h1>
%% <xsl:apply-templates/>
%% </h1>
%% </xsl:template>
%% </pre>
%%
%% <p>becomes in Erlang:</p><br/><pre>
%% template(E = #xmlElement{ parents=[{'doc',_}|_], name='title'}) ->
%% ["<h1>",
%% xslapply(fun template/1, E),
%% "</h1>"];
%% </pre>
%% Apply the template fun to every element of a list, or to the
%% content of a single #xmlElement{} — the Erlang analogue of
%% <xsl:apply-templates/>.
xslapply(Template, Items) when is_list(Items) ->
    [Template(Item) || Item <- Items];
xslapply(Template, #xmlElement{content = Content}) ->
    [Template(Node) || Node <- Content].
%% @spec value_of(E) -> List
%% E = term()
%%
%% @doc Concatenates all text nodes within the tree.
%%
%% <p>Example:</p><br/><pre>
%% <xsl:template match="title">
%% <div align="center">
%% <h1><xsl:value-of select="." /></h1>
%% </div>
%% </xsl:template>
%% </pre>
%%
%% <p>becomes:</p><br/> <pre>
%% template(E = #xmlElement{name='title'}) ->
%% ["<div align="center"><h1>",
%% value_of(select(".", E)), "</h1></div>"]
%% </pre>
%% Fold over the XML tree collecting the exported text of every
%% #xmlText{} node; the accumulator is built in reverse and flipped
%% once at the end.
value_of(E)->
lists:reverse(xmerl_lib:foldxml(fun value_of1/2, [], E)).
%% Accumulate text nodes; every other node kind is ignored.
value_of1(#xmlText{}=T1, Accu)->
[xmerl_lib:export_text(T1#xmlText.value)|Accu];
value_of1(_, Accu) ->
Accu.
%% @spec select(String::string(),E)-> E
%%
%% @doc Extracts the nodes from the xml tree according to XPath.
%% @see value_of/1
%% Thin wrapper over xmerl_xpath:string/2: evaluate the XPath
%% expression Str with E as the context node.
select(Str,E)->
xmerl_xpath:string(Str,E).
%% @spec built_in_rules(Fun, E) -> List
%%
%% @doc The default fallback behaviour. Template funs should end with:
%% <br/><code>template(E) -> built_in_rules(fun template/1, E)</code>.
%% Default fallback for template funs: recurse into element content,
%% export the value of text nodes and attributes, and ignore anything
%% else (comments, PIs, ...).
%% (Fixed: stray dataset-extraction residue fused onto the final
%% clause has been removed.)
built_in_rules(Fun, E = #xmlElement{}) ->
    lists:map(Fun, E#xmlElement.content);
built_in_rules(_Fun, E = #xmlText{}) ->
    xmerl_lib:export_text(E#xmlText.value);
built_in_rules(_Fun, E = #xmlAttribute{}) ->
    E#xmlAttribute.value;
built_in_rules(_Fun, _E) ->
    [].
%% eunit_formatters https://github.com/seancribbs/eunit_formatters
%% Changes made to the original code:
%% - Embedded binomial_heap.erl file contents into current file.
%% - ignore warnings for heap implementation to keep complete implementation.
%% - removed "namespaced_dicts" dependant preprocessor directive,
%% as it does not apply for our project, we just assume OTP version >= 17.
%% This is because the previous verison uses rebar, and we won't do that.
%% Copyright 2014 <NAME>
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc A listener/reporter for eunit that prints '.' for each
%% success, 'F' for each failure, and 'E' for each error. It can also
%% optionally summarize the failures at the end.
-compile({nowarn_unused_function, [insert/2, to_list/1, to_list/2, size/1]}).
-module(eunit_progress).
-behaviour(eunit_listener).
-define(NOTEST, true).
-include_lib("eunit/include/eunit.hrl").
-define(RED, "\e[0;31m").
-define(GREEN, "\e[0;32m").
-define(YELLOW, "\e[0;33m").
-define(WHITE, "\e[0;37m").
-define(CYAN, "\e[0;36m").
-define(RESET, "\e[0m").
-record(node,{
rank = 0 :: non_neg_integer(),
key :: term(),
value :: term(),
children = new() :: binomial_heap()
}).
-export_type([binomial_heap/0, heap_node/0]).
-type binomial_heap() :: [ heap_node() ].
-type heap_node() :: #node{}.
%% eunit_listener callbacks
-export([
init/1,
handle_begin/3,
handle_end/3,
handle_cancel/3,
terminate/2,
start/0,
start/1
]).
%% -- binomial_heap.erl content start --
-record(state, {
status = dict:new() :: euf_dict(),
failures = [] :: [[pos_integer()]],
skips = [] :: [[pos_integer()]],
timings = new() :: binomial_heap(),
colored = true :: boolean(),
profile = false :: boolean()
}).
-type euf_dict() :: dict:dict().
%% Create an empty heap (a binomial forest is just a list of trees).
-spec new() -> binomial_heap().
new() ->
[].
% Inserts a new pair into the heap (or creates a new heap)
-spec insert(term(), term()) -> binomial_heap().
insert(Key,Value) ->
insert(Key,Value,[]).
-spec insert(term(), term(), binomial_heap()) -> binomial_heap().
insert(Key,Value,Forest) ->
%% A singleton rank-0 tree is merged into the forest.
insTree(#node{key=Key,value=Value},Forest).
% Merges two heaps
%% Trees are kept ordered by increasing rank; equal ranks are linked
%% into a single tree of the next rank.
-spec merge(binomial_heap(), binomial_heap()) -> binomial_heap().
merge(TS1,[]) when is_list(TS1) -> TS1;
merge([],TS2) when is_list(TS2) -> TS2;
merge([#node{rank=R1}=T1|TS1]=F1,[#node{rank=R2}=T2|TS2]=F2) ->
if
R1 < R2 ->
[T1 | merge(TS1,F2)];
R2 < R1 ->
[T2 | merge(F1, TS2)];
true ->
insTree(link(T1,T2),merge(TS1,TS2))
end.
% Deletes the top entry from the heap and returns it
%% Returns {{Key, Value}, NewHeap}; the minimum tree's children are
%% reversed (they are stored in decreasing rank) and merged back in.
-spec delete(binomial_heap()) -> {{term(), term()}, binomial_heap()}.
delete(TS) ->
{#node{key=Key,value=Value,children=TS1},TS2} = getMin(TS),
{{Key,Value},merge(lists:reverse(TS1),TS2)}.
% Turns the heap into list in heap order
-spec to_list(binomial_heap()) -> [{term(), term()}].
to_list([]) -> [];
to_list(List) when is_list(List) ->
to_list([],List).
to_list(Acc, []) ->
lists:reverse(Acc);
to_list(Acc,Forest) ->
{Next, Trees} = delete(Forest),
to_list([Next|Acc], Trees).
% Take N elements from the top of the heap
%% Stops early if the heap runs out before N elements are collected.
-spec take(non_neg_integer(), binomial_heap()) -> [{term(), term()}].
take(N,Trees) when is_integer(N), is_list(Trees) ->
take(N,Trees,[]).
take(0,_Trees,Acc) ->
lists:reverse(Acc);
take(_N,[],Acc)->
lists:reverse(Acc);
take(N,Trees,Acc) ->
{Top,T2} = delete(Trees),
take(N-1,T2,[Top|Acc]).
% Get an estimate of the size based on the binomial property
%% A tree of rank R holds exactly 2^R entries, so the forest size is
%% the sum of 2^rank over all roots.
-spec size(binomial_heap()) -> non_neg_integer().
size(Forest) ->
erlang:trunc(lists:sum([math:pow(2,R) || #node{rank=R} <- Forest])).
%% Private API
%% Link two trees of equal rank: the one with the smaller key becomes
%% the root (min-heap order), the other becomes its first child.
-spec link(heap_node(), heap_node()) -> heap_node().
link(#node{rank=R,key=X1,children=C1}=T1,#node{key=X2,children=C2}=T2) ->
case X1 < X2 of
true ->
T1#node{rank=R+1,children=[T2|C1]};
_ ->
T2#node{rank=R+1,children=[T1|C2]}
end.
%% Insert a tree into a rank-ordered forest, linking on rank clashes
%% (the binomial-heap "carry" step).
insTree(Tree, []) ->
[Tree];
insTree(#node{rank=R1}=T1, [#node{rank=R2}=T2|Rest] = TS) ->
case R1 < R2 of
true ->
[T1|TS];
_ ->
insTree(link(T1,T2),Rest)
end.
%% Find the root with the minimum key; returns {MinTree, RestOfForest}.
getMin([T]) ->
{T,[]};
getMin([#node{key=K} = T|TS]) ->
{#node{key=K1} = T1,TS1} = getMin(TS),
case K < K1 of
true -> {T,TS};
_ -> {T1,[T|TS1]}
end.
%% -- binomial_heap.erl content end --
%% Startup
%% Start the listener with default options.
start() ->
start([]).
%% Start the listener; recognised options are the booleans 'colored'
%% and 'profile' (see init/1).
start(Options) ->
eunit_listener:start(?MODULE, Options).
%%------------------------------------------
%% eunit_listener callbacks
%%------------------------------------------
%% Build the initial #state{} from the option proplist.
init(Options) ->
#state{colored=proplists:get_bool(colored, Options),
profile=proplists:get_bool(profile, Options)}.
%% Record the begin-data for each group/test under its id, tagging it
%% with its type so later printing can distinguish them.
handle_begin(group, Data, St) ->
GID = proplists:get_value(id, Data),
Dict = St#state.status,
St#state{status=dict:store(GID, orddict:from_list([{type, group}|Data]), Dict)};
handle_begin(test, Data, St) ->
TID = proplists:get_value(id, Data),
Dict = St#state.status,
St#state{status=dict:store(TID, orddict:from_list([{type, test}|Data]), Dict)}.
%% Merge end-data into the stored status; for tests also emit a
%% progress character and (optionally) record the timing.
handle_end(group, Data, St) ->
St#state{status=merge_on_end(Data, St#state.status)};
handle_end(test, Data, St) ->
NewStatus = merge_on_end(Data, St#state.status),
St1 = print_progress(Data, St),
St2 = record_timing(Data, St1),
St2#state{status=NewStatus}.
%% A cancelled group/test is remembered as a skip.
handle_cancel(_, Data, #state{status=Status, skips=Skips}=St) ->
Status1 = merge_on_end(Data, Status),
ID = proplists:get_value(id, Data),
St#state{status=Status1, skips=[ID|Skips]}.
%% Print all summaries at the end of the run, then hand the overall
%% result back to the eunit_listener machinery via sync_end/1.
terminate({ok, Data}, St) ->
print_failures(St),
print_pending(St),
print_profile(St),
print_timing(St),
print_results(Data, St);
terminate({error, Reason}, St) ->
io:nl(), io:nl(),
print_colored(io_lib:format("Eunit failed: ~25p~n", [Reason]), ?RED, St),
sync_end(error).
%% Wait for the stop message from the listener runner and reply with
%% the final result.
sync_end(Result) ->
receive
{stop, Reference, ReplyTo} ->
ReplyTo ! {result, Reference, Result},
ok
end.
%%------------------------------------------
%% Print and collect information during run
%%------------------------------------------
%% Emit one progress character per finished test ('.', '*' or 'F')
%% and remember skip/failure ids for the end-of-run summaries.
print_progress(Data, St) ->
TID = proplists:get_value(id, Data),
case proplists:get_value(status, Data) of
ok ->
print_progress_success(St),
St;
{skipped, _Reason} ->
print_progress_skipped(St),
St#state{skips=[TID|St#state.skips]};
{error, Exception} ->
print_progress_failed(Exception, St),
St#state{failures=[TID|St#state.failures]}
end.
%% When profiling is on, push the test's elapsed time into the heap.
record_timing(Data, State=#state{timings=T, profile=true}) ->
TID = proplists:get_value(id, Data),
case lists:keyfind(time, 1, Data) of
{time, Int} ->
%% It's a min-heap, so we insert negative numbers instead
%% of the actuals and normalize when we report on them.
T1 = insert(-Int, TID, T),
State#state{timings=T1};
false ->
State
end;
record_timing(_Data, State) ->
State.
print_progress_success(St) ->
print_colored(".", ?GREEN, St).
print_progress_skipped(St) ->
print_colored("*", ?YELLOW, St).
print_progress_failed(_Exc, St) ->
print_colored("F", ?RED, St).
%% Merge the end-of-test data into the stored begin-data for this id;
%% newer (defined) values win per merge_data/3.
merge_on_end(Data, Dict) ->
ID = proplists:get_value(id, Data),
dict:update(ID,
fun(Old) ->
orddict:merge(fun merge_data/3, Old, orddict:from_list(Data))
end, Dict).
%% Prefer whichever value is defined; on a tie the newer value wins.
merge_data(_K, undefined, X) -> X;
merge_data(_K, X, undefined) -> X;
merge_data(_K, _, X) -> X.
%%------------------------------------------
%% Print information at end of run
%%------------------------------------------
%% Print the numbered "Failures:" section, one entry per failed test
%% (foldr so entries come out in run order).
print_failures(#state{failures=[]}) ->
ok;
print_failures(#state{failures=Fails}=State) ->
io:nl(),
io:fwrite("Failures:~n",[]),
lists:foldr(print_failure_fun(State), 1, Fails),
ok.
%% Returns a fold fun that prints one failure entry and threads the
%% entry counter through as the accumulator.
print_failure_fun(#state{status=Status}=State) ->
fun(Key, Count) ->
TestData = dict:fetch(Key, Status),
TestId = format_test_identifier(TestData),
io:fwrite("~n ~p) ~ts~n", [Count, TestId]),
print_failure_reason(proplists:get_value(status, TestData),
proplists:get_value(output, TestData),
State),
io:nl(),
Count + 1
end.
%% Dispatch on the shape of the failure term: skip reasons, eunit
%% assertion tuples, runtime errors with a stacktrace, or anything else.
print_failure_reason({skipped, Reason}, _Output, State) ->
print_colored(io_lib:format(" ~ts~n", [format_pending_reason(Reason)]),
?RED, State);
%% {Type, Props} assertion terms: 2-tuple whose second element is a
%% proplist.
print_failure_reason({error, {_Class, Term, Stack}}, Output, State) when
is_tuple(Term), tuple_size(Term) == 2, is_list(element(2, Term)) ->
print_assertion_failure(Term, Stack, Output, State),
print_failure_output(5, Output, State);
print_failure_reason({error, {error, Error, Stack}}, Output, State) when is_list(Stack) ->
print_colored(indent(5, "Failure: ~p~n", [Error]), ?RED, State),
print_stack(Stack, State),
print_failure_output(5, Output, State);
print_failure_reason({error, Reason}, Output, State) ->
print_colored(indent(5, "Failure: ~p~n", [Reason]), ?RED, State),
print_failure_output(5, Output, State).
print_stack(Stack, State) ->
print_colored(indent(5, "Stacktrace:~n", []), ?CYAN, State),
print_stackframes(Stack, State).
%% Frames from eunit's own runner modules are elided from the output.
print_stackframes([{eunit_test, _, _, _} | Stack], State) ->
print_stackframes(Stack, State);
print_stackframes([{eunit_proc, _, _, _} | Stack], State) ->
print_stackframes(Stack, State);
print_stackframes([{Module, Function, _Arity, _Location} | Stack], State) ->
print_colored(indent(7, "~p.~p~n", [Module, Function]), ?CYAN, State),
print_stackframes(Stack, State);
print_stackframes([], _State) ->
ok.
%% Captured stdout is only printed when non-empty.
print_failure_output(_, <<>>, _) -> ok;
print_failure_output(_, undefined, _) -> ok;
print_failure_output(Indent, Output, State) ->
print_colored(indent(Indent, "Output: ~ts", [Output]), ?CYAN, State).
%% Print a formatted assertion failure plus its source location (the
%% last stack frame carries file/line info).
print_assertion_failure({Type, Props}, Stack, Output, State) ->
FailureDesc = format_assertion_failure(Type, Props, 5),
{M,F,A,Loc} = lists:last(Stack),
LocationText = io_lib:format(" %% ~ts:~p:in `~ts`", [proplists:get_value(file, Loc),
proplists:get_value(line, Loc),
format_function_name(M,F,A)]),
print_colored(FailureDesc, ?RED, State),
io:nl(),
print_colored(LocationText, ?CYAN, State),
io:nl(),
print_failure_output(5, Output, State),
io:nl().
%% Print the "Pending:" section listing every skipped/cancelled entry
%% that recorded a reason.
print_pending(#state{skips=[]}) ->
ok;
print_pending(#state{status=Status, skips=Skips}=State) ->
io:nl(),
io:fwrite("Pending:~n", []),
lists:foreach(fun(ID) ->
Info = dict:fetch(ID, Status),
case proplists:get_value(reason, Info) of
undefined ->
ok;
Reason ->
print_pending_reason(Reason, Info, State)
end
end, lists:reverse(Skips)),
io:nl().
%% One pending entry: the group description or test identifier in
%% yellow, followed by the formatted reason in cyan.
print_pending_reason(Reason0, Data, State) ->
Text = case proplists:get_value(type, Data) of
group ->
io_lib:format(" ~ts~n", [proplists:get_value(desc, Data)]);
test ->
io_lib:format(" ~ts~n", [format_test_identifier(Data)])
end,
Reason = io_lib:format(" %% ~ts~n", [format_pending_reason(Reason0)]),
print_colored(Text, ?YELLOW, State),
print_colored(Reason, ?CYAN, State).
%% With profiling on, report the ten slowest tests. Times were stored
%% negated (min-heap), hence the abs/1 normalisation; the total run
%% time lives under the top-level group key [].
print_profile(#state{timings=T, status=Status, profile=true}=State) ->
TopN = take(10, T),
TopNTime = abs(lists:sum([ Time || {Time, _} <- TopN ])),
TLG = dict:fetch([], Status),
TotalTime = proplists:get_value(time, TLG),
if TotalTime =/= undefined andalso TotalTime > 0 andalso TopN =/= [] ->
TopNPct = (TopNTime / TotalTime) * 100,
io:nl(), io:nl(),
io:fwrite("Top ~p slowest tests (~ts, ~.1f% of total time):", [length(TopN), format_time(TopNTime), TopNPct]),
lists:foreach(print_timing_fun(State), TopN),
io:nl();
true -> ok
end;
print_profile(#state{profile=false}) ->
ok.
%% Print the total wall time of the run (from the top-level group).
print_timing(#state{status=Status}) ->
TLG = dict:fetch([], Status),
Time = proplists:get_value(time, TLG),
io:nl(),
io:fwrite("Finished in ~ts~n", [format_time(Time)]),
ok.
%% Pick a summary colour and overall result, print the counts, then
%% report the result back through sync_end/1.
print_results(Data, State) ->
Pass = proplists:get_value(pass, Data, 0),
Fail = proplists:get_value(fail, Data, 0),
Skip = proplists:get_value(skip, Data, 0),
Cancel = proplists:get_value(cancel, Data, 0),
Total = Pass + Fail + Skip + Cancel,
{Color, Result} = if Fail > 0 -> {?RED, error};
Skip > 0; Cancel > 0 -> {?YELLOW, error};
Pass =:= 0 -> {?YELLOW, ok};
true -> {?GREEN, ok}
end,
print_results(Color, Total, Fail, Skip, Cancel, State),
sync_end(Result).
%% Print the final one-line summary in the chosen colour; a run with
%% zero tests is special-cased.
%% (Fixed: the zero-test clause called print_colored(Color, Text, ...)
%% with the arguments swapped — print_colored/3 expects (Text, Color,
%% State), so the ANSI escape was printed as the message.)
print_results(Color, 0, _Fail, _Skip, _Cancel, State) ->
    print_colored("0 tests\n", Color, State);
print_results(Color, Total, Fail, Skip, Cancel, State) ->
    SkipText = format_optional_result(Skip, "skipped"),
    CancelText = format_optional_result(Cancel, "cancelled"),
    Text = io_lib:format("~p tests, ~p failures~ts~ts~n", [Total, Fail, SkipText, CancelText]),
    print_colored(Text, Color, State).
%% Returns a foreach fun printing one {Time, Key} profile entry; the
%% stored time is negated (min-heap), hence abs/1.
print_timing_fun(#state{status=Status}=State) ->
fun({Time, Key}) ->
TestData = dict:fetch(Key, Status),
TestId = format_test_identifier(TestData),
io:nl(),
io:fwrite(" ~ts~n", [TestId]),
print_colored([" "|format_time(abs(Time))], ?CYAN, State)
end.
%%------------------------------------------
%% Print to the console with the given color
%% if enabled.
%%------------------------------------------
%% Emit Text wrapped in the given ANSI colour escape plus a reset when
%% coloured output is enabled; plain text otherwise.
print_colored(Text, Color, #state{colored=true}) ->
io:fwrite("~s~ts~s", [Color, Text, ?RESET]);
print_colored(Text, _Color, #state{colored=false}) ->
io:fwrite("~ts", [Text]).
%%------------------------------------------
%% Generic data formatters
%%------------------------------------------
%% Render an MFA as the conventional "module:function/arity" iolist.
format_function_name(Mod, Fun, Arity) ->
    io_lib:format("~ts:~ts/~p", [Mod, Fun, Arity]).
%% Render ", N Label" for a non-zero count, nothing for zero — used to
%% append optional skip/cancel counts to the summary line.
format_optional_result(0, _Label) ->
    [];
format_optional_result(Count, Label) ->
    io_lib:format(", ~p ~ts", [Count, Label]).
%% Render "module:function/arity[:line][: description]" for a test;
%% line 0 and an undefined description are omitted.
format_test_identifier(Data) ->
{Mod, Fun, Arity} = proplists:get_value(source, Data),
Line = case proplists:get_value(line, Data) of
0 -> "";
L -> io_lib:format(":~p", [L])
end,
Desc = case proplists:get_value(desc, Data) of
undefined -> "";
DescText -> io_lib:format(": ~ts", [DescText])
end,
io_lib:format("~ts~ts~ts", [format_function_name(Mod, Fun, Arity), Line, Desc]).
%% Render a millisecond count as "N.NNN seconds"; an unknown time
%% becomes "? seconds".
format_time(undefined) ->
    "? seconds";
format_time(Millis) ->
    io_lib:format("~.3f seconds", [Millis / 1000]).
%% Human-readable text for the known skip/cancel reasons, with a
%% catch-all for anything unrecognised.
format_pending_reason({module_not_found, M}) ->
io_lib:format("Module '~ts' missing", [M]);
format_pending_reason({no_such_function, {M,F,A}}) ->
io_lib:format("Function ~ts undefined", [format_function_name(M,F,A)]);
format_pending_reason({exit, Reason}) ->
io_lib:format("Related process exited with reason: ~p", [Reason]);
format_pending_reason(Reason) ->
io_lib:format("Unknown error: ~p", [Reason]).
%% @doc Formats all the known eunit assertions, you're on your own if
%% you make an assertion yourself.
%% Each clause handles one assertion macro family (both the old
%% "*_failed" and new bare tags); I is the indentation column passed
%% through to indent/3. Returns an iolist.
format_assertion_failure(Type, Props, I) when Type =:= assertion_failed
; Type =:= assert ->
Keys = proplists:get_keys(Props),
%% Distinguish plain ?assert props from hamcrest ?assertThat props by
%% which keys are present.
HasEUnitProps = ([expression, value] -- Keys) =:= [],
HasHamcrestProps = ([expected, actual, matcher] -- Keys) =:= [],
if
HasEUnitProps ->
[indent(I, "Failure: ?assert(~ts)~n", [proplists:get_value(expression, Props)]),
indent(I, " expected: true~n", []),
case proplists:get_value(value, Props) of
false ->
indent(I, " got: false", []);
{not_a_boolean, V} ->
indent(I, " got: ~p", [V])
end];
HasHamcrestProps ->
[indent(I, "Failure: ?assertThat(~p)~n", [proplists:get_value(matcher, Props)]),
indent(I, " expected: ~p~n", [proplists:get_value(expected, Props)]),
indent(I, " got: ~p", [proplists:get_value(actual, Props)])];
true ->
[indent(I, "Failure: unknown assert: ~p", [Props])]
end;
format_assertion_failure(Type, Props, I) when Type =:= assertMatch_failed
; Type =:= assertMatch ->
Expr = proplists:get_value(expression, Props),
Pattern = proplists:get_value(pattern, Props),
Value = proplists:get_value(value, Props),
[indent(I, "Failure: ?assertMatch(~ts, ~ts)~n", [Pattern, Expr]),
indent(I, " expected: = ~ts~n", [Pattern]),
indent(I, " got: ~p", [Value])];
format_assertion_failure(Type, Props, I) when Type =:= assertNotMatch_failed
; Type =:= assertNotMatch ->
Expr = proplists:get_value(expression, Props),
Pattern = proplists:get_value(pattern, Props),
Value = proplists:get_value(value, Props),
[indent(I, "Failure: ?assertNotMatch(~ts, ~ts)~n", [Pattern, Expr]),
indent(I, " expected not: = ~ts~n", [Pattern]),
indent(I, " got: ~p", [Value])];
format_assertion_failure(Type, Props, I) when Type =:= assertEqual_failed
; Type =:= assertEqual ->
Expr = proplists:get_value(expression, Props),
Expected = proplists:get_value(expected, Props),
Value = proplists:get_value(value, Props),
[indent(I, "Failure: ?assertEqual(~w, ~ts)~n", [Expected,
Expr]),
indent(I, " expected: ~p~n", [Expected]),
indent(I, " got: ~p", [Value])];
format_assertion_failure(Type, Props, I) when Type =:= assertNotEqual_failed
; Type =:= assertNotEqual ->
Expr = proplists:get_value(expression, Props),
Value = proplists:get_value(value, Props),
[indent(I, "Failure: ?assertNotEqual(~p, ~ts)~n",
[Value, Expr]),
indent(I, " expected not: == ~p~n", [Value]),
indent(I, " got: ~p", [Value])];
format_assertion_failure(Type, Props, I) when Type =:= assertException_failed
; Type =:= assertException ->
Expr = proplists:get_value(expression, Props),
Pattern = proplists:get_value(pattern, Props),
{Class, Term} = extract_exception_pattern(Pattern), % I hate that we have to do this, why not just give DATA
[indent(I, "Failure: ?assertException(~ts, ~ts, ~ts)~n", [Class, Term, Expr]),
%% Two sub-cases: the expression succeeded (no exception), or it
%% raised a different exception than expected.
case proplists:is_defined(unexpected_success, Props) of
true ->
[indent(I, " expected: exception ~ts but nothing was raised~n", [Pattern]),
indent(I, " got: value ~p", [proplists:get_value(unexpected_success, Props)])];
false ->
Ex = proplists:get_value(unexpected_exception, Props),
[indent(I, " expected: exception ~ts~n", [Pattern]),
indent(I, " got: exception ~p", [Ex])]
end];
format_assertion_failure(Type, Props, I) when Type =:= assertNotException_failed
; Type =:= assertNotException ->
Expr = proplists:get_value(expression, Props),
Pattern = proplists:get_value(pattern, Props),
{Class, Term} = extract_exception_pattern(Pattern), % I hate that we have to do this, why not just give DAT
Ex = proplists:get_value(unexpected_exception, Props),
[indent(I, "Failure: ?assertNotException(~ts, ~ts, ~ts)~n", [Class, Term, Expr]),
indent(I, " expected not: exception ~ts~n", [Pattern]),
indent(I, " got: exception ~p", [Ex])];
format_assertion_failure(Type, Props, I) when Type =:= command_failed
; Type =:= command ->
Cmd = proplists:get_value(command, Props),
Expected = proplists:get_value(expected_status, Props),
Status = proplists:get_value(status, Props),
[indent(I, "Failure: ?cmdStatus(~p, ~p)~n", [Expected, Cmd]),
indent(I, " expected: status ~p~n", [Expected]),
indent(I, " got: status ~p", [Status])];
format_assertion_failure(Type, Props, I) when Type =:= assertCmd_failed
; Type =:= assertCmd ->
Cmd = proplists:get_value(command, Props),
Expected = proplists:get_value(expected_status, Props),
Status = proplists:get_value(status, Props),
[indent(I, "Failure: ?assertCmdStatus(~p, ~p)~n", [Expected, Cmd]),
indent(I, " expected: status ~p~n", [Expected]),
indent(I, " got: status ~p", [Status])];
format_assertion_failure(Type, Props, I) when Type =:= assertCmdOutput_failed
; Type =:= assertCmdOutput ->
Cmd = proplists:get_value(command, Props),
Expected = proplists:get_value(expected_output, Props),
Output = proplists:get_value(output, Props),
[indent(I, "Failure: ?assertCmdOutput(~p, ~p)~n", [Expected, Cmd]),
indent(I, " expected: ~p~n", [Expected]),
indent(I, " got: ~p", [Output])];
%% Catch-all: dump whatever we were given.
format_assertion_failure(Type, Props, I) ->
indent(I, "~p", [{Type, Props}]).
%% Prefix Fmt with an I-character-wide space field ("~Is" consuming a
%% single-space argument) so the formatted output is indented by I
%% columns.
indent(I, Fmt, Args) ->
    Padded = lists:concat(["~", I, "s", Fmt]),
    io_lib:format(Padded, [" " | Args]).
%% Split a stringified ?assertException pattern such as
%% "{ error, badarg, _ }" on comma/space runs and pull out the class
%% and term components.
%% (Fixed: stray dataset-extraction residue fused onto the final line
%% has been removed.)
extract_exception_pattern(Str) ->
    ["{", Class, Term | _] = re:split(Str, "[, ]{1,2}", [unicode, {return, list}]),
    {Class, Term}.
%%%-------------------------------------------------------------------
%% @doc Value Representation Age String.
%%
%% A string of characters with one of the following formats --
%% nnnD,
%% nnnW,
%% nnnM,
%% nnnY;
%%
%% where nnn shall contain the number of days for D,
%% weeks for W, months for M, or years for Y.
%% Example: "018M" would represent an age of 18 months.
%%
%% @end
%%%-------------------------------------------------------------------
-module(wolfpacs_vr_as).
-export([encode/2, decode/2]).
-include("wolfpacs_types.hrl").
-import(wolfpacs_vr_utils, [limit_binary/2]).
-spec encode(strategy(), binary()) -> binary().
%% The encoding strategy is irrelevant for AS values (plain ASCII),
%% so it is ignored and the work is delegated to encode/1.
encode(_Strategy, AE) ->
encode(AE).
-spec decode(strategy(), binary()) -> {ok, binary(), binary()} | {error, binary(), list(string())}.
%% The strategy does not affect AS decoding; delegate to decode/1.
decode(_Strategy, Data) ->
decode(Data).
%%==============================================================================
%% Private
%%==============================================================================
-spec encode(list() | binary()) -> binary().
%% Accept either a string or a binary; the value is truncated to the
%% AS maximum length of 4 bytes (e.g. "018M").
encode(AS) when is_list(AS) ->
encode(list_to_binary(AS));
encode(AS) ->
limit_binary(AS, 4).
%% Parse one Age String value: exactly three bytes followed by one of
%% D/W/M/Y. On success returns the consumed 4-byte value and the rest
%% of the input; otherwise the original data with an error description.
%% (Fixed: the spec claimed a plain binary() return, but every clause
%% returns an {ok, _, _} or {error, _, _} tuple — matching decode/2.)
-spec decode(binary()) -> {ok, binary(), binary()} | {error, binary(), list(string())}.
decode(<<>>) ->
    {error, <<>>, ["empty AS"]};
decode(<<A, B, C, "D", Rest/binary>>) ->
    {ok, <<A, B, C, "D">>, Rest};
decode(<<A, B, C, "W", Rest/binary>>) ->
    {ok, <<A, B, C, "W">>, Rest};
decode(<<A, B, C, "M", Rest/binary>>) ->
    {ok, <<A, B, C, "M">>, Rest};
decode(<<A, B, C, "Y", Rest/binary>>) ->
    {ok, <<A, B, C, "Y">>, Rest};
decode(Data) ->
    {error, Data, ["incorrect AS"]}.
%%==============================================================================
%% Test
%%==============================================================================
-include_lib("eunit/include/eunit.hrl").
%% encode/1 and encode/2 both pass a well-formed AS value through.
encode_test_() ->
[ ?_assertEqual(encode("018M"), <<"018M">>)
, ?_assertEqual(encode({explicit, little}, "018M"), <<"018M">>)
].
%% decode/1 and decode/2 error paths and the happy path with trailing
%% data.
decode_test_() ->
[ ?_assertEqual(decode(<<>>), {error, <<>>, ["empty AS"]})
, ?_assertEqual(decode(<<"ABCQ">>), {error, <<"ABCQ">>, ["incorrect AS"]})
, ?_assertEqual(decode(<<"123WABC">>), {ok, <<"123W">>, <<"ABC">>})
, ?_assertEqual(decode({explicit, little}, <<"123WABC">>), {ok, <<"123W">>, <<"ABC">>})
].
%% Round-trip every legal AS unit suffix through encode/1 + decode/1.
%% (Fixed: removed stray dataset-extraction residue fused onto the
%% closing bracket line, and a duplicated "123Y" assertion.)
encode_decode_test_() ->
    [ ?_assertEqual(decode(encode("123D")), {ok, <<"123D">>, <<>>})
    , ?_assertEqual(decode(encode("123W")), {ok, <<"123W">>, <<>>})
    , ?_assertEqual(decode(encode("123M")), {ok, <<"123M">>, <<>>})
    , ?_assertEqual(decode(encode("123Y")), {ok, <<"123Y">>, <<>>})
    ].
%% -------------------------------------------------------------------
%%
%% taken from: https://github.com/basho/riak_core/blob/develop/src/chash.erl
%%
%% chash: basic consistent hashing
%%
%% Copyright (c) 2007-2011 Basho Technologies, Inc. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc A consistent hashing implementation. The space described by the ring
%% coincides with SHA-1 hashes, and so any two keys producing the same
%% SHA-1 hash are considered identical within the ring.
%%
%% Warning: It is not recommended that code outside this module make use
%% of the structure of a chash.
%%
%% @reference <NAME>.; <NAME>.; <NAME>.; <NAME>.; <NAME>.;
%% <NAME>. (1997). "Consistent hashing and random trees". Proceedings of the
%% twenty-ninth annual ACM symposium on Theory of computing: 654~663. ACM Press
%% New York, NY, USA
-module(chash).
-export([contains_name/2,
fresh/2,
lookup/2,
key_of/1,
members/1,
merge_rings/2,
next_index/2,
nodes/1,
predecessors/2,
predecessors/3,
ring_increment/1,
size/1,
successors/2,
successors/3,
update/3]).
-export_type([chash/0, index/0, index_as_int/0]).
-define(RINGTOP,
trunc(math:pow(2, 160) - 1)). % SHA-1 space
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type chash() :: {num_partitions(), [node_entry()]}.
%% A Node is the unique identifier for the owner of a given partition.
%% An Erlang Pid works well here, but the chash module allows it to
%% be any term.
-type chash_node() :: term().
%% Indices into the ring, used as keys for object location, are binary
%% representations of 160-bit integers.
-type index() :: <<_:160>>.
-type index_as_int() :: integer().
-type node_entry() :: {index_as_int(), chash_node()}.
-type num_partitions() :: pos_integer().
%% ===================================================================
%% Public API
%% ===================================================================
%% @doc Return true if named Node owns any partitions in the ring, else false.
-spec contains_name(Name :: chash_node(),
CHash :: chash()) -> boolean().
%% True when the named node owns at least one partition in the ring.
contains_name(Name, {_NumPartitions, NodeEntries}) ->
    lists:any(fun({_Idx, Node}) -> Node == Name end, NodeEntries).
%% @doc Create a brand new ring. The size and seednode are specified;
%% initially all partitions are owned by the seednode. If NumPartitions
%% is not much larger than the intended eventual number of
%% participating nodes, then performance will suffer.
-spec fresh(NumPartitions :: num_partitions(),
SeedNode :: chash_node()) -> chash().
fresh(NumPartitions, SeedNode) ->
%% Partition boundaries are evenly spaced by Inc across the SHA-1
%% ring; every partition initially belongs to SeedNode.
Inc = ring_increment(NumPartitions),
{NumPartitions,
[{IndexAsInt, SeedNode}
|| IndexAsInt <- lists:seq(0, (?RINGTOP) -1 -(?RINGTOP rem NumPartitions), Inc)]}.
%% @doc Find the Node that owns the partition identified by IndexAsInt.
-spec lookup(IndexAsInt :: index_as_int(),
CHash :: chash()) -> chash_node().
%% Return the node owning the partition whose index is IndexAsInt.
%% Crashes (badmatch) when the index is not a partition boundary.
lookup(IndexAsInt, {_NumPartitions, NodeEntries}) ->
    {IndexAsInt, Owner} = proplists:lookup(IndexAsInt, NodeEntries),
    Owner.
%% SHA-1 of the given iodata — the hash that defines the ring space.
sha(Bin) -> crypto:hash(sha, Bin).
%% @doc Given any term used to name an object, produce that object's key
%% into the ring. Two names with the same SHA-1 hash value are
%% considered the same name.
-spec key_of(ObjectName :: term()) -> index().
key_of(ObjectName) -> sha(term_to_binary(ObjectName)).
%% @doc Return all Nodes that own any partitions in the ring.
%% The result is sorted and free of duplicates.
-spec members(CHash :: chash()) -> [chash_node()].
members({_NumPartitions, NodeEntries}) ->
    Owners = lists:map(fun({_Index, Node}) -> Node end, NodeEntries),
    lists:usort(Owners).
%% @doc Return a randomized merge of two rings.
%% If multiple nodes are actively claiming nodes in the same
%% time period, churn will occur. Be prepared to live with it.
%%
%% Both rings must have the same number of partitions; the repeated
%% NumPartitions binding below asserts this with a badmatch otherwise.
%% Note that a comprehension generator silently skips zipped pairs
%% whose indexes differ, so the rings are also expected to share the
%% same partition indexes for the result to keep its full length.
-spec merge_rings(CHashA :: chash(),
                  CHashB :: chash()) -> chash().
merge_rings(CHashA, CHashB) ->
    {NumPartitions, NodeEntriesA} = CHashA,
    {NumPartitions, NodeEntriesB} = CHashB,
    {NumPartitions,
     [{I, random_node(NodeA, NodeB)}
      || {{I, NodeA}, {I, NodeB}} <- lists:zip(NodeEntriesA, NodeEntriesB)]}.
%% @doc Given the integer representation of a chash key,
%% return the next ring index integer value.
-spec next_index(IntegerKey :: integer(),
                 CHash :: chash()) -> index_as_int().
next_index(IntegerKey, {NumPartitions, _}) ->
    Inc = ring_increment(NumPartitions),
    %% Round up to the start of the following partition; the `rem'
    %% wraps keys falling in the last partition back around to index 0.
    %% (`rem' and `*' share precedence and associate left, so this is
    %% ((Key div Inc + 1) rem NumPartitions) * Inc.)
    (IntegerKey div Inc + 1) rem NumPartitions * Inc.
%% @doc Return the entire set of NodeEntries in the ring.
-spec nodes(CHash :: chash()) -> [node_entry()].
nodes({_NumPartitions, NodeEntries}) -> NodeEntries.
%% @doc Given an object key, return all NodeEntries in order starting at Index.
-spec ordered_from(Index :: index(),
                   CHash :: chash()) -> [node_entry()].
ordered_from(Index, {NumPartitions, NodeEntries}) ->
    <<IndexAsInt:160/integer>> = Index,
    Inc = ring_increment(NumPartitions),
    %% Split just past the partition containing the key, then rotate so
    %% that the key's successor partition comes first.
    {NodeEntriesA, NodeEntriesB} = lists:split(IndexAsInt div Inc + 1, NodeEntries),
    NodeEntriesB ++ NodeEntriesA.
%% @doc Given an object key, return all NodeEntries in reverse order
%% starting at Index.
-spec predecessors(Index :: index() | index_as_int(),
                   CHash :: chash()) -> [node_entry()].
predecessors(Index, CHash) ->
    {NumPartitions, _NodeEntries} = CHash,
    %% Asking for NumPartitions entries yields the entire ring.
    predecessors(Index, CHash, NumPartitions).
%% @doc Given an object key, return the next N NodeEntries in reverse order
%% starting at Index.
-spec predecessors(Index :: index() | index_as_int(),
                   CHash :: chash(), N :: integer()) -> [node_entry()].
predecessors(Index, CHash, N) when is_integer(Index) ->
    %% Normalize an integer index to its 160-bit binary form first.
    predecessors(<<Index:160/integer>>, CHash, N);
predecessors(Index, CHash, N) ->
    %% Cap N at the ring size, then take that many entries from the
    %% reversed successor ordering.
    Num = max_n(N, CHash),
    {Res, _} = lists:split(Num,
                           lists:reverse(ordered_from(Index, CHash))),
    Res.
%% @doc Return increment between ring indexes given
%% the number of ring partitions.
-spec ring_increment(NumPartitions ::
                         pos_integer()) -> pos_integer().
ring_increment(NumPartitions) ->
    %% ?RINGTOP (defined elsewhere in this module) is the size of the
    %% full hash space.
    (?RINGTOP) div NumPartitions.
%% @doc Return the number of partitions in the ring.
%% (This local size/1 shadows the auto-imported BIF of the same name.)
-spec size(CHash :: chash()) -> integer().
size({_NumPartitions, NodeEntries}) ->
    length(NodeEntries).
%% @doc Given an object key, return all NodeEntries in order starting at Index.
-spec successors(Index :: index(),
                 CHash :: chash()) -> [node_entry()].
successors(Index, CHash) ->
    {NumPartitions, _NodeEntries} = CHash,
    %% Asking for NumPartitions entries yields the entire ring.
    successors(Index, CHash, NumPartitions).
%% @doc Given an object key, return the next N NodeEntries in order
%% starting at Index.
-spec successors(Index :: index(), CHash :: chash(),
                 N :: integer()) -> [node_entry()].
successors(Index, {NumPartitions, _} = CHash, N) ->
    Num = max_n(N, CHash),
    Ordered = ordered_from(Index, CHash),
    case Num of
        NumPartitions ->
            %% The whole ring was requested; nothing to trim.
            Ordered;
        _ ->
            {Taken, _Rest} = lists:split(Num, Ordered),
            Taken
    end.
%% @doc Make the partition beginning at IndexAsInt owned by Name'd node.
%% If no partition starts at IndexAsInt, the ring is returned unchanged
%% (lists:keyreplace/4 semantics).
-spec update(IndexAsInt :: index_as_int(),
             Name :: chash_node(), CHash :: chash()) -> chash().
update(IndexAsInt, Name, {NumPartitions, NodeEntries}) ->
    Replacement = {IndexAsInt, Name},
    {NumPartitions,
     lists:keyreplace(IndexAsInt, 1, NodeEntries, Replacement)}.
%% ====================================================================
%% Internal functions
%% ====================================================================
%% @private
%% @doc Return either N or the number of partitions in the ring,
%% whichever is lesser.
-spec max_n(N :: integer(),
            CHash :: chash()) -> integer().
max_n(N, {NumPartitions, _NodeEntries}) when N < NumPartitions -> N;
max_n(_N, {NumPartitions, _NodeEntries}) -> NumPartitions.
%% @private
%% Pick one of the two candidate owners: deterministic when both are
%% the same node, otherwise a uniformly random choice between the two.
-spec random_node(NodeA :: chash_node(),
                  NodeB :: chash_node()) -> chash_node().
random_node(Node, Node) -> Node;
random_node(NodeA, NodeB) ->
    case rand:uniform(2) of
        1 -> NodeA;
        2 -> NodeB
    end.
%% ===================================================================
%% EUnit tests
%% ===================================================================
-ifdef(TEST).
%% fresh/2 must create exactly as many partitions as requested.
fresh_sizes_test_() ->
    lists:foreach(fun(I) ->
                          ?assertEqual(I, (length(chash:nodes(chash:fresh(I, the_node)))))
                  end, [1, 10000]).
%% update/3 must replace exactly the addressed partition's owner.
update_test() ->
    Node = old@host,
    NewNode = new@host,
    % Create a fresh ring...
    CHash = chash:fresh(5, Node),
    GetNthIndex = fun (N, {_, NodeEntries}) ->
                          {Index, _} = lists:nth(N, NodeEntries),
                          Index
                  end,
    %% All five partitions start out owned by the seed node; since Node
    %% is already bound, each {_, Node} match asserts ownership.
    {5,
     [{_, Node},
      {_, Node},
      {_, Node},
      {_, Node},
      {_, Node}]} = CHash,
    % Test update...
    FirstIndex = GetNthIndex(1, CHash),
    ThirdIndex = GetNthIndex(3, CHash),
    %% Only the updated position should show NewNode afterwards.
    {5,
     [{_, NewNode},
      {_, Node},
      {_, Node},
      {_, Node},
      {_, Node}]} =
        update(FirstIndex, NewNode, CHash),
    {5,
     [{_, Node},
      {_, Node},
      {_, NewNode},
      {_, Node},
      {_, Node}]} =
        update(ThirdIndex, NewNode, CHash).
%% contains_name/2: true for a ring member, false for a stranger.
contains_test() ->
    CHash = chash:fresh(8, the_node),
    ?assertEqual(true, (contains_name(the_node, CHash))),
    ?assertEqual(false,
                 (contains_name(some_other_node, CHash))).
%% max_n/2 caps N at the partition count.
max_n_test() ->
    CHash = chash:fresh(8, the_node),
    ?assertEqual(1, (max_n(1, CHash))),
    ?assertEqual(8, (max_n(11, CHash))).
%% A freshly created ring has one entry per partition.
simple_size_test() ->
    ?assertEqual(8,
                 (length(chash:nodes(chash:fresh(8, the_node))))).
%% successors/2 without an explicit N returns the whole ring.
successors_length_test() ->
    ?assertEqual(8,
                 (length(chash:successors(chash:key_of(0),
                                          chash:fresh(8, the_node))))).
%% predecessors/2 is the reverse of successors/2 for the same key.
inverse_pred_test() ->
    CHash = chash:fresh(8, the_node),
    S = [I
         || {I, _} <- chash:successors(chash:key_of(4), CHash)],
    P = [I
         || {I, _}
                <- chash:predecessors(chash:key_of(4), CHash)],
    ?assertEqual(S, (lists:reverse(P))).
%% merge_rings/2 must keep an owner that both rings agree on.
merge_test() ->
    CHashA = chash:fresh(8, node_one),
    CHashB = chash:update(0,
                          node_one,
                          chash:fresh(8, node_two)),
    CHash = chash:merge_rings(CHashA, CHashB),
    ?assertEqual(node_one, (chash:lookup(0, CHash))).
-endif.
%% ---- end of file: src/chash.erl (dataset row: quality 0.766992, learning 0.451206, starcoder) ----
%% -------------------------------------------------------------------
%%
%% Copyright (c) 2014 Basho Technologies, Inc. All Rights Reserved.
%%
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at http://mozilla.org/MPL/2.0/.
%%
%% -------------------------------------------------------------------
%% @doc Accessor function for exometer data structures
%%
%% This module uses the exprecs transform (see <a href="https://github.com/uwiger/parse_trans/tree/master/doc/exprecs.md">exprecs</a>)
%% to generate accessor functions for exometer data structures.
%%
%% Note that the `value' attribute in `exometer_entry{}' records may not
%% represent the true value of the metric, since exometer entries often
%% have structured values, or are represented as CRDTs for update efficiency.
%%
%% @end
-module(exometer_info).
-export([status/1,
pp/1,
pp_lookup/1,
pp_find/1,
pp_select/1]).
-include("exometer.hrl").
-include_lib("parse_trans/include/exprecs.hrl").
-export_type([pp/0]).
-export_records([exometer_entry]).
-type pp() :: {atom(), [{atom(), any()}]}.
-spec status(exometer:entry()) -> enabled | disabled.
%% @doc Return the operational status of the given exometer entry.
%%
%% The `status' attribute is overloaded in the `#exometer_entry{}' record.
%% This function extracts the correct status (`enabled | disabled').
%% @end
status(#exometer_entry{status = St}) ->
    %% exometer_util:get_status/1 unpacks the overloaded attribute.
    exometer_util:get_status(St).
-spec pp(tuple() | list()) -> pp() | [pp() | any()].
%% @doc Pretty-print a record, or list containing records.
%%
%% This function pretty-prints a record as `{RecordName, [{Attr,Value}]}',
%% or, if the input is a list, recognizes records and pretty-prints them,
%% leaving other data structures unchanged.
%% @end
pp(L) when is_list(L) ->
    %% Recurse into list elements; non-record elements pass through.
    [pp(X) || X <- L];
pp(X) ->
    %% '#is_record-'/1 and '#info-'/2 are accessor functions generated
    %% by the exprecs parse transform for the exported records.
    case '#is_record-'(X) of
        true ->
            RecName = element(1,X),
            %% Pair each field name with its (recursively pretty-printed)
            %% value; tl/1 drops the record tag.
            {RecName, lists:zip(
                        '#info-'(RecName,fields),
                        pp(tl(tuple_to_list(X))))};
        false ->
            if is_tuple(X) ->
                    %% Not a known record: pretty-print the elements but
                    %% keep the tuple shape.
                    list_to_tuple(pp(tuple_to_list(X)));
               true ->
                    X
            end
    end.
-spec pp_lookup(exometer:name()) -> pp() | undefined.
%% @doc Performs a lookup by name of entry and pretty-prints the result.
%%
%% This function returns `undefined' if the entry cannot be found.
%% @end
pp_lookup(Name) ->
    Entry = exometer:info(Name, entry),
    case Entry of
        undefined -> undefined;
        _ -> pp(Entry)
    end.
-spec pp_find(list()) -> [pp()].
%% @doc Performs `exometer:find_entries(Path)' and returns the
%% pretty-printed result.
%%
%% This function calls `exometer:find_entries(Path)', retrieves the entry
%% for each matching metric, and calls `pp(Entry)' for each entry.
%% @end
pp_find(Path) ->
    pp([exometer:info(M, entry) || {M,_,_} <- exometer:find_entries(Path)]).
-spec pp_select(ets:match_spec()) -> [pp()].
%% @doc Performs `exometer:select(Pattern)' and returns the
%% pretty-printed result.
%%
%% This function calls `exometer:select(Pattern)', retrieves the entry
%% for each matching metric, and calls `pp(Entry)' for each entry.
%%
%% Note that the match body of the select pattern must produce the full
%% `{Name, Type, Status}' object, e.g. by specifying <code>['$_']</code>.
%% @end
pp_select(Pat) ->
    pp([exometer:info(M, entry) || {M,_,_} <- exometer:select(Pat)]).
%% ---- end of file: _build/default/lib/exometer_core/src/exometer_info.erl
%% ---- (dataset row: quality 0.706596, learning 0.41253, starcoder)
-module(bingo_checker).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-include("bingo_game.hrl").
-export([is_bingo/1]).
%%-------------------------------------------------------------------------
%% Function is_bingo(Squares) -> boolean().
%%
%% Squares = [[BingoSquare]]
%%   A square matrix where each entry is a bingo square.
%% BingoSquare = bingo_square()
%%   A bingo square record.
%%
%% Description: Returns `true` if all the squares on any rows, any columns,
%%              the main (left) diagonal, and the right diagonal are
%%              marked by the same player. Returns `false`, otherwise.
%%-------------------------------------------------------------------------
-spec is_bingo(bingo_squares()) -> boolean().
is_bingo(Squares) ->
    %% Build every candidate winning line (rows, columns, diagonals)
    %% and check whether any is fully owned by a single player.
    Sequences = bingo_sequences(Squares),
    lists:any(fun all_squares_marked_by_same_player/1, Sequences).
%%
%% HELPERS.
%%
%% Takes as input a square matrix and returns a lists of all rows,
%% all columns, plus the left and right diagonals.
-spec bingo_sequences([[T]]) -> [[T]].
bingo_sequences(Matrix) ->
    LDiag = left_diagonal(Matrix),
    RDiag = right_diagonal(Matrix),
    %% Rows come straight from the matrix; columns from its transpose.
    Matrix ++ transpose(Matrix) ++ [LDiag, RDiag].
%% Returns `true` if all bingo squares in the specified squares list
%% are marked by the exact same player. Returns `false`, otherwise.
%% An unmarked first square (marked_by = undefined) short-circuits to
%% `false', so an all-unmarked line never counts as a win.
-spec all_squares_marked_by_same_player([bingo_square()]) -> boolean().
all_squares_marked_by_same_player(
  [#bingo_square{marked_by = undefined} | _]
 ) ->
    false;
all_squares_marked_by_same_player(
  [#bingo_square{marked_by = Someone} | Rest]
 ) ->
    %% Someone is known to be a real player here (not undefined);
    %% every remaining square must be marked by the same one.
    Pred = fun(S) -> S#bingo_square.marked_by =:= Someone end,
    lists:all(Pred, Rest).
%% Transpose a 2D matrix (a non-empty list of equal-length rows).
%%
%% Implemented as a single foldr/zipwith pass instead of delegating to
%% the clause-heavy pairwise zipper in transpose/2: starting from one
%% empty column per element of a row, each row (processed back to
%% front) pushes its elements onto the heads of the accumulated
%% columns. Behaviour is unchanged for rectangular input of any size.
-spec transpose([[T]]) -> [[T]].
transpose([FirstRow | _] = Matrix) ->
    EmptyCols = lists:duplicate(length(FirstRow), []),
    lists:foldr(fun(Row, Cols) ->
                        lists:zipwith(fun(X, Col) -> [X | Col] end, Row, Cols)
                end,
                EmptyCols, Matrix).
%% Worker for transpose/1. The boolean flag records whether the head
%% of the list is still a plain row (false) or has already been zipped
%% into a list of partial columns (true). Rows are combined three at a
%% time; once a head has been combined, it is threaded through the
%% recursion with the flag set to true so later rows are appended to
%% the growing columns rather than re-wrapped.
-spec transpose([[T]], boolean()) -> [[T]].
transpose([List], false = _first_zipped) ->
    %% A single untouched row becomes a column of singletons.
    lists:map(fun(X) -> [X] end, List);
transpose([List], true = _first_zipped) ->
    %% A single already-zipped head is the finished transpose.
    List;
transpose([List1, List2], false = _first_zipped) ->
    CombineFun = fun(X, Y) -> [X, Y] end,
    lists:zipwith(CombineFun, List1, List2);
transpose([List1, List2], true = _first_zipped) ->
    %% Head already holds partial columns; append the new row's values.
    CombineFun = fun(X, Y) -> X ++ [Y] end,
    lists:zipwith(CombineFun, List1, List2);
transpose([List1, List2, List3], false = _first_zipped) ->
    CombineFun = fun(X, Y, Z) -> [X, Y, Z] end,
    lists:zipwith3(CombineFun, List1, List2, List3);
transpose([List1, List2, List3], true = _first_zipped) ->
    CombineFun = fun(X, Y, Z) -> X ++ [Y, Z] end,
    lists:zipwith3(CombineFun, List1, List2, List3);
transpose([List1, List2, List3 | Rest], false = _first_zipped) ->
    CombineFun = fun(X, Y, Z) -> [X, Y, Z] end,
    Head = lists:zipwith3(CombineFun, List1, List2, List3),
    transpose([Head | Rest], true);
transpose([List1, List2, List3 | Rest], true = _first_zipped) ->
    CombineFun = fun(X, Y, Z) -> X ++ [Y, Z] end,
    Head = lists:zipwith3(CombineFun, List1, List2, List3),
    transpose([Head | Rest], true).
%% Returns the left (main) diagonal of the given square matrix.
%% Flattening the matrix row-major and stepping Size+1 elements at a
%% time lands exactly on positions (1,1), (2,2), ..., (Size,Size).
-spec left_diagonal([[T]]) -> [T].
left_diagonal(Matrix) ->
    Size = length(Matrix),
    List = lists:flatten(Matrix),
    take_every(Size + 1, List).
%% Returns the right diagonal of the given square matrix.
%% The right diagonal starting from the top right corner and go all
%% the way to the bottom left corner of the matrix.
%% Rotating the matrix 90 degrees counter-clockwise turns its right
%% diagonal into the main diagonal of the rotated matrix.
-spec right_diagonal([[T]]) -> [T].
right_diagonal(Matrix) ->
    Rotate = rotate_90_degrees(Matrix),
    left_diagonal(Rotate).
%% Returns every `N`th element of `List`, starting with the first
%% element. N = 0 yields the empty list; N = 1 yields the whole list.
-spec take_every(integer(), [T]) -> [T].
take_every(0, List) when is_list(List) ->
    [];
take_every(1, List) when is_list(List) ->
    List;
take_every(N, List) when is_list(List), is_integer(N), N >= 2 ->
    take_every(N, List, 1, []).

%% Worker for take_every/2. `Run' counts positions inside the current
%% window of N elements: position 1 is kept, all others are dropped,
%% and the counter wraps back to 1 after reaching N.
-spec take_every(integer(), [T], integer(), [T]) -> [T].
take_every(_N, [], _Run, Acc) ->
    lists:reverse(Acc);
take_every(N, [H | T], Run, Acc) ->
    case Run of
        1 -> take_every(N, T, 2, [H | Acc]);
        N -> take_every(N, T, 1, Acc);
        _ -> take_every(N, T, Run + 1, Acc)
    end.
%% Rotates the given square matrix 90 degrees counter-clockwise:
%% transpose the matrix, then reverse the order of the rows.
-spec rotate_90_degrees([[T]]) -> [[T]].
rotate_90_degrees(Matrix) ->
    lists:reverse(transpose(Matrix)).
%%
%% TESTS.
%%
-ifdef(TEST).
%% Shorthand constructors for the records used throughout the tests.
-define(
   PLAYER(Name, Color),
   #bingo_player{name = Name, color = Color}
  ).
%% An unmarked square (marked_by defaults to undefined).
-define(
   SQUARE(Phrase, Points),
   #bingo_square{phrase = Phrase, points = Points}
  ).
%% A square already marked by Player.
-define(
   SQUARE(Phrase, Points, Player),
   #bingo_square{phrase = Phrase, points = Points, marked_by = Player}
  ).
%% is_bingo tests.
is_bingo_true_test_() ->
P = ?PLAYER("Some Player", green),
T = [
[[?SQUARE("s1",1,P), ?SQUARE("s2",2,P), ?SQUARE("s3",3,P)],
[?SQUARE("s4",4), ?SQUARE("s5",5), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1), ?SQUARE("s2",2), ?SQUARE("s3",3)],
[?SQUARE("s4",4,P), ?SQUARE("s5",5,P), ?SQUARE("s6",6,P)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1), ?SQUARE("s2",2), ?SQUARE("s3",3)],
[?SQUARE("s4",4), ?SQUARE("s5",5), ?SQUARE("s6",6)],
[?SQUARE("s7",7,P), ?SQUARE("s8",8,P), ?SQUARE("s9",9,P)]],
[[?SQUARE("s1",1,P), ?SQUARE("s2",2), ?SQUARE("s3",3)],
[?SQUARE("s4",4,P), ?SQUARE("s5",5), ?SQUARE("s6",6)],
[?SQUARE("s7",7,P), ?SQUARE("s8",8), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1), ?SQUARE("s2",2,P), ?SQUARE("s3",3)],
[?SQUARE("s4",4), ?SQUARE("s5",5,P), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8,P), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1), ?SQUARE("s2",2), ?SQUARE("s3",3,P)],
[?SQUARE("s4",4), ?SQUARE("s5",5), ?SQUARE("s6",6,P)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9,P)]],
[[?SQUARE("s1",1,P), ?SQUARE("s2",2), ?SQUARE("s3",3)],
[?SQUARE("s4",4), ?SQUARE("s5",5,P), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9,P)]],
[[?SQUARE("s1",1), ?SQUARE("s2",2), ?SQUARE("s3",3,P)],
[?SQUARE("s4",4), ?SQUARE("s5",5,P), ?SQUARE("s6",6)],
[?SQUARE("s7",7,P), ?SQUARE("s8",8), ?SQUARE("s9",9)]]
],
[?_assert(is_bingo(In)) || In <- T].
is_bingo_false_test_() ->
P1 = ?PLAYER("Player 1", "red"),
P2 = ?PLAYER("Player 2", "blue"),
T = [
[[?SQUARE("s1",1), ?SQUARE("s2",2), ?SQUARE("s3",3)],
[?SQUARE("s4",4), ?SQUARE("s5",5), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1,P1), ?SQUARE("s2",2,P1), ?SQUARE("s3",3,P2)],
[?SQUARE("s4",4), ?SQUARE("s5",5), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9)]],
[[?SQUARE("s1",1,P1), ?SQUARE("s2",2), ?SQUARE("s3",3,P1)],
[?SQUARE("s4",4), ?SQUARE("s5",5,P2), ?SQUARE("s6",6)],
[?SQUARE("s7",7), ?SQUARE("s8",8), ?SQUARE("s9",9)]]
],
[?_assertNot(is_bingo(In)) || In <- T].
%% bingo_sequences tests.
bingo_sequences_test_() ->
Tests = [
{[[1]], [[1], [1], [1], [1]]},
{
[[1, 2],
[3, 4]],
[[1, 2],
[3, 4],
[1, 3],
[2, 4],
[1, 4],
[2, 3]]
},
{
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9]],
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
[1, 4, 7],
[2, 5, 8],
[3, 6, 9],
[1, 5, 9],
[3, 5, 7]]
}
],
[?_assertEqual(Out, bingo_sequences(In)) || {In, Out} <- Tests].
%% left_diagonal tests.
left_diagonal_test_() ->
Tests = [
{[[1]], [1]},
{
[[1, 2],
[3, 4]],
[1, 4]
},
{
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9]],
[1, 5, 9]
},
{
[[a11, a12, a13, a14],
[a21, a22, a23, a24],
[a31, a32, a33, a34],
[a41, a42, a43, a44]],
[a11, a22, a33, a44]
}
],
[?_assertEqual(Out, left_diagonal(In)) || {In, Out} <- Tests].
%% right_diagonal tests.
right_diagonal_test_() ->
Tests = [
{[[1]], [1]},
{
[[1, 2],
[3, 4]],
[2, 3]
},
{
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9]],
[3, 5, 7]
},
{
[[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]],
[4, 7, 10, 13]
}
],
[?_assertEqual(Out, right_diagonal(In)) || {In, Out} <- Tests].
%% rotate_90_degrees tests.
rotate_90_degrees_test_() ->
Tests = [
{[[1]], [[1]]},
{
[[1, 2],
[3, 4]],
[[2, 4],
[1, 3]]
},
{
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9]],
[[3, 6, 9],
[2, 5, 8],
[1, 4, 7]]
},
{
[[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]],
[[4, 8, 12, 16],
[3, 7, 11, 15],
[2, 6, 10, 14],
[1, 5, 9, 13]]
}
],
[?_assertEqual(Out, rotate_90_degrees(In)) || {In, Out} <- Tests].
%% take_every tests.
take_every_test_() ->
Tests = [
{0, [], []},
{0, [1], []},
{0, [1, 2], []},
{0, [1, 2, 3], []},
{0, lists:seq(1, 10), []},
{1, [], []},
{1, [1], [1]},
{1, [1, 2], [1, 2]},
{1, [1, 2, 3], [1, 2, 3]},
{1, lists:seq(1, 10), lists:seq(1, 10)},
{2, [], []},
{2, [1], [1]},
{2, [1, 2], [1]},
{2, [1, 2, 3], [1, 3]},
{2, [1, 2, 3, 4], [1, 3]},
{2, [1, 2, 3, 4, 5], [1, 3, 5]},
{3, [], []},
{3, [1], [1]},
{3, [1, 2], [1]},
{3, [1, 2, 3], [1]},
{3, [1, 2, 3, 4], [1, 4]},
{3, [1, 2, 3, 4, 5], [1, 4]},
{3, [1, 2, 3, 4, 5, 6], [1, 4]},
{3, [1, 2, 3, 4, 5, 6, 7], [1, 4, 7]},
{2, lists:seq(1, 10), [1, 3, 5, 7, 9]},
{3, lists:seq(1, 10), [1, 4, 7, 10]},
{4, lists:seq(1, 10), [1, 5, 9]},
{5, lists:seq(1, 10), [1, 6]},
{6, lists:seq(1, 10), [1, 7]},
{7, lists:seq(1, 10), [1, 8]},
{8, lists:seq(1, 10), [1, 9]},
{9, lists:seq(1, 10), [1, 10]},
{10, lists:seq(1, 10), [1]},
{11, lists:seq(1, 10), [1]},
{12, lists:seq(1, 10), [1]},
{15, lists:seq(1, 10), [1]},
{2000, lists:seq(1, 10), [1]}
],
[?_assertEqual(Out, take_every(N, In)) || {N, In, Out} <- Tests].
%% all_squares_marked_by_same_player tests.
all_squares_marked_by_same_player_true_test_() ->
P = ?PLAYER("Some Player", green),
Tests = [
[?SQUARE("one", 1, P)],
[?SQUARE("one", 1, P), ?SQUARE("two", 2, P)],
[
?SQUARE("one", 1, P),
?SQUARE("two", 2, P),
?SQUARE("three", 3, P)
],
[
?SQUARE("one", 1, P),
?SQUARE("two", 2, P),
?SQUARE("three", 3, P),
?SQUARE("four", 4, P)
]
],
[?_assert(all_squares_marked_by_same_player(L)) || L <- Tests].
all_squares_marked_by_same_player_false_test_() ->
P1 = ?PLAYER("Player 1", green),
P2 = ?PLAYER("Player 2", red),
Tests = [
[?SQUARE("one", 1)],
[?SQUARE("one", 1), ?SQUARE("two", 2)],
[?SQUARE("one", 1, P1), ?SQUARE("two", 2)],
[?SQUARE("one", 1, P1), ?SQUARE("two", 2, P2)],
[
?SQUARE("one", 1),
?SQUARE("two", 2),
?SQUARE("three", 3)
],
[
?SQUARE("one", 1, P1),
?SQUARE("two", 2),
?SQUARE("three", 3)
],
[
?SQUARE("one", 1, P1),
?SQUARE("two", 2, P1),
?SQUARE("three", 3)
],
[
?SQUARE("one", 1, P1),
?SQUARE("two", 2, P1),
?SQUARE("three", 3, P2)
]
],
[?_assertNot(all_squares_marked_by_same_player(L)) || L <- Tests].
%% transpose tests.
transpose_1xn_matrix_test_() ->
Tests = [
{[[10]], [[10]]},
{[[-7, 9]], [[-7], [9]]},
{[[0, 4, 100]], [[0], [4], [100]]},
{[[a, b, c, d]], [[a], [b], [c], [d]]},
{[[0, a, 10, b, 11]], [[0], [a], [10], [b], [11]]},
{[["M11"]], [["M11"]]},
{[["M11", "M12"]], [["M11"], ["M12"]]},
{[["M11", "M12", "M13"]], [["M11"], ["M12"], ["M13"]]},
{
[["M11", "M12", "M13", "M14"]],
[["M11"], ["M12"], ["M13"], ["M14"]]
},
{
[["M11", "M12", "M13", "M14", "M15"]],
[["M11"], ["M12"], ["M13"], ["M14"], ["M15"]]
},
{
[["M11", "M12", "M13", "M14", "M15", "M16"]],
[["M11"], ["M12"], ["M13"], ["M14"], ["M15"], ["M16"]]
},
{
[["11", "12", "13", "14", "15", "16", "17"]],
[["11"], ["12"], ["13"], ["14"], ["15"], ["16"], ["17"]]
}
],
T1 = [?_assertEqual(Out, transpose(In)) || {In, Out} <- Tests],
T2 = [?_assertEqual(In, transpose(Out)) || {In, Out} <- Tests],
T1 ++ T2.
transpose_2xn_matrix_test_() ->
Tests = [
{[[1], [2]], [[1, 2]]},
{[[1, 2], [3, 4]], [[1, 3], [2, 4]]},
{[[1, 2, 3], [4, 5, 6]], [[1, 4], [2, 5], [3, 6]]},
{
[[a, b, c, d], [1, 2, 3, 4]],
[[a, 1], [b, 2], [c, 3], [d, 4]]
}
],
T1 = [?_assertEqual(Out, transpose(In)) || {In, Out} <- Tests],
T2 = [?_assertEqual(In, transpose(Out)) || {In, Out} <- Tests],
T1 ++ T2.
transpose_3xn_matrix_test_() ->
Tests = [
{[[1], [2], [3]], [[1, 2, 3]]},
{[[1, 2], [3, 4], [5, 6]], [[1, 3, 5], [2, 4, 6]]},
{
[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
[[1, 4, 7], [2, 5, 8], [3, 6, 9]]
},
{
[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]],
[[1, 5, 9], [2, 6, 10], [3, 7, 11], [4, 8, 12]]
}
],
T1 = [?_assertEqual(Out, transpose(In)) || {In, Out} <- Tests],
T2 = [?_assertEqual(In, transpose(Out)) || {In, Out} <- Tests],
T1 ++ T2.
transpose_4xn_matrix_test_() ->
Tests = [
{
[[1], [2], [3], [4]],
[[1, 2, 3, 4]]
},
{
[[1, 2], [3, 4], [5, 6], [7, 8]],
[[1, 3, 5, 7], [2, 4, 6, 8]]
},
{
[[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]],
[[1, 4, 7, 10], [2, 5, 8, 11], [3, 6, 9, 12]]
},
{
[[1, 2, 3, 4],
[5, 6, 7, 8],
[9, 10, 11, 12],
[13, 14, 15, 16]],
[[1, 5, 9, 13],
[2, 6, 10, 14],
[3, 7, 11, 15],
[4, 8, 12, 16]]
},
{
[[a11, a12, a13, a14, a15],
[a21, a22, a23, a24, a25],
[a31, a32, a33, a34, a35],
[a41, a42, a43, a44, a45]],
[[a11, a21, a31, a41],
[a12, a22, a32, a42],
[a13, a23, a33, a43],
[a14, a24, a34, a44],
[a15, a25, a35, a45]]
}
],
T1 = [?_assertEqual(Out, transpose(In)) || {In, Out} <- Tests],
T2 = [?_assertEqual(In, transpose(Out)) || {In, Out} <- Tests],
T1 ++ T2.
%% transpose/1 must work on arbitrary element types (records here) and
%% be its own inverse for a square matrix.
transpose_3x3_bingo_squares_test_() ->
    In = [
          [?SQUARE("one", 1), ?SQUARE("two", 2), ?SQUARE("three", 3)],
          [?SQUARE("four", 4), ?SQUARE("five", 5), ?SQUARE("six", 6)],
          [?SQUARE("seven", 7), ?SQUARE("eight", 8), ?SQUARE("nine", 9)]
         ],
    Out = [
           [?SQUARE("one", 1), ?SQUARE("four", 4), ?SQUARE("seven", 7)],
           [?SQUARE("two", 2), ?SQUARE("five", 5), ?SQUARE("eight", 8)],
           [?SQUARE("three", 3), ?SQUARE("six", 6), ?SQUARE("nine", 9)]
          ],
    [
     ?_assertEqual(Out, transpose(In)),
     ?_assertEqual(In, transpose(Out))
    ].
-endif.
%% ---- end of file: src/bingo_checker.erl (dataset row: quality 0.584745, learning 0.614351, starcoder) ----
%% =====================================================================
%% Licensed under the Apache License, Version 2.0 (the "License"); you may
%% not use this file except in compliance with the License. You may obtain
%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% Alternatively, you may use this file under the terms of the GNU Lesser
%% General Public License (the "LGPL") as published by the Free Software
%% Foundation; either version 2.1, or (at your option) any later version.
%% If you wish to allow use of your version of this file only under the
%% terms of the LGPL, you should delete the provisions above and replace
%% them with the notice and other provisions required by the LGPL; see
%% <http://www.gnu.org/licenses/>. If you do not delete the provisions
%% above, a recipient may use your version of this file under the terms of
%% either the Apache License or the LGPL.
%%
%% @copyright 2001-2006 <NAME>
%% @author <NAME> <<EMAIL>>
%% @end
%% =====================================================================
%% @doc `epp_dodger' - bypasses the Erlang preprocessor.
%%
%% <p>This module tokenises and parses most Erlang source code without
%% expanding preprocessor directives and macro applications, as long as
%% these are syntactically "well-behaved". Because the normal parse
%% trees of the `erl_parse' module cannot represent these things
%% (normally, they are expanded by the Erlang preprocessor {@link
%% //stdlib/epp} before the parser sees them), an extended syntax tree
%% is created, using the {@link erl_syntax} module.</p>
%% NOTES:
%%
%% * It's OK if the result does not parse - then at least nothing
%% strange happens, and the user can resort to full preprocessing.
%% However, we must avoid generating a token stream that is accepted by
%% the parser, but has a different meaning than the intended. A typical
%% example is when someone uses token-level string concatenation with
%% macros, as in `"foo" ?bar' (where `?bar' expands to a string). If we
%% replace the tokens `? bar' with `( ... )', to preserve precedence,
%% the result will be parsed as an application `"foo" ( ... )' and cause
%% trouble later on. We must detect such cases and report an error.
%%
%% * It is pointless to add a mechanism for tracking which macros are
%% known to take arguments, and which are known to take no arguments,
%% since a lot of the time we will not have seen the macro definition
%% anyway (it's usually in a header file). Hence, we try to use
%% heuristics instead. In most cases, the token sequence `? foo ('
%% indicates that it is a call of a macro that is supposed to take
%% arguments, but e.g., in the context `: ? foo (', the argument list
%% typically belongs to a remote function call, as in `m:?f(...)' and
%% should be parsed as `m:(?f)(...)' unless it is actually a try-clause
%% pattern such as `throw:?f(...) ->'.
%%
%% * We do our best to make macros without arguments pass the parsing
%% stage transparently. Atoms are accepted in most contexts, but
%% variables are not, so we use only atoms to encode these macros.
%% Sadly, the parsing sometimes discards even the line number info from
%% atom tokens, so we can only use the actual characters for this.
%%
%% * We recognize `?m(...' at the start of a form and prevent this from
%% being interpreted as a macro with arguments, since it is probably a
%% function definition. Likewise with attributes `-?m(...'.
-module(epp_dodger).
-export([parse_file/1, quick_parse_file/1, parse_file/2,
quick_parse_file/2, parse/1, quick_parse/1, parse/2,
quick_parse/2, parse/3, quick_parse/3, parse_form/2,
parse_form/3, quick_parse_form/2, quick_parse_form/3,
format_error/1, tokens_to_string/1]).
%% The following should be: 1) pseudo-uniquely identifiable, and 2)
%% cause nice looking error messages when the parser has to give up.
-define(macro_call, '? <macro> (').
-define(atom_prefix, "? ").
-define(var_prefix, "?,").
-define(pp_form, '?preprocessor declaration?').
%% @type errorinfo() = {ErrorLine::integer(),
%% Module::atom(),
%% Descriptor::term()}.
%%
%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link
%% //stdlib/io} module for details.
-type errorinfo() :: {integer(), atom(), term()}.
-type option() :: atom() | {atom(), term()}.
%% =====================================================================
%% @spec parse_file(File) -> {ok, Forms} | {error, errorinfo()}
%% File = file:filename()
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @equiv parse_file(File, [])
-spec parse_file(file:filename()) ->
        {'ok', erl_syntax:forms()} | {'error', errorinfo()}.
%% Delegates to parse_file/2 with an empty option list.
parse_file(File) ->
    parse_file(File, []).
%% @spec parse_file(File, Options) -> {ok, Forms} | {error, errorinfo()}
%% File = file:filename()
%% Options = [term()]
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @doc Reads and parses a file. If successful, `{ok, Forms}'
%% is returned, where `Forms' is a list of abstract syntax
%% trees representing the "program forms" of the file (cf.
%% `erl_syntax:is_form/1'). Otherwise, `{error, errorinfo()}' is
%% returned, typically if the file could not be opened. Note that
%% parse errors show up as error markers in the returned list of
%% forms; they do not cause this function to fail or return
%% `{error, errorinfo()}'.
%%
%% Options:
%% <dl>
%% <dt>{@type {no_fail, boolean()@}}</dt>
%% <dd>If `true', this makes `epp_dodger' replace any program forms
%% that could not be parsed with nodes of type `text' (see {@link
%% erl_syntax:text/1}), representing the raw token sequence of the
%% form, instead of reporting a parse error. The default value is
%% `false'.</dd>
%% <dt>{@type {clever, boolean()@}}</dt>
%% <dd>If set to `true', this makes `epp_dodger' try to repair the
%% source code as it seems fit, in certain cases where parsing would
%% otherwise fail. Currently, it inserts `++'-operators between string
%% literals and macros where it looks like concatenation was intended.
%% The default value is `false'.</dd>
%% </dl>
%%
%% @see parse/2
%% @see quick_parse_file/1
%% @see erl_syntax:is_form/1
-spec parse_file(file:filename(), [option()]) ->
        {'ok', erl_syntax:forms()} | {'error', errorinfo()}.
%% Full (exact) parsing: each form is handled by parse/3.
parse_file(File, Options) ->
    parse_file(File, fun parse/3, Options).
%% @spec quick_parse_file(File) -> {ok, Forms} | {error, errorinfo()}
%% File = file:filename()
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @equiv quick_parse_file(File, [])
-spec quick_parse_file(file:filename()) ->
        {'ok', erl_syntax:forms()} | {'error', errorinfo()}.
%% Delegates to quick_parse_file/2 with an empty option list.
quick_parse_file(File) ->
    quick_parse_file(File, []).
%% @spec quick_parse_file(File, Options) ->
%% {ok, Forms} | {error, errorinfo()}
%% File = file:filename()
%% Options = [term()]
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @doc Similar to {@link parse_file/2}, but does a more quick-and-dirty
%% processing of the code. Macro definitions and other preprocessor
%% directives are discarded, and all macro calls are replaced with
%% atoms. This is useful when only the main structure of the code is of
%% interest, and not the details. Furthermore, the quick-parse method
%% can usually handle more strange cases than the normal, more exact
%% parsing.
%%
%% Options: see {@link parse_file/2}. Note however that for
%% `quick_parse_file/2', the option `no_fail' is `true' by default.
%%
%% @see quick_parse/2
%% @see parse_file/2
-spec quick_parse_file(file:filename(), [option()]) ->
        {'ok', erl_syntax:forms()} | {'error', errorinfo()}.
%% Quick-and-dirty parsing: `no_fail' is appended to the options so
%% forms that cannot be parsed are kept as text nodes rather than
%% reported as parse errors.
quick_parse_file(File, Options) ->
    parse_file(File, fun quick_parse/3, Options ++ [no_fail]).
%% Common driver for parse_file/2 and quick_parse_file/2. The file is
%% first read assuming UTF-8; if the result contains invalid-unicode
%% error markers and the file does not explicitly declare utf8 (via an
%% encoding comment read by epp:read_encoding/1), the whole file is
%% re-parsed as Latin-1.
parse_file(File, Parser, Options) ->
    case do_parse_file(utf8, File, Parser, Options) of
        {ok, Forms}=Ret ->
            case find_invalid_unicode(Forms) of
                none ->
                    Ret;
                invalid_unicode ->
                    case epp:read_encoding(File) of
                        utf8 ->
                            %% The file insists on utf8; keep the result,
                            %% invalid-unicode markers and all.
                            Ret;
                        _ ->
                            do_parse_file(latin1, File, Parser, Options)
                    end
            end;
        Else ->
            Else
    end.
%% Opens File, sets the assumed text encoding on the device, runs
%% Parser on it, and guarantees the device is closed via try/after.
do_parse_file(DefEncoding, File, Parser, Options) ->
    case file:open(File, [read]) of
        {ok, Dev} ->
            _ = epp:set_encoding(Dev, DefEncoding),
            try Parser(Dev, 1, Options)
            after ok = file:close(Dev)
            end;
        {error, Error} ->
            {error, {0, file, Error}} % defer to file:format_error/1
    end.
%% Scan a form list for the error marker the I/O server produces when
%% it encounters bytes that are invalid in the device's encoding.
%% Returns invalid_unicode if any such marker is present, else none.
find_invalid_unicode(Forms) ->
    IsMarker = fun({error, {_Line, file_io_server, invalid_unicode}}) ->
                       true;
                  (_Other) ->
                       false
               end,
    case lists:any(IsMarker, Forms) of
        true -> invalid_unicode;
        false -> none
    end.
%% =====================================================================
%% @spec parse(IODevice) -> {ok, Forms} | {error, errorinfo()}
%% @equiv parse(IODevice, 1)
-spec parse(file:io_device()) -> {'ok', erl_syntax:forms()}.
%% Delegates to parse/2 starting at line 1.
parse(Dev) ->
    parse(Dev, 1).
%% @spec parse(IODevice, StartLine) -> {ok, Forms} | {error, errorinfo()}
%% IODevice = pid()
%% StartLine = integer()
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @equiv parse(IODevice, StartLine, [])
%% @see parse/1
-spec parse(file:io_device(), integer()) -> {'ok', erl_syntax:forms()}.
%% Delegates to parse/3 with no options.
parse(Dev, L) ->
    parse(Dev, L, []).
%% @spec parse(IODevice, StartLine, Options) ->
%% {ok, Forms} | {error, errorinfo()}
%% IODevice = pid()
%% StartLine = integer()
%% Options = [term()]
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @doc Reads and parses program text from an I/O stream. Characters are
%% read from `IODevice' until end-of-file; apart from this, the
%% behaviour is the same as for {@link parse_file/2}. `StartLine' is the
%% initial line number, which should be a positive integer.
%%
%% @see parse/2
%% @see parse_file/2
%% @see parse_form/2
%% @see quick_parse/3

-spec parse(file:io_device(), integer(), [option()]) ->
        {'ok', erl_syntax:forms()}.

%% Uses the exact per-form parser (normal_parser via parse_form/3).
parse(Dev, L0, Options) ->
    parse(Dev, L0, fun parse_form/3, Options).
%% @spec quick_parse(IODevice) -> {ok, Forms} | {error, errorinfo()}
%% @equiv quick_parse(IODevice, 1)

-spec quick_parse(file:io_device()) ->
        {'ok', erl_syntax:forms()}.

%% Quick-parse all forms on the device, counting lines from 1.
quick_parse(Dev) ->
    quick_parse(Dev, 1).
%% @spec quick_parse(IODevice, StartLine) ->
%% {ok, Forms} | {error, errorinfo()}
%% IODevice = pid()
%% StartLine = integer()
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @equiv quick_parse(IODevice, StartLine, [])
%% @see quick_parse/1

-spec quick_parse(file:io_device(), integer()) ->
        {'ok', erl_syntax:forms()}.

%% Quick-parse all forms on the device with default options.
quick_parse(Dev, L) ->
    quick_parse(Dev, L, []).
%% @spec quick_parse(IODevice, StartLine, Options) ->
%% {ok, Forms} | {error, errorinfo()}
%% IODevice = pid()
%% StartLine = integer()
%% Options = [term()]
%% Forms = [erl_syntax:syntaxTree()]
%%
%% @doc Similar to {@link parse/3}, but does a more quick-and-dirty
%% processing of the code. See {@link quick_parse_file/2} for details.
%%
%% @see quick_parse/2
%% @see quick_parse_file/2
%% @see quick_parse_form/2
%% @see parse/3

-spec quick_parse(file:io_device(), integer(), [option()]) ->
        {'ok', erl_syntax:forms()}.

%% Uses the quick (macro-discarding) per-form parser.
quick_parse(Dev, L0, Options) ->
    parse(Dev, L0, fun quick_parse_form/3, Options).
%% Entry point for the read-all-forms loop; the accumulator starts empty.
parse(Dev, L0, Parser, Options) ->
    parse(Dev, L0, [], Parser, Options).
%% Read forms in a loop until the parser reports end of input. Forms are
%% accumulated in reverse; `none' results are dropped, and per-form
%% errors are kept inline as {error, IoErr} entries.
parse(Dev, L0, Fs, Parser, Options) ->
    Step = Parser(Dev, L0, Options),
    parse_continue(Step, Dev, Fs, Parser, Options).

%% Dispatch on a single parser step result.
parse_continue({eof, _L1}, _Dev, Fs, _Parser, _Options) ->
    {ok, lists:reverse(Fs)};
parse_continue({ok, none, L1}, Dev, Fs, Parser, Options) ->
    parse(Dev, L1, Fs, Parser, Options);
parse_continue({ok, F, L1}, Dev, Fs, Parser, Options) ->
    parse(Dev, L1, [F | Fs], Parser, Options);
parse_continue({error, IoErr, L1}, Dev, Fs, Parser, Options) ->
    parse(Dev, L1, [{error, IoErr} | Fs], Parser, Options).
%% =====================================================================
%% @spec parse_form(IODevice, StartLine) -> {ok, Form, LineNo}
%% | {eof, LineNo}
%% | {error, errorinfo(), LineNo}
%% IODevice = pid()
%% StartLine = integer()
%% Form = erl_syntax:syntaxTree()
%% LineNo = integer()
%%
%% @equiv parse_form(IODevice, StartLine, [])
%%
%% @see quick_parse_form/2

-spec parse_form(file:io_device(), integer()) ->
        {'ok', erl_syntax:forms(), integer()}
      | {'eof', integer()} | {'error', errorinfo(), integer()}.

%% Read and parse a single form with default options.
parse_form(Dev, L0) ->
    parse_form(Dev, L0, []).
%% @spec parse_form(IODevice, StartLine, Options) ->
%% {ok, Form, LineNo}
%% | {eof, LineNo}
%% | {error, errorinfo(), LineNo}
%%
%% IODevice = pid()
%% StartLine = integer()
%% Options = [term()]
%% Form = erl_syntax:syntaxTree()
%% LineNo = integer()
%%
%% @doc Reads and parses a single program form from an I/O stream.
%% Characters are read from `IODevice' until an end-of-form
%% marker is found (a period character followed by whitespace), or until
%% end-of-file; apart from this, the behaviour is similar to that of
%% `parse/3', except that the return values also contain the
%% final line number given that `StartLine' is the initial
%% line number, and that `{eof, LineNo}' may be returned.
%%
%% @see parse/3
%% @see parse_form/2
%% @see quick_parse_form/3

-spec parse_form(file:io_device(), integer(), [option()]) ->
        {'ok', erl_syntax:forms(), integer()}
      | {'eof', integer()} | {'error', errorinfo(), integer()}.

%% Uses the exact (information-preserving) token scanner.
parse_form(Dev, L0, Options) ->
    parse_form(Dev, L0, fun normal_parser/2, Options).
%% @spec quick_parse_form(IODevice, StartLine) ->
%% {ok, Form, LineNo}
%% | {eof, LineNo}
%% | {error, errorinfo(), LineNo}
%% IODevice = pid()
%% StartLine = integer()
%% Form = erl_syntax:syntaxTree() | none
%% LineNo = integer()
%%
%% @equiv quick_parse_form(IODevice, StartLine, [])
%%
%% @see parse_form/2

-spec quick_parse_form(file:io_device(), integer()) ->
        {'ok', erl_syntax:forms(), integer()}
      | {'eof', integer()} | {'error', errorinfo(), integer()}.

%% Quick-parse a single form with default options.
quick_parse_form(Dev, L0) ->
    quick_parse_form(Dev, L0, []).
%% @spec quick_parse_form(IODevice, StartLine, Options) ->
%% {ok, Form, LineNo}
%% | {eof, LineNo}
%% | {error, errorinfo(), LineNo}
%%
%% IODevice = pid()
%% StartLine = integer()
%% Options = [term()]
%% Form = erl_syntax:syntaxTree() | none
%% LineNo = integer()
%%
%% @doc Similar to {@link parse_form/3}, but does a more quick-and-dirty
%% processing of the code. See {@link quick_parse_file/2} for details.
%%
%% @see parse/3
%% @see quick_parse_form/2
%% @see parse_form/3

-spec quick_parse_form(file:io_device(), integer(), [option()]) ->
        {'ok', erl_syntax:forms(), integer()}
      | {'eof', integer()} | {'error', errorinfo(), integer()}.

%% Uses the quick token scanner (preprocessor forms are dropped).
quick_parse_form(Dev, L0, Options) ->
    parse_form(Dev, L0, fun quick_parser/2, Options).
%% Parser options: `clever' enables heuristic rewrites around macros
%% adjacent to string literals (see scan_macros/3).
-record(opt, {clever = false :: boolean()}).

%% Common driver for parse_form/3 and quick_parse_form/3: tokenize one
%% form and hand the tokens to Parser. Parser signals failure by
%% throwing {parse_error, IoErr} or {error, Term}; any other crash is
%% caught and reported as an unknown error. With `no_fail', a form that
%% cannot be parsed is returned as a plain text node instead.
parse_form(Dev, L0, Parser, Options) ->
    NoFail = proplists:get_bool(no_fail, Options),
    Opt = #opt{clever = proplists:get_bool(clever, Options)},
    case io:scan_erl_form(Dev, "", L0) of
        {ok, Ts, L1} ->
            %% the {ok, _} wrapper distinguishes a normal result from
            %% the tagged tuples thrown by the parser callbacks
            case catch {ok, Parser(Ts, Opt)} of
                {'EXIT', Term} ->
                    {error, io_error(L1, {unknown, Term}), L1};
                {error, Term} ->
                    IoErr = io_error(L1, Term),
                    {error, IoErr, L1};
                {parse_error, _IoErr} when NoFail ->
                    %% fall back to a raw text node for this form
                    {ok, erl_syntax:set_pos(
                           erl_syntax:text(tokens_to_string(Ts)),
                           start_pos(Ts, L1)),
                     L1};
                {parse_error, IoErr} ->
                    {error, IoErr, L1};
                {ok, F} ->
                    {ok, F, L1}
            end;
        {error, _IoErr, _L1} = Err -> Err;
        {error, _Reason} -> {eof, L0}; % This is probably encoding problem
        {eof, _L1} = Eof -> Eof
    end.
%% Wrap an error description in the standard {Line, Module, Descriptor}
%% error-info format; Descriptor is rendered by format_error/1.
io_error(L, Desc) ->
    {L, ?MODULE, Desc}.
%% Position of a form: the annotation line of its first token, or the
%% given fallback line when the token list is empty.
start_pos(Tokens, FallbackLine) ->
    case Tokens of
        [First | _] ->
            erl_anno:line(element(2, First));
        [] ->
            FallbackLine
    end.
%% Exception-throwing wrapper for the standard Erlang parser stage.
%% On failure, fix_form/1 is tried as a rescue before giving up.
parse_tokens(Ts) ->
    parse_tokens(Ts, fun fix_form/1).
%% Parse one form's tokens; if erl_parse rejects them, give the Fix
%% callback a chance to either produce a form directly or return a
%% repaired token list (plus a follow-up fixer) to retry with. Throws
%% {parse_error, IoErr} when no rescue is possible.
parse_tokens(Ts, Fix) ->
    case erl_parse:parse_form(Ts) of
        {ok, Form} ->
            Form;
        {error, IoErr} ->
            case Fix(Ts) of
                {form, Form} ->
                    Form;
                {retry, Ts1, Fix1} ->
                    parse_tokens(Ts1, Fix1);
                error ->
                    throw({parse_error, IoErr})
            end
    end.
%% ---------------------------------------------------------------------
%% Quick scanning/parsing - deletes macro definitions and other
%% preprocessor directives, and replaces all macro calls with atoms.

%% Quick per-form parser: scan away preprocessor noise, parse, and drop
%% the dummy "kill" forms produced for discarded directives.
quick_parser(Ts, _Opt) ->
    filter_form(parse_tokens(quickscan_form(Ts))).
%% Recognize a preprocessor directive at the start of a form and replace
%% the whole form with a dummy "kill" form (later dropped by
%% filter_form/1); otherwise scan the form for macro uses.
quickscan_form([{'-', _L}, {atom, La, define} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, undef} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, include} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, include_lib} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, ifdef} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, ifndef} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {'if', La} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, elif} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, else} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', _L}, {atom, La, endif} | _Ts]) ->
    kill_form(La);
quickscan_form([{'-', L}, {'?', _}, {Type, _, _}=N | [{'(', _} | _]=Ts])
  when Type =:= atom; Type =:= var ->
    %% minus, macro and open parenthesis at start of form - assume that
    %% the macro takes no arguments; e.g. `-?foo(...).'
    quickscan_macros_1(N, Ts, [{'-', L}]);
quickscan_form([{'?', _L}, {Type, _, _}=N | [{'(', _} | _]=Ts])
  when Type =:= atom; Type =:= var ->
    %% macro and open parenthesis at start of form - assume that the
    %% macro takes no arguments (see scan_macros for details)
    quickscan_macros_1(N, Ts, []);
quickscan_form(Ts) ->
    quickscan_macros(Ts).
%% Build a dummy `?pp_form() -> kill.' function form; filter_form/1
%% recognizes and discards it after parsing.
kill_form(L) ->
    [{atom, L, ?pp_form}, {'(', L}, {')', L}, {'->', L}, {atom, L, kill},
     {dot, L}].
%% Replace macro occurrences in a token list with plain atoms/strings.
quickscan_macros(Ts) ->
    quickscan_macros(Ts, []).
%% Token-by-token scan; As accumulates already-processed tokens in
%% reverse order, so the head of As is the token just before a `?'.
quickscan_macros([{'?',_}, {Type, _, A} | Ts], [{string, L, S} | As])
  when Type =:= atom; Type =:= var ->
    %% macro after a string literal: change to a single string
    {_, Ts1} = skip_macro_args(Ts),
    S1 = S ++ quick_macro_string(A),
    quickscan_macros(Ts1, [{string, L, S1} | As]);
quickscan_macros([{'?',_}, {Type, _, _}=N | [{'(',_}|_]=Ts],
                 [{':',_}|_]=As)
  when Type =:= atom; Type =:= var ->
    %% macro and open parenthesis after colon - check the token
    %% following the arguments (see scan_macros for details)
    Ts1 = case skip_macro_args(Ts) of
              {_, [{'->',_} | _] = Ts2} -> Ts2;
              {_, [{'when',_} | _] = Ts2} -> Ts2;
              _ -> Ts %% assume macro without arguments
          end,
    quickscan_macros_1(N, Ts1, As);
quickscan_macros([{'?',_}, {Type, _, _}=N | Ts], As)
  when Type =:= atom; Type =:= var ->
    %% macro with or without arguments
    {_, Ts1} = skip_macro_args(Ts),
    quickscan_macros_1(N, Ts1, As);
quickscan_macros([T | Ts], As) ->
    quickscan_macros(Ts, [T | As]);
quickscan_macros([], As) ->
    lists:reverse(As).
%% (after a macro has been found and the arglist skipped, if any)
quickscan_macros_1({_Type, _, A}, [{string, L, S} | Ts], As) ->
    %% string literal following macro: change to single string
    S1 = quick_macro_string(A) ++ S,
    quickscan_macros(Ts, [{string, L, S1} | As]);
quickscan_macros_1({_Type, L, A}, Ts, As) ->
    %% normal case - just replace the macro with an atom
    quickscan_macros(Ts, [{atom, L, quick_macro_atom(A)} | As]).
%% Represent the macro `?Name' as the single atom '?Name'.
quick_macro_atom(Name) ->
    list_to_atom([$? | atom_to_list(Name)]).
%% Represent the macro `?Name' as the literal text "(?Name)".
quick_macro_string(Name) ->
    lists:flatten(["(?", atom_to_list(Name), ")"]).
%% Skipping to the end of a macro call, tracking open/close constructs.
%% @spec (Tokens) -> {Skipped, Rest}
%% Only a leading `(' starts an argument list; otherwise nothing is
%% consumed and Skipped is empty.
skip_macro_args([{'(',_}=T | Ts]) ->
    skip_macro_args(Ts, [')'], [T]);
skip_macro_args(Ts) ->
    {[], Ts}.
%% Walk the token stream maintaining a stack of expected closing tokens
%% (Stack); Taken accumulates consumed tokens in reverse. When the last
%% expected closer is seen, return {ConsumedTokens, Remaining}. Running
%% out of input with the stack non-empty is an error.
skip_macro_args([], _Stack, _Taken) ->
    throw({error, macro_args});
skip_macro_args([T | Ts], Stack, Taken) ->
    case closer_for(T) of
        {push, Close} ->
            %% an opening construct: expect its matching closer first
            skip_macro_args(Ts, [Close | Stack], [T | Taken]);
        none ->
            case {T, Stack} of
                {{C, _}, [C]} ->
                    %% final close
                    {lists:reverse([T | Taken]), Ts};
                {{C, _}, [C | Stack1]} ->
                    %% matching close of a nested construct
                    skip_macro_args(Ts, Stack1, [T | Taken]);
                _ ->
                    %% ordinary token: just consume it
                    skip_macro_args(Ts, Stack, [T | Taken])
            end
    end.

%% Closing token expected for each opening construct; `none' for
%% anything that does not open a bracketed/block construct.
closer_for({'(', _})       -> {push, ')'};
closer_for({'{', _})       -> {push, '}'};
closer_for({'[', _})       -> {push, ']'};
closer_for({'<<', _})      -> {push, '>>'};
closer_for({'begin', _})   -> {push, 'end'};
closer_for({'if', _})      -> {push, 'end'};
closer_for({'case', _})    -> {push, 'end'};
closer_for({'receive', _}) -> {push, 'end'};
closer_for({'try', _})     -> {push, 'end'};
closer_for({'cond', _})    -> {push, 'end'};
closer_for(_)              -> none.
%% Drop the dummy `?pp_form() -> kill.' forms produced by kill_form/1;
%% all other forms pass through unchanged.
filter_form({function, _, ?pp_form, _,
             [{clause, _, [], [], [{atom, _, kill}]}]}) ->
    none;
filter_form(T) ->
    T.
%% ---------------------------------------------------------------------
%% Normal parsing - try to preserve all information

%% Exact per-form parser: scan_form/2 either returns a rewritten token
%% list (to be parsed and post-processed) or a ready syntax-tree node
%% (for -error/-warning directives).
normal_parser(Ts0, Opt) ->
    case scan_form(Ts0, Opt) of
        Ts when is_list(Ts) ->
            rewrite_form(parse_tokens(Ts));
        Node ->
            Node
    end.
%% Turn each preprocessor directive into a parsable dummy function form
%% `?pp_form() -> <directive> ...' (rewritten back into an attribute by
%% rewrite_form/1); -error/-warning become error-marker nodes directly;
%% everything else is scanned for macro uses.
scan_form([{'-', _L}, {atom, La, define} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, define} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, undef} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, undef} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, include} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, include} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, include_lib} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, include_lib} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, ifdef} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, ifdef} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, ifndef} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, ifndef} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {'if', La} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, 'if'} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, elif} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, 'elif'} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, else} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, else} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, endif} | Ts], Opt) ->
    [{atom, La, ?pp_form}, {'(', La}, {')', La}, {'->', La},
     {atom, La, endif} | scan_macros(Ts, Opt)];
scan_form([{'-', _L}, {atom, La, error} | Ts], _Opt) ->
    Desc = build_info_string("-error", Ts),
    ErrorInfo = {La, ?MODULE, {error, Desc}},
    erl_syntax:error_marker(ErrorInfo);
scan_form([{'-', _L}, {atom, La, warning} | Ts], _Opt) ->
    Desc = build_info_string("-warning", Ts),
    ErrorInfo = {La, ?MODULE, {warning, Desc}},
    erl_syntax:error_marker(ErrorInfo);
scan_form([{'-', L}, {'?', L1}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt)
  when Type =:= atom; Type =:= var ->
    %% minus, macro and open parenthesis at start of form - assume that
    %% the macro takes no arguments; e.g. `-?foo(...).'
    macro(L1, N, Ts, [{'-', L}], Opt);
scan_form([{'?', L}, {Type, _, _}=N | [{'(', _} | _]=Ts], Opt)
  when Type =:= atom; Type =:= var ->
    %% macro and open parenthesis at start of form - assume that the
    %% macro takes no arguments; probably a function declaration on the
    %% form `?m(...) -> ...', which will not parse if it is rewritten as
    %% `(?m(...)) -> ...', so it must be handled as `(?m)(...) -> ...'
    macro(L, N, Ts, [], Opt);
scan_form(Ts, Opt) ->
    scan_macros(Ts, Opt).
%% Render the text of an -error/-warning directive: drop the trailing
%% dot token, then drop the trailing character from the printed token
%% text, and re-attach a final period.
build_info_string(Prefix, Ts0) ->
    Ts = lists:droplast(Ts0),
    String = lists:droplast(tokens_to_string(Ts)),
    Prefix ++ " " ++ String ++ ".".
%% Rewrite macro uses in a token list into parsable call forms.
scan_macros(Ts, Opt) ->
    scan_macros(Ts, [], Opt).
%% Token-by-token scan; As holds already-processed tokens in reverse,
%% so its head is the token immediately preceding the current `?'.
scan_macros([{'?', _}=M, {Type, _, _}=N | Ts], [{string, L, _}=S | As],
            #opt{clever = true}=Opt)
  when Type =:= atom; Type =:= var ->
    %% macro after a string literal: be clever and insert ++
    scan_macros([M, N | Ts], [{'++', L}, S | As], Opt);
scan_macros([{'?', L}, {Type, _, _}=N | [{'(',_}|_]=Ts],
            [{':',_}|_]=As, Opt)
  when Type =:= atom; Type =:= var ->
    %% macro and open parentheses after colon - probably a call
    %% `m:?F(...)' so the argument list might belong to the call, not
    %% the macro - but it could also be a try-clause pattern
    %% `...:?T(...) ->' - we need to check the token following the
    %% arguments to decide
    {Args, Rest} = skip_macro_args(Ts),
    case Rest of
        [{'->',_} | _] ->
            macro_call(Args, L, N, Rest, As, Opt);
        [{'when',_} | _] ->
            macro_call(Args, L, N, Rest, As, Opt);
        _ ->
            macro(L, N, Ts, As, Opt)
    end;
scan_macros([{'?', L}, {Type, _, _}=N | [{'(',_}|_]=Ts], As, Opt)
  when Type =:= atom; Type =:= var ->
    %% macro with arguments
    {Args, Rest} = skip_macro_args(Ts),
    macro_call(Args, L, N, Rest, As, Opt);
scan_macros([{'?', L }, {Type, _, _}=N | Ts], As, Opt)
  when Type =:= atom; Type =:= var ->
    %% macro without arguments
    macro(L, N, Ts, As, Opt);
scan_macros([T | Ts], As, Opt) ->
    scan_macros(Ts, [T | As], Opt);
scan_macros([], As, _Opt) ->
    lists:reverse(As).
%% Rewriting to a call which will be recognized by the post-parse pass
%% (we insert parentheses to preserve the precedences when parsing).

%% Argument-less macro: replace with a single marked atom (decoded back
%% into a macro node by rewrite/1).
macro(L, {Type, _, A}, Rest, As, Opt) ->
    scan_macros_1([], Rest, [{atom,L,macro_atom(Type,A)} | As], Opt).
%% Macro with an argument list: rewrite `?M(Args)' into a marker call
%% `?macro_call(M, Args)'; an empty argument list gets no comma.
macro_call([{'(',_}, {')',_}], L, {_, Ln, _}=N, Rest, As, Opt) ->
    {Open, Close} = parentheses(As),
    scan_macros_1([], Rest,
                  lists:reverse(Open ++ [{atom,L,?macro_call},
                                         {'(',L}, N, {')',Ln}] ++ Close,
                                As), Opt);
macro_call([{'(',_} | Args], L, {_, Ln, _}=N, Rest, As, Opt) ->
    {Open, Close} = parentheses(As),
    %% note that we must scan the argument list; it may not be skipped
    scan_macros_1(Args ++ Close,
                  Rest,
                  lists:reverse(Open ++ [{atom,L,?macro_call},
                                         {'(',L}, N, {',',Ln}],
                                As), Opt).
%% Encode a macro name as a prefixed atom so that rewrite/1 can later
%% recognize it and restore an atom- or variable-named macro node.
macro_atom(atom, A) ->
    list_to_atom(?atom_prefix ++ atom_to_list(A));
macro_atom(var, A) ->
    list_to_atom(?var_prefix ++ atom_to_list(A)).
%% don't insert parentheses after a string token, to avoid turning
%% `"string" ?macro' into a "function application" `"string"(...)'
%% (see note at top of file)
parentheses(Preceding) ->
    case Preceding of
        [{string, _, _} | _] -> {[], []};
        _ -> {[{'(',0}], [{')',0}]}
    end.
%% (after a macro has been found and the arglist skipped, if any)
scan_macros_1(Args, [{string, L, _} | _]=Rest, As,
              #opt{clever = true}=Opt) ->
    %% string literal following macro: be clever and insert ++
    scan_macros(Args ++ [{'++', L} | Rest], As, Opt);
scan_macros_1(Args, Rest, As, Opt) ->
    %% normal case - continue scanning
    scan_macros(Args ++ Rest, As, Opt).
%% Turn the dummy `?pp_form() -> Directive...' function forms produced
%% by scan_form/2 back into attribute nodes (with or without arguments);
%% all other forms go through the general rewrite.
rewrite_form({function, L, ?pp_form, _,
              [{clause, _, [], [], [{call, _, A, As}]}]}) ->
    erl_syntax:set_pos(erl_syntax:attribute(A, rewrite_list(As)), L);
rewrite_form({function, L, ?pp_form, _, [{clause, _, [], [], [A]}]}) ->
    erl_syntax:set_pos(erl_syntax:attribute(A), L);
rewrite_form(T) ->
    rewrite(T).
%% Apply rewrite/1 to every element of a form list.
rewrite_list(Ts) ->
    [rewrite(T) || T <- Ts].
%% Note: as soon as we start using erl_syntax:subtrees/1 and similar
%% functions, we cannot assume that we know the exact representation of
%% the syntax tree anymore - we must use erl_syntax functions to analyze
%% and decompose the data.

%% Decode the markers inserted during scanning: prefixed atoms become
%% macro nodes, ?macro_call applications become macros with arguments;
%% everything else is rewritten recursively via rewrite_1/1.
rewrite(Node) ->
    case erl_syntax:type(Node) of
        atom ->
            case atom_to_list(erl_syntax:atom_value(Node)) of
                ?atom_prefix ++ As ->
                    A1 = list_to_atom(As),
                    N = erl_syntax:copy_pos(Node, erl_syntax:atom(A1)),
                    erl_syntax:copy_pos(Node, erl_syntax:macro(N));
                ?var_prefix ++ As ->
                    A1 = list_to_atom(As),
                    N = erl_syntax:copy_pos(Node, erl_syntax:variable(A1)),
                    erl_syntax:copy_pos(Node, erl_syntax:macro(N));
                _ ->
                    Node
            end;
        application ->
            F = erl_syntax:application_operator(Node),
            case erl_syntax:type(F) of
                atom ->
                    case erl_syntax:atom_value(F) of
                        ?macro_call ->
                            %% first argument is the macro name
                            [A | As] = erl_syntax:application_arguments(Node),
                            M = erl_syntax:macro(A, rewrite_list(As)),
                            erl_syntax:copy_pos(Node, M);
                        _ ->
                            rewrite_1(Node)
                    end;
                _ ->
                    rewrite_1(Node)
            end;
        _ ->
            rewrite_1(Node)
    end.
%% Generic recursive rewrite: rebuild the node from its rewritten
%% subtrees; leaves are returned unchanged.
rewrite_1(Node) ->
    case erl_syntax:subtrees(Node) of
        [] ->
            Node;
        Gs ->
            Node1 = erl_syntax:make_tree(erl_syntax:type(Node),
                                         [[rewrite(T) || T <- Ts]
                                          || Ts <- Gs]),
            erl_syntax:copy_pos(Node, Node1)
    end.
%% attempting a rescue operation on a token sequence for a single form
%% if it could not be parsed after the normal treatment
%%
%% Only define-directives are rescued: if the form does not already end
%% in `).', a closing parenthesis is inserted before retrying with
%% fix_define/1.
fix_form([{atom, _, ?pp_form}, {'(', _}, {')', _}, {'->', _},
          {atom, _, define}, {'(', _} | _]=Ts) ->
    case lists:reverse(Ts) of
        [{dot, _}, {')', _} | _] ->
            {retry, Ts, fun fix_define/1};
        [{dot, L} | Ts1] ->
            Ts2 = lists:reverse([{dot, L}, {')', L} | Ts1]),
            {retry, Ts2, fun fix_define/1};
        _ ->
            error
    end;
fix_form(_Ts) ->
    error.
%% Last-resort handling of a -define whose body does not parse: keep the
%% macro name N and store the remaining body verbatim as a text node.
fix_define([{atom, L, ?pp_form}, {'(', _}, {')', _}, {'->', _},
            {atom, La, define}, {'(', _}, N, {',', _} | Ts]) ->
    [{dot, _}, {')', _} | Ts1] = lists:reverse(Ts),
    S = tokens_to_string(lists:reverse(Ts1)),
    A = erl_syntax:set_pos(erl_syntax:atom(define), La),
    Txt = erl_syntax:set_pos(erl_syntax:text(S), La),
    {form, erl_syntax:set_pos(erl_syntax:attribute(A, [N, Txt]), L)};
fix_define(_Ts) ->
    error.
%% @spec tokens_to_string(Tokens::[term()]) -> string()
%%
%% @doc Generates a string corresponding to the given token sequence.
%% The string can be re-tokenized to yield the same token list again.

-spec tokens_to_string([term()]) -> string().

tokens_to_string([Token | Rest]) ->
    token_text(Token) ++ tokens_to_string(Rest);
tokens_to_string([]) ->
    "".

%% Printable text for a single token, followed by a separator (a space,
%% or a newline after the end-of-form dot).
token_text({atom, _, A})    -> io_lib:write_atom(A) ++ " ";
token_text({string, _, S})  -> io_lib:write_string(S) ++ " ";
token_text({char, _, C})    -> io_lib:write_char(C) ++ " ";
token_text({float, _, F})   -> float_to_list(F) ++ " ";
token_text({integer, _, N}) -> integer_to_list(N) ++ " ";
token_text({var, _, V})     -> atom_to_list(V) ++ " ";
token_text({dot, _})        -> ".\n";
token_text({Sym, _})        -> atom_to_list(Sym) ++ " ".
%% @spec format_error(Descriptor::term()) -> string()
%% @hidden
%% @doc Callback function for formatting error descriptors. Not for
%% normal use.

-spec format_error(term()) -> string().

%% Descriptors for {error, _}/{warning, _} carry ready-made text; the
%% others are produced by this module (see io_error/2).
format_error(macro_args) ->
    errormsg("macro call missing end parenthesis");
format_error({error, Error}) ->
    Error;
format_error({warning, Error}) ->
    Error;
format_error({unknown, Reason}) ->
    errormsg(io_lib:format("unknown error: ~tP", [Reason, 15])).
%% Prefix an error message with this module's name.
errormsg(String) ->
    io_lib:format("~s: ~ts", [?MODULE, String]).
%% =====================================================================
%%% @doc module enacl_ext implements various enacl extensions.
%%% <p>None of the extensions listed here are part of the official NaCl library.
%%% Functions may be removed without further notice if it suddenly ends up being
%%% better to do something differently than the solution given here.
%%% </p>
-module(enacl_ext).
-export([
scramble_block_16/2
]).
%% Curve25519
-export([
curve25519_keypair/0,
curve25519_public_key/1,
curve25519_shared/2
]).
%% @doc scramble_block_16/2 scrambles (encrypt) a block under a given key
%% The rules are that the block is 16 bytes and the key is 32 bytes. The block
%% is scrambled by means of the (secret) key. This makes it impossible for an
%% attacker to understand the original input for the scrambling. The intention
%% of this method is to protect counters from leaking to the outside world, by
%% scrambling them before they leave the system.
%%
%% Scrambling is done by means of the TEA algorithm (Tiny Encryption Algorithm)
%% It has known weaknesses and should probably not be used long-term going
%% forward, but CurveCP currently uses it for nonce scrambling.
%% @end
-spec scramble_block_16(binary(), binary()) -> binary().
%% Thin wrapper around the NIF implementation; size validation is
%% presumably done on the NIF side - confirm in enacl_nif.
scramble_block_16(Block, Key) ->
    enacl_nif:scramble_block_16(Block, Key).
%% Curve 25519 Crypto
%% ------------------
%% @doc curve25519_keypair/0 creates a new Public/Secret keypair.
%%
%% Generates and returns a new key pair for the Curve 25519 encryption scheme. The return value is a
%% map in order to avoid using the public key as a secret key and vice versa.
%% @end
-spec curve25519_keypair() -> #{ atom() => binary() }.
curve25519_keypair() ->
    <<B0:8/integer, B1:30/binary, B2:8/integer>> = enacl:randombytes(32),
    %% Standard X25519 scalar clamping: clear the 3 low bits of the
    %% first byte, clear the top bit and set bit 6 of the last byte.
    SK = <<(B0 band 248), B1/binary, (64 bor (B2 band 127))>>,
    PK = curve25519_public_key(SK),
    #{ public => PK, secret => SK }.
%% @doc curve25519_public_key/1 creates a public key from a given SecretKey.
%% @end
-spec curve25519_public_key(SecretKey :: binary()) -> binary().
curve25519_public_key(SecretKey) ->
    %% <<9, 0:248>> is the standard Curve25519 base point (u = 9).
    enacl:curve25519_scalarmult(SecretKey, <<9, 0:248>>).
%% @doc curve25519_shared/2 creates a new shared secret from a given SecretKey and PublicKey.
%% @end.
-spec curve25519_shared(SecretKey :: binary(), PublicKey :: binary()) -> binary().
curve25519_shared(SecretKey, PublicKey) ->
    %% Scalar-multiplying the peer's public point by our secret scalar
    %% yields the Diffie-Hellman shared secret.
    %% (Removed stray dataset-metadata text that had been fused onto
    %% this line and would break compilation.)
    enacl:curve25519_scalarmult(SecretKey, PublicKey).
%%==============================================================================
%% Copyright 2010 Erlang Solutions Ltd.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%==============================================================================
-module(escalus_new_assert).
%% This module is meant to replace legacy escalus_assert in future versions
%% of Escalus
-export([assert/2, assert/3, assert_many/2, mix_match/2]).
%%==============================================================================
%% API functions
%%==============================================================================
%% Assert that predicate PredSpec (an escalus_pred function name or a
%% fun) holds for Arg; crashes with a descriptive assertion_failed
%% tuple, including the printed stanza, otherwise.
assert(PredSpec, Arg) ->
    Fun = predspec_to_fun(PredSpec),
    StanzaStr = arg_to_list(Arg),
    assert_true(Fun(Arg),
                {assertion_failed, assert, PredSpec, Arg, StanzaStr}).
%% Like assert/2, but the predicate takes extra leading parameters; the
%% stanza Arg is always passed as the last argument.
assert(PredSpec, Params, Arg) ->
    Fun = predspec_to_fun(PredSpec, length(Params) + 1),
    StanzaStr = arg_to_list(Arg),
    assert_true(apply(Fun, Params ++ [Arg]),
                {assertion_failed, assert, PredSpec, Params, Arg, StanzaStr}).
%% Assert that every predicate is satisfied by exactly one stanza, in
%% any order, and that the counts match. The stanzas are logged before
%% failing so the mismatch can be diagnosed.
assert_many(Predicates, Stanzas) ->
    AllStanzas = length(Predicates) == length(Stanzas),
    Ok = escalus_utils:mix_match(fun predspec_to_fun/1, Predicates, Stanzas),
    StanzasStr = escalus_utils:pretty_stanza_list(Stanzas),
    case Ok of
        true -> ok;
        false ->
            escalus_utils:log_stanzas("multi-assertion failed on", Stanzas)
    end,
    assert_true(Ok and AllStanzas,
                {assertion_failed, assert_many, AllStanzas, Predicates, Stanzas, StanzasStr}).
%% Alias for assert_many/2.
mix_match(Predicates, Stanzas) ->
    assert_many(Predicates, Stanzas).
%%==============================================================================
%% Helpers
%%==============================================================================
arg_to_list(A) when is_atom(A) ->
atom_to_list(A);
arg_to_list({'EXIT', {Err, _}}) ->
"Exit:" ++ atom_to_list(Err);
arg_to_list(Arg) ->
exml:to_list(Arg).
%% Convert a predicate spec into a unary fun.
predspec_to_fun(F) ->
    predspec_to_fun(F, 1).
%% Convert an atom naming an escalus_pred function of arity N into a
%% fun; funs (or any non-atom) are passed through unchanged.
predspec_to_fun(F, N) when is_atom(F), is_integer(N) ->
    %% Fugly, avert your eyes :-/
    %% R15B complains about {escalus_pred, F} syntax, where
    %% R14B04 doesn't allow fun escalus_pred:F/A yet.
    case N of
        1 -> fun (A) -> escalus_pred:F(A) end;
        2 -> fun (A, B) -> escalus_pred:F(A, B) end;
        3 -> fun (A, B, C) -> escalus_pred:F(A, B, C) end;
        4 -> fun (A, B, C, D) -> escalus_pred:F(A, B, C, D) end
    end;
predspec_to_fun(Other, _) ->
    Other.
%% Succeed on `true', crash with the prepared failure term on `false',
%% and report a non-boolean predicate result as a distinct error.
%% (Removed stray dataset-metadata text fused onto the final line,
%% which would break compilation.)
assert_true(true, _) -> ok;
assert_true(false, Fail) ->
    error(Fail);
assert_true(WTF, Pred) ->
    %% error/2: WTF and Pred appear as call arguments in the stacktrace
    error(bad_predicate_return_value, [WTF, Pred]).
%%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(esockd_rate_limit_SUITE).
-compile(export_all).
-compile(nowarn_export_all).
-include_lib("eunit/include/eunit.hrl").
%% CT callback: the list of test cases, discovered via esockd_ct:all/1.
all() -> esockd_ct:all(?MODULE).
%% A fresh {rate 1, burst 10} limiter starts with a full bucket and a
%% timestamp no later than "now".
t_info(_) ->
    Rl = esockd_rate_limit:new({1, 10}),
    Info = esockd_rate_limit:info(Rl),
    ?assertMatch(#{rate := 1,
                   burst := 10,
                   tokens := 10
                  }, Info),
    ?assert(erlang:system_time(milli_seconds) >= maps:get(time, Info)).
%% Token consumption and pause-time computation for a small limiter:
%% pauses are (Tokens-Limit)/Rate once the bucket is drained.
t_check(_) ->
    Rl = esockd_rate_limit:new({1, 10}),
    #{tokens := 10} = esockd_rate_limit:info(Rl),
    {0, Rl1} = esockd_rate_limit:check(5, Rl),
    #{tokens := 5} = esockd_rate_limit:info(Rl1),
    %% P = 1/r = 1000ms
    {1000, Rl2} = esockd_rate_limit:check(5, Rl1),
    #{tokens := 0} = esockd_rate_limit:info(Rl2),
    %% P = (Tokens-Limit)/r = 5000ms
    {5000, Rl3} = esockd_rate_limit:check(5, Rl2),
    #{tokens := 0} = esockd_rate_limit:info(Rl3),
    ok = timer:sleep(1000),
    %% P = (Tokens-Limit)/r = 1000ms
    {1000, _} = esockd_rate_limit:check(2, Rl3).
%% Same token-bucket checks with large rates, guarding against rounding
%% errors in the pause-time computation for big numerators.
%% (Removed stray dataset-metadata text fused onto the final line,
%% which would break compilation.)
t_check_bignum(_) ->
    R1 = 30000000,
    R2 = 15000000,
    Rl = esockd_rate_limit:new({R1, R1}),
    #{tokens := R1} = esockd_rate_limit:info(Rl),
    {0, Rl1} = esockd_rate_limit:check(R2, Rl),
    #{tokens := R2} = esockd_rate_limit:info(Rl1),
    %% P = 1/r = 0.00003333 ~= 0ms
    {0, Rl2} = esockd_rate_limit:check(R2, Rl1),
    #{tokens := 0} = esockd_rate_limit:info(Rl2),
    timer:sleep(1000),
    %% P = (Tokens-Limit)/r = 1000ms
    {1000, Rl3} = esockd_rate_limit:check(R1*2, Rl2),
    #{tokens := 0} = esockd_rate_limit:info(Rl3),
    %% P = (Tokens-Limit)/r = 0.5ms ~= 1ms
    %% NOTE(review): this check deliberately(?) starts from Rl2 again,
    %% not Rl3 - confirm that reusing the earlier state is intended.
    {1, Rl4} = esockd_rate_limit:check(R1*(1+0.0005), Rl2),
    #{tokens := 0} = esockd_rate_limit:info(Rl4).
%%
%% Copyright (c) dushin.net
%% All rights reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-module(atomvm_lib).
-export([set_rtc_memory/1, get_rtc_memory/0, random/2, sleep_forever/0, to_hex/2, to_hex/1]).
%%-----------------------------------------------------------------------------
%% @param Data binary data to store
%% @returns ok
%% @doc Store a blob in RTC memory.
%%
%% This operation will store data in RTC memory. This memory will
%% be preserved during a sleep operation, but will be cleared once
%% the device restarts.
%%
%% The input binary data must be no larger than the the value set
%% in configuration at build time of the AtomVM binary. (By default,
%% the maximum binary size is 0 bytes. You may adjust this value
%% via `make menuconfig' when building the AtomVM image.) An attempt
%% to store a blob larger than the maximum allowable size will result
%% in a `badarg' exception.
%% @end
%%-----------------------------------------------------------------------------
-spec set_rtc_memory(Data::binary()) -> ok.
%% NIF stub: throws nif_error unless replaced by the VM's native
%% implementation - presumably substituted at load time; confirm.
set_rtc_memory(_Data) ->
    throw(nif_error).
%%-----------------------------------------------------------------------------
%% @returns data stored in RTC memory, or the empty binary (`<<"">>'), if
%% nothing has been stored.
%% @doc Retrieve a blob stored in RTC memory.
%%
%% This operation will retrieve data stored in RTC memory. This memory will
%% be preserved during a sleep operation, but will be cleared once
%% the device restarts.
%% @end
%%-----------------------------------------------------------------------------
-spec get_rtc_memory() -> binary().
%% NIF stub: throws nif_error unless replaced by the VM's native
%% implementation - presumably substituted at load time; confirm.
get_rtc_memory() ->
    throw(nif_error).
%%-----------------------------------------------------------------------------
%% @returns random 32-bit integer between `Lower' and `Upper'.
%% @doc Returns a random 32-bit integer value between `Lower' and `Upper'.
%%
%% Bother `Lower' and `Upper' must be integers and `Lower' must be less than `Upper'.
%% @end
%%-----------------------------------------------------------------------------
-spec random(Lower::integer(), Upper::integer()) -> integer().
%% The `rem' arithmetic puts the result in [Lower, Upper) - Upper itself
%% is never returned. Note also that the modulo reduction introduces a
%% slight bias unless (Upper - Lower) divides 2^31.
random(Lower, Upper) when is_integer(Lower), is_integer(Upper), Lower < Upper ->
    R = atomvm:random(),
    %% take the absolute value of the raw 32-bit sample
    P = case R < 0 of true -> -R; _ -> R end,
    Lower + (P rem (Upper - Lower));
random(_,_) ->
    throw(badarg).
%%-----------------------------------------------------------------------------
%% @doc Sleep forever. This function does not halt.
%% @end
%%-----------------------------------------------------------------------------
%% Loops in 24-hour sleeps; never returns.
sleep_forever() ->
    timer:sleep(24*60*60*1000),
    sleep_forever().
%%-----------------------------------------------------------------------------
%% @returns hex representation of I, as a string.
%% @doc Returns the hex representation of I, as a string.
%% @end
%%-----------------------------------------------------------------------------
-spec to_hex(I::integer()|binary()) -> string().
to_hex(I) when is_integer(I) ->
    to_hex(I, 1);
to_hex(B) when is_binary(B) ->
    %% yields C-array style text, e.g. "0x41,0x42," - note each byte,
    %% including the last, is followed by a comma
    lists:flatten([ "0x" ++ to_hex(I) ++ "," || I <- erlang:binary_to_list(B)]).
%%-----------------------------------------------------------------------------
%% @returns hex representation of I, as a string, left-padded with zeros
%% to at least `Bytes * 2' uppercase hex digits.
%% @doc Returns the hex representation of the non-negative integer I,
%% padded to at least `Bytes' bytes (two hex digits per byte).
%% @end
%%-----------------------------------------------------------------------------
%% Fix: the spec previously declared `I::integer' — the bare atom `integer',
%% not the type `integer()'.
-spec to_hex(I::integer(), Bytes::non_neg_integer()) -> string().
to_hex(I, Bytes) when is_integer(I) ->
    to_hex(I, Bytes * 2, []).
%% @private
%% Peel hex digits off I (least significant nibble first), counting down the
%% K digits still owed for padding.  Guarded `I > 0' so that a negative
%% argument raises function_clause instead of looping forever
%% (in Erlang, `-1 bsr 4' is `-1`, so the old catch-all clause never
%% terminated for negative input).
to_hex(0, K, Accum) ->
    maybe_pad(K, Accum);
to_hex(I, K, Accum) when I > 0 ->
    Quartet = I band 16#F,
    to_hex(I bsr 4, K - 1, [hex_char(Quartet) | Accum]).
%% @private
%% Left-pad Accum with `$0' until the remaining digit budget K is used up.
%% K may already be negative when the integer needed more digits than the
%% requested padding (e.g. to_hex(256) enters here with K = -1); treating
%% any non-positive K as "done" fixes the infinite loop the old
%% `maybe_pad(K, ...) -> maybe_pad(K - 1, ...)' clause caused in that case.
maybe_pad(K, Accum) when K =< 0 ->
    Accum;
maybe_pad(K, Accum) ->
    maybe_pad(K - 1, [$0 | Accum]).
%% @private
%% Map a nibble (0..15) to its uppercase hexadecimal character.
%% Out-of-range input fails with function_clause, as before.
hex_char(Q) when Q >= 0, Q =< 9 ->
    $0 + Q;
hex_char(Q) when Q >= 16#A, Q =< 16#F ->
    $A + (Q - 16#A).
%% Minimal JSON parsing module that both encodes and decodes.
%% When encoding, JSON types are mapped to Erlang terms as follows:
%%
%% * JSON object is Erlang map
%% * JSON string is Erlang binary
%% * JSON list is Erlang list
%% * JSON numbers are Erlang numbers
%%
%% When decoding, the mapping is reversed, and JSON strings are decoded to
%% Erlang binaries. You can alter this behavior (e.g. decode strings to
%% lists instead) by editing decode_string/2 in this file.
-module(json).
-export([encode/1, decode/1]).
-define(is_digit(X), X >= 48, X =< 57).
-define(is_space(X), X =< 32; X == $,).
-define(is_exponent(X), X == $e; X == $E).
-define(is_sign(X), X == $+; X == $-).
-spec encode(Term) -> JSON when Term :: term(), JSON :: iodata().
%% @doc Encode Term as a JSON value.
%%
%% Mapping: the atoms `true', `false' and `null' encode to the matching
%% JSON literals; a {@type binary()} encodes to a JSON string; a
%% {@type map()} to a JSON object; a {@type list()} to a JSON array; and
%% a {@type number()} to a JSON number.
%%
%% Note that {@type string()} is treated as an array of numbers:
%% ```
%% <<"[106,111,101]">> = iolist_to_binary(json:encode("joe")).
%% '''
%%
%% Throws {@type badarg} if a map key does not encode to a JSON string.
encode(true) -> <<"true">>;
encode(false) -> <<"false">>;
encode(null) -> <<"null">>;
encode(Binary) when is_binary(Binary) -> encode_string(Binary, <<$">>);
encode(Integer) when is_integer(Integer) -> integer_to_binary(Integer);
encode(Float) when is_float(Float) -> io_lib:format("~p", [Float]);
encode(List) when is_list(List) -> encode_list(List, []);
encode(Map) when is_map(Map) -> encode_map(maps:to_list(Map), []).
%% Append the escaped form of each byte of the input string to Buf (which
%% the caller seeds with the opening quote) and close the quote at the end.
%% Fix: control characters other than \r \t \n \f used to be copied through
%% raw, producing invalid JSON (RFC 8259 requires escaping of all bytes
%% below 16#20).  \b now encodes as "\\b"; the remaining control characters
%% encode as "\\uXXXX".
encode_string(<<>>, Buf) -> <<Buf/binary, $">>;
encode_string(<<$\b, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $b>>);
encode_string(<<$\r, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $r>>);
encode_string(<<$\t, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $t>>);
encode_string(<<$\n, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $n>>);
encode_string(<<$\f, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $f>>);
encode_string(<<$\\, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $\\>>);
encode_string(<<$", T/binary>>, Buf) -> encode_string(T, <<Buf/binary, $\\, $">>);
encode_string(<<C, T/binary>>, Buf) when C < 16#20 ->
    %% ~4.16.0B: base-16, zero-padded to width 4, uppercase (e.g. "001F").
    Esc = iolist_to_binary(io_lib:format("\\u~4.16.0B", [C])),
    encode_string(T, <<Buf/binary, Esc/binary>>);
encode_string(<<H, T/binary>>, Buf) -> encode_string(T, <<Buf/binary, H>>).
%% Render list elements comma-separated inside brackets; Acc collects the
%% already-encoded elements in reverse.
encode_list([], Acc) -> [$[, lists:reverse(Acc), $]];
encode_list([Last], Acc) -> encode_list([], [encode(Last) | Acc]);
encode_list([Item | Rest], Acc) -> encode_list(Rest, [$, | [encode(Item) | Acc]]).
%% Render map members comma-separated inside braces; Acc collects the
%% already-encoded `"key":value' pairs in reverse.
encode_map([], Acc) -> [${, lists:reverse(Acc), $}];
encode_map([Pair], Acc) -> encode_map([], [encode_map_pair(Pair) | Acc]);
encode_map([Pair | Rest], Acc) -> encode_map(Rest, [$, | [encode_map_pair(Pair) | Acc]]).
%% A single object member; only binary keys encode to JSON strings,
%% anything else is a badarg.
encode_map_pair({Key, Value}) when is_binary(Key) -> [encode(Key), $:, encode(Value)];
encode_map_pair(_) -> error(badarg).
%% Fix: the Reason union previously omitted `unexpected_token', which
%% decode_value/1 raises for input that cannot start any JSON token.
-spec decode(JSON) -> {ok, Term, Rest} | {error, Reason}
    when JSON :: iodata(),
         Term :: term(),
         Rest :: binary(),
         Reason :: infinity
                 | invalid_escape
                 | invalid_key
                 | missing_colon
                 | unexpected_token
                 | unterminated_array
                 | unterminated_exponent
                 | unterminated_fraction
                 | unterminated_integer
                 | unterminated_object
                 | unterminated_string.
%% @doc Decode JSON value to Term. Unconsumed characters are returned as Rest.
%%
%% Mapping: JSON `true'/`false'/`null' decode to the atoms of the same name;
%% a JSON string decodes to a {@type binary()}; a JSON object to a
%% {@type map()}; a JSON array to a {@type list()}; a JSON number to a
%% {@type number()}.
%%
%% Decoding is more lenient than the standard in the following:
%% <ul>
%% <li>any character less than or equal to 32 on ASCII table is treated as whitespace</li>
%% <li>
%% commas are treated as whitespace:
%% ```
%% {ok, [1,2,3,4], _} = json:decode(<<",[,,,1 2,3, ,4]">>).
%% {ok, [], _} = json:decode(<<"[,, ,,]">>).
%% '''
%% </li>
%% <li>
%% whitespace is optional on token boundaries:
%% ```
%% {ok, [<<"hello">>, true, 1, null], _} = json:decode(<<"[\"hello\"true1null]">>).
%% '''
%% </li>
%% <li>
%% numbers may contain leading zeros:
%% ```
%% {ok, 4, _} = json:decode(<<"0004">>).
%% {ok, 1.0, _} = json:decode(<<"1e-0000">>).
%% '''
%% </li>
%% <li>
%% numbers may be prefixed with a plus sign:
%% ```
%% {ok, 100, _} = json:decode(<<"+100">>).
%% '''
%% </li>
%% </ul>
%%
%% Input starting with a byte that cannot begin any JSON token yields
%% `{error, unexpected_token}'.
%%
%% Does not handle JSON that contains numbers with a fraction part and/or
%% exponent larger than 1.8e308 (IEEE 754-1985 double precision):
%% ```
%% {error, infinity} = json:decode(<<"1e1000">>).
%% '''
%%
%% Does <em>not</em> preserve key order in objects, as per RFC 7159.
%%
%% Raises {@type badarg} if JSON is not of type {@type iodata()}.
decode(String) when is_list(String) -> decode(list_to_binary(String));
decode(Bin) when is_binary(Bin) ->
    %% try...of keeps the success branch outside the protected region:
    %% only exceptions raised by decode_value/1 itself are converted.
    try decode_value(Bin) of
        {Rest, Value} -> {ok, Value, Rest}
    catch error:Reason -> {error, Reason}
    end;
decode(_) -> error(badarg).
%% Dispatch on the first significant byte of a JSON value.  All clause
%% patterns are mutually disjoint, so the ordering below (whitespace and
%% keyword literals first) is behaviourally identical to any other.
decode_value(<<H, T/binary>>) when ?is_space(H) -> decode_value(T);
decode_value(<<"true", T/binary>>) -> {T, true};
decode_value(<<"false", T/binary>>) -> {T, false};
decode_value(<<"null", T/binary>>) -> {T, null};
decode_value(<<$", T/binary>>) -> decode_string(T, <<>>);
decode_value(<<$[, T/binary>>) -> decode_array(T, []);
decode_value(<<${, T/binary>>) -> decode_object(T, #{});
decode_value(<<H, T/binary>>) when ?is_digit(H); ?is_sign(H) -> decode_number(T, [H]);
decode_value(_) -> error(unexpected_token).
%% Parse a JSON number.  Buf accumulates the characters seen so far in
%% REVERSE order; it already contains the first character consumed by
%% decode_value/1 (a digit or sign).
decode_number(Bin, Buf) ->
    {Rest, Acc} = decode_sign(Bin, Buf),
    decode_integer(Rest, Acc).
%% Integer part.  On 'e'/'E' with no fraction, ".0" is spliced in before
%% the exponent marker (Buf is reversed, hence [H, $0, $.] ++ Buf) so that
%% list_to_float/1 accepts strings like "1e5" as "1.0e5".
decode_integer(<<H, T/binary>>, Buf) when ?is_digit(H) -> decode_integer(T, [H|Buf]);
decode_integer(<<$., T/binary>>, Buf) -> decode_fraction(T, [$.|Buf]);
decode_integer(<<H, T/binary>>, Buf) when ?is_exponent(H) ->
    {Rest1, Acc} = decode_sign(T, [H, $0, $.] ++ Buf),
    decode_exponent(Rest1, Acc);
%% A lone sign with no digits after it is an error.
decode_integer(_, [H|_]) when ?is_sign(H) -> error(unterminated_integer);
decode_integer(Bin, Buf) -> {Bin, list_to_integer(lists:reverse(Buf))}.
%% Fraction digits after the decimal point; may hand off to an exponent.
decode_fraction(<<H, T/binary>>, Buf) when ?is_digit(H) -> decode_fraction(T, [H|Buf]);
decode_fraction(<<H, T/binary>>, Buf) when ?is_exponent(H) ->
    {Rest, Acc} = decode_sign(T, [H|Buf]),
    decode_exponent(Rest, Acc);
%% "1." or "1.e" with no digits is an error (head of reversed Buf tells us).
decode_fraction(_, [H|_]) when H == $.; ?is_sign(H) -> error(unterminated_fraction);
decode_fraction(Bin, Buf) -> {Bin, list_to_float(lists:reverse(Buf))}.
%% Exponent digits.  list_to_float/1 raises badarg on overflow (> ~1.8e308),
%% which is reported as `infinity'.
decode_exponent(<<H, T/binary>>, Buf) when ?is_digit(H) -> decode_exponent(T, [H|Buf]);
decode_exponent(_, [H|_]) when ?is_exponent(H); ?is_sign(H) -> error(unterminated_exponent);
decode_exponent(Bin, Buf) ->
    try list_to_float(lists:reverse(Buf)) of
        Float -> {Bin, Float}
    catch error:badarg -> error(infinity)
    end.
%% Consume an optional leading '+' or '-' into the (reversed) accumulator.
decode_sign(<<C, Rest/binary>>, Acc) when C =:= $+; C =:= $- -> {Rest, [C | Acc]};
decode_sign(Bin, Acc) -> {Bin, Acc}.
%% Scan a JSON string body (opening quote already consumed), accumulating
%% decoded bytes into the binary Buf.  Returns {Rest, String}.
decode_string(<<$", T/binary>>, Buf) -> {T, Buf};
decode_string(<<$\\, $u, C1, C2, C3, C4, T/binary>>, Buf) ->
    try C = list_to_integer([C1, C2, C3, C4], 16),
        %% 16#D800..16#DBFF is a UTF-16 high surrogate: a \uXXXX low
        %% surrogate must follow, and the pair decodes to one code point.
        if C > 16#D7FF, C < 16#DC00 ->
            <<$\\, $u, D1, D2, D3, D4, T2/binary>> = T,
            D = list_to_integer([D1, D2, D3, D4], 16),
            Point = xmerl_ucs:from_utf16be(<<C:16/big-unsigned-integer, D:16/big-unsigned-integer>>),
            Char = unicode:characters_to_binary(Point),
            decode_string(T2, <<Buf/binary, Char/binary>>);
        true ->
            Char = unicode:characters_to_binary([C]),
            decode_string(T, <<Buf/binary, Char/binary>>)
        end
    catch error:badarg -> error(invalid_escape)
    end;
%% Fix: \b is backspace (16#08); it used to decode to a space ($\s).
decode_string(<<$\\, $b, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, $\b>>);
decode_string(<<$\\, $f, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, $\f>>);
decode_string(<<$\\, $n, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, $\n>>);
decode_string(<<$\\, $r, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, $\r>>);
decode_string(<<$\\, $t, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, $\t>>);
%% Any other escaped character stands for itself (covers \" \\ \/).
decode_string(<<$\\, H, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, H>>);
decode_string(<<H, T/binary>>, Buf) -> decode_string(T, <<Buf/binary, H>>);
decode_string(<<>>, _) -> error(unterminated_string).
%% Collect array elements (in reverse) until the closing bracket.
%% Whitespace and commas between elements are skipped.
decode_array(<<H, T/binary>>, Acc) when ?is_space(H) -> decode_array(T, Acc);
decode_array(<<$], T/binary>>, Acc) -> {T, lists:reverse(Acc)};
decode_array(<<>>, _Acc) -> error(unterminated_array);
decode_array(Bin, Acc) ->
    {Rest, Elem} = decode_value(Bin),
    decode_array(Rest, [Elem | Acc]).
%% Collect `"key": value' members into a map until the closing brace.
%% A member must start with a quoted key; later duplicates overwrite
%% earlier ones.
decode_object(<<H, T/binary>>, Acc) when ?is_space(H) -> decode_object(T, Acc);
decode_object(<<$}, T/binary>>, Acc) -> {T, Acc};
decode_object(<<>>, _Acc) -> error(unterminated_object);
decode_object(<<$", Rest0/binary>>, Acc) ->
    {Rest1, Key} = decode_string(Rest0, <<>>),
    {Rest2, $:} = decode_colon(Rest1),
    {Rest3, Value} = decode_value(Rest2),
    decode_object(Rest3, Acc#{Key => Value});
decode_object(_, _) -> error(invalid_key).
%% Skip whitespace (and stray commas) up to the colon that separates an
%% object key from its value.
decode_colon(<<$:, Rest/binary>>) -> {Rest, $:};
decode_colon(<<C, Rest/binary>>) when C =< 32; C =:= $, -> decode_colon(Rest);
decode_colon(_) -> error(missing_colon).
-module(thoas_encode).
-compile([
{no_auto_import, [float/1]},
{inline, [{float, 1}, {integer, 1}, {error_invalid_byte_error, 2}]}
]).
%%% Normal reflection based API that turns Erlang terms into JSON.
-export([encode/2]).
%%% Non-recursive API that expects sub-objects to already be encoded. Likely
%%% useful for statically typed languages such as Gleam.
-export([
true/0, false/0, null/0, boolean/1, integer/1, float/1, string/1,
non_recursive_array/1, non_recursive_object/1
]).
%%% A boolean value as JSON ("true" or "false").
-spec boolean(boolean()) -> iodata().
boolean(B) when is_boolean(B) ->
    atom_to_binary(B, utf8).
%%% The JSON value `true`.
%%% Returned as a binary so it can be spliced into any iolist.
-spec true() -> iodata().
true() -> <<"true">>.
%%% The JSON value `false`.
-spec false() -> iodata().
false() -> <<"false">>.
%%% The JSON value `null`.
-spec null() -> iodata().
null() -> <<"null">>.
%%% A float in JSON format.
%%% Uses OTP's internal io_lib_format:fwrite_g/1, which produces the
%%% shortest decimal representation that round-trips to the same float.
%%% NOTE(review): io_lib_format is an undocumented OTP-internal module.
-spec float(float()) -> iodata().
float(Float) ->
    io_lib_format:fwrite_g(Float).
%%% An integer in JSON format (decimal digit string).
-spec integer(integer()) -> iodata().
integer(N) ->
    erlang:integer_to_list(N).
%%% A string in JSON format, using normal JSON escaping of the contents
%%% (delegates to encode_string/2 with the escape_json/3 escaper).
-spec string(binary()) -> iodata().
string(String) ->
    encode_string(String, fun escape_json/3).
%%% An array of JSON values.
%%%
%%% Important: every element must already be encoded JSON (iodata) —
%%% the elements are emitted verbatim, separated by commas; they are
%%% NOT processed further.
-spec non_recursive_array(list(iodata())) -> iodata().
non_recursive_array([]) ->
    <<"[]">>;
non_recursive_array([Head | Tail]) ->
    [$[, Head | non_recursive_array_loop(Tail)].
%% Emit ",Elem" for each remaining element, then the closing bracket.
non_recursive_array_loop([]) ->
    [$]];
non_recursive_array_loop([Head | Tail]) ->
    [$,, Head | non_recursive_array_loop(Tail)].
%%% An object of JSON values.
%%%
%%% Important: The values supplied in the list are not processed and **must**
%%% already encoded into JSON using one of the other functions, such as
%%% `integer/1` or `string/1`.
%%% Keys are processed as strings and get escaped using normal JSON escaping.
%%%
%% NOTE(review): key/2 is defined elsewhere in this module; presumably it
%% escapes Key using the supplied escape fun — confirm before relying on it.
-spec non_recursive_object(list({binary(), iodata()})) -> iodata().
non_recursive_object([]) ->
    <<"{}">>;
non_recursive_object([{Key, Value} | Tail]) ->
    Escape = fun escape_json/3,
    [
        <<"{\"">>, key(Key, Escape), <<"\":">>, Value
        | non_recursive_object_loop(Tail, Escape)
    ].
%% Emit `,"key":value' for each remaining member, then the closing brace.
non_recursive_object_loop([], _Escape) ->
    [$}];
non_recursive_object_loop([{Key, Value} | Tail], Escape) ->
    [
        <<",\"">>, key(Key, Escape), <<"\":">>, Value
        | non_recursive_object_loop(Tail, Escape)
    ].
%%%%
%%%% Recursive encoding functions
%%%%
%% Entry point for reflection-based encoding: Opts selects the string
%% escaping mode (see escape_function/1).  value/2 — defined later in this
%% module — performs the actual term walk.
encode(Value, Opts) ->
    value(Value, escape_function(Opts)).
%% null/true/false map to the bare JSON literals; any other atom is
%% encoded as a JSON string of its UTF-8 name.
encode_atom(null, _Escape) -> <<"null">>;
encode_atom(true, _Escape) -> <<"true">>;
encode_atom(false, _Escape) -> <<"false">>;
encode_atom(Atom, Escape) -> encode_string(atom_to_binary(Atom, utf8), Escape).
%% Wrap String in quotes, escaping the body with the selected escape fun,
%% called as Escape(Data, OriginalInput, SkipOffset).
encode_string(String, Escape) ->
    [$", Escape(String, String, 0), $"].
%% Escape sequence for a single byte that must be escaped inside a JSON
%% string: control characters (0-31) map to "\\uXXXX" or their short
%% forms (\b \t \n \f \r), plus `"', `/' and `\'.  The throw(error)
%% clauses cover bytes that never need escaping — callers are expected to
%% copy those verbatim, so reaching them indicates a caller bug.
escape(0) -> <<"\\u0000">>;
escape(1) -> <<"\\u0001">>;
escape(2) -> <<"\\u0002">>;
escape(3) -> <<"\\u0003">>;
escape(4) -> <<"\\u0004">>;
escape(5) -> <<"\\u0005">>;
escape(6) -> <<"\\u0006">>;
escape(7) -> <<"\\u0007">>;
escape(8) -> <<"\\b">>;
escape(9) -> <<"\\t">>;
escape(10) -> <<"\\n">>;
escape(11) -> <<"\\u000B">>;
escape(12) -> <<"\\f">>;
escape(13) -> <<"\\r">>;
escape(14) -> <<"\\u000E">>;
escape(15) -> <<"\\u000F">>;
escape(16) -> <<"\\u0010">>;
escape(17) -> <<"\\u0011">>;
escape(18) -> <<"\\u0012">>;
escape(19) -> <<"\\u0013">>;
escape(20) -> <<"\\u0014">>;
escape(21) -> <<"\\u0015">>;
escape(22) -> <<"\\u0016">>;
escape(23) -> <<"\\u0017">>;
escape(24) -> <<"\\u0018">>;
escape(25) -> <<"\\u0019">>;
escape(26) -> <<"\\u001A">>;
escape(27) -> <<"\\u001B">>;
escape(28) -> <<"\\u001C">>;
escape(29) -> <<"\\u001D">>;
escape(30) -> <<"\\u001E">>;
escape(31) -> <<"\\u001F">>;
escape(32) -> throw(error);
escape(33) -> throw(error);
escape(34) -> <<"\\\"">>;
escape(X) when X < 47 andalso X > 34 -> throw(error);
escape(47) -> <<"\\/">>;
escape(X) when X < 92 andalso X > 47 -> throw(error);
escape(92) -> <<"\\\\">>.
%% Pick the string-escaping fun from the `escape` option (default: json).
%% An unrecognised mode fails with case_clause, as before.
escape_function(Opts) ->
    Mode = maps:get(escape, Opts, json),
    case Mode of
        json -> fun escape_json/3;
        html -> fun escape_html/3;
        unicode -> fun escape_unicode/3;
        javascript -> fun escape_js/3
    end.
%% HTML-safe JSON string escaping: like escape_json/3 but additionally
%% escapes `/' and the line separators U+2028/U+2029.
escape_html(Data, Input, Skip) ->
    escape_html(Data, [], Input, Skip).
%% Byte-level state machine.  Runs of bytes that need no escaping are
%% delegated to escape_html_chunk/5, which copies them out of Input in one
%% binary_part/3 slice instead of byte-by-byte.
%% Fix: the first two guards were `Byte < 33' / `Byte =:= 33', which sent
%% a space (32) into escape/1 — whose 32-clause is throw(error).  They now
%% read `Byte < 32' / `Byte < 34', matching escape_html_chunk/5,
%% escape_js/4 and escape_json/4.
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 32 ->
    Acc2 = [Acc | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 34 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 34 ->
    Acc2 = [Acc | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 47 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 47 ->
    Acc2 = [Acc | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 92 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 92 ->
    Acc2 = [Acc | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + 1);
escape_html(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 128 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 1);
escape_html(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 2047 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 2);
escape_html(<<8232/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    Acc2 = [Acc | <<"\\u2028">>],
    escape_html(Rest, Acc2, Input, Skip + 3);
escape_html(<<8233/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    Acc2 = [Acc | <<"\\u2029">>],
    escape_html(Rest, Acc2, Input, Skip + 3);
escape_html(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 65535 ->
    escape_html_chunk(Rest, Acc, Input, Skip, 3);
escape_html(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    escape_html_chunk(Rest, Acc, Input, Skip, 4);
escape_html(<<>>, Acc, _Input, _Skip) ->
    Acc;
escape_html(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip) ->
    error_invalid_byte_error(Byte, Input).
%% Extend the current unescaped run by Len bytes; when a byte that needs
%% escaping (or end of input) is reached, flush the run — Input at offset
%% Skip, length Len — with one binary_part/3 slice and return control to
%% escape_html/4.
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 32 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 34 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 34 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 47 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 47 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 92 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 92 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_html(Rest, Acc2, Input, Skip + Len + 1);
escape_html_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 128 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 1);
%% Multi-byte UTF-8 code points extend the run by their encoded byte width.
escape_html_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 2047 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 2);
%% U+2028/U+2029 are escaped for HTML safety (3 input bytes each).
escape_html_chunk(<<8232/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | <<"\\u2028">>],
    escape_html(Rest, Acc2, Input, Skip + Len + 3);
escape_html_chunk(<<8233/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | <<"\\u2029">>],
    escape_html(Rest, Acc2, Input, Skip + Len + 3);
escape_html_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 65535 ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 3);
escape_html_chunk(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    escape_html_chunk(Rest, Acc, Input, Skip, Len + 4);
escape_html_chunk(<<>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    [Acc | Part];
%% A byte that is not valid UTF-8 reaches here and is reported.
escape_html_chunk(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip, _Len) ->
    error_invalid_byte_error(Byte, Input).
%% JavaScript-safe escaping: standard JSON escapes plus the line
%% separators U+2028/U+2029 (legal in JSON but not inside JS string
%% literals).  Unlike the html mode, `/' is NOT escaped.
escape_js(Data, Input, Skip) ->
    escape_js(Data, [], Input, Skip).
%% Byte-level state machine; unescaped runs are delegated to
%% escape_js_chunk/5 and copied out of Input in single slices.
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 32 ->
    Acc2 = [Acc | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + 1);
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 34 ->
    escape_js_chunk(Rest, Acc, Input, Skip, 1);
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 34 ->
    Acc2 = [Acc | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + 1);
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 92 ->
    escape_js_chunk(Rest, Acc, Input, Skip, 1);
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 92 ->
    Acc2 = [Acc | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + 1);
escape_js(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 128 ->
    escape_js_chunk(Rest, Acc, Input, Skip, 1);
escape_js(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 2047 ->
    escape_js_chunk(Rest, Acc, Input, Skip, 2);
escape_js(<<8232/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    Acc2 = [Acc | <<"\\u2028">>],
    escape_js(Rest, Acc2, Input, Skip + 3);
escape_js(<<8233/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    Acc2 = [Acc | <<"\\u2029">>],
    escape_js(Rest, Acc2, Input, Skip + 3);
escape_js(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 65535 ->
    escape_js_chunk(Rest, Acc, Input, Skip, 3);
escape_js(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    escape_js_chunk(Rest, Acc, Input, Skip, 4);
escape_js(<<>>, Acc, _Input, _Skip) ->
    Acc;
escape_js(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip) ->
    error_invalid_byte_error(Byte, Input).
%% Extend the current unescaped run by Len bytes; on a byte that needs
%% escaping (or end of input), flush Input[Skip .. Skip+Len) with one
%% binary_part/3 slice and return control to escape_js/4.
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 32 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + Len + 1);
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 34 ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 34 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + Len + 1);
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 92 ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 92 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_js(Rest, Acc2, Input, Skip + Len + 1);
escape_js_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 128 ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 1);
%% Multi-byte UTF-8 code points extend the run by their encoded byte width.
escape_js_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 2047 ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 2);
%% U+2028/U+2029 are escaped for JS safety (3 input bytes each).
escape_js_chunk(<<8232/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | <<"\\u2028">>],
    escape_js(Rest, Acc2, Input, Skip + Len + 3);
escape_js_chunk(<<8233/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | <<"\\u2029">>],
    escape_js(Rest, Acc2, Input, Skip + Len + 3);
escape_js_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 65535 ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 3);
escape_js_chunk(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    escape_js_chunk(Rest, Acc, Input, Skip, Len + 4);
escape_js_chunk(<<>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    [Acc | Part];
%% A byte that is not valid UTF-8 reaches here and is reported.
escape_js_chunk(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip, _Len) ->
    error_invalid_byte_error(Byte, Input).
%% Plain JSON escaping: only control characters (< 32), `"' and `\' are
%% escaped; everything else — including `/' and U+2028/U+2029 — passes
%% through unchanged.
escape_json(Data, Input, Skip) ->
    escape_json(Data, [], Input, Skip).
%% Byte-level state machine; unescaped runs are delegated to
%% escape_json_chunk/5 and copied out of Input in single slices.
escape_json(<<Byte/integer,Rest/bitstring>>, Acc1, Input, Skip) when Byte < 32 ->
    Acc2 = [Acc1 | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + 1);
escape_json(<<Byte/integer,Rest/bitstring>>, Acc1, Input, Skip) when Byte < 34 ->
    escape_json_chunk(Rest, Acc1, Input, Skip, 1);
escape_json(<<Byte/integer,Rest/bitstring>>, Acc1, Input, Skip) when Byte =:= 34 ->
    Acc2 = [Acc1 | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + 1);
escape_json(<<Byte/integer,Rest/bitstring>>, Acc1, Input, Skip) when Byte < 92 ->
    escape_json_chunk(Rest, Acc1, Input, Skip, 1);
escape_json(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte =:= 92 ->
    Acc2 = [Acc | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + 1);
escape_json(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip) when Byte < 128 ->
    escape_json_chunk(Rest, Acc, Input, Skip, 1);
%% Multi-byte UTF-8 code points are never escaped in this mode; only
%% their encoded byte width matters for the run length.
escape_json(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 2047 ->
    escape_json_chunk(Rest, Acc, Input, Skip, 2);
escape_json(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip) when Char =< 65535 ->
    escape_json_chunk(Rest, Acc, Input, Skip, 3);
escape_json(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip) ->
    escape_json_chunk(Rest, Acc, Input, Skip, 4);
escape_json(<<>>, Acc, _Input, _Skip) ->
    Acc;
escape_json(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip) ->
    error_invalid_byte_error(Byte, Input).
%% Extend the current unescaped run by Len bytes; on a byte that needs
%% escaping (or end of input), flush Input[Skip .. Skip+Len) with one
%% binary_part/3 slice and return control to escape_json/4.
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 32 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + Len + 1);
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 34 ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 34 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + Len + 1);
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 92 ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 1);
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte =:= 92 ->
    Part = binary_part(Input, Skip, Len),
    Acc2 = [Acc, Part | escape(Byte)],
    escape_json(Rest, Acc2, Input, Skip + Len + 1);
escape_json_chunk(<<Byte/integer,Rest/bitstring>>, Acc, Input, Skip, Len) when Byte < 128 ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 1);
%% Multi-byte UTF-8 code points extend the run by their encoded byte width.
escape_json_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 2047 ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 2);
escape_json_chunk(<<Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) when Char =< 65535 ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 3);
escape_json_chunk(<<_Char/utf8,Rest/bitstring>>, Acc, Input, Skip, Len) ->
    escape_json_chunk(Rest, Acc, Input, Skip, Len + 4);
escape_json_chunk(<<>>, Acc, Input, Skip, Len) ->
    Part = binary_part(Input, Skip, Len),
    [Acc | Part];
%% A byte that is not valid UTF-8 reaches here and is reported.
escape_json_chunk(<<Byte/integer,_Rest/bitstring>>, _Acc, Input, _Skip, _Len) ->
    error_invalid_byte_error(Byte, Input).
%% Unicode-escaping mode: delegates to escape_unicode/4 (whose clauses
%% follow), which writes "\\uXXXX" sequences for every byte/code point
%% that requires escaping in this mode.
escape_unicode(Data, Input, Skip) ->
    escape_unicode(Data, [], Input, Skip).
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 0 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 1 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 2 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 3 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 4 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 5 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 6 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 7 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 8 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 9 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 10 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 11 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 12 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 13 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 14 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 15 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 16 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 17 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 18 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 19 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 20 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 21 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 22 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 23 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 24 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 25 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 26 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 27 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 28 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 29 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 30 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 31 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 32 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 33 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 34 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 35 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 36 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 37 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 38 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 39 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 40 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 41 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 42 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 43 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 44 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 45 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 46 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 47 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 48 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 49 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 50 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 51 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 52 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 53 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 54 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 55 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 56 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 57 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 58 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 59 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 60 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 61 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 62 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 63 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 64 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 65 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 66 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 67 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 68 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 69 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 70 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 71 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 72 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 73 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 74 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 75 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 76 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 77 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 78 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 79 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 80 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 81 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 82 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 83 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 84 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 85 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 86 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 87 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 88 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 89 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 90 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 91 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 92 ->
Acc2 = [Acc | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 93 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 94 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 95 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 96 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 97 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 98 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 99 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 100 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 101 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 102 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 103 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 104 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 105 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 106 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 107 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 108 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 109 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 110 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 111 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 112 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 113 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 114 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 115 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 116 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 117 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 118 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 119 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 120 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 121 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 122 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 123 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 124 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 125 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 126 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip)
when Byte =:= 127 ->
escape_unicode_chunk(Rest, Acc, Input, Skip, 1);
escape_unicode(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip)
when Char =< 255 ->
Acc2 = [Acc, <<"\\u00">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + 2);
escape_unicode(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip)
when Char =< 2047 ->
Acc2 = [Acc, <<"\\u0">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + 2);
escape_unicode(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip)
when Char =< 4095 ->
Acc2 = [Acc, <<"\\u0">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + 3);
escape_unicode(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip)
when Char =< 65535 ->
Acc2 = [Acc, <<"\\u">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + 3);
escape_unicode(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip) ->
_char@2 = Char - 65536,
Acc2 =
[Acc,
<<"\\uD">>,
integer_to_list(2048 bor (_char@2 bsr 10), 16),
<<"\\uD">> |
integer_to_list(3072 bor _char@2 band 1023, 16)],
escape_unicode(Rest, Acc2, Input, Skip + 4);
escape_unicode(<<>>, Acc, _Input, _Skip) ->
Acc;
escape_unicode(<<Byte/integer,_Rest/bitstring>>,
_Acc, Input, _Skip) ->
error_invalid_byte_error(Byte, Input).
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 0 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 1 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 2 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 3 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 4 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 5 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 6 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 7 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 8 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 9 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 10 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 11 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 12 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 13 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 14 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 15 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 16 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 17 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 18 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 19 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 20 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 21 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 22 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 23 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 24 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 25 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 26 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 27 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 28 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 29 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 30 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 31 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 32 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 33 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 34 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 35 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 36 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 37 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 38 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 39 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 40 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 41 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 42 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 43 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 44 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 45 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 46 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 47 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 48 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 49 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 50 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 51 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 52 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 53 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 54 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 55 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 56 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 57 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 58 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 59 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 60 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 61 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 62 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 63 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 64 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 65 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 66 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 67 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 68 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 69 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 70 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 71 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 72 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 73 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 74 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 75 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 76 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 77 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 78 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 79 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 80 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 81 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 82 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 83 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 84 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 85 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 86 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 87 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 88 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 89 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 90 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 91 ->
escape_unicode_chunk(Rest, Acc, Input, Skip,
Len + 1);
escape_unicode_chunk(<<Byte/integer,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Byte =:= 92 ->
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part | escape(Byte)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 1);
%% Bytes 93..127 (']' through DEL) never need JSON escaping: extend the
%% current run of unescaped bytes by one and keep scanning.  This single
%% range-guarded clause replaces the machine-generated per-byte clauses
%% (one for each of Byte =:= 93 .. Byte =:= 127); behavior is identical
%% because every generated clause had the same body.
escape_unicode_chunk(<<Byte/integer, Rest/bitstring>>,
                     Acc, Input, Skip, Len)
    when Byte >= 93, Byte =< 127 ->
    escape_unicode_chunk(Rest, Acc, Input, Skip, Len + 1);
%% -- escape_unicode_chunk/5: non-ASCII and terminal clauses ----------------
%% Acc: iodata built so far; Input: the original binary; Skip/Len: offset
%% and byte length of the pending run of characters that needed no
%% escaping.  Each escaping clause flushes that run, appends a \uXXXX
%% escape, and resumes via escape_unicode/4, advancing Skip past the
%% escaped character's UTF-8 byte length.
escape_unicode_chunk(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Char =< 255 ->
%% U+0080..U+00FF: two UTF-8 bytes; Char >= 16#80 here, so the hex form
%% always has exactly two digits -> \u00XX.
Part = binary_part(Input, Skip, Len),
Acc2 =
[Acc, Part, <<"\\u00">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 2);
escape_unicode_chunk(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Char =< 2047 ->
%% U+0100..U+07FF: two UTF-8 bytes, three hex digits -> \u0XXX.
Part = binary_part(Input, Skip, Len),
Acc2 =
[Acc, Part, <<"\\u0">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 2);
escape_unicode_chunk(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Char =< 4095 ->
%% U+0800..U+0FFF: three UTF-8 bytes, still three hex digits -> \u0XXX.
Part = binary_part(Input, Skip, Len),
Acc2 =
[Acc, Part, <<"\\u0">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 3);
escape_unicode_chunk(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip, Len)
when Char =< 65535 ->
%% U+1000..U+FFFF: three UTF-8 bytes, four hex digits -> \uXXXX.
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part, <<"\\u">> | integer_to_list(Char, 16)],
escape_unicode(Rest, Acc2, Input, Skip + Len + 3);
escape_unicode_chunk(<<Char/utf8,Rest/bitstring>>,
Acc, Input, Skip, Len) ->
%% Astral plane (> U+FFFF): emit a UTF-16 surrogate pair.  With
%% C = Char - 16#10000: 2048 bor (C bsr 10) is 16#800..16#BFF, so
%% "\\uD" ++ hex gives \uD800..\uDBFF (high surrogate); 3072 bor
%% (C band 1023) is 16#C00..16#FFF, giving \uDC00..\uDFFF (low
%% surrogate).  Note band binds tighter than bor, so the unparenthesized
%% expression below is 3072 bor (C band 1023) as intended.
_char@2 = Char - 65536,
Part = binary_part(Input, Skip, Len),
Acc2 = [Acc, Part, <<"\\uD">>,
integer_to_list(2048 bor (_char@2 bsr 10), 16),
<<"\\uD">>
| integer_to_list(3072 bor _char@2 band 1023, 16)
],
escape_unicode(Rest, Acc2, Input, Skip + Len + 4);
escape_unicode_chunk(<<>>, Acc, Input, Skip, Len) ->
%% End of input: flush the final unescaped run and return the iodata.
Part = binary_part(Input, Skip, Len),
[Acc | Part];
escape_unicode_chunk(<<Byte/integer,_Rest/bitstring>>,
_Acc, Input, _Skip, _Len) ->
%% Anything that failed the /utf8 match above is an invalid byte.
error_invalid_byte_error(Byte, Input).
%% Normalise an object key (binary, atom or charlist) to a UTF-8 binary
%% and run the configured escape fun over it.  The escape fun receives the
%% binary twice (current data and original input) plus a start offset of 0.
key(Bin, Escape) when is_binary(Bin) ->
    Escape(Bin, Bin, 0);
key(Atom, Escape) when is_atom(Atom) ->
    Bin = atom_to_binary(Atom, utf8),
    Escape(Bin, Bin, 0);
key(Charlist, Escape) when is_list(Charlist) ->
    Bin = list_to_binary(Charlist),
    Escape(Bin, Bin, 0).
%% Encode a proper list as a JSON array, producing iodata.
list([], _Escape) ->
<<"[]">>;
list([First | Tail], Escape) ->
%% $[ then the first element; list_loop/2 appends ",<elem>" pairs and $].
[91, value(First, Escape) | list_loop(Tail, Escape)].
%% Emit the remaining elements, each preceded by $,, and close with $].
list_loop([], _Escape) ->
[93];
list_loop([First | Tail], Escape) ->
[44, value(First, Escape) | list_loop(Tail, Escape)].
%% Encode a non-empty key/value proplist as a JSON object.  "Naive"
%% because keys are emitted as-is with no duplicate detection; the empty
%% case is handled by the caller (value/2 emits <<"{}">> directly).
map_naive([{Key, Value} | Tail], Escape) ->
[<<"{\"">>,
key(Key, Escape),
<<"\":">>,
value(Value, Escape) |
map_naive_loop(Tail, Escape)].
%% Remaining pairs, each preceded by a comma; closes the object with $}.
map_naive_loop([], _Escape) ->
[$}];
map_naive_loop([{Key, Value} | Tail], Escape) ->
[<<",\"">>,
key(Key, Escape),
<<"\":">>,
value(Value, Escape) |
map_naive_loop(Tail, Escape)].
%% Raise {invalid_byte, <<"0xHH">>, Input} for a byte that is not valid
%% inside a UTF-8 encoded string.
error_invalid_byte_error(Byte, Input) ->
    Hex = integer_to_binary(Byte, 16),
    error({invalid_byte, <<"0x", Hex/binary>>, Input}).
%% Encode any supported Erlang term as JSON iodata.  Escape is the string
%% escaping fun threaded down to key/2 and encode_string/2.  Clause order
%% matters: atoms are tested before binaries and lists before maps.
value(V, Escape) when is_atom(V) ->
    encode_atom(V, Escape);
value(V, Escape) when is_binary(V) ->
    encode_string(V, Escape);
value(V, _Escape) when is_integer(V) ->
    integer(V);
value(V, _Escape) when is_float(V) ->
    float(V);
value(V, Escape) when is_list(V) ->
    list(V, Escape);
value(V, Escape) when is_map(V) ->
    case maps:to_list(V) of
        [] ->
            <<"{}">>;
        Pairs ->
            map_naive(Pairs, Escape)
    end.
-module( bencode ).
-author( "<NAME> <<EMAIL>>" ).
-export( [encode/1, decode/1] ).
-type value() :: binary() | integer() | list() | map().
%%
%% Encode a list of erlang terms to a binary string using bencoding
%%
%% Encode a list of Erlang terms to a bencoded binary.  NOTE(review): the
%% spec says value() but encode/2 pattern-matches a list of values --
%% callers appear to pass a list; confirm the intended top-level type.
-spec encode( value() ) -> binary().
encode( Val ) ->
encode( Val, [] ).
%% Walk the value list, pushing the bencoded form of each element onto
%% Out (in reverse), then flatten and binarise at the end.
-spec encode( [value()], [any()] ) -> binary().
encode( [Val | T], Out ) when is_list( Val ) ->
%% Nested list -> l...e
encode( T, [[$l, encode( Val ), $e] | Out] );
encode( [Val | T], Out ) when is_binary( Val ) ->
%% Binary string -> <length>:<bytes>
encode( T, [[want:string( byte_size( Val ) ), $:, Val] | Out ] );
encode( [Val | T], Out ) when is_integer( Val ) or is_float( Val ) ->
%% Integer -> i<value>e.  NOTE(review): floats are accepted here too,
%% although bencode itself has no float type -- confirm intended.
encode( T, [[$i, want:string( Val ), $e] | Out] );
encode( [Val | T], Out ) when is_map( Val ) ->
%% Map -> d<key><value>...e.  NOTE(review): the foldl emits entries in
%% reverse of maps:to_list/1 order and keys are not sorted; strict
%% bencode consumers require sorted keys -- verify against the spec.
encode( T, [[$d, lists:flatten( lists:foldl( fun( { Key, Value }, D ) -> [encode( [Key] ), encode( [Value] ) | D] end, [], maps:to_list( Val ) ) ), $e] | Out ] );
encode( [], Out ) ->
want:binary( lists:flatten( lists:reverse( Out ) ) ).
%%
%% Given a binary string describing bencoded values, decode the string into a list of erlang terms
%%
%% Decode a bencoded binary into a list of Erlang terms.
-spec decode( binary() ) -> [value()].
decode( Val ) when is_binary( Val ) ->
decode( want:string( Val ), [] ).
%% Recursive-descent decoder over a character list; Out accumulates the
%% decoded terms in reverse.
-spec decode( [any()], [any()] ) -> [value()].
decode( [$i | T], Out ) ->
%% i<integer>e
{ Rest, Integer } = decode_integer( T, $e ),
decode( Rest, [Integer | Out] );
decode( [$l | T], Out ) ->
%% l<items>e
{ Rest, List } = decode_list( T ),
decode( Rest, [ List | Out ] );
decode( [$e | T], Out ) ->
%% End marker: return {RemainingInput, Items}.  Only reached when called
%% recursively via decode_list/1 / decode_dict/1.
{ T, lists:reverse( Out ) };
decode( [$d | T], Out ) ->
%% d<key value pairs>e
{ Rest, Dict } = decode_dict( T ),
decode( Rest, [Dict | Out] );
decode( Value = [_StringStart | _], Out ) ->
%% Anything else begins a <length>:<bytes> string.
{ Rest, String } = decode_string( Value ),
decode( Rest, [ String | Out ] );
decode( [], Out ) ->
lists:reverse( Out ).
%% Decode a <length>:<bytes> string; returns {RemainingInput, Binary}.
decode_string( Value ) ->
{ Rest, Length } = decode_integer( Value, $: ),
{ lists:nthtail( Length, Rest ), want:binary( lists:sublist( Rest, Length ) ) }.
%% Decode list items up to the matching $e (handled by decode/2's $e
%% clause); returns {RemainingInput, Items}.
decode_list( Value ) ->
decode( Value, [] ).
%% Decode a dict body: parse it as a flat list, then pair alternating
%% keys and values into a map.  The fold's key/value state tag tracks
%% whether the next element is a key or a value.
decode_dict( T ) ->
{ Rest, List } = decode_list( T ),
{ _, PropList } = lists:foldl( fun( Key, { key, PropList } ) -> { value, [Key | PropList] };
( Value, { value, [Key | PropList] } ) -> { key, [{ Key, Value } | PropList] } end,
{ key, [] },
List ),
{ Rest, maps:from_list( lists:reverse( PropList ) ) }.
%% Parse decimal digits from the head of Chars up to (and consuming) the
%% given delimiter character; returns {RestAfterDelimiter, Integer}.
decode_integer( Chars, Delimiter ) ->
    decode_integer( Chars, [], Delimiter ).

decode_integer( [ Delimiter | Rest ], Digits, Delimiter ) ->
    { Rest, want:integer( lists:reverse( Digits ) ) };
decode_integer( [ Digit | Rest ], Digits, Delimiter ) ->
    decode_integer( Rest, [ Digit | Digits ], Delimiter ).
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_ejson_compare).
-export([less/2, less_json_ids/2, less_json/2]).
-on_load(init/0).
%% on_load hook: load the comparison NIF from couch's priv dir.  The
%% scheduler count is passed to the NIF as its load info.
init() ->
NumScheds = erlang:system_info(schedulers),
Dir = code:priv_dir(couch),
ok = erlang:load_nif(filename:join(Dir, ?MODULE), NumScheds).
% partitioned row comparison
%% Compare two EJSON values; returns a number whose sign gives the
%% ordering (0 when equal).  Partitioned keys {p, Partition, Key} compare
%% as the two-element list [Partition, Key].
less({p, PA, A}, {p, PB, B}) ->
less([PA, A], [PB, B]);
less(A, B) ->
try
less_nif(A, B)
catch
error:badarg ->
% Maybe the EJSON structure is too deep, fallback to Erlang land.
less_erl(A, B)
end.
%% Compare {Json, Id} pairs: primary order by JSON collation, ties broken
%% by the id using plain term comparison.  Returns a boolean.
less_json_ids({JsonA, IdA}, {JsonB, IdB}) ->
case less(JsonA, JsonB) of
0 ->
IdA < IdB;
Result ->
Result < 0
end.
%% Boolean "A sorts strictly before B" wrapper around less/2.
less_json(A, B) ->
less(A, B) < 0.
%% Stub that is replaced by the native implementation when the NIF loads
%% (see init/0); until then it simply delegates to the Erlang comparator.
less_nif(A, B) ->
less_erl(A, B).
%% Pure-Erlang comparator mirroring the NIF.  Returns a number whose sign
%% gives the ordering (0 when equal).  Type order: special atoms
%% (null/false/true) < numbers < strings < arrays < objects.
less_erl(A, A) -> 0;
less_erl(A, B) when is_atom(A), is_atom(B) -> atom_sort(A) - atom_sort(B);
less_erl(A, _) when is_atom(A) -> -1;
less_erl(_, B) when is_atom(B) -> 1;
%% For numbers the arithmetic difference itself carries the sign.
less_erl(A, B) when is_number(A), is_number(B) -> A - B;
less_erl(A, _) when is_number(A) -> -1;
less_erl(_, B) when is_number(B) -> 1;
%% Strings (binaries) compare with ICU collation via couch_util.
less_erl(A, B) when is_binary(A), is_binary(B) -> couch_util:collate(A, B);
less_erl(A, _) when is_binary(A) -> -1;
less_erl(_, B) when is_binary(B) -> 1;
less_erl(A, B) when is_list(A), is_list(B) -> less_list(A, B);
less_erl(A, _) when is_list(A) -> -1;
less_erl(_, B) when is_list(B) -> 1;
%% {Props} EJSON objects compare member-wise.
less_erl({A}, {B}) when is_list(A), is_list(B) -> less_props(A, B);
less_erl({A}, _) when is_list(A) -> -1;
less_erl(_, {B}) when is_list(B) -> 1.
%% Total order over the three JSON literal atoms: null < false < true.
atom_sort(Atom) when Atom =:= null -> 1;
atom_sort(Atom) when Atom =:= false -> 2;
atom_sort(Atom) when Atom =:= true -> 3.
%% Compare two JSON objects' member lists.  Keys compare with ICU
%% collation; on equal keys the values compare recursively.  An object
%% that is a strict prefix of the other sorts first.
less_props([], []) ->
0;
less_props([], [_ | _]) ->
-1;
less_props(_, []) ->
1;
less_props([{AKey, AValue} | RestA], [{BKey, BValue} | RestB]) ->
case couch_util:collate(AKey, BKey) of
0 ->
case less_erl(AValue, BValue) of
0 ->
less_props(RestA, RestB);
Result ->
Result
end;
Result ->
Result
end.
%% Compare two JSON arrays element-wise; a strict prefix sorts first.
%% Returns a number whose sign gives the ordering (0 when equal).
less_list([], []) ->
    0;
less_list([], [_ | _]) ->
    -1;
less_list(_, []) ->
    1;
less_list([HeadA | TailA], [HeadB | TailB]) ->
    Order = less_erl(HeadA, HeadB),
    if
        Order =:= 0 -> less_list(TailA, TailB);
        true -> Order
    end.
% Licensed under the Apache License, Version 2.0 (the "License"); you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% http://www.apache.org/licenses/LICENSE-2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_stats_aggregator).
-behaviour(gen_server).
-export([
fetch/0,
flush/0,
reload/0
]).
-export([
start_link/0,
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
code_change/3,
terminate/2
]).
-include("couch_stats.hrl").
-record(st, {
descriptions,
stats,
collect_timer,
reload_timer
}).
%% Return the most recently collected stats snapshot.
fetch() ->
{ok, Stats} = gen_server:call(?MODULE, fetch),
Stats.
%% Force an immediate collection pass (synchronous).
flush() ->
gen_server:call(?MODULE, flush).
%% Re-read the stats descriptions from all loaded applications.
reload() ->
gen_server:call(?MODULE, reload).
%% Start the aggregator registered under the module name.
start_link() ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
%% Load the metric descriptions and arm the one-shot collect/reload
%% timers (both re-armed when they fire).
init([]) ->
{ok, Descs} = reload_metrics(),
CT = erlang:send_after(get_interval(collect), self(), collect),
RT = erlang:send_after(get_interval(reload), self(), reload),
{ok, #st{descriptions=Descs, stats=[], collect_timer=CT, reload_timer=RT}}.
%% fetch: return the cached snapshot; flush: collect immediately;
%% reload: cancel the pending reload timer, re-read descriptions and
%% re-arm the timer.
handle_call(fetch, _From, #st{stats = Stats} = State) ->
    {reply, {ok, Stats}, State};
handle_call(flush, _From, State) ->
    {reply, ok, collect(State)};
handle_call(reload, _From, #st{reload_timer = OldRT} = State) ->
    %% Fix: the timer ref comes from erlang:send_after/3, which must be
    %% cancelled with erlang:cancel_timer/1.  timer:cancel/1 only accepts
    %% timer-server TRefs and silently returned {error, badarg} here, so
    %% the old timer was never cancelled and reload timers accumulated.
    erlang:cancel_timer(OldRT),
    {ok, Descriptions} = reload_metrics(),
    RT = update_timer(reload),
    {reply, ok, State#st{descriptions = Descriptions, reload_timer = RT}};
handle_call(Msg, _From, State) ->
    {stop, {unknown_call, Msg}, error, State}.
%% No casts are part of the protocol; stop on anything unexpected.
handle_cast(Msg, State) ->
    {stop, {unknown_cast, Msg}, State}.
handle_info(collect, State) ->
    %% collect/1 cancels and re-arms the collect timer itself.
    {noreply, collect(State)};
handle_info(reload, State) ->
    {ok, Descriptions} = reload_metrics(),
    %% Fix: erlang:send_after/3 timers are one-shot, so the reload timer
    %% must be re-armed here; previously it was not, and the periodic
    %% reload stopped after the first firing.
    RT = update_timer(reload),
    {noreply, State#st{descriptions = Descriptions, reload_timer = RT}};
handle_info(Msg, State) ->
    {stop, {unknown_info, Msg}, State}.
terminate(_Reason, _State) ->
    ok.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Project a metrics description list ([{Name, Props}]) onto a set of
%% {Name, Type} pairs, used to diff configured vs. registered metrics.
comparison_set(Metrics) ->
    Pairs = [{Name, proplists:get_value(type, Props)}
             || {Name, Props} <- Metrics],
    sets:from_list(Pairs).
%% Diff the configured metric descriptions against the metrics currently
%% registered in couch_stats: delete stale ones, create new ones, and
%% return the full current description list.
reload_metrics() ->
Current = load_metrics_for_applications(),
CurrentSet = comparison_set(Current),
Existing = couch_stats:list(),
ExistingSet = comparison_set(Existing),
ToDelete = sets:subtract(ExistingSet, CurrentSet),
ToCreate = sets:subtract(CurrentSet, ExistingSet),
%% Both folds are used purely for their side effects; the accumulator
%% is a throwaway nil.
sets:fold(
fun({Name, _}, _) -> couch_stats:delete(Name), nil end,
nil,
ToDelete
),
sets:fold(
fun({Name, Type}, _) ->
couch_stats:new(Type, Name),
nil
end,
nil,
ToCreate
),
{ok, Current}.
%% Gather stats descriptions from every loaded OTP application, skipping
%% applications that provide none.
load_metrics_for_applications() ->
    AppNames = [element(1, App) || App <- application:loaded_applications()],
    Collect =
        fun(AppName, Acc) ->
            case load_metrics_for_application(AppName) of
                error -> Acc;
                Descriptions -> Descriptions ++ Acc
            end
        end,
    lists:foldl(Collect, [], AppNames).
%% Read priv/stats_descriptions.cfg for one application; returns the
%% parsed term list, or the atom 'error' when the app has no priv dir or
%% no consultable descriptions file.
load_metrics_for_application(AppName) ->
case code:priv_dir(AppName) of
{error, _Error} ->
error;
Dir ->
case file:consult(Dir ++ "/stats_descriptions.cfg") of
{ok, Descriptions} ->
Descriptions;
{error, _Error} ->
error
end
end.
%% Sample every described metric, cache the snapshot in state, and re-arm
%% the collect timer.
collect(#st{collect_timer = OldCT} = State) ->
    %% Fix: the timer ref comes from erlang:send_after/3 and must be
    %% cancelled with erlang:cancel_timer/1; timer:cancel/1 silently
    %% returned {error, badarg} for it, leaking a pending timer on every
    %% explicit flush.
    erlang:cancel_timer(OldCT),
    Stats = lists:map(
        fun({Name, Props}) ->
            {Name, [{value, couch_stats:sample(Name)} | Props]}
        end,
        State#st.descriptions
    ),
    CT = update_timer(collect),
    State#st{stats = Stats, collect_timer = CT}.
%% Arm a one-shot timer delivering Type (collect | reload) to self().
%% (The dataset-metadata text fused onto the original last line has been
%% removed; it made the clause unparseable.)
update_timer(Type) ->
    Interval = get_interval(Type),
    erlang:send_after(Interval, self(), Type).

%% Interval in milliseconds per timer type; the collect interval is
%% configurable via "stats.interval", the reload interval is fixed.
get_interval(reload) -> 1000 * ?RELOAD_INTERVAL;
get_interval(collect) -> 1000 * config:get_integer("stats", "interval", ?DEFAULT_INTERVAL).
%%--------------------------------------------------------------------
%% Copyright (c) 2021 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_limiter_schema).
-include_lib("typerefl/include/types.hrl").
-export([ roots/0, fields/1, to_rate/1, to_capacity/1
, minimum_period/0, to_burst_rate/1, to_initial/1]).
-define(KILOBYTE, 1024).
-type limiter_type() :: bytes_in
| message_in
| connection
| message_routing.
-type bucket_name() :: atom().
-type zone_name() :: atom().
-type rate() :: infinity | float().
-type burst_rate() :: 0 | float().
-type capacity() :: infinity | number(). %% the capacity of the token bucket
-type initial() :: non_neg_integer(). %% initial capacity of the token bucket
%% the processing strategy after the failure of the token request
-type failure_strategy() :: force %% Forced to pass
| drop %% discard the current request
| throw. %% throw an exception
-typerefl_from_string({rate/0, ?MODULE, to_rate}).
-typerefl_from_string({burst_rate/0, ?MODULE, to_burst_rate}).
-typerefl_from_string({capacity/0, ?MODULE, to_capacity}).
-typerefl_from_string({initial/0, ?MODULE, to_initial}).
-reflect_type([ rate/0
, burst_rate/0
, capacity/0
, initial/0
, failure_strategy/0
]).
-export_type([limiter_type/0, bucket_name/0, zone_name/0]).
-import(emqx_schema, [sc/2, map/2]).
%% Hocon schema root: all limiter config lives under "emqx_limiter".
roots() -> [emqx_limiter].
%% One limiter tree per limiter type.
fields(emqx_limiter) ->
[ {bytes_in, sc(ref(limiter), #{})}
, {message_in, sc(ref(limiter), #{})}
, {connection, sc(ref(limiter), #{})}
, {message_routing, sc(ref(limiter), #{})}
];
%% A limiter: a global rate/burst, per-zone rate/bursts, and named buckets.
fields(limiter) ->
[ {global, sc(ref(rate_burst), #{})}
, {zone, sc(map("zone name", ref(rate_burst)), #{})}
, {bucket, sc(map("bucket id", ref(bucket)),
#{desc => "token bucket"})}
];
fields(rate_burst) ->
[ {rate, sc(rate(), #{})}
, {burst, sc(burst_rate(), #{default => "0/0s"})}
];
%% A bucket belongs to a zone and has aggregated plus per-client settings.
fields(bucket) ->
[ {zone, sc(atom(), #{desc => "the zone which the bucket in"})}
, {aggregated, sc(ref(bucket_aggregated), #{})}
, {per_client, sc(ref(client_bucket), #{})}
];
fields(bucket_aggregated) ->
[ {rate, sc(rate(), #{})}
, {initial, sc(initial(), #{default => "0"})}
, {capacity, sc(capacity(), #{})}
];
fields(client_bucket) ->
[ {rate, sc(rate(), #{})}
, {initial, sc(initial(), #{default => "0"})}
%% low_water_mark add for emqx_channel and emqx_session
%% both modules consume first and then check
%% so we need to use this value to prevent excessive consumption (e.g, consumption from an empty bucket)
, {low_water_mark, sc(initial(),
#{desc => "if the remaining tokens are lower than this value,
the check/consume will succeed, but it will be forced to hang for a short period of time",
default => "0"})}
, {capacity, sc(capacity(), #{desc => "the capacity of the token bucket"})}
, {divisible, sc(boolean(),
#{desc => "is it possible to split the number of tokens requested",
default => false})}
, {max_retry_time, sc(emqx_schema:duration(),
#{ desc => "the maximum retry time when acquire failed"
, default => "5s"})}
, {failure_strategy, sc(failure_strategy(),
#{ desc => "the strategy when all retry failed"
, default => force})}
].
%% Smallest supported sampling period, in milliseconds; rates elsewhere in
%% this module are expressed as tokens per this period.
minimum_period() ->
    100.
%%--------------------------------------------------------------------
%% Internal functions
%%--------------------------------------------------------------------
%% Shorthand for a hocon schema reference to a field group in this module.
ref(Field) -> hoconsc:ref(?MODULE, Field).
%% Parse a plain rate: "infinity" allowed, a zero rate is not.
to_rate(Str) ->
to_rate(Str, true, false).
%% Parse a burst rate: "0/..." allowed (no burst), "infinity" is not.
to_burst_rate(Str) ->
to_rate(Str, false, true).
%% Parse "<quota>/<interval>" into tokens per minimum_period/0 (100ms).
%% CanInfinity permits the literal "infinity"; CanZero permits "0/<t>"
%% (used by burst rates).  Returns {ok, Rate} | {error, Str}.
to_rate(Str, CanInfinity, CanZero) ->
Tokens = [string:trim(T) || T <- string:tokens(Str, "/")],
case Tokens of
["infinity"] when CanInfinity ->
{ok, infinity};
["0", _] when CanZero ->
{ok, 0}; %% for burst
[Quota, Interval] ->
%% Scale the quota so rates with different intervals are comparable.
{ok, Val} = to_capacity(Quota),
case emqx_schema:to_duration_ms(Interval) of
{ok, Ms} when Ms > 0 ->
{ok, Val * minimum_period() / Ms};
_ ->
{error, Str}
end;
_ ->
{error, Str}
end.
%% Parse a capacity such as "100", "10kb" or "infinity" into a number.
%% Fixes to both regexes: "[a-zA-z]" is a character-class typo -- the A-z
%% range also matches "[", "\\", "]", "^", "_" and "`" -- now "[a-zA-Z]";
%% and "\s" inside an Erlang string literal is just a space character,
%% not the regex whitespace class, so "\\s" is used to also accept tabs.
to_capacity(Str) ->
    Regex = "^\\s*(?:(?:([1-9][0-9]*)([a-zA-Z]*))|infinity)\\s*$",
    to_quota(Str, Regex).

%% Parse an initial token count such as "0" or "100kb" (zero allowed,
%% "infinity" not).
to_initial(Str) ->
    Regex = "^\\s*([0-9]+)([a-zA-Z]*)\\s*$",
    to_quota(Str, Regex).
%% Run a capacity regex over Str.  Capture groups are (number, unit);
%% a match with both applies the unit multiplier, a bare number passes
%% through, and an empty capture list means the "infinity" alternative
%% matched.  Returns {ok, Quota} | {error, Str}.
to_quota(Str, Regex) ->
{ok, MP} = re:compile(Regex),
Result = re:run(Str, MP, [{capture, all_but_first, list}]),
case Result of
{match, [Quota, Unit]} ->
Val = erlang:list_to_integer(Quota),
Unit2 = string:to_lower(Unit),
{ok, apply_unit(Unit2, Val)};
{match, [Quota]} ->
{ok, erlang:list_to_integer(Quota)};
{match, []} ->
{ok, infinity};
_ ->
{error, Str}
end.
%% Scale Val by its (already lower-cased) size-unit suffix; unknown units
%% throw a descriptive string.
apply_unit("", Val) ->
    Val;
apply_unit("kb", Val) ->
    Val * ?KILOBYTE;
apply_unit("mb", Val) ->
    Val * ?KILOBYTE * ?KILOBYTE;
apply_unit("gb", Val) ->
    Val * ?KILOBYTE * ?KILOBYTE * ?KILOBYTE;
apply_unit(Unit, _Val) ->
    throw("invalid unit:" ++ Unit).
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% This module provide OTP's dict interface built on top of ets.
%%
%% Note that while the interface is the same as dict the semantics
%% aren't quite. A Dict here is just a table identifier (although
%% this fact can't be used if you want dict/ets-based implementations
%% to be interchangeable) so changes made to the Dict modify the
%% underlying table. For merge/3, the first argument table is modified.
%%
%% The underlying ets table implementing a dict is deleted when the
%% process from which new() was invoked exits and the dict is only
%% writable from this process.
%%
%% The reason for this is to be able to swap dict/ets-based
%% implementations: the former is easier to debug, the latter is
%% faster for larger tables. It's also just a nice interface even
%% when there's no need for swapability.
%%
-module(diameter_dict).
-export([append/3,
append_list/3,
erase/2,
fetch/2,
fetch_keys/1,
filter/2,
find/2,
fold/3,
from_list/1,
is_key/2,
map/2,
merge/3,
new/0,
store/3,
to_list/1,
update/3,
update/4,
update_counter/3]).
%%% ----------------------------------------------------------
%%% EXPORTED INTERNAL FUNCTIONS
%%% ----------------------------------------------------------
%% Append a single value to the list stored under Key (created if absent).
append(Key, Value, Dict) ->
append_list(Key, [Value], Dict).
%% Append a list of values to the list stored under Key.
append_list(Key, ValueList, Dict)
when is_list(ValueList) ->
update(Key, fun(V) -> V ++ ValueList end, ValueList, Dict).
%% Remove Key; mutates the underlying ets table and returns the dict.
erase(Key, Dict) ->
ets:delete(Dict, Key),
Dict.
%% Fetch the value for Key; badmatch if the key is absent.
fetch(Key, Dict) ->
{ok, V} = find(Key, Dict),
V.
%% All keys, in unspecified order.
fetch_keys(Dict) ->
ets:foldl(fun({K,_}, Acc) -> [K | Acc] end, [], Dict).
%% Keep only entries for which Pred(K, V) returns true (filter/3 below
%% erases the rest); returns the same dict.
filter(Pred, Dict) ->
lists:foreach(fun({K,V}) -> filter(Pred(K,V), K, Dict) end, to_list(Dict)),
Dict.
%% {ok, Value} | error lookup.
find(Key, Dict) ->
case ets:lookup(Dict, Key) of
[{Key, V}] ->
{ok, V};
[] ->
error
end.
%% Fold Fun(K, V, Acc) over all entries, in unspecified order.
fold(Fun, Acc0, Dict) ->
ets:foldl(fun({K,V}, Acc) -> Fun(K, V, Acc) end, Acc0, Dict).
%% Build a new dict from a [{K, V}] list; later duplicates win.
from_list(List) ->
lists:foldl(fun store/2, new(), List).
is_key(Key, Dict) ->
ets:member(Dict, Key).
%% Replace each value with Fun(K, V); returns the same dict.
map(Fun, Dict) ->
lists:foreach(fun({K,V}) -> store(K, Fun(K,V), Dict) end, to_list(Dict)),
Dict.
%% Merge Dict2 into Dict1 (Dict1 is mutated, per the module comment);
%% Fun(K, V1, V2) resolves keys present in both.
merge(Fun, Dict1, Dict2) ->
fold(fun(K2,V2,_) ->
update(K2, fun(V1) -> Fun(K2, V1, V2) end, V2, Dict1)
end,
Dict1,
Dict2).
%% Create the backing ets table, owned by (and writable from) the
%% calling process.
new() ->
ets:new(?MODULE, [set]).
store(Key, Value, Dict) ->
store({Key, Value}, Dict).
to_list(Dict) ->
ets:tab2list(Dict).
%% Replace Key's value with Fun(OldValue); badmatch if absent.
update(Key, Fun, Dict) ->
store(Key, Fun(fetch(Key, Dict)), Dict).
%% Replace Key's value with Fun(OldValue), or store Initial if absent.
update(Key, Fun, Initial, Dict) ->
store(Key, map(Key, Fun, Dict, Initial), Dict).
%% Add Increment to the counter under Key (Initialised to Increment).
update_counter(Key, Increment, Dict)
when is_integer(Increment) ->
update(Key, fun(V) -> V + Increment end, Increment, Dict).
%%% ---------------------------------------------------------
%%% INTERNAL FUNCTIONS
%%% ---------------------------------------------------------
%% Insert a {Key, Value} pair and return the table id for chaining.
store({_,_} = T, Dict) ->
ets:insert(Dict, T),
Dict.
%% Helper for filter/2: keep the entry when the predicate returned true,
%% erase it otherwise.
filter(true, _, _) ->
ok;
filter(false, K, Dict) ->
erase(K, Dict).
%% Apply Fun to the value stored under Key, or return Error when the key
%% is absent.
map(Key, Fun, Dict, Error) ->
    Result = find(Key, Dict),
    case Result of
        {ok, Value} ->
            Fun(Value);
        error ->
            Error
    end.
-module(lists_practice).
-export([product/1, maximum/1, maxt/1]).
-include_lib("eunit/include/eunit.hrl").
%%%%%%%
% 2.6 %
%%%%%%%
% Combining list elements: the product of a list
%
% Using the template from the last session, define an Erlang function to give
% the product of a list of numbers. The product of an empty list is usually
% taken to be 1: why?
% Step-by-step evaluation
%
% product([2, 3, 4])
% product([2, 3, 4], 1)
% product([3, 4], 2)
% product([4], 6)
% product([], 24)
% 24
%% Product of a non-empty list of numbers, tail-recursively with an
%% accumulator seeded at the multiplicative identity 1.  An empty list
%% raises function_clause on purpose (see product_test/0).
product(Numbers) ->
    product(Numbers, 1).

product([Last], Acc) ->
    Last * Acc;
product([N | Rest], Acc) ->
    product(Rest, N * Acc).
%% Note: the exercise text says the empty product is usually 1, but this
%% implementation deliberately rejects [] with function_clause, and the
%% test pins that choice.
product_test() ->
?assertEqual(24, product([2, 3, 4])),
?assertEqual(2, product([2])),
?assertException(error, function_clause, product([])).
% Combining list elements: the maximum of a list
%
% Define an Erlang function to give the maximum of a list of numbers.
%
% You might find it helpful to use the function max/2 that gives the maximum of
% two values.
%
% It’s not obvious what should be the value for the maximum of an empty list of
% numbers. You could therefore choose to define maximum on lists with at least
% one element only: to do this you will need to change the base case of the
% template.
% Step-by-step evaluation
%
% maximum([3, 1, 2, 4])
% max(3, maximum([1, 2, 4])
% max(3, max(1, maximum([2, 4])))
% max(3, max(1, max(2, maximum([4|[]]))))
% max(3, max(1, max(2, 4)))
% max(3, max(1, 4))
% max(3, 4)
% 4
%% Maximum of a non-empty list, by direct (body) recursion; an empty list
%% raises function_clause.
maximum([Only]) ->
    Only;
maximum([Head | Tail]) ->
    max(Head, maximum(Tail)).
% maxt([3, 1, 2, 4])
% maxt([1, 2, 4], 3)
% maxt([2, 4], max(3, 1))
% maxt([2, 4], 3)
% maxt([4], max(3, 2))
% maxt([4|[]], 3)
% max(3, 4)
% 4
%
%% Tail-recursive maximum of a non-empty list; the running best-so-far is
%% carried in the accumulator.  An empty list raises function_clause.
maxt([Only]) ->
    Only;
maxt([Head | Tail]) ->
    maxt(Tail, Head).

maxt([Last], Best) ->
    max(Best, Last);
maxt([Head | Tail], Best) ->
    maxt(Tail, max(Best, Head)).
%% maximum/1 and maxt/1 must agree; both reject the empty list.
%% (The dataset-metadata text fused onto the original last line has been
%% removed; it made the clause unparseable.)
maximum_test() ->
    L = [3, 1, 2, 4],
    ?assertEqual(maxt(L), maximum(L)),
    ?assertEqual(2, maxt([2])),
    ?assertException(error, function_clause, maxt([])),
    ?assertException(error, function_clause, maximum([])).
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.