code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Phoenix.PubSub.PG2 do
  @moduledoc """
  Phoenix PubSub adapter based on `:pg`/`:pg2`.
  It runs on Distributed Erlang and is the default adapter.
  """
  @behaviour Phoenix.PubSub.Adapter
  use Supervisor

  ## Adapter callbacks

  # The adapter's node name is simply the Distributed Erlang node name.
  @impl true
  def node_name(_), do: node()

  # Forwards `message` on `topic` to the pg group members on *other* nodes.
  # Only remote pids receive the :forward_to_local tuple here; delivery on
  # the current node is presumably done elsewhere by Phoenix.PubSub itself —
  # TODO confirm against the caller.
  @impl true
  def broadcast(adapter_name, topic, message, dispatcher) do
    case pg_members(group(adapter_name)) do
      # Only the :pg2 backend returns this error shape; :pg.get_members/2
      # always returns a (possibly empty) list of pids.
      {:error, {:no_such_group, _}} ->
        {:error, :no_such_group}

      pids ->
        message = forward_to_local(topic, message, dispatcher)

        # Skip local pids: the tuple is only sent to remote PG2Worker
        # processes, which re-dispatch it on their own node.
        for pid <- pids, node(pid) != node() do
          send(pid, message)
        end

        :ok
    end
  end

  # Sends directly to the process registered under the group name on the
  # given node, bypassing any pg membership lookup.
  @impl true
  def direct_broadcast(adapter_name, node_name, topic, message, dispatcher) do
    send({group(adapter_name), node_name}, {:forward_to_local, topic, message, dispatcher})
    :ok
  end

  # Builds the tuple handled by Phoenix.PubSub.PG2Worker.handle_info/2.
  defp forward_to_local(topic, message, dispatcher) do
    {:forward_to_local, topic, message, dispatcher}
  end

  # Picks one of the pool's groups for the calling process by hashing the
  # caller pid over the group tuple cached in :persistent_term by init/1.
  #
  # NOTE(review): the explicit removal of PancakeV2.PubSub looks like a
  # project-specific patch; it only has an effect when that exact module name
  # is one of the stored groups (init/1 prepends `name` to the pool groups).
  # Confirm it is still required before changing it.
  defp group(adapter_name) do
    groups = :persistent_term.get(adapter_name)
    groups = Tuple.to_list(groups) |> List.delete(PancakeV2.PubSub) |> List.to_tuple()
    group_index = :erlang.phash2(self(), tuple_size(groups))
    elem(groups, group_index)
  end

  # Compile-time backend selection: OTP 23+ ships :pg, older releases only
  # provide :pg2.
  if Code.ensure_loaded?(:pg) do
    defp pg_members(group) do
      :pg.get_members(Phoenix.PubSub, group)
    end
  else
    defp pg_members(group) do
      :pg2.get_members({:phx, group})
    end
  end

  ## Supervisor callbacks

  @doc false
  def start_link(opts) do
    name = Keyword.fetch!(opts, :name)
    pool_size = Keyword.get(opts, :pool_size, 1)
    adapter_name = Keyword.fetch!(opts, :adapter_name)
    Supervisor.start_link(__MODULE__, {name, adapter_name, pool_size}, name: adapter_name)
  end

  # Starts one PG2Worker per pool slot plus one registered under `name`
  # itself, and caches the full group tuple in :persistent_term so group/1
  # can pick a group without message passing.
  @impl true
  def init({name, adapter_name, pool_size}) do
    groups =
      for number <- 1..pool_size do
        :"#{adapter_name}_#{number}"
      end

    groups = [name] ++ groups
    :persistent_term.put(adapter_name, List.to_tuple(groups))

    children =
      for group <- groups do
        Supervisor.child_spec({Phoenix.PubSub.PG2Worker, {name, group}}, id: group)
      end

    Supervisor.init(children, strategy: :one_for_one)
  end
end
defmodule Phoenix.PubSub.PG2Worker do
  @moduledoc false
  use GenServer

  require Logger

  @doc false
  def start_link({name, group}) do
    # When `name == group` the worker is registered under a distinct name so
    # the GenServer does not clash with the supervisor registered as `name`.
    gen_server_name = if name == group, do: Module.concat(name, Server), else: group
    GenServer.start_link(__MODULE__, {name, group}, name: gen_server_name)
  end

  # Joins the pg group so remote PG2 adapters can find this worker. The
  # state is just the pubsub server name used for local re-dispatch.
  @impl true
  def init({name, group}) do
    :ok = pg_join(group)
    {:ok, name}
  end

  # Re-dispatches a broadcast received from a remote node to local subscribers.
  @impl true
  def handle_info({:forward_to_local, topic, message, dispatcher}, pubsub) do
    Phoenix.PubSub.local_broadcast(pubsub, topic, message, dispatcher)
    {:noreply, pubsub}
  end

  # Unexpected messages are logged and dropped so the mailbox cannot grow
  # unbounded. Uses Logger rather than IO.inspect so the event goes through
  # the configured logging pipeline instead of raw stdout.
  @impl true
  def handle_info(message, pubsub) do
    Logger.warning("uncaught broadcast message: #{inspect(message)}")
    {:noreply, pubsub}
  end

  # Compile-time backend selection: OTP 23+ ships :pg; :pg2 additionally
  # requires the group to be created before joining.
  if Code.ensure_loaded?(:pg) do
    defp pg_join(group) do
      :ok = :pg.join(Phoenix.PubSub, group, self())
    end
  else
    defp pg_join(group) do
      namespace = {:phx, group}
      :ok = :pg2.create(namespace)
      :ok = :pg2.join(namespace, self())
      :ok
    end
  end
end
|
lib/phoenix/pubsub/pg2.ex
| 0.730578
| 0.406567
|
pg2.ex
|
starcoder
|
defmodule Infusionsoft.Endpoints.XML.Data do
  @moduledoc """
  Provides the raw endpoints to Infusionsoft's XML API for Data actions.
  """
  alias Infusionsoft.Endpoints.XML.Helpers

  @doc """
  https://developer.infusionsoft.com/docs/xml-rpc/#data-query-a-data-table

  Available options:
  page - defaults to 0
  limit - defaults to 1000
  order_by - defaults to Id
  ascending - defaults to false
  """
  @spec query_a_data_table(
          String.t(),
          map(),
          [String.t()],
          String.t(),
          nil | String.t(),
          keyword()
        ) :: {:ok, list(map)} | {:error, any()}
  def query_a_data_table(table, query_data, selected_fields, token, app, opts \\ []) do
    opts = Keyword.merge([page: 0, limit: 1000, order_by: "Id", ascending: false], opts)
    page = Keyword.fetch!(opts, :page)
    limit = Keyword.fetch!(opts, :limit)
    order_by = Keyword.fetch!(opts, :order_by)
    ascending = Keyword.fetch!(opts, :ascending)

    # The XML-RPC endpoint expects positional parameters in exactly this order.
    params =
      Helpers.build_params(
        [table, limit, page, query_data, selected_fields, order_by, ascending],
        token,
        app
      )

    Helpers.process_endpoint("DataService.query", params, token, app)
  end

  @doc """
  This function is not mapped to any Infusionsoft API endpoint. Instead, it is a helper
  to recurse on `query_a_data_table/6` and get the full number of records instead of one page.

  https://developer.infusionsoft.com/docs/xml-rpc/#data-query-a-data-table

  Available options:
  order_by - defaults to Id
  ascending - defaults to false
  """
  @spec query_all_from_table(
          String.t(),
          map(),
          [String.t()],
          String.t(),
          nil | String.t(),
          keyword()
        ) :: {:ok, list()} | {:error, String.t()}
  def query_all_from_table(table, query_data, fields, token, app, opts \\ []) do
    do_query_all_from_table(table, query_data, fields, token, app, opts, 0, [], [])
  end

  # Terminates once a page comes back empty after at least one request,
  # returning everything accumulated so far.
  defp do_query_all_from_table(_, _, _, _, _, _, count, acc, []) when count != 0 do
    {:ok, acc}
  end

  # Fetches page `count` (1000 records per page) and recurses with the page
  # counter advanced. Any {:error, _} from the query falls straight through
  # the `with` and is returned unchanged.
  defp do_query_all_from_table(table, query_data, fields, token, app, opts, count, acc, new) do
    opts = Keyword.merge(opts, page: count, limit: 1000)

    with {:ok, next} <- query_a_data_table(table, query_data, fields, token, app, opts) do
      do_query_all_from_table(table, query_data, fields, token, app, opts, count + 1, acc ++ new, next)
    end
  end

  @doc """
  https://developer.infusionsoft.com/docs/xml-rpc/#data-find-a-record-by-matching-a-specific-field

  Available options:
  page - defaults to 0
  limit - defaults to 1000
  """
  @spec find_by_field(
          String.t(),
          String.t(),
          String.t(),
          [String.t()],
          String.t(),
          nil | String.t(),
          keyword()
        ) :: {:ok, list()} | {:error, String.t()}
  def find_by_field(table, field_name, field_value, return_fields, token, app, opts \\ []) do
    opts = Keyword.merge([page: 0, limit: 1000], opts)
    page = Keyword.fetch!(opts, :page)
    limit = Keyword.fetch!(opts, :limit)

    params =
      Helpers.build_params(
        [table, limit, page, field_name, field_value, return_fields],
        token,
        app
      )

    Helpers.process_endpoint("DataService.findByField", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-create-a-record"
  @spec create_a_record(String.t(), map(), String.t(), nil | String.t()) ::
          {:ok, integer()} | {:error, String.t()}
  def create_a_record(table, values, token, app \\ nil) do
    params = Helpers.build_params([table, values], token, app)
    Helpers.process_endpoint("DataService.add", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-retrieve-a-record"
  @spec retrieve_a_record(String.t(), integer(), [String.t()], String.t(), nil | String.t()) ::
          {:ok, map()} | {:error, String.t()}
  def retrieve_a_record(table, id, fields, token, app \\ nil) do
    params = Helpers.build_params([table, id, fields], token, app)
    Helpers.process_endpoint("DataService.load", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-update-a-record"
  @spec update_a_record(String.t(), integer(), map(), String.t(), nil | String.t()) ::
          {:ok, integer()} | {:error, String.t()}
  def update_a_record(table, record_id, values, token, app \\ nil) do
    params = Helpers.build_params([table, record_id, values], token, app)
    Helpers.process_endpoint("DataService.update", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-delete-a-record"
  @spec delete_a_record(String.t(), integer(), String.t(), nil | String.t()) ::
          {:ok, boolean()} | {:error, String.t()}
  def delete_a_record(table, id, token, app \\ nil) do
    params = Helpers.build_params([table, id], token, app)
    Helpers.process_endpoint("DataService.delete", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-count-a-data-table-s-records"
  @spec count_records(String.t(), map(), String.t(), nil | String.t()) ::
          {:ok, integer()} | {:error, String.t()}
  def count_records(table, query_data, token, app \\ nil) do
    params = Helpers.build_params([table, query_data], token, app)
    Helpers.process_endpoint("DataService.count", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-create-a-custom-field"
  @spec create_custom_field(
          String.t(),
          String.t(),
          String.t(),
          integer(),
          String.t(),
          nil | String.t()
        ) :: {:ok, integer()} | {:error, String.t()}
  def create_custom_field(
        custom_field_type,
        display_name,
        data_type,
        header_id,
        token,
        app \\ nil
      ) do
    params =
      Helpers.build_params([custom_field_type, display_name, data_type, header_id], token, app)

    Helpers.process_endpoint("DataService.addCustomField", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-update-a-custom-field"
  @spec update_a_custom_field(integer(), map(), String.t(), nil | String.t()) ::
          {:ok, boolean()} | {:error, String.t()}
  def update_a_custom_field(custom_field_id, values, token, app \\ nil) do
    params = Helpers.build_params([custom_field_id, values], token, app)
    Helpers.process_endpoint("DataService.updateCustomField", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-retrieve-an-appointment-s-icalendar-file"
  @spec retrieve_appointments_ical(integer(), String.t(), nil | String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def retrieve_appointments_ical(appointment_id, token, app \\ nil) do
    params = Helpers.build_params([appointment_id], token, app)
    Helpers.process_endpoint("DataService.getAppointmentICal", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-retrieve-application-setting"
  @spec retrieve_application_setting(String.t(), String.t(), String.t(), nil | String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def retrieve_application_setting(module, setting, token, app \\ nil) do
    params = Helpers.build_params([module, setting], token, app)
    Helpers.process_endpoint("DataService.getAppSetting", params, token, app)
  end

  @doc "https://developer.infusionsoft.com/docs/xml-rpc/#data-validate-a-user-s-credentials"
  @spec validate_a_users_credentials(String.t(), String.t(), String.t(), nil | String.t()) ::
          {:ok, integer() | String.t()} | {:error, String.t()}
  def validate_a_users_credentials(username, password_hash, token, app \\ nil) do
    params = Helpers.build_params([username, password_hash], token, app)
    Helpers.process_endpoint("DataService.authenticateUser", params, token, app)
  end
end
|
lib/infusionsoft/endpoints/xml/data.ex
| 0.846673
| 0.40928
|
data.ex
|
starcoder
|
defmodule ExWeb3EcRecover.SignedType.Message do
  @moduledoc """
  Represents an EIP-712 message — the data structure that is signed and
  later recovered.

  ## Domain

  For details see the
  [EIP-712 definition of domainSeparator](https://eips.ethereum.org/EIPS/eip-712#definition-of-domainseparator).

  ```
  - string name the user readable name of signing domain, i.e. the name of the DApp or the protocol.
  - string version the current major version of the signing domain. Signatures from different versions are not compatible.
  - uint256 chainId the EIP-155 chain id. The user-agent should refuse signing if it does not match the currently active chain.
  - address verifyingContract the address of the contract that will verify the signature. The user-agent may do contract specific phishing prevention.
  - bytes32 salt a disambiguating salt for the protocol. This can be used as a domain separator of last resort.
  ```

  Source: https://eips.ethereum.org/EIPS/eip-712#definition-of-domainseparator

  ## Types

  The types available to the `message` field: a map from type name to a list
  of `%{"name" => ..., "type" => ...}` field descriptors. For the list of
  built-in types consult the
  [EIP-712 specification](https://eips.ethereum.org/EIPS/eip-712#specification).

  ### Example

  ```
  %{
    "Message" => [
      %{"name" => "data", "type" => "Child"},
      %{"name" => "intData", "type" => "int8"},
      %{"name" => "uintData", "type" => "uint8"},
      %{"name" => "bytesData", "type" => "bytes3"},
      %{"name" => "boolData", "type" => "bool"},
      %{"name" => "addressData", "type" => "address"}
    ],
    "Child" => [%{"name" => "data", "type" => "GrandChild"}],
    "GrandChild" => [%{"name" => "data", "type" => "string"}]
  }
  ```

  ## Primary type

  A string naming the root type; the root of `message` is expected to be of
  this type.

  ## Message

  A map holding the data from which the structure to be encoded is built.
  """

  @enforce_keys [:domain, :message, :types, :primary_type]
  defstruct @enforce_keys

  @type t :: %__MODULE__{
          domain: map(),
          message: map(),
          types: map(),
          primary_type: String.t()
        }

  @doc """
  Builds a `t:t/0` struct from a raw map.

  Returns `{:ok, struct}` when all four required keys (`"domain"`,
  `"message"`, `"types"`, `"primaryType"`) are present, `:error` otherwise.
  See the module documentation for the meaning of each field.
  """
  @spec from_map(message :: map()) :: {:ok, t()} | :error
  def from_map(raw) when is_map(raw) do
    case raw do
      %{"domain" => domain, "message" => message, "types" => types, "primaryType" => primary} ->
        {:ok,
         %__MODULE__{
           domain: domain,
           message: message,
           types: types,
           primary_type: primary
         }}

      _ ->
        :error
    end
  end

  def from_map(_), do: :error
end
|
lib/ex_web3_ec_recover/signed_type/message.ex
| 0.854384
| 0.871201
|
message.ex
|
starcoder
|
defmodule Cldr.Message do
  @moduledoc """
  Implements the [ICU Message Format](http://userguide.icu-project.org/formatparse/messages)
  with functions to parse and interpolate messages.
  """
  alias Cldr.Message.{Parser, Interpreter, Print}

  # Kernel.to_string/1 and Kernel.binding/1 are excluded because this module
  # defines its own `bindings/1` and delegates formatting functions.
  import Kernel, except: [to_string: 1, binding: 1]

  defdelegate format_list(message, args, options), to: Interpreter
  defdelegate format_list!(message, args, options), to: Interpreter

  @type message :: binary()
  @type bindings :: list() | map()
  @type arguments :: bindings()
  @type options :: Keyword.t()

  @doc false
  def cldr_backend_provider(config) do
    Cldr.Message.Backend.define_message_module(config)
  end

  @doc """
  Returns the translation of the given ICU-formatted
  message string.

  Any placeholders are replaced with the value of variables
  already in scope at the time of compilation.

  `t/1` is a wrapper around the `gettext/2` macro
  which should therefore be imported from a `Gettext`
  backend prior to calling `t/1`.

  ## Arguments

  * `message` is an ICU format message string.

  ## Returns

  * A translated string.

  """
  defmacro t(message) when is_binary(message) do
    caller = __CALLER__.module
    canonical_message = Cldr.Message.canonical_message!(message, pretty: true)

    # String.to_atom/1 is safe here: `message` is a compile-time binary
    # literal (enforced by the is_binary/1 guard on this macro), so the
    # atoms are created during compilation, not from runtime input.
    bindings =
      Enum.map(bindings(message), fn binding ->
        {String.to_atom(binding), {String.to_atom(binding), [if_undefined: :apply], caller}}
      end)

    quote do
      gettext(unquote(canonical_message), unquote(bindings))
    end
  end

  @doc """
  Returns the translation of the given ICU-formatted
  message string.

  `t/2` is a wrapper around the `gettext/2` macro
  which should therefore be imported from a `Gettext`
  backend prior to calling `t/2`.

  ## Arguments

  * `message` is an ICU format message string.

  * `bindings` is a keyword list or map of bindings used
    to replace placeholders in the message.

  ## Returns

  * A translated string.

  """
  defmacro t(message, bindings) when is_binary(message) do
    canonical_message = Cldr.Message.canonical_message!(message, pretty: true)

    quote do
      gettext(unquote(canonical_message), unquote(bindings))
    end
  end

  @doc """
  Format a message in the [ICU Message Format](https://unicode-org.github.io/icu/userguide/format_parse/messages)
  into a string.

  The ICU Message Format uses message `"pattern"` strings with
  variable-element placeholders enclosed in {curly braces}. The
  argument syntax can include formatting details, otherwise a
  default format is used.

  ## Arguments

  * `message` is an ICU format message string.

  * `bindings` is a list or map of arguments that
    are used to replace placeholders in the message.

  * `options` is a keyword list of options.

  ## Options

  * `backend` is any `Cldr` backend. That is, any module that
    contains `use Cldr`.

  * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
    or a `t:Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`.

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `false`.

  * `:allow_positional_args` determines if position arguments
    are permitted. Positional arguments are in the format
    `{0}` in the message. The default is `true`.

  * All other options are passed to the `to_string/2`
    function of a formatting module.

  ## Returns

  * `{:ok, formatted_message}` or

  * `{:error, {module, reason}}`

  ## Examples

      iex> Cldr.Message.format "{greeting} to you!", greeting: "Good morning"
      {:ok, "Good morning to you!"}

  """
  @spec format(String.t(), bindings(), options()) ::
          {:ok, String.t()} | {:error, {module(), String.t()}}
  def format(message, bindings \\ [], options \\ []) when is_binary(message) do
    case format_to_iolist(message, bindings, options) do
      {:ok, iolist, _bound, []} ->
        {:ok, :erlang.iolist_to_binary(iolist)}

      # A non-empty fourth element lists placeholder names that had no binding.
      {:error, _iolist, _bound, unbound} ->
        {:error, {Cldr.Message.BindError, "No binding was found for #{inspect(unbound)}"}}

      other ->
        other
    end
  end

  @spec format!(String.t(), bindings(), options()) :: String.t() | no_return
  def format!(message, args \\ [], options \\ []) when is_binary(message) do
    case format(message, args, options) do
      {:ok, binary} ->
        binary

      {:error, {exception, reason}} ->
        raise exception, reason
    end
  end

  @doc """
  Format a message in the [ICU Message Format](https://unicode-org.github.io/icu/userguide/format_parse/messages)
  into an iolist.

  The ICU Message Format uses message `"pattern"` strings with
  variable-element placeholders enclosed in {curly braces}. The
  argument syntax can include formatting details, otherwise a
  default format is used.

  ## Arguments

  * `message` is an ICU format message string.

  * `bindings` is a list or map of arguments that
    are used to replace placeholders in the message.

  * `options` is a keyword list of options.

  ## Options

  * `backend` is any `Cldr` backend. That is, any module that
    contains `use Cldr`.

  * `:locale` is any valid locale name returned by `Cldr.known_locale_names/0`
    or a `t:Cldr.LanguageTag` struct. The default is `Cldr.get_locale/0`.

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `false`.

  * `:allow_positional_args` determines if position arguments
    are permitted. Positional arguments are in the format
    `{0}` in the message. The default is `true`.

  * All other options are passed to the `to_string/2`
    function of a formatting module.

  ## Returns

  * `{:ok, formatted_message}` or

  * `{:error, {module, reason}}`

  ## Examples

      iex> Cldr.Message.format_to_iolist "{greeting} to you!", greeting: "Good morning"
      {:ok, ["Good morning", " to you!"], ["greeting"], []}

  """
  @spec format_to_iolist(String.t(), bindings(), options()) ::
          {:ok, list(), list(), list()}
          | {:error, list(), list(), list()}
          | {:error, {module(), binary()}}
  def format_to_iolist(message, bindings \\ [], options \\ []) when is_binary(message) do
    {locale, backend} = Cldr.locale_and_backend_from(options)

    options =
      default_options()
      |> Keyword.merge(options)
      |> Keyword.put_new(:locale, locale)
      |> Keyword.put_new(:backend, backend)

    with {:ok, message} <- maybe_trim(message, options[:trim]),
         {:ok, parsed} <- Parser.parse(message, options[:allow_positional_args]) do
      format_list(parsed, bindings, options)
    end
  rescue
    # Parsing/compiling can raise; normalize those into an error tuple so
    # callers get one consistent return shape.
    e in [Cldr.FormatCompileError, Cldr.Message.ParseError] ->
      {:error, {e.__struct__, e.message}}
  end

  @doc """
  Returns the [Jaro distance](https://en.wikipedia.org/wiki/Jaro–Winkler_distance)
  between two messages.

  This allows for fuzzy matching of message
  which can be helpful when a message string
  is changed but the semantics remain the same.

  ## Arguments

  * `message1` is a CLDR message in binary form.

  * `message2` is a CLDR message in binary form.

  * `options` is a keyword list of options. The
    default is `[]`.

  ## Options

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `false`.

  ## Returns

  * `{ok, distance}` where `distance` is a float value between 0.0
    (equates to no similarity) and 1.0 (is an
    exact match) representing Jaro distance between `message1`
    and `message2` or

  * `{:error, {exception, reason}}`

  ## Examples

      iex> Cldr.Message.jaro_distance "{greetings} to you!", "{greeting} to you!"
      {:ok, 0.9824561403508771}

  """
  def jaro_distance(message1, message2, options \\ []) do
    # Both messages are parsed and re-printed so the distance is computed on
    # canonical forms, not on incidental whitespace differences.
    with {:ok, message1} <- maybe_trim(message1, options[:trim]),
         {:ok, message2} <- maybe_trim(message2, options[:trim]),
         {:ok, message1_ast} <- Parser.parse(message1),
         {:ok, message2_ast} <- Parser.parse(message2) do
      canonical_message1 = Print.to_string(message1_ast)
      canonical_message2 = Print.to_string(message2_ast)
      {:ok, String.jaro_distance(canonical_message1, canonical_message2)}
    end
  end

  @doc """
  Returns the [Jaro distance](https://en.wikipedia.org/wiki/Jaro–Winkler_distance)
  between two messages or raises.

  This allows for fuzzy matching of message
  which can be helpful when a message string
  is changed but the semantics remain the same.

  ## Arguments

  * `message1` is a CLDR message in binary form.

  * `message2` is a CLDR message in binary form.

  * `options` is a keyword list of options. The
    default is `[]`.

  ## Options

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `false`.

  ## Returns

  * `distance` where `distance` is a float value between 0.0
    (equates to no similarity) and 1.0 (is an
    exact match) representing Jaro distance between `message1`
    and `message2` or

  * raises an exception

  ## Examples

      iex> Cldr.Message.jaro_distance! "{greetings} to you!", "{greeting} to you!"
      0.9824561403508771

  """
  def jaro_distance!(message1, message2, options \\ []) do
    case jaro_distance(message1, message2, options) do
      {:ok, distance} -> distance
      {:error, {exception, reason}} -> raise exception, reason
    end
  end

  @doc """
  Formats a message into a canonical form.

  This allows for messages to be compared
  directly, or using `Cldr.Message.jaro_distance/3`.

  ## Arguments

  * `message` is a CLDR message in binary form.

  * `options` is a keyword list of options. The
    default is `[]`.

  ## Options

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `true`.

  * `:pretty` determines if the message if
    formatted with indentation to aid readability.
    The default is `false`.

  ## Returns

  * `{ok, canonical_message}` where `canonical_message`
    is a string or

  * `{:error, {exception, reason}}`

  ## Examples

      iex> Cldr.Message.canonical_message "{greeting } to you!"
      {:ok, "{greeting} to you!"}

  """
  def canonical_message(message, options \\ []) do
    # Unlike format/3, trimming defaults to `true` here.
    options = Keyword.put_new(options, :trim, true)

    with {:ok, message} <- maybe_trim(message, options[:trim]),
         {:ok, message_ast} <- Parser.parse(message) do
      {:ok, Print.to_string(message_ast, options)}
    end
  end

  @doc """
  Formats a message into a canonical form
  or raises if the message cannot be parsed.

  This allows for messages to be compared
  directly, or using `Cldr.Message.jaro_distance/3`.

  ## Arguments

  * `message` is a CLDR message in binary form.

  * `options` is a keyword list of options. The
    default is `[]`.

  ## Options

  * `:trim` determines if the message is trimmed
    of whitespace before formatting. The default is
    `true`.

  * `:pretty` determines if the message if
    formatted with indentation to aid readability.
    The default is `false`.

  ## Returns

  * `canonical_message` as a string or

  * raises an exception

  ## Examples

      iex> Cldr.Message.canonical_message! "{greeting } to you!"
      "{greeting} to you!"

  """
  def canonical_message!(message, options \\ []) do
    case canonical_message(message, options) do
      {:ok, message} -> message
      {:error, {exception, reason}} -> raise exception, reason
    end
  end

  @doc """
  Extract the binding names from an ICU message.

  ## Arguments

  * `message` is a CLDR message in binary or
    parsed form.

  ### Returns

  * A list of binding names as strings or

  * `{:error, {exception, reason}}`

  ### Examples

      iex> Cldr.Message.bindings "This {variable} is in the message"
      ["variable"]

  """
  def bindings(message) when is_binary(message) do
    with {:ok, parsed} <- Cldr.Message.Parser.parse(message) do
      bindings(parsed)
    end
  end

  # Walks a parsed message AST, collecting argument names from plain
  # placeholders and from selector constructs (select/plural/ordinal),
  # recursing into each selector's branches.
  def bindings(message) when is_list(message) do
    Enum.reduce(message, [], fn
      {:named_arg, arg}, acc -> [arg | acc]
      {:pos_arg, arg}, acc -> [arg | acc]
      {:select, {_, arg}, selectors}, acc -> [arg, bindings(selectors) | acc]
      {:plural, {_, arg}, _, selectors}, acc -> [arg, bindings(selectors) | acc]
      {:select_ordinal, {_, arg}, _, selectors}, acc -> [arg, bindings(selectors) | acc]
      _other, acc -> acc
    end)
    |> List.flatten()
    |> Enum.uniq()
  end

  # Selector branches arrive as a map of branch name => sub-message.
  # NOTE(review): this clause does not flatten/uniq its own result; it is
  # reached from the list clause above, which flattens afterwards.
  def bindings(message) when is_map(message) do
    Enum.map(message, fn {_selector, message} -> bindings(message) end)
  end

  @doc false
  def default_options do
    [trim: false, allow_positional_args: true]
  end

  # Compile-time selection: Cldr.default_backend!/0 exists only in newer
  # ex_cldr releases; fall back to the deprecated default_backend/0 otherwise.
  if Code.ensure_loaded?(Cldr) and function_exported?(Cldr, :default_backend!, 0) do
    @doc false
    def default_backend do
      Cldr.default_backend!()
    end
  else
    @doc false
    def default_backend do
      Cldr.default_backend()
    end
  end

  defp maybe_trim(message, true) do
    {:ok, String.trim(message)}
  end

  defp maybe_trim(message, _) do
    {:ok, message}
  end
end
|
lib/cldr/messages/messages.ex
| 0.885786
| 0.531878
|
messages.ex
|
starcoder
|
defmodule Sparklinex.Smooth do
  @moduledoc """
  Renders a "smooth" (connected line) sparkline chart using
  `Sparklinex.MogrifyDraw` drawing primitives.
  """

  alias Sparklinex.ChartData
  alias Sparklinex.MogrifyDraw
  alias Sparklinex.Smooth.Options

  # Draws the chart in layers: background canvas, std-dev band, the data
  # line (or filled polygon), target line, then max/min/last markers on top.
  def draw(
        data,
        spec = %Options{height: height, background_color: bk_color, line_color: line_color}
      ) do
    spec_with_width = %{spec | width: width(data, spec)}
    normalized_data = ChartData.normalize_data(data, :smooth)
    coords = create_coords(normalized_data, spec_with_width)
    canvas = MogrifyDraw.create_canvas(spec_with_width.width, height, bk_color)

    canvas
    |> draw_std_dev_box(normalized_data, spec_with_width)
    |> MogrifyDraw.set_line_color(line_color)
    |> plot_data(coords, spec_with_width)
    |> draw_target_line(data, spec_with_width)
    |> draw_max(coords, spec_with_width)
    |> draw_min(coords, spec_with_width)
    |> draw_last(coords, spec_with_width)
  end

  # Canvas width: one `step`-wide column per segment between data points.
  # NOTE(review): the trailing `/ 1` only coerces the result to a float;
  # confirm whether a float width is actually required downstream.
  defp width(data, %Options{step: step}) do
    (length(data) - 1) * step / 1
  end

  # Turns [a, b, c] into consecutive segment pairs [{a, b}, {b, c}].
  # NOTE(review): there is no clause for `[]`, so each_pair([]) raises a
  # FunctionClauseError — confirm empty data is rejected upstream.
  defp each_pair([p1 | [p2 | rest]]) do
    [{p1, p2} | each_pair([p2 | rest])]
  end

  defp each_pair([_p1 | _rest]), do: []

  # Maps each value to pixel coordinates: x advances by `step` per point;
  # y is inverted (image coordinates grow downward), so larger values land
  # nearer the top. The 101.0/(height - 4) scale assumes values normalized
  # to roughly 0..100 by ChartData.normalize_data/2 — TODO confirm.
  defp create_coords(data, %Options{step: step, height: height}) do
    data
    |> Enum.with_index()
    |> Enum.map(fn {y, x} -> {x * step, height - 3 - y / (101.0 / (height - 4))} end)
  end

  # Extends the data line into a closed polygon that reaches one pixel past
  # the left/right edges and below the bottom edge, for the underneath fill.
  defp poly_coords(coords, height, width) do
    {first_x, first_y} = List.first(coords)
    {last_x, last_y} = List.last(coords)

    [
      {-1, height + 1},
      {first_x - 1, first_y},
      coords,
      {last_x + 1, last_y},
      {width + 1, height + 1}
    ]
    |> List.flatten()
  end

  # Without an underneath color: just the connected line segments.
  defp plot_data(canvas, coords, %Options{underneath_color: nil}) do
    MogrifyDraw.draw_lines(canvas, each_pair(coords))
  end

  # With an underneath color: a filled polygon under the data line.
  defp plot_data(canvas, coords, %Options{width: width, underneath_color: color, height: height}) do
    MogrifyDraw.polygon(canvas, poly_coords(coords, height, width), color)
  end

  # Shades the mean +/- one standard deviation band, using the same y
  # scaling as create_coords/2.
  defp draw_std_dev_box(canvas, data, %Options{
         height: height,
         has_std_dev: true,
         std_dev_color: color,
         width: width
       }) do
    std_dev = Statistics.stdev(data)
    mid = Enum.sum(data) / length(data)
    lower = height - 3 - (mid - std_dev) / (101.0 / (height - 4))
    upper = height - 3 - (mid + std_dev) / (101.0 / (height - 4))

    canvas
    |> MogrifyDraw.set_line_color("transparent")
    |> MogrifyDraw.rectangle({0, lower}, {width, upper}, color)
  end

  defp draw_std_dev_box(canvas, _data, %Options{has_std_dev: false}) do
    canvas
  end

  defp draw_target_line(canvas, _data, %Options{target: nil}) do
    canvas
  end

  # Draws a horizontal line at the (normalized and y-inverted) target value,
  # overshooting both edges by 5 pixels.
  defp draw_target_line(canvas, data, %Options{
         target: target,
         target_color: color,
         height: height,
         width: width
       }) do
    norm_value = ChartData.normalize_value(target, Enum.min(data), Enum.max(data))
    adjusted_target_value = height - 3 - norm_value / (101.0 / (height - 4))

    canvas
    |> MogrifyDraw.draw_line({{-5, adjusted_target_value}, {width + 5, adjusted_target_value}}, color)
  end

  # Marks the minimum data value. min_by on -y picks the *largest* y, which
  # is the lowest point on the canvas — the smallest value, since
  # create_coords/2 maps larger values to smaller y.
  defp draw_min(canvas, coords, %Options{has_min: true, min_color: color}) do
    min_point = Enum.min_by(coords, fn {_x, y} -> -y end)
    MogrifyDraw.draw_box(canvas, min_point, 2, color)
  end

  defp draw_min(canvas, _coords, %Options{has_min: false}) do
    canvas
  end

  # Marks the maximum data value (smallest y — highest point on the canvas).
  defp draw_max(canvas, coords, %Options{has_max: true, max_color: color}) do
    max_point = Enum.max_by(coords, fn {_x, y} -> -y end)
    MogrifyDraw.draw_box(canvas, max_point, 2, color)
  end

  defp draw_max(canvas, _coords, %Options{has_max: false}) do
    canvas
  end

  # Marks the most recent (right-most) data point.
  defp draw_last(canvas, coords, %Options{has_last: true, last_color: color}) do
    last_point = List.last(coords)
    MogrifyDraw.draw_box(canvas, last_point, 2, color)
  end

  defp draw_last(canvas, _coords, %Options{has_last: false}) do
    canvas
  end
end
|
lib/sparklinex/smooth.ex
| 0.674587
| 0.565599
|
smooth.ex
|
starcoder
|
defmodule Elixlsx.Util do
alias Elixlsx.XML
@col_alphabet Enum.to_list(?A..?Z)
@doc ~S"""
Returns the column letter(s) associated with a column index.
Col idx starts at 1.
## Examples
iex> encode_col(1)
"A"
iex> encode_col(28)
"AB"
"""
@spec encode_col(non_neg_integer) :: String.t()
def encode_col(0), do: ""
def encode_col(num) when num <= 26, do: <<num + 64>>
def encode_col(num, suffix \\ "")
def encode_col(num, suffix) when num <= 26, do: <<num + 64>> <> suffix
def encode_col(num, suffix) do
mod = div(num, 26)
rem = rem(num, 26)
if rem == 0 do
encode_col(mod - 1, "Z" <> suffix)
else
encode_col(mod, <<rem + 64>> <> suffix)
end
end
@doc ~S"""
Returns the column index associated with a given letter.
## Examples
iex> decode_col("AB")
28
iex> decode_col("A")
1
"""
@spec decode_col(list(char()) | String.t()) :: non_neg_integer
def decode_col(s) when is_list(s), do: decode_col(to_string(s))
def decode_col(""), do: 0
def decode_col(s) when is_binary(s) do
case String.match?(s, ~r/^[A-Z]*$/) do
false ->
raise %ArgumentError{message: "Invalid column string: " <> inspect(s)}
true ->
# translate list of strings to the base-26 value they represent
Enum.map(String.to_charlist(s), fn x -> :string.chr(@col_alphabet, x) end)
# multiply and aggregate them
|> List.foldl(0, fn x, acc -> x + 26 * acc end)
end
end
def decode_col(s) do
raise %ArgumentError{message: "decode_col expects string or charlist, got " <> inspect(s)}
end
@doc ~S"""
Returns the Char/Number representation of a given row/column combination.
Indizes start with 1.
## Examples
iex> to_excel_coords(1, 1)
"A1"
iex> to_excel_coords(10, 27)
"AA10"
"""
@spec to_excel_coords(number, number) :: String.t()
def to_excel_coords(row, col) do
encode_col(col) <> to_string(row)
end
@spec from_excel_coords(String.t()) :: {pos_integer, pos_integer}
@doc ~S"""
Returns a tuple {row, col} corresponding to the input.
Row and col are 1-indexed, use from_excel_coords0 for zero-indexing.
## Examples
iex> from_excel_coords("C2")
{2, 3}
iex> from_excel_coords0("C2")
{1, 2}
"""
def from_excel_coords(input) do
case Regex.run(~r/^([A-Z]+)([0-9]+)$/, input, capture: :all_but_first) do
nil ->
raise %ArgumentError{message: "Invalid excel coordinates: " <> inspect(input)}
[colS, rowS] ->
{row, _} = Integer.parse(rowS)
{row, decode_col(colS)}
end
end
@spec from_excel_coords0(String.t()) :: {non_neg_integer, non_neg_integer}
@doc ~S"See from_excel_coords/1"
def from_excel_coords0(input) do
{row, col} = from_excel_coords(input)
{row - 1, col - 1}
end
@doc ~S"""
Returns the ISO String representation (in UTC) for a erlang datetime() or datetime1970()
object.
## Examples
iex> iso_from_datetime {{2000, 12, 30}, {23, 59, 59}}
"2000-12-30T23:59:59Z"
"""
@type datetime_t :: :calendar.datetime()
@spec iso_from_datetime(datetime_t) :: String.t()
def iso_from_datetime(calendar) do
{{y, m, d}, {hours, minutes, seconds}} = calendar
to_string(
:io_lib.format(
'~4.10.0b-~2.10.0b-~2.10.0bT~2.10.0b:~2.10.0b:~2.10.0bZ',
[y, m, d, hours, minutes, seconds]
)
)
end
@doc ~S"""
Returns
- the current current timestamp if input is nil,
- the UNIX-Timestamp interpretation when given an integer,
both in ISO-Repr.
If input is a String, the string is returned:
iex> iso_timestamp 0
"1970-01-01T00:00:00Z"
iex> iso_timestamp 1447885907
"2015-11-18T22:31:47Z"
It doesn't validate string inputs though:
iex> iso_timestamp "goat"
"goat"
"""
@spec iso_timestamp(String.t() | integer | nil) :: String.t()
def iso_timestamp(input \\ nil) do
cond do
input == nil ->
iso_from_datetime(:calendar.universal_time())
is_integer(input) ->
iso_from_datetime(
:calendar.now_to_universal_time({div(input, 1_000_000), rem(input, 1_000_000), 0})
)
# TODO this case should parse the string i guess
# TODO also prominently absent: [char].
XML.valid?(input) ->
input
true ->
raise "Invalid input to iso_timestamp." <> inspect(input)
end
end
# Excel serial dates count fractional days from this epoch.
@excel_epoch {{1899, 12, 31}, {0, 0, 0}}
@secs_per_day 86400
@doc ~S"""
Convert an erlang `:calendar` object, or a unix timestamp to an excel timestamp.
Timestamps that are already in excel format are passed through
unmodified.
"""
@spec to_excel_datetime(datetime_t) :: {:excelts, number}
def to_excel_datetime({{yy, mm, dd}, {h, m, s}}) do
  # Fractional days elapsed between the excel epoch and the given datetime.
  in_seconds = :calendar.datetime_to_gregorian_seconds({{yy, mm, dd}, {h, m, s}})
  excel_epoch = :calendar.datetime_to_gregorian_seconds(@excel_epoch)
  t_diff = (in_seconds - excel_epoch) / @secs_per_day
  # Apply the "Lotus 123" bug - 1900 is considered a leap year.
  t_diff =
    if t_diff > 59 do
      t_diff + 1
    else
      t_diff
    end
  {:excelts, t_diff}
end
# NOTE(review): the guard accepts any number, but div/2 and rem/2 raise
# ArithmeticError for floats — effectively only integer unix timestamps
# (seconds) are supported. Consider tightening to is_integer/1.
@spec to_excel_datetime(number) :: {:excelts, number}
def to_excel_datetime(input) when is_number(input) do
  to_excel_datetime(
    :calendar.now_to_universal_time({div(input, 1_000_000), rem(input, 1_000_000), 0})
  )
end
# Already an excel timestamp — pass through unchanged.
@spec to_excel_datetime({:excelts, number}) :: {:excelts, number}
def to_excel_datetime({:excelts, value}) do
  {:excelts, value}
end
# Formula's value calculate on opening excel programm.
# We don't need to format this here.
@spec to_excel_datetime({:formula, String.t()}) :: {:formula, String.t()}
def to_excel_datetime({:formula, value}) do
  {:formula, value}
end
@doc ~S"""
Replace_all(input, [{search, replace}]).
## Examples
iex> replace_all("Hello World", [{"e", "E"}, {"o", "oO"}])
"HElloO WoOrld"
"""
@spec replace_all(String.t(), [{String.t(), String.t()}]) :: String.t()
def replace_all(input, [{s, r} | srx]) do
String.replace(input, s, r) |> replace_all(srx)
end
def replace_all(input, []) do
input
end
# Project version captured at compile time from mix.exs.
@version Mix.Project.config()[:version]
@doc ~S"""
Returns the application version suitable for the <ApplicationVersion> tag.
"""
def app_version_string do
  # The dot between minor and patch is deliberately dropped ("1.2.3" -> "1.23"),
  # presumably because <ApplicationVersion> only allows two dot-separated
  # components — TODO confirm against the OOXML spec.
  String.replace(@version, ~r/(\d+)\.(\d+)\.(\d+)/, "\\1.\\2\\3")
end
end
|
lib/elixlsx/util.ex
| 0.80213
| 0.497742
|
util.ex
|
starcoder
|
defmodule Mix.Tasks.Check do
  @moduledoc """
  One task to efficiently run all code analysis & testing tools in an Elixir project.

  ## Tools

  Task comes out of the box with a rich predefined set of curated tools that are considered to be
  reasonable additions for most Elixir and Phoenix projects which care about having bug-free,
  maintainable and secure code.

  Following standard library tools are configured by default:

  - [`:compiler`] - produces compilation warnings that allow to early detect bugs & typos in the
    code eg. an attempt to call non-existing or deprecated function
  - [`:unused_deps`] - ensures that there are no unused dependencies in the project's `mix.lock`
    file (e.g. after removing a previously used dependency)
  - [`:formatter`] - ensures that all the code follows the same basic formatting rules such as
    maximum number of chars in a line or function indentation
  - [`:ex_unit`] - starts the application in test mode and runs all runtime tests against it
    (defined as test modules or embedded in docs as doctests)

  Following community tools are configured by default:

  - [`:credo`] - ensures that all the code follows a further established set of software design,
    consistency, readability & misc rules and conventions (still statical)
  - [`:dialyzer`] - performs static code analysis around type mismatches and other issues that are
    commonly detected by static language compilers
  - [`:doctor`] - ensures that the project documentation is healthy by validating the presence of
    module docs, functions docs, typespecs and struct typespecs
  - [`:ex_doc`] - compiles the project documentation in order to ensure that there are no issues
    that would make it impossible for docs to get collected and assembled
  - [`:npm_test`] - runs JavaScript tests in projects with front-end assets embedded in `assets`
    directory and `package.json` in it (default for Phoenix apps)
  - [`:sobelow`] - performs security-focused static analysis mainly focused on the Phoenix
    framework, but also detecting vulnerable dependencies in arbitrary Mix projects

  You can disable or adjust curated tools as well as add custom ones via the configuration file.

  ## Workflow

  1. `:compiler` tool is run before others in order to compile the project just once and to avoid
     reprinting the compilation error multiple times.
  2. If the compilation succeeded (even if with warnings), further tools are run in parallel while
     their output is streamed live one by one for instant insight.
  3. Output from tools that have failed gets reprinted for sake of easily reading into them all at
     once and identifying all project issues in one go.
  4. Summary is presented with a list of all tools that have failed, succeeded or were skipped due
     to missing files or project dependencies.
  5. Manifest is written to specified file or tmp directory in order to allow running only failed
     checks and for sake of reporting to CI.
  6. If any of the tools have failed, the Erlang system gets requested to emit exit status 1 upon
     shutdown in order to make the CI build fail.

  ### Tool order

  Tools are run in parallel, but their output is presented one by one in order to avoid mixing it
  up. You can control the order in which the output is presented for tools that have started at the
  same time via the `:order` tool option. You'll probably want to put tools that run quicker and
  fail more often before the others in order to get useful feedback as soon as possible. Curated
  tools are ordered in such a way out of the box.

  ### Tool processes and ANSI formatting

  Tools are run in separate processes. This has following benefits:

  - allows to run tools in parallel & stream their output
  - catches exit statuses in order to detect failures
  - enables running Mix tasks in multiple envs
  - enables including non-Elixir scripts and tools in the check

  The downside is that tools will be run outside of TTY which will usually result in disabling ANSI
  formatting. This issue is fixed in different ways depending on Elixir version:

  - **Elixir 1.9 and newer**: patches all Elixir commands and Mix tasks with `--erl-config` option
    to load the Erlang configuration provided by `ex_check` that sets the `ansi_enabled` flag
  - **older versions**: patches Mix tasks with `--eval` option to run `Application.put_env/3` that
    sets the `ansi_enabled` flag

  You may keep your Elixir commands unaffected via the `:enable_ansi` tool option. It's ignored for
  non-Elixir tools for which you'll have to enforce ANSI on your own.

  ### Cross-tool dependencies

  Even though tools are run in parallel, it's possible to make sure that specific tool will be run
  only after other(s) are completed via the `:deps` tool option. This enables defining complex
  workflows, such as the following:

  - tools may reuse artifacts from ones executed earlier
  - tools may handle the success/failure of those they depend on
  - tools may be forced not to run at the same time without giving up on entire parallel execution

  By default tools will be run regardless of the exit status of their dependencies, but it's
  possible to depend on specific exit status via the `:status` dependency option. Tools will not be
  run if their dependencies won't get to run at all e.g. due to using `--except` command line option
  or a missing/circular dependency.

  ### Umbrella projects

  Task comes with extensive support for umbrella projects. The most notable feature is the ability
  to run tools recursively for each child app separately. It's similar to flagging Mix tasks as
  recursive but empowered with following extra benefits:

  - runs recursively not just Mix tasks, but also arbitrary scripts & commands
  - runs tools on child apps in parallel
  - allows tools to target only specific child apps
  - presents failures & run durations for each child app separately
  - detects if curated tools should run for each child app separately
  - builds separate cross-tool dependency chains for each child app

  You may want to disable parallel execution of the tool on child apps (`parallel: false` under
  `:umbrella` tool option) if it uses the same resources across tool runs against different child
  apps. An example of that could be `ex_unit` that, depending on a project and test dependencies,
  may involve mutating the same database in test suites belonging to separate child apps.

  You may have the tool run *only* at the root level of the umbrella by disabling the recursive
  execution (`recursive: false` under `:umbrella` tool option) and targeting an empty list of child
  apps (`apps: []` under `:umbrella` tool option).

  ### Retrying failed tools

  You may run only failed tools in the next run by passing the `--retry` command line option in
  order to avoid wasting time on checks that have already passed.

  In addition, some tools offer the capability to do the same, i.e. run only failed tests or checks.
  If the tool provides such capability, it will be automatically executed this way when the
  `--retry` command line option is passed. This feature is provided out of the box for `ex_unit`
  tool and may be provided for any tool via `:retry` tool option in config.

  Task will run in retry mode automatically even if `--retry` was not specified when a previous run
  has resulted in any failures. You can change this behavior with `--no-retry` command line option
  or by setting `retry: false` in config.

  ### Fix mode

  Some tools are capable of automatically resolving issues by running in the fix mode. You may take
  advantage of this feature by passing the `--fix` command line option. This feature is provided out
  of the box for `formatter` and `unused_deps` tools and may be provided for any tool via `:fix`
  tool option in config.

  You may combine `--fix` with `--retry` to only request tools that have failed to do the fixing.

  You may also consider adding `~/.check.exs` with `[fix: true]` on a local machine in order to
  always run in the fix mode for convenience. You probably don't want this option in the project
  config as that would trigger fix mode on CI as well - unless you want CI to perform & commit back
  fixes.

  ### Manifest file

  After every run, task writes a list of tool statuses to manifest file specified with `--manifest`
  command line option or to temp directory. This allows to run only failed tools in the next run by
  passing the `--retry` command line option, but manifest file may also be used for sake of
  reporting to CI.

  It's a simple plain text file with following syntax that should play well with shell commands:

      PASS compiler
      FAIL formatter
      PASS ex_unit
      PASS unused_deps
      SKIP credo
      SKIP sobelow
      SKIP ex_doc
      SKIP dialyzer

  ## Configuration file

  Check configuration may be adjusted with the optional `.check.exs` file.

  Configuration file should evaluate to keyword list with following options:

  - `:parallel` - toggles running tools in parallel; default: `true`
  - `:skipped` - toggles printing skipped tools in summary; default: `true`
  - `:tools` - a list of tools to run; default: curated tools; more info below

  Tool list under `:tools` key may contain following tool tuples:

  - `{:tool_name, opts}`
  - `{:tool_name, enabled}` where `enabled` corresponds to the `:enabled` option
  - `{:tool_name, command}` where `command` corresponds to the `:command` option
  - `{:tool_name, command, opts}` where `command` corresponds to the `:command` option

  Tool options (`opts` above) is a keyword list with following options:

  - `:enabled` - enables/disables already defined tools; default: `true`
  - `:command` - command as string or list of strings (executable + arguments)
  - `:cd` - directory (relative to cwd) to change to before running the command
  - `:env` - environment variables as map with string keys & values
  - `:order` - integer that controls the order in which tool output is presented; default: `0`
  - `:deps` - list of tools that the given tool depends on; more info below
  - `:enable_ansi` - toggles extending Elixir/Mix commands to have ANSI enabled; default: `true`
  - `:umbrella` - configures the tool behaviour in an umbrella project; more info below
  - `:fix` - fix mode command as string or list of strings (executable + arguments)
  - `:retry` - command to retry after failure as string or list of strings (executable + arguments)

  Dependency list under `:deps` key may contain `:tool_name` atoms or `{:tool_name, opts}` tuples
  where `opts` is a keyword list with following options:

  - `:status` - depends on specific exit status; one of `:ok`, `:error`, exit code integer or a list
    with any of the above; default: any exit status
  - `:else` - specifies the behaviour upon dependency being unsatisfied; one of `:skip` (show the
    tool among skipped ones), `:disable` (disable the tool without notice); default: `:skip`

  Umbrella configuration under `:umbrella` key is a keyword list with following options:

  - `:recursive` - toggles running the tool on each child app separately as opposed to running it
    once from umbrella root; default: `true` except for non-recursive Mix tasks
  - `:parallel` - toggles running tool in parallel on all child apps; default: `true`
  - `:apps` - list of umbrella child app names targeted by the tool; default: all apps

  Task will load the configuration in following order:

  1. Default stock configuration.
  2. `--config` file opt on command line.
  3. `.check.exs` in user home directory.
  4. `.check.exs` in current project directory (or umbrella root for an umbrella project).

  Use the `mix check.gen.config` task to generate sample configuration that comes with well-commented examples to help you get started.

  ## Command line options

  - `--config path/to/check.exs` - override default config file
  - `--manifest path/to/manifest` - specify path to file that holds last run results
  - `--only dialyzer --only credo ...` - run only specified check(s)
  - `--except dialyzer --except credo ...` - don't run specified check(s)
  - `--[no-]fix` - (don't) run tools in fix mode in order to resolve issues automatically
  - `--[no-]retry` - (don't) run only checks that have failed in the last run
  - `--[no-]parallel` - (don't) run tools in parallel
  - `--[no-]skipped` - (don't) print skipped tools in summary

  [`:compiler`]: https://hexdocs.pm/mix/Mix.Tasks.Compile.html
  [`:credo`]: https://hexdocs.pm/credo
  [`:dialyzer`]: https://hexdocs.pm/dialyxir
  [`:doctor`]: https://github.com/akoutmos/doctor
  [`:ex_doc`]: https://hexdocs.pm/ex_doc
  [`:ex_unit`]: https://hexdocs.pm/ex_unit
  [`:formatter`]: https://hexdocs.pm/mix/Mix.Tasks.Format.html
  [`:npm_test`]: https://docs.npmjs.com/cli/test.html
  [`:sobelow`]: https://hexdocs.pm/sobelow
  [`:unused_deps`]: https://hexdocs.pm/mix/Mix.Tasks.Deps.Unlock.html
  """

  use Mix.Task

  alias ExCheck.Check

  @shortdoc "Runs all code analysis & testing tools in an Elixir project"

  @preferred_cli_env :test

  # Recognized command line switches; :keep collects repeated --only/--except.
  @switches [
    config: :string,
    except: :keep,
    exit_status: :boolean,
    fix: :boolean,
    manifest: :string,
    only: :keep,
    parallel: :boolean,
    retry: :boolean,
    skipped: :boolean
  ]

  @aliases [
    c: :config,
    f: :fix,
    m: :manifest,
    o: :only,
    r: :retry,
    x: :except
  ]

  @impl Mix.Task
  def run(args) do
    {parsed, _argv} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)

    parsed
    |> normalize_opts()
    |> Check.run()
  end

  # Converts --only/--except values from strings into the atoms used as tool
  # names throughout the check configuration; everything else passes through.
  defp normalize_opts(opts) do
    for opt <- opts do
      case opt do
        {:only, tool} -> {:only, String.to_atom(tool)}
        {:except, tool} -> {:except, String.to_atom(tool)}
        other -> other
      end
    end
  end
end
|
lib/mix/tasks/check.ex
| 0.85443
| 0.738103
|
check.ex
|
starcoder
|
defmodule StarkInfra do
  @moduledoc """
  SDK to facilitate Elixir integrations with the Stark Infra API v2.
  """

  alias StarkInfra.Project
  alias StarkInfra.Organization
  alias StarkInfra.Utils.Check

  @doc """
  The Project struct is an authentication entity for the SDK that is permanently
  linked to a specific Workspace.
  All requests to the Stark Infra API must be authenticated via an SDK user,
  which must have been previously created at the Stark Infra website
  [https://web.sandbox.starkinfra.com] or [https://web.starkinfra.com]
  before you can use it in this SDK. Projects may be passed as the user parameter on
  each request or may be defined as the default user at the start (See README).

  ## Parameters (required):
  - `:environment` [string]: environment where the project is being used. ex: "sandbox" or "production"
  - `:id` [string]: unique id required to identify project. ex: "5656565656565656"
  - `:private_key` [string]: PEM string of the private key linked to the project. ex: "-----<KEY>"
  """
  @spec project(
          environment: :production | :sandbox,
          id: binary,
          private_key: binary
        ) :: Project.t()
  def project(parameters) do
    # Enforce the required keys, then collapse the keyword list into a map
    # for destructuring.
    %{environment: environment, id: id, private_key: private_key} =
      parameters
      |> Check.enforced_keys([:environment, :id, :private_key])
      |> Map.new()

    Project.validate(environment, id, private_key)
  end

  @doc """
  The Organization struct is an authentication entity for the SDK that
  represents your entire Organization, being able to access any Workspace
  underneath it and even create new Workspaces. Only a legal representative
  of your organization can register or change the Organization credentials.
  All requests to the Stark Infra API must be authenticated via an SDK user,
  which must have been previously created at the Stark Infra website
  [https://web.sandbox.starkinfra.com] or [https://web.starkinfra.com]
  before you can use it in this SDK. Organizations may be passed as the user parameter on
  each request or may be defined as the default user at the start (See README).
  If you are accessing a specific Workspace using Organization credentials, you should
  specify the workspace ID when building the Organization struct or by request, using
  the Organization.replace(organization, workspace_id) method, which creates a copy of the organization
  struct with the altered workspace ID. If you are listing or creating new Workspaces, the
  workspace_id should be nil.

  ## Parameters (required):
  - `:environment` [string]: environment where the organization is being used. ex: "sandbox" or "production"
  - `:id` [string]: unique id required to identify organization. ex: "5656565656565656"
  - `:private_key` [EllipticCurve.Organization()]: PEM string of the private key linked to the organization. ex: "-----<KEY>"

  ## Parameters (optional):
  - `:workspace_id` [string]: unique id of the accessed Workspace, if any. ex: nil or "4848484848484848"
  """
  @spec organization(
          environment: :production | :sandbox,
          id: binary,
          private_key: binary,
          workspace_id: binary | nil
        ) :: Organization.t()
  def organization(parameters) do
    # workspace_id is optional; merging into a defaults map fills it with nil
    # when absent.
    defaults = %{workspace_id: nil}

    %{environment: environment, id: id, private_key: private_key, workspace_id: workspace_id} =
      parameters
      |> Check.enforced_keys([:environment, :id, :private_key])
      |> Enum.into(defaults)

    Organization.validate(environment, id, private_key, workspace_id)
  end
end
|
lib/starkinfra.ex
| 0.808748
| 0.709346
|
starkinfra.ex
|
starcoder
|
defmodule Problem.Data do
  @moduledoc """
  Holds a problem instance (profits, capacities, costs), renders it into a
  CMPL problem file, and splits it into smaller sub-instances.
  """

  alias Problem.CmplFile

  require EEx

  defstruct [:profits, :capacities, :costs]

  @type t :: %__MODULE__{
          profits: profits,
          capacities: capacities,
          costs: costs
        }
  @type profits :: [pos_integer]
  @type capacities :: [pos_integer]
  @type costs :: [[pos_integer]]

  # NOTE(review): compile-time config — the template path is frozen into the
  # beam file when this module is compiled. EEx.function_from_file/5 needs it
  # at compile time, so a runtime Application lookup is not an option here.
  @template_path Application.get_env(:problem, :template_path)
  @template_args [:items, :resources, :profits, :capacities, :costs]

  EEx.function_from_file(:defp, :problem_template, @template_path, @template_args)

  @doc "Builds a new problem instance from profits, capacities and costs."
  @spec new(profits, capacities, costs) :: t
  def new(profits, capacities, costs) do
    %__MODULE__{profits: profits, capacities: capacities, costs: costs}
  end

  @doc """
  Renders `data` through the CMPL template into a temporary `.cmpl` file and
  returns its path. Raises on I/O errors.
  """
  @spec write!(t) :: CmplFile.t | no_return
  def write!(data) do
    problem_path = Temp.path!(%{suffix: ".cmpl"})
    formatted_args = Enum.map(@template_args, &(data |> template_val(&1) |> format_val()))
    File.write!(problem_path, apply(&problem_template/5, formatted_args))
    problem_path
  end

  @doc """
  Splits the problem into sub-problems by chunking each field into `divider`
  pieces and zipping the chunks back together position-wise.
  """
  @spec split(t, pos_integer) :: [t]
  def split(data, divider) do
    # Split each field explicitly instead of iterating Map.keys/1 on the
    # struct: the previous version relied on the map's key enumeration order
    # lining up with the elem/2 positions, which is an implementation detail
    # of small maps and not a guaranteed contract.
    capacities = split_key(:capacities, data.capacities, divider)
    costs = split_key(:costs, data.costs, divider)
    profits = split_key(:profits, data.profits, divider)

    [capacities, costs, profits]
    |> Enum.zip()
    |> Enum.map(fn {cap, cost, profit} ->
      %__MODULE__{capacities: cap, costs: cost, profits: profit}
    end)
  end

  # :items / :resources are rendered as 0-based index lists over the
  # corresponding field.
  defp template_val(data, :items), do: index_list(data.profits)
  defp template_val(data, :resources), do: index_list(data.capacities)
  defp template_val(data, arg), do: Map.fetch!(data, arg)

  # 0-based indices of a list, e.g. [10, 20] -> [0, 1]. Replaces the previous
  # Keyword.values/1 call, which was applied to a non-keyword list.
  defp index_list(list) do
    list
    |> Enum.with_index()
    |> Enum.map(fn {_element, index} -> index end)
  end

  # Lists render as "(a,b,c)" recursively; scalars render as-is.
  defp format_val(v) when is_list(v), do: "(" <> Enum.map_join(v, ",", &format_val/1) <> ")"
  defp format_val(v), do: v

  defp split_key(:profits, data, divider) do
    chunk_enumerable(data, divider)
  end

  # Each capacity c is split into chunk *sizes* that sum back to c.
  defp split_key(:capacities, data, divider) do
    data
    |> Enum.map(fn c -> 1..c |> chunk_enumerable(divider) |> Enum.map(&Enum.count/1) end)
    |> Enum.zip()
    |> Enum.map(&Tuple.to_list/1)
  end

  defp split_key(:costs, data, divider) do
    data
    |> Enum.map(&chunk_enumerable(&1, divider))
    |> Enum.zip()
    |> Enum.map(&Tuple.to_list/1)
  end

  # ceil(count / divider) elements per chunk. Parenthesized for clarity: |>
  # binds looser than /, so this matches the original parse.
  defp chunk_enumerable(e, divider) do
    chunk_size = (Enum.count(e) / divider) |> Float.ceil() |> round()
    Enum.chunk_every(e, chunk_size)
  end
end
|
apps/problem/lib/problem/data.ex
| 0.768473
| 0.483526
|
data.ex
|
starcoder
|
defmodule Engine.DB.Transaction.PaymentV1.Validator.Witness do
  @moduledoc """
  Contains validation logic for signatures, see validate/2 for more details.
  """

  alias Engine.DB.Transaction.PaymentV1.Type
  alias ExPlasma.Crypto

  @type validation_result_t() ::
          :ok
          | {:error, {:witnesses, :superfluous_signature}}
          | {:error, {:witnesses, :missing_signature}}
          | {:error, {:witnesses, :unauthorized_spend}}

  @doc """
  Validates that the inputs `output_guard` match the recovered witnesses.
  Each input must have 1 signature, the witnesses order must match the inputs order.

  Returns
  - `:ok` if each input match their witness,
  or returns:
  - `{:error, {:witnesses, :superfluous_signature}}` if there are more witnesses than inputs
  - `{:error, {:witnesses, :missing_signature}}` if there are more inputs than witnesses
  - `{:error, {:witnesses, :unauthorized_spend}}` if one of the input doesn't have a matching witness

  ## Example:

      iex> Engine.DB.Transaction.PaymentV1.Validator.Witness.validate(
      ...>   [%{output_guard: <<1::160>>, token: <<0::160>>, amount: 1},
      ...>    %{output_guard: <<2::160>>, token: <<0::160>>, amount: 2}],
      ...>   [<<1::160>>, <<2::160>>])
      :ok
  """
  @spec validate(Type.output_list_t(), list(Crypto.address_t())) :: validation_result_t()
  def validate(inputs, witnesses) do
    with :ok <- validate_length(inputs, witnesses),
         :ok <- validate_input_ownership(inputs, witnesses) do
      :ok
    end
  end

  defp validate_length(inputs, witnesses) when length(witnesses) > length(inputs) do
    {:error, {:witnesses, :superfluous_signature}}
  end

  defp validate_length(inputs, witnesses) when length(witnesses) < length(inputs) do
    {:error, {:witnesses, :missing_signature}}
  end

  defp validate_length(_inputs, _witnesses), do: :ok

  # Pairs each input with the witness at the same position. Lengths are
  # guaranteed equal by validate_length/2, so zip drops nothing. Using zip
  # instead of Enum.at/2 per index avoids an O(n^2) list traversal.
  defp validate_input_ownership(inputs, witnesses) do
    inputs
    |> Enum.zip(witnesses)
    |> Enum.all?(fn {input, witness} -> can_spend?(input, witness) end)
    |> case do
      true -> :ok
      false -> {:error, {:witnesses, :unauthorized_spend}}
    end
  end

  # An input is spendable iff its output_guard equals the recovered witness.
  defp can_spend?(%{output_guard: witness}, witness), do: true
  defp can_spend?(_output, _witness), do: false
end
|
apps/engine/lib/engine/db/transaction/payment_v1/validators/witness.ex
| 0.894133
| 0.483405
|
witness.ex
|
starcoder
|
defmodule Ecto.Function do
  @moduledoc """
  Helper macro for defining helper macros for calling DB functions.
  A little Xzibity, but helps.
  """

  @doc ~S"""
  Define new SQL function call.

  ## Options

  Currently there is only one option allowed:

  - `for` - define DB function name to call

  ## Examples

      import Ecto.Function

      defqueryfunc foo                     # Define function without params
      defqueryfunc bar(a, b)               # Explicit parameter names
      defqueryfunc baz/1                   # Define function using arity
      defqueryfunc qux(a, b \\ 0)          # Define function with default arguments
      defqueryfunc quux/1, for: "db_quux"  # Define with alternative DB call

  Then calling such functions in query would be equivalent to:

      from _ in "foos", select: %{foo: foo()}
      # => SELECT foo() AS foo FROM foos
      from q in "bars", select: %{bar: bar(q.a, q.b)}
      # => SELECT bar(bars.a, bars.b) AS bar FROM bars
      from q in "bazs", where: baz(q.a) == true
      # => SELECT * FROM bazs WHERE baz(bazs.a) = TRUE
      from q in "quxs", select: %{one: qux(q.a), two: qux(q.a, q.b)}
      # => SELECT
      #      qux(quxs.a, 0) AS one,
      #      qux(quxs.a, quxs.b) AS two
      #    FROM "quxs"
      from q in "quuxs", select: %{quux: quux(q.a)}
      # => SELECT db_quux(quuxs.a) FROM quuxs

  ## Gotchas

  If your function uses "special syntax" like PostgreSQL [`extract`][extract]
  then this module won't help you and you will be required to write your own
  macro that will handle such case.

      defmacro extract(from, field) do
        query do: fragment("extract(? FROM ?)", field, from)
      end

  This case probably will never be supported in this library and you should
  handle it on your own.

  [extract]: https://www.postgresql.org/docs/current/static/functions-datetime.html#functions-datetime-extract
  """
  # Bodiless head declaring the default for `opts` across all clauses below.
  defmacro defqueryfunc(definition, opts \\ [])

  # Deprecated name/arity form (`defqueryfunc baz/1`): generates placeholder
  # argument names and warns the caller to migrate to the explicit-params form.
  defmacro defqueryfunc({:/, _, [{name, _, _}, params_count]}, opts)
           when is_atom(name) and is_integer(params_count) do
    require Logger

    opts = Keyword.put_new(opts, :for, name)
    params = Macro.generate_arguments(params_count, Elixir)

    # NOTE(review): Logger.warn/1 is deprecated in recent Elixir versions in
    # favour of Logger.warning/2 — left unchanged for compatibility here.
    Logger.warn("""
    func/arity syntax is deprecated, instead use:
    defqueryfunc #{Macro.to_string(quote do: unquote(name)(unquote_splicing(params)))}
    """)

    macro(name, params, __CALLER__, opts)
  end

  # Explicit parameter-list form: `defqueryfunc bar(a, b)`.
  defmacro defqueryfunc({name, _, params}, opts)
           when is_atom(name) and is_list(params) do
    opts = Keyword.put_new(opts, :for, name)
    macro(name, params, __CALLER__, opts)
  end

  # Bare-name form (no params): `defqueryfunc foo`.
  defmacro defqueryfunc({name, _, _}, opts) when is_atom(name) do
    opts = Keyword.put_new(opts, :for, name)
    macro(name, [], __CALLER__, opts)
  end

  # Anything else is malformed — fail compilation, pointing at the call site.
  defmacro defqueryfunc(tree, _) do
    raise CompileError,
      file: __CALLER__.file,
      line: __CALLER__.line,
      description: "Unexpected query function definition #{Macro.to_string(tree)}."
  end

  # Builds the quoted `defmacro name(params)` whose expansion is the
  # fragment call produced by body/3.
  defp macro(name, params, caller, opts) do
    sql_name = Keyword.fetch!(opts, :for)
    {query, args} = build_query(params, caller)

    quote do
      defmacro unquote(name)(unquote_splicing(params)) do
        unquote(body(sql_name, query, args))
      end
    end
  end

  # Hand-assembles the AST of `quote do: fragment("name(?,...)", unquote(a), ...)`
  # so the generated macro splices its arguments into the fragment at the
  # caller's site.
  defp body(name, query, args) do
    fcall = "#{name}(#{query})"
    args = Enum.map(args, &{:unquote, [], [&1]})
    {:quote, [], [[do: {:fragment, [], [fcall | args]}]]}
  end

  # Returns the "?,?,..." placeholder string and the argument AST list,
  # stripping default values (`\\`) and rejecting anything that is not a
  # plain variable.
  defp build_query(args, caller) do
    query =
      "?"
      |> List.duplicate(Enum.count(args))
      |> Enum.join(",")

    args =
      args
      |> Enum.map(fn
        # `arg \\ default` — keep only the variable; the default lives in the
        # generated macro head, not in the SQL fragment.
        {:\\, _, [{_, _, _} = arg, _default]} ->
          arg

        # A bare variable AST node ({name, meta, context}).
        {_, _, env} = arg when is_atom(env) ->
          arg

        _token ->
          raise CompileError,
            file: caller.file,
            line: caller.line,
            description: ~S"only variables and \\ are allowed as arguments in definition header."
      end)

    {query, args}
  end
end
|
lib/ecto_function.ex
| 0.768429
| 0.429609
|
ecto_function.ex
|
starcoder
|
defmodule Prometheus.Metric.Gauge do
@moduledoc """
Gauge metric, to report instantaneous values.
Gauge is a metric that represents a single numerical value that can
arbitrarily go up and down.
A Gauge is typically used for measured values like temperatures or current
memory usage, but also "counts" that can go up and down, like the number of
running processes.
Example use cases for Gauges:
- Inprogress requests;
- Number of items in a queue;
- Free memory;
- Total memory;
- Temperature.
Example:
```
defmodule MyPoolInstrumenter do
use Prometheus.Metric
## to be called at app/supervisor startup.
## to tolerate restarts use declare.
def setup() do
Gauge.declare([name: :my_pool_size,
help: "Pool size."])
Gauge.declare([name: :my_pool_checked_out,
help: "Number of sockets checked out from the pool"])
end
def set_size(size) do
Gauge.set([name: :my_pool_size], size)
end
def track_checked_out_sockets(checkout_fun) do
Gauge.track_inprogress([name: :my_pool_checked_out], checkout_fun.())
end
def track_checked_out_sockets_block(socket) do
Gauge.track_inprogress([name: :my_pool_checked_out]) do
# checkout code
socket
end
end
end
```
"""
use Prometheus.Erlang, :prometheus_gauge
@doc """
Creates a gauge using `spec`.
Raises `Prometheus.MissingMetricSpecKeyError` if required `spec` key is missing.<br>
Raises `Prometheus.InvalidMetricNameError` if metric name is invalid.<br>
Raises `Prometheus.InvalidMetricHelpError` if help is invalid.<br>
Raises `Prometheus.InvalidMetricLabelsError` if labels isn't a list.<br>
Raises `Prometheus.InvalidMetricNameError` if label name is invalid.<br>
Raises `Prometheus.InvalidValueError` exception if duration_unit is unknown or
doesn't match metric name.<br>
Raises `Prometheus.MFAlreadyExistsError` if a gauge with the same `spec` exists.
"""
delegate new(spec)
@doc """
Creates a gauge using `spec`.
If a gauge with the same `spec` exists returns `false`.
Raises `Prometheus.MissingMetricSpecKeyError` if required `spec` key is missing.<br>
Raises `Prometheus.InvalidMetricNameError` if metric name is invalid.<br>
Raises `Prometheus.InvalidMetricHelpError` if help is invalid.<br>
Raises `Prometheus.InvalidMetricLabelsError` if labels isn't a list.<br>
Raises `Prometheus.InvalidMetricNameError` if label name is invalid.<br>
Raises `Prometheus.InvalidValueError` exception if duration_unit is unknown or
doesn't match metric name.
"""
delegate declare(spec)
@doc """
Sets the gauge identified by `spec` to `value`.
Raises `Prometheus.InvalidValueError` exception if `value` isn't
a number or `:undefined`.<br>
Raises `Prometheus.UnknownMetricError` exception if a gauge for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric set(spec, value)
@doc """
Increments the gauge identified by `spec` by `value`.
Raises `Prometheus.InvalidValueError` exception if `value` isn't a number.<br>
Raises `Prometheus.UnknownMetricError` exception if a gauge for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric inc(spec, value \\ 1)
@doc """
Decrements the gauge identified by `spec` by `value`.
Raises `Prometheus.InvalidValueError` exception if `value` isn't a number.<br>
Raises `Prometheus.UnknownMetricError` exception if a gauge for `spec`
can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric dec(spec, value \\ 1)
@doc """
Sets the gauge identified by `spec` to the current unixtime.
Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric set_to_current_time(spec)
@doc """
Sets the gauge identified by `spec` to the number of currently executing `body`s.
Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
Raises `Prometheus.InvalidValueError` exception if fun isn't a function or block.
"""
defmacro track_inprogress(spec, body) do
env = __CALLER__
Prometheus.Injector.inject(
fn block ->
quote do
Prometheus.Metric.Gauge.inc(unquote(spec))
try do
unquote(block)
after
Prometheus.Metric.Gauge.dec(unquote(spec))
end
end
end,
env,
body
)
end
@doc """
Tracks the amount of time spent executing `body`.
Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
Raises `Prometheus.InvalidValueError` exception if `fun` isn't a function or block.
"""
defmacro set_duration(spec, body) do
env = __CALLER__
Prometheus.Injector.inject(
fn block ->
quote do
start_time = :erlang.monotonic_time()
try do
unquote(block)
after
end_time = :erlang.monotonic_time()
Prometheus.Metric.Gauge.set(unquote(spec), end_time - start_time)
end
end
end,
env,
body
)
end
@doc """
Removes gauge series identified by spec.
Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric remove(spec)
@doc """
Resets the value of the gauge identified by `spec`.
Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
delegate_metric reset(spec)
@doc """
Returns the value of the gauge identified by `spec`.

If duration unit set, value will be converted to the duration unit.
[Read more here.](time.html)

Raises `Prometheus.UnknownMetricError` exception if a gauge
for `spec` can't be found.<br>
Raises `Prometheus.InvalidMetricArityError` exception if labels count mismatch.
"""
# Defined via delegate_metric/1 (declared earlier in this module).
delegate_metric value(spec)
end
|
astreu/deps/prometheus_ex/lib/prometheus/metric/gauge.ex
| 0.924637
| 0.856092
|
gauge.ex
|
starcoder
|
defmodule Grouper.Data do
  @moduledoc """
  uniform data layer for groups of processes

  Builds a struct of accessor closures (`t:t/0`) bound to the calling
  process' group, so callers can enumerate, read, write and delete
  group-scoped key-value data without knowing whether the group is backed
  by an application or a generic group.
  """
  require Ex2ms

  alias Grouper.Ident
  alias Grouper.Data.App
  alias Grouper.Data.Group

  @type opts :: keyword()
  @type type() :: atom()
  @type key() :: any()
  @type value() :: any()

  # Accessor closures plus the group leader they were built for; the
  # `group_leader` field is used to validate cached copies of the API.
  @type t :: %__MODULE__{
          enum: (type() -> [{{type(), key()}, value()}] | [{key(), value()}]),
          get: (type(), key() -> value() | nil),
          put: (type(), key(), value() -> value() | nil),
          del: (type(), key() -> value() | nil),
          group_leader: atom() | pid()
        }
  defstruct [:enum, :get, :put, :del, :group_leader]

  # === API ===

  @doc """
  enumerates all key-values of a given data type

  Specify the special type `:_` to read data of all types
  """
  @spec enum(type(), opts()) ::
          {:ok, [{{type(), key()}, value()}] | [{key(), value()}]} | {:error, :no_group}
  def enum(type, opts \\ []) when is_list(opts) do
    case api(opts) do
      {:ok, %__MODULE__{enum: enum_func}} ->
        {:ok, enum_func.(type)}

      {:error, _err} = err ->
        err
    end
  end

  @doc """
  get data value of a given type and a given key
  """
  @spec get(type(), key(), opts()) :: {:ok, value() | nil} | {:error, :no_group}
  def get(type, key, opts \\ []) when is_list(opts) do
    case api(opts) do
      {:ok, %__MODULE__{get: get_func}} ->
        {:ok, get_func.(type, key)}

      {:error, _err} = err ->
        err
    end
  end

  @doc """
  store data value of a given type and a given key
  """
  @spec put(type(), key(), value(), opts()) :: {:ok, value() | nil} | {:error, :no_group}
  def put(type, key, val, opts \\ []) when is_list(opts) do
    case api(opts) do
      {:ok, %__MODULE__{put: put_func}} ->
        {:ok, put_func.(type, key, val)}

      {:error, _err} = err ->
        err
    end
  end

  @doc """
  delete data value of a given type and a given key
  """
  @spec del(type(), key(), opts()) :: {:ok, value() | nil} | {:error, :no_group}
  def del(type, key, opts \\ []) when is_list(opts) do
    case api(opts) do
      {:ok, %__MODULE__{del: del_func}} ->
        {:ok, del_func.(type, key)}

      {:error, _err} = err ->
        err
    end
  end

  @doc """
  identifies which type of group is being used, builds API functions, and
  caches them in the process dictionary

  ## Options

  - `:leader` - override detected group leader with specified one (mostly
    used in testing)
  """
  @spec api(opts()) :: {:ok, t()} | {:error, :no_group}
  def api(opts) when is_list(opts) do
    # Unique sentinel distinguishes "no :leader option given" from an
    # explicit nil (which is rejected below).
    no_leader = make_ref()
    gl_opt = Keyword.get(opts, :leader, no_leader)

    # API will be chosen among (in descending order of preference):
    #   - API constructed from explictly given pid or atom
    #   - cached API when no leader is given and a cached copy exists
    #   - construct API for current process' group leader
    gl =
      cond do
        is_nil(gl_opt) ->
          raise ArgumentError, "group leader must be pid or atom, not (nil)"

        is_pid(gl_opt) or is_atom(gl_opt) ->
          gl_opt

        gl_opt == no_leader ->
          group_leader = Process.group_leader()

          # Early return via throw (caught in this function's `catch` clause):
          # reuse the cached API only when it was built for the *current*
          # group leader.
          if match?(%__MODULE__{group_leader: ^group_leader}, api = Process.get(:grouper_api)) do
            throw({:ok, api})
          end

          group_leader

        true ->
          raise ArgumentError, "group leader must be pid or atom, not (#{inspect(gl_opt)})"
      end

    case Ident.identify_group_leader(gl) do
      {:error, :dead} ->
        {:error, :no_group}

      {:error, :unknown_type} ->
        {:error, :no_group}

      # Shell/user/IO-capture leaders carry no group data.
      {:ok, {mod, nil, ^gl}} when mod in [:shell, :user, :capture_io] ->
        {:error, :no_group}

      {:ok, {mod, meta, ^gl}} when mod in [:application, :group] ->
        # Pick the backend implementation matching the group kind.
        impl =
          case mod do
            :application -> App
            :group -> Group
          end

        new_api = %__MODULE__{
          enum: &impl.enum(meta, &1),
          get: &impl.get(meta, &1, &2),
          put: &impl.put(meta, &1, &2, &3),
          del: &impl.del(meta, &1, &2),
          group_leader: gl
        }

        :ok = impl.init(meta, opts)

        # cache it, but only if we looked up our own group leader
        if gl == Process.group_leader(), do: Process.put(:grouper_api, new_api)

        {:ok, new_api}
    end
  catch
    # Receives the cache-hit early return thrown above.
    {:ok, _api} = result ->
      result
  end
end
|
lib/grouper/data.ex
| 0.846197
| 0.453201
|
data.ex
|
starcoder
|
defmodule AWS.MarketplaceMetering do
  @moduledoc """
  AWS Marketplace Metering Service

  This reference provides descriptions of the low-level AWS Marketplace Metering
  Service API.

  AWS Marketplace sellers can use this API to submit usage data for custom usage
  dimensions.

  For information on the permissions you need to use this API, see [AWS Marketplace metering and entitlement API
  permissions](https://docs.aws.amazon.com/marketplace/latest/userguide/iam-user-policy-for-aws-marketplace-actions.html)
  in the *AWS Marketplace Seller Guide.*

  ## Submitting Metering Records

  * *MeterUsage* - Submits the metering record for an AWS Marketplace
  product. `MeterUsage` is called from an EC2 instance or a container running on
  EKS or ECS.

  * *BatchMeterUsage* - Submits the metering record for a set of
  customers. `BatchMeterUsage` is called from a software-as-a-service (SaaS)
  application.

  ## Accepting New Customers

  * *ResolveCustomer* - Called by a SaaS application during the
  registration process. When a buyer visits your website during the registration
  process, the buyer submits a Registration Token through the browser. The
  Registration Token is resolved through this API to obtain a `CustomerIdentifier`
  along with the `CustomerAWSAccountId` and `ProductCode`.

  ## Entitlement and Metering for Paid Container Products

  * Paid container software products sold through AWS Marketplace must
  integrate with the AWS Marketplace Metering Service and call the `RegisterUsage`
  operation for software entitlement and metering. Free and BYOL products for
  Amazon ECS or Amazon EKS aren't required to call `RegisterUsage`, but you can do
  so if you want to receive usage data in your seller reports. For more
  information on using the `RegisterUsage` operation, see [Container-Based Products](https://docs.aws.amazon.com/marketplace/latest/userguide/container-based-products.html).

  `BatchMeterUsage` API calls are captured by AWS CloudTrail. You can use
  Cloudtrail to verify that the SaaS metering records that you sent are accurate
  by searching for records with the `eventName` of `BatchMeterUsage`. You can also
  use CloudTrail to audit records over time. For more information, see the * [AWS CloudTrail User
  Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html).*
  """

  alias AWS.Client
  alias AWS.Request

  @doc """
  Returns the endpoint, protocol and request-signing metadata used to build
  requests to the AWS Marketplace Metering Service.
  """
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2016-01-14",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "metering.marketplace",
      global?: false,
      protocol: "json",
      service_id: "Marketplace Metering",
      signature_version: "v4",
      signing_name: "aws-marketplace",
      target_prefix: "AWSMPMeteringService"
    }
  end

  @doc """
  `BatchMeterUsage` is called from a SaaS application listed on AWS Marketplace to
  post metering records for a set of customers.

  For identical requests, the API is idempotent; requests can be retried with the
  same records or a subset of the input records.

  Every request to `BatchMeterUsage` is for one product. If you need to meter
  usage for multiple products, you must make multiple calls to `BatchMeterUsage`.

  Usage records are expected to be submitted as quickly as possible after the
  event that is being recorded, and are not accepted more than 6 hours after the
  event.

  `BatchMeterUsage` can process up to 25 `UsageRecords` at a time.

  A `UsageRecord` can optionally include multiple usage allocations, to provide
  customers with usage data split into buckets by tags that you define (or allow
  the customer to define).

  `BatchMeterUsage` returns a list of `UsageRecordResult` objects, showing the
  result for each `UsageRecord`, as well as a list of `UnprocessedRecords`,
  indicating errors in the service side that you should retry.

  `BatchMeterUsage` requests must be less than 1MB in size.

  For an example of using `BatchMeterUsage`, see [ BatchMeterUsage code example](https://docs.aws.amazon.com/marketplace/latest/userguide/saas-code-examples.html#saas-batchmeterusage-example)
  in the *AWS Marketplace Seller Guide*.
  """
  def batch_meter_usage(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "BatchMeterUsage", input, options)
  end

  @doc """
  API to emit metering records.

  For identical requests, the API is idempotent. It simply returns the metering
  record ID.

  `MeterUsage` is authenticated on the buyer's AWS account using credentials from
  the EC2 instance, ECS task, or EKS pod.

  `MeterUsage` can optionally include multiple usage allocations, to provide
  customers with usage data split into buckets by tags that you define (or allow
  the customer to define).

  Usage records are expected to be submitted as quickly as possible after the
  event that is being recorded, and are not accepted more than 6 hours after the
  event.
  """
  def meter_usage(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "MeterUsage", input, options)
  end

  @doc """
  Paid container software products sold through AWS Marketplace must integrate
  with the AWS Marketplace Metering Service and call the `RegisterUsage` operation
  for software entitlement and metering.

  Free and BYOL products for Amazon ECS or Amazon EKS aren't required to call
  `RegisterUsage`, but you may choose to do so if you would like to receive usage
  data in your seller reports. The sections below explain the behavior of
  `RegisterUsage`. `RegisterUsage` performs two primary functions: metering and
  entitlement.

  * *Entitlement*: `RegisterUsage` allows you to verify that the
  customer running your paid software is subscribed to your product on AWS
  Marketplace, enabling you to guard against unauthorized use. Your container
  image that integrates with `RegisterUsage` is only required to guard against
  unauthorized use at container startup, as such a
  `CustomerNotSubscribedException` or `PlatformNotSupportedException` will only be
  thrown on the initial call to `RegisterUsage`. Subsequent calls from the same
  Amazon ECS task instance (e.g. task-id) or Amazon EKS pod will not throw a
  `CustomerNotSubscribedException`, even if the customer unsubscribes while the
  Amazon ECS task or Amazon EKS pod is still running.

  * *Metering*: `RegisterUsage` meters software use per ECS task, per
  hour, or per pod for Amazon EKS with usage prorated to the second. A minimum of
  1 minute of usage applies to tasks that are short lived. For example, if a
  customer has a 10 node Amazon ECS or Amazon EKS cluster and a service configured
  as a Daemon Set, then Amazon ECS or Amazon EKS will launch a task on all 10
  cluster nodes and the customer will be charged: (10 * hourly_rate). Metering for
  software use is automatically handled by the AWS Marketplace Metering Control
  Plane -- your software is not required to perform any metering specific actions,
  other than call `RegisterUsage` once for metering of software use to commence.
  The AWS Marketplace Metering Control Plane will also continue to bill customers
  for running ECS tasks and Amazon EKS pods, regardless of the customers
  subscription state, removing the need for your software to perform entitlement
  checks at runtime.
  """
  def register_usage(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "RegisterUsage", input, options)
  end

  @doc """
  `ResolveCustomer` is called by a SaaS application during the registration
  process.

  When a buyer visits your website during the registration process, the buyer
  submits a registration token through their browser. The registration token is
  resolved through this API to obtain a `CustomerIdentifier` along with the
  `CustomerAWSAccountId` and `ProductCode`.

  The API needs to called from the seller account id used to publish the SaaS
  application to successfully resolve the token.

  For an example of using `ResolveCustomer`, see [ ResolveCustomer code example](https://docs.aws.amazon.com/marketplace/latest/userguide/saas-code-examples.html#saas-resolvecustomer-example)
  in the *AWS Marketplace Seller Guide*.
  """
  def resolve_customer(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ResolveCustomer", input, options)
  end
end
|
lib/aws/generated/marketplace_metering.ex
| 0.907621
| 0.571348
|
marketplace_metering.ex
|
starcoder
|
defmodule Mojito.Headers do
  @moduledoc ~S"""
  Functions for working with HTTP request and response headers, as described
  in the [HTTP 1.1 specification](https://www.w3.org/Protocols/rfc2616/rfc2616.html).

  Headers are represented in Elixir as a list of `{"header_name", "value"}`
  tuples. Multiple entries for the same header name are allowed.

  Capitalization of header names is preserved during insertion,
  however header names are handled case-insensitively during
  lookup and deletion.
  """

  @type headers :: Mojito.headers()

  @doc ~S"""
  Returns the value for the given HTTP request or response header,
  or `nil` if not found.

  Header names are matched case-insensitively.

  If more than one matching header is found, the values are joined with
  `","` as specified in [RFC 2616](https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2).

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.get(headers, "header2")
      "bar"
      iex> Mojito.Headers.get(headers, "HEADER1")
      "foo,baz"
      iex> Mojito.Headers.get(headers, "header3")
      nil
  """
  @spec get(headers, String.t()) :: String.t() | nil
  def get(headers, name) do
    case get_values(headers, name) do
      [] -> nil
      values -> Enum.join(values, ",")
    end
  end

  @doc ~S"""
  Returns all values for the given HTTP request or response header.
  Returns an empty list if none found.

  Header names are matched case-insensitively.

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.get_values(headers, "header2")
      ["bar"]
      iex> Mojito.Headers.get_values(headers, "HEADER1")
      ["foo", "baz"]
      iex> Mojito.Headers.get_values(headers, "header3")
      []
  """
  @spec get_values(headers, String.t()) :: [String.t()]
  def get_values(headers, name) do
    downcased_name = String.downcase(name)

    # Single pass preserving order of appearance (the previous recursive
    # helper rebuilt the accumulator with `++` on every match, i.e. O(n^2)).
    for {key, value} <- headers, String.downcase(key) == downcased_name, do: value
  end

  @doc ~S"""
  Puts the given header `value` under `name`, removing any values previously
  stored under `name`. The new header is placed at the end of the list.

  Header names are matched case-insensitively, but case of `name` is preserved
  when adding the header.

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.put(headers, "HEADER1", "quux")
      [{"header2", "bar"}, {"HEADER1", "quux"}]
  """
  @spec put(headers, String.t(), String.t()) :: headers
  def put(headers, name, value) do
    delete(headers, name) ++ [{name, value}]
  end

  @doc ~S"""
  Removes all instances of the given header.

  Header names are matched case-insensitively.

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.delete(headers, "HEADER1")
      [{"header2", "bar"}]
  """
  @spec delete(headers, String.t()) :: headers
  def delete(headers, name) do
    downcased_name = String.downcase(name)
    Enum.reject(headers, fn {key, _value} -> String.downcase(key) == downcased_name end)
  end

  @doc ~S"""
  Returns an ordered list of the header names from the given headers.
  Header names are returned in lowercase.

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.keys(headers)
      ["header1", "header2"]
  """
  @spec keys(headers) :: [String.t()]
  def keys(headers) do
    # Enum.uniq/1 keeps the first occurrence, preserving order of appearance.
    headers
    |> Enum.map(fn {name, _value} -> String.downcase(name) end)
    |> Enum.uniq()
  end

  @doc ~S"""
  Returns a copy of the given headers where all header names are lowercased
  and multiple values for the same header have been joined with `","`.

  Example:

      iex> headers = [
      ...>   {"header1", "foo"},
      ...>   {"header2", "bar"},
      ...>   {"Header1", "baz"}
      ...> ]
      iex> Mojito.Headers.normalize(headers)
      [{"header1", "foo,baz"}, {"header2", "bar"}]
  """
  @spec normalize(headers) :: headers
  def normalize(headers) do
    # Group values by lowercased name; Enum.group_by/3 preserves the order of
    # values within each group.
    grouped =
      Enum.group_by(
        headers,
        fn {name, _value} -> String.downcase(name) end,
        fn {_name, value} -> value end
      )

    # keys/1 provides first-appearance ordering; every key is present in
    # `grouped` by construction, so fetch! cannot fail.
    for name <- keys(headers), do: {name, Enum.join(Map.fetch!(grouped, name), ",")}
  end

  @doc ~S"""
  Returns an HTTP Basic Auth header from the given username and password.

  Example:

      iex> Mojito.Headers.auth_header("hello", "world")
      {"authorization", "Basic aGVsbG86d29ybGQ="}
  """
  @spec auth_header(String.t(), String.t()) :: Mojito.header()
  def auth_header(username, password) do
    auth64 = "#{username}:#{password}" |> Base.encode64()
    {"authorization", "Basic #{auth64}"}
  end

  @doc ~S"""
  Convert non string values to string where is possible.

  Example:

      iex> Mojito.Headers.convert_values_to_string([{"content-length", 0}])
      [{"content-length", "0"}]
  """
  @spec convert_values_to_string(headers) :: headers
  def convert_values_to_string(headers) do
    # Numbers and atoms get stringified; anything else passes through as-is.
    Enum.map(headers, fn
      {name, value} when is_number(value) or is_atom(value) -> {name, to_string(value)}
      header -> header
    end)
  end
end
|
lib/mojito/headers.ex
| 0.875628
| 0.503357
|
headers.ex
|
starcoder
|
defmodule Artemis.Helpers.Time do
  @moduledoc """
  Helpers for truncating datetimes to whole units (day, hour, minute, second)
  and for computing the milliseconds remaining until the next whole unit.
  All functions default to the current time when none is given. Built on Timex.
  """

  @doc """
  Return the beginning of the day
  """
  def beginning_of_day(time \\ Timex.now()) do
    Timex.beginning_of_day(time)
  end

  @doc """
  Return the beginning of the next day
  """
  def get_next_day(time \\ Timex.now()) do
    time
    |> Timex.shift(days: 1)
    |> beginning_of_day()
  end

  @doc """
  Return the closest whole day
  """
  def get_closest_day(time \\ Timex.now()) do
    # Noon or later rounds forward to the next midnight.
    case time.hour >= 12 do
      true -> get_next_day(time)
      false -> beginning_of_day(time)
    end
  end

  @doc """
  Return milliseconds until the beginning of the next day. If a time is not
  passed, current timestamp is used.
  """
  def get_milliseconds_to_next_day(time \\ Timex.now()) do
    # Diff in microseconds, convert to milliseconds, round up.
    time
    |> get_next_day()
    |> Timex.diff(time, :microseconds)
    |> Kernel./(1000)
    |> ceil()
  end

  @doc """
  Return the beginning of the hour
  """
  def beginning_of_hour(time \\ Timex.now()) do
    # Zero out minute/second fields, then drop sub-second precision.
    time
    |> Map.put(:second, 0)
    |> Map.put(:minute, 0)
    |> DateTime.truncate(:second)
  end

  @doc """
  Return the beginning of the next hour
  """
  def get_next_hour(time \\ Timex.now()) do
    time
    |> Timex.shift(hours: 1)
    |> beginning_of_hour()
  end

  @doc """
  Return the closest whole hour
  """
  def get_closest_hour(time \\ Timex.now()) do
    case time.minute >= 30 do
      true -> get_next_hour(time)
      false -> beginning_of_hour(time)
    end
  end

  @doc """
  Return milliseconds until the beginning of the next hour. If a time is not
  passed, current timestamp is used.
  """
  def get_milliseconds_to_next_hour(time \\ Timex.now()) do
    time
    |> get_next_hour()
    |> Timex.diff(time, :microseconds)
    |> Kernel./(1000)
    |> ceil()
  end

  @doc """
  Return the beginning of the minute
  """
  def beginning_of_minute(time \\ Timex.now()) do
    time
    |> Map.put(:second, 0)
    |> DateTime.truncate(:second)
  end

  @doc """
  Return the beginning of the next minute
  """
  def get_next_minute(time \\ Timex.now()) do
    time
    |> Timex.shift(minutes: 1)
    |> beginning_of_minute()
  end

  @doc """
  Return the closest whole minute
  """
  def get_closest_minute(time \\ Timex.now()) do
    case time.second >= 30 do
      true -> get_next_minute(time)
      false -> beginning_of_minute(time)
    end
  end

  @doc """
  Return the beginning of the second
  """
  def beginning_of_second(time \\ Timex.now()) do
    time
    |> Map.put(:millisecond, 0)
    |> DateTime.truncate(:millisecond)
  end

  @doc """
  Return the beginning of the next second
  """
  def get_next_second(time \\ Timex.now()) do
    time
    |> Timex.shift(seconds: 1)
    |> beginning_of_second()
  end

  @doc """
  Return milliseconds until the beginning of the next minute. If a time is
  not passed, current timestamp is used.
  """
  def get_milliseconds_to_next_minute(time \\ Timex.now()) do
    time
    |> get_next_minute()
    |> Timex.diff(time, :microseconds)
    |> Kernel./(1000)
    |> ceil()
  end

  @doc """
  Return milliseconds until the beginning of the next second. If a time is
  not passed, current timestamp is used.
  """
  def get_milliseconds_to_next_second(time \\ Timex.now()) do
    time
    |> get_next_second()
    |> Timex.diff(time, :microseconds)
    |> Kernel./(1000)
    |> ceil()
  end

  @doc """
  Convert milliseconds to a human readable string
  """
  def humanize_milliseconds(value) do
    value
    |> Timex.Duration.from_milliseconds()
    |> Timex.Format.Duration.Formatters.Humanized.format()
  end

  @doc """
  Convert seconds to a human readable string
  """
  def humanize_seconds(value) do
    value
    |> Timex.Duration.from_seconds()
    |> Timex.Format.Duration.Formatters.Humanized.format()
  end

  @doc """
  Print humanized datetime

  NOTE: the timezone is hard-coded to America/New_York; returns nil when the
  value cannot be converted or formatted.
  """
  def humanize_datetime(value, format \\ "{Mfull} {D}, {YYYY} {h12}:{m}:{s}{am} {Zabbr}") do
    value
    |> Timex.Timezone.convert("America/New_York")
    |> Timex.format!(format)
  rescue
    _ -> nil
  end
end
|
apps/artemis/lib/artemis/helpers/time.ex
| 0.831109
| 0.723016
|
time.ex
|
starcoder
|
defmodule Ecto.Rescope do
  @moduledoc """
  Rescopes the default query on an Ecto schema.

  An Ecto Schema is typically scoped by the `Ecto.Schema.schema/2` macro,
  which defines the query as:

      def __schema__(:query) do
        %Ecto.Query{
          from: %Ecto.Query.FromExpr{
            source: {unquote(source), __MODULE__},
            prefix: unquote(prefix)
          }
        }
      end

  This has a downside in that the schema cannot define a default scope for
  all queries to follow. For instance, perhaps one wants to ensure only soft-deleted
  records are returned by default. To accomplish this, one might exclude
  any record with an `is_deleted` field set to `true`.

      def without_deleted(query) do
        from(q in query, where: q.is_deleted == false)
      end

  ## Usage

  By using the `rescope/1` macro provided by `Ecto.Rescope`, one can override by
  passing a function that takes an `Ecto.Query` struct as the sole
  argument. This function must return an `Ecto.Query` struct.

  > NOTE: The macro must be invoked after the `Ecto.Schema.schema/2` macro.

  ### Example

      import Ecto.Rescope

      schema "user" do
        field(:is_deleted, :boolean)
      end

      rescope(&without_deleted/1)

      def without_deleted(query) do
        from(q in query, where: q.is_deleted == false)
      end

  At this point, any queries using the schema will now be defined with the new default scope.
  """

  @doc """
  Resets the default query on the Ecto schema.

  Accepts a 1-arity function that takes and returns an `Ecto.Query` struct.

  In addition to redefining the default scope, the macro defines two utility functions:
  `unscoped/0` and `scoped/0`. These are used in situations where the overridden scope is
  either undesirable, or caveats exist that prevent use of the rescoped query.

  See: [README](readme.html#caveats) for caveats
  """
  defmacro rescope(scope_fn) do
    quote do
      # Allow override of the schema function.
      defoverridable __schema__: 1

      @doc """
      Returns the default Ecto defined query for `#{__MODULE__}`.
      """
      @spec unscoped() :: Ecto.Query.t()
      def unscoped(), do: Ecto.Queryable.to_query({__schema__(:source), __MODULE__})

      # Invoke the default query object from the module (via `super`) and pass
      # it to the default scoping function for composition.
      def __schema__(:query), do: unquote(scope_fn).(super(:query))

      # Ensure we still allow fields, assocs, and embeds to process
      def __schema__(args), do: super(args)

      @doc """
      Returns the redefined scope query for `#{__MODULE__}`.
      """
      @spec scoped() :: Ecto.Query.t()
      def scoped(), do: __schema__(:query)
    end
  end
end
|
lib/ecto_rescope.ex
| 0.905084
| 0.416737
|
ecto_rescope.ex
|
starcoder
|
defmodule ExifParser.ImageFileDirectory do
  @moduledoc """
  Tiff Image File Directory parser. Parses the IFD that provides the number of
  tags in the IFD and offset from the start of the file buffer.

  An IFD consists of a 2-byte entry count, `num_entries` 12-byte tag records,
  and a trailing 4-byte offset to the next IFD (`0` when there is none).

  ## Struct

  ### num_entries
  The length of the IFD descriptor helps in parsing the IFD.
  This can be used to find the end of the IFD.

  ### tag_lists
  This holds the map of tags. The keys in the map are tag_names and the values
  are tag_values.

  ### offset
  This represents the non-neg-integer that gives the number of bytes offset from
  the start of tiff buffer. NOTE: during parsing, `find_ifds/3` temporarily
  stores the raw IFD byte slice in this field before `parse_tags/4` rebuilds
  the struct, hence the `binary` alternative in the type below.
  """
  defstruct num_entries: nil,
            tag_lists: %{},
            offset: nil

  @type t :: %__MODULE__{
          num_entries: non_neg_integer,
          tag_lists: map,
          offset: non_neg_integer | binary
        }

  alias ExifParser.Tag

  @doc """
  The IFDs are parsed from the tiff buffer. The map keys for the IFDs are
  `:ifd0`, `:ifd1`, ... and the values are prettified if requested.

  The output is made pretty by default.
  """
  @spec parse_tiff_body(
          endian :: :little | :big,
          start_of_tiff :: binary,
          offset :: non_neg_integer,
          prettify :: boolean
        ) :: map()
  def parse_tiff_body(endian, start_of_tiff, offset, prettify \\ true)

  def parse_tiff_body(endian, start_of_tiff, offset, false) do
    parse_ifds(endian, start_of_tiff, offset, :tiff)
    |> name_primary_ifds()
  end

  def parse_tiff_body(endian, start_of_tiff, offset, true) do
    parse_tiff_body(endian, start_of_tiff, offset, false)
    |> ExifParser.Pretty.prettify()
  end

  @doc """
  This method parses the ifds that are reachable, given the endianess,
  tiff_buffer, and the offset.

  The IFDs are first found and the tags in each of them parsed. An IFD with
  zero entries is parsed to `nil`.
  """
  @spec parse_ifds(
          endian :: :little | :big,
          start_of_tiff :: binary,
          offset :: non_neg_integer,
          tag_type :: ExifParser.Tag.LookUp.tag_type()
        ) :: [t() | nil]
  def parse_ifds(endian, start_of_tiff, offset, tag_type) do
    find_ifds(endian, start_of_tiff, offset)
    |> Enum.map(&parse_tags(&1, endian, start_of_tiff, tag_type))
  end

  # Names successive IFDs :ifd0, :ifd1, ... in a map. The atom set is bounded
  # by the number of IFDs in the file.
  defp name_primary_ifds(ifds) do
    ifds
    |> Stream.with_index()
    |> Enum.reduce(Map.new(), fn {ifd, k}, acc ->
      Map.put(acc, String.to_atom("ifd#{k}"), ifd)
    end)
  end

  # A next-IFD offset of 0 marks the end of the IFD chain.
  defp find_ifds(_, _, 0) do
    []
  end

  defp find_ifds(endian, start_of_tiff, offset) do
    # Read the 2-byte entry count located at `offset`.
    <<_::binary-size(offset), num_entries::binary-size(2), _rest::binary>> = start_of_tiff
    num_entries = :binary.decode_unsigned(num_entries, endian)
    ifd_byte_size = num_entries * 12

    # Slice out the IFD body (12 bytes per entry) and the trailing 4-byte
    # pointer to the next IFD.
    <<_::binary-size(offset), _num_entries::binary-size(2),
      ifd_buffer::binary-size(ifd_byte_size), next_ifd_offset::binary-size(4),
      _rest::binary>> = start_of_tiff

    next_ifd_offset = :binary.decode_unsigned(next_ifd_offset, endian)

    [
      %__MODULE__{num_entries: num_entries, offset: ifd_buffer}
      | find_ifds(endian, start_of_tiff, next_ifd_offset)
    ]
  end

  # Empty IFDs carry no tags; represented as nil in the result list.
  defp parse_tags(%__MODULE__{num_entries: 0}, _endian, _start, _tag_type), do: nil

  defp parse_tags(
         %__MODULE__{offset: ifd_offset, num_entries: num_entries},
         endian,
         start_of_tiff,
         tag_type
       ) do
    tag_lists =
      0..(num_entries - 1)
      |> Enum.reduce(Map.new(), fn x, acc ->
        # Each tag record is a 12-byte slot within the IFD slice.
        tag_offset = x * 12
        <<_::binary-size(tag_offset), tag_buffer::binary-size(12), _rest::binary>> = ifd_offset
        tag = Tag.parse(tag_buffer, endian, start_of_tiff, tag_type)
        Map.put(acc, tag.tag_name, tag)
      end)

    %__MODULE__{offset: ifd_offset, num_entries: num_entries, tag_lists: tag_lists}
  end
end
|
lib/exif_parser/image_file_directory.ex
| 0.642881
| 0.642292
|
image_file_directory.ex
|
starcoder
|
defmodule Concentrate.Encoder.VehiclePositions do
  @moduledoc """
  Encodes a list of parsed data into a VehiclePositions.pb file.
  """
  @behaviour Concentrate.Encoder
  alias Concentrate.{TripDescriptor, VehiclePosition}
  import Concentrate.Encoder.GTFSRealtimeHelpers

  @impl Concentrate.Encoder
  def encode_groups(groups) when is_list(groups) do
    # Build a FeedMessage map and encode it with the protobuf codec.
    message = %{
      header: feed_header(),
      entity: Enum.flat_map(groups, &build_entity/1)
    }

    :gtfs_realtime_proto.encode_msg(message, :FeedMessage)
  end

  @doc """
  Builds feed entities from an ungrouped list by grouping it first.
  """
  def feed_entity(list) do
    list
    |> group
    |> Enum.flat_map(&build_entity/1)
  end

  @doc """
  Builds one feed entity per vehicle position in the group; all positions
  share the group's trip descriptor (or get a nil/UNSCHEDULED trip when the
  group has no descriptor).
  """
  def build_entity({%TripDescriptor{} = td, vps, _stus}) do
    trip = trip_descriptor(td)

    for vp <- vps do
      %{
        id: entity_id(vp),
        vehicle: build_vehicle(vp, trip)
      }
    end
  end

  def build_entity({nil, vps, _stus}) do
    # vehicles without a trip
    for vp <- vps do
      # If the position still carries a trip_id, mark it UNSCHEDULED;
      # otherwise the trip is nil and dropped by build_vehicle/2.
      trip =
        if trip_id = VehiclePosition.trip_id(vp) do
          %{
            trip_id: trip_id,
            schedule_relationship: :UNSCHEDULED
          }
        end

      %{
        id: entity_id(vp),
        vehicle: build_vehicle(vp, trip)
      }
    end
  end

  # Assembles the GTFS-realtime vehicle map (descriptor, position, metadata),
  # dropping nil fields so they are omitted from the encoded message.
  defp build_vehicle(%VehiclePosition{} = vp, trip) do
    descriptor =
      drop_nil_values(%{
        id: VehiclePosition.id(vp),
        label: VehiclePosition.label(vp),
        license_plate: VehiclePosition.license_plate(vp)
      })

    position =
      drop_nil_values(%{
        latitude: VehiclePosition.latitude(vp),
        longitude: VehiclePosition.longitude(vp),
        bearing: VehiclePosition.bearing(vp),
        speed: VehiclePosition.speed(vp)
      })

    drop_nil_values(%{
      trip: trip,
      vehicle: descriptor,
      position: position,
      stop_id: VehiclePosition.stop_id(vp),
      current_stop_sequence: VehiclePosition.stop_sequence(vp),
      current_status: VehiclePosition.status(vp),
      timestamp: VehiclePosition.last_updated(vp),
      occupancy_status: VehiclePosition.occupancy_status(vp),
      occupancy_percentage: VehiclePosition.occupancy_percentage(vp)
    })
  end

  @doc """
  Returns a feed-entity id: the vehicle id, falling back to the trip id, then
  a unique integer string so every entity has an id.
  """
  def entity_id(vp) do
    VehiclePosition.id(vp) || VehiclePosition.trip_id(vp) ||
      Integer.to_string(:erlang.unique_integer())
  end

  @doc """
  Builds the GTFS-realtime TripDescriptor map, dropping nil fields.
  """
  def trip_descriptor(update) do
    drop_nil_values(%{
      trip_id: TripDescriptor.trip_id(update),
      route_id: TripDescriptor.route_id(update),
      direction_id: TripDescriptor.direction_id(update),
      start_time: TripDescriptor.start_time(update),
      start_date: encode_date(TripDescriptor.start_date(update)),
      schedule_relationship: TripDescriptor.schedule_relationship(update)
    })
  end
end
|
lib/concentrate/encoder/vehicle_positions.ex
| 0.748995
| 0.413152
|
vehicle_positions.ex
|
starcoder
|
defmodule Sanbase.Validation do
import Sanbase.DateTimeUtils, only: [str_to_sec: 1]
defguard is_valid_price(price) when is_number(price) and price >= 0
defguard is_valid_percent(percent) when is_number(percent) and percent >= -100
defguard is_valid_percent_change(percent) when is_number(percent) and percent > 0
defguard is_valid_min_max(min, max)
when min < max and is_number(min) and is_number(max)
defguard is_valid_min_max_price(min, max)
when min < max and is_valid_price(min) and is_valid_price(max)
def valid_percent?(percent) when is_valid_percent(percent), do: :ok
def valid_percent?(percent),
do: {:error, "#{inspect(percent)} is not a valid percent"}
def valid_time_window?(time_window) when is_binary(time_window) do
Regex.match?(~r/^\d+[smhdw]$/, time_window)
|> case do
true -> :ok
false -> {:error, "#{inspect(time_window)} is not a valid time window"}
end
end
def valid_time_window?(time_window),
do: {:error, "#{inspect(time_window)} is not a valid time window"}
def time_window_is_whole_days?(time_window) do
case rem(str_to_sec(time_window), 86_400) do
0 ->
:ok
_ ->
{:error, "Time window should represent whole days. Time window provided: #{time_window}"}
end
end
def time_window_bigger_than?(time_window, min_time_window) do
case str_to_sec(time_window) >= str_to_sec(min_time_window) do
true ->
:ok
false ->
{:error,
"Time window should be bigger than #{min_time_window}. Time window provided: #{time_window}"}
end
end
def valid_iso8601_time_string?(time) when is_binary(time) do
case Time.from_iso8601(time) do
{:ok, _time} ->
:ok
_ ->
{:error, "#{time} is not a valid ISO8601 time"}
end
end
def valid_iso8601_time_string?(str), do: {:error, "#{inspect(str)} is not a valid ISO8601 time"}
def valid_threshold?(t) when is_number(t) and t > 0, do: :ok
def valid_threshold?(t) do
{:error, "#{inspect(t)} is not valid threshold. It must be a number bigger than 0"}
end
def valid_metric?(metric) do
Sanbase.Metric.has_metric?(metric)
end
def valid_signal?(signal) do
Sanbase.Signal.has_signal?(signal)
end
def valid_5m_min_interval_metric?(metric) do
with {:ok, %{min_interval: min_interval}} <- Sanbase.Metric.metadata(metric),
interval_sec when is_number(interval_sec) and interval_sec <= 300 <-
Sanbase.DateTimeUtils.str_to_sec(min_interval) do
:ok
else
_ ->
{:error,
"The metric #{inspect(metric)} is not supported or is mistyped or does not have min interval equal or less than to 5 minutes."}
end
end
# Validates that `metric` exists and that its minimal interval is strictly
# above 5 minutes (300 seconds). Fix: the error message previously claimed the
# requirement was "min interval equal or bigger than to 1 day", contradicting
# the actual `> 300` seconds check; it now states the real 5-minute threshold.
def valid_above_5m_min_interval_metric?(metric) do
  with {:ok, %{min_interval: min_interval}} <- Sanbase.Metric.metadata(metric),
       interval_sec when is_number(interval_sec) and interval_sec > 300 <-
         Sanbase.DateTimeUtils.str_to_sec(min_interval) do
    :ok
  else
    # Single catch-all: metadata lookup failed or the interval is <= 5 min.
    _ ->
      {:error,
       "The metric #{inspect(metric)} is not supported or is mistyped or does not have min interval bigger than 5 minutes."}
  end
end
# Validates a URL string: it must be non-empty and have a scheme, host and
# path. When `opts` contains `check_host_online: true`, the host is also
# DNS-resolved via :inet.gethostbyname/1 to verify it exists.
def valid_url?(url, opts \\ []) do
  %URI{scheme: scheme, host: host, path: path} = URI.parse(url)

  cond do
    url == "" ->
      {:error, "URL is an empty string"}

    is_nil(scheme) ->
      {:error, "URL '#{url}' is missing a scheme (e.g. https)"}

    is_nil(host) ->
      {:error, "URL '#{url}' is missing a host"}

    is_nil(path) ->
      {:error, "URL '#{url}' is missing path (e.g. missing the /image.png part)"}

    Keyword.get(opts, :check_host_online, false) == true ->
      # Try to DNS-resolve the hostname and check that it exists.
      case :inet.gethostbyname(to_charlist(host)) do
        {:ok, _} -> :ok
        {:error, _} -> {:error, "URL '#{url}' host is not resolvable"}
      end

    true ->
      :ok
  end
end
end
|
lib/sanbase/utils/validation.ex
| 0.769427
| 0.548674
|
validation.ex
|
starcoder
|
# NOTE(review): this module predates Elixir 1.0 — it uses long-gone constructs
# such as `//` default arguments (today `\\`), implicit `&1` partial
# application without an `&` capture, the `/>` pipe operator (ancestor of
# `|>`), `Macro.to_binary/1`, `atom_to_binary/1`, `list_to_binary/1`,
# `Enum.nth!/2` and `List.sort/1`. It will not compile on modern Elixir and is
# documented here as-is; do not "modernize" pieces of it in isolation.
defmodule IEx.Helpers do
  @moduledoc """
  A bunch of helpers available in IEx.
  * `c` - compiles a file in the given path
  * `d` - prints documentation
  * `h` - prints history
  * `m` - prints loaded modules
  * `r` - recompiles and reloads the given module's source file
  * `v` - retrieves nth value from console
  Documentation for functions in this module can be consulted
  directly from the command line, as an example, try:
  d(:c, 1)
  """

  @doc """
  Expects a list of files to compile and a path
  to write their object code to. It returns the name
  of the compiled modules.
  ## Examples
  c ["foo.ex"], "ebin"
  #=> Foo
  """
  # `path // "."` is the pre-1.0 spelling of a default argument.
  def c(files, path // ".") do
    tuples = Kernel.ParallelCompiler.files_to_path List.wrap(files), path
    # `elem(&1, 1)` is an archaic implicit partial application (no `&` prefix):
    # extracts the module name from each {binary, module} tuple.
    Enum.map tuples, elem(&1, 1)
  end

  @doc """
  Returns the name and module of all modules loaded.
  """
  def m do
    all = Enum.map :code.all_loaded, fn { mod, file } -> { inspect(mod), file } end
    sorted = List.sort(all)
    # Width of the widest module name, used to left-align the table below.
    size = Enum.reduce sorted, 0, fn({ mod, _ }, acc) -> max(byte_size(mod), acc) end
    format = "~-#{size}s ~s~n"
    Enum.each sorted, fn({ mod, file }) ->
      :io.format(format, [mod, file])
    end
  end

  @doc """
  Prints commands history and their result.
  """
  def h do
    # History is stored newest-first in the process dictionary; reverse to
    # print in chronological order.
    history = List.reverse(Process.get(:iex_history))
    Enum.each(history, print_history(&1))
  end

  # Prints one history entry as "<counter>: <input>#=> <result>".
  defp print_history(config) do
    IO.puts "#{config.counter}: #{config.cache}#=> #{inspect config.result}\n"
  end

  @doc """
  Shows the documentation for IEx.Helpers.
  """
  def d() do
    d(IEx.Helpers, :all)
  end

  @doc """
  Shows the documentation for the given module
  or for the given function/arity pair.
  ## Examples
  d(Enum)
  #=> Prints documentation for Enum
  It also accepts functions in the format `fun/arity`
  and `module.fun/arity`, for example:
  d receive/1
  d Enum.all?/2
  """
  # Matches a bare `fun/arity` AST node (e.g. `d receive/1`) and expands to
  # the d/2 function call.
  defmacro d({ :/, _, [{ fun, _, nil }, arity] }) do
    quote do
      d(unquote(fun), unquote(arity))
    end
  end

  # Matches a `Mod.fun/arity` AST node and expands to the d/3 function call.
  defmacro d({ :/, _, [{ { :., _, [mod, fun] }, _, [] }, arity] }) do
    quote do
      d(unquote(mod), unquote(fun), unquote(arity))
    end
  end

  # Fallback: anything else (typically a module) is documented in full.
  defmacro d(other) do
    quote do
      d(unquote(other), :all)
    end
  end

  @doc """
  Prints the documentation for the given function and arity.
  The function may either be a function defined inside `IEx.Helpers`
  or in `Kernel`. To see functions from other module, use
  `d/3` instead.
  ## Examples
  d(:d, 2)
  #=> Prints documentation for this function
  """
  def d(:d, 1) do
    d(__MODULE__, :d, 1)
  end

  def d(function, arity) when is_atom(function) and is_integer(arity) do
    # Prefer a helper defined in this module; fall back to Kernel otherwise.
    if function_exported?(__MODULE__, function, arity) do
      d(__MODULE__, function, arity)
    else
      d(Kernel, function, arity)
    end
  end

  def d(module, :all) when is_atom(module) do
    case Code.ensure_loaded(module) do
      { :module, _ } ->
        case module.__info__(:moduledoc) do
          { _, binary } when is_binary(binary) ->
            IO.puts "# #{inspect module}\n"
            IO.write binary
          { _, _ } ->
            # Loaded with docs enabled, but no @moduledoc was written.
            IO.puts "No docs for #{inspect module}"
          _ ->
            IO.puts "#{inspect module} was not compiled with docs"
        end
      { :error, reason } ->
        IO.puts "Could not load module #{inspect module}: #{reason}"
    end
  end

  @doc """
  Shows the documentation for the `function/arity` in `module`.
  """
  def d(module, function, arity) when is_atom(module) and is_atom(function) and is_integer(arity) do
    if docs = module.__info__(:docs) do
      doc =
        if tuple = List.keyfind(docs, { function, arity }, 1) do
          print_signature(tuple)
        end
      if doc do
        IO.write "\n" <> doc
      else
        IO.puts "No docs for #{function}/#{arity}"
      end
    else
      IO.puts "#{inspect module} was not compiled with docs"
    end
  end

  # Get the full signature from a function.
  # A `false` docs entry means the function was explicitly marked undocumented.
  defp print_signature({ _info, _line, _kind, _args, false }) do
    false
  end

  # Prints "* <kind> <name>(<args>)" and returns the doc body for the caller.
  defp print_signature({ { name, _arity }, _line, kind, args, docs }) do
    args = Enum.map_join(args, ", ", signature_arg(&1))
    IO.puts "* #{kind} #{name}(#{args})"
    docs
  end

  # Renders a default argument (`arg // default`) inside a signature.
  defp signature_arg({ ://, _, [left, right] }) do
    signature_arg(left) <> " // " <> Macro.to_binary(right)
  end

  defp signature_arg({ var, _, _ }) do
    atom_to_binary(var)
  end

  @doc """
  Retrieves nth query's value from the history. Use negative
  values to lookup query's value from latest to earliest.
  For instance, v(-1) returns the latest result.
  """
  def v(n) when n < 0 do
    # Negative index: history is stored newest-first, so no reverse needed.
    history = Process.get(:iex_history)
    Enum.nth!(history, abs(n)).result
  end

  def v(n) do
    # `/>` is the pre-1.0 ancestor of today's `|>` pipe operator.
    history = Process.get(:iex_history) /> List.reverse
    Enum.nth!(history, n).result
  end

  @doc """
  Reloads all modules that were already reloaded
  at some point with `r/1`.
  """
  def r do
    Enum.map iex_reloaded, r(&1)
  end

  @doc """
  Recompiles and reloads the specified module's source file.
  Please note that all the modules defined in the specified
  files are recompiled and reloaded.
  """
  def r(module) do
    if source = source(module) do
      # Remember the module so a bare r/0 can reload it again later.
      Process.put(:iex_reloaded, :ordsets.add_element(module, iex_reloaded))
      { module, Code.load_file source }
    else
      :nosource
    end
  end

  # The ordset of modules reloaded so far, kept in the process dictionary.
  defp iex_reloaded do
    Process.get(:iex_reloaded) || :ordsets.new
  end

  # Finds the source file path recorded in the module's compile info, or nil.
  defp source(module) do
    compile = module.module_info(:compile)
    # Get the source of the compiled module. Due to a bug in Erlang
    # R15 and before, we need to look for the source first in the
    # options and then into the real source.
    options =
      case List.keyfind(compile, :options, 1) do
        { :options, opts } -> opts
        _ -> []
      end
    source = List.keyfind(options, :source, 1) || List.keyfind(compile, :source, 1)
    case source do
      { :source, source } -> list_to_binary(source)
      _ -> nil
    end
  end
end
|
lib/elixir/lib/iex/helpers.ex
| 0.802246
| 0.486088
|
helpers.ex
|
starcoder
|
defmodule Advent.Y2021.D04 do
  @moduledoc """
  https://adventofcode.com/2021/day/4

  Bingo: given a sequence of called numbers and a set of 5x5 boards, a board
  wins when a full row or column has been called. A board's score is the sum
  of its unmarked numbers at the moment of winning multiplied by the number
  called on that turn.
  """

  @doc """
  To guarantee victory against the giant squid, figure out which board will win
  first. What will your final score be if you choose that board?
  """
  def part_one(input) do
    {calls, boards} = parse_input(input)

    # The board with the smallest winning turn wins first.
    {_turn, solution} =
      boards
      |> Enum.map(&board_solution(&1, calls))
      |> Enum.min()

    solution
  end

  @doc """
  Figure out which board will win last. Once it wins, what would its final
  score be?
  """
  def part_two(input) do
    {calls, boards} = parse_input(input)

    # The board with the largest winning turn wins last.
    {_turn, solution} =
      boards
      |> Enum.map(&board_solution(&1, calls))
      |> Enum.max()

    solution
  end

  # Parses the input: line 0 is the comma-separated call list; boards follow
  # (after one blank line), separated by blank lines. Each board is returned
  # as {rows, cols}, cols being the transpose of rows.
  # Fixed @spec: the second tuple element is the list of columns, not an
  # integer (the original spec said `integer()`).
  @spec parse_input(Enumerable.t()) :: {[integer()], Enumerable.t()}
  defp parse_input(input) do
    called =
      input
      |> Enum.at(0)
      |> String.split(",")
      |> Enum.map(&String.to_integer/1)

    boards =
      input
      |> Stream.drop(2)
      |> Stream.chunk_while(
        [],
        fn
          # A blank line terminates the current board.
          "", acc -> {:cont, acc, []}
          # Rows accumulate in reverse order; harmless, since win detection
          # treats each row/column as a set of values.
          line, acc -> {:cont, [line | acc]}
        end,
        fn
          [] -> {:cont, []}
          acc -> {:cont, acc, []}
        end
      )
      |> Stream.map(fn card ->
        Enum.map(card, fn row ->
          row
          |> String.split()
          |> Enum.map(&String.to_integer/1)
        end)
      end)
      |> Stream.map(fn rows ->
        # Transpose the rows to obtain the board's columns.
        cols =
          rows
          |> Enum.zip()
          |> Enum.map(&Tuple.to_list/1)

        {rows, cols}
      end)

    {called, boards}
  end

  # Returns {winning_turn, score} for one board.
  # Improvements over the original: the repeated O(n) `Enum.member?`/
  # `Enum.find_index` scans over `calls` are replaced with a precomputed
  # value -> turn map, and the misleadingly named `marked_calls` (which held
  # the UNmarked board values) is renamed `unmarked_values`.
  @spec board_solution({[[integer()]], [[integer()]]}, [integer()]) :: {integer(), integer()}
  defp board_solution({rows, cols}, calls) do
    board_vals =
      rows
      |> List.flatten()
      |> MapSet.new()

    # Map each called number to the turn on which it is first called.
    # Reversing before Map.new/1 keeps the *first* occurrence for duplicates,
    # matching the original Enum.find_index/2 semantics.
    turn_by_call =
      calls
      |> Enum.with_index()
      |> Enum.reverse()
      |> Map.new()

    (rows ++ cols)
    |> Enum.map(fn line ->
      if Enum.all?(line, &Map.has_key?(turn_by_call, &1)) do
        # The line is completed on the turn its latest-called number comes up.
        last_call = Enum.max_by(line, &Map.fetch!(turn_by_call, &1))
        last_turn = Map.fetch!(turn_by_call, last_call)
        seen_calls = Enum.slice(calls, 0..last_turn)
        unmarked_values = MapSet.difference(board_vals, MapSet.new(seen_calls))
        solution = Enum.sum(unmarked_values) * last_call
        {last_turn, solution}
      else
        # This row/column can never be completed by the given calls.
        nil
      end
    end)
    |> Enum.reject(&is_nil/1)
    |> Enum.min()
  end
end
|
lib/advent/y2021/d04.ex
| 0.779448
| 0.448487
|
d04.ex
|
starcoder
|
defmodule MazesWeb.CircularMazeView do
  @moduledoc """
  View helpers that render a circular (polar) maze as SVG.

  The maze is drawn as concentric rings split into cells; each cell is an SVG
  path made of an outer and an inner arc joined by radial line segments.
  Vertices are identified by `{column, ring}` tuples, with ring 1 being the
  single circular cell at the center.
  """
  use MazesWeb, :view

  import MazesWeb.MazeHelper

  alias Mazes.CircularMaze

  # Total SVG edge length: maze diameter plus padding on both sides.
  def svg_width(maze) do
    2 * (svg_padding() + maze_radius(maze))
  end

  # Pixel radius of the drawn maze, truncated so every ring gets the same
  # integer width.
  def maze_radius(maze) do
    max_width = max_svg_width() - 2 * svg_padding()
    trunc(max_width / (2 * maze.radius)) * maze.radius
  end

  # Pixel width of a single ring.
  def ring_width(maze) do
    maze_radius(maze) / maze.radius
  end

  # Center point {x, y} of the maze inside the SVG viewport.
  def maze_center(maze) do
    center_x = svg_padding() + maze_radius(maze)
    center_y = svg_padding() + maze_radius(maze)
    {center_x, center_y}
  end

  # Ring 1 is the single central cell: its center is the maze center.
  def vertex_center(maze, {_, 1}), do: maze_center(maze)

  # Midpoint of a cell: halfway between the inner and outer arc radii, at the
  # angle halfway between the cell's start and end angles.
  def vertex_center(maze, {current_column, ring} = _vertex) do
    column_count =
      Enum.find(CircularMaze.rings(maze), fn %{ring: r} -> r == ring end).column_count

    center = maze_center(maze)
    radius_delta = trunc(maze_radius(maze) / maze.radius)
    angle_steps = column_count
    angle_delta = 2 * :math.pi() / angle_steps
    start_angle = (current_column - 1) * angle_delta
    end_angle = current_column * angle_delta
    outer_arc_radius = ring * radius_delta
    inner_arc_radius = (ring - 1) * radius_delta

    move_coordinate_by_radius_and_angle(
      center,
      (outer_arc_radius + inner_arc_radius) / 2,
      (start_angle + end_angle) / 2
    )
  end

  # Renders the central cell as a filled <circle> element.
  def center_vertex(maze, vertex, settings, colors) do
    {cx, cy} = maze_center(maze)

    content_tag(:circle, "",
      cx: cx,
      cy: cy,
      r: ring_width(maze),
      style:
        "fill: #{
          vertex_color(
            maze,
            vertex,
            colors,
            settings.show_colors,
            settings.hue,
            settings.saturation
          )
        }"
    )
  end

  # Renders one ring cell as a closed <path>: outer arc (clockwise), radial
  # line inward, inner arc (counter-clockwise), radial line back out.
  def vertex(maze, {current_column, ring} = vertex, column_count, settings, colors) do
    %{
      outer_arc_radius: outer_radius,
      outer_arc_start: {outer_start_x, outer_start_y},
      outer_arc_end: {outer_end_x, outer_end_y},
      inner_arc_radius: inner_radius,
      inner_arc_start: {inner_start_x, inner_start_y},
      inner_arc_end: {inner_end_x, inner_end_y}
    } = maze_vertex_points(maze, column_count, ring, current_column)

    d =
      "M #{outer_start_x} #{outer_start_y}" <>
        " A #{outer_radius} #{outer_radius} 0 0 1 #{outer_end_x} #{outer_end_y}" <>
        " L #{inner_end_x} #{inner_end_y}" <>
        " A #{inner_radius} #{inner_radius} 0 0 0 #{inner_start_x} #{inner_start_y}" <>
        " L #{outer_start_x} #{outer_start_y}"

    content_tag(:path, "",
      d: d,
      stroke:
        vertex_color(
          maze,
          vertex,
          colors,
          settings.show_colors,
          settings.hue,
          settings.saturation
        ),
      style:
        "fill: #{
          vertex_color(
            maze,
            vertex,
            colors,
            settings.show_colors,
            settings.hue,
            settings.saturation
          )
        }"
    )
  end

  # Renders the wall on the inner (center-facing) side of a cell as an arc.
  def inner_wall(maze, {current_column, ring} = _vertex, column_count) do
    %{
      inner_arc_radius: inner_radius,
      inner_arc_start: {inner_start_x, inner_start_y},
      inner_arc_end: {inner_end_x, inner_end_y}
    } = maze_vertex_points(maze, column_count, ring, current_column)

    d =
      "M #{inner_end_x} #{inner_end_y}" <>
        " A #{inner_radius} #{inner_radius} 0 0 0 #{inner_start_x} #{inner_start_y}"

    content_tag(:path, "",
      d: d,
      fill: "transparent",
      style: line_style(maze)
    )
  end

  # Renders the wall on the outer (rim-facing) side of a cell as an arc.
  def outer_wall(maze, {current_column, ring} = _vertex, column_count) do
    %{
      outer_arc_radius: outer_radius,
      outer_arc_start: {outer_start_x, outer_start_y},
      outer_arc_end: {outer_end_x, outer_end_y}
    } = maze_vertex_points(maze, column_count, ring, current_column)

    d =
      "M #{outer_end_x} #{outer_end_y}" <>
        " A #{outer_radius} #{outer_radius} 0 0 0 #{outer_start_x} #{outer_start_y}"

    content_tag(:path, "",
      d: d,
      fill: "transparent",
      style: line_style(maze)
    )
  end

  # Renders the radial wall on the clockwise side of a cell as a straight line
  # from the outer arc end to the inner arc end.
  def cw_wall(maze, {current_column, ring} = _vertex, column_count) do
    %{
      outer_arc_end: {outer_end_x, outer_end_y},
      inner_arc_end: {inner_end_x, inner_end_y}
    } = maze_vertex_points(maze, column_count, ring, current_column)

    d =
      "M #{outer_end_x} #{outer_end_y}" <>
        " L #{inner_end_x} #{inner_end_y}"

    content_tag(:path, "",
      d: d,
      fill: "transparent",
      style: line_style(maze)
    )
  end

  # Computes the four corner points and two radii of a cell: the cell spans
  # one angular step of its ring, between the (ring - 1) and ring radii.
  def maze_vertex_points(maze, column_count, ring, current_column) do
    center = maze_center(maze)
    radius_delta = trunc(maze_radius(maze) / maze.radius)
    angle_steps = column_count
    angle_delta = 2 * :math.pi() / angle_steps
    start_angle = (current_column - 1) * angle_delta
    end_angle = current_column * angle_delta
    outer_arc_radius = ring * radius_delta
    inner_arc_radius = (ring - 1) * radius_delta

    %{
      outer_arc_radius: outer_arc_radius,
      outer_arc_start: move_coordinate_by_radius_and_angle(center, outer_arc_radius, start_angle),
      outer_arc_end: move_coordinate_by_radius_and_angle(center, outer_arc_radius, end_angle),
      inner_arc_radius: inner_arc_radius,
      inner_arc_start: move_coordinate_by_radius_and_angle(center, inner_arc_radius, start_angle),
      inner_arc_end: move_coordinate_by_radius_and_angle(center, inner_arc_radius, end_angle)
    }
  end
end
|
lib/mazes_web/views/circular_maze_view.ex
| 0.645455
| 0.51812
|
circular_maze_view.ex
|
starcoder
|
defmodule Exop.Validation do
  @moduledoc """
  Provides high-level functions for a contract validation.
  The main function is valid?/2
  Mostly invokes Exop.ValidationChecks module functions.
  """

  import Exop.ValidationChecks

  alias Exop.Utils

  defmodule ValidationError do
    @moduledoc """
    An operation's contract validation failure error.
    """
    defexception message: "Contract validation failed"
  end

  @type validation_error :: {:error, {:validation, map()}}

  # True when `module` exports a function named `function_name` (any arity).
  # Used below to dispatch `check_*` functions dynamically.
  @spec function_present?(Elixir.Exop.Validation | Elixir.Exop.ValidationChecks, atom()) ::
          boolean()
  defp function_present?(module, function_name) do
    :functions |> module.__info__() |> Keyword.has_key?(function_name)
  end

  @doc """
  Validate received params over a contract.
  ## Examples
      iex> Exop.Validation.valid?([%{name: :param, opts: [required: true]}], [param: "hello"])
      :ok
  """
  @spec valid?(list(map()), Keyword.t() | map()) :: :ok | validation_error
  def valid?(contract, received_params) do
    validation_results = validate(contract, received_params, [])

    # A check result is either `true` (passed) or a %{param => message} map.
    if Enum.empty?(validation_results) || Enum.all?(validation_results, &(&1 == true)) do
      :ok
    else
      error_results = consolidate_errors(validation_results)
      {:error, {:validation, error_results}}
    end
  end

  # Folds the list of per-check error maps into a single map of
  # param_name => [error_message, ...].
  @spec consolidate_errors(list()) :: map()
  defp consolidate_errors(validation_results) do
    error_results = validation_results |> Enum.reject(&(&1 == true))

    Enum.reduce(error_results, %{}, fn error_result, map ->
      # Each error result is a one-key map: %{param_name => message}.
      item_name = error_result |> Map.keys() |> List.first()
      error_message = Map.get(error_result, item_name)
      Map.put(map, item_name, [error_message | map[item_name] || []])
    end)
  end

  # Renders the consolidated error map as a human-readable multi-line string.
  @spec errors_message(map()) :: String.t()
  def errors_message(errors) do
    errors
    |> Enum.map(fn {item_name, error_messages} ->
      "#{item_name}: #{Enum.join(error_messages, "\n\t")}"
    end)
    |> Enum.join("\n")
  end

  @doc """
  Validate received params over a contract. Accumulate validation results into a list.
  ## Examples
      iex> Exop.Validation.validate([%{name: :param, opts: [required: true, type: :string]}], [param: "hello"], [])
      [true, true]
  """
  @spec validate([map()], map() | Keyword.t(), list()) :: list()
  def validate([], _received_params, result), do: result

  def validate([%{name: name, opts: opts} = contract_item | contract_tail], received_params, result) do
    checks_result =
      # An optional (required: false) param that is absent skips all checks.
      if !required?(opts) && !present?(received_params, name) do
        []
      else
        validate_params(contract_item, received_params)
      end

    validate(contract_tail, received_params, result ++ List.flatten(checks_result))
  end

  defp present?(received_params, contract_item_name) do
    check_item_present?(received_params, contract_item_name)
  end

  # Everything except an explicit `required: false` counts as required.
  defp required?(opts), do: opts[:required] != false

  # Normalizes opts so `required: true` is always explicitly present when the
  # param is required (needed by the dynamic check dispatch below).
  defp explicit_required(opts) when is_list(opts) do
    if required?(opts), do: Keyword.put(opts, :required, true), else: opts
  end

  defp explicit_required(opts) when is_map(opts) do
    if required?(opts), do: Map.put(opts, :required, true), else: opts
  end

  # Runs every check in `opts` against the param, returning a list of results
  # (true | error map). Checks are dispatched by name: `type:` invokes
  # `check_type/3` looked up first in this module, then in ValidationChecks.
  defp validate_params(%{name: name, opts: opts} = _contract_item, received_params) do
    # see changelog for ver. 1.2.0: everything except `required: false` is `required: true`
    opts = explicit_required(opts)

    # Present-but-nil params short-circuit: only the allow_nil check applies.
    is_nil? =
      check_item_present?(received_params, name) && is_nil(get_check_item(received_params, name))

    if is_nil? do
      if opts[:allow_nil] == true do
        []
      else
        [Exop.ValidationChecks.check_allow_nil(received_params, name, false)]
      end
    else
      for {check_name, check_params} <- opts, into: [] do
        check_function_name = String.to_atom("check_#{check_name}")

        cond do
          function_present?(__MODULE__, check_function_name) ->
            apply(__MODULE__, check_function_name, [received_params, name, check_params])

          function_present?(Exop.ValidationChecks, check_function_name) ->
            apply(Exop.ValidationChecks, check_function_name, [received_params, name, check_params])

          true ->
            # Unknown check names are silently treated as passing.
            true
        end
      end
    end
  end

  @doc """
  Checks inner item of the contract param (which is a Map itself) with their own checks.
  ## Examples
      iex> Exop.Validation.check_inner(%{a: 1}, :a, [b: [type: :atom], c: [type: :string]])
      [%{a: "has wrong type"}]
      iex> Exop.Validation.check_inner(%{a: []}, :a, [b: [type: :atom], c: [type: :string]])
      [[%{"a[:b]" => "is required"}, true], [%{"a[:c]" => "is required"}, true]]
      iex> Exop.Validation.check_inner(%{a: %{b: :atom, c: "string"}}, :a, [b: [type: :atom], c: [type: :string]])
      [[true, true], [true, true]]
  """
  @spec check_inner(map() | Keyword.t(), atom() | String.t(), map() | Keyword.t()) :: list
  def check_inner(check_items, item_name, checks) do
    checks = Utils.try_map(checks)

    # If `checks` cannot be coerced to a map the check passes trivially (the
    # `||` returns `true`); otherwise the inner params are validated.
    not is_map(checks) ||
      check_items
      |> get_check_item(item_name)
      |> Utils.try_map()
      |> do_check_inner(item_name, checks)
  end

  # Validates each inner key with its own opts; names are reported as
  # "parent[:child]" so errors point at the nested location.
  defp do_check_inner(check_item, item_name, checks) when is_map(check_item) do
    received_params =
      Enum.reduce(check_item, %{}, fn {inner_param_name, _inner_param_value}, acc ->
        Map.put(acc, "#{item_name}[:#{inner_param_name}]", check_item[inner_param_name])
      end)

    for {inner_param_name, inner_opts} <- checks, into: [] do
      inner_name = "#{item_name}[:#{inner_param_name}]"
      validate_params(%{name: inner_name, opts: inner_opts}, received_params)
    end
  end

  defp do_check_inner(_check_item, item_name, _checks), do: [%{item_name => "has wrong type"}]

  @doc """
  Checks items of the contract's list param with specified checks.
  ## Examples
      iex> Exop.Validation.check_list_item(%{a: 1}, :a, [type: :integer])
      [%{a: "is not a list"}]
      iex> Exop.Validation.check_list_item(%{a: []}, :a, [type: :integer])
      []
      iex> Exop.Validation.check_list_item(%{a: [1, :atom]}, :a, [type: :integer])
      [[true, true], [true, %{"a[1]" => "has wrong type; expected type: integer, got: :atom"}]]
      iex> Exop.Validation.check_list_item(%{a: [1, 2]}, :a, [type: :integer])
      [[true, true], [true, true]]
  """
  @spec check_list_item(map() | Keyword.t(), atom() | String.t(), map() | Keyword.t()) :: list
  def check_list_item(check_items, item_name, checks) when is_list(checks) do
    check_list_item(check_items, item_name, Enum.into(checks, %{}))
  end

  def check_list_item(check_items, item_name, checks) when is_map(checks) do
    list = get_check_item(check_items, item_name)

    if is_list(list) do
      # Each element is validated under the name "parent[index]".
      received_params =
        list
        |> Enum.with_index()
        |> Enum.reduce(%{}, fn {item, index}, acc ->
          Map.put(acc, "#{item_name}[#{index}]", item)
        end)

      for {param_name, _} <- received_params, into: [] do
        validate_params(%{name: param_name, opts: checks}, received_params)
      end
    else
      [%{String.to_atom("#{item_name}") => "is not a list"}]
    end
  end
end
|
lib/exop/validation.ex
| 0.875215
| 0.52975
|
validation.ex
|
starcoder
|
defmodule Evision.Mat do
  @moduledoc """
  OpenCV Mat

  Thin wrappers around the `:erl_cv_nif` NIF functions. Each operation has a
  tuple-returning form and (via `deferror`) a bang form that raises on error.
  """

  import Evision.Errorize

  @typedoc """
  Types for mat

  `{:s, 32}` was previously missing from this union although it is listed as
  a supported type in the `from_binary/5` documentation below and is passed
  straight through to the NIF.
  """
  @type mat_type ::
          {:u, 8}
          | {:u, 16}
          | {:s, 8}
          | {:s, 16}
          | {:s, 32}
          | {:f, 32}
          | {:f, 64}

  # NOTE(review): channel counts other than 1/3/4 are rejected here by the
  # spec only; the guards below merely require `channels > 0`.
  @type channels_from_binary ::
          1 | 3 | 4

  # Returns the element type of the matrix, e.g. {:ok, {:u, 8}}.
  @doc namespace: :"cv.Mat"
  @spec type(reference()) :: {:ok, mat_type()} | {:error, String.t()}
  def type(mat) when is_reference(mat) do
    :erl_cv_nif.evision_cv_mat_type(img: mat)
  end

  deferror type(mat)

  # Converts the matrix elements to the given type.
  @doc namespace: :"cv.Mat"
  @spec as_type(reference(), mat_type()) :: {:ok, reference()} | {:error, String.t()}
  def as_type(mat, _type = {t, l}) when is_reference(mat) and is_atom(t) and l > 0 do
    :erl_cv_nif.evision_cv_mat_as_type(img: mat, t: t, l: l)
  end

  deferror as_type(mat, type)

  # Returns the dimensions of the matrix.
  @doc namespace: :"cv.Mat"
  def shape(mat) when is_reference(mat) do
    :erl_cv_nif.evision_cv_mat_shape(img: mat)
  end

  deferror shape(mat)

  # Returns a deep copy of the matrix.
  @doc namespace: :"cv.Mat"
  @spec clone(reference()) :: {:ok, reference()} | {:error, String.t()}
  def clone(mat) when is_reference(mat) do
    :erl_cv_nif.evision_cv_mat_clone(img: mat)
  end

  deferror clone(mat)

  # Serializes the matrix's pixel data to a binary.
  @doc namespace: :"cv.Mat"
  @spec to_binary(reference()) :: {:ok, binary()} | {:error, String.t()}
  def to_binary(mat) when is_reference(mat) do
    :erl_cv_nif.evision_cv_mat_to_binary(img: mat)
  end

  deferror to_binary(mat)

  @doc """
  Create Mat from binary (pixel) data
  - **binary**. The binary pixel data
  - **type**. `type={t, l}` is one of [{:u, 8}, {:s, 8}, {:u, 16}, {:s, 16}, {:s, 32}, {:f, 32}, {:f, 64}]
  - **rows**. Number of rows (i.e., the height of the image)
  - **cols**. Number of cols (i.e., the width of the image)
  - **channels**. Number of channels, only valid if in [1, 3, 4]
  """
  @doc namespace: :"cv.Mat"
  @spec from_binary(binary(), mat_type(), pos_integer(), pos_integer(), channels_from_binary()) :: {:ok, reference()} | {:error, String.t()}
  def from_binary(binary, _type = {t, l}, rows, cols, channels)
      when is_binary(binary) and rows > 0 and cols > 0 and channels > 0 and
             is_atom(t) and is_integer(l) do
    :erl_cv_nif.evision_cv_mat_from_binary(
      binary: binary,
      t: t,
      l: l,
      cols: cols,
      rows: rows,
      channels: channels
    )
  end

  deferror from_binary(binary, type, rows, cols, channels)

  # Tuple-shaped variant: normalizes the shape to a list and delegates below.
  @doc namespace: :"cv.Mat"
  def from_binary_by_shape(binary, _type = {t, l}, shape)
      when is_binary(binary) and is_atom(t) and is_integer(l) and is_tuple(shape) do
    from_binary_by_shape(binary, {t, l}, Tuple.to_list(shape))
  end

  # Creates a Mat from binary data with an arbitrary dimension list.
  @doc namespace: :"cv.Mat"
  def from_binary_by_shape(binary, _type = {t, l}, shape)
      when is_binary(binary) and is_atom(t) and is_integer(l) and is_list(shape) do
    :erl_cv_nif.evision_cv_mat_from_binary_by_shape(
      binary: binary,
      t: t,
      l: l,
      shape: shape
    )
  end

  deferror from_binary_by_shape(binary, type, shape)
end
|
lib/opencv_mat.ex
| 0.789721
| 0.515193
|
opencv_mat.ex
|
starcoder
|
defmodule HAP do
  @moduledoc """
  HAP is an implementation of the [HomeKit Accessory Protocol](https://developer.apple.com/homekit/) specification.
  It allows for the creation of Elixir powered HomeKit accessories which can be controlled from a user's
  iOS device in a similar manner to commercially available HomeKit accessories such as light bulbs, window
  coverings and other smart home accessories.
  ## The HomeKit Data Model
  The data model of the HomeKit Accessory Protocol is represented as a tree structure. At the top level, a single HAP
  instance represents an *Accessory Server*. An accessory server hosts one or more *Accessory Objects*. Each accessory object
  represents a single, discrete physical accessory. In the case of directly connected devices, an accessory server typically
  hosts a single accessory object which represents device itself, whereas bridges will have one accessory object for each discrete
  physical object which they bridge to. Within HAP, an accessory server is represented by a `HAP.AccessoryServer` struct, and
  an accessory by the `HAP.Accessory` struct.
  Each accessory object contains exposes a set of *Services*, each of which represents a unit of functionality. As an
  example, a HomeKit accessory server which represented a ceiling fan with a light would contain one accessory object
  called 'Ceiling Fan', which would contain two services each representing the light and the fan. In addition to user-visible
  services, each accessory exposes an Accessory Information Service which contains information about the service's name,
  manufacturer, serial number and other properties. Within HAP, a service is represented by a `HAP.Service` struct.
  A service is made up of one or more *Characteristics*, each of which represents a specific aspect of the given service.
  For example, a light bulb service exposes an On Characteristic, which is a boolean value reflecting the current on or
  off state of the light. If it is a dimmable light, it may also expose a Brightness Characteristic. If it is a color
  changing light, it may also expose a Hue Characteristic. Within HAP, a characteristic is represented by a tuple of a
  `HAP.CharacteristicDefinition` and a value source.
  ## Using HAP
  HAP provides a high-level interface to the HomeKit Accessory Protocol, allowing an application to
  present any number of accessories to an iOS HomeKit controller. HAP is intended to be embedded within a host
  application which is responsible for providing the actual backing implementations of the various characteristics
  exposed via HomeKit. These are provided to HAP in the form of `HAP.ValueStore` implementations. For example, consider
  a Nerves application which exposes itself to HomeKit as a light bulb. Assume that the actual physical control of the
  light is controlled by GPIO pin 23. A typical configuration of HAP would look something like this:
  ```elixir
  accessory_server =
    %HAP.AccessoryServer{
      name: "My HAP Demo Device",
      model: "HAP Demo Device",
      identifier: "11:22:33:44:12:66",
      accessory_type: 5,
      accessories: [
        %HAP.Accessory{
          name: "My HAP Lightbulb",
          services: [
            %HAP.Services.LightBulb{on: {MyApp.Lightbulb, gpio_pin: 23}}
          ]
        }
      ]
    }
  children = [{HAP, accessory_server}]
  Supervisor.start_link(children, opts)
  ...
  ```
  In this example, the application developer is responsible for creating a `MyApp.Lightbulb` module which implements the `HAP.ValueStore`
  behaviour. This module would be called by HAP whenever it needs to change or query the current state of the light. The
  extra options (`gpio_pin: 23` in the above example) are conveyed to this module on every call, allowing a single value store
  implementation to service any number of characteristics or services.
  HAP provides structs to represent the most common services, such as light bulbs, switches, and other common device types.
  HAP compiles these structs into generic `HAP.Service` structs when starting up, based on each source struct's implementation
  of the `HAP.ServiceSource` protocol. This allows for expressive definition of services by the application developer, while
  providing for less boilerplate within HAP itself. For users who wish to create additional device types not defined in
  HAP, users may define their accessories in terms of low-level `HAP.Service` and `HAP.CharacteristicDefinition` structs. For more
  information, consult the type definitions for `t:HAP.AccessoryServer.t/0`, `t:HAP.Accessory.t/0`, `t:HAP.Service.t/0`,
  `t:HAP.Characteristic.t/0`, and the `HAP.CharacteristicDefinition` behaviour.
  """

  use Supervisor

  @doc """
  Starts a HAP instance based on the passed config
  """
  @spec start_link(HAP.AccessoryServer.t()) :: Supervisor.on_start()
  def start_link(config) do
    Supervisor.start_link(__MODULE__, config)
  end

  # Fix: added the missing `@impl true` annotation for the Supervisor callback.
  @impl true
  def init(%HAP.AccessoryServer{} = accessory_server) do
    accessory_server = accessory_server |> HAP.AccessoryServer.compile()

    children = [
      {HAP.PersistentStorage, accessory_server.data_path},
      {HAP.AccessoryServerManager, accessory_server},
      HAP.EventManager,
      HAP.PairSetup,
      {ThousandIsland,
       handler_module: HAP.HAPSessionHandler,
       handler_options: {HAP.HTTPServer, []},
       transport_module: HAP.HAPSessionTransport,
       port: 0}
    ]

    # :rest_for_one so that a storage/manager crash restarts everything that
    # depends on it, while listeners alone can restart independently.
    Supervisor.init(children, strategy: :rest_for_one)
  end

  @doc """
  Called by user applications whenever a characteristic value has changed. The change token is passed to `HAP.ValueStore`
  instances via the `c:HAP.ValueStore.set_change_token/2` callback.
  """
  @spec value_changed(HAP.ValueStore.change_token()) :: :ok
  defdelegate value_changed(change_token), to: HAP.AccessoryServerManager
end
|
lib/hap.ex
| 0.928132
| 0.971074
|
hap.ex
|
starcoder
|
defmodule Steve.Storage do
  @moduledoc """
  Defines a behaviour for storage adapters to implement.
  """

  alias Steve.Config
  alias Steve.Job

  # Each public function below looks up the configured adapter module at call
  # time and delegates to it, so the adapter can be swapped via configuration.

  @doc false
  def start_link(state) do
    adapter().start_link(state)
  end

  @doc false
  def child_spec(state) do
    adapter().child_spec(state)
  end

  @doc false
  def ack!(job) do
    adapter().ack!(job)
  end

  @doc false
  def retry!(job) do
    adapter().retry!(job)
  end

  @doc false
  def recover!(queue) do
    adapter().recover!(queue)
  end

  @doc false
  def enqueue(job, time) do
    adapter().enqueue(job, time)
  end

  @doc false
  def dequeue(queue, count) do
    adapter().dequeue(queue, count)
  end

  # The adapter module is read from application config under `:storage`.
  defp adapter do
    Config.get!(:storage)
  end

  # Injects default `start_link/1` and `child_spec/1` implementations into an
  # adapter module; both are overridable.
  defmacro __using__(_options) do
    quote do
      @behaviour Steve.Storage

      alias Steve.{Config, Time}
      alias Steve.Job

      # Default: the adapter runs no process of its own.
      @doc false
      def start_link(_state) do
        :ignore
      end

      defoverridable [start_link: 1]

      @doc false
      def child_spec(options) do
        %{
          id: __MODULE__,
          type: :supervisor,
          start: {__MODULE__, :start_link, [options]}
        }
      end

      defoverridable [child_spec: 1]
    end
  end

  @doc """
  Called after a job has been performed successfully.
  """
  @callback ack!(Job.t) :: :ok | no_return

  @doc """
  Called after an error is encountered when performing a job.
  """
  @callback retry!(Job.t) :: :ok | no_return

  @doc """
  Called upon start to hopefully recover orphaned jobs.
  """
  @callback recover!(String.t) :: :ok | no_return

  @doc """
  Called to enqueue a given job to be performed at a specific time.
  """
  @callback enqueue(Job.t, DateTime.t) :: :ok | term

  @doc """
  Called to dequeue a specific amount of jobs.
  """
  @callback dequeue(String.t, pos_integer) :: list
end
|
lib/steve/storage.ex
| 0.756268
| 0.505798
|
storage.ex
|
starcoder
|
defmodule Module.Types.Helpers do
@moduledoc false
@doc """
Guard function to check if an AST node is a variable.
"""
defmacro is_var(expr) do
quote do
is_tuple(unquote(expr)) and
tuple_size(unquote(expr)) == 3 and
is_atom(elem(unquote(expr), 0)) and
is_atom(elem(unquote(expr), 2))
end
end
@doc """
Returns unique identifier for the current assignment of the variable.
"""
def var_name({name, meta, _context}), do: {name, Keyword.fetch!(meta, :version)}
@doc """
Push expression to stack.
The expression stack is used to give the context where a type variable
was refined when show a type conflict error.
"""
def push_expr_stack(expr, stack) do
%{stack | expr_stack: [expr | stack.expr_stack]}
end
@doc """
Like `Enum.reduce/3` but only continues while `fun` returns `{:ok, acc}`
and stops on `{:error, reason}`.
"""
def reduce_ok(list, acc, fun) do
do_reduce_ok(list, acc, fun)
end
defp do_reduce_ok([head | tail], acc, fun) do
case fun.(head, acc) do
{:ok, acc} ->
do_reduce_ok(tail, acc, fun)
result when elem(result, 0) == :ok ->
result = Tuple.delete_at(result, 0)
do_reduce_ok(tail, result, fun)
{:error, reason} ->
{:error, reason}
end
end
defp do_reduce_ok([], acc, _fun), do: {:ok, acc}
@doc """
Like `Enum.unzip/1` but only continues while `fun` returns `{:ok, elem1, elem2}`
and stops on `{:error, reason}`.
"""
def unzip_ok(list) do
do_unzip_ok(list, [], [])
end
defp do_unzip_ok([{:ok, head1, head2} | tail], acc1, acc2) do
do_unzip_ok(tail, [head1 | acc1], [head2 | acc2])
end
defp do_unzip_ok([{:error, reason} | _tail], _acc1, _acc2), do: {:error, reason}
defp do_unzip_ok([], acc1, acc2), do: {:ok, Enum.reverse(acc1), Enum.reverse(acc2)}
@doc """
Like `Enum.map/2` but only continues while `fun` returns `{:ok, elem}`
and stops on `{:error, reason}`.
"""
def map_ok(list, fun) do
do_map_ok(list, [], fun)
end
defp do_map_ok([head | tail], acc, fun) do
case fun.(head) do
{:ok, elem} ->
do_map_ok(tail, [elem | acc], fun)
result when elem(result, 0) == :ok ->
result = Tuple.delete_at(result, 0)
do_map_ok(tail, [result | acc], fun)
{:error, reason} ->
{:error, reason}
end
end
defp do_map_ok([], acc, _fun), do: {:ok, Enum.reverse(acc)}
@doc """
Like `Enum.map_reduce/3` but only continues while `fun` returns `{:ok, elem, acc}`
and stops on `{:error, reason}`.
"""
def map_reduce_ok(list, acc, fun) do
do_map_reduce_ok(list, {[], acc}, fun)
end
defp do_map_reduce_ok([head | tail], {list, acc}, fun) do
case fun.(head, acc) do
{:ok, elem, acc} ->
do_map_reduce_ok(tail, {[elem | list], acc}, fun)
{:error, reason} ->
{:error, reason}
end
end
defp do_map_reduce_ok([], {list, acc}, _fun), do: {:ok, Enum.reverse(list), acc}
@doc """
Given a list of `[{:ok, term()} | {:error, term()}]`, returns
`{:error, [term()]}` with all error reasons if at least one element is an
error, otherwise `{:ok, [term()]}` with all unwrapped values.
"""
def oks_or_errors(list) do
  {oks, errors} = Enum.split_with(list, &match?({:ok, _}, &1))

  case errors do
    [] -> {:ok, Enum.map(oks, fn {:ok, value} -> value end)}
    _ -> {:error, Enum.map(errors, fn {:error, reason} -> reason end)}
  end
end
end
|
lib/elixir/lib/module/types/helpers.ex
| 0.743168
| 0.619831
|
helpers.ex
|
starcoder
|
defmodule Tensorflow.AutotuneResult.FailureKind do
  # Appears to be generated protobuf code (elixir-protobuf, proto3) for
  # TensorFlow's autotuning messages — avoid editing by hand.
  @moduledoc false
  use Protobuf, enum: true, syntax: :proto3

  @type t :: integer | :UNKNOWN | :REDZONE_MODIFIED | :WRONG_RESULT

  # Enum tags mirror the .proto definition.
  field(:UNKNOWN, 0)
  field(:REDZONE_MODIFIED, 1)
  field(:WRONG_RESULT, 2)
end
defmodule Tensorflow.CudnnVersion do
  # Appears to be generated protobuf code — avoid editing by hand.
  # Semantic-version triple of the cuDNN library.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          major: integer,
          minor: integer,
          patch: integer
        }
  defstruct [:major, :minor, :patch]

  field(:major, 1, type: :int32)
  field(:minor, 2, type: :int32)
  field(:patch, 3, type: :int32)
end
defmodule Tensorflow.ComputeCapability do
  # Appears to be generated protobuf code — avoid editing by hand.
  # CUDA compute-capability pair (e.g. major 7, minor 5).
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          major: integer,
          minor: integer
        }
  defstruct [:major, :minor]

  field(:major, 1, type: :int32)
  field(:minor, 2, type: :int32)
end
defmodule Tensorflow.AutotuneResult.FailureResult do
  # Appears to be generated protobuf code — avoid editing by hand.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          key: {atom, any},
          kind: Tensorflow.AutotuneResult.FailureKind.t(),
          msg: String.t(),
          buffer_address: integer
        }
  defstruct [:key, :kind, :msg, :buffer_address]

  # `:key` is a oneof group: at most one of `reference_conv`/`reference_gemm`
  # (the fields declared with `oneof: 0` below) is set.
  oneof(:key, 0)
  field(:kind, 1, type: Tensorflow.AutotuneResult.FailureKind, enum: true)
  field(:msg, 2, type: :string)
  field(:reference_conv, 11, type: Tensorflow.AutotuneResult.ConvKey, oneof: 0)
  field(:reference_gemm, 12, type: Tensorflow.AutotuneResult.GemmKey, oneof: 0)
  field(:buffer_address, 13, type: :int64)
end
defmodule Tensorflow.AutotuneResult.ConvKey do
  # Appears to be generated protobuf code — avoid editing by hand.
  # Identifies a convolution autotuning candidate.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          algorithm: integer,
          tensor_ops_enabled: boolean
        }
  defstruct [:algorithm, :tensor_ops_enabled]

  field(:algorithm, 1, type: :int64)
  field(:tensor_ops_enabled, 2, type: :bool)
end
defmodule Tensorflow.AutotuneResult.GemmKey do
  # Appears to be generated protobuf code — avoid editing by hand.
  # Identifies a GEMM autotuning candidate.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          algorithm: integer
        }
  defstruct [:algorithm]

  field(:algorithm, 1, type: :int64)
end
defmodule Tensorflow.AutotuneResult do
  # Appears to be generated protobuf code — avoid editing by hand.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          key: {atom, any},
          scratch_bytes: integer,
          run_time: Google.Protobuf.Duration.t() | nil,
          failure: Tensorflow.AutotuneResult.FailureResult.t() | nil
        }
  defstruct [:key, :scratch_bytes, :run_time, :failure]

  # `:key` is a oneof group: at most one of `conv`/`gemm` (the fields
  # declared with `oneof: 0` below) is set.
  oneof(:key, 0)
  field(:scratch_bytes, 8, type: :int64)
  field(:run_time, 9, type: Google.Protobuf.Duration)
  field(:failure, 7, type: Tensorflow.AutotuneResult.FailureResult)
  field(:conv, 5, type: Tensorflow.AutotuneResult.ConvKey, oneof: 0)
  field(:gemm, 6, type: Tensorflow.AutotuneResult.GemmKey, oneof: 0)
end
defmodule Tensorflow.AutotuningLog do
  # Appears to be generated protobuf code — avoid editing by hand.
  # One autotuning session: the instruction being tuned plus all candidate
  # results and the device/library environment they were measured in.
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          instr: Google.Protobuf.Any.t() | nil,
          results: [Tensorflow.AutotuneResult.t()],
          cudnn_version: Tensorflow.CudnnVersion.t() | nil,
          compute_capability: Tensorflow.ComputeCapability.t() | nil,
          device_pci_bus_id: String.t(),
          blas_version: String.t()
        }
  defstruct [
    :instr,
    :results,
    :cudnn_version,
    :compute_capability,
    :device_pci_bus_id,
    :blas_version
  ]

  field(:instr, 1, type: Google.Protobuf.Any)
  field(:results, 2, repeated: true, type: Tensorflow.AutotuneResult)
  field(:cudnn_version, 3, type: Tensorflow.CudnnVersion)
  field(:compute_capability, 4, type: Tensorflow.ComputeCapability)
  field(:device_pci_bus_id, 5, type: :string)
  field(:blas_version, 6, type: :string)
end
|
lib/tensorflow/core/protobuf/autotuning.pb.ex
| 0.789193
| 0.473231
|
autotuning.pb.ex
|
starcoder
|
defmodule Util.Tracing do
  @moduledoc """
  Convenience wrappers around `Tapper.Ctx` for distributed tracing.

  When starting a trace and no trace information was received from an HTTP
  request or a parent process, use `start_trace/2`. If trace information is
  received, use `join/6`. Then start spans with `start_span/3`.
  """

  @doc """
  Starts a root trace.

  This should probably never be used in an HTTP request, since the trace
  information should be extracted from the request. See `join/6` for that
  use case.
  """
  @spec start_trace(String.t(), Keyword.t()) :: Tapper.Id.t()
  def start_trace(name, tags \\ []) do
    Tapper.Ctx.start(name: name, annotations: to_annotations(tags))
  end

  @doc """
  Finishes the current trace.
  """
  @spec finish_trace(Keyword.t()) :: :ok
  def finish_trace(tags \\ []) do
    Tapper.Ctx.finish(annotations: to_annotations(tags))
  end

  @doc """
  Joins a received trace, originating e.g. from an HTTP request to this
  service or from a calling process.

  A fresh span id is generated for this process; `trace_id` and
  `parent_span_id` are parsed from their wire representation.
  """
  # The original spec declared arity 5 and omitted the trailing `debug` flag;
  # it now matches the function's actual arity of 6.
  @spec join(String.t(), non_neg_integer, non_neg_integer, boolean, Keyword.t(), boolean) ::
          Tapper.Id.t()
  def join(name, trace_id, parent_span_id, sample, tags \\ [], debug \\ false) do
    span_id = Tapper.SpanId.generate()
    {:ok, trace_id} = Tapper.TraceId.parse(trace_id)
    {:ok, parent_span_id} = Tapper.SpanId.parse(parent_span_id)

    Tapper.Ctx.join(trace_id, span_id, parent_span_id, sample, debug,
      name: name,
      annotations: to_annotations(tags)
    )
  end

  @doc """
  Adds a new child span to the currently running span.
  """
  # The original spec omitted the `component` argument; it now matches the
  # function's actual arity of 3.
  @spec start_span(String.t(), Keyword.t(), String.t()) :: Tapper.Id.t()
  def start_span(name, tags \\ [], component \\ "Tracing") do
    Tapper.Ctx.start_span(name: name, local: component, annotations: to_annotations(tags))
  end

  @doc """
  Finishes the current span.
  """
  @spec finish_span(Keyword.t()) :: Tapper.Id.t()
  def finish_span(tags \\ []) do
    Tapper.Ctx.finish_span(annotations: to_annotations(tags))
  end

  # Converts a keyword list of tags into Tapper annotation structs.
  defp to_annotations(tags), do: Enum.map(tags, fn {name, value} -> Tapper.tag(name, value) end)
end
|
lib/util/tracing/tracing.ex
| 0.813572
| 0.425844
|
tracing.ex
|
starcoder
|
defmodule Uploader.Ecto.UploadableFields do
  @moduledoc ~S"""
  Provides a macro for defining uploadable fields.

  When calling `uploadable_field/2`, two fields are created:

    * a virtual field is created named by the given name prefixed by "uploaded_"
      and holds the `Plug.Upload` struct (representing the uploaded file).
    * a field with the given name is created holding the casted `Plug.Upload` struct
      (typically the struct is casted into a filename).

  The code below

  ```elixir
  uploadable_field :image
  ```

  will generate

  ```elixir
  field :image, :string
  field :uploaded_image, :any, virtual: true
  ```

  Using this module (`use`) provides the caller module with the function
  `get_uploadable_fields/0` listing all the uploadable fields and their options.

  The following options may be given to uploadable fields:

    * `:cast`: a function that casts a `Plug.Upload` struct into the value to be
      stored in the database.
    * `:directory`: the base directory containing the file uploads.
    * `:filename`: a function that generates the filename based on the given struct.
    * `:on_file_exists`: specifies the strategy to apply if the file path already
      exists. Its value may be:
      * `:overwrite`: overwrite the file if the file path already exists
      * `:compare_hash`: do not copy the file if the file path already exists;
        if the hashes of the files' content are not equal, return an error.
    * `:print`: a function that prints the field (typically used by the view).
    * `:type`: the field type.
  """

  @callback get_uploadable_fields() :: [atom]

  defmacro __using__(_args) do
    this_module = __MODULE__

    quote do
      @behaviour unquote(this_module)

      # `uploadable_field/2` declares a default for its second argument, so it
      # compiles to *both* arities. The original `only: [uploadable_field: 2]`
      # omitted arity 1, which made the documented single-argument call
      # (`uploadable_field :image`) fail to resolve.
      import unquote(this_module),
        only: [
          uploadable_field: 1,
          uploadable_field: 2
        ]

      Module.register_attribute(__MODULE__, :uploadable_fields, accumulate: true)

      @before_compile unquote(this_module)
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      @doc "Lists all uploadable fields declared in this module with their options."
      def get_uploadable_fields(), do: @uploadable_fields
    end
  end

  @doc """
  Declares an uploadable field named `field_name`.

  Generates the persisted field (unless the schema already declared a struct
  field with that name) plus the virtual `uploaded_<name>` field, and records
  `{field_name, opts}` in the accumulated `@uploadable_fields` attribute.
  """
  defmacro uploadable_field(field_name, opts \\ []) do
    field_type = Keyword.get(opts, :type, :string)

    quote do
      # Only declare the persisted field if the schema has not declared it yet.
      fields = Module.get_attribute(__MODULE__, :struct_fields)

      if is_nil(List.keyfind(fields, unquote(field_name), 0)) do
        field(unquote(field_name), unquote(field_type))
      end

      field(String.to_atom("uploaded_" <> Atom.to_string(unquote(field_name))), :any,
        virtual: true
      )

      Module.put_attribute(
        __MODULE__,
        :uploadable_fields,
        {unquote(field_name), unquote(opts)}
      )
    end
  end
end
|
lib/ecto/uploadable_fields.ex
| 0.864582
| 0.881104
|
uploadable_fields.ex
|
starcoder
|
defmodule Pavlov.Callbacks do
  @moduledoc """
  Allows running tasks in-between test executions.
  Currently only supports running tasks before tests are
  executed.

  ## Context
  If you return `{:ok, <dict>}` from `before :all`, the dictionary will be merged
  into the current context and be available in all subsequent setup_all,
  setup and the test itself.
  Similarly, returning `{:ok, <dict>}` from `before :each`, the dict returned
  will be merged into the current context and be available in all subsequent
  setup and the test itself.
  Returning `:ok` leaves the context unchanged in both cases.
  """
  import ExUnit.Callbacks

  @doc false
  # The options were never used; the parameter is kept (with its default) for
  # backwards compatibility but renamed to signal it is ignored.
  defmacro __using__(_opts \\ []) do
    quote do
      Agent.start(fn -> %{} end, name: :pavlov_callback_defs)
      import Pavlov.Callbacks
    end
  end

  @doc false
  defmacro before(periodicity \\ :each, context \\ quote(do: _), contents)

  @doc """
  Runs before each **test** in the current context is executed or before
  **all** tests in the context are executed.
  Example:
      before :all do
        IO.puts "Test batch started!"
        :ok
      end

      before :each do
        IO.puts "Here comes a new test!"
        :ok
      end
  """
  defmacro before(:each, context, contents) do
    block = contents[:do]

    quote do
      setup unquote(context), do: unquote(do_block(block))

      # `Dict` was deprecated and removed from Elixir; the accumulator is a
      # plain map, so `Map.put_new/3` is the direct replacement.
      Agent.update(:pavlov_callback_defs, fn map ->
        Map.put_new(
          map,
          __MODULE__,
          {:each, unquote(Macro.escape(context)), unquote(Macro.escape(contents[:do]))}
        )
      end)
    end
  end

  defmacro before(:all, context, contents) do
    block = contents[:do]

    quote do
      setup_all unquote(context), do: unquote(do_block(block))

      Agent.update(:pavlov_callback_defs, fn map ->
        Map.put_new(
          map,
          __MODULE__,
          {:all, unquote(Macro.escape(context)), unquote(Macro.escape(contents[:do]))}
        )
      end)
    end
  end

  # Ensures a quoted `before` body ends in `:ok`/`{:ok, _}` so it is a valid
  # ExUnit setup return value.
  defp do_block({:__block__, _, statements} = block) do
    [last | _] = Enum.reverse(statements)

    if match?(:ok, last) || match?({:ok, _}, last) do
      block
    else
      # The original wrapped `statements` (a list) as a single list-literal
      # expression inside the block; appending `:ok` keeps every statement a
      # separate expression with the same side effects and the same result.
      {:__block__, [], statements ++ [:ok]}
    end
  end

  defp do_block({:ok, _} = block), do: block
  defp do_block(:ok), do: :ok

  defp do_block(block) do
    {:__block__, [], [block, :ok]}
  end
end
|
lib/callbacks.ex
| 0.731059
| 0.50891
|
callbacks.ex
|
starcoder
|
defmodule Poller do
  @moduledoc """
  Module for continuously polling inputs (buttons and the floor sensor).
  """
  use GenServer

  @server_name :process_poller

  def start_link(state) do
    GenServer.start_link(__MODULE__, state, name: @server_name)
  end

  @impl true
  def init(state) do
    init_pollers()
    {:ok, state}
  end

  @doc """
  Spawns a function for each button that checks if the button has been pushed.
  Also spawns a function that checks if the elevator has reached a floor.
  """
  def init_pollers do
    button_map = Utils.get_all_buttons()
    Enum.each(button_map[:cab], fn x -> spawn(Poller, :button_check, [:off, x, :cab]) end)
    Enum.each(button_map[:up], fn x -> spawn(Poller, :button_check, [:off, x, :up]) end)
    Enum.each(button_map[:down], fn x -> spawn(Poller, :button_check, [:off, x, :down]) end)
    spawn(Poller, :floor_check, [:between_floors])
  end

  @doc """
  Checks for button presses. Sends the order to the Distro module when a press
  is first observed (`:off` -> `:on`), then waits in `:on` until released.

  Rewritten so every recursive call is in tail position: the original fell
  through to more code after state-transition calls that could never return,
  so the process stack grew without bound, and the `:on` state busy-looped
  without sleeping while the button was held.
  """
  def button_check(:off, floor, type) do
    if 1 == ElevatorDriver.get_order_button_state(:process_driver, floor, type) do
      Distro.new_order(:process_distro, %Order{direction: type, floor: floor})
      button_check(:on, floor, type)
    else
      Process.sleep(100)
      button_check(:off, floor, type)
    end
  end

  def button_check(:on, floor, type) do
    Process.sleep(100)

    if 1 == ElevatorDriver.get_order_button_state(:process_driver, floor, type) do
      button_check(:on, floor, type)
    else
      button_check(:off, floor, type)
    end
  end

  @doc """
  Checks whether the elevator has arrived at a floor.

  Implements a two-state machine (`:between_floors` / `:at_floor`); when it
  arrives at a floor it notifies the elevator state machine which floor was
  reached. All transitions are tail calls (see `button_check/3`).
  """
  def floor_check(:between_floors) do
    floor = ElevatorDriver.get_floor_sensor_state(:process_driver)

    # The driver presumably returns an atom while no floor sensor is active
    # and the floor number otherwise — TODO confirm against ElevatorDriver.
    if is_atom(floor) do
      Process.sleep(20)
      floor_check(:between_floors)
    else
      # Arrived at a floor; tell the state machine where we are.
      IO.puts(["Arrived at floor ", to_string(floor)])
      ElevatorState.arrived_at_floor(:process_elevator, floor)
      floor_check(:at_floor)
    end
  end

  def floor_check(:at_floor) do
    if is_atom(ElevatorDriver.get_floor_sensor_state(:process_driver)) do
      IO.puts(["Left floor"])
      floor_check(:between_floors)
    else
      Process.sleep(20)
      floor_check(:at_floor)
    end
  end

  @doc """
  Logs that the `type` button for `floor` has been pushed.
  """
  def print_button(floor, type) do
    IO.puts([Atom.to_string(type), " button has been pushed for floor ", to_string(floor)])
  end
end
|
lib/poller.ex
| 0.631481
| 0.422117
|
poller.ex
|
starcoder
|
defmodule ICalex.Props.VUTCOffset do
  @moduledoc false
  use ICalex.Props
  alias Timex.Duration

  @enforce_keys [:value]
  defstruct ICalex.Props.common_fields()

  @doc "Wraps an existing `Timex.Duration` as a VUTCOffset property."
  def of(%Duration{} = value), do: %__MODULE__{value: value}

  @doc """
  Parses an iCalendar UTC-OFFSET string (`[+-]HHMM` or `[+-]HHMMSS`, see
  RFC 5545 §3.3.14) into a VUTCOffset property.

  Raises `ArgumentError` if the value cannot be parsed.
  """
  def from(value) when is_bitstring(value) do
    value_regex = ~r/(?<signal>[+-]?)(?<offset>\d+)/
    # HHMM with an optional trailing SS. The original regex captured an
    # optional *leading* digit pair as "days", which misparsed six-digit
    # offsets such as "-023045" (HHMMSS — the very format `to_ical/1` emits)
    # as days/hours/minutes; UTC offsets have no days component.
    offset_regex = ~r/(?<hours>\d{2})(?<minutes>\d{2})(?<seconds>\d{2})?/

    with %{"offset" => offset, "signal" => signal} <- Regex.named_captures(value_regex, value),
         %{"hours" => hours, "minutes" => minutes, "seconds" => seconds} <-
           Regex.named_captures(offset_regex, offset) do
      hours = parse_time_unit(hours)
      minutes = parse_time_unit(minutes)
      # An absent SS group captures "", which parse_time_unit maps to 0.
      seconds = parse_time_unit(seconds)

      duration =
        Duration.zero()
        |> Duration.add(Duration.from_hours(hours))
        |> Duration.add(Duration.from_minutes(minutes))
        |> Duration.add(Duration.from_seconds(seconds))

      duration = if signal == "-", do: Duration.invert(duration), else: duration
      __MODULE__.of(duration)
    else
      # Original message said "Expected a date", which was wrong for this type.
      _ -> raise ArgumentError, message: ~s(Expected a UTC offset, got: #{value})
    end
  end

  defimpl ICal do
    # Zero-pads a time component to two digits, e.g. 5 -> "05".
    defp format_time_unit(time_unit),
      do: Integer.to_string(time_unit) |> String.pad_leading(2, "0")

    # Renders as [+-]HHMM, appending SS only when non-zero (RFC 5545 UTC-OFFSET).
    def to_ical(%{value: value} = _data) do
      total_seconds = Duration.to_seconds(value)
      sign = if total_seconds >= 0, do: "+", else: "-"
      hours = abs(Duration.to_hours(value)) |> trunc
      minutes = (abs(Duration.to_minutes(value)) - hours * 60) |> trunc
      seconds = (abs(total_seconds) - (hours * 3600 + minutes * 60)) |> trunc
      result = "#{sign}#{format_time_unit(hours)}#{format_time_unit(minutes)}"

      if seconds > 0, do: "#{result}#{format_time_unit(seconds)}", else: result
    end
  end

  # Parses a numeric capture, treating an empty or non-numeric capture as 0.
  defp parse_time_unit(time_unit) do
    case Integer.parse(time_unit) do
      {parsed, _rest} -> parsed
      :error -> 0
    end
  end
end
|
lib/props/v_utcoffset.ex
| 0.650356
| 0.422058
|
v_utcoffset.ex
|
starcoder
|
defmodule ElixirSense.Core.EdocReader do
  @moduledoc false
  alias ElixirSense.Core.Introspection

  # Best-effort reader for Erlang EDoc documentation via the :docsh library.
  # Every lookup returns a (possibly empty) list; failures never propagate.

  @spec get_moduledoc(module()) :: list
  def get_moduledoc(m) when is_atom(m), do: lookup(m, [:moduledoc])

  @spec get_docs(module(), atom(), non_neg_integer() | :any) :: list
  def get_docs(m, f \\ :any, arity \\ :any)
      when is_atom(m) and is_atom(f) and (is_integer(arity) or arity == :any) do
    lookup({m, f, arity}, [:doc, :spec])
  end

  @spec get_specs(module(), atom(), non_neg_integer() | :any) :: list
  def get_specs(m, f, arity \\ :any)
      when is_atom(m) and is_atom(f) and (is_integer(arity) or arity == :any) do
    lookup({m, f, arity}, [:spec])
  end

  @spec get_typedocs(module()) :: list
  def get_typedocs(m) when is_atom(m) do
    lookup(m, [:type])
  end

  @spec get_typedocs(module(), atom(), non_neg_integer() | :any) :: list
  def get_typedocs(m, t, arity \\ :any)
      when is_atom(m) and is_atom(t) and (is_integer(arity) or arity == :any) do
    lookup({m, t, arity}, [:doc, :type])
  end

  # `key` is either a module or an `{m, f_or_t, arity}` tuple; `kinds` selects
  # which documentation entries to fetch from the docs chunk.
  defp lookup(key, kinds) do
    module = key_to_module(key)

    # EDoc only exists for Erlang modules; Elixir modules are skipped.
    if Introspection.elixir_module?(module) do
      []
    else
      try do
        case :docsh_lib.get_docs(module) do
          {:error, _reason} ->
            []

          {:ok, docs = {:docs_v1, line, :erlang, "text/erlang-edoc", moduledoc, metadata, _docs}} ->
            case kinds do
              [:moduledoc] ->
                [{line, moduledoc, metadata}]

              _ ->
                case :docsh_format.lookup(docs, key, kinds) do
                  {:not_found, _message} ->
                    []

                  {:ok, doc_items} ->
                    doc_items
                end
            end
        end
      rescue
        # :docsh may raise for modules without docs or debug info; any failure
        # is deliberately treated as "no documentation available".
        _ -> []
      end
    end
  end

  # Formats an EDoc XML payload into a plain binary; returns `false` for
  # hidden docs and `nil` when no docs are present.
  @spec extract_docs(:none | :hidden | map) :: nil | false | String.t()
  def extract_docs(%{"en" => edoc_xml}) do
    :docsh_edoc.format_edoc(edoc_xml, %{})
    |> :erlang.iolist_to_binary()
  end

  def extract_docs(:hidden), do: false
  def extract_docs(_), do: nil

  defp key_to_module(m) when is_atom(m), do: m
  defp key_to_module({m, _, _}), do: m
end
|
lib/elixir_sense/core/edoc_reader.ex
| 0.672439
| 0.438424
|
edoc_reader.ex
|
starcoder
|
defmodule Harald.HCI.Commands.ControllerAndBaseband.SetEventMaskPage2 do
  @moduledoc """
  Reference: version 5.2, Vol 4, Part E, 7.3.69.
  """

  alias Harald.{HCI, HCI.Commands.Command, HCI.ErrorCodes}

  @type t() :: %{
          event_mask_page_2: %{
            physical_link_complete_event: HCI.flag(),
            channel_selected_event: HCI.flag(),
            disconnection_physical_link_complete_event: HCI.flag(),
            physical_link_loss_early_warning_event: HCI.flag(),
            physical_link_recovery_event: HCI.flag(),
            logical_link_complete_event: HCI.flag(),
            disconnection_logical_link_complete_event: HCI.flag(),
            flow_spec_modify_complete_event: HCI.flag(),
            number_of_completed_data_blocks_event: HCI.flag(),
            amp_start_test_event: HCI.flag(),
            amp_test_end_event: HCI.flag(),
            amp_receiver_report_event: HCI.flag(),
            short_range_mode_change_complete_event: HCI.flag(),
            amp_status_change_event: HCI.flag(),
            triggered_clock_capture_event: HCI.flag(),
            synchronization_train_complete_event: HCI.flag(),
            synchronization_train_received_event: HCI.flag(),
            connectionless_slave_broadcast_receive_event: HCI.flag(),
            connectionless_slave_broadcast_timeout_event: HCI.flag(),
            truncated_page_complete_event: HCI.flag(),
            slave_page_response_timeout_event: HCI.flag(),
            connectionless_slave_broadcast_channel_map_change_event: HCI.flag(),
            inquiry_response_notification_event: HCI.flag(),
            authenticated_payload_timeout_expired_event: HCI.flag(),
            sam_status_change_event: HCI.flag(),
            reserved_25_to_63: HCI.reserved()
          }
        }

  @behaviour Command

  # Flag fields listed in the bit order used by decode/1 and encode/1
  # (least-significant bit first, then the reserved upper 39 bits).
  @fields [
    :physical_link_complete_event,
    :channel_selected_event,
    :disconnection_physical_link_complete_event,
    :physical_link_loss_early_warning_event,
    :physical_link_recovery_event,
    :logical_link_complete_event,
    :disconnection_logical_link_complete_event,
    :flow_spec_modify_complete_event,
    :number_of_completed_data_blocks_event,
    :amp_start_test_event,
    :amp_test_end_event,
    :amp_receiver_report_event,
    :short_range_mode_change_complete_event,
    :amp_status_change_event,
    :triggered_clock_capture_event,
    :synchronization_train_complete_event,
    :synchronization_train_received_event,
    :connectionless_slave_broadcast_receive_event,
    :connectionless_slave_broadcast_timeout_event,
    :truncated_page_complete_event,
    :slave_page_response_timeout_event,
    :connectionless_slave_broadcast_channel_map_change_event,
    :inquiry_response_notification_event,
    :authenticated_payload_timeout_expired_event,
    :sam_status_change_event,
    :reserved_25_to_63
  ]

  @impl Command
  # Decodes the 8-byte command parameter into a map of boolean flags.
  # The 64-bit value arrives little-endian; re-matching it big-endian below
  # lets the bit fields be listed most-significant-first, so the last pattern
  # entry is bit 0 (physical_link_complete_event).
  def decode(<<encoded_set_event_mask_page_2::little-size(64)>>) do
    <<
      reserved_25_to_63::size(39),
      sam_status_change_event::size(1),
      authenticated_payload_timeout_expired_event::size(1),
      inquiry_response_notification_event::size(1),
      connectionless_slave_broadcast_channel_map_change_event::size(1),
      slave_page_response_timeout_event::size(1),
      truncated_page_complete_event::size(1),
      connectionless_slave_broadcast_timeout_event::size(1),
      connectionless_slave_broadcast_receive_event::size(1),
      synchronization_train_received_event::size(1),
      synchronization_train_complete_event::size(1),
      triggered_clock_capture_event::size(1),
      amp_status_change_event::size(1),
      short_range_mode_change_complete_event::size(1),
      amp_receiver_report_event::size(1),
      amp_test_end_event::size(1),
      amp_start_test_event::size(1),
      number_of_completed_data_blocks_event::size(1),
      flow_spec_modify_complete_event::size(1),
      disconnection_logical_link_complete_event::size(1),
      logical_link_complete_event::size(1),
      physical_link_recovery_event::size(1),
      physical_link_loss_early_warning_event::size(1),
      disconnection_physical_link_complete_event::size(1),
      channel_selected_event::size(1),
      physical_link_complete_event::size(1)
    >> = <<encoded_set_event_mask_page_2::size(64)>>

    encoded_event_mask_page_2 = %{
      physical_link_complete_event: physical_link_complete_event,
      channel_selected_event: channel_selected_event,
      disconnection_physical_link_complete_event: disconnection_physical_link_complete_event,
      physical_link_loss_early_warning_event: physical_link_loss_early_warning_event,
      physical_link_recovery_event: physical_link_recovery_event,
      logical_link_complete_event: logical_link_complete_event,
      disconnection_logical_link_complete_event: disconnection_logical_link_complete_event,
      flow_spec_modify_complete_event: flow_spec_modify_complete_event,
      number_of_completed_data_blocks_event: number_of_completed_data_blocks_event,
      amp_start_test_event: amp_start_test_event,
      amp_test_end_event: amp_test_end_event,
      amp_receiver_report_event: amp_receiver_report_event,
      short_range_mode_change_complete_event: short_range_mode_change_complete_event,
      amp_status_change_event: amp_status_change_event,
      triggered_clock_capture_event: triggered_clock_capture_event,
      synchronization_train_complete_event: synchronization_train_complete_event,
      synchronization_train_received_event: synchronization_train_received_event,
      connectionless_slave_broadcast_receive_event: connectionless_slave_broadcast_receive_event,
      connectionless_slave_broadcast_timeout_event: connectionless_slave_broadcast_timeout_event,
      truncated_page_complete_event: truncated_page_complete_event,
      slave_page_response_timeout_event: slave_page_response_timeout_event,
      connectionless_slave_broadcast_channel_map_change_event:
        connectionless_slave_broadcast_channel_map_change_event,
      inquiry_response_notification_event: inquiry_response_notification_event,
      authenticated_payload_timeout_expired_event: authenticated_payload_timeout_expired_event,
      sam_status_change_event: sam_status_change_event,
      reserved_25_to_63: reserved_25_to_63
    }

    # 1/0 bits become true/false; the reserved field stays a raw integer.
    decoded_event_mask_page_2 =
      Enum.into(encoded_event_mask_page_2, %{}, fn
        {:reserved_25_to_63, reserved} -> {:reserved_25_to_63, reserved}
        {key, 1} -> {key, true}
        {key, 0} -> {key, false}
      end)

    parameters = %{event_mask_page_2: decoded_event_mask_page_2}
    {:ok, parameters}
  end

  @impl Command
  # Return parameters carry only the status code.
  def decode_return_parameters(<<encoded_status>>) do
    {:ok, decoded_status} = ErrorCodes.decode(encoded_status)
    {:ok, %{status: decoded_status}}
  end

  @impl Command
  # Inverse of decode/1: builds the little-endian 8-byte parameter from the
  # boolean flag map. The head pattern requires every flag key to be present.
  def encode(%{
        event_mask_page_2:
          %{
            physical_link_complete_event: _,
            channel_selected_event: _,
            disconnection_physical_link_complete_event: _,
            physical_link_loss_early_warning_event: _,
            physical_link_recovery_event: _,
            logical_link_complete_event: _,
            disconnection_logical_link_complete_event: _,
            flow_spec_modify_complete_event: _,
            number_of_completed_data_blocks_event: _,
            amp_start_test_event: _,
            amp_test_end_event: _,
            amp_receiver_report_event: _,
            short_range_mode_change_complete_event: _,
            amp_status_change_event: _,
            triggered_clock_capture_event: _,
            synchronization_train_complete_event: _,
            synchronization_train_received_event: _,
            connectionless_slave_broadcast_receive_event: _,
            connectionless_slave_broadcast_timeout_event: _,
            truncated_page_complete_event: _,
            slave_page_response_timeout_event: _,
            connectionless_slave_broadcast_channel_map_change_event: _,
            inquiry_response_notification_event: _,
            authenticated_payload_timeout_expired_event: _,
            sam_status_change_event: _,
            reserved_25_to_63: _
          } = decoded_event_mask_page_2
      }) do
    encoded_event_mask_page_2 =
      Enum.into(decoded_event_mask_page_2, %{}, fn
        {:reserved_25_to_63, reserved} -> {:reserved_25_to_63, reserved}
        {key, true} -> {key, 1}
        {key, false} -> {key, 0}
      end)

    # Assemble MSB-first, then re-read the integer so it can be emitted
    # little-endian (mirrors the two-step match in decode/1).
    <<encoded_set_event_mask_page_2::little-size(64)>> = <<
      encoded_event_mask_page_2.reserved_25_to_63::size(39),
      encoded_event_mask_page_2.sam_status_change_event::size(1),
      encoded_event_mask_page_2.authenticated_payload_timeout_expired_event::size(1),
      encoded_event_mask_page_2.inquiry_response_notification_event::size(1),
      encoded_event_mask_page_2.connectionless_slave_broadcast_channel_map_change_event::size(1),
      encoded_event_mask_page_2.slave_page_response_timeout_event::size(1),
      encoded_event_mask_page_2.truncated_page_complete_event::size(1),
      encoded_event_mask_page_2.connectionless_slave_broadcast_timeout_event::size(1),
      encoded_event_mask_page_2.connectionless_slave_broadcast_receive_event::size(1),
      encoded_event_mask_page_2.synchronization_train_received_event::size(1),
      encoded_event_mask_page_2.synchronization_train_complete_event::size(1),
      encoded_event_mask_page_2.triggered_clock_capture_event::size(1),
      encoded_event_mask_page_2.amp_status_change_event::size(1),
      encoded_event_mask_page_2.short_range_mode_change_complete_event::size(1),
      encoded_event_mask_page_2.amp_receiver_report_event::size(1),
      encoded_event_mask_page_2.amp_test_end_event::size(1),
      encoded_event_mask_page_2.amp_start_test_event::size(1),
      encoded_event_mask_page_2.number_of_completed_data_blocks_event::size(1),
      encoded_event_mask_page_2.flow_spec_modify_complete_event::size(1),
      encoded_event_mask_page_2.disconnection_logical_link_complete_event::size(1),
      encoded_event_mask_page_2.logical_link_complete_event::size(1),
      encoded_event_mask_page_2.physical_link_recovery_event::size(1),
      encoded_event_mask_page_2.physical_link_loss_early_warning_event::size(1),
      encoded_event_mask_page_2.disconnection_physical_link_complete_event::size(1),
      encoded_event_mask_page_2.channel_selected_event::size(1),
      encoded_event_mask_page_2.physical_link_complete_event::size(1)
    >>

    {:ok, <<encoded_set_event_mask_page_2::size(64)>>}
  end

  @impl Command
  def encode_return_parameters(%{status: decoded_status}) do
    {:ok, encoded_status} = ErrorCodes.encode(decoded_status)
    {:ok, <<encoded_status>>}
  end

  @doc """
  Return a map ready for encoding.

  Keys under `:event_mask_page_2` will be defaulted if not supplied.

  ## Options

  `:encoded` - `boolean()`. `false`. Whether the return value is encoded or not.

  `:default` - `boolean()`. `false`. The default value for unspecified fields
  under the `:event_mask_page_2` field.
  """
  def new(%{event_mask_page_2: event_mask_page_2}, opts \\ []) do
    default = Keyword.get(opts, :default, false)

    with {:ok, event_mask_page_2} <- resolve_mask(event_mask_page_2, default) do
      maybe_encode(%{event_mask_page_2: event_mask_page_2}, Keyword.get(opts, :encoded, false))
    end
  end

  @impl Command
  # Opcode Command Field for Set Event Mask Page 2.
  def ocf(), do: 0x63

  defp maybe_encode(decoded_set_event_mask, true) do
    encode(decoded_set_event_mask)
  end

  defp maybe_encode(decoded_set_event_mask, false), do: {:ok, decoded_set_event_mask}

  # Fills in unspecified flags with `default` and validates that the reserved
  # field, when supplied, is an integer.
  defp resolve_mask(fields, default) do
    # NOTE(review): 549_755_813_888 is 2^39, which does not fit in the 39-bit
    # reserved field (all-ones would be 2^39 - 1 = 549_755_813_887); encode/1
    # would silently truncate it to 0. Confirm the intended default.
    truthy_reserved = 549_755_813_888
    falsey_reserved = 0
    reserved_default = if default, do: truthy_reserved, else: falsey_reserved

    Enum.reduce_while(@fields, %{}, fn
      :reserved_25_to_63, acc ->
        case Map.fetch(fields, :reserved_25_to_63) do
          {:ok, value} when is_integer(value) -> {:cont, Map.put(acc, :reserved_25_to_63, value)}
          {:ok, _value} -> {:halt, {:error, :reserved_25_to_63}}
          :error -> {:cont, Map.put(acc, :reserved_25_to_63, reserved_default)}
        end

      field, acc ->
        {:cont, Map.put(acc, field, Map.get(fields, field, default))}
    end)
    |> case do
      {:error, _} = e -> e
      mask -> {:ok, mask}
    end
  end
end
|
src/lib/harald/hci/commands/controller_and_baseband/set_event_mask_page_2.ex
| 0.679391
| 0.405802
|
set_event_mask_page_2.ex
|
starcoder
|
defmodule Gamefield do
  @moduledoc """
  GenServer holding the state of a single game field: a nested list of eight
  columns, each with eight slots that are `nil` while empty.
  """
  use GenServer

  # -- Client API ------------------------------------------------------------

  @doc """
  Entry point. No use for a name since there are always multiple
  instances running.
  """
  def start_link do
    GenServer.start_link(__MODULE__, produce_empty_field())
  end

  @doc """
  Reset the game field to empty state.
  """
  def reset_game(server) do
    GenServer.cast(server, :resetGame)
  end

  @doc """
  Put a coin of `playerID` into `slot`.
  """
  def update_field(server, slot, playerID) do
    GenServer.call(server, {:updateField, slot, playerID})
  end

  @doc """
  Check whether the field contains a winner. This call will only
  return `true` or `false` and not the winning player. By the rules
  of this game only the last active player can be the winner.
  """
  def check_win_condition(server) do
    GenServer.call(server, :checkWinCondition)
  end

  # Produce an empty game field: eight columns of eight `nil` (empty) slots.
  defp produce_empty_field do
    for _ <- 0..7, do: for(_ <- 0..7, do: nil)
  end

  # -- Server callbacks ------------------------------------------------------

  # The original module relied on the deprecated default `init/1` injected by
  # `use GenServer`; the callback is now explicit (echoes the initial field).
  @impl true
  def init(field), do: {:ok, field}

  @impl true
  def handle_cast(:resetGame, _field) do
    {:noreply, produce_empty_field()}
  end

  @impl true
  def handle_call({:updateField, slot, player_id}, _from, field) do
    column = Enum.at(field, slot)
    updated_column = put_coin(column, player_id, 0)
    updated_field = Listhelp.put_item(field, slot, updated_column)
    {:reply, :ok, updated_field}
  end

  def handle_call(:checkWinCondition, _from, field) do
    # Check the field in every orientation so rows, columns and both
    # diagonal directions are covered.
    orientations = [
      field,
      Fieldchecker.rotate(field),
      Fieldchecker.arrow_right(field),
      Fieldchecker.arrow_left(field)
    ]

    {:reply, Enum.any?(orientations, &check_field/1), field}
  end

  # A field wins when any column contains four coins of the same type in a row.
  defp check_field(field) do
    Enum.any?(field, fn column ->
      match?({_, 4}, Fieldchecker.count(column))
    end)
  end

  # Drop a coin of `player` into the first empty level of `column`; a full
  # column is returned unchanged.
  defp put_coin(column, _, level) when level > 7, do: column

  defp put_coin(column, player, level) do
    if Enum.at(column, level) == nil do
      Listhelp.put_item(column, level, player)
    else
      put_coin(column, player, level + 1)
    end
  end
end
|
lib/Backends/connect_four/gamefield.ex
| 0.837952
| 0.416144
|
gamefield.ex
|
starcoder
|
defmodule ExRtmidi.Message.Spec do
  @moduledoc """
  Human-friendly constructors for MIDI message specifications.

  This module does not encode messages itself; it is an intermediate step
  that enforces the expected shape of each command's data before encoding.
  """
  alias __MODULE__

  @enforce_keys [:command, :status_byte, :data]
  defstruct [:command, :status_byte, :data]

  # Status bytes as assigned by the MIDI specification.
  @status_bytes %{
    note_off: 0x80,
    note_on: 0x90,
    polytouch: 0xA0,
    control_change: 0xB0,
    program_change: 0xC0,
    aftertouch: 0xD0,
    pitchwheel: 0xE0,
    sysex: 0xF0,
    quarter_frame: 0xF1,
    songpos: 0xF2,
    song_select: 0xF3,
    tune_request: 0xF6,
    clock: 0xF8,
    start: 0xFA,
    continue: 0xFB,
    stop: 0xFC,
    active_sensing: 0xFE,
    reset: 0xFF
  }

  # Commands that carry no additional control data.
  @commands_without_control_data [
    :tune_request,
    :clock,
    :start,
    :continue,
    :stop,
    :active_sensing,
    :reset
  ]

  @doc """
  Validates the arguments for `command` and returns a `Spec` struct for
  eventual encoding into a MIDI message.

  Notes:
  - No defaults are offered (eg: assuming channel 0 unless specified),
    so creating a wrapper to suit your needs is recommended
  - Trivial commands (such as :start) still have an arity of 2, even though
    they don't have any additional data. The Message module addresses this,
    but this is noteworthy if you're extending this lib directly

  The keyword data must appear in exactly the documented order; any other
  shape fails to match a clause.
  """
  @spec construct(atom(), list()) :: %Spec{}
  def construct(:note_off = command, [channel: _, note: _, velocity: _] = data),
    do: build_spec(command, data)

  def construct(:note_on = command, [channel: _, note: _, velocity: _] = data),
    do: build_spec(command, data)

  def construct(:polytouch = command, [channel: _, note: _, value: _] = data),
    do: build_spec(command, data)

  def construct(:control_change = command, [channel: _, control: _, value: _] = data),
    do: build_spec(command, data)

  def construct(:program_change = command, [channel: _, program: _] = data),
    do: build_spec(command, data)

  def construct(:aftertouch = command, [channel: _, value: _] = data),
    do: build_spec(command, data)

  def construct(:pitchwheel = command, [channel: _, pitch: _] = data),
    do: build_spec(command, data)

  def construct(:sysex = command, [data: _] = data),
    do: build_spec(command, data)

  def construct(:quarter_frame = command, [frame_type: _, frame_value: _] = data),
    do: build_spec(command, data)

  def construct(:songpos = command, [pos: _] = data),
    do: build_spec(command, data)

  def construct(:song_select = command, [song: _] = data),
    do: build_spec(command, data)

  def construct(command, _) when command in @commands_without_control_data,
    do: build_spec(command, [])

  # Pairs the command with its status byte and validated data.
  defp build_spec(command, data) do
    %Spec{command: command, status_byte: @status_bytes[command], data: data}
  end
end
|
lib/ex_rtmidi/message/spec.ex
| 0.792384
| 0.519095
|
spec.ex
|
starcoder
|
defmodule Graphvix.Record do
  @moduledoc """
  Models a graph vertex that has a shape of `record`.

  A record's label can be a single string, a single row or column, or a nested
  alternation of rows and columns.

  Once a record is created by `Graphvix.Record.new/2` it can be added to a graph using
  `Graphvix.Graph.add_record/2`.

  See `new/2` for more complete usage examples.

  ## Example

      iex> import Record, only: [column: 1]
      iex> graph = Graph.new()
      iex> record = Record.new(["a", "B", column(["c", "D"])], color: "blue")
      iex> {graph, _rid} = Graph.add_record(graph, record)
      iex> Graph.to_dot(graph)
      ~s(digraph G {\\n\\n  v0 [label="a | B | { c | D }",shape="record",color="blue"]\\n\\n})

  """
  defstruct [
    body: nil,
    properties: []
  ]

  alias Graphvix.RecordSubset

  @type body :: String.t | [any()] | RecordSubset.t()
  # NOTE: `body` holds a string or a RecordSubset (see body/0), previously
  # mistyped as a nested `Record.t()`.
  @type t :: %__MODULE__{body: body(), properties: keyword()}

  @doc """
  Returns a new `Graphvix.Record` struct that can be added to a graph as a vertex.

  ## Examples

  A record's label can be a simple text label

      iex> record = Record.new("just a plain text record")
      iex> Record.to_label(record)
      "just a plain text record"

  or it can be a single row or column of strings

      iex> import Record, only: [row: 1]
      iex> record = Record.new(row(["a", "b", "c"]))
      iex> Record.to_label(record)
      "a | b | c"

      iex> import Record, only: [column: 1]
      iex> record = Record.new(column(["a", "b", "c"]))
      iex> Record.to_label(record)
      "{ a | b | c }"

  or it can be a series of nested rows and columns

      iex> import Record, only: [row: 1, column: 1]
      iex> record = Record.new(
      ...>   row([
      ...>     "a",
      ...>     column([
      ...>       "b", "c", "d"
      ...>     ]),
      ...>     column([
      ...>       "e",
      ...>       "f",
      ...>       row([
      ...>         "g", "h", "i"
      ...>       ])
      ...>     ])
      ...>   ])
      ...> )
      iex> Record.to_label(record)
      "a | { b | c | d } | { e | f | { g | h | i } }"

  passing a plain list defaults to a row

      iex> record = Record.new(["a", "b", "c"])
      iex> Record.to_label(record)
      "a | b | c"

  Each cell can contain a plain string, or a string with a port attached,
  allowing edges to be drawn directly to and from that cell, rather than the
  vertex. Ports are created by passing a tuple of the form `{port_name, label}`

      iex> record = Record.new(["a", {"port_b", "b"}])
      iex> Record.to_label(record)
      "a | <port_b> b"

  A second, optional argument can be passed specifying other formatting and
  styling properties for the vertex.

      iex> record = Record.new(["a", {"port_b", "b"}, "c"], color: "blue")
      iex> graph = Graph.new()
      iex> {graph, _record_id} = Graph.add_record(graph, record)
      iex> Graph.to_dot(graph)
      ~s(digraph G {\\n\\n  v0 [label="a | <port_b> b | c",shape="record",color="blue"]\\n\\n})

  """
  def new(body, properties \\ [])
  def new(string, properties) when is_bitstring(string) do
    %__MODULE__{body: string, properties: properties}
  end
  # A bare list is treated as a row (see the doctest above).
  def new(list, properties) when is_list(list) do
    %__MODULE__{body: RecordSubset.new(list), properties: properties}
  end
  def new(row_or_column = %RecordSubset{}, properties) do
    %__MODULE__{body: row_or_column, properties: properties}
  end

  @doc """
  A helper method that takes a list of cells and returns them as a row inside a
  `Graphvix.Record` struct.

  The list can consist of a mix of string labels or tuples of cell labels +
  port names.

  This function provides little functionality on its own. See the documentation
  for `Graphvix.Record.new/2` for usage examples in context.
  """
  def row(cells) do
    %RecordSubset{cells: cells, is_column: false}
  end

  @doc """
  A helper method that takes a list of cells and returns them as a column inside a
  `Graphvix.Record` struct.

  The list can consist of a mix of string labels or tuples of cell labels +
  port names.

  This function provides little functionality on its own. See the documentation
  for `Graphvix.Record.new/2` for usage examples in context.
  """
  def column(cells) do
    %RecordSubset{cells: cells, is_column: true}
  end

  @doc false
  def to_label(record)
  # A plain-string body is its own label.
  def to_label(%{body: string}) when is_bitstring(string) do
    string
  end
  # Delegate structured bodies to RecordSubset (top-level flag set).
  def to_label(%{body: subset = %RecordSubset{}}) do
    RecordSubset.to_label(subset, true)
  end
end
|
lib/graphvix/record.ex
| 0.913489
| 0.687886
|
record.ex
|
starcoder
|
defmodule Drab.Live.EExEngine do
@moduledoc """
This is an implementation of EEx.Engine that injects `Drab.Live` behaviour.
It parses the template during compile-time and inject Drab markers into it. Because of this, template must be
a proper HTML. Also, there are some rules to obey, see limitations below.
### Limitations
Because `Drab.Live` always tries to update the smallest portion of the html, it has some limits described below.
It is very important to understand how Drab re-evaluates the expressions with the new assign values. Consider the
comprehension with the condition as below:
<%= for u <- @users do %>
<%= if u != @user do %>
<%= u %> <br>
<% end %>
<% end %>
The template above contains two Drabbable expression: `for` comprehension and `if` condition. When the new
value of `@users` is poked, all works as expected: the list is refreshed. But when you poke the `@user` assign,
system will return an error that the `u()` function is not defined. This is because Drab tries to re-evaluate
the expression with the `@user` assign - the `if` statement, and the `u` variable is defined elsewhere.
Updating `@user` will raise `CompileError`:
iex> poke socket, user: "Changed"
** (CompileError) undefined function u/0
Using local variables defined in external blocks is prohibited in Drab.
Please check the following documentation page for more details:
https://hexdocs.pm/drab/Drab.Live.EExEngine.html#module-limitations
But what was your goal when poking the `@user` assign? You wanted to update the whole `for` expression, because
the displayed users list should be refreshed. The best way to accomplish the goal - reload the whole for
comprehension - is to move `@user` assign to the parent expression. In this case it would be a filter:
<%= for u <- @users, u != @user do %>
<%= u %> <br>
<% end %>
In this case the whole `for` expression is evaluated when the `@user` assign is changed.
There is also the other way to solve this issue, described in the next paragraph.
#### Parent/child Expression Detection
Drab is able to detect when updating both parent and child expression (child is the one inside the block).
In the case above, the parent expression is the `for` comprehension with `@users` assign, and the child
is the `if` containing only `@user`. When you update both assigns with the same `poke`, Drab would
be able to detect that the `if` is inside `for`, and should not be refreshed.
This means that you may solve the case above with:
poke socket, users: peek(socket, :users), user: "Changed"
This statement will update the whole `for` loop, without any changes to `@users`, but with changed
`@user` assign.
#### Avalibility of Assigns
To make the assign avaliable within Drab, it must show up in the template with "`@assign`" format. Passing it
to `render` in the controller is not enough.
Also, the living assign must be inside the `<%= %>` mark. If it lives in `<% %>`, it will not be updated by
`Drab.Live.poke/2`. This means that in the following template:
<% local = @assign %>
<%= local %>
poking `@assign` will not update anything.
#### Local Variables
Local variables are only visible in its `do...end` block. You can't use a local variable from outside the block.
So, the following is allowed:
<%= for user <- @users do %>
<li><%= user %></li>
<% end %>
and after poking a new value of `@users`, the list will be updated.
But the next example is not valid and will raise `undefined function` exception while trying to update an `@anything`
assign:
<% local = @assign1 %>
<%= if @anything do %>
<%= local %>
<% end %>
#### Attributes
The attribute must be well defined, and you can't use the expression as an attribute name.
The following is valid:
<button class="btn <%= @button_class %>">
<a href="<%= build_href(@site) %>">
But following constructs are prohibited:
<tag <%="attr='" <> @value <> "'"%>>
<tag <%=build_attr(@name, @value)%>>
The above will compile (with warnings), but it will not be correctly updated with `Drab.Live.poke`.
The tag name can not be build with the expression.
<<%= @tag_name %> attr=value ...>
Nested expressions are not valid in the attribute pattern. The following is not allowed:
<tag attribute="<%= if clause do %><%= expression %><% end %>">
Do a flat expression instead:
<tag attribute="<%= if clause, do: expression %>">
#### Scripts
Tag name must be defined in the template as `<script>`, and can't be defined with the expression.
Nested expressions are not valid in the script pattern. The following is not allowed:
<script>
<%= if clause do %>
<%= expression %>
<% end %>>
</script>
Do a flat expression instead:
<script>
<%= if clause, do: expression %>
</script>
#### Properties
Property must be defined inside the tag, using strict `@property.path.from.node=<%= expression %>` syntax.
One property may be bound only to the one assign.
"""
import Drab.Live.Crypto
import Drab.Live.HTML
use EEx.Engine
require IEx
require Logger
# Name of the client-side JS global holding assigns and properties.
@jsvar "__drab"
# HTML attribute marking a Drab-controlled ("ampere") node.
@drab_id "drab-ampere"
# Metadata for generated quoted code: OTP >= 19 supports :generated to
# suppress warnings; older releases used line: -1 for the same effect.
@anno (if :erlang.system_info(:otp_release) >= '19' do
[generated: true]
else
[line: -1]
end)
@impl true
# Validates that the compiled template is an HTML partial, registers its
# path <-> hash mapping in the Drab cache, and seeds the buffer with a
# partial marker that handle_body/1 later extracts.
def init(opts) do
unless opts[:file]
|> Path.basename(Drab.Config.drab_extension())
|> Path.extname()
|> String.downcase() == ".html" do
raise EEx.SyntaxError,
message: """
Drab.Live may work only with html partials.
Invalid extention of file: #{opts[:file]}.
"""
end
partial = opts[:file]
partial_hash = hash(partial)
Logger.info("Compiling Drab partial: #{partial} (#{partial_hash})")
Drab.Live.Cache.start()
# Cache both directions: path -> hash and hash -> path.
Drab.Live.Cache.set(partial, partial_hash)
Drab.Live.Cache.set(partial_hash, partial)
buffer = "{{{{@drab-partial:#{partial_hash}}}}}"
{:safe, buffer}
end
@impl true
# Finalizes the template: collects every ampere (Drab-marked node), caches the
# compiled patterns and the assign -> amperes mapping, strips the internal
# drab marks, and appends the generated <script> blocks to the output.
def handle_body({:safe, body}) do
  body = List.flatten(body)
  partial_hash = partial(body)

  # Bootstrap the client-side __drab object for this partial.
  init_js = """
  if (typeof window.#{@jsvar} == 'undefined') {#{@jsvar} = {assigns: {}, properties: {}}};
  if (typeof #{@jsvar}.index == 'undefined') {#{@jsvar}.index = '#{partial_hash}'};
  #{@jsvar}.assigns['#{partial_hash}'] = {};
  """

  found_amperes = amperes_from_buffer({:safe, body})

  # For each ampere: cache its compiled patterns and emit {assign, ampere_id}
  # pairs, then group them into assign -> [ampere_id] lists.
  amperes_to_assigns =
    for {ampere_id, vals} <- found_amperes do
      ampere_values =
        for {gender, tag, prop_or_attr, pattern} <- vals do
          compiled =
            gender
            |> compiled_from_pattern(pattern, tag, prop_or_attr)
            |> remove_drab_marks()

          {assigns, parents} = assigns_and_parents_from_pattern(pattern)
          {gender, tag, prop_or_attr, compiled, assigns, parents}
        end

      Drab.Live.Cache.set({partial_hash, ampere_id}, ampere_values)

      for {_, _, _, _, assigns, _} <- ampere_values,
          assign <- assigns do
        {assign, ampere_id}
      end
    end
    |> List.flatten()
    |> Enum.uniq()
    |> Enum.group_by(fn {k, _v} -> k end, fn {_k, v} -> v end)

  # ampere-to_assign list
  for {assign, amperes} <- amperes_to_assigns do
    Drab.Live.Cache.set({partial_hash, assign}, amperes)
  end

  found_assigns = for({assign, _} <- amperes_to_assigns, do: assign) |> Enum.uniq()

  assigns_js =
    found_assigns
    |> Enum.map(fn assign ->
      assign_js(partial_hash, assign)
    end)
    |> script_tag()

  partial_path = Drab.Live.Cache.get(partial_hash)
  Drab.Live.Cache.set(partial_hash, {partial_path, found_assigns})
  Drab.Live.Cache.set(partial_path, {partial_hash, found_assigns})

  # Renamed from the original misspelled `properies_js`.
  properties_js =
    for {ampere_id, vals} <- found_amperes do
      found_props =
        for {:prop, tag, property, pattern} <- vals do
          property_js(ampere_id, property, compiled_from_pattern(:prop, pattern, tag, property))
        end

      [
        case found_props do
          [] -> ""
          _ -> "#{@jsvar}.properties['#{ampere_id}'] = {};"
        end
        | found_props
      ]
    end
    |> script_tag()

  final =
    [
      script_tag(init_js),
      remove_drab_marks(body),
      assigns_js,
      properties_js
    ]
    |> List.flatten()

  {:safe, final}
end
# Matches one cached-expression marker pair:
# {{{{@drab-expr-hash:HASH}}}} ... {{{{/@drab-expr-hash:HASH}}}}
@expr ~r/{{{{@drab-expr-hash:(\S+)}}}}.*{{{{\/@drab-expr-hash:\S+}}}}/Us
@spec compiled_from_pattern(atom, String.t(), String.t(), String.t()) :: Macro.t() | [Macro.t()] | no_return
# A node property may be bound to exactly one expression and nothing else;
# any other shape (extra text, multiple expressions) is a syntax error.
defp compiled_from_pattern(:prop, pattern, tag, property) do
case compiled_from_pattern(:other, pattern, tag, property) do
[expr | []] when is_tuple(expr) ->
expr
_ ->
raise EEx.SyntaxError,
message: """
syntax error in Drab property special form for tag: <#{tag}>, property: #{property}
You can only combine one Elixir expression with one node property.
Allowed:
<tag @property=<%=expression%>>
Prohibited:
<tag @property="other text <%=expression%>">
<tag @property="<%=expression1%><%=expression2%>">
"""
end
end
# Generic case: split the pattern on expression markers and replace each
# marker with the cached AST, leaving plain text chunks unchanged.
defp compiled_from_pattern(_, pattern, _, _) do
pattern
|> String.split(@expr, include_captures: true, trim: true)
|> Enum.map(&expr_from_cache/1)
end
@spec expr_from_cache(String.t()) :: Macro.t()
# Looks up the AST cached under the expression hash; text that is not a
# marker is returned as-is.
defp expr_from_cache(text) do
# TODO: not sure
case Regex.run(@expr, text) do
[_, expr_hash] ->
{:expr, expr, _, _} = Drab.Live.Cache.get(expr_hash)
quote do
unquote(expr)
end
nil ->
text
end
end
@doc false
@spec assigns_and_parents_from_pattern(String.t()) :: {[atom], [atom]}
# Extracts the assigns and parent assigns referenced by the cached
# expressions inside `pattern`, ignoring anything under nested ampered tags
# (those belong to child expressions and are collected separately).
def assigns_and_parents_from_pattern(pattern) do
  # do not search under nested ampered tags
  pattern =
    case Floki.parse(pattern) do
      {_, _, _} ->
        pattern

      list when is_list(list) ->
        # BUGFIX: this called the undefined `&ered_tag?/1`; the predicate
        # defined below is ampered_tag?/1.
        list
        |> Enum.reject(&ampered_tag?/1)
        |> Floki.raw_html()

      string when is_binary(string) ->
        pattern
    end

  expressions = for [_, expr_hash] <- Regex.scan(@expr, pattern), do: expr_hash

  {assigns, parents} =
    for expr_hash <- expressions do
      {:expr, _, assigns, parents} = Drab.Live.Cache.get(expr_hash)
      {assigns, parents}
    end
    |> Enum.unzip()

  {assigns
   |> List.flatten()
   |> Enum.uniq(),
   parents
   |> List.flatten()
   |> Enum.uniq()}
end
@spec ampered_tag?({any, [String.t()], any} | String.t()) :: boolean
# True when a Floki node already carries the drab-ampere attribute.
# BUGFIX: Enum.find/2 returned the attribute tuple or nil, contradicting the
# boolean @spec; Enum.any?/2 returns a proper boolean (truthiness for the
# Enum.reject/2 caller is unchanged).
defp ampered_tag?({_, attributes, _}) do
  Enum.any?(attributes, fn {attribute, _} -> attribute == @drab_id end)
end

# Plain text nodes can never be ampered.
defp ampered_tag?(string) when is_binary(string) do
  false
end
@impl true
# Appends literal template text to the buffer.
def handle_text({:safe, buffer}, text) do
{:safe,
quote do
[unquote(buffer), unquote(text)]
end}
end
@impl true
# EEx may start with a plain "" buffer; normalize it to the :safe form.
def handle_text("", text) do
handle_text({:safe, ""}, text)
end
@impl true
# A nested block body starts with a fresh empty buffer.
def handle_begin(_previous) do
{:safe, ""}
end
@impl true
def handle_end(quoted) do
quoted
end
@impl true
def handle_expr("", marker, expr) do
handle_expr({:safe, ""}, marker, expr)
end
@impl true
# <% %> (no output): evaluate the expression for side effects / bindings
# only; the buffer is returned unchanged.
def handle_expr({:safe, buffer}, "", expr) do
expr = Macro.prewalk(expr, &handle_assign/1)
{:safe,
quote do
tmp2 = unquote(buffer)
unquote(expr)
tmp2
end}
end
@impl true
# <%= %>: the Drab-instrumented output expression. Caches the expression
# under its hash, marks the surrounding tag (or wraps the output in a span)
# with a drab-ampere id, and emits marker-delimited output.
def handle_expr({:safe, buffer}, "=", expr) do
# check if the expression is in the nodrab/1
{expr, nodrab} =
case expr do
{:nodrab, _, [only_one_parameter]} -> {only_one_parameter, true}
_ -> {expr, false}
end
# Inside an opened tag the expression is tracked via an attribute on that
# tag; outside one it gets wrapped in its own <span>.
inject_span? = not in_opened_tag?(buffer)
line = line_from_expr(expr)
expr = Macro.prewalk(expr, &handle_assign/1)
found_assigns = find_assigns(expr)
shallow_assigns = shallow_find_assigns(expr)
found_assigns? = found_assigns != []
# set up parent assigns for all found children
for child_expr_hash <- find_expr_hashes(expr) do
{:expr, expression, assigns, parent_assigns} = Drab.Live.Cache.get(child_expr_hash)
parent_assigns = Enum.uniq(parent_assigns ++ shallow_assigns) -- assigns
Drab.Live.Cache.set(child_expr_hash, {:expr, expression, assigns, parent_assigns})
end
ampere_id = hash({buffer, expr})
attribute = "#{@drab_id}=\"#{ampere_id}\""
html = buffer |> to_flat_html()
# Tag-bound expressions get the ampere attribute injected into the last
# opened tag; failure to find one is a template error.
buffer =
if !inject_span? && found_assigns? do
case inject_attribute_to_last_opened(buffer, attribute) do
# injected!
{:ok, buf, _} ->
buf
# it was already there
{:already_there, _, _} ->
buffer
{:not_found, _, _} ->
raise EEx.SyntaxError,
message: """
can't find the parent tag for an expression in line #{line}.
"""
end
else
buffer
end
hash = hash(expr)
Drab.Live.Cache.set(
hash,
{:expr, remove_drab_marks(expr), found_assigns, []}
)
# TODO: REFACTOR
attr = html |> find_attr_in_html()
# An "@name=" attribute inside an open tag denotes a node-property binding,
# which is JS-encoded rather than HTML-escaped.
is_property = Regex.match?(~r/<\S+/s, no_tags(html)) && attr && String.starts_with?(attr, "@")
expr = if is_property, do: encoded_expr(expr), else: to_safe(expr, line)
span_begin = "<span #{attribute}>"
span_end = "</span>"
expr_begin = "{{{{@drab-expr-hash:#{hash}}}}}"
expr_end = "{{{{/@drab-expr-hash:#{hash}}}}}"
nodrab = if shallow_find_assigns(expr) == [:conn], do: true, else: nodrab
nodrab = if found_assigns?, do: nodrab, else: true
buf =
case {inject_span?, nodrab} do
{_, true} ->
# do not drab expressions with @conn only, as it is readonly
# and when marked with nodrab()
quote do
tmp1 = unquote(buffer)
[tmp1, unquote(expr)]
end
{true, _} ->
quote do
tmp1 = unquote(buffer)
[
tmp1,
unquote(span_begin),
unquote(expr_begin),
unquote(expr),
unquote(expr_end),
unquote(span_end)
]
end
{false, _} ->
quote do
tmp1 = unquote(buffer)
[tmp1, unquote(expr_begin), unquote(expr), unquote(expr_end)]
end
end
{:safe, buf}
end
@impl true
# <%/ %>: plain safe output with no Drab instrumentation.
def handle_expr({:safe, buffer}, "/", expr) do
line = line_from_expr(expr)
expr = Macro.prewalk(expr, &handle_assign/1)
{:safe,
quote do
tmp1 = unquote(buffer)
[tmp1, unquote(to_safe(expr, line))]
end}
end
@spec partial(list) :: String.t() | nil
# Extracts the partial hash injected by init/1 from the flattened buffer.
defp partial(body) do
html = to_flat_html(body)
p = Regex.run(~r/{{{{@drab-partial:([^']+)}}}}/Uis, html)
# TODO: possibly dangerous - returning nil when partial not found
if p, do: List.last(p), else: nil
end
@spec find_attr_in_html(String.t()) :: String.t() | nil
# Returns the name of the attribute the current expression is sitting in
# (the last incomplete `name=` before the expression), or nil when the
# expression is not inside an attribute value.
defp find_attr_in_html(html) do
args_removed = args_removed(html)
if String.contains?(args_removed, "=") do
args_removed
|> String.split("=")
|> take_at(-2)
|> String.split(~r/\s+/)
|> Enum.filter(fn x -> x != "" end)
|> List.last()
else
nil
end
end
@spec args_removed(String.t()) :: String.t()
# Text after the last tag opening, with completed attributes removed, so
# only the attribute currently being built remains.
defp args_removed(html) do
html
|> String.split(~r/<\S+/s)
|> List.last()
|> remove_full_args()
end
@spec remove_full_args(String.t()) :: String.t()
# Strips complete attr='...'/attr="..."/attr=bare pairs from the string.
defp remove_full_args(string) do
string
|> String.replace(~r/\S+\s*=\s*'[^']*'/s, "")
|> String.replace(~r/\S+\s*=\s*"[^"]*"/s, "")
|> String.replace(~r/\S+\s*=\s*[^'"\s]+\s+/s, "")
end
@spec take_at(list, integer) :: term
# Fetches the element at `index` (negative counts from the end); nil when
# out of range — exactly what List.pop_at/2 returned as its first element.
defp take_at(list, index), do: Enum.at(list, index)
@spec no_tags(String.t()) :: String.t()
# Removes every complete <...> tag, leaving only trailing open-tag text.
defp no_tags(html), do: String.replace(html, ~r/<\S+.*>/s, "")
@spec script_tag(String.t() | []) :: [String.t()] | []
# Wraps JS in a drab-marked <script>; empty input yields no tag at all.
defp script_tag([]), do: []
defp script_tag(js) do
["<script drab-script>", js, "</script>\n"]
end
@spec assign_js(String.t(), atom) :: [String.t()]
# JS snippet storing the base64-encoded assign value for the given partial.
defp assign_js(partial, assign) do
["#{@jsvar}.assigns['#{partial}']['#{assign}'] = '", encoded_assign(assign), "';"]
end
@spec property_js(String.t(), atom, Macro.t()) :: [String.t()]
# JS snippet storing the JS-encoded property expression for an ampere.
defp property_js(ampere, property, expr) do
["#{@jsvar}.properties['#{ampere}']['#{property}'] = ", encoded_expr(expr), ";"]
end
@spec encoded_assign(atom) :: Macro.t()
# Quoted code that fetches the assign at render time and base64-encodes it.
defp encoded_assign(assign) do
assign_expr =
quote @anno do
Phoenix.HTML.Engine.fetch_assign(var!(assigns), unquote(assign))
end
base64_encoded_expr(assign_expr)
end
@spec base64_encoded_expr(Macro.t()) :: Macro.t()
defp base64_encoded_expr(expr) do
quote @anno do
Drab.Live.Crypto.encode64(unquote(expr))
end
end
@doc false
@spec encoded_expr(Macro.t()) :: Macro.t()
# Quoted code that JS-encodes the expression's runtime value.
def encoded_expr(expr) do
quote @anno do
Drab.Core.encode_js(unquote(expr))
end
end
@spec line_from_expr(Macro.t()) :: integer | nil
# Best-effort line number from AST metadata, for error messages.
defp line_from_expr({_, meta, _}) when is_list(meta), do: Keyword.get(meta, :line)
defp line_from_expr(_), do: nil
# @doc false
# defp to_safe(literal), do: to_safe(literal, @anno)
@spec to_safe(Macro.t(), integer | nil) :: iodata
# Literals can be converted to safe iodata at compile time.
defp to_safe(literal, _line) when is_binary(literal) or is_atom(literal) or is_number(literal) do
Phoenix.HTML.Safe.to_iodata(literal)
end
defp to_safe(literal, line) when is_list(literal) do
quote line: line, do: Phoenix.HTML.Safe.to_iodata(unquote(literal))
end
# Runtime case: {:safe, _} passes through, binaries are escaped, anything
# else goes through the Phoenix.HTML.Safe protocol.
defp to_safe(expr, line) do
# Keep stacktraces for protocol dispatch...
fallback = quote line: line, do: Phoenix.HTML.Safe.to_iodata(other)
# However ignore them for the generated clauses to avoid warnings
quote @anno do
case unquote(expr) do
{:safe, data} -> data
bin when is_binary(bin) -> Plug.HTML.html_escape(bin)
other -> unquote(fallback)
end
end
end
@spec handle_assign(Macro.t()) :: Macro.t()
# Rewrites @assign references into Phoenix fetch_assign calls so Drab can
# later locate them in the AST (see find_assigns/1).
defp handle_assign({:@, meta, [{name, _, atom}]}) when is_atom(name) and is_atom(atom) do
quote line: meta[:line] || 0 do
Phoenix.HTML.Engine.fetch_assign(var!(assigns), unquote(name))
end
end
defp handle_assign(arg), do: arg
@spec find_assigns(Macro.t()) :: [atom]
# Collects every assign referenced anywhere in the AST (deep walk over the
# fetch_assign calls produced by handle_assign/1).
defp find_assigns(ast) do
{_, result} =
Macro.prewalk(ast, [], fn node, acc ->
case node do
{{:., _, [{:__aliases__, _, [:Phoenix, :HTML, :Engine]}, :fetch_assign]}, _, [_, name]}
when is_atom(name) ->
{node, [name | acc]}
_ ->
{node, acc}
end
end)
result |> Enum.uniq() |> Enum.sort()
end
@doc false
@spec shallow_find_assigns(Macro.t()) :: [atom]
# Like find_assigns/1, but the custom walk in do_find/2 stops at {:safe, _}
# nodes, so assigns inside already-compiled inner buffers are skipped.
def shallow_find_assigns(ast) do
{_, assigns} = do_find(ast, [])
Enum.uniq(assigns)
end
# {:safe, _} marks an already-compiled inner buffer; do not descend.
defp do_find({:safe, _}, acc) do
{nil, acc}
end
defp do_find({form, meta, args}, acc) when is_atom(form) do
{args, acc} = do_find_args(args, acc)
{{form, meta, args}, acc}
end
# Non-atom form: may itself be a fetch_assign call node — record it.
defp do_find({form, meta, args} = ast, acc) do
found_assign = find_assign(ast)
{form, acc} = do_find(form, acc)
{args, acc} = do_find_args(args, acc)
acc = if found_assign, do: [found_assign | acc], else: acc
{{form, meta, args}, acc}
end
defp do_find({left, right}, acc) do
{left, acc} = do_find(left, acc)
{right, acc} = do_find(right, acc)
{{left, right}, acc}
end
defp do_find(list, acc) when is_list(list) do
do_find_args(list, acc)
end
defp do_find(x, acc) do
{x, acc}
end
defp do_find_args(args, acc) when is_atom(args) do
{args, acc}
end
defp do_find_args(args, acc) when is_list(args) do
Enum.map_reduce(args, acc, fn x, acc ->
do_find(x, acc)
end)
end
@spec find_assign(Macro.t()) :: atom | false
# Returns the assign name when the node is a fetch_assign call, else false.
defp find_assign({{:., _, [{:__aliases__, _, [:Phoenix, :HTML, :Engine]}, :fetch_assign]}, _, [_, name]})
when is_atom(name),
do: name
defp find_assign(_), do: false
end
|
lib/drab/live/eex_engine.ex
| 0.761361
| 0.698021
|
eex_engine.ex
|
starcoder
|
defmodule ExSlackLogger do
  @moduledoc """
  ExSlackLogger is a logger backend module for Slack.

  ## Example

  At first, add ExSlackLogger as logger backend in your config.exs.

  ```elixir
  config :logger, backends: [:console, ExSlackLogger]
  ```

  Next, configure logger backend.

  ```elixir
  config :logger, ExSlackLogger,
    level: :error,
    hook_url: {:system, "SLACK_WEBHOOK_URL"},
    channel: "#your_slack_channel_name",
    username: "slack_user_name"
  ```

  After that, you can receive application log in your slack channel.
  """
  @behaviour :gen_event

  # Logger installs the backend passing the module name as the init argument.
  def init(__MODULE__) do
    {:ok, configure([])}
  end

  # Ignore log events originating from remote nodes.
  def handle_event({_level, gl, {Logger, _, _, _}}, state) when node(gl) != node() do
    {:ok, state}
  end

  # Forward the event to Slack when it meets the configured minimum level.
  def handle_event({level, _gl, {Logger, msg, timestamps, _details}}, %{level: log_level} = state) do
    if meet_level?(level, log_level) do
      post_to_slack(level, msg, timestamps, state)
    end

    {:ok, state}
  end

  def handle_event(:flush, state) do
    {:ok, state}
  end

  def handle_info({:io_reply, _, :ok}, state) do
    {:ok, state}
  end

  # nil minimum level means "log everything".
  defp meet_level?(_lvl, nil), do: true

  defp meet_level?(lvl, min) do
    Logger.compare_levels(lvl, min) != :lt
  end

  # BUGFIX: a :gen_event handle_call/2 must return {:ok, reply, new_state};
  # the previous {:ok, {:ok, state', state'}} was an invalid 2-tuple shape
  # and also ran configure/2 twice.
  def handle_call({:configure, opts}, state) do
    new_state = configure(opts, state)
    {:ok, :ok, new_state}
  end

  defp configure(opts) do
    state = %{level: nil, hook_url: nil, channel: nil, username: nil}
    configure(opts, state)
  end

  # Merge runtime opts over the application env and persist the result.
  defp configure(opts, state) do
    env = Application.get_env(:logger, __MODULE__, [])
    opts = Keyword.merge(env, opts)
    Application.put_env(:logger, __MODULE__, opts)
    level = Keyword.get(opts, :level)
    hook_url = retrieve_runtime_value(Keyword.get(opts, :hook_url))
    channel = Keyword.get(opts, :channel)
    username = Keyword.get(opts, :username)
    %{state | level: level, hook_url: hook_url, channel: channel, username: username}
  end

  # {:system, var} defers the value to an environment variable at runtime.
  defp retrieve_runtime_value({:system, env_key}) do
    System.get_env(env_key)
  end

  # Also accept a literal value (e.g. the webhook URL itself) in config.
  defp retrieve_runtime_value(value), do: value

  defp post_to_slack(level, message, timestamps, %{hook_url: hook_url} = state) do
    message = flatten_message(message) |> Enum.join("\n")
    {:ok, time} = parse_timex(timestamps) |> Timex.to_datetime() |> Timex.format("{ISO:Extended}")
    payload = slack_payload(level, message, time, state)
    HTTPoison.post(hook_url, payload)
  end

  defp flatten_message(msg) do
    case msg do
      [n | body] -> ["#{n}: #{body}"]
      _ -> [msg]
    end
  end

  # Logger timestamps carry {date, {h, m, s, millis}}; drop the millis part.
  defp parse_timex(timestamps) do
    {date, {h, m, s, _min}} = timestamps
    {date, {h, m, s}}
  end

  defp slack_payload(level, message, time, %{channel: channel, username: username}) do
    icon = slack_icon(level)
    color = slack_color(level)

    {:ok, event} =
      %{
        channel: channel,
        username: username,
        text: "*[#{time}] #{level}*",
        icon_emoji: icon,
        attachments: attachments_payload(message, color)
      }
      |> Poison.encode()

    event
  end

  defp slack_icon(:debug), do: ":thought_balloon:"
  defp slack_icon(:info), do: ":speaker:"
  defp slack_icon(:warn), do: ":warning:"
  defp slack_icon(:error), do: ":skull_and_crossbones:"

  defp slack_color(:debug), do: "#a0a0a0"
  defp slack_color(:info), do: "good"
  defp slack_color(:warn), do: "warning"
  defp slack_color(:error), do: "danger"

  defp attachments_payload(message, color) do
    [
      %{
        color: color,
        text: "```#{message}```",
        mrkdwn_in: [
          "text"
        ]
      }
    ]
  end
end
|
lib/ex_slack_logger.ex
| 0.759404
| 0.687669
|
ex_slack_logger.ex
|
starcoder
|
defmodule Ergo.Terminals do
alias Ergo.{Context, Parser}
import Ergo.Utils
require Logger
@moduledoc ~S"""
`Ergo.Terminals` contains the terminal parsers, which are those parsers not
parameterized with other parsers and therefore work more at the level of text
than structure.
"""
@doc """
The eoi parser is a terminal parser that checks whether the input
has been fully consumed. If there is input remaining to be parsed
the return context status is set to :error.
## Examples
iex> alias Ergo.{Context, Parser}
iex> import Ergo.Terminals
iex> ctx = Context.new("")
iex> assert %Context{status: :ok, ast: nil} = Parser.invoke(ctx, eoi())
iex> alias Ergo.{Context, Parser}
iex> import Ergo.Terminals
iex> ctx = Context.new("Hello World")
iex> assert %Context{status: {:error, [{:not_eoi, {1, 1}, "Input not empty: Hello World"}]}, input: "Hello World"} = Parser.invoke(ctx, eoi())
"""
def eoi() do
  # Succeeds (ast: nil) only when the whole input has been consumed;
  # otherwise records a :not_eoi error with a preview of the leftover text.
  Parser.terminal(
    :eoi,
    "eoi",
    fn %Context{input: remaining} = ctx ->
      if remaining == "" do
        %{ctx | status: :ok, ast: nil}
      else
        Context.add_error(ctx, :not_eoi, "Input not empty: #{ellipsize(remaining)}")
      end
    end
  )
end
@doc """
The `any/0` parser matches any character.
# Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = any()
iex> assert %Context{status: :ok, ast: ?H} = Ergo.parse(parser, "H")
iex> assert %Context{status: :ok, ast: ?e} = Ergo.parse(parser, "e")
iex> assert %Context{status: :ok, ast: ?!} = Ergo.parse(parser, "!")
iex> assert %Context{status: :ok, ast: ?0} = Ergo.parse(parser, "0")
"""
def any() do
  # Consumes exactly one character of input, whatever it is.
  Parser.terminal(:any, "any", &Context.next_char/1)
end
@doc """
The `char/1` parser is a terminal parser that matches a specific character.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?H)
iex> assert %Context{status: :ok, ast: ?H, input: "ello World", index: 1, line: 1, col: 2} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?h)
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: h Actual: H"}]}, input: "Hello World"} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?H)
iex> assert %Context{status: {:error, [{:unexpected_eoi, {1, 1}, "Unexpected end of input"}]}} = Ergo.parse(parser, "")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?A..?Z)
iex> assert %Context{status: :ok, ast: ?H, input: "ello World", index: 1, line: 1, col: 2} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?a..?z)
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: a..z Actual: H"}]},input: "Hello World"} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(?A..?Z)
iex> assert %Context{status: {:error, [{:unexpected_eoi, {1, 1}, "Unexpected end of input"}]}} = Ergo.parse(parser, "")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char([?a..?z, ?A..?Z])
iex> assert %Context{status: :ok, ast: ?H, input: "ello World", index: 1, line: 1, col: 2} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char([?a..?z, ?A..?Z])
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: [a..z, A..Z] Actual: 0"}]}, input: "0000"} = Ergo.parse(parser, "0000")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(-?0)
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Should not have matched 0"}]}, input: "0000"} = Ergo.parse(parser, "0000")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = char(-?a)
iex> assert %Context{status: :ok, input: "000", ast: ?0, index: 1, col: 2} = Ergo.parse(parser, "0000")
"""
def char(c, opts \\ [])
# Positive codepoint: match exactly that character.
def char(c, opts) when is_integer(c) and c >= 0 do
label = Keyword.get(opts, :label, "?#{char_to_string(c)}")
Parser.terminal(
:char,
label,
fn ctx ->
case Context.next_char(ctx) do
%Context{status: :ok, ast: ^c} = new_ctx ->
new_ctx
# Errors are added onto the *original* ctx, so no input is consumed.
%Context{status: :ok, ast: u} ->
Context.add_error(ctx, :unexpected_char, "Expected: #{describe_char_match(c)} Actual: #{char_to_string(u)}")
%Context{status: {:error, _}} = new_ctx ->
new_ctx
end
end
)
end
# Negative codepoint: match any character EXCEPT (-c).
def char(c, opts) when is_integer(c) and c < 0 do
c = -c
label = Keyword.get(opts, :label, "?-#{char_to_string(c)}")
Parser.terminal(
:not_char,
label,
fn ctx ->
case Context.next_char(ctx) do
%Context{status: :ok, ast: ^c} ->
Context.add_error(ctx, :unexpected_char, "Should not have matched #{describe_char_match(c)}")
%Context{status: :ok, ast: _} = new_ctx ->
new_ctx
%Context{status: {:error, _}} = err_ctx ->
err_ctx
end
end
)
end
# Range: match any character within min..max (inclusive).
def char(min..max, opts) when is_integer(min) and is_integer(max) do
label = Keyword.get(opts, :label, "?(#{char_to_string(min)}..#{char_to_string(max)})")
Parser.terminal(
:char_range,
label,
fn ctx ->
case Context.next_char(ctx) do
%Context{status: :ok, ast: c} = new_ctx when c in min..max ->
new_ctx
%Context{status: :ok, ast: c} ->
Context.add_error(ctx, :unexpected_char, "Expected: #{describe_char_match(min..max)} Actual: #{char_to_string(c)}")
%Context{status: {:error, _}} = new_ctx ->
new_ctx
end
end
)
end
# List of matchers (chars/negated chars/ranges): try each via char/1 in
# order, succeeding on the first that matches.
def char(l, opts) when is_list(l) do
label = Keyword.get(opts, :label, "?[#{inspect(l)}]")
Parser.terminal(
:char_list,
label,
fn ctx ->
# A failing peek (e.g. end of input) falls through and is returned as-is.
with %Context{status: :ok} = peek_ctx <- Context.peek(ctx) do
err_ctx = Context.add_error(ctx, :unexpected_char, "Expected: #{describe_char_match(l)} Actual: #{describe_char_match(peek_ctx.ast)}")
Enum.reduce_while(l, err_ctx, fn matcher, err_ctx ->
case Parser.invoke(ctx, char(matcher)) do
%Context{status: :ok} = new_ctx -> {:halt, new_ctx}
_no_match -> {:cont, err_ctx}
end
end)
end
end
)
end
@doc """
The not_char matcher accepts a char or a list of chars and will match any
char that is not in the list.
# Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = not_char(?0)
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Should not have matched 0"}]}, input: "0000"} = Ergo.parse(parser, "0000")
iex> assert %Context{status: :ok, ast: ?1} = Ergo.parse(parser, "1111")
iex> parser = not_char([?{, ?}])
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Should not have matched {"}]}, input: "{}"} = Ergo.parse(parser, "{}")
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Should not have matched }"}]}, input: "}"} = Ergo.parse(parser, "}")
"""
def not_char(c_or_l, opts \\ [])

# Single char: treat as a one-element exclusion list.
def not_char(char, opts) when is_integer(char), do: not_char([char], opts)

# Matches any character not present in `chars`.
def not_char(chars, opts) when is_list(chars) do
  label = Keyword.get(opts, :label, "?-[#{inspect(chars)}]")

  Parser.terminal(
    :not_char_list,
    label,
    fn ctx ->
      case Context.peek(ctx) do
        %Context{status: :ok, ast: ast} ->
          if ast in chars do
            Context.add_error(ctx, :unexpected_char, "Should not have matched #{describe_char_match(ast)}")
          else
            Context.next_char(ctx)
          end

        # A failed peek (e.g. end of input) is returned unchanged.
        other ->
          other
      end
    end
  )
end
# Renders a char matcher (char code, negated code, range, or list of matchers)
# as a human-readable string for error messages.
# NOTE: clause order is significant — the negative (negated-matcher) clause and
# the named control-char clauses must be tried before the generic integer ones.
# A negative code denotes a negated matcher ("anything but" that char).
defp describe_char_match(c) when is_integer(c) and c < 0 do
"!#{char_to_string(-c)}"
end
# Tab.
defp describe_char_match(9) do
"~TB~"
end
# Line feed.
defp describe_char_match(10) do
"~LF~"
end
# Vertical tab.
defp describe_char_match(11) do
"~VT~"
end
# Carriage return.
defp describe_char_match(13) do
"~CR~"
end
# Space.
defp describe_char_match(32) do
"~SP~"
end
# Printable chars render as themselves.
defp describe_char_match(c) when is_integer(c) and c > 32 do
char_to_string(c)
end
# Remaining control chars (0..8, 12, 14..31) render as zero-padded hex, e.g. _0B_.
defp describe_char_match(c) when is_integer(c) do
"_#{String.pad_leading(Integer.to_string(c, 16), 2, "0")}_"
end
# A range renders as "lo..hi" using the printable forms of its bounds.
defp describe_char_match(min..max) when is_integer(min) and is_integer(max) do
"#{char_to_string(min)}..#{char_to_string(max)}"
end
# A list of matchers renders each element recursively, comma separated.
defp describe_char_match(l) when is_list(l) do
s = l |> Enum.map(fn e -> describe_char_match(e) end) |> Enum.join(", ")
"[#{s}]"
end
@doc ~S"""
The `digit/0` parser accepts a character in the range of 0..9
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = digit()
iex> assert %Context{status: :ok, ast: ?0, input: "000", index: 1, line: 1, col: 2} = Ergo.parse(parser, "0000")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> import Ergo.Terminals
iex> parser = digit()
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: 0..9 Actual: A"}]}, input: "AAAA", index: 0, line: 1, col: 1} = Ergo.parse(parser, "AAAA")
iex> alias Ergo.{Context, Parser}
iex> import Ergo.Terminals
iex> ctx = Context.new("")
iex> parser = digit()
iex> assert %Context{status: {:error, [{:unexpected_eoi, {1, 1}, "Unexpected end of input"}]}, input: "", index: 0, line: 1, col: 1} = Parser.invoke(ctx, parser)
"""
def digit(options \\ []) do
label = Keyword.get(options, :label, "digit")
char(?0..?9, label: label)
end
@doc """
The `alpha/0` parser accepts a single character in the range a..z or A..Z.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = alpha()
iex> assert %Context{status: :ok, input: "ello World", ast: ?H, index: 1, line: 1, col: 2} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = alpha()
iex> assert %Context{status: :ok, input: "llo World", ast: ?e, index: 1, line: 1, col: 2} = Ergo.parse(parser, "ello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = alpha()
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: [a..z, A..Z] Actual: ~SP~"}]}, input: " World"} = Ergo.parse(parser, " World")
"""
def alpha(options \\ []) do
label = Keyword.get(options, :label, "alpha")
char([?a..?z, ?A..?Z], label: label)
end
@doc ~S"""
The `ws/0` parser accepts a white space character and is equivalent to the \s regular expression.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = ws()
iex> assert %Context{status: :ok, ast: ?\s, input: "World", index: 1, line: 1, col: 2}= Ergo.parse(parser, " World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = ws()
iex> assert %Context{status: :ok, ast: ?\t, input: "World", index: 1, line: 1, col: 2} = Ergo.parse(parser, "\tWorld")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = ws()
iex> assert %Context{status: :ok, ast: ?\n, input: "World", index: 1, line: 2, col: 1} = Ergo.parse(parser, "\nWorld")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = ws()
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: [~SP~, ~TB~, ~CR~, ~LF~, ~VT~] Actual: H"}]}, input: "Hello World"} = Ergo.parse(parser, "Hello World")
"""
def ws() do
char([?\s, ?\t, ?\r, ?\n, ?\v], label: "ws")
end
@doc ~S"""
The `wc/0` parser parses a word character and is analagous to the \w regular expression.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = wc()
iex> assert %Context{status: :ok, ast: ?H, input: "ello World", index: 1, col: 2} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = wc()
iex> assert %Context{status: :ok, ast: ?0, input: " World", index: 1, col: 2} = Ergo.parse(parser, "0 World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = wc()
iex> assert %Context{status: :ok, ast: ?_, input: "Hello", index: 1, col: 2} = Ergo.parse(parser, "_Hello")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = wc()
iex> assert %Context{status: {:error, [{:unexpected_char, {1, 1}, "Expected: [0..9, a..z, A..Z, _] Actual: ~SP~"}]}, input: " Hello"} = Ergo.parse(parser, " Hello")
"""
def wc() do
char([?0..?9, ?a..?z, ?A..?Z, ?_], label: "wc")
end
@doc ~S"""
The `literal/1` parser matches the specified string character by character.
## Examples
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = literal("Hello")
iex> assert %Context{status: :ok, input: " World", ast: "Hello", index: 5, line: 1, col: 6} = Ergo.parse(parser, "Hello World")
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = literal("Hello")
iex> assert %Context{status: {:error, [{:bad_literal, {1, 5}, "literal<Hello>"}, {:unexpected_char, {1, 5}, "Expected: o Actual: x"}]}, input: "x World", index: 4, line: 1, col: 5} = Ergo.parse(parser, "Hellx World")
"""
def literal(s, opts \\ []) when is_binary(s) do
map_fn = Keyword.get(opts, :map, nil)
label = Keyword.get(opts, :label, "literal<#{s}>")
Parser.terminal(
:literal,
label,
fn %Context{} = ctx ->
with %Context{status: :ok} = new_ctx <-
literal_reduce(String.to_charlist(s), %{ctx | ast: []}) do
new_ctx
|> Context.ast_in_parsed_order()
|> Context.ast_to_string()
|> Context.ast_transform(map_fn)
else
%Context{} = err_ctx ->
Context.add_error(err_ctx, :bad_literal, label)
end
end
)
end
# Matches `chars` one at a time against the input, prepending each matched
# char onto the context's ast (so the result is in reverse parse order).
# Halts with the accumulated errors on the first char that fails to match.
defp literal_reduce(chars, ctx) do
  Enum.reduce_while(chars, ctx, fn expected, acc_ctx ->
    case Parser.invoke(acc_ctx, char(expected)) do
      %Context{status: :ok, ast: matched} = ok_ctx ->
        # Prepend the matched char onto the accumulator's ast list.
        {:cont, %{ok_ctx | ast: [matched | acc_ctx.ast]}}

      %Context{status: {:error, errors}} ->
        # Attach the char-level errors to the context as it stood before the miss.
        {:halt, Context.add_errors(acc_ctx, errors)}
    end
  end)
end
@doc """
The delimited_text/2 parser matches a sequence of text delimited `open_char` and
`close_char`. Because it is expected that `open_char` may appear multiple times
within the sequence it balances the tokens to ensure the right number of closing
tokens is matched.
# Examples
iex> alias Ergo
iex> alias Ergo.Context
iex> import Ergo.Terminals
iex> parser = delimited_text(?{, ?})
iex> assert %Context{status: :ok, ast: "{return {foo: \\"bar\\", bar: {baz: \\"quux\\"}};}", input: ""} = Ergo.parse(parser, "{return {foo: \\"bar\\", bar: {baz: \\"quux\\"}};}")
iex> assert %Context{status: :ok, ast: "{function b(y) {return x + y;}; return b;}", input: "foo"} = Ergo.parse(parser, "{function b(y) {return x + y;}; return b;}foo")
"""
def delimited_text(open_char, close_char, opts \\ []) do
label = Keyword.get(opts, :label, "delimited_text<#{char_to_string(open_char)}, #{char_to_string(close_char)}>")
Parser.terminal(
:delimited_text,
label,
fn ctx -> nested_next_char(ctx, {0, []}, open_char, close_char) end
)
end
# Recursive scanner for delimited_text/3. `count` is the current nesting
# depth and `chars` the chars consumed so far, in reverse order. Succeeds
# when the depth returns to 0 on a closing delimiter; fails if the first
# char is not `open_char`, or passes through next_char's error on
# premature end of input.
# NOTE(review): no clause exists for open_char == close_char — calling with
# equal delimiters raises FunctionClauseError; confirm that is intended.
defp nested_next_char(ctx, {count, chars}, open_char, close_char) when open_char != close_char do
with %{status: :ok, ast: ast} = new_ctx <- Context.next_char(ctx) do
case ast do
^open_char ->
# Another opener: deepen the nesting and keep scanning.
nested_next_char(new_ctx, {count + 1, [ast | chars]}, open_char, close_char)
^close_char ->
case count do
# Closing delimiter before any opener: the text did not start with open_char.
0 -> Context.add_error(new_ctx, :unexpected_char, "Expected #{describe_char_match(open_char)} Actual: #{describe_char_match(close_char)}")
_ ->
count = count - 1
case count do
# Depth back to zero: done — reverse the accumulated chars into the ast string.
0 -> %{new_ctx | ast: [ast | chars] |> Enum.reverse() |> List.to_string()}
_ -> nested_next_char(new_ctx, {count, [ast | chars]}, open_char, close_char)
end
end
_char ->
case count do
# Any other char before the first opener is an error.
0 -> Context.add_error(new_ctx, :unexpected_char, "Expected #{describe_char_match(open_char)} Actual: #{describe_char_match(ast)}")
_ ->
nested_next_char(new_ctx, {count, [ast | chars]}, open_char, close_char)
end
end
end
end
end
|
lib/ergo/terminals.ex
| 0.775095
| 0.400046
|
terminals.ex
|
starcoder
|
defmodule Ewebmachine.Builder.Handlers do
@moduledoc """
`use` this module will `use Plug.Builder` (so a plug pipeline
described with the `plug module_or_function_plug` macro), but gives
you an `:add_handler` local function plug which adds to the conn
the locally defined ewebmachine handlers (see `Ewebmachine.Handlers`).
So :
- Construct your automate decision handler through multiple `:add_handler` plugs
- Pipe the plug `Ewebmachine.Plug.Run` to run the HTTP automate which
will call these handlers to take decisions.
- Pipe the plug `Ewebmachine.Plug.Send` to send and halt any conn previously passed
through an automate run.
To define handlers, use the following helpers :
- the handler specific macros (like `Ewebmachine.Builder.Handlers.resource_exists/1`)
- the macro `defh/2` to define any helpers, useful for body
producing handlers or to have multiple function clauses
- in handler implementation `conn` and `state` binding are available
- the response of the handler implementation is wrapped, so that
returning `:my_response` is the same as returning `{:my_response,conn,state}`
Below a full example :
```
defmodule MyJSONApi do
use Ewebmachine.Builder.Handlers
plug :cors
plug :add_handlers, init: %{}
content_types_provided do: ["application/json": :to_json]
defh to_json, do: Poison.encode!(state[:json_obj])
defp cors(conn,_), do:
put_resp_header(conn,"Access-Control-Allow-Origin","*")
end
defmodule GetUser do
use Ewebmachine.Builder.Handlers
plug MyJSONApi
plug :add_handlers
plug Ewebmachine.Plug.Run
plug Ewebmachine.Plug.Send
resource_exists do:
pass( !is_nil(user=DB.User.get(conn.params["q"])), json_obj: user)
end
defmodule GetOrder do
use Ewebmachine.Builder.Handlers
plug MyJSONApi
plug :add_handlers
plug Ewebmachine.Plug.Run
plug Ewebmachine.Plug.Send
resource_exists do:
pass(!is_nil(order=DB.Order.get(conn.params["q"])), json_obj: order)
end
defmodule API do
use Plug.Router
plug :match
plug :dispatch
get "/get/user", do: GetUser.call(conn,[])
get "/get/order", do: GetOrder.call(conn,[])
end
```
"""
# Injected into the using module at the end of its compilation: defines the
# `add_handlers` local function plug, which copies the module's accumulated
# @resource_handlers map into the conn and optionally seeds the machine's
# initial state from the `:init` plug option.
defmacro __before_compile__(_env) do
quote do
defp add_handlers(conn, opts) do
# Only set :machine_init when a truthy :init option was given.
conn = case Access.fetch(opts, :init) do
{:ok, init} when not (init in [false, nil]) -> put_private(conn, :machine_init, init)
_ -> conn
end
# Merge this module's handlers over any already registered on the conn.
Plug.Conn.put_private(conn, :resource_handlers,
Enum.into(@resource_handlers, conn.private[:resource_handlers] || %{}))
end
end
end
# Sets up the using module: a Plug.Builder pipeline, the handler macros,
# an empty handler registry, and a default `ping` handler answering :pong.
defmacro __using__(_opts) do
quote location: :keep do
use Plug.Builder
import Ewebmachine.Builder.Handlers
@before_compile Ewebmachine.Builder.Handlers
@resource_handlers %{}
ping do: :pong
end
end
# Names of all webmachine decision handlers; a convenience macro is
# generated for each one in the `for` loop below.
@resource_fun_names [
:resource_exists,:service_available,:is_authorized,:forbidden,:allow_missing_post,:malformed_request,:known_methods,
:base_uri,:uri_too_long,:known_content_type,:valid_content_headers,:valid_entity_length,:options,:allowed_methods,
:delete_resource,:delete_completed,:post_is_create,:create_path,:process_post,:content_types_provided,
:content_types_accepted,:charsets_provided,:encodings_provided,:variances,:is_conflict,:multiple_choices,
:previously_existed,:moved_permanently,:moved_temporarily,:last_modified,:expires,:generate_etag, :ping, :finish_request
]
# Normalizes a handler signature AST to {name, params, guard}:
# with a `when` guard, with an explicit (conn, state) param list, or bare.
defp sig_to_sigwhen({:when,_,[{name,_,params},guard]}), do: {name,params,guard}
defp sig_to_sigwhen({name,_,params}) when is_list(params), do: {name,params,true}
defp sig_to_sigwhen({name,_,_}), do: {name,[quote(do: _),quote(do: _)],true}
# Builds the quoted handler definition: registers the handler under this
# module in @resource_handlers, binds `conn`/`state` hygienically for the
# body, and wraps the body's result via wrap_response/3.
defp handler_quote(name,body,guard,conn_match,state_match) do
quote do
@resource_handlers Map.put(@resource_handlers,unquote(name),__MODULE__)
def unquote(name)(unquote(conn_match)=var!(conn),unquote(state_match)=var!(state)) when unquote(guard) do
res = unquote(body)
wrap_response(res,var!(conn),var!(state))
end
end
end
# Convenience overload: no guard, wildcard conn/state patterns.
defp handler_quote(name,body) do
handler_quote(name,body,true,quote(do: _),quote(do: _))
end
@doc """
define a resource handler function as described at
`Ewebmachine.Handlers`.
Since there is a specific macro in this module for each handler,
this macro is useful :
- to define body producing and body processing handlers (the one
referenced in the response of `Ewebmachine.Handlers.content_types_provided/2` or
`Ewebmachine.Handlers.content_types_accepted/2`)
- to explicitly take the `conn` and the `state` parameter, which
allows you to add guards and pattern matching for instance to
define multiple clauses for the handler
```
defh to_html, do: "hello you"
defh from_json, do: pass(:ok, json: Poison.decode!(read_body conn))
```
```
defh resources_exists(conn,%{obj: obj}) when obj !== nil, do: true
defh resources_exists(conn,_), do: false
```
"""
defmacro defh(signature, do_block) do
{name, [conn_match,state_match], guard} = sig_to_sigwhen(signature)
handler_quote(name, do_block[:do], guard, conn_match, state_match)
end
# Generates one convenience macro per standard handler name, each simply
# delegating to handler_quote/2 with the macro's do-block as the body.
for resource_fun_name<-@resource_fun_names do
Module.eval_quoted(Ewebmachine.Builder.Handlers, quote do
@doc "see `Ewebmachine.Handlers.#{unquote(resource_fun_name)}/2`"
defmacro unquote(resource_fun_name)(do_block) do
name = unquote(resource_fun_name)
handler_quote(name,do_block[:do])
end
end)
end
@doc false
# Already a {response, conn, state} tuple: pass it through unchanged;
# otherwise wrap a bare response with the current conn and state.
def wrap_response({_,%Plug.Conn{},_}=tuple,_,_), do: tuple
def wrap_response(r,conn,state), do: {r,conn,state}
@doc """
Shortcut macro for :
{response,var!(conn),Enum.into(update_state,var!(state))}
use it if your handler wants to add some value to a collectable
state (a map for instance), but using default "conn" current
binding.
for instance a resources_exists implementation "caching" the result
in the state could be :
pass (user=DB.get(state.id)) != nil, current_user: user
# same as returning :
{true,conn,%{id: "arnaud", current_user: %User{id: "arnaud"}}}
"""
defmacro pass(response,update_state) do
quote do
{unquote(response),var!(conn),Enum.into(unquote(update_state),var!(state))}
end
end
end
|
lib/ewebmachine/builder.handlers.ex
| 0.893524
| 0.722499
|
builder.handlers.ex
|
starcoder
|
defmodule AWS.SecretsManager do
@moduledoc """
Amazon Web Services Secrets Manager
Amazon Web Services Secrets Manager provides a service to enable you to store,
manage, and retrieve, secrets.
This guide provides descriptions of the Secrets Manager API. For more
information about using this service, see the [Amazon Web Services Secrets Manager User
Guide](https://docs.aws.amazon.com/secretsmanager/latest/userguide/introduction.html).
## API Version
This version of the Secrets Manager API Reference documents the Secrets Manager
API version 2017-10-17.
## Support and Feedback for Amazon Web Services Secrets Manager
We welcome your feedback. Send your comments to
[<EMAIL>](mailto:<EMAIL>), or post your feedback and questions in the [Amazon Web Services Secrets Manager
Discussion Forum](http://forums.aws.amazon.com/forum.jspa?forumID=296). For more
information about the Amazon Web Services Discussion Forums, see [Forums Help](http://forums.aws.amazon.com/help.jspa).
## Logging API Requests
Amazon Web Services Secrets Manager supports Amazon Web Services CloudTrail, a
service that records Amazon Web Services API calls for your Amazon Web Services
account and delivers log files to an Amazon S3 bucket. By using information
that's collected by Amazon Web Services CloudTrail, you can determine the
requests successfully made to Secrets Manager, who made the request, when it was
made, and so on. For more about Amazon Web Services Secrets Manager and support
for Amazon Web Services CloudTrail, see [Logging Amazon Web Services Secrets Manager Events with Amazon Web Services
CloudTrail](https://docs.aws.amazon.com/secretsmanager/latest/userguide/monitoring.html#monitoring_cloudtrail)
in the *Amazon Web Services Secrets Manager User Guide*. To learn more about
CloudTrail, including enabling it and find your log files, see the [Amazon Web Services CloudTrail User
Guide](https://docs.aws.amazon.com/awscloudtrail/latest/userguide/what_is_cloud_trail_top_level.html).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor consumed by `AWS.Request` to build, sign, and
# route Secrets Manager calls (json protocol, SigV4, x-amz-json-1.1).
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-10-17",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "secretsmanager",
global?: false,
protocol: "json",
service_id: "Secrets Manager",
signature_version: "v4",
signing_name: "secretsmanager",
target_prefix: "secretsmanager"
}
end
@doc """
Turns off automatic rotation, and if a rotation is currently in progress,
cancels the rotation.
To turn on automatic rotation again, call `RotateSecret`.
If you cancel a rotation in progress, it can leave the `VersionStage` labels in
an unexpected state. Depending on the step of the rotation in progress, you
might need to remove the staging label `AWSPENDING` from the partially created
version, specified by the `VersionId` response value. We recommend you also
evaluate the partially rotated new version to see if it should be deleted. You
can delete a version by removing all staging labels from it.
**Required permissions: ** `secretsmanager:CancelRotateSecret`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def cancel_rotate_secret(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelRotateSecret", input, options)
end
@doc """
Creates a new secret.
A *secret* is a set of credentials, such as a user name and password, that you
store in an encrypted form in Secrets Manager. The secret also includes the
connection information to access a database or other service, which Secrets
Manager doesn't encrypt. A secret in Secrets Manager consists of both the
protected secret data and the important information needed to manage the secret.
For information about creating a secret in the console, see [Create a secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_create-basic-secret.html).
To create a secret, you can provide the secret value to be encrypted in either
the `SecretString` parameter or the `SecretBinary` parameter, but not both. If
you include `SecretString` or `SecretBinary` then Secrets Manager creates an
initial secret version and automatically attaches the staging label `AWSCURRENT`
to it.
If you don't specify an KMS encryption key, Secrets Manager uses the Amazon Web
Services managed key `aws/secretsmanager`. If this key doesn't already exist in
your account, then Secrets Manager creates it for you automatically. All users
and roles in the Amazon Web Services account automatically have access to use
`aws/secretsmanager`. Creating `aws/secretsmanager` can result in a one-time
significant delay in returning the result.
If the secret is in a different Amazon Web Services account from the credentials
calling the API, then you can't use `aws/secretsmanager` to encrypt the secret,
and you must create and use a customer managed KMS key.
**Required permissions: ** `secretsmanager:CreateSecret`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def create_secret(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateSecret", input, options)
end
@doc """
Deletes the resource-based permission policy attached to the secret.
To attach a policy to a secret, use `PutResourcePolicy`.
**Required permissions: ** `secretsmanager:DeleteResourcePolicy`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def delete_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResourcePolicy", input, options)
end
@doc """
Deletes a secret and all of its versions.
You can specify a recovery window during which you can restore the secret. The
minimum recovery window is 7 days. The default recovery window is 30 days.
Secrets Manager attaches a `DeletionDate` stamp to the secret that specifies the
end of the recovery window. At the end of the recovery window, Secrets Manager
deletes the secret permanently.
For information about deleting a secret in the console, see
[https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_delete-secret.html](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_delete-secret.html). Secrets Manager performs the permanent secret deletion at the end of the waiting
period as a background task with low priority. There is no guarantee of a
specific time after the recovery window for the permanent delete to occur.
At any time before recovery window ends, you can use `RestoreSecret` to remove
the `DeletionDate` and cancel the deletion of the secret.
In a secret scheduled for deletion, you cannot access the encrypted secret
value. To access that information, first cancel the deletion with
`RestoreSecret` and then retrieve the information.
**Required permissions: ** `secretsmanager:DeleteSecret`. For more information,
see [ IAM policy actions for Secrets
Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def delete_secret(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteSecret", input, options)
end
@doc """
Retrieves the details of a secret.
It does not include the encrypted secret value. Secrets Manager only returns
fields that have a value in the response.
**Required permissions: ** `secretsmanager:DescribeSecret`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def describe_secret(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSecret", input, options)
end
@doc """
Generates a random password.
We recommend that you specify the maximum length and include every character
type that the system you are generating a password for can support.
**Required permissions: ** `secretsmanager:GetRandomPassword`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def get_random_password(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRandomPassword", input, options)
end
@doc """
Retrieves the JSON text of the resource-based policy document attached to the
secret.
For more information about permissions policies attached to a secret, see
[Permissions policies attached to a secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-policies.html).
**Required permissions: ** `secretsmanager:GetResourcePolicy`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def get_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetResourcePolicy", input, options)
end
@doc """
Retrieves the contents of the encrypted fields `SecretString` or `SecretBinary`
from the specified version of a secret, whichever contains content.
We recommend that you cache your secret values by using client-side caching.
Caching secrets improves speed and reduces your costs. For more information, see
[Cache secrets for your applications](https://docs.aws.amazon.com/secretsmanager/latest/userguide/retrieving-secrets.html).
**Required permissions: ** `secretsmanager:GetSecretValue`. If the secret is
encrypted using a customer-managed key instead of the Amazon Web Services
managed key `aws/secretsmanager`, then you also need `kms:Decrypt` permissions
for that key. For more information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def get_secret_value(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetSecretValue", input, options)
end
@doc """
Lists the versions for a secret.
To list the secrets in the account, use `ListSecrets`.
To get the secret value from `SecretString` or `SecretBinary`, call
`GetSecretValue`.
**Required permissions: ** `secretsmanager:ListSecretVersionIds`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def list_secret_version_ids(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSecretVersionIds", input, options)
end
@doc """
Lists the secrets that are stored by Secrets Manager in the Amazon Web Services
account.
To list the versions of a secret, use `ListSecretVersionIds`.
To get the secret value from `SecretString` or `SecretBinary`, call
`GetSecretValue`.
For information about finding secrets in the console, see [Enhanced search capabilities for secrets in Secrets
Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/manage_search-secret.html).
**Required permissions: ** `secretsmanager:ListSecrets`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def list_secrets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListSecrets", input, options)
end
@doc """
Attaches a resource-based permission policy to a secret.
A resource-based policy is optional. For more information, see [Authentication and access control for Secrets
Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html)
For information about attaching a policy in the console, see [Attach a permissions policy to a
secret](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access_resource-based-policies.html).
**Required permissions: ** `secretsmanager:PutResourcePolicy`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def put_resource_policy(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutResourcePolicy", input, options)
end
@doc """
Creates a new version with a new encrypted secret value and attaches it to the
secret.
The version can contain a new `SecretString` value or a new `SecretBinary`
value.
We recommend you avoid calling `PutSecretValue` at a sustained rate of more than
once every 10 minutes. When you update the secret value, Secrets Manager creates
a new version of the secret. Secrets Manager removes outdated versions when
there are more than 100, but it does not remove versions created less than 24
hours ago. If you call `PutSecretValue` more than once every 10 minutes, you
create more versions than Secrets Manager removes, and you will reach the quota
for secret versions.
You can specify the staging labels to attach to the new version in
`VersionStages`. If you don't include `VersionStages`, then Secrets Manager
automatically moves the staging label `AWSCURRENT` to this version. If this
operation creates the first version for the secret, then Secrets Manager
automatically attaches the staging label `AWSCURRENT` to it .
If this operation moves the staging label `AWSCURRENT` from another version to
this version, then Secrets Manager also automatically moves the staging label
`AWSPREVIOUS` to the version that `AWSCURRENT` was removed from.
This operation is idempotent. If a version with a `VersionId` with the same
value as the `ClientRequestToken` parameter already exists, and you specify the
same secret data, the operation succeeds but does nothing. However, if the
secret data is different, then the operation fails because you can't modify an
existing version; you can only create new ones.
**Required permissions: ** `secretsmanager:PutSecretValue`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def put_secret_value(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutSecretValue", input, options)
end
@doc """
For a secret that is replicated to other Regions, deletes the secret replicas
from the Regions you specify.
**Required permissions: ** `secretsmanager:RemoveRegionsFromReplication`. For
more information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
def remove_regions_from_replication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveRegionsFromReplication", input, options)
end
@doc """
Replicates the secret to a new Regions.
See [Multi-Region secrets](https://docs.aws.amazon.com/secretsmanager/latest/userguide/create-manage-multi-region-secrets.html).
**Required permissions: ** `secretsmanager:ReplicateSecretToRegions`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "ReplicateSecretToRegions" API action through
# the shared request helper.
def replicate_secret_to_regions(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "ReplicateSecretToRegions", input, options)
@doc """
Cancels the scheduled deletion of a secret by removing the `DeletedDate` time
stamp.
You can access a secret again after it has been restored.
**Required permissions: ** `secretsmanager:RestoreSecret`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "RestoreSecret" API action through the shared
# request helper.
def restore_secret(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "RestoreSecret", input, options)
@doc """
Configures and starts the asynchronous process of rotating the secret.
If you include the configuration parameters, the operation sets the values for
the secret and then immediately starts a rotation. If you don't include the
configuration parameters, the operation starts a rotation with the values
already stored in the secret. For more information about rotation, see [Rotate secrets](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets.html).
To configure rotation, you include the ARN of an Amazon Web Services Lambda
function and the schedule for the rotation. The Lambda rotation function creates
a new version of the secret and creates or updates the credentials on the
database or service to match. After testing the new credentials, the function
marks the new secret version with the staging label `AWSCURRENT`. Then anyone
who retrieves the secret gets the new version. For more information, see [How rotation
works](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotate-secrets_how.html).
When rotation is successful, the `AWSPENDING` staging label might be attached to
the same version as the `AWSCURRENT` version, or it might not be attached to any
version.
If the `AWSPENDING` staging label is present but not attached to the same
version as `AWSCURRENT`, then any later invocation of `RotateSecret` assumes
that a previous rotation request is still in progress and returns an error.
**Required permissions: ** `secretsmanager:RotateSecret`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
You also need `lambda:InvokeFunction` permissions on the rotation function. For
more information, see [ Permissions for rotation](https://docs.aws.amazon.com/secretsmanager/latest/userguide/rotating-secrets-required-permissions-function.html).
"""
# Thin action wrapper: POSTs the "RotateSecret" API action through the shared
# request helper.
def rotate_secret(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "RotateSecret", input, options)
@doc """
Removes the link between the replica secret and the primary secret and promotes
the replica to a primary secret in the replica Region.
You must call this operation from the Region in which you want to promote the
replica to a primary secret.
**Required permissions: ** `secretsmanager:StopReplicationToReplica`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "StopReplicationToReplica" API action through
# the shared request helper.
def stop_replication_to_replica(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "StopReplicationToReplica", input, options)
@doc """
Attaches tags to a secret.
Tags consist of a key name and a value. Tags are part of the secret's metadata.
They are not associated with specific versions of the secret. This operation
appends tags to the existing list of tags.
The following restrictions apply to tags:
* Maximum number of tags per secret: 50
* Maximum key length: 127 Unicode characters in UTF-8
* Maximum value length: 255 Unicode characters in UTF-8
* Tag keys and values are case sensitive.
* Do not use the `aws:` prefix in your tag names or values because
Amazon Web Services reserves it for Amazon Web Services use. You can't edit or
delete tag names or values with this prefix. Tags with this prefix do not count
against your tags per secret limit.
* If you use your tagging schema across multiple services and
resources, other services might have restrictions on allowed characters.
Generally allowed characters: letters, spaces, and numbers representable in
UTF-8, plus the following special characters: + - = . _ : / @.
If you use tags as part of your security strategy, then adding or removing a tag
can change permissions. If successfully completing this operation would result
in you losing your permissions for this secret, then the operation is blocked
and returns an Access Denied error.
**Required permissions: ** `secretsmanager:TagResource`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "TagResource" API action through the shared
# request helper.
def tag_resource(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "TagResource", input, options)
@doc """
Removes specific tags from a secret.
This operation is idempotent. If a requested tag is not attached to the secret,
no error is returned and the secret metadata is unchanged.
If you use tags as part of your security strategy, then removing a tag can
change permissions. If successfully completing this operation would result in
you losing your permissions for this secret, then the operation is blocked and
returns an Access Denied error.
**Required permissions: ** `secretsmanager:UntagResource`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "UntagResource" API action through the shared
# request helper.
def untag_resource(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UntagResource", input, options)
@doc """
Modifies the details of a secret, including metadata and the secret value.
To change the secret value, you can also use `PutSecretValue`.
To change the rotation configuration of a secret, use `RotateSecret` instead.
We recommend you avoid calling `UpdateSecret` at a sustained rate of more than
once every 10 minutes. When you call `UpdateSecret` to update the secret value,
Secrets Manager creates a new version of the secret. Secrets Manager removes
outdated versions when there are more than 100, but it does not remove versions
created less than 24 hours ago. If you update the secret value more than once
every 10 minutes, you create more versions than Secrets Manager removes, and you
will reach the quota for secret versions.
If you include `SecretString` or `SecretBinary` to create a new secret version,
Secrets Manager automatically attaches the staging label `AWSCURRENT` to the new
version.
If you call this operation with a `VersionId` that matches an existing version's
`ClientRequestToken`, the operation results in an error. You can't modify an
existing version, you can only create a new version. To remove a version, remove
all staging labels from it. See `UpdateSecretVersionStage`.
If you don't specify an KMS encryption key, Secrets Manager uses the Amazon Web
Services managed key `aws/secretsmanager`. If this key doesn't already exist in
your account, then Secrets Manager creates it for you automatically. All users
and roles in the Amazon Web Services account automatically have access to use
`aws/secretsmanager`. Creating `aws/secretsmanager` can result in a one-time
significant delay in returning the result.
If the secret is in a different Amazon Web Services account from the credentials
calling the API, then you can't use `aws/secretsmanager` to encrypt the secret,
and you must create and use a customer managed key.
**Required permissions: ** `secretsmanager:UpdateSecret`. For more information,
see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
If you use a customer managed key, you must also have `kms:GenerateDataKey` and
`kms:Decrypt` permissions on the key. For more information, see [ Secret encryption and
decryption](https://docs.aws.amazon.com/secretsmanager/latest/userguide/security-encryption.html).
"""
# Thin action wrapper: POSTs the "UpdateSecret" API action through the shared
# request helper.
def update_secret(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UpdateSecret", input, options)
@doc """
Modifies the staging labels attached to a version of a secret.
Secrets Manager uses staging labels to track a version as it progresses through
the secret rotation process. Each staging label can be attached to only one
version at a time. To add a staging label to a version when it is already
attached to another version, Secrets Manager first removes it from the other
version and then attaches it to this one. For more information about
versions and staging labels, see [Concepts: Version](https://docs.aws.amazon.com/secretsmanager/latest/userguide/getting-started.html#term_version).
The staging labels that you specify in the `VersionStage` parameter are added to
the existing list of staging labels for the version.
You can move the `AWSCURRENT` staging label to this version by including it in
this call.
Whenever you move `AWSCURRENT`, Secrets Manager automatically moves the label
`AWSPREVIOUS` to the version that `AWSCURRENT` was removed from.
If this action results in the last label being removed from a version, then the
version is considered to be 'deprecated' and can be deleted by Secrets Manager.
**Required permissions: ** `secretsmanager:UpdateSecretVersionStage`. For more
information, see [ IAM policy actions for Secrets Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "UpdateSecretVersionStage" API action through
# the shared request helper.
def update_secret_version_stage(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "UpdateSecretVersionStage", input, options)
@doc """
Validates that a resource policy does not grant a wide range of principals
access to your secret.
A resource-based policy is optional for secrets.
The API performs three checks when validating the policy:
* Sends a call to
[Zelkova](https://aws.amazon.com/blogs/security/protect-sensitive-data-in-the-cloud-with-automated-reasoning-zelkova/), an automated reasoning engine, to ensure your resource policy does not allow
broad access to your secret, for example policies that use a wildcard for the
principal.
* Checks for correct syntax in a policy.
* Verifies the policy does not lock out a caller.
**Required permissions: ** `secretsmanager:ValidateResourcePolicy`. For more
information, see [ IAM policy actions for Secrets
Manager](https://docs.aws.amazon.com/service-authorization/latest/reference/list_awssecretsmanager.html#awssecretsmanager-actions-as-permissions)
and [Authentication and access control in Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/auth-and-access.html).
"""
# Thin action wrapper: POSTs the "ValidateResourcePolicy" API action through
# the shared request helper.
def validate_resource_policy(%Client{} = client, input, options \\ []),
  do: Request.request_post(client, metadata(), "ValidateResourcePolicy", input, options)
end
|
lib/aws/generated/secrets_manager.ex
| 0.864754
| 0.577078
|
secrets_manager.ex
|
starcoder
|
defmodule TimeGroup do
  @moduledoc """
  Groups schedules into time-of-day buckets (hour of day or subway schedule
  period) and computes service frequencies (headways) for those buckets.
  """

  alias Schedules.Schedule

  @type time_block ::
          :early_morning | :am_rush | :midday | :pm_rush | :evening | :night | :late_night

  @doc """
  Given a list of schedules, returns those schedules grouped by the hour of day.

  Returns a keyword list rather than a map so that the times appear in order.
  Precondition: the schedules are already sorted by time.
  """
  # Fixed: spec previously used the struct literal `%Schedule{}` instead of
  # `Schedule.t()`, inconsistent with every other spec in this module.
  @spec by_hour([Schedule.t()]) :: [{non_neg_integer, [Schedule.t()]}]
  def by_hour(schedules) do
    do_by_fn(schedules, & &1.time.hour)
  end

  @type subway_schedule :: time_block

  @doc """
  Given a list of schedules, returns those schedules grouped into subway schedule periods:

  * OPEN - 6:30 AM (:early_morning)
  * 6:30 AM - 9:30 AM (:am_rush)
  * 9:30 AM - 3:30 PM (:midday)
  * 3:30 PM - 6:30 PM (:pm_rush)
  * 6:30 PM - 9:00 PM (:evening)
  * 9:00 PM - 12:00 AM (:night)
  * 12:00 AM - CLOSE (:late_night)

  Returns a keyword list, and expects that the schedules are already sorted.
  """
  @spec by_subway_period([Schedule.t()]) :: [{subway_schedule, [Schedule.t()]}]
  def by_subway_period(schedules) do
    schedules
    |> do_by_fn(fn %Schedule{time: time} -> subway_period(time) end)
  end

  @doc """
  Given a list of schedules, return the frequency of service in minutes.

  For two or more schedules with distinct times, returns a `{min, max}` headway
  pair (a pair is returned even when min == max). Otherwise, returns nil.
  """
  # Fixed: the previous spec/doc claimed a bare integer could be returned, but
  # Enum.min_max/1 always yields a tuple, so that shape never occurred.
  @spec frequency([Schedule.t()]) :: {non_neg_integer, non_neg_integer} | nil
  def frequency(schedules) do
    schedules
    |> Enum.uniq_by(& &1.time)
    |> do_frequency
  end

  # Needs at least two distinct times: pairs each time with its successor and
  # measures the gap in minutes, then takes the min/max of those gaps.
  defp do_frequency([_, _ | _] = schedules) do
    schedules
    |> Enum.zip(Enum.drop(schedules, 1))
    |> Enum.map(fn {x, y} -> Timex.diff(y.time, x.time, :minutes) end)
    |> Enum.min_max()
  end

  defp do_frequency(_) do
    nil
  end

  @doc """
  Builds a `Schedules.Frequency` for the schedules that fall within the given
  time block. Headways are `:infinity` when no frequency can be computed.
  """
  @spec frequency_for_time([Schedule.t()], atom) :: Schedules.Frequency.t()
  def frequency_for_time(schedules, time_block) do
    {min, max} =
      schedules
      |> Enum.filter(fn schedule -> subway_period(schedule.time) == time_block end)
      |> frequency
      |> verify_min_max

    %Schedules.Frequency{time_block: time_block, min_headway: min, max_headway: max}
  end

  @spec verify_min_max({integer, integer} | nil) :: {:infinity, :infinity} | {integer, integer}
  defp verify_min_max(nil), do: {:infinity, :infinity}
  defp verify_min_max({_min, _max} = min_max), do: min_max

  @doc """
  Computes a `Schedules.Frequency` for every subway time block, in order.
  """
  @spec frequency_by_time_block([Schedule.t()]) :: [Schedules.Frequency.t()]
  def frequency_by_time_block(schedules) do
    Enum.map(
      [:early_morning, :am_rush, :midday, :pm_rush, :evening, :night, :late_night],
      &frequency_for_time(schedules, &1)
    )
  end

  defp do_by_fn([], _) do
    []
  end

  # Groups consecutive schedules sharing func's value. The reduce builds both
  # the group list and each group's members in reverse; finished groups are
  # re-reversed as they are closed, so only the final (still-open) group and
  # the overall list need reversing at the end.
  defp do_by_fn(schedules, func) do
    schedules
    |> Enum.reduce([], &reduce_by_fn(&1, &2, func))
    |> reverse_first_group
    |> Enum.reverse()
  end

  defp reduce_by_fn(schedule, [], func) do
    [{func.(schedule), [schedule]}]
  end

  defp reduce_by_fn(schedule, [{value, grouped} | rest], func) do
    if value == func.(schedule) do
      head = {value, [schedule | grouped]}
      [head | rest]
    else
      head = {func.(schedule), [schedule]}
      previous_head = {value, Enum.reverse(grouped)}
      [head, previous_head | rest]
    end
  end

  # Defensive clause: do_by_fn/2 never reaches here with an empty accumulator
  # for non-empty input, but matching [] avoids a FunctionClauseError if that
  # invariant ever changes.
  defp reverse_first_group([]), do: []

  defp reverse_first_group([{value, grouped} | rest]) do
    head = {value, Enum.reverse(grouped)}
    [head | rest]
  end

  # Period boundaries as {hour, minute}; Erlang tuple comparison is
  # lexicographic, so e.g. {6, 30} compares as 6:30 AM.
  @start {4, 0}
  @early_morning_end {6, 30}
  @am_rush_end {9, 30}
  @midday_end {15, 30}
  @pm_rush_end {18, 30}
  @evening_end {21, 0}
  @night_end {24, 0}

  @doc """
  Maps a time (anything exposing `:hour` and `:minute` fields) to its subway
  schedule period. Each period's upper boundary is inclusive; times before
  4:00 AM count as `:late_night`.
  """
  def subway_period(time) do
    tup = {time.hour, time.minute}

    cond do
      tup < @start ->
        :late_night

      tup <= @early_morning_end ->
        :early_morning

      tup <= @am_rush_end ->
        :am_rush

      tup <= @midday_end ->
        :midday

      tup <= @pm_rush_end ->
        :pm_rush

      tup <= @evening_end ->
        :evening

      tup <= @night_end ->
        :night

      true ->
        :late_night
    end
  end

  @doc """
  Renders a frequency's headway range as iodata: a single number when min and
  max headways are equal, otherwise "min-max".
  """
  @spec display_frequency_range(Schedules.Frequency.t()) :: iodata
  def display_frequency_range(%Schedules.Frequency{min_headway: value, max_headway: value}) do
    Integer.to_string(value)
  end

  def display_frequency_range(%Schedules.Frequency{min_headway: min, max_headway: max}) do
    [
      Integer.to_string(min),
      "-",
      Integer.to_string(max)
    ]
  end
end
|
apps/schedules/lib/time_group.ex
| 0.894682
| 0.525491
|
time_group.ex
|
starcoder
|
defmodule Alphametics do
  @moduledoc """
  Brute-force solver for alphametics puzzles such as `"SEND + MORE == MONEY"`:
  tries digit assignments for the distinct letters until the equation holds.
  """

  @type puzzle :: binary
  @type solution :: %{required(?A..?Z) => 0..9}

  # Strips everything that is not an uppercase letter (operators, spaces, digits).
  @non_uppercase_characters ~r/[^A-Z]/
  @single_digits Enum.to_list(0..9)

  @doc """
  Takes an alphametics puzzle and returns a solution where every letter
  replaced by its number will make a valid equation. Returns `nil` when
  there is no valid solution to the given puzzle.

  ## Examples

      iex> Alphametics.solve("I + BB == ILL")
      %{?I => 1, ?B => 9, ?L => 0}

      iex> Alphametics.solve("A == B")
      nil
  """
  @spec solve(puzzle) :: solution | nil
  def solve(puzzle) do
    letters = to_letters(puzzle)

    @single_digits
    |> permutations(length(letters))
    |> Enum.reduce_while(nil, &evaluate_permutation(puzzle, letters, &1, &2))
  end

  # Tries one digit assignment: skip it if any number would gain a leading
  # zero; halt with the solution map as soon as the equation evaluates true.
  defp evaluate_permutation(puzzle, letters, permutation, acc) do
    translations = Enum.zip(letters, permutation)
    expression = generate_expression(puzzle, translations)

    cond do
      any_leading_zeroes?(expression) ->
        {:cont, acc}

      evaluate(expression) ->
        solution = Enum.reduce(translations, %{}, &add_char_key_value/2)
        {:halt, solution}

      true ->
        {:cont, acc}
    end
  end

  # Unique uppercase letters of the puzzle, in order of first appearance.
  defp to_letters(puzzle) do
    puzzle
    |> String.replace(@non_uppercase_characters, "")
    |> String.graphemes()
    |> Enum.uniq()
  end

  # All orderings of `length` distinct digits drawn from `digits`.
  #
  # Bug fix: the former `permutations([], _length), do: [[]]` clause returned a
  # too-short "permutation" whenever there were more letters than digits
  # (> 10 distinct letters), which crashed `evaluate/1` on a partially
  # substituted expression. Without that clause, an exhausted digit pool with
  # letters remaining yields no permutations, so `solve/1` correctly returns nil.
  defp permutations(_digits, 0), do: [[]]

  defp permutations(digits, length) do
    for digit <- digits,
        rest = digits -- [digit],
        rest <- permutations(rest, length - 1) do
      [digit | rest]
    end
  end

  # Substitutes digits for letters, then tokenizes on whitespace,
  # e.g. "I + BB == ILL" -> ["1", "+", "99", "==", "100"].
  defp generate_expression(puzzle, translations) do
    translations
    |> Enum.reduce(puzzle, &letter_to_permutation/2)
    |> String.split()
  end

  defp letter_to_permutation({letter, permutation}, acc) do
    String.replace(acc, letter, Integer.to_string(permutation))
  end

  # A number may not start with digit 0 (this also rejects a lone "0" operand,
  # matching the original behavior).
  defp any_leading_zeroes?(expression) do
    Enum.any?(expression, &String.starts_with?(&1, "0"))
  end

  # Left-to-right fold over "operand (operator operand)*"; no precedence.
  defp evaluate([head | tail]) do
    tail
    |> Enum.chunk_every(2)
    |> Enum.reduce(String.to_integer(head), &accumulate_expression/2)
  end

  # Applies one operator token to the running value. `to_existing_atom/1`
  # replaces `to_atom/1` so puzzle input cannot mint new atoms; the operator
  # atoms (:+, :-, :*, :==, ...) always exist.
  defp accumulate_expression([fun, value], acc) do
    apply(Kernel, String.to_existing_atom(fun), [acc, String.to_integer(value)])
  end

  # Folds a {grapheme, digit} pair into the solution map keyed by codepoint.
  defp add_char_key_value({<<translation::utf8>>, value}, acc) do
    Map.put(acc, translation, value)
  end
end
|
elixir/alphametics/lib/alphametics.ex
| 0.86712
| 0.45944
|
alphametics.ex
|
starcoder
|
defmodule Cog.Command.Service.DataStore do
  @moduledoc """
  Stores an arbitrary data structure for a given key. The only requirement is
  that the data structure must be able to be encoded as JSON.

  Keys may be fetched, replaced, and deleted.

  The JSON data is stored on the filesystem of the Cog host. See the
  Cog.Command.Service.DataStore.NestedFile module for more details.
  """

  use GenServer

  alias Cog.Command.Service.DataStore.NestedFile

  # Server state: the root directory under which namespaced files are written.
  defstruct [:base_path]

  @doc """
  Starts the #{inspect __MODULE__} service. Accepts a path to use for the
  base directory to store content under.
  """
  def start_link(base_path),
    do: GenServer.start_link(__MODULE__, base_path, name: __MODULE__)

  @doc """
  Fetches the given key. Returns `{:ok, value}` if the key exists or `{:error,
  :unknown_key}` if it doesn't exist.
  """
  def fetch(namespace, key),
    do: GenServer.call(__MODULE__, {:fetch, namespace, key})

  @doc """
  Replaces or sets the given key with the value. Returns `{:ok, value}`.
  """
  def replace(namespace, key, value),
    do: GenServer.call(__MODULE__, {:replace, namespace, key, value})

  @doc """
  Deletes the given key. Returns `{:ok, key}` when successfully
  deleted or `{:error, :unknown_key}` if it doesn't exist.
  """
  def delete(namespace, key),
    do: GenServer.call(__MODULE__, {:delete, namespace, key})

  # Refuse to start when no data path is configured, rather than failing later
  # on every filesystem call.
  @impl true
  def init(nil),
    do: {:stop, "Unable to start #{__MODULE__}: Data path not configured"}

  def init(base_path) do
    state = %__MODULE__{base_path: base_path}
    {:ok, state}
  end

  @impl true
  def handle_call({:fetch, namespace, key}, _from, state) do
    case NestedFile.fetch([state.base_path] ++ namespace, key, "json") do
      {:ok, content} ->
        # NOTE(review): Poison.decode!/1 raises on corrupt content, which
        # crashes this server; presumably acceptable since we only read files
        # this module wrote — confirm.
        data = Poison.decode!(content)
        {:reply, {:ok, data}, state}

      {:error, reason} ->
        {:reply, {:error, reason}, state}
    end
  end

  def handle_call({:replace, namespace, key, value}, _from, state) do
    content = Poison.encode!(value)

    case NestedFile.replace([state.base_path] ++ namespace, key, content, "json") do
      {:ok, ^content} ->
        {:reply, {:ok, value}, state}

      {:error, reason} ->
        {:reply, {:error, reason}, state}
    end
  end

  def handle_call({:delete, namespace, key}, _from, state) do
    case NestedFile.delete([state.base_path] ++ namespace, key, "json") do
      :ok ->
        {:reply, {:ok, key}, state}

      {:error, reason} ->
        {:reply, {:error, reason}, state}
    end
  end
end
|
lib/cog/command/service/data_store.ex
| 0.832747
| 0.423845
|
data_store.ex
|
starcoder
|
defmodule CadetWeb.AssessmentsHelpers do
  @moduledoc """
  Helper functions for Assessments and Grading
  """

  import CadetWeb.ViewHelper

  # Serializes a question's library (chapter, globals, external library) for
  # the view. Values in the map passed to transform_map_for_view are either
  # source keys or 1-arity getter functions applied to the library.
  defp build_library(%{library: library}) do
    transform_map_for_view(library, %{
      chapter: :chapter,
      globals: :globals,
      external: &build_external_library(%{external_library: &1.external})
    })
  end

  defp build_external_library(%{external_library: external_library}) do
    transform_map_for_view(external_library, [:name, :symbols])
  end

  # Builds the view map for a question: generic fields merged with the
  # type-specific content. `all_testcases?` controls whether secret testcases
  # are included for programming questions.
  def build_question_by_question_config(
        %{question: question},
        all_testcases? \\ false
      ) do
    Map.merge(
      build_generic_question_fields(%{question: question}),
      build_question_content_by_config(
        %{question: question},
        all_testcases?
      )
    )
  end

  # Question view plus the student's answer fields and, when the assessment is
  # ungraded and configured to show it, the solution. Builders that return nil
  # (e.g. a hidden solution) are filtered out before merging.
  def build_question_with_answer_and_solution_if_ungraded(%{question: question}) do
    components = [
      build_question_by_question_config(%{
        question: question
      }),
      build_answer_fields_by_question_type(%{question: question}),
      build_solution_if_ungraded_by_config(%{question: question})
    ]

    components
    |> Enum.filter(& &1)
    |> Enum.reduce(%{}, &Map.merge/2)
  end

  # Fields common to every question type.
  defp build_generic_question_fields(%{question: question}) do
    transform_map_for_view(question, %{
      id: :id,
      type: :type,
      library: &build_library(%{library: &1.library}),
      maxXp: :max_xp,
      blocking: :blocking
    })
  end

  # Returns the solution map only when `show_solution` is set; otherwise
  # returns nil (the `if` has no else), which callers filter out.
  # Voting questions have no solution, hence the nil getter.
  defp build_solution_if_ungraded_by_config(%{
         question: %{question: question, type: question_type, show_solution: show_solution}
       }) do
    if show_solution do
      solution_getter =
        case question_type do
          :programming -> &Map.get(&1, "solution")
          :mcq -> &find_correct_choice(&1["choices"])
          :voting -> nil
        end

      transform_map_for_view(question, %{solution: solution_getter})
    end
  end

  # Getter for the raw answer payload, per question type. Voting answers have
  # no extractable value.
  defp answer_builder_for(:programming), do: & &1.answer["code"]
  defp answer_builder_for(:mcq), do: & &1.answer["choice_id"]
  defp answer_builder_for(:voting), do: nil

  defp build_answer_fields_by_question_type(%{
         question: %{answer: answer, type: question_type}
       }) do
    # No need to check if answer exists since empty answer would be a
    # `%Answer{..., answer: nil}` and nil["anything"] = nil
    %{grader: grader} = answer

    transform_map_for_view(answer, %{
      answer: answer_builder_for(question_type),
      grader: grader_builder(grader),
      gradedAt: graded_at_builder(grader),
      # Displayed XP is base XP plus any manual adjustment, treating nil as 0.
      xp: &((&1.xp || 0) + (&1.xp_adjustment || 0)),
      autogradingStatus: :autograding_status,
      autogradingResults: build_results(%{results: answer.autograding_results}),
      comments: :comments
    })
  end

  # Returns nil when there are no results, otherwise a getter capture that
  # maps each autograding result through build_result/1.
  defp build_results(%{results: results}) do
    case results do
      nil -> nil
      _ -> &Enum.map(&1.autograding_results, fn result -> build_result(result) end)
    end
  end

  # Serializes one autograding result; source keys here are strings.
  def build_result(result) do
    transform_map_for_view(result, %{
      resultType: "resultType",
      expected: "expected",
      actual: "actual",
      errorType: "errorType",
      errors: build_errors(result["errors"])
    })
  end

  # Like build_results/1 but for the error list attached to a result.
  defp build_errors(errors) do
    case errors do
      nil -> nil
      _ -> &Enum.map(&1["errors"], fn error -> build_error(error) end)
    end
  end

  defp build_error(error) do
    transform_map_for_view(error, %{
      errorType: "errorType",
      line: "line",
      location: "location",
      errorLine: "errorLine",
      errorExplanation: "errorExplanation"
    })
  end

  defp build_contest_entry(entry) do
    transform_map_for_view(entry, %{
      submission_id: :submission_id,
      answer: :answer,
      score: :score
    })
  end

  # Leaderboard entry with the relative score rounded to 2 d.p. and exposed
  # under the "final_score" key.
  defp build_contest_leaderboard_entry(leaderboard_ans) do
    Map.put(
      transform_map_for_view(leaderboard_ans, %{
        submission_id: :submission_id,
        answer: :answer,
        student_name: :student_name
      }),
      "final_score",
      Float.round(leaderboard_ans.relative_score, 2)
    )
  end

  defp build_choice(choice) do
    transform_map_for_view(choice, %{
      id: "choice_id",
      content: "content",
      hint: "hint"
    })
  end

  defp build_testcase(testcase, type) do
    transform_map_for_view(testcase, %{
      answer: "answer",
      score: "score",
      program: "program",
      # Create a 1-arity function to return the type of the testcase as a string
      type: fn _ -> type end
    })
  end

  # Returns a getter capture that concatenates public + opaque testcases, and
  # additionally the secret ones when `all_testcases?` is true (e.g. for staff).
  defp build_testcases(all_testcases?) do
    if all_testcases? do
      &Enum.concat(
        Enum.concat(
          Enum.map(&1["public"], fn testcase -> build_testcase(testcase, "public") end),
          Enum.map(&1["opaque"], fn testcase -> build_testcase(testcase, "opaque") end)
        ),
        Enum.map(&1["secret"], fn testcase -> build_testcase(testcase, "secret") end)
      )
    else
      &Enum.concat(
        Enum.map(&1["public"], fn testcase -> build_testcase(testcase, "public") end),
        Enum.map(&1["opaque"], fn testcase -> build_testcase(testcase, "opaque") end)
      )
    end
  end

  # Type-specific question payload. Note the mixed key styles: programming/mcq
  # question data uses string keys, while voting contest data uses atom keys.
  defp build_question_content_by_config(
         %{
           question: %{
             question: question,
             type: question_type
           }
         },
         all_testcases?
       ) do
    case question_type do
      :programming ->
        transform_map_for_view(question, %{
          content: "content",
          prepend: "prepend",
          solutionTemplate: "template",
          postpend: "postpend",
          testcases: build_testcases(all_testcases?)
        })

      :mcq ->
        transform_map_for_view(question, %{
          content: "content",
          choices: &Enum.map(&1["choices"], fn choice -> build_choice(choice) end)
        })

      :voting ->
        transform_map_for_view(question, %{
          content: "content",
          prepend: "prepend",
          solutionTemplate: "template",
          contestEntries:
            &Enum.map(&1[:contest_entries], fn entry -> build_contest_entry(entry) end),
          contestLeaderboard:
            &Enum.map(&1[:contest_leaderboard], fn entry ->
              build_contest_leaderboard_entry(entry)
            end)
        })
    end
  end

  # Picks the choice flagged "is_correct" and returns its id.
  # NOTE(review): raises if no choice is marked correct — presumably enforced
  # upstream; confirm.
  defp find_correct_choice(choices) do
    choices
    |> Enum.find(&Map.get(&1, "is_correct"))
    |> Map.get("choice_id")
  end
end
|
lib/cadet_web/views/assessments_helpers.ex
| 0.615781
| 0.414395
|
assessments_helpers.ex
|
starcoder
|
defmodule Chopperbot.Split.OrderCalculator do
  alias Chopperbot.Split.{
    CalculatedOrdersResult,
    Order
  }

  @type orders :: [Order.t()]

  @doc """
  Calculate the given orders.

  ## Examples

      iex> calculate([{"a", 100}, {"b", 200}, {"c", 300}], 1.177)
      %Chopperbot.Split.CalculatedOrdersResult{
        orders: [{"a", 117.7}, {"b", 235.4}, {"c", 353.1}],
        total: 706.2
      }

      iex> calculate([{"a", 100}, {"b", 200}, {"a", 300}], 1.177)
      %Chopperbot.Split.CalculatedOrdersResult{
        orders: [{"a", 470.8}, {"b", 235.4}],
        total: 706.2
      }

      iex> calculate([{"a", 100}, {"b", 200}, {"c", 300}, {"share", 300}], 1.177)
      %Chopperbot.Split.CalculatedOrdersResult{
        orders: [{"a", 235.4}, {"b", 353.1}, {"c", 470.8}],
        total: 1059.3
      }
  """
  @spec calculate(orders(), float()) :: CalculatedOrdersResult.t()
  def calculate(orders, multiplier) do
    # Merge duplicate names, distribute any "share" entry, then scale
    # everything by the multiplier (e.g. service charge + VAT).
    merged = sum_orders_by_name(orders)
    distributed = split_share(merged)
    final_orders = apply_multiplier(distributed, multiplier)

    %CalculatedOrdersResult{
      orders: final_orders,
      total: sum_orders_amount(final_orders)
    }
  end

  @doc """
  Group and sum the orders with the same name.

  ## Examples

      iex> sum_orders_by_name([{"a", 100}, {"a", 200}, {"b", 300}, {"c", 400}])
      [{"a", 300}, {"b", 300}, {"c", 400}]
  """
  @spec sum_orders_by_name(orders()) :: orders()
  def sum_orders_by_name(orders) do
    grouped = Enum.group_by(orders, fn {name, _amount} -> name end)

    for {name, entries} <- grouped do
      {name, sum_orders_amount(entries)}
    end
  end

  @doc """
  Split the order with name "share" to all other orders equally.

  ## Examples

      iex> split_share([{"a", 100}, {"b", 300}, {"share", 400}])
      [{"a", 300.0}, {"b", 500.0}]
  """
  @spec split_share(orders()) :: orders()
  def split_share(orders) do
    case Enum.split_with(orders, &match?({"share", _}, &1)) do
      {[], _without_share} ->
        orders

      {[{"share", share_amount}], regular_orders} ->
        portion = share_amount / length(regular_orders)

        for {name, amount} <- regular_orders do
          {name, amount + portion}
        end
    end
  end

  @doc """
  Multiply each order amount with the given multiplier.

  ## Examples

      iex> apply_multiplier([{"a", 100}, {"b", 300}], 1.07)
      [{"a", 107.0}, {"b", 321.0}]
  """
  @spec apply_multiplier(orders(), float()) :: orders()
  def apply_multiplier(orders, multiplier) do
    for {name, amount} <- orders do
      # Round at 15 decimal places to trim float noise from the product.
      {name, Float.round(amount * multiplier, 15)}
    end
  end

  # Total of all order amounts; 0 for an empty list.
  defp sum_orders_amount(orders) do
    Enum.reduce(orders, 0, fn {_name, amount}, acc -> acc + amount end)
  end
end
|
lib/chopperbot/split/order_calculator.ex
| 0.847905
| 0.486332
|
order_calculator.ex
|
starcoder
|
defmodule Rank do
  @moduledoc "Rank a poker hand"

  # Classifies the hand by testing categories from strongest (straight flush,
  # rank 9) down to weakest (high card, rank 1); the first matching category
  # determines the formatted rank string.
  def rank(hand) do
    cond do
      # Rank 9
      Hand.is_straight_flush_ace_high(hand) -> format_straight_flush_ace_high(hand)
      # Rank 9
      Hand.is_straight_flush_ace_low(hand) -> format_straight_flush_ace_low(hand)
      # Rank 8
      Hand.is_four_of_a_kind(hand) -> format_four_of_a_kind(hand)
      # Rank 7
      Hand.is_full_house(hand) -> format_full_house(hand)
      # Rank 6
      Hand.is_flush(hand) -> format_flush(hand)
      # Rank 5
      Hand.is_straight_ace_high(hand) -> format_straight_ace_high(hand)
      # Rank 5
      Hand.is_straight_ace_low(hand) -> format_straight_ace_low(hand)
      # Rank 4
      Hand.is_three_of_a_kind(hand) -> format_three_of_a_kind(hand)
      # Rank 3
      Hand.is_two_pair(hand) -> format_two_pair(hand)
      # Rank 2
      Hand.is_pair(hand) -> format_pair(hand)
      # Rank 1
      Hand.is_high_card(hand) -> format_high_card(hand)
    end
  end

  @doc """
  Format methods:
  Input: [[:heart, :seven], [:club, :seven], [:diamond, :two], [:spade, :seven], [:diamond, :three]]
  Output: "4,0707070302"
  """
  def format_high_card(hand) do
    sort_by_face(hand, "1", :ace)
  end

  def format_pair(hand) do
    sort_by_count_and_face(hand, "2", :ace)
  end

  def format_two_pair(hand) do
    sort_by_count_and_face(hand, "3", :ace)
  end

  def format_three_of_a_kind(hand) do
    sort_by_count_and_face(hand, "4", :ace)
  end

  def format_straight_ace_high(hand) do
    sort_by_face(hand, "5", :ace)
  end

  # :ace_low ranks the ace below the deuce for A-2-3-4-5 straights.
  def format_straight_ace_low(hand) do
    sort_by_face(hand, "5", :ace_low)
  end

  def format_flush(hand) do
    sort_by_face(hand, "6", :ace)
  end

  def format_full_house(hand) do
    sort_by_count_and_face(hand, "7", :ace)
  end

  def format_four_of_a_kind(hand) do
    sort_by_count_and_face(hand, "8", :ace)
  end

  def format_straight_flush_ace_high(hand) do
    sort_by_count_and_face(hand, "9", :ace)
  end

  def format_straight_flush_ace_low(hand) do
    sort_by_count_and_face(hand, "9", :ace_low)
  end

  # Orders the cards purely by face rank, highest first, then formats.
  defp sort_by_face(hand, rank_code, ace) do
    Enum.map(hand, fn card -> Card.rank(card, ace) end)
    |> Enum.sort(fn face_rank1, face_rank2 -> face_rank1 > face_rank2 end)
    |> format_rank(rank_code)
  end

  # Orders groups of equal faces by (group size, face rank) descending, so
  # e.g. the trips of a full house come before the pair, then formats.
  # NOTE(review): the sort key is a string "count|rank" — fine while counts
  # are single-digit (max 4 in poker), but lexicographic compare would break
  # for counts >= 10. Also, Card.rank/2 is called with a bare face here but
  # with a full [suit, face] card in sort_by_face/3 — confirm it accepts both.
  defp sort_by_count_and_face(hand, rank_code, ace) do
    Enum.group_by(hand, fn [_, face] -> face end)
    |> Enum.sort(fn {face1, cards1}, {face2, cards2} ->
      "#{length(cards1)}|#{Card.rank(face1, ace)}" > "#{length(cards2)}|#{Card.rank(face2, ace)}"
    end)
    |> Enum.map(fn {_, cards} -> cards end)
    |> Enum.concat()
    |> Enum.map(fn card -> Card.rank(card, ace) end)
    |> format_rank(rank_code)
  end

  # Joins the per-card ranks behind a "<rank_code>," prefix, e.g. "4,0707070302".
  defp format_rank(hand, rank_code) do
    Enum.into(hand, ["#{rank_code},"])
    |> Enum.join()
  end
end
|
lib/rank.ex
| 0.639624
| 0.404066
|
rank.ex
|
starcoder
|
defmodule Vivid.Font.Char do
  alias __MODULE__
  alias Vivid.{Group, Path, Point}

  # character: the glyph's character; left_pos/right_pos: documented bounds;
  # coordinates: list of {x, y} vertices interleaved with :pen_up markers.
  defstruct ~w(character vertices left_pos right_pos coordinates)a

  @moduledoc """
  Describes an individual character defined by a Hershey font file.
  """

  @opaque t :: %Char{}

  @doc """
  Returns the (documented) width of a specific character.

  This is not the maximum width of the character, as some go beyond or don't reach their documented bounds.
  I assume this is for kerning. I may be wrong.
  """
  @spec width(Char.t(), number) :: number
  def width(%Char{left_pos: l, right_pos: r}, scale \\ 1.0), do: round((abs(l) + abs(r)) * scale)

  @doc """
  Returns the left padding specified for this character.
  """
  @spec left_pad(Char.t(), number) :: number
  def left_pad(%Char{left_pos: l}, scale \\ 1.0), do: round(abs(l) * scale)

  @doc """
  Returns the right padding specified for this character.
  """
  @spec right_pad(Char.t(), number) :: number
  def right_pad(%Char{right_pos: r}, scale \\ 1.0), do: round(abs(r) * scale)

  @doc """
  Rendered width of a character.
  """
  @spec rendered_width(Char.t(), number) :: number
  def rendered_width(%Char{} = char, scale \\ 1.0), do: rendered_dimension(char, scale, 0)

  @doc """
  Rendered height of a character.
  """
  @spec rendered_height(Char.t(), number) :: number
  def rendered_height(%Char{} = char, scale \\ 1.0), do: rendered_dimension(char, scale, 1)

  @doc """
  Convert a %Char{} into a shape which can be rendered.

  * `char` is a `%Char{}` struct.
  * `center` the center `%Point{}` around which to render the character.
  * `scale` how much to scale the character by.
  """
  # NOTE(review): `Shape` is not aliased in this module, so the spec refers to
  # top-level `Shape.t()` — possibly `Vivid.Shape.t()` was intended; confirm.
  @spec to_shape(Char.t(), Point.t(), number) :: Shape.t()
  def to_shape(%Char{coordinates: coords}, %Point{} = center, scale \\ 1.0) do
    x_center = center |> Point.x()
    y_center = center |> Point.y()

    # Split the coordinate stream into pen strokes: each :pen_up marker starts
    # a new (empty) point list; plain {x, y} pairs are scaled, translated to
    # the center, and prepended to the current stroke. Each stroke becomes a
    # Path, and the paths are collected into a Group.
    # NOTE(review): prepending means points/paths end up reversed relative to
    # the font data — presumably rendering is direction-insensitive; confirm.
    coords
    |> Enum.reduce([[]], fn
      :pen_up, acc ->
        [[] | acc]

      {x, y}, [last | rest] ->
        x = round(x_center + x * scale)
        y = round(y_center + y * scale)
        [[Point.init(x, y) | last] | rest]
    end)
    |> Enum.map(&Path.init(&1))
    |> Group.init()
  end

  # Extent of the glyph along one axis: `i` is the tuple index into {x, y}
  # (0 = width, 1 = height). Pen-up markers are ignored; an empty coordinate
  # list yields 0.
  defp rendered_dimension(%Char{coordinates: coords}, scale, i) do
    coords =
      coords
      |> Enum.reject(fn c -> c == :pen_up end)
      |> Enum.map(&elem(&1, i))

    if coords == [] do
      0
    else
      max = coords |> Enum.max()
      min = coords |> Enum.min()
      round((max - min) * scale)
    end
  end
end
|
lib/vivid/font/char.ex
| 0.92972
| 0.594934
|
char.ex
|
starcoder
|
defmodule SMPPEX.MC do
@moduledoc """
Module for implementing custom SMPP MC entities.
In general, an SMPP MC entity represents a TCP server, which accepts connections
and handles them. The TCP server is represented by a Ranch listener started
by `start/2` call. On new connection the listener spawns `SMPPEX.Session` process
coupled with `SMPPEX.MC` `GenServer` handler. The session interacts with the socket
while the MC handler keeps state and does actual PDU handling. One also interacts with
MC handler to send PDUs, PDU replies, etc.
The session makes all requests to the MC handler process *synchronously* (via `GenServer.call`),
while the MC handler process makes only *asynchronous* (via `GenServer.cast`) requests to the session.
This is made intentionally since this allows:
* to avoid any kind of deadlocks while the session and the MC handler process interact actively;
* to control incoming SMPP message rate to avoid overflooding;
* not to lose any control over connection because of the asynchronous nature of TCP implementation in OTP.
To implement an MC entity, one should implement several callbacks for MC handler processes
(`SMPPEX.MC` behaviour). The most proper way to do it is to `use` `SMPPEX.MC`:
```
defmodule MyMC do
use SMPPEX.MC
# ...Callback implementation
end
```
In this case all callbacks have reasonable defaults.
Note that `SMPPEX.MC` does not have a `start_link` method since `SMPPEX.MC` instances (handler processes)
are launched when a Ranch listener created by `start/2` receives a new incoming connection.
"""
alias :erlang, as: Erlang
alias :ranch, as: Ranch
alias SMPPEX.MC
alias SMPPEX.Pdu
alias SMPPEX.PduStorage
alias SMPPEX.Session
alias SMPPEX.SMPPTimers
use GenServer
require Logger
defstruct [
:smpp_session,
:module,
:module_state,
:pdu_storage,
:timers,
:response_limit,
:time,
:timer_resolution,
:tick_timer_ref
]
@default_enquire_link_limit 30000
@default_enquire_link_resp_limit 30000
@default_session_init_limit 10000
@default_inactivity_limit :infinity
@default_response_limit 60000
@default_timer_resolution 100
@default_call_timeout 5000
@type state :: term
@type request :: term
@type socket :: port | :ssl.sslsocket
@type transport :: module
@doc """
Invoked when the listener has accepted a connection and tries to create an `SMPPEX.MC`.
The Ranch acceptor handling the connection is busy till the function returns.
`args` argument is taken directly from `start/2` call. `socket` and `transport` arguments
are Ranch socket and transport respectively. They can be used, for example, to inspect
peer address, etc.
The return value should be either `{:ok, state}`, then MC handler will successfully start and returned
state will be later passed to the other callbacks, or `{:stop, reason}`, then MC handler `GenServer` will stop
and the connection closed.
"""
@callback init(socket, transport, args :: term) :: {:ok, state} | {:stop, reason :: term}
@doc """
Invoked when the MC handler receives an incoming PDU (which is not a response PDU).
The returned value is used as the new state.
"""
@callback handle_pdu(pdu :: Pdu.t, state) :: state
@doc """
Invoked when the MC handler receives a response to a previously sent PDU.
`pdu` argument contains the received response PDU, `original_pdu` contains
the previously sent pdu for which the handled response is received.
The returned value is used as the new state.
"""
@callback handle_resp(pdu :: Pdu.t, original_pdu :: Pdu.t, state) :: state
@doc """
Invoked when the MC handler does not receive a response to a previously sent PDU
for the specified timeout.
`pdu` argument contains the PDU for which no response was received. If the response
will be received later it will be dropped (with an `info` log message).
The returned value is used as the new state.
"""
@callback handle_resp_timeout(pdu :: Pdu.t, state) :: state
@doc """
Invoked when the SMPP session successfully sent PDU to transport or failed to do this.
`pdu` argument contains the PDU for which send status is reported. `send_pdu_result` can be
either `:ok` or `{:error, reason}`.
The returned value is used as the new state.
"""
@callback handle_send_pdu_result(pdu :: Pdu.t, send_pdu_result :: SMPPEX.SMPPHandler.send_pdu_result, state) :: state
@doc """
Invoked when the SMPP session is about to stop.
The returned value is ignored.
"""
@callback handle_stop(state) :: any
@doc """
Invoked for handling `call/3` calls.
The callback is called synchronously for handling.
The returned values have the same meaning as in `GenServer` `handle_call` callback
(but note that only two kinds of responses are possible). In case of delaying a reply (`{:noreply, state}` callback result)
it can be later send using `GenServer.reply(from, reply)`
"""
@callback handle_call(request, from :: GenServer.from, state) :: {:reply, reply :: term, state} | {:noreply, state}
@doc """
Invoked for handling `cast/2` calls.
The callback is called asynchronously.
The returned value is used as the new state.
"""
@callback handle_cast(request, state) :: state
@doc """
Invoked for handling generic messages sent to the MC handler process.
The returned value is used as the new state.
"""
@callback handle_info(request, state) :: state
  # Injects default no-op implementations of all `SMPPEX.MC` callbacks so a
  # module `use`-ing this behaviour only has to override the callbacks it
  # actually cares about. Every default is declared overridable below.
  defmacro __using__(_) do
    quote location: :keep do
      @behaviour SMPPEX.MC
      @doc false
      def init(_socket, _transport, args) do
        {:ok, args}
      end
      @doc false
      def handle_pdu(_pdu, state), do: state
      @doc false
      def handle_resp(_pdu, _original_pdu, state), do: state
      @doc false
      def handle_resp_timeout(_pdu, state), do: state
      @doc false
      def handle_send_pdu_result(_pdu, _result, state), do: state
      @doc false
      def handle_stop(_state), do: nil
      @doc false
      def handle_call(_request, _from, state), do: {:reply, :ok, state}
      @doc false
      def handle_cast(_request, state), do: state
      @doc false
      def handle_info(_request, state), do: state
      # Allow the using module to redefine any subset of the callbacks.
      defoverridable [
        init: 3,
        handle_pdu: 2,
        handle_resp: 3,
        handle_resp_timeout: 2,
        handle_send_pdu_result: 3,
        handle_stop: 1,
        handle_call: 3,
        handle_cast: 2,
        handle_info: 2
      ]
    end
  end
# Public interface
@default_transport :ranch_tcp
@default_acceptor_count 50
@spec start({module, args :: term}, opts :: Keyword.t) :: {:ok, listener_ref :: Ranch.ref} | {:error, reason :: term}
@doc """
Starts a listener for an MC entity.
`module` is the callback module which should implement `SMPPEX.MC` behaviour.
`args` is the argument passed to the `init` callback.
`opts` is a keyword list of different options:
* `:transport` is Ranch transport used for TCP connections: either `ranch_tcp` (the default) or
`ranch_ssl`;
* `:transport_opts` is a list of Ranch transport options. The major option is `{:port, port}`. The port is
set to `0` by default, which means that the listener will accept connections on a random free port.
* `:acceptor_count` is the number of Ranch listener acceptors, #{@default_acceptor_count} by default.
* `:gen_server_opts` is a list of options passed directly to the underlying `GenServer.start_link` call,
the default is `[]`;
* `:mc_opts` is a keyword list of MC options:
- `:timer_resolution` is interval of internal `ticks` on which time related events happen, like checking timeouts
for pdus, checking SMPP timers, etc. The default is #{@default_timer_resolution} ms;
- `:session_init_limit` is the maximum time for which the MC handler waits an incoming bind request.
If no bind request is received within this interval of time, MC handler stops.
The default value is #{@default_session_init_limit} ms;
- `:enquire_link_limit` is value for enquire_link SMPP timer, i.e. the interval of SMPP session inactivity after which
enquire_link PDU is sent to "ping" the connection. The default value is #{@default_enquire_link_limit} ms;
- `:enquire_link_resp_limit` is the maximum time for which MC handler waits for enquire_link PDU response. If the
response is not received within this interval of time and no activity from the peer occurs, the session is then considered
dead and the MC handler stops. The default value is #{@default_enquire_link_resp_limit} ms;
- `:inactivity_limit` is the maximum time for which the peer is allowed not to send PDUs (which are not response PDUs).
If no such PDUs are received within this interval of time, MC handler stops. The default is #{@default_inactivity_limit} ms;
- `:response_limit` is the maximum time to wait for a response for a previously sent PDU. If the response is
not received within this interval, `handle_resp_timeout` callback is triggered for the original pdu. If the response
is received later, it is discarded. The default value is #{@default_response_limit} ms.
If `:mc_opts` list of options is omitted, all options take their default values.
The returned value is either `{:ok, ref}` or `{:error, reason}`. The `ref` can be later used
to stop the whole MC listener and all sessions received by it.
"""
def start({_module, _args} = mod_with_args, opts \\ []) do
acceptor_count = Keyword.get(opts, :acceptor_count, @default_acceptor_count)
transport = Keyword.get(opts, :transport, @default_transport)
transport_opts = Keyword.get(opts, :transport_opts, [{:port, 0}])
mc_opts = Keyword.get(opts, :mc_opts, [])
handler = fn(ref, socket, transport, session) ->
case start_mc(mod_with_args, ref, socket, transport, session, mc_opts) do
{:ok, mc} -> {:ok, SMPPEX.MC.SMPPHandler.new(mc)}
{:error, _} = error -> error
end
end
ref = make_ref()
case Ranch.start_listener(ref, acceptor_count, transport, transport_opts, Session, [handler: handler]) do
{:error, _} = error -> error
{:ok, _, _} -> {:ok, ref}
{:ok, _} -> {:ok, ref}
end
end
@spec stop(Ranch.ref) :: :ok
@doc """
Stops MC listener and all its sessions.
The very moment of the SMPP session termination can be traced via `handle_stop` callback.
"""
def stop(mc_server) do
Ranch.stop_listener(mc_server)
end
@spec send_pdu(mc :: pid, pdu :: Pdu.t) :: :ok
@doc """
Sends outcoming PDU from the MC handler.
The whole command is sent to the MC handler asyncronously. The further lifecycle of the PDU
can be traced through callbacks.
"""
def send_pdu(mc, pdu) do
GenServer.cast(mc, {:send_pdu, pdu})
end
@spec reply(mc :: pid, pdu :: Pdu.t, reply_pdu :: Pdu.t) :: :ok
@doc """
Sends reply to previously received PDU from the MC handler.
The whole command is sent to the MC handler asyncronously. The further lifecycle of the response PDU
can be traced through callbacks.
"""
def reply(mc, pdu, reply_pdu) do
GenServer.cast(mc, {:reply, pdu, reply_pdu})
end
@spec stop_session(pid) :: :ok
@doc """
Stops MC handler asyncronously.
The very moment of the SMPP session termination can be traced via `handle_stop` callback.
"""
def stop_session(mc) do
GenServer.cast(mc, :stop)
end
@spec call(pid, term, timeout) :: term
@doc """
Makes a syncronous call to MC handler.
The call is handled by `handle_call/3` MC callback.
"""
def call(mc, request, timeout \\ @default_call_timeout) do
GenServer.call(mc, {:call, request}, timeout)
end
@spec cast(pid, term) :: :ok
@doc """
Makes an asyncronous call to MC handler.
The call is handled by `handle_cast/2` MC callback.
"""
def cast(mc, request) do
GenServer.cast(mc, {:cast, request})
end
@spec handle_pdu(pid, Pdu.t) :: :ok
@doc false
def handle_pdu(mc, pdu) do
GenServer.call(mc, {:handle_pdu, pdu})
end
@spec handle_stop(pid) :: :ok
@doc false
def handle_stop(mc) do
GenServer.call(mc, :handle_stop)
end
@type send_pdu_result :: :ok | {:error, term}
@spec handle_send_pdu_result(pid, Pdu.t, send_pdu_result) :: :ok
@doc false
def handle_send_pdu_result(mc, pdu, send_pdu_result) do
GenServer.call(mc, {:handle_send_pdu_result, pdu, send_pdu_result})
end
# GenServer callbacks
  # Initializes the MC handler: runs the user module's init/3, arms the tick
  # timer, builds the SMPP timers and PDU storage, and assembles the state.
  def init([{module, args}, mc_opts, _ref, socket, transport, session]) do
    case module.init(socket, transport, args) do
      {:ok, state} ->
        timer_resolution = Keyword.get(mc_opts, :timer_resolution, @default_timer_resolution)
        # Periodic tick drives all time-based bookkeeping (see handle_info).
        timer_ref = Erlang.start_timer(timer_resolution, self(), :emit_tick)
        enquire_link_limit = Keyword.get(mc_opts, :enquire_link_limit, @default_enquire_link_limit)
        enquire_link_resp_limit = Keyword.get(mc_opts, :enquire_link_resp_limit, @default_enquire_link_resp_limit)
        inactivity_limit = Keyword.get(mc_opts, :inactivity_limit, @default_inactivity_limit)
        session_init_limit = Keyword.get(mc_opts, :session_init_limit, @default_session_init_limit)
        time = SMPPEX.Time.monotonic
        timers = SMPPTimers.new(
          time,
          session_init_limit,
          enquire_link_limit,
          enquire_link_resp_limit,
          inactivity_limit
        )
        # An externally supplied PDU storage may be injected via mc_opts;
        # otherwise a fresh one is started and linked to this process.
        pdu_storage_pid = case Keyword.get(mc_opts, :pdu_storage_pid, nil) do
          nil ->
            {:ok, pid} = PduStorage.start_link()
            pid
          pid -> pid
        end
        response_limit = Keyword.get(mc_opts, :response_limit, @default_response_limit)
        {:ok, %MC{
          smpp_session: session,
          module: module,
          module_state: state,
          pdu_storage: pdu_storage_pid,
          timers: timers,
          response_limit: response_limit,
          time: time,
          timer_resolution: timer_resolution,
          tick_timer_ref: timer_ref
        }}
      {:stop, _} = stop ->
        # Propagate the user module's refusal to start; the connection closes.
        stop
    end
  end
  # Routes an incoming PDU either to response handling (for resp PDUs) or to
  # generic PDU handling. Called synchronously by the session process.
  def handle_call({:handle_pdu, pdu}, _from, st) do
    case Pdu.resp?(pdu) do
      true -> do_handle_resp(pdu, st)
      false -> do_handle_pdu(pdu, st)
    end
  end
  # Session is about to stop: notify the user module, then stop this server.
  def handle_call(:handle_stop, _from, st) do
    do_handle_stop(st)
  end
  # Result of a previously requested PDU send, reported by the session.
  def handle_call({:handle_send_pdu_result, pdu, send_pdu_result}, _from, st) do
    do_handle_send_pdu_result(pdu, send_pdu_result, st)
  end
  # User-level synchronous call, delegated to the module's handle_call/3.
  # The module may defer the reply ({:noreply, _}) and answer later via
  # GenServer.reply/2.
  def handle_call({:call, request}, from, st) do
    case st.module.handle_call(request, from, st.module_state) do
      {:reply, reply, new_module_state} ->
        new_st = %MC{st | module_state: new_module_state}
        {:reply, reply, new_st}
      {:noreply, new_module_state} ->
        new_st = %MC{st | module_state: new_module_state}
        {:noreply, new_st}
    end
  end
  # Asynchronous request to send a PDU out through the session.
  def handle_cast({:send_pdu, pdu}, st) do
    new_st = do_send_pdu(pdu, st)
    {:noreply, new_st}
  end
  # Asynchronous request to reply to a previously received PDU.
  def handle_cast({:reply, pdu, reply_pdu}, st) do
    new_st = do_reply(pdu, reply_pdu, st)
    {:noreply, new_st}
  end
  # Stop the SMPP session; this server stops later via :handle_stop.
  def handle_cast(:stop, st) do
    Session.stop(st.smpp_session)
    {:noreply, st}
  end
  # User-level asynchronous call, delegated to the module's handle_cast/2.
  def handle_cast({:cast, request}, st) do
    new_module_state = st.module.handle_cast(request, st.module_state)
    new_st = %MC{st | module_state: new_module_state}
    {:noreply, new_st}
  end
  # Periodic tick fired: re-arm the timer and schedule time-based processing.
  def handle_info({:timeout, _timer_ref, :emit_tick}, st) do
    new_tick_timer_ref = Erlang.start_timer(st.timer_resolution, self(), :emit_tick)
    # NOTE(review): st.tick_timer_ref is the timer that just fired, so this
    # cancel is effectively a no-op — looks like a belt-and-braces guard.
    Erlang.cancel_timer(st.tick_timer_ref)
    Kernel.send self(), {:tick, SMPPEX.Time.monotonic}
    {:noreply, %MC{st | tick_timer_ref: new_tick_timer_ref}}
  end
  # Process a tick with the captured monotonic time.
  def handle_info({:tick, time}, st) do
    do_handle_tick(time, st)
  end
  # Any other message is forwarded to the user module's handle_info/2.
  def handle_info(request, st) do
    new_module_state = st.module.handle_info(request, st.module_state)
    new_st = %MC{st | module_state: new_module_state}
    {:noreply, new_st}
  end
# Private functions
defp start_mc(mod_with_args, ref, socket, transport, session, opts) do
gen_server_opts = Keyword.get(opts, :gen_server_opts, [])
mc_opts = Keyword.get(opts, :mc_opts, [])
GenServer.start_link(
__MODULE__,
[mod_with_args, mc_opts, ref, socket, transport, session],
gen_server_opts
)
end
  # Handles a non-response PDU: lets the user module process it, records the
  # peer transaction for the SMPP timers, and treats bind PDUs specially.
  defp do_handle_pdu(pdu, st) do
    new_module_state = st.module.handle_pdu(pdu, st.module_state)
    new_timers = SMPPTimers.handle_peer_transaction(st.timers, st.time)
    new_st = %MC{st | module_state: new_module_state, timers: new_timers}
    if Pdu.bind?(pdu) do
      do_handle_bind(new_st)
    else
      {:reply, :ok, new_st}
    end
  end
  # Handles a response PDU: marks peer activity, then looks up the original
  # PDU by sequence number; unknown responses are logged and dropped.
  defp do_handle_resp(pdu, st) do
    sequence_number = Pdu.sequence_number(pdu)
    new_timers = SMPPTimers.handle_peer_action(st.timers, st.time)
    new_st = %MC{st | timers: new_timers}
    case PduStorage.fetch(st.pdu_storage, sequence_number) do
      [] ->
        Logger.info("mc #{inspect self()}, resp for unknown pdu(sequence_number: #{sequence_number}), dropping")
        {:reply, :ok, new_st}
      [original_pdu] ->
        do_handle_resp_for_pdu(pdu, original_pdu, new_st)
    end
  end
  # Delegates a matched response/original PDU pair to the user module.
  defp do_handle_resp_for_pdu(pdu, original_pdu, st) do
    new_module_state = st.module.handle_resp(pdu, original_pdu, st.module_state)
    new_st = %MC{st | module_state: new_module_state}
    {:reply, :ok, new_st}
  end
  # Switches the SMPP timers into the bound state.
  defp do_handle_bind(st) do
    new_timers = SMPPTimers.handle_bind(st.timers, st.time)
    new_st = %MC{st | timers: new_timers}
    {:reply, :ok, new_st}
  end
  # Notifies the user module of session termination, then stops normally.
  defp do_handle_stop(st) do
    _ = st.module.handle_stop(st.module_state)
    {:stop, :normal, :ok, st}
  end
  # Forwards a PDU send result to the user module.
  defp do_handle_send_pdu_result(pdu, send_pdu_result, st) do
    new_module_state = st.module.handle_send_pdu_result(pdu, send_pdu_result, st.module_state)
    new_st = %MC{st | module_state: new_module_state}
    {:reply, :ok, new_st}
  end
end
  # Tick processing: expire overdue PDUs first, then advance the SMPP timers.
  defp do_handle_tick(time, st) do
    expired_pdus = PduStorage.fetch_expired(st.pdu_storage, time)
    new_st = do_handle_expired_pdus(expired_pdus, st)
    do_handle_timers(time, new_st)
  end
  # Reports each expired (unanswered) PDU to the user module's
  # handle_resp_timeout/2, threading the module state through.
  defp do_handle_expired_pdus([], st), do: st
  defp do_handle_expired_pdus([pdu | pdus], st) do
    new_module_state = st.module.handle_resp_timeout(pdu, st.module_state)
    new_st = %MC{st | module_state: new_module_state}
    do_handle_expired_pdus(pdus, new_st)
  end
  # Advances the SMPP timers; may request an enquire_link or stop the session.
  defp do_handle_timers(time, st) do
    case SMPPTimers.handle_tick(st.timers, time) do
      {:ok, new_timers} ->
        new_st = %MC{st | timers: new_timers, time: time}
        {:noreply, new_st}
      {:stop, reason} ->
        Logger.info("mc #{inspect self()}, being stopped by timers(#{reason})")
        Session.stop(st.smpp_session)
        {:noreply, st}
      {:enquire_link, new_timers} ->
        new_st = %MC{st | timers: new_timers, time: time}
        do_send_enquire_link(new_st)
    end
  end
  # Builds and sends an enquire_link PDU to keep the session alive.
  defp do_send_enquire_link(st) do
    enquire_link = SMPPEX.Pdu.Factory.enquire_link
    new_st = do_send_pdu(enquire_link, st)
    {:noreply, new_st}
  end
  # Assigns a sequence number if the PDU has none, stores the PDU for
  # response tracking (with its expiration time), and hands it to the session.
  defp do_send_pdu(pdu, st) do
    pdu = case pdu.sequence_number do
      0 -> %Pdu{pdu | sequence_number: PduStorage.reserve_sequence_number(st.pdu_storage)}
      _ -> pdu
    end
    true = PduStorage.store(st.pdu_storage, pdu, st.time + st.response_limit)
    Session.send_pdu(st.smpp_session, pdu)
    st
  end
  # Sends a reply PDU carrying the original PDU's sequence number.
  defp do_reply(pdu, reply_pdu, st) do
    new_reply_pdu = %Pdu{reply_pdu | sequence_number: pdu.sequence_number}
    Session.send_pdu(st.smpp_session, new_reply_pdu)
    st
  end
end
|
lib/smppex/mc.ex
| 0.879412
| 0.828939
|
mc.ex
|
starcoder
|
defmodule AWS.Shield do
@moduledoc """
AWS Shield Advanced
This is the *AWS Shield Advanced API Reference*. This guide is for
developers who need detailed information about the AWS Shield Advanced API
actions, data types, and errors. For detailed information about AWS WAF and
AWS Shield Advanced features and an overview of how to use the AWS WAF and
AWS Shield Advanced APIs, see the [AWS WAF and AWS Shield Developer
Guide](https://docs.aws.amazon.com/waf/latest/developerguide/).
"""
@doc """
Authorizes the DDoS Response team (DRT) to access the specified Amazon S3
bucket containing your flow logs. You can associate up to 10 Amazon S3
buckets with your subscription.
To use the services of the DRT and make an `AssociateDRTLogBucket` request,
you must be subscribed to the [Business Support
plan](https://aws.amazon.com/premiumsupport/business-support/) or the
[Enterprise Support
plan](https://aws.amazon.com/premiumsupport/enterprise-support/).
"""
def associate_d_r_t_log_bucket(client, input, options \\ []) do
request(client, "AssociateDRTLogBucket", input, options)
end
@doc """
Authorizes the DDoS Response team (DRT), using the specified role, to
access your AWS account to assist with DDoS attack mitigation during
potential attacks. This enables the DRT to inspect your AWS WAF
configuration and create or update AWS WAF rules and web ACLs.
You can associate only one `RoleArn` with your subscription. If you submit
an `AssociateDRTRole` request for an account that already has an associated
role, the new `RoleArn` will replace the existing `RoleArn`.
Prior to making the `AssociateDRTRole` request, you must attach the
[AWSShieldDRTAccessPolicy](https://console.aws.amazon.com/iam/home?#/policies/arn:aws:iam::aws:policy/service-role/AWSShieldDRTAccessPolicy)
managed policy to the role you will specify in the request. For more
information see [Attaching and Detaching IAM Policies](
https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_manage-attach-detach.html).
The role must also trust the service principal ` drt.shield.amazonaws.com`.
For more information, see [IAM JSON Policy Elements:
Principal](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html).
The DRT will have access only to your AWS WAF and Shield resources. By
submitting this request, you authorize the DRT to inspect your AWS WAF and
Shield configuration and create and update AWS WAF rules and web ACLs on
your behalf. The DRT takes these actions only if explicitly authorized by
you.
You must have the `iam:PassRole` permission to make an `AssociateDRTRole`
request. For more information, see [Granting a User Permissions to Pass a
Role to an AWS
Service](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_passrole.html).
To use the services of the DRT and make an `AssociateDRTRole` request, you
must be subscribed to the [Business Support
plan](https://aws.amazon.com/premiumsupport/business-support/) or the
[Enterprise Support
plan](https://aws.amazon.com/premiumsupport/enterprise-support/).
"""
def associate_d_r_t_role(client, input, options \\ []) do
request(client, "AssociateDRTRole", input, options)
end
@doc """
Enables AWS Shield Advanced for a specific AWS resource. The resource can
be an Amazon CloudFront distribution, Elastic Load Balancing load balancer,
AWS Global Accelerator accelerator, Elastic IP Address, or an Amazon Route
53 hosted zone.
You can add protection to only a single resource with each CreateProtection
request. If you want to add protection to multiple resources at once, use
the [AWS WAF console](https://console.aws.amazon.com/waf/). For more
information see [Getting Started with AWS Shield
Advanced](https://docs.aws.amazon.com/waf/latest/developerguide/getting-started-ddos.html)
and [Add AWS Shield Advanced Protection to more AWS
Resources](https://docs.aws.amazon.com/waf/latest/developerguide/configure-new-protection.html).
"""
def create_protection(client, input, options \\ []) do
request(client, "CreateProtection", input, options)
end
@doc """
Activates AWS Shield Advanced for an account.
As part of this request you can specify `EmergencySettings` that
automaticaly grant the DDoS response team (DRT) needed permissions to
assist you during a suspected DDoS attack. For more information see
[Authorize the DDoS Response Team to Create Rules and Web ACLs on Your
Behalf](https://docs.aws.amazon.com/waf/latest/developerguide/authorize-DRT.html).
When you initally create a subscription, your subscription is set to be
automatically renewed at the end of the existing subscription period. You
can change this by submitting an `UpdateSubscription` request.
"""
def create_subscription(client, input, options \\ []) do
request(client, "CreateSubscription", input, options)
end
@doc """
Deletes an AWS Shield Advanced `Protection`.
"""
def delete_protection(client, input, options \\ []) do
request(client, "DeleteProtection", input, options)
end
@doc """
Removes AWS Shield Advanced from an account. AWS Shield Advanced requires a
1-year subscription commitment. You cannot delete a subscription prior to
the completion of that commitment.
"""
def delete_subscription(client, input, options \\ []) do
request(client, "DeleteSubscription", input, options)
end
@doc """
Describes the details of a DDoS attack.
"""
def describe_attack(client, input, options \\ []) do
request(client, "DescribeAttack", input, options)
end
@doc """
Returns the current role and list of Amazon S3 log buckets used by the DDoS
Response team (DRT) to access your AWS account while assisting with attack
mitigation.
"""
def describe_d_r_t_access(client, input, options \\ []) do
request(client, "DescribeDRTAccess", input, options)
end
@doc """
Lists the email addresses that the DRT can use to contact you during a
suspected attack.
"""
def describe_emergency_contact_settings(client, input, options \\ []) do
request(client, "DescribeEmergencyContactSettings", input, options)
end
@doc """
Lists the details of a `Protection` object.
"""
def describe_protection(client, input, options \\ []) do
request(client, "DescribeProtection", input, options)
end
@doc """
Provides details about the AWS Shield Advanced subscription for an account.
"""
def describe_subscription(client, input, options \\ []) do
request(client, "DescribeSubscription", input, options)
end
@doc """
Removes the DDoS Response team's (DRT) access to the specified Amazon S3
bucket containing your flow logs.
To make a `DisassociateDRTLogBucket` request, you must be subscribed to the
[Business Support
plan](https://aws.amazon.com/premiumsupport/business-support/) or the
[Enterprise Support
plan](https://aws.amazon.com/premiumsupport/enterprise-support/). However,
if you are not subscribed to one of these support plans, but had been
previously and had granted the DRT access to your account, you can submit a
`DisassociateDRTLogBucket` request to remove this access.
"""
def disassociate_d_r_t_log_bucket(client, input, options \\ []) do
request(client, "DisassociateDRTLogBucket", input, options)
end
@doc """
Removes the DDoS Response team's (DRT) access to your AWS account.
To make a `DisassociateDRTRole` request, you must be subscribed to the
[Business Support
plan](https://aws.amazon.com/premiumsupport/business-support/) or the
[Enterprise Support
plan](https://aws.amazon.com/premiumsupport/enterprise-support/). However,
if you are not subscribed to one of these support plans, but had been
previously and had granted the DRT access to your account, you can submit a
`DisassociateDRTRole` request to remove this access.
"""
def disassociate_d_r_t_role(client, input, options \\ []) do
request(client, "DisassociateDRTRole", input, options)
end
@doc """
Returns the `SubscriptionState`, either `Active` or `Inactive`.
"""
def get_subscription_state(client, input, options \\ []) do
request(client, "GetSubscriptionState", input, options)
end
@doc """
Returns all ongoing DDoS attacks or all DDoS attacks during a specified
time period.
"""
def list_attacks(client, input, options \\ []) do
request(client, "ListAttacks", input, options)
end
@doc """
Lists all `Protection` objects for the account.
"""
def list_protections(client, input, options \\ []) do
request(client, "ListProtections", input, options)
end
@doc """
Updates the details of the list of email addresses that the DRT can use to
contact you during a suspected attack.
"""
def update_emergency_contact_settings(client, input, options \\ []) do
request(client, "UpdateEmergencyContactSettings", input, options)
end
@doc """
Updates the details of an existing subscription. Only enter values for
parameters you want to change. Empty parameters are not updated.
"""
def update_subscription(client, input, options \\ []) do
request(client, "UpdateSubscription", input, options)
end
  @spec request(map(), binary(), map(), list()) ::
    {:ok, Poison.Parser.t | nil, Poison.Response.t} |
    {:error, Poison.Parser.t} |
    {:error, HTTPoison.Error.t}
  # Signs and performs an AWS JSON-1.1 POST request for the Shield service
  # and decodes the response body with Poison.
  defp request(client, action, input, options) do
    client = %{client | service: "shield"}
    host = get_host("shield", client)
    url = get_url(host, client)
    headers = [{"Host", host},
               {"Content-Type", "application/x-amz-json-1.1"},
               {"X-Amz-Target", "AWSShield_20160616.#{action}"}]
    payload = Poison.Encoder.encode(input, [])
    # Re-bind headers with the SigV4 signature added.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    case HTTPoison.post(url, payload, headers, options) do
      # 200 with empty body: success, nothing to decode.
      {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
        {:ok, nil, response}
      {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
        {:ok, Poison.Parser.parse!(body), response}
      # Any non-200 status: decode the AWS error envelope.
      {:ok, _response=%HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body)
        exception = error["__type"]
        message = error["message"]
        {:error, {exception, message}}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end
defp get_host(endpoint_prefix, client) do
if client.region == "local" do
"localhost"
else
"#{endpoint_prefix}.#{client.region}.#{client.endpoint}"
end
end
defp get_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/shield.ex
| 0.85493
| 0.640228
|
shield.ex
|
starcoder
|
defmodule RedBlackTree do
@moduledoc """
Module for creating and managing red-black trees.
Tree nodes have keys (a number that defines the relation between nodes) and data (anything you want).
A red-black tree is a approximately balanced binary tree that satisfies the following red-black properties:
1. Every node is either red or black.
2. The root is black. * We relax this rule to make deletion simpler
3. Every leaf (NIL) is black.
4. If a node is red, then both its children are black.
5. For each node, all simple paths from the node to descendant leaves contain the
same number of black nodes.
Using the insert implementation from the article
"Red-black trees in a functional setting" by <NAME>
Using the delete implementation from the article
"FUNCTIONAL PEARL Deletion: The curse of the red-black tree" by <NAME> and <NAME>
"""
alias RedBlackTree, as: RBNode
defstruct key: nil, left: nil, right: nil, data: nil, color: nil
  @doc """
  Creates an empty tree node.

  Returns `%RedBlackTree{key: nil, left: nil, right: nil, data: nil, color: nil}`.

  ## Examples

      iex> RedBlackTree.create()
      %RedBlackTree{key: nil, left: nil, right: nil, data: nil, color: nil}

  """
  def create() do
    %RBNode{}
  end
  @doc """
  Creates a tree node with the given `key` and `data`.

  Returns `%RedBlackTree{key: key, left: nil, right: nil, data: data, color: nil}`.

  ## Examples

      iex> RedBlackTree.create(1, %{x: "my data"})
      %RedBlackTree{key: 1, left: nil, right: nil, data: %{x: "my data"}, color: nil}

  """
  def create(key, data) do
    %RBNode{key: key, data: data}
  end
defp balance(
_node_color = :black,
left_node = %RBNode{color: :red, left: %RBNode{color: :red}},
actual_node,
right_node
) do
%RBNode{
color: :red,
left: %{left_node.left | color: :black},
key: left_node.key,
data: left_node.data,
right: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node.right,
right: right_node
}
}
end
defp balance(
_node_color = :black,
left_node = %RBNode{color: :red, right: %RBNode{color: :red}},
actual_node,
right_node
) do
%RBNode{
color: :red,
left: %RBNode{
color: :black,
key: left_node.key,
data: left_node.data,
left: left_node.left,
right: left_node.right.left
},
key: left_node.right.key,
data: left_node.right.data,
right: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node.right.right,
right: right_node
}
}
end
defp balance(
_node_color = :black,
left_node,
actual_node,
right_node = %RBNode{color: :red, left: %RBNode{color: :red}}
) do
%RBNode{
color: :red,
left: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node,
right: right_node.left.left
},
key: right_node.left.key,
data: right_node.left.data,
right: %RBNode{
color: :black,
key: right_node.key,
data: right_node.data,
left: right_node.left.right,
right: right_node.right
}
}
end
defp balance(
_node_color = :black,
left_node,
actual_node,
right_node = %RBNode{color: :red, right: %RBNode{color: :red}}
) do
%RBNode{
color: :red,
left: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node,
right: right_node.left
},
key: right_node.key,
data: right_node.data,
right: %{right_node.right | color: :black}
}
end
defp balance(
_node_color = :blackblack,
left_node,
actual_node,
right_node = %RBNode{color: :red, left: %RBNode{color: :red}}
) do
%RBNode{
color: :black,
left: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node,
right: right_node.left.left
},
key: right_node.left.key,
data: right_node.left.data,
right: %RBNode{
color: :black,
key: right_node.key,
data: right_node.data,
left: right_node.left.right,
right: right_node.right
}
}
end
defp balance(
_node_color = :blackblack,
left_node = %RBNode{color: :red, right: %RBNode{color: :red}},
actual_node,
right_node
) do
%RBNode{
color: :black,
left: %RBNode{
color: :black,
key: left_node.key,
data: left_node.data,
left: left_node.left,
right: left_node.right.left
},
key: left_node.right.key,
data: left_node.right.data,
right: %RBNode{
color: :black,
key: actual_node.key,
data: actual_node.data,
left: left_node.right.right,
right: right_node
}
}
end
defp balance(node_color, left_node, actual_node, right_node) do
%RBNode{
color: node_color,
left: left_node,
key: actual_node.key,
data: actual_node.data,
right: right_node
}
end
  @doc """
  Inserts `new_node` as the root of a new tree.

  Returns the resulting root node, re-blackened so the red-black invariants
  hold at the top.

  ## Examples

      RedBlackTree.insert(RedBlackTree.create(1, :data))

  """
  def insert(new_node = %RBNode{}) do
    blacken(insert_rec(nil, new_node))
  end
  @doc """
  Inserts `new_node` into an empty (`nil`) tree.

  Equivalent to `insert/1`: the node becomes the (black) root.

  ## Examples

      RedBlackTree.insert(nil, RedBlackTree.create(1, :data))

  """
  def insert(nil, new_node = %RBNode{}) do
    blacken(insert_rec(nil, new_node))
  end
  @doc """
  Inserts `new_node` into the tree rooted at `tree_root_node`.

  Uses Okasaki's insert-and-balance scheme, re-blackening the root afterwards.
  Inserting a key that is already present leaves the existing node unchanged.

  ## Examples

      RedBlackTree.insert(tree, RedBlackTree.create(2, :data))

  """
  def insert(tree_root_node = %RBNode{}, new_node = %RBNode{}) do
    blacken(insert_rec(tree_root_node, new_node))
  end
  # Post-insert repair at the root: a red node with a red child is recolored
  # black (restoring the no-red-red property at the top); any other node is
  # returned unchanged.
  defp blacken(node = %RBNode{color: :red, left: %RBNode{color: :red}}) do
    %{node | color: :black}
  end
  defp blacken(node = %RBNode{color: :red, right: %RBNode{color: :red}}) do
    %{node | color: :black}
  end
  defp blacken(node = %RBNode{}) do
    node
  end
defp insert_rec(_actual_tree_node = nil, new_node = %RBNode{}) do
%{new_node | color: :red}
end
defp insert_rec(actual_tree_node = %RBNode{}, new_node = %RBNode{}) do
cond do
actual_tree_node.key > new_node.key ->
balance(
actual_tree_node.color,
insert_rec(actual_tree_node.left, new_node),
actual_tree_node,
actual_tree_node.right
)
actual_tree_node.key == new_node.key ->
actual_tree_node
actual_tree_node.key < new_node.key ->
balance(
actual_tree_node.color,
actual_tree_node.left,
actual_tree_node,
insert_rec(actual_tree_node.right, new_node)
)
end
end
@doc """
deletes a node that has the given `key` in a `tree_root_node` tree.
Returns `${key: Number, left: %RBNode{}, right: %RBNode{}, data: :data, color: :red | :black | :blackblack }`.
## Examples
iex> RedBlackTree.delete(%RBNode{}, %RBNode{})
%RBNode{}
"""
def delete(tree_root_node = %RBNode{}, delete_key) do
result = delete_rec(redden(tree_root_node), delete_key)
result
end
  # Pre-delete preparation at the root: a black node with two black children
  # can safely be made red (the mirror of `blacken/1`); any other shape is
  # left unchanged.
  defp redden(
         node = %RBNode{
           color: :black,
           left: %RBNode{color: :black},
           right: %RBNode{color: :black}
         }
       ) do
    %{node | color: :red}
  end
  defp redden(node = %RBNode{}) do
    node
  end
defp delete_rec(actual_tree_node = %RBNode{color: :red, left: nil, right: nil}, delete_key) do
if actual_tree_node.key == delete_key do
nil
else
actual_tree_node
end
end
defp delete_rec(actual_tree_node = %RBNode{color: :black, left: nil, right: nil}, delete_key) do
if actual_tree_node.key == delete_key do
:emptyempty
else
actual_tree_node
end
end
defp delete_rec(
actual_tree_node = %RBNode{
color: :black,
left: %RBNode{
color: :red,
left: nil,
right: nil
},
right: nil
},
delete_key
) do
cond do
actual_tree_node.key > delete_key ->
%{
actual_tree_node
| color: :black,
left: delete_rec(actual_tree_node.left, delete_key),
right: nil
}
actual_tree_node.key == delete_key ->
%{actual_tree_node.left | color: :black, left: nil, right: nil}
actual_tree_node.key < delete_key ->
actual_tree_node
end
end
defp delete_rec(actual_tree_node = %RBNode{}, delete_key) do
cond do
actual_tree_node.key < delete_key ->
rotate(
actual_tree_node.color,
actual_tree_node.left,
%{actual_tree_node | left: nil, right: nil},
delete_rec(actual_tree_node.right, delete_key)
)
actual_tree_node.key == delete_key ->
{y, b} = min_del(actual_tree_node.right)
rotate(
actual_tree_node.color,
actual_tree_node.left,
y,
b
)
actual_tree_node.key > delete_key ->
rotate(
actual_tree_node.color,
delete_rec(actual_tree_node.left, delete_key),
%{actual_tree_node | left: nil, right: nil},
actual_tree_node.right
)
end
end
defp rotate(
_color = :red,
left_node = %RBNode{color: :blackblack},
actual_node = %RBNode{},
right_node = %RBNode{color: :black}
) do
balance(
:black,
%{actual_node | color: :red, left: %{left_node | color: :black}, right: right_node.left},
%{right_node | left: nil, right: nil},
right_node.right
)
end
defp rotate(
_color = :red,
_left_node = :emptyempty,
actual_node = %RBNode{},
right_node = %RBNode{color: :black}
) do
balance(
:black,
%{actual_node | color: :red, left: nil, right: right_node.left},
%{right_node | left: nil, right: nil},
right_node.right
)
end
defp rotate(
_color = :red,
left_node = %RBNode{color: :black},
actual_node = %RBNode{},
right_node = %RBNode{color: :blackblack}
) do
balance(:black, left_node.left, %{left_node | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right,
right: %{right_node | color: :black}
})
end
defp rotate(
_color = :red,
left_node = %RBNode{color: :black},
actual_node = %RBNode{},
_right_node = :emptyempty
) do
balance(:black, left_node.left, %{left_node | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right,
right: nil
})
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :blackblack},
actual_node = %RBNode{},
right_node = %RBNode{color: :black}
) do
balance(
:blackblack,
%{actual_node | color: :red, left: %{left_node | color: :black}, right: right_node.left},
%{right_node | left: nil, right: nil},
right_node.right
)
end
defp rotate(
_color = :black,
_left_node = :emptyempty,
actual_node = %RBNode{},
right_node = %RBNode{color: :black}
) do
balance(
:blackblack,
%{actual_node | color: :red, left: nil, right: right_node.left},
%{right_node | left: nil, right: nil},
right_node.right
)
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :black},
actual_node = %RBNode{},
right_node = %RBNode{color: :blackblack}
) do
balance(:blackblack, left_node.left, %{left_node | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right,
right: %{right_node | color: :black}
})
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :black},
actual_node = %RBNode{},
_right_node = :emptyempty
) do
balance(:blackblack, left_node.left, %{left_node | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right,
right: nil
})
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :blackblack},
actual_node = %RBNode{},
right_node = %RBNode{color: :red, left: %RBNode{color: :black}}
) do
%{
right_node
| color: :black,
left:
balance(
:black,
%{
actual_node
| color: :red,
left: %{left_node | color: :black},
right: right_node.left.left
},
%{right_node.left | left: nil, right: nil},
right_node.left.left
)
}
end
defp rotate(
_color = :black,
_left_node = :emptyempty,
actual_node = %RBNode{},
right_node = %RBNode{color: :red, left: %RBNode{color: :black}}
) do
%{
right_node
| color: :black,
left:
balance(
:black,
%{actual_node | color: :red, left: nil, right: right_node.left.left},
%{right_node.left | left: nil, right: nil},
right_node.left.right
)
}
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :red, right: %RBNode{color: :black}},
actual_node = %RBNode{},
right_node = %RBNode{color: :blackblack}
) do
%{
left_node
| color: :black,
right:
balance(:black, left_node.right.left, %{left_node.right | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right.right,
right: %{right_node | color: :black}
})
}
end
defp rotate(
_color = :black,
left_node = %RBNode{color: :red, right: %RBNode{color: :black}},
actual_node = %RBNode{},
_right_node = :emptyempty
) do
%{
left_node
| color: :black,
right:
balance(:black, left_node.right.left, %{left_node.right | left: nil, right: nil}, %{
actual_node
| color: :red,
left: left_node.right.right,
right: nil
})
}
end
defp rotate(color, left_node, actual_node, right_node) do
%{actual_node | color: color, left: left_node, right: right_node}
end
  # Removes the minimum node of a subtree.
  # Returns `{min_node, remainder}` where `remainder` is the subtree with the
  # minimum removed: `nil` for a removed red leaf, the `:emptyempty`
  # double-black marker for a removed black leaf, or a rebalanced subtree.
  defp min_del(actual_tree_node = %RBNode{color: :red, right: nil, left: nil}) do
    {actual_tree_node, nil}
  end
  defp min_del(actual_tree_node = %RBNode{color: :black, right: nil, left: nil}) do
    {actual_tree_node, :emptyempty}
  end
  defp min_del(
         actual_tree_node = %RBNode{
           color: :black,
           left: nil,
           right: %RBNode{color: :red, left: nil, right: nil}
         }
       ) do
    {actual_tree_node, %{actual_tree_node.right | color: :black}}
  end
  # General case: recurse down the left spine, then rotate the shrunken left
  # subtree back into this node to repair any double-black deficit.
  defp min_del(actual_tree_node = %RBNode{}) do
    {xl, al} = min_del(actual_tree_node.left)
    x = %{actual_tree_node | left: nil, right: nil}
    {xl, rotate(actual_tree_node.color, al, x, actual_tree_node.right)}
  end
  @doc """
  Searches for a `nil` key.

  Always returns `nil` — `nil` is never a valid key.

  ## Examples

      RedBlackTree.search(tree, nil)
      #=> nil

  """
  def search(_tree_root_node, _key = nil) do
    nil
  end
  @doc """
  Searches an empty (`nil`) tree.

  Always returns `nil` — an empty tree contains no keys.

  ## Examples

      RedBlackTree.search(nil, 5)
      #=> nil

  """
  def search(_tree_root_node = nil, _key) do
    nil
  end
@doc """
Searches for a node that has the given `key` in a `tree_root_node` tree.
Returns `${key: Number, left: %RBNode{}, right: %RBNode{}, data: :data, color: :red | :black | :blackblack }`.
## Examples
iex> RedBlackTree.search(%RBNode{}, 5)
%RBNode{}
"""
def search(tree_root_node = %RBNode{}, key) do
if tree_root_node.key == key do
tree_root_node
else
if tree_root_node.key > key do
search(tree_root_node.left, key)
else
search(tree_root_node.right, key)
end
end
end
end
|
lib/rb_tree.ex
| 0.927429
| 0.728048
|
rb_tree.ex
|
starcoder
|
defmodule Baseball.Base64.Encoder do
  @moduledoc """
  Base64 encoding per RFC 4648.

  The input binary is consumed lazily in blocks of up to 3 bytes (24 bits).
  Each block is split into 6-bit values, each value is mapped to a character
  of the Base64 alphabet, and a block producing fewer than 4 characters is
  right-padded with "=":

    * a 3-byte block yields 4 characters and no padding;
    * a 2-byte block yields 3 characters plus one "=" (its trailing 4 bits
      are zero-extended to a full 6-bit value);
    * a 1-byte block yields 2 characters plus "==" (its trailing 2 bits are
      zero-extended).

  This is why Base64 output never carries three padding characters. Only
  binaries (an integral number of octets) are accepted, matching the RFC 4648
  definition of the input.

  Reference: https://datatracker.ietf.org/doc/html/rfc4648
  """
  use Bitwise

  # Lookup table mapping each 6-bit value (0..63) to its Base64 character.
  @encoding_table "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
                  |> String.codepoints()
                  |> Enum.with_index()
                  |> Map.new(fn {char, index} -> {index, char} end)

  @doc """
  Encode a binary into Base64.
  """
  def encode_binary(input) when is_binary(input) do
    input
    |> three_byte_blocks()
    |> Stream.map(&to_sextets/1)
    |> Stream.map(&to_padded_characters/1)
    |> Enum.join()
  end

  # Lazily slice the input into 3-byte blocks; the final block may be only
  # 1 or 2 bytes long.
  defp three_byte_blocks(input) do
    Stream.unfold(input, fn
      <<>> -> nil
      <<block::binary-size(3), rest::binary>> -> {block, rest}
      short_block -> {short_block, <<>>}
    end)
  end

  # A full 24-bit block splits evenly into four 6-bit values.
  defp to_sextets(<<v1::6, v2::6, v3::6, v4::6>>), do: [v1, v2, v3, v4]
  # 16-bit block: two full values plus 4 leftover bits, zero-padded to 6 bits.
  defp to_sextets(<<v1::6, v2::6, leftover::4>>), do: [v1, v2, leftover <<< 2]
  # 8-bit block: one full value plus 2 leftover bits, zero-padded to 6 bits.
  defp to_sextets(<<v1::6, leftover::2>>), do: [v1, leftover <<< 4]

  # Map the 6-bit values to alphabet characters and pad the group to four
  # characters with "=" as required by RFC 4648.
  defp to_padded_characters(sextets) do
    sextets
    |> Enum.map_join(&Map.fetch!(@encoding_table, &1))
    |> String.pad_trailing(4, "=")
  end
end
|
lib/baseball/base64/encoder.ex
| 0.856602
| 0.61868
|
encoder.ex
|
starcoder
|
defmodule Mux.Trace do
@moduledoc """
Trace context for Mux.Context.
This module provides B3-propagation over (multiple) Mux dispatch contexts and
passes the trace id downstream.
"""
use Bitwise
@behaviour Mux.Context
@uint128_max (1 <<< 128) - 1
@uint64_max (1 <<< 64) - 1
@wire_key "com.twitter.finagle.tracing.TraceContext"
@debug (1 <<< 0)
@sampling_known (1 <<< 1)
@sampled (1 <<< 2)
@root (1 <<< 3)
@enforce_keys [:trace_id, :parent_id, :span_id, :flags]
defstruct [:trace_id, :parent_id, :span_id, :flags]
@type trace_id :: 0..unquote(@uint128_max)
@type span_id :: 0..unquote(@uint64_max)
@type flag :: :debug | :sampling_known | :sampled | :root
@type t :: %Mux.Trace{trace_id: trace_id,
parent_id: span_id,
span_id: span_id,
flags: [flag]}
  @doc """
  Start a new trace.
  If the argument is an existing trace, a child trace with new span id is
  returned. Otherwise if the argument is a list of trace flags a new root trace
  is returned (`:root` flag is implicit).
  ## Examples
      Mux.Trace.start([:sampling_known])
  This function does not use Mux.Context.
  """
  @spec start(t) :: t
  @spec start([flag]) :: t
  def start(%Mux.Trace{trace_id: trace_id, span_id: parent_id, flags: flags}) do
    start(trace_id, parent_id, flags)
  end
  def start(flags) do
    # trace_id should be 128 bits but the wire protocol only speaks 64 bits for
    # now. A root trace reuses the same id as trace, parent and span id.
    trace_id = uint64()
    join(trace_id, trace_id, trace_id, [:root | flags])
  end
@doc """
Start a new child trace.
## Examples
Mux.Trace.start(11412414, 1233232, [:debug])
This function does not use Mux.Context.
"""
@spec start(trace_id, span_id, [flag]) :: t
def start(trace_id, parent_id, flags) do
join(trace_id, parent_id, uint64(), Enum.filter(flags, &(&1 != :root)))
end
@doc """
Join an existing trace.
## Examples
Mux.Trace.join(1141241467676, 1233232787878686, 8895314843, [:debug])
This function does not use Mux.Context.
"""
@spec join(trace_id, span_id, span_id, [flag]) :: t
def join(trace_id, parent_id, span_id, flags) do
%Mux.Trace{trace_id: trace_id, parent_id: parent_id, span_id: span_id,
flags: flags}
end
@doc """
Bind a trace to the scope of an anonymous function and run the function.
If a trace already exists in the current context the function raises a
RuntimeError.
## Examples
Mux.Trace.bind(Mux.Trace.start([]), fn -> RPC.call() end)
"""
@spec bind(t, (() -> result)) :: result when result: var
def bind(%Mux.Trace{} = trace, fun) do
if Mux.Context.has_key?(Mux.Trace) do
raise "Mux.Trace already bound in Mux.Context"
else
Mux.Context.bind(Mux.Trace, trace, fun)
end
end
@doc """
Bind a trace to the scope of an anonymous function and run the function.
If a trace already exists in the current context the function raises a
RuntimeError.
## Examples
Mux.Trace.bind(124323127846, 44323123123125, 95421439, [], fn ->
Mux.Trace.span(fn -> RPC.call() end)
end)
"""
@spec bind(trace_id, span_id, span_id, [flag], (() -> result)) ::
result when result: var
def bind(trace_id, parent_id, span_id, flags, fun) do
trace = join(trace_id, parent_id, span_id, flags)
bind(trace, fun)
end
@doc """
Start a trace in the scope of an anonymous function and run the function.
The trace in the current context is used as the parent trace.
## Examples
Mux.Trace.span([:sampling_known], fn ->
Mux.Trace.span(fn -> RPC.call() end)
end)
"""
@spec span((() -> result)) :: result when result: var
def span(fun) do
trace = Mux.Context.fetch!(Mux.Trace)
Mux.Context.bind(Mux.Trace, start(trace), fun)
end
@doc """
Start a trace in the scope of an anonymous function and run the function.
If the first argument is an existing trace, a child trace with new span id is
used. If the first argument is a list of trace flags a new root trace is
started (`:root` flag is implicit).
If a trace already exists in the current context the function raises a
RuntimeError.
## Examples
Mux.Trace.span([:sampling_known], fn ->
Mux.Trace.span(fn -> RPC.call() end)
end
"""
@spec span(t | [flag], (() -> result)) :: result when result: var
def span(trace_or_flags, fun) do
trace_or_flags
|> start()
|> bind(fun)
end
@doc false
@impl Mux.Context
@spec put_wire(Mux.Context.wire, t) :: Mux.Context.wire
def put_wire(ctx, trace) do
%Mux.Trace{trace_id: trace_id, parent_id: parent_id, span_id: span_id,
flags: flags} = trace
# only encodes trace_id as 64bit, discarding hi 64 bits
data = <<span_id::64, parent_id::64, trace_id::64, flags_to_int(flags)::64>>
Map.put(ctx, @wire_key, data)
end
@doc false
@impl Mux.Context
@spec fetch_wire(Mux.Context.wire) :: {:ok, t} | :error
def fetch_wire(%{@wire_key => data}) do
case data do
<<span_id::64, parent_id::64, trace_id::64, int::64>> ->
{:ok, join(trace_id, parent_id, span_id, int_to_flags(int))}
_ ->
raise ArgumentError, "expected 32 bytes, got: #{inspect data}"
end
end
def fetch_wire(_),
do: :error
## Helpers
defp uint64(),
do: :rand.uniform(@uint64_max + 1) - 1
defp int_to_flags(int) do
pairs =
[root: @root, debug: @debug, sampling_known: @sampling_known,
sampled: @sampled]
int_to_flags(pairs, int, [])
end
defp int_to_flags([], _int, flags),
do: flags
defp int_to_flags([{flag, val} | pairs], int, flags) do
case val &&& int do
^val ->
int_to_flags(pairs, int, [flag | flags])
_ ->
int_to_flags(pairs, int, flags)
end
end
defp flags_to_int(flags, int \\ 0)
defp flags_to_int([], int),
do: int
defp flags_to_int([:root | flags], int),
do: flags_to_int(flags, int ||| @root)
defp flags_to_int([:debug | flags], int),
do: flags_to_int(flags, int ||| @debug)
defp flags_to_int([:sampling_known | flags], int),
do: flags_to_int(flags, int ||| @sampling_known)
defp flags_to_int([:sampled | flags], int),
do: flags_to_int(flags, int ||| @sampled)
end
|
lib/mux/trace.ex
| 0.845528
| 0.453806
|
trace.ex
|
starcoder
|
defmodule Ameritrade.Order do
  @moduledoc false
  @derive Jason.Encoder
  # NOTE(review): `PriceLinkBasis` / `PriceLinkType` below are capitalized,
  # unlike the camelCase `priceLinkBasis` / `priceLinkType` used by the
  # sibling structs in this file — presumably this mirrors the upstream JSON
  # payload; confirm against the TD Ameritrade API schema before renaming.
  defstruct session: nil,
            duration: nil,
            orderType: nil,
            cancelTime: nil,
            complexOrderStrategyType: nil,
            quantity: 0,
            filledQuantity: 0,
            remainingQuantity: 0,
            requestedDestination: nil,
            destinationLinkName: nil,
            releaseTime: nil,
            stopPrice: 0,
            stopPriceLinkBasis: nil,
            stopPriceLinkType: nil,
            stopPriceOffset: 0,
            stopType: nil,
            PriceLinkBasis: nil,
            PriceLinkType: nil,
            price: nil,
            taxLotMethod: nil,
            orderLegCollection: [],
            activationPrice: 0,
            specialInstruction: nil,
            orderStrategyType: nil,
            orderId: nil,
            cancelable: false,
            editable: false,
            status: nil,
            enteredTime: nil,
            closeTime: nil,
            accountId: 0,
            orderActivityCollection: [],
            replacingOrderCollection: [],
            childOrderStrategies: [],
            statusDescription: nil
end
defmodule Ameritrade.Order.Saved do
@moduledoc false
@derive Jason.Encoder
defstruct session: nil,
duration: nil,
orderType: nil,
cancelTime: nil,
complexOrderStrategyType: nil,
quantity: 0,
filledQuantity: 0,
remainingQuantity: 0,
requestedDestination: nil,
destinationLinkName: nil,
releaseTime: nil,
stopPrice: 0,
stopPriceLinkBasis: nil,
stopPriceLinkType: nil,
stopPriceOffset: 0,
stopType: nil,
priceLinkBasis: nil,
priceLinkType: nil,
price: 0,
taxLotMethod: nil,
orderLegCollection: [],
activationPrice: 0,
specialInstruction: nil,
orderStrategyType: nil,
orderId: 0,
cancelable: false,
editable: false,
status: nil,
enteredTime: nil,
closeTime: nil,
tag: nil,
accountId: 0,
orderActivityCollection: [],
replacingOrderCollection: [],
childOrderStrategies: [],
statusDescription: nil,
savedOrderId: 0,
savedTime: nil
end
defmodule Ameritrade.Order.Strategies do
@moduledoc false
@derive Jason.Encoder
defstruct session: nil,
duration: nil,
orderType: nil,
cancelTime: nil,
complexOrderStrategyType: nil,
quantity: 0,
filledQuantity: 0,
remainingQuantity: 0,
requestedDestination: nil,
destinationLinkName: nil,
releaseTime: nil,
stopPrice: 0,
stopPriceLinkBasis: nil,
stopPriceLinkType: nil,
stopPriceOffset: 0,
stopType: nil,
priceLinkBasis: nil,
priceLinkType: nil,
price: 0,
taxLotMethod: nil,
orderLegCollection: []
end
defmodule Ameritrade.Order.Leg.Collection do
@moduledoc false
@derive Jason.Encoder
defstruct orderLegType: nil,
legId: 0,
instrument: nil,
instruction: nil,
positionEffect: nil,
quantity: 0,
quantityType: nil
end
|
lib/schema/order.ex
| 0.594316
| 0.423339
|
order.ex
|
starcoder
|
defmodule Firebirdex do
alias Firebirdex.Query
@type conn :: DBConnection.conn()
@spec start_link(keyword()) :: {:ok, pid()} | {:error, Firebirdex.Error.t()}
def start_link(opts) do
DBConnection.start_link(Firebirdex.Protocol, opts)
end
  @doc """
  Runs a statement with `params` against the connection.

  The statement is prepared and executed in one step via
  `DBConnection.prepare_execute/4`. Returns `{:ok, result}` on success or
  `{:error, reason}` on failure.
  """
  @spec query(conn, iodata, list, keyword()) ::
          {:ok, Firebirdex.Result.t()} | {:error, Firebirdex.Error.t()}
  def query(conn, statement, params \\ [], opts \\ []) do
    # NOTE(review): unlike prepare/3 and prepare_execute/5 below, no
    # `ref: make_ref()` is set here — presumably fine for one-off anonymous
    # queries; confirm against Firebirdex.Query's use of `ref`.
    query = %Query{name: "", statement: statement}
    case DBConnection.prepare_execute(conn, query, params, opts) do
      {:ok, _, result} ->
        {:ok, result}
      {:error, _} = error ->
        error
    end
  end
@spec query!(conn, iodata, list, keyword()) :: Firebirdex.Result.t()
def query!(conn, statement, params \\ [], opts \\ []) do
case query(conn, statement, params, opts) do
{:ok, result} -> result
{:error, exception} -> raise exception
end
end
@spec prepare(conn(), iodata(), iodata(), keyword()) ::
{:ok, Firebirdex.Query.t()} | {:error, Firebirdex.Error.t()}
def prepare(conn, name, statement, opts \\ []) do
query = %Firebirdex.Query{name: name, statement: statement, ref: make_ref()}
DBConnection.prepare(conn, query, opts)
end
@spec prepare!(conn(), iodata(), iodata(), keyword()) :: Firebirdex.Query.t()
def prepare!(conn, name, statement, opts \\ []) do
query = %Firebirdex.Query{name: name, statement: statement, ref: make_ref()}
DBConnection.prepare!(conn, query, opts)
end
@spec prepare_execute(conn, iodata, iodata, list, keyword()) ::
{:ok, Firebirdex.Query.t(), Firebirdex.Result.t()} | {:error, Firebirdex.Error.t()}
def prepare_execute(conn, name, statement, params \\ [], opts \\ [])
when is_binary(statement) or is_list(statement) do
query = %Firebirdex.Query{name: name, statement: statement, ref: make_ref()}
DBConnection.prepare_execute(conn, query, params, opts)
end
@spec prepare_execute!(conn, iodata, iodata, list, keyword()) ::
{Firebirdex.Query.t(), Firebirdex.Result.t()}
def prepare_execute!(conn, name, statement, params \\ [], opts \\ [])
when is_binary(statement) or is_list(statement) do
query = %Firebirdex.Query{name: name, statement: statement, ref: make_ref()}
DBConnection.prepare_execute!(conn, query, params, opts)
end
@spec execute(conn(), Firebirdex.Query.t(), list(), keyword()) ::
{:ok, Firebirdex.Query.t(), Firebirdex.Result.t()} | {:error, Firebirdex.Error.t()}
defdelegate execute(conn, query, params \\ [], opts \\ []), to: DBConnection
@spec execute!(conn(), Firebirdex.Query.t(), list(), keyword()) :: Firebirdex.Result.t()
defdelegate execute!(conn, query, params \\ [], opts \\ []), to: DBConnection
@spec close(conn(), Firebirdex.Query.t(), keyword()) :: :ok
def close(conn, %Firebirdex.Query{} = query, opts \\ []) do
case DBConnection.close(conn, query, opts) do
{:ok, _} ->
:ok
{:error, _} = error ->
error
end
end
@spec transaction(conn, (DBConnection.t() -> result), keyword()) ::
{:ok, result} | {:error, any}
when result: var
defdelegate transaction(conn, fun, opts \\ []), to: DBConnection
@spec rollback(DBConnection.t(), any()) :: no_return()
defdelegate rollback(conn, reason), to: DBConnection
@spec child_spec(keyword()) :: Supervisor.child_spec()
def child_spec(opts) do
DBConnection.child_spec(Firebirdex.Protocol, opts)
end
end
|
lib/firebirdex.ex
| 0.766643
| 0.419529
|
firebirdex.ex
|
starcoder
|
defmodule AMQP.Confirm do
@moduledoc """
Functions that work with publisher confirms (RabbitMQ extension to AMQP
0.9.1).
"""
import AMQP.Core
alias AMQP.{Basic, Channel}
@doc """
Activates publishing confirmations on the channel.
"""
@spec select(Channel.t()) :: :ok | Basic.error()
def select(%Channel{pid: pid}) do
case :amqp_channel.call(pid, confirm_select()) do
confirm_select_ok() -> :ok
error -> {:error, error}
end
end
@doc """
Wait until all messages published since the last call have been either ack'd
or nack'd by the broker.
Same as `wait_for_confirms/2` but with the default timeout of 60 seconds.
"""
@spec wait_for_confirms(Channel.t()) :: boolean | :timeout
def wait_for_confirms(%Channel{pid: pid}) do
:amqp_channel.wait_for_confirms(pid)
end
defguardp is_int_timeout(timeout) when is_integer(timeout) and timeout >= 0
# The typespec for this timeout is:
# non_neg_integer() | {non_neg_integer(), :second | :millisecond}
defguardp is_wait_for_confirms_timeout(timeout)
when is_int_timeout(timeout) or
(is_tuple(timeout) and tuple_size(timeout) == 2 and
is_int_timeout(elem(timeout, 0)) and
elem(timeout, 1) in [:second, :millisecond])
@doc """
Wait until all messages published since the last call have been either ack'd
or nack'd by the broker, or until timeout elapses.
Returns `true` if all messages are ack'd. Returns `false` if *any* of the messages
are nack'd. Returns `:timeout` on timeouts.
`timeout` can be an integer or a tuple with the "time unit" (see the spec). If just an integer
is provided, it's assumed to be *in seconds*. This is unconventional Elixir/Erlang API
(since usually the convention is milliseconds), but we are forwarding to the underlying
AMQP Erlang library here and it would be a breaking change for this library to default
to milliseconds.
"""
@spec wait_for_confirms(
Channel.t(),
non_neg_integer | {non_neg_integer, :second | :millisecond}
) :: boolean | :timeout
def wait_for_confirms(%Channel{pid: pid}, timeout) when is_wait_for_confirms_timeout(timeout) do
:amqp_channel.wait_for_confirms(pid, timeout)
end
@doc """
Wait until all messages published since the last call have been either ack'd
or nack'd by the broker, or until timeout elapses.
If any of the messages were nack'd, the calling process dies.
Same as `wait_for_confirms_or_die/2` but with the default timeout of 60 seconds.
"""
@spec wait_for_confirms_or_die(Channel.t()) :: true
def wait_for_confirms_or_die(%Channel{pid: pid}) do
:amqp_channel.wait_for_confirms_or_die(pid)
end
@spec wait_for_confirms_or_die(
Channel.t(),
non_neg_integer | {non_neg_integer, :second | :millisecond}
) :: true
def wait_for_confirms_or_die(%Channel{pid: pid}, timeout)
when is_wait_for_confirms_timeout(timeout) do
:amqp_channel.wait_for_confirms_or_die(pid, timeout)
end
@doc """
On channel with confirm activated, return the next message sequence number.
To use in combination with `register_handler/2`
"""
@spec next_publish_seqno(Channel.t()) :: non_neg_integer
def next_publish_seqno(%Channel{pid: pid}) do
:amqp_channel.next_publish_seqno(pid)
end
@doc """
Register a handler for confirms on channel.
The handler will receive either:
* `{:basic_ack, seqno, multiple}`
* `{:basic_nack, seqno, multiple}`
The `seqno` (delivery_tag) is an integer, the sequence number of the message.
`multiple` is a boolean, when `true` means multiple messages confirm, up to
`seqno`.
See https://www.rabbitmq.com/confirms.html
"""
@spec register_handler(Channel.t(), pid) :: :ok
def register_handler(%Channel{} = chan, handler_pid) do
:amqp_channel.call_consumer(chan.pid, {:register_confirm_handler, chan, handler_pid})
end
@doc """
Remove the return handler.
It does nothing if there is no such handler.
"""
@spec unregister_handler(Channel.t()) :: :ok
def unregister_handler(%Channel{pid: pid}) do
# Currently we don't remove the receiver.
# The receiver will be deleted automatically when channel is closed.
:amqp_channel.unregister_confirm_handler(pid)
end
end
|
lib/amqp/confirm.ex
| 0.820972
| 0.424203
|
confirm.ex
|
starcoder
|
defmodule HandimanApi.User do
  use HandimanApi.Web, :model

  import Ecto.Changeset, only: [put_change: 3]

  # Maps the number of recorded rounds (5..20) to the number of best
  # differentials used when computing a handicap (USGA-style lookup table).
  @differentials %{
    5 => 1,
    6 => 1,
    7 => 2,
    8 => 2,
    9 => 3,
    10 => 3,
    11 => 4,
    12 => 4,
    13 => 5,
    14 => 5,
    15 => 6,
    16 => 6,
    17 => 7,
    18 => 8,
    19 => 9,
    20 => 10
  }

  schema "users" do
    field :email, :string
    field :name, :string
    field :encrypted_password, :string
    field :authentication_token, :string
    # Virtual fields: accepted in params, never persisted.
    field :password, :string, virtual: true
    field :password_confirmation, :string, virtual: true
    has_many :rounds, HandimanApi.Round

    timestamps()
  end

  @required_fields ~w(email name password password_confirmation)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.

  If no params are provided, an invalid changeset is returned
  with no validation performed.
  """
  def changeset(model, params \\ :empty) do
    model
    |> cast(params, @required_fields, @optional_fields)
    |> unique_constraint(:email, on: HandimanApi.Repo, downcase: true)
    |> validate_length(:name, min: 1)
    |> validate_length(:password, min: 1)
    |> validate_length(:password_confirmation, min: 1)
    |> validate_confirmation(:password)
  end

  @doc """
  Creates the user in the database with the password encrypted.
  """
  def create(changeset) do
    changeset
    |> put_change(:encrypted_password, hashed_password(changeset.params["password"]))
    |> HandimanApi.Repo.insert
  end

  @doc """
  Adds a preload of the user's rounds to the given query.
  """
  def with_rounds(query) do
    from q in query, preload: [:rounds]
  end

  defp hashed_password(password) do
    # NOTE(review): the original call was corrupted ("hash<PASSWORD>alt");
    # Comeonin.Bcrypt.hashpwsalt/1 is the library's salted bcrypt hash —
    # confirm against the project's Comeonin version.
    Comeonin.Bcrypt.hashpwsalt(password)
  end

  @doc """
  Calculate a handicap for the user defined by the user_id. Use the count to
  determine the number of differentials to consider when calculating the
  handicap.

  Returns `{:ok, handicap}`, or `{:error, message}` when fewer than 5 rounds
  have been recorded.
  """
  def calculate_handicap(count, user_id) do
    if count < 5 do
      {:error, "User needs more rounds before a handicap can be calculated"}
    else
      # Cap at 20: only the 20 most recent rounds ever count. The original
      # `if count > 20, do: count = 20` rebinding never escaped the `if`
      # scope, so counts above 20 produced a nil @differentials lookup.
      count = min(count, 20)
      num_differentials = @differentials[count]

      query = from u in HandimanApi.User,
        join: r in assoc(u, :rounds),
        where: r.user_id == u.id,
        where: u.id == "#{user_id}",
        select: r.differential,
        order_by: [desc: r.inserted_at],
        limit: "#{num_differentials}"

      diffs = HandimanApi.Repo.all(query)
      # TODO move sum into query.
      diff_sum = Enum.sum(diffs)
      {:ok, Float.floor(diff_sum / num_differentials * 0.96, 1)}
    end
  end

  @doc """
  Get the number of rounds a user has.
  """
  def round_count(user_id) do
    query = from u in HandimanApi.User, join: r in assoc(u, :rounds), where: r.user_id == "#{user_id}", select: count(r.id)
    HandimanApi.Repo.one(query)
  end

  @doc """
  Get the number of unique courses the user has played.
  """
  def unique_course_count(user_id) do
    query = from u in HandimanApi.User,
      join: r in assoc(u, :rounds),
      join: t in assoc(r, :tee),
      join: c in assoc(t, :course),
      where: u.id == "#{user_id}", select: count(fragment("DISTINCT ?", c.name))
    HandimanApi.Repo.one(query)
  end

  @doc """
  Loads the user with rounds, tees and courses preloaded in one query.
  """
  def with_preloaded_assoc(user_id) do
    query = from u in HandimanApi.User,
      join: r in assoc(u, :rounds),
      join: t in assoc(r, :tee),
      join: c in assoc(t, :course),
      where: u.id == "#{user_id}", preload: [rounds: {r, tee: {t, rounds: r, course: c}}]
    HandimanApi.Repo.one(query)
  end
end
|
web/models/user.ex
| 0.513668
| 0.431105
|
user.ex
|
starcoder
|
defmodule Day24 do
  @moduledoc """
  Advent of Code 2015, day 24: split a list of package weights into
  equal-weight groups and minimise the "quantum entanglement" (product)
  of the smallest first group.
  """

  @doc "Balance the weights across three groups; return the best QE."
  def part1(input) do
    input |> parse() |> best_entanglement(3)
  end

  @doc "Balance the weights across four groups; return the best QE."
  def part2(input) do
    input |> parse() |> best_entanglement(4)
  end

  # Shared driver: find the passenger group with the fewest packages (and,
  # among those, the smallest product) summing to total/groups.
  defp best_entanglement(weights, groups) do
    target = div(Enum.sum(weights), groups)

    weights
    |> init_queue()
    |> all_solutions(target, :infinity, :infinity)
    |> Enum.sort()
    |> hd()
    |> elem(1)
    |> quantum_entanglement()
  end

  # Enumerate improving solutions: each emitted entry has a strictly lower
  # quantum entanglement than the previous one, and later searches are
  # bounded by the smallest group size seen so far.
  defp all_solutions(queue, target, max_size, best_qe) do
    case solve(queue, target, max_size) do
      nil ->
        []

      {{chosen, _rest}, queue} ->
        qe = quantum_entanglement(chosen)

        if qe < best_qe do
          [{qe, chosen} | all_solutions(queue, target, min(max_size, length(chosen)), qe)]
        else
          all_solutions(queue, target, max_size, best_qe)
        end
    end
  end

  # Seed the priority queue with one entry per weight, largest first. Each
  # seed excludes itself and every larger weight, so every subset is
  # generated exactly once (only ever extended with smaller weights).
  defp init_queue(weights) do
    sorted = weights |> Enum.sort() |> Enum.reverse()

    sorted
    |> Enum.map_reduce(sorted, fn weight, remaining ->
      remaining = List.delete(remaining, weight)
      {{weight, [weight], remaining}, remaining}
    end)
    |> elem(0)
    |> :gb_sets.from_list()
  end

  # Pop the heaviest partial subset; return it when it hits the target,
  # expand it while it is still light and small enough, drop it otherwise.
  defp solve(queue, target, max_size) do
    if :gb_sets.is_empty(queue) do
      nil
    else
      {{sum, included, excluded}, queue} = :gb_sets.take_largest(queue)

      cond do
        sum == target ->
          {{included, excluded}, queue}

        sum < target and length(included) < max_size ->
          queue
          |> add_to_queue(sum, included, excluded, target)
          |> solve(target, max_size)

        true ->
          solve(queue, target, max_size)
      end
    end
  end

  # Push back one extension of the subset for every still-available weight
  # that keeps the running sum within the target.
  defp add_to_queue(queue, sum, included, excluded, target) do
    Enum.reduce(excluded, queue, fn weight, acc ->
      extended_sum = sum + weight

      if extended_sum <= target do
        entry = {extended_sum, [weight | included], List.delete(excluded, weight)}
        :gb_sets.insert(entry, acc)
      else
        acc
      end
    end)
  end

  # Product of all weights in the group.
  defp quantum_entanglement(weights) do
    Enum.reduce(weights, 1, &Kernel.*/2)
  end

  # Each input line is one integer weight.
  defp parse(input) do
    for line <- input, do: String.to_integer(line)
  end
end
|
day24/lib/day24.ex
| 0.653016
| 0.584508
|
day24.ex
|
starcoder
|
defmodule ElixirExamples.Fraction do
  @moduledoc """
  This module handles the creation of and the operations upon fractions.
  """

  defstruct numerator: nil, denominator: nil

  alias ElixirExamples.Fraction

  @type t :: %Fraction{numerator: integer, denominator: integer}

  @doc """
  Creates a new fraction for a given numerator and denominator.

  Raises a `RuntimeError` when the denominator is zero.
  """
  @spec new!(integer, integer) :: t
  # Guard (`== 0`) rather than a literal `0` pattern so a 0.0 denominator
  # is also rejected, matching the original equality check.
  def new!(_numerator, denominator) when denominator == 0 do
    raise("The denominator cannot be 0!")
  end

  def new!(numerator, denominator) do
    # Struct instance => %Module{key: value}
    %Fraction{numerator: numerator, denominator: denominator}
  end

  @doc """
  Returns the floating point representation for a given fraction.
  """
  @spec to_float(t) :: float
  def to_float(%Fraction{numerator: n, denominator: d}) do
    n / d
  end

  @doc """
  Returns the floating point representation for a given numerator and denominator.
  """
  @spec to_float(integer, integer) :: float
  def to_float(numerator, denominator), do: to_float(new!(numerator, denominator))

  @doc """
  Adds two fractions together. Fractions are not required to have common denominators.
  """
  @spec add(t, t) :: t
  def add(%Fraction{numerator: n1, denominator: d1}, %Fraction{numerator: n2, denominator: d2}) do
    add!({n1, d1}, {n2, d2})
  end

  @doc """
  Adds two numerator and denominator tuples. Fractions are not required to have common denominators.
  """
  @spec add!(tuple, tuple) :: t
  def add!({n1, d1}, {n2, d2}), do: new!(n1 * d2 + n2 * d1, d2 * d1)

  @doc """
  Subtracts one fraction from another. Fractions are not required to have common denominators.
  """
  @spec subtract(t, t) :: t
  def subtract(%Fraction{numerator: n1, denominator: d1}, %Fraction{numerator: n2, denominator: d2}) do
    subtract!({n1, d1}, {n2, d2})
  end

  @doc """
  Subtracts one fraction tuple representation from another. Fractions are not required to have common denominators.
  """
  @spec subtract!(tuple, tuple) :: t
  def subtract!({n1, d1}, {n2, d2}), do: new!(n1 * d2 - n2 * d1, d2 * d1)

  @doc """
  Multiplies two fractions together.
  """
  @spec multiply(t, t) :: t
  def multiply(%Fraction{numerator: n1, denominator: d1}, %Fraction{numerator: n2, denominator: d2}) do
    multiply!({n1, d1}, {n2, d2})
  end

  @doc """
  Multiplies two tuples representing fractions together.
  """
  @spec multiply!(tuple, tuple) :: t
  def multiply!({n1, d1}, {n2, d2}), do: new!(n1 * n2, d1 * d2)

  @doc """
  Divides one fraction from another. Multiplies the first fraction with the reciprocal of the second.

  NOTE(review): unlike `add`/`subtract`/`multiply`, the struct variant is
  named `divide!` — the name is kept for backward compatibility.
  """
  @spec divide!(t, t) :: t
  def divide!(%Fraction{numerator: n1, denominator: d1}, %Fraction{numerator: n2, denominator: d2}) do
    divide!({n1, d1}, {n2, d2})
  end

  @doc """
  Divides one fraction tuple representation from another. Multiplies the first fraction with the reciprocal of the second.
  """
  @spec divide!(tuple, tuple) :: t
  def divide!({n1, d1}, {n2, d2}), do: new!(n1 * d2, n2 * d1)
end
|
lib/elixir_examples/fraction.ex
| 0.932099
| 0.586878
|
fraction.ex
|
starcoder
|
defmodule ExDoubles do
  @moduledoc """
  A small test-double library: create mock functions, stub their return
  values, and verify calls by count or by arguments.
  """

  alias ExDoubles.{ErrorMessages, State, ListenerFactory}

  @type call_count_matcher :: %{times: integer}
  @type argument_matcher :: %{called_with: list(any())}

  @doc """
  Creates a mock function of the given arity, registered under `name`.

  Returns `{:ok, fun}` where `fun` records each invocation in `State`.
  """
  @spec mock(atom, integer) :: {:ok, function}
  def mock(name, arity) do
    listener_fn =
      ListenerFactory.make_listener(arity, fn args ->
        State.invoke_function(name, args)
      end)

    :ok = State.add_mock(%{name: name, arity: arity})
    {:ok, listener_fn}
  end

  @doc """
  Allows the definition of stubbed values for a mocked function.
  ```elixir
  test "returns stubbed value from a mock" do
    {:ok, mock_fn} = mock(:mock_label, 0)
    when_called(:mock_label, :stub_value)
    assert :stub_value == mock_fn.()
  end
  ```
  It is possible to define multiple stub values. These are values are returned by the mock in the order they were passed to `when_called`.
  ```elixir
  test "returns stubbed values in the order they were passed to `when_called`" do
    {:ok, mock_fn} = mock(:mock_label, 0)
    when_called(:mock_label, :stub_value_1)
    when_called(:mock_label, :stub_value_2)
    when_called(:mock_label, :stub_value_3)
    assert :stub_value_1 == mock_fn.()
    assert :stub_value_2 == mock_fn.()
    assert :stub_value_3 == mock_fn.()
  end
  ```
  """
  # Fixed: this spec was previously declared for `mock/2` by mistake.
  @spec when_called(atom, any) :: :ok
  def when_called(name, stub_value) do
    :ok = State.add_stub(name, stub_value)
  end

  @doc """
  Verifies that the mock registered under `name` was called with `args`.

  Returns `true` on success; raises when the call was not recorded or when
  the mock has arity 0.
  """
  # Fixed: the matcher specs for the two verify/2 clauses were swapped —
  # this clause takes an argument_matcher, the one below a call_count_matcher.
  @spec verify(atom(), argument_matcher) :: bool
  def verify(name, %{called_with: args}) do
    %{arity: arity, calls: calls} = State.get_mock(name)

    case arity do
      0 ->
        raise ErrorMessages.unsupported_call()

      _ ->
        if matches?(calls, args) do
          true
        else
          # NOTE(review): unlike the call-count clause below, this path does
          # not call State.stop() before raising — confirm intent.
          raise ErrorMessages.not_called_error(name, args, calls)
        end
    end
  end

  @doc """
  Verifies that the mock registered under `name` was called exactly `n` times.

  Returns `true` on success; stops the state server and raises otherwise.
  """
  @spec verify(atom(), call_count_matcher) :: bool
  def verify(name, %{times: n}) do
    call_count = State.call_count(name)

    if call_count == n do
      true
    else
      State.stop()
      raise ErrorMessages.call_count_incorrect(n, call_count)
    end
  end

  # True when `args` appears among the recorded calls.
  defp matches?(calls, args) do
    calls
    |> Enum.find_index(fn c -> c == args end)
    |> is_integer()
  end

  @spec once :: call_count_matcher
  def once() do
    %{times: 1}
  end

  @spec twice :: call_count_matcher
  def twice() do
    %{times: 2}
  end

  @spec thrice :: call_count_matcher
  def thrice() do
    %{times: 3}
  end

  @spec times(integer) :: call_count_matcher
  def times(n) do
    %{times: n}
  end

  @spec called_with(list(any)) :: argument_matcher
  def called_with(args) do
    %{called_with: args}
  end
end
|
lib/ex_doubles.ex
| 0.841826
| 0.884139
|
ex_doubles.ex
|
starcoder
|
defmodule BSV.Mnemonic do
  @moduledoc """
  Module for generating and restoring mnemonic phrases, for the generation of
  deterministic keys. Implements BIP-39.

  A mnemonic phrase is a group of easy to remember words. The phrase can be
  converted to a binary seed, which in turn is used to generate deterministic
  keys.
  """
  alias BSV.Crypto.Hash
  alias BSV.Util

  @typedoc "Mnemonic phrase"
  @type t :: String.t

  # Word list loaded at compile time from priv/words.txt.
  # Presumably the standard 2048-word BIP-39 list (11 bits per word) —
  # TODO confirm against priv/words.txt.
  @words :code.priv_dir(:bsv)
  |> Path.join("words.txt")
  |> File.stream!()
  |> Stream.map(&String.trim/1)
  |> Enum.to_list()

  # Entropy sizes (bits) permitted by BIP-39: 128..256 in 32-bit steps.
  @allowed_lengths [128, 160, 192, 224, 256]

  # PBKDF2 iteration count mandated by BIP-39 for seed derivation.
  @rounds 2048

  @doc """
  Returns the mnemonic word list.
  """
  @spec words :: list
  def words, do: @words

  @doc """
  Returns a list of the allowed mnemonic entropy lengths.
  """
  @spec allowed_lengths :: list
  def allowed_lengths, do: @allowed_lengths

  @doc """
  Generates a new mnemonic phrase from the given entropy length (defaults to
  256 bits / 24 words).

  Returns an `{:error, message}` tuple when the entropy length is not one of
  `allowed_lengths/0`.

  ## Examples
      iex> BSV.Mnemonic.generate
      ...> |> String.split
      ...> |> length
      24
      iex> BSV.Mnemonic.generate(128)
      ...> |> String.split
      ...> |> length
      12
  """
  @spec generate(integer) :: __MODULE__.t
  def generate(entropy_length \\ List.last(@allowed_lengths))

  def generate(entropy_length)
    when not (entropy_length in @allowed_lengths),
    do: {:error, "Entropy length must be one of #{inspect(@allowed_lengths)}"}

  def generate(entropy_length) do
    # Entropy length is specified in bits; random_bytes wants bytes.
    div(entropy_length, 8)
    |> Util.random_bytes
    |> from_entropy
  end

  @doc """
  Returns a mnemonic phrase derived from the given binary.

  ## Examples
      iex> BSV.Test.mnemonic_entropy
      ...> |> BSV.Mnemonic.from_entropy
      "organ boring cushion feature wheat juice quality replace concert baby topic scrub"
  """
  @spec from_entropy(binary) :: __MODULE__.t
  def from_entropy(entropy) when is_binary(entropy) do
    # BIP-39: append the checksum bits to the raw entropy before mapping
    # 11-bit chunks to words.
    <<entropy::bits, checksum(entropy)::bits>>
    |> mnemonic
  end

  @doc """
  Returns a binary derived from the given mnemonic phrase.

  ## Examples
      iex> "organ boring cushion feature wheat juice quality replace concert baby topic scrub"
      ...> |> BSV.Mnemonic.to_entropy
      <<156, 99, 60, 217, 170, 31, 158, 241, 171, 205, 182, 46, 162, 35, 148, 96>>
  """
  @spec to_entropy(__MODULE__.t) :: binary
  def to_entropy(mnemonic) do
    String.split(mnemonic)
    |> Enum.map(&word_index/1)
    |> entropy
  end

  @doc """
  Returns a wallet seed derived from the given mnemonic phrase and optionally a
  passphrase.

  ## Options
  The accepted options are:
  * `:passphrase` - Optionally protect the seed with an additional passphrase
  * `:encoding` - Optionally encode the seed with either the `:base64` or `:hex` encoding scheme.

  ## Examples
      iex> BSV.Mnemonic.from_entropy(BSV.Test.mnemonic_entropy)
      ...> |> BSV.Mnemonic.to_seed(encoding: :hex)
      "<KEY>"
      iex> BSV.Mnemonic.from_entropy(BSV.Test.mnemonic_entropy)
      ...> |> BSV.Mnemonic.to_seed(passphrase: "<PASSWORD>")
      ...> |> byte_size
      64
  """
  @spec to_seed(__MODULE__.t, keyword) :: binary
  def to_seed(mnemonic, options \\ []) do
    passphrase = Keyword.get(options, :passphrase, "")
    encoding = Keyword.get(options, :encoding)
    # BIP-39 seed: PBKDF2-HMAC-SHA512 over the mnemonic with salt
    # "mnemonic" <> passphrase; the trailing 1::integer-32 is the PBKDF2
    # block index for the first (and only) 64-byte output block.
    # NOTE(review): Hash.hmac/3 argument order (data vs key) is assumed
    # from this call site — confirm against BSV.Crypto.Hash.
    <<"mnemonic", passphrase::binary, 1::integer-32>>
    |> Hash.hmac(:sha512, mnemonic)
    |> pbkdf2(mnemonic)
    |> Util.encode(encoding)
  end

  # BIP-39 checksum: the first ENT/32 bits of SHA-256(entropy).
  defp checksum(entropy) do
    with size <- bit_size(entropy) |> div(32),
      <<checksum::bits-size(size), _::bits>> <- Hash.sha256(entropy),
      do: checksum
  end

  # Map each 11-bit chunk of entropy+checksum to its word in the list.
  defp mnemonic(entropy) do
    chunks = for <<chunk::11 <- entropy>>, do: Enum.at(words(), chunk)
    Enum.join(chunks, " ")
  end

  # Rebuild the 11-bit indices into a bitstring, then strip the checksum:
  # with CS = ENT/32, the entropy is bits * 32/33 of the total.
  defp entropy(indices) do
    bytes = for i <- indices, into: <<>>, do: <<i::11>>
    with size = bit_size(bytes) |> Kernel.*(32) |> div(33),
      <<entropy::bits-size(size), _::bits>> <- bytes,
      do: entropy
  end

  # Linear scan of the word list; returns nil for unknown words.
  defp word_index(word), do: Enum.find_index(words(), &(&1 == word))

  # PBKDF2 with a single output block: the first HMAC block doubles as both
  # the initial U1 and the initial XOR accumulator (round 1 of @rounds).
  defp pbkdf2(hmac_block, mnemonic) do
    iterate(mnemonic, 1, hmac_block, hmac_block)
  end

  # Stop after @rounds total HMAC applications (round 1 was done in pbkdf2/2).
  defp iterate(_mnemonic, round_num, _hmac_block, result)
    when round_num == @rounds,
    do: result

  # Each round: U(n) = HMAC(U(n-1)); accumulate result via XOR.
  defp iterate(mnemonic, round_num, hmac_block, result) do
    with next_block <- Hash.hmac(hmac_block, :sha512, mnemonic),
      result <- :crypto.exor(next_block, result),
      do: iterate(mnemonic, round_num + 1, next_block, result)
  end
end
|
lib/bsv/mnemonic.ex
| 0.846435
| 0.493653
|
mnemonic.ex
|
starcoder
|
defmodule ResponseSnapshot.Changes do
  @moduledoc """
  Structure for changes of a diff. An object which is completely identical will
  be equal to `Changes.empty()`. Any modifications, additions, and removals are
  accessible from this struct.

  Object traversal is notated with a `.`. Array traversal is notated with an `_`.
  For example, the following would indicate that key "a" (list), index 1 was added:
  additions: ["a_1"].

  Struct fields:
  * `additions` - paths that were added versus what is stored in the fixture
  * `removals` - paths that were removed versus what is stored in the fixture
  * `modifications` - paths that were modified versus what is stored in the fixture
  * `mode` - internal helper state used by `insert_by_mode/2`; nil otherwise
  """

  @enforce_keys [:additions, :removals, :modifications]

  # Fixed: the field documentation previously sat in a dangling @doc before
  # defstruct (never attached, compiler warning); it now lives in @moduledoc.
  defstruct @enforce_keys ++ [mode: nil]

  @doc false
  def empty() do
    %__MODULE__{additions: [], removals: [], modifications: []}
  end

  # The three accumulator helpers pattern-match the key they update, so the
  # update syntax (which requires the key to exist) is safe and keeps the
  # struct guarantees intact.

  @doc false
  def addition(changes = %{additions: additions}, path) do
    %{changes | additions: List.flatten([path | additions])}
  end

  @doc false
  def removal(changes = %{removals: removals}, path) do
    %{changes | removals: List.flatten([path | removals])}
  end

  @doc false
  def modification(changes = %{modifications: modifications}, path) do
    %{changes | modifications: List.flatten([path | modifications])}
  end

  @doc false
  def clear(changes, :modifications) do
    # Map.put kept here: this clause does not match the key's presence.
    Map.put(changes, :modifications, [])
  end

  @doc false
  def set_mode(changes, mode) when mode in [:addition, :removal, :modification, nil] do
    Map.put(changes, :mode, mode)
  end

  # Dispatch an insertion according to the currently set mode; a nil mode
  # records the path as a modification.
  @doc false
  def insert_by_mode(changes = %{mode: :addition}, path), do: addition(changes, path)
  def insert_by_mode(changes = %{mode: :removal}, path), do: removal(changes, path)
  def insert_by_mode(changes = %{mode: :modification}, path), do: modification(changes, path)
  def insert_by_mode(changes = %{mode: nil}, path), do: modification(changes, path)
end
|
lib/response_snapshot/changes.ex
| 0.886948
| 0.526891
|
changes.ex
|
starcoder
|
defmodule TwoFactorInACan.Secrets do
  @moduledoc """
  Functions for generating cryptographic secrets.
  """

  @type secret_generation_opts :: keyword(atom() | integer())

  # 20 bytes (160 bits) is the secret size recommended by RFC 4226.
  @default_totp_secret_byte_size 20

  @doc """
  Generates a secret suitable for use in time based one time password (TOTP)
  two factor authentication.

  Generates a 160-bit key by default, which is the size recommended by
  RFC 4226 (https://www.ietf.org/rfc/rfc4226.txt).

  The following options are supported:
  - `:format` (Default: `:binary`) - The format of the generated secret.
    Options include:
    - `:binary` (default)
    - `:base32`
    - `:base64`
  - `:bytes` (Default: `20`) - The size in bytes of the generated secret. 20
    bytes (160 bits) is the size specified by RFC 4226. It is also the size
    supported by most authenticator applications.

  # Examples
      iex> TwoFactorInACan.Secrets.generate_totp_secret()
      <<195, 110, 253, 36, 185, 138, 174, 16, 54, 176, 135, 67, 97, 11, 159, 63, 75, 80, 65, 6>>
      iex> TwoFactorInACan.Secrets.generate_totp_secret(format: :base32)
      "F2EJJEYSJA67QHI6DEAI2I6AGCEG7G5E"
      iex> TwoFactorInACan.Secrets.generate_totp_secret(format: :base64)
      "xKXOSYcRVlHfnazLMlRinpb252U="
  """
  @spec generate_totp_secret(secret_generation_opts) :: binary()
  def generate_totp_secret(opts \\ []) do
    opts
    |> Keyword.get(:bytes, @default_totp_secret_byte_size)
    |> :crypto.strong_rand_bytes()
    |> format_secret(opts)
  end

  # Encodes the raw secret according to the :format option; raises on an
  # unrecognised format.
  @spec format_secret(binary(), secret_generation_opts) :: binary()
  defp format_secret(secret, opts) do
    format = Keyword.get(opts, :format, :binary)

    case format do
      :binary ->
        secret

      :base32 ->
        Base.encode32(secret)

      :base64 ->
        Base.encode64(secret)

      _ ->
        raise ArgumentError, """
        Invalid format supplied when generating secret:
        format: #{format}
        Valid options include:
        - :binary
        - :base32
        - :base64
        """
    end
  end
end
|
lib/secrets.ex
| 0.811228
| 0.509154
|
secrets.ex
|
starcoder
|
defmodule Form do
  @moduledoc """
  A collection of loosely related functions helpful for filling out various forms at the city office.
  """

  @doc """
  Generates a string of a given length.

  This string can be used to fill out a form field that is supposed to have no value.
  Such fields cannot be left empty because a malicious third party could fill them out with false data.
  """
  @spec blanks(non_neg_integer()) :: String.t()
  def blanks(count), do: String.duplicate("X", count)

  @doc """
  Splits the string into a list of uppercase letters.

  This is needed for form fields that don't offer a single input for the whole string,
  but instead require splitting the string into a predefined number of single-letter inputs.
  """
  @spec letters(String.t()) :: [String.t()]
  def letters(word) do
    word
    |> String.upcase()
    |> String.graphemes()
  end

  @doc """
  Checks if the value has no more than the maximum allowed number of letters.

  Returns `:ok` when the value fits, or `{:error, excess}` telling you by how
  much the value exceeds the maximum.
  """
  @spec check_length(String.t(), non_neg_integer()) :: :ok | {:error, pos_integer()}
  def check_length(word, max_length) do
    case String.length(word) - max_length do
      excess when excess > 0 -> {:error, excess}
      _ -> :ok
    end
  end

  @type address_map :: %{street: String.t(), postal_code: String.t(), city: String.t()}
  @type address_tuple :: {street :: String.t(), postal_code :: String.t(), city :: String.t()}
  @type address :: address_map | address_tuple

  @doc """
  Formats the address as an uppercase multiline string.
  """
  @spec format_address(address) :: String.t()
  def format_address(%{street: street, postal_code: postal_code, city: city}) do
    # Normalise the map form to the tuple form and reuse the clause below.
    format_address({street, postal_code, city})
  end

  def format_address({street, postal_code, city}) do
    """
    #{String.upcase(street)}
    #{String.upcase(postal_code)} #{String.upcase(city)}
    """
  end
end
|
exercism/city-office/form.ex
| 0.825519
| 0.595493
|
form.ex
|
starcoder
|
defmodule Cldr.Calendar.Composite do
  @moduledoc """
  A composite calendar is one in which a certain range of dates
  is interpreted in one calendar and a different range of dates
  is interpreted in a different calendar.

  The canonical example is the transition from the Julian to
  Gregorian calendar for European countries during the 16th to
  20th centuries.

  A configuration is simply a list of dates in the
  appropriate calendar indicating when the calendar transitions
  occur.

  For example, assuming England moved from the Julian to
  Gregorian calendar on `14th of September, 1752 Gregorian`
  then the configuration would be:

  ```elixir
  defmodule Cldr.Calendar.England do
    use Cldr.Calendar.Composite,
      calendars: [
        ~D[1752-09-14 Cldr.Calendar.Gregorian]
      ],
      base_calendar: Cldr.Calendar.Julian
  end
  ```

  The `:base_calendar` option indicates the calendar in use
  before any other configured calendars. The default is
  `Cldr.Calendar.Julian`.

  ### Julian to Gregorian transition

  One of the uses of this calendar is to define a calendar that
  reflects the transition from the Julian to the Gregorian calendar.
  Applicable primarily to western european countries and
  related colonies, the transition to the Gregorian calendar
  occurred between the 16th and 20th centuries. One strong
  reference is the [Perpetual Calendar](https://norbyhus.dk/calendar.php)
  site maintained by [<NAME>](mailto:<EMAIL>).
  An additional source of information is
  [Wikipedia](https://en.wikipedia.org/wiki/Adoption_of_the_Gregorian_calendar).

  ### Multiple compositions

  A more complex example composes more than one calendar. For example,
  Egypt used the [Coptic calendar](https://en.wikipedia.org/wiki/Coptic_calendar)
  from 238 BCE until Rome introduced the Julian calendar in approximately
  30 BCE. The Gregorian calendar was then introduced in 1875. Although the
  exact dates of introduction aren't known we can approximate the composition
  of calendars with:

  ```elixir
  defmodule Cldr.Calendar.Composite.Egypt do
    use Cldr.Calendar.Composite,
      calendars: [
        ~D[-0045-01-01 Cldr.Calendar.Julian],
        ~D[1875-09-01]
      ],
      base_calendar: Cldr.Calendar.Coptic
  end
  ```
  """

  alias Cldr.Calendar.Composite.Config

  # Stores the options for the compiler hook; the actual calendar callbacks
  # are generated by Cldr.Calendar.Composite.Compiler at @before_compile.
  defmacro __using__(options \\ []) do
    quote bind_quoted: [options: options] do
      require Cldr.Calendar.Composite.Compiler
      @options options
      @before_compile Cldr.Calendar.Composite.Compiler
    end
  end

  @doc """
  Creates a new composite calendar module at runtime.

  ## Arguments

  * `calendar_module` is any module name. This will be the
    name of the composite calendar if it is successfully
    created.

  * `options` is a keyword list of options.

  ## Options

  * `:calendars` is a list of dates representing the first
    new date at which a calendar is introduced. These dates
    should be expressed in the calendar of the new period.

  * `:base_calendar` is any calendar module that is used
    as the calendar for any dates prior to the first
    transition. The default is `Cldr.Calendar.Julian`.

  ## Returns

  * `{:ok, module}` or

  * `{:module_already_exists, calendar_module}`

  ## Examples

      iex> Cldr.Calendar.Composite.new Cldr.Calendar.Denmark,
      ...>   calendars: ~D[1700-03-01 Cldr.Calendar.Gregorian]
      {:ok, Cldr.Calendar.Denmark}

  """
  @spec new(module(), Keyword.t()) ::
          {:ok, Cldr.Calendar.calendar()} | {:module_already_exists, module()}

  def new(calendar_module, options) when is_atom(calendar_module) and is_list(options) do
    # Refuse to clobber an existing module of the same name.
    if Code.ensure_loaded?(calendar_module) do
      {:module_already_exists, calendar_module}
    else
      create_calendar(calendar_module, options)
    end
  end

  # Validates the options then defines the module dynamically; the generated
  # module simply `use`s this one, so the Compiler hook above builds it.
  defp create_calendar(calendar_module, config) do
    with {:ok, config} <- Config.validate_options(config) do
      contents =
        quote do
          use unquote(__MODULE__),
            unquote(Macro.escape(config))
        end

      {:module, module, _, :ok} =
        Module.create(calendar_module, contents, Macro.Env.location(__ENV__))

      {:ok, module}
    end
  end
end
|
lib/cldr/calendar/composite.ex
| 0.931626
| 0.953013
|
composite.ex
|
starcoder
|
defmodule Consul.Request do
  @moduledoc """
  This module is used to build an HTTP request
  """

  # Matches `{var}` and `{+var}` URI-template style placeholders.
  @path_template_regex ~r/{(\+?[^}]+)}/i

  defstruct method: :get, url: "", body: [], query: [], header: []

  @type param_location :: :body | :query | :header
  @type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch

  @type t :: %__MODULE__{
          method: method(),
          url: String.t(),
          body: keyword(),
          query: keyword(),
          header: keyword()
        }

  @doc """
  Returns an empty request with the default method (`:get`).
  """
  @spec new() :: Consul.Request.t()
  def new, do: %__MODULE__{}

  @spec method(Consul.Request.t()) :: {:ok, atom()} | :error
  def method(request), do: Map.fetch(request, :method)

  @doc """
  Specify the request method when building a request

  ## Parameters

  * `request` (*type:* `Consul.Request.t`) - Collected request options
  * `m` (*type:* `String`) - Request method

  ## Returns

  * `Consul.Request.t`
  """
  @spec method(Consul.Request.t(), atom()) :: Consul.Request.t()
  def method(request, m), do: %{request | method: m}

  @spec url(Consul.Request.t()) :: {:ok, String.t()} | :error
  def url(request), do: Map.fetch(request, :url)

  @doc """
  Specify the request URL when building a request

  ## Parameters

  * `request` (*type:* `Consul.Request.t`) - Collected request options
  * `u` (*type:* `String`) - Request URL, optionally containing `{var}` placeholders

  ## Returns

  * `Consul.Request.t`
  """
  @spec url(Consul.Request.t(), String.t(), map()) :: Consul.Request.t()
  def url(request, u, replacements) do
    url(request, replace_path_template_vars(u, replacements))
  end

  def url(request, u) do
    Map.put(request, :url, u)
  end

  # Substitutes every template placeholder in the URL with its replacement.
  defp replace_path_template_vars(u, replacements) do
    Regex.replace(@path_template_regex, u, fn _match, var ->
      replacement_value(var, replacements)
    end)
  end

  # A leading "+" (reserved expansion) additionally URI-decodes the value.
  defp replacement_value("+" <> name, replacements) do
    name
    |> replacement_value(replacements)
    |> URI.decode()
  end

  defp replacement_value(name, replacements) do
    replacements
    |> Map.get(name, "")
    |> to_string()
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  * `request` (*type:* `Consul.Request.t`) - Collected request options
  * `definitions` (*type:* `Map`) - Map of parameter name to parameter location
  * `options` (*type:* `keyword()`) - The provided optional parameters

  ## Returns

  * `Consul.Request.t`
  """
  @spec add_optional_params(
          Consul.Request.t(),
          %{optional(atom()) => param_location()},
          keyword()
        ) :: Consul.Request.t()
  def add_optional_params(request, _definitions, []), do: request

  def add_optional_params(request, definitions, options) do
    # Options without a matching definition are silently ignored.
    Enum.reduce(options, request, fn {key, value}, acc ->
      case Map.fetch(definitions, key) do
        {:ok, location} -> add_param(acc, location, key, value)
        :error -> acc
      end
    end)
  end

  @doc """
  Add optional parameters to the request

  ## Parameters

  * `request` (*type:* `Consul.Request.t`) - Collected request options
  * `location` (*type:* `atom()`) - Where to put the parameter
  * `key` (*type:* `atom()`) - The name of the parameter
  * `value` (*type:* `any()`) - The value of the parameter

  ## Returns

  * `Consul.Request.t`
  """
  @spec add_param(Consul.Request.t(), param_location(), atom(), any()) ::
          Consul.Request.t()
  def add_param(request, :query, key, values) when is_list(values) do
    # A list of values becomes one query entry per element.
    Enum.reduce(values, request, fn value, acc ->
      add_param(acc, :query, key, value)
    end)
  end

  def add_param(request, location, key, value) do
    Map.update!(request, location, fn params -> params ++ [{key, value}] end)
  end
end
|
lib/consul/request.ex
| 0.835551
| 0.444444
|
request.ex
|
starcoder
|
defmodule LearnKit.Knn.Classify do
  @moduledoc """
  Module for knn classify functions.

  All functions are injected into the using module via `__using__/1`, so
  they become private helpers of that module rather than being callable here.
  """
  alias LearnKit.{Preprocessing, Math}

  defmacro __using__(_opts) do
    quote do
      # Entry point of the injected pipeline: normalize (optionally),
      # compute distances to every stored feature, then pick the best label
      # among the k nearest neighbours.
      defp prediction(data_set, options) do
        data_set
        |> filter_features_by_size(options[:feature])
        |> check_normalization(options)
        |> calc_distances_for_features(options)
        |> sort_distances()
        |> select_closest_features(options)
        |> check_zero_distance(options)
      end

      # knn uses only features with the same size as current feature
      defp filter_features_by_size(data_set, current_feature) do
        Enum.map(data_set, fn {key, features} ->
          {
            key,
            Enum.filter(features, fn feature -> length(feature) == length(current_feature) end)
          }
        end)
      end

      # normalize features when options[:normalization] requests it;
      # any other value leaves the data set untouched
      defp check_normalization(data_set, options) do
        type = options[:normalization]
        case type do
          t when t in ["minimax", "z_normalization"] -> normalize(data_set, type)
          _ -> data_set
        end
      end

      # select algorithm for prediction ("brute" is the only one implemented;
      # anything else yields an empty distance list)
      defp calc_distances_for_features(data_set, options) do
        case options[:algorithm] do
          "brute" -> brute_algorithm(data_set, options)
          _ -> []
        end
      end

      # sort {distance, label} tuples ascending by distance
      defp sort_distances(features), do: Enum.sort(features, &(elem(&1, 0) <= elem(&2, 0)))

      # take the k closest features
      defp select_closest_features(features, options), do: Enum.take(features, options[:k])

      # check existence of current feature in data set: an exact match
      # (distance 0) wins immediately, bypassing the weighted vote
      defp check_zero_distance(closest_features, options) do
        {distance, label} = Enum.at(closest_features, 0)
        cond do
          distance == 0 -> {label, 0}
          true -> select_best_label(closest_features, options)
        end
      end

      # select best result based on weights
      defp select_best_label(features, options) do
        features
        |> calc_feature_weights(options)
        |> accumulate_weight_of_labels([])
        |> sort_result()
      end

      # normalize each feature using coefficients derived from the whole set
      defp normalize(data_set, type) do
        coefficients = find_coefficients_for_normalization(data_set, type)
        Enum.map(data_set, fn {key, features} ->
          {
            key,
            Enum.map(features, fn feature -> Preprocessing.normalize_feature(feature, coefficients, type) end)
          }
        end)
      end

      # find coefficients for normalization across all labels' features
      defp find_coefficients_for_normalization(data_set, type) do
        Enum.reduce(data_set, [], fn {_, features}, acc ->
          Enum.reduce(features, acc, fn feature, acc -> [feature | acc] end)
        end)
        |> Preprocessing.coefficients(type)
      end

      # append each feature's vote weight, producing {distance, label, weight}
      defp calc_feature_weights(features, options) do
        Enum.map(features, fn feature ->
          Tuple.append(feature, calc_feature_weight(options[:weight], elem(feature, 0)))
        end)
      end

      # highest accumulated weight wins
      defp sort_result(features) do
        features
        |> Enum.sort(&(elem(&1, 1) >= elem(&2, 1)))
        |> List.first()
      end

      # brute algorithm for prediction: compare against every stored feature
      defp brute_algorithm(data_set, options) do
        data_set
        |> Keyword.keys()
        |> handle_features_in_label(data_set, options[:feature])
        |> List.flatten()
      end

      defp handle_features_in_label(keys, data_set, current_feature) do
        Enum.map(keys, fn key ->
          data_set[key]
          |> calc_distances_in_label(current_feature, key)
        end)
      end

      # NOTE(review): the `acc =` rebinding below is redundant inside the
      # reduce fun (its value is the fun's return anyway) and triggers an
      # unused-variable warning.
      defp calc_distances_in_label(features, current_feature, key) do
        Enum.reduce(features, [], fn feature, acc ->
          distance = calc_distance_between_features(feature, current_feature)
          acc = [{distance, key} | acc]
        end)
      end

      # Euclidean distance between two equal-length feature vectors
      defp calc_distance_between_features(feature_from_data_set, feature) do
        Enum.zip(feature_from_data_set, feature)
        |> calc_distance_between_points()
        |> :math.sqrt()
      end

      # sum of squared coordinate differences
      defp calc_distance_between_points(list) do
        Enum.reduce(list, 0, fn {xi, yi}, acc ->
          xi - yi
          |> :math.pow(2)
          |> Math.summ(acc)
        end)
      end

      # "uniform": every neighbour votes equally; "distance": closer
      # neighbours vote more (1/d^2); anything else falls back to uniform
      defp calc_feature_weight(weight, distance) do
        case weight do
          "uniform" -> 1
          "distance" -> 1 / :math.pow(distance, 2)
          _ -> 1
        end
      end

      # fold {_, label, weight} tuples into a keyword list of label => total
      defp accumulate_weight_of_labels([], acc), do: acc

      defp accumulate_weight_of_labels([{_, key, weight} | tail], acc) do
        previous = if Keyword.has_key?(acc, key), do: acc[key], else: 0
        acc = Keyword.put(acc, key, previous + weight)
        accumulate_weight_of_labels(tail, acc)
      end
    end
  end
end
|
lib/learn_kit/knn/classify.ex
| 0.809765
| 0.564008
|
classify.ex
|
starcoder
|
defmodule Bitcoin.DERSig do
@moduledoc """
DER Signature.
DER encoded signatures are used in Bitcoin scripts (with sighash byte at the end).
This module provides parsing, serialization, normalization and checking if
the signature meets BIP66 requirements.
We need to normalize signatures before passing them to erlang's :crypto.verify
because it will return false if R or S are zero padded (while libsecp256k1 returns true).
DER Signature format:
`<<type, total_length, r_type, r_length, r :: binary, s_type, s_length, s :: binary>>`
Plus sighash byte at the end for the signatures present in the script, but this module
deals with signatures that already have the sighash byte stripped.
In strict DER signature `type` should be `0x30` (compound), and `r_encoding` and `s_encoding` should
equal `0x02` (integer).
## Links:
* https://github.com/bitcoin/bips/blob/master/bip-0066.mediawiki
* https://en.wikipedia.org/wiki/X.690
* https://www.itu.int/rec/T-REC-X.690/en
"""
use Bitwise
alias Bitcoin.Secp256k1
defstruct [:length, :r_type, :r, :s_type, :s, :type]
@type t :: %__MODULE__{}
# Upper bound for what's considered a low S value, inclusive (see BIP62)
@low_s_max Secp256k1.params()[:n] / 2
@doc """
Parse binary signature into %DERSig{} struct.
"""
@spec parse(binary) :: t
def parse(sig) do
  # parse_raw/1 returns a plain map of the DER fields; lift it into the struct.
  struct(__MODULE__, parse_raw(sig))
end
@doc """
Serialize signature struct into binary.

`length` from the struct is used in serialization, even if it's incorrect.
"""
@spec serialize(t) :: binary
def serialize(%__MODULE__{} = der) do
  # R and S lengths are recomputed from the actual binaries, but the total
  # length byte is taken from the struct verbatim (see doc above).
  <<der.type, der.length, der.r_type, byte_size(der.r), der.r::binary, der.s_type,
    byte_size(der.s), der.s::binary>>
end
@doc """
Normalize DER signature.

* remove leading null bytes from R and S
* fix total_length if it's incorrect
* fix negative S
* fix negative R
* ensure low S
"""
@spec normalize(t | binary) :: t | binary
def normalize(sig)

# Binary form round-trips through the struct representation.
def normalize(sig) when is_binary(sig) do
  sig
  |> parse
  |> normalize
  |> serialize
end

def normalize(%__MODULE__{} = der) do
  # trim/1, fix_negative/1 and low_s/1 are private helpers defined later in
  # this module (outside this chunk) — presumably strip leading null bytes,
  # re-pad values whose high bit would read as negative, and fold S into the
  # lower half of the curve order; TODO confirm against their definitions.
  r = der.r |> trim |> fix_negative
  s = der.s |> trim |> low_s |> fix_negative
  der
  |> Map.put(:r, r)
  |> Map.put(:s, s)
  # +4 accounts for the two type bytes and two length bytes of R and S.
  |> Map.put(:length, byte_size(r) + byte_size(s) + 4)
end
@doc """
Returns false when S > order/2
See https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#Low_S_values_in_signatures for details.
"""
@spec low_s?(t | binary) :: boolean
def low_s?(sig)
def low_s?(sig) when is_binary(sig), do: sig |> parse |> low_s?
def low_s?(der), do: Binary.to_integer(der.s) <= @low_s_max
@doc """
Check if the signature is a strict DER signature (BIP66)
Note that we operate on sig that already has the sighash byte stripped.
"""
@spec strict?(binary) :: boolean
def strict?(sig) when is_binary(sig) do
der = parse_raw(sig)
cond do
# Minimum size constraint
byte_size(sig) < 8 ->
false
# Maximum size constraint
byte_size(sig) > 72 ->
false
# A signature is of type 0x30 (compound).
der.type != 0x30 ->
false
# Length covers the entire signature
der.length != byte_size(sig) - 2 ->
false
# Make sure the length of the S element is still inside the signature
# -> Our parser will currently raise if it's not
# Length of the signature matches the sum of the length of the elements
der.length != der.r_length + der.s_length + 4 ->
false
# R element is an integer.
der.r_type != 0x02 ->
false
# R length > 0
der.r == <<>> ->
false
# R is positive
(Binary.at(sig, 4) &&& 0x80) == 0x80 ->
false
# No unecessary null bytes at the start of R
trim(der.r) != der.r ->
false
# Check whether the S element is an integer.
der.s_type != 0x02 ->
false
# S length > 0
der.s == <<>> ->
false
# S is not negative
(Binary.at(der.s, 0) &&& 0x80) == 0x80 ->
false
# No unecessary null bytes at the start of S
trim(der.s) != der.s ->
false
# All passed
true ->
true
end
end
# Parses signature
defp parse_raw(sig) do
<<type, total_length, sig::binary>> = sig
<<r_type, r_length, sig::binary>> = sig
<<r::binary-size(r_length), sig::binary>> = sig
<<s_type, s_length, sig::binary>> = sig
<<s::binary-size(s_length), _bin::binary>> = sig
%{
length: total_length,
type: type,
r_type: r_type,
r_length: r_length,
r: r,
s_type: s_type,
s_length: s_length,
s: s
}
end
# Trim leading null bytes
# But we need to be careful because if the null byte is followed by a byte with 0x80 bit set,
# removing the null byte would change the number sign.
defp trim(<<0, b, _bin::binary>> = sig) when (b &&& 0x80) == 0x80, do: sig
defp trim(<<0, bin::binary>>), do: trim(bin)
defp trim(bin), do: bin
# Ensure that the low S value is used
defp low_s(s), do: s |> Binary.to_integer() |> low_s_num |> Binary.from_integer()
defp low_s_num(s) when s > @low_s_max, do: Secp256k1.params()[:n] - s
defp low_s_num(s), do: s
# S should not be negative. But you can find it negative e.g in tx 70f7c15c6f62139cc41afa858894650344eda9975b46656d893ee59df8914a3d
# You can also find negative R in tx 251d9cc59d1fc23b0ec6e62aff6106f1890bf9ed4eb0b7df70319d3e555f4fd2
# These are encoding errors, null byte must be appendend at the beginning so that these numbers are interpreted as positive
defp fix_negative(<<b, _::binary>> = bin) when (b &&& 0x80) == 0x80, do: <<0, bin::binary>>
defp fix_negative(bin), do: bin
end
|
lib/bitcoin/der_sig.ex
| 0.898009
| 0.554591
|
der_sig.ex
|
starcoder
|
defmodule Raxx.Server do
  @moduledoc """
  Interface to handle server side communication in an HTTP message exchange.

  If simple `request -> response` transformation is possible, try `Raxx.SimpleServer`

  *A module implementing `Raxx.Server` is run by an HTTP server.
  For example [Ace](https://github.com/CrowdHailer/Ace)
  can run such a module for both HTTP/1.x and HTTP/2 exchanges*

  ## Getting Started

  **Send complete response as soon as request headers are received.**

      defmodule SimpleServer do
        use Raxx.Server

        def handle_head(%Raxx.Request{method: :GET, path: []}, _state) do
          response(:ok)
          |> set_header("content-type", "text/plain")
          |> set_body("Hello, World!")
        end
      end

  **Store data as it is available from a clients request**

      defmodule StreamingRequest do
        use Raxx.Server

        def handle_head(%Raxx.Request{method: :PUT, body: true}, _state) do
          {:ok, io_device} = File.open("my/path")
          {[], {:file, device}}
        end

        def handle_data(body, state = {:file, device}) do
          IO.write(device, body)
          {[], state}
        end

        def handle_tail(_trailers, state) do
          response(:see_other)
          |> set_header("location", "/")
        end
      end

  **Subscribe server to event source and forward notifications to client.**

      defmodule SubscribeToMessages do
        use Raxx.Server

        def handle_head(_request, _state) do
          {:ok, _} = ChatRoom.join()
          response(:ok)
          |> set_header("content-type", "text/event-stream")
          |> set_body(true)
        end

        def handle_info({ChatRoom, data}, state) do
          {[body(data)], state}
        end
      end

  ### Notes

  - `handle_head/2` will always be called with a request that has body as a boolean.
    For small requests where buffering the whole request is acceptable a simple middleware can be used.
  - Acceptable return values are the same for all callbacks;
    either a `Raxx.Response`, which must be complete, or
    a list of message parts and a new state.

  ## Streaming

  `Raxx.Server` defines an interface to stream the body of request and responses.

  This has several advantages:

  - Large payloads do not need to be held in memory
  - Server can push information as it becomes available, using Server Sent Events.
  - If a request has invalid headers then a reply can be set without handling the body.
  - Content can be generated as requested using HTTP/2 flow control

  The body of a Raxx message (Raxx.Request or `Raxx.Response`) may be one of three types:

  - `io_list` - This is the complete body for the message.
  - `:false` - There **is no** body, for example `:GET` requests never have a body.
  - `:true` - There **is** a body, it can be processed as it is received

  ## Server Isolation

  To start an exchange a client sends a request.
  The server, upon receiving this message, sends a reply.
  A logical HTTP exchange consists of a single request and response.

  Methods such as [pipelining](https://en.wikipedia.org/wiki/HTTP_pipelining)
  and [multiplexing](http://qnimate.com/what-is-multiplexing-in-http2/)
  combine multiple logical exchanges onto a single connection.
  This is done to improve performance and is a detail not exposed to a server.

  A Raxx server handles a single HTTP exchange.
  Therefore a single connection may have multiple servers each isolated in their own process.

  ## Termination

  An exchange can be stopped early by terminating the server process.
  Support for early termination is not consistent between versions of HTTP.

  - HTTP/2: server exit with reason `:normal`, stream reset with error `CANCEL`.
  - HTTP/2: server exit any other reason, stream reset with error `INTERNAL_ERROR`.
  - HTTP/1.x: server exit with any reason, connection is closed.

  `Raxx.Server` does not provide a terminate callback.
  Any cleanup that needs to be done from an aborted exchange should be handled by monitoring the server process.
  """

  @typedoc """
  The behaviour and state of a raxx server.

  A runnable server is a pair of the callback module and its current state.
  """
  @type t :: {module, state}

  @typedoc """
  State of application server.

  Original value is the configuration given when starting the raxx application.
  """
  @type state :: any()

  @typedoc """
  Possible return values instructing server to send client data and update state if appropriate.

  Either a list of message parts plus the next state, or a complete `Raxx.Response`.
  """
  @type next :: {[Raxx.part()], state} | Raxx.Response.t()

  @doc """
  Called once when a client starts a stream.

  Passed a `Raxx.Request` and server configuration.
  Note the value of the request body will be a boolean.

  This callback can be relied upon to execute before any other callbacks.
  """
  @callback handle_head(Raxx.Request.t(), state()) :: next

  @doc """
  Called every time data from the request body is received.
  """
  @callback handle_data(binary(), state()) :: next

  @doc """
  Called once when a request finishes.

  This will be called with an empty list of headers if the request is completed without trailers.
  """
  @callback handle_tail([{binary(), binary()}], state()) :: next

  @doc """
  Called for all other messages the server may receive.
  """
  @callback handle_info(any(), state()) :: next

  # Provides Raxx.Server.render/2, used below to build the default 404 page.
  use Raxx.View, template: "server.html.eex", arguments: [:module]

  defmacro __using__(_options) do
    quote do
      @behaviour unquote(__MODULE__)
      import Raxx

      # Default implementations below are all overridable; they respond with
      # a rendered 404 page and log-and-ignore any streamed data or messages.
      @impl unquote(__MODULE__)
      def handle_head(_request, _state) do
        response(:not_found)
        |> Raxx.Server.render(__MODULE__)
      end

      @impl unquote(__MODULE__)
      def handle_data(data, state) do
        import Logger
        Logger.warn("Received unexpected data: #{inspect(data)}")
        {[], state}
      end

      @impl unquote(__MODULE__)
      def handle_tail(trailers, state) do
        import Logger
        Logger.warn("Received unexpected trailers: #{inspect(trailers)}")
        {[], state}
      end

      @impl unquote(__MODULE__)
      def handle_info(message, state) do
        import Logger
        Logger.warn("Received unexpected message: #{inspect(message)}")
        {[], state}
      end

      defoverridable unquote(__MODULE__)
      @before_compile unquote(__MODULE__)
    end
  end

  # DEBT Remove this for 1.0 release
  defmacro __before_compile__(_env) do
    quote do
      # If handle_request is implemented the module may have been created with raxx < 0.17.0
      # In this case a warning is emitted suggesting using Raxx.SimpleServer instead.
      # This warning can be disabled by adding @raxx_safe_server to the module.
      if Module.defines?(__MODULE__, {:handle_request, 2}) and
           !Module.get_attribute(__MODULE__, :raxx_safe_server) do
        %{file: file, line: line} = __ENV__

        :elixir_errors.warn(__ENV__.line, __ENV__.file, """
        The server `#{inspect(__MODULE__)}` implements `handle_request/2.
        In place of `use Raxx.Server` try `use Raxx.SimpleServer.`
        The behaviour Raxx.Server changes in release 0.17.0, see CHANGELOG for details.
        """)
      end
    end
  end

  @doc """
  Execute a server module and current state in response to a new message.

  Dispatches to the callback matching the message shape and normalizes
  the callback's return into a `{parts, state}` pair.
  """
  @spec handle(t, term) :: {[Raxx.part()], t}
  def handle({module, state}, request = %Raxx.Request{}) do
    normalize_reaction(module.handle_head(request, state), state)
  end

  def handle({module, state}, %Raxx.Data{data: data}) do
    normalize_reaction(module.handle_data(data, state), state)
  end

  def handle({module, state}, %Raxx.Tail{headers: headers}) do
    normalize_reaction(module.handle_tail(headers, state), state)
  end

  def handle({module, state}, other) do
    normalize_reaction(module.handle_info(other, state), state)
  end

  # A bare response is only acceptable when it is complete; `body: true`
  # means more parts would follow, which cannot be expressed without a state.
  defp normalize_reaction(response = %Raxx.Response{body: true}, _initial_state) do
    raise %ReturnError{return: response}
  end

  defp normalize_reaction(response = %Raxx.Response{}, initial_state) do
    {[response], initial_state}
  end

  defp normalize_reaction({parts, new_state}, _initial_state) when is_list(parts) do
    {parts, new_state}
  end

  defp normalize_reaction(other, _initial_state) do
    raise %ReturnError{return: other}
  end

  @doc """
  Verify server can be run?

  A runnable server consists of a tuple of server module and initial state.
  The server module must implement this modules behaviour.
  The initial state can be any term.

  ## Examples

      # Could just call verify
      iex> Raxx.Server.verify_server({Raxx.ServerTest.DefaultServer, %{}})
      {:ok, {Raxx.ServerTest.DefaultServer, %{}}}

      iex> Raxx.Server.verify_server({GenServer, %{}})
      {:error, {:not_a_server_module, GenServer}}

      iex> Raxx.Server.verify_server({NotAModule, %{}})
      {:error, {:not_a_module, NotAModule}}
  """
  def verify_server({module, term}) do
    case verify_implementation(module) do
      {:ok, _} ->
        {:ok, {module, term}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Raising variant of verify_implementation/1, for callers that prefer to crash.
  @doc false
  def verify_implementation!(module) do
    case Raxx.Server.verify_implementation(module) do
      {:ok, _} ->
        :no_op

      {:error, {:not_a_server_module, module}} ->
        raise ArgumentError, "module `#{module}` does not implement `Raxx.Server` behaviour."

      {:error, {:not_a_module, module}} ->
        raise ArgumentError, "module `#{module}` could not be loaded."
    end
  end

  @doc false
  def verify_implementation(module) do
    case fetch_behaviours(module) do
      {:ok, behaviours} ->
        if Enum.member?(behaviours, __MODULE__) do
          {:ok, module}
        else
          {:error, {:not_a_server_module, module}}
        end

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Reads the `behaviour` attributes from a compiled module's metadata.
  defp fetch_behaviours(module) do
    case Code.ensure_compiled?(module) do
      true ->
        behaviours =
          module.module_info[:attributes]
          |> Keyword.take([:behaviour])
          |> Keyword.values()
          |> List.flatten()

        {:ok, behaviours}

      false ->
        {:error, {:not_a_module, module}}
    end
  end
end
|
lib/raxx/server.ex
| 0.908476
| 0.541773
|
server.ex
|
starcoder
|
defmodule Ecto.Adapter do
  @moduledoc """
  This module specifies the adapter API that an adapter is required to
  implement.
  """

  # Legacy behaviour definition style (pre Elixir 1.0 `@callback` attributes).
  use Behaviour

  # An adapter is identified by its module.
  @type t :: module

  @doc """
  The callback invoked when the adapter is used.
  """
  defmacrocallback __using__(opts :: Keyword.t) :: Macro.t

  @doc """
  Starts any connection pooling or supervision and return `{:ok, pid}`
  or just `:ok` if nothing needs to be done.

  Returns `{:error, {:already_started, pid}}` if the repo already
  started or `{:error, term}` in case anything else goes wrong.
  """
  defcallback start_link(repo :: Ecto.Repo.t, options :: Keyword.t) ::
              {:ok, pid} | :ok | {:error, {:already_started, pid}} | {:error, term}

  @doc """
  Stops any connection pooling or supervision started with `start_link/1`.
  """
  defcallback stop(repo :: Ecto.Repo.t) :: :ok

  @doc """
  Fetches all results from the data store based on the given query.
  """
  defcallback all(repo :: Ecto.Repo.t, query :: Ecto.Query.t,
                  params :: map(), opts :: Keyword.t) :: [[term]] | no_return

  @doc """
  Updates all entities matching the given query with the values given. The
  query shall only have `where` expressions and a single `from` expression. Returns
  the number of affected entities.
  """
  defcallback update_all(repo :: Ecto.Repo.t, query :: Ecto.Query.t,
                         filter :: Keyword.t, params :: map(),
                         opts :: Keyword.t) :: integer | no_return

  @doc """
  Deletes all entities matching the given query.

  The query shall only have `where` expressions and a `from` expression.
  Returns the number of affected entities.
  """
  defcallback delete_all(repo :: Ecto.Repo.t, query :: Ecto.Query.t,
                         params :: map(), opts :: Keyword.t) :: integer | no_return

  @doc """
  Inserts a single new model in the data store.
  """
  defcallback insert(repo :: Ecto.Repo.t, source :: binary,
                     fields :: Keyword.t, returning :: [atom],
                     opts :: Keyword.t) :: {:ok, tuple} | no_return

  @doc """
  Updates a single model with the given filters.

  While `filter` can be any record column, it is expected that
  at least the primary key (or any other key that uniquely
  identifies an existing record) to be given as filter. Therefore,
  in case there is no record matching the given filters,
  `{:error, :stale}` is returned.
  """
  defcallback update(repo :: Ecto.Repo.t, source :: binary,
                     filter :: Keyword.t, fields :: Keyword.t,
                     returning :: [atom], opts :: Keyword.t) ::
              {:ok, tuple} | {:error, :stale} | no_return

  @doc """
  Deletes a single model with the given filters.

  While `filter` can be any record column, it is expected that
  at least the primary key (or any other key that uniquely
  identifies an existing record) to be given as filter. Therefore,
  in case there is no record matching the given filters,
  `{:error, :stale}` is returned.
  """
  defcallback delete(repo :: Ecto.Repo.t, source :: binary,
                     filter :: Keyword.t, opts :: Keyword.t) ::
              {:ok, tuple} | {:error, :stale} | no_return
end
|
lib/ecto/adapter.ex
| 0.918745
| 0.509093
|
adapter.ex
|
starcoder
|
defmodule AWS.AutoScalingPlans do
  @moduledoc """
  AWS Auto Scaling

  Use AWS Auto Scaling to quickly discover all the scalable AWS resources for
  your application and configure dynamic scaling and predictive scaling for
  your resources using scaling plans. Use this service in conjunction with
  the Amazon EC2 Auto Scaling, Application Auto Scaling, Amazon CloudWatch,
  and AWS CloudFormation services.

  Currently, predictive scaling is only available for Amazon EC2 Auto Scaling
  groups.

  For more information about AWS Auto Scaling, including information about
  granting IAM users required permissions for AWS Auto Scaling actions, see
  the [AWS Auto Scaling User
  Guide](https://docs.aws.amazon.com/autoscaling/plans/userguide/what-is-aws-auto-scaling.html).
  """

  @doc """
  Creates a scaling plan.
  """
  def create_scaling_plan(client, input, options \\ []),
    do: request(client, "CreateScalingPlan", input, options)

  @doc """
  Deletes the specified scaling plan.

  Deleting a scaling plan deletes the underlying `ScalingInstruction` for all
  of the scalable resources that are covered by the plan.

  If the plan has launched resources or has scaling activities in progress,
  you must delete those resources separately.
  """
  def delete_scaling_plan(client, input, options \\ []),
    do: request(client, "DeleteScalingPlan", input, options)

  @doc """
  Describes the scalable resources in the specified scaling plan.
  """
  def describe_scaling_plan_resources(client, input, options \\ []),
    do: request(client, "DescribeScalingPlanResources", input, options)

  @doc """
  Describes one or more of your scaling plans.
  """
  def describe_scaling_plans(client, input, options \\ []),
    do: request(client, "DescribeScalingPlans", input, options)

  @doc """
  Retrieves the forecast data for a scalable resource.

  Capacity forecasts are represented as predicted values, or data points,
  that are calculated using historical data points from a specified
  CloudWatch load metric. Data points are available for up to 56 days.
  """
  def get_scaling_plan_resource_forecast_data(client, input, options \\ []),
    do: request(client, "GetScalingPlanResourceForecastData", input, options)

  @doc """
  Updates the specified scaling plan.

  You cannot update a scaling plan if it is in the process of being created,
  updated, or deleted.
  """
  def update_scaling_plan(client, input, options \\ []),
    do: request(client, "UpdateScalingPlan", input, options)

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, action, input, options) do
    # Every action is a signed JSON POST to the autoscaling-plans endpoint.
    client = %{client | service: "autoscaling-plans"}
    host = build_host("autoscaling-plans", client)
    url = build_url(host, client)

    base_headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "AnyScaleScalingPlannerFrontendService.#{action}"}
    ]

    payload = Poison.Encoder.encode(input, %{})
    signed_headers = AWS.Request.sign_v4(client, "POST", url, base_headers, payload)

    url
    |> HTTPoison.post(payload, signed_headers, options)
    |> handle_response()
  end

  # Normalizes the raw HTTPoison result into the documented return shapes.
  defp handle_response({:ok, %HTTPoison.Response{status_code: 200, body: ""} = response}),
    do: {:ok, nil, response}

  defp handle_response({:ok, %HTTPoison.Response{status_code: 200, body: body} = response}),
    do: {:ok, Poison.Parser.parse!(body, %{}), response}

  defp handle_response({:ok, %HTTPoison.Response{body: body}}),
    do: {:error, Poison.Parser.parse!(body, %{})}

  defp handle_response({:error, %HTTPoison.Error{reason: reason}}),
    do: {:error, %HTTPoison.Error{reason: reason}}

  # Local development shortcut: the "local" region always maps to localhost.
  defp build_host(_prefix, %{region: "local"}), do: "localhost"
  defp build_host(prefix, %{region: region, endpoint: endpoint}), do: "#{prefix}.#{region}.#{endpoint}"

  defp build_url(host, %{proto: scheme, port: port}), do: "#{scheme}://#{host}:#{port}/"
end
|
lib/aws/auto_scaling_plans.ex
| 0.845481
| 0.512266
|
auto_scaling_plans.ex
|
starcoder
|
defmodule AWS.Detective do
  @moduledoc """
  Detective uses machine learning and purpose-built visualizations to help you
  analyze and investigate security issues across your Amazon Web Services (AWS)
  workloads.

  Detective automatically extracts time-based events such as login attempts, API
  calls, and network traffic from AWS CloudTrail and Amazon Virtual Private Cloud
  (Amazon VPC) flow logs. It also extracts findings detected by Amazon GuardDuty.

  The Detective API primarily supports the creation and management of behavior
  graphs. A behavior graph contains the extracted data from a set of member
  accounts, and is created and managed by an administrator account.

  Every behavior graph is specific to a Region. You can only use the API to manage
  graphs that belong to the Region that is associated with the currently selected
  endpoint.

  A Detective administrator account can use the Detective API to do the following:

    * Enable and disable Detective. Enabling Detective creates a new
      behavior graph.

    * View the list of member accounts in a behavior graph.

    * Add member accounts to a behavior graph.

    * Remove member accounts from a behavior graph.

  A member account can use the Detective API to do the following:

    * View the list of behavior graphs that they are invited to.

    * Accept an invitation to contribute to a behavior graph.

    * Decline an invitation to contribute to a behavior graph.

    * Remove their account from a behavior graph.

  All API actions are logged as CloudTrail events. See [Logging Detective API Calls with
  CloudTrail](https://docs.aws.amazon.com/detective/latest/adminguide/logging-using-cloudtrail.html).

  We replaced the term "master account" with the term "administrator account." An
  administrator account is used to centrally manage multiple accounts. In the case
  of Detective, the administrator account manages the accounts in their behavior
  graph.
  """

  alias AWS.Client
  alias AWS.Request

  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2018-10-26",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "api.detective",
      global?: false,
      protocol: "rest-json",
      service_id: "Detective",
      signature_version: "v4",
      signing_name: "detective",
      target_prefix: nil
    }
  end

  @doc """
  Accepts an invitation for the member account to contribute data to a behavior
  graph.

  This operation can only be called by an invited member account.

  The request provides the ARN of behavior graph.

  The member account status in the graph must be `INVITED`.
  """
  def accept_invitation(%Client{} = client, input, options \\ []) do
    rest_request(client, :put, "/invitation", input, options)
  end

  @doc """
  Creates a new behavior graph for the calling account, and sets that account as
  the administrator account.

  This operation is called by the account that is enabling Detective.

  Before you try to enable Detective, make sure that your account has been
  enrolled in Amazon GuardDuty for at least 48 hours. If you do not meet this
  requirement, you cannot enable Detective. If you do meet the GuardDuty
  prerequisite, then when you make the request to enable Detective, it checks
  whether your data volume is within the Detective quota. If it exceeds the quota,
  then you cannot enable Detective.

  The operation also enables Detective for the calling account in the currently
  selected Region. It returns the ARN of the new behavior graph.

  `CreateGraph` triggers a process to create the corresponding data tables for the
  new behavior graph.

  An account can only be the administrator account for one behavior graph within a
  Region. If the same account calls `CreateGraph` with the same administrator
  account, it always returns the same behavior graph ARN. It does not create a new
  behavior graph.
  """
  def create_graph(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph", input, options)
  end

  @doc """
  Sends a request to invite the specified AWS accounts to be member accounts in
  the behavior graph.

  This operation can only be called by the administrator account for a behavior
  graph.

  `CreateMembers` verifies the accounts and then invites the verified accounts.
  The administrator can optionally specify to not send invitation emails to the
  member accounts. This would be used when the administrator manages their member
  accounts centrally.

  The request provides the behavior graph ARN and the list of accounts to invite.

  The response separates the requested accounts into two lists:

    * The accounts that `CreateMembers` was able to start the
      verification for. This list includes member accounts that are being verified,
      that have passed verification and are to be invited, and that have failed
      verification.

    * The accounts that `CreateMembers` was unable to process. This list
      includes accounts that were already invited to be member accounts in the
      behavior graph.
  """
  def create_members(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/members", input, options)
  end

  @doc """
  Disables the specified behavior graph and queues it to be deleted.

  This operation removes the graph from each member account's list of behavior
  graphs.

  `DeleteGraph` can only be called by the administrator account for a behavior
  graph.
  """
  def delete_graph(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/removal", input, options)
  end

  @doc """
  Deletes one or more member accounts from the administrator account's behavior
  graph.

  This operation can only be called by a Detective administrator account. That
  account cannot use `DeleteMembers` to delete their own account from the behavior
  graph. To disable a behavior graph, the administrator account uses the
  `DeleteGraph` API method.
  """
  def delete_members(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/members/removal", input, options)
  end

  @doc """
  Removes the member account from the specified behavior graph.

  This operation can only be called by a member account that has the `ENABLED`
  status.
  """
  def disassociate_membership(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/membership/removal", input, options)
  end

  @doc """
  Returns the membership details for specified member accounts for a behavior
  graph.
  """
  def get_members(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/members/get", input, options)
  end

  @doc """
  Returns the list of behavior graphs that the calling account is an administrator
  account of.

  This operation can only be called by an administrator account.

  Because an account can currently only be the administrator of one behavior graph
  within a Region, the results always contain a single behavior graph.
  """
  def list_graphs(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graphs/list", input, options)
  end

  @doc """
  Retrieves the list of open and accepted behavior graph invitations for the
  member account.

  This operation can only be called by a member account.

  Open invitations are invitations that the member account has not responded to.

  The results do not include behavior graphs for which the member account declined
  the invitation. The results also do not include behavior graphs that the member
  account resigned from or was removed from.
  """
  def list_invitations(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/invitations/list", input, options)
  end

  @doc """
  Retrieves the list of member accounts for a behavior graph.

  Does not return member accounts that were removed from the behavior graph.
  """
  def list_members(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/members/list", input, options)
  end

  @doc """
  Rejects an invitation to contribute the account data to a behavior graph.

  This operation must be called by a member account that has the `INVITED` status.
  """
  def reject_invitation(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/invitation/removal", input, options)
  end

  @doc """
  Sends a request to enable data ingest for a member account that has a status of
  `ACCEPTED_BUT_DISABLED`.

  For valid member accounts, the status is updated as follows.

    * If Detective enabled the member account, then the new status is
      `ENABLED`.

    * If Detective cannot enable the member account, the status remains
      `ACCEPTED_BUT_DISABLED`.
  """
  def start_monitoring_member(%Client{} = client, input, options \\ []) do
    rest_request(client, :post, "/graph/member/monitoringstate", input, options)
  end

  # Every Detective operation is a plain JSON REST call with no query
  # parameters and no extra headers; this helper funnels all public
  # functions through a single Request.request_rest/9 invocation.
  defp rest_request(%Client{} = client, method, url_path, input, options) do
    Request.request_rest(
      client,
      metadata(),
      method,
      url_path,
      [],
      [],
      input,
      options,
      nil
    )
  end
end
|
lib/aws/generated/detective.ex
| 0.884258
| 0.684936
|
detective.ex
|
starcoder
|
if Code.ensure_loaded?(HTTPoison) do
defmodule NewRelic.Instrumented.HTTPoison do
use NewRelic.Tracer
@moduledoc """
To track outbound requests as part of a Distributed Trace, an additional request
header needs to be added. Simply alias this module and use `HTTPoison` as normal,
all your requests will be automatically instrumented.
```elixir
alias NewRelic.Instrumented.HTTPoison
HTTPoison.get("http://www.example.com")
```
This module mirrors the interface of `HTTPoison`
"""
defp instrument(method, url, headers) do
NewRelic.set_span(:http, url: url, method: method, component: "HTTPoison")
headers ++ NewRelic.create_distributed_trace_payload(:http)
end
@trace {:get, category: :external}
def get(url, headers \\ [], options \\ []) do
headers = instrument("GET", url, headers)
HTTPoison.get(url, headers, options)
end
@trace {:get!, category: :external}
def get!(url, headers \\ [], options \\ []) do
headers = instrument("GET", url, headers)
HTTPoison.get!(url, headers, options)
end
@trace {:put, category: :external}
def put(url, body \\ "", headers \\ [], options \\ []) do
headers = instrument("PUT", url, headers)
HTTPoison.put(url, body, headers, options)
end
@trace {:put!, category: :external}
def put!(url, body \\ "", headers \\ [], options \\ []) do
headers = instrument("PUT", url, headers)
HTTPoison.put!(url, body, headers, options)
end
@trace {:head, category: :external}
def head(url, headers \\ [], options \\ []) do
headers = instrument("HEAD", url, headers)
HTTPoison.head(url, headers, options)
end
@trace {:head!, category: :external}
def head!(url, headers \\ [], options \\ []) do
headers = instrument("HEAD", url, headers)
HTTPoison.head!(url, headers, options)
end
@trace {:post, category: :external}
def post(url, body, headers \\ [], options \\ []) do
headers = instrument("POST", url, headers)
HTTPoison.post(url, body, headers, options)
end
@trace {:post!, category: :external}
def post!(url, body, headers \\ [], options \\ []) do
headers = instrument("POST", url, headers)
HTTPoison.post!(url, body, headers, options)
end
@trace {:patch, category: :external}
def patch(url, body, headers \\ [], options \\ []) do
headers = instrument("PATCH", url, headers)
HTTPoison.patch(url, body, headers, options)
end
@trace {:patch!, category: :external}
def patch!(url, body, headers \\ [], options \\ []) do
headers = instrument("PATCH", url, headers)
HTTPoison.patch!(url, body, headers, options)
end
@trace {:delete, category: :external}
def delete(url, headers \\ [], options \\ []) do
headers = instrument("DELETE", url, headers)
HTTPoison.delete(url, headers, options)
end
@trace {:delete!, category: :external}
def delete!(url, headers \\ [], options \\ []) do
headers = instrument("DELETE", url, headers)
HTTPoison.delete!(url, headers, options)
end
@trace {:options, category: :external}
def options(url, headers \\ [], options \\ []) do
headers = instrument("OPTIONS", url, headers)
HTTPoison.options(url, headers, options)
end
@trace {:options!, category: :external}
# Traced wrapper for HTTPoison.options!/3 (raising variant).
def options!(url, headers \\ [], options \\ []) do
  HTTPoison.options!(url, instrument("OPTIONS", url, headers), options)
end
@trace {:request, category: :external}
# Generic traced request; the method atom/string is upcased for the
# instrumented header set before delegating to HTTPoison.request/5.
def request(method, url, body \\ "", headers \\ [], options \\ []) do
  traced_headers = instrument(method |> to_string() |> String.upcase(), url, headers)
  HTTPoison.request(method, url, body, traced_headers, options)
end
@trace {:request!, category: :external}
# Raising variant of request/5 with the same header instrumentation.
def request!(method, url, body \\ "", headers \\ [], options \\ []) do
  traced_headers = instrument(method |> to_string() |> String.upcase(), url, headers)
  HTTPoison.request!(method, url, body, traced_headers, options)
end
end
end
|
lib/new_relic/instrumented/httpoison.ex
| 0.797004
| 0.563288
|
httpoison.ex
|
starcoder
|
defmodule CommendableComments do
  @attributes [
    :modulecomment,
    :comment,
    :typecomment
  ]

  @moduledoc """
  A trivial library that ensures that the following module attributes do not throw a warning:
  #{Enum.map_join(@attributes, "\n", fn attribute -> " * `@#{attribute}`" end)}
  The value of this package is more from the semantic perspective where developers would like
  to document along their code base a variety of implementation and/or domain related matters that
  do not concern any prospective public user of the `API`.
  These could vary from documenting what type of algorithm was leveraged to realize a particular
  implementation to highlighting the various elements of the conceptual framework that was developed
  under `Domain Driven Design`.
  `@moduledoc`, `@doc` and `@typedoc` have their usage reserved for the public `API` and thus concern
  themselves with the 'how'-usage aspect of things. The usage of the module attributes in this
  package is to anchor the 'why' aspect of things along the relevant parts of the codebase.
  For those who prefer to keep those discussions outside the codebase; inside tickets or various wikis
  they could still embed a direct URL:
  ```elixir
  defmodule A do
    use CommendableComments
    @modulecomment url: link_to_wiki/link_to_jira
    @comment url: link_to_wiki/link_to_jira
    def do_something(a, b) do
      ...
    end
  end
  ```
  In the case of diagrams that are persisted with the project locally, one could express:
  ```elixir
  @comment diagram: path_to_png
  ```
  or in case it is somewhere remote:
  ```elixir
  @comment diagram_url: link_to_online_diagram_in_google_drive
  ```
  All of these annotations are up to your personal discretion. Currently there is no real functionality
  associated with any of this.
  The ultimate ambition of this project is to enrich `ex_doc` to pick up on these attributes. Documentation can thus
  have multiple perspective views; that of a conventional user of your API as well as that of the implementor of your
  organization. Next would be to introduce the concept of `commenttest` that may be beneficial for library developers
  who would still like to document certain functionality that is not relevant for the user of such a library to know
  about.
  Whether or not this will ever come to fruition will depend on your feedback. If you strongly oppose this concept
  altogether; then even more so I would love to learn from you.
  """

  @doc false
  # Exposes the supported attribute names so __using__/1 (a quoted block
  # expanded in the caller's context) can reach them at compile time.
  def attributes, do: @attributes

  # Registers each supported attribute in the using module with
  # `accumulate: true`, which is what suppresses the "unused attribute"
  # warning for repeated annotations.
  defmacro __using__(_) do
    quote do
      CommendableComments.attributes()
      |> Enum.each(fn e ->
        Module.register_attribute(__MODULE__, e, accumulate: true)
      end)
    end
  end
end
|
lib/commendable_comments.ex
| 0.824533
| 0.842992
|
commendable_comments.ex
|
starcoder
|
defmodule XtbClient.Messages.SymbolInfo do
defmodule Query do
  @moduledoc """
  Query payload for requesting information about a single symbol.
  ## Parameters
  - `symbol` symbol name.
  """

  @type t :: %__MODULE__{
          symbol: binary()
        }

  @enforce_keys [:symbol]
  @derive Jason.Encoder
  defstruct symbol: ""

  def new(symbol) when is_binary(symbol), do: %__MODULE__{symbol: symbol}
end
alias XtbClient.Messages.{MarginMode, ProfitMode, QuoteId}
@moduledoc """
Information relevant to the symbol of security.
Please be advised that result values for profit and margin calculation can be used optionally, because server is able to perform all profit/margin calculations for Client application by commands described later in this document.
## Parameters
- `ask` ask price in base currency,
- `bid` bid price in base currency,
- `category_name` category name,
- `contract_size` size of 1 lot,
- `currency` currency,
- `currency_pair` indicates whether the symbol represents a currency pair,
- `currency_profit` the currency of calculated profit,
- `description` description,
- `expiration` expiration, `null` if not applicable,
- `group_name` symbol group name,
- `high` the highest price of the day in base currency,
- `initial_margin` initial margin for 1 lot order, used for profit/margin calculation,
- `instant_max_volume` maximum instant volume multiplied by 100 (in lots),
- `leverage` symbol leverage,
- `long_only` indicates whether the symbol is long only,
- `lot_max` maximum size of trade,
- `lot_min` minimum size of trade,
- `lot_step` a value of minimum step by which the size of trade can be changed (within `lotMin` - `lotMax` range),
- `low` the lowest price of the day in base currency,
- `margin_hedged` used for profit calculation,
- `margin_hedged_strong` for margin calculation,
- `margin_maintenance` for margin calculation, `null` if not applicable,
- `margin_mode` for margin calculation,
- `percentage` percentage,
- `pips_precision` number of symbol's pip decimal places,
- `precision` number of symbol's price decimal places,
- `profit_mode` for profit calculation,
- `quote_id` source of price, see `XtbClient.Messages.QuoteId`,
- `short_selling` indicates whether short selling is allowed on the instrument,
- `spread_raw` the difference between raw ask and bid prices,
- `spread_table` spread representation,
- `starting` `null` if not applicable,
- `step_rule_id` appropriate step rule ID from `XtbClient.Connection.get_step_rules/1` command response,
- `stops_level` minimal distance (in pips) from the current price where the stopLoss/takeProfit can be set,
- `swap_rollover_3_days` time when additional swap is accounted for weekend,
- `swap_enable` indicates whether swap value is added to position on end of day,
- `swap_long` swap value for long positions in pips,
- `swap_short` swap value for short positions in pips,
- `swap_type` type of swap calculated,
- `symbol` symbol name,
- `tick_size` smallest possible price change, used for profit/margin calculation, `null` if not applicable,
- `tick_value` value of smallest possible price change (in base currency), used for profit/margin calculation, `null` if not applicable,
- `time` ask & bid tick time,
- `time_string` time in string,
- `trailing_enabled` indicates whether trailing stop (offset) is applicable to the instrument,
- `type` instrument class number.
## Handled Api methods
- `getSymbol`
"""
# Typespec mirrors the getSymbol JSON payload; the camelCase wire keys are
# mapped to these snake_case fields in new/1 below.
@type t :: %__MODULE__{
ask: float(),
bid: float(),
category_name: binary(),
contract_size: integer(),
currency: binary(),
currency_pair: true | false,
currency_profit: binary(),
description: binary(),
expiration: DateTime.t() | nil,
group_name: binary(),
high: float(),
initial_margin: integer(),
instant_max_volume: integer(),
leverage: float(),
long_only: true | false,
lot_max: float(),
lot_min: float(),
lot_step: float(),
low: float(),
margin_hedged: integer(),
margin_hedged_strong: true | false,
margin_maintenance: integer(),
margin_mode: MarginMode.t(),
percentage: float(),
pips_precision: integer(),
precision: integer(),
profit_mode: ProfitMode.t(),
quote_id: QuoteId.t() | nil,
short_selling: true | false,
spread_raw: float(),
spread_table: float(),
starting: DateTime.t() | nil,
step_rule_id: integer(),
stops_level: integer(),
swap_rollover_3_days: integer(),
swap_enable: true | false,
swap_long: float(),
swap_short: float(),
swap_type: integer(),
symbol: binary(),
tick_size: float(),
tick_value: float(),
time: DateTime.t(),
time_string: binary(),
trailing_enabled: true | false,
type: integer()
}
@enforce_keys [
:ask,
:bid,
:category_name,
:contract_size,
:currency,
:currency_pair,
:currency_profit,
:description,
:expiration,
:group_name,
:high,
:initial_margin,
:instant_max_volume,
:leverage,
:long_only,
:lot_max,
:lot_min,
:lot_step,
:low,
:margin_hedged,
:margin_hedged_strong,
:margin_maintenance,
:margin_mode,
:percentage,
:pips_precision,
:precision,
:profit_mode,
:quote_id,
:short_selling,
:spread_raw,
:spread_table,
:starting,
:step_rule_id,
:stops_level,
:swap_rollover_3_days,
:swap_enable,
:swap_long,
:swap_short,
:swap_type,
:symbol,
:tick_size,
:tick_value,
:time,
:time_string,
:trailing_enabled,
:type
]
@derive Jason.Encoder
# Defaults below are placeholders only: every key is listed in
# @enforce_keys, so struct literals must always supply explicit values.
defstruct ask: 0.0,
bid: 0.0,
category_name: "",
contract_size: 0,
currency: "",
currency_pair: nil,
currency_profit: "",
description: "",
expiration: nil,
group_name: "",
high: 0.0,
initial_margin: 0,
instant_max_volume: 0,
leverage: 0.0,
long_only: nil,
lot_max: 0.0,
lot_min: 0.0,
lot_step: 0.0,
low: 0.0,
margin_hedged: 0,
margin_hedged_strong: nil,
margin_maintenance: 0,
margin_mode: nil,
percentage: 0.0,
pips_precision: 0,
precision: 0,
profit_mode: nil,
quote_id: nil,
short_selling: nil,
spread_raw: 0.0,
spread_table: 0.0,
starting: nil,
step_rule_id: 0,
stops_level: 0,
swap_rollover_3_days: 0,
swap_enable: nil,
swap_long: 0.0,
swap_short: 0.0,
swap_type: 0,
symbol: "",
tick_size: 0.0,
tick_value: 0.0,
time: nil,
time_string: "",
trailing_enabled: nil,
type: 0
@doc """
Builds a `SymbolInfo` struct from the raw `getSymbol` API response map.
Pattern-matches every expected camelCase key and type-checks the values in
the guard, so a missing key or unexpected type raises `FunctionClauseError`.
"""
def new(%{
"ask" => ask,
"bid" => bid,
"categoryName" => category_name,
"contractSize" => contract_size,
"currency" => currency,
"currencyPair" => currency_pair,
"currencyProfit" => currency_profit,
"description" => description,
"expiration" => expiration,
"groupName" => group_name,
"high" => high,
"initialMargin" => initial_margin,
"instantMaxVolume" => instant_max_volume,
"leverage" => leverage,
"longOnly" => long_only,
"lotMax" => lot_max,
"lotMin" => lot_min,
"lotStep" => lot_step,
"low" => low,
"marginHedged" => margin_hedged,
"marginHedgedStrong" => margin_hedged_strong,
"marginMaintenance" => margin_maintenance,
"marginMode" => margin_mode,
"percentage" => percentage,
"pipsPrecision" => pips_precision,
"precision" => precision,
"profitMode" => profit_mode,
"quoteId" => quote_id,
"shortSelling" => short_selling,
"spreadRaw" => spread_raw,
"spreadTable" => spread_table,
"starting" => starting,
"stepRuleId" => step_rule_id,
"stopsLevel" => stops_level,
# NOTE: upstream really uses this snake-ish key, unlike the camelCase
# used for every other field in the payload.
"swap_rollover3days" => swap_rollover_3_days,
"swapEnable" => swap_enabled,
"swapLong" => swap_long,
"swapShort" => swap_short,
"swapType" => swap_type,
"symbol" => symbol,
"tickSize" => tick_size,
"tickValue" => tick_value,
"time" => time_value,
"timeString" => time_string,
"trailingEnabled" => trailing_enabled,
"type" => type
})
when is_number(ask) and is_number(bid) and
is_binary(category_name) and is_number(contract_size) and
is_binary(currency) and is_boolean(currency_pair) and is_binary(currency_profit) and
is_binary(description) and is_binary(group_name) and
is_number(high) and is_number(initial_margin) and is_number(instant_max_volume) and
is_number(leverage) and is_boolean(long_only) and
is_number(lot_max) and is_number(lot_min) and is_number(lot_step) and
is_number(low) and
is_number(margin_hedged) and is_boolean(margin_hedged_strong) and
is_number(margin_maintenance) and is_number(margin_mode) and
is_number(percentage) and is_number(pips_precision) and is_number(precision) and
is_number(profit_mode) and is_number(quote_id) and
is_boolean(short_selling) and is_number(spread_raw) and is_number(spread_table) and
is_number(step_rule_id) and is_number(stops_level) and
is_number(swap_rollover_3_days) and
is_boolean(swap_enabled) and is_number(swap_long) and is_number(swap_short) and
is_number(swap_type) and
is_binary(symbol) and is_number(tick_size) and is_number(tick_value) and
is_number(time_value) and is_binary(time_string) and
is_boolean(trailing_enabled) and is_number(type) do
%__MODULE__{
ask: ask,
bid: bid,
category_name: category_name,
contract_size: contract_size,
currency: currency,
currency_pair: currency_pair,
currency_profit: currency_profit,
description: description,
# NOTE(review): `expiration` and `starting` are passed through unconverted
# even though the typespec declares DateTime.t() | nil — confirm the wire
# format (the `time` field below IS converted from epoch milliseconds).
expiration: expiration,
group_name: group_name,
high: high,
initial_margin: initial_margin,
instant_max_volume: instant_max_volume,
leverage: leverage,
long_only: long_only,
lot_max: lot_max,
lot_min: lot_min,
lot_step: lot_step,
low: low,
margin_hedged: margin_hedged,
margin_hedged_strong: margin_hedged_strong,
margin_maintenance: margin_maintenance,
margin_mode: MarginMode.parse(margin_mode),
percentage: percentage,
pips_precision: pips_precision,
precision: precision,
profit_mode: ProfitMode.parse(profit_mode),
quote_id: QuoteId.parse(quote_id),
short_selling: short_selling,
spread_raw: spread_raw,
spread_table: spread_table,
starting: starting,
step_rule_id: step_rule_id,
stops_level: stops_level,
swap_rollover_3_days: swap_rollover_3_days,
swap_enable: swap_enabled,
swap_long: swap_long,
swap_short: swap_short,
swap_type: swap_type,
symbol: symbol,
tick_size: tick_size,
tick_value: tick_value,
# Wire value is epoch milliseconds; from_unix! raises on invalid input.
time: DateTime.from_unix!(time_value, :millisecond),
time_string: time_string,
trailing_enabled: trailing_enabled,
type: type
}
end
# Maps a "getSymbol" response payload onto a SymbolInfo struct.
def match("getSymbol", data), do: {:ok, __MODULE__.new(data)}

# Any other API method is not handled by this message module.
def match(_method, _data), do: {:no_match}
end
|
lib/xtb_client/messages/symbol_info.ex
| 0.890509
| 0.508605
|
symbol_info.ex
|
starcoder
|
defmodule Nostrum.Cache.UserCache do
  @moduledoc """
  Cache for users.
  The ETS table name associated with the User Cache is `:users`. Besides the
  methods provided below you can call any other ETS methods on the table.
  ## Example
  ```elixir
  info = :ets.info(:users)
  [..., heir: :none, name: :users, size: 1, ...]
  size = info[:size]
  1
  ```
  """

  alias Nostrum.Struct.User
  alias Nostrum.Util
  import Nostrum.Snowflake, only: [is_snowflake: 1]

  @doc ~s"""
  Retrieves a user from the cache by id.
  If successful, returns `{:ok, user}`. Otherwise, returns `{:error, reason}`.
  ## Example
  ```elixir
  case Nostrum.Cache.UserCache.get(1111222233334444) do
    {:ok, user} ->
      "We found " <> user.username
    {:error, _reason} ->
      "No es bueno"
  end
  ```
  """
  @spec get(User.id()) :: {:error, atom} | {:ok, User.t()}
  def get(id) when is_snowflake(id) do
    case lookup(id) do
      {:ok, user} -> {:ok, User.to_struct(user)}
      error -> error
    end
  end

  @doc """
  Same as `get/1`, but raises `Nostrum.Error.CacheError` in case of a failure.
  """
  @spec get!(User.id()) :: no_return | User.t()
  def get!(id) when is_snowflake(id), do: id |> get |> Util.bangify_find(id, __MODULE__)

  @doc false
  @spec create(map) :: User.t()
  def create(user) do
    :ets.insert(:users, {user.id, user})
    User.to_struct(user)
  end

  @doc false
  # FIX: this spec was previously declared as `@spec create([map]) :: :ok`,
  # which attached a bogus second spec to create/1 instead of documenting
  # bulk_create/1.
  @spec bulk_create([map]) :: :ok
  def bulk_create(members) do
    Enum.each(members, &:ets.insert(:users, {&1.user.id, &1.user}))
  end

  @doc false
  @spec update(map) :: :noop | {User.t(), User.t()}
  def update(info) do
    with {:ok, u} <- lookup(info.id),
         new_user = Map.merge(u, info),
         false <- u == new_user do
      :ets.insert(:users, {new_user.id, new_user})
      {User.to_struct(u), User.to_struct(new_user)}
    else
      {:error, _} ->
        # User just came online, make sure to cache if possible
        if Enum.all?([:username, :discriminator], &Map.has_key?(info, &1)),
          do: :ets.insert(:users, {info.id, info})

        :noop

      # Merge produced no change; nothing to write or report.
      true ->
        :noop
    end
  end

  @doc false
  @spec delete(User.id()) :: :noop | User.t()
  def delete(id) do
    case lookup(id) do
      {:ok, user} ->
        :ets.delete(:users, id)
        User.to_struct(user)

      _ ->
        :noop
    end
  end

  @doc false
  @spec lookup(User.id()) :: {:error, :user_not_found} | {:ok, map}
  def lookup(id) do
    case :ets.lookup(:users, id) do
      [] ->
        {:error, :user_not_found}

      [{^id, user}] ->
        {:ok, user}
    end
  end
end
|
lib/nostrum/cache/user_cache.ex
| 0.857887
| 0.745861
|
user_cache.ex
|
starcoder
|
defmodule Validatex.Validators do
@moduledoc """
This module provides a few functions for validating data.
All functions return [Result](https://hexdocs.pm/result/api-reference.html).
It means, if an input value is correct (i.e. the value satisfies to given validation),
function returns a tuple `{:ok, val}`. If not then `{:error, "msg"}`.
Usage:
Let say that you have a few input forms on your page, for instance: name,
surname, password etc. Now you want to validate whether the filled data are correct
for each field. So you create somewhere inside your project a module
[Validators](https://github.com/iodevs/validatex_example/blob/master/lib/server/validators.ex)
which will be containing any of functions like below.
Note:
Almost each function has a default error message. This message can be rewritten
according to your needs.
"""
alias Validatex.Validation
# Raw user input is always a string; error messages are strings too.
@type raw() :: String.t()
@type error_msg() :: String.t()
@doc """
Guard for verifying if `raw` is a string.
"""
defguard raw?(str) when is_binary(str)
@doc """
Guard for verifying if error `msg` is a string.
"""
defguard error_msg?(msg) when is_binary(msg)
@doc """
Validates that the input string is not empty.
## Example:
    # Validators.ex
    @spec name(String.t()) :: Result.t(String.t(), String.t())
    def name(value) do
      Validators.not_empty(value, "Name is required!")
    end
"""
@spec not_empty(raw(), error_msg()) :: Result.t(error_msg(), raw())
def not_empty(value, msg \\ "The value must not be an empty!")

def not_empty("", msg) when is_binary(msg), do: Result.error(msg)
def not_empty(value, _msg) when is_binary(value), do: Result.ok(value)
@doc """
Validates that the input string parses as an integer.
## Example:
    @spec score(String.t()) :: Result.t(String.t(), integer())
    def score(value) do
      Validators.integer(value, "The score has to be integer!")
    end
"""
@spec integer(raw(), error_msg()) :: Result.t(error_msg(), integer())
def integer(value, msg \\ "The value has to be an integer!")
    when raw?(value) and error_msg?(msg) do
  value
  |> not_empty()
  |> Result.map(&Integer.parse/1)
  |> Result.and_then(fn parsed -> to_result(parsed, msg) end)
end
@doc """
Validates that the input string parses as a float.
## Example:
    @spec temperature(String.t()) :: Result.t(String.t(), float())
    def temperature(value) do
      Validators.float(value, "The temperature has to be float!")
    end
"""
@spec float(raw(), error_msg()) :: Result.t(error_msg(), float())
def float(value, msg \\ "The value has to be a float!")
    when raw?(value) and error_msg?(msg) do
  value
  |> not_empty()
  |> Result.map(&Float.parse/1)
  |> Result.and_then(fn parsed -> to_result(parsed, msg) end)
end
@doc """
Validates that the input is a number strictly less than `req_val`.
The required value's type (integer or float) selects the cast used on
the raw input.
## Example:
    @spec count(String.t()) :: Result.t(String.t(), number())
    def count(value) do
      Validators.less_than(value, 10, "The value has to be less than 10!")
    end
"""
@spec less_than(raw(), number(), error_msg()) :: Result.t(error_msg(), number())
def less_than(value, req_val, msg \\ "The value has to be less than required value!")

def less_than(value, req_val, msg)
    when raw?(value) and is_integer(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &integer/1, &is_less_than/3)

def less_than(value, req_val, msg)
    when raw?(value) and is_float(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &float/1, &is_less_than/3)
@doc """
Validates that the input is a number less than or equal to `req_val`.
## Example:
    @spec total_count(String.t()) :: Result.t(String.t(), number())
    def total_count(value) do
      Validators.at_most(value, 10, "The value has to be less or equal to 10!")
    end
"""
@spec at_most(raw(), number(), error_msg()) :: Result.t(error_msg(), number())
def at_most(value, req_val, msg \\ "The value has to be less or equal to required value!")

def at_most(value, req_val, msg)
    when raw?(value) and is_integer(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &integer/1, &is_at_most/3)

def at_most(value, req_val, msg)
    when raw?(value) and is_float(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &float/1, &is_at_most/3)
@doc """
Validates that the input is a number strictly greater than `req_val`.
"""
@spec greater_than(raw(), number(), error_msg()) :: Result.t(error_msg(), number())
def greater_than(value, req_val, msg \\ "The value has to be greater than required value!")

def greater_than(value, req_val, msg)
    when raw?(value) and is_integer(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &integer/1, &is_greater_than/3)

def greater_than(value, req_val, msg)
    when raw?(value) and is_float(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &float/1, &is_greater_than/3)
@doc """
Validates that the input is a number greater than or equal to `req_val`.
"""
@spec at_least(raw(), number(), error_msg()) :: Result.t(error_msg(), number())
def at_least(value, req_val, msg \\ "The value has to be greater or equal to required value!")

def at_least(value, req_val, msg)
    when raw?(value) and is_integer(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &integer/1, &is_at_least/3)

def at_least(value, req_val, msg)
    when raw?(value) and is_float(req_val) and error_msg?(msg),
    do: validate(value, req_val, msg, &float/1, &is_at_least/3)
@doc """
Validates that the input is a number inside the closed interval
`[min, max]`. Both bounds must be integers, or both floats; that choice
selects the cast applied to the raw input. Unlike most validators in this
module, the error message has no default and must be supplied.
Typical use: validating a password length by converting the length to a
string and checking it against the allowed range.
"""
@spec in_range(raw(), number(), number(), error_msg()) ::
        Result.t(error_msg(), number())
def in_range(value, min, max, msg)
    when raw?(value) and is_integer(min) and is_integer(max) and error_msg?(msg) do
  value
  |> integer()
  |> Result.and_then(fn num -> is_in_range(num, min, max, msg) end)
end

def in_range(value, min, max, msg)
    when raw?(value) and is_float(min) and is_float(max) and error_msg?(msg) do
  value
  |> float()
  |> Result.and_then(fn num -> is_in_range(num, min, max, msg) end)
end
@doc """
Validates if the input value is equal to required value.
## Example:
    @spec captcha(String.t()) :: Result.t(String.t(), number())
    def captcha(value) do
      Validators.equal?(value, 10, "The summation has to be equal to 10!")
    end
"""
@spec equal?(raw(), number() | String.t(), error_msg()) ::
        Result.t(error_msg(), number() | String.t())
def equal?(value, req_val, msg \\ "The value has to be equal to required value!")

def equal?(value, req_val, msg)
    when raw?(value) and is_integer(req_val) and error_msg?(msg) do
  validate(value, req_val, msg, &integer/1, &is_equal_to/3)
end

def equal?(value, req_val, msg)
    when raw?(value) and is_float(req_val) and error_msg?(msg) do
  validate(value, req_val, msg, &float/1, &is_equal_to/3)
end

def equal?(value, req_val, msg)
    when raw?(value) and is_binary(req_val) and error_msg?(msg) do
  # FIX: the capture here had been mangled to "¬_empty/1" (an HTML
  # "&not;" encoding artifact eating "&not_"), which does not compile.
  # String comparison casts through not_empty/1.
  validate(value, req_val, msg, &not_empty/1, &is_equal_to/3)
end
@doc """
Validates that the input equals the already-validated value of another
field — e.g. a `password` form versus a `confirm_password` form.
## Example:
    @spec conf_password(Validation.field(any(), a)) :: (a -> Result.t(String.t(), a)) when a: var
    def conf_password(pass) do
      &Validators.equal_to?(&1, pass, "The passwords don't match!")
    end
"""
@spec equal_to?(a, Validation.field(any(), a), error_msg()) :: Result.t(error_msg, a)
      when a: var
def equal_to?(value, field, msg \\ "Fields do not match.")

def equal_to?(value, {:field, _raw, {:valid, valid_value}}, _msg) when value == valid_value,
  do: Result.ok(valid_value)

def equal_to?(_value, _field, msg), do: Result.error(msg)
@doc """
Validates a boolean: `true` passes through as `{:ok, true}`, `false`
yields `{:error, msg}`. Non-boolean input raises `FunctionClauseError`.
"""
@spec true?(boolean(), error_msg()) :: Result.t(error_msg(), true)
def true?(true, _msg), do: {:ok, true}
def true?(false, msg), do: {:error, msg}
@doc """
Validates membership of the input value in the required list.
"""
@spec in_list(a, [a], error_msg()) :: Result.t(error_msg(), a) when a: var
def in_list(value, list, msg \\ "The value has to be in list!")
    when is_list(list) and error_msg?(msg) do
  case value in list do
    true -> Result.ok(value)
    false -> Result.error(msg)
  end
end
@doc """
Validates complex input against a regular expression; returns the input
unchanged on a match, otherwise the error message. The error message has
no default and must be supplied.
Typical uses include date or e-mail validation, e.g.:
    @spec date(String.t()) :: Result.t(String.t(), Date.t())
    def date(value) do
      Validators.format(
        value,
        ~r/^\\d{1,2}\\.\\d{1,2}\\.(\\d{4})?$/,
        "Correct date is e.g. in format 11.12.1918 or 03.08.2008."
      )
    end
"""
@spec format(raw(), Regex.t(), error_msg()) :: Result.t(error_msg(), raw())
def format(value, %Regex{} = regex, msg) when raw?(value) and error_msg?(msg) do
  case Regex.match?(regex, value) do
    true -> {:ok, value}
    false -> {:error, msg}
  end
end
# Private helpers

# Casts the raw value with `cast` and, on success, applies the 3-arity
# comparator against the required value.
defp validate(value, req_val, msg, cast, fun) when is_function(fun, 3) do
  value
  |> cast.()
  |> Result.and_then(fn casted -> fun.(casted, req_val, msg) end)
end

# A complete parse (no trailing characters) succeeds; anything else fails.
defp to_result({val, ""}, _msg), do: Result.ok(val)
defp to_result(_, msg), do: Result.error(msg)

defp is_less_than(num, limit, _msg) when num < limit, do: Result.ok(num)
defp is_less_than(_num, _limit, msg), do: Result.error(msg)

defp is_at_most(num, limit, _msg) when num <= limit, do: Result.ok(num)
defp is_at_most(_num, _limit, msg), do: Result.error(msg)

defp is_greater_than(num, limit, _msg) when limit < num, do: Result.ok(num)
defp is_greater_than(_num, _limit, msg), do: Result.error(msg)

defp is_at_least(num, limit, _msg) when limit <= num, do: Result.ok(num)
defp is_at_least(_num, _limit, msg), do: Result.error(msg)

defp is_in_range(num, min, max, _msg) when min <= num and num <= max, do: Result.ok(num)
defp is_in_range(_num, _min, _max, msg), do: Result.error(msg)

defp is_equal_to(num, limit, _msg) when num == limit, do: Result.ok(num)
defp is_equal_to(_num, _limit, msg), do: Result.error(msg)
end
|
lib/validatex/validators.ex
| 0.921645
| 0.705297
|
validators.ex
|
starcoder
|
defmodule Absinthe.Blueprint do
@moduledoc """
Represents the graphql document to be executed.
Please see the code itself for more information on individual blueprint sub
modules.
"""
alias __MODULE__
# Blueprint state threaded through the compilation pipeline; the fields
# under "Added by phases" start empty and are filled in by phases.
defstruct [
operations: [],
types: [],
directives: [],
fragments: [],
name: nil,
schema: nil,
adapter: nil,
# Added by phases
flags: %{},
errors: [],
input: nil,
execution: %Blueprint.Execution{},
result: %{},
]
@type t :: %__MODULE__{
operations: [Blueprint.Document.Operation.t],
types: [Blueprint.Schema.t],
directives: [Blueprint.Schema.DirectiveDefinition.t],
name: nil | String.t,
fragments: [Blueprint.Document.Fragment.Named.t],
schema: nil | Absinthe.Schema.t,
adapter: nil | Absinthe.Adapter.t,
# Added by phases
errors: [Absinthe.Phase.Error.t],
flags: flags_t,
execution: Blueprint.Execution.t,
result: result_t,
}
@type result_t :: %{
optional(:data) => term,
optional(:errors) => [term],
optional(:extensions) => term,
}
# Any node type that can appear in a blueprint tree.
@type node_t ::
Blueprint.t
| Blueprint.Directive.t
| Blueprint.Document.t
| Blueprint.Schema.t
| Blueprint.Input.t
| Blueprint.TypeReference.t
@type use_t ::
Blueprint.Document.Fragment.Named.Use.t
| Blueprint.Input.Variable.Use.t
@type flags_t :: %{atom => module}
# Tree traversal is implemented in Absinthe.Blueprint.Transform.
defdelegate prewalk(blueprint, fun), to: Absinthe.Blueprint.Transform
defdelegate prewalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform
defdelegate postwalk(blueprint, fun), to: Absinthe.Blueprint.Transform
defdelegate postwalk(blueprint, acc, fun), to: Absinthe.Blueprint.Transform
@doc """
Depth-first search of the blueprint tree; returns the first node for
which `fun` is truthy, or `nil` when no node matches.
"""
def find(blueprint, fun) do
  {_tree, result} =
    Blueprint.prewalk(blueprint, nil, fn
      node, nil -> if fun.(node), do: {node, node}, else: {node, nil}
      # A match was already recorded; pass it through unchanged.
      node, found -> {node, found}
    end)

  result
end
@doc """
Returns the named fragment from the blueprint, or `nil` if absent.
"""
@spec fragment(t, String.t) :: nil | Blueprint.Document.Fragment.Named.t
def fragment(blueprint, name) do
  Enum.find(blueprint.fragments, fn fragment -> fragment.name == name end)
end
@doc """
Add a flag to a node, recording which module set it.
"""
@spec put_flag(node_t, atom, module) :: node_t
def put_flag(node, flag, mod) do
  %{node | flags: Map.put(node.flags, flag, mod)}
end
@doc """
Determine whether a flag has been set on a node.
"""
@spec flagged?(node_t, atom) :: boolean
def flagged?(node, flag) do
  match?(%{^flag => _}, node.flags)
end
@doc """
Get the currently selected operation, or `nil` when none is marked current.
"""
@spec current_operation(t) :: nil | Blueprint.Document.Operation.t
def current_operation(blueprint) do
  Enum.find(blueprint.operations, fn op -> op.current == true end)
end
@doc """
Apply `change` to the operation marked `current: true`; all other
operations pass through untouched.
"""
@spec update_current(t, (Blueprint.Document.Operation.t -> Blueprint.Document.Operation.t)) :: t
def update_current(blueprint, change) do
  updated =
    for op <- blueprint.operations do
      case op do
        %{current: true} -> change.(op)
        other -> other
      end
    end

  %{blueprint | operations: updated}
end
end
|
deps/absinthe/lib/absinthe/blueprint.ex
| 0.600305
| 0.435121
|
blueprint.ex
|
starcoder
|
defmodule Collision.Vector.Vector2 do
@moduledoc """
Two-dimensional vectors used in detecting collisions.
"""
defstruct x: 0.0, y: 0.0
alias Collision.Vector.Vector2

# FIX: the type was previously declared `@type t :: Vector2.t`, which is
# self-referential and carries no structure; spell out the struct shape.
@type t :: %Vector2{x: float, y: float}
@doc """
Convert a tuple to a vector.
## Examples
    iex> Collision.Vector.Vector2.from_tuple({1.0, 1.5})
    %Collision.Vector.Vector2{x: 1.0, y: 1.5}
"""
@spec from_tuple({float, float}) :: t
def from_tuple({x, y}) do
  %Vector2{x: x, y: y}
end
@doc """
Build the vector pointing from the first point to the second.
Accepts `x`/`y` maps or two-element tuples.
## Examples
    iex> Collision.Vector.Vector2.from_points(%{x: 5, y: 3}, %{x: 10, y: 6})
    %Collision.Vector.Vector2{x: 5, y: 3}
"""
def from_points(%{x: x1, y: y1}, %{x: x2, y: y2}), do: %Vector2{x: x2 - x1, y: y2 - y1}
def from_points({x1, y1}, {x2, y2}), do: %Vector2{x: x2 - x1, y: y2 - y1}
@doc """
Right normal of a vector: `(x, y)` becomes `(-y, x)`.
## Examples
    iex> Collision.Vector.Vector2.right_normal(
    ...>   %Collision.Vector.Vector2{x: 3.0, y: 4.0}
    ...> )
    %Collision.Vector.Vector2{x: -4.0, y: 3.0}
"""
@spec right_normal(t) :: t
def right_normal(%Vector2{x: x, y: y}), do: %Vector2{x: -y, y: x}
@doc """
Left normal of a vector: `(x, y)` becomes `(y, -x)`.
This is the equivalent of a cross product of a single 2D vector.
## Examples
    iex> Collision.Vector.Vector2.left_normal(
    ...>   %Collision.Vector.Vector2{x: 3.0, y: 4.0}
    ...> )
    %Collision.Vector.Vector2{x: 4.0, y: -3.0}
"""
@spec left_normal(t) :: t
def left_normal(%Vector2{x: x, y: y}), do: %Vector2{x: y, y: -x}
@doc """
Per (perpendicular) product of a pair of 2D vectors: the dot product of
the first vector with the right normal of the second.
## Examples
    iex> Collision.Vector.Vector2.per_product(
    ...>   %Collision.Vector.Vector2{x: 3.0, y: 4.0},
    ...>   %Collision.Vector.Vector2{x: -1.0, y: 2.0}
    ...> )
    -10.0
"""
@spec per_product(t, t) :: float
def per_product(%Vector2{} = a, %Vector2{} = b) do
  Vector.dot_product(a, right_normal(b))
end
defimpl Vector, for: Vector2 do
  @type t :: Vector2.t
  @type scalar :: float

  @spec to_tuple(t) :: {float, float}
  def to_tuple(%Vector2{x: x, y: y}), do: {x, y}

  # Round both components to `places` decimal digits.
  @spec round_components(t, integer) :: t
  def round_components(%Vector2{x: x, y: y}, places) do
    %Vector2{x: Float.round(x, places), y: Float.round(y, places)}
  end

  @spec scalar_mult(t, scalar) :: t
  def scalar_mult(%Vector2{x: x, y: y}, factor), do: %Vector2{x: x * factor, y: y * factor}

  @spec add(t, t) :: t
  def add(%Vector2{x: ax, y: ay}, %Vector2{x: bx, y: by}), do: %Vector2{x: ax + bx, y: ay + by}

  @spec subtract(t, t) :: t
  def subtract(%Vector2{x: ax, y: ay}, %Vector2{x: bx, y: by}) do
    %Vector2{x: ax - bx, y: ay - by}
  end

  @spec magnitude(t) :: float
  def magnitude(%Vector2{} = vec), do: vec |> magnitude_squared() |> :math.sqrt()

  @spec magnitude_squared(t) :: float
  def magnitude_squared(%Vector2{} = vec), do: dot_product(vec, vec)

  # Scale to unit length; raises ArithmeticError for the zero vector.
  @spec normalize(t) :: t
  def normalize(%Vector2{x: x, y: y} = vec) do
    len = magnitude(vec)
    %Vector2{x: x / len, y: y / len}
  end

  @spec dot_product(t, t) :: float
  def dot_product(%Vector2{x: ax, y: ay}, %Vector2{x: bx, y: by}), do: ax * bx + ay * by

  @spec projection(t, t) :: t
  def projection(%Vector2{} = vec, %Vector2{} = onto) do
    Vector.scalar_mult(onto, dot_product(vec, onto) / magnitude_squared(onto))
  end

  # The "cross product" of a single 2D vector is its right normal.
  @spec cross_product(t, t) :: t
  def cross_product(%Vector2{} = vec, _other), do: Vector2.right_normal(vec)
end
defimpl String.Chars, for: Vector2 do
  # Human-readable rendering; the "%Vector" prefix is kept verbatim for
  # output compatibility.
  def to_string(%Vector2{x: x, y: y}), do: "%Vector{x: #{x}, y: #{y}}"
end
end
|
lib/collision/vector/vector2.ex
| 0.927683
| 0.991505
|
vector2.ex
|
starcoder
|
defmodule EverythingLocation do
@moduledoc ~S"""
Everything Location is as SAAS that can verify the correctness of and address, update or correct values, and return a certainty of those values.
This will also return a formatted address according to the rules of the country of that address.
For more information see : https://www.everythinglocation.com
## Example
iex> EverythingLocation.verify("Bastion 16, 3823 BP, Amersfoort, Netherlands")
%EverythingLocation.Result{
administrative_area: "Utrecht",
certainty_percentage: 100,
city: "Amersfoort",
country: "Netherlands",
country_alpha2: "NL",
country_alpha3: "NLD",
formatted_address: "Bastion 16\n3823 BP Amersfoort\nNetherlands",
postal_code: "3823 BP",
street: "Bastion 16",
verified: true
}
"""
# API endpoint pieces for the address-verification call.
@base_url "https://api.everythinglocation.com"
@address_verify_path "/address/verify"
alias EverythingLocation.Options
alias EverythingLocation.Result
@doc """
Pass an address as string to update/complete it and verify its correctness.
"""
@spec verify(string) :: {:ok, %EverythingLocation.Result{}} | {:error, string}
def verify(string) when is_binary(string) and string != "" do
%{address1: string}
|> verify
end
@doc """
Pass a Map of options to update/complete and verify its correctness.
For accepted options, please refer to %EverythingLocation.Options{}.
You must pass the options as a map, for example ```%{api_key: "my_api_key", address1: "my address"}```
"""
@spec verify(map) :: {:ok, %EverythingLocation.Result{}} | {:error, string}
def verify(options) when is_map(options) do
result = options
|> insert_api_key_if_missing
|> create_changeset
|> apply_changes
|> Options.create
|> request
|> handle_request_result
|> format_result
end
def verify(_), do: {:error, "Invalid input"}
defp format_result({:ok, result}), do: result |> Result.create
defp format_result(error), do: error
defp insert_api_key_if_missing(options) do
Map.put_new(options, :api_key, Application.get_env(:everything_location, :api_key))
end
defp create_changeset(options) do
Options.changeset(%Options{}, options)
end
defp request({:error, _} = error), do: error
defp request(%{} = options) do
options = [
body: Poison.encode!(options),
headers: ["Accept": "application/json", "Content-Type": "application/json"]
]
try do
HTTPotion.request :post, @base_url <> @address_verify_path, options
rescue
e -> {:error, e}
end
end
defp handle_request_result(%HTTPotion.Response{status_code: 200, body: body} = _response) do
%{"Status" => "OK", "output" => [address_result|_]} = Poison.decode!(body)
{:ok, address_result}
end
defp handle_request_result({:error, _} = error), do: error
defp handle_request_result(%HTTPotion.HTTPError{} = error), do: {:error, error}
defp handle_request_result(%HTTPotion.Response{body: body}) do
{:ok, result} = Poison.decode(body)
{:error, result["Error"]}
end
defp apply_changes(%{valid?: false} = changeset), do: {:error, changeset.errors}
defp apply_changes(%{valid?: true} = changeset) do
changeset |> Ecto.Changeset.apply_changes
end
end
|
lib/everything_location.ex
| 0.714827
| 0.755231
|
everything_location.ex
|
starcoder
|
defmodule Harald.HCI.LEController do
  @moduledoc """
  > The LE Controller Commands provide access and control to various capabilities of the Bluetooth
  > hardware, as well as methods for the Host to affect how the Link Layer manages the piconet,
  > and controls connections.

  Reference: Version 5.0, Vol 2, Part E, 7.8
  """

  alias Harald.HCI

  # OpCode Group Field shared by every LE Controller command.
  @ogf 0x08

  # Defaults according to the Bluetooth Core Spec v5.
  @scan_parameter_defaults [
    le_scan_type: 0x00,
    le_scan_interval: 0x0010,
    le_scan_window: 0x0010,
    own_address_type: 0x00,
    scanning_filter_policy: 0x00
  ]

  @doc """
  > The LE_Set_Scan_Enable command is used to start scanning. Scanning is used to discover
  > advertising devices nearby.
  >
  > The Filter_Duplicates parameter controls whether the Link Layer should filter out duplicate
  > advertising reports (Filtering_Enabled) to the Host, or if the Link Layer should generate
  > advertising reports for each packet received (Filtering_Disabled). See [Vol 6] Part B, Section
  > 4.4.3.5.
  >
  > If the scanning parameters' Own_Address_Type parameter is set to 0x01 or 0x03 and the random
  > address for the device has not been initialized, the Controller shall return the error code
  > Invalid HCI Command Parameters (0x12).
  >
  > If the LE_Scan_Enable parameter is set to 0x01 and scanning is already enabled, any change to
  > the Filter_Duplicates setting shall take effect. Note: Disabling scanning when it is disabled
  > has no effect.

  Reference: Version 5.0, Vol 2, Part E, 7.8.11

      iex> set_enable_scan(true)
      <<12, 32, 2, 1, 0>>

      iex> set_enable_scan(false)
      <<12, 32, 2, 0, 0>>
  """
  @spec set_enable_scan(HCI.opt(), HCI.opt()) :: HCI.command()
  def set_enable_scan(enable, filter_duplicates \\ false) do
    opcode = HCI.opcode(@ogf, 0x000C)
    HCI.command(opcode, [enable, filter_duplicates])
  end

  @doc """
  > The LE_Set_Scan_Parameters command is used to set the scan parameters. The LE_Scan_Type
  > parameter controls the type of scan to perform.
  >
  > The LE_Scan_Interval and LE_Scan_Window parameters are recommendations from the Host on how
  > long (LE_Scan_Window) and how frequently (LE_Scan_Interval) the Controller should scan
  > (See [Vol 6] Part B, Section 4.5.3). The LE_Scan_Window parameter shall always be set to a
  > value smaller or equal to the value set for the LE_Scan_Interval parameter. If they are set to
  > the same value scanning should be run continuously.
  >
  > Own_Address_Type parameter indicates the type of address being used in the scan request
  > packets.
  >
  > The Host shall not issue this command when scanning is enabled in the Controller; if it is the
  > Command Disallowed error code shall be used.

  Reference: Version 5.0, Vol 2, Part E, 7.8.10

      iex> set_scan_parameters(le_scan_type: 0x01)
      <<11, 32, 7, 1, 16, 0, 16, 0, 0, 0>>

      iex> set_scan_parameters(
      iex>   le_scan_type: 0x01,
      iex>   le_scan_interval: 0x0004,
      iex>   le_scan_window: 0x0004,
      iex>   own_address_type: 0x01,
      iex>   scanning_filter_policy: 0x01
      iex> )
      <<11, 32, 7, 1, 4, 0, 4, 0, 1, 1>>
  """
  @spec set_scan_parameters(keyword) :: HCI.command()
  def set_scan_parameters(new_params) do
    # Caller-supplied values win over the spec defaults.
    params = Keyword.merge(@scan_parameter_defaults, new_params)

    # Intervals and windows are 16-bit little-endian on the wire.
    payload = <<
      Keyword.fetch!(params, :le_scan_type),
      Keyword.fetch!(params, :le_scan_interval)::16-little,
      Keyword.fetch!(params, :le_scan_window)::16-little,
      Keyword.fetch!(params, :own_address_type),
      Keyword.fetch!(params, :scanning_filter_policy)
    >>

    opcode = HCI.opcode(@ogf, 0x000B)
    HCI.command(opcode, payload)
  end
end
|
lib/harald/hci/le_controller.ex
| 0.767777
| 0.610221
|
le_controller.ex
|
starcoder
|
defmodule ExFix do
  @moduledoc """
  Elixir implementation of FIX Session Protocol FIXT.1.1.
  Currently only supports FIX session initiator (buy side).

  ## Usage

  ```
  defmodule MySessionHandler do
    @behaviour ExFix.SessionHandler
    require Logger

    alias ExFix.InMessage
    alias ExFix.OutMessage
    alias ExFix.Parser

    @msg_new_order_single "D"

    @field_account "1"
    @field_cl_ord_id "11"
    @field_order_qty "38"
    @field_ord_type "40"
    @field_price "44"
    @field_side "54"
    @field_symbol "55"
    @field_transact_time "60"

    @value_side_buy "1"
    @value_ord_type_limit "2"

    def on_logon(session_name, _env) do
      spawn fn() ->
        \#\# Buy 10 shares of SYM1 for $1.23 per share
        @msg_new_order_single
        |> OutMessage.new()
        |> OutMessage.set_field(@field_account, 1234)
        |> OutMessage.set_field(@field_cl_ord_id, "cod12345")
        |> OutMessage.set_field(@field_order_qty, 10)
        |> OutMessage.set_field(@field_ord_type, @value_ord_type_limit)
        |> OutMessage.set_field(@field_price, 1.23)
        |> OutMessage.set_field(@field_side, @value_side_buy)
        |> OutMessage.set_field(@field_symbol, "SYM1")
        |> OutMessage.set_field(@field_transact_time, DateTime.utc_now())
        |> ExFix.send_message!(session_name)
      end
    end

    def on_app_message(_session_name, _msg_type, %InMessage{} = msg, _env) do
      Logger.info "App msg received: \#{inspect Parser.parse2(msg)}"
    end

    def on_session_message(_session_name, _msg_type, %InMessage{} = msg, _env) do
      Logger.info "Session msg received: \#{inspect Parser.parse2(msg)}"
    end

    def on_logout(_session_id, _env), do: :ok
  end

  ExFix.start_session_initiator("simulator", "BUY", "SELL", MySessionHandler,
    hostname: "localhost", port: 9876, username: "user1", password: "secret",
    transport_mod: :ssl)
  ```
  """

  alias ExFix.Session
  alias ExFix.SessionConfig
  alias ExFix.SessionRegistry
  alias ExFix.SessionWorker
  alias ExFix.OutMessage
  alias ExFix.SessionHandler

  # NOTE(review): these are resolved at compile time; changing the application
  # environment at runtime has no effect on them. Callers can still override
  # the registry per call via the :session_registry option.
  @default_dictionary Application.get_env(:ex_fix, :default_dictionary, ExFix.DefaultDictionary)
  @session_registry Application.get_env(:ex_fix, :session_registry, ExFix.DefaultSessionRegistry)

  @doc """
  Starts FIX session initiator
  """
  @spec start_session_initiator(String.t(), String.t(), String.t(), SessionHandler, list()) :: :ok
  def start_session_initiator(
        session_name,
        sender_comp_id,
        target_comp_id,
        session_handler,
        opts \\ []
      ) do
    # Merge caller options over the defaults into a map.
    opts =
      Enum.into(opts, %{
        hostname: "localhost",
        port: 9876,
        username: nil,
        # No credentials by default; pass username/password to authenticate.
        # (Fix: the original line held an unparseable `<PASSWORD>` placeholder.)
        password: nil,
        dictionary: @default_dictionary,
        log_incoming_msg: true,
        log_outgoing_msg: true,
        default_applverid: "9",
        logon_encrypt_method: "0",
        heart_bt_int: 60,
        max_output_buf_count: 1_000,
        reconnect_interval: 15,
        reset_on_logon: true,
        validate_incoming_message: true,
        transport_mod: :gen_tcp,
        transport_options: [],
        time_service: nil,
        env: %{}
      })

    config =
      struct(
        %SessionConfig{
          name: session_name,
          mode: :initiator,
          sender_comp_id: sender_comp_id,
          target_comp_id: target_comp_id,
          session_handler: session_handler
        },
        opts
      )

    session_registry = opts[:session_registry] || @session_registry
    session_registry.start_session(session_name, config)
  end

  @doc """
  Send FIX message to a session
  """
  @spec send_message!(OutMessage.t(), Session.session_name()) :: :ok | no_return
  def send_message!(out_message, session_name) do
    SessionWorker.send_message!(session_name, out_message)
  end

  @doc """
  Stop FIX session
  """
  @spec stop_session(Session.session_name(), SessionRegistry | nil) :: :ok | no_return
  def stop_session(session_name, registry \\ nil) do
    session_registry = registry || @session_registry
    session_registry.stop_session(session_name)
  end
end
|
lib/ex_fix.ex
| 0.524395
| 0.527742
|
ex_fix.ex
|
starcoder
|
defmodule Rajska.RateLimiter do
  @moduledoc """
  Rate limiter absinthe middleware. Uses [Hammer](https://github.com/ExHammer/hammer).

  ## Usage

  First configure Hammer, following its documentation. For example:

      config :hammer,
        backend: {Hammer.Backend.ETS, [expiry_ms: 60_000 * 60 * 4,
                                       cleanup_interval_ms: 60_000 * 10]}

  Add your middleware to the query that should be limited:

      field :default_config, :string do
        middleware Rajska.RateLimiter
        resolve fn _, _ -> {:ok, "ok"} end
      end

  You can also configure it and use multiple rules for limiting in one query:

      field :login_user, :session do
        arg :email, non_null(:string)
        arg :password, non_null(:string)

        middleware Rajska.RateLimiter, limit: 10 # Using the default identifier (user IP)
        middleware Rajska.RateLimiter, keys: :email, limit: 5 # Using the value provided in the email arg
        resolve &AccountsResolver.login_user/2
      end

  The allowed configuration are:

  * `scale_ms`: The timespan for the maximum number of actions. Defaults to 60_000.
  * `limit`: The maximum number of actions in the specified timespan. Defaults to 10.
  * `id`: An atom or string to be used as the bucket identifier. Note that this will always be the same, so by using this the limit will be global instead of by user.
  * `keys`: An atom or a list of atoms to get a query argument as identifier. Use a list when the argument is nested.
  * `error_msg`: The error message to be displayed when rate limit exceeds. Defaults to `"Too many requests"`.

  Note that when neither `id` or `keys` is provided, the default is to use the user's IP. For that, the default behaviour is to use
  `c:Rajska.Authorization.get_ip/1` to fetch the IP from the absinthe context. That means you need to manually insert the user's IP in the
  absinthe context before using it as an identifier. See the [absinthe docs](https://hexdocs.pm/absinthe/context-and-authentication.html#content)
  for more information.
  """
  @behaviour Absinthe.Middleware

  alias Absinthe.Resolution

  # Fields that have already been resolved pass through untouched.
  def call(%Resolution{state: :resolved} = resolution, _config), do: resolution

  def call(%Resolution{} = resolution, config) do
    scale_ms = Keyword.get(config, :scale_ms, 60_000)
    limit = Keyword.get(config, :limit, 10)
    error_msg = Keyword.get(config, :error_msg, "Too many requests")

    # Bucket identifier: explicit :id, an argument value via :keys, or the
    # caller's IP from the absinthe context when neither is configured.
    bucket_id = get_identifier(resolution, config[:keys], config[:id])

    case Hammer.check_rate("query:#{bucket_id}", scale_ms, limit) do
      {:allow, _count} ->
        resolution

      {:deny, _limit} ->
        Resolution.put_result(resolution, {:error, error_msg})
    end
  end

  defp get_identifier(%Resolution{} = resolution, keys, id) do
    case {keys, id} do
      {nil, nil} ->
        # Default: resolve the user's IP from the absinthe context.
        Rajska.apply_auth_mod(resolution.context, :get_ip, [resolution.context])

      {keys, nil} ->
        # Look the identifier up in the (possibly nested) query arguments.
        get_in(resolution.arguments, List.wrap(keys)) ||
          raise "Invalid configuration in Rate Limiter. Key not found in arguments."

      {nil, id} ->
        id

      _ ->
        raise "Invalid configuration in Rate Limiter. If key is defined, then id must not be defined"
    end
  end
end
|
lib/middlewares/rate_limiter.ex
| 0.796767
| 0.596815
|
rate_limiter.ex
|
starcoder
|
defmodule EVM.Block.Header do
  @moduledoc """
  This structure codifies the header of a block in the blockchain.
  """

  # Root hash of an empty Merkle Patricia trie; default for the state,
  # transactions and receipts roots of a fresh header.
  @empty_trie MerklePatriciaTree.Trie.empty_trie_root_hash()
  # Keccak-256 hash of the RLP encoding of the empty list; default ommers hash.
  @empty_keccak [] |> ExRLP.encode() |> :keccakf1600.sha3_256()

  # The inline comments give the Yellow Paper symbol for each field.
  defstruct parent_hash: nil,
            # Hp P(BH)Hr
            # Ho KEC(RLP(L∗H(BU)))
            ommers_hash: @empty_keccak,
            # Hc
            beneficiary: nil,
            # Hr TRIE(LS(Π(σ, B)))
            state_root: @empty_trie,
            # Ht TRIE({∀i < kBTk, i ∈ P : p(i, LT (BT[i]))})
            transactions_root: @empty_trie,
            # He TRIE({∀i < kBRk, i ∈ P : p(i, LR(BR[i]))})
            receipts_root: @empty_trie,
            # Hb bloom
            logs_bloom: <<0::2048>>,
            # Hd
            difficulty: nil,
            # Hi
            number: nil,
            # Hl
            gas_limit: 0,
            # Hg
            gas_used: 0,
            # Hs
            timestamp: nil,
            # Hx
            extra_data: <<>>,
            # Hm
            mix_hash: nil,
            # Hn
            nonce: nil

  # As defined in Eq.(35)
  @type t :: %__MODULE__{
          parent_hash: EVM.hash(),
          ommers_hash: EVM.trie_root(),
          beneficiary: EVM.address(),
          state_root: EVM.trie_root(),
          transactions_root: EVM.trie_root(),
          receipts_root: EVM.trie_root(),
          # TODO
          logs_bloom: binary(),
          difficulty: integer() | nil,
          number: integer() | nil,
          gas_limit: EVM.val(),
          gas_used: EVM.val(),
          timestamp: EVM.timestamp() | nil,
          extra_data: binary(),
          mix_hash: EVM.hash() | nil,
          # TODO: 64-bit hash?
          nonce: <<_::64>> | nil
        }

  # The start of the Homestead block, as defined in Eq.(13) of the Yellow Paper (N_H)
  @homestead_block 1_150_000
  # d_0 from Eq.(40)
  @initial_difficulty 131_072
  # Mimics d_0 in Eq.(39), but variable on different chains
  @minimum_difficulty @initial_difficulty
  @difficulty_bound_divisor 2048
  # Eq.(58)
  @max_extra_data_bytes 32
  # Constant from Eq.(45) and Eq.(46)
  @gas_limit_bound_divisor 1024
  # Eq.(47)
  @min_gas_limit 125_000

  @doc """
  Returns the block that defines the start of Homestead.

  This should be a constant, but it's configurable on different
  chains, and as such, we allow you to pass that configuration
  variable (which ends up making this the identity function, if so).
  """
  @spec homestead(integer()) :: integer()
  def homestead(homestead_block \\ @homestead_block), do: homestead_block

  @doc """
  This function encodes a header into a value that can
  be RLP encoded. This is defined as L_H Eq.(32) in the Yellow Paper.

  ## Examples

      iex> EVM.Block.Header.serialize(%EVM.Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>})
      [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<7::256>>, <<8::64>>]
  """
  @spec serialize(t) :: ExRLP.t()
  def serialize(h) do
    [
      h.parent_hash,
      h.ommers_hash,
      h.beneficiary,
      h.state_root,
      h.transactions_root,
      h.receipts_root,
      h.logs_bloom,
      h.difficulty,
      # The genesis block serializes number and gas_used as empty binaries.
      if(h.number == 0, do: <<>>, else: h.number),
      h.gas_limit,
      if(h.number == 0, do: <<>>, else: h.gas_used),
      h.timestamp,
      h.extra_data,
      h.mix_hash,
      h.nonce
    ]
  end

  @doc """
  Deserializes a block header from an RLP encodable structure.
  This effectively undoes the encoding defined in L_H Eq.(32) of the
  Yellow Paper.

  ## Examples

      iex> EVM.Block.Header.deserialize([<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>])
      %EVM.Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}
  """
  @spec deserialize(ExRLP.t()) :: t
  def deserialize(rlp) do
    [
      parent_hash,
      ommers_hash,
      beneficiary,
      state_root,
      transactions_root,
      receipts_root,
      logs_bloom,
      difficulty,
      number,
      gas_limit,
      gas_used,
      timestamp,
      extra_data,
      mix_hash,
      nonce
    ] = rlp

    # Numeric fields arrive as unsigned big-endian binaries from RLP.
    %__MODULE__{
      parent_hash: parent_hash,
      ommers_hash: ommers_hash,
      beneficiary: beneficiary,
      state_root: state_root,
      transactions_root: transactions_root,
      receipts_root: receipts_root,
      logs_bloom: logs_bloom,
      difficulty: :binary.decode_unsigned(difficulty),
      number: :binary.decode_unsigned(number),
      gas_limit: :binary.decode_unsigned(gas_limit),
      gas_used: :binary.decode_unsigned(gas_used),
      timestamp: :binary.decode_unsigned(timestamp),
      extra_data: extra_data,
      mix_hash: mix_hash,
      nonce: nonce
    }
  end

  @doc """
  Computes hash of a block header, which is simply the hash of the serialized block header.

  This is defined in Eq.(37) of the Yellow Paper.

  ## Examples

      iex> %EVM.Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
      ...> |> EVM.Block.Header.hash()
      <<78, 28, 127, 10, 192, 253, 127, 239, 254, 179, 39, 34, 245, 44, 152, 98, 128, 71, 238, 155, 100, 161, 199, 71, 243, 223, 172, 191, 74, 99, 128, 63>>

      iex> %EVM.Block.Header{number: 0, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
      ...> |> EVM.Block.Header.hash()
      <<218, 225, 46, 241, 196, 160, 136, 96, 109, 216, 73, 167, 92, 174, 91, 228, 85, 112, 234, 129, 99, 200, 158, 61, 223, 166, 165, 132, 187, 24, 142, 193>>
  """
  @spec hash(t) :: EVM.hash()
  def hash(header) do
    header |> serialize() |> ExRLP.encode() |> :keccakf1600.sha3_256()
  end

  @doc """
  Returns true if a given block is before the
  Homestead block.

  ## Examples

      iex> EVM.Block.Header.is_before_homestead?(%EVM.Block.Header{number: 5})
      true

      iex> EVM.Block.Header.is_before_homestead?(%EVM.Block.Header{number: 5_000_000})
      false

      iex> EVM.Block.Header.is_before_homestead?(%EVM.Block.Header{number: 1_150_000})
      false

      iex> EVM.Block.Header.is_before_homestead?(%EVM.Block.Header{number: 5}, 6)
      true

      iex> EVM.Block.Header.is_before_homestead?(%EVM.Block.Header{number: 5}, 4)
      false
  """
  @spec is_before_homestead?(t, integer()) :: boolean()
  def is_before_homestead?(h, homestead_block \\ @homestead_block) do
    h.number < homestead_block
  end

  @doc """
  Returns true if a given block is at or after the
  Homestead block.

  ## Examples

      iex> EVM.Block.Header.is_after_homestead?(%EVM.Block.Header{number: 5})
      false

      iex> EVM.Block.Header.is_after_homestead?(%EVM.Block.Header{number: 5_000_000})
      true

      iex> EVM.Block.Header.is_after_homestead?(%EVM.Block.Header{number: 1_150_000})
      true

      iex> EVM.Block.Header.is_after_homestead?(%EVM.Block.Header{number: 5}, 6)
      false
  """
  @spec is_after_homestead?(t, integer()) :: boolean()
  def is_after_homestead?(h, homestead_block \\ @homestead_block),
    do: not is_before_homestead?(h, homestead_block)

  @doc """
  Returns true if the block header is valid. This defines
  Eq.(50), Eq.(51), Eq.(52), Eq.(53), Eq.(54), Eq.(55),
  Eq.(56), Eq.(57) and Eq.(58) of the Yellow Paper, commonly
  referred to as V(H).

  # TODO: Add proof of work check

  ## Examples

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000}, nil)
      :valid

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 0, difficulty: 5, gas_limit: 5}, nil, true)
      {:invalid, [:invalid_difficulty, :invalid_gas_limit]}

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 65}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55})
      :valid

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 1, difficulty: 131_000, gas_limit: 200_000, timestamp: 65}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55}, true)
      {:invalid, [:invalid_difficulty]}

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 45}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55})
      {:invalid, [:child_timestamp_invalid]}

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 1, difficulty: 131_136, gas_limit: 300_000, timestamp: 65}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55})
      {:invalid, [:invalid_gas_limit]}

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 2, difficulty: 131_136, gas_limit: 200_000, timestamp: 65}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55})
      {:invalid, [:child_number_invalid]}

      iex> EVM.Block.Header.is_valid?(%EVM.Block.Header{number: 1, difficulty: 131_136, gas_limit: 200_000, timestamp: 65, extra_data: "0123456789012345678901234567890123456789"}, %EVM.Block.Header{number: 0, difficulty: 131_072, gas_limit: 200_000, timestamp: 55})
      {:invalid, [:extra_data_too_large]}

  # TODO: Add tests for setting homestead_block
  # TODO: Add tests for setting initial_difficulty
  # TODO: Add tests for setting minimum_difficulty
  # TODO: Add tests for setting difficulty_bound_divisor
  # TODO: Add tests for setting gas_limit_bound_divisor
  # TODO: Add tests for setting min_gas_limit
  """
  @spec is_valid?(t, t | nil, integer(), integer(), integer(), integer(), integer(), integer()) ::
          :valid | {:invalid, [atom()]}
  def is_valid?(
        header,
        parent_header,
        homestead_block \\ @homestead_block,
        initial_difficulty \\ @initial_difficulty,
        minimum_difficulty \\ @minimum_difficulty,
        difficulty_bound_divisor \\ @difficulty_bound_divisor,
        gas_limit_bound_divisor \\ @gas_limit_bound_divisor,
        min_gas_limit \\ @min_gas_limit
      ) do
    # The genesis block has no parent, so it has no parent gas limit either.
    parent_gas_limit = if parent_header, do: parent_header.gas_limit, else: nil

    # Each check_* helper returns a (possibly empty) list of error atoms;
    # the lists are concatenated below and an empty result means :valid.
    # Eq.(51)
    # Eq.(52)
    # Eq.(53), Eq.(54) and Eq.(55)
    # Eq.(56)
    # Eq.(57)
    difficulty =
      get_difficulty(
        header,
        parent_header,
        initial_difficulty,
        minimum_difficulty,
        difficulty_bound_divisor,
        homestead_block
      )

    difficulty_errors = check_difficulty(difficulty, header.difficulty)
    gas_errors = check_gas(header.gas_used, header.gas_limit)

    gas_limit_errors =
      if(
        is_gas_limit_valid?(
          header.gas_limit,
          parent_gas_limit,
          gas_limit_bound_divisor,
          min_gas_limit
        ),
        do: [],
        else: [:invalid_gas_limit]
      )

    header_timestamps_errors = check_header_timestamps(parent_header, header)
    header_errors = check_header(parent_header, header)
    extra_data_size_errors = check_extra_data_size(header.extra_data)

    errors =
      difficulty_errors ++
        gas_errors ++
        gas_limit_errors ++ header_timestamps_errors ++ header_errors ++ extra_data_size_errors

    case errors do
      [] -> :valid
      _ -> {:invalid, errors}
    end
  end

  @doc """
  Returns the total available gas left for all transactions in
  this block. This is the total gas limit minus the gas used
  in transactions.

  ## Examples

      iex> EVM.Block.Header.available_gas(%EVM.Block.Header{gas_limit: 50_000, gas_used: 30_000})
      20_000
  """
  @spec available_gas(t) :: EVM.Gas.t()
  def available_gas(header) do
    header.gas_limit - header.gas_used
  end

  @doc """
  Calculates the difficulty of a new block header. This implements Eq.(39),
  Eq.(40), Eq.(41), Eq.(42), Eq.(43) and Eq.(44) of the Yellow Paper.

  ## Examples

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 0, timestamp: 55},
      ...>   nil
      ...> )
      131_072

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 1, timestamp: 1479642530},
      ...>   %EVM.Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576}
      ...> )
      1_048_064

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 33, timestamp: 66},
      ...>   %EVM.Block.Header{number: 32, timestamp: 55, difficulty: 300_000}
      ...> )
      300_146

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 33, timestamp: 88},
      ...>   %EVM.Block.Header{number: 32, timestamp: 55, difficulty: 300_000}
      ...> )
      299_854

  # TODO: Is this right? These numbers are quite a jump

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 3_000_001, timestamp: 66},
      ...>   %EVM.Block.Header{number: 3_000_000, timestamp: 55, difficulty: 300_000}
      ...> )
      268_735_456

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 3_000_001, timestamp: 155},
      ...>   %EVM.Block.Header{number: 3_000_000, timestamp: 55, difficulty: 300_000}
      ...> )
      268_734_142

  Test actual Ropsten genesis block

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 0, timestamp: 0},
      ...>   nil,
      ...>   0x100000,
      ...>   0x020000,
      ...>   0x0800,
      ...>   0
      ...> )
      1_048_576

  # Test actual Ropsten first block

      iex> EVM.Block.Header.get_difficulty(
      ...>   %EVM.Block.Header{number: 1, timestamp: 1_479_642_530},
      ...>   %EVM.Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576},
      ...>   0x100000,
      ...>   0x020000,
      ...>   0x0800,
      ...>   0
      ...> )
      997_888
  """
  @spec get_difficulty(t, t | nil, integer()) :: integer()
  def get_difficulty(
        header,
        parent_header,
        initial_difficulty \\ @initial_difficulty,
        minimum_difficulty \\ @minimum_difficulty,
        difficulty_bound_divisor \\ @difficulty_bound_divisor,
        homestead_block \\ @homestead_block
      ) do
    cond do
      header.number == 0 ->
        initial_difficulty

      is_before_homestead?(header, homestead_block) ->
        # Find the delta from parent block
        difficulty_delta =
          difficulty_x(parent_header.difficulty, difficulty_bound_divisor) *
            difficulty_s1(header, parent_header) + difficulty_e(header)

        # Add delta to parent block
        next_difficulty = parent_header.difficulty + difficulty_delta

        # Return next difficulty, capped at minimum
        max(minimum_difficulty, next_difficulty)

      true ->
        # Find the delta from parent block (note: we use difficulty_s2 since we're after Homestead)
        difficulty_delta =
          difficulty_x(parent_header.difficulty, difficulty_bound_divisor) *
            difficulty_s2(header, parent_header) + difficulty_e(header)

        # Add delta to parent's difficulty
        next_difficulty = parent_header.difficulty + difficulty_delta

        # Return next difficulty, capped at minimum
        max(minimum_difficulty, next_difficulty)
    end
  end

  # Eq.(42) ς1 - Effectively decides if blocks are being mined too quickly or too slowly
  @spec difficulty_s1(t, t) :: integer()
  defp difficulty_s1(header, parent_header) do
    if header.timestamp < parent_header.timestamp + 13, do: 1, else: -1
  end

  # Eq.(43) ς2
  @spec difficulty_s2(t, t) :: integer()
  defp difficulty_s2(header, parent_header) do
    s = MathHelper.floor((header.timestamp - parent_header.timestamp) / 10)
    max(1 - s, -99)
  end

  # Eq.(41) x - Creates some multiplier for how much we should change difficulty based on previous difficulty
  @spec difficulty_x(integer(), integer()) :: integer()
  defp difficulty_x(parent_difficulty, difficulty_bound_divisor),
    do: MathHelper.floor(parent_difficulty / difficulty_bound_divisor)

  # Eq.(44) ε - Adds a delta to ensure we're increasing difficulty over time
  @spec difficulty_e(t) :: integer()
  defp difficulty_e(header) do
    MathHelper.floor(
      :math.pow(
        2,
        MathHelper.floor(header.number / 100_000) - 2
      )
    )
  end

  @doc """
  Function to determine if the gas limit set is valid. The miner gets to
  specify a gas limit, so long as it's in range. This allows about a 0.1% change
  per block.

  This function directly implements Eq.(45), Eq.(46) and Eq.(47).

  ## Examples

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, nil)
      true

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000, nil)
      false

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 1_000_000)
      true

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 2_000_000)
      false

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 500_000)
      false

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 999_500)
      true

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 999_000)
      false

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000_000, 2_000_000, 1)
      true

      iex> EVM.Block.Header.is_gas_limit_valid?(1_000, nil, 1024, 500)
      true
  """
  @spec is_gas_limit_valid?(EVM.Gas.t(), EVM.Gas.t() | nil) :: boolean()
  def is_gas_limit_valid?(
        gas_limit,
        parent_gas_limit,
        gas_limit_bound_divisor \\ @gas_limit_bound_divisor,
        min_gas_limit \\ @min_gas_limit
      ) do
    if parent_gas_limit == nil do
      # It's not entirely clear from the Yellow Paper
      # whether a genesis block should have any limits
      # on gas limit, other than min gas limit.
      gas_limit > min_gas_limit
    else
      max_delta = MathHelper.floor(parent_gas_limit / gas_limit_bound_divisor)

      gas_limit < parent_gas_limit + max_delta and gas_limit > parent_gas_limit - max_delta and
        gas_limit > min_gas_limit
    end
  end

  # Valid when the computed canonical difficulty equals the header's difficulty.
  defp check_difficulty(difficulty, difficulty), do: []
  defp check_difficulty(_difficulty, _header_difficulty), do: [:invalid_difficulty]

  # Eq.(56): gas used may not exceed the gas limit.
  defp check_gas(gas_used, gas_limit) when gas_used <= gas_limit, do: []
  defp check_gas(_gas_used, _gas_limit), do: [:exceeded_gas_limit]

  # A child block's timestamp must be strictly after its parent's.
  defp check_header_timestamps(nil, _header), do: []

  defp check_header_timestamps(parent_header, header),
    do: do_check_header_timestamps(parent_header.timestamp, header.timestamp)

  defp do_check_header_timestamps(parent_header_timestamp, header_timestamp)
       when parent_header_timestamp < header_timestamp,
       do: []

  defp do_check_header_timestamps(_, _), do: [:child_timestamp_invalid]

  # A child block's number must be exactly parent number + 1 (genesis excepted).
  defp check_header(nil, _), do: []
  defp check_header(_, nil), do: [:child_number_invalid]

  defp check_header(parent_header, header),
    do: do_check_header(parent_header.number, header.number)

  defp do_check_header(_, 0), do: []

  defp do_check_header(parent_header_number, header_number)
       when parent_header_number + 1 == header_number,
       do: []

  defp do_check_header(_, _), do: [:child_number_invalid]

  # Eq.(58): extra data is capped at @max_extra_data_bytes bytes.
  defp check_extra_data_size(header_extra_data)
       when byte_size(header_extra_data) <= @max_extra_data_bytes,
       do: []

  defp check_extra_data_size(_), do: [:extra_data_too_large]
end
|
apps/evm/lib/evm/block/header.ex
| 0.61832
| 0.457864
|
header.ex
|
starcoder
|
defmodule ExPng.Chunks.Transparency do
  @moduledoc """
  Representation of transparency data for an image
  """

  @type t :: %__MODULE__{
          type: :tRNS,
          data: ExPng.maybe(binary()),
          transparency: ExPng.maybe([integer()] | binary())
        }
  defstruct [:data, :transparency, type: :tRNS]

  alias ExPng.Chunks.Header

  import ExPng.Utilities, only: [reduce_to_binary: 1]
  import Bitwise

  # Wraps the raw payload of a tRNS chunk; the bytes are interpreted later by
  # `parse_data/2` once the image's color mode and bit depth are known.
  @spec new(:tRNS, binary) :: {:ok, __MODULE__.t()}
  def new(:tRNS, data) do
    {:ok, %__MODULE__{data: data}}
  end

  # Builds a transparency chunk from indexed-color palette pixels
  # (each pixel is a <<r, g, b, a>> binary). Collects the leading run of
  # alpha values below 255; returns nil when the first pixel is already opaque.
  # NOTE(review): `take_while` stops at the first opaque pixel, so transparent
  # entries after an opaque one are dropped — presumably callers order the
  # palette with transparent entries first; confirm against the encoder.
  @spec build_from_pixel_palette([ExPng.Color.t()]) :: ExPng.maybe(__MODULE__.t())
  def build_from_pixel_palette(pixels) do
    transparency =
      pixels
      |> Enum.map(fn <<_, _, _, a>> -> a end)
      |> Enum.take_while(fn a -> a < 255 end)

    case transparency do
      [] -> nil
      _ -> %__MODULE__{transparency: transparency}
    end
  end

  # Interprets the raw tRNS bytes according to the header's color mode:
  # mode 3 (indexed) yields a list of alpha values, modes 0 (grayscale) and
  # 2 (truecolor) yield the transparent color as a <<r, g, b>> binary.
  @spec parse_data(ExPng.maybe(__MODULE__.t()), Header.t()) ::
          {:ok, ExPng.maybe(__MODULE__.t())} | {:error, binary()}
  def parse_data(nil, _), do: {:ok, nil}

  # Indexed color: one alpha byte per palette entry.
  def parse_data(%__MODULE__{data: data} = transparency, %Header{color_mode: 3}) do
    transparencies = for <<a <- data>>, do: a
    {:ok, %{transparency | transparency: transparencies}}
  end

  # Grayscale: a single sample, scaled to 8 bits and replicated across r/g/b.
  def parse_data(%__MODULE__{data: data} = transparency, %Header{
        color_mode: 0,
        bit_depth: bit_depth
      }) do
    gray =
      data
      |> :binary.decode_unsigned()
      |> shape_transparency_bit(bit_depth)

    {:ok, %{transparency | transparency: <<gray, gray, gray>>}}
  end

  # Truecolor: three 2-byte samples, each scaled to 8 bits per channel.
  def parse_data(%__MODULE__{data: data} = transparency, %Header{
        color_mode: 2,
        bit_depth: bit_depth
      }) do
    <<r::bytes-size(2), g::bytes-size(2), b::bytes-size(2)>> = data
    [r, g, b] = [r, g, b] |> Enum.map(&:binary.decode_unsigned/1)

    transparent_pixel =
      case bit_depth do
        1 ->
          # 1-bit: the sample is either fully white or fully black.
          if r == 0x01, do: <<255, 255, 255>>, else: <<0, 0, 0>>

        _ ->
          [r, g, b] = Enum.map([r, g, b], &shape_transparency_bit(&1, bit_depth))
          <<r, g, b>>
      end

    {:ok, %{transparency | transparency: transparent_pixel}}
  end

  def parse_data(_, _), do: {:error, "invalid transparency chunk"}

  @behaviour ExPng.Encodeable

  # Serializes the chunk as <<length::32>> <> "tRNS" <> data <> <<crc::32>>.
  # NOTE(review): `Enum.map/2` assumes `transparency` holds a *list* of alpha
  # values (the indexed-color form); a binary transparency produced for color
  # modes 0/2 would raise here — confirm only palette images are re-encoded.
  @impl true
  def to_bytes(%__MODULE__{transparency: transparency}, _encoding_options \\ []) do
    data =
      Enum.map(transparency, fn a -> <<a>> end)
      |> reduce_to_binary()

    length = byte_size(data)
    # "tRNS" as raw bytes
    type = <<116, 82, 78, 83>>
    crc = :erlang.crc32([type, data])
    <<length::32>> <> type <> data <> <<crc::32>>
  end

  # Scales a sample of the given bit depth to 8 bits by bit replication
  # (e.g. 2-bit `ab` becomes `abababab`); 16-bit samples keep their high byte.
  defp shape_transparency_bit(0x01, 1), do: 0xFF
  defp shape_transparency_bit(_, 1), do: 0x0

  defp shape_transparency_bit(bit, 2) do
    bit <<< 6 ||| bit <<< 4 ||| bit <<< 2 ||| bit
  end

  defp shape_transparency_bit(bit, 4) do
    bit <<< 4 ||| bit
  end

  defp shape_transparency_bit(bit, 8) do
    bit
  end

  defp shape_transparency_bit(bit, 16) do
    bit >>> 8
  end
end
|
lib/ex_png/chunks/transparency.ex
| 0.844441
| 0.611556
|
transparency.ex
|
starcoder
|
defmodule Blurhash.Decoder do
  @moduledoc false

  import Blurhash.Utils
  alias Blurhash.Base83

  # `use Bitwise` has been deprecated since Elixir 1.12; a plain import
  # brings in the same bsr/2 and band/2 used below and works on all
  # supported Elixir versions.
  import Bitwise

  # Decodes the size flag (first base83 character). The flag packs the
  # component counts as `(y - 1) * 9 + (x - 1)`, so both land in 1..9.
  defp size_flag(blurhash) do
    with {:ok, encoded_flag, rest} <- Base83.decode_number(blurhash, 1) do
      x = rem(encoded_flag, 9) + 1
      y = floor(encoded_flag / 9) + 1
      {:ok, {x, y}, rest}
    end
  end

  # Decodes the quantized maximum AC component (second character) back
  # into a float.
  defp max_ac(blurhash) do
    with {:ok, quantized_max, rest} <- Base83.decode_number(blurhash, 1) do
      {:ok, (quantized_max + 1) / 166, rest}
    end
  end

  # Decodes the average color (four characters) as a packed 24-bit sRGB
  # value; also returns its linear-space version, which is the DC term of
  # the DCT.
  defp average_color_and_dc(blurhash) do
    with {:ok, raw, rest} <- Base83.decode_number(blurhash, 4) do
      {r, g, b} = color = {bsr(raw, 16), band(bsr(raw, 8), 255), band(raw, 255)}
      dc = {srgb_to_linear(r), srgb_to_linear(g), srgb_to_linear(b)}
      {:ok, {color, dc}, rest}
    end
  end

  @doc """
  Decodes the AC components into a list of `{{x, y}, {r, g, b}}` entries
  with the DC component at position `{0, 0}`.

  Returns `{:ok, matrix}`, or an error tuple when a component fails to
  decode or trailing input remains after all components were consumed.
  """
  def construct_matrix(encoded_ac, max_ac, x, y, dc) do
    size = x * y - 1

    try do
      # We start with 1 because {0, 0} is the DC
      {ac_values, rest} =
        Enum.map_reduce(1..size, encoded_ac, fn index, rest ->
          case Base83.decode_number(rest, 2) do
            {:ok, value, rest} ->
              # add matrix position with the color since we will need it for
              # inverse dct later
              matrix_pos = {rem(index, x), floor(index / x)}

              # Each AC value packs three base-19 quantized channels.
              quantized_r = floor(value / (19 * 19))
              quantized_g = floor(rem(floor(value / 19), 19))
              quantized_b = rem(value, 19)

              r = unquantize_color(quantized_r, max_ac)
              g = unquantize_color(quantized_g, max_ac)
              b = unquantize_color(quantized_b, max_ac)

              {{matrix_pos, {r, g, b}}, rest}

            # Haven't found a more elegant solution to throwing in this case
            error ->
              throw(error)
          end
        end)

      if rest != "" do
        {:error, :unexpected_components}
      else
        {r, g, b} = dc
        matrix = [{{0, 0}, {r, g, b}} | ac_values]
        {:ok, matrix}
      end
    catch
      error -> error
    end
  end

  @doc """
  Renders a `width` x `height` RGB pixel buffer (iodata, row-major) from a
  component matrix by evaluating the inverse DCT at every pixel.
  """
  def construct_pixel_iodata(width, height, matrix) do
    # Rows/columns are walked in reverse and prepended so the accumulator
    # comes out in natural order without a final reverse.
    # NOTE(review): `(height - 1)..0` relies on implicitly decreasing
    # ranges, which warn on modern Elixir; `(height - 1)..0//-1` is the
    # explicit form once the project requires Elixir >= 1.12.
    Enum.reduce((height - 1)..0, [], fn y, acc ->
      pixel_row =
        Enum.reduce((width - 1)..0, [], fn x, acc ->
          {linear_r, linear_g, linear_b} =
            Enum.reduce(matrix, {0, 0, 0}, fn {{component_x, component_y},
                                               {current_red, current_green, current_blue}},
                                              {red, green, blue} ->
              idct_basis =
                :math.cos(:math.pi() * x * component_x / width) *
                  :math.cos(:math.pi() * y * component_y / height)

              {red + current_red * idct_basis, green + current_green * idct_basis,
               blue + current_blue * idct_basis}
            end)

          r = linear_to_srgb(linear_r)
          g = linear_to_srgb(linear_g)
          b = linear_to_srgb(linear_b)

          [<<r::8, g::8, b::8>> | acc]
        end)

      [pixel_row | acc]
    end)
  end

  @doc """
  Decodes `blurhash` into `{:ok, pixels, average_color}` where `pixels` is
  RGB iodata for a `width` x `height` image and `average_color` is the
  encoded average as an `{r, g, b}` tuple, or an error tuple on malformed
  input.
  """
  def decode(blurhash, width, height) do
    with {:ok, {components_x, components_y}, rest} <- size_flag(blurhash),
         {:ok, max_ac, rest} <- max_ac(rest),
         {:ok, {average_color, dc}, rest} <- average_color_and_dc(rest),
         {:ok, matrix} <- construct_matrix(rest, max_ac, components_x, components_y, dc) do
      pixels = construct_pixel_iodata(width, height, matrix)
      {:ok, pixels, average_color}
    end
  end
end
|
lib/blurhash/decoder.ex
| 0.601828
| 0.496155
|
decoder.ex
|
starcoder
|
defmodule Monetized.Currency do
  @currencies [
    %{name: "Argentinian Peso", symbol: "A$", key: "ARS"},
    %{name: "Canadian Dollar", symbol: "C$", key: "CAD"},
    %{name: "Euro", symbol: "€", key: "EUR"},
    %{name: "Pound Sterling", symbol: "£", key: "GBP"},
    %{name: "Hong Kong Dollar", symbol: "HK$", key: "HKD"},
    %{name: "Philippine Peso", symbol: "₱", key: "PHP"},
    %{name: "Thai Baht", symbol: "฿", key: "THB"},
    %{name: "US Dollar", symbol: "$", key: "USD"},
  ]

  # Index the currency list by key once at compile time for O(1) lookups.
  @currency_map Enum.reduce(@currencies, %{}, fn(currency, acc) -> Map.put(acc, currency.key, currency) end)

  @moduledoc """
  Defines available currencies and functions to handle them.
  """

  @doc """
  Attempts to parse the currency from the given string
  based on both the currency key and the symbol.

  Returns `nil` when neither a symbol nor a key is found.

  ## Examples

      iex> Monetized.Currency.parse("EUR 200.00")
      %{name: "Euro", symbol: "€", key: "EUR"}

      iex> Monetized.Currency.parse("£ 200.00")
      %{name: "Pound Sterling", symbol: "£", key: "GBP"}

      iex> Monetized.Currency.parse("200.00 USD")
      %{name: "US Dollar", symbol: "$", key: "USD"}

      iex> Monetized.Currency.parse("200.00 THB")
      %{name: "Thai Baht", symbol: "฿", key: "THB"}

      iex> Monetized.Currency.parse("200.00 PHP")
      %{key: "PHP", name: "Philippine Peso", symbol: "₱"}

  """
  def parse(str) do
    parse_by_symbol(str) || parse_by_key(str)
  end

  @doc """
  Attempts to parse the currency from the given string
  based on the currency key.

  ## Examples

      iex> Monetized.Currency.parse_by_key("EUR 200.00")
      %{name: "Euro", symbol: "€", key: "EUR"}

      iex> Monetized.Currency.parse_by_key("200.00 USD")
      %{name: "US Dollar", symbol: "$", key: "USD"}

      iex> Monetized.Currency.parse_by_key("200.00 GBP")
      %{name: "Pound Sterling", symbol: "£", key: "GBP"}

      iex> Monetized.Currency.parse_by_key("200.00 THB")
      %{name: "Thai Baht", symbol: "฿", key: "THB"}

      iex> Monetized.Currency.parse_by_key("200.00 PHP")
      %{key: "PHP", name: "Philippine Peso", symbol: "₱"}

      iex> Monetized.Currency.parse_by_key("200.00 ARS")
      %{key: "ARS", name: "Argentinian Peso", symbol: "A$"}

  """
  def parse_by_key(str) do
    # Match any known key anywhere in the string; only an unambiguous
    # single match is accepted.
    piped = Enum.join(keys(), "|")

    case Regex.scan(~r/#{piped}/, str) do
      [[a]] ->
        get(a)

      _ ->
        nil
    end
  end

  @doc """
  Attempts to guess the currency from the given string
  based on the currency symbol.

  ## Examples

      iex> Monetized.Currency.parse_by_symbol("€ 200.00")
      %{name: "Euro", symbol: "€", key: "EUR"}

      iex> Monetized.Currency.parse_by_symbol("$200.00")
      %{name: "US Dollar", symbol: "$", key: "USD"}

      iex> Monetized.Currency.parse_by_symbol("£200.00")
      %{name: "Pound Sterling", symbol: "£", key: "GBP"}

      iex> Monetized.Currency.parse_by_symbol("฿200.00")
      %{name: "Thai Baht", symbol: "฿", key: "THB"}

      iex> Monetized.Currency.parse_by_symbol("₱200.00")
      %{key: "PHP", name: "Philippine Peso", symbol: "₱"}

  """
  # Generates one `parse_by_symbol/1` clause per currency, matching on the
  # symbol as a binary prefix. Clause order follows @currencies, so e.g.
  # "A$" is tried before the bare "$".
  @currencies
  |> Enum.map(&Map.to_list/1)
  |> Enum.each(fn (currency) ->
    def parse_by_symbol(unquote(Keyword.get(currency, :symbol)) <> _rest), do: Enum.into(unquote(currency), %{})
  end)

  def parse_by_symbol(_), do: nil

  @doc """
  Retrieves the currency map for the given key, or `nil` for an
  unknown key.

  ## Examples

      iex> Monetized.Currency.get("EUR")
      %{name: "Euro", symbol: "€", key: "EUR"}

      iex> Monetized.Currency.get("THB")
      %{name: "Thai Baht", symbol: "฿", key: "THB"}

      iex> Monetized.Currency.get("PHP")
      %{key: "PHP", name: "Philippine Peso", symbol: "₱"}

  """
  # Spec fixed: this returns a plain map (or nil), not a struct.
  @spec get(String.t) :: map | nil
  def get(key), do: all()[key]

  @doc """
  Retrieves a map holding all the currency options, keyed by currency key.
  """
  @spec all() :: %{bitstring() => %{name: bitstring(), symbol: bitstring(), key: bitstring()}}
  def all, do: @currency_map

  @doc """
  Retrieves a list of currency options keys
  """
  @spec keys :: list
  def keys, do: Map.keys(all())
end
|
lib/currency.ex
| 0.799521
| 0.446314
|
currency.ex
|
starcoder
|
defmodule Square.CustomerGroups do
  @moduledoc """
  Client functions for the Square Customer Groups API.

  Every function takes a configured `Tesla.Client` (e.g. from
  `Square.client`) as its first argument and returns
  `{:ok, %Tesla.Env{}}` on success or `{:error, reason}` on failure.
  """

  @doc """
  Retrieves the list of customer groups of a business.

  ## Options

    * `:cursor` - pagination cursor returned by a previous call to this
      endpoint; provide it to retrieve the next set of results. See the
      [Pagination guide](https://developer.squareup.com/docs/working-with-apis/pagination).

  ## Example

      iex> Square.client |> Square.CustomerGroups.list_customer_groups()

  """
  @spec list_customer_groups(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
  def list_customer_groups(client, params \\ []) do
    Tesla.get(client, "customers/groups", query: params)
  end

  @doc """
  Creates a new customer group for a business.

  The request body must include the `name` value of the group.

  ## Example

      iex> body = %{group: %{name: "Loyal Customers"}}
      iex> Square.client |> Square.CustomerGroups.create_customer_group(body)

  """
  @spec create_customer_group(Tesla.Client.t(), map) ::
          {:error, any} | {:ok, Tesla.Env.t()}
  def create_customer_group(client, body \\ %{}) do
    Tesla.post(client, "customers/groups", body)
  end

  @doc """
  Deletes the customer group identified by `group_id`.

  ## Example

      iex> Square.client |> Square.CustomerGroups.delete_customer_group("group_id0")

  """
  @spec delete_customer_group(Tesla.Client.t(), binary) ::
          {:error, any} | {:ok, Tesla.Env.t()}
  def delete_customer_group(client, group_id) do
    Tesla.delete(client, "customers/groups/#{group_id}")
  end

  @doc """
  Retrieves the customer group identified by `group_id`.

  ## Example

      iex> Square.client |> Square.CustomerGroups.retrieve_customer_group("group_id0")

  """
  @spec retrieve_customer_group(Tesla.Client.t(), binary) ::
          {:error, any} | {:ok, Tesla.Env.t()}
  def retrieve_customer_group(client, group_id) do
    Tesla.get(client, "customers/groups/#{group_id}")
  end

  @doc """
  Updates the customer group identified by `group_id` with the fields in
  `body`.

  ## Example

      iex> body = %{group: %{name: "Loyal Customers 2"}}
      iex> Square.client |> Square.CustomerGroups.update_customer_group("group_id0", body)

  """
  @spec update_customer_group(Tesla.Client.t(), binary, map) ::
          {:error, any} | {:ok, Tesla.Env.t()}
  def update_customer_group(client, group_id, body \\ %{}) do
    Tesla.put(client, "customers/groups/#{group_id}", body)
  end
end
|
lib/api/customer_groups_api.ex
| 0.91511
| 0.807043
|
customer_groups_api.ex
|
starcoder
|
defmodule Resourceful.Type.Attribute do
  @moduledoc """
  Attributes represent "value" fields for a given `Resourceful.Type`. This
  governs a few common operations:

  ## Casting Inputs

  Values will often come in the form of strings from the edge. Attributes use
  Ecto's typecasting to cast inputs into proper datatype or return appropriate
  errors.

  ## Mapping to Underlying Resources

  Generally speaking, outside representation of resources will use strings as
  keys on maps whereas internal structures tend to be atoms. Additionally,
  internal structures will often use snake case whereas external structures may
  take multiple forms (e.g. camel case or dasherized when dealing with
  JSON:API). Attributes map values to their appropriate internal keys.

  ## Configuring Queries

  APIs may choose to restrict what attributes can be filtered and sorted. For
  instance, you may wish to only allow public queries against indexed
  attributes or none at all.

  This allows the system to restrict access and return meaningful errors when a
  client attempts to query against an attribute.
  """

  import Map, only: [put: 3]

  alias __MODULE__
  alias Resourceful.{Error, Type}
  alias Resourceful.Collection.Filter
  alias Resourceful.Type.GraphedField

  # All struct fields are required; defaults are supplied by `new/3`.
  @enforce_keys [
    :filter?,
    :map_to,
    :name,
    :sort?,
    :type
  ]
  defstruct @enforce_keys

  @doc """
  Creates a new attribute, coerces values, and sets defaults.
  """
  @spec new(String.t(), atom(), keyword()) :: %Attribute{}
  def new(name, type, opts \\ []) do
    # `query: true` expands to both `filter: true` and `sort: true`.
    opts = opts_with_query(opts)
    # Default `map_to` to the atom form of `name` when not given.
    map_to = Keyword.get(opts, :map_to) || as_atom(name)

    %Attribute{
      filter?: opt_bool(Keyword.get(opts, :filter)),
      map_to: Type.validate_map_to!(map_to),
      name: Type.validate_name!(name),
      sort?: opt_bool(Keyword.get(opts, :sort)),
      type: to_type(type)
    }
  end

  defp to_type(type) when is_binary(type), do: as_atom(type)
  defp to_type(type), do: type

  @doc """
  Casts an input into an attribute's type. If `cast_as_list` is true, it will
  wrap the attribute's type in a list. This is specifically for dealing with
  filters that take a list of values.
  """
  @spec cast(Type.queryable(), any(), boolean()) :: {:ok, any()} | Error.t()
  def cast(attr_or_graph, input, cast_as_list \\ false)

  def cast(%Attribute{name: name, type: type}, input, cast_as_list) do
    do_cast(name, type, input, cast_as_list)
  end

  def cast(%GraphedField{field: %Attribute{type: type}, name: name}, input, cast_as_list) do
    do_cast(name, type, input, cast_as_list)
  end

  defp do_cast(name, type, input, cast_as_list) do
    # When casting a list, wrap both the type and the input so a scalar
    # input becomes a one-element list.
    {type, input} =
      case cast_as_list do
        true -> {{:array, type}, List.wrap(input)}
        _ -> {type, input}
      end

    case Ecto.Type.cast(type, input) do
      {:ok, _} = ok ->
        ok

      _ ->
        Error.with_context(
          :type_cast_failure,
          %{attribute: name, input: input, type: type}
        )
    end
  end

  @doc """
  Creates a contextual error with the attribute's name in the context.
  """
  @spec error(Type.queryable(), atom(), map()) :: Error.contextual()
  def error(%{name: name}, error_type, context \\ %{}) do
    Error.with_context(error_type, Map.merge(context, %{attribute: name}))
  end

  @doc """
  Sets the `filter?` key for an attribute. Collections cannot be filtered by an
  attribute unless this is set to `true`.
  """
  @spec filter(%Attribute{}, boolean()) :: %Attribute{}
  def filter(attr, filter), do: put(attr, :filter?, opt_bool(filter))

  @doc """
  Sets the `map_to` key for an attribute. `map_to` is the key of the underlying
  map to use in place of the attribute's actual `name`. Unlike a `name` which
  can only be a string, this can be an atom or a string (dots allowed).
  """
  @spec map_to(%Attribute{}, atom() | String.t()) :: %Attribute{}
  def map_to(attr, map_to) when is_atom(map_to) or is_binary(map_to) do
    put(attr, :map_to, Type.validate_map_to!(map_to))
  end

  @doc """
  Sets the name for the attribute. This is the "edge" name that clients will
  interact with. It can be any string as long as it doesn't contain dots. This
  will also serve as its key name if used in conjunction with a
  `Resourceful.Type` which is important in that names must be unique within a
  type.
  """
  @spec name(%Attribute{}, String.t()) :: %Attribute{}
  def name(attr, name), do: put(attr, :name, Type.validate_name!(name))

  @doc """
  A shortcut for setting both `filter?` and `sort?` to the same value.
  """
  @spec query(%Attribute{}, boolean()) :: %Attribute{}
  def query(attr, query) do
    attr
    |> filter(query)
    |> sort(query)
  end

  @doc """
  Sets the `sort?` key for an attribute. Collections cannot be sorted by an
  attribute unless this is set to `true`.
  """
  @spec sort(%Attribute{}, boolean()) :: %Attribute{}
  def sort(attr, sort), do: put(attr, :sort?, opt_bool(sort))

  @doc """
  Sets the data type for casting. This must be an `Ecto.Type` or atom that works
  in its place such as `:string`.
  """
  @spec type(%Attribute{}, any()) :: %Attribute{}
  def type(attr, type), do: put(attr, :type, to_type(type))

  @doc """
  Validates a filter against the attribute and the data given. It ensures the
  data type is correct for the operator and that the attribute allows filtering
  at all.
  """
  @spec validate_filter(Type.queryable(), String.t(), any()) :: {:ok, Filter.t()} | Error.t()
  def validate_filter(attr_or_graph, op, val) do
    with :ok <- check_query_attr(attr_or_graph, :filter?, :cannot_filter_by_attribute),
         {:ok, cast_val} <- cast(attr_or_graph, val, Filter.cast_as_list?(op)),
         {:ok, _} = ok <- validate_filter_with_operator(attr_or_graph, op, cast_val),
         do: ok
  end

  @doc """
  Validates a sorter against the attribute ensuring that sorting is allowed for
  the attribute.
  """
  @spec validate_sorter(Type.queryable(), :asc | :desc) :: {:ok, Sort.t()} | Error.t()
  def validate_sorter(attr_or_graph, order \\ :asc)

  def validate_sorter(attr_or_graph, order) do
    with :ok <- check_query_attr(attr_or_graph, :sort?, :cannot_sort_by_attribute),
         do: {:ok, {order, attr_or_graph}}
  end

  # `to_existing_atom` avoids creating atoms from arbitrary input.
  defp as_atom(value) when is_atom(value), do: value
  defp as_atom(value) when is_binary(value), do: String.to_existing_atom(value)

  # Returns :ok when the boolean flag under `key` (`:filter?` or `:sort?`)
  # is true; otherwise returns a contextual error of `error_type`.
  defp check_query_attr(attr_or_graph, key, error_type) do
    attr =
      case attr_or_graph do
        %Attribute{} = attr -> attr
        %GraphedField{field: %Attribute{} = attr} -> attr
      end

    case Map.get(attr, key) do
      true -> :ok
      _ -> error(attr_or_graph, error_type)
    end
  end

  defp opt_bool(nil), do: false
  defp opt_bool(bool) when is_boolean(bool), do: bool

  defp opts_with_query(opts) do
    cond do
      Keyword.get(opts, :query) -> Keyword.merge(opts, filter: true, sort: true)
      true -> opts
    end
  end

  defp validate_filter_with_operator(attr_or_graph, op, val) do
    case Filter.valid_operator?(op, val) do
      true -> {:ok, {attr_or_graph, op, val}}
      _ -> error(attr_or_graph, :invalid_filter_operator, %{operator: op, value: val})
    end
  end
end
|
lib/resourceful/type/attribute.ex
| 0.907563
| 0.681024
|
attribute.ex
|
starcoder
|
defmodule DBConnection.OwnershipError do
  # Raised when a connection cannot be obtained through the ownership pool.
  defexception [:message]

  # Accepts the message string directly rather than a keyword list.
  def exception(message) do
    %__MODULE__{message: message}
  end
end
defmodule DBConnection.Ownership do
  @moduledoc """
  A `DBConnection.Pool` that requires explicit checkout and checkin
  as a mechanism to coordinate between processes.

  ### Options

    * `:ownership_pool` - The actual pool to use to power the ownership
      mechanism. The pool is started when the ownership pool is started,
      although this option may also be given on `ownership_checkout/2`
      allowing developers to customize the pool checkout/checkin
    * `:ownership_mode` - When mode is `:manual`, all connections must
      be explicitly checked out before by using `ownership_checkout/2`.
      Otherwise, mode is `:auto` and connections are checked out
      implicitly. `{:shared, owner}` mode is also supported so
      processes are allowed on demand. On all cases, checkins are
      explicit via `ownership_checkin/2`. Defaults to `:auto`.
    * `:ownership_timeout` - The maximum time that a process is allowed to own
      a connection, default `15_000`.
    * `:ownership_log` - The `Logger.level` to log ownership changes, or `nil`
      not to log, default `nil`.

  If the `:ownership_pool` has an atom name given in the `:name` option,
  an ETS table will be created and automatically used for lookups whenever
  the name is used on checkout.

  Finally, if the `:caller` option is given on checkout with a pid and no
  pool is assigned to the current process, a connection will be allowed
  from the given pid and used on checkout with `:pool_timeout` of `:infinity`.
  This is useful when multiple tasks need to collaborate on the same
  connection (hence the `:infinity` timeout).
  """

  @behaviour DBConnection.Pool

  alias DBConnection.Ownership.Manager
  alias DBConnection.Ownership.Proxy

  ## Ownership API

  @doc """
  Explicitly checks a connection out from the ownership manager.

  It may return `:ok` if the connection is checked out.
  `{:already, :owner | :allowed}` if the caller process already
  has a connection, `:error` if it could be not checked out or
  raise if there was an error.
  """
  @spec ownership_checkout(GenServer.server, Keyword.t) ::
    :ok | {:already, :owner | :allowed} | :error | no_return
  def ownership_checkout(manager, opts) do
    # `:init` means the manager created a fresh proxy for this process;
    # it must be initialized before use.
    case Manager.checkout(manager, opts) do
      {:init, proxy} -> Proxy.init(proxy, opts)
      {:already, _} = already -> already
    end
  end

  @doc """
  Changes the ownership mode.

  `mode` may be `:auto`, `:manual` or `{:shared, owner}`.

  The operation will always succeed when setting the mode to
  `:auto` or `:manual`. It may fail with reason `:not_owner`
  or `:not_found` when setting `{:shared, pid}` and the
  given pid does not own any connection. May return
  `:already_shared` if another process set the ownership
  mode to `{:shared, _}` and is still alive.
  """
  @spec ownership_mode(GenServer.server, :auto | :manual | {:shared, pid}, Keyword.t) ::
    :ok | :already_shared | :not_owner | :not_found
  defdelegate ownership_mode(manager, mode, opts), to: Manager, as: :mode

  @doc """
  Checks a connection back in.

  A connection can only be checked back in by its owner.
  """
  @spec ownership_checkin(GenServer.server, Keyword.t) ::
    :ok | :not_owner | :not_found
  defdelegate ownership_checkin(manager, opts), to: Manager, as: :checkin

  @doc """
  Allows the process given by `allow` to use the connection checked out
  by `owner_or_allowed`.

  It may return `:ok` if the connection is checked out.
  `{:already, :owner | :allowed}` if the `allow` process already
  has a connection. `owner_or_allowed` may either be the owner or any
  other allowed process. Returns `:not_found` if the given process
  does not have any connection checked out.
  """
  @spec ownership_allow(GenServer.server, owner_or_allowed :: pid, allow :: pid, Keyword.t) ::
    :ok | {:already, :owner | :allowed} | :not_found
  defdelegate ownership_allow(manager, owner, allow, opts), to: Manager, as: :allow

  ## Pool callbacks
  #
  # The callbacks below implement the `DBConnection.Pool` behaviour by
  # delegating to the ownership Manager/Proxy pair.

  @doc false
  def ensure_all_started(opts, type) do
    # Delegate to the underlying pool implementation (Poolboy by default).
    Keyword.get(opts, :ownership_pool, DBConnection.Poolboy).ensure_all_started(opts, type)
  end

  @doc false
  def start_link(module, opts) do
    Manager.start_link(module, opts)
  end

  @doc false
  def child_spec(module, opts, child_opts) do
    Supervisor.Spec.worker(Manager, [module, opts], child_opts)
  end

  @doc false
  def checkout(manager, opts) do
    case Manager.lookup(manager, opts) do
      {:init, proxy} ->
        # Fresh proxy: initialize it first, then check out through it.
        case Proxy.init(proxy, opts) do
          :ok -> Proxy.checkout(proxy, opts)
          {:error, _} = error -> error
        end
      {:ok, proxy} ->
        Proxy.checkout(proxy, opts)
      :not_found ->
        # No proxy for this process: either fail with a descriptive error
        # or, when a `:caller` pid is given, get allowed by that caller and
        # retry with an infinite pool timeout.
        case Keyword.pop(opts, :caller) do
          {nil, _} ->
            msg = """
            cannot find ownership process for #{inspect self()}.

            When using ownership, you must manage connections in one
            of the three ways:

            * By explicitly checking out a connection
            * By explicitly allowing a spawned process
            * By running the pool in shared mode

            The first two options require every new process to explicitly
            check a connection out or be allowed by calling checkout or
            allow respectively.

            The third option requires a {:shared, pid} mode to be set.
            If using shared mode in tests, make sure your tests are not
            async.

            If you are reading this error, it means you have not done one
            of the steps above or that the owner process has crashed.
            """
            {:error, DBConnection.OwnershipError.exception(msg)}
          {owner, opts} ->
            ownership_allow(manager, owner, self(), opts)
            checkout(manager, [pool_timeout: :infinity] ++ opts)
        end
    end
  end

  @doc false
  def checkin(proxy, state, opts) do
    Proxy.checkin(proxy, state, opts)
  end

  @doc false
  def disconnect(proxy, exception, state, opts) do
    Proxy.disconnect(proxy, exception, state, opts)
  end

  @doc false
  def stop(proxy, err, state, opts) do
    Proxy.stop(proxy, err, state, opts)
  end
end
|
deps/db_connection/lib/db_connection/ownership.ex
| 0.850438
| 0.476397
|
ownership.ex
|
starcoder
|
defmodule ElixirSense.Core.ErlangHtml do
  @moduledoc false

  # those typedefs mimic erl_docgen types (as of OTP 24) to not introduce dependency
  @type chunk_elements() :: [chunk_element()]
  @type chunk_element() ::
          {chunk_element_type(), chunk_element_attrs(), chunk_elements()}
          | :unicode.unicode_binary()
  @type chunk_element_attrs() :: [chunk_element_attr()]
  @type chunk_element_attr() ::
          {atom, :unicode.unicode_binary()}
  @type chunk_element_type() :: chunk_element_inline_type() | chunk_element_block_type()
  @type chunk_element_inline_type() :: :a | :code | :strong | :b | :em | :i
  @type chunk_element_block_type() ::
          :p
          | :div
          | :br
          | :pre
          | :ul
          | :ol
          | :li
          | :dl
          | :dt
          | :dd
          | :h1
          | :h2
          | :h3
          | :h4
          | :h5
          | :h6

  @doc """
  Transform application/erlang+html AST into markdown string.

  Document AST is defined in http://erlang.org/doc/apps/erl_docgen/doc_storage.html
  """
  @spec to_markdown(chunk_element()) :: String.t()
  def to_markdown(ast), do: to_markdown(ast, [], :normal)

  # to_markdown/3 clauses below carry two extra arguments:
  # `parents` — the stack of enclosing tags, used to compute the leading
  # indentation for list nesting; `sanitize_mode` — how text nodes are
  # escaped (:normal escapes markdown specials, :backtick handles inline
  # code, :none leaves text untouched inside pre blocks).

  # Leaf text node: escape according to the current mode.
  def to_markdown(binary, _parents, sanitize_mode) when is_binary(binary) do
    sanitize(binary, sanitize_mode)
  end

  # A list of sibling nodes: render each and concatenate.
  def to_markdown(list, parents, sanitize_mode) when is_list(list) do
    Enum.map_join(list, "", &to_markdown(&1, parents, sanitize_mode))
  end

  # Hard break: two trailing spaces force a markdown line break.
  def to_markdown({:br, _attrs, _inner}, parents, _sanitize_mode) do
    "  \n" <> build_prefix(parents)
  end

  def to_markdown({:p, _attrs, inner}, parents, sanitize_mode) do
    prefix = build_prefix(parents)
    to_markdown(inner, parents, sanitize_mode) <> "\n" <> prefix <> "\n" <> prefix
  end

  def to_markdown({tag, _attrs, inner}, parents, sanitize_mode) when tag in [:em, :i] do
    "*" <> to_markdown(inner, parents, sanitize_mode) <> "*"
  end

  def to_markdown({tag, _attrs, inner}, parents, sanitize_mode) when tag in [:strong, :b] do
    "**" <> to_markdown(inner, parents, sanitize_mode) <> "**"
  end

  def to_markdown({:ul, _attrs, inner}, parents, sanitize_mode) do
    prefix = build_prefix(parents)

    items =
      inner
      |> Enum.map_join("", fn {:li, _attrs, li_inner} ->
        "- #{to_markdown(li_inner, [:li, :ul | parents], sanitize_mode)}\n" <> prefix
      end)

    items <> "\n" <> prefix
  end

  def to_markdown({:ol, _attrs, inner}, parents, sanitize_mode) do
    prefix = build_prefix(parents)

    items =
      inner
      |> Enum.with_index(1)
      |> Enum.map_join("", fn {{:li, _attrs, li_inner}, index} ->
        "#{index}. #{to_markdown(li_inner, [:li, :ol | parents], sanitize_mode)}\n" <> prefix
      end)

    items <> "\n" <> prefix
  end

  def to_markdown({:dl, _attrs, inner}, parents, sanitize_mode) do
    prefix = build_prefix(parents)
    to_markdown(inner, parents, sanitize_mode) <> "\n" <> prefix
  end

  # Definition term rendered as a bold label.
  def to_markdown({:dt, _attrs, inner}, parents, sanitize_mode) do
    "**" <> to_markdown(inner, parents, sanitize_mode) <> ":** "
  end

  def to_markdown({:dd, _attrs, inner}, parents, sanitize_mode) do
    prefix = build_prefix(parents)
    to_markdown(inner, parents, sanitize_mode) <> "  \n" <> prefix
  end

  # Code block: render the inner text verbatim (:none mode) in a fence.
  def to_markdown({:pre, _attrs1, [{:code, _attrs2, inner}]}, parents, _sanitize_mode) do
    prefix = build_prefix(parents)
    "```\n" <> prefix <> to_markdown(inner, parents, :none) <> "\n" <> prefix <> "```\n" <> prefix
  end

  # Generates one clause per heading level, h1..h6, each emitting the
  # matching number of `#` characters.
  for i <- 1..6,
      tag = :"h#{i}",
      prefix = for(_ <- 1..i, into: "", do: "#") do
    def to_markdown({unquote(tag), _attrs, inner}, parents, sanitize_mode) do
      unquote(prefix) <> " " <> to_markdown(inner, parents, sanitize_mode) <> "\n\n"
    end
  end

  # Divs (e.g. OTP "note"/"warning" boxes) are rendered between horizontal
  # rules, with the upcased class name as a label when present.
  def to_markdown({:div, attrs, inner}, parents, sanitize_mode) do
    class = attrs[:class]
    prefix = build_prefix(parents)
    maybe_class = if class != nil, do: String.upcase(class) <> ":  \n" <> prefix, else: ""

    "\n" <>
      prefix <>
      "\n" <>
      prefix <>
      "---" <>
      "\n" <>
      prefix <>
      "\n" <>
      prefix <>
      maybe_class <>
      to_markdown(inner, parents, sanitize_mode) <>
      "\n" <> prefix <> "\n" <> prefix <> "---" <> "\n" <> prefix <> "\n" <> prefix
  end

  def to_markdown({:code, _attrs, inner}, parents, _sanitize_mode) do
    "`" <> to_markdown(inner, parents, :backtick) <> "`"
  end

  # Empty anchors produce no output.
  def to_markdown({:a, _attrs, []}, _parents, _sanitize_mode) do
    ""
  end

  # Links keep only the text (no href target is emitted).
  def to_markdown({:a, _attrs, inner}, parents, sanitize_mode) do
    "[" <> to_markdown(inner, parents, sanitize_mode) <> "]"
  end

  # Builds the indentation prefix for the current nesting: two spaces per
  # enclosing list-related tag; other tags contribute nothing.
  defp build_prefix(list), do: build_prefix(list, "")
  defp build_prefix([], acc), do: acc
  defp build_prefix([:li | rest], acc), do: build_prefix(rest, "  " <> acc)
  defp build_prefix([:ul | rest], acc), do: build_prefix(rest, "  " <> acc)
  defp build_prefix([:ol | rest], acc), do: build_prefix(rest, "  " <> acc)
  defp build_prefix([_other | rest], acc), do: build_prefix(rest, acc)

  # Characters with markdown meaning that must be backslash-escaped in
  # :normal mode.
  @special_chars [
    "\\",
    "`",
    "*",
    "_",
    "{",
    "}",
    "[",
    "]",
    "<",
    ">",
    "(",
    ")",
    "#",
    "+",
    "-",
    ".",
    "!",
    "|"
  ]

  defp sanitize(binary, :normal) do
    Enum.reduce(@special_chars, binary, fn pattern, acc ->
      String.replace(acc, pattern, "\\" <> pattern)
    end)
  end

  # Inside inline code: a literal backtick is wrapped in extra backticks
  # instead of being escaped.
  defp sanitize(binary, :backtick) do
    if String.contains?(binary, "`") do
      "`" <> binary <> "`"
    else
      binary
    end
  end

  defp sanitize(binary, :none), do: binary
end
|
lib/elixir_sense/core/erlang_html.ex
| 0.590425
| 0.426292
|
erlang_html.ex
|
starcoder
|
defmodule RemoteIp.Parsers.Forwarded do
  use Combine
  @behaviour RemoteIp.Parser

  @moduledoc """
  [RFC 7239](https://tools.ietf.org/html/rfc7239) compliant parser for
  `Forwarded` headers.

  This module implements the `RemoteIp.Parser` behaviour. IPs are parsed out of
  the `for=` pairs across each forwarded element.

  ## Examples

      iex> RemoteIp.Parsers.Forwarded.parse("for=1.2.3.4;by=2.3.4.5")
      [{1, 2, 3, 4}]

      iex> RemoteIp.Parsers.Forwarded.parse("for=\\"[::1]\\", for=\\"[::2]\\"")
      [{0, 0, 0, 0, 0, 0, 0, 1}, {0, 0, 0, 0, 0, 0, 0, 2}]

      iex> RemoteIp.Parsers.Forwarded.parse("invalid")
      []
  """

  @impl RemoteIp.Parser
  def parse(header) do
    # A header that does not fully parse as RFC 7239 yields no IPs at all.
    case Combine.parse(header, forwarded()) do
      [elements] -> Enum.flat_map(elements, &parse_forwarded_for/1)
      _ -> []
    end
  end

  # Extracts the IP from one forwarded element's pairs. Multiple (or zero)
  # `for=` pairs in a single element are ambiguous, so they yield nothing.
  defp parse_forwarded_for(pairs) do
    case fors_from(pairs) do
      [string] -> parse_ip(string)
      _ambiguous -> []
    end
  end

  # RFC 7239 parameter names are case-insensitive.
  defp fors_from(pairs) do
    for {key, val} <- pairs, String.downcase(key) == "for", do: val
  end

  defp parse_ip(string) do
    case Combine.parse(string, ip_address()) do
      [ip] -> [ip]
      _ -> []
    end
  end

  # Grammar combinators below mirror the ABNF in the RFCs cited above each
  # group. They build `Combine` parsers, not values.

  # https://tools.ietf.org/html/rfc7239#section-4
  defp forwarded do
    sep_by(forwarded_element(), comma()) |> eof()
  end

  defp forwarded_element do
    sep_by1(forwarded_pair(), char(";"))
  end

  defp forwarded_pair do
    pair = [token(), ignore(char("=")), value()]
    pipe(pair, &List.to_tuple/1)
  end

  defp value do
    either(token(), quoted_string())
  end

  # https://tools.ietf.org/html/rfc7230#section-3.2.6
  defp token do
    word_of(~r/[!#$%&'*+\-.^_`|~0-9a-zA-Z]/)
  end

  defp quoted_string do
    quoted(string_of(either(qdtext(), quoted_pair())))
  end

  defp quoted(parser) do
    between(char("\""), parser, char("\""))
  end

  defp string_of(parser) do
    map(many(parser), &Enum.join/1)
  end

  defp qdtext do
    word_of(~r/[\t \x21\x23-\x5B\x5D-\x7E\x80-\xFF]/)
  end

  # Characters that may appear after a backslash inside a quoted string.
  @quotable ([?\t] ++ Enum.to_list(0x21..0x7E) ++ Enum.to_list(0x80..0xFF))
            |> Enum.map(&<<&1::utf8>>)

  defp quoted_pair do
    ignore(char("\\")) |> one_of(char(), @quotable)
  end

  # https://tools.ietf.org/html/rfc7230#section-7
  defp comma do
    skip(many(either(space(), tab())))
    |> char(",")
    |> skip(many(either(space(), tab())))
  end

  # https://tools.ietf.org/html/rfc7239#section-6
  defp ip_address do
    # Ports and obfuscated identifiers are parsed but discarded; only the
    # node name (the IP itself) is kept.
    node_name()
    |> ignore(option(ignore(char(":")) |> node_port()))
    |> eof()
  end

  defp node_name do
    choice([
      ipv4_address(),
      between(char("["), ipv6_address(), char("]")),
      ignore(string("unknown")),
      ignore(obfuscated())
    ])
  end

  defp node_port(previous) do
    previous |> either(port(), obfuscated())
  end

  defp port do
    # Have to try to parse the wider integers first due to greediness. For
    # example, the port "12345" would be matched by fixed_integer(1) and the
    # remaining "2345" would cause a parse error for the eof in ip_address/0.
    # NOTE(review): `5..1` is an implicitly decreasing range, which warns on
    # modern Elixir; `5..1//-1` is the explicit form on Elixir >= 1.12.
    choice(Enum.map(5..1, &fixed_integer/1))
  end

  defp obfuscated do
    word_of(~r/^_[a-zA-Z0-9._\-]+/)
  end

  # Could follow the ABNF described in
  # https://tools.ietf.org/html/rfc3986#section-3.2.2, but prefer to lean on
  # the existing :inet parser - we want its output anyway.
  defp ipv4_address do
    map(word_of(~r/[0-9.]/), fn string ->
      case :inet.parse_ipv4strict_address(string |> to_charlist()) do
        {:ok, ip} -> ip
        {:error, :einval} -> {:error, "Invalid IPv4 address"}
      end
    end)
  end

  defp ipv6_address do
    map(word_of(~r/[0-9a-f:.]/i), fn string ->
      case :inet.parse_ipv6strict_address(string |> to_charlist()) do
        {:ok, ip} -> ip
        {:error, :einval} -> {:error, "Invalid IPv6 address"}
      end
    end)
  end
end
|
lib/remote_ip/parsers/forwarded.ex
| 0.832441
| 0.502014
|
forwarded.ex
|
starcoder
|
defmodule FIQLEx.QueryBuilders.SQLQueryBuilder do
  @moduledoc ~S"""
  Builds SQL queries from FIQL AST.

  Possible options for this query builder are:

  * `table`: The table name to use in the `FROM` statement (defaults to `"table"`)
  * `select`: `SELECT` statement to build (_see below_).
  * `ecto`: Tuple containing the ecto repo and the ecto schema to use for the query. This will execute the query and return the result as a list
  * `only`: A list with the only fields to accept in the query (if `only` and `except` are both provided, `only` is used)
  * `except`: A list with the fields to reject in the query (if `only` and `except` are both provided, `only` is used)
  * `order_by`: A string order by to be added to the query
  * `limit`: A limit for the query
  * `offset`: An offset for the query
  * `case_sensitive`: Boolean value (default to true) to set equals case sensitive or not
  * `transformer`: Function that takes a selector and its value as parameter and must return the transformed value

  ### Select option

  Possible values of the `select` option are:

  * `:all`: use `SELECT *` (default value)
  * `:from_selectors`: Searches for all selectors in the FIQL AST and use them as `SELECT` statement.
    For instance, for the following query: `age=ge=25;name==*Doe`, the `SELECT` statement will be `SELECT age, name`
  * `selectors`: You specify a list of items you want to use in the `SELECT` statement.

  ### Ecto option

  You can directly execute the SQL query in an Ecto context by providing in a tuple the repo and the schema to use.
  For instance:

  ```elixir
  FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, ecto: {Repo, User})
  ```

  May return something like this:

  ```elixir
  {:ok, [%User{name: "John", age: 18}, %User{name: "John", age: 21}]}
  ```

  ## Examples

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, table: "author")
      {:ok, "SELECT name FROM author WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :all)
      {:ok, "SELECT * FROM table WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: ["another", "other"])
      {:ok, "SELECT another, other FROM table WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John;(age=gt=25,age=lt=18)"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name, age FROM table WHERE (name = 'John' AND (age > 25 OR age < 18))"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name=ge=John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:error, :invalid_format}

      iex> FIQLEx.build_query(FIQLEx.parse!("name=ge=2019-02-02T18:23:03Z"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name >= '2019-02-02T18:23:03Z'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=12.4"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name <> 12.4"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=true"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name <> true"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=false"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name <> false"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name IS NOT NULL"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=(1,2,Hello)"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name NOT IN (1, 2, 'Hello')"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!='Hello \\'World'"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name <> 'Hello ''World'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=*Hello"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name NOT LIKE '%Hello'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello*"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name FROM table WHERE name LIKE 'Hello%'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello;age=ge=10;friend==true"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name, age, friend FROM table WHERE (name = 'Hello' AND (age >= 10 AND friend = true))"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello,age=ge=10,friend==true"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name, age, friend FROM table WHERE (name = 'Hello' OR (age >= 10 OR friend = true))"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello;age=ge=10;friend==true;ok"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name, age, friend, ok FROM table WHERE (name = 'Hello' AND (age >= 10 AND (friend = true AND ok IS NOT NULL)))"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello,age=ge=10,friend==true,ok"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors)
      {:ok, "SELECT name, age, friend, ok FROM table WHERE (name = 'Hello' OR (age >= 10 OR (friend = true OR ok IS NOT NULL)))"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, only: ["bad"])
      {:error, :selector_not_allowed}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, only: ["name"])
      {:ok, "SELECT * FROM table WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, except: ["name"])
      {:error, :selector_not_allowed}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, except: ["bad"])
      {:ok, "SELECT * FROM table WHERE name = 'John'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, order_by: "name DESC")
      {:ok, "SELECT * FROM table WHERE name = 'John' ORDER BY name DESC"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, limit: "10")
      {:ok, "SELECT * FROM table WHERE name = 'John' LIMIT 10"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, offset: "10")
      {:ok, "SELECT * FROM table WHERE name = 'John' OFFSET 10"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, order_by: "name", limit: "5", offset: "10")
      {:ok, "SELECT * FROM table WHERE name = 'John' ORDER BY name LIMIT 5 OFFSET 10"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, case_sensitive: false)
      {:ok, "SELECT name FROM table WHERE LOWER(name) = LOWER('John')"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=John"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, case_sensitive: false)
      {:ok, "SELECT name FROM table WHERE LOWER(name) <> LOWER('John')"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name!=*Hello"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, case_sensitive: false)
      {:ok, "SELECT name FROM table WHERE name NOT ILIKE '%Hello'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==Hello*"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, case_sensitive: false)
      {:ok, "SELECT name FROM table WHERE name ILIKE 'Hello%'"}

      iex> FIQLEx.build_query(FIQLEx.parse!("name==John;age==18"), FIQLEx.QueryBuilders.SQLQueryBuilder, select: :from_selectors, transformer: fn selector, value -> if(selector == "name", do: "Johny", else: value) end)
      {:ok, "SELECT name, age FROM table WHERE (name = 'Johny' AND age = 18)"}
  """
  use FIQLEx.QueryBuilder

  @impl true
  # The builder state is {where_clause_so_far, opts}; starts empty.
  def init(_ast, opts) do
    {"", opts}
  end

  @impl true
  # Assembles the final SELECT statement around the accumulated WHERE clause,
  # then either returns the SQL string or executes it through Ecto when the
  # `ecto: {repo, schema}` option is given.
  def build(ast, {query, opts}) do
    select =
      case Keyword.get(opts, :select, :all) do
        :all ->
          "*"

        :from_selectors ->
          ast
          |> get_selectors()
          |> Enum.join(", ")

        selectors ->
          selectors |> Enum.join(", ")
      end

    table = Keyword.get(opts, :table, "table")
    order_by = Keyword.get(opts, :order_by, "")
    limit = Keyword.get(opts, :limit, "")
    offset = Keyword.get(opts, :offset, "")

    final_query =
      ("SELECT " <> select <> " FROM " <> table <> " WHERE " <> query)
      |> add_to_query("ORDER BY", order_by)
      |> add_to_query("LIMIT", limit)
      |> add_to_query("OFFSET", offset)

    case Keyword.get(opts, :ecto, nil) do
      nil ->
        {:ok, final_query}

      {repo, model} ->
        Ecto.Adapters.SQL.query(repo, final_query, [])
        |> load_into_model(repo, model)
    end
  end

  # Appends "<command> <suffix>" to the query; an empty suffix is skipped.
  defp add_to_query(query, _command, ""), do: query
  defp add_to_query(query, command, suffix), do: query <> " " <> command <> " " <> suffix

  # Converts a raw SQL result set into a list of Ecto schema structs via
  # repo.load/2; anything but a successful result becomes :invalid_response.
  defp load_into_model({:ok, %{rows: rows, columns: columns}}, repo, model) do
    {:ok,
     Enum.map(rows, fn row ->
       fields =
         Enum.reduce(Enum.zip(columns, row), %{}, fn {key, value}, map ->
           Map.put(map, key, value)
         end)

       repo.load(model, fields)
     end)}
  end

  defp load_into_model(_response, _repo, _model) do
    {:error, :invalid_response}
  end

  # Whitelist/blacklist filtering via the :only / :except options.
  # :only takes precedence when both are given.
  defp is_selector_allowed(selector, opts) do
    case Keyword.get(opts, :only, nil) do
      nil ->
        case Keyword.get(opts, :except, nil) do
          nil ->
            true

          fields ->
            not Enum.member?(fields, selector)
        end

      fields ->
        Enum.member?(fields, selector)
    end
  end

  defp is_case_insensitive(opts) do
    not Keyword.get(opts, :case_sensitive, true)
  end

  # Equality comparison; wraps both sides in LOWER(...) when the
  # :case_sensitive option is false.
  def binary_equal(selector_name, value, opts) do
    if is_case_insensitive(opts) do
      "LOWER(" <> selector_name <> ") = LOWER(" <> value <> ")"
    else
      selector_name <> " = " <> value
    end
  end

  # LIKE/ILIKE comparison; FIQL wildcards (*) are rewritten to SQL (%).
  def binary_like(selector_name, value, opts) do
    if is_case_insensitive(opts) do
      selector_name <> " ILIKE " <> String.replace(escape_string(value), "*", "%", global: true)
    else
      selector_name <> " LIKE " <> String.replace(escape_string(value), "*", "%", global: true)
    end
  end

  def binary_not_equal(selector_name, value, opts) do
    if is_case_insensitive(opts) do
      "LOWER(" <> selector_name <> ") <> LOWER(" <> value <> ")"
    else
      selector_name <> " <> " <> value
    end
  end

  def binary_not_like(selector_name, value, opts) do
    if is_case_insensitive(opts) do
      selector_name <>
        " NOT ILIKE " <> String.replace(escape_string(value), "*", "%", global: true)
    else
      selector_name <>
        " NOT LIKE " <> String.replace(escape_string(value), "*", "%", global: true)
    end
  end

  # Default :transformer — leaves the value unchanged.
  def identity_transformer(_selector, value), do: value

  @impl true
  def handle_or_expression(exp1, exp2, ast, {query, opts}) do
    with {:ok, {left, _opts}} <- handle_ast(exp1, ast, {query, opts}),
         {:ok, {right, _opts}} <- handle_ast(exp2, ast, {query, opts}) do
      {:ok, {"(" <> left <> " OR " <> right <> ")", opts}}
    else
      {:error, err} -> {:error, err}
    end
  end

  @impl true
  def handle_and_expression(exp1, exp2, ast, {query, opts}) do
    with {:ok, {left, _opts}} <- handle_ast(exp1, ast, {query, opts}),
         {:ok, {right, _opts}} <- handle_ast(exp2, ast, {query, opts}) do
      {:ok, {"(" <> left <> " AND " <> right <> ")", opts}}
    else
      {:error, err} -> {:error, err}
    end
  end

  @impl true
  def handle_expression(exp, ast, {query, opts}) do
    with {:ok, {constraint, _opts}} <- handle_ast(exp, ast, {query, opts}) do
      {:ok, {constraint, opts}}
    else
      {:error, err} -> {:error, err}
    end
  end

  @impl true
  # A bare selector (no operator/value) means "IS NOT NULL".
  def handle_selector(selector_name, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " IS NOT NULL", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  @impl true
  # Applies the optional :transformer to the value before dispatching on the
  # operator and the value's type.
  def handle_selector_and_value(selector_name, op, value, ast, {query, opts}) do
    new_value = Keyword.get(opts, :transformer, &identity_transformer/2).(selector_name, value)
    do_handle_selector_and_value(selector_name, op, new_value, ast, {query, opts})
  end

  # String value: a leading/trailing * selects LIKE, otherwise plain equality.
  defp do_handle_selector_and_value(selector_name, :equal, value, _ast, {_query, opts})
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      if String.starts_with?(value, "*") || String.ends_with?(value, "*") do
        {:ok, {binary_like(selector_name, value, opts), opts}}
      else
        {:ok, {binary_equal(selector_name, escape_string(value), opts), opts}}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  # List value: IN (...).
  defp do_handle_selector_and_value(selector_name, :equal, value, _ast, {_query, opts})
       when is_list(value) do
    if is_selector_allowed(selector_name, opts) do
      values = value |> escape_list() |> Enum.join(", ")
      {:ok, {selector_name <> " IN (" <> values <> ")", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :equal, true, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " = true", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :equal, false, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " = false", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  # Fallback for numbers and other literals.
  defp do_handle_selector_and_value(selector_name, :equal, value, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {binary_equal(selector_name, to_string(value), opts), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  # :not_equal clauses mirror the :equal clauses above.
  defp do_handle_selector_and_value(selector_name, :not_equal, value, _ast, {_query, opts})
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      if String.starts_with?(value, "*") || String.ends_with?(value, "*") do
        {:ok, {binary_not_like(selector_name, value, opts), opts}}
      else
        {:ok, {binary_not_equal(selector_name, escape_string(value), opts), opts}}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :not_equal, value, _ast, {_query, opts})
       when is_list(value) do
    if is_selector_allowed(selector_name, opts) do
      values = value |> escape_list() |> Enum.join(", ")
      {:ok, {selector_name <> " NOT IN (" <> values <> ")", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :not_equal, true, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " <> true", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :not_equal, false, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " <> false", opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value(selector_name, :not_equal, value, _ast, {_query, opts}) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {binary_not_equal(selector_name, to_string(value), opts), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  @impl true
  # Comparison operators ("ge"/"gt"/"le"/"lt") accept numbers or ISO 8601
  # date strings only.
  def handle_selector_and_value_with_comparison(selector_name, op, value, ast, {query, opts}) do
    new_value = Keyword.get(opts, :transformer, &identity_transformer/2).(selector_name, value)
    do_handle_selector_and_value_with_comparison(selector_name, op, new_value, ast, {query, opts})
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "ge",
         value,
         _ast,
         {_query, opts}
       )
       when is_number(value) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " >= " <> to_string(value), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "gt",
         value,
         _ast,
         {_query, opts}
       )
       when is_number(value) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " > " <> to_string(value), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "le",
         value,
         _ast,
         {_query, opts}
       )
       when is_number(value) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " <= " <> to_string(value), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "lt",
         value,
         _ast,
         {_query, opts}
       )
       when is_number(value) do
    if is_selector_allowed(selector_name, opts) do
      {:ok, {selector_name <> " < " <> to_string(value), opts}}
    else
      {:error, :selector_not_allowed}
    end
  end

  # String operands must parse as ISO 8601 datetimes to be comparable.
  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "ge",
         value,
         _ast,
         {_query, opts}
       )
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      case DateTime.from_iso8601(value) do
        {:ok, _date, _} -> {:ok, {selector_name <> " >= '" <> value <> "'", opts}}
        {:error, err} -> {:error, err}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "gt",
         value,
         _ast,
         {_query, opts}
       )
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      case DateTime.from_iso8601(value) do
        {:ok, _date, _} -> {:ok, {selector_name <> " > '" <> value <> "'", opts}}
        {:error, err} -> {:error, err}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "le",
         value,
         _ast,
         {_query, opts}
       )
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      case DateTime.from_iso8601(value) do
        {:ok, _date, _} -> {:ok, {selector_name <> " <= '" <> value <> "'", opts}}
        {:error, err} -> {:error, err}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  defp do_handle_selector_and_value_with_comparison(
         selector_name,
         "lt",
         value,
         _ast,
         {_query, opts}
       )
       when is_binary(value) do
    if is_selector_allowed(selector_name, opts) do
      case DateTime.from_iso8601(value) do
        {:ok, _date, _} -> {:ok, {selector_name <> " < '" <> value <> "'", opts}}
        {:error, err} -> {:error, err}
      end
    else
      {:error, :selector_not_allowed}
    end
  end

  # A number with any operator other than ge/gt/le/lt.
  defp do_handle_selector_and_value_with_comparison(_selector_name, op, value, _ast, _state)
       when is_number(value) do
    {:error, "Unsupported " <> op <> " operator"}
  end

  defp do_handle_selector_and_value_with_comparison(_selector_name, _op, value, _ast, _state) do
    {:error,
     "Comparisons must be done against number or date values (got: " <> to_string(value) <> ")"}
  end

  # Single-quotes a string, doubling embedded quotes per SQL convention.
  defp escape_string(str) when is_binary(str),
    do: "'" <> String.replace(str, "'", "''", global: true) <> "'"

  defp escape_string(str), do: to_string(str)

  defp escape_list(list), do: Enum.map(list, &escape_string/1)
end
|
lib/query_builders/sql_query_builder.ex
| 0.787073
| 0.755186
|
sql_query_builder.ex
|
starcoder
|
defmodule Exop.ValidationChecks do
  @moduledoc """
  Provides low-level validation functions:

  * check_type/3
  * check_required/3
  * check_numericality/3
  * check_in/3
  * check_not_in/3
  * check_format/3
  * check_length/3
  * check_struct/3
  * check_func/3
  * check_equals/3
  * check_exactly/3
  * check_allow_nil/3
  """

  alias Exop.TypeValidation

  # Sentinel default used to tell "key is absent" apart from "key is bound
  # to nil" when probing maps/keyword lists.
  @no_check_item :exop_no_check_item

  # A failed check is reported as a map from the offending key to a
  # human-readable message.
  @type check_error :: %{(atom() | String.t()) => String.t()}
@doc """
Returns an check_item's value from either a Keyword or a Map by an atom-key.
## Examples
iex> Exop.ValidationChecks.get_check_item(%{a: 1, b: 2}, :a)
1
iex> Exop.ValidationChecks.get_check_item([a: 1, b: 2], :b)
2
iex> Exop.ValidationChecks.get_check_item(%{a: 1, b: 2}, :c)
nil
"""
@spec get_check_item(map(), atom() | String.t()) :: any() | nil
def get_check_item(check_items, item_name) when is_map(check_items) do
Map.get(check_items, item_name)
end
def get_check_item(check_items, item_name) when is_list(check_items) do
Keyword.get(check_items, item_name)
end
def get_check_item(_check_items, _item), do: nil
@doc """
Checks whether a check_item has been provided.
Returns a boolean.
## Examples
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: 2}, :a)
true
iex> Exop.ValidationChecks.check_item_present?([a: 1, b: 2], :b)
true
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: 2}, :c)
false
iex> Exop.ValidationChecks.check_item_present?(%{a: 1, b: nil}, :b)
true
"""
@spec check_item_present?(map(), atom() | String.t()) :: boolean()
def check_item_present?(check_items, item_name) when is_map(check_items) do
Map.get(check_items, item_name, @no_check_item) != @no_check_item
end
def check_item_present?(check_items, item_name) when is_list(check_items) do
Keyword.get(check_items, item_name, @no_check_item) != @no_check_item
end
def check_item_present?(_check_items, _item), do: false
@doc """
Checks if an item_name presents in params if its required (true).
## Examples
iex> Exop.ValidationChecks.check_required(%{}, :some_item, false)
true
iex> Exop.ValidationChecks.check_required([a: 1, b: 2], :a, true)
true
iex> Exop.ValidationChecks.check_required(%{a: 1, b: 2}, :b, true)
true
"""
@spec check_required(map(), atom() | String.t(), boolean) :: true | check_error
def check_required(_check_items, _item, false), do: true
def check_required(check_items, item_name, true) do
check_item_present?(check_items, item_name) || %{item_name => "is required"}
end
@doc """
Checks the type of an item_name.
## Examples
iex> Exop.ValidationChecks.check_type(%{a: 1}, :a, :integer)
true
iex> Exop.ValidationChecks.check_type(%{a: "1"}, :a, :string)
true
iex> Exop.ValidationChecks.check_type(%{a: nil}, :a, :string)
%{:a => "has wrong type; expected type: string, got: nil"}
"""
@spec check_type(map(), atom() | String.t(), atom()) :: true | check_error
def check_type(check_items, item_name, check) do
if check_item_present?(check_items, item_name) do
check_item = get_check_item(check_items, item_name)
TypeValidation.check_value(check_item, check) ||
%{item_name => "has wrong type; expected type: #{check}, got: #{inspect(check_item)}"}
else
true
end
end
@doc """
Checks an item_name over numericality constraints.
## Examples
iex> Exop.ValidationChecks.check_numericality(%{a: 3}, :a, %{ equal_to: 3 })
true
iex> Exop.ValidationChecks.check_numericality(%{a: 5}, :a, %{ greater_than_or_equal_to: 3 })
true
iex> Exop.ValidationChecks.check_numericality(%{a: 3}, :a, %{ less_than_or_equal_to: 3 })
true
"""
@spec check_numericality(map(), atom() | String.t(), map()) :: true | check_error
def check_numericality(check_items, item_name, checks) do
if check_item_present?(check_items, item_name) do
check_item = get_check_item(check_items, item_name)
cond do
is_number(check_item) ->
result = checks |> Enum.map(&check_number(check_item, item_name, &1))
if Enum.all?(result, &(&1 == true)), do: true, else: result
true ->
%{item_name => "not a number. got: #{inspect(check_item)}"}
end
else
true
end
end
@spec check_number(number, atom() | String.t(), {atom, number}) :: boolean
# Exact equality; the canonical form for :eq / :equals / :is.
defp check_number(n, key, {:equal_to, expected}) do
  n == expected || %{key => "must be equal to #{expected}; got: #{inspect(n)}"}
end

defp check_number(n, key, {:eq, expected}), do: check_number(n, key, {:equal_to, expected})
defp check_number(n, key, {:equals, expected}), do: check_number(n, key, {:equal_to, expected})
defp check_number(n, key, {:is, expected}), do: check_number(n, key, {:equal_to, expected})

defp check_number(n, key, {:greater_than, expected}) do
  n > expected || %{key => "must be greater than #{expected}; got: #{inspect(n)}"}
end

defp check_number(n, key, {:gt, expected}), do: check_number(n, key, {:greater_than, expected})

defp check_number(n, key, {:greater_than_or_equal_to, expected}) do
  n >= expected ||
    %{key => "must be greater than or equal to #{expected}; got: #{inspect(n)}"}
end

# :min and :gte are aliases for :greater_than_or_equal_to.
defp check_number(n, key, {:min, expected}),
  do: check_number(n, key, {:greater_than_or_equal_to, expected})

defp check_number(n, key, {:gte, expected}),
  do: check_number(n, key, {:greater_than_or_equal_to, expected})

defp check_number(n, key, {:less_than, expected}) do
  n < expected || %{key => "must be less than #{expected}; got: #{inspect(n)}"}
end

defp check_number(n, key, {:lt, expected}), do: check_number(n, key, {:less_than, expected})

defp check_number(n, key, {:less_than_or_equal_to, expected}) do
  n <= expected ||
    %{key => "must be less than or equal to #{expected}; got: #{inspect(n)}"}
end

# :lte and :max are aliases for :less_than_or_equal_to.
defp check_number(n, key, {:lte, expected}),
  do: check_number(n, key, {:less_than_or_equal_to, expected})

defp check_number(n, key, {:max, expected}),
  do: check_number(n, key, {:less_than_or_equal_to, expected})

# Unknown constraint name: report it rather than raise.
defp check_number(_n, key, {check, _expected}) do
  %{key => "unknown check '#{check}'"}
end
@doc """
Checks whether an item_name is a memeber of a list.
## Examples
iex> Exop.ValidationChecks.check_in(%{a: 1}, :a, [1, 2, 3])
true
"""
@spec check_in(map(), atom() | String.t(), list()) :: true | check_error
def check_in(check_items, item_name, check_list) when is_list(check_list) do
check_item = get_check_item(check_items, item_name)
if Enum.member?(check_list, check_item) do
true
else
%{item_name => "must be one of #{inspect(check_list)}; got: #{inspect(check_item)}"}
end
end
def check_in(_check_items, _item_name, _check_list), do: true
@doc """
Checks whether an item_name is not a memeber of a list.
## Examples
iex> Exop.ValidationChecks.check_not_in(%{a: 4}, :a, [1, 2, 3])
true
"""
@spec check_not_in(map(), atom() | String.t(), list()) :: true | check_error
def check_not_in(check_items, item_name, check_list) when is_list(check_list) do
check_item = get_check_item(check_items, item_name)
if Enum.member?(check_list, check_item) do
%{item_name => "must not be included in #{inspect(check_list)}; got: #{inspect(check_item)}"}
else
true
end
end
def check_not_in(_check_items, _item_name, _check_list), do: true
@doc """
Checks whether an item_name conforms the given format.
## Examples
iex> Exop.ValidationChecks.check_format(%{a: "bar"}, :a, ~r/bar/)
true
"""
@spec check_format(map(), atom() | String.t(), Regex.t()) :: true | check_error
def check_format(check_items, item_name, check) do
check_item = get_check_item(check_items, item_name)
if is_binary(check_item) do
if Regex.match?(check, check_item) do
true
else
%{item_name => "has invalid format.; got: #{inspect(check_item)}"}
end
else
true
end
end
@doc """
The alias for `check_format/3`.
Checks whether an item_name conforms the given format.
## Examples
iex> Exop.ValidationChecks.check_regex(%{a: "bar"}, :a, ~r/bar/)
true
"""
@spec check_regex(map(), atom() | String.t(), Regex.t()) :: true | check_error
def check_regex(check_items, item_name, check) do
check_format(check_items, item_name, check)
end
@doc """
Checks an item_name over length constraints.
## Examples
iex> Exop.ValidationChecks.check_length(%{a: "123"}, :a, %{min: 0})
[true]
iex> Exop.ValidationChecks.check_length(%{a: ~w(1 2 3)}, :a, %{in: 2..4})
[true]
iex> Exop.ValidationChecks.check_length(%{a: ~w(1 2 3)}, :a, %{is: 3, max: 4})
[true, true]
"""
@spec check_length(map(), atom() | String.t(), map()) :: true | [check_error]
def check_length(check_items, item_name, checks) do
check_item = get_check_item(check_items, item_name)
actual_length = get_length(check_item)
for {check, check_value} <- checks, into: [] do
check_length(check, item_name, actual_length, check_value)
end
end
@spec get_length(any) :: pos_integer() | {:error, :wrong_type}
# Measures the "length" of a value: list length, grapheme count for
# binaries, the atom's string length, a map's key count, or tuple size.
defp get_length(value) when is_list(value), do: length(value)
defp get_length(value) when is_binary(value), do: String.length(value)
defp get_length(value) when is_atom(value), do: value |> Atom.to_string() |> get_length()
defp get_length(value) when is_map(value), do: value |> Map.keys() |> length()
defp get_length(value) when is_tuple(value), do: tuple_size(value)
defp get_length(_value), do: {:error, :wrong_type}
@spec check_length(atom(), atom() | String.t(), pos_integer | {:error, :wrong_type}, number) ::
        true | check_error
# The value had no measurable length at all.
defp check_length(_check, key, {:error, :wrong_type}, _expected) do
  %{key => "length check supports only lists, binaries, atoms, maps and tuples"}
end

# :min is an alias for :gte.
defp check_length(:min, key, len, expected), do: check_length(:gte, key, len, expected)

defp check_length(:gte, key, len, expected) do
  len >= expected ||
    %{key => "length must be greater than or equal to #{expected}; got length: #{inspect(len)}"}
end

defp check_length(:gt, key, len, expected) do
  len > expected ||
    %{key => "length must be greater than #{expected}; got length: #{inspect(len)}"}
end

# :max is an alias for :lte.
defp check_length(:max, key, len, expected), do: check_length(:lte, key, len, expected)

defp check_length(:lte, key, len, expected) do
  len <= expected ||
    %{key => "length must be less than or equal to #{expected}; got length: #{inspect(len)}"}
end

defp check_length(:lt, key, len, expected) do
  len < expected ||
    %{key => "length must be less than #{expected}; got length: #{inspect(len)}"}
end

defp check_length(:is, key, len, expected) do
  len == expected ||
    %{key => "length must be equal to #{expected}; got length: #{inspect(len)}"}
end

# :in accepts any enumerable of lengths, typically a range like 2..4.
defp check_length(:in, key, len, expected) do
  Enum.member?(expected, len) ||
    %{key => "length must be in range #{expected}; got length: #{inspect(len)}"}
end

# Unknown constraint name: report it rather than raise.
defp check_length(check, key, _len, _expected) do
  %{key => "unknown check '#{check}'"}
end
@doc """
Checks whether an item is expected structure.
## Examples
defmodule SomeStruct1, do: defstruct [:a, :b]
defmodule SomeStruct2, do: defstruct [:b, :c]
Exop.ValidationChecks.check_struct(%{a: %SomeStruct1{}}, :a, %SomeStruct1{})
# true
Exop.ValidationChecks.check_struct(%{a: %SomeStruct1{}}, :a, %SomeStruct2{})
# false
"""
@spec check_struct(map(), atom() | String.t(), struct()) :: true | check_error
def check_struct(check_items, item_name, check) do
check_items
|> get_check_item(item_name)
|> validate_struct(check, item_name)
end
@doc """
Checks whether an item is valid over custom validation function.
## Examples
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn({:a, value}, _all_param)-> value > 0 end)
true
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn({:a, _value}, _all_param)-> :ok end)
true
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn({:a, value}, _all_param)-> is_nil(value) end)
%{a: "not valid"}
iex> Exop.ValidationChecks.check_func(%{a: 1}, :a, fn({:a, _value}, _all_param)-> :error end)
%{a: "not valid"}
iex> Exop.ValidationChecks.check_func(%{a: -1}, :a, fn({:a, _value}, _all_param)-> {:error, :my_error} end)
%{a: :my_error}
"""
@spec check_func(
        map(),
        atom() | String.t(),
        ({atom() | String.t(), any()}, map() -> any())
      ) :: true | check_error
def check_func(check_items, item_name, check) do
  item = get_check_item(check_items, item_name)

  # The custom function receives the {name, value} pair plus all params.
  # `false`/`:error` map to the generic message, `{:error, msg}` carries a
  # custom message, and any other return counts as success.
  case check.({item_name, item}, check_items) do
    false -> %{item_name => "not valid"}
    :error -> %{item_name => "not valid"}
    {:error, msg} -> %{item_name => msg}
    _other -> true
  end
end
@doc """
Checks whether a parameter's value exactly equals given value (with type equality).
## Examples
iex> Exop.ValidationChecks.check_equals(%{a: 1}, :a, 1)
true
"""
@spec check_equals(map(), atom() | String.t(), any()) :: true | check_error
def check_equals(check_items, item_name, check_value) do
  # Strict (===) comparison, same short-circuit shape as the other checks:
  # `true` on match, error map otherwise.
  item = get_check_item(check_items, item_name)

  item === check_value ||
    %{item_name => "must be equal to #{inspect(check_value)}; got: #{inspect(item)}"}
end
@doc """
The alias for `check_equals/3`.
Checks whether a parameter's value exactly equals given value (with type equality).
## Examples
iex> Exop.ValidationChecks.check_exactly(%{a: 1}, :a, 1)
true
"""
@spec check_exactly(map(), atom() | String.t(), any()) :: true | check_error
def check_exactly(check_items, item_name, check_value),
  do: check_equals(check_items, item_name, check_value)
# When nil is allowed there is nothing to validate.
@spec check_allow_nil(map(), atom() | String.t(), boolean()) :: true | check_error
def check_allow_nil(_check_items, _item_name, true), do: true

def check_allow_nil(check_items, item_name, false) do
  # nil is forbidden: reject only when the fetched value is nil.
  case get_check_item(check_items, item_name) do
    nil -> %{item_name => "doesn't allow nil"}
    _value -> true
  end
end
@doc """
Checks whether a parameter's value is a non-empty list whose elements all
belong to `check_list`.

Returns `true` on success, or an error map `%{item_name => message}` when the
value is not a list, is an empty list, or contains elements outside
`check_list` (an empty list is deliberately rejected, matching the original
behavior).
"""
@spec check_subset_of(map(), atom() | String.t(), list()) :: true | check_error
def check_subset_of(check_items, item_name, check_list) when is_list(check_list) do
  check_item = get_check_item(check_items, item_name)

  cond do
    # Not a list at all.
    not is_list(check_item) ->
      %{item_name => "must be a list; got: #{inspect(check_item)}"}

    # Non-empty and every element appears in `check_list`
    # (pattern `!= []` replaces the O(n) `length/1 > 0` check).
    check_item != [] and (check_item -- check_list) == [] ->
      true

    # Empty list, or contains elements outside `check_list` — the original
    # produced the exact same message for both cases.
    true ->
      %{item_name => "must be a subset of #{inspect(check_list)}; got: #{inspect(check_item)}"}
  end
end
# Validates that an item is (an instance of) the expected struct.
#
# The check may be given either as a struct instance (`%User{}`) or as a
# module atom (`User`). Returns `true` on success, or an error map
# `%{item_name => message}` on failure. (The previous spec claimed
# `boolean()`, which was wrong — the failure clauses return a map, never
# `false`.)
@spec validate_struct(any(), any(), atom() | String.t()) :: true | check_error
# Item and check are structs of the same module.
defp validate_struct(%struct{}, %struct{}, _item_name), do: true
# Check is the module atom of the item's own struct.
defp validate_struct(%struct{}, struct, _item_name) when is_atom(struct), do: true
# Check is some other module atom: mismatch.
defp validate_struct(%struct{}, check_struct, item_name) when is_atom(check_struct) do
  %{
    item_name =>
      "is not expected struct; expected: #{inspect(check_struct)}; got: #{inspect(struct)}"
  }
end
# Item is a bare atom (not a struct): always a mismatch.
defp validate_struct(item, check_struct, item_name) when is_atom(item) do
  %{
    item_name =>
      "is not expected struct; expected: #{inspect(check_struct)}; got: #{inspect(item)}"
  }
end
# Item and check are structs of different modules.
defp validate_struct(%struct{} = _item, %check_struct{}, item_name) do
  %{
    item_name =>
      "is not expected struct; expected: #{inspect(check_struct)}; got: #{inspect(struct)}"
  }
end
# Anything else (non-struct item, or an unexpected check shape).
defp validate_struct(item, check_struct, item_name) do
  %{
    item_name =>
      "is not expected struct; expected: #{inspect(check_struct)}; got: #{inspect(item)}"
  }
end
end
|
lib/exop/validation_checks.ex
| 0.870989
| 0.599251
|
validation_checks.ex
|
starcoder
|
defmodule Cables.Handler do
  @moduledoc """
  Handlers specify how to consume response data.
  The handler flow starts with `init/3` being called and returning the initial state or an error. In `init`,
  additional data can be sent with the request by using `Cables.send_data/2` and `Cables.send_final_data/2`.
  After getting the new state we wait until we receive the headers. `handle_headers/3` will be called with the
  status, headers and the state taken from `init`. A new state should be returned.
  After processing the headers, Cables will loop with `handle_data/2` until there is no more response data. Each call to `handle_data` should return a new state for the loop.
  After all response data is received, `handle_finish/1` will be called with the state from `handle_data` to finish any processing.
  """

  # NOTE(review): `handle/4` below uses the return of `init/3` directly as the
  # state (it is never unwrapped), so the callback is specced as returning the
  # bare state rather than the previously declared `{:ok, state}` tuple, which
  # did not match actual usage.
  @callback init(gun_pid :: pid, stream_ref :: reference, init_arg :: term) :: state when state: any
  @callback handle_headers(integer, [{String.t, String.t}], state) :: state when state: any
  @callback handle_data(String.t, state) :: state when state: any
  @callback handle_finish(state) :: {:ok, any} when state: any
  @callback handle_down(reason :: any, state) :: {:error, any} when state: any

  defmacro __using__(_opts) do
    quote do
      @behaviour Cables.Handler

      # Drives a single gun request/response cycle: initializes handler state,
      # waits for the response headers, then streams body chunks through
      # `handle_data/2`, finishing with `handle_finish/1`. A monitor on the
      # gun process and a timeout timer guard against a dead connection or a
      # stalled response.
      def handle(gun_pid, stream_ref, timeout, init_arg) do
        state = init(gun_pid, stream_ref, init_arg)
        mref = :erlang.monitor(:process, gun_pid)
        timer = Process.send_after(self(), {:timeout, stream_ref}, timeout)
        receive do
          # Body-less response: the headers are the entire reply.
          {:gun_response, ^gun_pid, ^stream_ref, :fin, status, headers} ->
            Process.cancel_timer(timer, async: true, info: false)
            handle_headers(status, headers, state) |> handle_finish()
          # Response with a body: process headers, then loop over data frames.
          {:gun_response, ^gun_pid, ^stream_ref, :nofin, status, headers} ->
            new_state = handle_headers(status, headers, state)
            handle_data_loop(mref, gun_pid, stream_ref, timer, new_state)
          # Gun connection process died before replying.
          {:DOWN, ^mref, :process, ^gun_pid, reason} ->
            Process.cancel_timer(timer, async: true, info: false)
            handle_down(reason, state)
          # Our own timeout fired: cancel the in-flight request.
          {:timeout, ^stream_ref} ->
            :gun.cancel(gun_pid, stream_ref)
            {:error, :connection_timeout}
        end
      end

      # Default finisher: wrap the final state in an :ok tuple.
      def handle_finish(state) do
        {:ok, state}
      end

      # Default connection-down handler. The state parameter is unused here,
      # so it is named `_state` (the previous bare `state` produced an
      # unused-variable compiler warning in every using module).
      def handle_down(reason, _state) do
        {:error, {:conn_closed, reason}}
      end

      # Receive loop for body chunks until the :fin frame arrives.
      # NOTE(review): the gun-process monitor is never demonitored after a
      # successful finish, so a late :DOWN or :timeout message may remain in
      # the mailbox — confirm callers run in short-lived processes.
      defp handle_data_loop(mref, gun_pid, stream_ref, timer, state) do
        receive do
          {:gun_data, ^gun_pid, ^stream_ref, :nofin, chunk} ->
            handle_data_loop(mref, gun_pid, stream_ref, timer, handle_data(chunk, state))
          {:gun_data, ^gun_pid, ^stream_ref, :fin, chunk} ->
            Process.cancel_timer(timer, async: true, info: false)
            handle_data(chunk, state) |> handle_finish()
          {:DOWN, ^mref, :process, ^gun_pid, reason} ->
            Process.cancel_timer(timer, async: true, info: false)
            handle_down(reason, state)
          {:timeout, ^stream_ref} ->
            :gun.cancel(gun_pid, stream_ref)
            {:error, :connection_timeout}
        end
      end

      defoverridable handle_finish: 1, handle_down: 2
    end
  end
end
|
lib/cables/handler.ex
| 0.825273
| 0.619068
|
handler.ex
|
starcoder
|
defmodule Aoc.Year2015.Day01 do
  @moduledoc """
  Solution to Day 01 of 2015: Not Quite Lisp
  ## --- Day 01: Not Quite Lisp ---
  Santa was hoping for a white Christmas, but his weather machine's "snow"
  function is powered by stars, and he's fresh out! To save Christmas, he needs
  you to collect *fifty stars* by December 25th.
  Collect stars by helping Santa solve puzzles. Two puzzles will be made available
  on each day in the advent calendar; the second puzzle is unlocked when you
  complete the first. Each puzzle grants *one star*. Good luck!
  Here's an easy puzzle to warm you up.
  Santa is trying to deliver presents in a large apartment building, but he can't
  find the right floor - the directions he got are a little confusing. He starts
  on the ground floor (floor `0`) and then follows the instructions one character
  at a time.
  An opening parenthesis, `(`, means he should go up one floor, and a closing
  parenthesis, `)`, means he should go down one floor.
  The apartment building is very tall, and the basement is very deep; he will
  never find the top or bottom floors.
  For example:
  - `(())` and `()()` both result in floor `0`.
  - `(((` and `(()(()(` both result in floor `3`.
  - `))(((((` also results in floor `3`.
  - `())` and `))(` both result in floor `-1` (the first basement level).
  - `)))` and `)())())` both result in floor `-3`.
  To *what floor* do the instructions take Santa?
  ## --- Part Two ---
  Now, given the same instructions, find the *position* of the first character
  that causes him to enter the basement (floor `-1`). The first character in the
  instructions has position `1`, the second character has position `2`, and so on.
  For example:
  - `)` causes him to enter the basement at character position `1`.
  - `()())` causes him to enter the basement at character position `5`.
  What is the *position* of the character that causes Santa to first enter the
  basement?
  """

  @doc """
  Converts the input into a `charlist` and reduces it to Santa's final floor.
  Empty input leaves Santa on floor `0`.
  """
  def part_1(input) do
    input
    |> String.to_charlist()
    |> final_floor(0)
  end

  # Char literals `?(` / `?)` replace the previous magic numbers 40 / 41:
  # `(` goes up one floor, `)` goes down one.
  defp final_floor([?( | tail], floor), do: final_floor(tail, floor + 1)
  defp final_floor([?) | tail], floor), do: final_floor(tail, floor - 1)
  # Instructions exhausted: the accumulator is the final floor.
  defp final_floor([], floor), do: floor

  @doc """
  Converts the input into a `charlist` and walks it until Santa first reaches
  the basement (floor `-1`), returning that 1-based character position, or
  `nil` if he never enters the basement.
  """
  def part_2(input) do
    input
    |> String.to_charlist()
    |> basement_position(0, 0)
  end

  # `)` while on the ground floor: Santa steps into the basement — return the
  # 1-based position of this character.
  defp basement_position([?) | _tail], 0, position), do: position + 1
  # `(` moves up a floor; advance the position tracker.
  defp basement_position([?( | tail], floor, position),
    do: basement_position(tail, floor + 1, position + 1)
  # `)` above ground level moves down a floor; advance the position tracker.
  defp basement_position([?) | tail], floor, position),
    do: basement_position(tail, floor - 1, position + 1)
  # Never reached the basement.
  defp basement_position([], _floor, _position), do: nil
end
|
lib/aoc/year_2015/day_01.ex
| 0.804598
| 0.774967
|
day_01.ex
|
starcoder
|
defmodule Day06.SpaceObject do
  @moduledoc """
  A recursively defined tree.
  """

  # `name` is the object's identifier (e.g. "COM"); `children` holds the
  # subtrees of objects that directly orbit it. Both default to nil until set.
  @type t :: %__MODULE__{name: String.t() | nil, children: [t()] | nil}

  defstruct [:name, :children]
end
defmodule Day06 do
  @moduledoc """
  Advent of Code 2019
  Day 6: Universal Orbit Map
  """
  alias Day06.{Part1, Part2}

  # Reads the puzzle input and turns each "PARENT)CHILD" line into a
  # ["PARENT", "CHILD"] pair.
  def get_orbits() do
    input_path = Path.join(__DIR__, "inputs/day06.txt")

    input_path
    |> File.open!()
    |> IO.stream(:line)
    |> Enum.map(fn line ->
      line
      |> String.trim()
      |> String.split(")")
    end)
  end

  # Solves both parts against the same parsed input and prints the answers.
  def execute() do
    orbits = get_orbits()
    IO.puts("Part 1: #{Part1.run(orbits)}")
    IO.puts("Part 2: #{Part2.run(orbits)}")
  end
end
defmodule Day06.Part1 do
  alias Day06.SpaceObject

  @doc """
  Counts the total number of direct and indirect orbits described by the
  `[[parent, child], ...]` pairs.
  """
  def run(orbits) do
    orbits
    |> map_parents_to_children()
    |> construct_tree()
    |> count_orbits()
  end

  # Builds %{parent => [child, ...]} from the orbit pairs.
  def map_parents_to_children(orbits, map \\ %{})
  def map_parents_to_children([], map), do: map

  def map_parents_to_children([[parent, child] | orbits], map) do
    map = Map.update(map, parent, [child], &[child | &1])
    map_parents_to_children(orbits, map)
  end

  # Recursively builds the orbit tree rooted at `object_name` ("COM" by
  # default). `Map.get/3` with a `[]` default replaces the previous
  # `if Map.has_key?(...)` dance.
  def construct_tree(orbits, object_name \\ "COM") do
    children =
      orbits
      |> Map.get(object_name, [])
      |> Enum.map(&construct_tree(orbits, &1))

    %SpaceObject{name: object_name, children: children}
  end

  # Sums the depth of every node: a node at depth d orbits d ancestors.
  def count_orbits(%{children: children} = _orbit_tree, depth \\ 1) do
    children
    |> Enum.map(&(depth + count_orbits(&1, depth + 1)))
    |> Enum.sum()
  end
end
defmodule Day06.Part2 do
  alias Day06.Part1

  @doc """
  Finds the minimum number of orbital transfers between the objects that
  "YOU" and "SAN" are orbiting.
  """
  def run(orbits) do
    orbits
    |> Part1.map_parents_to_children()
    |> Part1.construct_tree()
    |> calculate_orbital_transfers()
    # `elem/2` replaces the `|> (&elem(&1, 1)).()` anonymous-fn pipe.
    |> elem(1)
  end

  # Leaf node: "YOU"/"SAN" seed an :unconnected count at -1 (so the hop count
  # starts at 0 one level up); other leaves contribute nothing.
  def calculate_orbital_transfers(%{name: name, children: []} = _orbit_tree) do
    if name == "YOU" or name == "SAN", do: {:unconnected, -1}, else: nil
  end

  # Internal node: collect hop counts from children. One tagged child bubbles
  # up (unconnected counts grow by 1 per level); two tagged children means
  # this node is the junction between YOU and SAN, so their counts are summed
  # and marked :connected.
  def calculate_orbital_transfers(%{children: children} = _orbit_tree) do
    child_hops =
      children
      |> Enum.map(fn child ->
        case calculate_orbital_transfers(child) do
          {:connected, _hops} = child_hop -> child_hop
          {:unconnected, hops} -> {:unconnected, hops + 1}
          nil -> nil
        end
      end)
      |> Enum.reject(&is_nil/1)

    # Match on the list shape directly instead of `case length(child_hops)`.
    case child_hops do
      [] ->
        nil

      [single] ->
        single

      [_a, _b] ->
        hops =
          child_hops
          |> Enum.map(&elem(&1, 1))
          |> Enum.sum()

        {:connected, hops}
    end
  end
end
|
lib/day06.ex
| 0.703957
| 0.533397
|
day06.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.