defmodule Datix.Time do
@moduledoc """
  A `Time` parser using `Calendar.strftime` format strings.
"""
@doc """
Parses a time string according to the given `format`.
  See the `Calendar.strftime` documentation for how to specify a format string.
## Options
* `:calendar` - the calendar to build the `Time`, defaults to `Calendar.ISO`
  * `:preferred_time` - a string for the preferred format to show times.
    It cannot contain the `%X` format and defaults to `"%H:%M:%S"`
    when the option is not given.
  * `:am_pm_names` - a keyword list with the names of the periods of the day,
    defaults to `[am: "am", pm: "pm"]`.
  Any time component missing from the parsed input is set to its minimum value.
## Examples
```elixir
iex> Datix.Time.parse("11:12:55", "%X")
{:ok, ~T[11:12:55]}
iex> Datix.Time.parse("10 PM", "%I %p")
{:ok, ~T[22:00:00]}
```
"""
@spec parse(String.t(), String.t(), list()) ::
{:ok, Time.t()}
| {:error, :invalid_time}
| {:error, :invalid_input}
| {:error, {:parse_error, expected: String.t(), got: String.t()}}
| {:error, {:conflict, [expected: term(), got: term(), modifier: String.t()]}}
| {:error, {:invalid_string, [modifier: String.t()]}}
| {:error, {:invalid_integer, [modifier: String.t()]}}
| {:error, {:invalid_modifier, [modifier: String.t()]}}
def parse(time_str, format_str, opts \\ []) do
with {:ok, data} <- Datix.strptime(time_str, format_str, sweep(opts)) do
new(data, opts)
end
end
  @doc """
  Parses a time string according to the given `format`, raising an error for
  invalid arguments.
  """
@spec parse!(String.t(), String.t(), list()) :: Time.t()
def parse!(time_str, format_str, opts \\ []) do
time_str
|> Datix.strptime!(format_str, sweep(opts))
|> new(opts)
|> case do
{:ok, time} ->
time
{:error, reason} ->
raise ArgumentError, "cannot build time, reason: #{inspect(reason)}"
end
end
@doc false
def new(%{hour: hour, hour_12: hour_12} = data, opts) do
with {:ok, hour_24} <- to_hour_24(hour_12, Map.get(data, :am_pm)) do
case hour == hour_24 do
true -> data |> Map.delete(:hour_12) |> new(opts)
false -> {:error, :invalid_time}
end
end
end
def new(%{hour: h, minute: m, second: s, microsecond: ms}, opts) do
Time.new(h, m, s, microsecond(ms), Datix.calendar(opts))
end
def new(%{hour_12: h_12, minute: m, second: s, microsecond: ms} = data, opts) do
with {:ok, h} <- to_hour_24(h_12, Map.get(data, :am_pm)) do
Time.new(h, m, s, microsecond(ms), Datix.calendar(opts))
end
end
def new(data, opts), do: data |> Datix.assume(Time) |> new(opts)
defp to_hour_24(_hour_12, nil), do: {:error, :invalid_time}
defp to_hour_24(12, :am), do: {:ok, 0}
defp to_hour_24(12, :pm), do: {:ok, 12}
defp to_hour_24(hour_12, :am), do: {:ok, hour_12}
defp to_hour_24(hour_12, :pm), do: {:ok, hour_12 + 12}
  defp microsecond(ms) when is_tuple(ms), do: ms
  defp microsecond(ms) do
    # Derive the precision from the number of parsed digits, then right-pad
    # with zeros so the value is expressed in microseconds (6 digits).
    digits = Integer.digits(ms)
    precision = length(digits)
    new_ms = Integer.undigits(digits ++ List.duplicate(0, max(0, 6 - precision)))
    {new_ms, precision}
  end
defp sweep(opts), do: Keyword.delete(opts, :calendar)
end
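# Usage sketch (illustrative, not part of the original file):
#
#     Datix.Time.parse!("09:30:00", "%H:%M:%S")
#     #=> ~T[09:30:00]
#
#     # the :calendar option is forwarded when the Time struct is built:
#     Datix.Time.parse("11:12:55", "%X", calendar: Calendar.ISO)
#     #=> {:ok, ~T[11:12:55]}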
# file: lib/datix/time.ex
defmodule NeuralNetwork.Neuron do
defstruct in_conn: %{}, out_conn: [],
output: 0, forward_err_derivs: %{}
use GenServer
alias NeuralNetwork.Neuron
alias NeuralNetwork.Connection
alias NeuralNetwork.Sigmoid
alias NeuralNetwork.Backpropagation
def create do
{:ok, pid} = GenServer.start_link(__MODULE__, %Neuron{})
pid
end
  def connect_to(state, neuron_pid) do
    GenServer.call(neuron_pid, {:update_input, self(), 0})
    add_out_conn(state, neuron_pid)
  end
def add_out_conn(state, neuron_pid) do
%{state | out_conn: [neuron_pid | state.out_conn]}
end
  def prop_forward(state) do
    Enum.each(state.out_conn, fn dest_pid ->
      GenServer.call(dest_pid, {:update_input, self(), state.output})
    end)
    state
  end
def update_input(state, neuron_pid, value) do
existing_conn = Map.get(state.in_conn, neuron_pid)
    new_conn =
      case existing_conn do
        nil -> %Connection{value: value, index: new_in_conn_index(state), weight: random_weight()}
        %Connection{} -> %{existing_conn | value: value}
      end
%{state | in_conn: Map.put(state.in_conn, neuron_pid, new_conn)}
end
  def random_weight do
    :rand.uniform() - 0.5
  end
  def new_in_conn_index(state) do
    state.in_conn |> Map.keys() |> length()
  end
  def update_output(state) do
    output = state |> input_sum() |> apply_activation()
    %{state | output: output}
  end
  def input_sum(state) do
    state.in_conn
    |> Enum.map(fn {_pid, conn} -> conn.value * conn.weight end)
    |> Enum.sum()
  end
def apply_activation(value) do
Sigmoid.value(value)
end
def update_forward_err_deriv(state, neuron_pid, err_deriv) do
forward_err_derivs = Map.put(
state.forward_err_derivs, neuron_pid, err_deriv)
%{state | forward_err_derivs: forward_err_derivs}
end
  def prop_backward(state, target_output) do
    err_deriv = Backpropagation.backward_output_err_deriv(state, target_output)
    Enum.each(state.in_conn, fn {neuron_pid, conn} ->
      weighted_err_deriv = err_deriv * conn.weight
      GenServer.call(neuron_pid, {:update_forward_err_deriv, self(), weighted_err_deriv})
    end)
    state
  end
def adjust_weights(state, target_output) do
adj = calculate_weight_adjustments(state, target_output)
update_weights(state, adj)
end
def calculate_weight_adjustments(state, target_output) do
Enum.map(state.in_conn, fn {input_neuron_pid, _conn} ->
adj = Backpropagation.weight_adjustment(state, input_neuron_pid, target_output)
{input_neuron_pid, adj}
end)
|> Enum.into(%{})
end
def update_weights(state, weight_adjustments) do
in_conn = Enum.reduce state.in_conn, %{}, fn {input_neuron_pid, conn}, acc ->
weight_adjustment = Map.get(weight_adjustments, input_neuron_pid)
weight = conn.weight + weight_adjustment
Map.put(acc, input_neuron_pid, %{conn | weight: weight})
end
%{state | in_conn: in_conn}
end
  def get_in_conn(state) do
    state.in_conn
    |> Map.values()
    |> Enum.sort_by(fn conn -> conn.index end)
    |> Enum.map(fn conn -> %{weight: conn.weight, index: conn.index} end)
  end
def set_in_conn(state, conn_list) do
new_in_conn = Enum.reduce conn_list, state.in_conn, fn conn, map ->
Util.replace_in_map map, conn, fn (new_conn, old_conn) ->
new_conn.index == old_conn.index
end
end
%{state | in_conn: new_in_conn}
end
# Cast Callbacks
# ================
def handle_cast({:connect_to, neuron_pid}, state) do
{:noreply, connect_to(state, neuron_pid)}
end
def handle_cast(:prop_forward, state) do
{:noreply, prop_forward(state)}
end
def handle_cast({:update_input, neuron_pid, conn}, state) do
{:noreply, update_input(state, neuron_pid, conn)}
end
def handle_cast({:set_output, output}, state) do
{:noreply, %{state | output: output}}
end
def handle_cast({:update_forward_err_deriv, neuron_pid, err_deriv}, state) do
{:noreply, update_forward_err_deriv(state, neuron_pid, err_deriv)}
end
# Call Callbacks
# ================
def handle_call(:get_state, _from, state) do
{:reply, state, state}
end
def handle_call({:connect_to, neuron_pid}, _from, state) do
new_state = connect_to(state, neuron_pid)
{:reply, new_state, new_state}
end
def handle_call(:prop_forward, _from, state) do
new_state = prop_forward(state)
{:reply, new_state, new_state}
end
def handle_call({:update_input, neuron_pid, value}, _from, state) do
new_state = update_input(state, neuron_pid, value)
{:reply, new_state, new_state}
end
def handle_call(:update_output, _from, state) do
new_state = update_output(state)
{:reply, new_state, new_state}
end
def handle_call({:set_output, output}, _from, state) do
new_state = %{state | output: output}
{:reply, new_state, new_state}
end
def handle_call({:update_forward_err_deriv, neuron_pid, err_deriv}, _from, state) do
new_state = update_forward_err_deriv(state, neuron_pid, err_deriv)
{:reply, new_state, new_state}
end
def handle_call({:prop_backward, target_output}, _from, state) do
new_state = prop_backward(state, target_output)
{:reply, new_state, new_state}
end
def handle_call({:adjust_weights, target_output}, _from, state) do
new_state = adjust_weights(state, target_output)
{:reply, new_state, new_state}
end
def handle_call(:get_in_conn, _from, state) do
{:reply, get_in_conn(state), state}
end
def handle_call({:set_in_conn, conn_list}, _from, state) do
new_state = set_in_conn(state, conn_list)
{:reply, new_state, new_state}
end
end
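# Wiring sketch (illustrative, not part of the original file): two neurons are
# created, connected, and a forward pass is driven through GenServer calls.
#
#     source = NeuralNetwork.Neuron.create()
#     dest = NeuralNetwork.Neuron.create()
#     GenServer.call(source, {:connect_to, dest})
#     GenServer.call(source, {:set_output, 1.0})
#     GenServer.call(source, :prop_forward)  # pushes output into dest's in_conn
#     GenServer.call(dest, :update_output)   # sums weighted inputs, applies the sigmoid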
# file: lib/neural_network/neuron.ex
defmodule Ecto.Query.Util do
@moduledoc """
  This module provides utility functions for queries.
"""
alias Ecto.Queryable
alias Ecto.Query.Query
alias Ecto.Query.JoinExpr
alias Ecto.Query.AssocJoinExpr
@doc """
Validates the query to check if it is correct. Should be called before
compilation by the query adapter.
"""
def validate(query, query_apis, opts // []) do
Ecto.Query.Validator.validate(query, query_apis, opts)
end
@doc """
Validates an update query to check if it is correct. Should be called before
compilation by the query adapter.
"""
def validate_update(query, query_apis, values) do
Ecto.Query.Validator.validate_update(query, query_apis, values)
end
@doc """
Validates a delete query to check if it is correct. Should be called before
compilation by the query adapter.
"""
def validate_delete(query, query_apis) do
Ecto.Query.Validator.validate_delete(query, query_apis)
end
@doc """
Validates a get query to check if it is correct. Should be called before
compilation by the query adapter.
"""
def validate_get(query, query_apis) do
Ecto.Query.Validator.validate_get(query, query_apis)
end
@doc """
Normalizes the query. Should be called before validation and compilation by
the query adapter.
"""
def normalize(query, opts // []) do
Ecto.Query.Normalizer.normalize(query, opts)
end
@doc """
Look up a source with a variable.
"""
def find_source(sources, { :&, _, [ix] }) when is_tuple(sources) do
elem(sources, ix)
end
def find_source(sources, { :&, _, [ix] }) when is_list(sources) do
Enum.at(sources, ix)
end
@doc """
Look up the expression where the variable was bound.
"""
def find_expr(Query[from: from], { :&, _, [0] }) do
from
end
def find_expr(Query[joins: joins], { :&, _, [ix] }) do
Enum.at(joins, ix - 1)
end
@doc "Returns the source from a source tuple."
def source({ source, _entity, _model }), do: source
@doc "Returns entity from a source tuple or nil if there is none."
def entity({ _source, entity, _model }), do: entity
@doc "Returns model from a source tuple or nil if there is none."
def model({ _source, _entity, model }), do: model
# Merges a Queryable with a query expression
@doc false
def merge(queryable, type, expr) do
query = Queryable.to_query(queryable)
if type == :on do
merge_on(query, expr)
else
check_merge(query, Query.new([{ type, expr }]))
case type do
:from -> query.from(expr)
:join -> query.update_joins(&(&1 ++ [expr]))
:where -> query.update_wheres(&(&1 ++ [expr]))
:select -> query.select(expr)
:order_by -> query.update_order_bys(&(&1 ++ [expr]))
:limit -> query.limit(expr)
:offset -> query.offset(expr)
:group_by -> query.update_group_bys(&(&1 ++ [expr]))
:having -> query.update_havings(&(&1 ++ [expr]))
:preload -> query.update_preloads(&(&1 ++ [expr]))
end
end
end
@doc false
def merge_on(Query[joins: joins] = query, expr) do
case Enum.split(joins, -1) do
{ joins, [JoinExpr[] = join] } ->
joins = joins ++ [join.on(expr)]
query.joins(joins)
{ _, [AssocJoinExpr[]] } ->
raise Ecto.InvalidQuery, reason: "an `on` query expression cannot follow an assocation join"
_ ->
raise Ecto.InvalidQuery, reason: "an `on` query expression must follow a `join`"
end
end
# Count the number of entities on the query
@doc false
def count_entities(queryable) do
Query[from: from, joins: joins] = Queryable.to_query(queryable)
count = if from, do: 1, else: 0
count + length(joins)
end
# Converts list of variables to list of atoms
@doc false
def escape_binding(binding) when is_list(binding) do
vars = Enum.map(binding, &escape_var(&1))
bound_vars = Enum.filter(vars, &(&1 != :_))
dup_vars = bound_vars -- Enum.uniq(bound_vars)
unless dup_vars == [] do
raise Ecto.InvalidQuery, reason: "variable `#{hd dup_vars}` is already defined in query"
end
vars
end
def escape_binding(_) do
raise Ecto.InvalidQuery, reason: "binding should be list of variables"
end
# Converts internal type format to "typespec" format
@doc false
def type_to_ast({ type, inner }), do: { type, [], [type_to_ast(inner)] }
def type_to_ast(type) when is_atom(type), do: { type, [], nil }
  # Takes an Elixir value and returns its Ecto type
@doc false
def value_to_type(nil), do: { :ok, nil }
def value_to_type(value) when is_boolean(value), do: { :ok, :boolean }
def value_to_type(value) when is_binary(value), do: { :ok, :string }
def value_to_type(value) when is_integer(value), do: { :ok, :integer }
def value_to_type(value) when is_float(value), do: { :ok, :float }
def value_to_type(Ecto.DateTime[] = dt) do
valid = is_integer(dt.year) and is_integer(dt.month) and is_integer(dt.day) and
is_integer(dt.hour) and is_integer(dt.min) and is_integer(dt.sec)
if valid do
{ :ok, :datetime }
else
{ :error, "all datetime elements has to be a literal of integer type" }
end
end
def value_to_type(Ecto.Interval[] = dt) do
valid = is_integer(dt.year) and is_integer(dt.month) and is_integer(dt.day) and
is_integer(dt.hour) and is_integer(dt.min) and is_integer(dt.sec)
if valid do
{ :ok, :interval }
else
{ :error, "all interval elements has to be a literal of integer type" }
end
end
def value_to_type(Ecto.Binary[value: binary]) do
if is_binary(binary) do
{ :ok, :binary }
else
{ :error, "binary/1 argument has to be a literal of binary type" }
end
end
def value_to_type(list) when is_list(list) do
types = Enum.map(list, &value_to_type/1)
case types do
[] ->
{ :ok, { :list, :any } }
[type|rest] ->
if Enum.all?(rest, &type_eq?(type, &1)) do
{ :ok, { :list, type } }
else
{ :error, "all elements in list has to be of same type" }
end
end
end
  def value_to_type(value), do: { :error, "unknown type of value `#{inspect value}`" }
# Returns true if the two types are considered equal by the type system
@doc false
def type_eq?(_, :any), do: true
def type_eq?(:any, _), do: true
def type_eq?({ outer, inner1 }, { outer, inner2 }), do: type_eq?(inner1, inner2)
def type_eq?(x, x), do: true
def type_eq?(_, _), do: false
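  # Examples of the mapping above (illustrative, derived from the clauses):
  #
  #     value_to_type(42)         #=> { :ok, :integer }
  #     value_to_type("ecto")     #=> { :ok, :string }
  #     value_to_type([1, 2, 3])  #=> { :ok, { :list, :integer } }
  #     type_eq?({ :list, :any }, { :list, :integer }) #=> true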
# Get var for given model in query
def model_var(Query[] = query, model) do
sources = tuple_to_list(query.sources)
pos = Enum.find_index(sources, &(model(&1) == model))
{ :&, [], [pos] }
end
  # Find var in select clause. Returns a list of tuple and list indices to
  # find the var.
def locate_var({ left, right }, var) do
locate_var({ :{}, [], [left, right] }, var)
end
def locate_var({ :{}, _, list }, var) do
locate_var(list, var)
end
def locate_var({ :assoc, _, [left, _right] }, var) do
if left == var, do: []
end
def locate_var(list, var) when is_list(list) do
list = Stream.with_index(list)
{ poss, pos } = Enum.find_value(list, fn { elem, ix } ->
if poss = locate_var(elem, var) do
{ poss, ix }
else
nil
end
end)
[pos|poss]
end
def locate_var(expr, var) do
if expr == var, do: []
end
defp escape_var(var) when is_atom(var) do
var
end
defp escape_var({ var, _, context }) when is_atom(var) and is_atom(context) do
var
end
defp escape_var(_) do
raise Ecto.InvalidQuery, reason: "binding should be list of variables"
end
defmacrop check_merge_dup(left, right, fields) do
Enum.map(fields, fn field ->
quote do
if unquote(left).unquote(field) && unquote(right).unquote(field) do
raise Ecto.InvalidQuery, reason: "only one #{unquote(field)} expression is allowed in query"
end
end
end)
end
# Checks if a query merge can be done
defp check_merge(Query[] = left, Query[] = right) do
check_merge_dup(left, right, [:select, :from, :limit, :offset])
end
end
# file: lib/ecto/query/util.ex
defmodule SpryCov.Files do
@doc """
Returns the list of files and/or directories given to `mix test`
## Examples
iex> mix_test_files()
iex> mix_test_files(["test"])
[]
iex> mix_test_files(["test", "--cover"])
[]
iex> mix_test_files(["test", "test/spry_cov/utils_test.exs"])
["test/spry_cov/utils_test.exs"]
iex> mix_test_files(["test", "--cover", "test/spry_cov/utils_test.exs"])
["test/spry_cov/utils_test.exs"]
iex> mix_test_files(["test", "test/spry_cov/"])
["test/spry_cov/"]
iex> mix_test_files(["test", "test/spry_cov/a_test.exs", "test/spry_cov/b_test_fixture.exs"])
["test/spry_cov/a_test.exs", "test/spry_cov/b_test_fixture.exs"]
"""
def mix_test_files(args \\ System.argv()) do
args
|> Enum.filter(&String.starts_with?(&1, "test/"))
end
@doc """
Returns the supposed production file names for the test files and/or directories
## Examples
iex> supposed_lib_files(["test"], [])
[]
iex> supposed_lib_files(["test"], ["test/spry_cov/utils_test.exs"])
["lib/spry_cov/utils"]
"""
def supposed_lib_files(test_paths, mix_test_files) do
mix_test_files
|> Enum.map(&supposed_lib_file(test_paths, &1))
end
@doc """
Returns the supposed production file name for the test file and/or directory
Parameter `test_paths` is the `:test_paths` of your `mix.exs` configuration,
`test_file` is the test file to determine the supposed lib file.
Replaces `test_paths` in the start of `test_file` with `"lib/"`
## Examples
iex> SpryCov.Files.supposed_lib_file(["test"], "test/spry_cov/utils_test.exs")
"lib/spry_cov/utils"
iex> SpryCov.Files.supposed_lib_file(["test"], "test/spry_cov/")
"lib/spry_cov/"
iex> SpryCov.Files.supposed_lib_file(["test"], "test/spry_cov/utils2_test.exs")
"lib/spry_cov/utils2"
iex> SpryCov.Files.supposed_lib_file(["test/unit"], "test/unit/spry_cov/")
"lib/spry_cov/"
"""
def supposed_lib_file(test_paths, test_file) do
test_paths
|> Enum.reduce(test_file, &String.replace_leading(&2, "#{&1}/", "lib/"))
|> String.replace(~r"_test(_\w+)?.exs$", "")
end
end
# file: lib/spry_cov/files.ex
defmodule GrovePi.Buzzer do
@moduledoc """
Control a Grove buzzer. While a buzzer can be controlled solely using
`GrovePi.Digital`, this module provides some helpers.
Example usage:
```
  iex> {:ok, buzzer} = GrovePi.Buzzer.start_link(3)
  iex> GrovePi.Buzzer.buzz(3, 1_000)
  :ok
```
"""
use GenServer
@type duration :: integer
alias GrovePi.Digital
alias GrovePi.Registry.Pin
defmodule State do
@moduledoc false
defstruct [:pin, :turnoff_time, :prefix]
end
@spec start_link(GrovePi.pin(), atom) :: Supervisor.on_start()
def start_link(pin, opts \\ []) do
prefix = Keyword.get(opts, :prefix, Default)
opts = Keyword.put(opts, :name, Pin.name(prefix, pin))
GenServer.start_link(__MODULE__, [pin, prefix], opts)
end
@spec buzz(GrovePi.pin(), duration, atom) :: :ok
def buzz(pin, duration, prefix) do
GenServer.cast(Pin.name(prefix, pin), {:buzz, duration})
end
def buzz(pin, duration_or_prefix) when is_atom(duration_or_prefix) do
buzz(pin, 1000, duration_or_prefix)
end
def buzz(pin, duration_or_prefix) when is_integer(duration_or_prefix) do
buzz(pin, duration_or_prefix, Default)
end
@spec off(GrovePi.pin()) :: :ok
def off(pin, prefix \\ Default) do
GenServer.cast(Pin.name(prefix, pin), :off)
end
def init([pin, prefix]) do
state = %State{pin: pin, prefix: prefix}
send(self(), :setup_pin)
{:ok, state}
end
def handle_cast(:off, state) do
:ok = Digital.write(state.prefix, state.pin, 0)
{:noreply, state}
end
def handle_cast({:buzz, duration}, state) do
turnoff_at = System.monotonic_time(:millisecond) + duration
new_state = %{state | turnoff_time: turnoff_at}
:ok = Digital.write(state.prefix, state.pin, 1)
:timer.send_after(duration, self(), :timeout)
{:noreply, new_state}
end
def handle_info(:setup_pin, state) do
# Turn off the buzzer on initialization just in case it happens to be
# on from a previous crash.
:ok = Digital.set_pin_mode(state.prefix, state.pin, :output)
:ok = Digital.write(state.prefix, state.pin, 0)
{:noreply, state}
end
def handle_info(:timeout, state) do
if System.monotonic_time(:millisecond) >= state.turnoff_time do
:ok = Digital.write(state.prefix, state.pin, 0)
end
{:noreply, state}
end
end
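# Usage with a custom prefix (sketch, not part of the original file): a prefix
# scopes the registered pin names when several boards run side by side.
#
#     {:ok, _pid} = GrovePi.Buzzer.start_link(3, prefix: MyBoard)
#     GrovePi.Buzzer.buzz(3, 1_000, MyBoard)  # buzz pin 3 for one second
#     GrovePi.Buzzer.off(3, MyBoard)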
# file: lib/grovepi/buzzer.ex
defmodule AshPolicyAuthorizer.Policy do
@moduledoc false
# For now we just write to `checks` and move them to `policies`
# on build, when we support nested policies we can change that.
defstruct [
:condition,
:policies,
:bypass?,
:checks,
:description,
:access_type
]
@type t :: %__MODULE__{}
defmodule Check do
@moduledoc false
defstruct [:check, :check_module, :check_opts, :type]
@doc false
def transform(%{check: {check_module, opts}} = policy) do
{:ok, %{policy | check_module: check_module, check_opts: opts}}
end
@type t :: %__MODULE__{}
end
def solve(authorizer) do
authorizer.policies
|> build_requirements_expression(authorizer.facts)
|> AshPolicyAuthorizer.SatSolver.solve()
end
defp build_requirements_expression(policies, facts) do
policy_expression = compile_policy_expression(policies, facts)
facts_expression =
AshPolicyAuthorizer.SatSolver.facts_to_statement(Map.drop(facts, [true, false]))
if facts_expression do
{:and, facts_expression, policy_expression}
else
policy_expression
end
end
def fetch_fact(facts, %{check_module: mod, check_opts: opts}) do
fetch_fact(facts, {mod, opts})
end
def fetch_fact(facts, {mod, opts}) do
Map.fetch(facts, {mod, Keyword.delete(opts, :access_type)})
end
defp condition_expression(condition, facts) do
condition
|> List.wrap()
|> Enum.reduce(nil, fn
condition, nil ->
case fetch_fact(facts, condition) do
{:ok, true} ->
true
{:ok, false} ->
false
_ ->
condition
end
_condition, false ->
false
condition, expression ->
case fetch_fact(facts, condition) do
{:ok, true} ->
expression
{:ok, false} ->
false
_ ->
{:and, condition, expression}
end
end)
end
defp compile_policy_expression(policies, facts)
defp compile_policy_expression([], _facts) do
false
end
defp compile_policy_expression(
[%__MODULE__{condition: condition, policies: policies}],
facts
) do
compiled_policies = compile_policy_expression(policies, facts)
condition_expression = condition_expression(condition, facts)
case condition_expression do
true ->
compiled_policies
false ->
true
nil ->
compiled_policies
condition_expression ->
{:and, condition_expression, compiled_policies}
end
end
defp compile_policy_expression(
[
%__MODULE__{condition: condition, policies: policies, bypass?: bypass?} | rest
],
facts
) do
condition_expression = condition_expression(condition, facts)
case condition_expression do
true ->
if bypass? do
{:or, compile_policy_expression(policies, facts),
compile_policy_expression(rest, facts)}
else
{:and, compile_policy_expression(policies, facts),
compile_policy_expression(rest, facts)}
end
false ->
compile_policy_expression(rest, facts)
nil ->
if bypass? do
{:or, compile_policy_expression(policies, facts),
compile_policy_expression(rest, facts)}
else
{:and, compile_policy_expression(policies, facts),
compile_policy_expression(rest, facts)}
end
condition_expression ->
if bypass? do
{:or, {:and, condition_expression, compile_policy_expression(policies, facts)},
compile_policy_expression(rest, facts)}
else
{:or, {:and, condition_expression, compile_policy_expression(policies, facts)},
{:and, {:not, condition_expression}, compile_policy_expression(rest, facts)}}
end
end
end
defp compile_policy_expression(
[%{type: :authorize_if} = clause],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
true
{:ok, false} ->
false
:error ->
{clause.check_module, clause.check_opts}
end
end
defp compile_policy_expression(
[%{type: :authorize_if} = clause | rest],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
true
{:ok, false} ->
compile_policy_expression(rest, facts)
:error ->
{:or, {clause.check_module, clause.check_opts}, compile_policy_expression(rest, facts)}
end
end
defp compile_policy_expression(
[%{type: :authorize_unless} = clause],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
false
{:ok, false} ->
true
:error ->
{clause.check_module, clause.check_opts}
end
end
defp compile_policy_expression(
[%{type: :authorize_unless} = clause | rest],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
compile_policy_expression(rest, facts)
{:ok, false} ->
true
:error ->
{:or, {clause.check_module, clause.check_opts}, compile_policy_expression(rest, facts)}
end
end
defp compile_policy_expression([%{type: :forbid_if}], _facts) do
false
end
defp compile_policy_expression(
[%{type: :forbid_if} = clause | rest],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
false
{:ok, false} ->
compile_policy_expression(rest, facts)
:error ->
{:and, {:not, clause}, compile_policy_expression(rest, facts)}
end
end
defp compile_policy_expression([%{type: :forbid_unless}], _facts) do
false
end
defp compile_policy_expression(
[%{type: :forbid_unless} = clause | rest],
facts
) do
case fetch_fact(facts, clause) do
{:ok, true} ->
compile_policy_expression(rest, facts)
{:ok, false} ->
false
:error ->
{:and, clause, compile_policy_expression(rest, facts)}
end
end
end
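# Shape sketch (illustrative, not part of the original file): the compiler
# reduces a check list to a boolean formula over unresolved facts for the SAT
# solver. With both facts unknown, [forbid_if b, authorize_if a] compiles to:
#
#     {:and, {:not, b_clause}, {a_check_module, a_check_opts}}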
# file: lib/ash_policy_authorizer/policy.ex
defmodule Sneex.Memory do
@moduledoc """
This module wraps memory access.
"""
defstruct [:data]
use Bitwise
@opaque t :: %__MODULE__{
data: binary()
}
@spec new(binary()) :: __MODULE__.t()
def new(data) when is_binary(data) do
%__MODULE__{data: data}
end
@spec raw_data(__MODULE__.t()) :: binary()
def raw_data(%__MODULE__{data: data}) do
data
end
@spec read_byte(__MODULE__.t(), Sneex.BasicTypes.address()) :: byte()
def read_byte(%__MODULE__{data: data}, address) do
{_, result, _} = split_data(data, address, 1)
result
end
@spec write_byte(__MODULE__.t(), Sneex.BasicTypes.address(), byte()) :: __MODULE__.t()
def write_byte(%__MODULE__{data: data}, address, byte) do
{pre, _, post} = split_data(data, address, 1)
new_data = pre <> <<byte>> <> post
%__MODULE__{data: new_data}
end
@spec read_word(__MODULE__.t(), Sneex.BasicTypes.address()) :: Sneex.BasicTypes.word()
def read_word(%__MODULE__{data: data}, address) do
{_, result, _} = split_data(data, address, 2)
result
end
@spec write_word(__MODULE__.t(), Sneex.BasicTypes.address(), Sneex.BasicTypes.word()) ::
__MODULE__.t()
def write_word(%__MODULE__{data: data}, address, word) do
{pre, _, post} = split_data(data, address, 2)
new_data = pre <> unformat_data(word, 2) <> post
%__MODULE__{data: new_data}
end
@spec read_long(__MODULE__.t(), Sneex.BasicTypes.address()) :: Sneex.BasicTypes.long()
def read_long(%__MODULE__{data: data}, address) do
{_, result, _} = split_data(data, address, 3)
result
end
@spec write_long(__MODULE__.t(), Sneex.BasicTypes.address(), Sneex.BasicTypes.long()) ::
__MODULE__.t()
def write_long(%__MODULE__{data: data}, address, long) do
{pre, _, post} = split_data(data, address, 3)
new_data = pre <> unformat_data(long, 3) <> post
%__MODULE__{data: new_data}
end
defp split_data(memory, 0, length) when is_binary(memory) and length <= byte_size(memory) do
<<data::binary-size(length), rest::binary>> = memory
{<<>>, format_data(data), rest}
end
defp split_data(memory, address, length)
when is_binary(memory) and address + length <= byte_size(memory) do
<<before::binary-size(address), data::binary-size(length), rest::binary>> = memory
{before, format_data(data), rest}
end
  # Data is stored little-endian: the first byte is the least significant.
  defp format_data(<<b::size(8)>>), do: b
defp format_data(<<b0::size(8), b1::size(8)>>) do
b1 <<< 8 ||| b0
end
defp format_data(<<b0::size(8), b1::size(8), b2::size(8)>>) do
b2 <<< 16 ||| b1 <<< 8 ||| b0
end
defp unformat_data(data, 2) do
hi = data |> band(0xFF00) |> bsr(8)
lo = data &&& 0x00FF
<<lo, hi>>
end
defp unformat_data(data, 3) do
hi = data |> band(0xFF0000) |> bsr(16)
med = data |> band(0x00FF00) |> bsr(8)
lo = data &&& 0x0000FF
<<lo, med, hi>>
end
end
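# Round-trip sketch (illustrative, not part of the original file), showing the
# little-endian layout:
#
#     memory = Sneex.Memory.new(<<0, 0, 0, 0>>)
#     memory = Sneex.Memory.write_word(memory, 0, 0x1234)
#     Sneex.Memory.raw_data(memory)     #=> <<0x34, 0x12, 0, 0>>
#     Sneex.Memory.read_word(memory, 0) #=> 0x1234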
# file: lib/sneex/memory.ex
defmodule Ecto.Repo do
@moduledoc """
This module is used to define a repository. A repository maps to a data
store, for example an SQL database. A repository must implement `url/0` and
set an adapter (see `Ecto.Adapter`) to be used for the repository.
When used, the following options are allowed:
* `:adapter` - the adapter to be used for the repository; it will be used
to handle connections to the data store and to compile queries
## Example
defmodule MyRepo do
use Ecto.Repo, adapter: Ecto.Adapters.Postgres
def url do
"ecto://postgres:postgres@localhost/postgres"
end
end
"""
use Behaviour
@type t :: module
@doc false
defmacro __using__(opts) do
adapter = Keyword.fetch!(opts, :adapter)
quote do
use unquote(adapter)
@behaviour Ecto.Repo
import Ecto.Utils, only: [app_dir: 2]
def start_link do
Ecto.Repo.Backend.start_link(__MODULE__, unquote(adapter))
end
def stop do
Ecto.Repo.Backend.stop(__MODULE__, unquote(adapter))
end
def get(queryable, id) do
Ecto.Repo.Backend.get(__MODULE__, unquote(adapter), queryable, id)
end
def all(queryable) do
Ecto.Repo.Backend.all(__MODULE__, unquote(adapter), queryable)
end
def create(entity) do
Ecto.Repo.Backend.create(__MODULE__, unquote(adapter), entity)
end
def update(entity) do
Ecto.Repo.Backend.update(__MODULE__, unquote(adapter), entity)
end
defmacro update_all(queryable, values) do
Ecto.Repo.Backend.update_all(__MODULE__, unquote(adapter), queryable, values)
end
def delete(entity) do
Ecto.Repo.Backend.delete(__MODULE__, unquote(adapter), entity)
end
def delete_all(queryable) do
Ecto.Repo.Backend.delete_all(__MODULE__, unquote(adapter), queryable)
end
def query_apis do
[ Ecto.Query.API ]
end
def adapter do
unquote(adapter)
end
def __repo__ do
true
end
defoverridable [query_apis: 0]
end
end
@doc """
Should return the Ecto URL to be used for the repository. A URL is of the
following format: `ecto://username:password@hostname:port/database?opts=123`
where the `password`, `port` and `options` are optional. This function must be
implemented by the user.
"""
defcallback url() :: String.t
@doc """
Starts any connection pooling or supervision and return `{ :ok, pid }`
or just `:ok` if nothing needs to be done.
Returns `{ :error, { :already_started, pid } }` if the repo already
started or `{ :error, term }` in case anything else goes wrong.
"""
defcallback start_link() :: { :ok, pid } | :ok |
{ :error, { :already_started, pid } } |
{ :error, term }
@doc """
Stops any connection pooling or supervision started with `start_link/1`.
"""
defcallback stop() :: :ok
@doc """
Fetches a single entity from the data store where the primary key matches the
given id. id should be an integer or a string that can be cast to an
integer. Returns `nil` if no result was found. If the entity in the queryable
has no primary key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError`
will be raised if there is an adapter error.
"""
defcallback get(Ecto.Queryable.t, integer) :: Ecto.Entity.t | nil | no_return
@doc """
Fetches all results from the data store based on the given query. May raise
`Ecto.InvalidQuery` if query validation fails. `Ecto.AdapterError` will be
raised if there is an adapter error.
## Example
# Fetch all post titles
query = from p in Post,
             select: p.title
MyRepo.all(query)
"""
defcallback all(Ecto.Query.t) :: [Ecto.Entity.t] | no_return
@doc """
Stores a single new entity in the data store and returns its stored
representation. May raise `Ecto.AdapterError` if there is an adapter error.
## Example
post = Post.new(title: "Ecto is great", text: "really, it is")
|> MyRepo.create
"""
defcallback create(Ecto.Entity.t) :: Ecto.Entity.t | no_return
@doc """
Updates an entity using the primary key as key. If the entity has no primary
key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError` will be raised if
there is an adapter error.
## Example
[post] = from p in Post, where: p.id == 42
post = post.title("New title")
MyRepo.update(post)
"""
defcallback update(Ecto.Entity.t) :: :ok | no_return
@doc """
Updates all entities matching the given query with the given values.
`Ecto.AdapterError` will be raised if there is an adapter error.
## Examples
MyRepo.update_all(Post, title: "New title")
MyRepo.update_all(p in Post, visits: p.visits + 1)
from(p in Post, where: p.id < 10) |>
        MyRepo.update_all(title: "New title")
"""
defmacrocallback update_all(Macro.t, Keyword.t) :: integer | no_return
@doc """
Deletes an entity using the primary key as key. If the entity has no primary
key `Ecto.NoPrimaryKey` will be raised. `Ecto.AdapterError` will be raised if
there is an adapter error.
## Example
[post] = from p in Post, where: p.id == 42
MyRepo.delete(post)
"""
defcallback delete(Ecto.Entity.t) :: :ok | no_return
@doc """
Deletes all entities matching the given query with the given values.
`Ecto.AdapterError` will be raised if there is an adapter error.
## Examples
MyRepo.delete_all(Post)
from(p in Post, where: p.id < 10) |> MyRepo.delete_all
"""
defcallback delete_all(Ecto.Queryable.t) :: integer | no_return
@doc """
Returns the adapter tied to the repository.
"""
defcallback adapter() :: Ecto.Adapter.t
@doc """
Returns the supported query APIs.
"""
defcallback query_apis :: [module]
end
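# Usage sketch (illustrative, mirroring the moduledoc example above):
#
#     defmodule MyRepo do
#       use Ecto.Repo, adapter: Ecto.Adapters.Postgres
#       def url, do: "ecto://postgres:postgres@localhost/postgres"
#     end
#
#     MyRepo.start_link()
#     MyRepo.all(from p in Post, select: p.title)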
# file: lib/ecto/repo.ex
defmodule Exq.Api do
@moduledoc """
Interface for retrieving Exq stats.
  The `pid` argument is the `Exq.Api` process.
"""
def start_link(opts \\ []) do
Exq.start_link(Keyword.put(opts, :mode, :api))
end
@doc """
List of queues with jobs (empty queues are deleted).
Expected args:
* `pid` - Exq.Api process
Returns:
  * `{:ok, queues}` - list of queues
"""
def queues(pid) do
GenServer.call(pid, :queues)
end
@doc """
Clear / Remove queue
Expected args:
* `pid` - Exq.Api process
* `queue` - Queue name
Returns:
  * `{:ok, queues}` - list of queues
"""
def remove_queue(pid, queue) do
GenServer.call(pid, {:remove_queue, queue})
end
@doc """
Number of busy workers
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, num_busy}` - number of busy workers
"""
def busy(pid) do
GenServer.call(pid, :busy)
end
@doc """
List of worker nodes currently running
"""
def nodes(pid) do
GenServer.call(pid, :nodes)
end
@doc """
List of processes currently running
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, [processes]}`
"""
def processes(pid) do
GenServer.call(pid, :processes)
end
def clear_processes(pid) do
GenServer.call(pid, :clear_processes)
end
@doc """
List jobs enqueued
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, [{queue, [jobs]}, {queue, [jobs]}]}`
"""
def jobs(pid) do
GenServer.call(pid, :jobs)
end
@doc """
List jobs enqueued
Expected args:
* `pid` - Exq.Api process
* `queue` - Queue name
* `options`
- size: (integer) size of list
- offset: (integer) start offset of the list
- raw: (boolean) whether to deserialize the job
Returns:
* `{:ok, [jobs]}`
"""
def jobs(pid, queue, options \\ []) do
GenServer.call(pid, {:jobs, queue, options})
end
@doc """
List jobs that will be retried because they previously failed and have not exceeded their retry_count.
Expected args:
* `pid` - Exq.Api process
* `options`
- score: (boolean) whether to include job score
- size: (integer) size of list
- offset: (integer) start offset of the list
Returns:
* `{:ok, [jobs]}`
"""
def retries(pid, options \\ []) do
GenServer.call(pid, {:retries, options})
end
@doc """
List jobs that are enqueued and scheduled to be run at a future time.
Expected args:
* `pid` - Exq.Api process
* `options`
- score: (boolean) whether to include job score
- size: (integer) size of list
- offset: (integer) start offset of the list
Returns:
* `{:ok, [jobs]}`
"""
def scheduled(pid, options \\ []) do
GenServer.call(pid, {:jobs, :scheduled, options})
end
@doc """
List jobs that are enqueued and scheduled to be run at a future time, along with when they are scheduled to run.
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, [{job, scheduled_at}]}`
"""
def scheduled_with_scores(pid) do
GenServer.call(pid, {:jobs, :scheduled_with_scores})
end
def find_job(pid, queue, jid) do
GenServer.call(pid, {:find_job, queue, jid})
end
@doc """
Removes a job from the queue specified.
Expected args:
* `pid` - Exq.Api process
* `queue` - The name of the queue to remove the job from
* `jid` - Unique identifier for the job
Returns:
* `:ok`
"""
@deprecated "use remove_enqueued_jobs/3"
def remove_job(pid, queue, jid) do
GenServer.call(pid, {:remove_job, queue, jid})
end
@doc """
Removes a job from the queue specified.
Expected args:
* `pid` - Exq.Api process
* `queue` - The name of the queue to remove the job from
* `raw_job` - raw json encoded job value
Returns:
* `:ok`
"""
def remove_enqueued_jobs(pid, queue, raw_jobs) do
GenServer.call(pid, {:remove_enqueued_jobs, queue, raw_jobs})
end
@doc """
A count of the number of jobs in the queue, for each queue.
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, [{queue, num_jobs}, {queue, num_jobs}]}`
"""
def queue_size(pid) do
GenServer.call(pid, :queue_size)
end
@doc """
A count of the number of jobs in the queue, for a provided queue.
Expected args:
* `pid` - Exq.Api process
* `queue` - The name of the queue to find the number of jobs for
Returns:
* `{:ok, num_jobs}`
"""
def queue_size(pid, queue) do
GenServer.call(pid, {:queue_size, queue})
end
@doc """
List jobs that have failed and will not retry, as they've exceeded their retry count.
Expected args:
* `pid` - Exq.Api process
* `options`
- score: (boolean) whether to include job score
- size: (integer) size of list
- offset: (integer) start offset of the list
Returns:
* `{:ok, [jobs]}`
"""
def failed(pid, options \\ []) do
GenServer.call(pid, {:failed, options})
end
@deprecated "use find_failed/4"
def find_failed(pid, jid) do
GenServer.call(pid, {:find_failed, jid})
end
@doc """
Find failed job
Expected args:
* `pid` - Exq.Api process
* `score` - Job score
* `jid` - Job jid
* `options`
- raw: (boolean) whether to deserialize the job
Returns:
* `{:ok, job}`
"""
def find_failed(pid, score, jid, options \\ []) do
GenServer.call(pid, {:find_failed, score, jid, options})
end
@doc """
Removes a job in the queue of jobs that have failed and exceeded their retry count.
Expected args:
* `pid` - Exq.Api process
* `jid` - Unique identifier for the job
Returns:
* `:ok`
"""
@deprecated "use remove_failed_jobs/2"
def remove_failed(pid, jid) do
GenServer.call(pid, {:remove_failed, jid})
end
@doc """
Removes jobs from dead queue.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `:ok`
"""
def remove_failed_jobs(pid, raw_jobs) do
GenServer.call(pid, {:remove_failed_jobs, raw_jobs})
end
def clear_failed(pid) do
GenServer.call(pid, :clear_failed)
end
@doc """
  Re-enqueue jobs from the dead queue.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `{:ok, num_enqueued}`
"""
def dequeue_failed_jobs(pid, raw_jobs) do
GenServer.call(pid, {:dequeue_failed_jobs, raw_jobs})
end
@doc """
Number of jobs that have failed and exceeded their retry count.
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, num_failed}` - number of failed jobs
"""
def failed_size(pid) do
GenServer.call(pid, :failed_size)
end
@deprecated "use find_retry/4"
def find_retry(pid, jid) do
GenServer.call(pid, {:find_retry, jid})
end
@doc """
Find job in retry queue
Expected args:
* `pid` - Exq.Api process
* `score` - Job score
* `jid` - Job jid
* `options`
- raw: (boolean) whether to deserialize the job
Returns:
* `{:ok, job}`
"""
def find_retry(pid, score, jid, options \\ []) do
GenServer.call(pid, {:find_retry, score, jid, options})
end
@doc """
Removes a job in the retry queue from being enqueued again.
Expected args:
* `pid` - Exq.Api process
* `jid` - Unique identifier for the job
Returns:
* `:ok`
"""
@deprecated "use remove_retry_jobs/2"
def remove_retry(pid, jid) do
GenServer.call(pid, {:remove_retry, jid})
end
@doc """
Removes jobs from retry queue.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `:ok`
"""
def remove_retry_jobs(pid, raw_jobs) do
GenServer.call(pid, {:remove_retry_jobs, raw_jobs})
end
def clear_retries(pid) do
GenServer.call(pid, :clear_retries)
end
@doc """
  Re-enqueue jobs from the retry queue immediately.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `{:ok, num_enqueued}`
"""
def dequeue_retry_jobs(pid, raw_jobs) do
GenServer.call(pid, {:dequeue_retry_jobs, raw_jobs})
end
@doc """
Number of jobs in the retry queue.
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, num_retry}` - number of jobs to be retried
"""
def retry_size(pid) do
GenServer.call(pid, :retry_size)
end
@deprecated "use find_scheduled/4"
def find_scheduled(pid, jid) do
GenServer.call(pid, {:find_scheduled, jid})
end
@doc """
Find job in scheduled queue
Expected args:
* `pid` - Exq.Api process
* `score` - Job score
* `jid` - Job jid
* `options`
- raw: (boolean) whether to deserialize the job
Returns:
* `{:ok, job}`
"""
def find_scheduled(pid, score, jid, options \\ []) do
GenServer.call(pid, {:find_scheduled, score, jid, options})
end
@doc """
Removes a job scheduled to run in the future from being enqueued.
Expected args:
* `pid` - Exq.Api process
* `jid` - Unique identifier for the job
Returns:
* `:ok`
"""
@deprecated "use remove_scheduled_jobs/2"
def remove_scheduled(pid, jid) do
GenServer.call(pid, {:remove_scheduled, jid})
end
@doc """
Removes jobs from scheduled queue.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `:ok`
"""
def remove_scheduled_jobs(pid, raw_jobs) do
GenServer.call(pid, {:remove_scheduled_jobs, raw_jobs})
end
def clear_scheduled(pid) do
GenServer.call(pid, :clear_scheduled)
end
@doc """
Enqueue jobs from scheduled queue immediately.
Expected args:
* `pid` - Exq.Api process
* `raw_job` - raw json encoded job value
Returns:
* `{:ok, num_enqueued}`
"""
def dequeue_scheduled_jobs(pid, raw_jobs) do
GenServer.call(pid, {:dequeue_scheduled_jobs, raw_jobs})
end
@doc """
Number of scheduled jobs enqueued.
Expected args:
* `pid` - Exq.Api process
Returns:
* `{:ok, num_scheduled}` - number of scheduled jobs enqueued
"""
def scheduled_size(pid) do
GenServer.call(pid, :scheduled_size)
end
@doc """
Return stat for given key.
Examples of keys are `processed` and `failed`.
Expected args:
* `pid` - Exq.Api process
* `key` - Key for stat
  * `date` - (optional) a date, or list of dates, to fetch the stat for
Returns:
* `{:ok, stat}` stat for key
"""
def stats(pid, key) do
GenServer.call(pid, {:stats, key})
end
def stats(pid, key, dates) when is_list(dates) do
GenServer.call(pid, {:stats, key, dates})
end
def stats(pid, key, date) do
with {:ok, [count]} <- GenServer.call(pid, {:stats, key, [date]}) do
{:ok, count}
end
end
def realtime_stats(pid) do
GenServer.call(pid, :realtime_stats)
end
def retry_job(pid, jid) do
GenServer.call(pid, {:retry_job, jid})
end
end
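# Usage sketch (assumes a reachable Redis; the registered name `Exq.Api` below
# follows Exq's defaults and is an assumption):
#
#     {:ok, _sup} = Exq.Api.start_link()
#     {:ok, queues} = Exq.Api.queues(Exq.Api)
#     {:ok, size} = Exq.Api.queue_size(Exq.Api, "default")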
# file: lib/exq/api.ex
defmodule Lixie.Cog do
@moduledoc """
A behaviour module for defining a set of functionality that can be loaded and unloaded.
## Important notes
This behaviour assumes the module is a GenServer, and the use macro will call
`GenServer.__using__`.
Do not manually un/register commands. Return the command registration maps using `commands/1`
instead.
## Lifecycle
Cogs follow a specific flow:
1. Process spawned - This happens regardless of whether the cog is loaded or not. There
     should be minimal prep work done in `GenServer.init/1`.
2. Commands registered - If an empty list is returned, no commands registered. This is the
default for `commands/1`.
3. Cog loaded - After `load/1` returns, the cog will start receiving events, and commands if any
were registered.
4. Commands unregistered - This happens immediately before unloading to ensure commands aren't
missed.
5. Cog unloaded - On user command or the bot is stopped, the cog will be unloaded.
  6. Process stopped - This will happen only on crash or bot stop. The cog may be loaded again
before this stage.
## Usage
All events, save interactions, from the Discord gateway will be forwarded to the cog via
`handle_discord/2`. In the future, this may change to an opt-in system. Interactions will come
  through `handle_interaction/3`. See the callbacks below for details.
The cog can stop itself, which will trigger deregistration of interactions and commands before
calling `unload/1`.
Unless stated otherwise, error returns are supported for convenience of logging only, and don't
change the flow of execution. Errors will be cast to the cog to enable asynchronous recovery.
"""
defmacro __using__(opts) do
quote location: :keep, bind_quoted: [opts: opts] do
@behaviour Lixie.Cog
use GenServer, opts
def start_link(args) do
GenServer.start_link(__MODULE__, args, name: {:global, inspect(__MODULE__)})
end
@impl true
      def handle_call(:load, _from, state) do
case __MODULE__.load(state) do
{:ok, state} -> {:reply, :ok, state}
{:error, reason, state} -> {:reply, {:error, reason}, state}
end
end
@impl true
      def handle_call(:commands, _from, state), do: {:reply, __MODULE__.commands(state), state}
end
end
@spec load(cog) :: :ok | {:error, reason} when cog: atom(), reason: term()
def load(cog), do: GenServer.call(cog, :load)
@doc """
Invoked before events are sent to the cog so it can do any setup work it needs to do, ie load
configurations, check a database, etc.
Returning an error will stop the loading of the cog, but not stop the process.
"""
@callback load(state) :: {:ok, state} | {:error, reason, state}
when state: term(), reason: term()
@doc """
Invoked before the cog is stopped so it can do any cleanup work it needs to do, ie save
configurations, close connections, etc.
"""
@callback unload(state) :: {:ok, state} | {:error, reason, state}
when state: term(), reason: term()
@doc """
Invoked on all Discord events, save interactions.
"""
@callback handle_discord(payload, state) :: {:ok, state} | {:stop | :error, reason, state}
when payload: term(), state: term(), reason: term()
@doc """
Invoked on commands registered by this cog, and all interactions. In the future, this will
only invoke on interactions created by this cog.
"""
@callback handle_interaction(type, payload, state)
:: {:ok, state} | {:stop | :error, reason, state}
when type: :command | :component, payload: term(), state: term(), reason: term()
@doc """
Invoked to get the commands that the cog listens for. Command interactions received by the cog
will be limited to the commands returned by this method.
"""
@spec commands(state) :: [command] when state: term(), command: map()
def commands(_), do: []
# These functions will be used later when component filtering is implemented
  def get_nonce(%{interaction_nonce: nonce} = state) do
    <<cog::3, date::24, _::5>> = nonce
    # Reset the counter when the hour changes; otherwise keep counting up.
    <<_::3, date::24, inc::5>> =
      if date != (new_date = hours_from_dt(DateTime.utc_now())) do
        <<cog::3, new_date::24, 0::5>>
      else
        nonce
      end
    {<<cog::3, date::24, inc::5>>, %{state | interaction_nonce: <<cog::3, date::24, (inc + 1)::5>>}}
  end
def dt_from_nonce(<<_::3, dt::24, _::5>>) do
DateTime.add(Lixie.Utils.epoch, dt * 3600)
end
defp hours_from_dt(dt) do
DateTime.diff(dt, Lixie.Utils.epoch) |> Integer.floor_div(3600)
end
defoverridable commands: 1
end
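# Minimal cog sketch (hypothetical module, assuming only the behaviour above):
#
#     defmodule MyCog do
#       use Lixie.Cog
#
#       @impl true
#       def load(state), do: {:ok, state}
#
#       @impl true
#       def unload(state), do: {:ok, state}
#
#       @impl true
#       def handle_discord(_payload, state), do: {:ok, state}
#
#       @impl true
#       def handle_interaction(_type, _payload, state), do: {:ok, state}
#     end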
# file: lib/lixie/cog.ex
defmodule Riemannx.Connections.Batch do
@moduledoc """
The batch connector is a pass through module that adds batching functionality
on top of the existing protocol connections.
Batching will aggregate events you send and then send them in bulk in
intervals you specify, if the events reach a certain size you can set it so
they publish the events before the interval.
NOTE: Batching **only** works with send_async.
Below is how the batching settings look in config:
  ```elixir
  config :riemannx, [
    type: :batch,
    batch_settings: [
      type: :combined,
      size: 50, # Sends when the batch size reaches 50
      interval: {5, :seconds} # How often to send the batches if they don't reach :size (:seconds, :minutes or :milliseconds)
    ]
  ]
  ```
  ## Synchronous Sending
  When you send synchronously the events are passed directly through to the underlying connection
  module. They are not batched or put in the queue.
  """
import Riemannx.Settings
import Kernel, except: [send: 2]
alias Riemannx.Proto.Msg
use GenServer
@behaviour Riemannx.Connection
# ===========================================================================
# API
# ===========================================================================
def send(e, t), do: batch_module().send(e, t)
def send_async(e), do: GenServer.cast(__MODULE__, {:push, e})
def query(m, t), do: batch_module().query(m, t)
# ===========================================================================
  # GenServer Callbacks
# ===========================================================================
def start_link([]) do
GenServer.start_link(__MODULE__, Qex.new(), name: __MODULE__)
end
def init(queue) do
Process.send_after(self(), :flush, batch_interval())
{:ok, queue}
end
def handle_cast({:push, event}, queue) do
queue = Qex.push(queue, event)
if Enum.count(queue) >= batch_size(),
do: {:noreply, flush(queue)},
else: {:noreply, queue}
end
def handle_info(:flush, queue), do: {:noreply, flush(queue)}
def handle_info(_, queue), do: {:noreply, queue}
# ===========================================================================
# Private
# ===========================================================================
  defp flush(items) when is_list(items) do
    # Each queued entry is itself a list of events; flatten them into one batch.
    batch = Enum.flat_map(items, fn item -> item end)
[events: batch]
|> Msg.new()
|> Msg.encode()
|> batch_module().send_async()
Process.send_after(self(), :flush, batch_interval())
end
defp flush(queue) do
queue |> Enum.to_list() |> flush()
Qex.new()
end
end
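# Flow sketch (illustrative, not part of the original file): send_async/1 only
# enqueues; the queue is flushed when it reaches batch_size/0 or when the
# batch_interval/0 timer fires, and the flattened events are then encoded into
# a single protobuf Msg for the underlying connection.
#
#     Riemannx.Connections.Batch.send_async(events)  # events: a list of Proto.Event (assumption)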
# file: lib/riemannx/connections/batch.ex
defmodule MangoPay.BankAccount do
@moduledoc """
Functions for MangoPay [bank account](https://docs.mangopay.com/endpoints/v2.01/bank-accounts#e24_the-bankaccount-object) API.
"""
use MangoPay.Query.Base, "bankaccounts"
@doc """
Get a bank account of a user.
## Examples
{:ok, client} = MangoPay.BankAccount.get_by_user("user_id")
"""
def get_by_user user_id, bank_account_id do
_get user_resource(user_id, bank_account_id)
end
@doc """
Get a bank account of a user.
## Examples
client = MangoPay.BankAccount.get_by_user!("user_id")
"""
def get_by_user! user_id, bank_account_id do
_get! user_resource(user_id, bank_account_id)
end
@doc """
Create a iban bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"IBAN": "FR7630004000031234567890143",
"BIC": "CRLYFRPP"
}
{:ok, bank_account} = MangoPay.BankAccount.create_iban("user_id", params)
"""
def create_iban user_id, params do
_create params, user_resource(user_id, "iban")
end
@doc """
Create a iban bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"IBAN": "FR7630004000031234567890143",
"BIC": "CRLYFRPP"
}
bank_account = MangoPay.BankAccount.create_iban!("user_id", params)
"""
def create_iban! user_id, params do
_create! params, user_resource(user_id, "iban")
end
@doc """
Create a US bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"AccountNumber": "11696419",
"ABA": "071000288",
"DepositAccountType": "CHECKING"
}
{:ok, bank_account} = MangoPay.BankAccount.create_us("user_id", params)
"""
def create_us id, params do
_create params, user_resource(id, "us")
end
@doc """
Create a US bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"AccountNumber": "11696419",
"ABA": "071000288",
"DepositAccountType": "CHECKING"
}
bank_account = MangoPay.BankAccount.create_us!("user_id", params)
"""
def create_us! id, params do
_create! params, user_resource(id, "us")
end
@doc """
Create a CA bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"BranchCode": "00152",
"InstitutionNumber": "614",
"AccountNumber": "11696419",
"BankName": "The Big Bank"
}
{:ok, bank_account} = MangoPay.BankAccount.create_ca("user_id", params)
"""
def create_ca id, params do
_create params, user_resource(id, "ca")
end
@doc """
Create a CA bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"BranchCode": "00152",
"InstitutionNumber": "614",
"AccountNumber": "11696419",
"BankName": "The Big Bank"
}
bank_account = MangoPay.BankAccount.create_ca!("user_id", params)
"""
def create_ca! id, params do
_create! params, user_resource(id, "ca")
end
@doc """
Create a GB bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"SortCode": "010039",
"AccountNumber": "11696419"
}
{:ok, bank_account} = MangoPay.BankAccount.create_gb("user_id", params)
"""
def create_gb id, params do
_create params, user_resource(id, "gb")
end
@doc """
Create a GB bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"SortCode": "010039",
"AccountNumber": "11696419"
}
bank_account = MangoPay.BankAccount.create_gb!("user_id", params)
"""
def create_gb! id, params do
_create! params, user_resource(id, "gb")
end
@doc """
Create an other type of bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"Country": "FR",
"BIC": "CRLYFRPP",
"AccountNumber": "11696419"
}
{:ok, bank_account} = MangoPay.BankAccount.create_other("user_id", params)
"""
def create_other id, params do
_create params, user_resource(id, "other")
end
@doc """
Create an other type of bank account.
## Examples
params = %{
"Tag": "custom meta",
"OwnerAddress": %{
"AddressLine1": "1 Mangopay Street",
"AddressLine2": "The Loop",
"City": "Paris",
"Region": "Ile de France",
"PostalCode": "75001",
"Country": "FR"
},
"OwnerName": "<NAME>",
"Country": "FR",
"BIC": "CRLYFRPP",
"AccountNumber": "11696419"
}
bank_account = MangoPay.BankAccount.create_other!("user_id", params)
"""
def create_other! id, params do
_create! params, user_resource(id, "other")
end
@doc """
List all bank account for a user.
## Examples
{:ok, bank_accounts} = MangoPay.BankAccount.all_by_user("user_id")
"""
def all_by_user id, query \\ %{} do
_all [MangoPay.User.path(id), resource()], query
end
@doc """
List all bank account for a user.
## Examples
bank_accounts = MangoPay.BankAccount.all_by_user!("user_id")
"""
def all_by_user! id, query \\ %{} do
_all! [MangoPay.User.path(id), resource()], query
end
@doc """
Deactivate a bank account of a user.
## Examples
params = %{
"Active": false
}
      {:ok, bank_account} = MangoPay.BankAccount.deactivate("user_id", "bank_account_id", params)
"""
def deactivate user_id, bank_account_id, params do
_update params, user_resource(user_id, bank_account_id)
end
@doc """
Deactivate a bank account of a user.
## Examples
params = %{
"Active": false
}
      bank_account = MangoPay.BankAccount.deactivate!("user_id", "bank_account_id", params)
"""
def deactivate! user_id, bank_account_id, params do
_update! params, user_resource(user_id, bank_account_id)
end
end
# file: lib/mango_pay/bank_account.ex
defmodule Tradehub.Trade do
@moduledoc """
  This module fetches trade and order information from the chain.
"""
import Tradehub.Raising
@doc """
Requests orders of the given account
## Examples
iex> Tradehub.Trade.get_orders("swth1fdqkq5gc5x8h6a0j9hamc30stlvea6zldprt6q")
"""
@spec get_orders(String.t()) :: {:ok, list(Tradehub.order())} | {:error, HTTPoison.Error.t()}
@spec get_orders!(String.t()) :: list(Tradehub.order())
def get_orders(account) do
case Tradehub.get("get_orders", params: %{account: String.downcase(account)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:get_orders, account)
@doc """
Requests an order details information by its order id
## Examples
iex> Tradehub.Trade.get_order("A186AC5F560BBD4B2C1F9B21C6EF1814F3295EBD863FA3655F74942CDB198530")
"""
@spec get_order(String.t()) :: {:ok, Tradehub.order()} | {:error, HTTPoison.Error.t()}
@spec get_order!(String.t()) :: Tradehub.order()
def get_order(order_id) do
case Tradehub.get("get_order", params: %{order_id: String.upcase(order_id)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:get_order, order_id)
@doc """
Requests available positions of the given account in all markets in which the account is involved
## Examples
iex> Tradehub.Trade.positions("swth1fdqkq5gc5x8h6a0j9hamc30stlvea6zldprt6q")
"""
@spec positions(String.t()) :: {:ok, list(Tradehub.position())} | {:error, HTTPoison.Error.t()}
@spec positions!(String.t()) :: list(Tradehub.position())
@doc deprecated: "The API does not well documetation, and I do not have much info about this endpoint"
def positions(account) do
case Tradehub.get("get_positions", params: %{account: String.downcase(account)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:positions, account)
@doc """
Get positions sorted by size of the given market.
## Examples
iex> Tradehub.Trade.positions_sorted_size("swth_eth1")
"""
@doc deprecated: "The API is not well documentation"
@spec positions_sorted_size(String.t()) :: {:error, HTTPoison.Error.t()} | {:ok, any}
@spec positions_sorted_size!(String.t()) :: any
def positions_sorted_size(market) do
case Tradehub.get("get_positions_sorted_by_size", params: %{market: String.downcase(market)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:positions_sorted_size, market)
@doc """
Get positions sorted by risk of the given market.
## Examples
iex> Tradehub.Trade.positions_sorted_risk("swth_eth1", "unknown")
"""
@doc deprecated: "The API is not well documentation"
@spec positions_sorted_risk(String.t(), String.t()) :: {:error, HTTPoison.Error.t()} | {:ok, any}
@spec positions_sorted_risk!(String.t(), String.t()) :: any
def positions_sorted_risk(market, direction) do
case Tradehub.get("get_positions_sorted_by_risk", params: %{market: String.downcase(market), direction: direction}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:positions_sorted_risk, market, direction)
@doc """
Get positions sorted by pnl of the given market.
## Examples
iex> Tradehub.Trade.positions_sorted_pnl("swth_eth1")
"""
@doc deprecated: "The API is not well documentation"
@spec positions_sorted_pnl(String.t()) :: {:error, HTTPoison.Error.t()} | {:ok, any}
@spec positions_sorted_pnl!(String.t()) :: any
def positions_sorted_pnl(market) do
case Tradehub.get("get_positions_sorted_by_pnl", params: %{market: String.downcase(market)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:positions_sorted_pnl, market)
@doc """
Requests the position of the given account in a particular market
## Examples
iex> Tradehub.Trade.position("swth1fdqkq5gc5x8h6a0j9hamc30stlvea6zldprt6q", "swth_eth1")
"""
@spec position(String.t(), String.t()) :: {:ok, Tradehub.position()} | {:error, HTTPoison.Error.t()}
@spec position!(String.t(), String.t()) :: Tradehub.position()
def position(account, market) do
case Tradehub.get("get_position", params: %{account: String.downcase(account), market: String.downcase(market)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:position, account, market)
@doc """
Get leverage of the given account in a specific market
## Examples
iex> Tradehub.Trade.leverage("swth1fdqkq5gc5x8h6a0j9hamc30stlvea6zldprt6q", "eth_h21")
"""
@spec leverage(String.t(), String.t()) :: {:ok, Tradehub.leverage()} | {:error, HTTPoison.Error.t()}
@spec leverage!(String.t(), String.t()) :: Tradehub.leverage()
def leverage(account, market) do
case Tradehub.get("get_leverage", params: %{account: String.downcase(account), market: String.downcase(market)}) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:leverage, account, market)
@doc """
Requests recent trades of the market or filtered by the specific params
## Examples
iex> Tradehub.Trade.trades
"""
@typedoc """
Query params for the `/get_trades` endpoint.
- **market**: market ticker used by the chain, e.g `swth_eth1`
- **before_id**: filter trades before id
- **after_id**: filter trades after id
- **order_by**: TODO
- **limit**: limit the returned results, max is 200
"""
@type trade_options :: %{
market: String.t(),
before_id: String.t(),
after_id: String.t(),
order_by: String.t(),
limit: String.t()
}
@spec trades(%{}) :: {:ok, list(Tradehub.trade())} | {:error, HTTPoison.Error.t()}
@spec trades(trade_options()) :: {:ok, list(Tradehub.trade())} | {:error, HTTPoison.Error.t()}
@spec trades!(trade_options()) :: list(Tradehub.trade())
def trades(trade_options \\ %{}) do
case Tradehub.get("get_trades", params: trade_options) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:trades)
raising(:trades, trade_options)
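# A hypothetical query (sketch): filter recent trades of one market,
# using the `trade_options` keys documented above.
#
# Tradehub.Trade.trades(%{market: "swth_eth1", limit: "10"})
# # => {:ok, [...]}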
@doc """
Requests recent trades by the given account
## Examples
iex> Tradehub.Trade.trades_by_account("<KEY>")
"""
@typedoc """
Query params for the `/get_trades_by_account` endpoint.
- **before_id**: filter trades before id
- **after_id**: filter trades after id
- **order_by**: TODO
- **limit**: limit the returned results, max is 200
"""
@type trade_account_options :: %{
before_id: String.t(),
after_id: String.t(),
order_by: String.t(),
limit: String.t()
}
@spec trades_by_account(String.t(), %{}) ::
{:ok, list(Tradehub.account_trade())} | {:error, HTTPoison.Error.t()}
@spec trades_by_account(String.t(), trade_account_options()) ::
{:ok, list(Tradehub.account_trade())} | {:error, HTTPoison.Error.t()}
@spec trades_by_account!(String.t(), trade_account_options()) ::
list(Tradehub.account_trade())
def trades_by_account(account, trade_account_options \\ %{}) do
case Tradehub.get("get_trades_by_account", params: Map.put(trade_account_options, :account, account)) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:trades_by_account, account)
raising(:trades_by_account, account, trade_account_options)
@doc """
Requests recent liquidations
## Examples
iex> Tradehub.Trade.liquidations
"""
@typedoc """
Query params for the `/get_liquidations` endpoint.
- **before_id**: filter trades before id
- **after_id**: filter trades after id
- **order_by**: TODO
- **limit**: limit the returned results, max is 200
"""
@type liquidation_options :: %{
before_id: String.t(),
after_id: String.t(),
order_by: String.t(),
limit: String.t()
}
@spec liquidations(%{}) ::
{:ok, list(Tradehub.account_trade())} | {:error, HTTPoison.Error.t()}
@spec liquidations(liquidation_options()) ::
{:ok, list(Tradehub.account_trade())} | {:error, HTTPoison.Error.t()}
@spec liquidations!(liquidation_options()) ::
list(Tradehub.account_trade())
def liquidations(liquidation_options \\ %{}) do
case Tradehub.get("get_liquidations", params: liquidation_options) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:liquidations)
raising(:liquidations, liquidation_options)
end
|
lib/tradehub/trade.ex
| 0.884514
| 0.457016
|
trade.ex
|
starcoder
|
defmodule Weather.TableFormatter do
import Enum, only: [each: 2, map: 2, map_join: 3, max: 1 ]
@doc """
Takes a list of row data, where each row is a HashDict, and a list of
headers. Prints a table to STDOUT of the data from each row
identified by each header. E.g. each header identifies a column,
and those columns are extracted and printed from the rows.
We calculate the width of each column to fit the longest element
in that column.
"""
def print_table_for_columns(rows, headers) do
data_by_columns = split_into_columns(rows,headers)
columns_width = widths_of(data_by_columns)
format = format_for(columns_width)
puts_one_line_in_columns headers, format
IO.puts separator(columns_width)
puts_in_columns data_by_columns, format
end
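# A minimal usage sketch (assuming rows are HashDicts keyed by the header strings):
#
# rows = [Enum.into([{"name", "ant"}, {"legs", "six"}], HashDict.new),
# Enum.into([{"name", "wombat"}, {"legs", "four"}], HashDict.new)]
# Weather.TableFormatter.print_table_for_columns(rows, ["name", "legs"])
#
# This prints the header row, a separator, and one line per record,
# with each column padded to the width of its longest value.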
@doc """
Given a list of rows, where each row contains a keyed list
of columns, return a list containing lists of the data in
each column. The `headers` parameter contains the list
of columns to extracted
## Examples
iex> list = [Enum.into([{"a", "1"}, {"b","2"}, {"c", "3"}], HashDict.new),
...> Enum.into([{"a", "4"}, {"b","5"}, {"c", "6"}], HashDict.new)]
iex> Weather.TableFormatter.split_into_columns(list, ["a", "b", "c"])
[ ["1", "4"], ["2","5"], ["3", "6"] ]
"""
def split_into_columns(rows,headers) do
for header <- headers do
for row <- rows, do: printable(row[header])
end
end
@doc """
Return a binary (string) version of our parameter.
## Examples
iex> Issues.TableFormatter.printable("a")
"a"
iex> Issues.TableFormatter.printable(99)
"99"
"""
def printable(str) when is_binary(str), do: str
def printable(str), do: to_string(str)
@doc """
Given a list containing sublists, where each sublist contains the data for
a column, return a list containing the maximum width of each column
## Examples
iex> data = [ [ "cat", "wombat", "elk"], ["mongoose", "ant", "gnu"] ]
iex> Weather.TableFormatter.widths_of(data)
[6, 8 ]
"""
def widths_of(columns) do
for column <- columns, do: column |> map(&String.length/1) |> max
end
@doc """
Return a format string that hard codes the widths of a set of columns.
We put `" |"` between each column.
## Examples
iex> widths = [5,6,99]
iex> Weather.TableFormatter.format_for(widths)
"~-5s | ~-6s | ~-99s~n"
"""
def format_for(columns_width) do
map_join(columns_width, " | ", fn width -> "~-#{width}s" end) <> "~n"
end
@doc """
Given a list of column widths, return a separator line: a run of
hyphens sized to each column, joined with `-+-`.
## Examples
iex> widths = [5,6,9]
iex> Weather.TableFormatter.separator(widths)
"------+--------+----------"
"""
def separator(columns_width) do
map_join(columns_width, "-+-", fn width -> List.duplicate("-", width) end )
end
def puts_in_columns(data_by_columns, format) do
data_by_columns
|> List.zip
|> map(&Tuple.to_list/1)
|> each(&puts_one_line_in_columns(&1,format))
end
def puts_one_line_in_columns(fields, format) do
:io.format(format, fields)
end
end
|
prag-programing/Part1/ch13proj/weather/lib/weather/table_formatter.ex
| 0.867612
| 0.782663
|
table_formatter.ex
|
starcoder
|
defmodule Mazes.HexagonalMaze do
@behaviour Mazes.Maze
# hexes in the maze are arranged to have a flat top and bottom
# the rows are zig-zag
# ___ ___
# /1,1\___/3,1\___
# \___/2,1\___/4,1\
# /1,2\___/3,2\___/
# \___/2,2\___/4,2\
# \___/ \___/
@impl true
def new(opts) do
radius = Keyword.get(opts, :radius)
width = radius * 2 - 1
height = radius * 2 - 1
all_vertices_adjacent? = Keyword.get(opts, :all_vertices_adjacent?, false)
vertices =
Enum.reduce(1..width, [], fn x, acc ->
Enum.reduce(1..height, acc, fn y, acc2 ->
[{x, y} | acc2]
end)
end)
vertices =
vertices
|> Enum.filter(fn {x, y} ->
distance({x, y}, {radius, radius}) < radius
end)
adjacency_matrix =
vertices
|> Enum.map(fn {from_x, from_y} = from ->
value =
vertices
|> Enum.filter(fn {x, y} ->
if Integer.mod(x, 2) == 1 do
{x, y} in [
{from_x, from_y - 1},
{from_x - 1, from_y},
{from_x + 1, from_y},
{from_x - 1, from_y + 1},
{from_x, from_y + 1},
{from_x + 1, from_y + 1}
]
else
{x, y} in [
{from_x - 1, from_y - 1},
{from_x, from_y - 1},
{from_x + 1, from_y - 1},
{from_x - 1, from_y},
{from_x + 1, from_y},
{from_x, from_y + 1}
]
end
end)
|> Enum.map(&{&1, all_vertices_adjacent?})
|> Enum.into(%{})
{from, value}
end)
|> Enum.into(%{})
%{
width: width,
height: height,
adjacency_matrix: adjacency_matrix,
module: __MODULE__,
from: nil,
to: nil
}
end
def distance({x1, y1}, {x2, y2}) do
# https://www.redblobgames.com/grids/hexagons/#conversions
cx1 = x1
cz1 = y1 - (x1 + Integer.mod(x1, 2)) / 2
cy1 = -cx1 - cz1
cx2 = x2
cz2 = y2 - (x2 + Integer.mod(x2, 2)) / 2
cy2 = -cx2 - cz2
(abs(cx1 - cx2) + abs(cy1 - cy2) + abs(cz1 - cz2)) / 2
end
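# Worked example (a sketch): {1, 1} converts to cube coordinates
# (1, -1.0, 0.0) and {3, 2} to (3, -3.0, 0.0), so
# distance({1, 1}, {3, 2}) == (2 + 2 + 0) / 2 == 2.0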
@impl true
def center(maze) do
{trunc(Float.ceil(maze.width / 2)), trunc(Float.ceil(maze.height / 2))}
end
# Not part of the behavior, functions needed for drawing the grid
def north({x, y}), do: {x, y - 1}
def south({x, y}), do: {x, y + 1}
def northeast({x, y}) do
if Integer.mod(x, 2) == 1 do
{x + 1, y - 1}
else
{x + 1, y}
end
end
def northwest({x, y}) do
if Integer.mod(x, 2) == 1 do
{x - 1, y - 1}
else
{x - 1, y}
end
end
def southeast({x, y}) do
if Integer.mod(x, 2) == 1 do
{x + 1, y}
else
{x + 1, y + 1}
end
end
def southwest({x, y}) do
if Integer.mod(x, 2) == 1 do
{x - 1, y}
else
{x - 1, y + 1}
end
end
end
|
lib/mazes/hexagonal_maze.ex
| 0.708717
| 0.622588
|
hexagonal_maze.ex
|
starcoder
|
defmodule StarkInfra.PixReversal do
alias __MODULE__, as: PixReversal
alias StarkInfra.Utils.Rest
alias StarkInfra.Utils.Check
alias StarkInfra.User.Project
alias StarkInfra.Utils.Parse
alias StarkInfra.User.Organization
alias StarkInfra.Error
@moduledoc """
Groups PixReversal related functions
"""
@doc """
PixReversals are instant payments used to revert PixReversals. You can only
revert inbound PixReversals.
When you initialize a PixReversal, the entity will not be automatically
created in the Stark Infra API. The 'create' function sends the objects
to the Stark Infra API and returns the list of created objects.
## Parameters (required):
- `:amount` [integer]: amount in cents to be reversed from the PixReversal. ex: 1234 (= R$ 12.34)
- `:external_id` [string]: string that must be unique among all your PixReversals. Duplicated external IDs will cause failures. By default, this parameter will block any PixReversal that repeats amount and receiver information on the same date. ex: "my-internal-id-123456"
- `:end_to_end_id` [string]: central bank's unique transaction ID. ex: "E79457883202101262140HHX553UPqeq"
- `:reason` [string]: reason why the PixReversal is being reversed. Options are "bankError", "fraud", "chashierError", "customerRequest"
## Parameters (optional):
- `:tags` [list of strings, default nil]: list of strings for reference when searching for PixReversals. ex: ["employees", "monthly"]
## Attributes (return-only):
- `:id` [string]: unique id returned when the PixReversal is created. ex: "5656565656565656".
- `:return_id` [string]: central bank's unique reversal transaction ID. ex: "D20018183202202030109X3OoBHG74wo".
- `:bank_code` [string]: code of the bank institution in Brazil. ex: "20018183"
- `:fee` [string]: fee charged by this PixReversal. ex: 200 (= R$ 2.00)
- `:status` [string]: current PixReversal status. ex: "registered" or "paid"
- `:flow` [string]: direction of money flow. ex: "in" or "out"
- `:created` [DateTime]: creation datetime for the PixReversal. ex: ~U[2020-03-10 10:30:00Z]
- `:updated` [DateTime]: latest update datetime for the PixReversal. ex: ~U[2020-03-10 10:30:00Z]
"""
@enforce_keys [
:amount,
:external_id,
:end_to_end_id,
:reason
]
defstruct [
:amount,
:external_id,
:end_to_end_id,
:reason,
:id,
:return_id,
:bank_code,
:fee,
:status,
:flow,
:created,
:updated,
:tags,
]
@type t() :: %__MODULE__{}
@doc """
Send a list of PixReversal structs for creation in the Stark Infra API
## Parameters (required):
- `:reversals` [list of PixReversal objects]: list of PixReversal structs to be created in the API
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixReversal structs with updated attributes
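## Example:
A sketch, assuming a default project has been configured:
{:ok, reversals} = StarkInfra.PixReversal.create([
%PixReversal{
amount: 1000,
external_id: "my-internal-id-123456",
end_to_end_id: "E79457883202101262140HHX553UPqeq",
reason: "bankError"
}
])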
"""
@spec create(
[PixReversal.t() | map()],
user: Project.t() | Organization.t() | nil
) ::
{:ok, [PixReversal.t()]} |
{:error, [error: Error.t()]}
def create(keys, options \\ []) do
Rest.post(
resource(),
keys,
options
)
end
@doc """
Same as create(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec create!(
[PixReversal.t() | map()],
user: Project.t() | Organization.t() | nil
) ::
{:ok, [PixReversal.t()]} |
{:error, [error: Error.t()]}
def create!(keys, options \\ []) do
Rest.post!(
resource(),
keys,
options
)
end
@doc """
Receive a single PixReversal struct previously created in the Stark Infra API by its id
## Parameters (required):
- `:id` [string]: struct unique id. ex: "5656565656565656"
## Options:
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- PixReversal struct with updated attributes
"""
@spec get(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def get(id, options \\ []) do
Rest.get_id(
resource(),
id,
options
)
end
@doc """
Same as get(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec get!(
id: binary,
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def get!(id, options \\ []) do
Rest.get_id!(
resource(),
id,
options
)
end
@doc """
Receive a stream of PixReversal structs previously created in the Stark Infra API
## Options:
- `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. Options: “created”, “processing”, “success”, “failed”
- `:tags` [list of strings, default nil]: tags to filter retrieved objects. ex: ["tony", "stark"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:return_ids` [list of strings, default nil]: central bank's unique reversal transaction IDs. ex: ["D20018183202202030109X3OoBHG74wo", "D20018183202202030109X3OoBHG72rd"].
- `:external_ids` [list of strings, default nil]: url safe strings that must be unique among all your PixReversals. Duplicated external IDs will cause failures. By default, this parameter will block any PixReversal that repeats amount and receiver information on the same date. ex: ["my-internal-id-123456", "my-internal-id-654321"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- stream of PixReversal structs with updated attributes
"""
@spec query(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
return_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def query(options \\ []) do
Rest.get_list(
resource(),
options
)
end
@doc """
Same as query(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec query!(
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
return_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def query!(options \\ []) do
Rest.get_list!(
resource(),
options
)
end
@doc """
Receive a list of up to 100 PixReversal structs previously created in the Stark Infra API and the cursor to the next page.
Use this function instead of query if you want to manually page your reversals.
## Options:
- `:cursor` [string, default nil]: cursor returned on the previous page function call
- `:limit` [integer, default 100]: maximum number of structs to be retrieved. Max = 100. ex: 35
- `:after` [Date or string, default nil]: date filter for structs created after a specified date. ex: ~D[2020-03-10]
- `:before` [Date or string, default nil]: date filter for structs created before a specified date. ex: ~D[2020-03-10]
- `:status` [list of strings, default nil]: filter for status of retrieved objects. Options: “created”, “processing”, “success”, “failed”
- `:tags` [list of strings, default nil]: tags to filter retrieved objects. ex: ["tony", "stark"]
- `:ids` [list of strings, default nil]: list of ids to filter retrieved objects. ex: ["5656565656565656", "4545454545454545"]
- `:return_ids` [list of strings, default nil]: central bank's unique reversal transaction ID. ex: ["D20018183202202030109X3OoBHG74wo", "D20018183202202030109X3OoBHG72rd"].
- `:external_ids` [list of strings, default nil]: url safe string that must be unique among all your PixReversals. Duplicated external IDs will cause failures. By default, this parameter will block any PixReversal that repeats amount and receiver information on the same date. ex: ["my-internal-id-123456", "my-internal-id-654321"]
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- list of PixReversal structs with updated attributes
- cursor to retrieve the next page of PixReversal objects
"""
@spec page(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
return_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def page(options \\ []) do
Rest.get_page(
resource(),
options
)
end
@doc """
Same as page(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec page!(
cursor: binary,
limit: integer,
after: Date.t() | binary,
before: Date.t() | binary,
status: [binary],
tags: [binary],
ids: [binary],
return_ids: [binary],
external_ids: [binary],
user: Project.t() | Organization.t() | nil
) ::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def page!(options \\ []) do
Rest.get_page!(
resource(),
options
)
end
@doc """
Create a single PixReversal struct from a content string received from a handler listening at the request url.
If the provided digital signature does not check out with the StarkInfra public key, a
starkinfra.error.InvalidSignatureError will be raised.
## Parameters (required):
- `:content` [string]: response content from request received at user endpoint (not parsed)
- `:signature` [string]: base-64 digital signature received at response header "Digital-Signature"
## Options:
- `cache_pid` [PID, default nil]: PID of the process that holds the public key cache, returned on previous parses. If not provided, a new cache process will be generated.
- `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.
## Return:
- Parsed PixReversal object
"""
@spec parse(
content: binary,
signature: binary,
cache_pid: pid(),
user: Project.t() | Organization.t()
)::
{:ok, PixReversal.t()} |
{:error, [error: Error.t()]}
def parse(options \\ []) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc """
Same as parse(), but it will unwrap the error tuple and raise in case of errors.
"""
@spec parse!(
content: binary,
signature: binary,
cache_pid: pid(),
user: Project.t() | Organization.t()
) :: any
def parse!(options \\ []) do
%{content: content, signature: signature, cache_pid: cache_pid, user: user} =
Enum.into(
options |> Check.enforced_keys([:content, :signature]),
%{cache_pid: nil, user: nil}
)
Parse.parse_and_verify(
content: content,
signature: signature,
cache_pid: cache_pid,
key: nil,
resource_maker: &resource_maker/1,
user: user
)
end
@doc false
def resource() do
{
"PixReversal",
&resource_maker/1
}
end
@doc false
def resource_maker(json) do
%PixReversal{
amount: json[:amount],
external_id: json[:external_id],
end_to_end_id: json[:end_to_end_id],
reason: json[:reason],
id: json[:id],
tags: json[:tags],
return_id: json[:return_id],
bank_code: json[:bank_code],
fee: json[:fee],
status: json[:status],
flow: json[:flow],
created: json[:created] |> Check.datetime(),
updated: json[:updated] |> Check.datetime()
}
end
end
|
lib/pix_reversal/pix_reversal.ex
| 0.909232
| 0.629305
|
pix_reversal.ex
|
starcoder
|
defmodule AdventOfCode2019.IntcodeComputer do
@moduledoc """
The Intcode Computer is used in the following days
- Day 2 — https://adventofcode.com/2019/day/2
- Day 5 — https://adventofcode.com/2019/day/5
- Day 7 — https://adventofcode.com/2019/day/7
- Day 9 — https://adventofcode.com/2019/day/9
- Day 11 — https://adventofcode.com/2019/day/11
- Day 11 — https://adventofcode.com/2019/day/11
- Day 13 — https://adventofcode.com/2019/day/13
- Day 15 — https://adventofcode.com/2019/day/15
- Day 17 — https://adventofcode.com/2019/day/17
"""
@spec load_program(Enumerable.t()) :: map()
def load_program(line) do
line
|> String.trim()
|> String.split(",")
|> Stream.with_index()
|> Stream.map(fn {a, b} -> {b, String.to_integer(a)} end)
|> Map.new()
end
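# For example, load_program("1,0,0,0,99") returns
# %{0 => 1, 1 => 0, 2 => 0, 3 => 0, 4 => 99}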
@spec compute(map(), integer(), integer(), any | integer()) ::
{map(), integer(), integer(), integer()}
def compute(program, ptr, rel_base, input \\ []),
do: compute({:start, {program, ptr, rel_base}, nil}, input)
defp compute({:done, {program, ptr, rel_base}, _input}, output),
do: {program, ptr, rel_base, output}
defp compute({:output, state, output}, input) do
step(state, input)
|> compute(output)
end
defp compute({:input, state, input}, _output) do
step(state, input)
|> compute(input)
end
defp compute({_result, state, _output}, input) do
step(state, input)
|> compute(input)
end
@spec step({map(), integer(), integer()}, list()) :: tuple()
def step({program, ptr, rel_base}, input \\ []) do
program[ptr]
|> Integer.to_string()
|> String.pad_leading(5, "0")
|> String.codepoints()
|> step(program, ptr, rel_base, input)
end
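# For example, an instruction value of 1002 is padded and split into
# ["0", "1", "0", "0", "2"]: the last two digits are the opcode
# ("02" = multiply) and the leading digits are the parameter modes
# in reverse parameter order (c holds the mode of the first parameter).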
@spec step(list(), map(), integer(), integer(), list()) :: tuple()
defp step(["0", "0", "0", "9", "9"], program, ptr, rel_base, _input),
do: {:done, {program, ptr, rel_base}, nil}
defp step([a, b, c, "0", "1"], program, ptr, rel_base, _input) do
program =
(read(program, ptr + 1, c, rel_base) + read(program, ptr + 2, b, rel_base))
|> write(program, ptr + 3, a, rel_base)
{:noop, {program, ptr + 4, rel_base}, nil}
end
defp step([a, b, c, "0", "2"], program, ptr, rel_base, _input) do
program =
(read(program, ptr + 1, c, rel_base) * read(program, ptr + 2, b, rel_base))
|> write(program, ptr + 3, a, rel_base)
{:noop, {program, ptr + 4, rel_base}, nil}
end
defp step(["0", "0", c, "0", "3"], program, ptr, rel_base, [input | tail]) do
program = write(input, program, ptr + 1, c, rel_base)
{:input, {program, ptr + 2, rel_base}, tail}
end
defp step(["0", "0", c, "0", "4"], program, ptr, rel_base, _input) do
{:output, {program, ptr + 2, rel_base}, read(program, ptr + 1, c, rel_base)}
end
defp step(["0", "0", c, "0", "9"], program, ptr, rel_base, _input) do
{:noop, {program, ptr + 2, rel_base + read(program, ptr + 1, c, rel_base)}, nil}
end
defp step([a, b, c, "0", e], program, ptr, rel_base, _input)
when e == "5" or e == "6" or e == "7" or e == "8" do
{program, ptr} =
{read(program, ptr + 1, c, rel_base), read(program, ptr + 2, b, rel_base)}
|> jump_less_equal(a, e, program, ptr, rel_base)
{:noop, {program, ptr, rel_base}, nil}
end
@spec read(map(), integer(), String.t(), integer()) :: integer()
defp read(program, ptr, "0", _rel_base), do: Map.get(program, Map.get(program, ptr, 0), 0)
defp read(program, ptr, "1", _rel_base), do: Map.get(program, ptr, 0)
defp read(program, ptr, "2", rel_base),
do: Map.get(program, rel_base + Map.get(program, ptr, 0), 0)
@spec write(integer(), map(), integer, String.t(), integer()) :: map()
defp write(result, program, ptr, "2", rel_base),
do: Map.put(program, rel_base + Map.get(program, ptr, 0), result)
defp write(result, program, ptr, "0", _rel_base),
do: Map.put(program, Map.get(program, ptr, 0), result)
@spec jump_less_equal(
{integer(), integer()},
String.t(),
String.t(),
map(),
integer(),
integer()
) :: {map(), integer()}
defp jump_less_equal({p1, p2}, _a, e, program, _ptr, _rel_base)
when (e == "5" and p1 != 0) or (e == "6" and p1 == 0),
do: {program, p2}
defp jump_less_equal(_params, _a, e, program, ptr, _rel_base)
when e == "5" or e == "6",
do: {program, ptr + 3}
defp jump_less_equal({p1, p2}, a, e, program, ptr, rel_base)
when (e == "7" and p1 < p2) or (e == "8" and p1 == p2),
do: {write(1, program, ptr + 3, a, rel_base), ptr + 4}
defp jump_less_equal(_params, a, _e, program, ptr, rel_base),
do: {write(0, program, ptr + 3, a, rel_base), ptr + 4}
end
|
lib/advent_of_code_2019/intcode_computer.ex
| 0.768125
| 0.482429
|
intcode_computer.ex
|
starcoder
|
defmodule Artemis.Helpers.BulkAction do
defmodule Result do
defstruct data: [],
errors: []
end
@moduledoc """
Iterate over a list of records, calling the passed function for each. Return
a standardized result set.
Options include:
halt_on_error: boolean (default false)
When true, execution will stop after first failure.
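For example (a sketch), with `halt_on_error: true` an action that raises
on `2` stops the run there, so `3` is never processed:
BulkAction.call([1, 2, 3], halt_on_error: true, action: fn
1 -> 1
_ -> raise "boom"
end)
=> %BulkAction.Result{
data: [{1, 1}],
errors: [{2, {:error, %RuntimeError{message: "boom"}}}]
}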
## Example Usage
The function can be passed as a `do` block:
BulkAction.call([1,2,3]) do
fn (item) -> item + 1 end
end
=> %BulkAction.Result{
data: [{3, 4}, {2, 3}, {1, 2}],
errors: []
}
Or under the `action` key:
BulkAction.call([1,2,3], action: fn (item) -> item + 1 end)
=> %BulkAction.Result{
data: [{3, 4}, {2, 3}, {1, 2}],
errors: []
}
Additional parameters can be passed as a list as an optional second argument:
BulkAction.call([1,2,3], [8, 20]) do
fn (item, add_by, multiply_by) -> (item + add_by) * multiply_by end
end
=> %BulkAction.Result{
data: [{3, 220}, {2, 200}, {1, 180}],
errors: []
}
The second argument changes the arity of the action function.
## Return Value
Returns a struct:
%Artemis.Helpers.BulkAction{
data: [],
errors: []
}
Where `data` is a keyword list of successful results and `errors` is a list
of errors.
"""
@spec call(Enum.t(), list(), keyword()) :: %Result{}
def call(records, params \\ [], options) do
action = Keyword.get(options, :do, Keyword.get(options, :action))
halt_on_error? = Keyword.get(options, :halt_on_error, false)
Enum.reduce_while(records, %Result{}, fn record, acc ->
result =
try do
apply(action, [record | params])
rescue
error -> {:error, error}
end
error? = is_tuple(result) && elem(result, 0) == :error
halt? = error? && halt_on_error?
updated_data =
case error? do
true -> acc.data
false -> [{record, result} | acc.data]
end
updated_errors =
case error? do
true -> [{record, result} | acc.errors]
false -> acc.errors
end
acc =
acc
|> Map.put(:data, updated_data)
|> Map.put(:errors, updated_errors)
case halt? do
true -> {:halt, acc}
false -> {:cont, acc}
end
end)
end
end
|
apps/artemis/lib/artemis/helpers/bulk_action.ex
| 0.835484
| 0.631921
|
bulk_action.ex
|
starcoder
|
defmodule Aoc2021.Day3 do
@moduledoc """
See https://adventofcode.com/2021/day/3
"""
defmodule Part1 do
@moduledoc false
@spec gamma([{any, {integer, integer}}]) :: integer
def gamma(list) do
digits_to_number(list, &gamma_digit/1)
end
@spec epsilon([{any, {integer, integer}}]) :: integer
def epsilon(list) do
digits_to_number(list, &epsilon_digit/1)
end
defp digits_to_number(list, f) do
list
|> Enum.map(fn {_, x} -> x end)
|> Enum.map(f)
|> Enum.reverse()
|> Integer.undigits(2)
end
defp gamma_digit({a, b}) when a > b, do: 0
defp gamma_digit(_), do: 1
defp epsilon_digit({a, b}) when a > b, do: 1
defp epsilon_digit(_), do: 0
end
defmodule Part2 do
@moduledoc false
alias Aoc2021.Day3
def oxygen_generator_rating(input) do
l = input |> hd() |> String.length()
rating(input, {0, l}, &oxygen_generator_criteria/1)
end
def co2_scrubber_rating(input) do
l = input |> hd() |> String.length()
rating(input, {0, l}, &co2_scrubber_criteria/1)
end
defp oxygen_generator_criteria({z, o}) when z > o, do: "0"
defp oxygen_generator_criteria(_), do: "1"
defp co2_scrubber_criteria({z, o}) when z > o, do: "1"
defp co2_scrubber_criteria(_), do: "0"
def rating([x], _, _) do
{x, _} = Integer.parse(x, 2)
x
end
def rating(input, {p, l}, f) do
bit_counts =
input
|> Day3.count_bits_by_position()
|> Map.get(l - p - 1)
mcv = f.(bit_counts)
nis =
Enum.filter(input, fn i ->
String.at(i, p) == mcv
end)
rating(nis, {p + 1, l}, f)
end
end
@spec solve_part1 :: integer
def solve_part1() do
input = read_input() |> count_bits_by_position() |> Enum.sort()
Part1.epsilon(input) * Part1.gamma(input)
end
def solve_part2() do
input = read_input("priv/day3/input.txt") |> Enum.to_list()
o2 = Part2.oxygen_generator_rating(input)
co2 = Part2.co2_scrubber_rating(input)
o2 * co2
end
@spec read_input() :: Enum.t()
def read_input() do
read_input("priv/day3/input.txt")
end
@spec read_input(Path.t()) :: Enum.t()
def read_input(path) do
File.stream!(path)
|> Stream.map(&String.trim/1)
|> Stream.reject(fn line -> line == "" end)
end
@spec count_bits_by_position(Enum.t()) :: %{integer() => {non_neg_integer(), non_neg_integer()}}
def count_bits_by_position(stream) do
stream
|> Enum.reduce(%{}, &count_bits/2)
end
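# For example, count_bits_by_position(["10", "01"]) returns
# %{0 => {1, 1}, 1 => {1, 1}}: each bit position (keyed from the least
# significant bit) maps to a {zero_count, one_count} tuple.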
defp count_bits(line, acc) do
{map, _} =
line
|> String.graphemes()
|> Enum.reduce({acc, String.length(line) - 1}, fn d, {acc, n} ->
{update_digit(d, acc, n), n - 1}
end)
map
end
defp update_digit("1", acc, n) do
Map.update(acc, n, {0, 1}, fn {a, b} -> {a, b + 1} end)
end
defp update_digit("0", acc, n) do
Map.update(acc, n, {1, 0}, fn {a, b} -> {a + 1, b} end)
end
end
|
lib/aoc2021/day3.ex
| 0.79858
| 0.560072
|
day3.ex
|
starcoder
|
defmodule Pass.ResetPassword do
@moduledoc """
Handles password resets by generating, verifying, and redeeming JWTs.
The idea is that you would use `Pass.ResetPassword.generate_token/1` to
create a JWT that you could then send to the user (probably in a link in an
email).
When the user accesses your interface redeem the token and reset their
password, you would use `Pass.ResetPassword.verify_token/1` to first
verify the JWT and that the time has not expired before asking for the new
password.
Once the user has given you the new password, you would use
`Pass.ResetPassword.redeem_token/2` which would first verify the JWT and
then reset the password.
To prevent replay attacks, we generate a random string to send in the jti
attribute of the JWT and store it in the data store.
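A minimal sketch of the whole flow (assuming a configured data store
adapter and signing key):
token = Pass.ResetPassword.generate_token("user@example.com")
# ... deliver the token to the user, e.g. in an email link ...
{:ok, _claims} = Pass.ResetPassword.verify_token(token)
:ok = Pass.ResetPassword.redeem_token(token, "the new password")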
"""
defp config, do: Application.get_env(:pass, __MODULE__, %{})
defp timeout, do: config[:timeout] || 60 * 60 * 2
@doc """
Returns the secret key used to sign the JWT.
"""
def key, do: config[:key]
@doc """
Takes in an email address and creates a JWT with the following claims:
- sub: The email address passed in
- aud: "Pass.ResetPassword"
- jti: Random 16 bytes encoded as URL-safe base 64 string with no padding
- iat: The current time from epoch in seconds
"""
def generate_token(email) do
jti = Base.url_encode64(:crypto.strong_rand_bytes(16), padding: false)
Pass.DataStore.adapter.set_password_reset_token(email, jti)
%{
sub: email,
aud: "Pass.ResetPassword",
jti: jti,
iat: :os.system_time(:seconds)
} |> JsonWebToken.sign(%{key: key})
end
@doc """
Takes in a JWT to verify and the new password that will be set for the user if
the JWT is valid and hasn't expired.
"""
def redeem_token(token, password) do
case verify_token(token) do
{:ok, claims} ->
Pass.DataStore.adapter.update_password_for(claims.sub, password)
:ok
error ->
error
end
end
@doc """
Takes in a password reset JWT and verifies that the JWT is valid, that the JWT
hasn't expired, and that the email address in the sub attribute and the random
string in the jti attribute match a user in the data store.
"""
def verify_token(token) do
case JsonWebToken.verify(token, %{key: key}) do
{:error, _} ->
{:error, "Invalid JWT"}
{:ok, claims} ->
cond do
:os.system_time(:seconds) - claims.iat > timeout ->
{:error, "Password reset time period expired"}
not Pass.DataStore.adapter.vaild_password_reset_token?(claims.sub, claims.jti) ->
{:error, "Invalid password reset token"}
true ->
{:ok, claims}
end
end
end
end
|
lib/pass/actions/reset_password.ex
| 0.733929
| 0.584242
|
reset_password.ex
|
starcoder
|
defmodule Oli.Delivery.Attempts.PageLifecycle.AttemptState do
@moduledoc """
The complete state of a page attempt
resource_attempt - The resource attempt record itself
attempt_hierarchy - The state of the activity attempts required for rendering
"""
alias Oli.Delivery.Attempts.Core.ResourceAttempt
alias Oli.Resources.Revision
alias Oli.Delivery.Attempts.PageLifecycle.Hierarchy
@enforce_keys [
:resource_attempt,
:attempt_hierarchy
]
defstruct [
:resource_attempt,
:attempt_hierarchy
]
@type t() :: %__MODULE__{
resource_attempt: any(),
attempt_hierarchy: any()
}
@doc """
The required attempt state for page rendering differs between basic and adaptive pages.
A basic page needs the "full attempt hierarchy", that is, the resource attempt, and then a
map of activity ids on that page to tuples of activity attempt and a part attempt mapping. For
example:
```
%{
232 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}},
233 => {%ActivityAttempt{}, %{ "1" => %PartAttempt{}, "2" => %PartAttempt{}}}
}
```
The adaptive page requires less information, which is also arranged in a different format. It
uses simply a mapping of activity resource ids to a small set of data including the
attempt guid and the name of the delivery element to use for rendering. That looks like:
```
%{
232 => %{
id: 232,
attemptGuid: 2398298233,
deliveryElement: "oli-adaptive-delivery"
},
233 => %{
id: 233,
attemptGuid: 223923892389,
deliveryElement: "oli-adaptive-delivery"
}
}
```
"""
def fetch_attempt_state(%ResourceAttempt{} = resource_attempt, %Revision{
content: %{"advancedDelivery" => true}
}) do
{:ok,
%__MODULE__{
resource_attempt: resource_attempt,
attempt_hierarchy: Hierarchy.thin_hierarchy(resource_attempt)
}}
end
def fetch_attempt_state(%ResourceAttempt{} = resource_attempt, _) do
{:ok,
%__MODULE__{
resource_attempt: resource_attempt,
attempt_hierarchy: Hierarchy.full_hierarchy(resource_attempt)
}}
end
end
|
lib/oli/delivery/attempts/page_lifecycle/attempt_state.ex
| 0.812793
| 0.625181
|
attempt_state.ex
|
starcoder
|
defmodule WechatPay.JSAPI do
@moduledoc """
The **JSAPI** payment method.
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=7_1)
## Example
Set up a client:
```elixir
{:ok, client} = WechatPay.Client.new(
app_id: "the-app_id",
mch_id: "the-mch-id",
api_key: "the-api_key",
ssl: [
ca_cert: File.read!("fixture/certs/rootca.pem"),
cert: File.read!("fixture/certs/apiclient_cert.pem"),
key: File.read!("fixture/certs/apiclient_key.pem")
]
)
```
Place an order:
```elixir
WechatPay.JSAPI.place_order(client, %{
body: "Plan 1",
out_trade_no: "12345",
fee_type: "CNY",
total_fee: "600",
spbill_create_ip: Void.Utils.get_system_ip(),
notify_url: "http://example.com/",
trade_type: "JSAPI",
product_id: "12345"
})
```
"""
alias WechatPay.Utils.NonceStr
alias WechatPay.Utils.Signature
alias WechatPay.Client
alias WechatPay.API
@doc """
Place an order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_1)
"""
@spec place_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate place_order(client, attrs, options \\ []), to: API
@doc """
Query the order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_2)
"""
@spec query_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_order(client, attrs, options \\ []), to: API
@doc """
Close the order
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_3)
"""
@spec close_order(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate close_order(client, attrs, options \\ []), to: API
@doc """
Request to refund
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_4)
"""
@spec refund(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate refund(client, attrs, options \\ []), to: API
@doc """
Query the refund
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_5)
"""
@spec query_refund(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate query_refund(client, attrs, options \\ []), to: API
@doc """
Download bill
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_6)
"""
@spec download_bill(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate download_bill(client, attrs, options \\ []), to: API
@doc """
Download fund flow
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_18&index=7)
"""
@spec download_fund_flow(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate download_fund_flow(client, attrs, options \\ []), to: API
@doc """
Report
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_8)
"""
@spec report(Client.t(), map, keyword) ::
{:ok, map} | {:error, WechatPay.Error.t() | HTTPoison.Error.t()}
defdelegate report(client, attrs, options \\ []), to: API
@doc """
Query comments in a batch
[Official document](https://pay.weixin.qq.com/wiki/doc/api/native.php?chapter=9_17&index=12)
"""
@spec batch_query_comments(Client.t(), map, keyword) ::
{:ok, String.t()} | {:error, HTTPoison.Error.t()}
defdelegate batch_query_comments(client, attrs, options \\ []), to: API
@doc """
Generate pay request info, which is required for the JavaScript API
[Official document](https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=7_7&index=6)
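## Example
A sketch, assuming `prepay_id` came from a prior `place_order/3` call:
pay_request = WechatPay.JSAPI.generate_pay_request(client, prepay_id)
The returned map carries the "appId", "timeStamp", "nonceStr", "package",
"signType" and "paySign" keys expected by the JavaScript API.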
"""
@spec generate_pay_request(Client.t(), String.t()) :: map
def generate_pay_request(client, prepay_id) do
data = %{
"appId" => client.app_id,
"timeStamp" => Integer.to_string(:os.system_time(:seconds)),
"nonceStr" => NonceStr.generate(),
"package" => "prepay_id=#{prepay_id}",
"signType" => client.sign_type
}
data
|> Map.merge(%{"paySign" => Signature.sign(data, client.api_key, client.sign_type)})
end
end
|
lib/wechat_pay/payment_methods/jsapi.ex
| 0.790813
| 0.580798
|
jsapi.ex
|
starcoder
|
defmodule Plymio.Codi.Pattern.Struct do
@moduledoc ~S"""
The *struct* patterns create a range of transform functions for a module's struct.
See `Plymio.Codi` for an overview and documentation terms.
## Set and Unset Fields
These patterns use *the unset value*
(see `Plymio.Fontais.Guard.the_unset_value/0`) to decide whether a
field has a useful value. Fields can be unset by default by using
*the unset value* in the `Kernel.defstruct/1` e.g.
defstruct [x: Plymio.Fontais.Guard.the_unset_value()]
For example, a function built using the `struct_fetch` pattern will return
`{error, error}` if the target field's value is unset. Similary,
for `struct_get`, `{:ok, default}` will be returned if the field's
value is unset.
## Errors
The code generated by most *struct* patterns checks that the first
argument is an instance of the target module's *struct* and returns
`{:error, error}` if not.
## Test Environment
The doctests use a helper (`codi_helper_struct_compile_module/1`) to
compile the generated function(s) in a dynamically created module,
returning `{:ok, {forms, test_mod}}`.
The `forms` are the generated code and another helper
(`Harnais.Helper.harnais_helper_format_forms!/2`) is used to
"textify" the code using the Elixir code formatter.
The `test_mod` is the dynamically created module and is used to call the generated function(s).
The default `Kernel.defstruct/1` for the doctests is shown here. Note the `:z` field is unset.
defstruct [x: 42, y: nil, z: Plymio.Fontais.Guard.the_unset_value()]
The first example for each pattern just shows the generated code with
subsequent examples performing the actual test(s).
## Common Codi Pattern Opts (cpo) Keys
As well as the top level common options, these keys are valid in any *struct* pattern's *cpo*:
| Key | Aliases |
| :--- | :--- |
| `:fun_name` | *:name, :function_name* |
| `:fun_field` | *:field, ::function_field, :key, :fun_key, :function_key* |
| `:fun_args` | *:args, :function_args* |
| `:fun_arity` | *:arity, :function_arity* |
| `:fun_doc` | *:doc, :function_doc* |
| `:typespec_spec_args` | *:spec_args* |
| `:typespec_spec_result` |*:spec_result, :result, :fun_result, :function_result* |
| `:since` | |
All *struct* patterns support the generation of `@doc`, `@spec` and
`@since` module attribute forms.
All *struct* patterns generate a `@doc` by default. (It can be disabled in
the usual way by specifying `doc: false` in the *cpo*.)
> In the doctests below the `@doc` forms are mostly disabled (i.e. `doc: nil`) for clarity.
## Pattern: *struct_get*
This pattern is a convenience to generate both *struct_get1* and *struct_get2* patterns.
The *cpo* must be configured for a *struct_get2* and will be "reduced" to suit a *struct_get1*.
## Pattern: *struct_get1*
The *struct_get1* pattern creates a function to get the value of a
struct's field and, if the value is unset, return a fixed default.
## Examples
This example generate a `get/1` function for the `:z` field. Note
the `:field` in the *cpo* is a `Keyword` with the fixed default for
the `:z` field (`"z get default"`)
iex> {:ok, {forms, _test_mod}} = [
...> struct_get1: [args: :t, name: :get_z, field: [z: "z get default"], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(get_z(t))",
"",
"def(get_z(%__MODULE__{z: field_value})) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" {:ok, \"z get default\"}",
" end",
"end",
"",
"def(get_z(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iez> {:ok, {_forms, test_mod}} = [
...> struct_get1: [args: :t, name: :get_z, field: [z: "z get default"], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # z is unset by default so the get/1 default will be returned.
...> struct(test_mod) |> test_mod.get_z
{:ok, "z get default"}
## Pattern: *struct_get2*
The *struct_get2* pattern creates a function to get the value of a
struct's field and, if the value is unset, return the second argument.
## Examples
This example generates a `get/2` function for the `:z` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_get2: [args: [:t, :the_default_for_z], name: :get_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(get_z(t, the_default_for_z))",
"",
"def(get_z(%__MODULE__{z: field_value}, default)) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" {:ok, default}",
" end",
"end",
"",
"def(get_z(state, _default)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iez> {:ok, {_forms, test_mod}} = [
...> struct_get2: [args: [:t, :the_default_for_z], name: :get_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # z is unset by default so the get/2 2nd argument will be returned.
...> struct(test_mod) |> test_mod.get_z("an explicit default")
{:ok, "an explicit default"}
## Pattern: *struct_fetch*
The *struct_fetch* pattern creates a function to fetch the value of a struct's field.
## Examples
This example generate a `fetch/1` function for the `:x` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(fetch_x(t))",
"",
"def(fetch_x(%__MODULE__{x: field_value} = state)) do",
" field_value",
" |> case do",
" x when Plymio.Fontais.Guard.is_value_set(x) ->",
" {:ok, x}",
"",
" _ ->",
" Plymio.Codi.Error.new_error_result(m: \"struct field \#{:x} unset\", v: state)",
" end",
"end",
"",
"def(fetch_x(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iex> {:ok, {_forms, test_mod}} = [
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> {:ok, 42} = struct(test_mod) |> test_mod.fetch_x
...> # setting x to the unset value causes the fetch to fail
...> {:error, error} = struct(test_mod, x: Plymio.Fontais.Guard.the_unset_value) |> test_mod.fetch_x
...> true = error |> Exception.message |> String.starts_with?("struct field x unset")
...> # the argument must be an instance of the module's struct
...> {:error, error} = :not_a_struct |> test_mod.fetch_x
...> error |> Exception.message
"struct invalid, got: :not_a_struct"
## Pattern: *struct_put*
The *struct_put* pattern creates a function to put a value for a struct's field.
## Examples
This example generates a `put/2` function for the `:x` field.
iex> {:ok, {forms, _test_mod}} = [
...> struct_put: [args: [:t, :value], name: :put_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(put_x(t, value))",
"",
"def(put_x(%__MODULE__{x: _} = state, value)) do",
" {:ok, state |> struct!(x: value)}",
"end",
"",
"def(put_x(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
iex> {:ok, {_forms, test_mod}} = [
...> struct_put: [args: [:t, :value], name: :put_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set the :x field's value to 123
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.put_x(123)
...> # use `fetch_x/1` to check
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> # the argument must be an instance of the module's struct
...> {:error, error} = :not_a_struct |> test_mod.put_x(123)
...> error |> Exception.message
"struct invalid, got: :not_a_struct"
## Pattern: *struct_maybe_put*
The *struct_maybe_put* pattern creates a function to put a value for
a struct's field *only* if the field's current value is unset.
## Examples
This code shows a `maybe_put/2` function for the `:z` field:
iex> {:ok, {forms, _test_mod}} = [
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(maybe_put_z(t, value))",
"",
"def(",
" maybe_put_z(%__MODULE__{z: field_value} = state, _value)",
" when Plymio.Fontais.Guard.is_value_set(field_value)",
") do",
" {:ok, state}",
"end",
"",
"def(",
" maybe_put_z(%__MODULE__{z: field_value} = state, value)",
" when Plymio.Fontais.Guard.is_value_unset(field_value)",
") do",
" value",
" |> Plymio.Fontais.Guard.is_value_unset()",
" |> case do",
" true ->",
" {:ok, state}",
"",
" _ ->",
" {:ok, state |> struct!(z: value)}",
" end",
"end",
"",
"def(maybe_put_z(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
Here `maybe_put/2` and `fetch/1` functions are generated for two fields: the
`:x` field has `42` for its default, whereas the `:z` field is
unset.
iex> {:ok, {_forms, test_mod}} = [
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_maybe_put: [args: [:t, :value], name: :maybe_put_z, field: :z, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # by default the `:z` field is unset so `fetch_z/1` will fail
...> t1 = struct(test_mod)
...> {:error, _error} = t1 |> test_mod.fetch_z
...> # maybe_put/2 will update the field since it is unset
...> {:ok, %test_mod{} = t1} = t1 |> test_mod.maybe_put_z(123)
...> # use `fetch_z/1` to check
...> {:ok, 123} = t1 |> test_mod.fetch_z
...> # field `:x` has a default of `42` so maybe_put/2 will not update the field
...> {:ok, %test_mod{} = t1} = t1 |> test_mod.maybe_put_x("will be ignored")
...> # the `:x` field will still be `42`
...> t1 |> test_mod.fetch_x
{:ok, 42}
## Pattern: *struct_has?*
The *struct_has?* pattern creates a function that returns `true` if
the field's value is set, otherwise `false`.
> Note: if the first argument is not a valid struct, `false` is returned.
## Examples
This code shows `has?/1` function for the `:z` field:
iex> {:ok, {forms, _test_mod}} = [
...> struct_has?: [args: :t, name: :has_z?, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(has_z?(t))",
"",
"def(has_z?(%__MODULE__{z: field_value}) when Plymio.Fontais.Guard.is_value_set(field_value)) do",
" true",
"end",
"",
"def(has_z?(_state)) do",
" false",
"end"]
Here `has?/1` functions are generated for two fields: the
`:x` field has `42` for its default, whereas the `:z` field is
unset.
iex> {:ok, {_forms, test_mod}} = [
...> struct_has?: [args: :t, name: :has_x?, field: :x, doc: nil],
...> struct_has?: [args: :t, name: :has_z?, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> t1 = struct(test_mod)
...> false = t1 |> test_mod.has_z?
...> t1 |> test_mod.has_x?
true
## Pattern: *struct_update*
The *struct_update* pattern creates a function to call the module's `update/2` function.
The module's `update/2` function is a standard `Plymio` module state
function that works like a validated put. Apart from showing the generated code, it is not
documented or tested further here.
## Examples
This example generates an `update/2` function for the `:x` field.
iex> {:ok, {forms, _codi}} = [
...> struct_update: [args: [:t, :value], name: :update_x, field: :x, doc: nil],
...> ] |> CODI.produce_codi
...> forms |> harnais_helper_format_forms!
["def(update_x(t, value))",
"",
"def(update_x(%__MODULE__{x: _} = state, value)) do",
" {:ok, state |> update(x: value)}",
"end",
"",
"def(update_x(state, _value)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
## Pattern: *struct_set*
The *struct_set* pattern is a simple but versatile pattern for
creating a function that sets one or more fields in the *struct* to
specific values, defaulting to *the unset value*.
## Examples
This example generates a `set/2` function to set the `:x` field to the value `123`.
Note the `:field` in the *cpo* is a `Keyword`.
iex> {:ok, {forms, _codi}} = [
...> struct_set: [args: :t, name: :set_x, field: [x: 123], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(set_x(t))",
"",
"def(set_x(%__MODULE__{x: _} = state)) do",
" {:ok, state |> struct!(x: 123)}",
"end",
"",
"def(set_x(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
This example creates a `set/1` function that sets the `:x` and `:z` fields.
iex> {:ok, {_forms, test_mod}} = [
...> struct_set: [args: :t, name: :set_xz, doc: nil,
...> field: [x: 123, z: "z is no longer unset"]],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_y, field: :y, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set the :x and :z fields
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.set_xz
...> # use fetch to check
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> {:ok, nil} = t1 |> test_mod.fetch_y
...> t1 |> test_mod.fetch_z
{:ok, "z is no longer unset"}
This example creates a `set/1` function that sets the `:x` and `:z`
fields to specific values, but unsets the `:y` field. Note the
`:field` in the *cpo* is not a `Keyword`: The value for `:y` is not given and
defaults to the unset value.
iex> {:ok, {_forms, test_mod}} = [
...> struct_set: [args: :t, name: :set_xyz, doc: nil,
...> field: [{:x, 123}, :y, {:z, "z is no longer unset"}]],
...> struct_fetch: [args: :t, name: :fetch_x, field: :x, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_y, field: :y, doc: nil],
...> struct_fetch: [args: :t, name: :fetch_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # set all 3 fields
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.set_xyz
...> {:ok, 123} = t1 |> test_mod.fetch_x
...> {:ok, "z is no longer unset"} = t1 |> test_mod.fetch_z
...> # :y is now unset
...> {:error, error} = t1 |> test_mod.fetch_y
...> error |> Exception.message |> String.starts_with?("struct field y unset")
true
## Pattern: *struct_export*
The *struct_export* pattern creates
a function that exports one or more fields in the *struct* to
an *opts* (`Keyword`).
The export (*opts*) is sparse: only keys that are set are included.
Default values can be provided; if the value of the key in the *struct* is unset, the default is used.
## Examples
This example generates an `export_all/1` function for all three fields
in the test *struct*. Note: since no default export values were given
in the `:field` in the *cpo*, the defaults are the unset value and
a field will appear in the export only if its *struct* value is set.
iex> {:ok, {forms, _codi}} = [
...> struct_export: [args: :t, name: :export_all, field: [:x, :y, :z], doc: nil],
...> ] |> codi_helper_struct_compile_module
...> forms |> harnais_helper_format_forms!
["def(export_all(t))", "",
"def(export_all(%__MODULE__{x: field_value, y: field_value1, z: field_value2})) do",
" tuples =",
" [",
" x: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e,",
" y: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e,",
" z: :plymio_fontais_t3h1e4_u9n8s7e2t7_v1a8l3u8e",
" ] ++ [x: field_value, y: field_value1, z: field_value2]",
"",
" export =",
" tuples",
" |> Keyword.keys()",
" |> Stream.uniq()",
" |> Stream.map(fn k ->",
" tuples",
" |> Keyword.get_values(k)",
" |> Enum.filter(fn v -> v |> Plymio.Fontais.Guard.is_value_set() end)",
" |> case do",
" [] ->",
" {k, @plymio_fontais_the_unset_value}",
"",
" values ->",
" {k, values |> List.last()}",
" end",
" end)",
" |> Stream.filter(fn {_k, v} -> v |> Plymio.Fontais.Guard.is_value_set() end)",
" |> Keyword.new()",
"",
" {:ok, export}",
"end",
"",
"def(export_all(state)) do",
" Plymio.Codi.Error.new_error_result(m: \"struct invalid\", v: state)",
"end"]
This example creates an `export_xz/1` function that exports the `:x` and `:z`
fields. The `:z` field is unset by default so it will not appear in the export unless set explicitly.
iex> {:ok, {_forms, test_mod}} = [
...> struct_export: [args: :t, name: :export_xz, doc: nil,
...> field: [:x, :z]],
...> struct_put: [args: [:t, :value], name: :put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # the :z field is by default unset and will not be in the export
...> {:ok, [x: 42]} = struct(test_mod) |> test_mod.export_xz
...> # set z and export
...> {:ok, %test_mod{} = t1} = struct(test_mod) |> test_mod.put_z("z is now set")
...> t1 |> test_mod.export_xz
{:ok, [x: 42, z: "z is now set"]}
Another example, this time providing default values for each key in the export by
supplying a `Keyword` for `:field` in the *cpo*:
iex> {:ok, {_forms, test_mod}} = [
...> struct_export: [args: :t, name: :export_xz, doc: nil,
...> field: [x: :x_default, z: :z_default]],
...> struct_put: [args: [:t, :value], name: :put_z, field: :z, doc: nil],
...> ] |> codi_helper_struct_compile_module
...> # the :z field has a default export value
...> struct(test_mod) |> test_mod.export_xz
{:ok, [x: 42, z: :z_default]}
"""
alias Plymio.Codi, as: CODI
alias Plymio.Codi.Utility, as: CODIUTIL
alias Plymio.Codi.Utility.Depend, as: DEPEND
use Plymio.Fontais.Attribute
use Plymio.Codi.Attribute
import Plymio.Codi.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Option,
only: [
opts_take_canonical_keys: 2,
opts_create_aliases_dict: 1
]
import Plymio.Codi.Utility,
only: [
cpo_resolve_fun_name: 1,
cpo_resolve_guard_fun_args: 2,
cpo_resolve_guard_field_match: 1,
cpo_resolve_guard_field_match: 2,
cpo_resolve_typespec_spec_args: 1
]
import Plymio.Codi.CPO
@pattern_struct_get_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_fun_default,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_get_dict_alias @pattern_struct_get_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_get_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_get_dict_alias)
end
@pattern_struct_fetch_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_fetch_dict_alias @pattern_struct_fetch_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_fetch_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_fetch_dict_alias)
end
@pattern_struct_put_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_put_dict_alias @pattern_struct_put_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_put_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_put_dict_alias)
end
@pattern_struct_maybe_put_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_maybe_put_dict_alias @pattern_struct_maybe_put_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_maybe_put_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_maybe_put_dict_alias)
end
@pattern_struct_set_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_fun_default,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_set_dict_alias @pattern_struct_set_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_set_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_set_dict_alias)
end
@pattern_struct_has_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_fun_default,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_has_dict_alias @pattern_struct_has_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_has_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_has_dict_alias)
end
@pattern_struct_update_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_update_dict_alias @pattern_struct_update_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_update_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_update_dict_alias)
end
@pattern_struct_export_kvs_alias [
@plymio_codi_key_alias_pattern,
@plymio_codi_key_alias_status,
@plymio_codi_key_alias_form,
@plymio_codi_key_alias_since,
# limited aliases
{@plymio_codi_key_typespec_spec_args, [:spec_args]},
@plymio_codi_key_alias_typespec_spec_result,
@plymio_codi_key_alias_fun_name,
@plymio_codi_key_alias_fun_doc,
@plymio_codi_key_alias_fun_args,
@plymio_codi_key_alias_fun_arity,
@plymio_codi_key_alias_fun_key,
@plymio_codi_key_alias_forms_edit
]
@pattern_struct_export_dict_alias @pattern_struct_export_kvs_alias
|> opts_create_aliases_dict
@doc false
def cpo_pattern_struct_export_normalise(opts, dict \\ nil) do
opts |> opts_take_canonical_keys(dict || @pattern_struct_export_dict_alias)
end
@doc false
def express_pattern(codi, pattern, opts)
# the cpo arrives configured for the full, arity-2 (get2) form; an arity-1 (get1) cpo is derived from it below
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_get do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :default]),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
{:ok, get2_fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# typespec if any
{:ok, cpo} <-
cpo
|> cpo_maybe_transform(&cpo_has_typespec_spec_opts?/1, [
{:cpo_maybe_put_typespec_spec_result, :result},
{:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
]),
{:ok, cpo_get1} <- cpo |> cpo_put_pattern(@plymio_codi_pattern_struct_get1),
{:ok, cpo_get1} <- cpo_get1 |> cpo_put_fun_args(get2_fun_args |> Enum.take(1)),
{:ok, cpo_get1} <- cpo_get1 |> cpo_put_fun_arity(1),
{:ok, cpo_get1} <-
cpo_get1
|> cpo_maybe_transform(
&cpo_has_typespec_spec_opts?/1,
fn cpo ->
with {:ok, spec_args} <- cpo_get1 |> cpo_resolve_typespec_spec_args,
{:ok, cpo} <- cpo |> cpo_put_typespec_spec_args(spec_args |> Enum.take(1)),
true <- true do
{:ok, cpo}
else
{:error, %{__exception__: true}} = result -> result
end
end
),
{:ok, cpo_get2} <- cpo |> cpo_put_pattern(@plymio_codi_pattern_struct_get2),
{:ok, cpo_get2} <- cpo_get2 |> cpo_put_fun_args(get2_fun_args),
true <- true do
{:ok, {[cpo_get1, cpo_get2], state}}
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_get1 do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_get" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
fun_default = field_tuples |> hd |> elem(1)
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match)) do
unquote(field_value_var)
|> case do
x when Plymio.Fontais.Guard.is_value_set(x) ->
{:ok, x}
_ ->
{:ok, unquote(fun_default)}
end
end
def unquote(fun_name)(state) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :result},
{:cpo_maybe_put_typespec_spec_args, :struct}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_get2 do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_get_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :default]),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_get" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match), default) do
unquote(field_value_var)
|> case do
x when Plymio.Fontais.Guard.is_value_set(x) ->
{:ok, x}
_ ->
{:ok, default}
end
end
def unquote(fun_name)(state, _default) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :result},
{:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_fetch do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_fetch_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_fetch" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match) = state) do
unquote(field_value_var)
|> case do
x when Plymio.Fontais.Guard.is_value_set(x) ->
{:ok, x}
_ ->
Plymio.Codi.Error.new_error_result(
m: "struct field #{unquote(field_name)} unset",
v: state
)
end
end
def unquote(fun_name)(state) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :result},
{:cpo_maybe_put_typespec_spec_args, :struct}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_put do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_put_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
{:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_put" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match) = state, value) do
{:ok, state |> struct!([{unquote(field_name), value}])}
end
def unquote(fun_name)(state, _value) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :struct_result},
{:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_maybe_put do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_maybe_put_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match,
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name(
"struct_#{to_string(field_name)}_maybe_put"
|> String.to_atom()
),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match) = state, _value)
when Plymio.Fontais.Guard.is_value_set(unquote(field_value_var)) do
{:ok, state}
end
def unquote(fun_name)(unquote(field_match) = state, value)
when Plymio.Fontais.Guard.is_value_unset(unquote(field_value_var)) do
value
|> Plymio.Fontais.Guard.is_value_unset()
|> case do
true ->
{:ok, state}
_ ->
{:ok, state |> struct!([{unquote(field_name), value}])}
end
end
def unquote(fun_name)(state, _value) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :struct_result},
{:cpo_maybe_put_typespec_spec_args, [[:struct, :any]]}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_has? do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_has_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, {{field_name, field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match,
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_has?" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match))
when Plymio.Fontais.Guard.is_value_set(unquote(field_value_var)) do
true
end
def unquote(fun_name)(_state) do
false
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_put_typespec_spec_result, :boolean},
{:cpo_maybe_put_typespec_spec_args, :struct}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_update do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_update_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(2),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct, :value]),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(2),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_key_length(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
{:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_update" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match) = state, value) do
{:ok, state |> update([{unquote(field_name), value}])}
end
def unquote(fun_name)(state, _value) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :struct_result},
{:cpo_maybe_put_typespec_spec_args, [[:struct, :keyword]]}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_set do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_set_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args([:struct]),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
{:ok, ctrl} <- ctrl |> ctrl_put_fun_build_value(&CODIUTIL.field_build_anon_var/1),
{:ok, {{field_name, _field_value_var}, _field_vars, field_tuples, field_match}} <-
cpo
|> cpo_resolve_guard_field_match(ctrl),
{:ok, cpo} <-
cpo
|> cpo_maybe_put_fun_name("struct_#{to_string(field_name)}_set" |> String.to_atom()),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl),
# save resolved for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, cpo} <- cpo |> cpo_put_fun_args(fun_args),
true <- true do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match) = state) do
{:ok, state |> struct!(unquote(field_tuples))}
end
def unquote(fun_name)(state) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_maybe_put_typespec_spec_result, :struct_result},
{:cpo_maybe_put_typespec_spec_args, :struct}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(%CODI{} = state, pattern, cpo)
when pattern == @plymio_codi_pattern_struct_export do
with {:ok, cpo} <- cpo |> cpo_pattern_struct_export_normalise,
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_arity(1),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_args(:struct),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_doc(pattern),
{:ok, cpo} <- cpo |> cpo_maybe_put_fun_name("struct_export" |> String.to_atom()),
{:ok, ctrl} <- [] |> ctrl_put_fun_arity_value(1),
{:ok, {_, field_vars, field_tuples, field_match}} <-
cpo |> cpo_resolve_guard_field_match(ctrl),
# saves resolved fields for e.g. doc pattern
{:ok, cpo} <- cpo |> cpo_put_fun_key(field_tuples),
{:ok, fun_name} <- cpo |> cpo_resolve_fun_name,
{:ok, fun_args} <- cpo |> cpo_resolve_guard_fun_args(ctrl) do
pattern_form =
quote do
def unquote(fun_name)(unquote_splicing(fun_args))
def unquote(fun_name)(unquote(field_match)) do
# must maintain order
tuples = unquote(field_tuples) ++ unquote(field_vars)
export =
tuples
|> Keyword.keys()
|> Stream.uniq()
|> Stream.map(fn k ->
tuples
|> Keyword.get_values(k)
# drop unset values
|> Enum.filter(fn v -> v |> Plymio.Fontais.Guard.is_value_set() end)
|> case do
# no set values => mark to drop in next stage
[] ->
{k, @plymio_fontais_the_unset_value}
# want last value
values ->
{k, values |> List.last()}
end
end)
# make the export sparse i.e. no unset values
|> Stream.filter(fn {_k, v} -> v |> Plymio.Fontais.Guard.is_value_set() end)
# take last tuple for each key
|> Keyword.new()
{:ok, export}
end
def unquote(fun_name)(state) do
Plymio.Codi.Error.new_error_result(m: "struct invalid", v: state)
end
end
:ok = pattern_form |> Macro.validate()
depend_args = [
{&cpo_has_fun_doc?/1, &DEPEND.cpo_transform_doc_depend/1},
{&cpo_has_since?/1, &DEPEND.cpo_transform_since_depend/1},
{&cpo_has_typespec_spec_opts?/1,
[
&DEPEND.cpo_transform_typespec_spec_depend/1,
{:cpo_put_typespec_spec_result, :opts_result},
{:cpo_maybe_put_typespec_spec_args, :struct}
]}
]
with {:ok, cpo} <- cpo |> cpo_done_with_edited_form(pattern_form),
{:ok, {depend_cpos, %CODI{}}} <- state |> DEPEND.create_depend_cpos(cpo, depend_args) do
cpos = depend_cpos ++ [cpo]
{:ok, {cpos, state}}
else
{:error, %{__exception__: true}} = result -> result
end
else
{:error, %{__exception__: true}} = result -> result
end
end
def express_pattern(_codi, pattern, opts) do
new_error_result(m: "proxy pattern #{inspect(pattern)} invalid", v: opts)
end
end
# File: lib/codi/pattern/struct/struct.ex
defmodule EllipticCurve.Utils.Integer do
@moduledoc false
use Bitwise
def modulo(x, n) do
rem(x, n)
|> correctNegativeModulo(n)
end
defp correctNegativeModulo(r, n) when r < 0 do
r + n
end
defp correctNegativeModulo(r, _n) do
r
end
def ipow(base, p, acc \\ 1)
def ipow(base, p, acc) when p > 0 do
ipow(base, p - 1, base * acc)
end
def ipow(_base, _p, acc) do
acc
end
def between(minimum, maximum) when minimum < maximum do
range = maximum - minimum + 1
{bytesNeeded, mask} = calculateParameters(range)
# We apply the mask to reduce the amount of attempts we might need
# to make to get a number that is in range. This is somewhat like
# the commonly used 'modulo trick', but without the bias:
#
# "Let's say you invoke secure_rand(0, 60). When the other code
# generates a random integer, you might get 243. If you take
# (243 & 63)-- noting that the mask is 63-- you get 51. Since
# 51 is less than 60, we can return this without bias. If we
# got 255, then 255 & 63 is 63. 63 > 60, so we try again.
#
# The purpose of the mask is to reduce the number of random
# numbers discarded for the sake of ensuring an unbiased
# distribution. In the example above, 243 would discard, but
# (243 & 63) is in the range of 0 and 60."
#
# (Source: <NAME>)
randomNumber =
(:crypto.strong_rand_bytes(bytesNeeded)
|> :binary.bin_to_list()
|> bytesToNumber()) &&& mask
if randomNumber < range do
minimum + randomNumber
else
# Outside of the acceptable range, throw it away and try again.
# We don't try any modulo tricks, as this would introduce bias.
between(minimum, maximum)
end
end
defp bytesToNumber(randomBytes, randomNumber \\ 0, i \\ 0)
defp bytesToNumber([randomByte | otherRandomBytes], randomNumber, i) do
bytesToNumber(
otherRandomBytes,
randomNumber ||| randomByte <<< (8 * i),
i + 1
)
end
defp bytesToNumber([], randomNumber, _i) do
randomNumber
end
defp calculateParameters(range) do
calculateParameters(range, 1, 0)
end
defp calculateParameters(range, mask, bitsNeeded) when range > 0 do
calculateParameters(
range >>> 1,
mask <<< 1 ||| 1,
bitsNeeded + 1
)
end
defp calculateParameters(_range, mask, bitsNeeded) do
{div(bitsNeeded, 8) + 1, mask}
end
end
# File: lib/utils/integer.ex
defmodule Nostrum.Struct.Component do
@moduledoc """
Components are a framework for adding interactive elements to the messages your app or bot sends. They're accessible, customizable, and easy to use. There are several different types of components; this documentation outlines the basics of the framework and each component type.
> Components have been broken out into individual modules for easy distinction between them and to separate helper functions and individual type checking between component types - especially as more components are added by Discord.
Each of the components are provided all of the valid types through this module to avoid repetition and allow new components to be added quicker and easier.
## Action Row
An Action Row is a non-interactive container component for other types of components. It has a `type: 1` and a sub-array of `components` of other types.
- You can have up to 5 Action Rows per message
- An Action Row cannot contain another Action Row
- An Action Row containing buttons cannot also contain a select menu
## Buttons
Buttons are interactive components that render on messages. They have `type: 2` and can be clicked by users. Buttons in Nostrum are further separated into two types, detailed below. Only the [Interaction Button](#module-interaction-buttons-non-link-buttons) will fire a `Nostrum.Struct.Interaction` when pressed.

- Buttons must exist inside an Action Row
- An Action Row can contain up to 5 buttons
- An Action Row containing buttons cannot also contain a select menu
Check out the [Discord API Button Styles](https://discord.com/developers/docs/interactions/message-components#button-object-button-styles) for more information.
## Link Buttons
- Link buttons **do not** send an `interaction` to your app when clicked
- Link buttons **must** have a `url`, and **cannot** have a `custom_id`
- Link buttons will **always** use `style: 5`
#### Link `style: 5`

## Interaction Buttons ( Non-link Buttons )
> Discord calls these buttons "Non-link Buttons" because they do not contain a url. However, it would be more accurate to call them "Interaction Buttons", as they **do** fire an interaction when clicked, which is far more useful for your application's interactivity. As such they are referred to as "Interaction Buttons" throughout the rest of this module.
- Interaction buttons **must** have a `custom_id`, and **cannot** have a `url`
- Can have one of the below `:style` applied.
#### Primary `style: 1`

#### Secondary `style: 2`

#### Success `style: 3`

#### Danger `style: 4`

## 🐼 ~~Emoji Buttons~~
> Note: The Discord documentation and marketing material relating to buttons indicates that there are three kinds of buttons: 🐼 **Emoji Buttons**, **Link Buttons** & **Non-Link Buttons**, when in fact all buttons can contain an emoji. For this reason 🐼 **Emoji Buttons** are not included as a separate type; emojis are instead handled by the two included (superior) button types.

> The field requirements are already becoming convoluted, especially considering that everything so far is still a "Component". Using the sub-types and helper functions will ensure all of the rules are followed when creating components.
## Select Menu
Select menus are another interactive component that renders on messages. On desktop, clicking on a select menu opens a dropdown-style UI; on mobile, tapping a select menu opens up a half-sheet with the options.

Select menus support single-select and multi-select behavior, meaning you can prompt a user to choose just one item from a list, or multiple. When a user finishes making their choice by clicking out of the dropdown or closing the half-sheet, your app will receive an interaction.
- Select menus **must** be sent inside an Action Row
- An Action Row can contain **only one** select menu
- An Action Row containing a select menu **cannot** also contain buttons
## Text Input
Text inputs are interactive components that render on modals. They can be used to collect short-form or long-form text.
- Text inputs **must** be sent inside an Action Row
- An Action Row can contain **only one** text input
- An Action Row containing a text input **cannot** also contain buttons or a select menu
Two styles are supported:
- For short-form text, use `style: 1`
- For long-form text, use `style: 2`
Text inputs are only allowed to be sent as part of an Interaction response that opens a MODAL.
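## Example
A minimal sketch of an Action Row containing two buttons, built here as raw structs for illustration (the individual component modules may provide more convenient constructors; the fields follow the typedocs below):
```elixir
alias Nostrum.Struct.Component

action_row = %Component{
  type: 1,
  components: [
    # Interaction button: requires a custom_id and no url
    %Component{type: 2, style: 1, label: "Confirm", custom_id: "confirm_button"},
    # Link button: requires a url and always uses style: 5
    %Component{type: 2, style: 5, label: "Docs", url: "https://example.com"}
  ]
}
```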
"""
@moduledoc since: "0.5.0"
defmacro __using__(_opts) do
quote do
alias Nostrum.Struct.Component.{ActionRow, Button, Option, SelectMenu, TextInput}
alias Nostrum.Struct.{Component, Emoji}
alias Nostrum.Util
@before_compile Component
end
end
defmacro __before_compile__(_env) do
quote do
alias Nostrum.Struct.Component
defp new(opts \\ []) do
@defaults
|> to_component(opts)
end
defp update(%Component{} = component, opts \\ []) do
component
|> Map.from_struct()
|> to_component(opts)
end
defp to_component(component_map, opts) do
opts
|> Stream.reject(fn {_, v} -> v == nil end)
|> Enum.into(component_map)
|> Stream.filter(fn {k, _} -> k in allowed_keys() end)
|> Enum.into(%{})
|> flatten()
|> Component.to_struct()
end
defp allowed_keys, do: Map.keys(@defaults)
## Destroy all structs and ensure nested map
def flatten(map), do: :maps.map(&do_flatten/2, map)
defp do_flatten(_key, value), do: enm(value)
defp enm(list) when is_list(list), do: Enum.map(list, &enm/1)
defp enm(%{__struct__: _} = component),
do: :maps.map(&do_flatten/2, Map.from_struct(component))
defp enm(data), do: data
end
end
@doc """
Create a component from the given keyword list of options
> Note: While using this function directly, you are not guaranteed to produce a valid component and it is the responsibility of the user to ensure they are passing a valid combination of component attributes. eg. if you pass a button component both a `custom_id`, and a `url`, the component is invalid as only one of these fields is allowed.
"""
@callback new(opts :: [keyword()]) :: t()
@doc """
Updates a component with the parameters provided.
> Note: While using this function directly, you are not guaranteed to produce a valid component and it is the responsibility of the user to ensure they are passing a valid combination of component attributes. eg. if you pass a button component both a `custom_id`, and a `url`, the component is invalid as only one of these fields is allowed.
"""
@callback update(t(), opts :: [keyword()]) :: t()
alias Nostrum.Struct.Component.{ActionRow, Button, Option, SelectMenu, TextInput}
alias Nostrum.Struct.Emoji
alias Nostrum.Util
@derive Jason.Encoder
defstruct [
:type,
:custom_id,
:disabled,
:style,
:label,
:emoji,
:url,
:options,
:placeholder,
:min_values,
:max_values,
:min_length,
:max_length,
:required,
:value,
:components
]
@typedoc """
The currently valid component types.
"""
@type t :: ActionRow.t() | Button.t() | SelectMenu.t() | TextInput.t()
@typedoc """
The type of component.
Valid for All Types.
| | Component Types |
|------|-----|
| `1` | Action Row |
| `2` | Button |
| `3` | SelectMenu |
| `4` | TextInput |
Check out the [Discord API Message Component Types](https://discord.com/developers/docs/interactions/message-components#component-object-component-types) for more information.
"""
@type type :: integer()
@typedoc """
Used to identify the command when the interaction is sent to you from the user.
Valid for [Interaction Buttons](#module-interaction-button), [Select Menus](#module-select-menu), and [Text Input](#module-text-input).
"""
@type custom_id :: String.t() | nil
@typedoc """
Indicates if the component is disabled or not.
Valid for [Buttons](#module-buttons) & [Select Menus](#module-select-menu).
"""
@type disabled :: boolean() | nil
@typedoc """
Indicates the style.
Valid for [Interaction Buttons](#module-interaction-button) and [Text Input](#module-text-input).
"""
@type style :: integer() | nil
@typedoc """
A string that appears on the button, max 80 characters.
Valid for [Buttons](#module-buttons)
"""
@type label :: String.t() | nil
@typedoc """
A partial emoji to display on the object.
Valid for [Buttons](#module-buttons)
"""
@type emoji :: %{id: Emoji.id(), name: Emoji.name(), animated: Emoji.animated()} | nil
@typedoc """
A url for link buttons.
Valid for: [Buttons](#module-buttons)
"""
@type url :: String.t() | nil
@typedoc """
A list of options for select menus, max 25.
Valid for [Select Menus](#module-select-menu).
"""
@type options :: [Option.t()] | nil
@typedoc """
Placeholder text if nothing is selected, max 100 characters
Valid for [Select Menus](#module-select-menu) and [Text Input](#module-text-input).
"""
@type placeholder :: String.t() | nil
@typedoc """
The minimum number of permitted selections. Minimum value 0, max 25.
Valid for [Select Menus](#module-select-menu).
"""
@type min_values :: integer() | nil
@typedoc """
The maximum number of permitted selections. Minimum value 0, max 25.
Valid for [Select Menus](#module-select-menu).
"""
@type max_values :: integer() | nil
@typedoc """
The minimum length of the text input. Minimum value 0, max 4000.
Valid for [Text Input](#module-text-input).
"""
@typedoc since: "0.5.1"
@type min_length :: integer() | nil
@typedoc """
The maximum length of the text input. Minimum value 1, max 4000.
Valid for [Text Input](#module-text-input).
"""
@typedoc since: "0.5.1"
@type max_length :: integer() | nil
@typedoc """
Indicates if the text input is required.
Valid for [Text Input](#module-text-input).
"""
@typedoc since: "0.5.1"
@type required :: boolean() | nil
@typedoc """
A pre-filled value for the text input, max 4000 characters.
Valid for [Text Input](#module-text-input).
"""
@typedoc since: "0.5.1"
@type value :: String.t() | nil
@typedoc """
A list of components to place inside an action row.
Due to constraints of action rows, this can either be a list of up to five buttons, a single select menu, or a single text input.
Valid for [Action Row](#module-action-row).
"""
@type components :: [SelectMenu.t() | Button.t() | TextInput.t()] | nil
@spec to_struct(map()) :: struct
def to_struct(%{} = map) do
new =
map
|> Map.new(fn {k, v} -> {Util.maybe_to_atom(k), v} end)
|> Map.update(:options, nil, &Util.cast(&1, {:list, {:struct, Option}}))
|> Map.update(:components, nil, &Util.cast(&1, {:list, {:struct, __MODULE__}}))
%__MODULE__{}
|> Kernel.struct(new)
end
end
# File: lib/nostrum/struct/component.ex
defmodule Membrane.FLV do
@moduledoc """
Format utilities and internal struct definitions for Membrane FLV Plugin
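For example, the conversion helpers below map codec atoms to and from the numeric
indices used in the FLV container:

    Membrane.FLV.sound_format_to_index(:AAC)
    #=> 10

    Membrane.FLV.index_to_video_codec(7)
    #=> :H264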
"""
@typedoc """
List of audio codecs supported by the FLV format.
"""
@type audio_codec_t() ::
:pcm
| :adpcm
| :MP3
| :pcmle
| :nellymoser_16k_mono
| :nellymoser_8k_mono
| :nellymoser
| :g711_a_law
| :g711_mu_law
| :AAC
| :Speex
| :MP3_8k
| :device_specific
@typedoc """
List of video codecs supported by the FLV format.
"""
@type video_codec_t() ::
:sorenson_h263 | :screen_video | :vp6 | :vp6_with_alpha | :screen_video_2 | :H264
@spec index_to_sound_format(non_neg_integer()) :: audio_codec_t()
def index_to_sound_format(0), do: :pcm
def index_to_sound_format(1), do: :adpcm
def index_to_sound_format(2), do: :MP3
def index_to_sound_format(3), do: :pcmle
def index_to_sound_format(4), do: :nellymoser_16k_mono
def index_to_sound_format(5), do: :nellymoser_8k_mono
def index_to_sound_format(6), do: :nellymoser
def index_to_sound_format(7), do: :g711_a_law
def index_to_sound_format(8), do: :g711_mu_law
def index_to_sound_format(10), do: :AAC
def index_to_sound_format(11), do: :Speex
def index_to_sound_format(14), do: :MP3_8k
def index_to_sound_format(15), do: :device_specific
def index_to_sound_format(index), do: raise("Unknown audio index #{inspect(index)}")
@spec sound_format_to_index(audio_codec_t()) :: non_neg_integer()
def sound_format_to_index(:pcm), do: 0
def sound_format_to_index(:adpcm), do: 1
def sound_format_to_index(:MP3), do: 2
def sound_format_to_index(:pcmle), do: 3
def sound_format_to_index(:nellymoser_16k_mono), do: 4
def sound_format_to_index(:nellymoser_8k_mono), do: 5
def sound_format_to_index(:nellymoser), do: 6
def sound_format_to_index(:g711_a_law), do: 7
def sound_format_to_index(:g711_mu_law), do: 8
def sound_format_to_index(:AAC), do: 10
def sound_format_to_index(:Speex), do: 11
def sound_format_to_index(:MP3_8k), do: 14
def sound_format_to_index(:device_specific), do: 15
def sound_format_to_index(sound_format),
do: raise("Unknown sound format #{inspect(sound_format)}")
@spec index_to_video_codec(non_neg_integer()) :: video_codec_t()
def index_to_video_codec(2), do: :sorenson_h263
def index_to_video_codec(3), do: :screen_video
def index_to_video_codec(4), do: :vp6
def index_to_video_codec(5), do: :vp6_with_alpha
def index_to_video_codec(6), do: :screen_video_2
def index_to_video_codec(7), do: :H264
def index_to_video_codec(index), do: raise("Unknown video index #{inspect(index)}")
@spec video_codec_to_index(video_codec_t()) :: non_neg_integer()
def video_codec_to_index(:sorenson_h263), do: 2
def video_codec_to_index(:screen_video), do: 3
def video_codec_to_index(:vp6), do: 4
def video_codec_to_index(:vp6_with_alpha), do: 5
def video_codec_to_index(:screen_video_2), do: 6
def video_codec_to_index(:H264), do: 7
def video_codec_to_index(video_codec), do: raise("Unknown video codec #{inspect(video_codec)}")
defmodule Header do
@moduledoc false
@enforce_keys [:audio_present?, :video_present?]
defstruct @enforce_keys
@type t() :: %__MODULE__{
audio_present?: boolean(),
video_present?: boolean()
}
end
defmodule Packet do
@moduledoc false
@enforce_keys [
:pts,
:dts,
:stream_id,
:type,
:payload,
:codec
]
defstruct @enforce_keys ++
[
codec_params: %{},
frame_type: :interframe
]
@type t() :: %__MODULE__{
pts: timestamp_t(),
dts: timestamp_t() | nil,
stream_id: stream_id_t(),
type: type_t(),
payload: binary(),
codec: Membrane.FLV.audio_codec_t() | Membrane.FLV.video_codec_t(),
codec_params: video_params_t() | audio_params_t(),
frame_type: frame_type_t()
}
defguard is_audio(packet) when packet.type in [:audio, :audio_config]
defguard is_video(packet) when packet.type in [:video, :video_config]
@type type_t() :: :audio | :video | :audio_config | :video_config
@type stream_id_t() :: non_neg_integer()
@type timestamp_t() :: non_neg_integer()
@type audio_params_t() :: %{sound_rate: non_neg_integer(), sound_format: :mono | :stereo}
@type video_params_t() :: %{composition_time: non_neg_integer()}
@type frame_type_t() :: :keyframe | :interframe
end
end
# File: lib/membrane_flv_plugin/format.ex
defmodule NervesHub.Client do
@moduledoc """
A behaviour module for customizing if and when firmware updates get applied.
By default, NervesHub applies updates as soon as it knows about them from the
NervesHub server and gives no warning before rebooting. This behaviour lets
devices hook into the decision-making process and monitor the update's
progress.
# Example
```elixir
defmodule MyApp.NervesHubClient do
@behaviour NervesHub.Client
# May return:
# * `:apply` - apply the action immediately
# * `:ignore` - don't apply the action, don't ask again.
# * `{:reschedule, timeout_in_milliseconds}` - call this function again later.
@impl NervesHub.Client
def update_available(data) do
if SomeInternalAPI.is_now_a_good_time_to_update?(data) do
:apply
else
{:reschedule, 60_000}
end
end
end
```
To have NervesHub invoke it, add the following to your `config.exs`:
```elixir
config :nerves_hub, client: MyApp.NervesHubClient
```
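The behaviour also defines callbacks for progress and error reports. A minimal
sketch of those callbacks (the `Logger` calls are illustrative only, and
`require Logger` is assumed in the client module):
```elixir
@impl NervesHub.Client
def handle_fwup_message({:progress, percent}) do
  Logger.info("[NervesHub] update progress: " <> to_string(percent) <> "%")
end

def handle_fwup_message(_other), do: :ok

@impl NervesHub.Client
def handle_error(reason) do
  Logger.error("[NervesHub] update error: " <> inspect(reason))
end
```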
"""
require Logger
@typedoc "Update that comes over a socket."
@type update_data :: map()
@typedoc "Supported responses from `update_available/1`"
@type update_response :: :apply | :ignore | {:reschedule, pos_integer()}
@typedoc "Firmware update progress, completion or error report"
@type fwup_message ::
{:ok, non_neg_integer(), String.t()}
| {:warning, non_neg_integer(), String.t()}
| {:error, non_neg_integer(), String.t()}
| {:progress, 0..100}
@doc """
Called to find out what to do when a firmware update is available.
May return one of:
* `:apply` - Download and apply the update right now.
* `:ignore` - Don't download and apply this update.
* `{:reschedule, timeout}` - Defer making a decision. Call this function again in `timeout` milliseconds.
"""
@callback update_available(update_data()) :: update_response()
@doc """
Called on firmware update reports.
The return value of this function is not checked.
"""
@callback handle_fwup_message(fwup_message()) :: :ok
@doc """
Called when downloading a firmware update fails.
The return value of this function is not checked.
"""
@callback handle_error(any()) :: :ok
@doc """
This function is called internally by NervesHub to notify clients.
"""
@spec update_available(module(), update_data()) :: update_response()
def update_available(client, data) do
case apply_wrap(client, :update_available, [data]) do
:apply ->
:apply
:ignore ->
:ignore
{:reschedule, timeout} when timeout > 0 ->
{:reschedule, timeout}
wrong ->
Logger.error(
"[NervesHub] Client: #{client}.update_available/1 bad return value: #{inspect(wrong)} Applying update."
)
:apply
end
end
@doc """
This function is called internally by NervesHub to notify clients of fwup progress.
"""
@spec handle_fwup_message(module(), fwup_message()) :: :ok
def handle_fwup_message(client, data) do
_ = apply_wrap(client, :handle_fwup_message, [data])
:ok
end
@doc """
This function is called internally by NervesHub to notify clients of fwup errors.
"""
@spec handle_error(module(), any()) :: :ok
def handle_error(client, data) do
_ = apply_wrap(client, :handle_error, [data])
:ok
end
# Catches exceptions and exits
defp apply_wrap(client, function, args) do
apply(client, function, args)
catch
:error, reason -> {:error, reason}
:exit, reason -> {:exit, reason}
err -> err
end
end
# File: lib/nerves_hub/client.ex
defmodule Stripe.Coupon do
@moduledoc """
Work with Stripe coupon objects.
You can:
- Create a coupon
- Retrieve a coupon
- Update a coupon
- Delete a coupon
- List all coupons
Stripe API reference: https://stripe.com/docs/api#coupons
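For example, a minimal create/retrieve round trip (the field values are illustrative):

    {:ok, coupon} = Stripe.Coupon.create(%{id: "SUMMER25", duration: "once", percent_off: 25})
    {:ok, coupon} = Stripe.Coupon.retrieve("SUMMER25")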
"""
@type t :: %__MODULE__{}
defstruct [
:id, :object, :amount_off, :created, :currency, :duration, :duration_in_months,
:livemode, :max_redemptions, :metadata, :percent_off, :redeem_by, :times_redeemed
]
@plural_endpoint "coupons"
@schema %{
id: [:retrieve, :create],
object: [:retrieve],
amount_off: [:create, :retrieve],
created: [:retrieve],
currency: [:create, :retrieve],
duration: [:create, :retrieve],
duration_in_months: [:create, :retrieve],
livemode: [:retrieve],
max_redemptions: [:create, :retrieve],
metadata: [:create, :retrieve, :update],
percent_off: [:create, :retrieve],
redeem_by: [:create, :retrieve],
times_redeemed: [:create, :retrieve]
}
@nullable_keys [
:amount_off, :currency, :duration_in_months, :max_redemptions,
:metadata, :percent_off, :redeem_by, :times_redeemed
]
@doc """
Create a coupon.
"""
@spec create(map, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def create(changes, opts \\ []) do
Stripe.Request.create(@plural_endpoint, changes, @schema, opts)
end
@doc """
Retrieve a coupon.
"""
@spec retrieve(binary, Keyword.t) :: {:ok, t} | {:error, Stripe.api_error_struct}
def retrieve(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.retrieve(endpoint, opts)
end
@doc """
Update a coupon.
Takes the `id` and a map of changes.
"""
@spec update(binary, map, list) :: {:ok, t} | {:error, Stripe.api_error_struct}
def update(id, changes, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.update(endpoint, changes, @schema, @nullable_keys, opts)
end
@doc """
Delete a coupon.
"""
@spec delete(binary, list) :: :ok | {:error, Stripe.api_error_struct}
def delete(id, opts \\ []) do
endpoint = @plural_endpoint <> "/" <> id
Stripe.Request.delete(endpoint, %{}, opts)
end
@doc """
List all coupons.
"""
@spec list(map, Keyword.t) :: {:ok, Stripe.List.t} | {:error, Stripe.api_error_struct}
def list(params \\ %{}, opts \\ []) do
endpoint = @plural_endpoint
Stripe.Request.retrieve(params, endpoint, opts)
end
end
# File: lib/stripe/coupon.ex
defmodule WeatherflowTempest.Protocol do
@moduledoc """
The Weatherflow protocol has a lot of magic fields. This module parses and converts them to make the returned objects more intelligible.
Byte-efficient arrays are unpacked into named fields based on the protocol docs published by Weatherflow.
The following field standardizations are made to all event types:
* "type" fields are removed.
* "evt" fields containing the raw un-parsed event data are removed
* "uptime" fields containing seconds-of-uptime are converted to human readable uptime strings
* "timestamp" field containing the epoch time are converted to DateTime
All fields that are converted are named using atoms rather than strings, however fields that are not changed retain their string-based keys.
"""
use Bitwise
use Timex
@doc """
Accepts the result tuple of `Jason.decode/1`.
If the JSON could not be decoded, the error is bubbled up to be handled by the client.
Otherwise, parses the event types defined by the Weatherflow spec.
Returns a tuple containing an atom matching the event "type" field, followed by the parsed object as a map.
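For example, parsing a hypothetical, abbreviated `rapid_wind` packet:

    {:rapid_wind, parsed} =
      ~s({"type":"rapid_wind","ob":[1493322445,2.3,128]})
      |> Jason.decode()
      |> WeatherflowTempest.Protocol.handle_json()

    parsed.observation.windspeed_mps
    #=> 2.3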
"""
@spec handle_json({:ok, map()} | {:error, any()}) :: {atom(), map()} | {:error, any()}
def handle_json({:error, %Jason.DecodeError{}} = arg) do
arg
end
def handle_json({:ok, %{"type" => "evt_precip"} = obj}) do
event = Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj["evt"], 0)))
{:evt_precip, obj
|> Map.delete("type")
|> Map.put(:event, event)
|> Map.delete("evt") }
end
def handle_json({:ok, %{"type" => "evt_strike"} = obj}) do
event = Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj["evt"], 0)))
|> Map.put(:distance_km, Enum.at(obj["evt"], 1))
|> Map.put(:energy, Enum.at(obj["evt"], 2))
{:evt_strike, obj
|> Map.delete("type")
|> Map.put(:event, event)
|> Map.delete("evt") }
end
def handle_json({:ok, %{"type" => "rapid_wind"} = obj}) do
observation = Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj["ob"], 0)))
|> Map.put(:windspeed_mps, Enum.at(obj["ob"], 1))
|> Map.put(:wind_direction_degrees, Enum.at(obj["ob"], 2))
{:rapid_wind, obj
|> Map.delete("type")
|> Map.put(:observation, observation)
|> Map.delete("ob")}
end
def handle_json({:ok, %{"type" => "obs_air"} = obj}) do
observations = Enum.map(obj["obs"], &parse_air_observation/1)
|> Enum.sort_by(&(&1.timestamp), {:asc, DateTime})
{:obs_air, obj
|> Map.delete("type")
|> Map.put(:observations, observations)
|> Map.delete("obs")}
end
def handle_json({:ok, %{"type" => "obs_sky"} = obj}) do
observations = Enum.map(obj["obs"], &parse_sky_observation/1)
|> Enum.sort_by(&(&1.timestamp), {:asc, DateTime})
{:obs_sky, obj
|> Map.delete("type")
|> Map.put(:observations, observations)
|> Map.delete("obs")}
end
def handle_json({:ok, %{"type" => "obs_st"} = obj}) do
observations = Enum.map(obj["obs"], &parse_tempest_observation/1)
|> Enum.sort_by(&(&1.timestamp), {:asc, DateTime})
{:obs_st, obj
|> Map.delete("type")
|> Map.put(:observations, observations)
|> Map.delete("obs")}
end
def handle_json({:ok, %{"type" => "device_status"} = obj}) do
{:device_status, obj
|> Map.delete("type")
|> Map.put(:sensor_status, parse_device_sensor_status(obj["sensor_status"]))
|> Map.delete("sensor_status")
|> Map.put(:uptime, uptime_seconds_to_string(obj["uptime"]))
|> Map.delete("uptime")
|> Map.put(:timestamp, DateTime.from_unix!(obj["timestamp"]))
|> Map.delete("timestamp")}
end
def handle_json({:ok, %{"type" => "hub_status"} = obj}) do
{:hub_status, obj
|> Map.delete("type")
|> Map.put(:uptime, uptime_seconds_to_string(obj["uptime"]))
|> Map.delete("uptime")
|> Map.put(:timestamp, DateTime.from_unix!(obj["timestamp"]))
|> Map.delete("timestamp")
|> Map.put(:radio_stats, parse_hub_radio_stats(obj["radio_stats"]))
|> Map.delete("radio_stats")
|> Map.put(:reset_flags, parse_hub_reset_flags(obj["reset_flags"]))
|> Map.delete("reset_flags")}
end
defp precip_type(int_type) do
case int_type do
0 -> :none
1 -> :rain
2 -> :hail
end
end
defp parse_air_observation(obj) do
Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj, 0)))
|> Map.put(:station_pressure_MB, Enum.at(obj, 1))
|> Map.put(:air_temperature_C, Enum.at(obj, 2))
|> Map.put(:relative_humidity_percent, Enum.at(obj, 3))
|> Map.put(:lightningstrike_count, Enum.at(obj, 4))
|> Map.put(:lightningstrike_avg_distance_km, Enum.at(obj, 5))
|> Map.put(:battery, Enum.at(obj, 6))
|> Map.put(:reportinterval_minutes, Enum.at(obj, 7))
end
defp parse_sky_observation(obj) do
Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj, 0)))
|> Map.put(:illuminance_lux, Enum.at(obj, 1))
|> Map.put(:uv_index, Enum.at(obj, 2))
|> Map.put(:rain_accumulated_mm, Enum.at(obj, 3))
|> Map.put(:wind_lull_ms, Enum.at(obj, 4))
|> Map.put(:wind_avg_ms, Enum.at(obj, 5))
|> Map.put(:wind_gust_ms, Enum.at(obj, 6))
|> Map.put(:wind_direction_degrees, Enum.at(obj, 7))
|> Map.put(:battery_volts, Enum.at(obj, 8))
|> Map.put(:reportinterval_minutes, Enum.at(obj, 9))
|> Map.put(:solar_radiation_wm2, Enum.at(obj, 10))
|> Map.put(:local_day_rain_accumulation, Enum.at(obj, 11))
|> Map.put(:precipitation_type, precip_type(Enum.at(obj, 12)))
|> Map.put(:wind_sample_interval_seconds, Enum.at(obj, 13))
end
defp parse_tempest_observation(obj) do
Map.new()
|> Map.put(:timestamp, DateTime.from_unix!(Enum.at(obj, 0)))
|> Map.put(:wind_lull_ms, Enum.at(obj, 1))
|> Map.put(:wind_avg_ms, Enum.at(obj, 2))
|> Map.put(:wind_gust_ms, Enum.at(obj, 3))
|> Map.put(:wind_direction_degrees, Enum.at(obj, 4))
|> Map.put(:wind_sample_interval_seconds, Enum.at(obj, 5))
|> Map.put(:station_pressure_MB, Enum.at(obj, 6))
|> Map.put(:air_temperature_C, Enum.at(obj, 7))
|> Map.put(:relative_humidity_percent, Enum.at(obj, 8))
|> Map.put(:illuminance_lux, Enum.at(obj, 9))
|> Map.put(:uv_index, Enum.at(obj, 10))
|> Map.put(:solar_radiation_wm2, Enum.at(obj, 11))
|> Map.put(:precip_accumulated_mm, Enum.at(obj, 12))
|> Map.put(:precipitation_type, precip_type(Enum.at(obj, 13)))
|> Map.put(:lightningstrike_avg_distance_km, Enum.at(obj, 14))
|> Map.put(:lightningstrike_count, Enum.at(obj, 15))
|> Map.put(:battery_volts, Enum.at(obj, 16))
|> Map.put(:reportinterval_minutes, Enum.at(obj, 17))
end
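# sensor_status is a bitfield in which each set bit flags a failed or degraded
# sensor. A sketch of how a caller might check it (field names as defined below):
#
#     status = parse_device_sensor_status(0b000001000)
#     status.pressure_failed  #=> true
#     status.sensors_okay     #=> false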
defp parse_device_sensor_status(bf) do
%{
sensors_okay: (bf == 0),
lightning_failed: (bf &&& 0b000000001) == 0b000000001,
lightning_noise: (bf &&& 0b000000010) == 0b000000010,
lightning_disturber: (bf &&& 0b000000100) == 0b000000100,
pressure_failed: (bf &&& 0b000001000) == 0b000001000,
temperature_failed: (bf &&& 0b000010000) == 0b000010000,
rh_failed: (bf &&& 0b000100000) == 0b000100000,
wind_failed: (bf &&& 0b001000000) == 0b001000000,
precip_failed: (bf &&& 0b010000000) == 0b010000000,
light_uv_failed: (bf &&& 0b100000000) == 0b100000000
}
end
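# Humanizes a raw uptime in seconds via Timex, e.g. 90061 seconds becomes
# roughly "1 day, 1 hour, 1 minute, 1 second" (exact wording depends on the
# Timex :humanized formatter).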
defp uptime_seconds_to_string(up_seconds) do
uptime = Duration.from_seconds(up_seconds)
Timex.format_duration(uptime, :humanized)
end
defp parse_hub_radio_stats(stats_array) do
%{
version: Enum.at(stats_array, 0),
reboot_count: Enum.at(stats_array, 1),
i2c_bus_error_count: Enum.at(stats_array, 2),
radio_status: parse_radio_status(Enum.at(stats_array, 3)),
radio_network_id: Enum.at(stats_array, 4)
}
end
defp parse_radio_status(status) do
case status do
0 -> "Radio Off"
1 -> "Radio On"
3 -> "Radio Active"
end
end
defp parse_hub_reset_flags(flag_string) do
flag_string
|> String.split(",")
|> Enum.map(&reset_flag_to_string/1)
end
defp reset_flag_to_string(f) do
case f do
"BOR" -> "Brownout reset"
"PIN" -> "PIN reset"
"POR" -> "Power reset"
"SFT" -> "Software reset"
"WDG" -> "Watchdog reset"
"WWD" -> "Window watchdog reset"
"LPW" -> "Low-power reset"
end
end
end
lib/weatherflow_tempest/protocol.ex
defmodule WxDefines do
@moduledoc """
The definitions from the WxErlang header files, plus definitions of colours.
To use these definitions in your module, add "use WxDefines" at the top of the
module.
Note that the module also imports the Bitwise module so that the "|||" and "&&&"
binary operators can be used to combine flag definitions.
"""
defmacro __using__(_opts) do
quote do
import Bitwise
@wxDefaultSize {-1, -1}
@wxDefaultPosition {-1, -1}
@wxBLACK {0, 0, 0}
@wxWHITE {255, 255, 255}
@wxRED {255, 0, 0}
@wxLIME {0, 255, 0}
@wxBLUE {0, 0, 255}
@wxYELLOW {255, 255, 0}
@wxCYAN {0, 255, 255}
@wxAQUA {0, 255, 255}
@wxMAGENTA {255, 0, 255}
@wxFUCHSIA {255, 0, 255}
@wxSILVER {192, 192, 192}
@wxGRAY {128, 128, 128}
@wxMAROON {128, 0, 0}
@wxOLIVE {128, 128, 0}
@wxGREEN {0, 128, 0}
@wxPURPLE {128, 0, 128}
@wxTEAL {0, 128, 128}
@wxNAVY {0, 0, 128}
@wxMAROON {128, 0, 0}
@wxDARK_RED {139, 0, 0}
@wxBROWN {165, 42, 42}
@wxFIREBRICK {178, 34, 34}
@wxCRIMSON {220, 20, 60}
@wxRED {255, 0, 0}
@wxTOMATO {255, 99, 71}
@wxCORAL {255, 127, 80}
@wxINDIAN_RED {205, 92, 92}
@wxLIGHT_CORAL {240, 128, 128}
@wxDARK_SALMON {233, 150, 122}
@wxSALMON {250, 128, 114}
@wxLIGHT_SALMON {255, 160, 122}
@wxORANGE_RED {255, 69, 0}
@wxDARK_ORANGE {255, 140, 0}
@wxORANGE {255, 165, 0}
@wxGOLD {255, 215, 0}
@wxDARK_GOLDEN_ROD {184, 134, 11}
@wxGOLDEN_ROD {218, 165, 32}
@wxPALE_GOLDEN_ROD {238, 232, 170}
@wxDARK_KHAKI {189, 183, 107}
@wxKHAKI {240, 230, 140}
@wxOLIVE {128, 128, 0}
@wxYELLOW {255, 255, 0}
@wxYELLOW_GREEN {154, 205, 50}
@wxDARK_OLIVE_GREEN {85, 107, 47}
@wxOLIVE_DRAB {107, 142, 35}
@wxLAWN_GREEN {124, 252, 0}
@wxCHART_REUSE {127, 255, 0}
@wxGREEN_YELLOW {173, 255, 47}
@wxDARK_GREEN {0, 100, 0}
@wxGREEN {0, 128, 0}
@wxFOREST_GREEN {34, 139, 34}
@wxLIME {0, 255, 0}
@wxLIME_GREEN {50, 205, 50}
@wxLIGHT_GREEN {144, 238, 144}
@wxPALE_GREEN {152, 251, 152}
@wxDARK_SEA_GREEN {143, 188, 143}
@wxMEDIUM_SPRING_GREEN {0, 250, 154}
@wxSPRING_GREEN {0, 255, 127}
@wxSEA_GREEN {46, 139, 87}
@wxMEDIUM_AQUA_MARINE {102, 205, 170}
@wxMEDIUM_SEA_GREEN {60, 179, 113}
@wxLIGHT_SEA_GREEN {32, 178, 170}
@wxDARK_SLATE_GRAY {47, 79, 79}
@wxTEAL {0, 128, 128}
@wxDARK_CYAN {0, 139, 139}
@wxAQUA {0, 255, 255}
@wxCYAN {0, 255, 255}
@wxLIGHT_CYAN {224, 255, 255}
@wxDARK_TURQUOISE {0, 206, 209}
@wxTURQUOISE {64, 224, 208}
@wxMEDIUM_TURQUOISE {72, 209, 204}
@wxPALE_TURQUOISE {175, 238, 238}
@wxAQUA_MARINE {127, 255, 212}
@wxPOWDER_BLUE {176, 224, 230}
@wxCADET_BLUE {95, 158, 160}
@wxSTEEL_BLUE {70, 130, 180}
@wxCORN_FLOWER_BLUE {100, 149, 237}
@wxDEEP_SKY_BLUE {0, 191, 255}
@wxDODGER_BLUE {30, 144, 255}
@wxLIGHT_BLUE {173, 216, 230}
@wxSKY_BLUE {135, 206, 235}
@wxLIGHT_SKY_BLUE {135, 206, 250}
@wxMIDNIGHT_BLUE {25, 25, 112}
@wxNAVY {0, 0, 128}
@wxDARK_BLUE {0, 0, 139}
@wxMEDIUM_BLUE {0, 0, 205}
@wxBLUE {0, 0, 255}
@wxROYAL_BLUE {65, 105, 225}
@wxBLUE_VIOLET {138, 43, 226}
@wxINDIGO {75, 0, 130}
@wxDARK_SLATE_BLUE {72, 61, 139}
@wxSLATE_BLUE {106, 90, 205}
@wxMEDIUM_SLATE_BLUE {123, 104, 238}
@wxMEDIUM_PURPLE {147, 112, 219}
@wxDARK_MAGENTA {139, 0, 139}
@wxDARK_VIOLET {148, 0, 211}
@wxDARK_ORCHID {153, 50, 204}
@wxMEDIUM_ORCHID {186, 85, 211}
@wxPURPLE {128, 0, 128}
@wxTHISTLE {216, 191, 216}
@wxPLUM {221, 160, 221}
@wxVIOLET {238, 130, 238}
@wxMAGENTA {255, 0, 255}
@wxFUCHSIA {255, 0, 255}
@wxORCHID {218, 112, 214}
@wxMEDIUM_VIOLET_RED {199, 21, 133}
@wxPALE_VIOLET_RED {219, 112, 147}
@wxDEEP_PINK {255, 20, 147}
@wxHOT_PINK {255, 105, 180}
@wxLIGHT_PINK {255, 182, 193}
@wxPINK {255, 192, 203}
@wxANTIQUE_WHITE {250, 235, 215}
@wxBEIGE {245, 245, 220}
@wxBISQUE {255, 228, 196}
@wxBLANCHED_ALMOND {255, 235, 205}
@wxWHEAT {245, 222, 179}
@wxCORN_SILK {255, 248, 220}
@wxLEMON_CHIFFON {255, 250, 205}
@wxLIGHT_GOLDEN_ROD_YELLOW {250, 250, 210}
@wxLIGHT_YELLOW {255, 255, 224}
@wxSADDLE_BROWN {139, 69, 19}
@wxSIENNA {160, 82, 45}
@wxCHOCOLATE {210, 105, 30}
@wxPERU {205, 133, 63}
@wxSANDY_BROWN {244, 164, 96}
@wxBURLY_WOOD {222, 184, 135}
@wxTAN {210, 180, 140}
@wxROSY_BROWN {188, 143, 143}
@wxMOCCASIN {255, 228, 181}
@wxNAVAJO_WHITE {255, 222, 173}
@wxPEACH_PUFF {255, 218, 185}
@wxMISTY_ROSE {255, 228, 225}
@wxLAVENDER_BLUSH {255, 240, 245}
@wxLINEN {250, 240, 230}
@wxOLD_LACE {253, 245, 230}
@wxPAPAYA_WHIP {255, 239, 213}
@wxSEA_SHELL {255, 245, 238}
@wxMINT_CREAM {245, 255, 250}
@wxSLATE_GRAY {112, 128, 144}
@wxLIGHT_SLATE_GRAY {119, 136, 153}
@wxLIGHT_STEEL_BLUE {176, 196, 222}
@wxLAVENDER {230, 230, 250}
@wxFLORAL_WHITE {255, 250, 240}
@wxALICE_BLUE {240, 248, 255}
@wxGHOST_WHITE {248, 248, 255}
@wxHONEYDEW {240, 255, 240}
@wxIVORY {255, 255, 240}
@wxAZURE {240, 255, 255}
@wxSNOW {255, 250, 250}
@wxBLACK {0, 0, 0}
@wxDIM_GRAY {105, 105, 105}
@wxDIM_GREY {105, 105, 105}
@wxGRAY {128, 128, 128}
@wxGREY {128, 128, 128}
@wxDARK_GRAY {169, 169, 169}
@wxDARK_GREY {169, 169, 169}
@wxSILVER {192, 192, 192}
@wxLIGHT_GRAY {211, 211, 211}
@wxLIGHT_GREY {211, 211, 211}
@wxGAINSBORO {220, 220, 220}
@wxWHITE_SMOKE {245, 245, 245}
@wxWHITE {255, 255, 255}
@wxBK_RIGHT 128
@wxBK_LEFT 64
@wxBK_BOTTOM 32
@wxBK_TOP 16
@wxBK_DEFAULT 0
# wxBK_TOP | wxBK_BOTTOM | wxBK_LEFT | wxBK_RIGHT -> 240
@wxBK_ALIGN_MASK 240
@wxBU_EXACTFIT 1
@wxBU_AUTODRAW 4
@wxBU_NOAUTODRAW 0
@wxBU_BOTTOM 512
@wxBU_RIGHT 256
@wxBU_TOP 128
@wxBU_LEFT 64
# wxBU_LEFT | wxBU_TOP | wxBU_RIGHT | wxBU_BOTTOM -> 960
@wxBU_ALIGN_MASK 960
@wxCHK_ALLOW_3RD_STATE_FOR_USER 8192
@wxCHK_3STATE 4096
@wxCHK_2STATE 0
@wxCHOICE_WIDTH 200
@wxCHOICE_HEIGHT 150
@wxCLRP_DEFAULT_STYLE 0
@wxCLRP_USE_TEXTCTRL 2
@wxCLRP_SHOW_LABEL 8
@wxC2S_HTML_SYNTAX 4
@wxC2S_CSS_SYNTAX 2
@wxC2S_NAME 1
# ERROR @wxInvalidDateTime ?wxDefaultDateTime
@wxBUFFER_CLIENT_AREA 2
@wxBUFFER_VIRTUAL_AREA 1
@wxALWAYS_NATIVE_DOUBLE_BUFFER 0
@wxPRINT_QUALITY_DRAFT -4
@wxPRINT_QUALITY_LOW -3
@wxPRINT_QUALITY_MEDIUM -2
@wxPRINT_QUALITY_HIGH -1
@wxLANDSCAPE 2
@wxPORTRAIT 1
@wxSIZE_FORCE 16
@wxSIZE_NO_ADJUSTMENTS 8
@wxSIZE_ALLOW_MINUS_ONE 4
@wxSIZE_USE_EXISTING 0
@wxSIZE_AUTO_HEIGHT 2
@wxSIZE_AUTO_WIDTH 1
@wxSETUP 131_072
@wxMORE 65536
@wxHELP 32768
@wxRESET 16384
@wxBACKWARD 8192
@wxFORWARD 4096
# wxSIZE_AUTO_WIDTH | wxSIZE_AUTO_HEIGHT -> 3
@wxSIZE_AUTO 3
# 16#00000100 | 16#00000200 | 16#00000400 | 16#00000800 -> 3840
@wxICON_MASK 3840
@wxICON_ASTERISK 2048
@wxICON_STOP 512
@wxICON_INFORMATION 2048
@wxICON_QUESTION 1024
@wxICON_ERROR 512
@wxICON_WARNING 256
@wxICON_HAND 512
@wxICON_EXCLAMATION 256
@wxNO_DEFAULT 128
@wxYES_DEFAULT 0
@wxCANCEL 16
@wxNO 8
@wxOK 4
@wxYES 2
# wxYES | wxNO -> 10
@wxYES_NO 10
@wxLI_VERTICAL 8
@wxLI_HORIZONTAL 4
@wxBI_EXPAND 8192
@wxST_DOTS_END 4
@wxST_DOTS_MIDDLE 2
@wxST_NO_AUTORESIZE 1
@wxST_SIZEGRIP 16
@wxTC_OWNERDRAW 1024
@wxTC_MULTILINE 512
@wxTC_BOTTOM 128
@wxTC_RIGHT 64
@wxTC_LEFT 32
@wxTC_TOP 0
@wxTC_FIXEDWIDTH 32
@wxTC_RIGHTJUSTIFY 16
@wxSP_WRAP 8192
@wxSP_ARROW_KEYS 4096
@wxSP_VERTICAL 8
@wxSP_HORIZONTAL 4
@wxSB_VERTICAL 8
@wxSB_HORIZONTAL 4
@wxRB_USE_CHECKBOX 16
@wxRB_SINGLE 8
@wxRB_GROUP 4
@wxRA_USE_CHECKBOX 16
@wxRA_VERTICAL 8
@wxRA_HORIZONTAL 4
@wxRA_SPECIFY_ROWS 8
@wxRA_SPECIFY_COLS 4
@wxRA_TOPTOBOTTOM 2
@wxRA_LEFTTORIGHT 1
@wxCB_DROPDOWN 32
@wxCB_READONLY 16
@wxCB_SORT 8
@wxCB_SIMPLE 4
@wxLB_INT_HEIGHT 2048
@wxLB_HSCROLL 1_073_741_824
@wxLB_ALWAYS_SB 1024
@wxLB_NEEDED_SB 512
@wxLB_OWNERDRAW 256
@wxLB_EXTENDED 128
@wxLB_MULTIPLE 64
@wxLB_SINGLE 32
@wxLB_SORT 16
@wxFIXED_LENGTH 1024
@wxCOLOURED 2048
@wxMENU_TEAROFF 1
@wxMB_DOCKABLE 1
@wxFRAME_NO_WINDOW_MENU 256
@wxFRAME_DRAWER 32
@wxDIALOG_EX_CONTEXTHELP 128
@wxFRAME_EX_CONTEXTHELP 128
@wxWS_EX_CONTEXTHELP 128
@wxDIALOG_EX_METAL 64
@wxFRAME_EX_METAL 64
@wxWS_EX_PROCESS_UI_UPDATES 32
@wxWS_EX_PROCESS_IDLE 16
@wxWS_EX_THEMED_BACKGROUND 8
@wxWS_EX_TRANSIENT 4
@wxWS_EX_BLOCK_EVENTS 2
@wxWS_EX_VALIDATE_RECURSIVELY 1
@wxNO_FULL_REPAINT_ON_RESIZE 0
@wxFULL_REPAINT_ON_RESIZE 65536
@wxPOPUP_WINDOW 131_072
# ERROR @wxBACKINGSTORE ?wxRETAINED
# ERROR @wxRETAINED wxe_util:get_const(wxRETAINED
@wxWANTS_CHARS 262_144
@wxTAB_TRAVERSAL 524_288
@wxTRANSPARENT_WINDOW 1_048_576
@wxCLIP_SIBLINGS 536_870_912
@wxCLIP_CHILDREN 4_194_304
@wxALWAYS_SHOW_SB 8_388_608
@wxNO_BORDER 2_097_152
@wxSTATIC_BORDER 16_777_216
@wxSIMPLE_BORDER 33_554_432
@wxBORDER 33_554_432
@wxRAISED_BORDER 67_108_864
@wxSUNKEN_BORDER 134_217_728
@wxDOUBLE_BORDER 268_435_456
@wxCAPTION 536_870_912
@wxHSCROLL 1_073_741_824
@wxVSCROLL 2_147_483_648
# ERROR @wxDEFAULT_CONTROL_BORDER wxe_util:get_const(wxDEFAULT_CONTROL_BORDER
@wxCENTER_ON_SCREEN 2
@wxCENTRE_ON_SCREEN 2
@wxCENTER_FRAME 0
# ERROR @wxBYTE_ORDER wxe_util:get_const(wxBYTE_ORDER
@wxPDP_ENDIAN 3412
@wxLITTLE_ENDIAN 1234
@wxBIG_ENDIAN 4321
# ERROR @wxHAS_INT64 wxe_util:get_const(wxHAS_INT64
@wxNOT_FOUND -1
# ERROR @wxWINDOW_STYLE_MASK (?wxVSCROLL bor ?wxHSCROLL bor ?wxBORDER_MASK bor ?wxALWAYS_SHOW_SB bor ?wxCLIP_CHILDREN bor ?wxCLIP_SIBLINGS bor ?wxTRANSPARENT_WINDOW bor ?wxTAB_TRAVERSAL bor ?wxWANTS_CHARS bor ?wxRETAINED bor ?wxPOPUP_WINDOW bor ?wxFULL_REPAINT_ON_RESIZE
# wxCAPTION | wxSYSTEM_MENU | wxCLOSE_BOX -> 536877056
@wxDEFAULT_DIALOG_STYLE 536_877_056
# wxDEFAULT_DIALOG_STYLE | wxRESIZE_BORDER | wxOK | wxCANCEL | wxCENTRE -> 536877141
@wxCHOICEDLG_STYLE 536_877_141
@wxDIALOG_NO_PARENT 1
@wxID_FILTERLISTCTRL 7001
@wxID_TREECTRL 7000
# wxDEFAULT_DIALOG_STYLE | wxRESIZE_BORDER -> 536877120
@wxDD_DEFAULT_STYLE 536_877_120
@wxDD_NEW_DIR_BUTTON 0
@wxDD_DIR_MUST_EXIST 512
@wxDD_CHANGE_DIR 256
@wxFD_DEFAULT_STYLE 1
@wxDIRP_DEFAULT_STYLE 8
@wxDIRP_USE_TEXTCTRL 2
# wxFLP_OPEN | wxFLP_FILE_MUST_EXIST -> 9216
@wxFLP_DEFAULT_STYLE 9216
@wxFLP_USE_TEXTCTRL 2
@wxDIRP_CHANGE_DIR 16
@wxDIRP_DIR_MUST_EXIST 8
@wxFLP_CHANGE_DIR 16384
@wxFLP_FILE_MUST_EXIST 8192
@wxFLP_OVERWRITE_PROMPT 4096
@wxFLP_SAVE 2048
@wxFLP_OPEN 1024
@wxFNTP_MAXPOINT_SIZE 100
# wxFNTP_FONTDESC_AS_LABEL | wxFNTP_USEFONT_FOR_LABEL -> 24
@wxFNTP_DEFAULT_STYLE 24
@wxFNTP_USE_TEXTCTRL 2
@wxFNTP_USEFONT_FOR_LABEL 16
@wxFNTP_FONTDESC_AS_LABEL 8
@wxFRAME_SHAPED 16
@wxFRAME_FLOAT_ON_PARENT 8
@wxFRAME_TOOL_WINDOW 4
@wxFRAME_NO_TASKBAR 2
@wxGAUGE_EMULATE_INDETERMINATE_MODE 1
@wxGA_SMOOTH 32
@wxGA_VERTICAL 8
@wxGA_HORIZONTAL 4
# ERROR @wxGetDisplayDepth ?wxDisplayDepth
@wxLAYOUT_QUERY 256
@wxLAYOUT_MRU_LENGTH 16
@wxLAYOUT_LENGTH_X 0
@wxLAYOUT_LENGTH_Y 8
# wxSW_3DSASH | wxSW_3DBORDER -> 192
@wxSW_3D 192
@wxSW_3DBORDER 128
@wxSW_3DSASH 64
@wxSW_BORDER 32
@wxSW_NOBORDER 0
@wxSASH_DRAG_LEFT_DOWN 2
@wxSASH_DRAG_DRAGGING 1
@wxSASH_DRAG_NONE 0
@wxHASH_SIZE_DEFAULT 1000
@wxIMAGELIST_DRAW_FOCUSED 8
@wxIMAGELIST_DRAW_SELECTED 4
@wxIMAGELIST_DRAW_TRANSPARENT 2
@wxIMAGELIST_DRAW_NORMAL 1
@wxLAYOUT_DEFAULT_MARGIN 0
# wxLIST_HITTEST_ONITEMICON | wxLIST_HITTEST_ONITEMLABEL | wxLIST_HITTEST_ONITEMSTATEICON -> 672
@wxLIST_HITTEST_ONITEM 672
@wxLIST_HITTEST_TORIGHT 2048
@wxLIST_HITTEST_TOLEFT 1024
@wxLIST_HITTEST_ONITEMSTATEICON 512
@wxLIST_HITTEST_ONITEMRIGHT 256
@wxLIST_HITTEST_ONITEMLABEL 128
@wxLIST_HITTEST_ONITEMICON 32
@wxLIST_HITTEST_NOWHERE 4
@wxLIST_HITTEST_BELOW 2
@wxLIST_HITTEST_ABOVE 1
@wxLIST_STATE_SOURCE 256
@wxLIST_STATE_PICKED 128
@wxLIST_STATE_INUSE 64
@wxLIST_STATE_FILTERED 32
@wxLIST_STATE_DISABLED 16
@wxLIST_STATE_CUT 8
@wxLIST_STATE_SELECTED 4
@wxLIST_STATE_FOCUSED 2
@wxLIST_STATE_DROPHILITED 1
@wxLIST_STATE_DONTCARE 0
@wxLIST_MASK_FORMAT 64
@wxLIST_MASK_WIDTH 32
@wxLIST_SET_ITEM 16
@wxLIST_MASK_DATA 8
@wxLIST_MASK_IMAGE 4
@wxLIST_MASK_TEXT 2
@wxLIST_MASK_STATE 1
@wxLC_USER_TEXT 512
# wxLC_SORT_ASCENDING | wxLC_SORT_DESCENDING -> 49152
@wxLC_MASK_SORT 49152
# wxLC_ALIGN_TOP | wxLC_ALIGN_LEFT -> 192
@wxLC_MASK_ALIGN 192
# wxLC_ICON | wxLC_SMALL_ICON | wxLC_LIST | wxLC_REPORT -> 60
@wxLC_MASK_TYPE 60
@wxLC_SORT_DESCENDING 32768
@wxLC_SORT_ASCENDING 16384
@wxLC_SINGLE_SEL 8192
@wxLC_NO_SORT_HEADER 4096
@wxLC_NO_HEADER 2048
@wxLC_EDIT_LABELS 1024
@wxLC_VIRTUAL 512
@wxLC_AUTOARRANGE 256
@wxLC_ALIGN_LEFT 128
@wxLC_ALIGN_TOP 64
@wxLC_REPORT 32
@wxLC_LIST 16
@wxLC_SMALL_ICON 8
@wxLC_ICON 4
@wxLC_HRULES 2
@wxLC_VRULES 1
# ERROR @wxTRACE_OleCalls ?wxEmptyString
@wxTraceRefCount 8
@wxTraceResAlloc 4
@wxTraceMessages 2
@wxTraceMemAlloc 1
@wxNB_FLAT 2048
@wxNB_NOPAGETHEME 1024
@wxNB_MULTILINE 512
@wxNB_FIXEDWIDTH 256
@wxNB_RIGHT 128
@wxNB_LEFT 64
@wxNB_BOTTOM 32
@wxNB_TOP 16
@wxNB_DEFAULT 0
@wxPB_USE_TEXTCTRL 2
@wxID_PREVIEW_GOTO 8
@wxID_PREVIEW_LAST 7
@wxID_PREVIEW_FIRST 6
@wxID_PREVIEW_ZOOM 5
@wxID_PREVIEW_PRINT 4
@wxID_PREVIEW_PREVIOUS 3
@wxID_PREVIEW_NEXT 2
@wxID_PREVIEW_CLOSE 1
# wxPREVIEW_PREVIOUS | wxPREVIEW_NEXT | wxPREVIEW_ZOOM | wxPREVIEW_FIRST | wxPREVIEW_GOTO | wxPREVIEW_LAST -> 126
@wxPREVIEW_DEFAULT 126
@wxPREVIEW_GOTO 64
@wxPREVIEW_LAST 32
@wxPREVIEW_FIRST 16
@wxPREVIEW_ZOOM 8
@wxPREVIEW_NEXT 4
@wxPREVIEW_PREVIOUS 2
@wxPREVIEW_PRINT 1
@wxPD_CAN_SKIP 128
@wxPD_REMAINING_TIME 64
@wxPD_SMOOTH 32
@wxPD_ESTIMATED_TIME 16
@wxPD_ELAPSED_TIME 8
@wxPD_AUTO_HIDE 4
@wxPD_APP_MODAL 2
@wxPD_CAN_ABORT 1
# wxHSCROLL | wxVSCROLL -> 3221225472
@wxScrolledWindowStyle 3_221_225_472
@wxSB_RAISED 2
@wxSB_FLAT 1
@wxSB_NORMAL 0
@wxSTC_CMD_WORDRIGHTENDEXTEND 2442
@wxSTC_CMD_WORDRIGHTEND 2441
@wxSTC_CMD_WORDLEFTENDEXTEND 2440
@wxSTC_CMD_WORDLEFTEND 2439
@wxSTC_CMD_STUTTEREDPAGEDOWNEXTEND 2438
@wxSTC_CMD_STUTTEREDPAGEDOWN 2437
@wxSTC_CMD_STUTTEREDPAGEUPEXTEND 2436
@wxSTC_CMD_STUTTEREDPAGEUP 2435
@wxSTC_CMD_PAGEDOWNRECTEXTEND 2434
@wxSTC_CMD_PAGEUPRECTEXTEND 2433
@wxSTC_CMD_LINEENDRECTEXTEND 2432
@wxSTC_CMD_VCHOMERECTEXTEND 2431
@wxSTC_CMD_HOMERECTEXTEND 2430
@wxSTC_CMD_CHARRIGHTRECTEXTEND 2429
@wxSTC_CMD_CHARLEFTRECTEXTEND 2428
@wxSTC_CMD_LINEUPRECTEXTEND 2427
@wxSTC_CMD_LINEDOWNRECTEXTEND 2426
@wxSTC_CMD_PARAUPEXTEND 2416
@wxSTC_CMD_PARAUP 2415
@wxSTC_CMD_PARADOWNEXTEND 2414
@wxSTC_CMD_PARADOWN 2413
@wxSTC_CMD_DELLINERIGHT 2396
@wxSTC_CMD_DELLINELEFT 2395
@wxSTC_CMD_WORDPARTRIGHTEXTEND 2393
@wxSTC_CMD_WORDPARTRIGHT 2392
@wxSTC_CMD_WORDPARTLEFTEXTEND 2391
@wxSTC_CMD_WORDPARTLEFT 2390
@wxSTC_CMD_LINECOPY 2455
@wxSTC_CMD_VCHOMEWRAPEXTEND 2454
@wxSTC_CMD_VCHOMEWRAP 2453
@wxSTC_CMD_LINEENDWRAPEXTEND 2452
@wxSTC_CMD_LINEENDWRAP 2451
@wxSTC_CMD_HOMEWRAPEXTEND 2450
@wxSTC_CMD_HOMEWRAP 2349
@wxSTC_CMD_LINEENDDISPLAYEXTEND 2348
@wxSTC_CMD_LINEENDDISPLAY 2347
@wxSTC_CMD_HOMEDISPLAYEXTEND 2346
@wxSTC_CMD_HOMEDISPLAY 2345
@wxSTC_CMD_DELETEBACKNOTLINE 2344
@wxSTC_CMD_LINESCROLLUP 2343
@wxSTC_CMD_LINESCROLLDOWN 2342
@wxSTC_CMD_UPPERCASE 2341
@wxSTC_CMD_LOWERCASE 2340
@wxSTC_CMD_LINEDUPLICATE 2404
@wxSTC_CMD_LINETRANSPOSE 2339
@wxSTC_CMD_LINEDELETE 2338
@wxSTC_CMD_LINECUT 2337
@wxSTC_CMD_DELWORDRIGHT 2336
@wxSTC_CMD_DELWORDLEFT 2335
@wxSTC_CMD_ZOOMOUT 2334
@wxSTC_CMD_ZOOMIN 2333
@wxSTC_CMD_VCHOMEEXTEND 2332
@wxSTC_CMD_VCHOME 2331
@wxSTC_CMD_FORMFEED 2330
@wxSTC_CMD_NEWLINE 2329
@wxSTC_CMD_BACKTAB 2328
@wxSTC_CMD_TAB 2327
@wxSTC_CMD_DELETEBACK 2326
@wxSTC_CMD_CANCEL 2325
@wxSTC_CMD_EDITTOGGLEOVERTYPE 2324
@wxSTC_CMD_PAGEDOWNEXTEND 2323
@wxSTC_CMD_PAGEDOWN 2322
@wxSTC_CMD_PAGEUPEXTEND 2321
@wxSTC_CMD_PAGEUP 2320
@wxSTC_CMD_DOCUMENTENDEXTEND 2319
@wxSTC_CMD_DOCUMENTEND 2318
@wxSTC_CMD_DOCUMENTSTARTEXTEND 2317
@wxSTC_CMD_DOCUMENTSTART 2316
@wxSTC_CMD_LINEENDEXTEND 2315
@wxSTC_CMD_LINEEND 2314
@wxSTC_CMD_HOMEEXTEND 2313
@wxSTC_CMD_HOME 2312
@wxSTC_CMD_WORDRIGHTEXTEND 2311
@wxSTC_CMD_WORDRIGHT 2310
@wxSTC_CMD_WORDLEFTEXTEND 2309
@wxSTC_CMD_WORDLEFT 2308
@wxSTC_CMD_CHARRIGHTEXTEND 2307
@wxSTC_CMD_CHARRIGHT 2306
@wxSTC_CMD_CHARLEFTEXTEND 2305
@wxSTC_CMD_CHARLEFT 2304
@wxSTC_CMD_LINEUPEXTEND 2303
@wxSTC_CMD_LINEUP 2302
@wxSTC_CMD_LINEDOWNEXTEND 2301
@wxSTC_CMD_LINEDOWN 2300
@wxSTC_CMD_CLEAR 2180
@wxSTC_CMD_PASTE 2179
@wxSTC_CMD_COPY 2178
@wxSTC_CMD_CUT 2177
@wxSTC_CMD_UNDO 2176
@wxSTC_CMD_SELECTALL 2013
@wxSTC_CMD_REDO 2011
@wxSTC_SPICE_COMMENTLINE 8
@wxSTC_SPICE_VALUE 7
@wxSTC_SPICE_DELIMITER 6
@wxSTC_SPICE_NUMBER 5
@wxSTC_SPICE_KEYWORD3 4
@wxSTC_SPICE_KEYWORD2 3
@wxSTC_SPICE_KEYWORD 2
@wxSTC_SPICE_IDENTIFIER 1
@wxSTC_SPICE_DEFAULT 0
@wxSTC_OPAL_DEFAULT 32
@wxSTC_OPAL_BOOL_CONST 8
@wxSTC_OPAL_PAR 7
@wxSTC_OPAL_STRING 6
@wxSTC_OPAL_SORT 5
@wxSTC_OPAL_KEYWORD 4
@wxSTC_OPAL_INTEGER 3
@wxSTC_OPAL_COMMENT_LINE 2
@wxSTC_OPAL_COMMENT_BLOCK 1
@wxSTC_OPAL_SPACE 0
@wxSTC_INNO_IDENTIFIER 12
@wxSTC_INNO_STRING_SINGLE 11
@wxSTC_INNO_STRING_DOUBLE 10
@wxSTC_INNO_KEYWORD_USER 9
@wxSTC_INNO_KEYWORD_PASCAL 8
@wxSTC_INNO_COMMENT_PASCAL 7
@wxSTC_INNO_PREPROC_INLINE 6
@wxSTC_INNO_PREPROC 5
@wxSTC_INNO_SECTION 4
@wxSTC_INNO_PARAMETER 3
@wxSTC_INNO_KEYWORD 2
@wxSTC_INNO_COMMENT 1
@wxSTC_INNO_DEFAULT 0
@wxSTC_CSOUND_STRINGEOL 15
@wxSTC_CSOUND_GLOBAL_VAR 14
@wxSTC_CSOUND_IRATE_VAR 13
@wxSTC_CSOUND_KRATE_VAR 12
@wxSTC_CSOUND_ARATE_VAR 11
@wxSTC_CSOUND_PARAM 10
@wxSTC_CSOUND_COMMENTBLOCK 9
@wxSTC_CSOUND_USERKEYWORD 8
@wxSTC_CSOUND_HEADERSTMT 7
@wxSTC_CSOUND_OPCODE 6
@wxSTC_CSOUND_IDENTIFIER 5
@wxSTC_CSOUND_INSTR 4
@wxSTC_CSOUND_OPERATOR 3
@wxSTC_CSOUND_NUMBER 2
@wxSTC_CSOUND_COMMENT 1
@wxSTC_CSOUND_DEFAULT 0
@wxSTC_FS_BINNUMBER 23
@wxSTC_FS_HEXNUMBER 22
@wxSTC_FS_ERROR 21
@wxSTC_FS_LABEL 20
@wxSTC_FS_ASM 19
@wxSTC_FS_CONSTANT 18
@wxSTC_FS_STRINGEOL 17
@wxSTC_FS_DATE 16
@wxSTC_FS_IDENTIFIER 15
@wxSTC_FS_OPERATOR 14
@wxSTC_FS_PREPROCESSOR 13
@wxSTC_FS_STRING 12
@wxSTC_FS_NUMBER 11
@wxSTC_FS_KEYWORD4 10
@wxSTC_FS_KEYWORD3 9
@wxSTC_FS_KEYWORD2 8
@wxSTC_FS_KEYWORD 7
@wxSTC_FS_COMMENTDOCKEYWORDERROR 6
@wxSTC_FS_COMMENTDOCKEYWORD 5
@wxSTC_FS_COMMENTLINEDOC 4
@wxSTC_FS_COMMENTDOC 3
@wxSTC_FS_COMMENTLINE 2
@wxSTC_FS_COMMENT 1
@wxSTC_FS_DEFAULT 0
@wxSTC_ST_SPEC_SEL 16
@wxSTC_ST_CHARACTER 15
@wxSTC_ST_ASSIGN 14
@wxSTC_ST_KWSEND 13
@wxSTC_ST_SPECIAL 12
@wxSTC_ST_RETURN 11
@wxSTC_ST_GLOBAL 10
@wxSTC_ST_NIL 9
@wxSTC_ST_SUPER 8
@wxSTC_ST_SELF 7
@wxSTC_ST_BOOL 6
@wxSTC_ST_BINARY 5
@wxSTC_ST_SYMBOL 4
@wxSTC_ST_COMMENT 3
@wxSTC_ST_NUMBER 2
@wxSTC_ST_STRING 1
@wxSTC_ST_DEFAULT 0
@wxSTC_SQL_QUOTEDIDENTIFIER 23
@wxSTC_SQL_USER4 22
@wxSTC_SQL_USER3 21
@wxSTC_SQL_USER2 20
@wxSTC_SQL_USER1 19
@wxSTC_SQL_COMMENTDOCKEYWORDERROR 18
@wxSTC_SQL_COMMENTDOCKEYWORD 17
@wxSTC_SQL_WORD2 16
@wxSTC_SQL_COMMENTLINEDOC 15
@wxSTC_SQL_SQLPLUS_COMMENT 13
@wxSTC_SQL_IDENTIFIER 11
@wxSTC_SQL_OPERATOR 10
@wxSTC_SQL_SQLPLUS_PROMPT 9
@wxSTC_SQL_SQLPLUS 8
@wxSTC_SQL_CHARACTER 7
@wxSTC_SQL_STRING 6
@wxSTC_SQL_WORD 5
@wxSTC_SQL_NUMBER 4
@wxSTC_SQL_COMMENTDOC 3
@wxSTC_SQL_COMMENTLINE 2
@wxSTC_SQL_COMMENT 1
@wxSTC_SQL_DEFAULT 0
@wxSTC_REBOL_WORD8 28
@wxSTC_REBOL_WORD7 27
@wxSTC_REBOL_WORD6 26
@wxSTC_REBOL_WORD5 25
@wxSTC_REBOL_WORD4 24
@wxSTC_REBOL_WORD3 23
@wxSTC_REBOL_WORD2 22
@wxSTC_REBOL_WORD 21
@wxSTC_REBOL_IDENTIFIER 20
@wxSTC_REBOL_TIME 19
@wxSTC_REBOL_DATE 18
@wxSTC_REBOL_URL 17
@wxSTC_REBOL_EMAIL 16
@wxSTC_REBOL_FILE 15
@wxSTC_REBOL_TAG 14
@wxSTC_REBOL_ISSUE 13
@wxSTC_REBOL_MONEY 12
@wxSTC_REBOL_BINARY 11
@wxSTC_REBOL_TUPLE 10
@wxSTC_REBOL_PAIR 9
@wxSTC_REBOL_NUMBER 8
@wxSTC_REBOL_BRACEDSTRING 7
@wxSTC_REBOL_QUOTEDSTRING 6
@wxSTC_REBOL_CHARACTER 5
@wxSTC_REBOL_OPERATOR 4
@wxSTC_REBOL_PREFACE 3
@wxSTC_REBOL_COMMENTBLOCK 2
@wxSTC_REBOL_COMMENTLINE 1
@wxSTC_REBOL_DEFAULT 0
@wxSTC_T3_USER3 19
@wxSTC_T3_USER2 18
@wxSTC_T3_USER1 17
@wxSTC_T3_HTML_STRING 16
@wxSTC_T3_HTML_DEFAULT 15
@wxSTC_T3_HTML_TAG 14
@wxSTC_T3_MSG_PARAM 13
@wxSTC_T3_LIB_DIRECTIVE 12
@wxSTC_T3_X_STRING 11
@wxSTC_T3_D_STRING 10
@wxSTC_T3_S_STRING 9
@wxSTC_T3_IDENTIFIER 8
@wxSTC_T3_NUMBER 7
@wxSTC_T3_KEYWORD 6
@wxSTC_T3_OPERATOR 5
@wxSTC_T3_LINE_COMMENT 4
@wxSTC_T3_BLOCK_COMMENT 3
@wxSTC_T3_PREPROCESSOR 2
@wxSTC_T3_X_DEFAULT 1
@wxSTC_T3_DEFAULT 0
@wxSTC_HA_COMMENTBLOCK3 16
@wxSTC_HA_COMMENTBLOCK2 15
@wxSTC_HA_COMMENTBLOCK 14
@wxSTC_HA_COMMENTLINE 13
@wxSTC_HA_INSTANCE 12
@wxSTC_HA_OPERATOR 11
@wxSTC_HA_IMPORT 10
@wxSTC_HA_DATA 9
@wxSTC_HA_CAPITAL 8
@wxSTC_HA_MODULE 7
@wxSTC_HA_CLASS 6
@wxSTC_HA_CHARACTER 5
@wxSTC_HA_STRING 4
@wxSTC_HA_NUMBER 3
@wxSTC_HA_KEYWORD 2
@wxSTC_HA_IDENTIFIER 1
@wxSTC_HA_DEFAULT 0
@wxSTC_CAML_COMMENT3 15
@wxSTC_CAML_COMMENT2 14
@wxSTC_CAML_COMMENT1 13
@wxSTC_CAML_COMMENT 12
@wxSTC_CAML_STRING 11
@wxSTC_CAML_CHAR 9
@wxSTC_CAML_NUMBER 8
@wxSTC_CAML_OPERATOR 7
@wxSTC_CAML_LINENUM 6
@wxSTC_CAML_KEYWORD3 5
@wxSTC_CAML_KEYWORD2 4
@wxSTC_CAML_KEYWORD 3
@wxSTC_CAML_TAGNAME 2
@wxSTC_CAML_IDENTIFIER 1
@wxSTC_CAML_DEFAULT 0
@wxSTC_VHDL_USERWORD 14
@wxSTC_VHDL_STDTYPE 13
@wxSTC_VHDL_STDPACKAGE 12
@wxSTC_VHDL_STDFUNCTION 11
@wxSTC_VHDL_ATTRIBUTE 10
@wxSTC_VHDL_STDOPERATOR 9
@wxSTC_VHDL_KEYWORD 8
@wxSTC_VHDL_STRINGEOL 7
@wxSTC_VHDL_IDENTIFIER 6
@wxSTC_VHDL_OPERATOR 5
@wxSTC_VHDL_STRING 4
@wxSTC_VHDL_NUMBER 3
@wxSTC_VHDL_COMMENTLINEBANG 2
@wxSTC_VHDL_COMMENT 1
@wxSTC_VHDL_DEFAULT 0
@wxSTC_ASN1_OPERATOR 10
@wxSTC_ASN1_TYPE 9
@wxSTC_ASN1_DESCRIPTOR 8
@wxSTC_ASN1_ATTRIBUTE 7
@wxSTC_ASN1_KEYWORD 6
@wxSTC_ASN1_SCALAR 5
@wxSTC_ASN1_OID 4
@wxSTC_ASN1_STRING 3
@wxSTC_ASN1_IDENTIFIER 2
@wxSTC_ASN1_COMMENT 1
@wxSTC_ASN1_DEFAULT 0
@wxSTC_SH_HERE_Q 13
@wxSTC_SH_HERE_DELIM 12
@wxSTC_SH_BACKTICKS 11
@wxSTC_SH_PARAM 10
@wxSTC_SH_SCALAR 9
@wxSTC_SH_IDENTIFIER 8
@wxSTC_SH_OPERATOR 7
@wxSTC_SH_CHARACTER 6
@wxSTC_SH_STRING 5
@wxSTC_SH_WORD 4
@wxSTC_SH_NUMBER 3
@wxSTC_SH_COMMENTLINE 2
@wxSTC_SH_ERROR 1
@wxSTC_SH_DEFAULT 0
@wxSTC_APDL_FUNCTION 12
@wxSTC_APDL_ARGUMENT 11
@wxSTC_APDL_STARCOMMAND 10
@wxSTC_APDL_SLASHCOMMAND 9
@wxSTC_APDL_COMMAND 8
@wxSTC_APDL_PROCESSOR 7
@wxSTC_APDL_WORD 6
@wxSTC_APDL_OPERATOR 5
@wxSTC_APDL_STRING 4
@wxSTC_APDL_NUMBER 3
@wxSTC_APDL_COMMENTBLOCK 2
@wxSTC_APDL_COMMENT 1
@wxSTC_APDL_DEFAULT 0
@wxSTC_AU3_UDF 15
@wxSTC_AU3_COMOBJ 14
@wxSTC_AU3_EXPAND 13
@wxSTC_AU3_SPECIAL 12
@wxSTC_AU3_PREPROCESSOR 11
@wxSTC_AU3_SENT 10
@wxSTC_AU3_VARIABLE 9
@wxSTC_AU3_OPERATOR 8
@wxSTC_AU3_STRING 7
@wxSTC_AU3_MACRO 6
@wxSTC_AU3_KEYWORD 5
@wxSTC_AU3_FUNCTION 4
@wxSTC_AU3_NUMBER 3
@wxSTC_AU3_COMMENTBLOCK 2
@wxSTC_AU3_COMMENT 1
@wxSTC_AU3_DEFAULT 0
@wxSTC_SN_USER 19
@wxSTC_SN_SIGNAL 14
@wxSTC_SN_REGEXTAG 13
@wxSTC_SN_STRINGEOL 12
@wxSTC_SN_IDENTIFIER 11
@wxSTC_SN_OPERATOR 10
@wxSTC_SN_PREPROCESSOR 9
@wxSTC_SN_WORD3 8
@wxSTC_SN_WORD2 7
@wxSTC_SN_STRING 6
@wxSTC_SN_WORD 5
@wxSTC_SN_NUMBER 4
@wxSTC_SN_COMMENTLINEBANG 3
@wxSTC_SN_COMMENTLINE 2
@wxSTC_SN_CODE 1
@wxSTC_SN_DEFAULT 0
@wxSTC_GC_OPERATOR 9
@wxSTC_GC_STRING 8
@wxSTC_GC_COMMAND 7
@wxSTC_GC_CONTROL 6
@wxSTC_GC_ATTRIBUTE 5
@wxSTC_GC_EVENT 4
@wxSTC_GC_GLOBAL 3
@wxSTC_GC_COMMENTBLOCK 2
@wxSTC_GC_COMMENTLINE 1
@wxSTC_GC_DEFAULT 0
@wxSTC_KIX_IDENTIFIER 31
@wxSTC_KIX_OPERATOR 9
@wxSTC_KIX_FUNCTIONS 8
@wxSTC_KIX_KEYWORD 7
@wxSTC_KIX_MACRO 6
@wxSTC_KIX_VAR 5
@wxSTC_KIX_NUMBER 4
@wxSTC_KIX_STRING2 3
@wxSTC_KIX_STRING1 2
@wxSTC_KIX_COMMENT 1
@wxSTC_KIX_DEFAULT 0
@wxSTC_V_USER 19
@wxSTC_V_STRINGEOL 12
@wxSTC_V_IDENTIFIER 11
@wxSTC_V_OPERATOR 10
@wxSTC_V_PREPROCESSOR 9
@wxSTC_V_WORD3 8
@wxSTC_V_WORD2 7
@wxSTC_V_STRING 6
@wxSTC_V_WORD 5
@wxSTC_V_NUMBER 4
@wxSTC_V_COMMENTLINEBANG 3
@wxSTC_V_COMMENTLINE 2
@wxSTC_V_COMMENT 1
@wxSTC_V_DEFAULT 0
@wxSTC_MSSQL_COLUMN_NAME_2 16
@wxSTC_MSSQL_DEFAULT_PREF_DATATYPE 15
@wxSTC_MSSQL_STORED_PROCEDURE 14
@wxSTC_MSSQL_FUNCTION 13
@wxSTC_MSSQL_GLOBAL_VARIABLE 12
@wxSTC_MSSQL_SYSTABLE 11
@wxSTC_MSSQL_DATATYPE 10
@wxSTC_MSSQL_STATEMENT 9
@wxSTC_MSSQL_COLUMN_NAME 8
@wxSTC_MSSQL_VARIABLE 7
@wxSTC_MSSQL_IDENTIFIER 6
@wxSTC_MSSQL_OPERATOR 5
@wxSTC_MSSQL_STRING 4
@wxSTC_MSSQL_NUMBER 3
@wxSTC_MSSQL_LINE_COMMENT 2
@wxSTC_MSSQL_COMMENT 1
@wxSTC_MSSQL_DEFAULT 0
@wxSTC_ERLANG_UNKNOWN 31
@wxSTC_ERLANG_NODE_NAME 13
@wxSTC_ERLANG_SEPARATOR 12
@wxSTC_ERLANG_RECORD 11
@wxSTC_ERLANG_MACRO 10
@wxSTC_ERLANG_CHARACTER 9
@wxSTC_ERLANG_FUNCTION_NAME 8
@wxSTC_ERLANG_ATOM 7
@wxSTC_ERLANG_OPERATOR 6
@wxSTC_ERLANG_STRING 5
@wxSTC_ERLANG_KEYWORD 4
@wxSTC_ERLANG_NUMBER 3
@wxSTC_ERLANG_VARIABLE 2
@wxSTC_ERLANG_COMMENT 1
@wxSTC_ERLANG_DEFAULT 0
@wxSTC_METAPOST_EXTRA 6
@wxSTC_METAPOST_TEXT 5
@wxSTC_METAPOST_COMMAND 4
@wxSTC_METAPOST_SYMBOL 3
@wxSTC_METAPOST_GROUP 2
@wxSTC_METAPOST_SPECIAL 1
@wxSTC_METAPOST_DEFAULT 0
@wxSTC_TEX_TEXT 5
@wxSTC_TEX_COMMAND 4
@wxSTC_TEX_SYMBOL 3
@wxSTC_TEX_GROUP 2
@wxSTC_TEX_SPECIAL 1
@wxSTC_TEX_DEFAULT 0
@wxSTC_YAML_ERROR 8
@wxSTC_YAML_TEXT 7
@wxSTC_YAML_DOCUMENT 6
@wxSTC_YAML_REFERENCE 5
@wxSTC_YAML_NUMBER 4
@wxSTC_YAML_KEYWORD 3
@wxSTC_YAML_IDENTIFIER 2
@wxSTC_YAML_COMMENT 1
@wxSTC_YAML_DEFAULT 0
@wxSTC_LOT_ABORT 6
@wxSTC_LOT_FAIL 5
@wxSTC_LOT_PASS 4
@wxSTC_LOT_SET 3
@wxSTC_LOT_BREAK 2
@wxSTC_LOT_HEADER 1
@wxSTC_LOT_DEFAULT 0
@wxSTC_CLW_DEPRECATED 16
@wxSTC_CLW_ERROR 15
@wxSTC_CLW_STANDARD_EQUATE 14
@wxSTC_CLW_ATTRIBUTE 13
@wxSTC_CLW_STRUCTURE_DATA_TYPE 12
@wxSTC_CLW_BUILTIN_PROCEDURES_FUNCTION 11
@wxSTC_CLW_RUNTIME_EXPRESSIONS 10
@wxSTC_CLW_COMPILER_DIRECTIVE 9
@wxSTC_CLW_KEYWORD 8
@wxSTC_CLW_PICTURE_STRING 7
@wxSTC_CLW_REAL_CONSTANT 6
@wxSTC_CLW_INTEGER_CONSTANT 5
@wxSTC_CLW_USER_IDENTIFIER 4
@wxSTC_CLW_STRING 3
@wxSTC_CLW_COMMENT 2
@wxSTC_CLW_LABEL 1
@wxSTC_CLW_DEFAULT 0
@wxSTC_MMIXAL_INCLUDE 17
@wxSTC_MMIXAL_SYMBOL 16
@wxSTC_MMIXAL_OPERATOR 15
@wxSTC_MMIXAL_HEX 14
@wxSTC_MMIXAL_REGISTER 13
@wxSTC_MMIXAL_STRING 12
@wxSTC_MMIXAL_CHAR 11
@wxSTC_MMIXAL_REF 10
@wxSTC_MMIXAL_NUMBER 9
@wxSTC_MMIXAL_OPERANDS 8
@wxSTC_MMIXAL_OPCODE_POST 7
@wxSTC_MMIXAL_OPCODE_UNKNOWN 6
@wxSTC_MMIXAL_OPCODE_VALID 5
@wxSTC_MMIXAL_OPCODE_PRE 4
@wxSTC_MMIXAL_OPCODE 3
@wxSTC_MMIXAL_LABEL 2
@wxSTC_MMIXAL_COMMENT 1
@wxSTC_MMIXAL_LEADWS 0
@wxSTC_NSIS_COMMENTBOX 18
@wxSTC_NSIS_FUNCTIONDEF 17
@wxSTC_NSIS_PAGEEX 16
@wxSTC_NSIS_SECTIONGROUP 15
@wxSTC_NSIS_NUMBER 14
@wxSTC_NSIS_STRINGVAR 13
@wxSTC_NSIS_MACRODEF 12
@wxSTC_NSIS_IFDEFINEDEF 11
@wxSTC_NSIS_SUBSECTIONDEF 10
@wxSTC_NSIS_SECTIONDEF 9
@wxSTC_NSIS_USERDEFINED 8
@wxSTC_NSIS_LABEL 7
@wxSTC_NSIS_VARIABLE 6
@wxSTC_NSIS_FUNCTION 5
@wxSTC_NSIS_STRINGRQ 4
@wxSTC_NSIS_STRINGLQ 3
@wxSTC_NSIS_STRINGDQ 2
@wxSTC_NSIS_COMMENT 1
@wxSTC_NSIS_DEFAULT 0
@wxSTC_PS_BADSTRINGCHAR 15
@wxSTC_PS_BASE85STRING 14
@wxSTC_PS_HEXSTRING 13
@wxSTC_PS_TEXT 12
@wxSTC_PS_PAREN_PROC 11
@wxSTC_PS_PAREN_DICT 10
@wxSTC_PS_PAREN_ARRAY 9
@wxSTC_PS_IMMEVAL 8
@wxSTC_PS_LITERAL 7
@wxSTC_PS_KEYWORD 6
@wxSTC_PS_NAME 5
@wxSTC_PS_NUMBER 4
@wxSTC_PS_DSC_VALUE 3
@wxSTC_PS_DSC_COMMENT 2
@wxSTC_PS_COMMENT 1
@wxSTC_PS_DEFAULT 0
@wxSTC_ESCRIPT_WORD3 11
@wxSTC_ESCRIPT_WORD2 10
@wxSTC_ESCRIPT_BRACE 9
@wxSTC_ESCRIPT_IDENTIFIER 8
@wxSTC_ESCRIPT_OPERATOR 7
@wxSTC_ESCRIPT_STRING 6
@wxSTC_ESCRIPT_WORD 5
@wxSTC_ESCRIPT_NUMBER 4
@wxSTC_ESCRIPT_COMMENTDOC 3
@wxSTC_ESCRIPT_COMMENTLINE 2
@wxSTC_ESCRIPT_COMMENT 1
@wxSTC_ESCRIPT_DEFAULT 0
@wxSTC_LOUT_STRINGEOL 10
@wxSTC_LOUT_IDENTIFIER 9
@wxSTC_LOUT_OPERATOR 8
@wxSTC_LOUT_STRING 7
@wxSTC_LOUT_WORD4 6
@wxSTC_LOUT_WORD3 5
@wxSTC_LOUT_WORD2 4
@wxSTC_LOUT_WORD 3
@wxSTC_LOUT_NUMBER 2
@wxSTC_LOUT_COMMENT 1
@wxSTC_LOUT_DEFAULT 0
@wxSTC_POV_WORD8 16
@wxSTC_POV_WORD7 15
@wxSTC_POV_WORD6 14
@wxSTC_POV_WORD5 13
@wxSTC_POV_WORD4 12
@wxSTC_POV_WORD3 11
@wxSTC_POV_WORD2 10
@wxSTC_POV_BADDIRECTIVE 9
@wxSTC_POV_DIRECTIVE 8
@wxSTC_POV_STRINGEOL 7
@wxSTC_POV_STRING 6
@wxSTC_POV_IDENTIFIER 5
@wxSTC_POV_OPERATOR 4
@wxSTC_POV_NUMBER 3
@wxSTC_POV_COMMENTLINE 2
@wxSTC_POV_COMMENT 1
@wxSTC_POV_DEFAULT 0
@wxSTC_CSS_ATTRIBUTE 16
@wxSTC_CSS_IDENTIFIER2 15
@wxSTC_CSS_SINGLESTRING 14
@wxSTC_CSS_DOUBLESTRING 13
@wxSTC_CSS_DIRECTIVE 12
@wxSTC_CSS_IMPORTANT 11
@wxSTC_CSS_ID 10
@wxSTC_CSS_COMMENT 9
@wxSTC_CSS_VALUE 8
@wxSTC_CSS_UNKNOWN_IDENTIFIER 7
@wxSTC_CSS_IDENTIFIER 6
@wxSTC_CSS_OPERATOR 5
@wxSTC_CSS_UNKNOWN_PSEUDOCLASS 4
@wxSTC_CSS_PSEUDOCLASS 3
@wxSTC_CSS_CLASS 2
@wxSTC_CSS_TAG 1
@wxSTC_CSS_DEFAULT 0
@wxSTC_F_CONTINUATION 14
@wxSTC_F_LABEL 13
@wxSTC_F_OPERATOR2 12
@wxSTC_F_PREPROCESSOR 11
@wxSTC_F_WORD3 10
@wxSTC_F_WORD2 9
@wxSTC_F_WORD 8
@wxSTC_F_IDENTIFIER 7
@wxSTC_F_OPERATOR 6
@wxSTC_F_STRINGEOL 5
@wxSTC_F_STRING2 4
@wxSTC_F_STRING1 3
@wxSTC_F_NUMBER 2
@wxSTC_F_COMMENT 1
@wxSTC_F_DEFAULT 0
@wxSTC_ASM_EXTINSTRUCTION 14
@wxSTC_ASM_STRINGEOL 13
@wxSTC_ASM_CHARACTER 12
@wxSTC_ASM_COMMENTBLOCK 11
@wxSTC_ASM_DIRECTIVEOPERAND 10
@wxSTC_ASM_DIRECTIVE 9
@wxSTC_ASM_REGISTER 8
@wxSTC_ASM_MATHINSTRUCTION 7
@wxSTC_ASM_CPUINSTRUCTION 6
@wxSTC_ASM_IDENTIFIER 5
@wxSTC_ASM_OPERATOR 4
@wxSTC_ASM_STRING 3
@wxSTC_ASM_NUMBER 2
@wxSTC_ASM_COMMENT 1
@wxSTC_ASM_DEFAULT 0
@wxSTC_SCRIPTOL_PREPROCESSOR 15
@wxSTC_SCRIPTOL_CLASSNAME 14
@wxSTC_SCRIPTOL_TRIPLE 13
@wxSTC_SCRIPTOL_IDENTIFIER 12
@wxSTC_SCRIPTOL_OPERATOR 11
@wxSTC_SCRIPTOL_KEYWORD 10
@wxSTC_SCRIPTOL_STRINGEOL 9
@wxSTC_SCRIPTOL_CHARACTER 8
@wxSTC_SCRIPTOL_STRING 7
@wxSTC_SCRIPTOL_NUMBER 6
@wxSTC_SCRIPTOL_COMMENTBLOCK 5
@wxSTC_SCRIPTOL_CSTYLE 4
@wxSTC_SCRIPTOL_PERSISTENT 3
@wxSTC_SCRIPTOL_COMMENTLINE 2
@wxSTC_SCRIPTOL_WHITE 1
@wxSTC_SCRIPTOL_DEFAULT 0
@wxSTC_MATLAB_DOUBLEQUOTESTRING 8
@wxSTC_MATLAB_IDENTIFIER 7
@wxSTC_MATLAB_OPERATOR 6
@wxSTC_MATLAB_STRING 5
@wxSTC_MATLAB_KEYWORD 4
@wxSTC_MATLAB_NUMBER 3
@wxSTC_MATLAB_COMMAND 2
@wxSTC_MATLAB_COMMENT 1
@wxSTC_MATLAB_DEFAULT 0
@wxSTC_FORTH_LOCALE 11
@wxSTC_FORTH_STRING 10
@wxSTC_FORTH_NUMBER 9
@wxSTC_FORTH_PREWORD2 8
@wxSTC_FORTH_PREWORD1 7
@wxSTC_FORTH_DEFWORD 6
@wxSTC_FORTH_KEYWORD 5
@wxSTC_FORTH_CONTROL 4
@wxSTC_FORTH_IDENTIFIER 3
@wxSTC_FORTH_COMMENT_ML 2
@wxSTC_FORTH_COMMENT 1
@wxSTC_FORTH_DEFAULT 0
@wxSTC_NNCRONTAB_IDENTIFIER 10
@wxSTC_NNCRONTAB_ENVIRONMENT 9
@wxSTC_NNCRONTAB_STRING 8
@wxSTC_NNCRONTAB_NUMBER 7
@wxSTC_NNCRONTAB_ASTERISK 6
@wxSTC_NNCRONTAB_MODIFIER 5
@wxSTC_NNCRONTAB_KEYWORD 4
@wxSTC_NNCRONTAB_SECTION 3
@wxSTC_NNCRONTAB_TASK 2
@wxSTC_NNCRONTAB_COMMENT 1
@wxSTC_NNCRONTAB_DEFAULT 0
@wxSTC_EIFFEL_STRINGEOL 8
@wxSTC_EIFFEL_IDENTIFIER 7
@wxSTC_EIFFEL_OPERATOR 6
@wxSTC_EIFFEL_CHARACTER 5
@wxSTC_EIFFEL_STRING 4
@wxSTC_EIFFEL_WORD 3
@wxSTC_EIFFEL_NUMBER 2
@wxSTC_EIFFEL_COMMENTLINE 1
@wxSTC_EIFFEL_DEFAULT 0
@wxSTC_LISP_MULTI_COMMENT 12
@wxSTC_LISP_SPECIAL 11
@wxSTC_LISP_OPERATOR 10
@wxSTC_LISP_IDENTIFIER 9
@wxSTC_LISP_STRINGEOL 8
@wxSTC_LISP_STRING 6
@wxSTC_LISP_SYMBOL 5
@wxSTC_LISP_KEYWORD_KW 4
@wxSTC_LISP_KEYWORD 3
@wxSTC_LISP_NUMBER 2
@wxSTC_LISP_COMMENT 1
@wxSTC_LISP_DEFAULT 0
@wxSTC_BAAN_WORD2 10
@wxSTC_BAAN_STRINGEOL 9
@wxSTC_BAAN_IDENTIFIER 8
@wxSTC_BAAN_OPERATOR 7
@wxSTC_BAAN_PREPROCESSOR 6
@wxSTC_BAAN_STRING 5
@wxSTC_BAAN_WORD 4
@wxSTC_BAAN_NUMBER 3
@wxSTC_BAAN_COMMENTDOC 2
@wxSTC_BAAN_COMMENT 1
@wxSTC_BAAN_DEFAULT 0
@wxSTC_ADA_ILLEGAL 11
@wxSTC_ADA_COMMENTLINE 10
@wxSTC_ADA_LABEL 9
@wxSTC_ADA_STRINGEOL 8
@wxSTC_ADA_STRING 7
@wxSTC_ADA_CHARACTEREOL 6
@wxSTC_ADA_CHARACTER 5
@wxSTC_ADA_DELIMITER 4
@wxSTC_ADA_NUMBER 3
@wxSTC_ADA_IDENTIFIER 2
@wxSTC_ADA_WORD 1
@wxSTC_ADA_DEFAULT 0
@wxSTC_AVE_WORD6 16
@wxSTC_AVE_WORD5 15
@wxSTC_AVE_WORD4 14
@wxSTC_AVE_WORD3 13
@wxSTC_AVE_WORD2 12
@wxSTC_AVE_WORD1 11
@wxSTC_AVE_OPERATOR 10
@wxSTC_AVE_IDENTIFIER 9
@wxSTC_AVE_STRINGEOL 8
@wxSTC_AVE_ENUM 7
@wxSTC_AVE_STRING 6
@wxSTC_AVE_WORD 3
@wxSTC_AVE_NUMBER 2
@wxSTC_AVE_COMMENT 1
@wxSTC_AVE_DEFAULT 0
@wxSTC_CONF_DIRECTIVE 9
@wxSTC_CONF_IP 8
@wxSTC_CONF_OPERATOR 7
@wxSTC_CONF_STRING 6
@wxSTC_CONF_PARAMETER 5
@wxSTC_CONF_EXTENSION 4
@wxSTC_CONF_IDENTIFIER 3
@wxSTC_CONF_NUMBER 2
@wxSTC_CONF_COMMENT 1
@wxSTC_CONF_DEFAULT 0
@wxSTC_DIFF_ADDED 6
@wxSTC_DIFF_DELETED 5
@wxSTC_DIFF_POSITION 4
@wxSTC_DIFF_HEADER 3
@wxSTC_DIFF_COMMAND 2
@wxSTC_DIFF_COMMENT 1
@wxSTC_DIFF_DEFAULT 0
@wxSTC_MAKE_IDEOL 9
@wxSTC_MAKE_TARGET 5
@wxSTC_MAKE_OPERATOR 4
@wxSTC_MAKE_IDENTIFIER 3
@wxSTC_MAKE_PREPROCESSOR 2
@wxSTC_MAKE_COMMENT 1
@wxSTC_MAKE_DEFAULT 0
@wxSTC_BAT_OPERATOR 7
@wxSTC_BAT_IDENTIFIER 6
@wxSTC_BAT_COMMAND 5
@wxSTC_BAT_HIDE 4
@wxSTC_BAT_LABEL 3
@wxSTC_BAT_WORD 2
@wxSTC_BAT_COMMENT 1
@wxSTC_BAT_DEFAULT 0
@wxSTC_ERR_JAVA_STACK 20
@wxSTC_ERR_TIDY 19
@wxSTC_ERR_ABSF 18
@wxSTC_ERR_IFORT 17
@wxSTC_ERR_IFC 16
@wxSTC_ERR_ELF 15
@wxSTC_ERR_PHP 14
@wxSTC_ERR_DIFF_MESSAGE 13
@wxSTC_ERR_DIFF_DELETION 12
@wxSTC_ERR_DIFF_ADDITION 11
@wxSTC_ERR_DIFF_CHANGED 10
@wxSTC_ERR_CTAG 9
@wxSTC_ERR_LUA 8
@wxSTC_ERR_NET 7
@wxSTC_ERR_PERL 6
@wxSTC_ERR_BORLAND 5
@wxSTC_ERR_CMD 4
@wxSTC_ERR_MS 3
@wxSTC_ERR_GCC 2
@wxSTC_ERR_PYTHON 1
@wxSTC_ERR_DEFAULT 0
@wxSTC_LUA_WORD8 19
@wxSTC_LUA_WORD7 18
@wxSTC_LUA_WORD6 17
@wxSTC_LUA_WORD5 16
@wxSTC_LUA_WORD4 15
@wxSTC_LUA_WORD3 14
@wxSTC_LUA_WORD2 13
@wxSTC_LUA_STRINGEOL 12
@wxSTC_LUA_IDENTIFIER 11
@wxSTC_LUA_OPERATOR 10
@wxSTC_LUA_PREPROCESSOR 9
@wxSTC_LUA_LITERALSTRING 8
@wxSTC_LUA_CHARACTER 7
@wxSTC_LUA_STRING 6
@wxSTC_LUA_WORD 5
@wxSTC_LUA_NUMBER 4
@wxSTC_LUA_COMMENTDOC 3
@wxSTC_LUA_COMMENTLINE 2
@wxSTC_LUA_COMMENT 1
@wxSTC_LUA_DEFAULT 0
@wxSTC_L_COMMENT 4
@wxSTC_L_MATH 3
@wxSTC_L_TAG 2
@wxSTC_L_COMMAND 1
@wxSTC_L_DEFAULT 0
@wxSTC_PROPS_KEY 5
@wxSTC_PROPS_DEFVAL 4
@wxSTC_PROPS_ASSIGNMENT 3
@wxSTC_PROPS_SECTION 2
@wxSTC_PROPS_COMMENT 1
@wxSTC_PROPS_DEFAULT 0
@wxSTC_B_BINNUMBER 18
@wxSTC_B_HEXNUMBER 17
@wxSTC_B_ERROR 16
@wxSTC_B_LABEL 15
@wxSTC_B_ASM 14
@wxSTC_B_CONSTANT 13
@wxSTC_B_KEYWORD4 12
@wxSTC_B_KEYWORD3 11
@wxSTC_B_KEYWORD2 10
@wxSTC_B_STRINGEOL 9
@wxSTC_B_DATE 8
@wxSTC_B_IDENTIFIER 7
@wxSTC_B_OPERATOR 6
@wxSTC_B_PREPROCESSOR 5
@wxSTC_B_STRING 4
@wxSTC_B_KEYWORD 3
@wxSTC_B_NUMBER 2
@wxSTC_B_COMMENT 1
@wxSTC_B_DEFAULT 0
@wxSTC_RB_UPPER_BOUND 41
@wxSTC_RB_STDERR 40
@wxSTC_RB_STDOUT 31
@wxSTC_RB_STDIN 30
@wxSTC_RB_WORD_DEMOTED 29
@wxSTC_RB_STRING_QW 28
@wxSTC_RB_STRING_QR 27
@wxSTC_RB_STRING_QX 26
@wxSTC_RB_STRING_QQ 25
@wxSTC_RB_STRING_Q 24
@wxSTC_RB_HERE_QX 23
@wxSTC_RB_HERE_QQ 22
@wxSTC_RB_HERE_Q 21
@wxSTC_RB_HERE_DELIM 20
@wxSTC_RB_DATASECTION 19
@wxSTC_RB_BACKTICKS 18
@wxSTC_RB_CLASS_VAR 17
@wxSTC_RB_INSTANCE_VAR 16
@wxSTC_RB_MODULE_NAME 15
@wxSTC_RB_SYMBOL 14
@wxSTC_RB_GLOBAL 13
@wxSTC_RB_REGEX 12
@wxSTC_RB_IDENTIFIER 11
@wxSTC_RB_OPERATOR 10
@wxSTC_RB_DEFNAME 9
@wxSTC_RB_CLASSNAME 8
@wxSTC_RB_CHARACTER 7
@wxSTC_RB_STRING 6
@wxSTC_RB_WORD 5
@wxSTC_RB_NUMBER 4
@wxSTC_RB_POD 3
@wxSTC_RB_COMMENTLINE 2
@wxSTC_RB_ERROR 1
@wxSTC_RB_DEFAULT 0
@wxSTC_PL_POD_VERB 31
@wxSTC_PL_STRING_QW 30
@wxSTC_PL_STRING_QR 29
@wxSTC_PL_STRING_QX 28
@wxSTC_PL_STRING_QQ 27
@wxSTC_PL_STRING_Q 26
@wxSTC_PL_HERE_QX 25
@wxSTC_PL_HERE_QQ 24
@wxSTC_PL_HERE_Q 23
@wxSTC_PL_HERE_DELIM 22
@wxSTC_PL_DATASECTION 21
@wxSTC_PL_BACKTICKS 20
@wxSTC_PL_LONGQUOTE 19
@wxSTC_PL_REGSUBST 18
@wxSTC_PL_REGEX 17
@wxSTC_PL_VARIABLE_INDEXER 16
@wxSTC_PL_SYMBOLTABLE 15
@wxSTC_PL_HASH 14
@wxSTC_PL_ARRAY 13
@wxSTC_PL_SCALAR 12
@wxSTC_PL_IDENTIFIER 11
@wxSTC_PL_OPERATOR 10
@wxSTC_PL_PREPROCESSOR 9
@wxSTC_PL_PUNCTUATION 8
@wxSTC_PL_CHARACTER 7
@wxSTC_PL_STRING 6
@wxSTC_PL_WORD 5
@wxSTC_PL_NUMBER 4
@wxSTC_PL_POD 3
@wxSTC_PL_COMMENTLINE 2
@wxSTC_PL_ERROR 1
@wxSTC_PL_DEFAULT 0
@wxSTC_HPHP_OPERATOR 127
@wxSTC_HPHP_HSTRING_VARIABLE 126
@wxSTC_HPHP_COMMENTLINE 125
@wxSTC_HPHP_COMMENT 124
@wxSTC_HPHP_VARIABLE 123
@wxSTC_HPHP_NUMBER 122
@wxSTC_HPHP_WORD 121
@wxSTC_HPHP_SIMPLESTRING 120
@wxSTC_HPHP_HSTRING 119
@wxSTC_HPHP_DEFAULT 118
@wxSTC_HPA_IDENTIFIER 117
@wxSTC_HPA_OPERATOR 116
@wxSTC_HPA_DEFNAME 115
@wxSTC_HPA_CLASSNAME 114
@wxSTC_HPA_TRIPLEDOUBLE 113
@wxSTC_HPA_TRIPLE 112
@wxSTC_HPA_WORD 111
@wxSTC_HPA_CHARACTER 110
@wxSTC_HPA_STRING 109
@wxSTC_HPA_NUMBER 108
@wxSTC_HPA_COMMENTLINE 107
@wxSTC_HPA_DEFAULT 106
@wxSTC_HPA_START 105
@wxSTC_HPHP_COMPLEX_VARIABLE 104
@wxSTC_HP_IDENTIFIER 102
@wxSTC_HP_OPERATOR 101
@wxSTC_HP_DEFNAME 100
@wxSTC_HP_CLASSNAME 99
@wxSTC_HP_TRIPLEDOUBLE 98
@wxSTC_HP_TRIPLE 97
@wxSTC_HP_WORD 96
@wxSTC_HP_CHARACTER 95
@wxSTC_HP_STRING 94
@wxSTC_HP_NUMBER 93
@wxSTC_HP_COMMENTLINE 92
@wxSTC_HP_DEFAULT 91
@wxSTC_HP_START 90
@wxSTC_HBA_STRINGEOL 87
@wxSTC_HBA_IDENTIFIER 86
@wxSTC_HBA_STRING 85
@wxSTC_HBA_WORD 84
@wxSTC_HBA_NUMBER 83
@wxSTC_HBA_COMMENTLINE 82
@wxSTC_HBA_DEFAULT 81
@wxSTC_HBA_START 80
@wxSTC_HB_STRINGEOL 77
@wxSTC_HB_IDENTIFIER 76
@wxSTC_HB_STRING 75
@wxSTC_HB_WORD 74
@wxSTC_HB_NUMBER 73
@wxSTC_HB_COMMENTLINE 72
@wxSTC_HB_DEFAULT 71
@wxSTC_HB_START 70
@wxSTC_HJA_REGEX 67
@wxSTC_HJA_STRINGEOL 66
@wxSTC_HJA_SYMBOLS 65
@wxSTC_HJA_SINGLESTRING 64
@wxSTC_HJA_DOUBLESTRING 63
@wxSTC_HJA_KEYWORD 62
@wxSTC_HJA_WORD 61
@wxSTC_HJA_NUMBER 60
@wxSTC_HJA_COMMENTDOC 59
@wxSTC_HJA_COMMENTLINE 58
@wxSTC_HJA_COMMENT 57
@wxSTC_HJA_DEFAULT 56
@wxSTC_HJA_START 55
@wxSTC_HJ_REGEX 52
@wxSTC_HJ_STRINGEOL 51
@wxSTC_HJ_SYMBOLS 50
@wxSTC_HJ_SINGLESTRING 49
@wxSTC_HJ_DOUBLESTRING 48
@wxSTC_HJ_KEYWORD 47
@wxSTC_HJ_WORD 46
@wxSTC_HJ_NUMBER 45
@wxSTC_HJ_COMMENTDOC 44
@wxSTC_HJ_COMMENTLINE 43
@wxSTC_HJ_COMMENT 42
@wxSTC_HJ_DEFAULT 41
@wxSTC_HJ_START 40
@wxSTC_H_SGML_BLOCK_DEFAULT 31
@wxSTC_H_SGML_1ST_PARAM_COMMENT 30
@wxSTC_H_SGML_COMMENT 29
@wxSTC_H_SGML_ENTITY 28
@wxSTC_H_SGML_SPECIAL 27
@wxSTC_H_SGML_ERROR 26
@wxSTC_H_SGML_SIMPLESTRING 25
@wxSTC_H_SGML_DOUBLESTRING 24
@wxSTC_H_SGML_1ST_PARAM 23
@wxSTC_H_SGML_COMMAND 22
@wxSTC_H_SGML_DEFAULT 21
@wxSTC_H_XCCOMMENT 20
@wxSTC_H_VALUE 19
@wxSTC_H_QUESTION 18
@wxSTC_H_CDATA 17
@wxSTC_H_ASPAT 16
@wxSTC_H_ASP 15
@wxSTC_H_SCRIPT 14
@wxSTC_H_XMLEND 13
@wxSTC_H_XMLSTART 12
@wxSTC_H_TAGEND 11
@wxSTC_H_ENTITY 10
@wxSTC_H_COMMENT 9
@wxSTC_H_OTHER 8
@wxSTC_H_SINGLESTRING 7
@wxSTC_H_DOUBLESTRING 6
@wxSTC_H_NUMBER 5
@wxSTC_H_ATTRIBUTEUNKNOWN 4
@wxSTC_H_ATTRIBUTE 3
@wxSTC_H_TAGUNKNOWN 2
@wxSTC_H_TAG 1
@wxSTC_H_DEFAULT 0
@wxSTC_TCL_BLOCK_COMMENT 21
@wxSTC_TCL_COMMENT_BOX 20
@wxSTC_TCL_WORD8 19
@wxSTC_TCL_WORD7 18
@wxSTC_TCL_WORD6 17
@wxSTC_TCL_WORD5 16
@wxSTC_TCL_WORD4 15
@wxSTC_TCL_WORD3 14
@wxSTC_TCL_WORD2 13
@wxSTC_TCL_WORD 12
@wxSTC_TCL_EXPAND 11
@wxSTC_TCL_MODIFIER 10
@wxSTC_TCL_SUB_BRACE 9
@wxSTC_TCL_SUBSTITUTION 8
@wxSTC_TCL_IDENTIFIER 7
@wxSTC_TCL_OPERATOR 6
@wxSTC_TCL_IN_QUOTE 5
@wxSTC_TCL_WORD_IN_QUOTE 4
@wxSTC_TCL_NUMBER 3
@wxSTC_TCL_COMMENTLINE 2
@wxSTC_TCL_COMMENT 1
@wxSTC_TCL_DEFAULT 0
@wxSTC_C_GLOBALCLASS 19
@wxSTC_C_COMMENTDOCKEYWORDERROR 18
@wxSTC_C_COMMENTDOCKEYWORD 17
@wxSTC_C_WORD2 16
@wxSTC_C_COMMENTLINEDOC 15
@wxSTC_C_REGEX 14
@wxSTC_C_VERBATIM 13
@wxSTC_C_STRINGEOL 12
@wxSTC_C_IDENTIFIER 11
@wxSTC_C_OPERATOR 10
@wxSTC_C_PREPROCESSOR 9
@wxSTC_C_UUID 8
@wxSTC_C_CHARACTER 7
@wxSTC_C_STRING 6
@wxSTC_C_WORD 5
@wxSTC_C_NUMBER 4
@wxSTC_C_COMMENTDOC 3
@wxSTC_C_COMMENTLINE 2
@wxSTC_C_COMMENT 1
@wxSTC_C_DEFAULT 0
@wxSTC_P_DECORATOR 15
@wxSTC_P_WORD2 14
@wxSTC_P_STRINGEOL 13
@wxSTC_P_COMMENTBLOCK 12
@wxSTC_P_IDENTIFIER 11
@wxSTC_P_OPERATOR 10
@wxSTC_P_DEFNAME 9
@wxSTC_P_CLASSNAME 8
@wxSTC_P_TRIPLEDOUBLE 7
@wxSTC_P_TRIPLE 6
@wxSTC_P_WORD 5
@wxSTC_P_CHARACTER 4
@wxSTC_P_STRING 3
@wxSTC_P_NUMBER 2
@wxSTC_P_COMMENTLINE 1
@wxSTC_P_DEFAULT 0
@wxSTC_LEX_AUTOMATIC 1000
@wxSTC_LEX_SPICE 78
@wxSTC_LEX_OPAL 77
@wxSTC_LEX_INNOSETUP 76
@wxSTC_LEX_FREEBASIC 75
@wxSTC_LEX_CSOUND 74
@wxSTC_LEX_FLAGSHIP 73
@wxSTC_LEX_SMALLTALK 72
@wxSTC_LEX_REBOL 71
@wxSTC_LEX_TADS3 70
@wxSTC_LEX_PHPSCRIPT 69
@wxSTC_LEX_HASKELL 68
@wxSTC_LEX_PUREBASIC 67
@wxSTC_LEX_BLITZBASIC 66
@wxSTC_LEX_CAML 65
@wxSTC_LEX_VHDL 64
@wxSTC_LEX_ASN1 63
@wxSTC_LEX_BASH 62
@wxSTC_LEX_APDL 61
@wxSTC_LEX_AU3 60
@wxSTC_LEX_SPECMAN 59
@wxSTC_LEX_GUI4CLI 58
@wxSTC_LEX_KIX 57
@wxSTC_LEX_VERILOG 56
@wxSTC_LEX_MSSQL 55
@wxSTC_LEX_OCTAVE 54
@wxSTC_LEX_ERLANG 53
@wxSTC_LEX_FORTH 52
@wxSTC_LEX_POWERBASIC 51
@wxSTC_LEX_METAPOST 50
@wxSTC_LEX_TEX 49
@wxSTC_LEX_YAML 48
@wxSTC_LEX_LOT 47
@wxSTC_LEX_CLWNOCASE 46
@wxSTC_LEX_CLW 45
@wxSTC_LEX_MMIXAL 44
@wxSTC_LEX_NSIS 43
@wxSTC_LEX_PS 42
@wxSTC_LEX_ESCRIPT 41
@wxSTC_LEX_LOUT 40
@wxSTC_LEX_POV 39
@wxSTC_LEX_CSS 38
@wxSTC_LEX_F77 37
@wxSTC_LEX_FORTRAN 36
@wxSTC_LEX_CPPNOCASE 35
@wxSTC_LEX_ASM 34
@wxSTC_LEX_SCRIPTOL 33
@wxSTC_LEX_MATLAB 32
@wxSTC_LEX_BAAN 31
@wxSTC_LEX_VBSCRIPT 28
@wxSTC_LEX_BULLANT 27
@wxSTC_LEX_NNCRONTAB 26
@wxSTC_LEX_TCL 25
@wxSTC_LEX_EIFFELKW 24
@wxSTC_LEX_EIFFEL 23
@wxSTC_LEX_RUBY 22
@wxSTC_LEX_LISP 21
@wxSTC_LEX_ADA 20
@wxSTC_LEX_AVE 19
@wxSTC_LEX_PASCAL 18
@wxSTC_LEX_CONF 17
@wxSTC_LEX_DIFF 16
@wxSTC_LEX_LUA 15
@wxSTC_LEX_LATEX 14
@wxSTC_LEX_XCODE 13
@wxSTC_LEX_BATCH 12
@wxSTC_LEX_MAKEFILE 11
@wxSTC_LEX_ERRORLIST 10
@wxSTC_LEX_PROPERTIES 9
@wxSTC_LEX_VB 8
@wxSTC_LEX_SQL 7
@wxSTC_LEX_PERL 6
@wxSTC_LEX_XML 5
@wxSTC_LEX_HTML 4
@wxSTC_LEX_CPP 3
@wxSTC_LEX_PYTHON 2
@wxSTC_LEX_NULL 1
@wxSTC_LEX_CONTAINER 0
@wxSTC_SCMOD_ALT 4
@wxSTC_SCMOD_CTRL 2
@wxSTC_SCMOD_SHIFT 1
@wxSTC_SCMOD_NORM 0
@wxSTC_KEY_DIVIDE 312
@wxSTC_KEY_SUBTRACT 311
@wxSTC_KEY_ADD 310
@wxSTC_KEY_RETURN 13
@wxSTC_KEY_TAB 9
@wxSTC_KEY_BACK 8
@wxSTC_KEY_ESCAPE 7
@wxSTC_KEY_INSERT 309
@wxSTC_KEY_DELETE 308
@wxSTC_KEY_NEXT 307
@wxSTC_KEY_PRIOR 306
@wxSTC_KEY_END 305
@wxSTC_KEY_HOME 304
@wxSTC_KEY_RIGHT 303
@wxSTC_KEY_LEFT 302
@wxSTC_KEY_UP 301
@wxSTC_KEY_DOWN 300
@wxSTC_MODEVENTMASKALL 8191
@wxSTC_MULTILINEUNDOREDO 4096
@wxSTC_MOD_BEFOREDELETE 2048
@wxSTC_MOD_BEFOREINSERT 1024
@wxSTC_MOD_CHANGEMARKER 512
@wxSTC_LASTSTEPINUNDOREDO 256
@wxSTC_MULTISTEPUNDOREDO 128
@wxSTC_PERFORMED_REDO 64
@wxSTC_PERFORMED_UNDO 32
@wxSTC_PERFORMED_USER 16
@wxSTC_MOD_CHANGEFOLD 8
@wxSTC_MOD_CHANGESTYLE 4
@wxSTC_MOD_DELETETEXT 2
@wxSTC_MOD_INSERTTEXT 1
@wxSTC_KEYWORDSET_MAX 8
@wxSTC_ALPHA_NOALPHA 256
@wxSTC_ALPHA_OPAQUE 255
@wxSTC_ALPHA_TRANSPARENT 0
@wxSTC_SEL_LINES 2
@wxSTC_SEL_RECTANGLE 1
@wxSTC_SEL_STREAM 0
@wxSTC_CARET_EVEN 8
@wxSTC_CARET_JUMPS 16
@wxSTC_CARET_STRICT 4
@wxSTC_CARET_SLOP 1
@wxSTC_VISIBLE_STRICT 4
@wxSTC_VISIBLE_SLOP 1
@wxSTC_CURSORWAIT 4
@wxSTC_CURSORNORMAL -1
@wxSTC_EDGE_BACKGROUND 2
@wxSTC_EDGE_LINE 1
@wxSTC_EDGE_NONE 0
@wxSTC_CACHE_DOCUMENT 3
@wxSTC_CACHE_PAGE 2
@wxSTC_CACHE_CARET 1
@wxSTC_CACHE_NONE 0
@wxSTC_WRAPVISUALFLAGLOC_START_BY_TEXT 2
@wxSTC_WRAPVISUALFLAGLOC_END_BY_TEXT 1
@wxSTC_WRAPVISUALFLAGLOC_DEFAULT 0
@wxSTC_WRAPVISUALFLAG_START 2
@wxSTC_WRAPVISUALFLAG_END 1
@wxSTC_WRAPVISUALFLAG_NONE 0
@wxSTC_WRAP_CHAR 2
@wxSTC_WRAP_WORD 1
@wxSTC_WRAP_NONE 0
@wxSTC_TIME_FOREVER 10_000_000
@wxSTC_FOLDFLAG_BOX 1
@wxSTC_FOLDFLAG_LEVELNUMBERS 64
@wxSTC_FOLDFLAG_LINEAFTER_CONTRACTED 16
@wxSTC_FOLDFLAG_LINEAFTER_EXPANDED 8
@wxSTC_FOLDFLAG_LINEBEFORE_CONTRACTED 4
@wxSTC_FOLDFLAG_LINEBEFORE_EXPANDED 2
@wxSTC_FOLDLEVELNUMBERMASK 4095
@wxSTC_FOLDLEVELUNINDENT 131_072
@wxSTC_FOLDLEVELCONTRACTED 65536
@wxSTC_FOLDLEVELBOXFOOTERFLAG 32768
@wxSTC_FOLDLEVELBOXHEADERFLAG 16384
@wxSTC_FOLDLEVELHEADERFLAG 8192
@wxSTC_FOLDLEVELWHITEFLAG 4096
@wxSTC_FOLDLEVELBASE 1024
@wxSTC_FIND_POSIX 4_194_304
@wxSTC_FIND_REGEXP 2_097_152
@wxSTC_FIND_WORDSTART 1_048_576
@wxSTC_FIND_MATCHCASE 4
@wxSTC_FIND_WHOLEWORD 2
@wxSTC_PRINT_COLOURONWHITEDEFAULTBG 4
@wxSTC_PRINT_COLOURONWHITE 3
@wxSTC_PRINT_BLACKONWHITE 2
@wxSTC_PRINT_INVERTLIGHT 1
@wxSTC_PRINT_NORMAL 0
@wxSTC_INDICS_MASK 224
@wxSTC_INDIC2_MASK 128
@wxSTC_INDIC1_MASK 64
@wxSTC_INDIC0_MASK 32
@wxSTC_INDIC_ROUNDBOX 7
@wxSTC_INDIC_BOX 6
@wxSTC_INDIC_HIDDEN 5
@wxSTC_INDIC_STRIKE 4
@wxSTC_INDIC_DIAGONAL 3
@wxSTC_INDIC_TT 2
@wxSTC_INDIC_SQUIGGLE 1
@wxSTC_INDIC_PLAIN 0
@wxSTC_INDIC_MAX 7
@wxSTC_CASE_LOWER 2
@wxSTC_CASE_UPPER 1
@wxSTC_CASE_MIXED 0
@wxSTC_CHARSET_8859_15 1000
@wxSTC_CHARSET_THAI 222
@wxSTC_CHARSET_VIETNAMESE 163
@wxSTC_CHARSET_ARABIC 178
@wxSTC_CHARSET_HEBREW 177
@wxSTC_CHARSET_JOHAB 130
@wxSTC_CHARSET_TURKISH 162
@wxSTC_CHARSET_SYMBOL 2
@wxSTC_CHARSET_SHIFTJIS 128
@wxSTC_CHARSET_CYRILLIC 1251
@wxSTC_CHARSET_RUSSIAN 204
@wxSTC_CHARSET_OEM 255
@wxSTC_CHARSET_MAC 77
@wxSTC_CHARSET_HANGUL 129
@wxSTC_CHARSET_GREEK 161
@wxSTC_CHARSET_GB2312 134
@wxSTC_CHARSET_EASTEUROPE 238
@wxSTC_CHARSET_CHINESEBIG5 136
@wxSTC_CHARSET_BALTIC 186
@wxSTC_CHARSET_DEFAULT 1
@wxSTC_CHARSET_ANSI 0
@wxSTC_STYLE_MAX 127
@wxSTC_STYLE_LASTPREDEFINED 39
@wxSTC_STYLE_CALLTIP 38
@wxSTC_STYLE_INDENTGUIDE 37
@wxSTC_STYLE_CONTROLCHAR 36
@wxSTC_STYLE_BRACEBAD 35
@wxSTC_STYLE_BRACELIGHT 34
@wxSTC_STYLE_LINENUMBER 33
@wxSTC_STYLE_DEFAULT 32
@wxSTC_MARGIN_FORE 3
@wxSTC_MARGIN_BACK 2
@wxSTC_MARGIN_NUMBER 1
@wxSTC_MARGIN_SYMBOL 0
@wxSTC_MASK_FOLDERS 4_261_412_864
@wxSTC_MARKNUM_FOLDEROPEN 31
@wxSTC_MARKNUM_FOLDER 30
@wxSTC_MARKNUM_FOLDERSUB 29
@wxSTC_MARKNUM_FOLDERTAIL 28
@wxSTC_MARKNUM_FOLDERMIDTAIL 27
@wxSTC_MARKNUM_FOLDEROPENMID 26
@wxSTC_MARKNUM_FOLDEREND 25
@wxSTC_MARK_CHARACTER 10000
@wxSTC_MARK_FULLRECT 26
@wxSTC_MARK_PIXMAP 25
@wxSTC_MARK_ARROWS 24
@wxSTC_MARK_DOTDOTDOT 23
@wxSTC_MARK_BACKGROUND 22
@wxSTC_MARK_CIRCLEMINUSCONNECTED 21
@wxSTC_MARK_CIRCLEMINUS 20
@wxSTC_MARK_CIRCLEPLUSCONNECTED 19
@wxSTC_MARK_CIRCLEPLUS 18
@wxSTC_MARK_TCORNERCURVE 17
@wxSTC_MARK_LCORNERCURVE 16
@wxSTC_MARK_BOXMINUSCONNECTED 15
@wxSTC_MARK_BOXMINUS 14
@wxSTC_MARK_BOXPLUSCONNECTED 13
@wxSTC_MARK_BOXPLUS 12
@wxSTC_MARK_TCORNER 11
@wxSTC_MARK_LCORNER 10
@wxSTC_MARK_VLINE 9
@wxSTC_MARK_PLUS 8
@wxSTC_MARK_MINUS 7
@wxSTC_MARK_ARROWDOWN 6
@wxSTC_MARK_EMPTY 5
@wxSTC_MARK_SHORTARROW 4
@wxSTC_MARK_SMALLRECT 3
@wxSTC_MARK_ARROW 2
@wxSTC_MARK_ROUNDRECT 1
@wxSTC_MARK_CIRCLE 0
@wxSTC_MARKER_MAX 31
@wxSTC_CP_DBCS 1
@wxSTC_CP_UTF8 65001
@wxSTC_EOL_LF 2
@wxSTC_EOL_CR 1
@wxSTC_EOL_CRLF 0
@wxSTC_WS_VISIBLEAFTERINDENT 2
@wxSTC_WS_VISIBLEALWAYS 1
@wxSTC_WS_INVISIBLE 0
@wxSTC_LEXER_START 4000
@wxSTC_OPTIONAL_START 3000
@wxSTC_START 2000
@wxSTC_INVALID_POSITION -1
@wxSTC_USE_POPUP 1
@wxTEXT_ATTR_TABS 1024
@wxTEXT_ATTR_RIGHT_INDENT 512
@wxTEXT_ATTR_LEFT_INDENT 256
@wxTEXT_ATTR_ALIGNMENT 128
# wxTEXT_ATTR_FONT_FACE | wxTEXT_ATTR_FONT_SIZE | wxTEXT_ATTR_FONT_WEIGHT | wxTEXT_ATTR_FONT_ITALIC | wxTEXT_ATTR_FONT_UNDERLINE -> 124
@wxTEXT_ATTR_FONT 124
@wxTEXT_ATTR_FONT_UNDERLINE 64
@wxTEXT_ATTR_FONT_ITALIC 32
@wxTEXT_ATTR_FONT_WEIGHT 16
@wxTEXT_ATTR_FONT_SIZE 8
@wxTEXT_ATTR_FONT_FACE 4
@wxTEXT_ATTR_BACKGROUND_COLOUR 2
@wxTEXT_ATTR_TEXT_COLOUR 1
@wxTEXT_TYPE_ANY 0
@wxTE_CAPITALIZE 0
@wxTE_RICH2 32768
@wxTE_BESTWRAP 0
@wxTE_WORDWRAP 1
@wxTE_CHARWRAP 16384
@wxTE_DONTWRAP 1_073_741_824
@wxTE_NOHIDESEL 8192
@wxTE_AUTO_URL 4096
@wxTE_PASSWORD 2048
@wxTE_PROCESS_ENTER 1024
@wxTE_RICH 128
# ERROR @wxTE_CENTRE ?wxTE_CENTER
@wxTE_RIGHT 512
@wxTE_CENTER 256
@wxTE_LEFT 0
@wxTE_PROCESS_TAB 64
@wxTE_MULTILINE 32
@wxTE_READONLY 16
@wxTE_AUTO_SCROLL 8
@wxTE_NO_VSCROLL 2
@wxHAS_TEXT_WINDOW_STREAM 0
# wxOK | wxCANCEL | wxCENTRE | wxWS_EX_VALIDATE_RECURSIVELY -> 21
@wxTextEntryDialogStyle 21
@wxTOPLEVEL_EX_DIALOG 8
# wxSYSTEM_MENU | wxRESIZE_BORDER | wxMINIMIZE_BOX | wxMAXIMIZE_BOX | wxCLOSE_BOX | wxCAPTION | wxCLIP_CHILDREN -> 541072960
@wxDEFAULT_FRAME_STYLE 541_072_960
@wxRESIZE_BORDER 64
@wxTINY_CAPTION_VERT 128
@wxTINY_CAPTION_HORIZ 256
@wxMAXIMIZE_BOX 512
@wxMINIMIZE_BOX 1024
@wxSYSTEM_MENU 2048
@wxCLOSE_BOX 4096
@wxMAXIMIZE 8192
@wxMINIMIZE 16384
@wxICONIZE 16384
@wxSTAY_ON_TOP 32768
# wxTR_HAS_BUTTONS | wxTR_LINES_AT_ROOT -> 9
@wxTR_DEFAULT_STYLE 9
@wxTR_FULL_ROW_HIGHLIGHT 8192
@wxTR_HIDE_ROOT 2048
@wxTR_ROW_LINES 1024
@wxTR_EDIT_LABELS 512
@wxTR_HAS_VARIABLE_ROW_HEIGHT 128
@wxTR_EXTENDED 64
@wxTR_MULTIPLE 32
@wxTR_SINGLE 0
@wxTR_TWIST_BUTTONS 16
@wxTR_LINES_AT_ROOT 8
@wxTR_NO_LINES 4
@wxTR_HAS_BUTTONS 1
@wxTR_NO_BUTTONS 0
@wxFILTER_EXCLUDE_CHAR_LIST 128
@wxFILTER_INCLUDE_CHAR_LIST 64
@wxFILTER_EXCLUDE_LIST 32
@wxFILTER_INCLUDE_LIST 16
@wxFILTER_NUMERIC 8
@wxFILTER_ALPHANUMERIC 4
@wxFILTER_ALPHA 2
@wxFILTER_ASCII 1
@wxFILTER_NONE 0
# ERROR @wxBETA_NUMBER wxe_util:get_const(wxBETA_NUMBER
# ERROR @wxSUBRELEASE_NUMBER wxe_util:get_const(wxSUBRELEASE_NUMBER
# ERROR @wxRELEASE_NUMBER wxe_util:get_const(wxRELEASE_NUMBER
# ERROR @wxMINOR_VERSION wxe_util:get_const(wxMINOR_VERSION
# ERROR @wxMAJOR_VERSION wxe_util:get_const(wxMAJOR_VERSION
@wxAuiManager_actionNone 0
@wxAuiManager_actionResize 1
@wxAuiManager_actionClickButton 2
@wxAuiManager_actionClickCaption 3
@wxAuiManager_actionDragToolbarPane 4
@wxAuiManager_actionDragFloatingPane 5
@wxAuiPaneInfo_optionFloating 0
@wxAuiPaneInfo_optionHidden 1
@wxAuiPaneInfo_optionLeftDockable 2
@wxAuiPaneInfo_optionRightDockable 3
@wxAuiPaneInfo_optionTopDockable 4
@wxAuiPaneInfo_optionBottomDockable 5
@wxAuiPaneInfo_optionFloatable 6
@wxAuiPaneInfo_optionMovable 7
@wxAuiPaneInfo_optionResizable 8
@wxAuiPaneInfo_optionPaneBorder 9
@wxAuiPaneInfo_optionCaption 10
@wxAuiPaneInfo_optionGripper 11
@wxAuiPaneInfo_optionDestroyOnClose 12
@wxAuiPaneInfo_optionToolbar 13
@wxAuiPaneInfo_optionActive 14
@wxAuiPaneInfo_optionGripperTop 15
@wxAuiPaneInfo_optionMaximized 16
@wxAuiPaneInfo_optionDockFixed 17
@wxAuiPaneInfo_buttonClose 18
@wxAuiPaneInfo_buttonMaximize 19
@wxAuiPaneInfo_buttonMinimize 20
@wxAuiPaneInfo_buttonPin 21
@wxAuiPaneInfo_buttonCustom1 22
@wxAuiPaneInfo_buttonCustom2 23
@wxAuiPaneInfo_buttonCustom3 24
@wxAuiPaneInfo_savedHiddenState 25
@wxAuiPaneInfo_actionPane 26
@wxBitmap_Pixmap 0
@wxBitmap_Pixbuf 1
@wxDateTime_Gregorian 0
@wxDateTime_Julian 1
@wxDateTime_Country_Unknown 0
@wxDateTime_Country_Default 1
@wxDateTime_Country_WesternEurope_Start 2
# ERROR @wxDateTime_Country_EEC ?Country_WesternEurope_Start
# ERROR @wxDateTime_France (?Country_WesternEurope_Start+1
# ERROR @wxDateTime_Germany (?Country_WesternEurope_Start+2
# ERROR @wxDateTime_UK (?Country_WesternEurope_Start+3
# ERROR @wxDateTime_Country_WesternEurope_End ?UK
# ERROR @wxDateTime_Russia (?UK+1
# ERROR @wxDateTime_USA (?UK+2
@wxDateTime_Gr_Unknown 0
@wxDateTime_Gr_Standard 1
@wxDateTime_Gr_Alaska 2
@wxDateTime_Gr_Albania 3
# ERROR @wxDateTime_Gr_Austria ?Gr_Unknown
# ERROR @wxDateTime_Gr_Austria_Brixen (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_Austria_Salzburg ?Gr_Austria_Brixen
# ERROR @wxDateTime_Gr_Austria_Tyrol ?Gr_Austria_Brixen
# ERROR @wxDateTime_Gr_Austria_Carinthia (?Gr_Austria_Brixen+1
# ERROR @wxDateTime_Gr_Austria_Styria ?Gr_Austria_Carinthia
# ERROR @wxDateTime_Gr_Belgium (?Gr_Austria_Carinthia+1
# ERROR @wxDateTime_Gr_Bulgaria ?Gr_Unknown
# ERROR @wxDateTime_Gr_Bulgaria_1 (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_Bulgaria_2 (?Gr_Unknown+2
# ERROR @wxDateTime_Gr_Bulgaria_3 (?Gr_Unknown+3
# ERROR @wxDateTime_Gr_Canada ?Gr_Unknown
# ERROR @wxDateTime_Gr_China ?Gr_Unknown
# ERROR @wxDateTime_Gr_China_1 (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_China_2 (?Gr_Unknown+2
# ERROR @wxDateTime_Gr_Czechoslovakia (?Gr_Unknown+3
# ERROR @wxDateTime_Gr_Denmark (?Gr_Unknown+4
# ERROR @wxDateTime_Gr_Egypt (?Gr_Unknown+5
# ERROR @wxDateTime_Gr_Estonia (?Gr_Unknown+6
# ERROR @wxDateTime_Gr_Finland (?Gr_Unknown+7
# ERROR @wxDateTime_Gr_France (?Gr_Unknown+8
# ERROR @wxDateTime_Gr_France_Alsace (?Gr_Unknown+9
# ERROR @wxDateTime_Gr_France_Lorraine (?Gr_Unknown+10
# ERROR @wxDateTime_Gr_France_Strasbourg (?Gr_Unknown+11
# ERROR @wxDateTime_Gr_Germany ?Gr_Unknown
# ERROR @wxDateTime_Gr_Germany_Catholic (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_Germany_Prussia (?Gr_Unknown+2
# ERROR @wxDateTime_Gr_Germany_Protestant (?Gr_Unknown+3
# ERROR @wxDateTime_Gr_GreatBritain (?Gr_Unknown+4
# ERROR @wxDateTime_Gr_Greece (?Gr_Unknown+5
# ERROR @wxDateTime_Gr_Hungary (?Gr_Unknown+6
# ERROR @wxDateTime_Gr_Ireland ?Gr_GreatBritain
# ERROR @wxDateTime_Gr_Italy ?Gr_Standard
# ERROR @wxDateTime_Gr_Japan ?Gr_Unknown
# ERROR @wxDateTime_Gr_Japan_1 (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_Japan_2 (?Gr_Unknown+2
# ERROR @wxDateTime_Gr_Japan_3 (?Gr_Unknown+3
# ERROR @wxDateTime_Gr_Latvia (?Gr_Unknown+4
# ERROR @wxDateTime_Gr_Lithuania (?Gr_Unknown+5
# ERROR @wxDateTime_Gr_Luxemburg (?Gr_Unknown+6
# ERROR @wxDateTime_Gr_Netherlands ?Gr_Belgium
# ERROR @wxDateTime_Gr_Netherlands_Groningen (?Gr_Belgium+1
# ERROR @wxDateTime_Gr_Netherlands_Gelderland (?Gr_Belgium+2
# ERROR @wxDateTime_Gr_Netherlands_Utrecht (?Gr_Belgium+3
# ERROR @wxDateTime_Gr_Netherlands_Friesland (?Gr_Belgium+4
# ERROR @wxDateTime_Gr_Norway ?Gr_Denmark
# ERROR @wxDateTime_Gr_Poland ?Gr_Standard
# ERROR @wxDateTime_Gr_Portugal ?Gr_Standard
# ERROR @wxDateTime_Gr_Romania (?Gr_Standard+1
# ERROR @wxDateTime_Gr_Russia (?Gr_Standard+2
# ERROR @wxDateTime_Gr_Scotland ?Gr_GreatBritain
# ERROR @wxDateTime_Gr_Spain ?Gr_Standard
# ERROR @wxDateTime_Gr_Sweden ?Gr_Finland
# ERROR @wxDateTime_Gr_Switzerland ?Gr_Unknown
# ERROR @wxDateTime_Gr_Switzerland_Catholic (?Gr_Unknown+1
# ERROR @wxDateTime_Gr_Switzerland_Protestant (?Gr_Unknown+2
# ERROR @wxDateTime_Gr_Turkey (?Gr_Unknown+3
# ERROR @wxDateTime_Gr_USA ?Gr_GreatBritain
# ERROR @wxDateTime_Gr_Wales ?Gr_GreatBritain
# ERROR @wxDateTime_Gr_Yugoslavia (?Gr_GreatBritain+1
@wxDateTime_Jan 0
@wxDateTime_Feb 1
@wxDateTime_Mar 2
@wxDateTime_Apr 3
@wxDateTime_May 4
@wxDateTime_Jun 5
@wxDateTime_Jul 6
@wxDateTime_Aug 7
@wxDateTime_Sep 8
@wxDateTime_Oct 9
@wxDateTime_Nov 10
@wxDateTime_Dec 11
@wxDateTime_Inv_Month 12
@wxDateTime_Name_Full 1
@wxDateTime_Name_Abbr 2
@wxDateTime_Local 0
@wxDateTime_GMT_12 1
@wxDateTime_GMT_11 2
@wxDateTime_GMT_10 3
@wxDateTime_GMT_9 4
@wxDateTime_GMT_8 5
@wxDateTime_GMT_7 6
@wxDateTime_GMT_6 7
@wxDateTime_GMT_5 8
@wxDateTime_GMT_4 9
@wxDateTime_GMT_3 10
@wxDateTime_GMT_2 11
@wxDateTime_GMT_1 12
@wxDateTime_GMT0 13
@wxDateTime_GMT1 14
@wxDateTime_GMT2 15
@wxDateTime_GMT3 16
@wxDateTime_GMT4 17
@wxDateTime_GMT5 18
@wxDateTime_GMT6 19
@wxDateTime_GMT7 20
@wxDateTime_GMT8 21
@wxDateTime_GMT9 22
@wxDateTime_GMT10 23
@wxDateTime_GMT11 24
@wxDateTime_GMT12 25
@wxDateTime_GMT13 26
# ERROR @wxDateTime_WET ?GMT0
# ERROR @wxDateTime_WEST ?GMT1
# ERROR @wxDateTime_CET ?GMT1
# ERROR @wxDateTime_CEST ?GMT2
# ERROR @wxDateTime_EET ?GMT2
# ERROR @wxDateTime_EEST ?GMT3
# ERROR @wxDateTime_MSK ?GMT3
# ERROR @wxDateTime_MSD ?GMT4
# ERROR @wxDateTime_AST ?GMT_4
# ERROR @wxDateTime_ADT ?GMT_3
# ERROR @wxDateTime_EST ?GMT_5
# ERROR @wxDateTime_EDT ?GMT_4
# ERROR @wxDateTime_CST ?GMT_6
# ERROR @wxDateTime_CDT ?GMT_5
# ERROR @wxDateTime_MST ?GMT_7
# ERROR @wxDateTime_MDT ?GMT_6
# ERROR @wxDateTime_PST ?GMT_8
# ERROR @wxDateTime_PDT ?GMT_7
# ERROR @wxDateTime_HST ?GMT_10
# ERROR @wxDateTime_AKST ?GMT_9
# ERROR @wxDateTime_AKDT ?GMT_8
# ERROR @wxDateTime_A_WST ?GMT8
# ERROR @wxDateTime_A_CST (?GMT13 bor ?+ bor ?1
# ERROR @wxDateTime_A_EST ?GMT10
# ERROR @wxDateTime_A_ESST ?GMT11
# ERROR @wxDateTime_NZST ?GMT12
# ERROR @wxDateTime_NZDT ?GMT13
# ERROR @wxDateTime_UTC ?GMT0
@wxDateTime_Sun 0
@wxDateTime_Mon 1
@wxDateTime_Tue 2
@wxDateTime_Wed 3
@wxDateTime_Thu 4
@wxDateTime_Fri 5
@wxDateTime_Sat 6
@wxDateTime_Inv_WeekDay 7
@wxDateTime_Default_First 0
@wxDateTime_Monday_First 1
@wxDateTime_Sunday_First 2
# ERROR @wxDateTime_Inv_Year ?SHRT_MIN
# wxOK | wxCANCEL | wxYES | wxNO | wxHELP | wxNO_DEFAULT -> 32926
@wxDialog_ButtonSizerFlags 32926
@wxGrid_wxGRID_CELLCTRL 2000
@wxGrid_wxGRID_TOPCTRL 2001
@wxGrid_wxGRID_TEXTCTRL 2100
@wxGrid_wxGRID_CHECKBOX 2101
@wxGrid_wxGRID_CHOICE 2102
@wxGrid_wxGRID_COMBOBOX 2103
@wxGrid_WXGRID_CURSOR_SELECT_CELL 0
@wxGrid_WXGRID_CURSOR_RESIZE_ROW 1
@wxGrid_WXGRID_CURSOR_RESIZE_COL 2
@wxGrid_WXGRID_CURSOR_SELECT_ROW 3
@wxGrid_WXGRID_CURSOR_SELECT_COL 4
@wxGrid_WXGRID_CURSOR_MOVE_COL 5
@wxGrid_wxGridSelectCells 0
@wxGrid_wxGridSelectRows 1
@wxGrid_wxGridSelectColumns 2
@wxGridCellAttr_Any 0
@wxGridCellAttr_Default 1
@wxGridCellAttr_Cell 2
@wxGridCellAttr_Row 3
@wxGridCellAttr_Col 4
@wxGridCellAttr_Merged 5
@wxGridCellAttr_UnsetOverflow -1
@wxGridCellAttr_Overflow 0
@wxGridCellAttr_SingleCell 1
@wxGridCellAttr_Unset -1
@wxGridCellAttr_ReadWrite 0
@wxGridCellAttr_ReadOnly 1
@wxHelpEvent_Origin_Unknown 0
@wxHelpEvent_Origin_Keyboard 1
@wxHelpEvent_Origin_HelpButton 2
@wxHtmlEasyPrinting_FontMode_Explicit 0
@wxHtmlEasyPrinting_FontMode_Standard 1
@wxNavigationKeyEvent_IsBackward 0
@wxNavigationKeyEvent_IsForward 1
@wxNavigationKeyEvent_WinChange 2
@wxNavigationKeyEvent_FromTab 4
@wxNotebook_SetSelection_SendEvent 1
@wxProgressDialog_Uncancelable -1
@wxProgressDialog_Canceled 0
@wxProgressDialog_Continue 1
@wxProgressDialog_Finished 2
@wxSizerItem_Item_None 0
@wxSizerItem_Item_Window 1
@wxSizerItem_Item_Sizer 2
@wxSizerItem_Item_Spacer 3
@wxSizerItem_Item_Max 4
@wxTextCtrl_SetValue_SendEvent 1
@wxTextCtrl_SetValue_SelectionOnly 2
@wxWindow_MoveBefore 0
@wxWindow_MoveAfter 1
@wxWindowGTK_ScrollDir_Horz 0
@wxWindowGTK_ScrollDir_Vert 1
@wxWindowGTK_ScrollDir_Max 2
@wxWindowGTK_ScrollUnit_Line 0
@wxWindowGTK_ScrollUnit_Page 1
@wxWindowGTK_ScrollUnit_Max 2
@wxACCEL_NORMAL 0
@wxACCEL_ALT 1
@wxACCEL_CTRL 2
@wxACCEL_SHIFT 4
@wxACCEL_CMD 2
@wxPRINT_WINDOWS 1
@wxPRINT_POSTSCRIPT 2
@wxBK_HITTEST_NOWHERE 1
@wxBK_HITTEST_ONICON 2
@wxBK_HITTEST_ONLABEL 4
# wxBK_HITTEST_ONICON | wxBK_HITTEST_ONLABEL -> 6
@wxBK_HITTEST_ONITEM 6
@wxBK_HITTEST_ONPAGE 8
@wxCAL_SUNDAY_FIRST 0
@wxCAL_MONDAY_FIRST 1
@wxCAL_SHOW_HOLIDAYS 2
@wxCAL_NO_YEAR_CHANGE 4
@wxCAL_NO_MONTH_CHANGE 12
@wxCAL_SEQUENTIAL_MONTH_SELECTION 16
@wxCAL_SHOW_SURROUNDING_WEEKS 32
@wxDP_DEFAULT 0
@wxDP_SPIN 1
@wxDP_DROPDOWN 2
@wxDP_SHOWCENTURY 4
@wxDP_ALLOWNONE 8
@wxDefaultCoord -1
@wxDIRCTRL_DIR_ONLY 16
@wxDIRCTRL_SELECT_FIRST 32
@wxDIRCTRL_SHOW_FILTERS 64
@wxDIRCTRL_3D_INTERNAL 128
@wxDIRCTRL_EDIT_LABELS 256
@wxDrag_CopyOnly 0
@wxDrag_AllowMove 1
@wxDrag_DefaultMove 3
@wxMOUSE_BTN_ANY -1
@wxMOUSE_BTN_NONE 0
@wxMOUSE_BTN_LEFT 1
@wxMOUSE_BTN_MIDDLE 2
@wxMOUSE_BTN_RIGHT 3
@wxFD_OPEN 1
@wxFD_SAVE 2
@wxFD_OVERWRITE_PROMPT 4
@wxFD_FILE_MUST_EXIST 16
@wxFD_MULTIPLE 32
@wxFD_CHANGE_DIR 128
@wxFD_PREVIEW 256
@wxFONTFLAG_DEFAULT 0
@wxFONTFLAG_ITALIC 1
@wxFONTFLAG_SLANT 2
@wxFONTFLAG_LIGHT 3
@wxFONTFLAG_BOLD 4
@wxFONTFLAG_ANTIALIASED 5
@wxFONTFLAG_NOT_ANTIALIASED 6
@wxFONTFLAG_UNDERLINED 7
@wxFONTFLAG_STRIKETHROUGH 8
# wxFONTFLAG_ITALIC | wxFONTFLAG_SLANT | wxFONTFLAG_LIGHT | wxFONTFLAG_BOLD | wxFONTFLAG_ANTIALIASED | wxFONTFLAG_NOT_ANTIALIASED | wxFONTFLAG_UNDERLINED | wxFONTFLAG_STRIKETHROUGH -> 15
@wxFONTFLAG_MASK 15
@wx_GL_RGBA 1
@wx_GL_BUFFER_SIZE 2
@wx_GL_LEVEL 3
@wx_GL_DOUBLEBUFFER 4
@wx_GL_STEREO 5
@wx_GL_AUX_BUFFERS 6
@wx_GL_MIN_RED 7
@wx_GL_MIN_GREEN 8
@wx_GL_MIN_BLUE 9
@wx_GL_MIN_ALPHA 10
@wx_GL_DEPTH_SIZE 11
@wx_GL_STENCIL_SIZE 12
@wx_GL_MIN_ACCUM_RED 13
@wx_GL_MIN_ACCUM_GREEN 14
@wx_GL_MIN_ACCUM_BLUE 15
@wx_GL_MIN_ACCUM_ALPHA 16
@wxPAGE_ODD 0
@wxPAGE_EVEN 1
@wxPAGE_ALL 2
@wxBMP_24BPP 24
@wxBMP_8BPP 8
@wxBMP_8BPP_GREY 9
@wxBMP_8BPP_GRAY 9
@wxBMP_8BPP_RED 10
@wxBMP_8BPP_PALETTE 11
@wxBMP_4BPP 4
@wxBMP_1BPP 1
@wxBMP_1BPP_BW 2
@wxIMAGE_RESOLUTION_INCHES 1
@wxIMAGE_RESOLUTION_CM 2
@wxIMAGE_LIST_NORMAL 0
@wxIMAGE_LIST_SMALL 1
@wxIMAGE_LIST_STATE 2
@wxLIST_NEXT_ABOVE 0
@wxLIST_NEXT_ALL 1
@wxLIST_NEXT_BELOW 2
@wxLIST_NEXT_LEFT 3
@wxLIST_NEXT_RIGHT 4
@wxNB_HITTEST_NOWHERE 1
@wxNB_HITTEST_ONICON 2
@wxNB_HITTEST_ONLABEL 4
@wxNB_HITTEST_ONITEM 6
@wxNB_HITTEST_ONPAGE 8
@wxTB_HORIZONTAL 4
@wxTB_TOP 4
@wxTB_VERTICAL 8
@wxTB_LEFT 8
@wxTB_3DBUTTONS 16
@wxTB_FLAT 32
@wxTB_DOCKABLE 64
@wxTB_NOICONS 128
@wxTB_TEXT 256
@wxTB_NODIVIDER 512
@wxTB_NOALIGN 1024
@wxTB_HORZ_LAYOUT 2048
# wxTB_HORZ_LAYOUT | wxTB_TEXT -> 2304
@wxTB_HORZ_TEXT 2304
@wxTB_NO_TOOLTIPS 4096
@wxTB_BOTTOM 8192
@wxTB_RIGHT 16384
@wxFULLSCREEN_NOMENUBAR 1
@wxFULLSCREEN_NOTOOLBAR 2
@wxFULLSCREEN_NOSTATUSBAR 4
@wxFULLSCREEN_NOBORDER 8
@wxFULLSCREEN_NOCAPTION 16
# wxFULLSCREEN_NOMENUBAR | wxFULLSCREEN_NOTOOLBAR | wxFULLSCREEN_NOSTATUSBAR | wxFULLSCREEN_NOBORDER | wxFULLSCREEN_NOCAPTION -> 31
@wxFULLSCREEN_ALL 31
@wxEXEC_ASYNC 0
@wxEXEC_SYNC 1
@wxEXEC_NOHIDE 2
@wxEXEC_MAKE_GROUP_LEADER 4
@wxEXEC_NODISABLE 8
@wxID_NONE -3
@wxID_SEPARATOR -2
@wxID_ANY -1
@wxID_LOWEST 4999
@wxID_OPEN 5000
@wxID_CLOSE 5001
@wxID_NEW 5002
@wxID_SAVE 5003
@wxID_SAVEAS 5004
@wxID_REVERT 5005
@wxID_EXIT 5006
@wxID_UNDO 5007
@wxID_REDO 5008
@wxID_HELP 5009
@wxID_PRINT 5010
@wxID_PRINT_SETUP 5011
@wxID_PAGE_SETUP 5012
@wxID_PREVIEW 5013
@wxID_ABOUT 5014
@wxID_HELP_CONTENTS 5015
@wxID_HELP_INDEX 5016
@wxID_HELP_SEARCH 5017
@wxID_HELP_COMMANDS 5018
@wxID_HELP_PROCEDURES 5019
@wxID_HELP_CONTEXT 5020
@wxID_CLOSE_ALL 5021
@wxID_PREFERENCES 5022
@wxID_EDIT 5030
@wxID_CUT 5031
@wxID_COPY 5032
@wxID_PASTE 5033
@wxID_CLEAR 5034
@wxID_FIND 5035
@wxID_DUPLICATE 5036
@wxID_SELECTALL 5037
@wxID_DELETE 5038
@wxID_REPLACE 5039
@wxID_REPLACE_ALL 5040
@wxID_PROPERTIES 5041
@wxID_VIEW_DETAILS 5042
@wxID_VIEW_LARGEICONS 5043
@wxID_VIEW_SMALLICONS 5044
@wxID_VIEW_LIST 5045
@wxID_VIEW_SORTDATE 5046
@wxID_VIEW_SORTNAME 5047
@wxID_VIEW_SORTSIZE 5048
@wxID_VIEW_SORTTYPE 5049
@wxID_FILE 5050
@wxID_FILE1 5051
@wxID_FILE2 5052
@wxID_FILE3 5053
@wxID_FILE4 5054
@wxID_FILE5 5055
@wxID_FILE6 5056
@wxID_FILE7 5057
@wxID_FILE8 5058
@wxID_FILE9 5059
@wxID_OK 5100
@wxID_CANCEL 5101
@wxID_APPLY 5102
@wxID_YES 5103
@wxID_NO 5104
@wxID_STATIC 5105
@wxID_FORWARD 5106
@wxID_BACKWARD 5107
@wxID_DEFAULT 5108
@wxID_MORE 5109
@wxID_SETUP 5110
@wxID_RESET 5111
@wxID_CONTEXT_HELP 5112
@wxID_YESTOALL 5113
@wxID_NOTOALL 5114
@wxID_ABORT 5115
@wxID_RETRY 5116
@wxID_IGNORE 5117
@wxID_ADD 5118
@wxID_REMOVE 5119
@wxID_UP 5120
@wxID_DOWN 5121
@wxID_HOME 5122
@wxID_REFRESH 5123
@wxID_STOP 5124
@wxID_INDEX 5125
@wxID_BOLD 5126
@wxID_ITALIC 5127
@wxID_JUSTIFY_CENTER 5128
@wxID_JUSTIFY_FILL 5129
@wxID_JUSTIFY_RIGHT 5130
@wxID_JUSTIFY_LEFT 5131
@wxID_UNDERLINE 5132
@wxID_INDENT 5133
@wxID_UNINDENT 5134
@wxID_ZOOM_100 5135
@wxID_ZOOM_FIT 5136
@wxID_ZOOM_IN 5137
@wxID_ZOOM_OUT 5138
@wxID_UNDELETE 5139
@wxID_REVERT_TO_SAVED 5140
@wxID_SYSTEM_MENU 5200
@wxID_CLOSE_FRAME 5201
@wxID_MOVE_FRAME 5202
@wxID_RESIZE_FRAME 5203
@wxID_MAXIMIZE_FRAME 5204
@wxID_ICONIZE_FRAME 5205
@wxID_RESTORE_FRAME 5206
@wxID_FILEDLGG 5900
@wxID_HIGHEST 5999
@wxJOYSTICK1 0
@wxJOYSTICK2 1
@wxIMAGE_QUALITY_NORMAL 0
@wxIMAGE_QUALITY_HIGH 1
@wxLIST_ALIGN_DEFAULT 0
@wxLIST_ALIGN_LEFT 1
@wxLIST_ALIGN_TOP 2
@wxLIST_ALIGN_SNAP_TO_GRID 3
@wxUSER_ATTENTION_INFO 1
@wxUSER_ATTENTION_ERROR 2
@wxBROWSER_NEW_WINDOW 1
@wxDEFAULT 70
@wxDECORATIVE 71
@wxROMAN 72
@wxSCRIPT 73
@wxSWISS 74
@wxMODERN 75
@wxTELETYPE 76
@wxVARIABLE 80
@wxFIXED 81
@wxNORMAL 90
@wxLIGHT 91
@wxBOLD 92
@wxITALIC 93
@wxSLANT 94
@wxSOLID 100
@wxDOT 101
@wxLONG_DASH 102
@wxSHORT_DASH 103
@wxDOT_DASH 104
@wxUSER_DASH 105
@wxTRANSPARENT 106
@wxSTIPPLE_MASK_OPAQUE 107
@wxSTIPPLE_MASK 108
@wxSTIPPLE 110
@wxBDIAGONAL_HATCH 111
@wxCROSSDIAG_HATCH 112
@wxFDIAGONAL_HATCH 113
@wxCROSS_HATCH 114
@wxHORIZONTAL_HATCH 115
@wxVERTICAL_HATCH 116
@wxFIRST_HATCH 111
@wxLAST_HATCH 116
@wxJOIN_BEVEL 120
@wxJOIN_MITER 121
@wxJOIN_ROUND 122
@wxCAP_ROUND 130
@wxCAP_PROJECTING 131
@wxCAP_BUTT 132
@wxJOY_BUTTON_ANY -1
@wxJOY_BUTTON1 1
@wxJOY_BUTTON2 2
@wxJOY_BUTTON3 4
@wxJOY_BUTTON4 8
@wxLIST_AUTOSIZE -1
@wxLIST_AUTOSIZE_USEHEADER -2
@wxStrip_Mnemonics 1
@wxStrip_Accel 2
# wxStrip_Mnemonics | wxStrip_Accel -> 3
@wxStrip_All 3
@wxFLOOD_SURFACE 1
@wxFLOOD_BORDER 2
@wxLIST_RECT_BOUNDS 0
@wxLIST_RECT_ICON 1
@wxLIST_RECT_LABEL 2
@wxODDEVEN_RULE 1
@wxWINDING_RULE 2
@wxLIST_FIND_UP 0
@wxLIST_FIND_DOWN 1
@wxLIST_FIND_LEFT 2
@wxLIST_FIND_RIGHT 3
@wxTOOL_TOP 1
@wxTOOL_BOTTOM 2
@wxTOOL_LEFT 3
@wxTOOL_RIGHT 4
@wxMM_TEXT 1
@wxMM_LOMETRIC 2
@wxMM_HIMETRIC 3
@wxMM_LOENGLISH 4
@wxMM_HIENGLISH 5
@wxMM_TWIPS 6
@wxMM_ISOTROPIC 7
@wxMM_ANISOTROPIC 8
@wxMM_POINTS 9
@wxMM_METRIC 10
@wxEVENT_PROPAGATE_NONE 0
# INT_MAX (assuming a 32-bit C int)
@wxEVENT_PROPAGATE_MAX 2_147_483_647
@wxCLEAR 0
@wxROP_BLACK 0
@wxBLIT_BLACKNESS 0
# wxCLEAR+1 -> 1
@wxXOR 1
@wxROP_XORPEN 1
@wxBLIT_SRCINVERT 1
# wxXOR+1 -> 2
@wxINVERT 2
@wxROP_NOT 2
@wxBLIT_DSTINVERT 2
# wxINVERT+1 -> 3
@wxOR_REVERSE 3
@wxROP_MERGEPENNOT 3
@wxBLIT_00DD0228 3
# wxOR_REVERSE+1 -> 4
@wxAND_REVERSE 4
@wxROP_MASKPENNOT 4
@wxBLIT_SRCERASE 4
# wxAND_REVERSE+1 -> 5
@wxCOPY 5
@wxROP_COPYPEN 5
@wxBLIT_SRCCOPY 5
# wxCOPY+1 -> 6
@wxAND 6
@wxROP_MASKPEN 6
@wxBLIT_SRCAND 6
# wxAND+1 -> 7
@wxAND_INVERT 7
@wxROP_MASKNOTPEN 7
@wxBLIT_00220326 7
# wxAND_INVERT+1 -> 8
@wxNO_OP 8
@wxROP_NOP 8
@wxBLIT_00AA0029 8
# wxNO_OP+1 -> 9
@wxNOR 9
@wxROP_NOTMERGEPEN 9
@wxBLIT_NOTSRCERASE 9
# wxNOR+1 -> 10
@wxEQUIV 10
@wxROP_NOTXORPEN 10
@wxBLIT_00990066 10
# wxEQUIV+1 -> 11
@wxSRC_INVERT 11
@wxROP_NOTCOPYPEN 11
@wxBLIT_NOTSCRCOPY 11
# wxSRC_INVERT+1 -> 12
@wxOR_INVERT 12
@wxROP_MERGENOTPEN 12
@wxBLIT_MERGEPAINT 12
# wxOR_INVERT+1 -> 13
@wxNAND 13
@wxROP_NOTMASKPEN 13
@wxBLIT_007700E6 13
# wxNAND+1 -> 14
@wxOR 14
@wxROP_MERGEPEN 14
@wxBLIT_SRCPAINT 14
# wxOR+1 -> 15
@wxSET 15
@wxROP_WHITE 15
@wxBLIT_WHITENESS 15
@wxALIGN_NOT 0
@wxALIGN_CENTER_HORIZONTAL 256
@wxALIGN_CENTRE_HORIZONTAL 256
@wxALIGN_LEFT 0
@wxALIGN_TOP 0
@wxALIGN_RIGHT 512
@wxALIGN_BOTTOM 1024
@wxALIGN_CENTER_VERTICAL 2048
@wxALIGN_CENTRE_VERTICAL 2048
# wxALIGN_CENTER_HORIZONTAL | wxALIGN_CENTER_VERTICAL -> 2304
@wxALIGN_CENTER 2304
@wxALIGN_CENTRE 2304
@wxALIGN_MASK 3840
@wxAUI_BUTTON_CLOSE 101
@wxAUI_BUTTON_MAXIMIZE_RESTORE 102
@wxAUI_BUTTON_MINIMIZE 103
@wxAUI_BUTTON_PIN 104
@wxAUI_BUTTON_OPTIONS 105
@wxAUI_BUTTON_WINDOWLIST 106
@wxAUI_BUTTON_LEFT 107
@wxAUI_BUTTON_RIGHT 108
@wxAUI_BUTTON_UP 109
@wxAUI_BUTTON_DOWN 110
@wxAUI_BUTTON_CUSTOM1 201
@wxAUI_BUTTON_CUSTOM2 202
@wxAUI_BUTTON_CUSTOM3 203
@wxAUI_DOCK_NONE 0
@wxAUI_DOCK_TOP 1
@wxAUI_DOCK_RIGHT 2
@wxAUI_DOCK_BOTTOM 3
@wxAUI_DOCK_LEFT 4
@wxAUI_DOCK_CENTER 5
@wxAUI_DOCK_CENTRE 5
@wxAUI_MGR_ALLOW_FLOATING 0
@wxAUI_MGR_ALLOW_ACTIVE_PANE 1
@wxAUI_MGR_TRANSPARENT_DRAG 2
@wxAUI_MGR_TRANSPARENT_HINT 3
@wxAUI_MGR_VENETIAN_BLINDS_HINT 4
@wxAUI_MGR_RECTANGLE_HINT 5
@wxAUI_MGR_HINT_FADE 6
@wxAUI_MGR_NO_VENETIAN_BLINDS_FADE 7
# wxAUI_MGR_ALLOW_FLOATING | wxAUI_MGR_TRANSPARENT_HINT | wxAUI_MGR_HINT_FADE | wxAUI_MGR_NO_VENETIAN_BLINDS_FADE -> 7
@wxAUI_MGR_DEFAULT 7
@wxAUI_NB_TOP 0
@wxAUI_NB_LEFT 1
@wxAUI_NB_RIGHT 2
@wxAUI_NB_BOTTOM 3
@wxAUI_NB_TAB_SPLIT 4
@wxAUI_NB_TAB_MOVE 5
@wxAUI_NB_TAB_EXTERNAL_MOVE 6
@wxAUI_NB_TAB_FIXED_WIDTH 7
@wxAUI_NB_SCROLL_BUTTONS 8
@wxAUI_NB_WINDOWLIST_BUTTON 9
@wxAUI_NB_CLOSE_BUTTON 10
@wxAUI_NB_CLOSE_ON_ACTIVE_TAB 11
@wxAUI_NB_CLOSE_ON_ALL_TABS 12
@wxAUI_NB_MIDDLE_CLICK_CLOSE 13
# wxAUI_NB_TOP | wxAUI_NB_TAB_SPLIT | wxAUI_NB_TAB_MOVE | wxAUI_NB_SCROLL_BUTTONS | wxAUI_NB_CLOSE_ON_ACTIVE_TAB | wxAUI_NB_MIDDLE_CLICK_CLOSE -> 15
@wxAUI_NB_DEFAULT_STYLE 15
@wxAUI_BUTTON_STATE_NORMAL 0
@wxAUI_BUTTON_STATE_HOVER 1
@wxAUI_BUTTON_STATE_PRESSED 2
@wxAUI_BUTTON_STATE_DISABLED 3
@wxAUI_BUTTON_STATE_HIDDEN 4
@wxAUI_BUTTON_STATE_CHECKED 5
@wxAUI_GRADIENT_NONE 0
@wxAUI_GRADIENT_VERTICAL 1
@wxAUI_GRADIENT_HORIZONTAL 2
@wxAUI_DOCKART_SASH_SIZE 0
@wxAUI_DOCKART_CAPTION_SIZE 1
@wxAUI_DOCKART_GRIPPER_SIZE 2
@wxAUI_DOCKART_PANE_BORDER_SIZE 3
@wxAUI_DOCKART_PANE_BUTTON_SIZE 4
@wxAUI_DOCKART_BACKGROUND_COLOUR 5
@wxAUI_DOCKART_SASH_COLOUR 6
@wxAUI_DOCKART_ACTIVE_CAPTION_COLOUR 7
@wxAUI_DOCKART_ACTIVE_CAPTION_GRADIENT_COLOUR 8
@wxAUI_DOCKART_INACTIVE_CAPTION_COLOUR 9
@wxAUI_DOCKART_INACTIVE_CAPTION_GRADIENT_COLOUR 10
@wxAUI_DOCKART_ACTIVE_CAPTION_TEXT_COLOUR 11
@wxAUI_DOCKART_INACTIVE_CAPTION_TEXT_COLOUR 12
@wxAUI_DOCKART_BORDER_COLOUR 13
@wxAUI_DOCKART_GRIPPER_COLOUR 14
@wxAUI_DOCKART_CAPTION_FONT 15
@wxAUI_DOCKART_GRADIENT_TYPE 16
@wxAUI_INSERT_PANE 0
@wxAUI_INSERT_ROW 1
@wxAUI_INSERT_DOCK 2
@wxBG_STYLE_SYSTEM 0
@wxBG_STYLE_COLOUR 1
@wxBG_STYLE_CUSTOM 2
@wxBITMAP_TYPE_INVALID 0
@wxBITMAP_TYPE_BMP 1
@wxBITMAP_TYPE_BMP_RESOURCE 2
@wxBITMAP_TYPE_RESOURCE 2
# wxBITMAP_TYPE_BMP_RESOURCE+1 -> 3
@wxBITMAP_TYPE_ICO 3
# wxBITMAP_TYPE_BMP_RESOURCE+2 -> 4
@wxBITMAP_TYPE_ICO_RESOURCE 4
# wxBITMAP_TYPE_BMP_RESOURCE+3 -> 5
@wxBITMAP_TYPE_CUR 5
# wxBITMAP_TYPE_BMP_RESOURCE+4 -> 6
@wxBITMAP_TYPE_CUR_RESOURCE 6
# wxBITMAP_TYPE_BMP_RESOURCE+5 -> 7
@wxBITMAP_TYPE_XBM 7
# wxBITMAP_TYPE_BMP_RESOURCE+6 -> 8
@wxBITMAP_TYPE_XBM_DATA 8
# wxBITMAP_TYPE_BMP_RESOURCE+7 -> 9
@wxBITMAP_TYPE_XPM 9
# wxBITMAP_TYPE_BMP_RESOURCE+8 -> 10
@wxBITMAP_TYPE_XPM_DATA 10
# wxBITMAP_TYPE_BMP_RESOURCE+9 -> 11
@wxBITMAP_TYPE_TIF 11
# wxBITMAP_TYPE_BMP_RESOURCE+10 -> 12
@wxBITMAP_TYPE_TIF_RESOURCE 12
# wxBITMAP_TYPE_BMP_RESOURCE+11 -> 13
@wxBITMAP_TYPE_GIF 13
# wxBITMAP_TYPE_BMP_RESOURCE+12 -> 14
@wxBITMAP_TYPE_GIF_RESOURCE 14
# wxBITMAP_TYPE_BMP_RESOURCE+13 -> 15
@wxBITMAP_TYPE_PNG 15
# wxBITMAP_TYPE_BMP_RESOURCE+14 -> 16
@wxBITMAP_TYPE_PNG_RESOURCE 16
# wxBITMAP_TYPE_BMP_RESOURCE+15 -> 17
@wxBITMAP_TYPE_JPEG 17
# wxBITMAP_TYPE_BMP_RESOURCE+16 -> 18
@wxBITMAP_TYPE_JPEG_RESOURCE 18
# wxBITMAP_TYPE_BMP_RESOURCE+17 -> 19
@wxBITMAP_TYPE_PNM 19
# wxBITMAP_TYPE_BMP_RESOURCE+18 -> 20
@wxBITMAP_TYPE_PNM_RESOURCE 20
# wxBITMAP_TYPE_BMP_RESOURCE+19 -> 21
@wxBITMAP_TYPE_PCX 21
# wxBITMAP_TYPE_BMP_RESOURCE+20 -> 22
@wxBITMAP_TYPE_PCX_RESOURCE 22
# wxBITMAP_TYPE_BMP_RESOURCE+21 -> 23
@wxBITMAP_TYPE_PICT 23
# wxBITMAP_TYPE_BMP_RESOURCE+22 -> 24
@wxBITMAP_TYPE_PICT_RESOURCE 24
# wxBITMAP_TYPE_BMP_RESOURCE+23 -> 25
@wxBITMAP_TYPE_ICON 25
# wxBITMAP_TYPE_BMP_RESOURCE+24 -> 26
@wxBITMAP_TYPE_ICON_RESOURCE 26
# wxBITMAP_TYPE_BMP_RESOURCE+25 -> 27
@wxBITMAP_TYPE_ANI 27
# wxBITMAP_TYPE_BMP_RESOURCE+26 -> 28
@wxBITMAP_TYPE_IFF 28
# wxBITMAP_TYPE_BMP_RESOURCE+27 -> 29
@wxBITMAP_TYPE_TGA 29
# wxBITMAP_TYPE_BMP_RESOURCE+28 -> 30
@wxBITMAP_TYPE_MACCURSOR 30
# wxBITMAP_TYPE_BMP_RESOURCE+29 -> 31
@wxBITMAP_TYPE_MACCURSOR_RESOURCE 31
@wxBITMAP_TYPE_ANY 50
@wxBORDER_DEFAULT 0
@wxBORDER_NONE 2_097_152
@wxBORDER_STATIC 16_777_216
@wxBORDER_SIMPLE 33_554_432
@wxBORDER_RAISED 67_108_864
@wxBORDER_SUNKEN 134_217_728
@wxBORDER_DOUBLE 268_435_456
@wxBORDER_THEME 268_435_456
@wxBORDER_MASK 522_190_848
@wxCAL_BORDER_NONE 0
@wxCAL_BORDER_SQUARE 1
@wxCAL_BORDER_ROUND 2
@wxCAL_HITTEST_NOWHERE 0
@wxCAL_HITTEST_HEADER 1
@wxCAL_HITTEST_DAY 2
@wxCAL_HITTEST_INCMONTH 3
@wxCAL_HITTEST_DECMONTH 4
@wxCAL_HITTEST_SURROUNDING_WEEK 5
@wxCHK_UNCHECKED 0
@wxCHK_CHECKED 1
@wxCHK_UNDETERMINED 2
@wxClientData_None 0
@wxClientData_Object 1
@wxClientData_Void 2
@wxDF_INVALID 0
@wxDF_TEXT 1
@wxDF_BITMAP 2
@wxDF_METAFILE 3
@wxDF_SYLK 4
@wxDF_DIF 5
@wxDF_TIFF 6
@wxDF_OEMTEXT 7
@wxDF_DIB 8
@wxDF_PALETTE 9
@wxDF_PENDATA 10
@wxDF_RIFF 11
@wxDF_WAVE 12
@wxDF_UNICODETEXT 13
@wxDF_ENHMETAFILE 14
@wxDF_FILENAME 15
@wxDF_LOCALE 16
@wxDF_PRIVATE 20
@wxDF_HTML 30
@wxDF_MAX 31
@wxLEFT 16
@wxRIGHT 32
@wxUP 64
@wxDOWN 128
@wxTOP 64
@wxBOTTOM 128
@wxNORTH 64
@wxSOUTH 128
@wxWEST 16
@wxEAST 32
# wxUP | wxDOWN | wxRIGHT | wxLEFT -> 240
@wxALL 240
@wxDragError 0
@wxDragNone 1
@wxDragCopy 2
@wxDragMove 3
@wxDragLink 4
@wxDragCancel 5
@wxDUPLEX_SIMPLEX 0
@wxDUPLEX_HORIZONTAL 1
@wxDUPLEX_VERTICAL 2
@wxLeft 0
@wxTop 1
@wxRight 2
@wxBottom 3
@wxWidth 4
@wxHeight 5
@wxCentre 6
@wxCenter 6
# wxCentre+1 -> 7
@wxCentreX 7
# wxCentre+2 -> 8
@wxCentreY 8
@wxFR_REPLACEDIALOG 1
@wxFR_NOUPDOWN 2
@wxFR_NOMATCHCASE 4
@wxFR_NOWHOLEWORD 8
@wxFR_DOWN 1
@wxFR_WHOLEWORD 2
@wxFR_MATCHCASE 4
@wxFLEX_GROWMODE_NONE 0
@wxFLEX_GROWMODE_SPECIFIED 1
@wxFLEX_GROWMODE_ALL 2
@wxFONTENCODING_SYSTEM -1
@wxFONTENCODING_DEFAULT 0
@wxFONTENCODING_ISO8859_1 1
@wxFONTENCODING_ISO8859_2 2
@wxFONTENCODING_ISO8859_3 3
@wxFONTENCODING_ISO8859_4 4
@wxFONTENCODING_ISO8859_5 5
@wxFONTENCODING_ISO8859_6 6
@wxFONTENCODING_ISO8859_7 7
@wxFONTENCODING_ISO8859_8 8
@wxFONTENCODING_ISO8859_9 9
@wxFONTENCODING_ISO8859_10 10
@wxFONTENCODING_ISO8859_11 11
@wxFONTENCODING_ISO8859_12 12
@wxFONTENCODING_ISO8859_13 13
@wxFONTENCODING_ISO8859_14 14
@wxFONTENCODING_ISO8859_15 15
@wxFONTENCODING_ISO8859_MAX 16
@wxFONTENCODING_KOI8 17
@wxFONTENCODING_KOI8_U 18
@wxFONTENCODING_ALTERNATIVE 19
@wxFONTENCODING_BULGARIAN 20
@wxFONTENCODING_CP437 21
@wxFONTENCODING_CP850 22
@wxFONTENCODING_CP852 23
@wxFONTENCODING_CP855 24
@wxFONTENCODING_CP866 25
@wxFONTENCODING_CP874 26
@wxFONTENCODING_CP932 27
@wxFONTENCODING_CP936 28
@wxFONTENCODING_CP949 29
@wxFONTENCODING_CP950 30
@wxFONTENCODING_CP1250 31
@wxFONTENCODING_CP1251 32
@wxFONTENCODING_CP1252 33
@wxFONTENCODING_CP1253 34
@wxFONTENCODING_CP1254 35
@wxFONTENCODING_CP1255 36
@wxFONTENCODING_CP1256 37
@wxFONTENCODING_CP1257 38
@wxFONTENCODING_CP12_MAX 39
@wxFONTENCODING_UTF7 40
@wxFONTENCODING_UTF8 41
@wxFONTENCODING_EUC_JP 42
@wxFONTENCODING_UTF16BE 43
@wxFONTENCODING_UTF16LE 44
@wxFONTENCODING_UTF32BE 45
@wxFONTENCODING_UTF32LE 46
@wxFONTENCODING_MACROMAN 47
@wxFONTENCODING_MACJAPANESE 48
@wxFONTENCODING_MACCHINESETRAD 49
@wxFONTENCODING_MACKOREAN 50
@wxFONTENCODING_MACARABIC 51
@wxFONTENCODING_MACHEBREW 52
@wxFONTENCODING_MACGREEK 53
@wxFONTENCODING_MACCYRILLIC 54
@wxFONTENCODING_MACDEVANAGARI 55
@wxFONTENCODING_MACGURMUKHI 56
@wxFONTENCODING_MACGUJARATI 57
@wxFONTENCODING_MACORIYA 58
@wxFONTENCODING_MACBENGALI 59
@wxFONTENCODING_MACTAMIL 60
@wxFONTENCODING_MACTELUGU 61
@wxFONTENCODING_MACKANNADA 62
@wxFONTENCODING_MACMALAJALAM 63
@wxFONTENCODING_MACSINHALESE 64
@wxFONTENCODING_MACBURMESE 65
@wxFONTENCODING_MACKHMER 66
@wxFONTENCODING_MACTHAI 67
@wxFONTENCODING_MACLAOTIAN 68
@wxFONTENCODING_MACGEORGIAN 69
@wxFONTENCODING_MACARMENIAN 70
@wxFONTENCODING_MACCHINESESIMP 71
@wxFONTENCODING_MACTIBETAN 72
@wxFONTENCODING_MACMONGOLIAN 73
@wxFONTENCODING_MACETHIOPIC 74
@wxFONTENCODING_MACCENTRALEUR 75
@wxFONTENCODING_MACVIATNAMESE 76
@wxFONTENCODING_MACARABICEXT 77
@wxFONTENCODING_MACSYMBOL 78
@wxFONTENCODING_MACDINGBATS 79
@wxFONTENCODING_MACTURKISH 80
@wxFONTENCODING_MACCROATIAN 81
@wxFONTENCODING_MACICELANDIC 82
@wxFONTENCODING_MACROMANIAN 83
@wxFONTENCODING_MACCELTIC 84
@wxFONTENCODING_MACGAELIC 85
@wxFONTENCODING_MACKEYBOARD 86
@wxFONTENCODING_MAX 87
@wxFONTENCODING_MACMIN 47
@wxFONTENCODING_MACMAX 86
# Endianness-dependent aliases, resolved at runtime in the original binding:
# wxFONTENCODING_UTF16 -> wxFONTENCODING_UTF16BE (43) or wxFONTENCODING_UTF16LE (44)
# wxFONTENCODING_UTF32 -> wxFONTENCODING_UTF32BE (45) or wxFONTENCODING_UTF32LE (46)
# wxFONTENCODING_UNICODE -> same value as wxFONTENCODING_UTF32
@wxFONTENCODING_GB2312 28
@wxFONTENCODING_BIG5 30
@wxFONTENCODING_SHIFT_JIS 27
@wxFONTFAMILY_DEFAULT 70
@wxFONTFAMILY_DECORATIVE 71
@wxFONTFAMILY_ROMAN 72
@wxFONTFAMILY_SCRIPT 73
@wxFONTFAMILY_SWISS 74
@wxFONTFAMILY_MODERN 75
@wxFONTFAMILY_TELETYPE 76
# wxTELETYPE+1 -> 77
@wxFONTFAMILY_MAX 77
@wxFONTFAMILY_UNKNOWN 77
@wxFONTSTYLE_NORMAL 90
@wxFONTSTYLE_ITALIC 93
@wxFONTSTYLE_SLANT 94
# wxSLANT+1 -> 95
@wxFONTSTYLE_MAX 95
@wxFONTWEIGHT_NORMAL 90
@wxFONTWEIGHT_LIGHT 91
@wxFONTWEIGHT_BOLD 92
# wxBOLD+1 -> 93
@wxFONTWEIGHT_MAX 93
@wxCENTRE 1
@wxCENTER 1
@wxHT_NOWHERE 0
@wxHT_SCROLLBAR_FIRST 0
# wxHT_NOWHERE+1 -> 1
@wxHT_SCROLLBAR_ARROW_LINE_1 1
# wxHT_NOWHERE+2 -> 2
@wxHT_SCROLLBAR_ARROW_LINE_2 2
# wxHT_NOWHERE+3 -> 3
@wxHT_SCROLLBAR_ARROW_PAGE_1 3
# wxHT_NOWHERE+4 -> 4
@wxHT_SCROLLBAR_ARROW_PAGE_2 4
# wxHT_NOWHERE+5 -> 5
@wxHT_SCROLLBAR_THUMB 5
# wxHT_NOWHERE+6 -> 6
@wxHT_SCROLLBAR_BAR_1 6
# wxHT_NOWHERE+7 -> 7
@wxHT_SCROLLBAR_BAR_2 7
# wxHT_NOWHERE+8 -> 8
@wxHT_SCROLLBAR_LAST 8
# wxHT_NOWHERE+9 -> 9
@wxHT_WINDOW_OUTSIDE 9
# wxHT_NOWHERE+10 -> 10
@wxHT_WINDOW_INSIDE 10
# wxHT_NOWHERE+11 -> 11
@wxHT_WINDOW_VERT_SCROLLBAR 11
# wxHT_NOWHERE+12 -> 12
@wxHT_WINDOW_HORZ_SCROLLBAR 12
# wxHT_NOWHERE+13 -> 13
@wxHT_WINDOW_CORNER 13
# wxHT_NOWHERE+14 -> 14
@wxHT_MAX 14
@wxIDLE_PROCESS_ALL 0
@wxIDLE_PROCESS_SPECIFIED 1
@wxITEM_SEPARATOR -1
@wxITEM_NORMAL 0
@wxITEM_CHECK 1
@wxITEM_RADIO 2
@wxITEM_MAX 3
@wxK_BACK 8
@wxK_TAB 9
@wxK_RETURN 13
@wxK_ESCAPE 27
@wxK_SPACE 32
@wxK_DELETE 127
@wxK_START 300
@wxK_LBUTTON 301
@wxK_RBUTTON 302
@wxK_CANCEL 303
@wxK_MBUTTON 304
@wxK_CLEAR 305
@wxK_SHIFT 306
@wxK_ALT 307
@wxK_CONTROL 308
@wxK_MENU 309
@wxK_PAUSE 310
@wxK_CAPITAL 311
@wxK_END 312
@wxK_HOME 313
@wxK_LEFT 314
@wxK_UP 315
@wxK_RIGHT 316
@wxK_DOWN 317
@wxK_SELECT 318
@wxK_PRINT 319
@wxK_EXECUTE 320
@wxK_SNAPSHOT 321
@wxK_INSERT 322
@wxK_HELP 323
@wxK_NUMPAD0 324
@wxK_NUMPAD1 325
@wxK_NUMPAD2 326
@wxK_NUMPAD3 327
@wxK_NUMPAD4 328
@wxK_NUMPAD5 329
@wxK_NUMPAD6 330
@wxK_NUMPAD7 331
@wxK_NUMPAD8 332
@wxK_NUMPAD9 333
@wxK_MULTIPLY 334
@wxK_ADD 335
@wxK_SEPARATOR 336
@wxK_SUBTRACT 337
@wxK_DECIMAL 338
@wxK_DIVIDE 339
@wxK_F1 340
@wxK_F2 341
@wxK_F3 342
@wxK_F4 343
@wxK_F5 344
@wxK_F6 345
@wxK_F7 346
@wxK_F8 347
@wxK_F9 348
@wxK_F10 349
@wxK_F11 350
@wxK_F12 351
@wxK_F13 352
@wxK_F14 353
@wxK_F15 354
@wxK_F16 355
@wxK_F17 356
@wxK_F18 357
@wxK_F19 358
@wxK_F20 359
@wxK_F21 360
@wxK_F22 361
@wxK_F23 362
@wxK_F24 363
@wxK_NUMLOCK 364
@wxK_SCROLL 365
@wxK_PAGEUP 366
@wxK_PAGEDOWN 367
@wxK_NUMPAD_SPACE 368
@wxK_NUMPAD_TAB 369
@wxK_NUMPAD_ENTER 370
@wxK_NUMPAD_F1 371
@wxK_NUMPAD_F2 372
@wxK_NUMPAD_F3 373
@wxK_NUMPAD_F4 374
@wxK_NUMPAD_HOME 375
@wxK_NUMPAD_LEFT 376
@wxK_NUMPAD_UP 377
@wxK_NUMPAD_RIGHT 378
@wxK_NUMPAD_DOWN 379
@wxK_NUMPAD_PAGEUP 380
@wxK_NUMPAD_PAGEDOWN 381
@wxK_NUMPAD_END 382
@wxK_NUMPAD_BEGIN 383
@wxK_NUMPAD_INSERT 384
@wxK_NUMPAD_DELETE 385
@wxK_NUMPAD_EQUAL 386
@wxK_NUMPAD_MULTIPLY 387
@wxK_NUMPAD_ADD 388
@wxK_NUMPAD_SEPARATOR 389
@wxK_NUMPAD_SUBTRACT 390
@wxK_NUMPAD_DECIMAL 391
@wxK_NUMPAD_DIVIDE 392
@wxK_WINDOWS_LEFT 393
@wxK_WINDOWS_RIGHT 394
@wxK_WINDOWS_MENU 395
@wxK_COMMAND 396
@wxK_SPECIAL1 193
@wxK_SPECIAL2 194
@wxK_SPECIAL3 195
@wxK_SPECIAL4 196
@wxK_SPECIAL5 197
@wxK_SPECIAL6 198
@wxK_SPECIAL7 199
@wxK_SPECIAL8 200
@wxK_SPECIAL9 201
@wxK_SPECIAL10 202
@wxK_SPECIAL11 203
@wxK_SPECIAL12 204
@wxK_SPECIAL13 205
@wxK_SPECIAL14 206
@wxK_SPECIAL15 207
@wxK_SPECIAL16 208
@wxK_SPECIAL17 209
@wxK_SPECIAL18 210
@wxK_SPECIAL19 211
@wxK_SPECIAL20 212
@wxMOD_NONE 0
@wxMOD_ALT 1
@wxMOD_CONTROL 2
# wxMOD_ALT | wxMOD_CONTROL -> 3
@wxMOD_ALTGR 3
@wxMOD_SHIFT 4
@wxMOD_META 8
@wxMOD_WIN 8
# Platform-dependent alias, resolved at runtime in the original binding:
# wxMOD_CMD -> wxMOD_META (8) on macOS, wxMOD_CONTROL (2) elsewhere
@wxMOD_ALL 65535
@wxKEY_NONE 0
@wxKEY_INTEGER 1
@wxKEY_STRING 2
@wxKILL_OK 0
@wxKILL_BAD_SIGNAL 1
@wxKILL_ACCESS_DENIED 2
@wxKILL_NO_PROCESS 3
@wxKILL_ERROR 4
@wxKILL_NOCHILDREN 0
@wxKILL_CHILDREN 1
@wxLAYOUT_NONE 0
@wxLAYOUT_TOP 1
@wxLAYOUT_LEFT 2
@wxLAYOUT_RIGHT 3
@wxLAYOUT_BOTTOM 4
@wxLayout_Default 0
@wxLayout_LeftToRight 1
@wxLayout_RightToLeft 2
@wxLAYOUT_HORIZONTAL 0
@wxLAYOUT_VERTICAL 1
@wxLIST_FORMAT_LEFT 0
@wxLIST_FORMAT_RIGHT 1
@wxLIST_FORMAT_CENTRE 2
@wxLIST_FORMAT_CENTER 2
@wxHORIZONTAL 4
@wxVERTICAL 8
# wxVERTICAL | wxHORIZONTAL -> 12
@wxBOTH 12
@wxPAPER_NONE 0
@wxPAPER_LETTER 1
@wxPAPER_LEGAL 2
@wxPAPER_A4 3
@wxPAPER_CSHEET 4
@wxPAPER_DSHEET 5
@wxPAPER_ESHEET 6
@wxPAPER_LETTERSMALL 7
@wxPAPER_TABLOID 8
@wxPAPER_LEDGER 9
@wxPAPER_STATEMENT 10
@wxPAPER_EXECUTIVE 11
@wxPAPER_A3 12
@wxPAPER_A4SMALL 13
@wxPAPER_A5 14
@wxPAPER_B4 15
@wxPAPER_B5 16
@wxPAPER_FOLIO 17
@wxPAPER_QUARTO 18
@wxPAPER_10X14 19
@wxPAPER_11X17 20
@wxPAPER_NOTE 21
@wxPAPER_ENV_9 22
@wxPAPER_ENV_10 23
@wxPAPER_ENV_11 24
@wxPAPER_ENV_12 25
@wxPAPER_ENV_14 26
@wxPAPER_ENV_DL 27
@wxPAPER_ENV_C5 28
@wxPAPER_ENV_C3 29
@wxPAPER_ENV_C4 30
@wxPAPER_ENV_C6 31
@wxPAPER_ENV_C65 32
@wxPAPER_ENV_B4 33
@wxPAPER_ENV_B5 34
@wxPAPER_ENV_B6 35
@wxPAPER_ENV_ITALY 36
@wxPAPER_ENV_MONARCH 37
@wxPAPER_ENV_PERSONAL 38
@wxPAPER_FANFOLD_US 39
@wxPAPER_FANFOLD_STD_GERMAN 40
@wxPAPER_FANFOLD_LGL_GERMAN 41
@wxPAPER_ISO_B4 42
@wxPAPER_JAPANESE_POSTCARD 43
@wxPAPER_9X11 44
@wxPAPER_10X11 45
@wxPAPER_15X11 46
@wxPAPER_ENV_INVITE 47
@wxPAPER_LETTER_EXTRA 48
@wxPAPER_LEGAL_EXTRA 49
@wxPAPER_TABLOID_EXTRA 50
@wxPAPER_A4_EXTRA 51
@wxPAPER_LETTER_TRANSVERSE 52
@wxPAPER_A4_TRANSVERSE 53
@wxPAPER_LETTER_EXTRA_TRANSVERSE 54
@wxPAPER_A_PLUS 55
@wxPAPER_B_PLUS 56
@wxPAPER_LETTER_PLUS 57
@wxPAPER_A4_PLUS 58
@wxPAPER_A5_TRANSVERSE 59
@wxPAPER_B5_TRANSVERSE 60
@wxPAPER_A3_EXTRA 61
@wxPAPER_A5_EXTRA 62
@wxPAPER_B5_EXTRA 63
@wxPAPER_A2 64
@wxPAPER_A3_TRANSVERSE 65
@wxPAPER_A3_EXTRA_TRANSVERSE 66
@wxPAPER_DBL_JAPANESE_POSTCARD 67
@wxPAPER_A6 68
@wxPAPER_JENV_KAKU2 69
@wxPAPER_JENV_KAKU3 70
@wxPAPER_JENV_CHOU3 71
@wxPAPER_JENV_CHOU4 72
@wxPAPER_LETTER_ROTATED 73
@wxPAPER_A3_ROTATED 74
@wxPAPER_A4_ROTATED 75
@wxPAPER_A5_ROTATED 76
@wxPAPER_B4_JIS_ROTATED 77
@wxPAPER_B5_JIS_ROTATED 78
@wxPAPER_JAPANESE_POSTCARD_ROTATED 79
@wxPAPER_DBL_JAPANESE_POSTCARD_ROTATED 80
@wxPAPER_A6_ROTATED 81
@wxPAPER_JENV_KAKU2_ROTATED 82
@wxPAPER_JENV_KAKU3_ROTATED 83
@wxPAPER_JENV_CHOU3_ROTATED 84
@wxPAPER_JENV_CHOU4_ROTATED 85
@wxPAPER_B6_JIS 86
@wxPAPER_B6_JIS_ROTATED 87
@wxPAPER_12X11 88
@wxPAPER_JENV_YOU4 89
@wxPAPER_JENV_YOU4_ROTATED 90
@wxPAPER_P16K 91
@wxPAPER_P32K 92
@wxPAPER_P32KBIG 93
@wxPAPER_PENV_1 94
@wxPAPER_PENV_2 95
@wxPAPER_PENV_3 96
@wxPAPER_PENV_4 97
@wxPAPER_PENV_5 98
@wxPAPER_PENV_6 99
@wxPAPER_PENV_7 100
@wxPAPER_PENV_8 101
@wxPAPER_PENV_9 102
@wxPAPER_PENV_10 103
@wxPAPER_P16K_ROTATED 104
@wxPAPER_P32K_ROTATED 105
@wxPAPER_P32KBIG_ROTATED 106
@wxPAPER_PENV_1_ROTATED 107
@wxPAPER_PENV_2_ROTATED 108
@wxPAPER_PENV_3_ROTATED 109
@wxPAPER_PENV_4_ROTATED 110
@wxPAPER_PENV_5_ROTATED 111
@wxPAPER_PENV_6_ROTATED 112
@wxPAPER_PENV_7_ROTATED 113
@wxPAPER_PENV_8_ROTATED 114
@wxPAPER_PENV_9_ROTATED 115
@wxPAPER_PENV_10_ROTATED 116
@wxPRINTBIN_DEFAULT 0
@wxPRINTBIN_ONLYONE 1
@wxPRINTBIN_LOWER 2
@wxPRINTBIN_MIDDLE 3
@wxPRINTBIN_MANUAL 4
@wxPRINTBIN_ENVELOPE 5
@wxPRINTBIN_ENVMANUAL 6
@wxPRINTBIN_AUTO 7
@wxPRINTBIN_TRACTOR 8
@wxPRINTBIN_SMALLFMT 9
@wxPRINTBIN_LARGEFMT 10
@wxPRINTBIN_LARGECAPACITY 11
@wxPRINTBIN_CASSETTE 12
@wxPRINTBIN_FORMSOURCE 13
@wxPRINTBIN_USER 14
@wxPRINT_MODE_NONE 0
@wxPRINT_MODE_PREVIEW 1
@wxPRINT_MODE_FILE 2
@wxPRINT_MODE_PRINTER 3
@wxPRINT_MODE_STREAM 4
@wxPRINTER_NO_ERROR 0
@wxPRINTER_CANCELLED 1
@wxPRINTER_ERROR 2
@wxOutRegion 0
@wxPartRegion 1
@wxInRegion 2
@wxRGN_AND 0
@wxRGN_COPY 1
@wxRGN_DIFF 2
@wxRGN_OR 3
@wxRGN_XOR 4
@wxUnconstrained 0
@wxAsIs 1
@wxPercentOf 2
@wxAbove 3
@wxBelow 4
@wxLeftOf 5
@wxRightOf 6
@wxSameAs 7
@wxAbsolute 8
@wxSASH_STATUS_OK 0
@wxSASH_STATUS_OUT_OF_RANGE 1
@wxSASH_TOP 0
@wxSASH_RIGHT 1
@wxSASH_BOTTOM 2
@wxSASH_LEFT 3
@wxSASH_NONE 100
@wxSHUTDOWN_POWEROFF 0
@wxSHUTDOWN_REBOOT 1
@wxSIGNONE 0
@wxSIGHUP 1
@wxSIGINT 2
@wxSIGQUIT 3
@wxSIGILL 4
@wxSIGTRAP 5
@wxSIGABRT 6
@wxSIGIOT 6
# wxSIGABRT+1 -> 7
@wxSIGEMT 7
# wxSIGABRT+2 -> 8
@wxSIGFPE 8
# wxSIGABRT+3 -> 9
@wxSIGKILL 9
# wxSIGABRT+4 -> 10
@wxSIGBUS 10
# wxSIGABRT+5 -> 11
@wxSIGSEGV 11
# wxSIGABRT+6 -> 12
@wxSIGSYS 12
# wxSIGABRT+7 -> 13
@wxSIGPIPE 13
# wxSIGABRT+8 -> 14
@wxSIGALRM 14
# wxSIGABRT+9 -> 15
@wxSIGTERM 15
@wxCURSOR_NONE 0
@wxCURSOR_ARROW 1
@wxCURSOR_RIGHT_ARROW 2
@wxCURSOR_BULLSEYE 3
@wxCURSOR_CHAR 4
@wxCURSOR_CROSS 5
@wxCURSOR_HAND 6
@wxCURSOR_IBEAM 7
@wxCURSOR_LEFT_BUTTON 8
@wxCURSOR_MAGNIFIER 9
@wxCURSOR_MIDDLE_BUTTON 10
@wxCURSOR_NO_ENTRY 11
@wxCURSOR_PAINT_BRUSH 12
@wxCURSOR_PENCIL 13
@wxCURSOR_POINT_LEFT 14
@wxCURSOR_POINT_RIGHT 15
@wxCURSOR_QUESTION_ARROW 16
@wxCURSOR_RIGHT_BUTTON 17
@wxCURSOR_SIZENESW 18
@wxCURSOR_SIZENS 19
@wxCURSOR_SIZENWSE 20
@wxCURSOR_SIZEWE 21
@wxCURSOR_SIZING 22
@wxCURSOR_SPRAYCAN 23
@wxCURSOR_WAIT 24
@wxCURSOR_WATCH 25
@wxCURSOR_BLANK 26
@wxCURSOR_DEFAULT 27
@wxCURSOR_ARROWWAIT 28
@wxCURSOR_MAX 29
@wxSTRETCH_NOT 0
@wxSHRINK 4096
@wxGROW 8192
@wxEXPAND 8192
@wxSHAPED 16384
@wxFIXED_MINSIZE 32768
@wxTILE 49152
@wxADJUST_MINSIZE 0
@wxSYS_COLOUR_SCROLLBAR 0
@wxSYS_COLOUR_BACKGROUND 1
@wxSYS_COLOUR_DESKTOP 1
# wxSYS_COLOUR_BACKGROUND+1 -> 2
@wxSYS_COLOUR_ACTIVECAPTION 2
# wxSYS_COLOUR_BACKGROUND+2 -> 3
@wxSYS_COLOUR_INACTIVECAPTION 3
# wxSYS_COLOUR_BACKGROUND+3 -> 4
@wxSYS_COLOUR_MENU 4
# wxSYS_COLOUR_BACKGROUND+4 -> 5
@wxSYS_COLOUR_WINDOW 5
# wxSYS_COLOUR_BACKGROUND+5 -> 6
@wxSYS_COLOUR_WINDOWFRAME 6
# wxSYS_COLOUR_BACKGROUND+6 -> 7
@wxSYS_COLOUR_MENUTEXT 7
# wxSYS_COLOUR_BACKGROUND+7 -> 8
@wxSYS_COLOUR_WINDOWTEXT 8
# wxSYS_COLOUR_BACKGROUND+8 -> 9
@wxSYS_COLOUR_CAPTIONTEXT 9
# wxSYS_COLOUR_BACKGROUND+9 -> 10
@wxSYS_COLOUR_ACTIVEBORDER 10
# wxSYS_COLOUR_BACKGROUND+10 -> 11
@wxSYS_COLOUR_INACTIVEBORDER 11
# wxSYS_COLOUR_BACKGROUND+11 -> 12
@wxSYS_COLOUR_APPWORKSPACE 12
# wxSYS_COLOUR_BACKGROUND+12 -> 13
@wxSYS_COLOUR_HIGHLIGHT 13
# wxSYS_COLOUR_BACKGROUND+13 -> 14
@wxSYS_COLOUR_HIGHLIGHTTEXT 14
# wxSYS_COLOUR_BACKGROUND+14 -> 15
@wxSYS_COLOUR_BTNFACE 15
@wxSYS_COLOUR_3DFACE 15
# wxSYS_COLOUR_BTNFACE+1 -> 16
@wxSYS_COLOUR_BTNSHADOW 16
@wxSYS_COLOUR_3DSHADOW 16
# wxSYS_COLOUR_BTNSHADOW+1 -> 17
@wxSYS_COLOUR_GRAYTEXT 17
# wxSYS_COLOUR_BTNSHADOW+2 -> 18
@wxSYS_COLOUR_BTNTEXT 18
# wxSYS_COLOUR_BTNSHADOW+3 -> 19
@wxSYS_COLOUR_INACTIVECAPTIONTEXT 19
# wxSYS_COLOUR_BTNSHADOW+4 -> 20
@wxSYS_COLOUR_BTNHIGHLIGHT 20
@wxSYS_COLOUR_BTNHILIGHT 20
@wxSYS_COLOUR_3DHIGHLIGHT 20
@wxSYS_COLOUR_3DHILIGHT 20
# wxSYS_COLOUR_BTNHIGHLIGHT+1 -> 21
@wxSYS_COLOUR_3DDKSHADOW 21
# wxSYS_COLOUR_BTNHIGHLIGHT+2 -> 22
@wxSYS_COLOUR_3DLIGHT 22
# wxSYS_COLOUR_BTNHIGHLIGHT+3 -> 23
@wxSYS_COLOUR_INFOTEXT 23
# wxSYS_COLOUR_BTNHIGHLIGHT+4 -> 24
@wxSYS_COLOUR_INFOBK 24
# wxSYS_COLOUR_BTNHIGHLIGHT+5 -> 25
@wxSYS_COLOUR_LISTBOX 25
# wxSYS_COLOUR_BTNHIGHLIGHT+6 -> 26
@wxSYS_COLOUR_HOTLIGHT 26
# wxSYS_COLOUR_BTNHIGHLIGHT+7 -> 27
@wxSYS_COLOUR_GRADIENTACTIVECAPTION 27
# wxSYS_COLOUR_BTNHIGHLIGHT+8 -> 28
@wxSYS_COLOUR_GRADIENTINACTIVECAPTION 28
# wxSYS_COLOUR_BTNHIGHLIGHT+9 -> 29
@wxSYS_COLOUR_MENUHILIGHT 29
# wxSYS_COLOUR_BTNHIGHLIGHT+10 -> 30
@wxSYS_COLOUR_MENUBAR 30
# wxSYS_COLOUR_BTNHIGHLIGHT+11 -> 31
@wxSYS_COLOUR_MAX 31
@wxSYS_CAN_DRAW_FRAME_DECORATIONS 1
@wxSYS_CAN_ICONIZE_FRAME 2
@wxSYS_TABLET_PRESENT 3
@wxSYS_OEM_FIXED_FONT 10
@wxSYS_ANSI_FIXED_FONT 11
@wxSYS_ANSI_VAR_FONT 12
@wxSYS_SYSTEM_FONT 13
@wxSYS_DEVICE_DEFAULT_FONT 14
@wxSYS_DEFAULT_PALETTE 15
@wxSYS_SYSTEM_FIXED_FONT 16
@wxSYS_DEFAULT_GUI_FONT 17
@wxSYS_ICONTITLE_FONT 17
@wxSYS_MOUSE_BUTTONS 1
@wxSYS_BORDER_X 2
@wxSYS_BORDER_Y 3
@wxSYS_CURSOR_X 4
@wxSYS_CURSOR_Y 5
@wxSYS_DCLICK_X 6
@wxSYS_DCLICK_Y 7
@wxSYS_DRAG_X 8
@wxSYS_DRAG_Y 9
@wxSYS_EDGE_X 10
@wxSYS_EDGE_Y 11
@wxSYS_HSCROLL_ARROW_X 12
@wxSYS_HSCROLL_ARROW_Y 13
@wxSYS_HTHUMB_X 14
@wxSYS_ICON_X 15
@wxSYS_ICON_Y 16
@wxSYS_ICONSPACING_X 17
@wxSYS_ICONSPACING_Y 18
@wxSYS_WINDOWMIN_X 19
@wxSYS_WINDOWMIN_Y 20
@wxSYS_SCREEN_X 21
@wxSYS_SCREEN_Y 22
@wxSYS_FRAMESIZE_X 23
@wxSYS_FRAMESIZE_Y 24
@wxSYS_SMALLICON_X 25
@wxSYS_SMALLICON_Y 26
@wxSYS_HSCROLL_Y 27
@wxSYS_VSCROLL_X 28
@wxSYS_VSCROLL_ARROW_X 29
@wxSYS_VSCROLL_ARROW_Y 30
@wxSYS_VTHUMB_Y 31
@wxSYS_CAPTION_Y 32
@wxSYS_MENU_Y 33
@wxSYS_NETWORK_PRESENT 34
@wxSYS_PENWINDOWS_PRESENT 35
@wxSYS_SHOW_SOUNDS 36
@wxSYS_SWAP_BUTTONS 37
@wxSYS_SCREEN_NONE 0
@wxSYS_SCREEN_TINY 1
@wxSYS_SCREEN_PDA 2
@wxSYS_SCREEN_SMALL 3
@wxSYS_SCREEN_DESKTOP 4
@wxTEXT_ALIGNMENT_DEFAULT 0
@wxTEXT_ALIGNMENT_LEFT 1
@wxTEXT_ALIGNMENT_CENTRE 2
@wxTEXT_ALIGNMENT_CENTER 2
# wxTEXT_ALIGNMENT_CENTRE+1 -> 3
@wxTEXT_ALIGNMENT_RIGHT 3
# wxTEXT_ALIGNMENT_CENTRE+2 -> 4
@wxTEXT_ALIGNMENT_JUSTIFIED 4
@wxTE_HT_UNKNOWN -2
@wxTE_HT_BEFORE -1
@wxTE_HT_ON_TEXT 0
@wxTE_HT_BELOW 1
@wxTE_HT_BEYOND 2
@wxTOOL_STYLE_BUTTON 1
@wxTOOL_STYLE_SEPARATOR 2
@wxTOOL_STYLE_CONTROL 3
@wxTreeItemIcon_Normal 0
@wxTreeItemIcon_Selected 1
@wxTreeItemIcon_Expanded 2
@wxTreeItemIcon_SelectedExpanded 3
@wxTreeItemIcon_Max 4
@wxUPDATE_UI_NONE 0
@wxUPDATE_UI_RECURSE 1
@wxUPDATE_UI_FROMIDLE 2
@wxUPDATE_UI_PROCESS_ALL 0
@wxUPDATE_UI_PROCESS_SPECIFIED 1
@wxWINDOW_VARIANT_NORMAL 0
@wxWINDOW_VARIANT_SMALL 1
@wxWINDOW_VARIANT_MINI 2
@wxWINDOW_VARIANT_LARGE 3
@wxWINDOW_VARIANT_MAX 4
@wxXRC_USE_LOCALE 1
@wxXRC_NO_SUBCLASSING 2
@wxXRC_NO_RELOADING 4
# From "splitter.h"
@wxSP_BORDER 512
@wxSP_NO_XP_THEME 1024
@wxSP_3DBORDER 512
@wxSP_3DSASH 256
@wxSP_LIVE_UPDATE 128
@wxSP_PERMIT_UNSPLIT 64
@wxSP_NOSASH 16
@wxSP_NOBORDER 0
# wxSP_3DBORDER | wxSP_3DSASH -> 768
@wxSP_3D 768
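# Example (illustrative): these defines are plain integers, so style flags can
# be combined with the Bitwise operators, e.g.
#
#     import Bitwise
#     style = @wxSP_LIVE_UPDATE ||| @wxSP_PERMIT_UNSPLIT  # -> 192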
end
end
end
|
lib/wxDefines.ex
| 0.590071
| 0.521167
|
wxDefines.ex
|
starcoder
|
defmodule ExRabbit do
@moduledoc """
ExRabbit is a module that conveniently manages your connection to RabbitMQ and its channels for you.
## How it works
For each module given to act as a consumer, it will assert the exchange, queue, binding and qos according to the
output of the `config/0` callback on your module. Upon each message received, your module's `handler/2` callback is
called with the message and properties from RabbitMQ. An `AMQP.Channel` is reserved for each consumer in order to
prevent other consumer/publisher errors from affecting this consumer.
## Defining your module
Your module must implement the `ExRabbit` `@behaviour` as described
[here](https://hexdocs.pm/elixir/master/Module.html#content).
### `config/0`
This callback must return a keyword list dictating what options to use when setting up the consumer. Examples can be
found in the [examples](https://github.com/vinli/ex_rabbit/tree/master/examples) directory.
### `handler/2`
This callback must return an `{:ok, result}` or `{:error, reason}` tuple. If an `:ok` tuple is returned the message
will be acknowledged. Otherwise, if an `:error` tuple is returned, the message will be rejected. This behavior is
taken care of for you in `ExRabbit.Consumer.Consumer` from where your callback is called.
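A minimal sketch of a consumer module (the module name and the exchange/queue
options below are illustrative, not part of ExRabbit itself):

    defmodule MyApp.MyConsumer do
      @behaviour ExRabbit

      def config, do: {:ok, [exchange: "my_exchange", queue: "my_queue"]}

      def handler(message, _properties) do
        {:ok, [handled: message]}
      end
    end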
"""
alias ExRabbit.Publisher
@doc """
This callback must return a keyword list dictating what options to use when setting up the consumer. Examples can be
found in the [examples](https://github.com/vinli/ex_rabbit/tree/master/examples) directory.
"""
@callback config() :: {:ok, list()} | {:error, String.t}
@doc """
This callback must return an `{:ok, result}` or `{:error, reason}` tuple. If an `:ok` tuple is returned the message
will be acknowledged. Otherwise, if an `:error` tuple is returned, the message will be rejected. This behavior is
taken care of for you in `ExRabbit.Consumer.Consumer` from where your callback is called.
"""
@callback handler(map(), map()) :: {:ok, list(keyword())} | {:error, String.t}
@doc """
Publish a message to an exchange. Possible options are available from:
https://github.com/pma/amqp/blob/master/lib/amqp/basic.ex
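For example (exchange and routing key are illustrative):

    ExRabbit.publish("my_exchange", "my.routing_key", %{hello: "world"}, persistent: true)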
"""
@spec publish(String.t, String.t, map() | String.t, keyword()) :: :ok
def publish(exchange, routing_key, payload, options \\ [])
def publish(exchange, routing_key, payload, options),
do: Publisher.publish(exchange, routing_key, payload, options)
end
|
lib/ex_rabbit.ex
| 0.890103
| 0.555857
|
ex_rabbit.ex
|
starcoder
|
defmodule Pow.Store.Backend.MnesiaCache.Unsplit do
@moduledoc """
GenServer that handles network split recovery for
`Pow.Store.Backend.MnesiaCache`.
This should be run on node(s) that have the `Pow.Store.Backend.MnesiaCache`
GenServer running. It'll subscribe to the Mnesia system messages and listen
for `:inconsistent_database` system events. The first node to set the global
lock will find the island with the oldest node and restore that node's tables
into all the partitioned nodes.
If a table unrelated to Pow is also affected, an error will be logged and the
network will stay partitioned. If you don't mind potential data loss for any
of your tables in Mnesia, you can set `flush_tables: :all` to restore all the
affected tables from the oldest node in the cluster.
For better control, you can use
[`unsplit`](https://github.com/uwiger/unsplit) instead of this module.
## Usage
To start the GenServer, add it to your application `start/2` method:
defmodule MyApp.Application do
use Application
def start(_type, _args) do
children = [
MyApp.Repo,
MyAppWeb.Endpoint,
{Pow.Store.Backend.MnesiaCache, extra_db_nodes: {Node, :list, []}},
Pow.Store.Backend.MnesiaCache.Unsplit
]
opts = [strategy: :one_for_one, name: MyAppWeb.Supervisor]
Supervisor.start_link(children, opts)
end
# ...
end
## Auto initialize cluster
If nodes are lazily connected, a race condition can occur in which the
`Pow.Store.Backend.MnesiaCache` is running on each node without being
connected in a Mnesia cluster.
To ensure that cluster will automatically initialize,
`Pow.Store.Backend.MnesiaCache.Unsplit` will reset the most recent node's
Mnesia schema when connecting to another node or a cluster. This will only
occur if the Mnesia node has never been connected to the other node(s) and
the other node currently runs the Mnesia cache GenServer.
The `Pow.Store.Backend.MnesiaCache` GenServer will be restarted, using the same
`:extra_db_nodes` configuration as when it was first initialized. Therefore
it's important that an MFA is used, like `{Node, :list, []}`, for the auto
initialization to succeed.
Please be aware the reset of the Mnesia node will result in data loss for the
node.
## Strategy for multiple libraries using the Mnesia instance
It's strongly recommended to take into account any libraries that will be
using Mnesia for storage before using this module.
A common example would be a job queue, where a potential solution to prevent
data loss is to simply keep the job queue table on only one server instead of
replicating it among all nodes. When a network partition occurs, it won't be
part of the affected tables so this module can self-heal without the job
queue table set in `:flush_tables`.
There may still be data loss if nodes are lazily connected. Please read the
"Auto initialize cluster" section above.
## Initialization options
* `:flush_tables` - list of tables that may be flushed and restored from
the oldest node in the cluster. Defaults to `false`, in which case only the
MnesiaCache table will be flushed. Use `:all` if you want to flush all
affected tables. Be aware that this may cause data loss.
* `:auto_initialize_cluster` - a boolean value to automatically initialize
the Mnesia cluster by resetting the node Mnesia schema when new nodes are
connected, defaults to `true`.
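For example, to also allow a hypothetical `:my_job_queue` table to be flushed
and restored from the oldest node:

    children = [
      {Pow.Store.Backend.MnesiaCache, extra_db_nodes: {Node, :list, []}},
      {Pow.Store.Backend.MnesiaCache.Unsplit, flush_tables: [:my_job_queue]}
    ]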
"""
use GenServer
require Logger
alias Pow.Config
@mnesia_cache_tab Pow.Store.Backend.MnesiaCache
@spec start_link(Config.t()) :: GenServer.on_start()
def start_link(config) do
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
# Callbacks
@impl true
@spec init(Config.t()) :: {:ok, map()}
def init(config) do
{:ok, _node} = :mnesia.subscribe(:system)
:ok = :net_kernel.monitor_nodes(true)
{:ok, %{config: config}}
end
@impl true
@spec handle_info({:nodeup | :nodedown, atom()}, map()) :: {:noreply, map()}
def handle_info({:nodeup, node}, %{config: config} = state) do
:global.trans({__MODULE__, self()}, fn -> autoinit(node, config) end)
{:noreply, state}
end
@impl true
@spec handle_info({:mnesia_system_event, {:inconsistent_database, any(), any()}} | any(), map()) :: {:noreply, map()}
def handle_info({:mnesia_system_event, {:inconsistent_database, _context, node}}, %{config: config} = state) do
:global.trans({__MODULE__, self()}, fn -> autoheal(node, config) end)
{:noreply, state}
end
@impl true
def handle_info(_event, state) do
{:noreply, state}
end
defp autoinit(node, config) do
cond do
Config.get(config, :auto_initialize_cluster, true) != true ->
:ok
node in :mnesia.system_info(:db_nodes) ->
:ok
is_nil(:rpc.call(node, Process, :whereis, [Pow.Store.Backend.MnesiaCache])) ->
:ok
true ->
do_autoinit(node, config)
end
end
defp do_autoinit(node, config) do
local_cluster_nodes = :mnesia.system_info(:running_db_nodes)
remote_cluster_nodes = :rpc.call(node, :mnesia, :system_info, [:running_db_nodes])
case {local_cluster_nodes, remote_cluster_nodes} do
{[_local_node], [_remote_node]} ->
Logger.info("Connection to #{inspect node} established with no mnesia cluster found for either #{inspect node()} or #{inspect node}")
{local_node_uptime, _} = :erlang.statistics(:wall_clock)
{remote_node_uptime, _} = :rpc.call(node, :erlang, :statistics, [:wall_clock])
if local_node_uptime < remote_node_uptime do
reset_node(node, config)
else
Logger.info("Skipping reset for #{inspect node()} as #{inspect node} is the most recent node")
end
{[_local_node], _remote_cluster_nodes} ->
Logger.info("Connection to #{inspect node} established with no mnesia cluster running on #{inspect node()}")
reset_node(node, config)
{_local_cluster_nodes, _remote_cluster_node_or_nodes} ->
Logger.info("Connection to #{inspect node} established with #{inspect node()} already being part of a mnesia cluster")
end
end
defp reset_node(node, _config) do
Logger.warn("Resetting mnesia on #{inspect node()} and restarting the mnesia cache to connect to #{inspect node}")
:mnesia.stop()
:mnesia.delete_schema([node()])
Process.exit(Process.whereis(Pow.Store.Backend.MnesiaCache), :kill)
end
defp autoheal(node, config) do
:running_db_nodes
|> :mnesia.system_info()
|> Enum.member?(node)
|> case do
true ->
Logger.info("The node #{inspect node} has already been healed and joined the mnesia cluster")
:ok
false ->
Logger.warn("Detected a netsplit in the mnesia cluster with node #{inspect node}")
heal(node, config)
end
end
defp heal(node, config) do
node
|> affected_tables()
|> force_reload(node, config)
end
defp affected_tables(node) do
:tables
|> :mnesia.system_info()
|> List.delete(:schema)
|> List.foldl([], fn table, acc ->
nodes = get_all_nodes_for_table(table)
is_shared = node in nodes && node() in nodes
case is_shared do
true -> [table | acc]
false -> acc
end
end)
end
defp get_all_nodes_for_table(table) do
[:ram_copies, :disc_copies, :disc_only_copies]
|> Enum.map(&:mnesia.table_info(table, &1))
|> Enum.concat()
end
defp force_reload(tables, node, config) do
flushable_tables =
case Config.get(config, :flush_tables, false) do
false -> [@mnesia_cache_tab]
:all -> tables
tables -> Enum.uniq([@mnesia_cache_tab | tables])
end
maybe_force_reload(tables, flushable_tables, node)
end
defp maybe_force_reload(tables, flushable_tables, node) do
case tables -- flushable_tables do
[] ->
do_force_reload(tables, node)
unflushable_tables ->
Logger.error("Can't force reload unexpected tables #{inspect unflushable_tables} to heal #{inspect node}")
{:error, {:unexpected_tables, tables}}
end
end
defp do_force_reload(tables, node) do
[master_nodes, nodes] = sorted_cluster_islands(node)
for node <- nodes do
:stopped = :rpc.call(node, :mnesia, :stop, [])
for table <- tables, do: :ok = :rpc.call(node, :mnesia, :set_master_nodes, [table, master_nodes])
:ok = :rpc.block_call(node, :mnesia, :start, [])
:ok = :rpc.call(node, :mnesia, :wait_for_tables, [tables, :timer.seconds(15)])
Logger.info("The node #{inspect node} has been healed and joined the mnesia cluster #{inspect master_nodes}")
end
:ok
end
defp sorted_cluster_islands(node) do
island_a = :mnesia.system_info(:running_db_nodes)
island_b = :rpc.call(node, :mnesia, :system_info, [:running_db_nodes])
Enum.sort([island_a, island_b], &older?/2)
end
defp older?(island_a, island_b) do
all_nodes = get_all_nodes_for_table(@mnesia_cache_tab)
island_nodes = Enum.concat(island_a, island_b)
oldest_node = all_nodes |> Enum.reverse() |> Enum.find(&(&1 in island_nodes))
oldest_node in island_a
end
end
|
lib/pow/store/backend/mnesia_cache/unsplit.ex
| 0.836454
| 0.536434
|
unsplit.ex
|
starcoder
|
defmodule ControlNode.Namespace.Workflow do
@moduledoc false
alias ControlNode.{Namespace, Release}
defmodule Data do
@moduledoc false
@type t :: %__MODULE__{
namespace_spec: Namespace.Spec.t(),
release_spec: Release.Spec.t(),
release_state: Release.State.t(),
health_check_timer: reference()
}
defstruct namespace_spec: nil,
release_spec: nil,
release_state: nil,
deploy_attempts: 0,
health_check_timer: nil
end
def init("CONNECT") do
actions = [
{:change_callback_module, ControlNode.Namespace.Initialize},
{:next_event, :internal, :connect_release_state}
]
{:initialize, actions}
end
def init("OBSERVE") do
actions = [
{:change_callback_module, ControlNode.Namespace.Initialize},
{:next_event, :internal, :observe_release_state}
]
{:initialize, actions}
end
def init("MANAGE") do
actions = [
{:change_callback_module, ControlNode.Namespace.Initialize},
{:next_event, :internal, :load_release_state}
]
{:initialize, actions}
end
@doc """
When there is no release running on any host for a given namespace, the workflow
switches to managing the namespace and waits for a new deployment request.
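For example (the return value follows directly from the first clause below):

    next(:initialize, :not_running, nil)
    #=> {:manage, [{:change_callback_module, ControlNode.Namespace.Manage}]}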
"""
def next(:initialize, :not_running, _) do
actions = [{:change_callback_module, Namespace.Manage}]
{:manage, actions}
end
def next(:initialize, :partially_running, version) do
actions = [
{:change_callback_module, Namespace.Deploy},
{:next_event, :internal, {:ensure_running, version}}
]
{:deploy, actions}
end
def next(:initialize, :running, _version) do
actions = [
{:change_callback_module, Namespace.Manage},
{:next_event, :internal, :schedule_health_check}
]
{:manage, actions}
end
def next(:initialize, :connect_release_state, _) do
actions = [{:change_callback_module, Namespace.Connect}]
{:connect, actions}
end
def next(:initialize, :observe_release_state, _) do
actions = [{:change_callback_module, Namespace.Observe}]
{:observe, actions}
end
def next(:deploy, :executed, {"OBSERVE", _version}) do
actions = [
{:change_callback_module, Namespace.Initialize},
{:next_event, :internal, :observe_release_state}
]
{:initialize, actions}
end
def next(:deploy, :executed, {"MANAGE", version}), do: initialize_with_version(version)
def next(state_name, :trigger_deployment, version) when state_name in [:observe, :manage] do
actions = [
{:change_callback_module, Namespace.Deploy},
{:next_event, :internal, {:ensure_running, version}}
]
{:deploy, actions}
end
# When the release has been stopped, the FSM state remains the same while it
# awaits a new deploy request
def next(:manage, :release_stopped, _) do
{:manage, []}
end
def next(:manage, :nodedown, version), do: initialize_with_version(version)
defp initialize_with_version(version) do
actions = [
{:change_callback_module, Namespace.Initialize},
{:next_event, :internal, {:load_release_state, version}}
]
{:initialize, actions}
end
end
|
lib/control_node/namespace/workflow.ex
| 0.715225
| 0.483039
|
workflow.ex
|
starcoder
|
defmodule IndifferentAccess do
@moduledoc """
Transforms a map into a struct or map supporting indifferent access.
Primary intended usage is via `IndifferentAccess.Plug`, see docs there.
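A minimal sketch of the plug usage (the options mirror `indifferentize/2`; see
`IndifferentAccess.Plug` for the authoritative option list):

    plug IndifferentAccess.Plug, as: :map, strategy: :replace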
"""
alias IndifferentAccess.Params
@doc """
Returns a struct or map accessible by Atom keys, with several configuration options determining behavior. See the examples below.
## Examples
iex> IndifferentAccess.indifferentize(%{"schedulers" => "4"})
%IndifferentAccess.Params{params: %{"schedulers" => "4"}, opts: [as: :struct, strategy: :replace]}
iex> IndifferentAccess.indifferentize(%{"schedulers" => "4"}, as: :struct, strategy: :replace)[:schedulers]
"4"
iex> IndifferentAccess.indifferentize(%{"schedulers" => %{"tls" => "3"}}, as: :struct, strategy: :replace)[:schedulers][:tls]
"3"
iex> IndifferentAccess.indifferentize(%{"schedulers" => %{"tls" => "3"}}, as: :struct, strategy: :static)[:schedulers][:tls]
nil
iex> IndifferentAccess.indifferentize(%{"schedulers" => %{"tls" => "3"}}, as: :map, strategy: :replace)[:schedulers][:tls]
"3"
iex> IndifferentAccess.indifferentize(%{"schedulers" => %{"tls" => "3", "random_string" => "2"}}, as: :map, strategy: :replace)[:schedulers]
%{:tls => "3", "random_string" => "2"}
"""
def indifferentize(params, opts \\ [as: :struct, strategy: :replace]) when is_map(params) do
case opts[:as] do
:map -> indifferentize_map(params, opts)
:struct -> Params.new(params, opts)
end
end
@doc """
Returns a map with String keys replaced or supplemented by Atom keys where equivalent atoms exist.
## Examples
iex> IndifferentAccess.indifferentize_map(%{"schedulers" => "4"}, strategy: :replace)
%{schedulers: "4"}
iex> IndifferentAccess.indifferentize_map(%{"schedulers" => "4"}, strategy: :augment)
%{:schedulers => "4", "schedulers" => "4"}
iex> IndifferentAccess.indifferentize_map(%{"schedulers" => %{"tls" => "3", "others" => "2"}}, strategy: :replace)
%{schedulers: %{"others" => "2", :tls => "3"}}
iex> IndifferentAccess.indifferentize_map(%{"schedulers" => %{"tls" => "3", "others" => "2"}}, strategy: :static)
** (RuntimeError) `strategy: :static` is only valid within IndifferentAccess.Params struct, not when using `as: :map` option in plug or indifferentize_map directly
"""
def indifferentize_map(map, opts \\ [])
def indifferentize_map(map, opts) when is_map(map) do
if Map.get(map, :__struct__) do
map
else
map_keys = Map.keys(map)
Enum.reduce(map, %{}, fn
{key, value}, accum when is_binary(key) ->
indifferent_value = indifferentize_map(value, opts)
case opts[:strategy] do
:augment ->
if existing_atom(key),
do:
accum
|> Map.put_new(existing_atom(key), indifferent_value)
|> Map.put(key, indifferent_value),
else: Map.put(accum, key, indifferent_value)
:replace ->
if existing_atom(key) && existing_atom(key) not in map_keys,
do:
accum
|> Map.put_new(existing_atom(key), indifferent_value)
|> Map.delete(key),
else: Map.put(accum, key, indifferent_value)
:static ->
raise "`strategy: :static` is only valid within IndifferentAccess.Params struct," <>
" not when using `as: :map` option in plug or indifferentize_map directly"
end
{key, value}, accum ->
indifferent_value = indifferentize_map(value, opts)
Map.put(accum, key, indifferent_value)
end)
end
end
def indifferentize_map(list, opts) when is_list(list),
do: Enum.map(list, &indifferentize_map(&1, opts))
def indifferentize_map(other, _opts), do: other
defp existing_atom(key) do
String.to_existing_atom(key)
rescue
ArgumentError -> nil
end
end
|
lib/indifferent_access.ex
| 0.846308
| 0.458834
|
indifferent_access.ex
|
starcoder
|
defmodule ExDockerCompose.Subcommands do
@moduledoc """
A module that is the source of truth for the supported subcommands on `docker-compose`,
as well as how they should be used.
"""
@supported_subcommands [
:build, :bundle, :config, :create,
:down, :events, :exec, :help,
:images, :kill, :logs, :pause,
:port, :ps, :pull, :push,
:restart, :rm, :run, :scale,
:start, :stop, :top, :unpause, :up
]
@doc """
Get a list of all supported subcommands
## Examples
iex> ExDockerCompose.Subcommands.get_supported_subcommands()
[:build, :bundle, :config, :create,
:down, :events, :exec, :help,
:images, :kill, :logs, :pause,
:port, :ps, :pull, :push,
:restart, :rm, :run, :scale,
:start, :stop, :top, :unpause, :up]
"""
@spec get_supported_subcommands() :: subcommands :: List.t
def get_supported_subcommands do
@supported_subcommands
end
@doc """
Build the full command that should be run. This function parses the
arguments and builds a command-line command out of the subcommand and
the arguments and their parameters.
## Parameters
- `compose_bin` - The full path to the `docker-compose` binary
- `subcommand` - The subcommand that `docker-compose` will run.
- `compose_opts` - A list of parameters and their optional arguments used for the `docker-compose`
CLI itself - meaning not for the subcommand. For example: `:f` (`-f`) or `:no_ansi` (`--no-ansi`)
Every item on the `List` is either an atom (`:d`, `:timeout`) or a tuple where the first item is
the an `Atom` that is the paramter and the rest are arguments to the paramter (space separated).
For example: `[{:p, "project-name"}, {:f, "docker-compose.yml"}, :skip_hostname_check]`
- `opts` - A list of parameters and their optional arguments **for the subcommand**.
The structure is similar to the one described in `compose_opts`, but those will come
**after** the subcommand.
For example: `[:d, {:t, 10}]`.
## TODO
Both `opts` and `compose_opts` currently only support **one** argument per parameter, i.e.
`[:d, {:t, 10},...]` is ok while `[:d, {:t, 10, 20}]` is not.
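## Examples

The binary path below is illustrative:

    iex> ExDockerCompose.Subcommands.build_command("docker-compose", :up, [{:f, "docker-compose.yml"}], [:d])
    {:ok, "docker-compose -f docker-compose.yml up -d"}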
"""
@spec build_command(compose_bin :: String.t, subcommand :: Atom.t, compose_opts :: List.t, opts :: List.t)
:: {:ok, full_command :: String.t} | :no_such_command
def build_command(compose_bin, subcommand, compose_opts, opts) do
case Enum.member?(@supported_subcommands, subcommand) do
true -> {:ok, tidy_command([compose_bin, build_cli_params(compose_opts), subcommand, build_cli_params(opts)])}
false -> :no_such_command
end
end
@doc false
@spec tidy_command(items :: List.t) :: full_command :: String.t
defp tidy_command([compose_exec | items]) do
Enum.reduce(items, compose_exec, fn
(nil, full_command) -> full_command
(item, full_command) -> "#{full_command} #{item}"
end)
end
@doc false
defp build_cli_params([]) do
nil
end
defp build_cli_params(opts) do
cli_param = fn(param) ->
string_param = String.replace(Atom.to_string(param), "_", "-")
case String.length(string_param) do
1 -> "-#{string_param}"
_ -> "--#{string_param}"
end
end
opts |>
Enum.map_join(" ", fn
{param, argument} -> "#{cli_param.(param)} #{argument}"
param -> cli_param.(param)
end)
end
end
|
lib/ex_docker_compose/subcommands.ex
| 0.692434
| 0.588771
|
subcommands.ex
|
starcoder
|
defmodule Fixerio do
@moduledoc """
Provides access to a Fixer.io-like API with support for multiple endpoints
to use as fallbacks.
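A sketch of the expected configuration (the key names are assumed from how
`Fixerio.Config` is used in this module; URLs and keys are illustrative):

    config :fixerio,
      default_api: [url: "http://data.fixer.io", api_key: "MY_KEY"],
      fallback_apis: [
        [url: "https://my-fallback.example.com", api_key: nil]
      ]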
"""
import Fixerio.API
alias Fixerio.Config
@doc """
Gets the latest conversion rates. Uses the `/latest` API endpoint.
parameters:
* `options` (optional): Map with optional keys `base` and `symbols`
Returns:
`{:ok, %{base: "USD", date: "2019-08-09", rates: %{"AUD" => 1.46, "CZK" => 23.064, "IDR" => 14189.998, ...}}}` in case of success
`{:error, %{errors: [...]}}` in case of failure
## Examples
iex> Fixerio.get_latest %{base: :USD}
{:ok,
%{
base: "USD",
date: "2019-08-09",
rates: %{
"AUD" => 1.469190927,
"CZK" => 23.0648330059,
"IDR" => 14189.9982139668,
.
.
.
}
}}
iex> Fixerio.get_latest %{base: :USD}
{:error,
%{
errors: [
%{reason: "not found", url: "https://my-api.herokuapp.com"},
%{
reason: "Error. base_currency_access_restricted",
url: "http://data.fixer.io"
}
]
}}
iex> Fixerio.get_latest(%{symbols: [:AUD, :BRL, :CNY]})
{:ok,
%{
base: "USD",
date: "2019-08-09",
rates: %{"AUD" => 1.469190927, "BRL" => 3.9287372745, "CNY" => 7.0583139846}
}}
"""
def get_latest(options \\ %{base: :EUR}) do
base = if options[:base], do: options[:base], else: :USD
options = Map.put(options, :base, base)
request_with_fallback("latest", options)
end
@doc """
Coverts the given amount from the `from` currency to `to` currency, optionally
based on rate of a specific day.
parameters:
* `amount` (required): Integer - the amount to be converted
* `from` (required): Atom - current currency of the amount
* `to` (required): Atom - converted currency of the amount
* `date` (optional): Date - rate of the day to be used
Returns:
below data format in case of success
```
{:ok,
%{
date: "2019-08-09",
info: %{rate: 0.8930166101, timestamp: 1565380266},
query: %{amount: 90000, from: :USD, to: :EUR},
result: 80371.494909
}
}
```
or
`{:error, "Failed. No API available."}` in case of failure
## Examples
iex> Fixerio.convert(90000, :USD, :EUR)
{:ok,
%{
date: "2019-08-09",
info: %{rate: 0.8930166101, timestamp: 1565380266},
query: %{amount: 90000, from: :USD, to: :EUR},
result: 80371.494909
}}
"""
def convert(amount, from, to, date \\ nil) do
method = if date, do: Date.to_string(date), else: "latest"
options = %{base: from, symbols: [to]}
to_in_str = Atom.to_string(to)
case request_with_fallback(method, options) do
{:ok, %{date: date, rates: %{^to_in_str => rate}}} ->
data = %{
query: %{
from: from,
to: to,
amount: amount
},
info: %{
timestamp: DateTime.utc_now |> DateTime.to_unix(),
rate: rate
},
date: date,
result: amount * rate
}
{:ok, data}
{:error, _} ->
{:error, "Failed. No API available."}
end
end
@doc """
Gets the conversion rates on a day in past.
parameters:
* `date` (required): Date
* `options` (optional): Map with optional keys `base` and `symbols`
Returns:
`{:ok, %{base: "USD", date: "2019-08-09", rates: %{"AUD" => 1.46, "CZK" => 23.064, "IDR" => 14189.998, ...}}}` in case of success
`{:error, %{errors: [...]}}` in case of failure
## Examples
iex> Fixerio.historical_data(Date.utc_today)
{:ok,
%{
base: "EUR",
date: "2019-08-09",
rates: %{
"AUD" => 1.6452,
"BGN" => 1.9558,
"BRL" => 4.3994,
.
.
.
}
}}
"""
def historical_data(date, options \\ %{base: :EUR}) do
base = if options[:base], do: options[:base], else: :USD
options = Map.put(options, :base, base)
date = Date.to_string(date)
request_with_fallback(date, options)
end
def get_currencies do
# Hard-coded currencies list.
# This does not include the extra currency data provided by fixer.io.
[:USD,:AED,:ARS,:AUD,:BGN,:BRL,:BSD,:CAD,:CHF,:CLP,:CNY,:COP,:CZK,:DKK,:DOP,:EGP,:EUR,:FJD,:GBP,:GTQ,:HKD,:HRK,:HUF,:IDR,:ILS,:INR,:ISK,:JPY,:KRW,:KZT,:MXN,:MYR,:NOK,:NZD,:PAB,:PEN,:PHP,:PKR,:PLN,:PYG,:RON,:RUB,:SAR,:SEK,:SGD,:THB,:TRY,:TWD,:UAH,:UYU,:VND,:ZAR]
end
defp request_with_fallback(method, options) do
case request(method, options) do
{:ok, data} ->
{:ok, data}
{:error, reason} ->
error_lists = [%{url: Config.default_api[:url], reason: reason}]
result = Enum.reduce_while(Config.fallback_apis, [], fn api, err_list ->
options = Map.merge(options, %{url: api[:url], api_key: api[:api_key]})
case request(method, options) do
{:ok, data} ->
{:halt, {:ok, data}}
{:error, reason} ->
err_list = err_list ++ [%{url: options[:url], reason: reason}]
{:cont, err_list}
end
end)
case result do
{:ok, data} ->
{:ok, data}
[h | t] ->
errors = error_lists ++ [h | t]
{:error, %{errors: errors}}
_ ->
{:error, %{errors: [%{url: "all", reason: "Something went wrong"}]}}
end
end
end
end
|
lib/fixerio.ex
| 0.895808
| 0.548613
|
fixerio.ex
|
starcoder
|
defmodule Google.Bigtable.V2.ReadRowsRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t(),
rows: Google.Bigtable.V2.RowSet.t(),
filter: Google.Bigtable.V2.RowFilter.t(),
rows_limit: integer
}
defstruct [:table_name, :app_profile_id, :rows, :filter, :rows_limit]
field(:table_name, 1, type: :string)
field(:app_profile_id, 5, type: :string)
field(:rows, 2, type: Google.Bigtable.V2.RowSet)
field(:filter, 3, type: Google.Bigtable.V2.RowFilter)
field(:rows_limit, 4, type: :int64)
end
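# Example (illustrative): a request struct built from these generated bindings:
#
#   %Google.Bigtable.V2.ReadRowsRequest{
#     table_name: "projects/my-project/instances/my-instance/tables/my-table",
#     rows_limit: 100
#   }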
defmodule Google.Bigtable.V2.ReadRowsResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
chunks: [Google.Bigtable.V2.ReadRowsResponse.CellChunk.t()],
last_scanned_row_key: String.t()
}
defstruct [:chunks, :last_scanned_row_key]
field(:chunks, 1, repeated: true, type: Google.Bigtable.V2.ReadRowsResponse.CellChunk)
field(:last_scanned_row_key, 2, type: :bytes)
end
defmodule Google.Bigtable.V2.ReadRowsResponse.CellChunk do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
row_status: {atom, any},
row_key: String.t(),
family_name: Google.Protobuf.StringValue.t(),
qualifier: Google.Protobuf.BytesValue.t(),
timestamp_micros: integer,
labels: [String.t()],
value: String.t(),
value_size: integer
}
defstruct [
:row_status,
:row_key,
:family_name,
:qualifier,
:timestamp_micros,
:labels,
:value,
:value_size
]
oneof(:row_status, 0)
field(:row_key, 1, type: :bytes)
field(:family_name, 2, type: Google.Protobuf.StringValue)
field(:qualifier, 3, type: Google.Protobuf.BytesValue)
field(:timestamp_micros, 4, type: :int64)
field(:labels, 5, repeated: true, type: :string)
field(:value, 6, type: :bytes)
field(:value_size, 7, type: :int32)
field(:reset_row, 8, type: :bool, oneof: 0)
field(:commit_row, 9, type: :bool, oneof: 0)
end
defmodule Google.Bigtable.V2.SampleRowKeysRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t()
}
defstruct [:table_name, :app_profile_id]
field(:table_name, 1, type: :string)
field(:app_profile_id, 2, type: :string)
end
defmodule Google.Bigtable.V2.SampleRowKeysResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
row_key: String.t(),
offset_bytes: integer
}
defstruct [:row_key, :offset_bytes]
field(:row_key, 1, type: :bytes)
field(:offset_bytes, 2, type: :int64)
end
defmodule Google.Bigtable.V2.MutateRowRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t(),
row_key: String.t(),
mutations: [Google.Bigtable.V2.Mutation.t()]
}
defstruct [:table_name, :app_profile_id, :row_key, :mutations]
field(:table_name, 1, type: :string)
field(:app_profile_id, 4, type: :string)
field(:row_key, 2, type: :bytes)
field(:mutations, 3, repeated: true, type: Google.Bigtable.V2.Mutation)
end
defmodule Google.Bigtable.V2.MutateRowResponse do
@moduledoc false
use Protobuf, syntax: :proto3
defstruct []
end
defmodule Google.Bigtable.V2.MutateRowsRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t(),
entries: [Google.Bigtable.V2.MutateRowsRequest.Entry.t()]
}
defstruct [:table_name, :app_profile_id, :entries]
field(:table_name, 1, type: :string)
field(:app_profile_id, 3, type: :string)
field(:entries, 2, repeated: true, type: Google.Bigtable.V2.MutateRowsRequest.Entry)
end
defmodule Google.Bigtable.V2.MutateRowsRequest.Entry do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
row_key: String.t(),
mutations: [Google.Bigtable.V2.Mutation.t()]
}
defstruct [:row_key, :mutations]
field(:row_key, 1, type: :bytes)
field(:mutations, 2, repeated: true, type: Google.Bigtable.V2.Mutation)
end
defmodule Google.Bigtable.V2.MutateRowsResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
entries: [Google.Bigtable.V2.MutateRowsResponse.Entry.t()]
}
defstruct [:entries]
field(:entries, 1, repeated: true, type: Google.Bigtable.V2.MutateRowsResponse.Entry)
end
defmodule Google.Bigtable.V2.MutateRowsResponse.Entry do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
index: integer,
status: Google.Rpc.Status.t()
}
defstruct [:index, :status]
field(:index, 1, type: :int64)
field(:status, 2, type: Google.Rpc.Status)
end
defmodule Google.Bigtable.V2.CheckAndMutateRowRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t(),
row_key: String.t(),
predicate_filter: Google.Bigtable.V2.RowFilter.t(),
true_mutations: [Google.Bigtable.V2.Mutation.t()],
false_mutations: [Google.Bigtable.V2.Mutation.t()]
}
defstruct [
:table_name,
:app_profile_id,
:row_key,
:predicate_filter,
:true_mutations,
:false_mutations
]
field(:table_name, 1, type: :string)
field(:app_profile_id, 7, type: :string)
field(:row_key, 2, type: :bytes)
field(:predicate_filter, 6, type: Google.Bigtable.V2.RowFilter)
field(:true_mutations, 4, repeated: true, type: Google.Bigtable.V2.Mutation)
field(:false_mutations, 5, repeated: true, type: Google.Bigtable.V2.Mutation)
end
defmodule Google.Bigtable.V2.CheckAndMutateRowResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
predicate_matched: boolean
}
defstruct [:predicate_matched]
field(:predicate_matched, 1, type: :bool)
end
defmodule Google.Bigtable.V2.ReadModifyWriteRowRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
table_name: String.t(),
app_profile_id: String.t(),
row_key: String.t(),
rules: [Google.Bigtable.V2.ReadModifyWriteRule.t()]
}
defstruct [:table_name, :app_profile_id, :row_key, :rules]
field(:table_name, 1, type: :string)
field(:app_profile_id, 4, type: :string)
field(:row_key, 2, type: :bytes)
field(:rules, 3, repeated: true, type: Google.Bigtable.V2.ReadModifyWriteRule)
end
defmodule Google.Bigtable.V2.ReadModifyWriteRowResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
row: Google.Bigtable.V2.Row.t()
}
defstruct [:row]
field(:row, 1, type: Google.Bigtable.V2.Row)
end
defmodule Google.Bigtable.V2.Bigtable.Service do
@moduledoc false
use GRPC.Service, name: "google.bigtable.v2.Bigtable"
rpc(:ReadRows, Google.Bigtable.V2.ReadRowsRequest, stream(Google.Bigtable.V2.ReadRowsResponse))
rpc(
:SampleRowKeys,
Google.Bigtable.V2.SampleRowKeysRequest,
stream(Google.Bigtable.V2.SampleRowKeysResponse)
)
rpc(:MutateRow, Google.Bigtable.V2.MutateRowRequest, Google.Bigtable.V2.MutateRowResponse)
rpc(
:MutateRows,
Google.Bigtable.V2.MutateRowsRequest,
stream(Google.Bigtable.V2.MutateRowsResponse)
)
rpc(
:CheckAndMutateRow,
Google.Bigtable.V2.CheckAndMutateRowRequest,
Google.Bigtable.V2.CheckAndMutateRowResponse
)
rpc(
:ReadModifyWriteRow,
Google.Bigtable.V2.ReadModifyWriteRowRequest,
Google.Bigtable.V2.ReadModifyWriteRowResponse
)
end
defmodule Google.Bigtable.V2.Bigtable.Stub do
@moduledoc false
use GRPC.Stub, service: Google.Bigtable.V2.Bigtable.Service
end
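# Illustrative usage sketch (not part of the generated code; the channel
# address and table name are assumptions, and the exact return shape of a
# streaming call depends on the grpc library version):
#
#   {:ok, channel} = GRPC.Stub.connect("bigtable.googleapis.com:443")
#
#   request = %Google.Bigtable.V2.ReadRowsRequest{
#     table_name: "projects/p/instances/i/tables/t",
#     rows_limit: 10
#   }
#
#   # ReadRows is a server-streaming RPC, so the stub yields a stream of
#   # ReadRowsResponse messages rather than a single reply.
#   {:ok, stream} = Google.Bigtable.V2.Bigtable.Stub.read_rows(channel, request)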
|
lib/grpc/data/bigtable.pb.ex
| 0.748444
| 0.462837
|
bigtable.pb.ex
|
starcoder
|
defmodule Mix.Tasks.Deps do
use Mix.Task
import Mix.Dep, only: [loaded: 1, format_dep: 1, format_status: 1, check_lock: 2]
@shortdoc "List dependencies and their status"
@moduledoc ~S"""
List all dependencies and their status.
Dependencies must be specified in the `mix.exs` file in one of
the following formats:
{ app, requirement }
{ app, opts }
{ app, requirement, opts }
Where:
* app is an atom
* requirement is a version requirement or a regular expression
* opts is a keyword list of options
Here are some examples:
{:foobar, git: "https://github.com/elixir-lang/foobar.git", tag: "0.1"}
To specify particular versions, regardless of the tag, do:
{:barbat, "~> 0.1", github: "elixir-lang/barbat"}
When using a package manager, options can be skipped:
{:pkgbaz, "~> 0.1"}
When using umbrella applications, one may also specify:
{:myapp, in_umbrella: true}
The dependencies versions are expected to follow Semantic Versioning
and the requirements must be specified as defined in the `Version`
module.
Below we provide a more detailed look into the available options.
## Mix options
* `:app` - When set to false, does not read the app file for this dependency
* `:env` - The environment to run the dependency on, defaults to :prod
* `:compile` - A command to compile the dependency, defaults to a mix,
rebar or make command
* `:optional` - The dependency is optional and used only to specify requirements
* `:only` - The dependency belongs only to the given environments, useful
when declaring dev- or test-only dependencies
* `:override` - If set to true the dependency will override any other
definitions of itself by other dependencies
## Git options (`:git`)
* `:git` - The git repository URI
* `:github` - A shortcut for specifying git repos from github, uses `git:`
* `:ref` - The reference to checkout (may be a branch, a commit sha or a tag)
* `:branch` - The git branch to checkout
* `:tag` - The git tag to checkout
* `:submodules` - When true, initialize submodules for the repo
## Path options (`:path`)
* `:path` - The path for the dependency
* `:in_umbrella` - When true, sets a path dependency pointing to "../#{app}",
sharing the same environment as the current application
## mix deps task
This task lists all dependencies in the following format:
* APP VERSION (SCM)
[locked at REF]
STATUS
It supports the following options:
* `--all` - check all dependencies, regardless of specified environment
"""
def run(args) do
Mix.Project.get! # Require the project to be available
{ opts, _, _ } = OptionParser.parse(args)
# Scope to the current env unless --all was given
loaded_opts = if opts[:all], do: [], else: [env: Mix.env]
shell = Mix.shell
lock = Mix.Dep.Lock.read
Enum.each loaded(loaded_opts), fn %Mix.Dep{scm: scm} = dep ->
dep = check_lock(dep, lock)
shell.info "* #{format_dep(dep)}"
if formatted = scm.format_lock(dep.opts) do
shell.info " locked at #{formatted}"
end
shell.info " #{format_status dep}"
end
end
end
|
lib/mix/lib/mix/tasks/deps.ex
| 0.776114
| 0.471406
|
deps.ex
|
starcoder
|
defmodule UltraDark.Blockchain.Block do
alias UltraDark.Blockchain.Block
alias UltraDark.Utilities
alias UltraDark.Transaction
alias Decimal, as: D
defstruct index: nil,
hash: nil,
previous_hash: nil,
difficulty: nil,
nonce: 0,
timestamp: nil,
merkle_root: nil,
transactions: []
@doc """
When the first node on the UltraDark network spins up, there won't be any blocks in the chain.
In order to create a base from which all nodes can agree, we create a block called a genesis block.
This block has the data structure that a block would have, but has hard-coded values. This block
never needs to be verified by nodes, as it doesn't contain any actual data. The block mined after the
genesis block must reference the hash of the genesis block as its previous_hash to be valid
"""
def initialize do
%Block{
index: 0,
hash: "79644A8F062F1BA9F7A32AF2242C04711A634D42F0628ADA6B985B3D21296EEA",
difficulty: 4.0,
timestamp: DateTime.utc_now() |> DateTime.to_string(),
transactions: [
%{
inputs: [],
outputs: [
%{
txoid: "79644A8F062F1BA9F7A32AF2242C04711A634D42F0628ADA6B985B3D21296EEA:0",
data: "GENESIS BLOCK",
addr: nil,
amount: nil
}
]
}
]
}
end
@doc """
Takes the previous block as an argument (This is the way we create every block except the genesis block)
"""
@spec initialize(Block) :: Block
def initialize(%{index: index, hash: previous_hash}) do
%Block{
index: index + 1,
previous_hash: previous_hash,
difficulty: 4.0,
timestamp: DateTime.utc_now() |> DateTime.to_string()
}
end
@doc """
The process of mining consists of hashing the index of the block, the hash of the previous block (thus linking the current and previous block),
the timestamp at which the block was generated, the merkle root of the transactions within the block, and a random nonce. We then check
to see whether the number represented by the hash is lower than the mining difficulty. If the value of the hash is lower, it is a valid block,
and we can broadcast the block to other nodes on the network.
"""
@spec mine(Block) :: Block
def mine(block) do
%{
index: index,
previous_hash: previous_hash,
timestamp: timestamp,
nonce: nonce,
merkle_root: merkle_root
} = block
block = %{
block
| hash:
Utilities.sha3_base16([
Integer.to_string(index),
previous_hash,
timestamp,
Integer.to_string(nonce),
merkle_root
])
}
if hash_beat_target?(block) do
block
else
mine(%{block | nonce: block.nonce + 1})
end
end
@doc """
Because the hash is a Base16 string, and not an integer, we must first convert the hash to an integer, and afterwards compare it to the target
"""
@spec hash_beat_target?(Block) :: boolean
def hash_beat_target?(%{hash: hash, difficulty: difficulty}) do
{integer_value_of_hash, _} = Integer.parse(hash, 16)
integer_value_of_hash < calculate_target(difficulty)
end
@doc """
The target is a number based off of the block difficulty. The higher the block difficulty, the lower the target. When a block is being mined,
the goal is to find a hash that is lower in numerical value than the target. The maximum target (when the difficulty is 0) is
115792089237316195423570985008687907853269984665640564039457584007913129639935, which means any hash is valid.
"""
@spec calculate_target(float) :: number
def calculate_target(difficulty) do
(round(:math.pow(16, 64 - difficulty))) - 1
end
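# Worked example (illustrative): with difficulty 4.0 the target is
# 16^60 - 1, so a hash is valid only if its 64-digit base-16
# representation begins with at least four zeros.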
@spec calculate_block_reward(number) :: Decimal
def calculate_block_reward(block_index) do
D.div(D.new(100), D.new(:math.pow(2, Integer.floor_div(block_index, 200000))))
end
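# Worked example (illustrative): the base reward of 100 halves every
# 200_000 blocks, so a block at index 400_000 earns 100 / 2^2 = 25.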
@spec total_block_fees(list) :: Decimal
def total_block_fees(transactions) do
transactions |> Enum.reduce(D.new(0), fn tx, acc -> D.add(acc, Transaction.calculate_fee(tx)) end)
end
end
|
lib/block.ex
| 0.797281
| 0.713057
|
block.ex
|
starcoder
|
defmodule Raem.Instituicoes do
@moduledoc """
The Instituicoes context.
"""
import Ecto.Query, warn: false
alias Raem.Repo
alias Raem.Instituicoes.Instituicao
@doc """
Returns the list of instituicoes.
## Examples
iex> list_instituicoes()
[%Instituicao{}, ...]
"""
def list_instituicoes do
Repo.all(Instituicao)
end
@doc """
Gets a single instituicao.
Raises `Ecto.NoResultsError` if the Instituicao does not exist.
## Examples
iex> get_instituicao!(123)
%Instituicao{}
iex> get_instituicao!(456)
** (Ecto.NoResultsError)
"""
def get_instituicao!(id), do: Repo.get!(Instituicao, id)
@doc """
Creates a instituicao.
## Examples
iex> create_instituicao(%{field: value})
{:ok, %Instituicao{}}
iex> create_instituicao(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_instituicao(attrs \\ %{}) do
%Instituicao{}
|> Instituicao.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a instituicao.
## Examples
iex> update_instituicao(instituicao, %{field: new_value})
{:ok, %Instituicao{}}
iex> update_instituicao(instituicao, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_instituicao(%Instituicao{} = instituicao, attrs) do
instituicao
|> Instituicao.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a Instituicao.
## Examples
iex> delete_instituicao(instituicao)
{:ok, %Instituicao{}}
iex> delete_instituicao(instituicao)
{:error, %Ecto.Changeset{}}
"""
def delete_instituicao(%Instituicao{} = instituicao) do
Repo.delete(instituicao)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking instituicao changes.
## Examples
iex> change_instituicao(instituicao)
%Ecto.Changeset{source: %Instituicao{}}
"""
def change_instituicao(%Instituicao{} = instituicao) do
Instituicao.changeset(instituicao, %{})
end
end
|
RAEM/raem/lib/raem/instituicoes/instituicoes.ex
| 0.770162
| 0.416292
|
instituicoes.ex
|
starcoder
|
defmodule Content.Audio.Predictions do
@moduledoc """
Module to convert a Message.Predictions.t() struct into the
appropriate audio struct, whether it's a NextTrainCountdown.t(),
TrainIsArriving.t(), TrainIsBoarding.t(), or TrackChange.t().
"""
require Logger
require Content.Utilities
alias Content.Audio.TrackChange
alias Content.Audio.TrainIsBoarding
alias Content.Audio.TrainIsArriving
alias Content.Audio.Approaching
alias Content.Audio.NextTrainCountdown
@heavy_rail_routes ["Red", "Orange", "Blue"]
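# Note on clause order in from_sign_content/3: the track-change case is
# checked before plain boarding, and the heavy-rail :approaching case
# before the generic :approaching countdown, so more specific audio wins.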
@spec from_sign_content(
{Signs.Utilities.SourceConfig.source(), Content.Message.Predictions.t()},
Content.line_location(),
boolean()
) :: nil | Content.Audio.t()
def from_sign_content(
{%Signs.Utilities.SourceConfig{} = src, %Content.Message.Predictions{} = predictions},
line,
multi_source?
) do
cond do
TrackChange.park_track_change?(predictions) and predictions.minutes == :boarding ->
%TrackChange{
destination: predictions.destination,
route_id: predictions.route_id,
berth: predictions.stop_id
}
predictions.minutes == :boarding ->
%TrainIsBoarding{
destination: predictions.destination,
trip_id: predictions.trip_id,
route_id: predictions.route_id,
track_number: Content.Utilities.stop_track_number(predictions.stop_id)
}
predictions.minutes == :arriving ->
%TrainIsArriving{
destination: predictions.destination,
trip_id: predictions.trip_id,
platform: src.platform,
route_id: predictions.route_id
}
predictions.minutes == :approaching and (line == :top or multi_source?) and
predictions.route_id in @heavy_rail_routes ->
%Approaching{
destination: predictions.destination,
trip_id: predictions.trip_id,
platform: src.platform,
route_id: predictions.route_id,
new_cars?: predictions.new_cars?
}
predictions.minutes == :approaching ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: 1,
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
predictions.minutes == :max_time ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: div(Content.Utilities.max_time_seconds(), 60),
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
is_integer(predictions.minutes) ->
%NextTrainCountdown{
destination: predictions.destination,
route_id: predictions.route_id,
minutes: predictions.minutes,
verb: if(src.terminal?, do: :departs, else: :arrives),
track_number: Content.Utilities.stop_track_number(predictions.stop_id),
platform: src.platform
}
end
end
end
|
lib/content/audio/predictions.ex
| 0.742328
| 0.432363
|
predictions.ex
|
starcoder
|
defmodule PhoenixIntegration do
@moduledoc """
Lightweight server-side integration test functions for Phoenix. Works within the existing
Phoenix.ConnTest framework and emphasizes both speed and readability.
## Configuration
### Step 1
You need to tell phoenix_integration which endpoint to use. Add the following to your phoenix application's `config/test.exs` file.
```elixir
config :phoenix_integration,
endpoint: MyApp.Endpoint
```
Where MyApp is the name of your application.
Do this before compiling phoenix_integration as part of step 2. If you change the endpoint in the config file, you will need to recompile the phoenix_integration dependency.
Phoenix_integration will produce warnings if your HTML likely doesn't do what you meant. (For example, it will warn you if two text fields have the same name.) You can turn those off by adding `warnings: false` to the config.
### Step 2
Add PhoenixIntegration to the deps section of your application's `mix.exs` file
```elixir
defp deps do
[
# ...
{:phoenix_integration, "~> 0.8", only: :test}
# ...
]
end
```
Don't forget to run `mix deps.get`
### Step 3
Create a test/support/integration_case.ex file. Mine simply looks like this:
```elixir
defmodule MyApp.IntegrationCase do
use ExUnit.CaseTemplate
using do
quote do
use MyApp.ConnCase
use PhoenixIntegration
end
end
end
```
Alternately you could place the call to `use PhoenixIntegration` in your conn_case.ex file. Just make sure it is after the definition of `@endpoint`.
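A minimal sketch of that alternative (module and endpoint names here are illustrative assumptions):
```elixir
defmodule MyApp.ConnCase do
  use ExUnit.CaseTemplate

  using do
    quote do
      use Phoenix.ConnTest
      @endpoint MyApp.Endpoint
      # `use PhoenixIntegration` must come after @endpoint is defined
      use PhoenixIntegration
    end
  end
end
```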
## Overview
phoenix_integration provides two assertion and six request functions to be used
alongside the existing `get`, `post`, `put`, `patch`, and `delete` utilities
inside of a Phoenix.ConnTest test suite.
The goal is to chain together a string of requests and assertions that thoroughly
exercise your application in as lightweight and readable a manner as possible.
Each function accepts a conn and some other data, and returns a conn intended to be
passed into the next function via a pipe.
### Examples
test "Basic page flow", %{conn: conn} do
# get the root index page
get( conn, page_path(conn, :index) )
# click/follow through the various about pages
|> follow_link( "About Us" )
|> follow_link( "Contact" )
|> follow_link( "Privacy" )
|> follow_link( "Terms of Service" )
|> follow_link( "Home" )
|> assert_response( status: 200, path: page_path(conn, :index) )
end
test "Create new user", %{conn: conn} do
# get the root index page
get( conn, page_path(conn, :index) )
# click/follow through the various about pages
|> follow_link( "Sign Up" )
|> follow_form( %{ user: %{
name: "<NAME>",
email: "<EMAIL>",
password: "<PASSWORD>",
confirm_password: "<PASSWORD>"
}} )
|> assert_response(
status: 200,
path: page_path(conn, :index),
html: "New User" )
end
### Simulate multiple users
Since all user state is held in the conn that is being passed around (just like when
a user is hitting your application in a browser), you can simulate multiple users
simply by tracking separate conns for them.
In the example below, I'm assuming an application-specific `test_sign_in` function, which
itself uses the `follow_*` functions to sign a given user in.
Notice how `user_conn` is tracked and reused. This keeps the state the user builds
up as the various links are followed, just like it would be when a proper browser is used.
### Example
test "admin grants a user permissions", %{conn: conn, user: user, admin: admin} do
# sign in the user and admin
user_conn = test_sign_in( conn, user )
admin_conn = test_sign_in( conn, admin )
# user can't see a restricted page
user_conn = get( user_conn, page_path(conn, :index) )
|> follow_link( "Restricted" )
|> assert_response( status: 200, path: session_path(conn, :new) )
|> refute_response( body: "Restricted Content" )
# admin grants the user permission
get( admin_conn, page_path(conn, :index) )
|> follow_link( "Admin Dashboard" )
|> follow_form( %{ user: %{
permission: "ok_to_do_thing"
}} )
|> assert_response(
status: 200,
path: admin_path(conn, :index),
html: "Permission Granted" )
# the user should now be able to see the restricted page
get( user_conn, page_path(conn, :index) )
|> follow_link( "Restricted" )
|> assert_response(
status: 200,
path: restricted_path(conn, :index),
html: "Restricted Content"
)
end
### Tip
You can intermix `IO.inspect` calls in the pipe chain to help with debugging. This
will print the current state of the conn into the console.
test "Basic page flow", %{conn: conn} do
# get the root index page
get( conn, page_path(conn, :index) )
|> follow_link( "About Us" )
|> IO.inspect
|> follow_link( "Home" )
|> assert_response( status: 200, path: page_path(conn, :index) )
end
I like to use `assert_response` pretty heavily to make sure the content I expect
is really there and to make sure I am traveling to the right locations.
test "Basic page flow", %{conn: conn} do
get(conn, page_path(conn, :index) )
|> assert_response(
status: 200,
path: page_path(conn, :index),
html: "Test App"
)
|> follow_link( "About" )
|> assert_response(
status: 200,
path: about_path(conn, :index),
html: "About Test App"
)
|> follow_link( "Contact" )
|> assert_response(
status: 200,
path: about_path(conn, :contact),
html: "Contact"
)
|> follow_link( "Home" )
|> assert_response(
status: 200,
path: page_path(conn, :index),
html: "Test App"
)
end
### What phoenix_integration is NOT
phoenix_integration is not a client-side acceptance test suite. It does not use
a real browser and does not exercise javascript code that lives there. Its focus
is on fast, readable, server-side integration.
Try using a tool like [`Hound`](https://hex.pm/packages/hound) for full-stack
integration tests.
"""
defmacro __using__(_opts) do
quote do
import PhoenixIntegration.Assertions
import PhoenixIntegration.Requests
end
# quote
end
# defmacro
end
|
lib/phoenix_integration.ex
| 0.76869
| 0.8288
|
phoenix_integration.ex
|
starcoder
|
defmodule Concentrate.Filter.FakeTrips do
@moduledoc "Fake implementation of Filter.GTFS.Trips"
def route_id("trip"), do: "route"
def route_id(_), do: nil
def direction_id("trip"), do: 1
def direction_id(_), do: nil
end
defmodule Concentrate.Filter.FakeCancelledTrips do
@moduledoc "Fake implementation of Filter.Alerts.CancelledTrips"
def route_cancelled?("route", {1970, 1, 2}) do
true
end
def route_cancelled?("route", unix) do
unix > 86_405 and unix < 86_410
end
def route_cancelled?(route, time) when is_binary(route) and is_integer(time) do
false
end
def route_cancelled?(route, {_, _, _}) when is_binary(route) do
false
end
def trip_cancelled?("trip", {1970, 1, 1}) do
true
end
def trip_cancelled?("trip", unix) do
unix > 5 and unix < 10
end
def trip_cancelled?(trip, time) when is_binary(trip) and is_integer(time) do
false
end
def trip_cancelled?(trip, {_, _, _}) when is_binary(trip) do
false
end
end
defmodule Concentrate.Filter.FakeClosedStops do
@moduledoc "Fake implementation of Filter.Alerts.ClosedStops"
alias Concentrate.Alert.InformedEntity
def stop_closed_for("stop", unix) do
cond do
unix < 5 ->
[]
unix > 10 ->
[]
true ->
[
InformedEntity.new(trip_id: "trip", route_id: "route")
]
end
end
def stop_closed_for("route_stop", _) do
[
InformedEntity.new(route_id: "other_route")
]
end
def stop_closed_for(_, _) do
[]
end
end
defmodule Concentrate.Filter.FakeShuttles do
@moduledoc "Fake implementation of Filter.Alerts.Shuttles"
def trip_shuttling?("trip", "route", _, {1970, 1, 1}), do: true
def trip_shuttling?("trip", "single_direction", 0, {1970, 1, 1}), do: true
def trip_shuttling?(_trip_id, _route_id, _direction_id, {_, _, _}), do: false
def stop_shuttling_on_route("route", "shuttle_1", 8), do: :through
def stop_shuttling_on_route("route", "shuttle_2", 8), do: :through
def stop_shuttling_on_route("route", "shuttle_start", 8), do: :start
def stop_shuttling_on_route("route", "shuttle_stop", 8), do: :stop
def stop_shuttling_on_route("single_direction", "shuttle_1", 8), do: :through
def stop_shuttling_on_route(_, _, dt) when is_integer(dt), do: nil
end
|
test/support/filter/fakes.ex
| 0.596551
| 0.402803
|
fakes.ex
|
starcoder
|
defmodule GraphQL.Node do
@moduledoc """
Functions to create all different types of nodes of a GraphQL operation.
Usually, this module should not be used directly, since it is easier to use
the function from `GraphQL.QueryBuilder`.
"""
@enforce_keys [:node_type]
defstruct node_type: nil,
name: nil,
alias: nil,
type: nil,
arguments: nil,
nodes: nil,
directives: nil
@typedoc """
The GraphQL query element that this node represents.
The four node types are:
- field: a single field of a GraphQL schema, may have arguments and other nodes
- fragment_ref: a reference to a fragment, used inside fields to import fragment fields
- fragment: a fragment definition, with name, type and fields
- inline_fragment: much like a fragment, but being inline, it does not need a name
"""
@type node_type :: :field | :fragment_ref | :fragment | :inline_fragment
@typedoc """
A GraphQL identifier that is not a GraphQL keyword (like mutation, query and fragment)
Used to identify fields, aliases and fragments.
"""
@type name :: String.t() | atom()
@typedoc """
A two-element tuple where the first position is the name of the field and the
second element is the alias of the field.
"""
@type name_and_alias :: {name(), name()}
@typedoc """
A struct representing a GraphQL operation node.
A %Node{} struct can be represent a field, a fragment, an inline fragment or a
fragment reference, identified by the `:node_type` field.
The `name` represents how this node is identified within the GraphQL operation.
The `alias` is only used when the `:node_type` is `:field`, and as the name
suggests, represents the alias of the field's name.
The `arguments` is a map with all the arguments used by a node, and it's only
valid when thew `:node_type` is `:field`.
The `type` is only used when `:node_type` is `:fragment` or `:inline_fragment`,
and represents the GraphQL type of the fragment.
The `nodes` is a list of child nodes, that can used to query for complex
objects.
The `directives` field is an enum with all the graphQL directives to be
applied on a node node.
"""
@type t :: %__MODULE__{
node_type: node_type(),
name: name(),
alias: name(),
type: String.t(),
arguments: map() | Keyword.t(),
nodes: [t()],
directives: map() | Keyword.t()
}
@doc """
Creates a simple field, with no arguments or sub nodes.
The `name` parameter can be an atom or string, or a two-element tuple with
atoms or strings, where the first element is the actual name of the field and
the second element is the alias of the field.
## GraphQL example
A query with a simple field inside another field:
```
query {
user {
id <---- Simple field
}
}
```
A query with a simple field with an alias:
```
query {
user {
theId: id <---- Simple field with alias
}
}
```
## Examples
iex> field(:my_field)
%GraphQL.Node{node_type: :field, name: :my_field}
iex> field({:my_field, "field_alias"})
%GraphQL.Node{node_type: :field, name: :my_field, alias: "field_alias"}
"""
@spec field(name() | name_and_alias()) :: t()
def field(name_spec)
def field({name, an_alias}) do
%__MODULE__{
node_type: :field,
name: name,
alias: an_alias
}
end
def field(name) do
%__MODULE__{
node_type: :field,
name: name
}
end
@doc """
Creates a field with arguments and sub nodes.
The `name` parameter can be an atom or string, or a two-element tuple with
atoms or strings, where the first element is the actual name of the field and
the second element is the alias of the field.
The `arguments` parameter is a map.
The `nodes` argument is a list of `%GraphQL.Node{}` structs.
## GraphQL Example
A query with a field that has arguments, an alias and subfields
```
query {
someObject: object(slug: "the-object") { <----- Field with an alias and arguments
field <----- Sub field
anotherField <----- Sub field
}
}
```
## Examples
iex> field(:my_field, %{id: "id"}, [ field(:subfield) ] )
%GraphQL.Node{node_type: :field, name: :my_field, arguments: %{id: "id"}, nodes: [%GraphQL.Node{node_type: :field, name: :subfield}]}
iex> field({:my_field, "field_alias"}, %{id: "id"}, [ field(:subfield) ] )
%GraphQL.Node{node_type: :field, name: :my_field, alias: "field_alias", arguments: %{id: "id"}, nodes: [%GraphQL.Node{node_type: :field, name: :subfield}]}
"""
@spec field(name() | name_and_alias(), map(), [t()], [any()]) :: t()
def field(name_spec, arguments, nodes, directives \\ nil)
def field({name, an_alias}, arguments, nodes, directives) do
%__MODULE__{
node_type: :field,
name: name,
alias: an_alias,
arguments: arguments,
nodes: nodes,
directives: directives
}
end
def field(name, arguments, nodes, directives) do
%__MODULE__{
node_type: :field,
name: name,
arguments: arguments,
nodes: nodes,
directives: directives
}
end
@doc """
Creates a reference to a fragment.
A fragment reference is used inside a field to import the fields of a fragment.
## GraphQL Example
```
query {
object {
...fieldsFromFragment <----- Fragment Reference
}
}
```
## Examples
iex> fragment("myFields")
%GraphQL.Node{node_type: :fragment_ref, name: "myFields"}
"""
@spec fragment(name()) :: t()
def fragment(name) do
%__MODULE__{
node_type: :fragment_ref,
name: name
}
end
@doc """
Creates a fragment.
A fragment is used to share fields between other fields
## GraphQL Example
```
query {
object {
...fieldsFromFragment
}
}
fragment fieldsFromFragment on Type { <------ Fragment
field1
field2
}
```
## Examples
iex> fragment("myFields", "SomeType", [field(:field)])
%GraphQL.Node{node_type: :fragment, name: "myFields", type: "SomeType", nodes: [%GraphQL.Node{node_type: :field, name: :field}]}
"""
@spec fragment(name(), name(), [t()]) :: t()
def fragment(name, type, fields) do
%__MODULE__{
node_type: :fragment,
name: name,
type: type,
nodes: fields
}
end
@doc """
Creates an inline fragment.
An inline fragment is used to conditionally add fields on another field depending
on its type
## GraphQL Example
```
query {
object {
... on Type { <------ Inline Fragment
field1
field2
}
}
}
```
## Examples
iex> inline_fragment("SomeType", [field(:field)])
%GraphQL.Node{node_type: :inline_fragment, type: "SomeType", nodes: [%GraphQL.Node{node_type: :field, name: :field}]}
"""
@spec inline_fragment(name(), [t()]) :: t()
def inline_fragment(type, fields) do
%__MODULE__{
node_type: :inline_fragment,
type: type,
nodes: fields
}
end
end
|
lib/graphql/node.ex
| 0.924866
| 0.89058
|
node.ex
|
starcoder
|
defmodule Glock.Conn do
@moduledoc """
Defines the glock connection struct that serves as
the configuration state of an initialized glock process.
The struct tracks all configuration settings and arguments
passed into the connection when it is initialized and provides
common default values for all settings except for the host
and path of the remote websocket server.
Provides utility functions for creating and ensuring the proper
default values are set within the connection struct.
"""
@type t :: %__MODULE__{
client: pid,
connect_opts: %{
connect_timeout: non_neg_integer,
retry: non_neg_integer,
retry_timeout: non_neg_integer,
transport: :tcp | :tls
},
handler_init_args: term,
headers: [binary],
host: charlist,
monitor: reference,
path: charlist,
port: non_neg_integer,
stream: reference,
stream_state: term,
ws_opts: %{
compress: boolean,
closing_timeout: non_neg_integer,
keepalive: non_neg_integer
}
}
defstruct client: nil,
connect_opts: %{
connect_timeout: 60_000,
retry: 10,
retry_timeout: 300,
transport: :tcp
},
handler_init_args: %{},
headers: [],
host: nil,
monitor: nil,
path: nil,
port: 80,
stream: nil,
stream_state: nil,
ws_opts: %{
compress: false,
closing_timeout: 15_000,
keepalive: 5_000
}
@doc """
Reduces over a keyword list of arguments for configuring the
glock process and adds them to an empty instance of the `Glock.Conn.t`
struct. Configs are merged with values passed by the user superseding
default values with the exception of the http protocol which is locked
to HTTP/1.1 for websocket compatibility.
"""
@spec new(keyword) :: Glock.Conn.t()
def new(opts) do
opts
|> Enum.reduce(struct(__MODULE__), &put_opts/2)
|> validate_required()
end
defp validate_required(%__MODULE__{host: host, path: path}) when host == nil or path == nil do
raise Glock.ConnError, message: "Must supply valid socket host and path"
end
defp validate_required(conn), do: conn
defp put_opts({required, value}, conn) when required in [:host, :path] do
Map.put(conn, required, to_charlist(value))
end
defp put_opts({:connect_opts, value}, conn) do
merged_opts =
conn.connect_opts
|> Map.merge(value, fn _key, _default, override -> override end)
%{conn | connect_opts: merged_opts}
end
defp put_opts({:ws_opts, value}, conn) do
%{
conn
| ws_opts: Map.merge(conn.ws_opts, value, fn _key, _default, override -> override end)
}
end
defp put_opts({key, value}, conn), do: Map.put(conn, key, value)
end
|
lib/glock/conn.ex
| 0.717408
| 0.414277
|
conn.ex
|
starcoder
|
defmodule ExSchedule do
@moduledoc """
Module providing a way to declare actions happening on an interval basis.
Defining a schedule
```
defmodule YourApp.Schedules.Developer do
use ExSchedule
schedule every: {6, :hours} do
Developer.eat(:pizza)
end
schedule every: :hour, first_in: {20, :minutes} do
Developer.drink(:coffee)
end
end
```
Adding the schedule to the supervision tree
```
defmodule YourApp.Application do
use Application
import Supervisor.Spec
def start(_type, _args) do
opts = [strategy: :one_for_one, name: YourApp.Supervisor]
children = [supervisor(YourApp.Schedules.Developer, [[name: :developer_schedule]])]
Supervisor.start_link(children, opts)
end
end
```
Supported options:
# Intervals
`every`: The interval on which to run the task
`first_in`: How much time to wait before the first run, defaults to 0
Examples
```
schedule every: :millisecond, do: Task.execute # every millisecond
schedule every: :second, do: Task.execute # every second
schedule every: :minute, do: Task.execute # every minute
schedule every: :hour, do: Task.execute # every hour
schedule every: :day, do: Task.execute # every day
schedule every: {2, :milliseconds}, do: Task.execute # every 2 milliseconds
schedule every: {2, :seconds}, do: Task.execute # every 2 seconds
schedule every: {2, :minutes}, do: Task.execute # every 2 minutes
schedule every: {2, :hours}, do: Task.execute # every 2 hours
schedule every: {2, :days}, do: Task.execute # every 2 days
schedule every: {2, :hours}, first_in: 0, do:
Task.execute # every 2 hours first in 0 seconds
end
schedule every: {2, :hours}, first_in: {20, :minutes} do
Task.execute # every 2 hours first in 20 minutes
end
```
# Failure handling
`:max_failures` - Number of times to fail for the task process to be restarted, defaults to `:infinity`
Examples
```
schedule every: {2, :days}, max_failures: 5, do: something
```
"""
defmacro __using__(_opts) do
quote location: :keep do
require Logger
Module.register_attribute(__MODULE__, :schedules, accumulate: true)
use Supervisor
import ExSchedule
@doc "Starts the Schedule with the given arguments"
@spec start_link(list()) :: GenServer.on_start()
def start_link(opts) do
with namespace <- normalize_namespace(opts[:namespace]) do
Supervisor.start_link(
__MODULE__,
put_in(opts[:namespace], namespace),
name: name(opts[:name], namespace)
)
end
end
@doc false
def init(_opts) do
schedules() |> Enum.map(&child_spec/1) |> Supervisor.init(strategy: :one_for_one)
end
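# The namespace is recovered from the supervisor's registered name: a
# process registered under MyApp.Schedule.staging (an illustrative name)
# resolves to :staging, while an unqualified name resolves to nil.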
@doc "Returns the namespace of the schedule"
def namespace do
self() |> Process.info([:links]) |> Access.get(:links) |> Enum.at(0) |> namespace
end
def namespace(server) when is_pid(server) do
server
|> Process.info([:registered_name])
|> Access.get(:registered_name)
|> namespace
end
def namespace(server) when is_atom(server) do
size = __MODULE__ |> to_string |> byte_size
case server |> to_string do
<<_module::bytes-size(size)>> <> "." <> namespace -> normalize_namespace(namespace)
_ -> nil
end
end
### Callbacks
@doc false
def handle_call(:state, _from, state), do: {:reply, state, state}
defp normalize_namespace(ns) when is_bitstring(ns), do: String.to_atom(ns)
defp normalize_namespace(ns), do: ns
defp child_spec(schedule) do
%{id: schedule.id, start: {ExSchedule.ScheduledTask, :start_link, [schedule]}}
end
defp name(nil, nil), do: __MODULE__
defp name(nil, ns), do: :"#{__MODULE__}.#{ns}"
defp name(name, _namespace), do: name
@before_compile ExSchedule
end
end
@doc false
defmacro __before_compile__(env) do
quote do
@doc "Returns the list of scheduled tasks"
@spec schedules() :: list(map())
def schedules, do: unquote(Module.get_attribute(env.module, :schedules))
end
end
@doc "A macro that defines a recurrent task"
@spec schedule(list(), list()) :: tuple()
defmacro schedule(options, do: expression) do
id = make_ref() |> inspect
quote do
def handle_task(unquote(id)), do: unquote(expression)
@schedules Macro.escape(%{
id: unquote(id),
module: __MODULE__,
name: unquote(options)[:name],
max_failures: unquote(options)[:max_failures] || :infinity,
interval: ExSchedule.interval(unquote(options)[:every]),
first_in: ExSchedule.interval(unquote(options)[:first_in]) || 0
})
end
end
def interval(n) when n in [nil, 0], do: 0
def interval(n) when is_number(n), do: n * 1000
def interval({_, :millisecond}), do: 1
def interval({n, :milliseconds}), do: n
def interval(:second), do: interval({1, :seconds})
def interval(:minute), do: interval({1, :minutes})
def interval(:hour), do: interval({1, :hours})
def interval(:day), do: interval({24, :hours})
def interval({1, duration}) when duration in [:second, :minute, :hour, :day] do
apply(__MODULE__, :interval, [duration])
end
def interval({n, duration}) when duration in [:seconds, :minutes, :hours] do
apply(:timer, duration, [n])
end
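# Examples (illustrative):
#   ExSchedule.interval({2, :hours}) #=> 7_200_000 (via :timer.hours/1)
#   ExSchedule.interval(:minute)     #=> 60_000
#   ExSchedule.interval(90)          #=> 90_000 (bare numbers are taken as seconds)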
end
|
lib/ex_schedule.ex
| 0.900355
| 0.917303
|
ex_schedule.ex
|
starcoder
|
defmodule AdventOfCode.Y2021.Day2 do
@moduledoc """
--- Day 2: Dive! ---
Now, you need to figure out how to pilot this thing.
It seems like the submarine can take a series of commands like forward 1, down 2, or up 3:
forward X increases the horizontal position by X units.
down X increases the depth by X units.
up X decreases the depth by X units.
Note that since you're on a submarine, down and up affect your depth, and so they have the opposite result of what you might expect.
"""
@doc """
Day 2 - Part 1
The submarine seems to already have a planned course (your puzzle input). You should probably figure out where it's going. For example:
forward 5
down 5
forward 8
up 3
down 8
forward 2
Your horizontal position and depth both start at 0. The steps above would then modify them as follows:
forward 5 adds 5 to your horizontal position, a total of 5.
down 5 adds 5 to your depth, resulting in a value of 5.
forward 8 adds 8 to your horizontal position, a total of 13.
up 3 decreases your depth by 3, resulting in a value of 2.
down 8 adds 8 to your depth, resulting in a value of 10.
forward 2 adds 2 to your horizontal position, a total of 15.
After following these instructions, you would have a horizontal position of 15 and a depth of 10. (Multiplying these together produces 150.)
Calculate the horizontal position and depth you would have after following the planned course. What do you get if you multiply your final horizontal position by your final depth?
## Examples
iex> AdventOfCode.Y2021.Day2.part1()
%{answer: 2091984, position: %{x: 1968, y: 1063}}
"""
def part1 do
{x, y} =
AdventOfCode.etl_file("lib/y_2021/d2/input.txt", &parse_row/1)
|> Enum.reduce({0, 0}, fn [direction, change], {x, y} ->
compute_change(direction, change, {x, y})
end)
%{position: %{x: x, y: y}, answer: x * y}
end
defp parse_row(s) do
s
|> String.split(" ")
|> normalize_input()
end
defp normalize_input([direction, s]) do
{n, ""} = Integer.parse(s)
[direction, n]
end
defp compute_change("forward", change, {x, y}), do: {x + change, y}
defp compute_change("down", change, {x, y}), do: {x, y + change}
defp compute_change("up", change, {x, y}) when y > change, do: {x, y - change}
defp compute_change("up", _change, {x, _y}), do: {x, 0}
@doc """
Part 2
--- Part Two ---
Based on your calculations, the planned course doesn't seem to make any sense. You find the submarine manual and discover that the process is actually slightly more complicated.
In addition to horizontal position and depth, you'll also need to track a third value, aim, which also starts at 0. The commands also mean something entirely different than you first thought:
down X increases your aim by X units.
up X decreases your aim by X units.
forward X does two things:
It increases your horizontal position by X units.
It increases your depth by your aim multiplied by X.
Again note that since you're on a submarine, down and up do the opposite of what you might expect: "down" means aiming in the positive direction.
Now, the above example does something different:
forward 5 adds 5 to your horizontal position, a total of 5. Because your aim is 0, your depth does not change.
down 5 adds 5 to your aim, resulting in a value of 5.
forward 8 adds 8 to your horizontal position, a total of 13. Because your aim is 5, your depth increases by 8*5=40.
up 3 decreases your aim by 3, resulting in a value of 2.
down 8 adds 8 to your aim, resulting in a value of 10.
forward 2 adds 2 to your horizontal position, a total of 15. Because your aim is 10, your depth increases by 2*10=20 to a total of 60.
After following these new instructions, you would have a horizontal position of 15 and a depth of 60. (Multiplying these produces 900.)
Using this new interpretation of the commands, calculate the horizontal position and depth you would have after following the planned course. What do you get if you multiply your final horizontal position by your final depth?
## Examples
iex> AdventOfCode.Y2021.Day2.part2()
%{answer: 1968*1060092, position: %{aim: 1063, x: 1968, y: 1060092}}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("forward", 5, {0, 0, 0})
{5, 0, 0}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("down", 5, {5, 0, 0})
{5, 0, 5}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("forward", 8, {5, 0, 5})
{13, 40, 5}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("up", 3, {13, 40, 5})
{13, 40, 2}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("down", 8, {13, 40, 2})
{13, 40, 10}
iex> AdventOfCode.Y2021.Day2.compute_change_with_aim("forward", 2, {13, 40, 10})
{15, 60, 10}
"""
def part2() do
{f_x, f_y, f_aim} =
AdventOfCode.etl_file("lib/y_2021/d2/input.txt", &parse_row/1)
|> Enum.reduce({0, 0, 0}, fn [direction, change], {x, y, aim} ->
compute_change_with_aim(direction, change, {x, y, aim})
end)
%{position: %{x: f_x, y: f_y, aim: f_aim}, answer: f_x * f_y}
end
def compute_change_with_aim("forward", change, {x, y, aim}) when change > 0 and aim > 0,
do: {x + change, y + change * aim, aim}
def compute_change_with_aim("forward", change, {x, y, aim}),
do: {x + change, y, aim}
def compute_change_with_aim("down", change, {x, y, aim}),
do: {x, y, aim + change}
def compute_change_with_aim("up", change, {x, y, aim}),
do: {x, y, aim - change}
end
|
lib/y_2021/d2/day2.ex
| 0.823186
| 0.911731
|
day2.ex
|
starcoder
|
defmodule Stripe.Charges do
@moduledoc """
Handles charges to the Stripe API.
(API ref: https://stripe.com/docs/api#charges)
Operations:
- create
- update
- get one
- list
- count
- refund
- refund partial
"""
@endpoint "charges"
@doc """
Creates a charge for a customer or card. You must pass in the amount, and also a source for the charge
that can be a token or customer. See the Stripe docs for proper source specs.
## Examples
```
params = [
source: [
object: "card",
number: "4111111111111111",
exp_month: 10,
exp_year: 2020,
country: "US",
name: "<NAME>",
cvc: 123
],
description: "1000 Widgets"
]
{:ok, result} = Stripe.Charges.create 1000,params
```
"""
def create(amount, params) do
create amount, params, Stripe.config_or_env_key
end
@doc """
Creates a charge for a customer or card. You must pass in the amount, and also a source for the charge
that can be a token or customer. See the Stripe docs for proper source specs.
Using a given stripe key to apply against the account associated.
## Examples
{:ok, result} = Stripe.Charges.create 1000,params, key
"""
def create(amount, params, key) do
#default currency
params = Keyword.put_new params, :currency, "USD"
#drop in the amount
params = Keyword.put_new params, :amount, amount
Stripe.make_request_with_key(:post, @endpoint, key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Lists out charges from your account with a default limit of 10. You can override this by passing in a limit.
## Examples
```
{:ok, charges} = Stripe.Charges.list(100)
```
"""
def list(limit \\ 10) do
list Stripe.config_or_env_key, limit
end
@doc """
Lists out charges from your account with a default limit of 10. You can override this by passing in a limit.
Using a given stripe key to apply against the account associated.
## Examples
```
{:ok, charges} = Stripe.Charges.list(key, 100)
```
"""
def list(key, limit) do
Stripe.make_request_with_key(:get, "#{@endpoint}?limit=#{limit}", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Updates a charge with changeable information (see the Stripe docs on what you can change)
## Examples
```
params = [description: "Changed charge"]
{:ok, charge} = Stripe.Charges.change("charge_id", params)
```
"""
def change(id, params) do
change id, params, Stripe.config_or_env_key
end
@doc """
Updates a charge with changeable information (see the Stripe docs on what you can change)
Using a given stripe key to apply against the account associated.
## Examples
```
params = [description: "Changed charge"]
{:ok, charge} = Stripe.Charges.change("charge_id", params, key)
```
"""
def change(id, params, key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}", key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Captures a charge that is currently pending. Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
## Example
```
{:ok, charge} = Stripe.Charges.capture("charge_id")
```
"""
def capture(id) do
capture id, Stripe.config_or_env_key
end
@doc """
Captures a charge that is currently pending. Note: you can default a charge to be automatically captured by setting `capture: true` in the charge create params.
Using a given stripe key to apply against the account associated.
## Example
```
{:ok, charge} = Stripe.Charges.capture("charge_id", key)
```
"""
def capture(id,key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/capture", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Retrieves a given charge.
## Example
```
{:ok, charge} = Stripe.Charges.get("charge_id")
```
"""
def get(id) do
get id, Stripe.config_or_env_key
end
@doc """
Retrieves a given charge.
Using a given stripe key to apply against the account associated.
## Example
```
{:ok, charge} = Stripe.Charges.get("charge_id", key)
```
"""
def get(id, key) do
Stripe.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Refunds a charge completely. Use `refund_partial` if you just want to... you know... partially refund
## Example
```
{:ok, charge} = Stripe.Charges.refund("charge_id")
```
"""
def refund(id) do
refund id, Stripe.config_or_env_key
end
@doc """
Refunds a charge completely. Use `refund_partial` if you just want to... you know... partially refund
Using a given stripe key to apply against the account associated.
## Example
```
{:ok, charge} = Stripe.Charges.refund("charge_id", key)
"""
def refund(id, key) do
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/refunds", key)
|> Stripe.Util.handle_stripe_response
end
@doc """
Refunds a charge partially; the amount is required.
## Example
```
{:ok, charge} = Stripe.Charges.refund_partial("charge_id",500)
```
"""
def refund_partial(id, amount) do
refund_partial id, amount, Stripe.config_or_env_key
end
@doc """
Refunds a charge partially; the amount is required.
Using a given stripe key to apply against the account associated.
## Example
```
{:ok, charge} = Stripe.Charges.refund_partial("charge_id",500, key)
```
"""
def refund_partial(id, amount, key) do
params = [amount: amount]
Stripe.make_request_with_key(:post, "#{@endpoint}/#{id}/refunds", key, params)
|> Stripe.Util.handle_stripe_response
end
@doc """
Count number of charges.
## Example
```
{:ok, cnt} = Stripe.Charges.count
```
"""
def count do
count Stripe.config_or_env_key
end
@doc """
Count number of charges.
Using a given stripe key to apply against the account associated.
## Example
```
{:ok, cnt} = Stripe.Charges.count key
```
"""
def count(key) do
Stripe.Util.count "#{@endpoint}", key
end
end
|
lib/stripe/charges.ex
| 0.83193
| 0.852014
|
charges.ex
|
starcoder
|
defmodule BST do
@moduledoc """
Handles operations for working with binary search trees.
"""
@modes ~w(in_order pre_order post_order reverse)a
alias BST.Node
@doc """
Creates new node
## Examples
iex> BST.new(2)
%BST.Node{data: 2}
"""
def new(data, left \\ nil, right \\ nil) do
Node.new(data, left, right)
end
@doc """
Inserts a node into the tree.
## Examples
iex> root = BST.new(2)
iex> BST.insert(root, 3)
%BST.Node{data: 2, right: %BST.Node{data: 3}}
"""
# At leaf - return new node
def insert(nil, data) do
new(data)
end
# Lower value than current node - recurse down left subtree
def insert(%Node{left: left, right: right, data: data}, value)
when value < data do
new(data, insert(left, value), right)
end
# Greater value than current node - recurse down right subtree
def insert(%Node{left: left, right: right, data: data}, value)
when value > data do
new(data, left, insert(right, value))
end
# Equal - just return node
def insert(%Node{left: left, right: right, data: data}, _value) do
new(data, left, right)
end
@doc """
Inserts multiple nodes into the tree.
## Examples
iex> root = BST.new(2)
iex> tree = BST.insert_many(root, [5, 50])
iex> tree.right.right.data
50
"""
def insert_many(%Node{} = root, nodes) when is_list(nodes) do
Enum.reduce(nodes, root, &insert(&2, &1))
end
@doc """
Verifies the tree is valid.
## Examples
iex> BST.new(2) |> BST.insert(3) |> BST.verify?()
true
"""
def verify?(%Node{} = node) do
do_verify?(node, nil, nil)
end
# At leaf: this branch must be valid
defp do_verify?(nil, _min, _max) do
true
end
# Node violates min / max limits
defp do_verify?(%Node{data: data}, _min, max) when is_number(max) and data > max do
false
end
defp do_verify?(%Node{data: data}, min, _max) when is_number(min) and data < min do
false
end
# Verify left and right subtrees, recursively
defp do_verify?(%Node{left: left, right: right, data: data}, min, max) do
do_verify?(left, min, data) and do_verify?(right, data, max)
end
@doc """
Traverses the tree, invoking `callback` with each node's data, in one of four
modes: `:in_order` (default), `:pre_order`, `:post_order`, or `:reverse`.
"""
def traverse(node, callback, mode \\ :in_order)
def traverse(nil, _callback, _mode) do
nil
end
def traverse(%Node{left: left, right: right} = node, callback, :in_order)
when is_function(callback, 1) do
traverse(left, callback, :in_order)
callback.(node.data)
traverse(right, callback, :in_order)
end
def traverse(%Node{left: left, right: right} = node, callback, :pre_order)
when is_function(callback, 1) do
callback.(node.data)
traverse(left, callback, :pre_order)
traverse(right, callback, :pre_order)
end
def traverse(%Node{left: left, right: right} = node, callback, :post_order)
when is_function(callback, 1) do
traverse(left, callback, :post_order)
traverse(right, callback, :post_order)
callback.(node.data)
end
def traverse(%Node{left: left, right: right} = node, callback, :reverse)
when is_function(callback, 1) do
traverse(right, callback, :reverse)
callback.(node.data)
traverse(left, callback, :reverse)
end
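# Example (illustrative):
#   BST.new(2) |> BST.insert_many([1, 3]) |> BST.traverse(&IO.inspect/1)
#   # prints 1, 2, 3 (the default :in_order mode)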
@doc """
Collects node values from tree into a list, given a traversal mode.
## Examples
iex> tree = BST.new(2) |> BST.insert_many([20, 200])
iex> tree |> BST.collect()
[2, 20, 200]
iex> tree |> BST.collect(:reverse)
[200, 20, 2]
"""
def collect(%Node{} = node, mode \\ :in_order) when mode in @modes do
{:ok, pid} = Agent.start(fn -> [] end)
traverse(node, &do_collect(pid, &1), mode)
Agent.get(pid, & &1) |> Enum.reverse()
end
defp do_collect(pid, value) when is_pid(pid) do
Agent.update(pid, &[value | &1])
end
@doc """
Searches tree for node with given value.
## Examples
iex> BST.new(2) |> BST.insert(3) |> BST.search(3)
%BST.Node{data: 3}
iex> BST.new(1) |> BST.insert(5) |> BST.search(30)
nil
"""
def search(nil, _value) do
nil
end
def search(%Node{data: data} = node, value) when data == value do
node
end
def search(%Node{data: data, left: left}, value) when value < data do
search(left, value)
end
def search(%Node{data: data, right: right}, value) when value > data do
search(right, value)
end
@doc """
Retrieves smallest node in tree.
## Examples
iex> tree = BST.new(200) |> BST.insert(2) |> BST.insert(33) |> BST.insert(3) |> BST.find_min()
iex> tree.data
2
"""
def find_min(%Node{left: nil} = node) do
node
end
def find_min(%Node{left: left}) do
find_min(left)
end
@doc """
Removes node from tree.
## Examples
iex> tree = BST.new(3) |> BST.insert(2) |> BST.insert(1) |> BST.delete(2)
iex> tree.left.data
1
"""
def delete(nil, _value) do
nil
end
# Node has no children
def delete(%Node{data: data, left: nil, right: nil}, value)
when data == value do
nil
end
# Node has one child
def delete(%Node{data: data, left: %Node{} = left, right: nil}, value)
when data == value do
left
end
def delete(%Node{data: data, left: nil, right: %Node{} = right}, value)
when data == value do
right
end
# Node has two children
def delete(%Node{data: data, left: %Node{} = left, right: %Node{} = right}, value)
when data == value do
# Get left-most child of right
successor = find_min(right)
# Move successor up to this node, and replace right branch without it
right_without_successor = delete(right, successor.data)
new(successor.data, left, right_without_successor)
end
# Recurse down left or right subtrees
def delete(%Node{data: data, left: left, right: right}, value)
when value < data do
new(data, delete(left, value), right)
end
def delete(%Node{data: data, left: left, right: right}, value)
when value > data do
new(data, left, delete(right, value))
end
@doc """
Removes multiple nodes from tree.
## Examples
iex> tree = BST.new(2) |> BST.insert_many([5, 50])
iex> BST.delete_many(tree, [5, 50])
%BST.Node{data: 2, left: nil, right: nil}
"""
def delete_many(%Node{} = root, nodes) when is_list(nodes) do
Enum.reduce(nodes, root, &delete(&2, &1))
end
end
|
lib/bst.ex
| 0.917082
| 0.767167
|
bst.ex
|
starcoder
|
defmodule Cldr.PluralRules.Extract do
@moduledoc """
Extract all the plural rules for many locales from the CLDR file that defines them.
"""
@doc """
Extract all the plural rules for many locales from the CLDR file that defines them.
"""
def from(file) do
{xml, _} = :xmerl_scan.file(file)
allRules = find('/supplementalData/plurals/pluralRules', xml)
Enum.map allRules, &extract_rules/1
end
@doc """
Alias for `:xmerl_xpath.string`.
The `xpath` argument needs to be a char list.
"""
defp find(xpath, document) do
:xmerl_xpath.string(xpath, document)
end
@doc """
Extract the plural rules for a given set of locales.
# Return value
This function returns a list. Each element of the list is a tuple where the first element is the list of the locales and where the second argument is a list of the plural rules (see the extract_rule/1 function for the structure of it).
# Implementation
This is an example of the XML element from which we should extract the rules:
```xml
<pluralRules locales="am bn fa gu hi kn mr zu">
<pluralRule count="one">i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04</pluralRule>
<pluralRule count="other"> @integer 2~17, 100, 1000, 10000, 100000, 1000000, … @decimal 1.1~2.6, 10.0, 100.0, 1000.0, 10000.0, 100000.0, 1000000.0, …</pluralRule>
</pluralRules>
```
The `locales` attribute of the `pluralRules` element is converted to a list of strings, like this: `["am", "bn", "fa", "gu", "hi", "kn", "mr", "zu"]`.
Then, each plural rule is parsed (see the extract_rule/1 function for implementation details).
"""
defp extract_rules(rules) do
locales = find('string(/pluralRules/@locales)', rules)
|> elem(2)
|> to_string
|> String.split
individual_rules = find('/pluralRules/pluralRule', rules)
individual_rules_extracted = Enum.map individual_rules, &extract_rule/1
{locales, individual_rules_extracted}
end
@doc """
Extract a plural rule.
# Return value
This function returns a tuple. The first element is the mnemonic tag for the plural category and the second one is the string representation of the rule associated without the samples provided by CLDR.
# Implementation
This is an example of the XML element from which we should extract the plural rule:
```xml
<pluralRule count="one">i = 0 or n = 1 @integer 0, 1 @decimal 0.0~1.0, 0.00~0.04</pluralRule>
```
This function would return {:one, "i = 0 or n = 1"} with this XML element.
"""
defp extract_rule(rule) do
tag = find('string(/pluralRule/@count)', rule)
|> elem(2)
|> List.to_atom
rule = find('string(/pluralRule[text()])', rule)
|> elem(2)
|> to_string
# Remove the samples from the rule
|> String.split("@", parts: 2)
|> List.first
|> String.trim_trailing()
{tag, rule}
end
end
|
lib/cldr/plural_rules/extract.ex
| 0.857052
| 0.793626
|
extract.ex
|
starcoder
|
defmodule AstraeaVirgoWeb.ErrorView do
use AstraeaVirgoWeb, :view
@moduledoc """
Responses for error requests.
"""
defp server_error_message(:internal), do: "Internal Server Error"
defp server_error_message(:backwards_clock), do: "Generate ID Error"
defp server_error_message(_), do: "Returned Error by Cache or Database"
@doc """
Response
## validation.json
Response Request Params Error
Response: Object
| field  | type   | description               | value                         |
|--------|--------|---------------------------|-------------------------------|
| error  | string | Error Type                | "ParamsValidationError"       |
| detail | Object | Per-field error details   | {field_name: error_info_list} |
Example:
```json
{
"error": "ParamsValidationError",
"detail": {
"duration": [
"not a number. got: \"18000000\"",
"has wrong type; expected type: integer, got: \"18000000\""
],
"id": [
"not valid"
]
}
}
```
## unauthenticated.json
Response for Unauthenticated Token
Response: Object
| field | type   | description | value             |
|-------|--------|-------------|-------------------|
| error | string | Error Type  | "Unauthenticated" |
## unauthorized.json
Response for Unauthorized Token
Response: Object
| field  | type   | description | value          |
|--------|--------|-------------|----------------|
| error  | string | Error Type  | "Unauthorized" |
| detail | string | reason      |                |
Example:
```json
{
"error": "Unauthorized",
"detail": "insufficient_permission"
}
```
## invalid_token.json
Response for invalid Token
Response: Object
| field | type   | description | value          |
|-------|--------|-------------|----------------|
| error | string | Error Type  | "InvalidToken" |
## no_resource_found.json
Response for No Resource Error
Response: Object
| field | type   | description | value              |
|-------|--------|-------------|--------------------|
| error | string | Error Type  | "ResourceNotFound" |
## server_error.json
Response for Server Error
Response: Object
| field  | type   | description | value                 |
|--------|--------|-------------|-----------------------|
| error  | string | Error Type  | "InternalServerError" |
| detail | string | message     |                       |
Example:
```json
{
"error": "InternalServerError",
"detail": "Internal Server Error"
}
```
"""
def render("validation.json", assigns) do
%{
error: "ParamsValidationError",
detail: assigns.fields,
}
end
def render("unauthenticated.json", _assigns) do
%{
error: "Unauthenticated",
}
end
def render("unauthorized.json", assigns) do
%{
error: "Unauthorized",
detail: to_string(assigns.reason),
}
end
def render("invalid_token.json", _assigns) do
%{
error: "InvalidToken",
}
end
def render("no_resource_found.json", _assigns) do
%{
error: "ResourceNotFound",
}
end
def render("server_error.json", assigns) do
# TODO: log assigns.reason
IO.inspect(assigns.reason)
%{
error: "InternalServerError",
detail: server_error_message(assigns.type),
}
end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
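# Illustrative controller usage (a sketch; the surrounding controller
# plumbing is assumed, not part of this module):
#
#   conn
#   |> put_status(:unauthorized)
#   |> put_view(AstraeaVirgoWeb.ErrorView)
#   |> render("unauthorized.json", reason: :insufficient_permission)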
end
|
lib/virgo_web/views/error_view.ex
| 0.814901
| 0.661141
|
error_view.ex
|
starcoder
|
defmodule Disco.Query do
@moduledoc """
The query specification.
A query in `Disco` is a struct which has the fields representing potential parameters
for the query itself.
This module defines a behaviour with a set of default callback implementations to execute
a query on an aggregate.
## Define a query
Here's how to implement a simple query without params:
```
defmodule MyApp.QuerySimple do
use Disco.Query
def run(%__MODULE__{} = _query), do: "result"
end
```
If you might need some params:
```
defmodule MyApp.QueryWithParams do
use Disco.Query, foo: nil
def run(%__MODULE__{} = query), do: query.foo
end
```
It's also possible to apply validations on the params. Refer to [Vex](https://github.com/CargoSense/vex) for more details.
```
defmodule MyApp.QueryWithValidations do
use Disco.Query, foo: nil, bar: nil
# param `foo` is required, `bar` isn't.
validates(:foo, presence: true)
def run(%__MODULE__{} = query), do: query.foo
end
```
## Overriding default functions
As you can see, the simplest implementation only requires to implement `run/1` callback,
while the others are already implemented by default. Sometimes you might need a custom
initialization or validation function, that's why it's possible to override `new/1` and
`validate/1`.
## Usage example
_NOTE: `Disco.Factories.ExampleQuery` has been defined in `test/support/examples/example_query.ex`._
```
iex> alias Disco.Factories.ExampleQuery, as: Query
iex> Query.new(%{foo: "bar"}) == %Query{foo: "bar"}
true
iex> Query.new(%{foo: "bar"}) |> Query.validate()
{:ok, %Query{foo: "bar"}}
iex> Query.new() |> Query.validate()
{:error, %{foo: ["must be present"]}}
iex> Query.run(%Query{foo: "bar"})
%{foo: "bar"}
```
"""
@type error :: {:error, %{atom() => [binary()]} | binary()}
@typedoc """
Result of the query, it might be anything.
"""
@type result :: any()
@doc """
Called to initialize a query.
"""
@callback new(params :: map()) :: map()
@doc """
Called to validate the query.
"""
@callback validate(query :: map()) :: {:ok, map()} | error()
@doc """
Called to run the query.
"""
@callback run(query :: map() | error()) :: result()
@doc """
Called to init, validate and run the query all at once.
This function is particularly useful when you don't want to call `new/1`, `validate/1` and
`run/1` manually.
"""
@callback execute(params :: map()) :: result()
@doc """
Defines the struct fields and the default callbacks to implement the behaviour to run a query.
## Options
The only argument accepted is a Keyword list of fields for the query struct.
"""
defmacro __using__(attrs) do
quote do
@behaviour Disco.Query
import Disco.Query
defstruct unquote(attrs)
use ExConstructor, :init
use Vex.Struct
@doc """
Initializes a query.
"""
@spec new(params :: map()) :: map()
def new(%{} = params \\ %{}), do: init(params)
@doc """
Validates an initialized query.
"""
@spec validate(query :: map()) :: {:ok, map()} | Disco.Query.error()
def validate(%__MODULE__{} = query) do
case Vex.validate(query) do
{:ok, _query} = ok -> ok
{:error, errors} -> {:error, handle_validation_errors(errors)}
end
end
@doc """
Inits, validates and runs the query all at once.
"""
@spec execute(params :: map()) :: any()
def execute(%{} = params \\ %{}) do
with %__MODULE__{} = cmd_struct <- new(params),
{:ok, cmd} <- validate(cmd_struct) do
run(cmd)
else
{:error, _errors} = error -> error
end
end
@doc """
Converts a struct, such as an `Ecto.Schema` struct, to a plain map.
"""
@spec struct_to_map(struct :: map()) :: map()
def struct_to_map(struct) do
struct
|> Map.from_struct()
|> Map.delete(:__meta__)
end
defoverridable new: 1, validate: 1, struct_to_map: 1
end
end
@doc false
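# Vex reports errors as 4-tuples, e.g. {:error, :foo, :presence, "must be present"};
# this collapses them into a map keyed by field: %{foo: ["must be present"]}.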
def handle_validation_errors(errors) do
Enum.reduce(errors, %{}, fn {_, key, _, msg}, acc ->
Map.put(acc, key, [msg])
end)
end
end
|
lib/disco/query.ex
| 0.9191
| 0.930584
|
query.ex
|
starcoder
|
defmodule NashvilleZoneLookup.Zoning.LandUseCondition do
@moduledoc ~S"""
The conditions under which a `NashvilleZoneLookup.Domain.LandUse` can be performed.
Many `NashvilleZoneLookup.Domain.LandUse` are unconditionally permitted ("Permitted by
right" in Nashville's terminology) for a particular
`NashvilleZoneLookup.Domain.Zone`. However, other `NashvilleZoneLookup.Domain.LandUse`
are only permitted under specific conditions. For example, the Land Use
may require approval from a zoning board, or can only be permitted withing
an overlay district.
A Land Use Condition is identified by a unique `:code`.
It also includes a textual `:description` briefly describing the condition
in a complete sentence.
Perhaps most importantly,
a Land Use Condition must fall into a `:category`,
one of `["permitted", "conditional", "not_permitted"]`. This makes
it simple to translate various Land Use Conditions into visual cues and
workflows for the user.
A Land Use Condition can optionally include an `:info_link`, a valid
URI pointing to a more complete description of the condition.
## Example
iex> NashvilleZoneLookup.Zoning.LandUseCondition.p()
%NashvilleZoneLookup.Zoning.LandUseCondition{
id: 1,
category: "permitted",
code: "P",
description: "Permitted by right."
}
"""
use Ecto.Schema
import Ecto.Changeset
alias NashvilleZoneLookup.Zoning.LandUseCondition
# Internal Constants
@category_conditional "conditional"
@category_not_permitted "not_permitted"
@category_permitted "permitted"
@categories [@category_permitted, @category_not_permitted, @category_conditional]
schema "land_use_conditions" do
field(:category, :string)
field(:description, :string)
field(:info_link, :string)
field(:code, :string)
timestamps()
end
@doc false
def changeset(%LandUseCondition{} = land_use_condition, attrs) do
land_use_condition
|> cast(attrs, [:category, :code, :description, :info_link])
|> validate_required([:category, :code, :description])
|> validate_inclusion(:category, @categories)
|> unique_constraint(:code)
end
def land_use_condition_for_code(code) do
%{
"P" => p(),
"NP" => np(),
"PC" => pc(),
"SE" => se(),
"A" => a(),
"O" => o()
}[code]
end
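# For example, land_use_condition_for_code("SE") returns the special-exception
# struct below; an unknown code falls through the map lookup and returns nil.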
## Public Constants
def p do
%LandUseCondition{
id: 1,
category: @category_permitted,
code: "P",
description: "Permitted by right."
}
end
def np do
%LandUseCondition{
id: 2,
category: @category_not_permitted,
code: "NP",
description: "Not permitted."
}
end
def pc do
%LandUseCondition{
id: 3,
category: @category_conditional,
code: "PC",
description: "Permitted subject to certain conditions.",
info_link:
"https://library.municode.com/tn/metro_government_of_nashville_and_davidson_county/codes/code_of_ordinances?nodeId=CD_TIT17ZO_CH17.16LAUSDEST_ARTIIUSPECOPC"
}
end
def se do
%LandUseCondition{
id: 4,
category: @category_conditional,
code: "SE",
description: "Permitted by special exception with Board of Zoning Appeals approval.",
info_link:
"https://library.municode.com/tn/metro_government_of_nashville_and_davidson_county/codes/code_of_ordinances?nodeId=CD_TIT17ZO_CH17.16LAUSDEST_ARTIIIUSPESPEXSE"
}
end
def a do
%LandUseCondition{
id: 5,
category: @category_conditional,
code: "A",
description: "Permitted as accessory to principal use.",
info_link:
"https://library.municode.com/tn/metro_government_of_nashville_and_davidson_county/codes/code_of_ordinances?nodeId=CD_TIT17ZO_CH17.16LAUSDEST_ARTIVUSPEACA"
}
end
def o do
%LandUseCondition{
id: 6,
category: @category_conditional,
code: "O",
description: "Permitted only within an overlay district.",
info_link:
"https://library.municode.com/tn/metro_government_of_nashville_and_davidson_county/codes/code_of_ordinances?nodeId=CD_TIT17ZO_CH17.36OVDI"
}
end
end
|
lib/nashville_zone_lookup/zoning/land_use_condition.ex
| 0.886002
| 0.647171
|
land_use_condition.ex
|
starcoder
|
defmodule Rubbergloves.Annotatable do
@moduledoc"""
Adds simple annotations to Elixir functions that can be inspected later at runtime; see https://medium.com/@cowen/annotations-in-elixir-450015ecdd97 for background.
## Usage
```
defmodule Example do
use Rubbergloves.Annotatable, [:bar, :foo]
@bar true
def bar_method do end
@foo [:test]
@bar true
def foo_bar_method do end
def no_annotation_method do end
@baz "ads"
def undefined_annotation_method do end
end
```
And later:
```
Example.annotations()
```
Gives:
```
%{
bar_method: [%{annotation: :bar, value: true}],
foo_bar_method: [
%{annotation: :bar, value: true},
%{annotation: :foo, value: [:test]}
]
}
```
"""
defmacro __using__(args) do
quote do
def __on_annotation_(_) do
quote do end
end
@annotations %{}
@supported_annotations unquote(args)
@on_definition { unquote(__MODULE__), :__on_definition__ }
@before_compile { unquote(__MODULE__), :__before_compile__ }
import Rubbergloves.Annotatable
require Rubbergloves.Annotatable
end
end
def __on_definition__(env, _kind, name, _args, _guards, _body) do
Module.get_attribute(env.module, :supported_annotations) |> Enum.each(&annotate_method(&1, env.module, name))
end
def annotate_method(annotation, module, method) do
annotations = Module.get_attribute(module, :annotations)
value = Module.get_attribute(module, annotation)
Module.delete_attribute(module, annotation)
update_annotations(annotation, annotations, module, method, value)
end
def update_annotations(_, _, _, _, nil), do: :no_op
def update_annotations(annotation, annotations, module, method, value) do
method_annotations = Map.get(annotations, method, []) ++ [%{ annotation: annotation, value: value}]
Module.put_attribute(module, :annotations, annotations |> Map.put(method, method_annotations))
end
defmacro __before_compile__(_env) do
quote do
def annotations() do
@annotations
end
def annotated_with(annotation) do
@annotations
|> Map.keys
|> Enum.filter(fn method ->
Map.get(@annotations, method, [])
|> Enum.map(fn a -> a.annotation end)
|> Enum.member?(annotation)
end)
end
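# With the Example module from the moduledoc (key order not guaranteed):
#   Example.annotated_with(:bar) #=> [:bar_method, :foo_bar_method]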
end
end
end
|
lib/annotations/annotatable.ex
| 0.849207
| 0.827863
|
annotatable.ex
|
starcoder
|
defmodule Robotica.Scheduler.Sequence do
@moduledoc """
Load and process a schedule sequence
"""
require Logger
alias Robotica.Config.Loader
if Application.compile_env(:robotica_common, :compile_config_files) do
@filename Application.compile_env(:robotica, :sequences_file)
@external_resource @filename
@data Loader.sequences(@filename)
defp get_data, do: @data
else
defp get_data do
filename = Application.get_env(:robotica, :sequences_file)
Loader.sequences(filename)
end
end
defp add_id_to_steps([], _, _), do: []
defp add_id_to_steps([step | tail], sequence_name, n) do
id = "#{sequence_name}_#{n}"
step = %{step | id: id}
[step | add_id_to_steps(tail, sequence_name, n + 1)]
end
defp filter_options(sequence, options) do
Enum.filter(sequence, fn step ->
case step.options do
nil -> true
soptions -> Enum.any?(soptions, fn option -> MapSet.member?(options, option) end)
end
end)
end
defp get_sequence(sequence_name, options) do
Map.fetch!(get_data(), sequence_name)
|> add_id_to_steps(sequence_name, 0)
|> filter_options(options)
end
defp get_corrected_start_time(start_time, sequence) do
Enum.reduce_while(sequence, start_time, fn step, acc ->
if step.zero_time do
{:halt, acc}
else
time = DateTime.add(acc, -step.required_time, :second)
{:cont, time}
end
end)
end
defp schedule_steps([], _), do: []
defp schedule_steps([step | tail], start_time) do
required_time = start_time
latest_time =
case step.latest_time do
nil -> 300
latest_time -> latest_time
end
latest_time = DateTime.add(required_time, latest_time, :second)
scheduled_step = %RoboticaCommon.ScheduledStep{
required_time: required_time,
latest_time: latest_time,
tasks: step.tasks,
id: step.id,
repeat_number: step.repeat_number
}
next_start_time = DateTime.add(start_time, step.required_time, :second)
[scheduled_step | schedule_steps(tail, next_start_time)]
end
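# Expands a repeating step into concrete steps. For example, a step with
# repeat_count: 2, repeat_time: 60 and required_time: 300 yields three steps
# with repeat_number 1..3; the last one absorbs the leftover
# 300 - 3 * 60 = 120 seconds, so the per-step required_time is [60, 60, 180].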
defp repeat_step(step) do
cond do
step.repeat_count <= 0 ->
step
is_nil(step.repeat_time) ->
step
true ->
required_time = step.required_time
repeat_time = step.repeat_time * (step.repeat_count + 1)
extra_time =
if repeat_time >= required_time do
0
else
required_time - repeat_time
end
list = [{step.repeat_count + 1, step.repeat_time + extra_time}]
list =
step.repeat_count..1
|> Enum.reduce(list, fn i, acc ->
[{i, step.repeat_time} | acc]
end)
Enum.map(list, fn {i, required_time} ->
%RoboticaCommon.SourceStep{
step
| required_time: required_time,
repeat_number: i
}
end)
end
end
defp expand_sequence(start_time, {sequence_name, options}) do
Logger.debug("Loading sequence #{inspect(sequence_name)} for #{inspect(start_time)}.")
sequence = get_sequence(sequence_name, options)
start_time = get_corrected_start_time(start_time, sequence)
Logger.debug(
"Actual start time for sequence #{inspect(sequence_name)} is #{inspect(start_time)}."
)
sequence
|> Enum.map(fn step -> repeat_step(step) end)
|> List.flatten()
|> schedule_steps(start_time)
end
defp expand_sequences(start_time, sequence_details) do
Enum.map(sequence_details, fn sequence_detail ->
expand_sequence(start_time, sequence_detail)
end)
|> List.flatten()
end
def expand_schedule(schedule) do
Enum.map(schedule, fn {start_time, sequence_details} ->
expand_sequences(start_time, sequence_details)
end)
|> List.flatten()
end
def sort_schedule(scheduled_steps) do
scheduled_steps
|> Enum.sort(fn x, y -> DateTime.compare(x.required_time, y.required_time) == :lt end)
end
end
|
robotica/lib/robotica/scheduler/sequence.ex
| 0.699254
| 0.512144
|
sequence.ex
|
starcoder
|
defprotocol Db.Backend do
@doc """
When implementing access to a new type of database, a struct containing query information should be created.
The db framework has a module generate a query and returns it to route through the protocol.
Whatever is returned by execute will be returned to the client unless the query is not inlined in which case
:ok will be returned.
The general guideline is that a protocol implementor should return :ok for insertions and deletions and
{:ok, any} for lookups. If an object isn't present, {:error, :notfound} should be used.
In the case of general errors, {:error, any} should be returned.
"""
def execute(any)
end
defprotocol Db.Router do
@doc """
Return a `%Db.Router{}` struct whose `module` implements the `Db` behaviour.
If there is no matching term, route should return {:error, :no_route_found}.
"""
defstruct module: :undefined, inline: true
def route(any)
end
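# A hypothetical implementation sketch (module and command names here are
# assumptions, not part of this codebase). Commands are atoms, so the router
# can be implemented for Atom:
#
#   defimpl Db.Router, for: Atom do
#     def route(:fetch_user), do: %Db.Router{module: MyApp.Db.Users, inline: true}
#     def route(_other), do: {:error, :no_route_found}
#   end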
defmodule Db do
@moduledoc """
All database access goes through this module.
This is a convenient location to record metrics and implement access to other databases.
This provides an interface to a dets table.
"""
require Logger
@doc """
Return a query to be handled by implementors of Db.Backend.
The first argument passed in is the command passed in to Db.execute,
and the second argument is the data required.
"""
@callback handle(any, any) :: any
@spec execute(atom, any) :: :ok | {:ok, any} | {:error, :notfound | :no_route_found} | {:error, any}
def execute(command, args) do
try do
case Db.Router.route(command) do
e = {:error, :no_route_found} -> e
route = %Db.Router{inline: inline} ->
case inline do
true -> handle_execute(route, command, args)
false ->
spawn(fn -> handle_execute(route, command, args) end)
:ok
end
end
rescue
_ in Protocol.UndefinedError -> {:error, :no_route_found}
end
end
@spec handle_execute(%Db.Router{}, any, any) :: :ok | {:ok, any} | {:error, :notfound | :no_route_found} | {:error, any}
defp handle_execute(%Db.Router{module: module}, command, args) do
query = module.handle(command, args)
Db.Backend.execute(query)
end
end
|
lib/db.ex
| 0.74055
| 0.434641
|
db.ex
|
starcoder
|
defmodule ExSozu do
@moduledoc """
Provides the main API to interface with Sōzu and handles the connection.
## Answers
Answers from Sōzu should be handled in a `GenServer.handle_info/2` callback.
Messages will be in these formats:
{:answer, %ExSozu.Answer{status: :ok}}
{:answer, %ExSozu.Answer{status: :processing}}
{:answer, %ExSozu.Answer{status: :error}}
## Reconnections
ExSozu will try to recover automatically from network errors: if there's a network
error, ExSozu will try to reconnect after `500` milliseconds. If it fails to do so, it'll
try again in `500 * 2` milliseconds, then in `500 * 2 * 2` milliseconds, etc.
"""
use GenServer
require Logger
alias ExSozu.Answer
alias ExSozu.Command
alias ExSozu.Protocol
@callback handle_info({:answer, Answer.t()}, state :: term) :: term
defstruct socket: nil, partial: nil, retries: 0
@sock_path Application.fetch_env!(:exsozu, :sock_path)
@sock_opts [:local, :binary, active: :once]
@retry_delay 500
@doc false
def start_link, do: GenServer.start_link(__MODULE__, %__MODULE__{}, name: __MODULE__)
@doc false
def init(state) do
case :gen_tcp.connect({:local, @sock_path}, 0, @sock_opts) do
{:ok, socket} ->
{:ok, %{state | socket: socket}}
{:error, _} ->
Logger.error("Cannot connect to Sōzu, trying to reconnect...")
send(self(), :reconnect)
{:ok, state}
end
end
# API
@doc """
Sends `command` to Sōzu.
"""
@spec command(Command.t()) :: {:ok, command_id :: String.t()}
def command(command), do: GenServer.call(__MODULE__, {:command, command})
@doc """
Sends a list of commands to Sōzu.
"""
@spec pipeline([Command.t()]) :: {:ok, command_ids :: [String.t()]}
def pipeline(commands), do: GenServer.call(__MODULE__, {:pipeline, commands})
# Callbacks
@doc false
def handle_call({:command, command}, from, state = %{socket: socket}) do
id = caller_to_id(from)
command = Protocol.encode!(%{command | id: id})
:ok = :gen_tcp.send(socket, command)
{:reply, {:ok, id}, state}
end
@doc false
def handle_call({:pipeline, commands}, from, state = %{socket: socket}) do
commands =
Enum.map(commands, fn command ->
%{command | id: caller_to_id(from, :unique)}
end)
:ok = :gen_tcp.send(socket, commands |> Protocol.encode!())
{:reply, {:ok, Enum.map(commands, & &1.id)}, state}
end
@doc false
def handle_info({:tcp, socket, message}, state) do
:inet.setopts(socket, active: :once)
{answers, partial} = Protocol.decode!(message, state.partial)
for answer = %Answer{id: id} <- answers,
do: id_to_pid(id) |> Process.send({:answer, answer}, [])
{:noreply, %{state | partial: partial}}
end
@doc false
def handle_info({:tcp_closed, _port}, state) do
Logger.error("Connection lost, trying to reconnect...")
send(self(), :reconnect)
{:noreply, %{state | socket: nil}}
end
@doc false
def handle_info(:reconnect, state = %{retries: retries}) do
case :gen_tcp.connect({:local, @sock_path}, 0, @sock_opts) do
{:ok, socket} ->
Logger.info("Reconnected!")
{:noreply, %{state | socket: socket, retries: 0}}
{:error, _} ->
delay = round(@retry_delay * :math.pow(2, retries))
Logger.warn("Could not connect to Sōzu, retrying in #{delay / 1000} seconds...")
Process.send_after(self(), :reconnect, delay)
{:noreply, %{state | socket: nil, retries: retries + 1}}
end
end
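# Command ids round-trip the caller: the GenServer caller tuple {pid, tag}
# (wrapped with a unique ref for pipelined commands) is serialized with
# :erlang.term_to_binary/1 and Base64-encoded; id_to_pid/1 reverses this to
# find which process should receive the matching answer.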
defp caller_to_id(caller), do: :erlang.term_to_binary(caller) |> Base.encode64()
defp caller_to_id(caller, :unique),
do: :erlang.term_to_binary({caller, make_ref()}) |> Base.encode64()
defp id_to_pid(id) do
case id |> Base.decode64!() |> :erlang.binary_to_term() do
{{pid, _ref}, _tag} -> pid
{pid, _ref} -> pid
end
end
end
|
lib/exsozu.ex
| 0.673621
| 0.400925
|
exsozu.ex
|
starcoder
|
import Kernel, except: [apply: 3]
defmodule Ecto.Query.Builder.CTE do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Escapes the CTE name.
iex> escape(quote(do: "FOO"), __ENV__)
"FOO"
"""
@spec escape(Macro.t, Macro.Env.t) :: Macro.t
def escape(name, _env) when is_bitstring(name), do: name
def escape({:^, _, [expr]}, _env), do: expr
def escape(expr, env) do
case Macro.expand_once(expr, env) do
^expr ->
Builder.error! "`#{Macro.to_string(expr)}` is not a valid CTE name. " <>
"It must be a literal string or an interpolated variable."
expr ->
escape(expr, env)
end
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def build(query, name, cte, env) do
Builder.apply_query(query, __MODULE__, [escape(name, env), build_cte(name, cte, env)], env)
end
@spec build_cte(Macro.t, Macro.t, Macro.Env.t) :: Macro.t
def build_cte(_name, {:^, _, [expr]}, _env) do
quote do: Ecto.Queryable.to_query(unquote(expr))
end
def build_cte(_name, {:fragment, _, _} = fragment, env) do
{expr, {params, :acc}} = Builder.escape(fragment, :any, {[], :acc}, [], env)
params = Builder.escape_params(params)
quote do
%Ecto.Query.QueryExpr{
expr: unquote(expr),
params: unquote(params),
file: unquote(env.file),
line: unquote(env.line)
}
end
end
def build_cte(name, cte, env) do
case Macro.expand_once(cte, env) do
^cte ->
Builder.error! "`#{Macro.to_string(cte)}` is not a valid CTE (named: #{Macro.to_string(name)}). " <>
"The CTE must be an interpolated query, such as ^existing_query or a fragment."
cte ->
build_cte(name, cte, env)
end
end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, bitstring, Ecto.Queryable.t) :: Ecto.Query.t
def apply(%Ecto.Query{with_ctes: with_expr} = query, name, with_query) do
with_expr = with_expr || %Ecto.Query.WithExpr{}
queries = List.keystore(with_expr.queries, name, 0, {name, with_query})
with_expr = %{with_expr | queries: queries}
%{query | with_ctes: with_expr}
end
def apply(query, name, with_query) do
apply(Ecto.Queryable.to_query(query), name, with_query)
end
end
|
lib/ecto/query/builder/cte.ex
| 0.730963
| 0.407746
|
cte.ex
|
starcoder
|
defmodule Phoenix.Endpoint.Cowboy2Adapter do
@moduledoc """
The Cowboy2 adapter for Phoenix.
## Endpoint configuration
This adapter uses the following endpoint configuration:
* `:http` - the configuration for the HTTP server. It accepts all options
as defined by [`Plug.Cowboy`](https://hexdocs.pm/plug_cowboy/). Defaults
to `false`
* `:https` - the configuration for the HTTPS server. It accepts all options
as defined by [`Plug.Cowboy`](https://hexdocs.pm/plug_cowboy/). Defaults
to `false`
* `:drainer` - a drainer process that triggers when your application is
shutting down, to wait for any on-going request to finish. It accepts all
options as defined by [`Plug.Cowboy`](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.Drainer.html).
Defaults to `[]` and can be disabled by setting it to false.
## Custom dispatch options
You can provide custom dispatch options in order to use Phoenix's
builtin Cowboy server with custom handlers, for example to handle
raw WebSockets [as shown in Cowboy's docs](https://github.com/ninenines/cowboy/tree/master/examples).
The options are passed to both `:http` and `:https` keys in the
endpoint configuration. However, once you pass your custom dispatch
options, you will need to manually wire the Phoenix endpoint by
adding the following rule:
{:_, Phoenix.Endpoint.Cowboy2Handler, {MyAppWeb.Endpoint, []}}
For example:
config :myapp, MyAppWeb.Endpoint,
http: [dispatch: [
{:_, [
{"/foo", MyAppWeb.CustomHandler, []},
{:_, Phoenix.Endpoint.Cowboy2Handler, {MyAppWeb.Endpoint, []}}
]}]]
It is also important to specify your handlers first, otherwise
Phoenix will intercept the requests before they get to your handler.
"""
require Logger
@doc false
def child_specs(endpoint, config) do
otp_app = Keyword.fetch!(config, :otp_app)
refs_and_specs =
for {scheme, port} <- [http: 4000, https: 4040], opts = config[scheme] do
port = :proplists.get_value(:port, opts, port)
unless port do
Logger.error(":port for #{scheme} config is nil, cannot start server")
raise "aborting due to nil port"
end
opts = [port: port_to_integer(port), otp_app: otp_app] ++ :proplists.delete(:port, opts)
child_spec(scheme, endpoint, opts)
end
{refs, child_specs} = Enum.unzip(refs_and_specs)
if drainer = (refs != [] && Keyword.get(config, :drainer, [])) do
child_specs ++ [{Plug.Cowboy.Drainer, Keyword.put_new(drainer, :refs, refs)}]
else
child_specs
end
end
defp child_spec(scheme, endpoint, config) do
if scheme == :https do
Application.ensure_all_started(:ssl)
end
dispatches = [{:_, Phoenix.Endpoint.Cowboy2Handler, {endpoint, endpoint.init([])}}]
config = Keyword.put_new(config, :dispatch, [{:_, dispatches}])
ref = Module.concat(endpoint, scheme |> Atom.to_string() |> String.upcase())
spec = Plug.Cowboy.child_spec(ref: ref, scheme: scheme, plug: {endpoint, []}, options: config)
spec = update_in(spec.start, &{__MODULE__, :start_link, [scheme, endpoint, &1]})
{ref, spec}
end
@doc false
def start_link(scheme, endpoint, {m, f, [ref | _] = a}) do
# ref is used by Ranch to identify its listeners, defaulting
# to plug.HTTP and plug.HTTPS and overridable by users.
case apply(m, f, a) do
{:ok, pid} ->
Logger.info(fn -> info(scheme, endpoint, ref) end)
{:ok, pid}
{:error, {:shutdown, {_, _, {{_, {:error, :eaddrinuse}}, _}}}} = error ->
Logger.error [info(scheme, endpoint, ref), " failed, port already in use"]
error
{:error, _} = error ->
error
end
end
defp info(scheme, endpoint, ref) do
server = "cowboy #{Application.spec(:cowboy)[:vsn]}"
"Running #{inspect endpoint} with #{server} at #{bound_address(scheme, ref)}"
end
defp bound_address(scheme, ref) do
case :ranch.get_addr(ref) do
{:local, unix_path} ->
"#{unix_path} (#{scheme}+unix)"
{addr, port} ->
"#{:inet.ntoa(addr)}:#{port} (#{scheme})"
end
end
# TODO: Deprecate {:system, env_var} once we require Elixir v1.9+
defp port_to_integer({:system, env_var}), do: port_to_integer(System.get_env(env_var))
defp port_to_integer(port) when is_binary(port), do: String.to_integer(port)
defp port_to_integer(port) when is_integer(port), do: port
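# For example: port_to_integer({:system, "PORT"}) reads the PORT env var,
# while port_to_integer("4000") and port_to_integer(4000) both yield 4000.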
end
|
lib/phoenix/endpoint/cowboy2_adapter.ex
| 0.846038
| 0.449272
|
cowboy2_adapter.ex
|
starcoder
|
defmodule Retex.Node.Type do
@moduledoc """
The NodeType is part of the alpha network, the discrimination part of the network
that checks whether a specific class exists. If it does, it propagates the activations
down to the Select node types, which select an attribute and check for its existence.
"""
defstruct class: nil, id: nil
@type t :: %Retex.Node.Type{}
def new(class) do
item = %__MODULE__{class: class}
%{item | id: Retex.hash(item)}
end
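# For example, Retex.Node.Type.new("Account") builds a node whose id is
# Retex.hash/1 of the struct, so the same class always maps to the same node id.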
defimpl Retex.Protocol.Activation do
def activate(
%Retex.Node.Type{class: class} = neighbor,
%Retex{graph: _graph} = rete,
%Retex.Wme{identifier: "$" <> _identifier = var} = wme,
bindings,
tokens
) do
new_bindings = Map.merge(bindings, %{var => class})
rete
|> Retex.create_activation(neighbor, wme)
|> Retex.add_token(neighbor, wme, new_bindings, tokens)
|> Retex.continue_traversal(new_bindings, neighbor, wme)
end
def activate(
%Retex.Node.Type{class: "$" <> _variable = var} = neighbor,
%Retex{graph: _graph} = rete,
%Retex.Wme{identifier: identifier} = wme,
bindings,
tokens
) do
rete
|> Retex.create_activation(neighbor, wme)
|> Retex.add_token(neighbor, wme, Map.merge(bindings, %{var => identifier}), tokens)
|> Retex.continue_traversal(Map.merge(bindings, %{var => identifier}), neighbor, wme)
end
def activate(
%Retex.Node.Type{class: identifier} = neighbor,
%Retex{} = rete,
%Retex.Wme{identifier: identifier} = wme,
bindings,
tokens
) do
rete
|> Retex.create_activation(neighbor, wme)
|> Retex.add_token(neighbor, wme, bindings, tokens)
|> Retex.continue_traversal(bindings, neighbor, wme)
end
def activate(
%Retex.Node.Type{class: _class},
%Retex{graph: _graph} = rete,
%Retex.Wme{identifier: _identifier} = _wme,
_bindings,
_tokens
) do
Retex.stop_traversal(rete, %{})
end
@spec active?(%{id: any}, Retex.t()) :: boolean()
def active?(%{id: id}, %Retex{activations: activations}) do
Enum.any?(Map.get(activations, id, []))
end
end
defimpl Inspect do
def inspect(node, _opts) do
"#{inspect(node.class)}"
end
end
end
|
lib/nodes/type_node.ex
| 0.73848
| 0.487978
|
type_node.ex
|
starcoder
|
defmodule Mix.Tasks.Scenic.New.Example do
@moduledoc """
Generates a starter Scenic application.
This is the easiest way to set up a new Scenic project.
## Install `scenic.new`
```bash
mix archive.install hex scenic_new
```
## Build the Starter Application
First, navigate the command-line to the directory where you want to create
your new Scenic application. Then run the following commands: (change
`my_app` to the name of your application)
```bash
mix scenic.new my_app
cd my_app
mix do deps.get, scenic.run
```
## Running and Debugging
Once the application and its dependencies are set up, there are two main ways
to run it.
If you want to run your application under `IEx` so that you can debug it,
simply run
```bash
iex -S mix
```
This works just like any other Elixir application.
If you want to run your application outside of `IEx`, you should start it
like this:
```bash
mix scenic.run
```
## The Starter Application
The starter application created by the generator above shows the basics of
building a Scenic application. It has four scenes, two components, and a
simulated sensor.
Scene | Description
--------- | -----------
Splash | The Splash scene is configured to run when the application is started in the `config/config.exs` file. It runs a simple animation, then transitions to the Sensor scene. It also shows how to intercept basic user input to exit the scene early.
Sensor | The Sensor scene depicts a simulated temperature sensor. The sensor is always running and updates its data through the `Scenic.SensorPubSub` server.
Sensor spec | The Sensor scene implemented using specs
Primitives | The Primitives scene displays an overview of the basic primitive types and some of the styles that can be applied to them.
Components | The Components scene shows the basic components that come with Scenic. The crash button will cause a match error that will crash the scene, showing how the supervision tree restarts the scene. It also shows how to receive events from components.
Component | Description
--------- | -----------
Nav | The navigation bar at the top of the main scenes shows how to navigate between scenes and how to construct a simple component and pass a parameter to it. Note that it references a clock, creating a nested component. The clock is positioned by dynamically querying the width of the ViewPort
Notes | The notes section at the bottom of each scene is very simple and also shows passing in custom data from the parent.
The simulated temperature sensor doesn't collect any actual data, but does
show how you would set up a real sensor and publish data from it into the
`Scenic.SensorPubSub` service.
## What to read next
Next, you should read guides describing the overall Scenic structure. This is
in the documentation for Scenic itself
"""
use Mix.Task
import Mix.Generator
alias ScenicNew.Common
@shortdoc "Creates a new Scenic v#{Common.scenic_version()} application"
@switches [
app: :string,
module: :string
]
# --------------------------------------------------------
def run(argv) do
{opts, argv} = OptionParser.parse!(argv, strict: @switches)
case argv do
[] ->
Mix.Tasks.Help.run(["scenic.new"])
[path | _] ->
Common.elixir_version_check!()
app = opts[:app] || Path.basename(Path.expand(path))
Common.check_application_name!(app, !opts[:app])
mod = opts[:module] || Macro.camelize(app)
Common.check_mod_name_validity!(mod)
Common.check_mod_name_availability!(mod)
unless path == "." do
Common.check_directory_existence!(path)
File.mkdir_p!(path)
end
File.cd!(path, fn ->
generate(app, mod, path, opts)
end)
end
end
# --------------------------------------------------------
defp generate(app, mod, path, _opts) do
assigns = [
app: app,
mod: mod,
elixir_version: get_version(System.version()),
scenic_version: Common.scenic_version()
]
create_file(".formatter.exs", Common.formatter(assigns))
create_file(".gitignore", Common.gitignore(assigns))
create_file("README.md", readme_template(assigns))
create_file("mix.exs", mix_exs_template(assigns))
create_directory("config")
create_file("config/config.exs", config_template(assigns))
create_directory("lib")
create_file("lib/#{app}.ex", app_template(assigns))
create_file("lib/assets.ex", assets_template(assigns))
create_directory("assets")
create_file("assets/images/attribution.txt", Common.attribution(assigns))
create_file("assets/images/fairy_grove.jpg", Common.fairy_grove())
create_file("assets/images/cyanoramphus_zealandicus_1849.jpg", Common.cyanoramphus())
create_file("assets/readme.txt", Common.assets_readme(assigns))
create_directory("lib/scenes")
create_file("lib/scenes/components.ex", scene_components_template(assigns))
create_file("lib/scenes/sensor.ex", scene_sensor_template(assigns))
create_file("lib/scenes/sensor_spec.ex", scene_sensor_spec_template(assigns))
create_file("lib/scenes/primitives.ex", scene_primitives_template(assigns))
create_file("lib/scenes/transforms.ex", scene_transforms_template(assigns))
create_file("lib/scenes/sprites.ex", scene_sprites_template(assigns))
create_file("lib/scenes/fills.ex", scene_fills_template(assigns))
create_file("lib/scenes/strokes.ex", scene_strokes_template(assigns))
create_file("lib/scenes/readme.txt", Common.scene_readme(assigns))
create_directory("lib/components")
create_file("lib/components/nav.ex", nav_template(assigns))
create_file("lib/components/notes.ex", notes_template(assigns))
create_file("lib/components/readme.txt", Common.comp_readme(assigns))
create_directory("lib/pubsub")
create_file("lib/pubsub/supervisor.ex", pubsub_sup_template(assigns))
create_file("lib/pubsub/temperature.ex", pubsub_temp_template(assigns))
create_file("lib/pubsub/readme.txt", Common.pubsub_readme(assigns))
"""
Your Scenic project was created successfully.
Next steps for getting started:
$ cd #{path}
$ mix deps.get
You can start your app with:
$ mix scenic.run
You can also run it interactively like this:
$ iex -S mix
"""
|> Mix.shell().info()
end
# --------------------------------------------------------
defp get_version(version) do
{:ok, version} = Version.parse(version)
"#{version.major}.#{version.minor}" <>
case version.pre do
[h | _] -> "-#{h}"
[] -> ""
end
end
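# For example: get_version("1.12.3") #=> "1.12"
# and get_version("1.13.0-rc.0") #=> "1.13-rc"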
# ============================================================================
# template files
templates = [
# formatter: "templates/formatter.exs",
# gitignore: "templates/gitignore",
readme: "templates/new_example/README.md.eex",
mix_exs: "templates/new_example/mix.exs.eex",
config: "templates/new_example/config/config.exs.eex",
app: "templates/new_example/lib/app.ex.eex",
assets: "templates/new_example/lib/assets.ex.eex",
nav: "templates/new_example/lib/components/nav.ex.eex",
notes: "templates/new_example/lib/components/notes.ex.eex",
scene_components: "templates/new_example/lib/scenes/components.ex.eex",
scene_sensor: "templates/new_example/lib/scenes/sensor.ex.eex",
scene_sensor_spec: "templates/new_example/lib/scenes/sensor_spec.ex.eex",
scene_primitives: "templates/new_example/lib/scenes/primitives.ex.eex",
scene_transforms: "templates/new_example/lib/scenes/transforms.ex.eex",
scene_sprites: "templates/new_example/lib/scenes/sprites.ex.eex",
scene_fills: "templates/new_example/lib/scenes/fills.ex.eex",
scene_strokes: "templates/new_example/lib/scenes/strokes.ex.eex",
pubsub_sup: "templates/new_example/lib/pubsub/supervisor.ex.eex",
pubsub_temp: "templates/new_example/lib/pubsub/temperature.ex.eex"
]
Enum.each(templates, fn {name, content} ->
embed_template(name, from_file: content)
end)
end
|
lib/mix/tasks/new_example.ex
| 0.886525
| 0.755975
|
new_example.ex
|
starcoder
|
defmodule GenEvent.Stream do
@moduledoc """
Defines a `GenEvent` stream.
This is a struct returned by `stream/2`. The struct is public and
contains the following fields:
* `:manager` - the manager reference given to `GenEvent.stream/2`
* `:timeout` - the timeout between events, defaults to `:infinity`
"""
defstruct manager: nil, timeout: :infinity
@type t :: %__MODULE__{
manager: GenEvent.manager,
timeout: timeout}
@doc false
def init({_pid, _ref} = state) do
{:ok, state}
end
@doc false
def handle_event(event, _state) do
# We do this to trick dialyzer to not complain about non-local returns.
case :erlang.phash2(1, 1) do
0 -> exit({:bad_event, event})
1 -> :remove_handler
end
end
@doc false
def handle_call(msg, _state) do
# We do this to trick dialyzer to not complain about non-local returns.
reason = {:bad_call, msg}
case :erlang.phash2(1, 1) do
0 -> exit(reason)
1 -> {:remove_handler, reason}
end
end
@doc false
def handle_info(_msg, state) do
{:ok, state}
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{:ok, state}
end
end
defimpl Enumerable, for: GenEvent.Stream do
def reduce(stream, acc, fun) do
start_fun = fn() -> start(stream) end
next_fun = &next(stream, &1)
stop_fun = &stop(stream, &1)
Stream.resource(start_fun, next_fun, stop_fun).(acc, wrap_reducer(fun))
end
def count(_stream) do
{:error, __MODULE__}
end
def member?(_stream, _item) do
{:error, __MODULE__}
end
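# Acknowledgement semantics: :ack events reply before the reducer runs,
# :sync events reply after it (even if the reducer raises), and :async
# events never reply.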
defp wrap_reducer(fun) do
fn
{:ack, manager, ref, event}, acc ->
send manager, {ref, :ok}
fun.(event, acc)
{:async, _manager, _ref, event}, acc ->
fun.(event, acc)
{:sync, manager, ref, event}, acc ->
try do
fun.(event, acc)
after
send manager, {ref, :ok}
end
end
end
defp start(%{manager: manager} = stream) do
try do
{:ok, {pid, ref}} = :gen.call(manager, self(),
{:add_process_handler, self(), self()}, :infinity)
mon_ref = Process.monitor(pid)
{pid, ref, mon_ref}
catch
:exit, reason -> exit({reason, {__MODULE__, :start, [stream]}})
end
end
defp next(%{timeout: timeout} = stream, {pid, ref, mon_ref} = acc) do
self = self()
receive do
# Got an async event.
{_from, {^pid, ^ref}, {:notify, event}} ->
{[{:async, pid, ref, event}], acc}
# Got a sync event.
{_from, {^pid, ^ref}, {:sync_notify, event}} ->
{[{:sync, pid, ref, event}], acc}
# Got an ack event.
{_from, {^pid, ^ref}, {:ack_notify, event}} ->
{[{:ack, pid, ref, event}], acc}
# The handler was removed. Stop iteration, resolve the
# event later. We need to demonitor now, otherwise DOWN
# appears with higher priority in the shutdown process.
{:gen_event_EXIT, {^pid, ^ref}, _reason} = event ->
Process.demonitor(mon_ref, [:flush])
send(self, event)
{:halt, {:removed, acc}}
# The manager died. Stop iteration, resolve the event later.
{:DOWN, ^mon_ref, _, _, _} = event ->
send(self, event)
{:halt, {:removed, acc}}
after
timeout ->
exit({:timeout, {__MODULE__, :next, [stream, acc]}})
end
end
# If we reach this branch, we know the handler was already
# removed, so we don't trigger a request for doing so.
defp stop(stream, {:removed, {pid, ref, mon_ref} = acc}) do
case wait_for_handler_removal(pid, ref, mon_ref) do
:ok ->
flush_events(ref)
{:error, reason} ->
exit({reason, {__MODULE__, :stop, [stream, acc]}})
end
end
# If we reach this branch, the handler was not removed yet,
# so we trigger a request for doing so.
defp stop(stream, {pid, ref, _} = acc) do
_ = GenEvent.remove_handler(pid, {pid, ref}, :shutdown)
stop(stream, {:removed, acc})
end
defp wait_for_handler_removal(pid, ref, mon_ref) do
receive do
{:gen_event_EXIT, {^pid, ^ref}, _reason} ->
Process.demonitor(mon_ref, [:flush])
:ok
{:DOWN, ^mon_ref, _, _, reason} ->
{:error, reason}
end
end
defp flush_events(ref) do
receive do
{_from, {_pid, ^ref}, {notify, _event}} when notify in [:notify, :ack_notify, :sync_notify] ->
flush_events(ref)
after
0 -> :ok
end
end
end
|
lib/elixir/lib/gen_event/stream.ex
| 0.727298
| 0.46035
|
stream.ex
|
starcoder
|
defmodule Mix.Tasks.Compile.Asn1 do
@moduledoc """
A mix compiler for the ASN.1 format leveraging Erlang's `:asn1ct`.
Once installed, the compiler can be enabled by changing project configuration in `mix.exs`:
def project() do
[
# ...
compilers: [:asn1] ++ Mix.compilers(),
asn1_options: [:maps]
]
end
Then, you can place your `.asn1` files in the `asn1` folder. The files will be compiled to `src`
as Erlang modules that will be picked up by the Erlang compiler.
The `:asn1ct` compiler accepts many options that are described in the
[documentation](http://erlang.org/doc/man/asn1ct.html#compile-1) - they can be passed using the
`asn1_options` project configuration (in the same place where the `compilers` configuration
lives). It is recommended to at least set the options to `[:maps]` so that the decoding
and encoding passes use maps rather than records.
## Command line options
* `--force` - forces compilation regardless of modification times
* `--verbose` - inform about each compiled file
## Configuration
* `:asn1_paths` - directories to find source files. Defaults to `["asn1"]`.
* `:erlc_paths` - directories to store generated source files. Defaults to `["src"]` (also
used by the erlang compiler).
* `:asn1_options` - compilation options that apply to ASN.1's compiler.
All available options are described in the
[documentation](http://erlang.org/doc/man/asn1ct.html#compile-2).
"""
# Support Elixir <= 1.6
if Code.ensure_loaded?(Mix.Task.Compiler) do
use Mix.Task.Compiler
else
use Mix.Task
end
@recursive true
@manifest ".compile.asn1"
@manifest_vsn 1
@min_mtime {{1970, 1, 1}, {0, 0, 0}}
@switches [force: :boolean, verbose: :boolean, warnings_as_errors: :boolean]
@doc """
Runs this task.
"""
def run(args) do
{opts, _, _} = OptionParser.parse(args, switches: @switches)
project = Mix.Project.config()
source_paths = Keyword.get(project, :asn1_paths, ["asn1"])
dest_path = List.first(Keyword.fetch!(project, :erlc_paths))
verbose? = Keyword.get(opts, :verbose, false)
# TODO: warnings_as_errors
options = Keyword.get(project, :asn1_options, [])
File.mkdir_p!(dest_path)
targets = extract_targets(source_paths, dest_path, Keyword.get(opts, :force, false))
compile(manifest(), targets, verbose?, fn input, output ->
options = options ++ [:noobj, outdir: to_charlist(Path.dirname(output))]
:asn1ct.compile(to_charlist(input), options)
end)
end
@doc """
Returns manifests used by this compiler.
"""
def manifests(), do: [manifest()]
defp manifest(), do: Path.join(Mix.Project.manifest_path(), @manifest)
@doc """
Cleans up compilation artifacts.
"""
def clean() do
remove_files(read_manifest(manifest()))
File.rm(manifest())
end
defp module_files(dir, module) do
[
Path.join(dir, "#{module}.erl"),
Path.join(dir, "#{module}.hrl"),
Path.join(dir, "#{module}.asn1db")
]
end
defp extract_targets(source_paths, dest_path, force?) do
for source <- extract_files(List.wrap(source_paths), ["asn1", "asn", "py"]) do
module = module_name(source)
if force? or stale?(source, module_files(dest_path, module)) do
{:stale, source, Path.join(dest_path, "#{module}.erl")}
else
{:ok, source, Path.join(dest_path, "#{module}.erl")}
end
end
end
defp module_name(file) do
file |> Path.basename() |> Path.rootname()
end
defp extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
defp extract_files(paths, pattern) do
Enum.flat_map(paths, fn path ->
case File.stat(path) do
{:ok, %{type: :directory}} -> Path.wildcard("#{path}/**/#{pattern}")
{:ok, %{type: :regular}} -> [path]
_ -> []
end
end)
|> Enum.uniq()
end
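# A target set is stale when the source is newer than the oldest existing
# generated file; if none of the generated files exist yet, @min_mtime is
# used, so the source always counts as stale.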
defp stale?(source, targets) do
modified_target =
targets
|> Enum.map(&last_modified/1)
|> Enum.reject(&(&1 == @min_mtime))
|> Enum.min(fn -> @min_mtime end)
last_modified(source) > modified_target
end
defp last_modified(file) do
case File.stat(file) do
{:ok, %{mtime: mtime}} -> mtime
{:error, _} -> @min_mtime
end
end
defp compile(manifest, targets, verbose?, callback) do
stale = for {:stale, src, dest} <- targets, do: {src, dest}
previous = read_manifest(manifest)
removed = Enum.reject(previous, fn dest -> Enum.any?(targets, &match?({_, _, ^dest}, &1)) end)
entries =
Enum.reject(previous, fn dest ->
dest in removed || Enum.any?(stale, &match?({_, ^dest}, &1))
end)
if stale == [] and removed == [] do
:noop
else
remove_files(removed)
compiling_n(length(stale), "asn1")
{status, new_entries} = compile(stale, callback, verbose?)
write_manifest(manifest, entries ++ new_entries)
status
end
end
defp compile(stale, callback, verbose?) do
stale
|> Enum.map(fn {input, output} ->
case callback.(input, output) do
:ok ->
verbose? && Mix.shell().info("Compiled #{input}")
{:ok, [output]}
{:error, _errors} ->
{:error, []}
end
end)
|> Enum.reduce({:ok, []}, fn {status1, entries1}, {status2, entries2} ->
status = if status1 == :error or status2 == :error, do: :error, else: :ok
{status, entries1 ++ entries2}
end)
end
defp compiling_n(1, ext), do: Mix.shell().info("Compiling 1 file (.#{ext})")
defp compiling_n(n, ext), do: Mix.shell().info("Compiling #{n} files (.#{ext})")
defp remove_files(to_remove) do
to_remove
|> Enum.flat_map(&module_files(Path.dirname(&1), module_name(&1)))
|> Enum.each(&File.rm/1)
end
defp read_manifest(file) do
try do
file |> File.read!() |> :erlang.binary_to_term()
rescue
_ -> []
else
{@manifest_vsn, data} when is_list(data) -> data
_ -> []
end
end
defp write_manifest(file, entries) do
Path.dirname(file) |> File.mkdir_p!()
File.write!(file, :erlang.term_to_binary({@manifest_vsn, entries}))
end
end
|
lib/mix/tasks/compile.asn1.ex
| 0.737442
| 0.444685
|
compile.asn1.ex
|
starcoder
|
defmodule Macro.Env do
@moduledoc """
A struct that holds compile time environment information.
The current environment can be accessed at any time as
`__ENV__/0`. Inside macros, the caller environment can be
accessed as `__CALLER__/0`.
An instance of `Macro.Env` must not be modified by hand. If you need to
create a custom environment to pass to `Code.eval_quoted/3`, use the
following trick:
def make_custom_env do
import SomeModule, only: [some_function: 2]
alias A.B.C
__ENV__
end
You may then call `make_custom_env()` to get a struct with the desired
imports and aliases included.
It contains the following fields:
* `module` - the current module name
* `file` - the current file name as a binary
* `line` - the current line as an integer
* `function` - a tuple as `{atom, integer}`, where the first
element is the function name and the second its arity; returns
`nil` if not inside a function
* `context` - the context of the environment; it can be `nil`
(default context), inside a guard or inside a match
* `aliases` - a list of two-element tuples, where the first
element is the aliased name and the second one the actual name
* `requires` - the list of required modules
* `functions` - a list of functions imported from each module
* `macros` - a list of macros imported from each module
* `macro_aliases` - a list of aliases defined inside the current macro
* `context_modules` - a list of modules defined in the current context
* `lexical_tracker` - PID of the lexical tracker which is responsible for
keeping user info
* `vars` - a list keeping all defined variables as `{var, context}`
The following fields are private and must not be accessed or relied on:
* `export_vars` - a list keeping all variables to be exported in a
construct (may be `nil`)
* `match_vars` - controls how "new" variables are handled. Inside a
match it is a list with all variables in a match. Outside of a match
is either `:warn` or `:apply`
* `prematch_vars` - a list of variables defined before a match (is
`nil` when not inside a match)
"""
@type name_arity :: {atom, arity}
@type file :: binary
@type line :: non_neg_integer
@type aliases :: [{module, module}]
@type macro_aliases :: [{module, {integer, module}}]
@type context :: :match | :guard | nil
@type requires :: [module]
@type functions :: [{module, [name_arity]}]
@type macros :: [{module, [name_arity]}]
@type context_modules :: [module]
@type vars :: [{atom, atom | non_neg_integer}]
@type lexical_tracker :: pid | nil
@type local :: atom | nil
@opaque export_vars :: vars | nil
@opaque match_vars :: vars | :warn | :apply
@opaque prematch_vars :: vars | nil
@type t :: %{
__struct__: __MODULE__,
module: atom,
file: file,
line: line,
function: name_arity | nil,
context: context,
requires: requires,
aliases: aliases,
functions: functions,
macros: macros,
macro_aliases: aliases,
context_modules: context_modules,
vars: vars,
export_vars: export_vars,
match_vars: match_vars,
prematch_vars: prematch_vars,
lexical_tracker: lexical_tracker
}
def __struct__ do
%{
__struct__: __MODULE__,
module: nil,
file: "nofile",
line: 0,
function: nil,
context: nil,
requires: [],
aliases: [],
functions: [],
macros: [],
macro_aliases: [],
context_modules: [],
vars: [],
lexical_tracker: nil,
export_vars: nil,
match_vars: :warn,
prematch_vars: nil
}
end
def __struct__(kv) do
Enum.reduce(kv, __struct__(), fn {k, v}, acc -> :maps.update(k, v, acc) end)
end
@doc """
Returns a keyword list containing the file and line
information as keys.
"""
@spec location(t) :: keyword
def location(env)
def location(%{__struct__: Macro.Env, file: file, line: line}) do
[file: file, line: line]
end
@doc """
Returns whether the compilation environment is currently
inside a guard.
"""
@spec in_guard?(t) :: boolean
def in_guard?(env)
def in_guard?(%{__struct__: Macro.Env, context: context}), do: context == :guard
@doc """
Returns whether the compilation environment is currently
inside a match clause.
"""
@spec in_match?(t) :: boolean
def in_match?(env)
def in_match?(%{__struct__: Macro.Env, context: context}), do: context == :match
@doc """
Returns the environment stacktrace.
"""
@spec stacktrace(t) :: list
def stacktrace(%{__struct__: Macro.Env} = env) do
cond do
is_nil(env.module) ->
[{:elixir_compiler, :__FILE__, 1, relative_location(env)}]
is_nil(env.function) ->
[{env.module, :__MODULE__, 0, relative_location(env)}]
true ->
{name, arity} = env.function
[{env.module, name, arity, relative_location(env)}]
end
end
defp relative_location(env) do
[file: Path.relative_to_cwd(env.file), line: env.line]
end
end
|
lib/elixir/lib/macro/env.ex
| 0.902303
| 0.537284
|
env.ex
|
starcoder
|
defmodule Linguist.MemorizedVocabulary do
alias Linguist.Cldr.Number.Cardinal
alias Linguist.Compiler
alias Linguist.{LocaleError, NoTranslationError}
defmodule TranslationDecodeError do
defexception [:message]
end
@moduledoc """
Defines lookup functions for given translation locales, with binding interpolation.
Locales are defined with the `locale/2` function, accepting a locale name and
a String path to evaluate for the translations list.
For example, given the following translations :
locale "en", [
flash: [
notice: [
hello: "hello %{first} %{last}",
]
],
users: [
title: "Users",
]
]
locale "fr", Path.join([__DIR__, "fr.exs"])
this module will respond to these functions:
t("en", "flash.notice.hello", bindings \\ []), do: # ...
t("en", "users.title", bindings \\ []), do: # ...
t("fr", "flash.notice.hello", bindings \\ []), do: # ...
"""
def t(locale, path, bindings \\ [])
def t(nil, _, _), do: raise(LocaleError, nil)
def t(locale, path, bindings) do
pluralization_key = Application.fetch_env!(:linguist, :pluralization_key)
norm_locale = normalize_locale(locale)
if Keyword.has_key?(bindings, pluralization_key) do
plural_atom =
bindings
|> Keyword.get(pluralization_key)
|> Cardinal.plural_rule(norm_locale)
do_t(norm_locale, "#{path}.#{plural_atom}", bindings)
else
do_t(norm_locale, path, bindings)
end
end
def t!(locale, path, bindings \\ []) do
case t(locale, path, bindings) do
{:ok, translation} ->
translation
{:error, :no_translation} ->
raise %NoTranslationError{message: "#{locale}: #{path}"}
end
end
# sobelow_skip ["DOS.StringToAtom"]
defp do_t(locale, translation_key, bindings) do
case :ets.lookup(:translations_registry, "#{locale}.#{translation_key}") do
[] ->
{:error, :no_translation}
[{_, string}] ->
translation =
Compiler.interpol_rgx()
|> Regex.split(string, on: [:head, :tail])
|> Enum.reduce("", fn
<<"%{" <> rest>>, acc ->
key = String.to_atom(String.trim_trailing(rest, "}"))
acc <> to_string(Keyword.fetch!(bindings, key))
segment, acc ->
acc <> segment
end)
{:ok, translation}
end
end
def locales do
tuple =
:ets.lookup(:translations_registry, "memorized_vocabulary.locales")
|> List.first()
if tuple do
elem(tuple, 1)
end
end
def add_locale(name) do
current_locales = locales() || []
:ets.insert(
:translations_registry,
{"memorized_vocabulary.locales", [name | current_locales]}
)
end
def update_translations(locale_name, loaded_source) do
loaded_source
|> Enum.map(fn {key, translation_string} ->
:ets.insert(:translations_registry, {"#{locale_name}.#{key}", translation_string})
end)
end
@doc """
Embeds locales from provided source
* name - The String name of the locale, ie "en", "fr"
* source - The String file path to load YAML from that returns a structured list of translations
Examples
locale "es", Path.join([__DIR__, "es.yml"])
"""
def locale(name, source) do
loaded_source = Linguist.MemorizedVocabulary._load_yaml_file(source)
update_translations(name, loaded_source)
add_locale(name)
end
@doc """
Function used internally to load a yaml file. Please use
the `locale` macro with a path to a yaml file - this function
will not work as expected if called directly.
"""
def _load_yaml_file(source) do
if :ets.info(:translations_registry) == :undefined do
:ets.new(:translations_registry, [:named_table, :set, :protected])
end
file_data =
case YamlElixir.read_all_from_file(source) do
{:ok, [file_data]} ->
file_data
{:error, _reason} ->
raise %TranslationDecodeError{message: "Decode failed for file #{source}"}
end
%{paths: paths} =
file_data
|> Enum.reduce(
%{paths: %{}, current_prefix: ""},
&Linguist.MemorizedVocabulary._yaml_reducer/2
)
paths
end
@doc """
Recursive function used internally for loading yaml files.
Not intended for external use
"""
def _yaml_reducer({key, value}, acc) when is_binary(value) do
key_name =
if acc.current_prefix == "" do
key
else
"#{acc.current_prefix}.#{key}"
end
%{
paths: Map.put(acc.paths, key_name, value),
current_prefix: acc.current_prefix
}
end
def _yaml_reducer({key, value}, acc) do
next_prefix =
if acc.current_prefix == "" do
key
else
"#{acc.current_prefix}.#{key}"
end
reduced =
Enum.reduce(
value,
%{
paths: acc.paths,
current_prefix: next_prefix
},
&Linguist.MemorizedVocabulary._yaml_reducer/2
)
%{
paths: Map.merge(acc.paths, reduced.paths),
current_prefix: acc.current_prefix
}
end
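# For intuition, a nested YAML source like the following (hypothetical):
#
#   flash:
#     notice:
#       hello: "hello %{first} %{last}"
#
# is flattened by the reducer into dotted paths:
#
#   %{"flash.notice.hello" => "hello %{first} %{last}"}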
# @privatedoc
# Takes a locale string as an argument. If the string contains a single `-`,
# it is split on the `-`, the language part is downcased, and the country part
# is upcased (e.g. "fr-ca" becomes "fr-CA"). A locale with no `-` is simply
# downcased; a locale containing more than one `-` raises a LocaleError.
def normalize_locale(locale) do
if String.match?(locale, ~r/-/) do
case String.split(locale, "-") do
[lang, country] ->
Enum.join([String.downcase(lang), String.upcase(country)], "-")
_ ->
raise(LocaleError, locale)
end
else
String.downcase(locale)
end
end
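# Examples (illustrative):
#
#   normalize_locale("fr-ca")  #=> "fr-CA"
#   normalize_locale("EN")     #=> "en"
#   normalize_locale("a-b-c")  #=> raises LocaleError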
end
|
lib/linguist/memorized_vocabulary.ex
| 0.787073
| 0.482185
|
memorized_vocabulary.ex
|
starcoder
|
defmodule Cldr.Eternal.Priv do
@moduledoc false
# This module contains code private to the Eternal project, basically just
# providing utility functions and macros. Nothing too interesting to see here
# beyond shorthands for common blocks.
# we need is_table/1
import Cldr.Eternal.Table
alias Cldr.Eternal.Table
# we also need logging
require Logger
@doc """
Provides a safe execution environment for ETS actions.
If any errors occur inside ETS, we simply return a false value. It should be
noted that the table is passed through purely as sugar so we can use inline
anonymous functions.
"""
@spec ets_try(table :: Table.t(), fun :: function) :: any
def ets_try(table, fun) when is_table(table) and is_function(fun, 1) do
fun.(table)
rescue
_ -> false
end
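# A minimal usage sketch (hypothetical table name): any error raised inside
# ETS, such as the table no longer existing, collapses to `false`:
#
#   ets_try(:my_table, &:ets.insert(&1, {:key, :value}))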
@doc """
Gifts away an ETS table to another process.
This must be called from within the owning process.
"""
@spec gift(table :: Table.t(), pid :: pid) :: any | false
def gift(table, pid) when is_table(table) and is_pid(pid),
do: ets_try(table, &:ets.give_away(&1, pid, :gift))
@doc """
Sets the Heir of an ETS table to a given process.
This must be called from within the owning process.
"""
@spec heir(table :: Table.t(), pid :: pid) :: any | false
def heir(table, pid) when is_table(table) and is_pid(pid),
do: ets_try(table, &:ets.setopts(&1, {:heir, pid, :heir}))
@doc """
Logs a message inside a noisy environment.
If the options contain a truthy quiet flag, no logging occurs.
"""
@spec log(msg :: any, opts :: Keyword.t()) :: :ok
def log(msg, opts) when is_list(opts) do
noisy(opts, fn ->
Logger.debug(fn -> "[eternal] #{msg}" end)
end)
end
@doc """
Executes a function only in a noisy environment.
Noisy environments are determined by the opts having a falsy quiet flag.
"""
@spec noisy(opts :: Keyword.t(), fun :: function) :: :ok
def noisy(opts, fun) when is_list(opts) and is_function(fun, 0) do
!Keyword.get(opts, :quiet) && fun.()
:ok
end
@doc """
Converts a PID to a Binary using `inspect/1`.
"""
@spec spid(pid :: pid) :: spid :: binary
def spid(pid),
do: inspect(pid)
@doc """
Determines if a list of arguments are correctly formed.
"""
defmacro is_opts(name, ets_opts, opts) do
quote do
is_atom(unquote(name)) and
is_list(unquote(ets_opts)) and
is_list(unquote(opts))
end
end
@doc """
Determines if a 3-tuple is a {module, function, args}
"""
defmacro is_mfa(module, function, args) do
quote do
is_atom(unquote(module)) and
is_atom(unquote(function)) and
is_list(unquote(args))
end
end
end
|
lib/cldr/eternal/priv.ex
| 0.603348
| 0.489381
|
priv.ex
|
starcoder
|
defmodule Phoenix.Tracker.State do
@moduledoc """
Provides an ORSWOT CRDT.
"""
alias Phoenix.Tracker.{State, Clock}
@type name :: term
@type topic :: String.t
@type key :: term
@type meta :: map
@type ets_id :: :ets.tid
@type clock :: pos_integer
@type tag :: {name, clock}
@type cloud :: MapSet.t
@type clouds :: %{name => cloud}
@type context :: %{name => clock}
@type values :: ets_id | :extracted | %{tag => {pid, topic, key, meta}}
@type value :: {{topic, pid, key}, meta, tag}
@type key_meta :: {key, meta}
@type delta :: %State{mode: :delta}
@type pid_lookup :: {pid, topic, key}
@type t :: %State{
replica: name,
context: context,
clouds: clouds,
values: values,
pids: ets_id,
mode: :unset | :delta | :normal,
delta: :unset | delta,
replicas: %{name => :up | :down},
range: {context, context}
}
defstruct replica: nil,
context: %{},
clouds: %{},
values: nil,
pids: nil,
mode: :unset,
delta: :unset,
replicas: %{},
range: {%{}, %{}}
@compile {:inline, tag: 1, clock: 1, put_tag: 2, delete_tag: 2, remove_delta_tag: 2}
@doc """
Creates a new set for the replica.
## Examples
iex> Phoenix.Tracker.State.new(:replica1)
%Phoenix.Tracker.State{...}
"""
@spec new(name, atom) :: t
def new(replica, shard_name) do
reset_delta(%State{
replica: replica,
context: %{replica => 0},
mode: :normal,
values: :ets.new(shard_name, [:named_table, :protected, :ordered_set]),
pids: :ets.new(:pids, [:duplicate_bag]),
replicas: %{replica => :up}})
end
@doc """
Returns the causal context for the set.
"""
@spec clocks(t) :: {name, context}
def clocks(%State{replica: rep, context: ctx}), do: {rep, ctx}
@doc """
Adds a new element to the set.
"""
@spec join(t, pid, topic, key, meta) :: t
def join(%State{} = state, pid, topic, key, meta \\ %{}) do
add(state, pid, topic, key, meta)
end
@doc """
Removes an element from the set.
"""
@spec leave(t, pid, topic, key) :: t
def leave(%State{pids: pids} = state, pid, topic, key) do
pids
|> :ets.match_object({pid, topic, key})
|> case do
[{^pid, ^topic, ^key}] -> remove(state, pid, topic, key)
[] -> state
end
end
@doc """
Removes all elements from the set for the given pid.
"""
@spec leave(t, pid) :: t
def leave(%State{pids: pids} = state, pid) do
pids
|> :ets.lookup(pid)
|> Enum.reduce(state, fn {^pid, topic, key}, acc ->
remove(acc, pid, topic, key)
end)
end
@doc """
Returns a list of elements in the set belonging to an online replica.
"""
@spec online_list(t) :: [value]
def online_list(%State{values: values} = state) do
replicas = down_replicas(state)
:ets.select(values, [{ {:_, :_, {:"$1", :_}},
not_in(:"$1", replicas), [:"$_"]}])
end
@doc """
Returns a list of elements for the topic who belong to an online replica.
"""
@spec get_by_topic(t, topic) :: [key_meta]
def get_by_topic(%State{values: values} = state, topic) do
tracked_values(values, topic, down_replicas(state))
end
@doc """
Returns a list of elements for the topic who belong to an online replica.
"""
@spec get_by_key(t, topic, key) :: [key_meta]
def get_by_key(%State{values: values} = state, topic, key) do
case tracked_key(values, topic, key, down_replicas(state)) do
[] -> []
[_|_] = metas -> metas
end
end
@doc """
Performs table lookup for tracked elements in the topic.
Filters out those present on downed replicas.
"""
def tracked_values(table, topic, down_replicas) do
:ets.select(table,
[{{{topic, :_, :"$1"}, :"$2", {:"$3", :_}},
not_in(:"$3", down_replicas),
[{{:"$1", :"$2"}}]}])
end
@doc """
Performs table lookup for tracked key in the topic.
Filters out those present on downed replicas.
"""
def tracked_key(table, topic, key, down_replicas) do
:ets.select(table,
[{{{topic, :"$1", key}, :"$2", {:"$3", :_}},
not_in(:"$3", down_replicas),
[{{:"$1", :"$2"}}]}])
end
defp not_in(_pos, []), do: []
defp not_in(pos, replicas), do: [not: ors(pos, replicas)]
defp ors(pos, [rep]), do: {:"=:=", pos, {rep}}
defp ors(pos, [rep | rest]), do: {:or, {:"=:=", pos, {rep}}, ors(pos, rest)}
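# Illustrative expansion (hypothetical replicas): not_in(:"$1", [:a, :b])
# builds the match-spec guard
#
#   [not: {:or, {:"=:=", :"$1", {:a}}, {:"=:=", :"$1", {:b}}}]
#
# so rows tagged with a downed replica are filtered out inside ETS.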
@doc """
Returns the element matching the pid, topic, and key.
"""
@spec get_by_pid(t, pid, topic, key) :: value | nil
def get_by_pid(%State{values: values}, pid, topic, key) do
case :ets.lookup(values, {topic, pid, key}) do
[] -> nil
[one] -> one
end
end
@doc """
Returns all elements for the pid.
"""
@spec get_by_pid(t, pid) :: [value]
def get_by_pid(%State{pids: pids, values: values}, pid) do
case :ets.lookup(pids, pid) do
[] -> []
matches ->
:ets.select(values, Enum.map(matches, fn {^pid, topic, key} ->
{{{topic, pid, key}, :_, :_}, [], [:"$_"]}
end))
end
end
@doc """
Checks if set has a non-empty delta.
"""
@spec has_delta?(t) :: boolean
def has_delta?(%State{delta: %State{clouds: clouds}}) do
Enum.any?(clouds, fn {_name, cloud} -> MapSet.size(cloud) != 0 end)
end
@doc """
Resets the set's delta.
"""
@spec reset_delta(t) :: t
def reset_delta(%State{context: ctx, replica: replica} = state) do
delta_ctx = Map.take(ctx, [replica])
delta = %State{replica: replica,
values: %{},
range: {delta_ctx, delta_ctx},
mode: :delta}
%State{state | delta: delta}
end
@doc """
Extracts the set's elements from ets into a mergeable list.
Used when merging two sets.
"""
@spec extract(t, remote_ref :: name, context) :: t | {t, values}
def extract(%State{mode: :delta, values: values, clouds: clouds} = state, remote_ref, remote_context) do
{start_ctx, end_ctx} = state.range
known_keys = Map.keys(remote_context)
pruned_clouds = Map.take(clouds, known_keys)
pruned_start = Map.take(start_ctx, known_keys)
pruned_end = Map.take(end_ctx, known_keys)
map = Enum.reduce(values, [], fn
{{^remote_ref, _clock}, _data}, acc -> acc
{{replica, _clock} = tag, data}, acc ->
if Map.has_key?(remote_context, replica) do
[{tag, data} | acc]
else
acc
end
end) |> :maps.from_list()
%State{state | values: map, clouds: pruned_clouds, range: {pruned_start, pruned_end}}
end
def extract(%State{mode: :normal, values: values, clouds: clouds} = state, remote_ref, remote_context) do
pruned_clouds = Map.take(clouds, Map.keys(remote_context))
# fn {{topic, pid, key}, meta, {replica, clock}} when replica !== remote_ref ->
# {{replica, clock}, {pid, topic, key, meta}}
# end
ms = [{
{{:"$1", :"$2", :"$3"}, :"$4", {:"$5", :"$6"}},
[{:"=/=", :"$5", {:const, remote_ref}}],
[{{{{:"$5", :"$6"}}, {{:"$2", :"$1", :"$3", :"$4"}}}}]
}]
data =
foldl(values, [], ms, fn {{replica, _} = tag, data}, acc ->
if match?(%{^replica => _}, remote_context) do
[{tag, data} | acc]
else
acc
end
end)
{%State{state | clouds: pruned_clouds, pids: nil, values: nil, delta: :unset}, Map.new(data)}
end
@doc """
Merges two sets, or a delta into a set.
Returns a 3-tuple of the updated set, and the joined and left elements.
## Examples
iex> {s1, joined, left} =
...>   Phoenix.Tracker.State.merge(s1, Phoenix.Tracker.State.extract(s2))
{%Phoenix.Tracker.State{}, [...], [...]}
"""
@spec merge(local :: t, {remote :: t, values} | delta) :: {new_local :: t, joins :: [value], leaves :: [value]}
def merge(%State{} = local, %State{mode: :delta, values: remote_map} = remote) do
merge(local, remote, remote_map)
end
def merge(%State{} = local, {%State{} = remote, remote_map}) do
merge(local, remote, remote_map)
end
defp merge(local, remote, remote_map) do
{pids, joins} = accumulate_joins(local, remote_map)
{clouds, delta, leaves} = observe_removes(local, remote, remote_map)
true = :ets.insert(local.values, joins)
true = :ets.insert(local.pids, pids)
known_remote_context = Map.take(remote.context, Map.keys(local.context))
ctx = Clock.upperbound(local.context, known_remote_context)
new_state =
%State{local | clouds: clouds, delta: delta}
|> put_context(ctx)
|> compact()
{new_state, joins, leaves}
end
@spec accumulate_joins(t, values) :: joins :: {[pid_lookup], [values]}
defp accumulate_joins(local, remote_map) do
%State{context: context, clouds: clouds} = local
Enum.reduce(remote_map, {[], []}, fn {{replica, _} = tag, {pid, topic, key, meta}}, {pids, adds} ->
if not match?(%{^replica => _}, context) or in?(context, clouds, tag) do
{pids, adds}
else
{[{pid, topic, key} | pids], [{{topic, pid, key}, meta, tag} | adds]}
end
end)
end
@spec observe_removes(t, t, map) :: {clouds, delta, leaves :: [value]}
defp observe_removes(%State{pids: pids, values: values, delta: delta} = local, remote, remote_map) do
unioned_clouds = union_clouds(local, remote)
%State{context: remote_context, clouds: remote_clouds, replica: replica} = remote
init = {unioned_clouds, delta, []}
# fn {_, _, {^replica, _}} = result -> result end
ms = [{{:_, :_, {replica, :_}}, [], [:"$_"]}]
foldl(values, init, ms, fn {{topic, pid, key} = values_key, _, tag} = el, {clouds, delta, leaves} ->
if not match?(%{^tag => _}, remote_map) and in?(remote_context, remote_clouds, tag) do
:ets.delete(values, values_key)
:ets.match_delete(pids, {pid, topic, key})
{delete_tag(clouds, tag), remove_delta_tag(delta, tag), [el | leaves]}
else
{clouds, delta, leaves}
end
end)
end
defp put_tag(clouds, {name, _clock} = tag) do
case clouds do
%{^name => cloud} -> %{clouds | name => MapSet.put(cloud, tag)}
_ -> Map.put(clouds, name, MapSet.new([tag]))
end
end
defp delete_tag(clouds, {name, _clock} = tag) do
case clouds do
%{^name => cloud} -> %{clouds | name => MapSet.delete(cloud, tag)}
_ -> clouds
end
end
defp union_clouds(%State{mode: :delta} = local, %State{} = remote) do
Enum.reduce(remote.clouds, local.clouds, fn {name, remote_cloud}, acc ->
Map.update(acc, name, remote_cloud, &MapSet.union(&1, remote_cloud))
end)
end
defp union_clouds(%State{mode: :normal, context: local_ctx} = local, %State{} = remote) do
Enum.reduce(remote.clouds, local.clouds, fn {name, remote_cloud}, acc ->
if Map.has_key?(local_ctx, name) do
Map.update(acc, name, remote_cloud, &MapSet.union(&1, remote_cloud))
else
acc
end
end)
end
def merge_deltas(%State{mode: :delta} = local, %State{mode: :delta, values: remote_values} = remote) do
%{values: local_values, range: {local_start, local_end}, context: local_context, clouds: local_clouds} = local
%{range: {remote_start, remote_end}, context: remote_context, clouds: remote_clouds} = remote
if Clock.dominates_or_equal?(local_end, remote_start) do
new_start = Clock.lowerbound(local_start, remote_start)
new_end = Clock.upperbound(local_end, remote_end)
clouds = union_clouds(local, remote)
filtered_locals = for {tag, value} <- local_values,
match?(%{^tag => _}, remote_values) or not in?(remote_context, remote_clouds, tag),
do: {tag, value}
merged_vals = for {tag, value} <- remote_values,
not match?(%{^tag => _}, local_values) and not in?(local_context, local_clouds, tag),
into: filtered_locals,
do: {tag, value}
{:ok, %State{local | clouds: clouds, values: Map.new(merged_vals), range: {new_start, new_end}}}
else
{:error, :not_contiguous}
end
end
@doc """
Marks a replica as up in the set and returns rejoined users.
"""
@spec replica_up(t, name) :: {t, joins :: [values], leaves :: []}
def replica_up(%State{replicas: replicas, context: ctx} = state, replica) do
{%State{state |
context: Map.put_new(ctx, replica, 0),
replicas: Map.put(replicas, replica, :up)}, replica_users(state, replica), []}
end
@doc """
Marks a replica as down in the set and returns left users.
"""
@spec replica_down(t, name) :: {t, joins :: [], leaves :: [values]}
def replica_down(%State{replicas: replicas} = state, replica) do
{%State{state | replicas: Map.put(replicas, replica, :down)}, [], replica_users(state, replica)}
end
@doc """
Removes all elements for replicas that are permanently gone.
"""
@spec remove_down_replicas(t, name) :: t
def remove_down_replicas(%State{mode: :normal, context: ctx, values: values, pids: pids} = state, replica) do
new_ctx = Map.delete(ctx, replica)
# fn {key, _, {^replica, _}} -> key end
ms = [{{:"$1", :_, {replica, :_}}, [], [:"$1"]}]
foldl(values, nil, ms, fn {topic, pid, key} = values_key, _ ->
:ets.delete(values, values_key)
:ets.match_delete(pids, {pid, topic, key})
nil
end)
new_clouds = Map.delete(state.clouds, replica)
new_delta = remove_down_replicas(state.delta, replica)
%State{state | context: new_ctx, clouds: new_clouds, delta: new_delta}
end
def remove_down_replicas(%State{mode: :delta, range: range} = delta, replica) do
{start_ctx, end_ctx} = range
new_start = Map.delete(start_ctx, replica)
new_end = Map.delete(end_ctx, replica)
new_clouds = Map.delete(delta.clouds, replica)
new_vals = Enum.reduce(delta.values, delta.values, fn
{{^replica, _clock} = tag, {_pid, _topic, _key, _meta}}, vals ->
Map.delete(vals, tag)
{{_replica, _clock} = _tag, {_pid, _topic, _key, _meta}}, vals ->
vals
end)
%State{delta | range: {new_start, new_end}, clouds: new_clouds, values: new_vals}
end
@doc """
Returns the size of the delta.
"""
@spec delta_size(delta) :: pos_integer
def delta_size(%State{mode: :delta, clouds: clouds, values: values}) do
Enum.reduce(clouds, map_size(values), fn {_name, cloud}, sum ->
sum + MapSet.size(cloud)
end)
end
@spec add(t, pid, topic, key, meta) :: t
defp add(%State{} = state, pid, topic, key, meta) do
state
|> bump_clock()
|> do_add(pid, topic, key, meta)
end
defp do_add(%State{delta: delta} = state, pid, topic, key, meta) do
tag = tag(state)
true = :ets.insert(state.values, {{topic, pid, key}, meta, tag})
true = :ets.insert(state.pids, {pid, topic, key})
new_delta = %State{delta | values: Map.put(delta.values, tag, {pid, topic, key, meta})}
%State{state | delta: new_delta}
end
@spec remove(t, pid, topic, key) :: t
defp remove(%State{pids: pids, values: values} = state, pid, topic, key) do
[{{^topic, ^pid, ^key}, _meta, tag}] = :ets.lookup(values, {topic, pid, key})
1 = :ets.select_delete(values, [{{{topic, pid, key}, :_, :_}, [], [true]}])
1 = :ets.select_delete(pids, [{{pid, topic, key}, [], [true]}])
pruned_clouds = delete_tag(state.clouds, tag)
new_delta = remove_delta_tag(state.delta, tag)
bump_clock(%State{state | clouds: pruned_clouds, delta: new_delta})
end
@spec remove_delta_tag(delta, tag) :: delta
defp remove_delta_tag(%{mode: :delta, values: values, clouds: clouds} = delta, tag) do
%{delta | clouds: put_tag(clouds, tag), values: Map.delete(values, tag)}
end
@doc """
Compacts a set's causal history.
Called as needed and after merges.
"""
@spec compact(t) :: t
def compact(%State{context: ctx, clouds: clouds} = state) do
{new_ctx, new_clouds} =
Enum.reduce(clouds, {ctx, clouds}, fn {name, cloud}, {ctx_acc, clouds_acc} ->
{new_ctx, new_cloud} = do_compact(ctx_acc, Enum.sort(MapSet.to_list(cloud)))
{new_ctx, Map.put(clouds_acc, name, MapSet.new(new_cloud))}
end)
put_context(%State{state | clouds: new_clouds}, new_ctx)
end
@spec do_compact(context, sorted_cloud_list :: list) :: {context, cloud}
defp do_compact(ctx, cloud) do
Enum.reduce(cloud, {ctx, []}, fn {replica, clock} = tag, {ctx_acc, cloud_acc} ->
case ctx_acc do
%{^replica => ctx_clock} when ctx_clock + 1 == clock ->
{%{ctx_acc | replica => clock}, cloud_acc}
%{^replica => ctx_clock} when ctx_clock >= clock ->
{ctx_acc, cloud_acc}
_ when clock == 1 ->
{Map.put(ctx_acc, replica, clock), cloud_acc}
_ ->
{ctx_acc, [tag | cloud_acc]}
end
end)
end
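# Worked example (illustrative): with context %{a: 1} and the sorted cloud
# [{:a, 2}, {:a, 3}, {:a, 5}], the contiguous tags 2 and 3 fold into the
# context, while the gap at 4 keeps {:a, 5} in the cloud:
#
#   do_compact(%{a: 1}, [{:a, 2}, {:a, 3}, {:a, 5}])
#   #=> {%{a: 3}, [{:a, 5}]}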
@compile {:inline, in?: 3, in_ctx?: 3, in_clouds?: 3}
defp in?(context, clouds, {replica, clock} = tag) do
in_ctx?(context, replica, clock) or in_clouds?(clouds, replica, tag)
end
defp in_ctx?(ctx, replica, clock) do
case ctx do
%{^replica => replica_clock} -> replica_clock >= clock
_ -> false
end
end
defp in_clouds?(clouds, replica, tag) do
case clouds do
%{^replica => cloud} -> MapSet.member?(cloud, tag)
_ -> false
end
end
@spec tag(t) :: tag
defp tag(%State{replica: rep} = state), do: {rep, clock(state)}
@spec clock(t) :: clock
defp clock(%State{replica: rep, context: ctx}), do: Map.get(ctx, rep, 0)
@spec bump_clock(t) :: t
defp bump_clock(%State{mode: :normal, replica: rep, clouds: clouds, context: ctx, delta: delta} = state) do
new_clock = clock(state) + 1
new_ctx = Map.put(ctx, rep, new_clock)
%State{state |
clouds: put_tag(clouds, {rep, new_clock}),
delta: %State{delta | clouds: put_tag(delta.clouds, {rep, new_clock})}}
|> put_context(new_ctx)
end
defp put_context(%State{delta: delta, replica: rep} = state, new_ctx) do
{start_clock, end_clock} = delta.range
new_end = Map.put(end_clock, rep, Map.get(new_ctx, rep, 0))
%State{state |
context: new_ctx,
delta: %State{delta | range: {start_clock, new_end}}}
end
@spec down_replicas(t) :: [name]
defp down_replicas(%State{replicas: replicas}) do
for {replica, :down} <- replicas, do: replica
end
@spec replica_users(t, name) :: [value]
defp replica_users(%State{values: values}, replica) do
:ets.match_object(values, {:_, :_, {replica, :_}})
end
@fold_batch_size 1000
defp foldl(table, initial, ms, func) do
foldl(:ets.select(table, ms, @fold_batch_size), initial, func)
end
defp foldl(:"$end_of_table", acc, _func), do: acc
defp foldl({objects, cont}, acc, func) do
foldl(:ets.select(cont), Enum.reduce(objects, acc, func), func)
end
end
|
lib/phoenix/tracker/state.ex
| 0.865494
| 0.429429
|
state.ex
|
starcoder
|
defmodule ElixirSense.Core.Normalized.Code.CursorContext do
@moduledoc false
@doc """
Receives a string and returns the cursor context.
This function receives a string with incomplete Elixir code,
representing a cursor position, and based on the string, it
provides contextual information about said position. The
return of this function can then be used to provide tips,
suggestions, and autocompletion functionality.
This function provides a best-effort detection and may not be
accurate under certain circumstances. See the "Limitations"
section below.
Consider adding a catch-all clause when handling the return
type of this function as new cursor information may be added
in future releases.
## Examples
iex> Code.cursor_context("")
:expr
iex> Code.cursor_context("hello_wor")
{:local_or_var, 'hello_wor'}
## Return values
* `{:alias, charlist}` - the context is an alias, potentially
a nested one, such as `Hello.Wor` or `HelloWor`
* `{:dot, inside_dot, charlist}` - the context is a dot
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, this may either be a remote call or a map
field access. Examples are `Hello.wor`, `:hello.wor`, `hello.wor`,
`Hello.nested.wor`, `hello.nested.wor`, and `@hello.world`
* `{:dot_arity, inside_dot, charlist}` - the context is a dot arity
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, it must be a remote arity. Examples are
`Hello.world/`, `:hello.world/`, `hello.world/2`, and `@hello.world/2`
* `{:dot_call, inside_dot, charlist}` - the context is a dot
call. This means parentheses or space have been added after the expression.
where `inside_dot` is either a `{:var, charlist}`, `{:alias, charlist}`,
`{:module_attribute, charlist}`, `{:unquoted_atom, charlist}` or a `dot`
itself. If a var is given, it must be a remote call. Examples are
`Hello.world(`, `:hello.world(`, `Hello.world `, `hello.world(`, `hello.world `,
and `@hello.world(`
* `:expr` - may be any expression. Autocompletion may suggest an alias,
local or var
* `{:local_or_var, charlist}` - the context is a variable or a local
(import or local) call, such as `hello_wor`
* `{:local_arity, charlist}` - the context is a local (import or local)
call, such as `hello_world/`
* `{:local_call, charlist}` - the context is a local (import or local)
call, such as `hello_world(` and `hello_world `
* `{:module_attribute, charlist}` - the context is a module attribute, such
as `@hello_wor`
* `:none` - no context possible
* `:unquoted_atom` - the context is an unquoted atom. This can be either
previous atoms or all available `:erlang` modules
## Limitations
* There is no context for operators
* The current algorithm only considers the last line of the input
* Context does not yet track strings, sigils, etc.
* Arguments of functions calls are not currently recognized
"""
@doc since: "1.12.0"
@spec cursor_context(List.Chars.t(), keyword()) ::
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:dot_arity, inside_dot, charlist}
| {:dot_call, inside_dot, charlist}
| :expr
| {:local_or_var, charlist}
| {:local_arity, charlist}
| {:local_call, charlist}
| {:module_attribute, charlist}
| :none
| {:unquoted_atom, charlist}
when inside_dot:
{:alias, charlist}
| {:dot, inside_dot, charlist}
| {:module_attribute, charlist}
| {:unquoted_atom, charlist}
| {:var, charlist}
def cursor_context(string, opts \\ [])
def cursor_context(binary, opts) when is_binary(binary) and is_list(opts) do
binary =
case :binary.matches(binary, "\n") do
[] ->
binary
matches ->
{position, _} = List.last(matches)
binary_part(binary, position + 1, byte_size(binary) - position - 1)
end
do_cursor_context(String.to_charlist(binary), opts)
end
def cursor_context(charlist, opts) when is_list(charlist) and is_list(opts) do
chunked = Enum.chunk_by(charlist, &(&1 == ?\n))
case list_last(chunked, []) do
[?\n | _] -> do_cursor_context([], opts)
rest -> do_cursor_context(rest, opts)
end
end
def cursor_context(other, opts) do
cursor_context(to_charlist(other), opts)
end
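# Illustrative results, mirroring the doc above:
#
#   cursor_context("Hello.wor")  #=> {:dot, {:alias, 'Hello'}, 'wor'}
#   cursor_context("@attr")      #=> {:module_attribute, 'attr'}
#   cursor_context("foo/")       #=> {:local_arity, 'foo'}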
@operators '\\<>+-*/:=|&~^@%'
@non_closing_punctuation '.,([{;'
@closing_punctuation ')]}'
@space '\t\s'
@closing_identifier '?!'
@operators_and_non_closing_punctuation @operators ++ @non_closing_punctuation
@non_identifier @closing_identifier ++
@operators ++ @non_closing_punctuation ++ @closing_punctuation ++ @space
defp do_cursor_context(list, _opts) do
reverse = Enum.reverse(list)
case strip_spaces(reverse, 0) do
# It is empty
{[], _} ->
:expr
{[?: | _], 0} ->
{:unquoted_atom, ''}
{[?@ | _], 0} ->
{:module_attribute, ''}
{[?. | rest], _} ->
dot(rest, '')
# It is a local or remote call with parens
{[?( | rest], _} ->
call_to_cursor_context(rest)
# A local arity definition
{[?/ | rest], _} ->
case identifier_to_cursor_context(rest) do
{:local_or_var, acc} -> {:local_arity, acc}
{:dot, base, acc} -> {:dot_arity, base, acc}
_ -> :none
end
# Starting a new expression
{[h | _], _} when h in @operators_and_non_closing_punctuation ->
:expr
# It is a local or remote call without parens
{rest, spaces} when spaces > 0 ->
call_to_cursor_context(rest)
# It is an identifier
_ ->
identifier_to_cursor_context(reverse)
end
end
defp strip_spaces([h | rest], count) when h in @space, do: strip_spaces(rest, count + 1)
defp strip_spaces(rest, count), do: {rest, count}
defp call_to_cursor_context(reverse) do
case identifier_to_cursor_context(reverse) do
{:local_or_var, acc} -> {:local_call, acc}
{:dot, base, acc} -> {:dot_call, base, acc}
_ -> :none
end
end
defp identifier_to_cursor_context(reverse) do
case identifier(reverse) do
# Parse :: first to avoid ambiguity with atoms
{:alias, false, '::' ++ _, _} -> :none
{kind, _, '::' ++ _, acc} -> alias_or_local_or_var(kind, acc)
# Now handle atoms, any other atom is unexpected
{_kind, _, ':' ++ _, acc} -> {:unquoted_atom, acc}
{:atom, _, _, _} -> :none
# Parse .. first to avoid ambiguity with dots
{:alias, false, _, _} -> :none
{kind, _, '..' ++ _, acc} -> alias_or_local_or_var(kind, acc)
# Module attributes
{:alias, _, '@' ++ _, _} -> :none
{:identifier, _, '@' ++ _, acc} -> {:module_attribute, acc}
# Everything else
{:alias, _, '.' ++ rest, acc} -> nested_alias(rest, acc)
{:identifier, _, '.' ++ rest, acc} -> dot(rest, acc)
{kind, _, _, acc} -> alias_or_local_or_var(kind, acc)
:none -> :none
end
end
defp nested_alias(rest, acc) do
case identifier_to_cursor_context(rest) do
{:alias, prev} -> {:alias, prev ++ '.' ++ acc}
_ -> :none
end
end
defp dot(rest, acc) do
case identifier_to_cursor_context(rest) do
{:local_or_var, prev} -> {:dot, {:var, prev}, acc}
{:unquoted_atom, _} = prev -> {:dot, prev, acc}
{:alias, _} = prev -> {:dot, prev, acc}
{:dot, _, _} = prev -> {:dot, prev, acc}
{:module_attribute, _} = prev -> {:dot, prev, acc}
_ -> :none
end
end
defp alias_or_local_or_var(:alias, acc), do: {:alias, acc}
defp alias_or_local_or_var(:identifier, acc), do: {:local_or_var, acc}
defp alias_or_local_or_var(_, _), do: :none
defp identifier([?? | rest]), do: check_identifier(rest, [??])
defp identifier([?! | rest]), do: check_identifier(rest, [?!])
defp identifier(rest), do: check_identifier(rest, [])
defp check_identifier([h | _], _acc) when h in @non_identifier, do: :none
defp check_identifier(rest, acc), do: rest_identifier(rest, acc)
defp rest_identifier([h | rest], acc) when h not in @non_identifier do
rest_identifier(rest, [h | acc])
end
defp rest_identifier(rest, acc) do
case String.Tokenizer.tokenize(acc) do
{kind, _, [], _, ascii_only?, _} -> {kind, ascii_only?, rest, acc}
_ -> :none
end
end
# taken from https://github.com/elixir-lang/elixir/blob/v1.12/lib/elixir/lib/list.ex
@compile {:inline, list_last: 2}
# defp list_last(list, default \\ nil)
defp list_last([], default), do: default
defp list_last([head], _default), do: head
defp list_last([_ | tail], default), do: list_last(tail, default)
end
|
lib/elixir_sense/core/normalized/code/cursor_context.ex
| 0.786418
| 0.563918
|
cursor_context.ex
|
starcoder
|
defmodule Plug.Cowboy do
@moduledoc """
Adapter interface to the Cowboy2 webserver.
## Options
* `:ip` - the ip to bind the server to.
Must be either a tuple in the format `{a, b, c, d}` with each value in `0..255` for IPv4,
or a tuple in the format `{a, b, c, d, e, f, g, h}` with each value in `0..65535` for IPv6,
or a tuple in the format `{:local, path}` for a unix socket at the given `path`.
* `:port` - the port to run the server.
Defaults to 4000 (http) and 4040 (https).
Must be 0 when `:ip` is a `{:local, path}` tuple.
* `:dispatch` - manually configure Cowboy's dispatch.
If this option is used, the given plug won't be initialized
nor dispatched to (and doing so becomes the user's responsibility).
* `:ref` - the reference name to be used.
Defaults to `plug.HTTP` (http) and `plug.HTTPS` (https).
Note, the default reference name does not contain the port so in order
to serve the same plug on multiple ports you need to set the `:ref` accordingly,
e.g.: `ref: MyPlug_HTTP_4000`, `ref: MyPlug_HTTP_4001`, etc.
This is the value that needs to be given on shutdown.
* `:compress` - Cowboy will attempt to compress the response body.
Defaults to false.
* `:stream_handlers` - List of Cowboy `stream_handlers`,
see [Cowboy docs](https://ninenines.eu/docs/en/cowboy/2.5/manual/cowboy_http/).
* `:protocol_options` - Specifies remaining protocol options,
see [Cowboy docs](https://ninenines.eu/docs/en/cowboy/2.5/manual/cowboy_http/).
* `:transport_options` - A keyword list specifying transport options,
see [ranch docs](https://ninenines.eu/docs/en/ranch/1.6/manual/ranch/).
By default `:num_acceptors` will be set to `100` and `:max_connections`
to `16_384`.
All other options are given as `:socket_opts` to the underlying transport.
When running on HTTPS, any SSL configuration should be given directly to the
adapter. See `https/3` for an example and read `Plug.SSL.configure/1` to
understand about our SSL defaults. When using a unix socket, OTP 21+ is
required for `Plug.Static` and `Plug.Conn.send_file/3` to behave correctly.
## Instrumentation
PlugCowboy uses the `:telemetry` library for instrumentation. The following
span events are published alongside the plug pipeline:
* `[:plug_adapter, :call, :start]` - dispatched at the beginning of all
calls to the pipeline.
* Measurements: `%{system_time: System.system_time}`
* Metadata: `%{conn: Plug.Conn.t, adapter: :plug_cowboy, plug: module}`
* `[:plug_adapter, :call, :stop]` - dispatched when a request finishes
processing successfully.
* Measurements: `%{duration: native_time}`
* Metadata: `%{conn: Plug.Conn.t, adapter: :plug_cowboy, plug: module}`
* `[:plug_adapter, :call, :exception]` - dispatched whenever there are
errors inside the pipeline.
* Measurements: `%{duration: native_time}`
* Metadata: `%{conn: Plug.Conn.t, adapter: :plug_cowboy, plug: module, kind: kind, reason: reason, stacktrace: stacktrace}`
Additionally, an event is published when Cowboy sends an early error response
before the plug pipeline is called:
* `[:plug_cowboy, :early_error]` - dispatched when Cowboy sends an early error response.
* Measurements: `%{system_time: System.system_time}`
* Metadata: `%{reason: term, request: %{method: binary, path: binary}, response: %{status: integer, headers: list, body: binary}}`
"""
require Logger
@doc false
def start(_type, _args) do
Logger.add_translator({Plug.Cowboy.Translator, :translate})
Supervisor.start_link([], strategy: :one_for_one)
end
# Made public with @doc false for testing.
@doc false
def args(scheme, plug, plug_opts, cowboy_options) do
{cowboy_options, non_keyword_options} = Enum.split_with(cowboy_options, &match?({_, _}, &1))
cowboy_options
|> normalize_cowboy_options(scheme)
|> to_args(scheme, plug, plug_opts, non_keyword_options)
end
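# For intuition (hypothetical plug and options), a call such as
#
#   args(:http, MyPlug, [], port: 4000)
#
# returns a three-element list suitable for :cowboy.start_clear/3, roughly:
#
#   [MyPlug.HTTP,
#    %{num_acceptors: 100, max_connections: 16_384, socket_opts: [port: 4000]},
#    %{env: %{dispatch: compiled_routes}, stream_handlers: [Plug.Cowboy.Stream]}]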
@doc """
Runs cowboy under http.
## Example
# Starts a new interface
Plug.Cowboy.http MyPlug, [], port: 80
# The interface above can be shutdown with
Plug.Cowboy.shutdown MyPlug.HTTP
"""
@spec http(module(), Keyword.t(), Keyword.t()) ::
{:ok, pid} | {:error, :eaddrinuse} | {:error, term}
def http(plug, opts, cowboy_options \\ []) do
run(:http, plug, opts, cowboy_options)
end
@doc """
Runs cowboy under https.
Besides the options described in the module documentation,
this function sets defaults and accepts all options defined
in `Plug.SSL.configure/2`.
## Example
# Starts a new interface
Plug.Cowboy.https MyPlug, [],
port: 443,
password: "<PASSWORD>",
otp_app: :my_app,
keyfile: "priv/ssl/key.pem",
certfile: "priv/ssl/cert.pem",
dhfile: "priv/ssl/dhparam.pem"
# The interface above can be shutdown with
Plug.Cowboy.shutdown MyPlug.HTTPS
"""
@spec https(module(), Keyword.t(), Keyword.t()) ::
{:ok, pid} | {:error, :eaddrinuse} | {:error, term}
def https(plug, opts, cowboy_options \\ []) do
Application.ensure_all_started(:ssl)
run(:https, plug, opts, cowboy_options)
end
@doc """
Shuts down the given reference.
"""
def shutdown(ref) do
:cowboy.stop_listener(ref)
end
@transport_options [
:connection_type,
:handshake_timeout,
:max_connections,
:logger,
:num_acceptors,
:shutdown,
:socket,
:socket_opts,
# Special cases supported by plug but not ranch
:acceptors
]
@doc """
A function for starting a Cowboy2 server under Elixir v1.5+ supervisors.
It supports all options as specified in the module documentation plus it
requires the following two options:
* `:scheme` - either `:http` or `:https`
* `:plug` - such as `MyPlug` or `{MyPlug, plug_opts}`
## Examples
Assuming your Plug module is named `MyApp` you can add it to your
supervision tree by using this function:
children = [
{Plug.Cowboy, scheme: :http, plug: MyApp, options: [port: 4040]}
]
Supervisor.start_link(children, strategy: :one_for_one)
"""
def child_spec(opts) do
scheme = Keyword.fetch!(opts, :scheme)
{plug, plug_opts} =
case Keyword.fetch!(opts, :plug) do
{_, _} = tuple -> tuple
plug -> {plug, []}
end
# We support :options for backwards compatibility.
cowboy_opts =
opts
|> Keyword.drop([:scheme, :plug, :options])
|> Kernel.++(Keyword.get(opts, :options, []))
cowboy_args = args(scheme, plug, plug_opts, cowboy_opts)
[ref, transport_opts, proto_opts] = cowboy_args
{ranch_module, cowboy_protocol, transport_opts} =
case scheme do
:http ->
{:ranch_tcp, :cowboy_clear, transport_opts}
:https ->
%{socket_opts: socket_opts} = transport_opts
socket_opts =
socket_opts
|> Keyword.put_new(:next_protocols_advertised, ["h2", "http/1.1"])
|> Keyword.put_new(:alpn_preferred_protocols, ["h2", "http/1.1"])
{:ranch_ssl, :cowboy_tls, %{transport_opts | socket_opts: socket_opts}}
end
{id, start, restart, shutdown, type, modules} =
:ranch.child_spec(ref, ranch_module, transport_opts, cowboy_protocol, proto_opts)
%{id: id, start: start, restart: restart, shutdown: shutdown, type: type, modules: modules}
end
## Helpers
@protocol_options [:compress, :stream_handlers]
defp run(scheme, plug, opts, cowboy_options) do
case Application.ensure_all_started(:cowboy) do
{:ok, _} ->
nil
{:error, {:cowboy, _}} ->
raise "could not start the Cowboy application. Please ensure it is listed as a dependency in your mix.exs"
end
start =
case scheme do
:http -> :start_clear
:https -> :start_tls
other -> :erlang.error({:badarg, [other]})
end
apply(:cowboy, start, args(scheme, plug, opts, cowboy_options))
end
defp normalize_cowboy_options(cowboy_options, :http) do
Keyword.put_new(cowboy_options, :port, 4000)
end
defp normalize_cowboy_options(cowboy_options, :https) do
cowboy_options
|> Keyword.put_new(:port, 4040)
|> Plug.SSL.configure()
|> case do
{:ok, options} -> options
{:error, message} -> fail(message)
end
end
defp to_args(opts, scheme, plug, plug_opts, non_keyword_opts) do
{timeout, opts} = Keyword.pop(opts, :timeout)
if timeout do
Logger.warn("the :timeout option for Cowboy webserver has no effect and must be removed")
end
opts = Keyword.delete(opts, :otp_app)
{ref, opts} = Keyword.pop(opts, :ref)
{dispatch, opts} = Keyword.pop(opts, :dispatch)
{protocol_options, opts} = Keyword.pop(opts, :protocol_options, [])
dispatch = :cowboy_router.compile(dispatch || dispatch_for(plug, plug_opts))
{extra_options, opts} = Keyword.split(opts, @protocol_options)
extra_options = set_stream_handlers(extra_options)
protocol_and_extra_options = :maps.from_list(protocol_options ++ extra_options)
protocol_options = Map.merge(%{env: %{dispatch: dispatch}}, protocol_and_extra_options)
{transport_options, socket_options} = Keyword.pop(opts, :transport_options, [])
option_keys = Keyword.keys(socket_options)
for opt <- @transport_options, opt in option_keys do
option_deprecation_warning(opt)
end
{num_acceptors, socket_options} = Keyword.pop(socket_options, :num_acceptors, 100)
{num_acceptors, socket_options} = Keyword.pop(socket_options, :acceptors, num_acceptors)
{max_connections, socket_options} = Keyword.pop(socket_options, :max_connections, 16_384)
socket_options = non_keyword_opts ++ socket_options
transport_options =
transport_options
|> Keyword.put_new(:num_acceptors, num_acceptors)
|> Keyword.put_new(:max_connections, max_connections)
|> Keyword.update(
:socket_opts,
socket_options,
&(&1 ++ socket_options)
)
|> Map.new()
[ref || build_ref(plug, scheme), transport_options, protocol_options]
end
@default_stream_handlers [Plug.Cowboy.Stream]
defp set_stream_handlers(opts) do
compress = Keyword.get(opts, :compress)
stream_handlers = Keyword.get(opts, :stream_handlers)
case {compress, stream_handlers} do
{true, nil} ->
Keyword.put_new(opts, :stream_handlers, [:cowboy_compress_h | @default_stream_handlers])
{true, _} ->
raise "cannot set both compress and stream_handlers at once. " <>
"If you wish to set compress, please add `:cowboy_compress_h` to your stream handlers."
{_, nil} ->
Keyword.put_new(opts, :stream_handlers, @default_stream_handlers)
{_, _} ->
opts
end
end
defp build_ref(plug, scheme) do
Module.concat(plug, scheme |> to_string |> String.upcase())
end
defp dispatch_for(plug, opts) do
opts = plug.init(opts)
[{:_, [{:_, Plug.Cowboy.Handler, {plug, opts}}]}]
end
defp fail(message) do
raise ArgumentError, "could not start Cowboy2 adapter, " <> message
end
defp option_deprecation_warning(:acceptors),
do: option_deprecation_warning(:acceptors, :num_acceptors)
defp option_deprecation_warning(option),
do: option_deprecation_warning(option, option)
defp option_deprecation_warning(option, expected_option) do
warning =
"using :#{option} in options is deprecated. Please pass " <>
":#{expected_option} to the :transport_options keyword list instead"
IO.warn(warning)
end
end
|
lib/plug/cowboy.ex
| 0.908442
| 0.553867
|
cowboy.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Slack do
@moduledoc """
Implements an Üeberauth strategy for authentication with slack.com.
When configuring the strategy in the Üeberauth providers, you can specify some defaults.
* `uid_field` - The field to use as the UID field. This can be any populated field in the info struct. Default `:email`
* `default_scope` - The scope to request by default from slack (permissions). Default "users:read"
* `oauth2_module` - The OAuth2 module to use. Default Ueberauth.Strategy.Slack.OAuth
```elixir
config :ueberauth, Ueberauth,
providers: [
slack: { Ueberauth.Strategy.Slack, [uid_field: :nickname, default_scope: "users:read,users:write"] }
]
```
"""
use Ueberauth.Strategy,
uid_field: :email,
default_scope: "users:read",
oauth2_module: Ueberauth.Strategy.Slack.OAuth
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
alias Ueberauth.Strategy.Helpers
# When handling the request just redirect to Slack
@doc false
def handle_request!(conn) do
scopes = conn.params["scope"] || option(conn, :default_scope)
opts = [scope: scopes]
opts =
if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts
team = option(conn, :team)
opts = if team, do: Keyword.put(opts, :team, team), else: opts
callback_url = callback_url(conn)
callback_url =
if String.ends_with?(callback_url, "?"),
do: String.slice(callback_url, 0..-2),
else: callback_url
opts =
opts
|> Keyword.put(:redirect_uri, callback_url)
|> Helpers.with_state_param(conn)
module = option(conn, :oauth2_module)
redirect!(conn, apply(module, :authorize_url!, [opts]))
end
# When handling the callback, if there were no errors we need to
# make two calls. The first fetches the slack auth so that we can get hold of
# the user id, which we then use to query for the user info.
# We put the result in the private section of the conn so that it is available later when building the auth struct.
@doc false
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
module = option(conn, :oauth2_module)
params = [code: code]
redirect_uri = get_redirect_uri(conn)
options = %{
options: [
client_options: [redirect_uri: redirect_uri]
]
}
token = apply(module, :get_token!, [params, options])
# Ported from:
# https://github.com/emilsoman/ueberauth_slack/blob/4b428e06f6287bb72d4314398fd91fc2f2e5839c/lib/ueberauth/strategy/slack.ex#L64
case token do
%{access_token: nil, other_params: %{"authed_user" => %{"access_token" => access_token}}} ->
token =
token
|> put_in([Access.key(:other_params), "scope"], token.other_params["authed_user"]["scope"])
|> Map.put(:access_token, access_token)
|> Map.put(:token_type, token.other_params["authed_user"]["token_type"])
conn
|> store_token(token)
|> fetch_identity(token)
%{access_token: nil} ->
set_errors!(conn, [error(token.other_params["error"], token.other_params["error_description"])])
token ->
conn
|> store_token(token)
|> fetch_auth(token)
|> fetch_identity(token)
|> fetch_user(token)
|> fetch_team(token)
end
end
# If we don't match code, then we have an issue
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
# We store the token for use later when fetching the slack auth and user and constructing the auth struct.
@doc false
defp store_token(conn, token) do
put_private(conn, :slack_token, token)
end
# Remove the temporary storage in the conn for our data. Run after the auth struct has been built.
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:slack_auth, nil)
|> put_private(:slack_identity, nil)
|> put_private(:slack_user, nil)
|> put_private(:slack_token, nil)
end
# The structure of the requests is such that it is difficult to provide customization for the uid field.
# Instead, we allow selecting any field from the info struct.
@doc false
def uid(conn) do
Map.get(info(conn), option(conn, :uid_field))
end
@doc false
def credentials(conn) do
token = conn.private.slack_token
auth = conn.private[:slack_auth]
identity = conn.private[:slack_identity]
user = conn.private[:slack_user]
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at,
scopes: scopes,
other:
Map.merge(
%{
user: get_in(auth, ["user"]),
user_id: get_in(auth, ["user_id"]) || get_in(identity, ["user", "id"]),
team: get_in(auth, ["team"]) || get_in(identity, ["team", "name"]),
team_id: get_in(auth, ["team_id"]) || get_in(identity, ["team", "id"]),
team_domain: get_in(identity, ["team", "domain"]),
team_url: get_in(auth, ["url"])
},
user_credentials(user)
)
}
end
@doc false
def info(conn) do
user = conn.private[:slack_user]
auth = conn.private[:slack_auth]
identity = conn.private[:slack_identity]
profile = get_in(user, ["profile"]) || get_in(identity, ["user"]) || %{}
image_urls =
profile
|> Map.keys()
|> Enum.filter(&(&1 =~ ~r/^image_/))
|> Enum.into(%{}, &{&1, profile[&1]})
team_image_urls =
(identity || %{})
|> Map.get("team", %{})
|> Enum.filter(fn {key, _value} -> key =~ ~r/^image_/ end)
|> Enum.into(%{}, fn {key, value} -> {"team_#{key}", value} end)
%Info{
name: name_from_user(user) || get_in(identity, ["user", "name"]),
nickname: get_in(user, ["name"]),
email: get_in(profile, ["email"]),
image: get_in(profile, ["image_48"]),
urls:
image_urls
|> Map.merge(team_image_urls)
|> Map.merge(%{
team_url: get_in(auth, ["url"])
})
}
end
@doc false
def extra(conn) do
%Extra{
raw_info: %{
auth: conn.private[:slack_auth],
identity: conn.private[:slack_identity],
token: conn.private[:slack_token],
user: conn.private[:slack_user],
team: conn.private[:slack_team]
}
}
end
defp user_credentials(nil), do: %{}
defp user_credentials(user) do
%{
has_2fa: user["has_2fa"],
is_admin: user["is_admin"],
is_owner: user["is_owner"],
is_primary_owner: user["is_primary_owner"],
is_restricted: user["is_restricted"],
is_ultra_restricted: user["is_ultra_restricted"]
}
end
# Before we can fetch the user, we first need to fetch the auth to find out what the user id is.
defp fetch_auth(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case Ueberauth.Strategy.Slack.OAuth.get(token, "/auth.test") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: auth}}
when status_code in 200..399 ->
cond do
auth["ok"] ->
put_private(conn, :slack_auth, auth)
auth["error"] == "invalid_auth" && Enum.member?(scopes, "identity.basic") ->
# If the token has only the "identity.basic" scope then it may error
# at the "auth.test" endpoint but still succeed at the
# "identity.basic" endpoint.
# In this case we rely on fetch_identity to set the error if the
# token is invalid.
conn
true ->
set_errors!(conn, [error(auth["error"], auth["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
# https://github.com/ueberauth/ueberauth_slack/issues/35#issuecomment-616433473
defp fetch_identity(conn, token) do
scope_string = token.other_params["authed_user"]["scope"] || ""
scopes = String.split(scope_string, ",")
user_token = OAuth2.AccessToken.new(token.other_params["authed_user"])
case "identity.basic" in scopes do
false ->
conn
true ->
get_users_identity(conn, user_token)
end
end
defp get_users_identity(conn, token) do
case Ueberauth.Strategy.Slack.OAuth.get(token, "/users.identity") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: identity}}
when status_code in 200..399 ->
if identity["ok"] do
put_private(conn, :slack_identity, identity)
else
set_errors!(conn, [error(identity["error"], identity["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
# If the call to fetch the auth fails, we're going to have failures already in place.
# If this happens don't try and fetch the user and just let it fail.
defp fetch_user(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn
# Given the auth and token we can now fetch the user.
defp fetch_user(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case "users:read" in scopes do
false ->
conn
true ->
get_users_info(conn, token)
end
end
defp get_users_info(conn, token) do
opts = %{user: conn.private.slack_auth["user_id"]}
case Ueberauth.Strategy.Slack.OAuth.get(token, "/users.info", opts) do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
if user["ok"] do
put_private(conn, :slack_user, user["user"])
else
set_errors!(conn, [error(user["error"], user["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
defp fetch_team(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn
defp fetch_team(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case "team:read" in scopes do
false ->
conn
true ->
get_team_info(conn, token)
end
end
defp get_team_info(conn, token) do
case Ueberauth.Strategy.Slack.OAuth.get(token, "/team.info") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: team}}
when status_code in 200..399 ->
if team["ok"] do
put_private(conn, :slack_team, team["team"])
else
set_errors!(conn, [error(team["error"], team["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
# Fetch the name to use. We start with the most specific name available and
# fall back to the least specific.
defp name_from_user(nil), do: nil
defp name_from_user(user) do
[
user["profile"]["real_name_normalized"],
user["profile"]["real_name"],
user["real_name"],
user["name"]
]
|> Enum.reject(&(&1 == "" || &1 == nil))
|> List.first()
end
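# Illustrative precedence (hypothetical user map): the profile's real name
# wins over the plain handle:
#
#   name_from_user(%{"profile" => %{"real_name" => "Jane Doe"}, "name" => "jdoe"})
#   #=> "Jane Doe"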
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
defp get_redirect_uri(%Plug.Conn{} = conn) do
config = Application.get_env(:ueberauth, Ueberauth)
redirect_uri = Keyword.get(config, :redirect_uri)
if is_nil(redirect_uri) do
callback_url(conn)
else
redirect_uri
end
end
end
|
lib/ueberauth/strategy/slack.ex
| 0.802478
| 0.669154
|
slack.ex
|
starcoder
|
defmodule Algae.Internal do
@moduledoc false
@type ast() :: {atom(), any(), any()}
@doc """
Construct a data type AST
"""
@spec data_ast(module() | [module()], ast()) :: ast()
def data_ast(lines) when is_list(lines) do
{field_values, field_types, specs, args, defaults} = module_elements(lines)
quote do
@type t :: %__MODULE__{unquote_splicing(field_types)}
defstruct unquote(field_values)
@doc "Positional constructor, with args in the same order as they were defined in"
@spec new(unquote_splicing(specs)) :: t()
def new(unquote_splicing(args)) do
struct(__MODULE__, unquote(defaults))
end
defoverridable [new: unquote(Enum.count(args))]
end
end
def data_ast(modules, {:none, _, _}) do
full_module = modules |> List.wrap() |> Module.concat()
quote do
defmodule unquote(full_module) do
@type t :: %__MODULE__{}
defstruct []
@doc "Default #{__MODULE__} struct"
@spec new() :: t()
def new, do: struct(__MODULE__)
defoverridable [new: 0]
end
end
end
def data_ast(caller_module, type) do
default = default_value(type)
field = module_to_field(caller_module)
quote do
@type t :: %unquote(caller_module){
unquote(field) => unquote(type)
}
defstruct [{unquote(field), unquote(default)}]
@doc "Default #{__MODULE__} struct"
@spec new() :: t()
def new, do: struct(__MODULE__)
@doc "Constructor helper for piping"
@spec new(unquote(type)) :: t()
def new(field), do: struct(__MODULE__, [{unquote(field), field}])
defoverridable [new: 0, new: 1]
end
end
@spec data_ast([module()], any(), ast()) :: ast()
def data_ast(name, default, type_ctx) do
full_module = Module.concat(name)
field = module_to_field(name)
quote do
defmodule unquote(full_module) do
@type t :: %unquote(full_module){
unquote(field) => unquote(type_ctx)
}
defstruct [{unquote(field), unquote(default)}]
@doc "Default #{__MODULE__} struct. Value defaults to #{inspect unquote(default)}."
@spec new() :: t()
def new, do: struct(__MODULE__)
@doc "Helper for initializing struct with a specific value"
@spec new(unquote(type_ctx)) :: t()
def new(value), do: struct(__MODULE__, [{unquote(field), value}])
end
end
end
@spec embedded_data_ast() :: ast()
def embedded_data_ast do
quote do
@type t :: %__MODULE__{}
defstruct []
@doc "Default #{__MODULE__} struct"
@spec new() :: t()
def new, do: struct(__MODULE__)
end
end
def embedded_data_ast(module_ctx, default, type_ctx) do
field = module_to_field(module_ctx)
quote do
@type t :: %__MODULE__{
unquote(field) => unquote(type_ctx)
}
defstruct [{unquote(field), unquote(default)}]
@doc "Default #{__MODULE__} struct"
@spec new(unquote(type_ctx)) :: t()
def new(value \\ unquote(default)), do: struct(__MODULE__, [{unquote(field), value}])
defoverridable [new: 1]
end
end
@type field :: {atom(), [any()], [any()]}
@type type :: {atom(), [any()], [any()]}
@spec module_elements([ast()])
:: {
[{field(), any()}],
[{field(), type()}],
[type],
[{:\\, [], any()}],
[{field(), any()}]
}
def module_elements(lines) do
List.foldr(lines, {[], [], [], [], []},
fn(line, {value_acc, type_acc, typespec_acc, acc_arg, acc_mapping}) ->
{field, type, default_value} = normalize_elements(line)
arg = {field, [], Elixir}
{
[{field, default_value} | value_acc],
[{field, type} | type_acc],
[type | typespec_acc],
[{:\\, [], [arg, default_value]} | acc_arg],
[{field, arg} | acc_mapping]
}
end)
end
@spec normalize_elements(ast()) :: {atom(), type(), any()}
def normalize_elements({:::, _, [{field, _, _}, type]}) do
{field, type, default_value(type)}
end
def normalize_elements({:\\, _, [{:::, _, [{field, _, _}, type]}, default]}) do
{field, type, default}
end
@spec or_types([ast()], module()) :: [ast()]
def or_types({:\\, _, [{:::, _, [_, types]}, _]}, module_ctx) do
or_types(types, module_ctx)
end
def or_types([head | tail], module_ctx) do
Enum.reduce(tail, call_type(head, module_ctx), fn(module, acc) ->
{:|, [], [call_type(module, module_ctx), acc]}
end)
end
@spec modules(module(), [module()]) :: [module()]
def modules(top, module_ctx), do: [top | extract_name(module_ctx)]
@spec call_type(module(), [module()]) :: ast()
def call_type(new_module, module_ctx) do
full_module = List.wrap(module_ctx) ++ submodule_name(new_module)
{{:., [], [{:__aliases__, [alias: false], full_module}, :t]}, [], []}
end
@spec submodule_name({:defdata, any(), [{:::, any(), [any()]}]})
:: [module()]
def submodule_name({:defdata, _, [{:::, _, [body, _]}]}) do
body
|> case do
{:\\, _, [inner_module_ctx, _]} -> inner_module_ctx
{:__aliases__, _, module} -> module
outer_module_ctx -> outer_module_ctx
end
|> List.wrap()
end
def submodule_name({:defdata, _, [{:\\, _, [{:::, _, [{:__aliases__, _, module}, _]}, _]}]}) do
List.wrap(module)
end
def submodule_name({:defdata, _, [{:__aliases__, _, module}, _]}) do
List.wrap(module)
end
@spec extract_name({any(), any(), atom()} | [module()]) :: [module()]
def extract_name({_, _, inner_name}), do: List.wrap(inner_name)
def extract_name(module_chain) when is_list(module_chain), do: module_chain
def module_to_field(modules) when is_list(modules) do
modules
|> List.last()
|> module_to_field()
end
def module_to_field(module) do
module
|> Atom.to_string()
|> String.split(".")
|> List.last()
|> String.downcase()
|> String.trim_leading("elixir.")
|> String.to_atom()
end
# credo:disable-for-lines:21 Credo.Check.Refactor.CyclomaticComplexity
def default_value({{:., _, [{_, _, [:String]}, :t]}, _, _}), do: ""
def default_value({{:., _, [{_, _, adt}, :t]}, _, []}) do
quote do: unquote(Module.concat(adt)).new()
end
def default_value([_]), do: []
def default_value({type, _, _}) do
case type do
:boolean -> false
:number -> 0
:integer -> 0
:float -> 0.0
:pos_integer -> 1
:non_neg_integer -> 0
:bitstring -> ""
:charlist -> []
[] -> []
:list -> []
:map -> %{}
:fun -> &Quark.id/1
:-> -> &Quark.id/1
:any -> nil
:t -> raise %Algae.Internal.NeedsExplicitDefaultError{message: "Type is lone `t`"}
atom -> atom
end
end
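# Illustrative defaults for a few quoted types:
#
#   default_value({:integer, [], []})    #=> 0
#   default_value({:map, [], []})        #=> %{}
#   default_value([{:integer, [], []}])  #=> []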
end
|
lib/algae/internal.ex
| 0.73173
| 0.466846
|
internal.ex
|
starcoder
|
defmodule StaffNotesWeb.SlidingSessionTimeout do
@moduledoc """
Module `Plug` that times out the user's session after a period of inactivity.
Because this project uses [OAuth](https://oauth.net/), our application's permissions can be
revoked by the owner of the account and our app would never know. By periodically timing out the
session, we force the application to re-authenticate the logged in user, thereby reaffirming that
they still authorize our application to use their information.
## Options
* `:timeout` — Number of seconds of inactivity required for the session to time out
_(**default:** one hour or 3,600 seconds)_
## Examples
Configuring in the application configuration files:
```
config :staff_notes, StaffNotesWeb.SlidingSessionTimeout,
timeout: 1_234
```
Configuring when including the plug in a pipeline:
```
plug(StaffNotesWeb.SlidingSessionTimeout, timeout: 1_234)
```
"""
@behaviour Plug
import Plug.Conn
alias Phoenix.Controller
alias StaffNotesWeb.Router
require Logger
@doc """
API used by Plug to configure the session timeout.
"""
@spec init(Keyword.t()) :: Keyword.t()
def init(options \\ []) do
defaults()
|> Keyword.merge(Application.get_env(get_app(), __MODULE__) || [])
|> Keyword.merge(options)
end
@doc """
API used by Plug to invoke the session timeout check on every request.
"""
@spec call(Plug.Conn.t(), Keyword.t()) :: Plug.Conn.t()
def call(conn, options) do
timeout_at = get_session(conn, :timeout_at)
if timeout_at && now() > timeout_at do
conn
|> logout_user
|> Controller.redirect(to: Router.Helpers.auth_path(conn, :index, from: conn.request_path))
|> halt
else
new_timeout = calculate_timeout(options[:timeout])
put_session(conn, :timeout_at, new_timeout)
end
end
defp calculate_timeout(timeout), do: now() + timeout
defp defaults do
[
timeout: 3_600
]
end
defp get_app, do: Application.get_application(__MODULE__)
defp logout_user(conn) do
conn
|> clear_session()
|> configure_session(renew: true)
|> assign(:timed_out?, true)
end
defp now, do: DateTime.to_unix(DateTime.utc_now())
end
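A hedged sketch of the plug's bookkeeping, assuming `conn` is a `%Plug.Conn{}` that has already passed through `Plug.Session` (for example inside a Phoenix browser pipeline):

```elixir
# Every request pushes the deadline `timeout` seconds into the future; a
# request arriving after the stored deadline is logged out and redirected.
opts = StaffNotesWeb.SlidingSessionTimeout.init(timeout: 60)
conn = StaffNotesWeb.SlidingSessionTimeout.call(conn, opts)
Plug.Conn.get_session(conn, :timeout_at) #=> unix seconds, roughly 60s from now
```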
|
lib/staff_notes_web/plugs/sliding_session_timeout.ex
| 0.847463
| 0.696694
|
sliding_session_timeout.ex
|
starcoder
|
defmodule Exq.Redis.JobStat do
@moduledoc """
The JobStat module encapsulates storing system-wide stats on top of Redis.
It aims to be compatible with the Sidekiq stats format.
"""
require Logger
alias Exq.Support.{Binary, Process, Job, Time}
alias Exq.Redis.{Connection, JobQueue}
def record_processed_commands(namespace, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:processed")],
["INCR", JobQueue.full_key(namespace, "stat:processed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:processed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:processed:#{date}")]
]
end
def record_processed(redis, namespace, job, current_date \\ DateTime.utc_now()) do
instr = record_processed_commands(namespace, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def record_failure_commands(namespace, _error, _job, current_date \\ DateTime.utc_now()) do
{time, date} = Time.format_current_date(current_date)
[
["INCR", JobQueue.full_key(namespace, "stat:failed")],
["INCR", JobQueue.full_key(namespace, "stat:failed_rt:#{time}")],
["EXPIRE", JobQueue.full_key(namespace, "stat:failed_rt:#{time}"), 120],
["INCR", JobQueue.full_key(namespace, "stat:failed:#{date}")]
]
end
def record_failure(redis, namespace, error, job, current_date \\ DateTime.utc_now()) do
instr = record_failure_commands(namespace, error, job, current_date)
{:ok, [count, _, _, _]} = Connection.qp(redis, instr)
{:ok, count}
end
def add_process_commands(namespace, process_info, serialized_process \\ nil) do
serialized = serialized_process || Exq.Support.Process.encode(process_info)
[["SADD", JobQueue.full_key(namespace, "processes"), serialized]]
end
def add_process(redis, namespace, process_info, serialized_process \\ nil) do
instr = add_process_commands(namespace, process_info, serialized_process)
Connection.qp!(redis, instr)
:ok
end
def remove_process_commands(namespace, process_info, serialized_process \\ nil) do
serialized = serialized_process || Exq.Support.Process.encode(process_info)
[["SREM", JobQueue.full_key(namespace, "processes"), serialized]]
end
def remove_process(redis, namespace, process_info, serialized_process \\ nil) do
instr = remove_process_commands(namespace, process_info, serialized_process)
Connection.qp!(redis, instr)
:ok
end
def cleanup_processes(redis, namespace, host) do
Connection.smembers!(redis, JobQueue.full_key(namespace, "processes"))
|> Enum.map(fn serialized -> {Process.decode(serialized), serialized} end)
|> Enum.filter(fn {process, _} -> process.host == host end)
|> Enum.each(fn {process, serialized} ->
remove_process(redis, namespace, process, serialized)
end)
:ok
end
def busy(redis, namespace) do
Connection.scard!(redis, JobQueue.full_key(namespace, "processes"))
end
def processes(redis, namespace) do
list = Connection.smembers!(redis, JobQueue.full_key(namespace, "processes")) || []
Enum.map(list, &Process.decode/1)
end
def find_failed(redis, namespace, jid) do
redis
|> Connection.zrange!(JobQueue.full_key(namespace, "dead"), 0, -1)
|> JobQueue.search_jobs(jid)
end
def remove_queue(redis, namespace, queue) do
Connection.qp(redis, [
["SREM", JobQueue.full_key(namespace, "queues"), queue],
["DEL", JobQueue.queue_key(namespace, queue)]
])
end
def remove_failed(redis, namespace, jid) do
{:ok, failure} = find_failed(redis, namespace, jid)
Connection.qp(redis, [
["DECR", JobQueue.full_key(namespace, "stat:failed")],
["ZREM", JobQueue.full_key(namespace, "dead"), Job.encode(failure)]
])
end
def clear_failed(redis, namespace) do
Connection.qp(redis, [
["SET", JobQueue.full_key(namespace, "stat:failed"), 0],
["DEL", JobQueue.full_key(namespace, "dead")]
])
end
def clear_processes(redis, namespace) do
Connection.del!(redis, JobQueue.full_key(namespace, "processes"))
end
def realtime_stats(redis, namespace) do
{:ok, [failure_keys, success_keys]} =
Connection.qp(redis, [
["KEYS", JobQueue.full_key(namespace, "stat:failed_rt:*")],
["KEYS", JobQueue.full_key(namespace, "stat:processed_rt:*")]
])
formatter = realtime_stats_formatter(redis, namespace)
failures = formatter.(failure_keys, "stat:failed_rt:")
successes = formatter.(success_keys, "stat:processed_rt:")
{:ok, failures, successes}
end
defp realtime_stats_formatter(redis, namespace) do
fn keys, ns ->
if Enum.empty?(keys) do
[]
else
{:ok, counts} = Connection.qp(redis, Enum.map(keys, &["GET", &1]))
keys
|> Enum.map(&Binary.take_prefix(&1, JobQueue.full_key(namespace, ns)))
|> Enum.zip(counts)
end
end
end
def get_count(redis, namespace, key) do
case Connection.get!(redis, JobQueue.full_key(namespace, "stat:#{key}")) do
:undefined ->
0
nil ->
0
count when is_integer(count) ->
count
count ->
{val, _} = Integer.parse(count)
val
end
end
end
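A hedged sketch of the command lists built above; it assumes `JobQueue.full_key/2` joins namespace and key with a colon, and it elides the exact time/date formats produced by `Exq.Support.Time.format_current_date/1`:

```elixir
# The job argument is ignored by record_processed_commands/3, so nil is fine.
Exq.Redis.JobStat.record_processed_commands("exq", nil)
#=> [
#     ["INCR", "exq:stat:processed"],
#     ["INCR", "exq:stat:processed_rt:<time>"],
#     ["EXPIRE", "exq:stat:processed_rt:<time>", 120],
#     ["INCR", "exq:stat:processed:<date>"]
#   ]
```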
|
lib/exq/redis/job_stat.ex
| 0.654453
| 0.507141
|
job_stat.ex
|
starcoder
|
defmodule AWS.SFN do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual
components, each of which performs a discrete function, or *task*, allowing
you to scale and change applications quickly. Step Functions provides a
console that helps visualize the components of your application as a series
of steps. Step Functions automatically triggers and tracks each step, and
retries steps when there are errors, so your application executes
predictably and in the right order every time. Step Functions logs the
state of each step, so you can quickly diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure
your application is available at any scale. You can run tasks on AWS, your
own servers, or any system that has access to AWS. You can access and use
Step Functions using the console, the AWS SDKs, or an HTTP API. For more
information about Step Functions, see the * [AWS Step Functions Developer
Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html)
*.
"""
@doc """
Creates an activity. An activity is a task that you write in any
programming language and host on any machine that has access to AWS Step
Functions. Activities must poll Step Functions using the `GetActivityTask`
API action and respond using `SendTask*` API actions. This function lets
Step Functions know the existence of your activity and returns an
identifier for use in a state machine and when polling from the activity.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note> <note> `CreateActivity` is an idempotent API. Subsequent requests
won’t create a duplicate resource if it was already created.
`CreateActivity`'s idempotency check is based on the activity `name`. If a
following request has different `tags` values, Step Functions will ignore
these differences and treat it as an idempotent request of the previous. In
this case, `tags` will not be updated, even if they are different.
</note>
"""
def create_activity(client, input, options \\ []) do
request(client, "CreateActivity", input, options)
end
@doc """
Creates a state machine. A state machine consists of a collection of states
that can do work (`Task` states), determine to which states to transition
next (`Choice` states), stop an execution with an error (`Fail` states),
and so on. State machines are specified using a JSON-based, structured
language. For more information, see [Amazon States
Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html)
in the AWS Step Functions User Guide.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note> <note> `CreateStateMachine` is an idempotent API. Subsequent
requests won’t create a duplicate resource if it was already created.
`CreateStateMachine`'s idempotency check is based on the state machine
`name`, `definition`, `type`, and `LoggingConfiguration`. If a following
request has a different `roleArn` or `tags`, Step Functions will ignore
these differences and treat it as an idempotent request of the previous. In
this case, `roleArn` and `tags` will not be updated, even if they are
different.
</note>
"""
def create_state_machine(client, input, options \\ []) do
request(client, "CreateStateMachine", input, options)
end
@doc """
Deletes an activity.
"""
def delete_activity(client, input, options \\ []) do
request(client, "DeleteActivity", input, options)
end
@doc """
Deletes a state machine. This is an asynchronous operation: It sets the
state machine's status to `DELETING` and begins the deletion process.
<note> For `EXPRESS` state machines, the deletion will happen eventually
(usually less than a minute). Running executions may emit logs after
`DeleteStateMachine` API is called.
</note>
"""
def delete_state_machine(client, input, options \\ []) do
request(client, "DeleteStateMachine", input, options)
end
@doc """
Describes an activity.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
def describe_activity(client, input, options \\ []) do
request(client, "DescribeActivity", input, options)
end
@doc """
Describes an execution.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note> This API action is not supported by `EXPRESS` state machines.
"""
def describe_execution(client, input, options \\ []) do
request(client, "DescribeExecution", input, options)
end
@doc """
Describes a state machine.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
def describe_state_machine(client, input, options \\ []) do
request(client, "DescribeStateMachine", input, options)
end
@doc """
Describes the state machine associated with a specific execution.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note> This API action is not supported by `EXPRESS` state machines.
"""
def describe_state_machine_for_execution(client, input, options \\ []) do
request(client, "DescribeStateMachineForExecution", input, options)
end
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which
has been scheduled for execution by a running state machine. This initiates
a long poll, where the service holds the HTTP connection open and responds
as soon as a task becomes available (i.e. an execution of a task of this
type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds,
the poll returns a `taskToken` with a null string.
<important> Workers should set their client side socket timeout to at least
65 seconds (5 seconds higher than the maximum time the service may hold the
poll request).
Polling with `GetActivityTask` can cause latency in some implementations.
See [Avoid Latency When Polling for Activity
Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
</important>
"""
def get_activity_task(client, input, options \\ []) do
request(client, "GetActivityTask", input, options)
end
@doc """
Returns the history of the specified execution as a list of events. By
default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events
first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
This API action is not supported by `EXPRESS` state machines.
"""
def get_execution_history(client, input, options \\ []) do
request(client, "GetExecutionHistory", input, options)
end
@doc """
Lists the existing activities.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
def list_activities(client, input, options \\ []) do
request(client, "ListActivities", input, options)
end
@doc """
Lists the executions of a state machine that meet the filtering criteria.
Results are sorted by time, with the most recent execution first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note> This API action is not supported by `EXPRESS` state machines.
"""
def list_executions(client, input, options \\ []) do
request(client, "ListExecutions", input, options)
end
@doc """
Lists the existing state machines.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
def list_state_machines(client, input, options \\ []) do
request(client, "ListStateMachines", input, options)
end
@doc """
List tags for a given resource.
Tags may only contain Unicode letters, digits, white space, or these
symbols: `_ . : / = + - @`.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` failed.
"""
def send_task_failure(client, input, options \\ []) do
request(client, "SendTaskFailure", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report to Step Functions that the task represented by the
specified `taskToken` is still making progress. This action resets the
`Heartbeat` clock. The `Heartbeat` threshold is specified in the state
machine's Amazon States Language definition (`HeartbeatSeconds`). This
action does not in itself create an event in the execution history.
However, if the task times out, the execution history contains an
`ActivityTimedOut` entry for activities, or a `TaskTimedOut` entry for
tasks using the [job
run](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-sync)
or
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern.
<note> The `Timeout` of a task, defined in the state machine's Amazon
States Language definition, is its maximum allowed duration, regardless of
the number of `SendTaskHeartbeat` requests received. Use `HeartbeatSeconds`
to configure the timeout interval for heartbeats.
</note>
"""
def send_task_heartbeat(client, input, options \\ []) do
request(client, "SendTaskHeartbeat", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` completed
successfully.
"""
def send_task_success(client, input, options \\ []) do
request(client, "SendTaskSuccess", input, options)
end
@doc """
Starts a state machine execution.
<note> `StartExecution` is idempotent. If `StartExecution` is called with
the same name and input as a running execution, the call will succeed and
return the same response as the original request. If the execution is
closed or if the input is different, it will return a 400
`ExecutionAlreadyExists` error. Names can be reused after 90 days.
</note>
"""
def start_execution(client, input, options \\ []) do
request(client, "StartExecution", input, options)
end
@doc """
Stops an execution.
This API action is not supported by `EXPRESS` state machines.
"""
def stop_execution(client, input, options \\ []) do
request(client, "StopExecution", input, options)
end
@doc """
Add a tag to a Step Functions resource.
An array of key-value pairs. For more information, see [Using Cost
Allocation
Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*, and [Controlling
Access Using IAM
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_iam-tags.html).
Tags may only contain Unicode letters, digits, white space, or these
symbols: `_ . : / = + - @`.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Remove a tag from a Step Functions resource
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates an existing state machine by modifying its `definition`, `roleArn`,
or `loggingConfiguration`. Running executions will continue to use the
previous `definition` and `roleArn`. You must include at least one of
`definition` or `roleArn` or you will receive a `MissingRequiredParameter`
error.
<note> All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
</note>
"""
def update_state_machine(client, input, options \\ []) do
request(client, "UpdateStateMachine", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, Poison.Parser.t() | nil, Poison.Response.t()}
| {:error, Poison.Parser.t()}
| {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
client = %{client | service: "states"}
host = build_host("states", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.0"},
{"X-Amz-Target", "AWSStepFunctions.#{action}"}
]
payload = Poison.Encoder.encode(input, %{})
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
case HTTPoison.post(url, payload, headers, options) do
{:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
{:ok, nil, response}
{:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
{:ok, Poison.Parser.parse!(body, %{}), response}
{:ok, %HTTPoison.Response{body: body}} ->
error = Poison.Parser.parse!(body, %{})
{:error, error}
{:error, %HTTPoison.Error{reason: reason}} ->
{:error, %HTTPoison.Error{reason: reason}}
end
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
end
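A hedged usage sketch. The client map mirrors the fields `request/4` above actually reads (`:region`, `:endpoint`, `:proto`, `:port`, `:service`) plus the signing credentials consumed by `AWS.Request.sign_v4/5`; real projects normally build this via the library's client helpers, whose exact shape may differ:

```elixir
client = %{
  access_key_id: "AKIA...",
  secret_access_key: "...",
  region: "us-east-1",
  endpoint: "amazonaws.com",
  proto: "https",
  port: "443",
  service: nil
}

{:ok, result, _response} = AWS.SFN.list_state_machines(client, %{})
```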
|
lib/aws/sfn.ex
| 0.919967
| 0.820577
|
sfn.ex
|
starcoder
|
defmodule RDF do
@moduledoc """
The top-level module of RDF.ex.
RDF.ex consists of:
- modules for the nodes of an RDF graph
- `RDF.Term`
- `RDF.IRI`
- `RDF.BlankNode`
- `RDF.Literal`
- the `RDF.Literal.Datatype` system
- a facility for the mapping of URIs of a vocabulary to Elixir modules and
functions: `RDF.Vocabulary.Namespace`
- modules for the construction of statements
- `RDF.Triple`
- `RDF.Quad`
- `RDF.Statement`
- modules for collections of statements
- `RDF.Description`
- `RDF.Graph`
- `RDF.Dataset`
- `RDF.Data`
- `RDF.List`
- `RDF.Diff`
- functions to construct and execute basic graph pattern queries: `RDF.Query`
- functions for working with RDF serializations: `RDF.Serialization`
- behaviours for the definition of RDF serialization formats
- `RDF.Serialization.Format`
- `RDF.Serialization.Decoder`
- `RDF.Serialization.Encoder`
- and the implementation of various RDF serialization formats
- `RDF.NTriples`
- `RDF.NQuads`
- `RDF.Turtle`
This top-level module provides shortcut functions for the construction of the
basic elements and structures of RDF and some general helper functions.
For a general introduction you may refer to the guides on the [homepage](https://rdf-elixir.dev).
"""
alias RDF.{
IRI,
Namespace,
Literal,
BlankNode,
Triple,
Quad,
Statement,
Description,
Graph,
Dataset,
PrefixMap
}
import RDF.Guards
import RDF.Utils.Bootstrapping
defdelegate default_base_iri(), to: RDF.IRI, as: :default_base
@standard_prefixes PrefixMap.new(
xsd: xsd_iri_base(),
rdf: rdf_iri_base(),
rdfs: rdfs_iri_base()
)
@doc """
A fixed set of prefixes that will always be part of the `default_prefixes/0`.
```elixir
#{inspect(@standard_prefixes, pretty: true)}
```
See `default_prefixes/0`, if you don't want these standard prefixes to be part
of the default prefixes.
"""
def standard_prefixes(), do: @standard_prefixes
@doc """
A user-defined `RDF.PrefixMap` of prefixes to IRI namespaces.
This prefix map will be used implicitly wherever a prefix map is expected, but
not provided. For example, when you don't pass a prefix map to the Turtle serializer,
this prefix map will be used.
By default the `standard_prefixes/0` are part of this prefix map, but you can
define additional default prefixes via the `default_prefixes` compile-time
configuration.
For example:
config :rdf,
default_prefixes: %{
ex: "http://example.com/"
}
You can also set `:default_prefixes` to a module-function tuple `{mod, fun}`
with a function which should be called to determine the default prefixes.
If you don't want the `standard_prefixes/0` to be part of the default prefixes,
or you want to map the standard prefixes to different namespaces (strongly discouraged!),
you can set the `use_standard_prefixes` compile-time configuration flag to `false`.
config :rdf,
use_standard_prefixes: false
"""
case Application.get_env(:rdf, :default_prefixes, %{}) do
{mod, fun} ->
if Application.get_env(:rdf, :use_standard_prefixes, true) do
def default_prefixes() do
PrefixMap.merge!(@standard_prefixes, apply(unquote(mod), unquote(fun), []))
end
else
def default_prefixes(), do: apply(unquote(mod), unquote(fun), [])
end
default_prefixes ->
@default_prefixes PrefixMap.new(default_prefixes)
if Application.get_env(:rdf, :use_standard_prefixes, true) do
def default_prefixes() do
PrefixMap.merge!(@standard_prefixes, @default_prefixes)
end
else
def default_prefixes(), do: @default_prefixes
end
end
@doc """
Returns the `default_prefixes/0` with additional prefix mappings.
The `prefix_mappings` can be given in any format accepted by `RDF.PrefixMap.new/1`.
"""
def default_prefixes(prefix_mappings) do
default_prefixes() |> PrefixMap.merge!(prefix_mappings)
end
defdelegate read_string(string, opts), to: RDF.Serialization
defdelegate read_string!(string, opts), to: RDF.Serialization
defdelegate read_stream(stream, opts \\ []), to: RDF.Serialization
defdelegate read_stream!(stream, opts \\ []), to: RDF.Serialization
defdelegate read_file(filename, opts \\ []), to: RDF.Serialization
defdelegate read_file!(filename, opts \\ []), to: RDF.Serialization
defdelegate write_string(data, opts), to: RDF.Serialization
defdelegate write_string!(data, opts), to: RDF.Serialization
defdelegate write_stream(data, opts), to: RDF.Serialization
defdelegate write_file(data, filename, opts \\ []), to: RDF.Serialization
defdelegate write_file!(data, filename, opts \\ []), to: RDF.Serialization
@doc """
Checks if the given value is an RDF resource.
## Examples
Suppose `EX` is an `RDF.Vocabulary.Namespace` and `Foo` is not.
iex> RDF.resource?(RDF.iri("http://example.com/resource"))
true
iex> RDF.resource?(EX.resource)
true
iex> RDF.resource?(EX.Resource)
true
iex> RDF.resource?(Foo.Resource)
false
iex> RDF.resource?(RDF.bnode)
true
iex> RDF.resource?(RDF.XSD.integer(42))
false
iex> RDF.resource?(42)
false
"""
def resource?(value)
def resource?(%IRI{}), do: true
def resource?(%BlankNode{}), do: true
def resource?(qname) when maybe_ns_term(qname) do
case Namespace.resolve_term(qname) do
{:ok, iri} -> resource?(iri)
_ -> false
end
end
def resource?(_), do: false
@doc """
Checks if the given value is an RDF term.
## Examples
Suppose `EX` is an `RDF.Vocabulary.Namespace` and `Foo` is not.
iex> RDF.term?(RDF.iri("http://example.com/resource"))
true
iex> RDF.term?(EX.resource)
true
iex> RDF.term?(EX.Resource)
true
iex> RDF.term?(Foo.Resource)
false
iex> RDF.term?(RDF.bnode)
true
iex> RDF.term?(RDF.XSD.integer(42))
true
iex> RDF.term?(42)
false
"""
def term?(value)
def term?(%Literal{}), do: true
def term?(value), do: resource?(value)
defdelegate uri?(value), to: IRI, as: :valid?
defdelegate iri?(value), to: IRI, as: :valid?
defdelegate uri(value), to: IRI, as: :new
defdelegate iri(value), to: IRI, as: :new
defdelegate uri!(value), to: IRI, as: :new!
defdelegate iri!(value), to: IRI, as: :new!
@doc """
Checks if the given value is a blank node.
## Examples
iex> RDF.bnode?(RDF.bnode)
true
iex> RDF.bnode?(RDF.iri("http://example.com/resource"))
false
iex> RDF.bnode?(42)
false
"""
def bnode?(%BlankNode{}), do: true
def bnode?(_), do: false
defdelegate bnode(), to: BlankNode, as: :new
defdelegate bnode(id), to: BlankNode, as: :new
@doc """
Checks if the given value is an RDF literal.
"""
def literal?(%Literal{}), do: true
def literal?(_), do: false
defdelegate literal(value), to: Literal, as: :new
defdelegate literal(value, opts), to: Literal, as: :new
defdelegate triple(s, p, o), to: Triple, as: :new
defdelegate triple(tuple), to: Triple, as: :new
defdelegate quad(s, p, o, g), to: Quad, as: :new
defdelegate quad(tuple), to: Quad, as: :new
defdelegate statement(s, p, o), to: Statement, as: :new
defdelegate statement(s, p, o, g), to: Statement, as: :new
defdelegate statement(tuple), to: Statement, as: :new
defdelegate description(subject, opts \\ []), to: Description, as: :new
defdelegate graph(), to: Graph, as: :new
defdelegate graph(arg), to: Graph, as: :new
defdelegate graph(arg1, arg2), to: Graph, as: :new
defdelegate dataset(), to: Dataset, as: :new
defdelegate dataset(arg), to: Dataset, as: :new
defdelegate dataset(arg1, arg2), to: Dataset, as: :new
defdelegate diff(arg1, arg2), to: RDF.Diff
defdelegate list?(resource, graph), to: RDF.List, as: :node?
defdelegate list?(description), to: RDF.List, as: :node?
def list(native_list), do: RDF.List.from(native_list)
def list(head, %Graph{} = graph), do: RDF.List.new(head, graph)
def list(native_list, opts), do: RDF.List.from(native_list, opts)
defdelegate prefix_map(prefixes), to: RDF.PrefixMap, as: :new
defdelegate property_map(property_map), to: RDF.PropertyMap, as: :new
defdelegate langString(value, opts), to: RDF.LangString, as: :new
defdelegate lang_string(value, opts), to: RDF.LangString, as: :new
for term <- ~w[type subject predicate object first rest value]a do
defdelegate unquote(term)(), to: RDF.NS.RDF
@doc false
defdelegate unquote(term)(s, o), to: RDF.NS.RDF
@doc false
defdelegate unquote(term)(s, o1, o2), to: RDF.NS.RDF
@doc false
defdelegate unquote(term)(s, o1, o2, o3), to: RDF.NS.RDF
@doc false
defdelegate unquote(term)(s, o1, o2, o3, o4), to: RDF.NS.RDF
@doc false
defdelegate unquote(term)(s, o1, o2, o3, o4, o5), to: RDF.NS.RDF
end
defdelegate langString(), to: RDF.NS.RDF
defdelegate lang_string(), to: RDF.NS.RDF, as: :langString
defdelegate unquote(nil)(), to: RDF.NS.RDF
defdelegate __base_iri__(), to: RDF.NS.RDF
end
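A hedged sketch of the shortcut constructors delegated above (return shapes depend on `RDF.Triple`, `RDF.Graph`, etc., so the exact structs/tuples are elided):

```elixir
s = RDF.iri("http://example.com/S")
p = RDF.iri("http://example.com/p")
triple = RDF.triple(s, p, RDF.literal(42))
graph = RDF.graph(triple)

RDF.bnode?(RDF.bnode()) #=> true
RDF.literal?(RDF.literal("hello", language: "en")) #=> true
```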
|
lib/rdf.ex
| 0.897749
| 0.880951
|
rdf.ex
|
starcoder
|
defmodule Bonny.Config do
@moduledoc """
Operator configuration interface
"""
@doc """
Kubernetes API Group of this operator
"""
@spec group() :: binary
def group do
default = "#{project_name()}.example.com"
Application.get_env(:bonny, :group, default)
end
@doc """
The name of the operator.
Name must consist of only lowercase letters and hyphens.
Defaults to hyphenated mix project app name. E.g.: `:hello_operator` becomes `hello-operator`
"""
@spec name() :: binary
def name() do
:bonny
|> Application.get_env(:operator_name, project_name())
|> dns_safe_name
end
@doc """
Kubernetes service account name to run operator as.
*Note:* if a kube config file is provided, this service account will still be created
and assigned to pods, but the *config file auth will be used* when making requests to the Kube API.
Name must consist of only lowercase letters and hyphens.
Defaults to hyphenated mix project app name. E.g.: `:hello_operator` becomes `hello-operator`
"""
@spec service_account() :: binary
def service_account() do
:bonny
|> Application.get_env(:service_account_name, project_name())
|> dns_safe_name
end
defp project_name() do
Mix.Project.config()
|> Keyword.fetch!(:app)
|> Atom.to_string()
|> String.replace("_", "-")
end
defp dns_safe_name(str) do
str
|> String.downcase()
|> String.replace(~r/[^a-z-]/, "-")
end
@doc """
Labels to apply to all operator resources.
*Note:* These are only applied to the resources that compose the operator itself,
not the resources created by the operator.
This can be set in config.exs:
```
config :bonny, labels: %{foo: "bar", quz: "baz"}
```
"""
@spec labels() :: map()
def labels() do
Application.get_env(:bonny, :labels, %{})
end
@doc """
List of all controller modules to watch.
This *must* be set in config.exs:
```
config :bonny, controllers: [MyController1, MyController2]
```
"""
@spec controllers() :: list(atom)
def controllers() do
Application.get_env(:bonny, :controllers, [])
end
@doc """
The namespace to watch for `Namespaced` CRDs.
Defaults to `default`
This can be set via environment variable:
```shell
BONNY_POD_NAMESPACE=prod
iex -S mix
```
Bonny sets `BONNY_POD_NAMESPACE` on all Kubernetes deployments to the namespace the operator is deployed in.
"""
@spec namespace() :: binary
def namespace() do
System.get_env("BONNY_POD_NAMESPACE") || "default"
end
@doc """
`K8s.Cluster` name used for this operator. Defaults to `:default`
"""
@spec cluster_name() :: atom
def cluster_name() do
Application.get_env(:bonny, :cluster_name, :default)
end
end
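A hedged sketch of the fallback behavior, assuming a Mix project named `:hello_operator` with none of the optional config set:

```elixir
Bonny.Config.name()      #=> "hello-operator"
Bonny.Config.group()     #=> "hello-operator.example.com"
Bonny.Config.namespace() #=> "default" (unless BONNY_POD_NAMESPACE is set)
```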
|
lib/bonny/config.ex
| 0.843348
| 0.470676
|
config.ex
|
starcoder
|
import Kojin.Id
defmodule Kojin.Pod.PodType do
@moduledoc """
A set of functions for defining _Plain Old Data_ types (i.e. POD Types).
"""
alias Kojin.Pod.PodType
use TypedStruct
@typedoc """
A `Kojin.Pod.PodType` defines a type that can be used to type fields
in objects and arrays in a schema.
- `id`: The identifier for the _POD_ type.
- `doc`: Documentation for the type
- `variable_size?`: Boolean indicating whether the type is variable (i.e. not
  fixed) size, for purposes of serialization
- `package`: Optional package the type belongs to (e.g. `:std`)
"""
typedstruct enforce: true do
field(:id, atom)
field(:doc, binary)
field(:variable_size?, boolean)
field(:package, atom | binary | nil)
end
@doc """
Creates `Kojin.Pod.PodType` from `id`, `doc` and
`options`.
- `variable_size?`: Annotation indicating the object is not fixed size
- `package`: Package the type belongs to (defaults to `nil`)
## Examples
iex> t = Kojin.Pod.PodType.pod_type(:number, "A number")
...> (%Kojin.Pod.PodType{id: :number, doc: "A number"} = t) && :match
:match
Id must be snake case
iex> _t = Kojin.Pod.PodType.pod_type(:SomeNumber, "A number")
** (RuntimeError) PodType id `SomeNumber` must be snake case.
"""
def pod_type(id, doc, opts \\ []) when is_atom(id) and is_binary(doc) do
if !is_snake(id), do: raise("PodType id `#{id}` must be snake case.")
defaults = [variable_size?: false, package: nil]
opts = Keyword.merge(defaults, opts)
%PodType{
id: id,
doc: doc,
variable_size?: opts[:variable_size?],
package: opts[:package]
}
end
end
defmodule Kojin.Pod.PodTypeRef do
@moduledoc """
Models a reference to a non-standard `Kojin.Pod.PodType` defined
in a `Kojin.Pod.PodPackage`. The purpose is to decouple the definition
of the type from its identity. Types are identified by `dot` qualified
names:
# Examples
- `"package.subpackage.user_defined_type"` Refers to type `:user_defined_type` in
package `[ :package, :subpackage ]`
- `"root.user_defined_type"` Refers to type `:user_defined_type` in package
`[ :root ]`
- `:user_defined_type` Refers to type `:user_defined_type` in the _empty package_ `[]`,
where _empty package_ implies current package.
"""
use TypedStruct
alias Kojin.Pod.PodTypeRef
typedstruct enforce: true do
field(:type_id, atom)
field(:type_path, list(atom))
end
@doc """
Create a `Kojin.Pod.PodTypeRef` from a *snake case* `dot` qualified name.
## Examples
iex> alias Kojin.Pod.PodTypeRef
...> PodTypeRef.pod_type_ref("root.grandparent.parent.child_type")
%PodTypeRef{
type_id: :child_type,
type_path: [ :root, :grandparent, :parent ]
}
iex> alias Kojin.Pod.PodTypeRef
...> PodTypeRef.pod_type_ref(:some_type)
%PodTypeRef{
type_id: :some_type,
type_path: []
}
"""
def pod_type_ref(qualified_name) when is_binary(qualified_name) do
parts = String.split(qualified_name, ".")
if(Enum.any?(parts, fn part -> !is_snake(part) end)) do
raise("PodTypeRef qualified name `#{qualified_name}` must be snake case.")
end
%PodTypeRef{
type_id: String.to_atom(List.last(parts)),
type_path:
Enum.map(Enum.slice(parts, 0, Enum.count(parts) - 1), fn part -> String.to_atom(part) end)
}
end
def pod_type_ref(name) when is_atom(name) do
%PodTypeRef{
type_id: name,
type_path: []
}
end
end
defmodule Kojin.Pod.PodTypes do
@moduledoc """
Provides a set of predefined types.
"""
import Kojin.Pod.PodType
import Kojin.Pod.PodTypeRef
@std_types %{
string: pod_type(:string, "One or more characters", variable_size?: true, package: :std),
int64: pod_type(:int64, "64 bit integer", package: :std),
int32: pod_type(:int32, "32 bit integer", package: :std),
int16: pod_type(:int16, "16 bit integer", package: :std),
int8: pod_type(:int8, "64 bit integer", package: :std),
uint64: pod_type(:uint64, "64 bit unsigned integer", package: :std),
uint32: pod_type(:uint32, "32 bit unsigned integer", package: :std),
uint16: pod_type(:uint16, "16 bit unsigned integer", package: :std),
uint8: pod_type(:uint8, "8 bit unsigned integer", package: :std),
char: pod_type(:char, "Single ASCII character", package: :std),
uchar: pod_type(:uchar, "Single ASCII unsigned character", package: :std),
date: pod_type(:date, "A date", package: :std),
timestamp:
pod_type(:timestamp, "A timestamp that includes both date and time", package: :std),
double: pod_type(:double, "64-bit floating point number", package: :std),
boolean: pod_type(:boolean, "A boolean (true/false) value", package: :std),
uuid: pod_type(:uuid, "A universally unique identifier (UUID)", package: :std)
}
@example_tests @std_types
|> Enum.map(fn {_name, type} ->
"""
iex> Kojin.Pod.PodTypes.pod_type(:#{type.id})
#{inspect(type)}
"""
end)
@doc """
Return the std type identified by the provided atom.
## Examples
#{@example_tests}
iex> Kojin.Pod.PodTypes.pod_type(:user_defined_type)
%Kojin.Pod.PodTypeRef{
type_id: :user_defined_type,
type_path: []
}
"""
def pod_type(%Kojin.Pod.PodType{} = pod_type), do: pod_type
def pod_type(%Kojin.Pod.PodTypeRef{} = pod_type_ref), do: pod_type_ref
def pod_type(%Kojin.Pod.PodArray{} = pod_array), do: pod_array
def pod_type(%Kojin.Pod.PodMap{} = pod_map), do: pod_map
def pod_type(:string), do: @std_types.string
def pod_type(:int64), do: @std_types.int64
def pod_type(:int32), do: @std_types.int32
def pod_type(:int16), do: @std_types.int16
def pod_type(:int8), do: @std_types.int8
def pod_type(:uint64), do: @std_types.uint64
def pod_type(:uint32), do: @std_types.uint32
def pod_type(:uint16), do: @std_types.uint16
def pod_type(:uint8), do: @std_types.uint8
def pod_type(:char), do: @std_types.char
def pod_type(:uchar), do: @std_types.uchar
def pod_type(:date), do: @std_types.date
def pod_type(:timestamp), do: @std_types.timestamp
def pod_type(:double), do: @std_types.double
def pod_type(:boolean), do: @std_types.boolean
def pod_type(:uuid), do: @std_types.uuid
def pod_type(t) when is_atom(t), do: pod_type_ref(t)
def pod_type(t) when is_binary(t), do: pod_type_ref(t)
@doc """
Map of std types indexed by atom
"""
def std(), do: @std_types
@doc """
Returns a referred to type (e.g. user defined type)
"""
def ref_type(%Kojin.Pod.PodType{} = _pod_type), do: nil
def ref_type(%Kojin.Pod.PodTypeRef{} = pod_type_ref), do: pod_type_ref
def ref_type(%Kojin.Pod.PodArray{} = pod_array), do: ref_type(pod_array.item_type)
def ref_type(%Kojin.Pod.PodMap{} = pod_map), do: ref_type(pod_map.value_type)
def is_pod_map?(%Kojin.Pod.PodMap{} = _), do: true
def is_pod_map?(_), do: false
def is_pod_array?(%Kojin.Pod.PodArray{} = _), do: true
def is_pod_array?(_), do: false
end
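A hedged sketch of type resolution: atoms naming standard types resolve to concrete `PodType` structs, while anything else becomes a `PodTypeRef` to be resolved against a package later:

```elixir
Kojin.Pod.PodTypes.pod_type(:int32)
#=> %Kojin.Pod.PodType{id: :int32, doc: "32 bit integer",
#     variable_size?: false, package: :std}

Kojin.Pod.PodTypes.pod_type("model.address")
#=> %Kojin.Pod.PodTypeRef{type_id: :address, type_path: [:model]}
```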
|
lib/kojin/pod/pod_type.ex
| 0.86988
| 0.538437
|
pod_type.ex
|
starcoder
|
defmodule Betazoids.Collector do
@moduledoc """
Betazoids.Collector is a process that collects stats from the Betazoids
messenger group
It uses Betazoids.Facebook to interact with the Facebook Graph API.
Collector consists to taking a short-lived token and extending it to a
long-lived token. This token is then used to periodically fetch data and
results from the Betazoids messenger group
Functions prefixed with `req_http_` are **impure** functions that call out
via HTTP to Facebook. These have been specifically noted so that you
carefully tread around these. Functions to process the responses of these
HTTP requests are "pure" functions, but they still hit the database
"""
use Supervisor
import Ecto.Query
alias Betazoids.CollectorLog
alias Betazoids.Facebook
alias Betazoids.Repo
@betazoids_thread_id "438866379596318"
def start_link do
Supervisor.start_link(__MODULE__, [], [name: Betazoids.Collector])
end
def init([]) do
children = [
worker(Task, [__MODULE__, :collect_thread!, []], [name: Betazoids.Collector.ThreadProcessor])
]
supervise(children, strategy: :one_for_one)
end
@doc """
WARNING: impure function!
Make a request to the betazoids thread and gets the head (latest)
"""
def req_http_betazoids_head! do
path = Facebook.thread(@betazoids_thread_id, graph_explorer_access_token())
case Facebook.get!(path) do
%HTTPoison.Response{status_code: 200, body: body} ->
comments = body.comments.data
paging = body.comments.paging
members = body.to.data
last_updated = body.updated_time
{:ok, %{comments: comments, paging: paging, members: members, last_updated: last_updated}}
%HTTPoison.Response{status_code: 400, body: body} -> {:error, body}
%HTTPoison.Error{reason: reason} -> {:error, reason}
end
end
@doc """
WARNING: impure function!
Make a request to the betazoids at the given url
"""
def req_http_betazoids_next!(next_url) do
path = path_from_url(reauth_url(next_url))
case Facebook.get!(path) do
%HTTPoison.Response{status_code: 200, body: body} ->
if length(body.data) == 0 do
{:ok, %{done: true}}
else
{:ok, %{comments: body.data, paging: body.paging}}
end
%HTTPoison.Response{status_code: 400, body: body} -> {:error, body}
%HTTPoison.Error{reason: reason} -> {:error, reason}
end
end
def save_betazoid_members! do
case req_http_betazoids_head!() do
{:ok, %{members: members}} ->
db_members = Enum.map(members, &create_facebook_user/1)
{:ok, %{members: db_members}}
{:error, message} ->
{:error, message}
end
end
@doc """
This is main task of the Collector. It starts from the head, and collect
each message all the way to the beginning
"""
def collect_thread! do
collector_log =
case last_collector_log() do
[%CollectorLog{done: false} = last_log] ->
last_log
_empty_or_done ->
{:ok, collector_log} = fetch_head!()
collector_log
end
{:ok, collector_log} = fetch_next!(collector_log)
IO.puts "**********************************************************"
IO.puts """
CollectorLog #{collector_log.id}
has fetched #{collector_log.fetch_count} times,
fetching #{collector_log.message_count} message
"""
IO.puts "**********************************************************"
end
def fetch_head! do
{:ok, res} = req_http_betazoids_head!()
process_head(res.comments, res.paging.next)
end
def fetch_next!(collector_log, tracer \\ []) do
IO.puts "********************************************"
IO.puts "tracer #{inspect tracer}"
IO.puts "********************************************"
if collector_log.done do
IO.puts """
done fetching #{collector_log.message_count} messages
in #{collector_log.fetch_count} fetches
"""
{:ok, collector_log}
else
{:ok, res} = req_http_betazoids_next!(collector_log.next_url)
collector_log =
case res do
%{done: true} ->
# Bind the updated log so the recursion below sees done: true and halts.
{:ok, collector_log} = process_done(collector_log)
collector_log
%{comments: comments, paging: paging} ->
{:ok, collector_log} = process_next(collector_log, comments, paging.next)
collector_log
end
:timer.sleep(1500)
fetch_next!(collector_log, tracer ++ [collector_log.fetch_count])
end
end
def process_head(comments, next_url) do
Repo.transaction fn ->
{:ok, collector_log} = create_collector_log()
changeset = CollectorLog.changeset(collector_log, %{
fetch_count: 1,
# message_count: length(comments),
next_url: next_url
})
{:ok, collector_log} = Repo.update(changeset)
process_comments(comments, collector_log)
collector_log
end
end
def process_done(collector_log) do
changeset = CollectorLog.changeset(collector_log, %{done: true})
Repo.update(changeset)
end
def process_next(collector_log, comments, next_url) do
changeset = CollectorLog.changeset(collector_log, %{
fetch_count: collector_log.fetch_count + 1,
# message_count: collector_log.message_count + length(comments),
next_url: next_url
})
IO.puts """
#{collector_log.fetch_count} -
total: #{collector_log.message_count},
add #{length(comments)} comments,
first: #{List.first(comments).created_time}
"""
Repo.transaction fn ->
{:ok, updated} = Repo.update(changeset)
{:ok, updated} = process_comments(comments, updated)
updated
end
end
def process_comments_old(comments, collector_log, _next_url) do
cache = betazoids_member_cache()
Enum.each(comments, fn c ->
# Map.put_new/3 keeps an existing :message; rebinding via `c = ...` inside
# `unless` never escaped the block, so the old form silently did nothing.
c = Map.put_new(c, :message, nil)
{:ok, {_message, _collector_log}} = create_facebook_message(c, collector_log, cache)
end)
{:ok, collector_log}
end
# Multi-clause functions with default arguments need a separate function head.
def process_comments(comments, collector_log, cache \\ %{})
def process_comments([], collector_log, _cache) do
{:ok, collector_log}
end
def process_comments(comments, collector_log, cache) do
cache = if cache == %{}, do: betazoids_member_cache(), else: cache
[head | tail] = comments
head = Map.put_new(head, :message, nil)
{:ok, {_message, collector_log}} = create_facebook_message(head, collector_log, cache)
process_comments(tail, collector_log, cache)
end
def last_collector_log do
query = from cl in CollectorLog,
order_by: [desc: cl.id],
limit: 1,
select: cl
Repo.all(query)
end
def create_collector_log do
changeset = CollectorLog.changeset(%CollectorLog{}, %{})
Repo.insert(changeset)
end
def create_facebook_user(%{id: id, name: name}) do
changeset = Facebook.User.changeset(%Facebook.User{}, %{
facebook_id: id,
name: name})
case Repo.insert(changeset) do
{:ok, user} ->
IO.puts "YAY created #{user.name}"
{:ok, user}
{:error, changeset} ->
IO.puts "BOO errored"
IO.puts Enum.map(changeset.errors, fn({k,v}) -> "#{k} #{v}" end)
{:error, changeset}
end
end
def create_facebook_message(%{
id: id,
from: from_hash,
message: message,
created_time: created_time},
collector_log,
user_cache \\ %{}) do
user_id = database_id_from_cache(user_cache, from_hash.id)
{:ok, ecto_date} = parse_date(created_time)
changeset = Facebook.Message.changeset(%Facebook.Message{}, %{
facebook_id: id,
user_id: user_id,
text: message,
created_at: ecto_date,
collector_log_id: collector_log.id,
# DETAIL(yu): We increment the fetch count because we don't persist an
# updated fetch count on the CollectorLog until all of the comments for a
# fetch batch have been persisted
collector_log_fetch_count: collector_log.fetch_count + 1
})
after_callback = fn ->
cs = CollectorLog.changeset(collector_log, %{message_count: collector_log.message_count + 1})
{:ok, collector_log} = Repo.update(cs)
collector_log
end
case Idempotence.create(
Repo,
Facebook.Message,
:facebook_id,
changeset,
after_callback: after_callback
) do
# TODO(yu): this is just plain wrong right? We don't want the
# after_callback to execute if it fails to create.
# Let's write a test for this in the CollectorTest
{:ok, %{created: true, model: message, callbacks: callbacks}} ->
{:ok, {message, callbacks.after}}
{:ok, %{created: false, model: message, callbacks: _callbacks}} ->
{:ok, {message, collector_log}}
end
end
defp database_id_from_cache(cache, facebook_id) do
case cache[facebook_id] do
%Facebook.User{id: id} -> id
nil ->
query = from u in Facebook.User,
where: u.facebook_id == ^facebook_id,
select: u
case Repo.all(query) do
[] -> raise "No user found for #{facebook_id}, shouldn't happen"
[%Facebook.User{id: id}] -> id
end
end
end
def betazoids_member_cache do
query = from u in Facebook.User,
select: u
query
|> Repo.all()
|> Enum.reduce(%{}, fn u, cache -> Map.put(cache, u.facebook_id, u) end)
end
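# NOTE (assumption): 31 characters is the length of the Graph API origin plus
# version prefix, e.g. "https://graph.facebook.com/v2.4", so slicing yields a
# relative path that Facebook.get!/1 can request.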
def path_from_url(url) do
String.slice(url, 31..-1)
end
def parse_date(raw_date) do
{:ok, timex_date} = Timex.DateFormat.parse(raw_date, "{ISO}")
{:ok, ecto_raw_date} = Timex.DateFormat.format(timex_date, "{ISOz}")
Ecto.DateTime.cast(ecto_raw_date)
end
def graph_explorer_access_token do
query = from fat in Facebook.AccessToken,
order_by: [desc: fat.id],
limit: 1,
select: fat
[%Facebook.AccessToken{token: token}] = Repo.all(query)
token
end
def reauth_url(next_url) do
[base_url, query_params] = String.split(next_url, "?")
base_url <> "?" <> (query_params
|> String.split("&")
|> Enum.map_join("&", fn(params) ->
case params do
"access_token=" <> _ -> "access_token=#{graph_explorer_access_token}"
anything -> anything
end
end))
end
end
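Since the Collector is itself a Supervisor, a hedged start-up sketch is just the following (Repo and Facebook token setup assumed to be in place):

```elixir
{:ok, _pid} = Betazoids.Collector.start_link()
# The supervised Task then runs collect_thread!/0, walking the thread from
# the head back to the beginning and recording progress in CollectorLog rows.
```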
|
lib/betazoids/collector.ex
| 0.773174
| 0.525673
|
collector.ex
|
starcoder
|
defmodule Square.Customers do
@moduledoc """
Documentation for `Square.Customers`.
"""
@doc """
Lists a business's customers.
```
def list_customers(client, [cursor: nil, sort_field: nil, sort_order: nil])
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `cursor` | `String` | Query, Optional | A pagination cursor returned by a previous call to this endpoint.<br>Provide this to retrieve the next set of results for your original query.<br><br>See the [Pagination guide](https://developer.squareup.com/docs/working-with-apis/pagination) for more information. |
| `sort_field` | [`String (Customer Sort Field)`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/customer-sort-field.md) | Query, Optional | Indicates how Customers should be sorted.<br><br>Default: `DEFAULT`. |
| `sort_order` | [`String (Sort Order)`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/sort-order.md) | Query, Optional | Indicates whether Customers should be sorted in ascending (`ASC`) or<br>descending (`DESC`) order.<br><br>Default: `ASC`. |
### Response Type
[`List Customers Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/list-customers-response.md)
### Example Usage
iex> Square.client |> Square.Customers.list_customers()
"""
@spec list_customers(Tesla.Client.t(), list) :: {:error, any} | {:ok, Tesla.Env.t()}
def list_customers(client, params \\ []), do: Tesla.get(client, "customers", query: params)
@doc """
Creates a new customer for a business, which can have associated cards on file.
You must provide __at least one__ of the following values in your request to this
endpoint:
- `given_name`
- `family_name`
- `company_name`
- `email_address`
- `phone_number`
```
def create_customer(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Create Customer Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-customer-request.md) | Body, Required | A map containing the fields to POST for the request.<br><br>See the corresponding map definition for field details. |
### Response Type
[`Create Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-customer-response.md)
### Example Usage
iex> body = %{
given_name: "Amelia",
family_name: "Earhart",
email_address: "<EMAIL>",
address: %{
address_line_1: "500 Electric Ave.",
address_line_2: "Suite 600",
locality: "New York",
administrative_district_level_1: "NY",
postal_code: "10003",
country: "US"
},
phone_number: "1-212-555-4240",
reference_id: "Your reference ID",
note: "a customers"
}
iex> Square.client |> Square.Customers.create_customer(body)
"""
@spec create_customer(Tesla.Client.t(), map) :: {:error, any} | {:ok, Tesla.Env.t()}
def create_customer(client, body \\ %{}), do: Tesla.post(client, "customers", body)
@doc """
Searches the customer profiles associated with a Square account.
Calling SearchCustomers without an explicit query parameter returns all
customer profiles ordered alphabetically based on `given_name` and `family_name`.
```
def search_customers(client, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `body` | [`Search Customers Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/search-customers-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Search Customers Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/search-customers-response.md)
### Example Usage
iex> body = %{
limit: 2,
query: %{
filter: %{
creation_source: %{
values: ["THIRD_PARTY"],
rule: "INCLUDE"
},
created_at: %{
start_at: "2018-01-01T00:00:00-00:00",
end_at: "2018-02-01T00:00:00-00:00"
},
group_ids: %{
all: ["545AXB44B4XXWMVQ4W8SBT3HHF"]
},
}
},
sort: %{
field: "CREATED_AT",
order: "ASC"
}
}
iex> Square.client |> Square.Customers.search_customers(body)
"""
@spec search_customers(Tesla.Client.t(), map) :: {:error, any} | {:ok, Tesla.Env.t()}
def search_customers(client, body \\ %{}), do: Tesla.post(client, "customers/search", body)
@doc """
Deletes a customer from a business, along with any linked cards on file. When two profiles
are merged into a single profile, that profile is assigned a new `customer_id`. You must use the
new `customer_id` to delete merged profiles.
```
def delete_customer(client, customer_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer to delete. |
### Response Type
[`Delete Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/delete-customer-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> Square.client |> Square.Customers.delete_customer(customer_id)
"""
@spec delete_customer(Tesla.Client.t(), binary) :: {:error, any} | {:ok, Tesla.Env.t()}
def delete_customer(client, customer_id), do: Tesla.delete(client, "customers/#{customer_id}")
@doc """
Returns details for a single customer.
```
def retrieve_customer(client, customer_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer to retrieve. |
### Response Type
[`Retrieve Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/retrieve-customer-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> Square.client |> Square.Customers.retrieve_customer(customer_id)
"""
@spec retrieve_customer(Tesla.Client.t(), binary) ::
{:error, any} | {:ok, Tesla.Env.t()}
def retrieve_customer(client, customer_id), do: Tesla.get(client, "customers/#{customer_id}")
@doc """
Updates the details of an existing customer. When two profiles are merged
into a single profile, that profile is assigned a new `customer_id`. You must use
the new `customer_id` to update merged profiles.
You cannot edit a customer's cards on file with this endpoint. To make changes
to a card on file, you must delete the existing card on file with the
[DeleteCustomerCard](#endpoint-deletecustomercard) endpoint, then create a new one with the
[CreateCustomerCard](#endpoint-createcustomercard) endpoint.
```
def update_customer(client, customer_id, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer to update. |
| `body` | [`Update Customer Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/update-customer-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Update Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/update-customer-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> body = %{
email_address: "<EMAIL>",
phone_number: "",
note: "updated customer note"
}
iex> Square.client |> Square.Customers.update_customer(customer_id, body)
"""
@spec update_customer(Tesla.Client.t(), binary, map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def update_customer(client, customer_id, body),
do: Tesla.put(client, "customers/#{customer_id}", body)
@doc """
Adds a card on file to an existing customer.
As with charges, calls to `CreateCustomerCard` are idempotent. Multiple
calls with the same card nonce return the same card record that was created
with the provided nonce during the _first_ call.
```
def create_customer_card(client, customer_id, body)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The Square ID of the customer profile the card is linked to. |
| `body` | [`Create Customer Card Request Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-customer-card-request.md) | Body, Required | An object containing the fields to POST for the request.<br><br>See the corresponding object definition for field details. |
### Response Type
[`Create Customer Card Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/create-customer-card-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> body = %{
card_nonce: "Your card nonce",
billing_address: %{
address_line_1: "500 Electric Ave",
address_line_2: "Suite 600",
locality: "New York",
administrative_district_level_1: "NY",
postal_code: "10003",
country: "US",
cardholder_name: "<NAME>"
}
}
iex> Square.client |> Square.Customers.create_customer_card(customer_id, body)
"""
@spec create_customer_card(Tesla.Client.t(), binary, map) ::
{:error, any} | {:ok, Tesla.Env.t()}
def create_customer_card(client, customer_id, body),
do: Tesla.post(client, "customers/#{customer_id}/cards", body)
@doc """
Removes a card on file from a customer.
```
def delete_customer_card(client, customer_id, card_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer that the card on file belongs to. |
| `card_id` | `String` | Template, Required | The ID of the card on file to delete. |
### Response Type
[`Delete Customer Card Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/delete-customer-card-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> card_id = "card_id4"
iex> Square.client |> Square.Customers.delete_customer_card(customer_id, card_id)
"""
@spec delete_customer_card(Tesla.Client.t(), binary, binary) ::
{:error, any} | {:ok, Tesla.Env.t()}
def delete_customer_card(client, customer_id, card_id),
do: Tesla.delete(client, "customers/#{customer_id}/cards/#{card_id}")
@doc """
Adds a group membership to a customer.
The customer is identified by the `customer_id` value
and the customer group is identified by the `group_id` value.
```
def add_group_to_customer(client, customer_id, group_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer to add to a group. |
| `group_id` | `String` | Template, Required | The ID of the customer group to add the customer to. |
### Response Type
[`Add Group to Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/add-group-to-customer-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> group_id = "group_id0"
iex> Square.client |> Square.Customers.add_group_to_customer(customer_id, group_id)
"""
@spec add_group_to_customer(Tesla.Client.t(), binary, binary) ::
{:error, any} | {:ok, Tesla.Env.t()}
def add_group_to_customer(client, customer_id, group_id),
do: Tesla.put(client, "customers/#{customer_id}/groups/#{group_id}", %{})
@doc """
Removes a group membership from a customer.
The customer is identified by the `customer_id` value
and the customer group is identified by the `group_id` value.
```
def remove_group_from_customer(client, customer_id, group_id)
```
### Parameters
| Parameter | Type | Tags | Description |
| --- | --- | --- | --- |
| `customer_id` | `String` | Template, Required | The ID of the customer to remove from the group. |
| `group_id` | `String` | Template, Required | The ID of the customer group to remove the customer from. |
### Response Type
[`Remove Group From Customer Response Map`](https://github.com/square/square-ruby-sdk/blob/master/doc/models/remove-group-from-customer-response.md)
### Example Usage
iex> customer_id = "customer_id8"
iex> group_id = "group_id0"
iex> Square.client |> Square.Customers.remove_group_from_customer(customer_id, group_id)
"""
@spec remove_group_from_customer(Tesla.Client.t(), binary, binary) ::
{:error, any} | {:ok, Tesla.Env.t()}
def remove_group_from_customer(client, customer_id, group_id),
do: Tesla.delete(client, "customers/#{customer_id}/groups/#{group_id}")
end
|
lib/api/customers_api.ex
| 0.936146
| 0.791338
|
customers_api.ex
|
starcoder
|
defmodule ExDoc.Language do
@moduledoc false
@typep spec_ast() :: term()
@typedoc """
The map has the following keys:
* `:module` - the module
* `:docs` - the docs chunk
* `:language` - the language callback
* `:id` - module page name
* `:title` - module display title
* `:type` - module type
* `:line` - the line where the code is located
* `:callback_types` - a list of types that are considered callbacks
* `:nesting_info` - a `{nested_title, nested_context}` tuple or `nil`.
For example, `"A.B.C"` becomes `{"C", "A.B"}`.
* `:private` - a map with language-specific data
"""
@type module_data() :: %{
module: module(),
docs: tuple(),
language: module(),
id: String.t(),
title: String.t(),
type: atom() | nil,
line: non_neg_integer(),
callback_types: [atom()],
nesting_info: {String.t(), String.t()} | nil,
private: map()
}
@doc """
Returns a map with module information.
"""
@callback module_data(module(), tuple(), ExDoc.Config.t()) :: module_data() | :skip
@doc """
Returns a map with function information or an atom `:skip`.
The map has the following keys:
* `:line` - the line where the code is located
* `:specs` - a list of specs that will be later formatted by `c:typespec/2`
* `:doc_fallback` - if set, a 0-arity function that returns DocAST which
will be used as fallback to empty docs on the function node
* `:extra_annotations` - additional annotations
"""
@callback function_data(entry :: tuple(), module_data()) ::
%{
line: non_neg_integer() | nil,
specs: [spec_ast()],
doc_fallback: (() -> ExDoc.DocAST.t()) | nil,
extra_annotations: [String.t()]
}
| :skip
@doc """
Returns a map with callback information.
The map has the following keys:
* `:line` - the line where the code is located
* `:signature` - the signature
* `:specs` - a list of specs that will be later formatted by `c:typespec/2`
* `:extra_annotations` - additional annotations
"""
@callback callback_data(entry :: tuple(), module_data()) ::
%{
line: non_neg_integer() | nil,
signature: [binary()],
specs: [spec_ast()],
extra_annotations: [String.t()]
}
@doc """
Returns a map with type information.
The map has the following keys:
* `:type` - `:type` or `:opaque`
* `:line` - the line where the code is located
* `:signature` - the signature
* `:spec` - a spec that will be later formatted by `c:typespec/2`
"""
@callback type_data(entry :: tuple(), spec :: term()) ::
%{
type: :type | :opaque,
line: non_neg_integer(),
signature: [binary()],
spec: spec_ast()
}
@doc """
Autolinks docs.
"""
@callback autolink_doc(doc :: ExDoc.DocAST.t(), opts :: keyword()) :: ExDoc.DocAST.t()
@doc """
Autolinks typespecs.
"""
@callback autolink_spec(spec :: term(), opts :: keyword()) :: iodata()
@doc """
Returns information for syntax highlighting.
"""
@callback highlight_info() :: %{
language_name: String.t(),
lexer: module(),
opts: keyword()
}
def get(:elixir, _module), do: {:ok, ExDoc.Language.Elixir}
def get(:erlang, _module), do: {:ok, ExDoc.Language.Erlang}
def get(language, module) when is_atom(language) and is_atom(module) do
IO.warn(
"skipping module #{module}, reason: unsupported language (#{language})",
[]
)
:error
end
end
|
lib/ex_doc/language.ex
| 0.885455
| 0.688141
|
language.ex
|
starcoder
|
defmodule Jeeves.Service do
@moduledoc """
Implement a service consisting of a pool of workers, all running in
their own application.
### Prerequisites
You'll need to add poolboy to your project dependencies.
### Usage
To create the service:
* Create a module that implements the API you want. This API will be
expressed as a set of public functions. Each function will automatically
receive the current state in a variable (by default named `state`). There is
no need to declare this as a parameter.[<small>why?</small>](#why-magic-state).
If a function wants to change the state, it must end with a call to the
`Jeeves.Common.update_state/2` function (which will have been
imported into your module automatically).
* Add the line `use Jeeves.Service` to the top of this module.
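For example, a sketch of such a module (the `FaceRecognizer` name and the
`recognize` body are illustrative, reconstructing the example that the
options below refer to):
defmodule FaceRecognizer do
use Jeeves.Service, state_name: :options
def recognize(image) do
# `options` is the injected state variable, renamed via the state_name: option
do_recognize(image, options.algorithm)
end
end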
### Options
You can pass a keyword list to `use Jeeves.Service`:
* `state_name:` _atom_
The default name for the state variable is (unimaginatively) `state`.
Use `state_name` to override this. For example, the previous
example named the state `options`, and inside the `recognize` function
you could write `options.algorithm` to look up the algorithm to use.
* `pool: [ ` _options_ ` ]`
Set options for the service pool. One or more of:
* `min: n`
The minimum number of workers that should be active, and by extension
the number of workers started when the pool is run. Default is 2.
* `max: n`
The maximum number of workers. If all workers are busy and a new request
arrives, a new worker will be started to handle it if the current worker
count is less than `max`. Excess idle workers will be quietly killed off
in the background. Default value is `(min+1)*2`.
* `showcode:` _boolean_
If truthy, dump a representation of the generated code to STDOUT during
compilation.
* `timeout:` integer or float
Specify the timeout to be used when the client calls workers in the pool.
If all workers are busy, and none becomes free in that time, an OTP
exception is raised. An integer specifies the timeout in milliseconds, and
a float in seconds (so 1.5 is the same as 1500).
## Consuming the Service
Each service runs in an independent application. These applications
are referenced by the main application.
The main application lists the services it uses in its `mix.exs`
file.
«todo: finish this»
### State
Each worker has independent state. This state is initialized in two stages.
First, the main application maintains a list of services it uses in its
`mix.exs` file:
@services [
prime_factors: [
args: [ max_calc_time: 10_000 ]
]
]
When the main application starts, it starts each service application in turn.
As each starts, it passes the arguments in the `args` list to the function
`setup_worker_state` in the service. This function does what is required to
create a state that can be passed to each worker when it is started.
For example, our PrimeFactors service might want to maintain a cache
of previously calculated results, shared between all the workers. It
could do this by creating an agent in the
`setup_worker_state` function and adding its pid to the state it
returns.
def setup_worker_state(initial_state) do
{ :ok, pid } = Agent.start_link(fn -> %{} end)
initial_state
|> Enum.into(%{ cache: pid })
end
Each worker would be able to access that agent via the state it
receives:
def factor(n) do
# yes, two workers could calculate the same value in parallel... :)
case Agent.get(state.cache, fn map -> map[n] end) do
nil ->
result = complex_calculation(n)
Agent.update(state.cache, fn map -> Map.put(map, n, result) end)
result
result ->
result
end
end
"""
alias Jeeves.Util.PreprocessorState, as: PS
@doc false
defmacro __using__(opts \\ []) do
generate_application_service(__CALLER__.module, opts)
end
@doc false
def generate_application_service(caller, opts) do
name = Keyword.get(opts, :service_name, nil)
state = Keyword.get(opts, :state, :no_state)
PS.start_link(caller, opts)
quote do
import Kernel, except: [ def: 2 ]
import Jeeves.Common, only: [ def: 2, set_state: 1, set_state: 2 ]
use Application
@before_compile { unquote(__MODULE__), :generate_code }
@name unquote(name) || Module.concat( __MODULE__, PoolSupervisor)
def start(_, _) do
{ :ok, self() }
end
def run() do
run(unquote(state))
end
def run(state) do
Jeeves.Scheduler.start_new_pool(worker_module: __MODULE__.Worker,
pool_opts: unquote(opts[:pool] || [ min: 1, max: 4]),
name: @name,
state: setup_worker_state(state))
end
def setup_worker_state(initial_state), do: initial_state
defoverridable setup_worker_state: 1
end
|> Jeeves.Common.maybe_show_generated_code(opts)
end
@doc false
defmacro generate_code(_) do
{ options, apis, handlers, implementations, delegators } =
Jeeves.Common.create_functions_from_originals(__CALLER__.module, __MODULE__)
PS.stop(__CALLER__.module)
quote do
unquote_splicing(delegators)
defmodule Worker do
use GenServer
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
unquote_splicing(apis)
unquote_splicing(handlers)
defmodule Implementation do
unquote_splicing(implementations)
end
end
end
|> Jeeves.Common.maybe_show_generated_code(options)
end
@doc false
defdelegate generate_api_call(options,function), to: Jeeves.Named
@doc false
defdelegate generate_handle_call(options,function), to: Jeeves.Named
@doc false
defdelegate generate_implementation(options,function), to: Jeeves.Named
@doc false
def generate_delegator(options, {call, _body}) do
quote do
def unquote(call), do: unquote(delegate_body(options, call))
end
end
@doc false
def delegate_body(options, call) do
timeout = options[:timeout] || 5000
request = Jeeves.Named.call_signature(call)
quote do
Jeeves.Scheduler.run(@name, unquote(request), unquote(timeout))
end
end
end
|
lib/jeeves/service.ex
| 0.826397
| 0.736614
|
service.ex
|
starcoder
|
defmodule Ash.Query.Operator.LessThan do
@moduledoc """
left < right
Does not simplify, but is used as the simplification value for
`Ash.Query.Operator.LessThanOrEqual`, `Ash.Query.Operator.GreaterThan` and
`Ash.Query.Operator.GreaterThanOrEqual`.
When comparing predicates, it is mutually exclusive with `Ash.Query.Operator.IsNil`.
Additionally, it compares as mutually inclusive with any `Ash.Query.Operator.Eq` and
any `Ash.Query.Operator.LessThan` whose right sides are less than it, and mutually
exclusive with any `Ash.Query.Operator.Eq` or `Ash.Query.Operator.GreaterThan` whose
right sides are greater than or equal to it. For example, `x < 5` is implied by
`x == 3`, but can never hold together with `x == 7`.
"""
use Ash.Query.Operator, operator: :<, predicate?: true
alias Ash.Query.Operator.{Eq, IsNil}
def new(%Ref{attribute: %{type: type}} = left, right) do
case Ash.Type.cast_input(type, right) do
{:ok, casted} -> {:ok, left, casted}
:error -> {:ok, left, right}
end
end
def new(left, right) do
{:known, left < right}
end
def evaluate(%{left: left, right: right}) do
left < right
end
def bulk_compare(all_predicates) do
all_predicates
|> Enum.group_by(& &1.left)
|> Enum.flat_map(fn {_, all_predicates} ->
predicates =
all_predicates
|> Enum.filter(&(&1.__struct__ in [__MODULE__, Eq]))
|> Enum.sort_by(& &1.right)
nil_exclusive(all_predicates) ++
inclusive_values(predicates) ++ exclusive_values(predicates)
end)
end
defp inclusive_values(sorted_predicates, acc \\ [])
defp inclusive_values([], acc), do: acc
defp inclusive_values([%Eq{} = first | rest], acc) do
rest
|> Enum.reject(&(&1.right == first.right))
|> Enum.filter(&(&1.__struct__ == __MODULE__))
|> case do
[] ->
inclusive_values(rest, acc)
other ->
new_acc =
other
|> Enum.map(&Ash.SatSolver.left_implies_right(first, &1))
|> Kernel.++(acc)
inclusive_values(rest, new_acc)
end
end
defp inclusive_values([%__MODULE__{} = first | rest], acc) do
rest
|> Enum.reject(&(&1.right == first.right))
|> Enum.filter(&(&1.__struct__ == Eq))
|> case do
[] ->
inclusive_values(rest, acc)
other ->
new_acc =
other
|> Enum.map(&Ash.SatSolver.right_implies_left(first, &1))
|> Kernel.++(acc)
inclusive_values(rest, new_acc)
end
end
defp exclusive_values(sorted_predicates, acc \\ [])
defp exclusive_values([], acc), do: acc
defp exclusive_values([%__MODULE__{} = first | rest], acc) do
case Enum.filter(rest, &(&1.__struct__ == Eq)) do
[] ->
exclusive_values(rest, acc)
other ->
new_acc =
other
|> Enum.map(&Ash.SatSolver.left_excludes_right(first, &1))
|> Kernel.++(acc)
exclusive_values(rest, new_acc)
end
end
defp exclusive_values([_ | rest], acc) do
exclusive_values(rest, acc)
end
defp nil_exclusive(predicates) do
is_nils = Enum.filter(predicates, &(&1.__struct__ == IsNil))
case is_nils do
[] ->
[]
is_nils ->
predicates
|> Enum.filter(&(&1.__struct__ == __MODULE__))
|> Enum.flat_map(fn lt ->
Ash.SatSolver.mutually_exclusive([lt | is_nils])
end)
end
end
end
|
lib/ash/query/operator/less_than.ex
| 0.871235
| 0.661371
|
less_than.ex
|
starcoder
|
defmodule Timex.Ecto.DateTimeWithTimezone.Query do
@moduledoc """
Timex.Ecto.DateTimeWithTimezone is a composite data type `{dt:datetimetz, tz:timezone}`. When comparing
these data types, you must always compare the `dt` part of the type; otherwise only the string
representation is compared.
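A sketch of intended usage inside an Ecto query (the `Event` schema and its
`starts_at` field are illustrative):
import Timex.Ecto.DateTimeWithTimezone.Query
from e in Event, where: datetime_equal(e.starts_at, ^starting)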
"""
@doc """
Compares 2 Datetimes with time zone using `=` operator.
"""
defmacro datetime_equal(left, right)
# left and right are both pinned variables
defmacro datetime_equal({:^, _, _} = left, {:^, _, _} = right) do
quote do
fragment(
"(?).dt = (?).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# left is a pinned variable, right is a field in the database
defmacro datetime_equal({:^, _, _} = left, right) do
quote do
fragment(
"(?).dt = (?::datetimetz).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
unquote(right)
)
end
end
# left is a field in the database, right is a pinned variable
defmacro datetime_equal(left, {:^, _, _} = right) do
quote do
fragment(
"(?::datetimetz).dt = (?).dt",
unquote(left),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# both fields are database variables
defmacro datetime_equal(left, right) do
quote do
fragment(
"(?::datetimetz).dt = (?::datetimetz).dt",
unquote(left),
unquote(right)
)
end
end
@doc """
Compares 2 Datetimes with time zone using `>` operator.
"""
defmacro datetime_greater_than(left, right)
# left and right are both pinned variables
defmacro datetime_greater_than({:^, _, _} = left, {:^, _, _} = right) do
quote do
fragment(
"(?).dt > (?).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# left is a pinned variable, right is a field in the database
defmacro datetime_greater_than({:^, _, _} = left, right) do
quote do
fragment(
"(?).dt > (?::datetimetz).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
unquote(right)
)
end
end
# left is a field in the database, right is a pinned variable
defmacro datetime_greater_than(left, {:^, _, _} = right) do
quote do
fragment(
"(?::datetimetz).dt > (?).dt",
unquote(left),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# both fields are database variables
defmacro datetime_greater_than(left, right) do
quote do
fragment(
"(?::datetimetz).dt > (?::datetimetz).dt",
unquote(left),
unquote(right)
)
end
end
@doc """
Compares 2 Datetimes with time zone using `>=` operator.
"""
defmacro datetime_greater_than_or_equal(left, right)
# left and right are both pinned variables
defmacro datetime_greater_than_or_equal({:^, _, _} = left, {:^, _, _} = right) do
quote do
fragment(
"(?).dt >= (?).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# left is a pinned variable, right is a field in the database
defmacro datetime_greater_than_or_equal({:^, _, _} = left, right) do
quote do
fragment(
"(?).dt >= (?::datetimetz).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
unquote(right)
)
end
end
# left is a field in the database, right is a pinned variable
defmacro datetime_greater_than_or_equal(left, {:^, _, _} = right) do
quote do
fragment(
"(?::datetimetz).dt >= (?).dt",
unquote(left),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# both fields are database fields
defmacro datetime_greater_than_or_equal(left, right) do
quote do
fragment(
"(?::datetimetz).dt >= (?::datetimetz).dt",
unquote(left),
unquote(right)
)
end
end
@doc """
Compares 2 Datetimes with time zone using `<` operator.
"""
defmacro datetime_less_than(left, right)
# left and right are both pinned variables
defmacro datetime_less_than({:^, _, _} = left, {:^, _, _} = right) do
quote do
fragment(
"(?).dt < (?).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# left is a pinned variable, right is a field in the database
defmacro datetime_less_than({:^, _, _} = left, right) do
quote do
fragment(
"(?).dt < (?::datetimetz).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
unquote(right)
)
end
end
# left is a field in the database, right is a pinned variable
defmacro datetime_less_than(left, {:^, _, _} = right) do
quote do
fragment(
"(?::datetimetz).dt < (?).dt",
unquote(left),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# both fields are database variables
defmacro datetime_less_than(left, right) do
quote do
fragment(
"(?::datetimetz).dt < (?::datetimetz).dt",
unquote(left),
unquote(right)
)
end
end
@doc """
Compares 2 Datetimes with time zone using `<=` operator.
"""
defmacro datetime_less_than_or_equal(left, right)
# left and right are both pinned variables
defmacro datetime_less_than_or_equal({:^, _, _} = left, {:^, _, _} = right) do
quote do
fragment(
"(?).dt <= (?).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# left is a pinned variable, right is a field in the database
defmacro datetime_less_than_or_equal({:^, _, _} = left, right) do
quote do
fragment(
"(?).dt <= (?::datetimetz).dt",
type(unquote(left), Timex.Ecto.DateTimeWithTimezone),
unquote(right)
)
end
end
# left is a field in the database, right is a pinned variable
defmacro datetime_less_than_or_equal(left, {:^, _, _} = right) do
quote do
fragment(
"(?::datetimetz).dt <= (?).dt",
unquote(left),
type(unquote(right), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# both fields are database variables
defmacro datetime_less_than_or_equal(left, right) do
quote do
fragment(
"(?::datetimetz).dt <= (?::datetimetz).dt",
unquote(left),
unquote(right)
)
end
end
@doc """
Tests two given intervals for overlap.
math: [interval_start_left, interval_end_left) intersection [interval_start_right, interval_end_right) != Interval.empty
This macro requires that `datetime_less_than` has also been imported.
source: https://stackoverflow.com/questions/3269434/whats-the-most-efficient-way-to-test-two-integer-ranges-for-overlap
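Example usage inside an Ecto query (the `Booking` schema and its fields are illustrative):
from b in Booking,
where: datetime_intervals_overlap(b.starts_at, b.ends_at, ^range_start, ^range_end)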
"""
defmacro datetime_intervals_overlap(
interval_start_left,
interval_end_left,
interval_start_right,
interval_end_right
) do
quote do
fragment(
"(? and ?)",
datetime_less_than(
unquote(interval_start_left),
unquote(interval_end_right)
),
datetime_less_than(
unquote(interval_start_right),
unquote(interval_end_left)
)
)
end
end
@doc """
Casting for Datetimes with time zone, uses the datetime part only.
Useful for `order_by`, for example
"""
defmacro datetime_with_timezone(datetime)
# `datetime` is a pinned variable
defmacro datetime_with_timezone({:^, _, _} = datetime) do
quote do
fragment(
"(?).dt",
type(unquote(datetime), Timex.Ecto.DateTimeWithTimezone)
)
end
end
# `datetime` is a database variable
defmacro datetime_with_timezone(datetime) do
quote do
fragment("(?).dt::timestamptz", unquote(datetime))
end
end
end
|
lib/timex/ecto/datetimetz_query.ex
| 0.918063
| 0.716842
|
datetimetz_query.ex
|
starcoder
|
defmodule ApiVersioner.SetVersion do
@moduledoc """
`ApiVersioner.SetVersion` is a *plug* module for setting an API version.
The module allows the API version identifier to be stored inside of the
`:assigns` map within the `%Plug.Conn{}` structure.
Identification of a requested API version is done by reading a specified
HTTP request header. If a value inside of the header corresponds to at
least one of the application's mime types, the extension of the mime type
is considered to be the API version.
For example, if the application's mimes are set to:
config :mime, :types, %{
"application/vnd.app.v1+json" => [:v1],
"application/vnd.app.v2+json" => [:v2]
}
and the API versioner's configuration is done the following way:
plug ApiVersioner.SetVersion, accepts: [:v1, :v2], header: "accept"
then whenever the *Accept* header is either
`Accept: "application/vnd.app.v1+json"` or
`Accept: "application/vnd.app.v2+json"` the API version will be set
either to `:v1` or `:v2` correspondingly.
It is important to note that the `:header` option can be omitted
when the HTTP header used is *Accept*:
# Uses the 'Accept' header by default
plug ApiVersioner.SetVersion, accepts: [:v1, :v2]
Also, as a fallback for situations when no version can be determined, the
`default` option can come in handy:
# When no version is found API version will be set to default :v1
plug ApiVersioner.SetVersion, accepts: [:v1, :v2], default: :v1
In some cases API version can be set by simply omitting all the options
except for the `:default`. This way the version will just be set to the
default value if it is present among application's mime types.
"""
import Plug.Conn
# Default HTTP header to check
@default_header "accept"
@doc false
def init(opts) do
%{
accepts: opts[:accepts] || [], # Accepted versions
default: opts[:default], # Default version
header: opts[:header] || @default_header # HTTP header to check
}
end
@doc false
def call(conn, opts) do
header_value = get_header(conn, opts.header)
case MIME.extensions(header_value) do
[version | _tail] ->
set_version(conn, version)
[] ->
set_default(conn, opts.default)
end
end
# Returns a value of a given HTTP header
defp get_header(conn, header) do
case get_req_header(conn, header) do
[header_value | _tail] ->
header_value
[] ->
nil
end
end
# Set version to a given value
defp set_version(conn, version) do
assign(conn, :version, version)
end
# Sets version to default if default is valid
defp set_default(conn, default) do
if MIME.has_type?(default) do
set_version(conn, default)
else
conn
end
end
end
|
lib/set_version.ex
| 0.869341
| 0.411229
|
set_version.ex
|
starcoder
|
defmodule EWallet.BalanceFetcher do
@moduledoc """
Handles the retrieval and formatting of balances from the local ledger.
"""
alias EWalletDB.{Token, User}
alias LocalLedger.Wallet
@spec all(map()) :: {:ok, %EWalletDB.Wallet{}} | {:error, atom()}
@doc """
Prepare the list of balances and turn them into a suitable format for
EWalletAPI using a user_id.
## Examples
res = BalanceFetcher.all(%{"user_id" => "usr_12345678901234567890123456"})
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def all(%{"user_id" => id}) do
case User.get(id) do
nil ->
{:error, :user_id_not_found}
user ->
wallet = User.get_primary_wallet(user)
format_all(wallet)
end
end
@doc """
Prepare the list of balances and turn them into a suitable format for
EWalletAPI using a provider_user_id.
## Examples
res = BalanceFetcher.all(%{"provider_user_id" => "123"})
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def all(%{"provider_user_id" => provider_user_id}) do
case User.get_by_provider_user_id(provider_user_id) do
nil ->
{:error, :provider_user_id_not_found}
user ->
wallet = User.get_primary_wallet(user)
format_all(wallet)
end
end
@doc """
Prepare the list of balances and turn them into a suitable format for
EWalletAPI using only a wallet.
## Examples
res = BalanceFetcher.all(%Wallet{})
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the local ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def all(%{"wallet" => wallet}) do
format_all(wallet)
end
@doc """
Prepare the list of balances and turn them into a suitable format for
EWalletAPI using only an address.
## Examples
res = BalanceFetcher.all(%{"address" => "d26fc18f-d403-4a39-a039-21e2bc713688"})
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the local ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def all(%{"address" => address}) do
address |> EWalletDB.Wallet.get() |> format_all()
end
@doc """
Prepare the list of balances and turn them into a
suitable format for EWalletAPI using a user and a token_id
## Examples
res = BalanceFetcher.get(user, token)
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the local ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def get(%User{} = user, %Token{} = token) do
user_wallet = User.get_primary_wallet(user)
get(token.id, user_wallet)
end
@doc """
Prepare the list of balances and turn them into a
suitable format for EWalletAPI using a token_id and a wallet
## Examples
res = BalanceFetcher.get("tok_OMG_01cbennsd8q4xddqfmewpwzxdy", wallet)
case res do
{:ok, wallets} ->
# Everything went well, do something.
# response is the response returned by the local ledger (LocalLedger for
# example).
{:error, code, description} ->
# Something went wrong on the other side (LocalLedger maybe) and the
# retrieval failed.
end
"""
def get(id, wallet) do
id |> Wallet.get_balance(wallet.address) |> process_response(wallet, :one)
end
defp format_all(wallet) do
wallet.address |> Wallet.all_balances() |> process_response(wallet, :all)
end
defp process_response({:ok, data}, wallet, type) do
balances =
type
|> load_tokens(data)
|> map_tokens(data)
{:ok, Map.put(wallet, :balances, balances)}
end
defp load_tokens(:all, _), do: Token.all()
defp load_tokens(:one, amounts) do
amounts |> Map.keys() |> Token.get_all()
end
defp map_tokens(tokens, amounts) do
Enum.map(tokens, fn token ->
%{
token: token,
amount: amounts[token.id] || 0
}
end)
end
end
|
apps/ewallet/lib/ewallet/fetchers/balance_fetcher.ex
| 0.742048
| 0.430566
|
balance_fetcher.ex
|
starcoder
|
defmodule Elidactyl.ChangesetCase do
use ExUnit.CaseTemplate
using do
quote do
import unquote(__MODULE__), only: [
assert_invalid: 2, assert_invalid: 3,
assert_valid: 1, assert_valid: 2
]
end
end
@doc """
Asserts that the given changeset is invalid for the given field. The three-argument
form additionally asserts that applying `assertion_expression` to the field's
`error_message` yields a truthy value.
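For example, with an illustrative `User` changeset:
changeset = User.changeset(%User{}, %{})
assert_invalid changeset, :name
assert_invalid changeset, :name, error_message =~ "can't be blank"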
"""
defmacro assert_invalid(changeset, field) when is_atom(field) do
quote do
c = unquote(changeset)
with :non_valid_changeset <- unquote(__MODULE__).validate_changeset(c),
:ok <- unquote(__MODULE__).validate_field(c, unquote(field)),
{message, _opts} <- Keyword.get(c.errors, unquote(field)) do
assert true
else
:invalid_changeset ->
raise "assert_invalid/2 requires a changeset for the first argument"
:valid_changeset ->
flunk("#{inspect(c.data.__struct__)} is valid, expected at least one field to be invalid")
:invalid_field ->
raise "field :#{unquote(field)} not found in #{inspect(c.data.__struct__)}"
_ ->
flunk(":#{unquote(field)} field is valid, expected it to be invalid")
end
end
end
defmacro assert_invalid(changeset, field, assertion_expression) when is_atom(field) do
expr = Macro.to_string(assertion_expression)
quote do
c = unquote(changeset)
with :non_valid_changeset <- unquote(__MODULE__).validate_changeset(c),
:ok <- unquote(__MODULE__).validate_field(c, unquote(field)),
{message, _opts} <- Keyword.get(c.errors, unquote(field)) do
var!(error_message) = message
if unquote(assertion_expression) do
assert true
else
flunk """
Expression did not match error message
#{IO.ANSI.cyan()}error_message:#{IO.ANSI.reset()} #{inspect(message)}
#{IO.ANSI.cyan()}expression:#{IO.ANSI.reset()} #{unquote(expr)}
"""
end
else
:invalid_changeset ->
raise "assert_invalid/3 requires a changeset for the first argument"
:valid_changeset ->
flunk("#{inspect(c.data.__struct__)} is valid, expected at least one field to be invalid")
:invalid_field ->
raise "field :#{unquote(field)} not found in #{inspect(c.data.__struct__)}"
_ ->
flunk(":#{unquote(field)} field is valid, expected it to be invalid")
end
end
end
defmacro assert_valid(changeset) do
quote do
c = unquote(changeset)
with :valid_changeset <- unquote(__MODULE__).validate_changeset(c) do
assert true
else
:invalid_changeset ->
raise "assert_valid/2 requires a changeset for the first argument"
:valid_changeset ->
flunk("#{inspect(c.data.__struct__)} is invalid, expected to be valid")
end
end
end
defmacro assert_valid(changeset, field) when is_atom(field) do
quote do
c = unquote(changeset)
with valid when valid != :invalid_changeset <- unquote(__MODULE__).validate_changeset(c),
:ok <- unquote(__MODULE__).validate_field(c, unquote(field)),
nil <- Keyword.get(c.errors, unquote(field)) do
assert true
else
:invalid_changeset ->
raise "assert_valid/2 requires a changeset for the first argument"
:invalid_field ->
raise "field :#{unquote(field)} not found in #{inspect(c.data.__struct__)}"
_ ->
flunk(":#{unquote(field)} field is invalid, expected it to be valid")
end
end
end
def validate_changeset(%Ecto.Changeset{valid?: true}), do: :valid_changeset
def validate_changeset(%Ecto.Changeset{}), do: :non_valid_changeset
def validate_changeset(_), do: :invalid_changeset
def validate_field(%Ecto.Changeset{data: data}, field) do
if data |> Map.keys() |> Enum.member?(field) do
:ok
else
:invalid_field
end
end
def validate_field(_, _), do: :invalid_changeset
end
|
test/support/changeset_case.ex
| 0.707809
| 0.559741
|
changeset_case.ex
|
starcoder
|
defmodule Mux.Data.RealTime do
@moduledoc """
This module provides functions that interact with the `real-time` endpoints
Note, these API documentation links may break periodically as we update documentation titles.
- [Dimensions](https://api-docs.mux.com/#real-time-get)
- [Metrics](https://api-docs.mux.com/#real-time-get-1)
- [Breakdown](https://api-docs.mux.com/#real-time-get-2)
- [HistogramTimeseries](https://api-docs.mux.com/#real-time-get-3)
- [Timeseries](https://api-docs.mux.com/#real-time-get-4)
"""
alias Mux.{Base, Fixtures}
@doc """
List of available real-time dimensions
Returns `{:ok, dimensions, raw_env}`.
## Examples
iex> client = Mux.client("my_token_id", "my_token_secret")
iex> {:ok, dimensions, _env} = Mux.Data.RealTime.dimensions(client)
iex> dimensions
#{inspect(Fixtures.realtime_dimensions()["data"])}
"""
def dimensions(client) do
Base.get(client, build_base_path() <> "/dimensions")
end
@doc """
List of available real-time metrics
Returns `{:ok, metrics, raw_env}`.
## Examples
iex> client = Mux.client("my_token_id", "my_token_secret")
iex> {:ok, metrics, _env} = Mux.Data.RealTime.metrics(client)
iex> metrics
#{inspect(Fixtures.realtime_metrics()["data"])}
"""
def metrics(client) do
Base.get(client, build_base_path() <> "/metrics")
end
@doc """
Get breakdown information for a specific dimension and metric along with the number of concurrent viewers and negative impact score.
Returns `{:ok, breakdown, raw_env}`.
## Examples
iex> client = Mux.client("my_token_id", "my_token_secret")
iex> {:ok, breakdown, _env} = Mux.Data.RealTime.breakdown(client, "playback-failure-percentage", dimension: "country", timestamp: 1_547_853_000, filters: ["operating_system:windows"])
iex> breakdown
#{inspect(Fixtures.realtime_breakdown()["data"])}
"""
def breakdown(client, metric, params \\ []) do
Base.get(client, build_base_path(metric) <> "/breakdown", query: params)
end
@doc """
List histogram timeseries information for a specific metric
Returns `{:ok, histogram_timeseries, raw_env}`.
## Examples
iex> client = Mux.client("my_token_id", "my_token_secret")
iex> {:ok, histogram_timeseries, _env} = Mux.Data.RealTime.histogram_timeseries(client, "video-startup-time", filters: ["operating_system:windows", "country:US"])
iex> histogram_timeseries
#{inspect(Fixtures.realtime_histogram_timeseries()["data"])}
"""
def histogram_timeseries(client, metric, params \\ []) do
Base.get(client, build_base_path(metric) <> "/histogram-timeseries", query: params)
end
@doc """
List timeseries information for a specific metric along with the number of concurrent viewers.
Returns `{:ok, timeseries, raw_env}`.
## Examples
iex> client = Mux.client("my_token_id", "my_token_secret")
iex> {:ok, timeseries, _env} = Mux.Data.RealTime.timeseries(client, "playback-failure-percentage", filters: ["operating_system:windows", "country:US"])
iex> timeseries
#{inspect(Fixtures.realtime_timeseries()["data"])}
"""
def timeseries(client, metric, params \\ []) do
Base.get(client, build_base_path(metric) <> "/timeseries", query: params)
end
defp build_base_path(), do: "/data/v1/realtime"
defp build_base_path(metric), do: build_base_path() <> "/metrics/#{metric}"
end
|
lib/mux/data/real_time.ex
| 0.915691
| 0.587884
|
real_time.ex
|
starcoder
|
defmodule VintageNetWiFi.Utils do
@moduledoc """
Various utility functions for handling WiFi information
"""
@doc "Converts 1 to true, 0 to false"
def bit_to_boolean(1), do: true
def bit_to_boolean(0), do: false
@type frequency_info() :: %{
band: VintageNetWiFi.AccessPoint.band(),
channel: non_neg_integer(),
dbm_to_percent: function()
}
@doc """
Convert power in dBm to a percent
The returned percentage is intended to be shown to users,
for example as a number of bars or some other kind of
signal strength indicator.
See [Displaying Associated and Scanned Signal
Levels](https://web.archive.org/web/20141222024740/http://www.ces.clemson.edu/linux/nm-ipw2200.shtml).
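For example, using the same calibration values as the 2.4 GHz band below:
iex> VintageNetWiFi.Utils.dbm_to_percent(-30, -20, -83.7)
96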
"""
@spec dbm_to_percent(number(), number(), number()) :: 1..100
def dbm_to_percent(dbm, best_dbm, _worst_dbm) when dbm >= best_dbm do
100
end
def dbm_to_percent(dbm, best_dbm, worst_dbm) do
delta = best_dbm - worst_dbm
delta2 = delta * delta
percent =
100 -
(best_dbm - dbm) * (15 * delta + 62 * (best_dbm - dbm)) /
delta2
# Constrain the percent to integers and never go to 0
# (Kernel.floor/1 was added to Elixir 1.8, so don't use it)
max(:erlang.floor(percent), 1)
end
@doc """
Get information about a WiFi frequency
The frequency should be passed in MHz. The result contains
additional information about the frequency that may be
helpful to users.
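For example:
iex> info = VintageNetWiFi.Utils.frequency_info(2437)
iex> {info.band, info.channel}
{:wifi_2_4_ghz, 6}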
"""
@spec frequency_info(non_neg_integer()) :: frequency_info()
def frequency_info(2412), do: band2_4(1)
def frequency_info(2417), do: band2_4(2)
def frequency_info(2422), do: band2_4(3)
def frequency_info(2427), do: band2_4(4)
def frequency_info(2432), do: band2_4(5)
def frequency_info(2437), do: band2_4(6)
def frequency_info(2442), do: band2_4(7)
def frequency_info(2447), do: band2_4(8)
def frequency_info(2452), do: band2_4(9)
def frequency_info(2457), do: band2_4(10)
def frequency_info(2462), do: band2_4(11)
def frequency_info(2467), do: band2_4(12)
def frequency_info(2472), do: band2_4(13)
def frequency_info(2484), do: band2_4(14)
def frequency_info(5035), do: band5(7)
def frequency_info(5040), do: band5(8)
def frequency_info(5045), do: band5(9)
def frequency_info(5055), do: band5(11)
def frequency_info(5060), do: band5(12)
def frequency_info(5080), do: band5(16)
def frequency_info(5160), do: band5(32)
def frequency_info(5170), do: band5(34)
def frequency_info(5180), do: band5(36)
def frequency_info(5190), do: band5(38)
def frequency_info(5200), do: band5(40)
def frequency_info(5210), do: band5(42)
def frequency_info(5220), do: band5(44)
def frequency_info(5230), do: band5(46)
def frequency_info(5240), do: band5(48)
def frequency_info(5250), do: band5(50)
def frequency_info(5260), do: band5(52)
def frequency_info(5270), do: band5(54)
def frequency_info(5280), do: band5(56)
def frequency_info(5290), do: band5(58)
def frequency_info(5300), do: band5(60)
def frequency_info(5310), do: band5(62)
def frequency_info(5320), do: band5(64)
def frequency_info(5340), do: band5(68)
def frequency_info(5480), do: band5(96)
def frequency_info(5500), do: band5(100)
def frequency_info(5510), do: band5(102)
def frequency_info(5520), do: band5(104)
def frequency_info(5530), do: band5(106)
def frequency_info(5540), do: band5(108)
def frequency_info(5550), do: band5(110)
def frequency_info(5560), do: band5(112)
def frequency_info(5570), do: band5(114)
def frequency_info(5580), do: band5(116)
def frequency_info(5590), do: band5(118)
def frequency_info(5600), do: band5(120)
def frequency_info(5610), do: band5(122)
def frequency_info(5620), do: band5(124)
def frequency_info(5630), do: band5(126)
def frequency_info(5640), do: band5(128)
def frequency_info(5660), do: band5(132)
def frequency_info(5670), do: band5(134)
def frequency_info(5680), do: band5(136)
def frequency_info(5690), do: band5(138)
def frequency_info(5700), do: band5(140)
def frequency_info(5710), do: band5(142)
def frequency_info(5720), do: band5(144)
def frequency_info(5745), do: band5(149)
def frequency_info(5755), do: band5(151)
def frequency_info(5765), do: band5(153)
def frequency_info(5775), do: band5(155)
def frequency_info(5785), do: band5(157)
def frequency_info(5795), do: band5(159)
def frequency_info(5805), do: band5(161)
def frequency_info(5825), do: band5(165)
def frequency_info(5845), do: band5(169)
def frequency_info(5865), do: band5(173)
def frequency_info(4915), do: band5(183)
def frequency_info(4920), do: band5(184)
def frequency_info(4925), do: band5(185)
def frequency_info(4935), do: band5(187)
def frequency_info(4940), do: band5(188)
def frequency_info(4945), do: band5(189)
def frequency_info(4960), do: band5(192)
def frequency_info(4980), do: band5(196)
def frequency_info(_unknown) do
%{band: :unknown, channel: 0, dbm_to_percent: fn dbm -> dbm_to_percent(dbm, -20, -83.7) end}
end
defp band2_4(channel) do
%{
band: :wifi_2_4_ghz,
channel: channel,
dbm_to_percent: fn dbm -> dbm_to_percent(dbm, -20, -83.7) end
}
end
defp band5(channel) do
%{
band: :wifi_5_ghz,
channel: channel,
dbm_to_percent: fn dbm -> dbm_to_percent(dbm, -44, -89) end
}
end
end
|
lib/vintage_net_wifi/utils.ex
| 0.81309
| 0.63375
|
utils.ex
|
starcoder
|
defmodule Proj.Geodesic do
@moduledoc """
Provides functions to solve problems involving geodesic lines.
Common problems this can solve:
- Finding the distance between two locations
- Finding the bearings between two locations
- Finding the resulting location after moving `x` metres forwards facing a
certain bearing from a given location
- Plotting a set of points in a line between two locations
"""
@on_load :load
defstruct [:geod, :a, :f]
defimpl Inspect, for: Proj.Geodesic do
def inspect(geod, _opts) do
{a, f} = Proj.Geodesic.params(geod)
"#Proj.Geodesic<#{a}, #{f}>"
end
end
def load do
filename = :filename.join(:code.priv_dir(:proj), 'geodesic_nif')
:ok = :erlang.load_nif(filename, 0)
end
@doc """
Creates a new `Proj.Geodesic` specification for the planet's ellipsoid
parameters, where `a` represents the equatorial radius in metres, and `f`
represents the flattening.
iex> Proj.Geodesic.init(6378137, 1 / 298.257223563)
#Proj.Geodesic<6378137.0, 0.0033528106647474805>
"""
def init(_a, _f) do
raise "NIF not loaded"
end
@doc """
Returns a `Proj.Geodesic` specification for the Earth's ellipsoid parameters
as specified by WGS84.
"""
def wgs84 do
raise "NIF not loaded"
end
@doc """
Calculates the resultant coordinates and bearing after travelling a given
distance forwards along a geodesic line through a given starting point and
azimuth (bearing).
Return value is in the format `{{lat, lng}, bearing}`.
All coordinates and bearings are given in degrees. `distance` is in metres.
iex> wgs84 = Proj.Geodesic.wgs84
iex> Proj.Geodesic.direct(wgs84, {51.501476, -0.140634}, 60, 100)
{{51.50192539979596, -0.1393868003258145}, 60.00097609168357}
"""
def direct(_geod, _coords, _azimuth, _distance) do
raise "NIF not loaded"
end
@doc """
Calculates the length of the geodesic line between two points and the bearing
of the line at each point.
Return value is in the format `{distance, bearing_a, bearing_b}`.
All coordinates and bearings are given in degrees. `distance` is in metres.
iex> wgs84 = Proj.Geodesic.wgs84
iex> Proj.Geodesic.inverse(wgs84, {51.501476, -0.140634}, {48.8584, 2.2945})
{341549.6819692767, 148.44884919324866, 150.31979086555856}
"""
def inverse(_geod, _coords_a, _coords_b) do
raise "NIF not loaded"
end
@doc """
Calculates the resulting position after travelling `distance` metres forwards
from `coords` facing a bearing of `azimuth`.
This is a convenience wrapper around `Proj.Geodesic.direct/4` which uses the
WGS84 ellipsoid and only returns the resulting coordinates.
Return value is in the format `{lat, lng}`.
All coordinates and bearings are given in degrees.
iex> Proj.Geodesic.travel({51.501476, -0.140634}, 60, 100)
{51.50192539979596, -0.1393868003258145}
"""
def travel(coords, azimuth, distance) do
{result_coords, _azimuth} = direct(wgs84(), coords, azimuth, distance)
result_coords
end
@doc """
Calculates the distance in metres between two points.
This is a convenience wrapper around `Proj.Geodesic.inverse/3` which uses the
WGS84 ellipsoid and only returns the resulting distance.
All coordinates are given in degrees.
iex> Proj.Geodesic.distance({51.501476, -0.140634}, {48.8584, 2.2945})
341549.6819692767
"""
def distance(coords_a, coords_b) do
{result_distance, _azimuth_a, _azimuth_b} = inverse(wgs84(), coords_a, coords_b)
result_distance
end
@doc """
Gets the equatorial radius in metres and flattening of a given `Proj.Geodesic`
ellipsoid specification.
Return value is in the format `{equatorial_radius, flattening}`
iex> wgs84 = Proj.Geodesic.wgs84
iex> Proj.Geodesic.params(wgs84)
{6378137.0, 0.0033528106647474805}
"""
def params(geod) do
{geod.a, geod.f}
end
end
|
lib/proj/geodesic.ex
| 0.920745
| 0.837021
|
geodesic.ex
|
starcoder
|
defmodule Day1 do
@moduledoc """
Compute the Manhattan distance between two points given a path.
"""
@doc """
Compute the Manhattan distance of the steps specified in a file
"""
def distance_file(file_path), do: distance(File.read!(file_path))
@doc """
Given a string of steps, compute the Manhattan distance between the start and end
points, along with the distance to the first location visited twice (`nil` if none).
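For example:
iex> Day1.distance("R2, L3")
{5, nil}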
"""
def distance(path) do
start_point = {0, 0}
steps = parse_path(path)
visited = MapSet.new |> MapSet.put(start_point)
{_heading, end_point, duplicate_point, _visited} = Enum.reduce(steps, {:north, start_point, nil, visited}, &take_step/2)
{manhattan_distance(start_point, end_point), manhattan_distance(start_point, duplicate_point)}
end
@doc """
Compute the manhattan distance between two points
"""
def manhattan_distance(_point, nil), do: nil
def manhattan_distance({x1, y1}, {x2, y2}) do
abs(x1 - x2) + abs(y1 - y2)
end
defp parse_path(str) do
str
|> String.split(",")
|> Enum.map(&String.trim/1)
|> Enum.map(&convert_step/1)
|> List.flatten
end
defp convert_step(<<dir, size :: binary>>) do
steps = String.to_integer(size)
[direction(dir), List.duplicate(:straight, steps)]
end
defp direction(?R), do: :right
defp direction(?L), do: :left
defp take_step(:right, {heading, curr, dup, visited}), do: {turn_right(heading), curr, dup, visited}
defp take_step(:left, {heading, curr, dup, visited}), do: {turn_left(heading), curr, dup, visited}
defp take_step(:straight, {heading, curr, dup, visited}) do
pos = update_position(heading, curr)
{new_dup, new_visited} = update_visited(pos, dup, visited)
{heading, pos, new_dup, new_visited}
end
defp update_position(:north, {x, y}), do: {x, y+1}
defp update_position(:east, {x, y}), do: {x+1, y}
defp update_position(:south, {x, y}), do: {x, y-1}
defp update_position(:west, {x, y}), do: {x-1, y}
defp turn_right(:north), do: :east
defp turn_right(:east), do: :south
defp turn_right(:south), do: :west
defp turn_right(:west), do: :north
defp turn_left(:north), do: :west
defp turn_left(:west), do: :south
defp turn_left(:south), do: :east
defp turn_left(:east), do: :north
defp update_visited(point, dup, visited) do
new_dup =
case dup do
nil -> if MapSet.member?(visited, point), do: point, else: nil
x -> x
end
{new_dup, MapSet.put(visited, point)}
end
end
|
day1/lib/day1.ex
| 0.846197
| 0.799011
|
day1.ex
|
starcoder
|
defmodule Exglicko2.Player do
@moduledoc """
A single entity that can take part in a game.
Players have a `:rating`, `:deviation`, and `:volatility`.
"""
@e 2.71828182845904523536028747135266249775724709369995
@convergence_tolerance 0.000001
@glicko_conversion_factor 173.7178
@unrated_glicko_rating 1500
@enforce_keys [
:rating,
:deviation,
:volatility
]
defstruct [
:rating,
:deviation,
:volatility
]
@doc """
Returns a new `Exglicko2.Player` suited to new players.
"""
def new do
%__MODULE__{
rating: 0.0,
deviation: 2.0,
volatility: 0.06
}
end
@doc """
Returns a new `Exglicko2.Player` with the given values.
"""
def new(rating, deviation, volatility) do
%__MODULE__{
rating: rating,
deviation: deviation,
volatility: volatility
}
end
@doc """
Get the first-generation Glicko rating of a player.
## Examples
iex> Exglicko2.Player.new(0.0, 1.2, 0.06)
...> |> Exglicko2.Player.to_glicko()
{1500.0, 208.46136, 0.06}
"""
def to_glicko(%__MODULE__{rating: rating, deviation: deviation, volatility: volatility}) do
{
@glicko_conversion_factor * rating + @unrated_glicko_rating,
deviation * @glicko_conversion_factor,
volatility
}
end
@doc """
Creates a player from a first-generation Glicko rating.
## Examples
iex> Exglicko2.Player.from_glicko({1500.0, 350, 0.06})
%Exglicko2.Player{rating: 0.0, deviation: 2.014761872416068, volatility: 0.06}
"""
def from_glicko({rating, deviation, volatility}) do
new(
(rating - @unrated_glicko_rating)/@glicko_conversion_factor,
deviation/@glicko_conversion_factor,
volatility
)
end
@doc """
Creates a "composite player" from the given enumerable of ratings.
The resulting player will have a rating, deviation, and volatility that is the average of all given players.
Also accepts a single player, in which case that player is returned.
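## Examples
iex> players = [Exglicko2.Player.new(1.0, 1.0, 0.06), Exglicko2.Player.new(0.0, 2.0, 0.06)]
iex> Exglicko2.Player.composite(players)
%Exglicko2.Player{rating: 0.5, deviation: 1.5, volatility: 0.06}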
"""
def composite(players)
def composite(players) when is_list(players) do
%__MODULE__{
rating: Enum.map(players, & &1.rating) |> mean(),
deviation: Enum.map(players, & &1.deviation) |> mean(),
volatility: Enum.map(players, & &1.volatility) |> mean()
}
end
def composite(%__MODULE__{} = player) do
player
end
defp mean(values) when is_list(values) do
Enum.sum(values) / Enum.count(values)
end
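@doc """
Updates a player's rating, deviation, and volatility from a list of game
results, following the Glicko-2 rating procedure.
`results` is a list of `{opponent, score}` tuples, where `opponent` is an
`Exglicko2.Player` and `score` is the game outcome (conventionally `1` for a
win, `0.5` for a draw, and `0` for a loss). `system_constant` is the Glicko-2
system constant (tau), which constrains how quickly a player's volatility can
change.
"""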
def update_rating(%__MODULE__{deviation: deviation} = player, results, system_constant) do
player_variance = variance(player, results)
player_improvement = improvement(player, results)
new_volatility = new_volatility(player, player_variance, player_improvement, system_constant)
new_pre_rating_deviation = :math.sqrt(square(deviation) + square(new_volatility))
new_deviation = 1 / :math.sqrt((1/square(new_pre_rating_deviation)) + (1 / player_variance))
new_rating = new_rating(player, results, new_deviation)
new(new_rating, new_deviation, new_volatility)
end
defp new_rating(%__MODULE__{rating: rating}, results, new_deviation) do
sum_term =
results
|> Enum.map(fn {opponent, score} ->
g(opponent.deviation) * (score - e(rating, opponent.rating, opponent.deviation))
end)
|> Enum.sum()
rating + square(new_deviation) * sum_term
end
defp new_volatility(%__MODULE__{rating: rating, deviation: deviation, volatility: volatility}, player_variance, player_improvement, system_constant) do
f = &new_volatility_inner_template(&1, rating, deviation, player_variance, volatility, system_constant)
starting_lower_bound = ln(square(volatility))
starting_upper_bound =
if square(player_improvement) > (square(volatility) + player_variance) do
ln(square(player_improvement) - square(volatility) - player_variance)
else
k =
Stream.iterate(1, &(&1 + 1))
|> Stream.drop_while(&(f.(starting_lower_bound - &1 * system_constant) < 0))
|> Enum.at(0)
starting_lower_bound - k * system_constant
end
f_a = f.(starting_lower_bound)
f_b = f.(starting_upper_bound)
final_lower_bound =
Stream.iterate(
{starting_lower_bound, starting_upper_bound, f_a, f_b},
fn {a, b, f_a, f_b} ->
c = a + ((a - b) * f_a / (f_b - f_a))
f_c = f.(c)
if (f_c * f_b) < 0 do
{b, c, f_b, f_c}
else
{a, c, f_a/2, f_c}
end
end
)
|> Stream.drop_while(fn {a, b, _f_a, _f_b} ->
abs(b - a) > @convergence_tolerance
end)
|> Enum.at(0)
|> elem(0)
exp(final_lower_bound / 2)
end
defp new_volatility_inner_template(x, delta, phi, v, sigma, tau) do
a = ln(square(sigma))
numerator = exp(x) * (square(delta) - square(phi) - v - exp(x))
denominator = 2 * square(square(phi) + v + exp(x))
(numerator / denominator) - ((x - a) / square(tau))
end
defp improvement(player, results) do
sum = Enum.map(results, fn {opponent, score} ->
g(opponent.deviation) * (score - e(player.rating, opponent.rating, opponent.deviation))
end)
|> Enum.sum()
sum * variance(player, results)
end
defp variance(%__MODULE__{rating: rating}, results) do
sum = Enum.map(results, fn {opponent, _score} ->
square(g(opponent.deviation)) *
e(rating, opponent.rating, opponent.deviation) *
(1 - e(rating, opponent.rating, opponent.deviation))
end)
|> Enum.sum()
1 / sum
end
defp e(mu, mu_j, phi_j) do
1 / (1 + exp(-g(phi_j) * (mu - mu_j)))
end
defp g(phi) do
1/:math.sqrt(1 + (3 * square(phi) / square(pi())))
end
defp ln(x) do
:math.log(x)
end
defp pi do
:math.pi()
end
defp square(n) do
:math.pow(n, 2)
end
defp exp(n) do
:math.pow(@e, n)
end
end
|
lib/exglicko2/player.ex
| 0.936973
| 0.623721
|
player.ex
|
starcoder
|
defmodule Openstex.Swift.V1 do
@moduledoc ~S"""
Helper functions to assist in building requests for openstack compatible swift apis.
Builds a request in a format that subsequently is easily modified. The request may ultimately be sent to
an openstack/swift compliant api with a library such as `:hackney`. See
[ex_hubic](https://hex.pm/packages/ex_hubic) for an example implementation.
## Example
client = Client.Swift
account = client.swift().get_account()
Openstex.Swift.V1.account_info(account) |> client.request()
"""
alias HTTPipe.Conn
alias Openstex.Utils
alias Openstex.Transformation.{Body, Url}
# CONTAINER RELATED REQUESTS
@doc ~S"""
Get account details and containers for given account.
## Api
GET /v1/{account}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
Openstex.Swift.V1.account_info(account) |> client.request()
"""
@spec account_info(String.t()) :: Conn.t()
def account_info(account) do
req =
%HTTPipe.Request{
method: :get,
url: account
}
|> Url.add_query_string(%{"format" => "json"})
Map.put(Conn.new(), :request, req)
end
@doc ~S"""
Create a new container.
## Api
PUT /v1/{account}/{container}
## Arguments
- `container`: name of the container to be created
- `account`: account of user accessing swift service
- `opts`:
- `read_acl`: headers for the container read access control list.
- Examples:
1. For giving public read access: `[read_acl: ".r:*" ]`, *note:* `.r:` can be any of `.ref:`, `.referer:`, or `.referrer:`.
2. For giving a `*.some_website.com` read access: `[read_acl: ".r:.some_website.com"]`
3. For giving a user account read access: `[read_acl: "user_account"]`
4. See [Swift Docs](https://github.com/openstack/swift/blob/master/swift/common/middleware/acl.py#L50) for more examples
5. For giving read access and list access: `[read_acl: ".r:*,.rlistings"]`
- `write_acl`: headers for the container write access control list. *Note:* For `X-Container-Write` referrers are not supported.
- Examples:
1. For giving write access to a user account: `[write_acl: "user_account"]`
- `headers`: other metadata headers to be applied to the container.
- Examples:
1. Applying changes to the CORS restrictions for a container.
eg:
`[headers: [{"X-Container-Meta-Access-Control-Allow-Origin", "http://localhost:4000"}]]` # allowed origins to make cross-origin requests.
`[headers: [{"X-Container-Meta-Access-Control-Max-Age", "1000"}]]` # validity of preflight requests in seconds.
Other CORS headers include `X-Container-Meta-Access-Control-Allow-Headers`, `X-Container-Meta-Access-Control-Expose-Headers`
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
Openstex.Swift.V1.create_container("new_container", account) |> client.request()
"""
@spec create_container(String.t(), String.t(), Keyword.t()) :: Conn.t()
def create_container(container, account, opts \\ []) do
read_acl = Keyword.get(opts, :read_acl, nil)
write_acl = Keyword.get(opts, :write_acl, nil)
headers =
opts
|> Keyword.get(:headers, [])
|> Enum.into(%{})
additional_headers =
cond do
read_acl == nil && write_acl == nil -> %{}
read_acl != nil && write_acl == nil -> %{"X-Container-Read" => read_acl}
read_acl == nil && write_acl != nil -> %{"X-Container-Write" => write_acl}
true -> %{"X-Container-Read" => read_acl, "X-Container-Write" => write_acl}
end
headers = Map.merge(headers, additional_headers)
req =
%HTTPipe.Request{
method: :put,
url: account <> "/" <> container,
headers: headers
}
|> Url.add_query_string(%{"format" => "json"})
Map.put(Conn.new(), :request, req)
end
@doc ~S"""
Modify a container. See docs for possible changes to [container metadata](http://developer.openstack.org/api-ref-objectstorage-v1.html)
which are achieved by sending changes in the request headers.
## Api
POST /v1/{account}/{container}
## Arguments
- `container`: name of the container to be created
- `account`: account of user accessing swift service
- `opts`:
- `read_acl`: headers for the container read access control list.
- Examples:
1. For giving public read access: `[read_acl: ".r:*" ]`
2. For giving a `*.some_website.com` read access: `[read_acl: ".r:.some_website.com"]`
3. For giving a user account read access: `[read_acl: "user_account"]`
4. See [Swift Docs](https://github.com/openstack/swift/blob/master/swift/common/middleware/acl.py#L50) for more examples
5. For giving read access and list access: `[read_acl: ".r:*,.rlistings"]`
- `write_acl`: headers for the container write access control list.
- Example:
1. For giving write access to a user account: `[write_acl: "user_account"]`
- `headers`: other metadata headers to be applied to the container.
- Examples:
1. Applying changes to the CORS restrictions for a container.
eg:
`[headers: [{"X-Container-Meta-Access-Control-Allow-Origin", "http://localhost:4000"}]]` # allowed origins to make cross-origin requests.
`[headers: [{"X-Container-Meta-Access-Control-Max-Age", "1000"}]]` # validity of preflight requests in seconds.
Other CORS headers include `X-Container-Meta-Access-Control-Allow-Headers`, `X-Container-Meta-Access-Control-Expose-Headers`
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
headers = []
Openstex.Swift.V1.modify_container("new_container", account, headers) |> client.request()
"""
@spec modify_container(String.t(), String.t(), Keyword.t()) :: Conn.t()
def modify_container(container, account, opts \\ []) do
container
|> create_container(account, opts)
|> Conn.put_req_method(:post)
end
@doc ~S"""
Delete a container
## Api
DELETE /v1/{account}/{container}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
    Openstex.Swift.V1.delete_container("new_container", account) |> client.request()
"""
@spec delete_container(String.t(), String.t()) :: Conn.t()
def delete_container(container, account) do
req =
%HTTPipe.Request{
method: :delete,
url: account <> "/" <> container
}
|> Url.add_query_string(%{"format" => "json"})
Map.put(Conn.new(), :request, req)
end
@doc ~S"""
Get information about the container
## Api
    HEAD /v1/{account}/{container}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
    Openstex.Swift.V1.container_info("new_container", account) |> client.request()
"""
@spec container_info(String.t(), String.t()) :: Conn.t()
def container_info(container, account) do
req =
%HTTPipe.Request{
method: :head,
url: account <> "/" <> container
}
|> Url.add_query_string(%{"format" => "json"})
Map.put(Conn.new(), :request, req)
end
# OBJECT RELATED REQUESTS
@doc ~S"""
List objects in a container
## Api
GET /v1/{account}/{container}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
Openstex.Swift.V1.get_objects("new_container", account) |> client.request()
"""
@spec get_objects(String.t(), String.t()) :: Conn.t()
def get_objects(container, account) do
req =
%HTTPipe.Request{
method: :get,
url: account <> "/" <> container
}
|> Url.add_query_string(%{"format" => "json"})
Map.put(Conn.new(), :request, req)
end
@doc ~S"""
Get/Download a specific object (file)
## Api
GET /v1/{account}/{container}/{object}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
server_object = "server_file.txt"
container = "new_container"
    Openstex.Swift.V1.get_object(server_object, container, account) |> client.request()
## Arguments
  - `server_object`: the path of the object on the server
  - `container`: the container holding the object on the server
- `account`: The account accessing the object
- `opts`:
  - `headers`: additional header metadata for the request. Eg `[headers: [{"If-None-Match", "<local_file_md5>"}]]`;
  this example would return `304` if the local file md5 was the same as the object etag on the server.
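  For example, to avoid re-downloading an unchanged object (`<local_file_md5>` is a placeholder):

      opts = [headers: [{"If-None-Match", "<local_file_md5>"}]]
      Openstex.Swift.V1.get_object(server_object, container, account, opts) |> client.request()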
"""
@spec get_object(String.t(), String.t(), String.t(), Keyword.t()) :: Conn.t()
def get_object(server_object, container, account, opts \\ []) do
headers =
opts
|> Keyword.get(:headers, [])
|> Enum.into(%{})
server_object = Utils.remove_if_has_trailing_slash(server_object)
req = %HTTPipe.Request{
method: :get,
url: account <> "/" <> container <> "/" <> server_object,
headers: headers
}
Map.put(Conn.new(), :request, req)
end
@doc """
Create or replace an object (file).
## Api
PUT /v1/{account}/{container}/{object}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
container = "new_container"
object_name = "client_file.txt"
client_object_pathname = Kernel.to_string(:code.priv_dir(:openstex)) <> "/" <> object_name
Openstex.Swift.V1.create_object(container, account, client_object_pathname, [server_object: "server_file.txt"])
    |> client.request()
## Arguments
- `container`: container to upload the file to
- `account`: account uploading the file.
- `client_object_pathname`: path of the file being uploaded.
- `opts`:
  - `server_object`: name under which the file will be stored on the openstack object storage server. Defaults to the basename of `client_object_pathname` if none is given.
- `multipart_manifest`: Defaults to `:false`. If `:true`, adds `multipart-manifest=put` to the query string. This option should be set to `:true` when uploading the manifest for a large static object.
- `x_object_manifest`: Relevant to dynamic upload of large objects. Defaults to `:false`. If set, modifies the `X-Object-Manifest` header. The format used should be `[x_object_manifest: "container/myobject/"]`.
  - `chunked_transfer`: Defaults to `:false`; if `:true`, sets the `Transfer-Encoding` header to `chunked`.
- `content_type`: Defaults to `:false`, otherwise changes the `Content-Type` header, which changes the MIME type for the object. Eg, `[content_type: "image/jpeg"]`
  - `x_detect_content_type`: Defaults to `:false`; if set, adds the `X-Detect-Content-Type` header so that the `Content-Type` header is ignored and the actual file MIME type is autodetected. Eg, `[x_detect_content_type: :true]`
- `e_tag`: Defaults to `:true`, if `:true`, sets the `ETag` header of the file. Enhances upload integrity. If set to `:false`, the `ETag` header will be excluded.
  - `content_disposition`: Defaults to `:false`. Otherwise the `Content-Disposition` header can be changed from the default browser behaviour `inline` to another value. Eg `[content_disposition: "attachment; filename=my_file.pdf"]`
- `delete_after`: Defaults to `:false`. Otherwise the `X-Delete-After` header can be added so that the object is deleted after n seconds. Eg `[delete_after: (24 * 60 * 60)]` will delete the object in 1 day.
## Notes
See the openstack docs for more information relating to [object uploads](http://docs.openstack.org/developer/swift/api/object_api_v1_overview.html) and
[large object uploads](http://docs.openstack.org/developer/swift/overview_large_objects.html).
For uploading large objects, the operation typically involves multiple queries so a [Helper function](https://github.com/stephenmoloney/openstex/lib/swift/v1/helpers.ex) is planned
for large uploads.
Large objects are categorized as those over 5GB in size.
There are two ways of uploading large files - dynamic uploads and static uploads. See [here](http://docs.openstack.org/developer/swift/overview_large_objects.html#direct-api) for more information.
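  A sketch of a dynamic large-object upload using the `x_object_manifest` option
  above (container, prefix and file names are hypothetical): each segment is
  uploaded under a common prefix, then a zero-byte manifest object pointing at
  that prefix is created.

      Openstex.Swift.V1.create_object("my_container", account, "/tmp/segment_1",
        server_object: "myobject/000001") |> client.request()
      Openstex.Swift.V1.create_object("my_container", account, "/tmp/empty_file",
        server_object: "myobject", x_object_manifest: "my_container/myobject/") |> client.request()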
"""
@spec create_object(String.t(), String.t(), String.t(), list) :: Conn.t() | File.posix()
def create_object(container, account, client_object_pathname, opts \\ []) do
server_object = Keyword.get(opts, :server_object, Path.basename(client_object_pathname))
# headers
x_object_manifest = Keyword.get(opts, :x_object_manifest, false)
x_object_manifest =
if x_object_manifest != false, do: URI.encode(x_object_manifest), else: x_object_manifest
chunked_transfer = Keyword.get(opts, :chunked_transfer, false)
content_type = Keyword.get(opts, :content_type, false)
x_detect_content_type = Keyword.get(opts, :x_detect_content_type, false)
content_disposition = Keyword.get(opts, :content_disposition, false)
delete_after = Keyword.get(opts, :delete_after, false)
e_tag = Keyword.get(opts, :e_tag, true)
# query_string
multipart_manifest = Keyword.get(opts, :multipart_manifest, false)
case File.read(client_object_pathname) do
{:ok, binary_object} ->
path = account <> "/" <> container <> "/" <> server_object
multipart_querystring =
case multipart_manifest do
true -> %{"multipart-manifest" => "put"}
false -> %{}
end
headers =
if x_object_manifest != false, do: [{"X-Object-Manifest", x_object_manifest}], else: []
headers =
if chunked_transfer != false,
do: headers ++ [{"Transfer-Encoding", "chunked"}],
else: headers
headers =
if content_type != false, do: headers ++ [{"Content-Type", content_type}], else: headers
headers =
if x_detect_content_type != false,
do: headers ++ [{"X-Detect-Content-Type", "true"}],
else: headers
headers =
if e_tag != false,
do: headers ++ [{"ETag", Base.encode16(:erlang.md5(binary_object), case: :lower)}],
else: headers
headers =
if content_disposition != false,
do: headers ++ [{"Content-Disposition", content_disposition}],
else: headers
headers =
if delete_after != false,
do: headers ++ [{"X-Delete-After", delete_after}],
else: headers
req = %HTTPipe.Request{
method: :put,
url: path,
headers:
headers
|> Enum.map(fn {k, v} -> {String.downcase(k), v} end)
|> Enum.into(%{})
}
req =
req
|> Url.add_query_string(%{"format" => "json"})
|> Url.add_query_string(multipart_querystring)
Conn.new()
|> Map.put(:request, req)
|> Body.apply(binary_object)
{:error, posix_error} ->
posix_error
end
end
@doc """
Delete an Object (Delete a file)
## Api
DELETE /v1/{account}/{container}/{object}
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
container = "new_container"
server_object = "server_file.txt"
    Openstex.Swift.V1.delete_object(server_object, container, account) |> client.request()
"""
@spec delete_object(String.t(), String.t(), String.t()) :: Conn.t()
def delete_object(server_object, container, account) do
server_object = Utils.remove_if_has_trailing_slash(server_object)
req = %HTTPipe.Request{
method: :delete,
url: account <> "/" <> container <> "/" <> server_object
}
Map.put(Conn.new(), :request, req)
end
# PSEUDOFOLDER RELATED REQUESTS
@doc """
  List all objects and pseudofolders in a pseudofolder for a given container.
## Api
GET /v1/{account}/{container}?prefix=pseudofolder&delimiter=/
## Notes
- Query for only the top level objects and pseudofolders
- Query execution will *not* return nested objects and pseudofolders
- In order to view nested objects and pseudofolders, the function should be called recursively. See
`Openstex.Helpers.list_pseudofolders_recursively/2` and `Openstex.Helpers.list_all_objects/3`.
## Example
as implemented by a client from the `ExOvh` library
client = Client.Swift
account = client.swift().get_account()
    Openstex.Swift.V1.get_objects_in_folder("test_folder/", "default", account) |> client.request()
"""
@spec get_objects_in_folder(String.t(), String.t(), String.t()) :: Conn.t()
def get_objects_in_folder(pseudofolder \\ "", container, account) do
conn = get_objects(container, account)
request = Map.fetch!(conn, :request)
request =
request
|> Url.add_query_string(%{"delimiter" => "/"})
|> Url.add_query_string(%{"prefix" => pseudofolder})
Map.put(conn, :request, request)
end
end
|
lib/openstex/swift/v1.ex
| 0.873963
| 0.419172
|
v1.ex
|
starcoder
|
defmodule ExUnit.CaseTemplate do
@moduledoc """
Defines a module template to be used throughout your test suite.
This is useful when there are a set of setup callbacks or a set
of functions that should be shared between test modules.
Let's imagine that you create a `MyCase` module that calls `use
ExUnit.CaseTemplate`. When a test case module calls `use MyCase`, the
following things hold true:
* All the functionality that `MyCase` would have had available from
`use ExUnit.Case` is available (same as if `MyCase` called `use
ExUnit.Case` directly)
* All the functions and macros defined in `MyCase` are imported into
the test case
* The `setup` and `setup_all` callbacks that you define in `MyCase`
get used in the test case module
  The options that you pass to `use MyCase` are also passed to `use
  ExUnit.Case` under the hood. This means you can do things like `use
  MyCase, async: true`. You can also access these options in `using/2`.
## Example
defmodule MyCase do
use ExUnit.CaseTemplate
setup do
IO.puts("This will run before each test that uses this case")
end
end
defmodule MyTest do
use MyCase, async: true
test "truth" do
assert true
end
end
If you need to "hook" into `use MyCase` and do other things as well,
you can use the `using/2` macro. See its documentation for more
information and examples.
defmodule MyCase do
use ExUnit.CaseTemplate
using do
quote do
import MyApp.TestHelpers
end
end
end
"""
@doc false
defmacro __using__(_) do
quote do
use ExUnit.Callbacks
import ExUnit.Assertions
import unquote(__MODULE__)
defmacro __using__(opts) do
unquote(__MODULE__).__proxy__(__MODULE__, opts)
end
defoverridable __using__: 1
end
end
@doc false
# We inject this code in the module that calls "use MyTemplate".
def __proxy__(module, opts) do
quote do
use ExUnit.Case, unquote(opts)
setup_all context do
unquote(module).__ex_unit__(:setup_all, context)
end
setup context do
unquote(module).__ex_unit__(:setup, context)
end
end
end
@doc """
Allows a developer to customize the using block
when the case template is used.
You can use an optional `var` argument when calling `using/2`. ExUnit
will pass whatever argument you pass to `use MyCase` as this `var` argument. See the examples below for clarification.
## Example
defmodule MyCase do
use ExUnit.CaseTemplate
using do
quote do
# This code is injected into every case that calls "use MyCase"
alias MyApp.FunModule
end
end
end
You can specify an argument to `using/2`:
defmodule MyCase do
use ExUnit.CaseTemplate
using options do
quote do
if unquote(options)[:import_helpers] do
import MyApp.TestHelpers
end
end
end
end
The second argument passed to `use MyCase` gets forwarded to `using/2` too:
defmodule SomeTestCase do
      use MyCase, async: true, import_helpers: true
test "the truth" do
# truth/0 comes from MyApp.TestHelpers:
assert truth()
end
end
> #### Sharing options with `use ExUnit.Case` {: .warning}
>
> The second argument that you pass to `use MyCase` is *also* passed
> as the second argument to `use ExUnit.Case`.
"""
defmacro using(var \\ quote(do: _), do: block) do
quote do
defmacro __using__(unquote(var) = opts) do
parent = unquote(__MODULE__).__proxy__(__MODULE__, opts)
result = unquote(block)
{:__block__, [], [parent, result]}
end
end
end
end
|
lib/ex_unit/lib/ex_unit/case_template.ex
| 0.851119
| 0.641892
|
case_template.ex
|
starcoder
|
defmodule Lapin.Connection do
@moduledoc """
RabbitMQ connection handler
This module handles the RabbitMQ connection. It also provides a behaviour for
worker module implementation. The worker module should use the `Lapin.Connection`
behaviour and implement the callbacks it needs.
When using the `Lapin.Connection` behaviour a `publish/4` function is injected in
the worker module as a shortcut to the `Lapin.Connection.publish/5` function
which removes the need for passing in the connection and is publicly callable
to publish messages on the connection configured for the implementing module.
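  For example (worker module, exchange and routing key names are assumptions):

      defmodule MyWorker do
        use Lapin.Connection
      end

      MyWorker.publish("my_exchange", "my.routing.key", "hello")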
"""
use Connection
require Logger
alias AMQP.Channel
alias Lapin.{Consumer, Exchange, Message, Producer, Queue}
alias Lapin.Message.Payload
@typedoc """
Connection configuration
The following keys are supported:
- module: module using the `Lapin.Connection` behaviour
- uri: AMQP URI (String.t | URI.t)
- host: broker hostname (string | charlist), *default: 'localhost'*
- port: broker port (string | integer), *default: 5672*
- virtual_host: broker vhost (string), *default: "/"*
- username: username (string)
- password: password (string)
- auth_mechanisms: broker auth_mechanisms ([:amqplain | :external | :plain]), *default: amqp_client default*
- ssl_options: ssl options ([:ssl:ssl_option]), *default: none*
- producers: producers to configure ([Producer.config]), *default: []*
- consumers: consumers to configure ([Consumer.config]), *default: []*
"""
@type config :: [consumers: [Consumer.config()], producers: [Producer.config()]]
@typedoc "Connection"
@type t :: GenServer.server()
@typedoc "Callback result"
@type on_callback :: :ok | {:error, message :: String.t()}
@typedoc "Reason for message rejection"
@type reason :: term
@typedoc "`handle_deliver/2` callback result"
@type on_deliver :: :ok | {:reject, reason} | term
@doc """
Called when receiving a `basic.cancel` from the broker.
"""
@callback handle_cancel(Channel.t()) :: on_callback
@doc """
Called when receiving a `basic.cancel_ok` from the broker.
"""
@callback handle_cancel_ok(Channel.t()) :: on_callback
@doc """
Called when receiving a `basic.consume_ok` from the broker.
  This signals successful registration as a consumer.
"""
@callback handle_consume_ok(Channel.t()) :: on_callback
@doc """
Called when receiving a `basic.deliver` from the broker.
Return values from this callback determine message acknowledgement:
- `:ok`: Message was processed by the consumer and should be removed from queue
- `{:reject, reason}`: Message was not processed and should be rejected
Any other return value requeues the message to prevent data loss.
A crash in the callback code will however reject the message to prevent loops
if the message was already delivered before.
The `reason` term can be used by the application
to signal the reason of rejection and is logged in debug.
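  For example, a consumer might acknowledge only messages it can decode
  (a sketch; `MyDecoder` is a hypothetical module):

      def handle_deliver(_consumer, %Lapin.Message{payload: payload}) do
        case MyDecoder.decode(payload) do
          {:ok, _decoded} -> :ok
          {:error, reason} -> {:reject, reason}
        end
      end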
"""
@callback handle_deliver(Channel.t(), Message.t()) :: on_deliver
@doc """
Called when completing a `basic.publish` with the broker.
Message transmission to the broker is successful when this callback is called.
"""
@callback handle_publish(Channel.t(), Message.t()) :: on_callback
@doc """
Called when receiving a `basic.return` from the broker.
This signals an undeliverable returned message from the broker.
"""
@callback handle_return(Channel.t(), Message.t()) :: on_callback
@doc """
Called before `handle_deliver/2` to get the payload type.
Should return a data type instance to decode the payload into.
A `Lapin.Message.Payload` implementation must be provided for this type. The
default implementation leaves the payload unaltered.
"""
@callback payload_for(Channel.t(), Message.t()) :: Payload.t()
defmacro __using__(_) do
quote do
alias Lapin.{Consumer, Message}
@behaviour Lapin.Connection
def handle_cancel(_consumer), do: :ok
def handle_cancel_ok(_consumer), do: :ok
def handle_consume_ok(_consumer), do: :ok
def handle_deliver(_consumer, _message), do: :ok
def handle_publish(_consumer, _message), do: :ok
def handle_return(_consumer, _message), do: :ok
def payload_for(_consumer, _message), do: <<>>
defoverridable Lapin.Connection
def publish(exchange, routing_key, message, options \\ []) do
Lapin.Connection.publish(__MODULE__, exchange, routing_key, message, options)
end
end
end
@backoff 1_000
@connection_default_params [connection_timeout: @backoff]
@default_rabbitmq_host 'localhost'
@default_rabbitmq_port 5672
@doc """
Starts a `Lapin.Connection` with the specified configuration
"""
@spec start_link(config, options :: GenServer.options()) :: GenServer.on_start()
def start_link(configuration, options \\ []) do
{:ok, configuration} = cleanup_configuration(configuration)
Connection.start_link(__MODULE__, configuration, options)
end
def init(configuration) do
Process.flag(:trap_exit, true)
{:connect, :init,
%{configuration: configuration, consumers: [], producers: [], connection: nil, module: nil}}
end
@doc """
Closes the connection
"""
@spec close(connection :: t) :: on_callback()
def close(connection), do: GenServer.stop(connection)
def terminate(_reason, %{connection: nil}), do: :ok
def terminate(_reason, %{connection: connection}) do
AMQP.Connection.close(connection)
end
@doc """
Publishes a message to the specified exchange with the given routing_key
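  For example (the connection, exchange and routing key are assumptions):

      :ok = Lapin.Connection.publish(conn, "my_exchange", "my.routing.key", "hello", mandatory: true)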
"""
@spec publish(
connection :: t(),
String.t(),
String.t(),
Payload.t(),
options :: Keyword.t()
) :: on_callback
def publish(connection, exchange, routing_key, payload, options \\ []) do
Connection.call(connection, {:publish, exchange, routing_key, payload, options})
end
def handle_call(
{:publish, _exchange, _routing_key, _payload, _options},
_from,
%{connection: nil} = state
) do
{:reply, {:error, :not_connected}, state}
end
def handle_call(
{:publish, exchange, routing_key, payload, options},
_from,
%{producers: producers, module: module} = state
) do
with {:ok, %Producer{pattern: pattern} = producer} <- Producer.get(producers, exchange),
mandatory <- pattern.mandatory(producer),
persistent <- pattern.persistent(producer),
options <- Keyword.merge([mandatory: mandatory, persistent: persistent], options),
meta <- %{content_type: Payload.content_type(payload)},
{:ok, payload} <- Payload.encode(payload),
:ok <- Producer.publish(producer, exchange, routing_key, payload, options) do
message = %Message{meta: Enum.into(options, meta), payload: payload}
if not pattern.confirm(producer) or Producer.confirm(producer) do
Logger.debug(fn -> "Published #{inspect(message)} on #{inspect(producer)}" end)
{:reply, module.handle_publish(producer, message), state}
else
error = "Error publishing #{inspect(message)}"
Logger.debug(fn -> error end)
{:reply, {:error, error}, state}
end
else
{:error, error} ->
Logger.debug(fn -> "Error sending message: #{inspect(error)}" end)
{:reply, {:error, error}, state}
end
end
def handle_info(
{:basic_cancel, %{consumer_tag: consumer_tag}},
%{consumers: consumers, module: module} = state
) do
case Consumer.get(consumers, consumer_tag) do
{:ok, consumer} ->
Logger.debug(fn -> "Broker cancelled consumer for #{inspect(consumer)}" end)
module.handle_cancel(consumer)
{:error, :not_found} ->
Logger.warn(
"Broker cancelled consumer_tag '#{consumer_tag}' for locally unknown consumer"
)
end
{:stop, :normal, state}
end
def handle_info(
{:basic_cancel_ok, %{consumer_tag: consumer_tag}},
%{consumers: consumers, module: module} = state
) do
with {:ok, consumer} <- Consumer.get(consumers, consumer_tag),
:ok <- module.handle_cancel_ok(consumer) do
Logger.debug(fn -> "Broker confirmed cancelling consumer for #{inspect(consumer)}" end)
else
{:error, :not_found} ->
Logger.debug(fn ->
"Broker confirmed cancelling consumer for locally unknown tag '#{consumer_tag}'"
end)
error ->
Logger.error("Error handling broker cancel for '#{consumer_tag}': #{inspect(error)}")
end
{:noreply, state}
end
def handle_info(
{:basic_consume_ok, %{consumer_tag: consumer_tag}},
%{consumers: consumers, module: module} = state
) do
with {:ok, consumer} <- Consumer.get(consumers, consumer_tag),
:ok <- module.handle_consume_ok(consumer) do
Logger.debug(fn -> "Broker registered consumer for #{inspect(consumer)}" end)
else
{:error, :not_found} ->
Logger.warn(
"Broker registered consumer_tag '#{consumer_tag}' for locally unknown consumer"
)
error ->
Logger.error("Error handling broker register for '#{consumer_tag}': #{inspect(error)}")
end
{:noreply, state}
end
def handle_info(
{:basic_return, payload, %{exchange: exchange} = meta},
%{producers: producers, module: module} = state
) do
message = %Message{meta: meta, payload: payload}
with {:ok, producer} <- Producer.get(producers, exchange),
:ok <- module.handle_return(producer, message) do
Logger.debug(fn -> "Broker returned message #{inspect(message)}" end)
else
{:error, :not_found} ->
Logger.warn("Broker returned message #{inspect(message)} for locally unknown channel")
error ->
Logger.debug(fn -> "Error handling returned message: #{inspect(error)}" end)
end
{:noreply, state}
end
def handle_info({:DOWN, _, :process, _pid, _reason}, state) do
Logger.warn("Connection down, restarting...")
{:stop, :normal, state}
end
def handle_info(
{:basic_deliver, payload, %{consumer_tag: consumer_tag} = meta},
%{consumers: consumers, module: module} = state
) do
message = %Message{meta: meta, payload: payload}
case Consumer.get(consumers, consumer_tag) do
{:ok, consumer} ->
spawn(fn -> consume(module, consumer, message) end)
{:error, :not_found} ->
Logger.error("Error processing message #{inspect(message)}, no local consumer")
end
{:noreply, state}
end
defp consume(
module,
%Consumer{pattern: pattern} = consumer,
%Message{
meta: %{delivery_tag: delivery_tag, redelivered: redelivered} = meta,
payload: payload
} = message
) do
with ack <- pattern.ack(consumer),
payload_for <- module.payload_for(consumer, message),
content_type <- Payload.content_type(payload_for),
meta <- Map.put(meta, :content_type, content_type),
{:ok, payload} <- Payload.decode_into(payload_for, payload),
message <- %Message{message | meta: meta, payload: payload},
:ok <- module.handle_deliver(consumer, message) do
Logger.debug(fn -> "Consuming message #{delivery_tag}" end)
consume_ack(ack, consumer, delivery_tag)
else
{:reject, reason} ->
case Consumer.reject_message(consumer, delivery_tag, false) do
:ok ->
Logger.error("Rejected message #{delivery_tag}: #{inspect(reason)}")
:ok
{:error, reason} ->
Logger.debug("Failed rejecting message #{delivery_tag}: #{inspect(reason)}")
end
reason ->
case Consumer.reject_message(consumer, delivery_tag, not redelivered) do
:ok ->
Logger.error("Rejected message #{delivery_tag}: #{inspect(reason)}")
:ok
{:error, reason} ->
Logger.debug("Failed rejecting message #{delivery_tag}: #{inspect(reason)}")
end
end
rescue
exception ->
case Consumer.reject_message(consumer, delivery_tag, not redelivered) do
:ok ->
Logger.error(
"Rejected message #{delivery_tag}: #{Exception.format(:error, exception, __STACKTRACE__)}"
)
:ok
{:error, reason} ->
Logger.debug("Failed rejecting message #{delivery_tag}: #{inspect(reason)}")
end
end
defp consume_ack(true = _consumer_ack, consumer, delivery_tag) do
case Consumer.ack_message(consumer, delivery_tag) do
:ok ->
Logger.debug("Consumed message #{delivery_tag}, ACK sent")
:ok
error ->
Logger.debug("ACK failed for message #{delivery_tag}")
error
end
end
defp consume_ack(false = _ack, _channel, delivery_tag) do
Logger.debug(fn -> "Consumed message #{delivery_tag}, ACK not required" end)
:ok
end
def connect(_info, %{configuration: configuration} = state) do
module = Keyword.get(configuration, :module)
with configuration <- Keyword.merge(@connection_default_params, configuration),
{:ok, connection} <- AMQP.Connection.open(configuration),
{:ok, config_channel} <- Channel.open(connection),
{:ok, exchanges} <- declare_exchanges(configuration, config_channel),
{:ok, queues} <- declare_queues(configuration, config_channel),
:ok <- bind_exchanges(exchanges, config_channel),
:ok <- bind_queues(queues, config_channel),
{:ok, producers} <- create_producers(configuration, connection),
{:ok, consumers} <- create_consumers(configuration, connection),
:ok <- Channel.close(config_channel) do
Process.monitor(connection.pid)
{:ok,
%{
state
| module: module,
producers: producers,
consumers: consumers,
connection: connection
}}
else
{:error, error} ->
Logger.error(fn ->
"Connection error: #{inspect(error)} for #{module}, backing off for #{@backoff}"
end)
{:backoff, @backoff, state}
end
end
defp declare_exchanges(configuration, channel) do
exchanges =
configuration
|> Keyword.get(:exchanges, [])
|> Enum.map(fn {name, options} ->
name
|> Atom.to_string()
|> Exchange.new(options)
end)
{Enum.each(exchanges, &Exchange.declare(&1, channel)), exchanges}
end
defp bind_exchanges(exchanges, channel), do: Enum.each(exchanges, &Exchange.bind(&1, channel))
defp declare_queues(configuration, channel) do
queues =
configuration
|> Keyword.get(:queues, [])
|> Enum.map(fn {name, options} ->
name
|> Atom.to_string()
|> Queue.new(options)
end)
{Enum.each(queues, &Queue.declare(&1, channel)), queues}
end
defp bind_queues(queues, channel), do: Enum.each(queues, &Queue.bind(&1, channel))
defp create_producers(configuration, connection) do
producers =
configuration
|> Keyword.get(:producers, [])
|> Enum.map(&Producer.create(connection, &1))
{:ok, producers}
end
defp create_consumers(configuration, connection) do
consumers =
configuration
|> Keyword.get(:consumers, [])
|> Enum.map(&Consumer.create(connection, &1))
{:ok, consumers}
end
defp cleanup_configuration(configuration) do
with :ok <- check_mandatory_params(configuration, [:module]),
{uri, configuration} <-
Keyword.get_and_update(configuration, :uri, fn uri ->
{map_uri(uri), :pop}
end),
configuration <- Keyword.merge(configuration, uri),
{_, configuration} <-
Keyword.get_and_update(configuration, :host, fn host ->
{host, map_host(host)}
end),
{_, configuration} <-
Keyword.get_and_update(configuration, :port, fn port ->
{port, map_port(port)}
end),
{_, configuration} <-
Keyword.get_and_update(configuration, :virtual_host, fn vhost ->
{vhost, map_vhost(vhost)}
end),
{_, configuration} <-
Keyword.get_and_update(configuration, :auth_mechanisms, fn
mechanisms when is_list(mechanisms) ->
{mechanisms, Enum.map(mechanisms, &map_auth_mechanism(&1))}
_ ->
:pop
end) do
{:ok, configuration}
else
{:error, :missing_params, missing_params} ->
params = Enum.join(missing_params, ", ")
error =
"Error creating connection #{inspect(configuration)}: missing mandatory params: #{params}"
Logger.error(error)
{:error, error}
end
end
defp map_uri(nil), do: []
defp map_uri(uri) when is_binary(uri) do
uri
|> URI.parse()
|> map_uri()
end
defp map_uri(%URI{} = uri) do
uri
|> Map.from_struct()
|> Enum.to_list()
|> uri_to_list()
end
defp uri_to_list(uri) when is_list(uri) do
with {path, uri} <- Keyword.pop(uri, :path),
{userinfo, uri} <- Keyword.pop(uri, :userinfo),
uri <- Keyword.drop(uri, [:authority, :query, :fragment, :scheme]),
[username, password] <- map_userinfo(userinfo) do
uri
|> Keyword.put(:virtual_host, map_vhost(path))
|> Keyword.put(:username, username)
|> Keyword.put(:password, password)
|> Enum.reject(fn {_k, v} -> v === nil end)
end
end
defp map_userinfo(userinfo) when is_binary(userinfo) do
parts =
userinfo
|> String.split(":", parts: 2)
[Enum.at(parts, 0), Enum.at(parts, 1)]
end
defp map_userinfo(_), do: [nil, nil]
defp map_vhost(nil), do: "/"
defp map_vhost(path) do
case String.replace_leading(path, "/", "") do
"" -> "/"
vhost -> vhost
end
end
defp map_auth_mechanism(:amqplain), do: &:amqp_auth_mechanisms.amqplain/3
defp map_auth_mechanism(:external), do: &:amqp_auth_mechanisms.external/3
defp map_auth_mechanism(:plain), do: &:amqp_auth_mechanisms.plain/3
defp map_auth_mechanism(auth_mechanism), do: auth_mechanism
defp map_host(nil), do: @default_rabbitmq_host
defp map_host(host) when is_binary(host), do: String.to_charlist(host)
defp map_host(host), do: host
defp map_port(nil), do: @default_rabbitmq_port
defp map_port(port) when is_binary(port), do: String.to_integer(port)
defp map_port(port), do: port
defp check_mandatory_params(configuration, params) do
if Enum.all?(params, &Keyword.has_key?(configuration, &1)) do
:ok
else
missing_params = Enum.reject(params, &Keyword.has_key?(configuration, &1))
{:error, :missing_params, missing_params}
end
end
end
|
lib/lapin/connection.ex
| 0.85223
| 0.478041
|
connection.ex
|
starcoder
|
defmodule RateLimit do
@moduledoc """
RateLimit provides an API for rate limiting HTTP requests based on the requester IP.
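  ## Example

  A sketch of typical use; the `:per_minute` period value is an assumption
  about what `TimeSource.interval/1` accepts:

      :ok = RateLimit.setup(:api, 100, :per_minute)

      case RateLimit.check(:api, "203.0.113.7") do
        {:ok, _remaining, _next_reset} -> :allow
        {:rate_exceeded, 0, _next_reset} -> :deny
        :rate_not_set -> :rate_not_set
      end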
"""
use GenServer
@doc """
Setup rate limiting for a specific scope.
We use scopes to allow namespacing of rate limiting, so multiple consumers can
use the same rate limit gen_server
"""
def setup(scope, rate_limit, rate_period) do
{:ok, _pid} = RateLimitSup.start_child(scope, rate_limit, rate_period)
:ok
end
@doc """
Perform check on requesters IP
"""
def check(scope, ip) do
result = RateLimitETS.update_counter(scope, ip)
count_result(result)
end
@doc """
Return the rate_limit for scope
"""
def limit(scope) do
RateLimitETS.limit(scope)
end
@doc """
GenServer start_link callback
"""
def start_link([scope, limit, period]) do
GenServer.start_link(__MODULE__, {scope, limit, period}, [])
end
@doc """
  GenServer start_link callback accepting `scope`, `limit` and `period` as separate arguments
"""
def start_link(scope, limit, period) do
GenServer.start_link(__MODULE__, {scope, limit, period}, [])
end
@doc """
Callback from GenServer to initialise state
"""
def init({scope, limit, period} = state) do
# Create and initialise counters for scope
RateLimitETS.init_counters(scope, limit, period)
# Use Erlang's OTP timer functionality to notify us
# when our time period has elapsed.
{:ok, _} = :timer.send_interval(TimeSource.interval(period), :reset_counters)
{:ok, state}
end
def handle_info(:reset_counters, []) do
{:noreply, []}
end
def handle_info(:reset_counters, {scope, _, _} = state) do
# Callback handling for periodic reset_counters
RateLimitETS.reset_counters(scope)
{:noreply, state}
end
def handle_call(_, _, state) do
{:noreply, state}
end
def handle_cast(_, state) do
{:noreply, state}
end
@doc """
Translate the record returned from ETS into the API types.
"""
def count_result(r) do
case r do
{count, limit, next_reset} when count == limit ->
{:rate_exceeded, 0, next_reset}
{count, limit, next_reset} ->
{:ok, limit - count - 1, next_reset}
:rate_not_set ->
:rate_not_set
end
end
end
|
lib/rate_limit.ex
| 0.817319
| 0.529203
|
rate_limit.ex
|
starcoder
|
defmodule Tock do
@moduledoc """
Tock is a library for mocking remote function calls made by `Task.Supervisor`.
## Usage
When working in a distributed system `Task.Supervisor` provides a mechanism
for calling functions on a remote node.
{ MyRemoteTaskSupervisor, remote_node }
|> Task.Supervisor.async(MyRemoteModule, :remote_fun, [])
|> Task.await()
Tock allows you to easily mock a remote application. This eliminates the need
to mock your own code. Instead, mock the behavior of an application running on
a remote node.
use ExUnit.Case, async: true
test "invokes add on a remote node" do
MyRemoteTaskSupervisor
|> Tock.start()
|> Tock.expect(MyRemoteMod, :add, fn(x, y) -> x + y end)
assert { MyRemoteTaskSupervisor, node() }
             |> Task.Supervisor.async(MyRemoteMod, :add, [2, 3])
|> Task.await() == 5
end
All expectations are defined based on the current process. This allows
multiple tests to run concurrently when using the same named
`Task.Supervisor`.
"""
@doc """
Expects `fun` on `module` with an arity defined by `code` to be invoked `n`
times.
When `expect/5` is invoked, any previously declared stub for the same module,
function and arity will be removed. This will ensure that a remote function
called more than `n` times will timeout. If a `stub/4` is invoked after
`expect/5` for the same `module`, `fun` and arity, the stub will be used after
all expectations are fulfilled.
## Examples
Expect `MyRemoteMod.add/2` to be called once:
expect(MyRemoteTaskSupervisor, MyRemoteMod, :add, fn(x, y) -> x + y end)
Expect `MyRemoteMod.add/2` to be called 5 times:
expect(MyRemoteTaskSupervisor, MyRemoteMod, :add, 5, fn(x, y) -> x + y end)
`expect/5` can also be invoked multiple times for the same `module`, `fun` and
arity allowing you to define different results on each call:
MyRemoteTaskSupervisor
|> expect(MyRemoteMod, :add, fn(x, y) -> x + y end)
|> expect(MyRemoteMod, :add, fn(x, y) -> x * y end)
"""
@spec expect(atom | pid, module, atom, non_neg_integer, fun) :: pid
def expect(tock, module, fun, n \\ 1, code) do
signature = { module, fun, :erlang.fun_info(code)[:arity] }
expects = { signature, List.duplicate({ :expect, code }, n) }
Tock.Server.put_expectation(tock, expects)
end
@doc """
Start a mock `Task.Supervisor`.
"""
@spec start(atom) :: pid
def start(name) do
case Tock.Supervisor.start_server([name: name]) do
{ :ok, pid } ->
Tock.Server.join(pid)
{ :error, { :already_started, pid } } ->
Tock.Server.join(pid)
end
end
@doc """
Allows `fun` on `module` with an arity defined by `code` to be invoked zero or
more times.
If expectations and stubs are defined for the same `module`, `fun` and arity
the stub is invoked after all expectations are fulfilled.
## Examples
Allow `MyRemoteMod` to be invoked zero or more times:
stub(MyRemoteTaskSupervisor, MyRemoteMod, :add, fn(x, y) -> x + y end)
`stub/4` will overwrite any previous calls to `stub/4`.
"""
@spec stub(atom | pid, module, atom, function) :: pid
def stub(tock, module, fun, code) do
signature = { module, fun, :erlang.fun_info(code)[:arity] }
expects = { signature, [{ :stub, code }] }
Tock.Server.put_expectation(tock, expects)
end
end
|
lib/tock.ex
| 0.914123
| 0.620406
|
tock.ex
|
starcoder
|
defmodule RDF.Turtle.Encoder.State do
@moduledoc false
alias RDF.{BlankNode, Description}
def start_link(data, base, prefixes) do
Agent.start_link(fn -> %{data: data, base: base, prefixes: prefixes} end)
end
def stop(state) do
Agent.stop(state)
end
def data(state), do: Agent.get(state, & &1.data)
def base(state), do: Agent.get(state, & &1.base)
def prefixes(state), do: Agent.get(state, & &1.prefixes)
def list_nodes(state), do: Agent.get(state, & &1.list_nodes)
def bnode_ref_counter(state), do: Agent.get(state, & &1.bnode_ref_counter)
def bnode_ref_counter(state, bnode) do
bnode_ref_counter(state) |> Map.get(bnode, 0)
end
def base_iri(state) do
with {:ok, base} <- base(state) do
RDF.iri(base)
else
_ -> nil
end
end
def list_values(head, state), do: Agent.get(state, & &1.list_values[head])
def preprocess(state) do
with data = data(state),
{bnode_ref_counter, list_parents} = bnode_info(data),
{list_nodes, list_values} = valid_lists(list_parents, bnode_ref_counter, data) do
Agent.update(state, &Map.put(&1, :bnode_ref_counter, bnode_ref_counter))
Agent.update(state, &Map.put(&1, :list_nodes, list_nodes))
Agent.update(state, &Map.put(&1, :list_values, list_values))
end
end
defp bnode_info(data) do
data
|> RDF.Data.descriptions()
|> Enum.reduce(
{%{}, %{}},
fn %Description{subject: subject} = description, {bnode_ref_counter, list_parents} ->
list_parents =
if match?(%BlankNode{}, subject) and
to_list?(description, Map.get(bnode_ref_counter, subject, 0)),
do: Map.put_new(list_parents, subject, nil),
else: list_parents
Enum.reduce(description.predications, {bnode_ref_counter, list_parents}, fn
{predicate, objects}, {bnode_ref_counter, list_parents} ->
Enum.reduce(Map.keys(objects), {bnode_ref_counter, list_parents}, fn
%BlankNode{} = object, {bnode_ref_counter, list_parents} ->
{
# Note: The following conditional produces imprecise results
# (sometimes the occurrence in the subject counts, sometimes it doesn't),
# but is sufficient for the current purpose of handling the
# case of a statement with the same subject and object bnode.
Map.update(
bnode_ref_counter,
object,
if(subject == object, do: 2, else: 1),
&(&1 + 1)
),
if predicate == RDF.rest() do
Map.put_new(list_parents, object, subject)
else
list_parents
end
}
_, {bnode_ref_counter, list_parents} ->
{bnode_ref_counter, list_parents}
end)
end)
end
)
end
@list_properties MapSet.new([
RDF.Utils.Bootstrapping.rdf_iri("first"),
RDF.Utils.Bootstrapping.rdf_iri("rest")
])
@dialyzer {:nowarn_function, to_list?: 2}
defp to_list?(%Description{} = description, 1) do
Description.count(description) == 2 and
Description.predicates(description) |> MapSet.equal?(@list_properties)
end
defp to_list?(%Description{} = description, 0),
do: RDF.list?(description)
defp to_list?(_, _),
do: false
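  # A blank-node list can only be rendered with Turtle's collection syntax
  # when every node in it is a blank node referenced at most once; collect
  # those valid list nodes and precompute the values of each valid list.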
defp valid_lists(list_parents, bnode_ref_counter, data) do
head_nodes = for {list_node, nil} <- list_parents, do: list_node
all_list_nodes =
MapSet.new(
for {list_node, _} <- list_parents, Map.get(bnode_ref_counter, list_node, 0) < 2 do
list_node
end
)
Enum.reduce(head_nodes, {MapSet.new(), %{}}, fn head_node, {valid_list_nodes, list_values} ->
with list when not is_nil(list) <-
RDF.List.new(head_node, data),
list_nodes = RDF.List.nodes(list),
true <-
Enum.all?(list_nodes, fn
%BlankNode{} = list_node ->
MapSet.member?(all_list_nodes, list_node)
_ ->
false
end) do
{
Enum.reduce(list_nodes, valid_list_nodes, fn list_node, valid_list_nodes ->
MapSet.put(valid_list_nodes, list_node)
end),
Map.put(list_values, head_node, RDF.List.values(list))
}
else
_ -> {valid_list_nodes, list_values}
end
end)
end
end
|
lib/rdf/serializations/turtle_encoder_state.ex
| 0.63023
| 0.553686
|
turtle_encoder_state.ex
|
starcoder
|
defmodule RF69.Frequency do
@moduledoc false
import RF69.Util, only: [write_reg: 3]
  @type t() :: 314 | 315 | 316 | 433 | 434 | 435 | 863..870 | 902..927
  @frequencies [314, 315, 316, 433, 434, 435] ++ Enum.to_list(863..870) ++ Enum.to_list(902..927)

  # Generate one set_frequency/1 clause per supported frequency at compile
  # time. Each clause writes the three FRF registers whose constant names
  # embed the frequency, e.g. :FRFMSB_915 / :FRFMID_915 / :FRFLSB_915.
  for freq <- @frequencies do
    msb = :"FRFMSB_#{freq}"
    mid = :"FRFMID_#{freq}"
    lsb = :"FRFLSB_#{freq}"

    def set_frequency(%{frequency: unquote(freq)} = rf69) do
      rf69
      |> write_reg(:FRFMSB, unquote(msb))
      |> write_reg(:FRFMID, unquote(mid))
      |> write_reg(:FRFLSB, unquote(lsb))
    end
  end
def set_frequency(%{frequency: freq}) do
raise "Unknown frequency: #{freq}"
end
end
|
lib/rf69/frequency.ex
| 0.656878
| 0.454048
|
frequency.ex
|
starcoder
|
defmodule Delaunay.Utils do
use Bitwise
@moduledoc """
Documentation for Delaunay.Utils
"""
@doc """
pseudoAngle: Monotonically increases with real angle, but doesn't need expensive trigonometry
"""
def pseudoAngle(dx, dy) do
p = dx / (abs(dx) + abs(dy))
(if (dy > 0), do: 3 - p, else: 1 + p) / 4 # [0..1]
end
  @doc """
  Squared Euclidean distance between points `(ax, ay)` and `(bx, by)`.
  """
def dist(ax, ay, bx, by) do
dx = ax - bx
dy = ay - by
dx * dx + dy * dy
end
  @doc """
  Orientation test for the triangle `(p, q, r)`; returns `true` for a counterclockwise turn.
  """
def orient(px, py, qx, qy, rx, ry) do
(qy - py) * (rx - qx) - (qx - px) * (ry - qy) < 0
end
  @doc """
  Incircle test: returns `true` when point `(px, py)` lies inside the circumcircle
  of the triangle `(a, b, c)`, for the orientation convention used by `orient/6`.
  """
def inCircle(ax, ay, bx, by, cx, cy, px, py) do
dx = ax - px
dy = ay - py
ex = bx - px
ey = by - py
fx = cx - px
fy = cy - py
ap = dx * dx + dy * dy
bp = ex * ex + ey * ey
cp = fx * fx + fy * fy
dx * (ey * cp - bp * fy) -
dy * (ex * cp - bp * fx) +
ap * (ex * fy - ey * fx) < 0
end
  @doc """
  Squared circumradius of the triangle `(a, b, c)`.
  """
def circumradius(ax, ay, bx, by, cx, cy) do
dx = bx - ax
dy = by - ay
ex = cx - ax
ey = cy - ay
bl = dx * dx + dy * dy
cl = ex * ex + ey * ey
d = if ((dx * ey - dy * ex) == 0), do: :math.pow(2, 50), else: 0.5 / (dx * ey - dy * ex)
x = (ey * bl - dy * cl) * d
y = (dx * cl - ex * bl) * d
x * x + y * y
end
  @doc """
  Circumcenter `{x, y}` of the triangle `(a, b, c)`.
  """
def circumcenter(ax, ay, bx, by, cx, cy) do
dx = bx - ax
dy = by - ay
ex = cx - ax
ey = cy - ay
bl = dx * dx + dy * dy
cl = ex * ex + ey * ey
d = if ((dx * ey - dy * ex) == 0), do: :math.pow(2, 50), else: 0.5 / (dx * ey - dy * ex)
x = ax + (ey * bl - dy * cl) * d
y = ay + (dx * cl - ex * bl) * d
{x, y}
end
  @doc """
  Sorts the index list `ids` by the corresponding values in `dists`, using
  insertion sort for ranges of 20 elements or fewer and quicksort with
  median-of-three pivot selection otherwise.
  """
def quicksort(ids, dists, left, right) do
if (right - left <= 20) do
List.foldl(
((left + 1)..right)
|> Enum.to_list,
ids,
fn i, ids ->
tmp = ids
|> Enum.at(i)
tmp_dist = dists
|> Enum.at(tmp)
swipeRight(ids, dists, tmp_dist, left, i - 1)
|> (
fn {j, l} ->
l
|> List.replace_at(j + 1, tmp)
end).()
end
)
else
median = (left + right)
|> bsr(1)
i = left + 1
new_ids = ids
|> swap(median, i)
|> (fn l -> if (dist_x(l, dists, left) > dist_x(l, dists, right)) do
l
|> swap(left, right)
else
l
end
end).()
|> (fn l -> if (dist_x(l, dists, i) > dist_x(l, dists, right)) do
l
|> swap(i, right)
else
l
end
end).()
|> (fn l -> if (dist_x(l, dists, left) > dist_x(l, dists, i)) do
l
|> swap(left, i)
else
l
end
end).()
tmp = new_ids
|> Enum.at(i)
tmp_dist = dists
|> Enum.at(tmp)
new_ids
|> swapIds(dists, tmp_dist, i + 1, right - 1)
|> (fn {i, j, l} ->
{
i,
j,
l
|> List.replace_at(
left + 1,
l
|> Enum.at(j)
)
|> List.replace_at(j, tmp)
}
end).()
|> (fn {i, j, l} ->
if (right - i + 1 >= j - left) do
l
|> quicksort(dists, i, right)
|> quicksort(dists, left, j - 1)
else
l
|> quicksort(dists, left, j - 1)
|> quicksort(dists, i, right)
end
end).()
end
end
  @doc """
  Looks up the distance value for the id stored at position `x` of `ids`.
  """
def dist_x(ids, dists, x) do
dists
|> Enum.at(
ids
|> Enum.at(x)
)
end
  @doc """
  Quicksort partition helper: moves `i` right past ids closer than `tmp_dist`
  and `j` left past ids farther than `tmp_dist`, swapping out-of-place pairs
  until the pointers cross.
  """
def swapIds(ids, dists, tmp_dist, i, j) do
cond do
dist_x(ids, dists, i) < tmp_dist ->
ids
|> swapIds(dists, tmp_dist, i + 1, j)
dist_x(ids, dists, j) > tmp_dist ->
ids
|> swapIds(dists, tmp_dist, i, j - 1)
j < i ->
{i, j, ids}
true ->
ids
|> swap(i, j)
|> swapIds(dists, tmp_dist, i, j)
end
end
  @doc """
  Insertion-sort inner loop: shifts ids one position to the right while their
  distance exceeds `tmp_dist`, returning the insertion index and the updated list.
  """
def swipeRight(ids, dists, tmp_dist, left, j) do
if (j >= left && Enum.at(dists, Enum.at(ids, j)) > tmp_dist) do
swipeRight(
ids
|> List.replace_at(
j + 1,
ids
|> Enum.at(j)
),
dists,
tmp_dist,
left,
j - 1
)
else
{j, ids}
end
end
  @doc """
  Swaps the elements at indices `i` and `j` of `list`.
  """
def swap(list, i, j) do
tmp = list
|> Enum.at(i)
list
|> List.replace_at(
i,
(
list
|> Enum.at(j))
)
|> List.replace_at(j, tmp)
end
  @doc """
  Default accessor for the x coordinate of an `{x, y}` point.
  """
def defaultGetX({x, _y}) do
x
end
  @doc """
  Default accessor for the y coordinate of an `{x, y}` point.
  """
def defaultGetY({_x, y}) do
y
end
end
|
lib/delaunay/utils.ex
| 0.620852
| 0.468365
|
utils.ex
|
starcoder
|
defmodule AstraeaVirgoWeb.OrganizationView do
use AstraeaVirgoWeb, :view
@moduledoc """
Response for Organization API
"""
@doc """
Response
## index.json
Response for index Organization API: `GET /api/organizations`
Response: list of Object
  | field       | type   | required | null | description      |
  |-------------|--------|----------|------|------------------|
  | id          | ID     | yes      | no   |                  |
  | name        | string | yes      | no   | name             |
  | formal_name | string | no       | yes  | formal name      |
  | url         | URL    | no       | yes  | official website |
  | logo        | File   | no       | yes  |                  |
Example:
```json
[
{
"id": "CN",
"name": "China",
"formal_name": "People's Republic of China",
"url": "http://www.gov.cn/",
"logo": {
"href": "https://upload.wikimedia.org/wikipedia/commons/f/fa/Flag_of_the_People%27s_Republic_of_China.svg",
"mime": "image/svg+xml"
}
},
{
"id": "TW",
"name": "Taiwan",
"formal_name": "Taiwan, Province of China",
"logo": {
"href": "https://upload.wikimedia.org/wikipedia/commons/7/72/Flag_of_the_Republic_of_China.svg",
"mime": "image/svg+xml"
}
}
]
```
## show.json
Response for show Organization API:
- `GET /api/organizations/<organization_id>`
- `PUT /api/organization/<organization_id>`
Response: Object
  | field       | type   | required | null | description      |
  |-------------|--------|----------|------|------------------|
  | id          | ID     | yes      | no   |                  |
  | name        | string | yes      | no   | name             |
  | formal_name | string | no       | yes  | formal name      |
  | url         | URL    | no       | yes  | official website |
  | logo        | File   | no       | yes  |                  |
Example:
```json
{
"id": "CN",
"name": "China",
"formal_name": "People's Republic of China",
"url": "http://www.gov.cn/",
"logo": {
"href": "https://upload.wikimedia.org/wikipedia/commons/f/fa/Flag_of_the_People%27s_Republic_of_China.svg",
"mime": "image/svg+xml"
}
}
```
## create.json
Response for create Organization API: `POST /api/organizations`
Response: Object
  | field           | type | required | null | description            |
  |-----------------|------|----------|------|------------------------|
  | organization_id | ID   | yes      | no   | ID of the organization |
Example:
```json
{"organization_id": "CN"}
```
"""
def render("index.json", assigns), do: assigns.data
def render("show.json", assigns), do: assigns.data
def render("create.json", assigns) do
%{
organization_id: assigns.organization_id
}
end
end
|
lib/virgo_web/views/organization_view.ex
| 0.73077
| 0.660866
|
organization_view.ex
|
starcoder
|